Merge "Clarify error message for bootclasspath whitelist."
diff --git a/core/Makefile b/core/Makefile
index d020335..8452494 100644
--- a/core/Makefile
+++ b/core/Makefile
@@ -4,15 +4,6 @@
# intermedites-dir-for
LOCAL_PATH := $(BUILD_SYSTEM)
-# Pick a reasonable string to use to identify files.
-ifneq (,$(filter eng.%,$(BUILD_NUMBER)))
- # BUILD_NUMBER has a timestamp in it, which means that
- # it will change every time. Pick a stable value.
- FILE_NAME_TAG := eng.$(USER)
-else
- FILE_NAME_TAG := $(BUILD_NUMBER)
-endif
-
# -----------------------------------------------------------------
# Define rules to copy PRODUCT_COPY_FILES defined by the product.
# PRODUCT_COPY_FILES contains words like <source file>:<dest file>[:<owner>].
@@ -238,28 +229,37 @@
# The string used to uniquely identify the combined build and product; used by the OTA server.
ifeq (,$(strip $(BUILD_FINGERPRINT)))
- ifneq ($(filter eng.%,$(BUILD_NUMBER)),)
- BF_BUILD_NUMBER := $(USER)$(shell $(DATE) +%m%d%H%M)
+ ifeq ($(strip $(HAS_BUILD_NUMBER)),false)
+ BF_BUILD_NUMBER := $(USER)$$($(DATE_FROM_FILE) +%m%d%H%M)
else
- BF_BUILD_NUMBER := $(BUILD_NUMBER)
+ BF_BUILD_NUMBER := $(file <$(BUILD_NUMBER_FILE))
endif
BUILD_FINGERPRINT := $(PRODUCT_BRAND)/$(TARGET_PRODUCT)/$(TARGET_DEVICE):$(PLATFORM_VERSION)/$(BUILD_ID)/$(BF_BUILD_NUMBER):$(TARGET_BUILD_VARIANT)/$(BUILD_VERSION_TAGS)
endif
-ifneq ($(words $(BUILD_FINGERPRINT)),1)
- $(error BUILD_FINGERPRINT cannot contain spaces: "$(BUILD_FINGERPRINT)")
-endif
+# unset it for safety.
+BF_BUILD_NUMBER :=
-$(shell mkdir -p $(PRODUCT_OUT) && echo $(BUILD_FINGERPRINT) > $(PRODUCT_OUT)/build_fingerprint.txt)
-BUILD_FINGERPRINT_FROM_FILE := $$(cat $(PRODUCT_OUT)/build_fingerprint.txt)
+BUILD_FINGERPRINT_FILE := $(PRODUCT_OUT)/build_fingerprint.txt
+ifneq (,$(shell mkdir -p $(PRODUCT_OUT) && echo $(BUILD_FINGERPRINT) >$(BUILD_FINGERPRINT_FILE) && grep " " $(BUILD_FINGERPRINT_FILE)))
+ $(error BUILD_FINGERPRINT cannot contain spaces: "$(file <$(BUILD_FINGERPRINT_FILE))")
+endif
+BUILD_FINGERPRINT_FROM_FILE := $$(cat $(BUILD_FINGERPRINT_FILE))
+# unset it for safety.
+BUILD_FINGERPRINT :=
# The string used to uniquely identify the system build; used by the OTA server.
# This purposefully excludes any product-specific variables.
ifeq (,$(strip $(BUILD_THUMBPRINT)))
- BUILD_THUMBPRINT := $(PLATFORM_VERSION)/$(BUILD_ID)/$(BUILD_NUMBER):$(TARGET_BUILD_VARIANT)/$(BUILD_VERSION_TAGS)
+ BUILD_THUMBPRINT := $(PLATFORM_VERSION)/$(BUILD_ID)/$(BUILD_NUMBER_FROM_FILE):$(TARGET_BUILD_VARIANT)/$(BUILD_VERSION_TAGS)
endif
-ifneq ($(words $(BUILD_THUMBPRINT)),1)
- $(error BUILD_THUMBPRINT cannot contain spaces: "$(BUILD_THUMBPRINT)")
+
+BUILD_THUMBPRINT_FILE := $(PRODUCT_OUT)/build_thumbprint.txt
+ifneq (,$(shell mkdir -p $(PRODUCT_OUT) && echo $(BUILD_THUMBPRINT) >$(BUILD_THUMBPRINT_FILE) && grep " " $(BUILD_THUMBPRINT_FILE)))
+ $(error BUILD_THUMBPRINT cannot contain spaces: "$(file <$(BUILD_THUMBPRINT_FILE))")
endif
+BUILD_THUMBPRINT_FROM_FILE := $$(cat $(BUILD_THUMBPRINT_FILE))
+# unset it for safety.
+BUILD_THUMBPRINT :=
KNOWN_OEM_THUMBPRINT_PROPERTIES := \
ro.product.brand \
@@ -347,7 +347,7 @@
PLATFORM_VERSION_ALL_CODENAMES="$(PLATFORM_VERSION_ALL_CODENAMES)" \
BUILD_VERSION_TAGS="$(BUILD_VERSION_TAGS)" \
BUILD_FINGERPRINT="$(BUILD_FINGERPRINT_FROM_FILE)" \
- $(if $(OEM_THUMBPRINT_PROPERTIES),BUILD_THUMBPRINT="$(BUILD_THUMBPRINT)") \
+ $(if $(OEM_THUMBPRINT_PROPERTIES),BUILD_THUMBPRINT="$(BUILD_THUMBPRINT_FROM_FILE)") \
TARGET_CPU_ABI_LIST="$(TARGET_CPU_ABI_LIST)" \
TARGET_CPU_ABI_LIST_32_BIT="$(TARGET_CPU_ABI_LIST_32_BIT)" \
TARGET_CPU_ABI_LIST_64_BIT="$(TARGET_CPU_ABI_LIST_64_BIT)" \
@@ -2480,7 +2480,7 @@
# $1: root directory
# $2: add prefix
define fs_config
-(cd $(1); find . -type d | sed 's,$$,/,'; find . \! -type d) | cut -c 3- | sort | sed 's,^,$(2),' | $(HOST_OUT_EXECUTABLES)/fs_config -C -D $(TARGET_OUT) -S $(SELINUX_FC)
+(cd $(1); find . -type d | sed 's,$$,/,'; find . \! -type d) | cut -c 3- | sort | sed 's,^,$(2),' | $(HOST_OUT_EXECUTABLES)/fs_config -C -D $(TARGET_OUT) -S $(SELINUX_FC) -R "$(2)"
endef
# Depending on the various images guarantees that the underlying
@@ -3230,3 +3230,9 @@
ifneq ($(sdk_repo_goal),)
include $(TOPDIR)development/build/tools/sdk_repo.mk
endif
+
+#------------------------------------------------------------------
+# Find lsdump paths
+FIND_LSDUMPS_FILE := $(PRODUCT_OUT)/lsdump_paths.txt
+$(FIND_LSDUMPS_FILE) : $(LSDUMP_PATHS)
+ $(hide) rm -rf $@ && echo "$^" > $@
diff --git a/core/clear_vars.mk b/core/clear_vars.mk
index c3694ab2..baa2344 100644
--- a/core/clear_vars.mk
+++ b/core/clear_vars.mk
@@ -196,6 +196,7 @@
LOCAL_PREBUILT_OBJ_FILES:=
LOCAL_PREBUILT_STATIC_JAVA_LIBRARIES:=
LOCAL_PREBUILT_STRIP_COMMENTS:=
+LOCAL_PRIVATE_PLATFORM_APIS:=
LOCAL_PRIVILEGED_MODULE:=
# '',full,custom,disabled,obfuscation,optimization
LOCAL_PRODUCT_MODULE:=
@@ -239,6 +240,8 @@
LOCAL_SOONG_PROGUARD_DICT :=
LOCAL_SOONG_RESOURCE_EXPORT_PACKAGE :=
LOCAL_SOONG_RRO_DIRS :=
+LOCAL_DROIDDOC_STUBS_JAR :=
+LOCAL_DROIDDOC_DOC_ZIP :=
# '',true
LOCAL_SOURCE_FILES_ALL_GENERATED:=
LOCAL_SRC_FILES:=
diff --git a/core/config.mk b/core/config.mk
index 4942be7..7448623 100644
--- a/core/config.mk
+++ b/core/config.mk
@@ -89,15 +89,6 @@
dist_goal := $(strip $(filter dist,$(MAKECMDGOALS)))
MAKECMDGOALS := $(strip $(filter-out dist,$(MAKECMDGOALS)))
-# Tell python not to spam the source tree with .pyc files. This
-# only has an effect on python 2.6 and above.
-export PYTHONDONTWRITEBYTECODE := 1
-
-ifneq ($(filter --color=always, $(GREP_OPTIONS)),)
-$(warning The build system needs unmodified output of grep.)
-$(error Please remove --color=always from your $$GREP_OPTIONS)
-endif
-
UNAME := $(shell uname -sm)
SRC_TARGET_DIR := $(TOPDIR)build/target
@@ -580,6 +571,7 @@
SOONG_JAVAC_WRAPPER := $(SOONG_HOST_OUT_EXECUTABLES)/soong_javac_wrapper
SOONG_ZIP := $(SOONG_HOST_OUT_EXECUTABLES)/soong_zip
MERGE_ZIPS := $(SOONG_HOST_OUT_EXECUTABLES)/merge_zips
+XMLLINT := $(SOONG_HOST_OUT_EXECUTABLES)/xmllint
ZIP2ZIP := $(SOONG_HOST_OUT_EXECUTABLES)/zip2zip
ZIPTIME := $(prebuilt_build_tools_bin)/ziptime
@@ -893,12 +885,6 @@
$(TARGET_2ND_ARCH_VAR_PREFIX)DEX2OAT_TARGET_INSTRUCTION_SET_FEATURES := default
endif
-# These will come from Soong, drop the environment versions
-unexport CLANG
-unexport CLANG_CXX
-unexport CCC_CC
-unexport CCC_CXX
-
# ###############################################################
# Collect a list of the SDK versions that we could compile against
# For use with the LOCAL_SDK_VERSION variable for include $(BUILD_PACKAGE)
diff --git a/core/definitions.mk b/core/definitions.mk
index 2f3f356..f6f5840 100644
--- a/core/definitions.mk
+++ b/core/definitions.mk
@@ -2686,9 +2686,9 @@
# $(1): source file
# $(2): destination file, must end with .xml.
define copy-xml-file-checked
-$(2): $(1)
+$(2): $(1) $(XMLLINT)
@echo "Copy xml: $$@"
- $(hide) xmllint $$< >/dev/null # Don't print the xml file to stdout.
+ $(hide) $(XMLLINT) $$< >/dev/null # Don't print the xml file to stdout.
$$(copy-file-to-target)
endef
diff --git a/core/dex_preopt_libart_boot.mk b/core/dex_preopt_libart_boot.mk
index 8db9428..a5e7e88 100644
--- a/core/dex_preopt_libart_boot.mk
+++ b/core/dex_preopt_libart_boot.mk
@@ -73,6 +73,23 @@
# Note: this is technically incorrect. Compiled code contains stack checks which may depend
# on ASAN settings.
+# Use ANDROID_LOG_TAGS to suppress most logging by default...
+ifeq (,$(ART_BOOT_IMAGE_EXTRA_ARGS))
+DEX2OAT_BOOT_IMAGE_LOG_TAGS := ANDROID_LOG_TAGS="*:e"
+else
+# ...unless the boot image is generated specifically for testing, then allow all logging.
+DEX2OAT_BOOT_IMAGE_LOG_TAGS := ANDROID_LOG_TAGS="*:v"
+endif
+
+# An additional message to print on dex2oat failure.
+DEX2OAT_FAILURE_MESSAGE := ERROR: Dex2oat failed to compile a boot image.
+DEX2OAT_FAILURE_MESSAGE += It is likely that the boot classpath is inconsistent.
+ifeq ($(ONE_SHOT_MAKEFILE),)
+ DEX2OAT_FAILURE_MESSAGE += Rebuild with ART_BOOT_IMAGE_EXTRA_ARGS="--runtime-arg -verbose:verifier" to see verification errors.
+else
+ DEX2OAT_FAILURE_MESSAGE += Build with m, mma, or mmma instead of mm or mmm to remedy the situation.
+endif
+
$($(my_2nd_arch_prefix)DEFAULT_DEX_PREOPT_BUILT_IMAGE_FILENAME): PRIVATE_BOOT_IMAGE_FLAGS := $(my_boot_image_flags)
$($(my_2nd_arch_prefix)DEFAULT_DEX_PREOPT_BUILT_IMAGE_FILENAME): PRIVATE_2ND_ARCH_VAR_PREFIX := $(my_2nd_arch_prefix)
$($(my_2nd_arch_prefix)DEFAULT_DEX_PREOPT_BUILT_IMAGE_FILENAME): PRIVATE_IMAGE_LOCATION := $($(my_2nd_arch_prefix)DEFAULT_DEX_PREOPT_BUILT_IMAGE_LOCATION)
@@ -85,7 +102,7 @@
@rm -f $(dir $($(PRIVATE_2ND_ARCH_VAR_PREFIX)LIBART_TARGET_BOOT_OAT_UNSTRIPPED))/*.art
@rm -f $(dir $($(PRIVATE_2ND_ARCH_VAR_PREFIX)LIBART_TARGET_BOOT_OAT_UNSTRIPPED))/*.oat
@rm -f $(dir $($(PRIVATE_2ND_ARCH_VAR_PREFIX)LIBART_TARGET_BOOT_OAT_UNSTRIPPED))/*.art.rel
- $(hide) ANDROID_LOG_TAGS="*:e" $(DEX2OAT) --runtime-arg -Xms$(DEX2OAT_IMAGE_XMS) \
+ $(hide) $(DEX2OAT_BOOT_IMAGE_LOG_TAGS) $(DEX2OAT) --runtime-arg -Xms$(DEX2OAT_IMAGE_XMS) \
--runtime-arg -Xmx$(DEX2OAT_IMAGE_XMX) \
$(PRIVATE_BOOT_IMAGE_FLAGS) \
$(addprefix --dex-file=,$(LIBART_TARGET_BOOT_DEX_FILES)) \
@@ -103,11 +120,12 @@
--multi-image --no-inline-from=core-oj.jar \
--abort-on-hard-verifier-error \
--abort-on-soft-verifier-error \
- $(PRODUCT_DEX_PREOPT_BOOT_FLAGS) $(GLOBAL_DEXPREOPT_FLAGS) $(ART_BOOT_IMAGE_EXTRA_ARGS) && \
- ANDROID_ROOT=$(PRODUCT_OUT)/system ANDROID_DATA=$(dir $@) $(PATCHOAT) \
- --input-image-location=$(PRIVATE_IMAGE_LOCATION) \
- --output-image-relocation-file=$@.rel \
- --instruction-set=$($(PRIVATE_2ND_ARCH_VAR_PREFIX)DEX2OAT_TARGET_ARCH) \
- --base-offset-delta=0x10000000
+ $(PRODUCT_DEX_PREOPT_BOOT_FLAGS) $(GLOBAL_DEXPREOPT_FLAGS) $(ART_BOOT_IMAGE_EXTRA_ARGS) \
+ || ( echo "$(DEX2OAT_FAILURE_MESSAGE)" ; false ) && \
+ $(DEX2OAT_BOOT_IMAGE_LOG_TAGS) ANDROID_ROOT=$(PRODUCT_OUT)/system ANDROID_DATA=$(dir $@) $(PATCHOAT) \
+ --input-image-location=$(PRIVATE_IMAGE_LOCATION) \
+ --output-image-relocation-directory=$(dir $@) \
+ --instruction-set=$($(PRIVATE_2ND_ARCH_VAR_PREFIX)DEX2OAT_TARGET_ARCH) \
+ --base-offset-delta=0x10000000
endif
diff --git a/core/dex_preopt_odex_install.mk b/core/dex_preopt_odex_install.mk
index 3a943bb..2b2800b 100644
--- a/core/dex_preopt_odex_install.mk
+++ b/core/dex_preopt_odex_install.mk
@@ -139,8 +139,10 @@
--dex-location=$(PRIVATE_DEX_LOCATION) \
--reference-profile-file=$@
dex_preopt_profile_src_file:=
-# Remove compressed APK extension.
+
+# Remove compressed APK extension.
my_installed_profile := $(patsubst %.gz,%,$(LOCAL_INSTALLED_MODULE)).prof
+
# my_installed_profile := $(LOCAL_INSTALLED_MODULE).prof
$(eval $(call copy-one-file,$(my_built_profile),$(my_installed_profile)))
build_installed_profile:=$(my_built_profile):$(my_installed_profile)
@@ -244,12 +246,39 @@
# For non system server jars, use speed-profile when we have a profile.
LOCAL_DEX_PREOPT_FLAGS += --compiler-filter=speed-profile
else
- LOCAL_DEX_PREOPT_FLAGS += --compiler-filter=$(my_default_compiler_filter)
+ LOCAL_DEX_PREOPT_FLAGS += --compiler-filter=$(my_default_compiler_filter)
endif
endif
endif
endif
+my_generate_dm := $(PRODUCT_DEX_PREOPT_GENERATE_DM_FILES)
+ifeq (,$(filter $(LOCAL_DEX_PREOPT_FLAGS),--compiler-filter=verify))
+# Generating DM files only makes sense for verify, avoid doing for non verify compiler filter APKs.
+my_generate_dm := false
+endif
+
+# No reason to use a dm file if the dex is already uncompressed.
+ifeq ($(LOCAL_UNCOMPRESS_DEX),true)
+my_generate_dm := false
+endif
+
+ifeq (true,$(my_generate_dm))
+LOCAL_DEX_PREOPT_FLAGS += --copy-dex-files=false
+LOCAL_DEX_PREOPT := nostripping
+my_built_dm := $(dir $(LOCAL_BUILT_MODULE))generated.dm
+my_installed_dm := $(patsubst %.apk,%,$(LOCAL_INSTALLED_MODULE)).dm
+my_copied_vdex := $(dir $(LOCAL_BUILT_MODULE))primary.vdex
+$(eval $(call copy-one-file,$(built_vdex),$(my_copied_vdex)))
+$(my_built_dm): PRIVATE_INPUT_VDEX := $(my_copied_vdex)
+$(my_built_dm): $(my_copied_vdex) $(ZIPTIME)
+ $(hide) mkdir -p $(dir $@)
+ $(hide) rm -f $@
+ $(hide) zip -qD -j -X -9 $@ $(PRIVATE_INPUT_VDEX)
+ $(ZIPTIME) $@
+$(eval $(call copy-one-file,$(my_built_dm),$(my_installed_dm)))
+endif
+
# PRODUCT_SYSTEM_SERVER_DEBUG_INFO overrides WITH_DEXPREOPT_DEBUG_INFO.
my_system_server_debug_info := $(PRODUCT_SYSTEM_SERVER_DEBUG_INFO)
ifeq (,$(filter eng, $(TARGET_BUILD_VARIANT)))
@@ -274,14 +303,26 @@
$(built_art): $(built_odex)
endif
-# Add the installed_odex to the list of installed files for this module.
-ALL_MODULES.$(my_register_name).INSTALLED += $(installed_odex)
-ALL_MODULES.$(my_register_name).INSTALLED += $(installed_vdex)
-ALL_MODULES.$(my_register_name).INSTALLED += $(installed_art)
+ifneq (true,$(my_generate_dm))
+ # Add the installed_odex to the list of installed files for this module if we aren't generating a
+ # dm file.
+ ALL_MODULES.$(my_register_name).INSTALLED += $(installed_odex)
+ ALL_MODULES.$(my_register_name).INSTALLED += $(installed_vdex)
+ ALL_MODULES.$(my_register_name).INSTALLED += $(installed_art)
-ALL_MODULES.$(my_register_name).BUILT_INSTALLED += $(built_installed_odex)
-ALL_MODULES.$(my_register_name).BUILT_INSTALLED += $(built_installed_vdex)
-ALL_MODULES.$(my_register_name).BUILT_INSTALLED += $(built_installed_art)
+ ALL_MODULES.$(my_register_name).BUILT_INSTALLED += $(built_installed_odex)
+ ALL_MODULES.$(my_register_name).BUILT_INSTALLED += $(built_installed_vdex)
+ ALL_MODULES.$(my_register_name).BUILT_INSTALLED += $(built_installed_art)
+
+ # Make sure to install the .odex and .vdex when you run "make <module_name>"
+ $(my_all_targets): $(installed_odex) $(installed_vdex) $(installed_art)
+else
+ ALL_MODULES.$(my_register_name).INSTALLED += $(my_installed_dm)
+  ALL_MODULES.$(my_register_name).BUILT_INSTALLED += $(my_built_dm):$(my_installed_dm)
+
+ # Make sure to install the .dm when you run "make <module_name>"
+  $(my_all_targets): $(my_installed_dm)
+endif
# Record dex-preopt config.
DEXPREOPT.$(LOCAL_MODULE).DEX_PREOPT := $(LOCAL_DEX_PREOPT)
@@ -295,10 +336,6 @@
DEXPREOPT.MODULES.$(LOCAL_MODULE_CLASS) := $(sort \
$(DEXPREOPT.MODULES.$(LOCAL_MODULE_CLASS)) $(LOCAL_MODULE))
-
-# Make sure to install the .odex and .vdex when you run "make <module_name>"
-$(my_all_targets): $(installed_odex) $(installed_vdex) $(installed_art)
-
endif # LOCAL_DEX_PREOPT
# Profile doesn't depend on LOCAL_DEX_PREOPT.
diff --git a/core/host_dalvik_java_library.mk b/core/host_dalvik_java_library.mk
index 43fc780..20663d1 100644
--- a/core/host_dalvik_java_library.mk
+++ b/core/host_dalvik_java_library.mk
@@ -67,6 +67,8 @@
include $(BUILD_SYSTEM)/java_common.mk
+include $(BUILD_SYSTEM)/sdk_check.mk
+
$(cleantarget): PRIVATE_CLEAN_FILES += $(intermediates.COMMON)
# List of dependencies for anything that needs all java sources in place
diff --git a/core/java.mk b/core/java.mk
index f92cbca..dc20444 100644
--- a/core/java.mk
+++ b/core/java.mk
@@ -357,6 +357,8 @@
include $(BUILD_SYSTEM)/java_common.mk
+include $(BUILD_SYSTEM)/sdk_check.mk
+
$(LOCAL_INTERMEDIATE_TARGETS): PRIVATE_HAS_RS_SOURCES := $(if $(renderscript_sources),true)
$(LOCAL_INTERMEDIATE_TARGETS): PRIVATE_RS_SOURCE_INTERMEDIATES_DIR := $(intermediates.COMMON)/renderscript
@@ -381,7 +383,7 @@
# Make sure there's something to build.
ifdef full_classes_jar
ifndef need_compile_java
-$(error $(LOCAL_PATH): Target java module does not define any source or resource files)
+$(call pretty-error,Target java module does not define any source or resource files)
endif
endif
diff --git a/core/main.mk b/core/main.mk
index 0c165ca..0317a89 100644
--- a/core/main.mk
+++ b/core/main.mk
@@ -61,12 +61,23 @@
# when using ninja.
$(shell mkdir -p $(OUT_DIR) && \
echo -n $(BUILD_NUMBER) > $(OUT_DIR)/build_number.txt)
+BUILD_NUMBER_FILE := $(OUT_DIR)/build_number.txt
+
ifeq ($(HOST_OS),darwin)
DATE_FROM_FILE := date -r $(BUILD_DATETIME_FROM_FILE)
else
DATE_FROM_FILE := date -d @$(BUILD_DATETIME_FROM_FILE)
endif
+# Pick a reasonable string to use to identify files.
+ifeq ($(strip $(HAS_BUILD_NUMBER)),false)
+ # BUILD_NUMBER has a timestamp in it, which means that
+ # it will change every time. Pick a stable value.
+ FILE_NAME_TAG := eng.$(USER)
+else
+ FILE_NAME_TAG := $(file <$(BUILD_NUMBER_FILE))
+endif
+
# Make an empty directory, which can be used to make empty jars
EMPTY_DIRECTORY := $(OUT_DIR)/empty
$(shell mkdir -p $(EMPTY_DIRECTORY) && rm -rf $(EMPTY_DIRECTORY)/*)
@@ -1245,6 +1256,9 @@
.PHONY: findbugs
findbugs: $(INTERNAL_FINDBUGS_HTML_TARGET) $(INTERNAL_FINDBUGS_XML_TARGET)
+.PHONY: findlsdumps
+findlsdumps: $(FIND_LSDUMPS_FILE)
+
#xxx scrape this from ALL_MODULE_NAME_TAGS
.PHONY: modules
modules:
diff --git a/core/ninja_config.mk b/core/ninja_config.mk
index ca2dcee..2256f98 100644
--- a/core/ninja_config.mk
+++ b/core/ninja_config.mk
@@ -19,9 +19,7 @@
boottarball-nodeps \
brillo_tests \
btnod \
- build-art% \
build_kernel-nodeps \
- clean-oat% \
continuous_instrumentation_tests \
continuous_native_tests \
cts \
@@ -47,11 +45,9 @@
systemimage-nodeps \
systemtarball-nodeps \
target-files-package \
- test-art% \
user \
userdataimage \
userdebug \
- valgrind-test-art% \
vts \
win_sdk \
winsdk-tools
diff --git a/core/prebuilt_internal.mk b/core/prebuilt_internal.mk
index cb1d401..d4f5522 100644
--- a/core/prebuilt_internal.mk
+++ b/core/prebuilt_internal.mk
@@ -20,19 +20,19 @@
ifdef LOCAL_PREBUILT_MODULE_FILE
my_prebuilt_src_file := $(LOCAL_PREBUILT_MODULE_FILE)
+else ifdef LOCAL_SRC_FILES_$($(my_prefix)$(LOCAL_2ND_ARCH_VAR_PREFIX)ARCH)
+ my_prebuilt_src_file := $(LOCAL_PATH)/$(LOCAL_SRC_FILES_$($(my_prefix)$(LOCAL_2ND_ARCH_VAR_PREFIX)ARCH))
+ LOCAL_SRC_FILES_$($(my_prefix)$(LOCAL_2ND_ARCH_VAR_PREFIX)ARCH) :=
+else ifdef LOCAL_SRC_FILES_$(my_32_64_bit_suffix)
+ my_prebuilt_src_file := $(LOCAL_PATH)/$(LOCAL_SRC_FILES_$(my_32_64_bit_suffix))
+ LOCAL_SRC_FILES_$(my_32_64_bit_suffix) :=
+else ifdef LOCAL_SRC_FILES
+ my_prebuilt_src_file := $(LOCAL_PATH)/$(LOCAL_SRC_FILES)
+ LOCAL_SRC_FILES :=
+else ifdef LOCAL_REPLACE_PREBUILT_APK_INSTALLED
+ # This is handled specially below
else
- ifdef LOCAL_SRC_FILES_$($(my_prefix)$(LOCAL_2ND_ARCH_VAR_PREFIX)ARCH)
- my_prebuilt_src_file := $(LOCAL_PATH)/$(LOCAL_SRC_FILES_$($(my_prefix)$(LOCAL_2ND_ARCH_VAR_PREFIX)ARCH))
- LOCAL_SRC_FILES_$($(my_prefix)$(LOCAL_2ND_ARCH_VAR_PREFIX)ARCH) :=
- else
- ifdef LOCAL_SRC_FILES_$(my_32_64_bit_suffix)
- my_prebuilt_src_file := $(LOCAL_PATH)/$(LOCAL_SRC_FILES_$(my_32_64_bit_suffix))
- LOCAL_SRC_FILES_$(my_32_64_bit_suffix) :=
- else
- my_prebuilt_src_file := $(LOCAL_PATH)/$(LOCAL_SRC_FILES)
- LOCAL_SRC_FILES :=
- endif
- endif
+ $(call pretty-error,No source files specified)
endif
LOCAL_CHECKED_MODULE := $(my_prebuilt_src_file)
diff --git a/core/product.mk b/core/product.mk
index ce14853..6cccebf 100644
--- a/core/product.mk
+++ b/core/product.mk
@@ -134,6 +134,7 @@
PRODUCT_DEX_PREOPT_BOOT_FLAGS \
PRODUCT_DEX_PREOPT_PROFILE_DIR \
PRODUCT_DEX_PREOPT_BOOT_IMAGE_PROFILE_LOCATION \
+ PRODUCT_DEX_PREOPT_GENERATE_DM_FILES \
PRODUCT_USE_PROFILE_FOR_BOOT_IMAGE \
PRODUCT_SYSTEM_SERVER_COMPILER_FILTER \
PRODUCT_SANITIZER_MODULE_CONFIGS \
diff --git a/core/product_config.mk b/core/product_config.mk
index 0c46541..2620adb 100644
--- a/core/product_config.mk
+++ b/core/product_config.mk
@@ -396,6 +396,8 @@
$(strip $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_DEX_PREOPT_DEFAULT_COMPILER_FILTER))
PRODUCT_DEX_PREOPT_DEFAULT_FLAGS := \
$(strip $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_DEX_PREOPT_DEFAULT_FLAGS))
+PRODUCT_DEX_PREOPT_GENERATE_DM_FILES := \
+ $(strip $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_DEX_PREOPT_GENERATE_DM_FILES))
PRODUCT_DEX_PREOPT_BOOT_FLAGS := \
$(strip $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_DEX_PREOPT_BOOT_FLAGS))
PRODUCT_DEX_PREOPT_PROFILE_DIR := \
diff --git a/core/sdk_check.mk b/core/sdk_check.mk
new file mode 100644
index 0000000..49ea2a8
--- /dev/null
+++ b/core/sdk_check.mk
@@ -0,0 +1,32 @@
+
+# Enforcement checks that LOCAL_SDK_VERSION and LOCAL_PRIVATE_PLATFORM_APIS are
+# set correctly.
+# Should be included by java targets that allow specifying LOCAL_SDK_VERSION.
+# The JAVA_SDK_ENFORCEMENT_WARNING and JAVA_SDK_ENFORCEMENT_ERROR variables may
+# be set to a particular module class to enable warnings and errors for that
+# subtype.
+
+whitelisted_modules := framework-res__auto_generated_rro
+
+ifeq ($(LOCAL_SDK_VERSION)$(LOCAL_PRIVATE_PLATFORM_APIS),)
+ ifeq (,$(filter $(LOCAL_MODULE),$(whitelisted_modules)))
+ ifneq ($(JAVA_SDK_ENFORCEMENT_WARNING)$(JAVA_SDK_ENFORCEMENT_ERROR),)
+ my_message := Must specify LOCAL_SDK_VERSION or LOCAL_PRIVATE_PLATFORM_APIS,
+ ifeq ($(LOCAL_MODULE_CLASS),$(JAVA_SDK_ENFORCEMENT_ERROR))
+ $(call pretty-error,$(my_message))
+ endif
+ ifeq ($(LOCAL_MODULE_CLASS),$(JAVA_SDK_ENFORCEMENT_WARNING))
+ $(call pretty-warning,$(my_message))
+ endif
+ my_message :=
+ endif
+ endif
+else ifneq ($(LOCAL_SDK_VERSION),)
+ ifneq ($(LOCAL_PRIVATE_PLATFORM_APIS),)
+ my_message := Specifies both LOCAL_SDK_VERSION ($(LOCAL_SDK_VERSION)) and
+ my_message += LOCAL_PRIVATE_PLATFORM_APIS ($(LOCAL_PRIVATE_PLATFORM_APIS))
+ my_message += but should specify only one
+ $(call pretty-error,$(my_message))
+ my_message :=
+ endif
+endif
diff --git a/core/soong_app_prebuilt.mk b/core/soong_app_prebuilt.mk
index c553c4c..4c34215 100644
--- a/core/soong_app_prebuilt.mk
+++ b/core/soong_app_prebuilt.mk
@@ -34,13 +34,13 @@
$(intermediates.COMMON)/proguard_dictionary)
endif
-ifneq ($(TURBINE_DISABLED),false)
+ifneq ($(TURBINE_ENABLED),false)
ifdef LOCAL_SOONG_HEADER_JAR
$(eval $(call copy-one-file,$(LOCAL_SOONG_HEADER_JAR),$(full_classes_header_jar)))
else
$(eval $(call copy-one-file,$(full_classes_jar),$(full_classes_header_jar)))
endif
-endif # TURBINE_DISABLED != false
+endif # TURBINE_ENABLED != false
$(eval $(call copy-one-file,$(LOCAL_PREBUILT_MODULE_FILE),$(LOCAL_BUILT_MODULE)))
diff --git a/core/soong_config.mk b/core/soong_config.mk
index a084f79..8b2dfd1 100644
--- a/core/soong_config.mk
+++ b/core/soong_config.mk
@@ -42,6 +42,9 @@
$(call add_json_str, Make_suffix, -$(TARGET_PRODUCT))
+$(call add_json_str, BuildId, $(BUILD_ID))
+$(call add_json_str, BuildNumberFromFile, $$$(BUILD_NUMBER_FROM_FILE))
+
$(call add_json_val, Platform_sdk_version, $(PLATFORM_SDK_VERSION))
$(call add_json_csv, Platform_version_active_codenames, $(PLATFORM_VERSION_ALL_CODENAMES))
$(call add_json_csv, Platform_version_future_codenames, $(PLATFORM_VERSION_FUTURE_CODENAMES))
diff --git a/core/soong_java_prebuilt.mk b/core/soong_java_prebuilt.mk
index f3ed376..7d32e48 100644
--- a/core/soong_java_prebuilt.mk
+++ b/core/soong_java_prebuilt.mk
@@ -23,6 +23,15 @@
$(eval $(call copy-one-file,$(LOCAL_PREBUILT_MODULE_FILE),$(full_classes_jar)))
$(eval $(call copy-one-file,$(LOCAL_PREBUILT_MODULE_FILE),$(full_classes_pre_proguard_jar)))
+ifdef LOCAL_DROIDDOC_STUBS_JAR
+$(eval $(call copy-one-file,$(LOCAL_DROIDDOC_STUBS_JAR),$(OUT_DOCS)/$(LOCAL_MODULE)-stubs.srcjar))
+ALL_DOCS += $(OUT_DOCS)/$(LOCAL_MODULE)-stubs.srcjar
+endif
+
+ifdef LOCAL_DROIDDOC_DOC_ZIP
+$(eval $(call copy-one-file,$(LOCAL_DROIDDOC_DOC_ZIP),$(OUT_DOCS)/$(LOCAL_MODULE)-docs.zip))
+endif
+
ifdef LOCAL_SOONG_JACOCO_REPORT_CLASSES_JAR
$(eval $(call copy-one-file,$(LOCAL_SOONG_JACOCO_REPORT_CLASSES_JAR),\
$(intermediates.COMMON)/jacoco-report-classes.jar))
@@ -30,13 +39,13 @@
$(intermediates.COMMON)/jacoco-report-classes.jar)
endif
-ifneq ($(TURBINE_DISABLED),false)
+ifneq ($(TURBINE_ENABLED),false)
ifdef LOCAL_SOONG_HEADER_JAR
$(eval $(call copy-one-file,$(LOCAL_SOONG_HEADER_JAR),$(full_classes_header_jar)))
else
$(eval $(call copy-one-file,$(full_classes_jar),$(full_classes_header_jar)))
endif
-endif # TURBINE_DISABLED != false
+endif # TURBINE_ENABLED != false
ifdef LOCAL_SOONG_DEX_JAR
ifndef LOCAL_IS_HOST_MODULE
diff --git a/core/tasks/tools/build_custom_image.mk b/core/tasks/tools/build_custom_image.mk
index 4d05237..a1151e9 100644
--- a/core/tasks/tools/build_custom_image.mk
+++ b/core/tasks/tools/build_custom_image.mk
@@ -151,7 +151,7 @@
cat $(PRIVATE_DICT_FILE) >> $(PRIVATE_INTERMEDIATES)/image_info.txt)
# Generate the image.
$(if $(filter oem,$(PRIVATE_MOUNT_POINT)), \
- $(hide) echo "oem.buildnumber=$(BUILD_NUMBER)" >> $(PRIVATE_STAGING_DIR)/oem.prop)
+ $(hide) echo "oem.buildnumber=$(BUILD_NUMBER_FROM_FILE)" >> $(PRIVATE_STAGING_DIR)/oem.prop)
$(hide) PATH=$(foreach p,$(INTERNAL_USERIMAGES_BINARY_PATHS),$(p):)$$PATH \
build/make/tools/releasetools/build_image.py \
$(PRIVATE_STAGING_DIR) $(PRIVATE_INTERMEDIATES)/image_info.txt $@ $(TARGET_OUT)
diff --git a/core/version_defaults.mk b/core/version_defaults.mk
index 858384b..e83d6fa 100644
--- a/core/version_defaults.mk
+++ b/core/version_defaults.mk
@@ -271,6 +271,7 @@
# to soong_ui.
BUILD_DATETIME :=
+HAS_BUILD_NUMBER := true
ifndef BUILD_NUMBER
# BUILD_NUMBER should be set to the source control value that
# represents the current state of the source code. E.g., a
@@ -282,4 +283,5 @@
# from this date/time" value. Make it start with a non-digit so that
# anyone trying to parse it as an integer will probably get "0".
BUILD_NUMBER := eng.$(shell echo $${USER:0:6}).$(shell $(DATE) +%Y%m%d.%H%M%S)
+ HAS_BUILD_NUMBER := false
endif
diff --git a/target/board/generic/BoardConfig.mk b/target/board/generic/BoardConfig.mk
index 8d93f75..67d019f 100644
--- a/target/board/generic/BoardConfig.mk
+++ b/target/board/generic/BoardConfig.mk
@@ -25,6 +25,7 @@
TARGET_CPU_ABI2 := armeabi
HAVE_HTC_AUDIO_DRIVER := true
BOARD_USES_GENERIC_AUDIO := true
+TARGET_BOOTLOADER_BOARD_NAME := goldfish_$(TARGET_ARCH)
# no hardware camera
USE_CAMERA_STUB := true
diff --git a/target/board/generic/sepolicy/OWNERS b/target/board/generic/sepolicy/OWNERS
index 4bd7e34..3828988 100644
--- a/target/board/generic/sepolicy/OWNERS
+++ b/target/board/generic/sepolicy/OWNERS
@@ -1,6 +1,4 @@
-nnk@google.com
jeffv@google.com
-klyubin@google.com
dcashman@google.com
jbires@google.com
sspatil@google.com
diff --git a/target/board/generic/sepolicy/property.te b/target/board/generic/sepolicy/property.te
index a486702..56e02ef 100644
--- a/target/board/generic/sepolicy/property.te
+++ b/target/board/generic/sepolicy/property.te
@@ -1,4 +1,3 @@
type qemu_prop, property_type;
type qemu_cmdline, property_type;
type radio_noril_prop, property_type;
-type opengles_prop, property_type;
diff --git a/target/board/generic/sepolicy/property_contexts b/target/board/generic/sepolicy/property_contexts
index c66a85f..3a61b6b 100644
--- a/target/board/generic/sepolicy/property_contexts
+++ b/target/board/generic/sepolicy/property_contexts
@@ -3,4 +3,3 @@
ro.emu. u:object_r:qemu_prop:s0
ro.emulator. u:object_r:qemu_prop:s0
ro.radio.noril u:object_r:radio_noril_prop:s0
-ro.opengles. u:object_r:opengles_prop:s0
diff --git a/target/board/generic/sepolicy/system_server.te b/target/board/generic/sepolicy/system_server.te
index 9063095..dd70b12 100644
--- a/target/board/generic/sepolicy/system_server.te
+++ b/target/board/generic/sepolicy/system_server.te
@@ -1,2 +1 @@
-get_prop(system_server, opengles_prop)
get_prop(system_server, radio_noril_prop)
diff --git a/target/board/generic_arm64/BoardConfig.mk b/target/board/generic_arm64/BoardConfig.mk
index 9d2ccbd..e066e3a 100644
--- a/target/board/generic_arm64/BoardConfig.mk
+++ b/target/board/generic_arm64/BoardConfig.mk
@@ -20,6 +20,7 @@
TARGET_ARCH_VARIANT := armv8-a
TARGET_CPU_VARIANT := generic
TARGET_CPU_ABI := arm64-v8a
+TARGET_BOOTLOADER_BOARD_NAME := goldfish_$(TARGET_ARCH)
TARGET_2ND_ARCH := arm
TARGET_2ND_CPU_ABI := armeabi-v7a
diff --git a/target/board/generic_arm64_ab/sepolicy/OWNERS b/target/board/generic_arm64_ab/sepolicy/OWNERS
index 4bd7e34..3828988 100644
--- a/target/board/generic_arm64_ab/sepolicy/OWNERS
+++ b/target/board/generic_arm64_ab/sepolicy/OWNERS
@@ -1,6 +1,4 @@
-nnk@google.com
jeffv@google.com
-klyubin@google.com
dcashman@google.com
jbires@google.com
sspatil@google.com
diff --git a/target/board/generic_arm_ab/BoardConfig.mk b/target/board/generic_arm_ab/BoardConfig.mk
index 011bcdf..b21e907 100644
--- a/target/board/generic_arm_ab/BoardConfig.mk
+++ b/target/board/generic_arm_ab/BoardConfig.mk
@@ -28,3 +28,11 @@
# Enable A/B update
TARGET_NO_RECOVERY := true
BOARD_BUILD_SYSTEM_ROOT_IMAGE := true
+
+# TODO(jiyong) These might be SoC specific.
+BOARD_ROOT_EXTRA_FOLDERS += firmware firmware/radio persist
+BOARD_ROOT_EXTRA_SYMLINKS := /vendor/lib/dsp:/dsp
+
+# TODO(b/36764215): remove this setting when the generic system image
+# no longer has QCOM-specific directories under /.
+BOARD_SEPOLICY_DIRS += build/target/board/generic_arm64_ab/sepolicy
diff --git a/target/board/generic_x86/BoardConfig.mk b/target/board/generic_x86/BoardConfig.mk
index a73a31b..f71e72b 100644
--- a/target/board/generic_x86/BoardConfig.mk
+++ b/target/board/generic_x86/BoardConfig.mk
@@ -10,6 +10,7 @@
TARGET_ARCH := x86
TARGET_ARCH_VARIANT := x86
TARGET_PRELINK_MODULE := false
+TARGET_BOOTLOADER_BOARD_NAME := goldfish_$(TARGET_ARCH)
#emulator now uses 64bit kernel to run 32bit x86 image
#
diff --git a/target/board/generic_x86/sepolicy/OWNERS b/target/board/generic_x86/sepolicy/OWNERS
index 4bd7e34..3828988 100644
--- a/target/board/generic_x86/sepolicy/OWNERS
+++ b/target/board/generic_x86/sepolicy/OWNERS
@@ -1,6 +1,4 @@
-nnk@google.com
jeffv@google.com
-klyubin@google.com
dcashman@google.com
jbires@google.com
sspatil@google.com
diff --git a/target/board/generic_x86_64/BoardConfig.mk b/target/board/generic_x86_64/BoardConfig.mk
index aea1a0a..a9c5142 100755
--- a/target/board/generic_x86_64/BoardConfig.mk
+++ b/target/board/generic_x86_64/BoardConfig.mk
@@ -10,6 +10,7 @@
TARGET_ARCH := x86_64
TARGET_ARCH_VARIANT := x86_64
TARGET_PRELINK_MODULE := false
+TARGET_BOOTLOADER_BOARD_NAME := goldfish_$(TARGET_ARCH)
TARGET_2ND_CPU_ABI := x86
TARGET_2ND_ARCH := x86
diff --git a/target/product/aosp_x86.mk b/target/product/aosp_x86.mk
index 811c330..96c9e33 100644
--- a/target/product/aosp_x86.mk
+++ b/target/product/aosp_x86.mk
@@ -24,7 +24,7 @@
PRODUCT_COPY_FILES += \
development/sys-img/advancedFeatures.ini:advancedFeatures.ini \
device/generic/goldfish/data/etc/encryptionkey.img:encryptionkey.img \
- prebuilts/qemu-kernel/x86_64/3.18/kernel-qemu2:kernel-ranchu-64
+ prebuilts/qemu-kernel/x86_64/4.4/kernel-qemu2:kernel-ranchu-64
include $(SRC_TARGET_DIR)/product/full_x86.mk
diff --git a/target/product/aosp_x86_64.mk b/target/product/aosp_x86_64.mk
index 693bdaf..086a76f 100644
--- a/target/product/aosp_x86_64.mk
+++ b/target/product/aosp_x86_64.mk
@@ -25,7 +25,7 @@
PRODUCT_COPY_FILES += \
development/sys-img/advancedFeatures.ini:advancedFeatures.ini \
device/generic/goldfish/data/etc/encryptionkey.img:encryptionkey.img \
- prebuilts/qemu-kernel/x86_64/3.18/kernel-qemu2:kernel-ranchu
+ prebuilts/qemu-kernel/x86_64/4.4/kernel-qemu2:kernel-ranchu
$(call inherit-product, $(SRC_TARGET_DIR)/product/core_64_bit.mk)
$(call inherit-product, $(SRC_TARGET_DIR)/product/aosp_base_telephony.mk)
diff --git a/target/product/core_base.mk b/target/product/core_base.mk
index 151e8de..7dc0010 100644
--- a/target/product/core_base.mk
+++ b/target/product/core_base.mk
@@ -62,9 +62,4 @@
mdnsd \
requestsync \
-# Wifi modules
-PRODUCT_PACKAGES += \
- wifi-service \
- wificond \
-
$(call inherit-product, $(SRC_TARGET_DIR)/product/core_minimal.mk)
diff --git a/target/product/runtime_libart.mk b/target/product/runtime_libart.mk
index 6e7038e..f9030cf 100644
--- a/target/product/runtime_libart.mk
+++ b/target/product/runtime_libart.mk
@@ -56,6 +56,9 @@
PRODUCT_PACKAGES += \
cacerts \
+PRODUCT_PACKAGES += \
+ hiddenapi-package-whitelist.xml \
+
PRODUCT_SYSTEM_DEFAULT_PROPERTIES += \
dalvik.vm.image-dex2oat-Xms=64m \
dalvik.vm.image-dex2oat-Xmx=64m \
diff --git a/target/product/sdk_base.mk b/target/product/sdk_base.mk
index a641be3..b79b8c6 100644
--- a/target/product/sdk_base.mk
+++ b/target/product/sdk_base.mk
@@ -17,7 +17,6 @@
PRODUCT_PROPERTY_OVERRIDES :=
PRODUCT_PACKAGES := \
- ApiDemos \
CellBroadcastReceiver \
CubeLiveWallpapers \
CustomLocale \
@@ -25,7 +24,6 @@
Dialer \
EmulatorSmokeTests \
Gallery2 \
- GestureBuilder \
Launcher3 \
Camera2 \
librs_jni \
@@ -40,8 +38,6 @@
rild \
screenrecord \
SdkSetup \
- SmokeTest \
- SmokeTestApp \
SoftKeyboard \
sqlite3 \
SystemUI \
diff --git a/target/product/sdk_phone_x86.mk b/target/product/sdk_phone_x86.mk
index b9820d3..32d71eb 100644
--- a/target/product/sdk_phone_x86.mk
+++ b/target/product/sdk_phone_x86.mk
@@ -24,7 +24,7 @@
PRODUCT_COPY_FILES += \
development/sys-img/advancedFeatures.ini:advancedFeatures.ini \
device/generic/goldfish/data/etc/encryptionkey.img:encryptionkey.img \
- prebuilts/qemu-kernel/x86_64/3.18/kernel-qemu2:kernel-ranchu-64
+ prebuilts/qemu-kernel/x86_64/4.4/kernel-qemu2:kernel-ranchu-64
$(call inherit-product, $(SRC_TARGET_DIR)/product/sdk_base.mk)
diff --git a/target/product/sdk_phone_x86_64.mk b/target/product/sdk_phone_x86_64.mk
index a18c4f8..e40ebb5 100644
--- a/target/product/sdk_phone_x86_64.mk
+++ b/target/product/sdk_phone_x86_64.mk
@@ -25,7 +25,7 @@
PRODUCT_COPY_FILES += \
development/sys-img/advancedFeatures.ini:advancedFeatures.ini \
device/generic/goldfish/data/etc/encryptionkey.img:encryptionkey.img \
- prebuilts/qemu-kernel/x86_64/3.18/kernel-qemu2:kernel-ranchu
+ prebuilts/qemu-kernel/x86_64/4.4/kernel-qemu2:kernel-ranchu
$(call inherit-product, $(SRC_TARGET_DIR)/product/core_64_bit.mk)
$(call inherit-product, $(SRC_TARGET_DIR)/product/sdk_base.mk)
diff --git a/tools/auto_gen_test_config.py b/tools/auto_gen_test_config.py
index da4443c..c7c5bdc 100755
--- a/tools/auto_gen_test_config.py
+++ b/tools/auto_gen_test_config.py
@@ -70,8 +70,9 @@
label = module
runner = instrumentation.attributes[ATTRIBUTE_RUNNER].value
package = manifest.attributes[ATTRIBUTE_PACKAGE].value
- test_type = ('AndroidJUnitTest' if runner.endswith('.AndroidJUnitRunner')
- else 'InstrumentationTest')
+ test_type = ('InstrumentationTest'
+ if runner.endswith('.InstrumentationTestRunner')
+ else 'AndroidJUnitTest')
with open(instrumentation_test_config_template) as template:
config = template.read()
diff --git a/tools/auto_gen_test_config_test.py b/tools/auto_gen_test_config_test.py
index e70eff8..e68c27f 100644
--- a/tools/auto_gen_test_config_test.py
+++ b/tools/auto_gen_test_config_test.py
@@ -155,7 +155,7 @@
self.assertEqual(config_file.read(), EXPECTED_JUNIT_TEST_CONFIG)
def testCreateInstrumentationTestConfig(self):
- """Test creating test config for AndroidJUnitTest.
+ """Test creating test config for InstrumentationTest.
"""
with open(self.manifest_file, 'w') as f:
f.write(MANIFEST_INSTRUMENTATION_TEST)
diff --git a/tools/fs_config/fs_config.c b/tools/fs_config/fs_config.c
index 48f300b..2952875 100644
--- a/tools/fs_config/fs_config.c
+++ b/tools/fs_config/fs_config.c
@@ -67,17 +67,18 @@
}
static void usage() {
- fprintf(stderr, "Usage: fs_config [-D product_out_path] [-S context_file] [-C]\n");
+ fprintf(stderr, "Usage: fs_config [-D product_out_path] [-S context_file] [-R root] [-C]\n");
}
int main(int argc, char** argv) {
char buffer[1024];
const char* context_file = NULL;
const char* product_out_path = NULL;
+ char* root_path = NULL;
struct selabel_handle* sehnd = NULL;
int print_capabilities = 0;
int opt;
- while((opt = getopt(argc, argv, "CS:D:")) != -1) {
+ while((opt = getopt(argc, argv, "CS:R:D:")) != -1) {
switch(opt) {
case 'C':
print_capabilities = 1;
@@ -85,6 +86,9 @@
case 'S':
context_file = optarg;
break;
+ case 'R':
+ root_path = optarg;
+ break;
case 'D':
product_out_path = optarg;
break;
@@ -98,6 +102,14 @@
sehnd = get_sehnd(context_file);
}
+ if (root_path != NULL) {
+ size_t root_len = strlen(root_path);
+ /* Trim any trailing slashes from the root path. */
+ while (root_len && root_path[--root_len] == '/') {
+ root_path[root_len] = '\0';
+ }
+ }
+
while (fgets(buffer, 1023, stdin) != NULL) {
int is_dir = 0;
int i;
@@ -122,6 +134,10 @@
unsigned uid = 0, gid = 0, mode = 0;
uint64_t capabilities;
fs_config(buffer, is_dir, product_out_path, &uid, &gid, &mode, &capabilities);
+ if (root_path != NULL && strcmp(buffer, root_path) == 0) {
+ /* The root of the filesystem needs to be an empty string. */
+ strcpy(buffer, "");
+ }
printf("%s %d %d %o", buffer, uid, gid, mode);
if (sehnd != NULL) {
diff --git a/tools/releasetools/add_img_to_target_files.py b/tools/releasetools/add_img_to_target_files.py
index a37de66..f68976e 100755
--- a/tools/releasetools/add_img_to_target_files.py
+++ b/tools/releasetools/add_img_to_target_files.py
@@ -96,7 +96,16 @@
def GetCareMap(which, imgname):
- """Generates the care_map for the given partition."""
+ """Returns the care_map string for the given partition.
+
+ Args:
+ which: The partition name, must be listed in PARTITIONS_WITH_CARE_MAP.
+ imgname: The filename of the image.
+
+ Returns:
+ (which, care_map_ranges): care_map_ranges is the raw string of the care_map
+ RangeSet.
+ """
assert which in PARTITIONS_WITH_CARE_MAP
simg = sparse_img.SparseImage(imgname)
@@ -111,13 +120,13 @@
return [which, care_map_ranges.to_string_raw()]
-def AddSystem(output_zip, prefix="IMAGES/", recovery_img=None, boot_img=None):
+def AddSystem(output_zip, recovery_img=None, boot_img=None):
"""Turn the contents of SYSTEM into a system image and store it in
output_zip. Returns the name of the system image file."""
- img = OutputFile(output_zip, OPTIONS.input_tmp, prefix, "system.img")
+ img = OutputFile(output_zip, OPTIONS.input_tmp, "IMAGES", "system.img")
if os.path.exists(img.input_name):
- print("system.img already exists in %s, no need to rebuild..." % (prefix,))
+ print("system.img already exists; no need to rebuild...")
return img.input_name
def output_sink(fn, data):
@@ -136,65 +145,66 @@
common.MakeRecoveryPatch(OPTIONS.input_tmp, output_sink, recovery_img,
boot_img, info_dict=OPTIONS.info_dict)
- block_list = OutputFile(output_zip, OPTIONS.input_tmp, prefix, "system.map")
+ block_list = OutputFile(output_zip, OPTIONS.input_tmp, "IMAGES", "system.map")
CreateImage(OPTIONS.input_tmp, OPTIONS.info_dict, "system", img,
block_list=block_list)
return img.name
-def AddSystemOther(output_zip, prefix="IMAGES/"):
+def AddSystemOther(output_zip):
"""Turn the contents of SYSTEM_OTHER into a system_other image
and store it in output_zip."""
- img = OutputFile(output_zip, OPTIONS.input_tmp, prefix, "system_other.img")
+ img = OutputFile(output_zip, OPTIONS.input_tmp, "IMAGES", "system_other.img")
if os.path.exists(img.input_name):
- print("system_other.img already exists in %s, no need to rebuild..." % (
- prefix,))
+ print("system_other.img already exists; no need to rebuild...")
return
CreateImage(OPTIONS.input_tmp, OPTIONS.info_dict, "system_other", img)
-def AddVendor(output_zip, prefix="IMAGES/"):
+def AddVendor(output_zip):
"""Turn the contents of VENDOR into a vendor image and store in it
output_zip."""
- img = OutputFile(output_zip, OPTIONS.input_tmp, prefix, "vendor.img")
+ img = OutputFile(output_zip, OPTIONS.input_tmp, "IMAGES", "vendor.img")
if os.path.exists(img.input_name):
- print("vendor.img already exists in %s, no need to rebuild..." % (prefix,))
+ print("vendor.img already exists; no need to rebuild...")
return img.input_name
- block_list = OutputFile(output_zip, OPTIONS.input_tmp, prefix, "vendor.map")
+ block_list = OutputFile(output_zip, OPTIONS.input_tmp, "IMAGES", "vendor.map")
CreateImage(OPTIONS.input_tmp, OPTIONS.info_dict, "vendor", img,
block_list=block_list)
return img.name
-def AddProduct(output_zip, prefix="IMAGES/"):
- """Turn the contents of PRODUCT into a product image and store it in output_zip."""
+def AddProduct(output_zip):
+ """Turn the contents of PRODUCT into a product image and store it in
+ output_zip."""
- img = OutputFile(output_zip, OPTIONS.input_tmp, prefix, "product.img")
+ img = OutputFile(output_zip, OPTIONS.input_tmp, "IMAGES", "product.img")
if os.path.exists(img.input_name):
- print("product.img already exists in %s, no need to rebuild..." % (prefix,))
+ print("product.img already exists; no need to rebuild...")
return img.input_name
- block_list = OutputFile(output_zip, OPTIONS.input_tmp, prefix, "product.map")
- CreateImage(OPTIONS.input_tmp, OPTIONS.info_dict, "product", img,
- block_list=block_list)
+ block_list = OutputFile(
+ output_zip, OPTIONS.input_tmp, "IMAGES", "product.map")
+ CreateImage(
+ OPTIONS.input_tmp, OPTIONS.info_dict, "product", img,
+ block_list=block_list)
return img.name
-def AddDtbo(output_zip, prefix="IMAGES/"):
+def AddDtbo(output_zip):
"""Adds the DTBO image.
- Uses the image under prefix if it already exists. Otherwise looks for the
+ Uses the image under IMAGES/ if it already exists. Otherwise looks for the
image under PREBUILT_IMAGES/, signs it as needed, and returns the image name.
"""
-
- img = OutputFile(output_zip, OPTIONS.input_tmp, prefix, "dtbo.img")
+ img = OutputFile(output_zip, OPTIONS.input_tmp, "IMAGES", "dtbo.img")
if os.path.exists(img.input_name):
- print("dtbo.img already exists in %s, no need to rebuild..." % (prefix,))
+ print("dtbo.img already exists; no need to rebuild...")
return img.input_name
dtbo_prebuilt_path = os.path.join(
@@ -288,7 +298,7 @@
info_dict[adjusted_blocks_key] = int(adjusted_blocks_value)/4096 - 1
-def AddUserdata(output_zip, prefix="IMAGES/"):
+def AddUserdata(output_zip):
"""Create a userdata image and store it in output_zip.
In most case we just create and store an empty userdata.img;
@@ -297,10 +307,9 @@
in OPTIONS.info_dict.
"""
- img = OutputFile(output_zip, OPTIONS.input_tmp, prefix, "userdata.img")
+ img = OutputFile(output_zip, OPTIONS.input_tmp, "IMAGES", "userdata.img")
if os.path.exists(img.input_name):
- print("userdata.img already exists in %s, no need to rebuild..." % (
- prefix,))
+ print("userdata.img already exists; no need to rebuild...")
return
# Skip userdata.img if no size.
@@ -356,7 +365,7 @@
cmd.extend(["--include_descriptors_from_image", img_path])
-def AddVBMeta(output_zip, partitions, prefix="IMAGES/"):
+def AddVBMeta(output_zip, partitions):
"""Creates a VBMeta image and store it in output_zip.
Args:
@@ -365,9 +374,9 @@
values. Only valid partition names are accepted, which include 'boot',
'recovery', 'system', 'vendor', 'dtbo'.
"""
- img = OutputFile(output_zip, OPTIONS.input_tmp, prefix, "vbmeta.img")
+ img = OutputFile(output_zip, OPTIONS.input_tmp, "IMAGES", "vbmeta.img")
if os.path.exists(img.input_name):
- print("vbmeta.img already exists in %s; not rebuilding..." % (prefix,))
+ print("vbmeta.img already exists; not rebuilding...")
return img.input_name
avbtool = os.getenv('AVBTOOL') or OPTIONS.info_dict["avb_avbtool"]
@@ -412,11 +421,13 @@
img.Write()
-def AddPartitionTable(output_zip, prefix="IMAGES/"):
+def AddPartitionTable(output_zip):
"""Create a partition table image and store it in output_zip."""
- img = OutputFile(output_zip, OPTIONS.input_tmp, prefix, "partition-table.img")
- bpt = OutputFile(output_zip, OPTIONS.input_tmp, prefix, "partition-table.bpt")
+ img = OutputFile(
+ output_zip, OPTIONS.input_tmp, "IMAGES", "partition-table.img")
+ bpt = OutputFile(
+ output_zip, OPTIONS.input_tmp, "IMAGES", "partition-table.bpt")
# use BPTTOOL from environ, or "bpttool" if empty or not set.
bpttool = os.getenv("BPTTOOL") or "bpttool"
@@ -441,12 +452,12 @@
bpt.Write()
-def AddCache(output_zip, prefix="IMAGES/"):
+def AddCache(output_zip):
"""Create an empty cache image and store it in output_zip."""
- img = OutputFile(output_zip, OPTIONS.input_tmp, prefix, "cache.img")
+ img = OutputFile(output_zip, OPTIONS.input_tmp, "IMAGES", "cache.img")
if os.path.exists(img.input_name):
- print("cache.img already exists in %s, no need to rebuild..." % (prefix,))
+ print("cache.img already exists; no need to rebuild...")
return
image_props = build_image.ImagePropFromGlobalDict(OPTIONS.info_dict, "cache")
@@ -621,23 +632,26 @@
The images will be created under IMAGES/ in the input target_files.zip.
Args:
- filename: the target_files.zip, or the zip root directory.
+ filename: the target_files.zip, or the zip root directory.
"""
if os.path.isdir(filename):
OPTIONS.input_tmp = os.path.abspath(filename)
- input_zip = None
else:
- OPTIONS.input_tmp, input_zip = common.UnzipTemp(filename)
+ OPTIONS.input_tmp = common.UnzipTemp(filename)
if not OPTIONS.add_missing:
if os.path.isdir(os.path.join(OPTIONS.input_tmp, "IMAGES")):
print("target_files appears to already contain images.")
sys.exit(1)
- # {vendor,product}.img is unlike system.img or system_other.img. Because it could
- # be built from source, or dropped into target_files.zip as a prebuilt blob.
- # We consider either of them as {vendor,product}.img being available, which could
- # be used when generating vbmeta.img for AVB.
+ OPTIONS.info_dict = common.LoadInfoDict(OPTIONS.input_tmp, OPTIONS.input_tmp)
+
+ has_recovery = OPTIONS.info_dict.get("no_recovery") != "true"
+
+ # {vendor,product}.img is unlike system.img or system_other.img. Because it
+ # could be built from source, or dropped into target_files.zip as a prebuilt
+ # blob. We consider either of them as {vendor,product}.img being available,
+ # which could be used when generating vbmeta.img for AVB.
has_vendor = (os.path.isdir(os.path.join(OPTIONS.input_tmp, "VENDOR")) or
os.path.exists(os.path.join(OPTIONS.input_tmp, "IMAGES",
"vendor.img")))
@@ -647,16 +661,14 @@
has_system_other = os.path.isdir(os.path.join(OPTIONS.input_tmp,
"SYSTEM_OTHER"))
- if input_zip:
- OPTIONS.info_dict = common.LoadInfoDict(input_zip, OPTIONS.input_tmp)
-
- common.ZipClose(input_zip)
+ # Set up the output destination. It writes to the given directory for dir
+ # mode; otherwise appends to the given ZIP.
+ if os.path.isdir(filename):
+ output_zip = None
+ else:
output_zip = zipfile.ZipFile(filename, "a",
compression=zipfile.ZIP_DEFLATED,
allowZip64=True)
- else:
- OPTIONS.info_dict = common.LoadInfoDict(filename, filename)
- output_zip = None
# Always make input_tmp/IMAGES available, since we may stage boot / recovery
# images there even under zip mode. The directory will be cleaned up as part
@@ -665,8 +677,6 @@
if not os.path.isdir(images_dir):
os.makedirs(images_dir)
- has_recovery = (OPTIONS.info_dict.get("no_recovery") != "true")
-
# A map between partition names and their paths, which could be used when
# generating AVB vbmeta image.
partitions = dict()
diff --git a/tools/releasetools/check_target_files_signatures.py b/tools/releasetools/check_target_files_signatures.py
index db63fd3..b9f39a6 100755
--- a/tools/releasetools/check_target_files_signatures.py
+++ b/tools/releasetools/check_target_files_signatures.py
@@ -39,19 +39,18 @@
"""
+import os
+import re
+import subprocess
import sys
+import zipfile
+
+import common
if sys.hexversion < 0x02070000:
print >> sys.stderr, "Python 2.7 or newer is required."
sys.exit(1)
-import os
-import re
-import shutil
-import subprocess
-import zipfile
-
-import common
# Work around a bug in Python's zipfile module that prevents opening of zipfiles
# if any entry has an extra field of between 1 and 3 bytes (which is common with
@@ -244,46 +243,41 @@
# This is the list of wildcards of files we extract from |filename|.
apk_extensions = ['*.apk']
- self.certmap, compressed_extension = common.ReadApkCerts(zipfile.ZipFile(filename, "r"))
+ self.certmap, compressed_extension = common.ReadApkCerts(
+ zipfile.ZipFile(filename, "r"))
if compressed_extension:
apk_extensions.append("*.apk" + compressed_extension)
- d, z = common.UnzipTemp(filename, apk_extensions)
- try:
- self.apks = {}
- self.apks_by_basename = {}
- for dirpath, _, filenames in os.walk(d):
- for fn in filenames:
- # Decompress compressed APKs before we begin processing them.
- if compressed_extension and fn.endswith(compressed_extension):
- # First strip the compressed extension from the file.
- uncompressed_fn = fn[:-len(compressed_extension)]
+ d = common.UnzipTemp(filename, apk_extensions)
+ self.apks = {}
+ self.apks_by_basename = {}
+ for dirpath, _, filenames in os.walk(d):
+ for fn in filenames:
+ # Decompress compressed APKs before we begin processing them.
+ if compressed_extension and fn.endswith(compressed_extension):
+ # First strip the compressed extension from the file.
+ uncompressed_fn = fn[:-len(compressed_extension)]
- # Decompress the compressed file to the output file.
- common.Gunzip(os.path.join(dirpath, fn),
- os.path.join(dirpath, uncompressed_fn))
+ # Decompress the compressed file to the output file.
+ common.Gunzip(os.path.join(dirpath, fn),
+ os.path.join(dirpath, uncompressed_fn))
- # Finally, delete the compressed file and use the uncompressed file
- # for further processing. Note that the deletion is not strictly required,
- # but is done here to ensure that we're not using too much space in
- # the temporary directory.
- os.remove(os.path.join(dirpath, fn))
- fn = uncompressed_fn
+ # Finally, delete the compressed file and use the uncompressed file
+ # for further processing. Note that the deletion is not strictly
+ # required, but is done here to ensure that we're not using too much
+ # space in the temporary directory.
+ os.remove(os.path.join(dirpath, fn))
+ fn = uncompressed_fn
+ if fn.endswith(".apk"):
+ fullname = os.path.join(dirpath, fn)
+ displayname = fullname[len(d)+1:]
+ apk = APK(fullname, displayname)
+ self.apks[apk.filename] = apk
+ self.apks_by_basename[os.path.basename(apk.filename)] = apk
- if fn.endswith(".apk"):
- fullname = os.path.join(dirpath, fn)
- displayname = fullname[len(d)+1:]
- apk = APK(fullname, displayname)
- self.apks[apk.filename] = apk
- self.apks_by_basename[os.path.basename(apk.filename)] = apk
-
- self.max_pkg_len = max(self.max_pkg_len, len(apk.package))
- self.max_fn_len = max(self.max_fn_len, len(apk.filename))
- finally:
- shutil.rmtree(d)
-
- z.close()
+ self.max_pkg_len = max(self.max_pkg_len, len(apk.package))
+ self.max_fn_len = max(self.max_fn_len, len(apk.filename))
def CheckSharedUids(self):
"""Look for any instances where packages signed with different
@@ -293,7 +287,7 @@
if apk.shared_uid:
apks_by_uid.setdefault(apk.shared_uid, []).append(apk)
- for uid in sorted(apks_by_uid.keys()):
+ for uid in sorted(apks_by_uid):
apks = apks_by_uid[uid]
for apk in apks[1:]:
if apk.certs != apks[0].certs:
@@ -468,3 +462,5 @@
print " ERROR: %s" % (e,)
print
sys.exit(1)
+ finally:
+ common.Cleanup()
diff --git a/tools/releasetools/common.py b/tools/releasetools/common.py
index 370710e..743c6a0 100644
--- a/tools/releasetools/common.py
+++ b/tools/releasetools/common.py
@@ -221,21 +221,6 @@
vendor_base_fs_file,))
del d["vendor_base_fs_file"]
- try:
- data = read_helper("META/imagesizes.txt")
- for line in data.split("\n"):
- if not line:
- continue
- name, value = line.split(" ", 1)
- if not value:
- continue
- if name == "blocksize":
- d[name] = value
- else:
- d[name + "_size"] = value
- except KeyError:
- pass
-
def makeint(key):
if key in d:
d[key] = int(d[key], 0)
@@ -599,8 +584,7 @@
then unzip bar.zip into that_dir/BOOTABLE_IMAGES.
Returns:
- (tempdir, zipobj): tempdir is the name of the temprary directory; zipobj is
- a zipfile.ZipFile (of the main file), open for reading.
+ The name of the temporary directory.
"""
def unzip_to_dir(filename, dirname):
@@ -622,7 +606,7 @@
else:
unzip_to_dir(filename, tmp)
- return tmp, zipfile.ZipFile(filename, "r")
+ return tmp
def GetSparseImage(which, tmpdir, input_zip, allow_shared_blocks):
@@ -1550,9 +1534,7 @@
b = blockimgdiff.BlockImageDiff(tgt, src, threads=OPTIONS.worker_threads,
version=self.version,
disable_imgdiff=self.disable_imgdiff)
- tmpdir = tempfile.mkdtemp()
- OPTIONS.tempfiles.append(tmpdir)
- self.path = os.path.join(tmpdir, partition)
+ self.path = os.path.join(MakeTempDir(), partition)
b.Compute(self.path)
self._required_cache = b.max_stashed_size
self.touched_src_ranges = b.touched_src_ranges
@@ -1800,17 +1782,23 @@
def ParseCertificate(data):
- """Parse a PEM-format certificate."""
- cert = []
+ """Parses and converts a PEM-encoded certificate into DER-encoded.
+
+ This gives the same result as `openssl x509 -in <filename> -outform DER`.
+
+ Returns:
+ The decoded certificate string.
+ """
+ cert_buffer = []
save = False
for line in data.split("\n"):
if "--END CERTIFICATE--" in line:
break
if save:
- cert.append(line)
+ cert_buffer.append(line)
if "--BEGIN CERTIFICATE--" in line:
save = True
- cert = "".join(cert).decode('base64')
+ cert = "".join(cert_buffer).decode('base64')
return cert
diff --git a/tools/releasetools/img_from_target_files.py b/tools/releasetools/img_from_target_files.py
index 4422b53..e6e8c9f 100755
--- a/tools/releasetools/img_from_target_files.py
+++ b/tools/releasetools/img_from_target_files.py
@@ -71,8 +71,7 @@
common.Usage(__doc__)
sys.exit(1)
- OPTIONS.input_tmp, input_zip = common.UnzipTemp(
- args[0], ["IMAGES/*", "OTA/*"])
+ OPTIONS.input_tmp = common.UnzipTemp(args[0], ["IMAGES/*", "OTA/*"])
output_zip = zipfile.ZipFile(args[1], "w", compression=zipfile.ZIP_DEFLATED)
CopyInfo(output_zip)
diff --git a/tools/releasetools/ota_from_target_files.py b/tools/releasetools/ota_from_target_files.py
index a22145a..dd8dcd0 100755
--- a/tools/releasetools/ota_from_target_files.py
+++ b/tools/releasetools/ota_from_target_files.py
@@ -144,6 +144,13 @@
--payload_signer_args <args>
Specify the arguments needed for payload signer.
+
+ --skip_postinstall
+ Skip the postinstall hooks when generating an A/B OTA package (default:
+ False). Note that this discards ALL the hooks, including non-optional
+ ones. Should only be used if caller knows it's safe to do so (e.g. all the
+ postinstall work is to dexopt apps and a data wipe will happen immediately
+ after). Only meaningful when generating A/B OTAs.
"""
from __future__ import print_function
@@ -151,6 +158,7 @@
import multiprocessing
import os.path
import shlex
+import shutil
import subprocess
import sys
import tempfile
@@ -193,8 +201,11 @@
OPTIONS.payload_signer_args = []
OPTIONS.extracted_input = None
OPTIONS.key_passwords = []
+OPTIONS.skip_postinstall = False
+
METADATA_NAME = 'META-INF/com/android/metadata'
+POSTINSTALL_CONFIG = 'META/postinstall_config.txt'
UNZIP_PATTERN = ['IMAGES/*', 'META/*']
@@ -1215,7 +1226,7 @@
WriteMetadata(metadata, output_zip)
-def GetTargetFilesZipForSecondaryImages(input_file):
+def GetTargetFilesZipForSecondaryImages(input_file, skip_postinstall=False):
"""Returns a target-files.zip file for generating secondary payload.
Although the original target-files.zip already contains secondary slot
@@ -1229,6 +1240,7 @@
Args:
input_file: The input target-files.zip file.
+ skip_postinstall: Whether to skip copying the postinstall config file.
Returns:
The filename of the target-files.zip for generating secondary payload.
@@ -1236,8 +1248,11 @@
target_file = common.MakeTempFile(prefix="targetfiles-", suffix=".zip")
target_zip = zipfile.ZipFile(target_file, 'w', allowZip64=True)
- input_tmp, input_zip = common.UnzipTemp(input_file, UNZIP_PATTERN)
- for info in input_zip.infolist():
+ input_tmp = common.UnzipTemp(input_file, UNZIP_PATTERN)
+ with zipfile.ZipFile(input_file, 'r') as input_zip:
+ infolist = input_zip.infolist()
+
+ for info in infolist:
unzipped_file = os.path.join(input_tmp, *info.filename.split('/'))
if info.filename == 'IMAGES/system_other.img':
common.ZipWrite(target_zip, unzipped_file, arcname='IMAGES/system.img')
@@ -1247,15 +1262,43 @@
'IMAGES/system.map'):
pass
+ # Skip copying the postinstall config if requested.
+ elif skip_postinstall and info.filename == POSTINSTALL_CONFIG:
+ pass
+
elif info.filename.startswith(('META/', 'IMAGES/')):
common.ZipWrite(target_zip, unzipped_file, arcname=info.filename)
- common.ZipClose(input_zip)
common.ZipClose(target_zip)
return target_file
+def GetTargetFilesZipWithoutPostinstallConfig(input_file):
+ """Returns a target-files.zip that's not containing postinstall_config.txt.
+
+ This allows brillo_update_payload script to skip writing all the postinstall
+ hooks in the generated payload. The input target-files.zip file will be
+ duplicated, with 'META/postinstall_config.txt' skipped. If input_file doesn't
+ contain the postinstall_config.txt entry, the input file will be returned.
+
+ Args:
+ input_file: The input target-files.zip filename.
+
+ Returns:
+ The filename of target-files.zip that doesn't contain postinstall config.
+ """
+ # We should only make a copy if postinstall_config entry exists.
+ with zipfile.ZipFile(input_file, 'r') as input_zip:
+ if POSTINSTALL_CONFIG not in input_zip.namelist():
+ return input_file
+
+ target_file = common.MakeTempFile(prefix="targetfiles-", suffix=".zip")
+ shutil.copyfile(input_file, target_file)
+ common.ZipDelete(target_file, POSTINSTALL_CONFIG)
+ return target_file
+
+
def WriteABOTAPackageWithBrilloScript(target_file, output_file,
source_file=None):
"""Generate an Android OTA package that has A/B update payload."""
@@ -1311,8 +1354,8 @@
return value
# Stage the output zip package for package signing.
- temp_zip_file = tempfile.NamedTemporaryFile()
- output_zip = zipfile.ZipFile(temp_zip_file, "w",
+ staging_file = common.MakeTempFile(suffix='.zip')
+ output_zip = zipfile.ZipFile(staging_file, "w",
compression=zipfile.ZIP_DEFLATED)
if source_file is not None:
@@ -1325,6 +1368,9 @@
# Metadata to comply with Android OTA package format.
metadata = GetPackageMetadata(target_info, source_info)
+ if OPTIONS.skip_postinstall:
+ target_file = GetTargetFilesZipWithoutPostinstallConfig(target_file)
+
# Generate payload.
payload = Payload()
payload.Generate(target_file, source_file)
@@ -1341,7 +1387,8 @@
if OPTIONS.include_secondary:
# We always include a full payload for the secondary slot, even when
# building an incremental OTA. See the comments for "--include_secondary".
- secondary_target_file = GetTargetFilesZipForSecondaryImages(target_file)
+ secondary_target_file = GetTargetFilesZipForSecondaryImages(
+ target_file, OPTIONS.skip_postinstall)
secondary_payload = Payload(secondary=True)
secondary_payload.Generate(secondary_target_file)
secondary_payload.Sign(payload_signer)
@@ -1363,10 +1410,6 @@
else:
print("Warning: cannot find care map file in target_file package")
- # source_info must be None for full OTAs.
- if source_file is None:
- assert source_info is None
-
AddCompatibilityArchiveIfTrebleEnabled(
target_zip, output_zip, target_info, source_info)
@@ -1384,8 +1427,7 @@
# compute the ZIP entry offsets, write back the final metadata and do the
# final signing.
prelim_signing = common.MakeTempFile(suffix='.zip')
- SignOutput(temp_zip_file.name, prelim_signing)
- common.ZipClose(temp_zip_file)
+ SignOutput(staging_file, prelim_signing)
# Open the signed zip. Compute the final metadata that's needed for streaming.
prelim_signing_zip = zipfile.ZipFile(prelim_signing, 'r')
@@ -1469,6 +1511,8 @@
OPTIONS.payload_signer_args = shlex.split(a)
elif o == "--extracted_input_target_files":
OPTIONS.extracted_input = a
+ elif o == "--skip_postinstall":
+ OPTIONS.skip_postinstall = True
else:
return False
return True
@@ -1498,6 +1542,7 @@
"payload_signer=",
"payload_signer_args=",
"extracted_input_target_files=",
+ "skip_postinstall",
], extra_option_handler=option_handler)
if len(args) != 2:
@@ -1586,11 +1631,9 @@
if OPTIONS.extracted_input is not None:
OPTIONS.input_tmp = OPTIONS.extracted_input
- input_zip = zipfile.ZipFile(args[0], "r")
else:
print("unzipping target target-files...")
- OPTIONS.input_tmp, input_zip = common.UnzipTemp(
- args[0], UNZIP_PATTERN)
+ OPTIONS.input_tmp = common.UnzipTemp(args[0], UNZIP_PATTERN)
OPTIONS.target_tmp = OPTIONS.input_tmp
# If the caller explicitly specified the device-specific extensions path via
@@ -1622,16 +1665,17 @@
# Generate a full OTA.
if OPTIONS.incremental_source is None:
- WriteFullOTAPackage(input_zip, output_zip)
+ with zipfile.ZipFile(args[0], 'r') as input_zip:
+ WriteFullOTAPackage(input_zip, output_zip)
# Generate an incremental OTA.
else:
print("unzipping source target-files...")
- OPTIONS.source_tmp, source_zip = common.UnzipTemp(
- OPTIONS.incremental_source,
- UNZIP_PATTERN)
-
- WriteBlockIncrementalOTAPackage(input_zip, source_zip, output_zip)
+ OPTIONS.source_tmp = common.UnzipTemp(
+ OPTIONS.incremental_source, UNZIP_PATTERN)
+ with zipfile.ZipFile(args[0], 'r') as input_zip, \
+ zipfile.ZipFile(OPTIONS.incremental_source, 'r') as source_zip:
+ WriteBlockIncrementalOTAPackage(input_zip, source_zip, output_zip)
if OPTIONS.log_diff:
with open(OPTIONS.log_diff, 'w') as out_file:
@@ -1639,7 +1683,6 @@
target_files_diff.recursiveDiff(
'', OPTIONS.source_tmp, OPTIONS.input_tmp, out_file)
- common.ZipClose(input_zip)
common.ZipClose(output_zip)
# Sign the generated zip package unless no_signing is specified.
diff --git a/tools/releasetools/sign_target_files_apks.py b/tools/releasetools/sign_target_files_apks.py
index 1f9a3ca..fa62c8f 100755
--- a/tools/releasetools/sign_target_files_apks.py
+++ b/tools/releasetools/sign_target_files_apks.py
@@ -104,6 +104,7 @@
import sys
import tempfile
import zipfile
+from xml.etree import ElementTree
import add_img_to_target_files
import common
@@ -290,6 +291,8 @@
new_data = RewriteProps(data)
common.ZipWriteStr(output_tf_zip, out_info, new_data)
+ # Replace the certs in *mac_permissions.xml (there could be multiple, such
+ # as {system,vendor}/etc/selinux/{plat,nonplat}_mac_permissions.xml).
elif info.filename.endswith("mac_permissions.xml"):
print("Rewriting %s with new keys." % (info.filename,))
new_data = ReplaceCerts(data)
@@ -361,31 +364,54 @@
def ReplaceCerts(data):
- """Given a string of data, replace all occurences of a set
- of X509 certs with a newer set of X509 certs and return
- the updated data string."""
- for old, new in OPTIONS.key_map.iteritems():
- try:
- if OPTIONS.verbose:
- print(" Replacing %s.x509.pem with %s.x509.pem" % (old, new))
- f = open(old + ".x509.pem")
- old_cert16 = base64.b16encode(common.ParseCertificate(f.read())).lower()
- f.close()
- f = open(new + ".x509.pem")
- new_cert16 = base64.b16encode(common.ParseCertificate(f.read())).lower()
- f.close()
- # Only match entire certs.
- pattern = "\\b" + old_cert16 + "\\b"
- (data, num) = re.subn(pattern, new_cert16, data, flags=re.IGNORECASE)
- if OPTIONS.verbose:
- print(" Replaced %d occurence(s) of %s.x509.pem with "
- "%s.x509.pem" % (num, old, new))
- except IOError as e:
- if e.errno == errno.ENOENT and not OPTIONS.verbose:
- continue
+ """Replaces all the occurences of X.509 certs with the new ones.
- print(" Error accessing %s. %s. Skip replacing %s.x509.pem with "
- "%s.x509.pem." % (e.filename, e.strerror, old, new))
+ The mapping info is read from OPTIONS.key_map. Non-existent certificate will
+ be skipped. After the replacement, it additionally checks for duplicate
+ entries, which would otherwise fail the policy loading code in
+ frameworks/base/services/core/java/com/android/server/pm/SELinuxMMAC.java.
+
+ Args:
+ data: Input string that contains a set of X.509 certs.
+
+ Returns:
+ A string after the replacement.
+
+ Raises:
+ AssertionError: On finding duplicate entries.
+ """
+ for old, new in OPTIONS.key_map.iteritems():
+ if OPTIONS.verbose:
+ print(" Replacing %s.x509.pem with %s.x509.pem" % (old, new))
+
+ try:
+ with open(old + ".x509.pem") as old_fp:
+ old_cert16 = base64.b16encode(
+ common.ParseCertificate(old_fp.read())).lower()
+ with open(new + ".x509.pem") as new_fp:
+ new_cert16 = base64.b16encode(
+ common.ParseCertificate(new_fp.read())).lower()
+ except IOError as e:
+ if OPTIONS.verbose or e.errno != errno.ENOENT:
+ print(" Error accessing %s: %s.\nSkip replacing %s.x509.pem with "
+ "%s.x509.pem." % (e.filename, e.strerror, old, new))
+ continue
+
+ # Only match entire certs.
+ pattern = "\\b" + old_cert16 + "\\b"
+ (data, num) = re.subn(pattern, new_cert16, data, flags=re.IGNORECASE)
+
+ if OPTIONS.verbose:
+ print(" Replaced %d occurence(s) of %s.x509.pem with %s.x509.pem" % (
+ num, old, new))
+
+ # Verify that there're no duplicate entries after the replacement. Note that
+ # it's only checking entries with global seinfo at the moment (i.e. ignoring
+ # the ones with inner packages). (Bug: 69479366)
+ root = ElementTree.fromstring(data)
+ signatures = [signer.attrib['signature'] for signer in root.findall('signer')]
+ assert len(signatures) == len(set(signatures)), \
+ "Found duplicate entries after cert replacement: {}".format(data)
return data
@@ -597,7 +623,7 @@
# Extract keyid using openssl command.
p = common.Run(["openssl", "x509", "-in", key_path, "-text"],
- stdout=subprocess.PIPE)
+ stdout=subprocess.PIPE, stderr=subprocess.PIPE)
keyid, stderr = p.communicate()
assert p.returncode == 0, "Failed to dump certificate: {}".format(stderr)
keyid = re.search(
diff --git a/tools/releasetools/test_add_img_to_target_files.py b/tools/releasetools/test_add_img_to_target_files.py
index e449ca8..9a0f78e 100644
--- a/tools/releasetools/test_add_img_to_target_files.py
+++ b/tools/releasetools/test_add_img_to_target_files.py
@@ -20,7 +20,11 @@
import zipfile
import common
-from add_img_to_target_files import AddPackRadioImages, AddRadioImagesForAbOta
+import test_utils
+from add_img_to_target_files import (
+ AddCareMapTxtForAbOta, AddPackRadioImages, AddRadioImagesForAbOta,
+ GetCareMap)
+from rangelib import RangeSet
OPTIONS = common.OPTIONS
@@ -166,3 +170,170 @@
self.assertRaises(AssertionError, AddPackRadioImages, None,
images + ['baz'])
+
+ @staticmethod
+ def _test_AddCareMapTxtForAbOta():
+ """Helper function to set up the test for test_AddCareMapTxtForAbOta()."""
+ OPTIONS.info_dict = {
+ 'system_verity_block_device' : '/dev/block/system',
+ 'vendor_verity_block_device' : '/dev/block/vendor',
+ }
+
+ # Prepare the META/ folder.
+ meta_path = os.path.join(OPTIONS.input_tmp, 'META')
+ if not os.path.exists(meta_path):
+ os.mkdir(meta_path)
+
+ system_image = test_utils.construct_sparse_image([
+ (0xCAC1, 6),
+ (0xCAC3, 4),
+ (0xCAC1, 6)])
+ vendor_image = test_utils.construct_sparse_image([
+ (0xCAC2, 10)])
+
+ image_paths = {
+ 'system' : system_image,
+ 'vendor' : vendor_image,
+ }
+ return image_paths
+
+ def test_AddCareMapTxtForAbOta(self):
+ image_paths = self._test_AddCareMapTxtForAbOta()
+
+ AddCareMapTxtForAbOta(None, ['system', 'vendor'], image_paths)
+
+ care_map_file = os.path.join(OPTIONS.input_tmp, 'META', 'care_map.txt')
+ with open(care_map_file, 'r') as verify_fp:
+ care_map = verify_fp.read()
+
+ lines = care_map.split('\n')
+ self.assertEqual(4, len(lines))
+ self.assertEqual('system', lines[0])
+ self.assertEqual(RangeSet("0-5 10-15").to_string_raw(), lines[1])
+ self.assertEqual('vendor', lines[2])
+ self.assertEqual(RangeSet("0-9").to_string_raw(), lines[3])
+
+ def test_AddCareMapTxtForAbOta_withNonCareMapPartitions(self):
+ """Partitions without care_map should be ignored."""
+ image_paths = self._test_AddCareMapTxtForAbOta()
+
+ AddCareMapTxtForAbOta(
+ None, ['boot', 'system', 'vendor', 'vbmeta'], image_paths)
+
+ care_map_file = os.path.join(OPTIONS.input_tmp, 'META', 'care_map.txt')
+ with open(care_map_file, 'r') as verify_fp:
+ care_map = verify_fp.read()
+
+ lines = care_map.split('\n')
+ self.assertEqual(4, len(lines))
+ self.assertEqual('system', lines[0])
+ self.assertEqual(RangeSet("0-5 10-15").to_string_raw(), lines[1])
+ self.assertEqual('vendor', lines[2])
+ self.assertEqual(RangeSet("0-9").to_string_raw(), lines[3])
+
+ def test_AddCareMapTxtForAbOta_withAvb(self):
+ """Tests the case for device using AVB."""
+ image_paths = self._test_AddCareMapTxtForAbOta()
+ OPTIONS.info_dict = {
+ 'avb_system_hashtree_enable' : 'true',
+ 'avb_vendor_hashtree_enable' : 'true',
+ }
+
+ AddCareMapTxtForAbOta(None, ['system', 'vendor'], image_paths)
+
+ care_map_file = os.path.join(OPTIONS.input_tmp, 'META', 'care_map.txt')
+ with open(care_map_file, 'r') as verify_fp:
+ care_map = verify_fp.read()
+
+ lines = care_map.split('\n')
+ self.assertEqual(4, len(lines))
+ self.assertEqual('system', lines[0])
+ self.assertEqual(RangeSet("0-5 10-15").to_string_raw(), lines[1])
+ self.assertEqual('vendor', lines[2])
+ self.assertEqual(RangeSet("0-9").to_string_raw(), lines[3])
+
+ def test_AddCareMapTxtForAbOta_verityNotEnabled(self):
+ """No care_map.txt should be generated if verity not enabled."""
+ image_paths = self._test_AddCareMapTxtForAbOta()
+ OPTIONS.info_dict = {}
+ AddCareMapTxtForAbOta(None, ['system', 'vendor'], image_paths)
+
+ care_map_file = os.path.join(OPTIONS.input_tmp, 'META', 'care_map.txt')
+ self.assertFalse(os.path.exists(care_map_file))
+
+ def test_AddCareMapTxtForAbOta_missingImageFile(self):
+ """Missing image file should be considered fatal."""
+ image_paths = self._test_AddCareMapTxtForAbOta()
+ image_paths['vendor'] = ''
+ self.assertRaises(AssertionError, AddCareMapTxtForAbOta, None,
+ ['system', 'vendor'], image_paths)
+
+ def test_AddCareMapTxtForAbOta_zipOutput(self):
+ """Tests the case with ZIP output."""
+ image_paths = self._test_AddCareMapTxtForAbOta()
+
+ output_file = common.MakeTempFile(suffix='.zip')
+ with zipfile.ZipFile(output_file, 'w') as output_zip:
+ AddCareMapTxtForAbOta(output_zip, ['system', 'vendor'], image_paths)
+
+ with zipfile.ZipFile(output_file, 'r') as verify_zip:
+ care_map = verify_zip.read('META/care_map.txt').decode('ascii')
+
+ lines = care_map.split('\n')
+ self.assertEqual(4, len(lines))
+ self.assertEqual('system', lines[0])
+ self.assertEqual(RangeSet("0-5 10-15").to_string_raw(), lines[1])
+ self.assertEqual('vendor', lines[2])
+ self.assertEqual(RangeSet("0-9").to_string_raw(), lines[3])
+
+ def test_AddCareMapTxtForAbOta_zipOutput_careMapEntryExists(self):
+ """Tests the case with ZIP output which already has care_map entry."""
+ image_paths = self._test_AddCareMapTxtForAbOta()
+
+ output_file = common.MakeTempFile(suffix='.zip')
+ with zipfile.ZipFile(output_file, 'w') as output_zip:
+ # Create an existing META/care_map.txt entry.
+ common.ZipWriteStr(output_zip, 'META/care_map.txt', 'dummy care_map.txt')
+
+ # Request to add META/care_map.txt again.
+ AddCareMapTxtForAbOta(output_zip, ['system', 'vendor'], image_paths)
+
+ # The one under OPTIONS.input_tmp must have been replaced.
+ care_map_file = os.path.join(OPTIONS.input_tmp, 'META', 'care_map.txt')
+ with open(care_map_file, 'r') as verify_fp:
+ care_map = verify_fp.read()
+
+ lines = care_map.split('\n')
+ self.assertEqual(4, len(lines))
+ self.assertEqual('system', lines[0])
+ self.assertEqual(RangeSet("0-5 10-15").to_string_raw(), lines[1])
+ self.assertEqual('vendor', lines[2])
+ self.assertEqual(RangeSet("0-9").to_string_raw(), lines[3])
+
+ # The existing entry should be scheduled to be replaced.
+ self.assertIn('META/care_map.txt', OPTIONS.replace_updated_files_list)
+
+ def test_GetCareMap(self):
+ sparse_image = test_utils.construct_sparse_image([
+ (0xCAC1, 6),
+ (0xCAC3, 4),
+ (0xCAC1, 6)])
+ OPTIONS.info_dict = {
+ 'system_adjusted_partition_size' : 12,
+ }
+ name, care_map = GetCareMap('system', sparse_image)
+ self.assertEqual('system', name)
+ self.assertEqual(RangeSet("0-5 10-12").to_string_raw(), care_map)
+
+ def test_GetCareMap_invalidPartition(self):
+ self.assertRaises(AssertionError, GetCareMap, 'oem', None)
+
+ def test_GetCareMap_invalidAdjustedPartitionSize(self):
+ sparse_image = test_utils.construct_sparse_image([
+ (0xCAC1, 6),
+ (0xCAC3, 4),
+ (0xCAC1, 6)])
+ OPTIONS.info_dict = {
+ 'system_adjusted_partition_size' : -12,
+ }
+ self.assertRaises(AssertionError, GetCareMap, 'system', sparse_image)
diff --git a/tools/releasetools/test_common.py b/tools/releasetools/test_common.py
index 6da286c..fb26b66 100644
--- a/tools/releasetools/test_common.py
+++ b/tools/releasetools/test_common.py
@@ -13,7 +13,9 @@
# See the License for the specific language governing permissions and
# limitations under the License.
#
+
import os
+import subprocess
import tempfile
import time
import unittest
@@ -23,6 +25,7 @@
import common
import test_utils
import validate_target_files
+from rangelib import RangeSet
KiB = 1024
@@ -400,6 +403,9 @@
'Compressed4.apk' : 'certs/compressed4',
}
+ def setUp(self):
+ self.testdata_dir = test_utils.get_testdata_dir()
+
def tearDown(self):
common.Cleanup()
@@ -477,17 +483,168 @@
self.assertRaises(ValueError, common.ReadApkCerts, input_zip)
def test_ExtractPublicKey(self):
- testdata_dir = test_utils.get_testdata_dir()
- cert = os.path.join(testdata_dir, 'testkey.x509.pem')
- pubkey = os.path.join(testdata_dir, 'testkey.pubkey.pem')
+ cert = os.path.join(self.testdata_dir, 'testkey.x509.pem')
+ pubkey = os.path.join(self.testdata_dir, 'testkey.pubkey.pem')
with open(pubkey, 'rb') as pubkey_fp:
self.assertEqual(pubkey_fp.read(), common.ExtractPublicKey(cert))
def test_ExtractPublicKey_invalidInput(self):
- testdata_dir = test_utils.get_testdata_dir()
- wrong_input = os.path.join(testdata_dir, 'testkey.pk8')
+ wrong_input = os.path.join(self.testdata_dir, 'testkey.pk8')
self.assertRaises(AssertionError, common.ExtractPublicKey, wrong_input)
+ def test_ParseCertificate(self):
+ cert = os.path.join(self.testdata_dir, 'testkey.x509.pem')
+
+ cmd = ['openssl', 'x509', '-in', cert, '-outform', 'DER']
+ proc = common.Run(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
+ expected, _ = proc.communicate()
+ self.assertEqual(0, proc.returncode)
+
+ with open(cert) as cert_fp:
+ actual = common.ParseCertificate(cert_fp.read())
+ self.assertEqual(expected, actual)
+
+
+class CommonUtilsTest(unittest.TestCase):
+
+ def tearDown(self):
+ common.Cleanup()
+
+ def test_GetSparseImage_emptyBlockMapFile(self):
+ target_files = common.MakeTempFile(prefix='target_files-', suffix='.zip')
+ with zipfile.ZipFile(target_files, 'w') as target_files_zip:
+ target_files_zip.write(
+ test_utils.construct_sparse_image([
+ (0xCAC1, 6),
+ (0xCAC3, 3),
+ (0xCAC1, 4)]),
+ arcname='IMAGES/system.img')
+ target_files_zip.writestr('IMAGES/system.map', '')
+ target_files_zip.writestr('SYSTEM/file1', os.urandom(4096 * 8))
+ target_files_zip.writestr('SYSTEM/file2', os.urandom(4096 * 3))
+
+ tempdir = common.UnzipTemp(target_files)
+ with zipfile.ZipFile(target_files, 'r') as input_zip:
+ sparse_image = common.GetSparseImage('system', tempdir, input_zip, False)
+
+ self.assertDictEqual(
+ {
+ '__COPY': RangeSet("0"),
+ '__NONZERO-0': RangeSet("1-5 9-12"),
+ },
+ sparse_image.file_map)
+
+ def test_GetSparseImage_invalidImageName(self):
+ self.assertRaises(
+ AssertionError, common.GetSparseImage, 'system2', None, None, False)
+ self.assertRaises(
+ AssertionError, common.GetSparseImage, 'unknown', None, None, False)
+
+ def test_GetSparseImage_missingBlockMapFile(self):
+ target_files = common.MakeTempFile(prefix='target_files-', suffix='.zip')
+ with zipfile.ZipFile(target_files, 'w') as target_files_zip:
+ target_files_zip.write(
+ test_utils.construct_sparse_image([
+ (0xCAC1, 6),
+ (0xCAC3, 3),
+ (0xCAC1, 4)]),
+ arcname='IMAGES/system.img')
+ target_files_zip.writestr('SYSTEM/file1', os.urandom(4096 * 8))
+ target_files_zip.writestr('SYSTEM/file2', os.urandom(4096 * 3))
+
+ tempdir = common.UnzipTemp(target_files)
+ with zipfile.ZipFile(target_files, 'r') as input_zip:
+ self.assertRaises(
+ AssertionError, common.GetSparseImage, 'system', tempdir, input_zip,
+ False)
+
+ def test_GetSparseImage_sharedBlocks_notAllowed(self):
+ """Tests the case of having overlapping blocks but disallowed."""
+ target_files = common.MakeTempFile(prefix='target_files-', suffix='.zip')
+ with zipfile.ZipFile(target_files, 'w') as target_files_zip:
+ target_files_zip.write(
+ test_utils.construct_sparse_image([(0xCAC2, 16)]),
+ arcname='IMAGES/system.img')
+ # Block 10 is shared between two files.
+ target_files_zip.writestr(
+ 'IMAGES/system.map',
+ '\n'.join([
+ '/system/file1 1-5 9-10',
+ '/system/file2 10-12']))
+ target_files_zip.writestr('SYSTEM/file1', os.urandom(4096 * 7))
+ target_files_zip.writestr('SYSTEM/file2', os.urandom(4096 * 3))
+
+ tempdir = common.UnzipTemp(target_files)
+ with zipfile.ZipFile(target_files, 'r') as input_zip:
+ self.assertRaises(
+ AssertionError, common.GetSparseImage, 'system', tempdir, input_zip,
+ False)
+
+ def test_GetSparseImage_sharedBlocks_allowed(self):
+ """Tests the case for target using BOARD_EXT4_SHARE_DUP_BLOCKS := true."""
+ target_files = common.MakeTempFile(prefix='target_files-', suffix='.zip')
+ with zipfile.ZipFile(target_files, 'w') as target_files_zip:
+ # Construct an image with a care_map of "0-5 9-12".
+ target_files_zip.write(
+ test_utils.construct_sparse_image([(0xCAC2, 16)]),
+ arcname='IMAGES/system.img')
+ # Block 10 is shared between two files.
+ target_files_zip.writestr(
+ 'IMAGES/system.map',
+ '\n'.join([
+ '/system/file1 1-5 9-10',
+ '/system/file2 10-12']))
+ target_files_zip.writestr('SYSTEM/file1', os.urandom(4096 * 7))
+ target_files_zip.writestr('SYSTEM/file2', os.urandom(4096 * 3))
+
+ tempdir = common.UnzipTemp(target_files)
+ with zipfile.ZipFile(target_files, 'r') as input_zip:
+ sparse_image = common.GetSparseImage('system', tempdir, input_zip, True)
+
+ self.assertDictEqual(
+ {
+ '__COPY': RangeSet("0"),
+ '__NONZERO-0': RangeSet("6-8 13-15"),
+ '/system/file1': RangeSet("1-5 9-10"),
+ '/system/file2': RangeSet("11-12"),
+ },
+ sparse_image.file_map)
+
+ # '/system/file2' should be marked with 'uses_shared_blocks', but not with
+ # 'incomplete'.
+ self.assertTrue(
+ sparse_image.file_map['/system/file2'].extra['uses_shared_blocks'])
+ self.assertNotIn(
+ 'incomplete', sparse_image.file_map['/system/file2'].extra)
+
+ # All other entries should look normal without any tags.
+ self.assertFalse(sparse_image.file_map['__COPY'].extra)
+ self.assertFalse(sparse_image.file_map['__NONZERO-0'].extra)
+ self.assertFalse(sparse_image.file_map['/system/file1'].extra)
+
+ def test_GetSparseImage_incompleteRanges(self):
+ """Tests the case of ext4 images with holes."""
+ target_files = common.MakeTempFile(prefix='target_files-', suffix='.zip')
+ with zipfile.ZipFile(target_files, 'w') as target_files_zip:
+ target_files_zip.write(
+ test_utils.construct_sparse_image([(0xCAC2, 16)]),
+ arcname='IMAGES/system.img')
+ target_files_zip.writestr(
+ 'IMAGES/system.map',
+ '\n'.join([
+ '/system/file1 1-5 9-10',
+ '/system/file2 11-12']))
+ target_files_zip.writestr('SYSTEM/file1', os.urandom(4096 * 7))
+ # '/system/file2' has less blocks listed (2) than actual (3).
+ target_files_zip.writestr('SYSTEM/file2', os.urandom(4096 * 3))
+
+ tempdir = common.UnzipTemp(target_files)
+ with zipfile.ZipFile(target_files, 'r') as input_zip:
+ sparse_image = common.GetSparseImage('system', tempdir, input_zip, False)
+
+ self.assertFalse(sparse_image.file_map['/system/file1'].extra)
+ self.assertTrue(sparse_image.file_map['/system/file2'].extra['incomplete'])
+
class InstallRecoveryScriptFormatTest(unittest.TestCase):
"""Checks the format of install-recovery.sh.
diff --git a/tools/releasetools/test_ota_from_target_files.py b/tools/releasetools/test_ota_from_target_files.py
index a4fa4f9..ee5bc53 100644
--- a/tools/releasetools/test_ota_from_target_files.py
+++ b/tools/releasetools/test_ota_from_target_files.py
@@ -24,7 +24,9 @@
import test_utils
from ota_from_target_files import (
_LoadOemDicts, BuildInfo, GetPackageMetadata,
- GetTargetFilesZipForSecondaryImages, Payload, PayloadSigner,
+ GetTargetFilesZipForSecondaryImages,
+ GetTargetFilesZipWithoutPostinstallConfig,
+ Payload, PayloadSigner, POSTINSTALL_CONFIG,
WriteFingerprintAssertion)
@@ -37,6 +39,16 @@
'META/update_engine_config.txt',
"PAYLOAD_MAJOR_VERSION=2\nPAYLOAD_MINOR_VERSION=4\n")
+ # META/postinstall_config.txt
+ target_files_zip.writestr(
+ POSTINSTALL_CONFIG,
+ '\n'.join([
+ "RUN_POSTINSTALL_system=true",
+ "POSTINSTALL_PATH_system=system/bin/otapreopt_script",
+ "FILESYSTEM_TYPE_system=ext4",
+ "POSTINSTALL_OPTIONAL_system=true",
+ ]))
+
# META/ab_partitions.txt
ab_partitions = ['boot', 'system', 'vendor']
target_files_zip.writestr(
@@ -539,10 +551,41 @@
self.assertIn('IMAGES/boot.img', namelist)
self.assertIn('IMAGES/system.img', namelist)
self.assertIn('IMAGES/vendor.img', namelist)
+ self.assertIn(POSTINSTALL_CONFIG, namelist)
self.assertNotIn('IMAGES/system_other.img', namelist)
self.assertNotIn('IMAGES/system.map', namelist)
+ def test_GetTargetFilesZipForSecondaryImages_skipPostinstall(self):
+ input_file = construct_target_files(secondary=True)
+ target_file = GetTargetFilesZipForSecondaryImages(
+ input_file, skip_postinstall=True)
+
+ with zipfile.ZipFile(target_file) as verify_zip:
+ namelist = verify_zip.namelist()
+
+ self.assertIn('META/ab_partitions.txt', namelist)
+ self.assertIn('IMAGES/boot.img', namelist)
+ self.assertIn('IMAGES/system.img', namelist)
+ self.assertIn('IMAGES/vendor.img', namelist)
+
+ self.assertNotIn('IMAGES/system_other.img', namelist)
+ self.assertNotIn('IMAGES/system.map', namelist)
+ self.assertNotIn(POSTINSTALL_CONFIG, namelist)
+
+ def test_GetTargetFilesZipWithoutPostinstallConfig(self):
+ input_file = construct_target_files()
+ target_file = GetTargetFilesZipWithoutPostinstallConfig(input_file)
+ with zipfile.ZipFile(target_file) as verify_zip:
+ self.assertNotIn(POSTINSTALL_CONFIG, verify_zip.namelist())
+
+ def test_GetTargetFilesZipWithoutPostinstallConfig_missingEntry(self):
+ input_file = construct_target_files()
+ common.ZipDelete(input_file, POSTINSTALL_CONFIG)
+ target_file = GetTargetFilesZipWithoutPostinstallConfig(input_file)
+ with zipfile.ZipFile(target_file) as verify_zip:
+ self.assertNotIn(POSTINSTALL_CONFIG, verify_zip.namelist())
+
class PayloadSignerTest(unittest.TestCase):
diff --git a/tools/releasetools/test_sign_target_files_apks.py b/tools/releasetools/test_sign_target_files_apks.py
index 726d6b9..26f9e10 100644
--- a/tools/releasetools/test_sign_target_files_apks.py
+++ b/tools/releasetools/test_sign_target_files_apks.py
@@ -16,18 +16,27 @@
from __future__ import print_function
-import tempfile
+import base64
+import os.path
import unittest
import zipfile
import common
-from sign_target_files_apks import EditTags, ReplaceVerityKeyId, RewriteProps
+import test_utils
+from sign_target_files_apks import (
+ EditTags, ReplaceCerts, ReplaceVerityKeyId, RewriteProps)
class SignTargetFilesApksTest(unittest.TestCase):
+ MAC_PERMISSIONS_XML = """<?xml version="1.0" encoding="iso-8859-1"?>
+<policy>
+ <signer signature="{}"><seinfo value="platform"/></signer>
+ <signer signature="{}"><seinfo value="media"/></signer>
+</policy>"""
+
def setUp(self):
- self.tempdir = common.MakeTempDir()
+ self.testdata_dir = test_utils.get_testdata_dir()
def tearDown(self):
common.Cleanup()
@@ -88,94 +97,31 @@
"androidboot.hardware=marlin user_debug=31 ehci-hcd.park=3 "
"lpm_levels.sleep_disabled=1 cma=32M@0-0xffffffff loop.max_part=7 "
"buildvariant=userdebug "
- "veritykeyid=id:485900563d272c46ae118605a47419ac09ca8c11\n")
+ "veritykeyid=id:d24f2590e9abab5cff5f59da4c4f0366e3f43e94\n")
- # From build/target/product/security/verity.x509.pem.
- VERITY_CERTIFICATE1 = """-----BEGIN CERTIFICATE-----
-MIID/TCCAuWgAwIBAgIJAJcPmDkJqolJMA0GCSqGSIb3DQEBBQUAMIGUMQswCQYD
-VQQGEwJVUzETMBEGA1UECAwKQ2FsaWZvcm5pYTEWMBQGA1UEBwwNTW91bnRhaW4g
-VmlldzEQMA4GA1UECgwHQW5kcm9pZDEQMA4GA1UECwwHQW5kcm9pZDEQMA4GA1UE
-AwwHQW5kcm9pZDEiMCAGCSqGSIb3DQEJARYTYW5kcm9pZEBhbmRyb2lkLmNvbTAe
-Fw0xNDExMDYxOTA3NDBaFw00MjAzMjQxOTA3NDBaMIGUMQswCQYDVQQGEwJVUzET
-MBEGA1UECAwKQ2FsaWZvcm5pYTEWMBQGA1UEBwwNTW91bnRhaW4gVmlldzEQMA4G
-A1UECgwHQW5kcm9pZDEQMA4GA1UECwwHQW5kcm9pZDEQMA4GA1UEAwwHQW5kcm9p
-ZDEiMCAGCSqGSIb3DQEJARYTYW5kcm9pZEBhbmRyb2lkLmNvbTCCASIwDQYJKoZI
-hvcNAQEBBQADggEPADCCAQoCggEBAOjreE0vTVSRenuzO9vnaWfk0eQzYab0gqpi
-6xAzi6dmD+ugoEKJmbPiuE5Dwf21isZ9uhUUu0dQM46dK4ocKxMRrcnmGxydFn6o
-fs3ODJMXOkv2gKXL/FdbEPdDbxzdu8z3yk+W67udM/fW7WbaQ3DO0knu+izKak/3
-T41c5uoXmQ81UNtAzRGzGchNVXMmWuTGOkg6U+0I2Td7K8yvUMWhAWPPpKLtVH9r
-AL5TzjYNR92izdKcz3AjRsI3CTjtpiVABGeX0TcjRSuZB7K9EK56HV+OFNS6I1NP
-jdD7FIShyGlqqZdUOkAUZYanbpgeT5N7QL6uuqcGpoTOkalu6kkCAwEAAaNQME4w
-HQYDVR0OBBYEFH5DM/m7oArf4O3peeKO0ZIEkrQPMB8GA1UdIwQYMBaAFH5DM/m7
-oArf4O3peeKO0ZIEkrQPMAwGA1UdEwQFMAMBAf8wDQYJKoZIhvcNAQEFBQADggEB
-AHO3NSvDE5jFvMehGGtS8BnFYdFKRIglDMc4niWSzhzOVYRH4WajxdtBWc5fx0ix
-NF/+hVKVhP6AIOQa+++sk+HIi7RvioPPbhjcsVlZe7cUEGrLSSveGouQyc+j0+m6
-JF84kszIl5GGNMTnx0XRPO+g8t6h5LWfnVydgZfpGRRg+WHewk1U2HlvTjIceb0N
-dcoJ8WKJAFWdcuE7VIm4w+vF/DYX/A2Oyzr2+QRhmYSv1cusgAeC1tvH4ap+J1Lg
-UnOu5Kh/FqPLLSwNVQp4Bu7b9QFfqK8Moj84bj88NqRGZgDyqzuTrFxn6FW7dmyA
-yttuAJAEAymk1mipd9+zp38=
------END CERTIFICATE-----
-"""
-
- # From build/target/product/security/testkey.x509.pem.
- VERITY_CERTIFICATE2 = """-----BEGIN CERTIFICATE-----
-MIIEqDCCA5CgAwIBAgIJAJNurL4H8gHfMA0GCSqGSIb3DQEBBQUAMIGUMQswCQYD
-VQQGEwJVUzETMBEGA1UECBMKQ2FsaWZvcm5pYTEWMBQGA1UEBxMNTW91bnRhaW4g
-VmlldzEQMA4GA1UEChMHQW5kcm9pZDEQMA4GA1UECxMHQW5kcm9pZDEQMA4GA1UE
-AxMHQW5kcm9pZDEiMCAGCSqGSIb3DQEJARYTYW5kcm9pZEBhbmRyb2lkLmNvbTAe
-Fw0wODAyMjkwMTMzNDZaFw0zNTA3MTcwMTMzNDZaMIGUMQswCQYDVQQGEwJVUzET
-MBEGA1UECBMKQ2FsaWZvcm5pYTEWMBQGA1UEBxMNTW91bnRhaW4gVmlldzEQMA4G
-A1UEChMHQW5kcm9pZDEQMA4GA1UECxMHQW5kcm9pZDEQMA4GA1UEAxMHQW5kcm9p
-ZDEiMCAGCSqGSIb3DQEJARYTYW5kcm9pZEBhbmRyb2lkLmNvbTCCASAwDQYJKoZI
-hvcNAQEBBQADggENADCCAQgCggEBANaTGQTexgskse3HYuDZ2CU+Ps1s6x3i/waM
-qOi8qM1r03hupwqnbOYOuw+ZNVn/2T53qUPn6D1LZLjk/qLT5lbx4meoG7+yMLV4
-wgRDvkxyGLhG9SEVhvA4oU6Jwr44f46+z4/Kw9oe4zDJ6pPQp8PcSvNQIg1QCAcy
-4ICXF+5qBTNZ5qaU7Cyz8oSgpGbIepTYOzEJOmc3Li9kEsBubULxWBjf/gOBzAzU
-RNps3cO4JFgZSAGzJWQTT7/emMkod0jb9WdqVA2BVMi7yge54kdVMxHEa5r3b97s
-zI5p58ii0I54JiCUP5lyfTwE/nKZHZnfm644oLIXf6MdW2r+6R8CAQOjgfwwgfkw
-HQYDVR0OBBYEFEhZAFY9JyxGrhGGBaR0GawJyowRMIHJBgNVHSMEgcEwgb6AFEhZ
-AFY9JyxGrhGGBaR0GawJyowRoYGapIGXMIGUMQswCQYDVQQGEwJVUzETMBEGA1UE
-CBMKQ2FsaWZvcm5pYTEWMBQGA1UEBxMNTW91bnRhaW4gVmlldzEQMA4GA1UEChMH
-QW5kcm9pZDEQMA4GA1UECxMHQW5kcm9pZDEQMA4GA1UEAxMHQW5kcm9pZDEiMCAG
-CSqGSIb3DQEJARYTYW5kcm9pZEBhbmRyb2lkLmNvbYIJAJNurL4H8gHfMAwGA1Ud
-EwQFMAMBAf8wDQYJKoZIhvcNAQEFBQADggEBAHqvlozrUMRBBVEY0NqrrwFbinZa
-J6cVosK0TyIUFf/azgMJWr+kLfcHCHJsIGnlw27drgQAvilFLAhLwn62oX6snb4Y
-LCBOsVMR9FXYJLZW2+TcIkCRLXWG/oiVHQGo/rWuWkJgU134NDEFJCJGjDbiLCpe
-+ZTWHdcwauTJ9pUbo8EvHRkU3cYfGmLaLfgn9gP+pWA7LFQNvXwBnDa6sppCccEX
-31I828XzgXpJ4O+mDL1/dBd+ek8ZPUP0IgdyZm5MTYPhvVqGCHzzTy3sIeJFymwr
-sBbmg2OAUNLEMO6nwmocSdN2ClirfxqCzJOLSDE4QyS9BAH6EhY6UFcOaE0=
------END CERTIFICATE-----
-"""
-
- input_file = tempfile.NamedTemporaryFile(
- delete=False, suffix='.zip', dir=self.tempdir)
- with zipfile.ZipFile(input_file.name, 'w') as input_zip:
+ input_file = common.MakeTempFile(suffix='.zip')
+ with zipfile.ZipFile(input_file, 'w') as input_zip:
input_zip.writestr('BOOT/cmdline', BOOT_CMDLINE1)
# Test with the first certificate.
- cert_file = tempfile.NamedTemporaryFile(
- delete=False, suffix='.x509.pem', dir=self.tempdir)
- cert_file.write(VERITY_CERTIFICATE1)
- cert_file.close()
+ cert_file = os.path.join(self.testdata_dir, 'verity.x509.pem')
- output_file = tempfile.NamedTemporaryFile(
- delete=False, suffix='.zip', dir=self.tempdir)
- with zipfile.ZipFile(input_file.name, 'r') as input_zip, \
- zipfile.ZipFile(output_file.name, 'w') as output_zip:
- ReplaceVerityKeyId(input_zip, output_zip, cert_file.name)
+ output_file = common.MakeTempFile(suffix='.zip')
+ with zipfile.ZipFile(input_file, 'r') as input_zip, \
+ zipfile.ZipFile(output_file, 'w') as output_zip:
+ ReplaceVerityKeyId(input_zip, output_zip, cert_file)
- with zipfile.ZipFile(output_file.name) as output_zip:
+ with zipfile.ZipFile(output_file) as output_zip:
self.assertEqual(BOOT_CMDLINE1, output_zip.read('BOOT/cmdline'))
# Test with the second certificate.
- with open(cert_file.name, 'w') as cert_file_fp:
- cert_file_fp.write(VERITY_CERTIFICATE2)
+ cert_file = os.path.join(self.testdata_dir, 'testkey.x509.pem')
- with zipfile.ZipFile(input_file.name, 'r') as input_zip, \
- zipfile.ZipFile(output_file.name, 'w') as output_zip:
- ReplaceVerityKeyId(input_zip, output_zip, cert_file.name)
+ with zipfile.ZipFile(input_file, 'r') as input_zip, \
+ zipfile.ZipFile(output_file, 'w') as output_zip:
+ ReplaceVerityKeyId(input_zip, output_zip, cert_file)
- with zipfile.ZipFile(output_file.name) as output_zip:
+ with zipfile.ZipFile(output_file) as output_zip:
self.assertEqual(BOOT_CMDLINE2, output_zip.read('BOOT/cmdline'))
def test_ReplaceVerityKeyId_no_veritykeyid(self):
@@ -184,16 +130,84 @@
"lpm_levels.sleep_disabled=1 msm_poweroff.download_mode=0 "
"loop.max_part=7\n")
- input_file = tempfile.NamedTemporaryFile(
- delete=False, suffix='.zip', dir=self.tempdir)
- with zipfile.ZipFile(input_file.name, 'w') as input_zip:
+ input_file = common.MakeTempFile(suffix='.zip')
+ with zipfile.ZipFile(input_file, 'w') as input_zip:
input_zip.writestr('BOOT/cmdline', BOOT_CMDLINE)
- output_file = tempfile.NamedTemporaryFile(
- delete=False, suffix='.zip', dir=self.tempdir)
- with zipfile.ZipFile(input_file.name, 'r') as input_zip, \
- zipfile.ZipFile(output_file.name, 'w') as output_zip:
+ output_file = common.MakeTempFile(suffix='.zip')
+ with zipfile.ZipFile(input_file, 'r') as input_zip, \
+ zipfile.ZipFile(output_file, 'w') as output_zip:
ReplaceVerityKeyId(input_zip, output_zip, None)
- with zipfile.ZipFile(output_file.name) as output_zip:
+ with zipfile.ZipFile(output_file) as output_zip:
self.assertEqual(BOOT_CMDLINE, output_zip.read('BOOT/cmdline'))
+
+ def test_ReplaceCerts(self):
+ cert1_path = os.path.join(self.testdata_dir, 'platform.x509.pem')
+ with open(cert1_path) as cert1_fp:
+ cert1 = cert1_fp.read()
+ cert2_path = os.path.join(self.testdata_dir, 'media.x509.pem')
+ with open(cert2_path) as cert2_fp:
+ cert2 = cert2_fp.read()
+ cert3_path = os.path.join(self.testdata_dir, 'testkey.x509.pem')
+ with open(cert3_path) as cert3_fp:
+ cert3 = cert3_fp.read()
+
+ # Replace cert1 with cert3.
+ input_xml = self.MAC_PERMISSIONS_XML.format(
+ base64.b16encode(common.ParseCertificate(cert1)).lower(),
+ base64.b16encode(common.ParseCertificate(cert2)).lower())
+
+ output_xml = self.MAC_PERMISSIONS_XML.format(
+ base64.b16encode(common.ParseCertificate(cert3)).lower(),
+ base64.b16encode(common.ParseCertificate(cert2)).lower())
+
+ common.OPTIONS.key_map = {
+ cert1_path[:-9] : cert3_path[:-9],
+ }
+
+ self.assertEqual(output_xml, ReplaceCerts(input_xml))
+
+ def test_ReplaceCerts_duplicateEntries(self):
+ cert1_path = os.path.join(self.testdata_dir, 'platform.x509.pem')
+ with open(cert1_path) as cert1_fp:
+ cert1 = cert1_fp.read()
+ cert2_path = os.path.join(self.testdata_dir, 'media.x509.pem')
+ with open(cert2_path) as cert2_fp:
+ cert2 = cert2_fp.read()
+
+ # Replace cert1 with cert2, which leads to duplicate entries.
+ input_xml = self.MAC_PERMISSIONS_XML.format(
+ base64.b16encode(common.ParseCertificate(cert1)).lower(),
+ base64.b16encode(common.ParseCertificate(cert2)).lower())
+
+ common.OPTIONS.key_map = {
+ cert1_path[:-9] : cert2_path[:-9],
+ }
+ self.assertRaises(AssertionError, ReplaceCerts, input_xml)
+
+ def test_ReplaceCerts_skipNonExistentCerts(self):
+ cert1_path = os.path.join(self.testdata_dir, 'platform.x509.pem')
+ with open(cert1_path) as cert1_fp:
+ cert1 = cert1_fp.read()
+ cert2_path = os.path.join(self.testdata_dir, 'media.x509.pem')
+ with open(cert2_path) as cert2_fp:
+ cert2 = cert2_fp.read()
+ cert3_path = os.path.join(self.testdata_dir, 'testkey.x509.pem')
+ with open(cert3_path) as cert3_fp:
+ cert3 = cert3_fp.read()
+
+ input_xml = self.MAC_PERMISSIONS_XML.format(
+ base64.b16encode(common.ParseCertificate(cert1)).lower(),
+ base64.b16encode(common.ParseCertificate(cert2)).lower())
+
+ output_xml = self.MAC_PERMISSIONS_XML.format(
+ base64.b16encode(common.ParseCertificate(cert3)).lower(),
+ base64.b16encode(common.ParseCertificate(cert2)).lower())
+
+ common.OPTIONS.key_map = {
+ cert1_path[:-9] : cert3_path[:-9],
+ 'non-existent' : cert3_path[:-9],
+ cert2_path[:-9] : 'non-existent',
+ }
+ self.assertEqual(output_xml, ReplaceCerts(input_xml))
diff --git a/tools/releasetools/test_utils.py b/tools/releasetools/test_utils.py
index ec53731..e64355b 100644
--- a/tools/releasetools/test_utils.py
+++ b/tools/releasetools/test_utils.py
@@ -18,7 +18,11 @@
Utils for running unittests.
"""
+import os
import os.path
+import struct
+
+import common
def get_testdata_dir():
@@ -26,3 +30,67 @@
# The script dir is the one we want, which could be different from pwd.
current_dir = os.path.dirname(os.path.realpath(__file__))
return os.path.join(current_dir, 'testdata')
+
+
+def construct_sparse_image(chunks):
+ """Returns a sparse image file constructed from the given chunks.
+
+ From system/core/libsparse/sparse_format.h.
+ typedef struct sparse_header {
+ __le32 magic; // 0xed26ff3a
+ __le16 major_version; // (0x1) - reject images with higher major versions
+ __le16 minor_version; // (0x0) - allow images with higher minor versions
+ __le16 file_hdr_sz; // 28 bytes for first revision of the file format
+ __le16 chunk_hdr_sz; // 12 bytes for first revision of the file format
+ __le32 blk_sz; // block size in bytes, must be a multiple of 4 (4096)
+ __le32 total_blks; // total blocks in the non-sparse output image
+ __le32 total_chunks; // total chunks in the sparse input image
+ __le32 image_checksum; // CRC32 checksum of the original data, counting
+ // "don't care" as 0. Standard 802.3 polynomial,
+ // use a Public Domain table implementation
+ } sparse_header_t;
+
+ typedef struct chunk_header {
+ __le16 chunk_type; // 0xCAC1 -> raw; 0xCAC2 -> fill;
+ // 0xCAC3 -> don't care
+ __le16 reserved1;
+ __le32 chunk_sz; // in blocks in output image
+ __le32 total_sz; // in bytes of chunk input file including chunk header
+ // and data
+ } chunk_header_t;
+
+ Args:
+ chunks: A list of chunks to be written. Each entry should be a tuple of
+ (chunk_type, block_number).
+
+ Returns:
+ Filename of the created sparse image.
+ """
+ SPARSE_HEADER_MAGIC = 0xED26FF3A
+ SPARSE_HEADER_FORMAT = "<I4H4I"
+ CHUNK_HEADER_FORMAT = "<2H2I"
+
+ sparse_image = common.MakeTempFile(prefix='sparse-', suffix='.img')
+ with open(sparse_image, 'wb') as fp:
+ fp.write(struct.pack(
+ SPARSE_HEADER_FORMAT, SPARSE_HEADER_MAGIC, 1, 0, 28, 12, 4096,
+ sum(chunk[1] for chunk in chunks),
+ len(chunks), 0))
+
+ for chunk in chunks:
+ data_size = 0
+ if chunk[0] == 0xCAC1:
+ data_size = 4096 * chunk[1]
+ elif chunk[0] == 0xCAC2:
+ data_size = 4
+ elif chunk[0] == 0xCAC3:
+ pass
+ else:
+ assert False, "Unsupported chunk type: {}".format(chunk[0])
+
+ fp.write(struct.pack(
+ CHUNK_HEADER_FORMAT, chunk[0], 0, chunk[1], data_size + 12))
+ if data_size != 0:
+ fp.write(os.urandom(data_size))
+
+ return sparse_image
diff --git a/tools/releasetools/testdata/media.x509.pem b/tools/releasetools/testdata/media.x509.pem
new file mode 100644
index 0000000..98cd443
--- /dev/null
+++ b/tools/releasetools/testdata/media.x509.pem
@@ -0,0 +1,27 @@
+-----BEGIN CERTIFICATE-----
+MIIEqDCCA5CgAwIBAgIJAPK5jmEjVyxOMA0GCSqGSIb3DQEBBAUAMIGUMQswCQYD
+VQQGEwJVUzETMBEGA1UECBMKQ2FsaWZvcm5pYTEWMBQGA1UEBxMNTW91bnRhaW4g
+VmlldzEQMA4GA1UEChMHQW5kcm9pZDEQMA4GA1UECxMHQW5kcm9pZDEQMA4GA1UE
+AxMHQW5kcm9pZDEiMCAGCSqGSIb3DQEJARYTYW5kcm9pZEBhbmRyb2lkLmNvbTAe
+Fw0wODA0MTUyMzQwNTdaFw0zNTA5MDEyMzQwNTdaMIGUMQswCQYDVQQGEwJVUzET
+MBEGA1UECBMKQ2FsaWZvcm5pYTEWMBQGA1UEBxMNTW91bnRhaW4gVmlldzEQMA4G
+A1UEChMHQW5kcm9pZDEQMA4GA1UECxMHQW5kcm9pZDEQMA4GA1UEAxMHQW5kcm9p
+ZDEiMCAGCSqGSIb3DQEJARYTYW5kcm9pZEBhbmRyb2lkLmNvbTCCASAwDQYJKoZI
+hvcNAQEBBQADggENADCCAQgCggEBAK4lDFoW75f8KGmsZRsyF8w2ug6GlkFo1YoE
+n0DOhYZxI6P/tPbZScM88to6BcI+rKpX2AOImxdZvPWefG8hiQriUIW37VaqYmwJ
+ie+czTY2LKDo0blgP9TYModnkmzMCQxot3Wuf/MJNMw2nvKFWiZn3wxmf9DHz12O
+umVYBnNzA7tiRybquu37cvB+16dqs8uaOBxLfc2AmxQNiR8AITvkAfWNagamHq3D
+qcLxxlZyhbCa4JNCpm+kIer5Ot91c6AowzHXBgGrOvfMhAM+znx3KjpbhrDb6dd3
+w6SKqYAe3O4ngVifRNnkETl5YAV2qZQQuoEJElna2YxsaP94S48CAQOjgfwwgfkw
+HQYDVR0OBBYEFMopPKqLwO0+VC7vQgWiv/K1fk11MIHJBgNVHSMEgcEwgb6AFMop
+PKqLwO0+VC7vQgWiv/K1fk11oYGapIGXMIGUMQswCQYDVQQGEwJVUzETMBEGA1UE
+CBMKQ2FsaWZvcm5pYTEWMBQGA1UEBxMNTW91bnRhaW4gVmlldzEQMA4GA1UEChMH
+QW5kcm9pZDEQMA4GA1UECxMHQW5kcm9pZDEQMA4GA1UEAxMHQW5kcm9pZDEiMCAG
+CSqGSIb3DQEJARYTYW5kcm9pZEBhbmRyb2lkLmNvbYIJAPK5jmEjVyxOMAwGA1Ud
+EwQFMAMBAf8wDQYJKoZIhvcNAQEEBQADggEBAITelRbV5KhyF6c9qEhwSPUzc6X3
+M/OQ1hvfPMnlJRYlv8qnwxWcriddFyqa4eh21UWBJ6xUL2gpDdUQwAKdj1Hg7hVr
+e3tazbOUJBuOx4t05cQsXK+uFWyvW9GZojonUk2gct6743hGSlM2MLDk0P+34I7L
+cB+ttjecdEZ/bgDG7YiFlTgHkgOHVgB4csjjAHr0I6V6LKs6KChptkxLe9X8GH0K
+fiQVll1ark4Hpt91G0p16Xk8kYphK4HNC2KK7gFo3ETkexDTWTJghJ1q321yfcJE
+RMIh0/nsw2jK0HmZ8rgQW8HyDTjUEGbMFBHCV6lupDSfV0ZWVQfk6AIKGoE=
+-----END CERTIFICATE-----
diff --git a/tools/releasetools/testdata/platform.x509.pem b/tools/releasetools/testdata/platform.x509.pem
new file mode 100644
index 0000000..087f02e
--- /dev/null
+++ b/tools/releasetools/testdata/platform.x509.pem
@@ -0,0 +1,27 @@
+-----BEGIN CERTIFICATE-----
+MIIEqDCCA5CgAwIBAgIJALOZgIbQVs/6MA0GCSqGSIb3DQEBBAUAMIGUMQswCQYD
+VQQGEwJVUzETMBEGA1UECBMKQ2FsaWZvcm5pYTEWMBQGA1UEBxMNTW91bnRhaW4g
+VmlldzEQMA4GA1UEChMHQW5kcm9pZDEQMA4GA1UECxMHQW5kcm9pZDEQMA4GA1UE
+AxMHQW5kcm9pZDEiMCAGCSqGSIb3DQEJARYTYW5kcm9pZEBhbmRyb2lkLmNvbTAe
+Fw0wODA0MTUyMjQwNTBaFw0zNTA5MDEyMjQwNTBaMIGUMQswCQYDVQQGEwJVUzET
+MBEGA1UECBMKQ2FsaWZvcm5pYTEWMBQGA1UEBxMNTW91bnRhaW4gVmlldzEQMA4G
+A1UEChMHQW5kcm9pZDEQMA4GA1UECxMHQW5kcm9pZDEQMA4GA1UEAxMHQW5kcm9p
+ZDEiMCAGCSqGSIb3DQEJARYTYW5kcm9pZEBhbmRyb2lkLmNvbTCCASAwDQYJKoZI
+hvcNAQEBBQADggENADCCAQgCggEBAJx4BZKsDV04HN6qZezIpgBuNkgMbXIHsSAR
+vlCGOqvitV0Amt9xRtbyICKAx81Ne9smJDuKgGwms0sTdSOkkmgiSQTcAUk+fArP
+GgXIdPabA3tgMJ2QdNJCgOFrrSqHNDYZUer3KkgtCbIEsYdeEqyYwap3PWgAuer9
+5W1Yvtjo2hb5o2AJnDeoNKbf7be2tEoEngeiafzPLFSW8s821k35CjuNjzSjuqtM
+9TNxqydxmzulh1StDFP8FOHbRdUeI0+76TybpO35zlQmE1DsU1YHv2mi/0qgfbX3
+6iANCabBtJ4hQC+J7RGQiTqrWpGA8VLoL4WkV1PPX8GQccXuyCcCAQOjgfwwgfkw
+HQYDVR0OBBYEFE/koLPdnLop9x1yh8Tnw48ghsKZMIHJBgNVHSMEgcEwgb6AFE/k
+oLPdnLop9x1yh8Tnw48ghsKZoYGapIGXMIGUMQswCQYDVQQGEwJVUzETMBEGA1UE
+CBMKQ2FsaWZvcm5pYTEWMBQGA1UEBxMNTW91bnRhaW4gVmlldzEQMA4GA1UEChMH
+QW5kcm9pZDEQMA4GA1UECxMHQW5kcm9pZDEQMA4GA1UEAxMHQW5kcm9pZDEiMCAG
+CSqGSIb3DQEJARYTYW5kcm9pZEBhbmRyb2lkLmNvbYIJALOZgIbQVs/6MAwGA1Ud
+EwQFMAMBAf8wDQYJKoZIhvcNAQEEBQADggEBAFclUbjZOh9z3g9tRp+G2tZwFAAp
+PIigzXzXeLc9r8wZf6t25iEuVsHHYc/EL9cz3lLFCuCIFM78CjtaGkNGBU2Cnx2C
+tCsgSL+ItdFJKe+F9g7dEtctVWV+IuPoXQTIMdYT0Zk4u4mCJH+jISVroS0dao+S
+6h2xw3Mxe6DAN/DRr/ZFrvIkl5+6bnoUvAJccbmBOM7z3fwFlhfPJIRc97QNY4L3
+J17XOElatuWTG5QhdlxJG3L7aOCA29tYwgKdNHyLMozkPvaosVUz7fvpib1qSN1L
+IC7alMarjdW4OZID2q4u1EYjLk/pvZYTlMYwDlE448/Shebk5INTjLixs1c=
+-----END CERTIFICATE-----
diff --git a/tools/releasetools/testdata/verity.x509.pem b/tools/releasetools/testdata/verity.x509.pem
new file mode 100644
index 0000000..86399c3
--- /dev/null
+++ b/tools/releasetools/testdata/verity.x509.pem
@@ -0,0 +1,24 @@
+-----BEGIN CERTIFICATE-----
+MIID/TCCAuWgAwIBAgIJAJcPmDkJqolJMA0GCSqGSIb3DQEBBQUAMIGUMQswCQYD
+VQQGEwJVUzETMBEGA1UECAwKQ2FsaWZvcm5pYTEWMBQGA1UEBwwNTW91bnRhaW4g
+VmlldzEQMA4GA1UECgwHQW5kcm9pZDEQMA4GA1UECwwHQW5kcm9pZDEQMA4GA1UE
+AwwHQW5kcm9pZDEiMCAGCSqGSIb3DQEJARYTYW5kcm9pZEBhbmRyb2lkLmNvbTAe
+Fw0xNDExMDYxOTA3NDBaFw00MjAzMjQxOTA3NDBaMIGUMQswCQYDVQQGEwJVUzET
+MBEGA1UECAwKQ2FsaWZvcm5pYTEWMBQGA1UEBwwNTW91bnRhaW4gVmlldzEQMA4G
+A1UECgwHQW5kcm9pZDEQMA4GA1UECwwHQW5kcm9pZDEQMA4GA1UEAwwHQW5kcm9p
+ZDEiMCAGCSqGSIb3DQEJARYTYW5kcm9pZEBhbmRyb2lkLmNvbTCCASIwDQYJKoZI
+hvcNAQEBBQADggEPADCCAQoCggEBAOjreE0vTVSRenuzO9vnaWfk0eQzYab0gqpi
+6xAzi6dmD+ugoEKJmbPiuE5Dwf21isZ9uhUUu0dQM46dK4ocKxMRrcnmGxydFn6o
+fs3ODJMXOkv2gKXL/FdbEPdDbxzdu8z3yk+W67udM/fW7WbaQ3DO0knu+izKak/3
+T41c5uoXmQ81UNtAzRGzGchNVXMmWuTGOkg6U+0I2Td7K8yvUMWhAWPPpKLtVH9r
+AL5TzjYNR92izdKcz3AjRsI3CTjtpiVABGeX0TcjRSuZB7K9EK56HV+OFNS6I1NP
+jdD7FIShyGlqqZdUOkAUZYanbpgeT5N7QL6uuqcGpoTOkalu6kkCAwEAAaNQME4w
+HQYDVR0OBBYEFH5DM/m7oArf4O3peeKO0ZIEkrQPMB8GA1UdIwQYMBaAFH5DM/m7
+oArf4O3peeKO0ZIEkrQPMAwGA1UdEwQFMAMBAf8wDQYJKoZIhvcNAQEFBQADggEB
+AHO3NSvDE5jFvMehGGtS8BnFYdFKRIglDMc4niWSzhzOVYRH4WajxdtBWc5fx0ix
+NF/+hVKVhP6AIOQa+++sk+HIi7RvioPPbhjcsVlZe7cUEGrLSSveGouQyc+j0+m6
+JF84kszIl5GGNMTnx0XRPO+g8t6h5LWfnVydgZfpGRRg+WHewk1U2HlvTjIceb0N
+dcoJ8WKJAFWdcuE7VIm4w+vF/DYX/A2Oyzr2+QRhmYSv1cusgAeC1tvH4ap+J1Lg
+UnOu5Kh/FqPLLSwNVQp4Bu7b9QFfqK8Moj84bj88NqRGZgDyqzuTrFxn6FW7dmyA
+yttuAJAEAymk1mipd9+zp38=
+-----END CERTIFICATE-----
diff --git a/tools/releasetools/validate_target_files.py b/tools/releasetools/validate_target_files.py
index 1b3eb73..f417129 100755
--- a/tools/releasetools/validate_target_files.py
+++ b/tools/releasetools/validate_target_files.py
@@ -192,9 +192,10 @@
datefmt=date_format)
logging.info("Unzipping the input target_files.zip: %s", args[0])
- input_tmp, input_zip = common.UnzipTemp(args[0])
+ input_tmp = common.UnzipTemp(args[0])
- ValidateFileConsistency(input_zip, input_tmp)
+ with zipfile.ZipFile(args[0], 'r') as input_zip:
+ ValidateFileConsistency(input_zip, input_tmp)
info_dict = common.LoadInfoDict(input_tmp)
ValidateInstallRecoveryScript(input_tmp, info_dict)