am 75cdce7b: (-s ours) am 98da9070: am 307a8ec9: am 8eaddc6c: am 071a2f83: (-s ours) am ea2f4e3c: am daee7822: Integrate build_font.py [DO NOT MERGE]
* commit '75cdce7bd6f821eb4103f496fcd3cfbf82c66269':
Integrate build_font.py [DO NOT MERGE]
diff --git a/CleanSpec.mk b/CleanSpec.mk
index a9fea3e..6f436ce 100644
--- a/CleanSpec.mk
+++ b/CleanSpec.mk
@@ -200,6 +200,12 @@
$(call add-clean-step, rm -rf $(PRODUCT_OUT)/system/app/SprintDM.apk)
$(call add-clean-step, rm -rf $(PRODUCT_OUT)/system/etc/omadm)
+# GCC 4.8
+$(call add-clean-step, rm -rf $(PRODUCT_OUT)/obj/STATIC_LIBRARIES)
+$(call add-clean-step, rm -rf $(PRODUCT_OUT)/obj/SHARED_LIBRARIES)
+$(call add-clean-step, rm -rf $(PRODUCT_OUT)/obj/EXECUTABLES)
+$(call add-clean-step, rm -rf $(PRODUCT_OUT)/obj/lib/*.o)
+
# KLP I mean KitKat now API 19.
$(call add-clean-step, rm -rf $(PRODUCT_OUT)/system/build.prop)
$(call add-clean-step, rm -rf $(PRODUCT_OUT)/system/app/*)
@@ -223,17 +229,26 @@
$(call add-clean-step, rm -rf $(PRODUCT_OUT)/system/app/*)
$(call add-clean-step, rm -rf $(PRODUCT_OUT)/obj/APPS/*)
+# L development
+$(call add-clean-step, rm -rf $(PRODUCT_OUT)/system/build.prop)
+$(call add-clean-step, rm -rf $(PRODUCT_OUT)/system/app/*)
+$(call add-clean-step, rm -rf $(PRODUCT_OUT)/obj/APPS/*)
+
+# L development
+$(call add-clean-step, rm -rf $(PRODUCT_OUT)/system/build.prop)
+$(call add-clean-step, rm -rf $(PRODUCT_OUT)/system/app/*)
+$(call add-clean-step, rm -rf $(PRODUCT_OUT)/obj/APPS/*)
+
# Add ro.product.cpu.abilist{32,64} to build.prop.
$(call add-clean-step, rm -rf $(PRODUCT_OUT)/system/build.prop)
+$(call add-clean-step, rm -rf $(PRODUCT_OUT)/system/build.prop)
+
+# Unset TARGET_PREFER_32_BIT_APPS for 64 bit targets.
+$(call add-clean-step, rm -rf $(PRODUCT_OUT)/system/build.prop)
# Adding dalvik.vm.dex2oat-flags to eng builds
$(call add-clean-step, rm -rf $(PRODUCT_OUT)/system/build.prop)
-# 4.4.4 (KKWT)
-$(call add-clean-step, rm -rf $(PRODUCT_OUT)/system/build.prop)
-$(call add-clean-step, rm -rf $(PRODUCT_OUT)/system/app/*)
-$(call add-clean-step, rm -rf $(PRODUCT_OUT)/obj/APPS/*)
-
# Unset TARGET_PREFER_32_BIT_APPS for 64 bit targets.
$(call add-clean-step, rm -rf $(PRODUCT_OUT)/system/build.prop)
@@ -250,6 +265,14 @@
# Switching to 32-bit-by-default host multilib build
$(call add-clean-step, rm -rf $(HOST_OUT_INTERMEDIATES))
+# KKWT has become API 20
+$(call add-clean-step, rm -rf $(PRODUCT_OUT)/system/build.prop)
+$(call add-clean-step, rm -rf $(PRODUCT_OUT)/system/app/*)
+$(call add-clean-step, rm -rf $(PRODUCT_OUT)/obj/APPS/*)
+
+# ims-common.jar added to BOOTCLASSPATH
+$(call add-clean-step, rm -rf $(PRODUCT_OUT)/obj/ETC/init.environ.rc_intermediates)
+
# Change ro.zygote for core_64_bit.mk from zygote32_64 to zygote64_32
$(call add-clean-step, rm -rf $(PRODUCT_OUT)/root/default.prop)
$(call add-clean-step, rm -rf $(PRODUCT_OUT)/recovery/root/default.prop)
diff --git a/core/Makefile b/core/Makefile
index 9a0fd7c..12d4915 100644
--- a/core/Makefile
+++ b/core/Makefile
@@ -22,7 +22,7 @@
# src:dest pair is the first one to match the same dest"
#$(1): the src:dest pair
define check-product-copy-files
-$(if $(filter %.apk, $(1)),$(error \
+$(if $(filter %.apk, $(call word-colon, 2, $(1))),$(error \
Prebuilt apk found in PRODUCT_COPY_FILES: $(1), use BUILD_PREBUILT instead!))
endef
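# Illustrative sketch (not part of the patch): word-colon picks the Nth
# colon-separated field, so the %.apk check above now looks only at the
# destination of a copy pair. With a hypothetical entry such as
example_pair := vendor/acme/prebuilt/Foo.apk:system/etc/Foo.dat
# $(call word-colon,2,$(example_pair)) expands to "system/etc/Foo.dat", the
# $(filter %.apk,...) result is empty and no error fires; a pair whose
# destination ends in .apk still hits the BUILD_PREBUILT error as before.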
# filter out the duplicate <source file>:<dest file> pairs.
@@ -109,21 +109,23 @@
build_desc := $(TARGET_PRODUCT)-$(TARGET_BUILD_VARIANT) $(PLATFORM_VERSION) $(BUILD_ID) $(BUILD_NUMBER) $(BUILD_VERSION_TAGS)
$(INSTALLED_BUILD_PROP_TARGET): PRIVATE_BUILD_DESC := $(build_desc)
-# The string used to uniquely identify this build; used by the OTA server.
+# The string used to uniquely identify the combined build and product; used by the OTA server.
ifeq (,$(strip $(BUILD_FINGERPRINT)))
- ifneq ($(filter eng.%,$(BUILD_NUMBER)),)
- # Trim down BUILD_FINGERPRINT: the default BUILD_NUMBER makes it easily exceed
- # the Android system property length limit (PROPERTY_VALUE_MAX=92).
- BF_BUILD_NUMBER := $(USER)$(shell date +%m%d%H%M)
- else
- BF_BUILD_NUMBER := $(BUILD_NUMBER)
- endif
- BUILD_FINGERPRINT := $(PRODUCT_BRAND)/$(TARGET_PRODUCT)/$(TARGET_DEVICE):$(PLATFORM_VERSION)/$(BUILD_ID)/$(BF_BUILD_NUMBER):$(TARGET_BUILD_VARIANT)/$(BUILD_VERSION_TAGS)
+ BUILD_FINGERPRINT := $(PRODUCT_BRAND)/$(TARGET_PRODUCT)/$(TARGET_DEVICE):$(PLATFORM_VERSION)/$(BUILD_ID)/$(BUILD_NUMBER):$(TARGET_BUILD_VARIANT)/$(BUILD_VERSION_TAGS)
endif
ifneq ($(words $(BUILD_FINGERPRINT)),1)
$(error BUILD_FINGERPRINT cannot contain spaces: "$(BUILD_FINGERPRINT)")
endif
+# The string used to uniquely identify the system build; used by the OTA server.
+# This purposefully excludes any product-specific variables.
+ifeq (,$(strip $(BUILD_THUMBPRINT)))
+ BUILD_THUMBPRINT := $(PLATFORM_VERSION)/$(BUILD_ID)/$(BUILD_NUMBER):$(TARGET_BUILD_VARIANT)/$(BUILD_VERSION_TAGS)
+endif
+ifneq ($(words $(BUILD_THUMBPRINT)),1)
+ $(error BUILD_THUMBPRINT cannot contain spaces: "$(BUILD_THUMBPRINT)")
+endif
+
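+# Illustrative expansion (hypothetical values, not part of the patch): with
+# PRODUCT_BRAND=Android, TARGET_PRODUCT=aosp_arm, TARGET_DEVICE=generic,
+# PLATFORM_VERSION=5.0, BUILD_ID=MASTER, BUILD_NUMBER=1234567 and a userdebug
+# test-keys build, the two identifiers come out as:
+#   BUILD_FINGERPRINT = Android/aosp_arm/generic:5.0/MASTER/1234567:userdebug/test-keys
+#   BUILD_THUMBPRINT  = 5.0/MASTER/1234567:userdebug/test-keys
+# i.e. the thumbprint is the fingerprint with the brand/product/device prefix dropped.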
# Display parameters shown under Settings -> About Phone
ifeq ($(TARGET_BUILD_VARIANT),user)
# User builds should show:
@@ -172,10 +174,17 @@
else
system_prop_file := $(wildcard $(TARGET_DEVICE_DIR)/system.prop)
endif
-
$(INSTALLED_BUILD_PROP_TARGET): $(BUILDINFO_SH) $(INTERNAL_BUILD_ID_MAKEFILE) $(BUILD_SYSTEM)/version_defaults.mk $(system_prop_file)
@echo Target buildinfo: $@
@mkdir -p $(dir $@)
+ $(hide) echo > $@
+ifneq ($(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_OEM_PROPERTIES),)
+ $(hide) echo "#" >> $@; \
+ echo "# PRODUCT_OEM_PROPERTIES" >> $@; \
+ echo "#" >> $@;
+ $(hide) $(foreach prop,$(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_OEM_PROPERTIES), \
+ echo "import /oem/oem.prop $(prop)" >> $@;)
+endif
$(hide) TARGET_BUILD_TYPE="$(TARGET_BUILD_VARIANT)" \
TARGET_DEVICE="$(TARGET_DEVICE)" \
PRODUCT_NAME="$(TARGET_PRODUCT)" \
@@ -192,17 +201,21 @@
PLATFORM_VERSION="$(PLATFORM_VERSION)" \
PLATFORM_SDK_VERSION="$(PLATFORM_SDK_VERSION)" \
PLATFORM_VERSION_CODENAME="$(PLATFORM_VERSION_CODENAME)" \
+ PLATFORM_VERSION_ALL_CODENAMES="$(PLATFORM_VERSION_ALL_CODENAMES)" \
BUILD_VERSION_TAGS="$(BUILD_VERSION_TAGS)" \
TARGET_BOOTLOADER_BOARD_NAME="$(TARGET_BOOTLOADER_BOARD_NAME)" \
BUILD_FINGERPRINT="$(BUILD_FINGERPRINT)" \
+ BUILD_THUMBPRINT="$(BUILD_THUMBPRINT)" \
TARGET_BOARD_PLATFORM="$(TARGET_BOARD_PLATFORM)" \
TARGET_CPU_ABI_LIST="$(TARGET_CPU_ABI_LIST)" \
TARGET_CPU_ABI_LIST_32_BIT="$(TARGET_CPU_ABI_LIST_32_BIT)" \
TARGET_CPU_ABI_LIST_64_BIT="$(TARGET_CPU_ABI_LIST_64_BIT)" \
TARGET_CPU_ABI="$(TARGET_CPU_ABI)" \
TARGET_CPU_ABI2="$(TARGET_CPU_ABI2)" \
+ TARGET_2ND_CPU_ABI="$(TARGET_2ND_CPU_ABI)" \
+ TARGET_2ND_CPU_ABI2="$(TARGET_2ND_CPU_ABI2)" \
TARGET_AAPT_CHARACTERISTICS="$(TARGET_AAPT_CHARACTERISTICS)" \
- bash $(BUILDINFO_SH) > $@
+ bash $(BUILDINFO_SH) >> $@
$(hide) $(foreach file,$(system_prop_file), \
if [ -f "$(file)" ]; then \
echo "#" >> $@; \
@@ -218,7 +231,7 @@
echo "#" >> $@; )
$(hide) $(foreach line,$(ADDITIONAL_BUILD_PROPERTIES), \
echo "$(line)" >> $@;)
- $(hide) build/tools/post_process_props.py $@
+ $(hide) build/tools/post_process_props.py $@ $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_SYSTEM_PROPERTY_BLACKLIST)
build_desc :=
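# Illustrative output (hypothetical product settings, not part of the patch):
# a product that sets
#   PRODUCT_OEM_PROPERTIES := ro.product.brand ro.product.name
# gets a generated system/build.prop that starts with
#   #
#   # PRODUCT_OEM_PROPERTIES
#   #
#   import /oem/oem.prop ro.product.brand
#   import /oem/oem.prop ro.product.name
# so init pulls those values from the OEM partition at boot, and
# post_process_props.py is then handed any keys listed in
# PRODUCT_SYSTEM_PROPERTY_BLACKLIST so it can drop them from the rest of the file.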
@@ -464,13 +477,13 @@
$(INSTALLED_BOOTIMAGE_TARGET): $(MKBOOTIMG) $(INTERNAL_BOOTIMAGE_FILES)
$(call pretty,"Target boot image: $@")
$(hide) $(MKBOOTIMG) $(INTERNAL_BOOTIMAGE_ARGS) $(BOARD_MKBOOTIMG_ARGS) --output $@
- $(hide) $(call assert-max-image-size,$@,$(BOARD_BOOTIMAGE_PARTITION_SIZE),raw)
+ $(hide) $(call assert-max-image-size,$@,$(BOARD_BOOTIMAGE_PARTITION_SIZE))
.PHONY: bootimage-nodeps
bootimage-nodeps: $(MKBOOTIMG)
@echo "make $@: ignoring dependencies"
$(hide) $(MKBOOTIMG) $(INTERNAL_BOOTIMAGE_ARGS) $(BOARD_MKBOOTIMG_ARGS) --output $(INSTALLED_BOOTIMAGE_TARGET)
- $(hide) $(call assert-max-image-size,$(INSTALLED_BOOTIMAGE_TARGET),$(BOARD_BOOTIMAGE_PARTITION_SIZE),raw)
+ $(hide) $(call assert-max-image-size,$(INSTALLED_BOOTIMAGE_TARGET),$(BOARD_BOOTIMAGE_PARTITION_SIZE))
endif # TARGET_BOOTIMAGE_USE_EXT2
@@ -623,12 +636,21 @@
endif
ifeq ($(INTERNAL_USERIMAGES_USE_EXT),true)
-INTERNAL_USERIMAGES_DEPS := $(MKEXTUSERIMG) $(MAKE_EXT4FS) $(SIMG2IMG) $(E2FSCK)
+INTERNAL_USERIMAGES_DEPS := $(SIMG2IMG)
+INTERNAL_USERIMAGES_DEPS += $(MKEXTUSERIMG) $(MAKE_EXT4FS) $(E2FSCK)
+ifeq ($(TARGET_USERIMAGES_USE_F2FS),true)
+INTERNAL_USERIMAGES_DEPS += $(MKF2FSUSERIMG) $(MAKE_F2FS)
+endif
else
INTERNAL_USERIMAGES_DEPS := $(MKYAFFS2)
endif
+
INTERNAL_USERIMAGES_BINARY_PATHS := $(sort $(dir $(INTERNAL_USERIMAGES_DEPS)))
+ifeq (true,$(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_SUPPORTS_VERITY))
+INTERNAL_USERIMAGES_DEPS += $(BUILD_VERITY_TREE) $(APPEND2SIMG) $(VERITY_SIGNER)
+endif
+
SELINUX_FC := $(TARGET_ROOT_OUT)/file_contexts
INTERNAL_USERIMAGES_DEPS += $(SELINUX_FC)
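# Illustrative board/product configuration (hypothetical values, not part of
# the patch) that pulls in the extra image-building dependencies added above:
TARGET_USERIMAGES_USE_EXT4 := true            # ext4 tools: mkuserimg.sh, make_ext4fs, e2fsck
TARGET_USERIMAGES_USE_F2FS := true            # adds mkf2fsuserimg.sh and make_f2fs
BOARD_USERDATAIMAGE_FILE_SYSTEM_TYPE := f2fs
PRODUCT_SUPPORTS_VERITY := true               # adds build_verity_tree, append2simg, verity_signer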
@@ -637,14 +659,21 @@
define generate-userimage-prop-dictionary
$(if $(INTERNAL_USERIMAGES_EXT_VARIANT),$(hide) echo "fs_type=$(INTERNAL_USERIMAGES_EXT_VARIANT)" >> $(1))
$(if $(BOARD_SYSTEMIMAGE_PARTITION_SIZE),$(hide) echo "system_size=$(BOARD_SYSTEMIMAGE_PARTITION_SIZE)" >> $(1))
+$(if $(BOARD_USERDATAIMAGE_FILE_SYSTEM_TYPE),$(hide) echo "userdata_fs_type=$(BOARD_USERDATAIMAGE_FILE_SYSTEM_TYPE)" >> $(1))
$(if $(BOARD_USERDATAIMAGE_PARTITION_SIZE),$(hide) echo "userdata_size=$(BOARD_USERDATAIMAGE_PARTITION_SIZE)" >> $(1))
$(if $(BOARD_CACHEIMAGE_FILE_SYSTEM_TYPE),$(hide) echo "cache_fs_type=$(BOARD_CACHEIMAGE_FILE_SYSTEM_TYPE)" >> $(1))
$(if $(BOARD_CACHEIMAGE_PARTITION_SIZE),$(hide) echo "cache_size=$(BOARD_CACHEIMAGE_PARTITION_SIZE)" >> $(1))
$(if $(BOARD_VENDORIMAGE_FILE_SYSTEM_TYPE),$(hide) echo "vendor_fs_type=$(BOARD_VENDORIMAGE_FILE_SYSTEM_TYPE)" >> $(1))
$(if $(BOARD_VENDORIMAGE_PARTITION_SIZE),$(hide) echo "vendor_size=$(BOARD_VENDORIMAGE_PARTITION_SIZE)" >> $(1))
+$(if $(BOARD_OEMIMAGE_PARTITION_SIZE),$(hide) echo "oem_size=$(BOARD_OEMIMAGE_PARTITION_SIZE)" >> $(1))
$(if $(INTERNAL_USERIMAGES_SPARSE_EXT_FLAG),$(hide) echo "extfs_sparse_flag=$(INTERNAL_USERIMAGES_SPARSE_EXT_FLAG)" >> $(1))
$(if $(mkyaffs2_extra_flags),$(hide) echo "mkyaffs2_extra_flags=$(mkyaffs2_extra_flags)" >> $(1))
$(hide) echo "selinux_fc=$(SELINUX_FC)" >> $(1)
+$(if $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_SUPPORTS_VERITY), $(hide) echo "verity=$(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_SUPPORTS_VERITY)" >> $(1))
+$(if $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_SUPPORTS_VERITY),$(hide) echo "verity_block_device=$(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_VERITY_PARTITION)" >> $(1))
+$(if $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_SUPPORTS_VERITY),$(hide) echo "verity_key=$(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_VERITY_SIGNING_KEY)" >> $(1))
+$(if $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_SUPPORTS_VERITY),$(hide) echo "verity_signer_cmd=$(VERITY_SIGNER)" >> $(1))
+$(if $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_SUPPORTS_VERITY),$(hide) echo "verity_mountpoint=$(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_VERITY_MOUNTPOINT)" >> $(1))
$(if $(2),$(hide) $(foreach kv,$(2),echo "$(kv)" >> $(1);))
endef
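# Illustrative dictionary (hypothetical board values, not part of the patch):
# with ext4 system, f2fs userdata and verity enabled, the generated
# system_image_info.txt would contain lines along the lines of
#   fs_type=ext4
#   system_size=1073741824
#   userdata_fs_type=f2fs
#   userdata_size=5368709120
#   oem_size=268435456
#   extfs_sparse_flag=-s
#   selinux_fc=out/target/product/acme/root/file_contexts
#   verity=true
#   verity_block_device=/dev/block/platform/msm_sdcc.1/by-name/system
#   verity_key=build/target/product/security/verity
#   verity_signer_cmd=out/host/linux-x86/bin/verity_signer
#   skip_fsck=true
# which build_image.py then consumes.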
@@ -664,11 +693,32 @@
recovery_binary := $(call intermediates-dir-for,EXECUTABLES,recovery)/recovery
recovery_resources_common := $(call include-path-for, recovery)/res
-# Select the 18x32 font on high-density devices; and the 12x22 font on
-# other devices. Note that the font selected here can be overridden
-# for a particular device by putting a font.png in its private
-# recovery resources.
-ifneq (,$(filter xxhdpi xhdpi,$(subst $(comma),$(space),$(PRODUCT_AAPT_CONFIG))))
+# Set recovery_density to the density bucket of the device.
+recovery_density := unknown
+ifneq (,$(PRODUCT_AAPT_PREF_CONFIG))
+# If PRODUCT_AAPT_PREF_CONFIG includes a dpi bucket, then use that value.
+recovery_density := $(filter %dpi,$(PRODUCT_AAPT_PREF_CONFIG))
+else
+# Otherwise, use the highest density that appears in PRODUCT_AAPT_CONFIG.
+# Order is important here; we'll take the first one that's found.
+recovery_densities := $(filter $(PRODUCT_AAPT_CONFIG_SP),xxxhdpi xxhdpi xhdpi hdpi mdpi ldpi)
+ifneq (,$(recovery_densities))
+recovery_density := $(word 1,$(recovery_densities))
+endif
+endif
+
+ifneq (,$(wildcard $(recovery_resources_common)-$(recovery_density)))
+recovery_resources_common := $(recovery_resources_common)-$(recovery_density)
+else
+recovery_resources_common := $(recovery_resources_common)-xhdpi
+endif
+
+# Select the 18x32 font on high-density devices (xhdpi and up); and
+# the 12x22 font on other devices. Note that the font selected here
+# can be overridden for a particular device by putting a font.png in
+# its private recovery resources.
+
+ifneq (,$(filter xxxhdpi xxhdpi xhdpi,$(recovery_density)))
recovery_font := $(call include-path-for, recovery)/fonts/18x32.png
else
recovery_font := $(call include-path-for, recovery)/fonts/12x22.png
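# Illustrative selection (hypothetical configs, not part of the patch):
#   PRODUCT_AAPT_PREF_CONFIG := xxhdpi
#     -> recovery_density = xxhdpi, res-xxhdpi is used if that directory exists
#        (otherwise the res-xhdpi fallback), and the 18x32 font is chosen.
#   No PREF_CONFIG, PRODUCT_AAPT_CONFIG := normal hdpi mdpi
#     -> $(filter ...) keeps "hdpi mdpi" in highest-first order, the first word
#        hdpi wins, res-hdpi (or the res-xhdpi fallback) is used and the 12x22
#        font is chosen.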
@@ -750,7 +800,9 @@
$(hide) cp -f $(recovery_sepolicy) $(TARGET_RECOVERY_ROOT_OUT)/sepolicy
$(hide) -cp $(TARGET_ROOT_OUT)/init.recovery.*.rc $(TARGET_RECOVERY_ROOT_OUT)/
$(hide) cp -f $(recovery_binary) $(TARGET_RECOVERY_ROOT_OUT)/sbin/
- $(hide) cp -rf $(recovery_resources_common) $(TARGET_RECOVERY_ROOT_OUT)/
+ $(hide) mkdir -p $(TARGET_RECOVERY_ROOT_OUT)/res
+ $(hide) rm -rf $(TARGET_RECOVERY_ROOT_OUT)/res/*
+ $(hide) cp -rf $(recovery_resources_common)/* $(TARGET_RECOVERY_ROOT_OUT)/res
$(hide) cp -f $(recovery_font) $(TARGET_RECOVERY_ROOT_OUT)/res/images/font.png
$(hide) $(foreach item,$(recovery_resources_private), \
cp -rf $(item) $(TARGET_RECOVERY_ROOT_OUT)/)
@@ -761,7 +813,7 @@
> $(TARGET_RECOVERY_ROOT_OUT)/default.prop
$(hide) $(MKBOOTFS) $(TARGET_RECOVERY_ROOT_OUT) | $(MINIGZIP) > $(recovery_ramdisk)
$(hide) $(MKBOOTIMG) $(INTERNAL_RECOVERYIMAGE_ARGS) $(BOARD_MKBOOTIMG_ARGS) --output $@
- $(hide) $(call assert-max-image-size,$@,$(BOARD_RECOVERYIMAGE_PARTITION_SIZE),raw)
+ $(hide) $(call assert-max-image-size,$@,$(BOARD_RECOVERYIMAGE_PARTITION_SIZE))
@echo ----- Made recovery image: $@ --------
$(RECOVERY_RESOURCE_ZIP): $(INSTALLED_RECOVERYIMAGE_TARGET)
@@ -828,11 +880,23 @@
$(call intermediates-dir-for,PACKAGING,systemimage)
BUILT_SYSTEMIMAGE := $(systemimage_intermediates)/system.img
+# Create symlink /system/vendor to /vendor if necessary.
+ifdef BOARD_VENDORIMAGE_FILE_SYSTEM_TYPE
+define create-system-vendor-symlink
+$(hide) ln -sf /vendor $(TARGET_OUT)/vendor
+endef
+else
+define create-system-vendor-symlink
+endef
+endif
+
# $(1): output file
define build-systemimage-target
@echo "Target system fs image: $(1)"
+ $(call create-system-vendor-symlink)
@mkdir -p $(dir $(1)) $(systemimage_intermediates) && rm -rf $(systemimage_intermediates)/system_image_info.txt
- $(call generate-userimage-prop-dictionary, $(systemimage_intermediates)/system_image_info.txt, skip_fsck=true)
+ $(call generate-userimage-prop-dictionary, $(systemimage_intermediates)/system_image_info.txt, \
+ skip_fsck=true)
$(hide) PATH=$(foreach p,$(INTERNAL_USERIMAGES_BINARY_PATHS),$(p):)$$PATH \
./build/tools/releasetools/build_image.py \
$(TARGET_OUT) $(systemimage_intermediates)/system_image_info.txt $(1)
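# Illustrative effect (hypothetical device, not part of the patch): when
# BOARD_VENDORIMAGE_FILE_SYSTEM_TYPE is set (a separate vendor.img is built),
# the macro above runs
#   ln -sf /vendor out/target/product/acme/system/vendor
# before the image is assembled, so /system/vendor/... paths resolve into the
# vendor partition at runtime; without a vendor image it expands to nothing
# and system/vendor stays an ordinary directory.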
@@ -865,7 +929,7 @@
$(INSTALLED_SYSTEMIMAGE): $(BUILT_SYSTEMIMAGE) $(RECOVERY_FROM_BOOT_PATCH) | $(ACP)
@echo "Install system fs image: $@"
$(copy-file-to-target)
- $(hide) $(call assert-max-image-size,$@ $(RECOVERY_FROM_BOOT_PATCH),$(BOARD_SYSTEMIMAGE_PARTITION_SIZE),yaffs)
+ $(hide) $(call assert-max-image-size,$@ $(RECOVERY_FROM_BOOT_PATCH),$(BOARD_SYSTEMIMAGE_PARTITION_SIZE))
systemimage: $(INSTALLED_SYSTEMIMAGE)
@@ -874,7 +938,7 @@
| $(INTERNAL_USERIMAGES_DEPS)
@echo "make $@: ignoring dependencies"
$(call build-systemimage-target,$(INSTALLED_SYSTEMIMAGE))
- $(hide) $(call assert-max-image-size,$(INSTALLED_SYSTEMIMAGE),$(BOARD_SYSTEMIMAGE_PARTITION_SIZE),yaffs)
+ $(hide) $(call assert-max-image-size,$(INSTALLED_SYSTEMIMAGE),$(BOARD_SYSTEMIMAGE_PARTITION_SIZE))
ifneq (,$(filter systemimage-nodeps snod, $(MAKECMDGOALS)))
ifeq (true,$(WITH_DEXPREOPT))
@@ -885,10 +949,11 @@
#######
## system tarball
define build-systemtarball-target
- $(call pretty,"Target system fs tarball: $(INSTALLED_SYSTEMTARBALL_TARGET)")
- $(MKTARBALL) $(FS_GET_STATS) \
- $(PRODUCT_OUT) system $(PRIVATE_SYSTEM_TAR) \
- $(INSTALLED_SYSTEMTARBALL_TARGET)
+ $(call pretty,"Target system fs tarball: $(INSTALLED_SYSTEMTARBALL_TARGET)")
+ $(call create-system-vendor-symlink)
+ $(MKTARBALL) $(FS_GET_STATS) \
+ $(PRODUCT_OUT) system $(PRIVATE_SYSTEM_TAR) \
+ $(INSTALLED_SYSTEMTARBALL_TARGET)
endef
ifndef SYSTEM_TARBALL_FORMAT
@@ -997,7 +1062,7 @@
$(hide) PATH=$(foreach p,$(INTERNAL_USERIMAGES_BINARY_PATHS),$(p):)$$PATH \
./build/tools/releasetools/build_image.py \
$(TARGET_OUT_DATA) $(userdataimage_intermediates)/userdata_image_info.txt $(INSTALLED_USERDATAIMAGE_TARGET)
- $(hide) $(call assert-max-image-size,$(INSTALLED_USERDATAIMAGE_TARGET),$(BOARD_USERDATAIMAGE_PARTITION_SIZE),yaffs)
+ $(hide) $(call assert-max-image-size,$(INSTALLED_USERDATAIMAGE_TARGET),$(BOARD_USERDATAIMAGE_PARTITION_SIZE))
endef
# We just build this directly to the install location.
@@ -1052,7 +1117,7 @@
$(hide) PATH=$(foreach p,$(INTERNAL_USERIMAGES_BINARY_PATHS),$(p):)$$PATH \
./build/tools/releasetools/build_image.py \
$(TARGET_OUT_CACHE) $(cacheimage_intermediates)/cache_image_info.txt $(INSTALLED_CACHEIMAGE_TARGET)
- $(hide) $(call assert-max-image-size,$(INSTALLED_CACHEIMAGE_TARGET),$(BOARD_CACHEIMAGE_PARTITION_SIZE),yaffs)
+ $(hide) $(call assert-max-image-size,$(INSTALLED_CACHEIMAGE_TARGET),$(BOARD_CACHEIMAGE_PARTITION_SIZE))
endef
# We just build this directly to the install location.
@@ -1085,7 +1150,7 @@
$(hide) PATH=$(foreach p,$(INTERNAL_USERIMAGES_BINARY_PATHS),$(p):)$$PATH \
./build/tools/releasetools/build_image.py \
$(TARGET_OUT_VENDOR) $(vendorimage_intermediates)/vendor_image_info.txt $(INSTALLED_VENDORIMAGE_TARGET)
- $(hide) $(call assert-max-image-size,$(INSTALLED_VENDORIMAGE_TARGET),$(BOARD_VENDORIMAGE_PARTITION_SIZE),yaffs)
+ $(hide) $(call assert-max-image-size,$(INSTALLED_VENDORIMAGE_TARGET),$(BOARD_VENDORIMAGE_PARTITION_SIZE))
endef
# We just build this directly to the install location.
@@ -1121,7 +1186,12 @@
$(HOST_OUT_EXECUTABLES)/mkuserimg.sh \
$(HOST_OUT_EXECUTABLES)/make_ext4fs \
$(HOST_OUT_EXECUTABLES)/simg2img \
- $(HOST_OUT_EXECUTABLES)/e2fsck
+ $(HOST_OUT_EXECUTABLES)/e2fsck \
+ $(HOST_OUT_EXECUTABLES)/xdelta3 \
+ $(HOST_OUT_EXECUTABLES)/syspatch_host \
+ $(HOST_OUT_EXECUTABLES)/build_verity_tree \
+ $(HOST_OUT_EXECUTABLES)/verity_signer \
+ $(HOST_OUT_EXECUTABLES)/append2simg
OTATOOLS := $(DISTTOOLS) \
$(HOST_OUT_EXECUTABLES)/aapt
@@ -1277,11 +1347,18 @@
$(hide) echo "use_set_metadata=1" >> $(zip_root)/META/misc_info.txt
$(hide) echo "multistage_support=1" >> $(zip_root)/META/misc_info.txt
$(hide) echo "update_rename_support=1" >> $(zip_root)/META/misc_info.txt
+ifneq ($(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_OEM_PROPERTIES),)
+ # OTA scripts are only interested in fingerprint related properties
+ $(hide) echo "oem_fingerprint_properties=$(filter ro.product.brand ro.product.name ro.product.device, $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_OEM_PROPERTIES))" >> $(zip_root)/META/misc_info.txt
+endif
$(call generate-userimage-prop-dictionary, $(zip_root)/META/misc_info.txt)
+ $(hide) ./build/tools/releasetools/make_recovery_patch $(zip_root) $(zip_root)
@# Zip everything up, preserving symlinks
$(hide) (cd $(zip_root) && zip -qry ../$(notdir $@) .)
- @# Run fs_config on all the system, boot ramdisk, and recovery ramdisk files in the zip, and save the output
+ @# Run fs_config on all the system, vendor, boot ramdisk,
+ @# and recovery ramdisk files in the zip, and save the output
$(hide) zipinfo -1 $@ | awk 'BEGIN { FS="SYSTEM/" } /^SYSTEM\// {print "system/" $$2}' | $(HOST_OUT_EXECUTABLES)/fs_config -C -S $(SELINUX_FC) > $(zip_root)/META/filesystem_config.txt
+ $(hide) zipinfo -1 $@ | awk 'BEGIN { FS="VENDOR/" } /^VENDOR\// {print "vendor/" $$2}' | $(HOST_OUT_EXECUTABLES)/fs_config -C -S $(SELINUX_FC) > $(zip_root)/META/vendor_filesystem_config.txt
$(hide) zipinfo -1 $@ | awk 'BEGIN { FS="BOOT/RAMDISK/" } /^BOOT\/RAMDISK\// {print $$2}' | $(HOST_OUT_EXECUTABLES)/fs_config -C -S $(SELINUX_FC) > $(zip_root)/META/boot_filesystem_config.txt
$(hide) zipinfo -1 $@ | awk 'BEGIN { FS="RECOVERY/RAMDISK/" } /^RECOVERY\/RAMDISK\// {print $$2}' | $(HOST_OUT_EXECUTABLES)/fs_config -C -S $(SELINUX_FC) > $(zip_root)/META/recovery_filesystem_config.txt
$(hide) (cd $(zip_root) && zip -q ../$(notdir $@) META/*filesystem_config.txt)
@@ -1315,8 +1392,10 @@
@echo "Package OTA: $@"
$(hide) MKBOOTIMG=$(MKBOOTIMG) \
./build/tools/releasetools/ota_from_target_files -v \
+ --block \
-p $(HOST_OUT) \
-k $(KEY_CERT_PAIR) \
+ $(if $(OEM_OTA_CONFIG), -o $(OEM_OTA_CONFIG)) \
$(BUILT_TARGET_FILES_PACKAGE) $@
.PHONY: otapackage
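# Illustrative invocation (hypothetical paths, not part of the patch) matching
# the recipe above:
#   MKBOOTIMG=out/host/linux-x86/bin/mkbootimg \
#   ./build/tools/releasetools/ota_from_target_files -v --block \
#       -p out/host/linux-x86 \
#       -k build/target/product/security/testkey \
#       -o device/acme/foo/oem.prop \
#       out/target/product/foo/obj/PACKAGING/target_files_intermediates/aosp_foo-target_files.zip \
#       out/target/product/foo/aosp_foo-ota.zip
# --block asks for a block-based OTA; -o is only passed when OEM_OTA_CONFIG is
# set and points the OTA scripts at the OEM property file backing the
# fingerprint properties recorded in misc_info.txt.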
@@ -1338,25 +1417,16 @@
INTERNAL_UPDATE_PACKAGE_TARGET := $(PRODUCT_OUT)/$(name).zip
-ifeq ($(TARGET_RELEASETOOLS_EXTENSIONS),)
-# default to common dir for device vendor
-$(INTERNAL_UPDATE_PACKAGE_TARGET): extensions := $(TARGET_DEVICE_DIR)/../common
-else
-$(INTERNAL_UPDATE_PACKAGE_TARGET): extensions := $(TARGET_RELEASETOOLS_EXTENSIONS)
-endif
-
$(INTERNAL_UPDATE_PACKAGE_TARGET): $(BUILT_TARGET_FILES_PACKAGE) $(DISTTOOLS)
@echo "Package: $@"
$(hide) MKBOOTIMG=$(MKBOOTIMG) \
./build/tools/releasetools/img_from_target_files -v \
- -s $(extensions) \
-p $(HOST_OUT) \
$(BUILT_TARGET_FILES_PACKAGE) $@
.PHONY: updatepackage
updatepackage: $(INTERNAL_UPDATE_PACKAGE_TARGET)
-
# -----------------------------------------------------------------
# A zip of the symbols directory. Keep the full paths to make it
# more obvious where these files came from.
@@ -1401,7 +1471,7 @@
# the dependency will be set up later in build/core/main.mk.
$(EMMA_META_ZIP) :
@echo "Collecting Emma coverage meta files."
- $(hide) find $(TARGET_COMMON_OUT_ROOT) -name "coverage.em" | \
+ $(hide) find $(TARGET_COMMON_OUT_ROOT) $(HOST_COMMON_OUT_ROOT) -name "coverage.em" | \
zip -@ -q $@
endif # EMMA_INSTRUMENT=true
@@ -1417,7 +1487,8 @@
@echo "Packaging Proguard obfuscation dictionary files."
$(hide) dict_files=`find $(TARGET_OUT_COMMON_INTERMEDIATES)/APPS -name proguard_dictionary`; \
if [ -n "$$dict_files" ]; then \
- zip -q $@ $$dict_files; \
+ unobfuscated_jars=$${dict_files//proguard_dictionary/classes.jar}; \
+ zip -q $@ $$dict_files $$unobfuscated_jars; \
else \
touch $(dir $@)/dummy; \
(cd $(dir $@) && zip -q $(notdir $@) dummy); \
@@ -1543,7 +1614,7 @@
$(INTERNAL_SDK_TARGET): PRIVATE_INPUT_FILES := $(sdk_atree_files)
sdk_font_temp_dir := $(call intermediates-dir-for,PACKAGING,sdk-fonts)
-sdk_font_input_list := frameworks/base/data/fonts external/noto-fonts
+sdk_font_input_list := frameworks/base/data/fonts external/noto-fonts external/google-fonts external/naver-fonts
sdk_font_rename_script := frameworks/base/tools/layoutlib/rename_font/build_font.py
# Set SDK_GNU_ERROR to non-empty to fail when a GNU target is built.
@@ -1606,11 +1677,11 @@
INTERNAL_FINDBUGS_HTML_TARGET := $(PRODUCT_OUT)/findbugs.html
$(INTERNAL_FINDBUGS_XML_TARGET): $(ALL_FINDBUGS_FILES)
@echo UnionBugs: $@
- $(hide) prebuilt/common/findbugs/bin/unionBugs $(ALL_FINDBUGS_FILES) \
+ $(hide) $(FINDBUGS_DIR)/unionBugs $(ALL_FINDBUGS_FILES) \
> $@
$(INTERNAL_FINDBUGS_HTML_TARGET): $(INTERNAL_FINDBUGS_XML_TARGET)
@echo ConvertXmlToText: $@
- $(hide) prebuilt/common/findbugs/bin/convertXmlToText -html:fancy.xsl \
+ $(hide) $(FINDBUGS_DIR)/convertXmlToText -html:fancy.xsl \
$(INTERNAL_FINDBUGS_XML_TARGET) > $@
# -----------------------------------------------------------------
diff --git a/core/apicheck_msg_current.txt b/core/apicheck_msg_current.txt
index 440e7f8..9abd381 100644
--- a/core/apicheck_msg_current.txt
+++ b/core/apicheck_msg_current.txt
@@ -7,7 +7,7 @@
errors above.
2) You can update current.txt by executing the following command:
- make update-api
+ make %UPDATE_API%
To submit the revised current.txt to the main Android repository,
you will need approval.
diff --git a/core/base_rules.mk b/core/base_rules.mk
index e840047..a4f1360 100644
--- a/core/base_rules.mk
+++ b/core/base_rules.mk
@@ -114,6 +114,8 @@
else
ifeq (true,$(LOCAL_PROPRIETARY_MODULE))
partition_tag := _VENDOR
+ else ifeq (true,$(LOCAL_OEM_MODULE))
+ partition_tag := _OEM
else
# The definition of should-install-to-system will be different depending
# on which goal (e.g., sdk or just droid) is being built.
@@ -230,7 +232,7 @@
# Emit a java source file with constants for the tags, if
# LOCAL_MODULE_CLASS is "APPS" or "JAVA_LIBRARIES".
-ifneq ($(strip $(filter $(LOCAL_MODULE_CLASS),APPS JAVA_LIBRARIES)),)
+ifneq ($(filter $(LOCAL_MODULE_CLASS),APPS JAVA_LIBRARIES),)
logtags_java_sources := $(patsubst %.logtags,%.java,$(addprefix $(intermediates.COMMON)/src/, $(logtags_sources)))
logtags_sources := $(addprefix $(TOP_DIR)$(LOCAL_PATH)/, $(logtags_sources))
@@ -423,10 +425,10 @@
# This is set by packages that are linking to other packages that export
# shared libraries, allowing them to make use of the code in the linked apk.
-LOCAL_APK_LIBRARIES := $(strip $(LOCAL_APK_LIBRARIES))
-ifdef LOCAL_APK_LIBRARIES
+apk_libraries := $(sort $(LOCAL_APK_LIBRARIES) $(LOCAL_RES_LIBRARIES))
+ifneq ($(apk_libraries),)
link_apk_libraries := \
- $(foreach lib,$(LOCAL_APK_LIBRARIES), \
+ $(foreach lib,$(apk_libraries), \
$(call intermediates-dir-for, \
APPS,$(lib),,COMMON)/classes.jar)
@@ -598,7 +600,7 @@
ALL_MODULES.$(my_register_name).INSTALLED := \
$(strip $(ALL_MODULES.$(my_register_name).INSTALLED) $(LOCAL_INSTALLED_MODULE))
ALL_MODULES.$(my_register_name).BUILT_INSTALLED := \
- $(strip $(ALL_MODULES.$(my_register_name).BUILT_INSTALLED)$(LOCAL_BUILT_MODULE):$(LOCAL_INSTALLED_MODULE))
+ $(strip $(ALL_MODULES.$(my_register_name).BUILT_INSTALLED) $(LOCAL_BUILT_MODULE):$(LOCAL_INSTALLED_MODULE))
endif
ifdef LOCAL_PICKUP_FILES
# Files or directories ready to pick up by the build system
@@ -622,6 +624,9 @@
ifdef LOCAL_2ND_ARCH_VAR_PREFIX
ALL_MODULES.$(my_register_name).FOR_2ND_ARCH := true
endif
+ifdef aidl_sources
+ALL_MODULES.$(my_register_name).AIDL_FILES := $(aidl_sources)
+endif
INSTALLABLE_FILES.$(LOCAL_INSTALLED_MODULE).MODULE := $(my_register_name)
diff --git a/core/binary.mk b/core/binary.mk
index 48b4081..be0776e 100644
--- a/core/binary.mk
+++ b/core/binary.mk
@@ -154,6 +154,8 @@
ifeq ($(strip $(LOCAL_ADDRESS_SANITIZER)),true)
my_clang := true
+ # Frame pointer based unwinder in ASan requires ARM frame setup.
+ LOCAL_ARM_MODE := arm
my_cflags += $(ADDRESS_SANITIZER_CONFIG_EXTRA_CFLAGS)
my_ldflags += $(ADDRESS_SANITIZER_CONFIG_EXTRA_LDFLAGS)
my_shared_libraries += $(ADDRESS_SANITIZER_CONFIG_EXTRA_SHARED_LIBRARIES)
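# Illustrative Android.mk sketch (hypothetical module, not part of the patch):
include $(CLEAR_VARS)
# hypothetical module name and source
LOCAL_MODULE := libfoo_asan_test
LOCAL_SRC_FILES := foo.cpp
# Pulls in the ASan cflags/ldflags/shared libs configured above and, per this
# change, forces clang and ARM (non-Thumb) code so the frame-pointer unwinder works.
LOCAL_ADDRESS_SANITIZER := true
include $(BUILD_SHARED_LIBRARY)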
@@ -465,6 +467,9 @@
proto_generated_objects := $(addprefix $(proto_generated_obj_dir)/, \
$(patsubst %.proto,%.pb.o,$(proto_sources_fullpath)))
+# Auto-export the generated proto source dir.
+LOCAL_EXPORT_C_INCLUDE_DIRS += $(proto_generated_cc_sources_dir)
+
# Ensure the transform-proto-to-cc rule is only defined once in multilib build.
ifndef $(my_prefix)_$(LOCAL_MODULE_CLASS)_$(LOCAL_MODULE)_proto_defined
$(proto_generated_cc_sources): PRIVATE_PROTO_INCLUDES := $(TOP)
@@ -945,7 +950,8 @@
###########################################################
export_includes := $(intermediates)/export_includes
$(export_includes): PRIVATE_EXPORT_C_INCLUDE_DIRS := $(LOCAL_EXPORT_C_INCLUDE_DIRS)
-$(export_includes) : $(LOCAL_MODULE_MAKEFILE)
+# Make sure .pb.h are already generated before any dependent source files get compiled.
+$(export_includes) : $(LOCAL_MODULE_MAKEFILE) $(proto_generated_headers)
@echo Export includes file: $< -- $@
$(hide) mkdir -p $(dir $@) && rm -f $@
ifdef LOCAL_EXPORT_C_INCLUDE_DIRS
diff --git a/core/build_id.mk b/core/build_id.mk
index f94b224..0e8e2cf 100644
--- a/core/build_id.mk
+++ b/core/build_id.mk
@@ -18,6 +18,4 @@
# (like "CRB01"). It must be a single word, and is
# capitalized by convention.
-BUILD_ID := AOSP
-
-DISPLAY_BUILD_NUMBER := true
+export BUILD_ID=MASTER
diff --git a/core/clang/config.mk b/core/clang/config.mk
index f50a0cb..1de46f0 100644
--- a/core/clang/config.mk
+++ b/core/clang/config.mk
@@ -81,9 +81,10 @@
CLANG_CONFIG_EXTRA_TARGET_C_INCLUDES := $(LLVM_PREBUILTS_HEADER_PATH) $(TARGET_OUT_HEADERS)/clang
# Address sanitizer clang config
-ADDRESS_SANITIZER_CONFIG_EXTRA_CFLAGS := -fsanitize=address
+ADDRESS_SANITIZER_RUNTIME_LIBRARY := libclang_rt.asan_$(TARGET_ARCH)_android
+ADDRESS_SANITIZER_CONFIG_EXTRA_CFLAGS := -fsanitize=address -fno-omit-frame-pointer
ADDRESS_SANITIZER_CONFIG_EXTRA_LDFLAGS := -Wl,-u,__asan_preinit
-ADDRESS_SANITIZER_CONFIG_EXTRA_SHARED_LIBRARIES := libdl libasan_preload
+ADDRESS_SANITIZER_CONFIG_EXTRA_SHARED_LIBRARIES := libdl $(ADDRESS_SANITIZER_RUNTIME_LIBRARY)
ADDRESS_SANITIZER_CONFIG_EXTRA_STATIC_LIBRARIES := libasan
# This allows us to use the superset of functionality that compiler-rt
diff --git a/core/clear_vars.mk b/core/clear_vars.mk
index e3e8e1d..4fd2888 100644
--- a/core/clear_vars.mk
+++ b/core/clear_vars.mk
@@ -92,6 +92,7 @@
LOCAL_JAR_MANIFEST:=
LOCAL_INSTRUMENTATION_FOR:=
LOCAL_APK_LIBRARIES:=
+LOCAL_RES_LIBRARIES:=
LOCAL_MANIFEST_INSTRUMENTATION_FOR:=
LOCAL_AIDL_INCLUDES:=
LOCAL_JARJAR_RULES:=
@@ -126,6 +127,7 @@
LOCAL_PROTO_JAVA_OUTPUT_PARAMS:=
LOCAL_NO_CRT:=
LOCAL_PROPRIETARY_MODULE:=
+LOCAL_OEM_MODULE:=
LOCAL_PRIVILEGED_MODULE:=
LOCAL_MODULE_OWNER:=
LOCAL_CTS_TEST_PACKAGE:=
@@ -134,6 +136,7 @@
LOCAL_ADDRESS_SANITIZER:=
LOCAL_JAR_EXCLUDE_FILES:=
LOCAL_JAR_PACKAGES:=
+LOCAL_JAR_EXCLUDE_PACKAGES:=
LOCAL_LINT_FLAGS:=
LOCAL_SOURCE_FILES_ALL_GENERATED:= # '',true
# Don't delete the META_INF dir when merging static Java libraries.
@@ -143,6 +146,7 @@
LOCAL_POST_INSTALL_CMD:=
LOCAL_DIST_BUNDLED_BINARIES:=
LOCAL_HAL_STATIC_LIBRARIES:=
+LOCAL_RMTYPEDEFS:=
LOCAL_NO_SYNTAX_CHECK:=
LOCAL_NO_STATIC_ANALYZER:=
LOCAL_32_BIT_ONLY:= # '',true
diff --git a/core/combo/TARGET_linux-arm.mk b/core/combo/TARGET_linux-arm.mk
index 68737a3..37c56dc 100644
--- a/core/combo/TARGET_linux-arm.mk
+++ b/core/combo/TARGET_linux-arm.mk
@@ -114,7 +114,7 @@
# into no-op in some builds while mesg is defined earlier. So we explicitly
# disable "-Wunused-but-set-variable" here.
ifneq ($(filter 4.6 4.6.% 4.7 4.7.% 4.8, $($(combo_2nd_arch_prefix)TARGET_GCC_VERSION)),)
-$(combo_2nd_arch_prefix)TARGET_GLOBAL_CFLAGS += -Wno-unused-but-set-variable -fno-builtin-sin \
+$(combo_2nd_arch_prefix)TARGET_GLOBAL_CFLAGS += -fno-builtin-sin \
-fno-strict-volatile-bitfields
endif
@@ -244,7 +244,8 @@
$(PRIVATE_LDFLAGS) \
$(PRIVATE_TARGET_LIBATOMIC) \
$(if $(PRIVATE_LIBCXX),,$(PRIVATE_TARGET_LIBGCC)) \
- $(if $(filter true,$(PRIVATE_NO_CRT)),,$(PRIVATE_TARGET_CRTEND_SO_O))
+ $(if $(filter true,$(PRIVATE_NO_CRT)),,$(PRIVATE_TARGET_CRTEND_SO_O)) \
+ $(PRIVATE_LDLIBS)
endef
define $(combo_2nd_arch_prefix)transform-o-to-executable-inner
@@ -270,7 +271,8 @@
$(PRIVATE_LDFLAGS) \
$(PRIVATE_TARGET_LIBATOMIC) \
$(if $(PRIVATE_LIBCXX),,$(PRIVATE_TARGET_LIBGCC)) \
- $(if $(filter true,$(PRIVATE_NO_CRT)),,$(PRIVATE_TARGET_CRTEND_O))
+ $(if $(filter true,$(PRIVATE_NO_CRT)),,$(PRIVATE_TARGET_CRTEND_O)) \
+ $(PRIVATE_LDLIBS)
endef
define $(combo_2nd_arch_prefix)transform-o-to-static-executable-inner
diff --git a/core/combo/TARGET_linux-mips.mk b/core/combo/TARGET_linux-mips.mk
index e505a6b..d702cc5 100644
--- a/core/combo/TARGET_linux-mips.mk
+++ b/core/combo/TARGET_linux-mips.mk
@@ -235,7 +235,8 @@
$(PRIVATE_LDFLAGS) \
$(PRIVATE_TARGET_LIBATOMIC) \
$(if $(PRIVATE_LIBCXX),,$(PRIVATE_TARGET_LIBGCC)) \
- $(if $(filter true,$(PRIVATE_NO_CRT)),,$(PRIVATE_TARGET_CRTEND_SO_O))
+ $(if $(filter true,$(PRIVATE_NO_CRT)),,$(PRIVATE_TARGET_CRTEND_SO_O)) \
+ $(PRIVATE_LDLIBS)
endef
define $(combo_2nd_arch_prefix)transform-o-to-executable-inner
@@ -261,7 +262,8 @@
$(PRIVATE_LDFLAGS) \
$(PRIVATE_TARGET_LIBATOMIC) \
$(if $(PRIVATE_LIBCXX),,$(PRIVATE_TARGET_LIBGCC)) \
- $(if $(filter true,$(PRIVATE_NO_CRT)),,$(PRIVATE_TARGET_CRTEND_O))
+ $(if $(filter true,$(PRIVATE_NO_CRT)),,$(PRIVATE_TARGET_CRTEND_O)) \
+ $(PRIVATE_LDLIBS)
endef
define $(combo_2nd_arch_prefix)transform-o-to-static-executable-inner
diff --git a/core/combo/TARGET_linux-x86.mk b/core/combo/TARGET_linux-x86.mk
index 4c00891..4370db8 100644
--- a/core/combo/TARGET_linux-x86.mk
+++ b/core/combo/TARGET_linux-x86.mk
@@ -199,7 +199,8 @@
$(PRIVATE_LDFLAGS) \
$(PRIVATE_TARGET_LIBATOMIC) \
$(if $(PRIVATE_LIBCXX),,$(PRIVATE_TARGET_LIBGCC)) \
- $(if $(filter true,$(PRIVATE_NO_CRT)),,$(PRIVATE_TARGET_CRTEND_SO_O))
+ $(if $(filter true,$(PRIVATE_NO_CRT)),,$(PRIVATE_TARGET_CRTEND_SO_O)) \
+ $(PRIVATE_LDLIBS)
endef
define $(combo_2nd_arch_prefix)transform-o-to-executable-inner
@@ -225,7 +226,8 @@
$(PRIVATE_LDFLAGS) \
$(PRIVATE_TARGET_LIBATOMIC) \
$(if $(PRIVATE_LIBCXX),,$(PRIVATE_TARGET_LIBGCC)) \
- $(if $(filter true,$(PRIVATE_NO_CRT)),,$(PRIVATE_TARGET_CRTEND_O))
+ $(if $(filter true,$(PRIVATE_NO_CRT)),,$(PRIVATE_TARGET_CRTEND_O)) \
+ $(PRIVATE_LDLIBS)
endef
define $(combo_2nd_arch_prefix)transform-o-to-static-executable-inner
diff --git a/core/combo/include/arch/darwin-x86/AndroidConfig.h b/core/combo/include/arch/darwin-x86/AndroidConfig.h
index 44de4cd..54f3750 100644
--- a/core/combo/include/arch/darwin-x86/AndroidConfig.h
+++ b/core/combo/include/arch/darwin-x86/AndroidConfig.h
@@ -56,13 +56,6 @@
#define HAVE_FORKEXEC
/*
- * Process out-of-memory adjustment. Set if running on Linux,
- * where we can write to /proc/<pid>/oom_adj to modify the out-of-memory
- * badness adjustment.
- */
-/* #define HAVE_OOM_ADJ */
-
-/*
* IPC model. Choose one:
*
* HAVE_SYSV_IPC - use the classic SysV IPC mechanisms (semget, shmget).
diff --git a/core/combo/include/arch/linux-arm/AndroidConfig.h b/core/combo/include/arch/linux-arm/AndroidConfig.h
index 0eb6c72..c06c8bc 100644
--- a/core/combo/include/arch/linux-arm/AndroidConfig.h
+++ b/core/combo/include/arch/linux-arm/AndroidConfig.h
@@ -63,13 +63,6 @@
#define HAVE_FORKEXEC
/*
- * Process out-of-memory adjustment. Set if running on Linux,
- * where we can write to /proc/<pid>/oom_adj to modify the out-of-memory
- * badness adjustment.
- */
-#define HAVE_OOM_ADJ
-
-/*
* IPC model. Choose one:
*
* HAVE_SYSV_IPC - use the classic SysV IPC mechanisms (semget, shmget).
diff --git a/core/combo/include/arch/linux-mips/AndroidConfig.h b/core/combo/include/arch/linux-mips/AndroidConfig.h
index 076d711..bb3dc95 100644
--- a/core/combo/include/arch/linux-mips/AndroidConfig.h
+++ b/core/combo/include/arch/linux-mips/AndroidConfig.h
@@ -63,13 +63,6 @@
#define HAVE_FORKEXEC
/*
- * Process out-of-memory adjustment. Set if running on Linux,
- * where we can write to /proc/<pid>/oom_adj to modify the out-of-memory
- * badness adjustment.
- */
-#define HAVE_OOM_ADJ
-
-/*
* IPC model. Choose one:
*
* HAVE_SYSV_IPC - use the classic SysV IPC mechanisms (semget, shmget).
diff --git a/core/combo/include/arch/linux-x86/AndroidConfig.h b/core/combo/include/arch/linux-x86/AndroidConfig.h
index ebb95b0..5523e49 100644
--- a/core/combo/include/arch/linux-x86/AndroidConfig.h
+++ b/core/combo/include/arch/linux-x86/AndroidConfig.h
@@ -56,13 +56,6 @@
#define HAVE_FORKEXEC
/*
- * Process out-of-memory adjustment. Set if running on Linux,
- * where we can write to /proc/<pid>/oom_adj to modify the out-of-memory
- * badness adjustment.
- */
-#define HAVE_OOM_ADJ
-
-/*
* IPC model. Choose one:
*
* HAVE_SYSV_IPC - use the classic SysV IPC mechanisms (semget, shmget).
diff --git a/core/combo/include/arch/target_linux-x86/AndroidConfig.h b/core/combo/include/arch/target_linux-x86/AndroidConfig.h
index 5b56b51..c267b2b 100644
--- a/core/combo/include/arch/target_linux-x86/AndroidConfig.h
+++ b/core/combo/include/arch/target_linux-x86/AndroidConfig.h
@@ -49,13 +49,6 @@
#define HAVE_FORKEXEC
/*
- * Process out-of-memory adjustment. Set if running on Linux,
- * where we can write to /proc/<pid>/oom_adj to modify the out-of-memory
- * badness adjustment.
- */
-#define HAVE_OOM_ADJ
-
-/*
* IPC model. Choose one:
*
* HAVE_SYSV_IPC - use the classic SysV IPC mechanisms (semget, shmget).
diff --git a/core/combo/include/arch/windows/AndroidConfig.h b/core/combo/include/arch/windows/AndroidConfig.h
index 0a52674..204740d 100644
--- a/core/combo/include/arch/windows/AndroidConfig.h
+++ b/core/combo/include/arch/windows/AndroidConfig.h
@@ -83,13 +83,6 @@
#endif
/*
- * Process out-of-memory adjustment. Set if running on Linux,
- * where we can write to /proc/<pid>/oom_adj to modify the out-of-memory
- * badness adjustment.
- */
-/* #define HAVE_OOM_ADJ */
-
-/*
* IPC model. Choose one:
*
* HAVE_SYSV_IPC - use the classic SysV IPC mechanisms (semget, shmget).
diff --git a/core/config.mk b/core/config.mk
index ad78ffa..c4d088d 100644
--- a/core/config.mk
+++ b/core/config.mk
@@ -37,13 +37,13 @@
$(TOPDIR)frameworks/native/include \
$(TOPDIR)frameworks/native/opengl/include \
$(TOPDIR)frameworks/av/include \
- $(TOPDIR)frameworks/base/include \
- $(TOPDIR)external/skia/include
+ $(TOPDIR)frameworks/base/include
SRC_HOST_HEADERS:=$(TOPDIR)tools/include
SRC_LIBRARIES:= $(TOPDIR)libs
SRC_SERVERS:= $(TOPDIR)servers
SRC_TARGET_DIR := $(TOPDIR)build/target
SRC_API_DIR := $(TOPDIR)prebuilts/sdk/api
+SRC_SYSTEM_API_DIR := $(TOPDIR)prebuilts/sdk/system-api
# Some specific paths to tools
SRC_DROIDDOC_DIR := $(TOPDIR)build/tools/droiddoc
@@ -387,6 +387,8 @@
MKEXT2IMG := $(HOST_OUT_EXECUTABLES)/genext2fs$(HOST_EXECUTABLE_SUFFIX)
MAKE_EXT4FS := $(HOST_OUT_EXECUTABLES)/make_ext4fs$(HOST_EXECUTABLE_SUFFIX)
MKEXTUSERIMG := $(HOST_OUT_EXECUTABLES)/mkuserimg.sh
+MAKE_F2FS := $(HOST_OUT_EXECUTABLES)/make_f2fs$(HOST_EXECUTABLE_SUFFIX)
+MKF2FSUSERIMG := $(HOST_OUT_EXECUTABLES)/mkf2fsuserimg.sh
MKEXT2BOOTIMG := external/genext2fs/mkbootimg_ext2.sh
SIMG2IMG := $(HOST_OUT_EXECUTABLES)/simg2img$(HOST_EXECUTABLE_SUFFIX)
E2FSCK := $(HOST_OUT_EXECUTABLES)/e2fsck$(HOST_EXECUTABLE_SUFFIX)
@@ -399,6 +401,10 @@
LLVM_RS_CC := $(HOST_OUT_EXECUTABLES)/llvm-rs-cc$(HOST_EXECUTABLE_SUFFIX)
BCC_COMPAT := $(HOST_OUT_EXECUTABLES)/bcc_compat$(HOST_EXECUTABLE_SUFFIX)
LINT := prebuilts/sdk/tools/lint
+RMTYPEDEFS := $(HOST_OUT_EXECUTABLES)/rmtypedefs
+APPEND2SIMG := $(HOST_OUT_EXECUTABLES)/append2simg
+VERITY_SIGNER := $(HOST_OUT_EXECUTABLES)/verity_signer
+BUILD_VERITY_TREE := $(HOST_OUT_EXECUTABLES)/build_verity_tree
# ACP is always for the build OS, not for the host OS
ACP := $(BUILD_OUT_EXECUTABLES)/acp$(BUILD_EXECUTABLE_SUFFIX)
@@ -406,7 +412,8 @@
# dx is java behind a shell script; no .exe necessary.
DX := $(HOST_OUT_EXECUTABLES)/dx
ZIPALIGN := $(HOST_OUT_EXECUTABLES)/zipalign$(HOST_EXECUTABLE_SUFFIX)
-FINDBUGS := prebuilt/common/findbugs/bin/findbugs
+FINDBUGS_DIR := external/owasp/sanitizer/tools/findbugs/bin
+FINDBUGS := $(FINDBUGS_DIR)/findbugs
EMMA_JAR := external/emma/lib/emma$(COMMON_JAVA_PACKAGE_SUFFIX)
# Tool to merge AndroidManifest.xmls
@@ -580,6 +587,9 @@
$(wildcard $(HISTORICAL_SDK_VERSIONS_ROOT)/*/android.jar)))
INTERNAL_PLATFORM_API_FILE := $(TARGET_OUT_COMMON_INTERMEDIATES)/PACKAGING/public_api.txt
+INTERNAL_PLATFORM_REMOVED_API_FILE := $(TARGET_OUT_COMMON_INTERMEDIATES)/PACKAGING/removed.txt
+INTERNAL_PLATFORM_SYSTEM_API_FILE := $(TARGET_OUT_COMMON_INTERMEDIATES)/PACKAGING/system-api.txt
+INTERNAL_PLATFORM_SYSTEM_REMOVED_API_FILE := $(TARGET_OUT_COMMON_INTERMEDIATES)/PACKAGING/system-removed.txt
# This is the standard way to name a directory containing prebuilt target
# objects. E.g., prebuilt/$(TARGET_PREBUILT_TAG)/libc.so
diff --git a/core/definitions.mk b/core/definitions.mk
index 441c186..d7c68f3 100644
--- a/core/definitions.mk
+++ b/core/definitions.mk
@@ -1568,7 +1568,7 @@
$(call unzip-jar-files,$(PRIVATE_STATIC_JAVA_LIBRARIES),$(PRIVATE_CLASS_INTERMEDIATES_DIR))
$(call dump-words-to-file,$(PRIVATE_JAVA_SOURCES),$(PRIVATE_CLASS_INTERMEDIATES_DIR)/java-source-list)
$(hide) if [ -d "$(PRIVATE_SOURCE_INTERMEDIATES_DIR)" ]; then \
- find $(PRIVATE_SOURCE_INTERMEDIATES_DIR) -name '*.java' >> $(PRIVATE_CLASS_INTERMEDIATES_DIR)/java-source-list; \
+ find $(PRIVATE_SOURCE_INTERMEDIATES_DIR) -name '*.java' >> $(PRIVATE_CLASS_INTERMEDIATES_DIR)/java-source-list; \
fi
$(hide) tr ' ' '\n' < $(PRIVATE_CLASS_INTERMEDIATES_DIR)/java-source-list \
| sort -u > $(PRIVATE_CLASS_INTERMEDIATES_DIR)/java-source-list-uniq
@@ -1593,12 +1593,21 @@
-name $(word 1, $(PRIVATE_JAR_EXCLUDE_FILES)) \
$(addprefix -o -name , $(wordlist 2, 999, $(PRIVATE_JAR_EXCLUDE_FILES))) \
| xargs rm -rf)
-$(if $(PRIVATE_JAR_PACKAGES), $(hide) find $(PRIVATE_CLASS_INTERMEDIATES_DIR) -mindepth 1 -type d \
- $(foreach pkg, $(PRIVATE_JAR_PACKAGES), \
- -not -path $(PRIVATE_CLASS_INTERMEDIATES_DIR)/$(subst .,/,$(pkg))) \
- | xargs rm -rf)
-$(hide) jar $(if $(strip $(PRIVATE_JAR_MANIFEST)),-cfm,-cf) \
- $@ $(PRIVATE_JAR_MANIFEST) -C $(PRIVATE_CLASS_INTERMEDIATES_DIR) .
+$(if $(PRIVATE_JAR_PACKAGES), \
+ $(hide) find $(PRIVATE_CLASS_INTERMEDIATES_DIR) -mindepth 1 -type f \
+ $(foreach pkg, $(PRIVATE_JAR_PACKAGES), \
+ -not -path $(PRIVATE_CLASS_INTERMEDIATES_DIR)/$(subst .,/,$(pkg))/\*) -delete ; \
+ find $(PRIVATE_CLASS_INTERMEDIATES_DIR) -empty -delete)
+$(if $(PRIVATE_JAR_EXCLUDE_PACKAGES), $(hide) rm -rf \
+ $(foreach pkg, $(PRIVATE_JAR_EXCLUDE_PACKAGES), \
+ $(PRIVATE_CLASS_INTERMEDIATES_DIR)/$(subst .,/,$(pkg))))
+$(if $(PRIVATE_RMTYPEDEFS), $(hide) $(RMTYPEDEFS) -v $(PRIVATE_CLASS_INTERMEDIATES_DIR))
+$(if $(PRIVATE_JAR_MANIFEST), \
+ $(hide) sed -e 's/%BUILD_NUMBER%/$(BUILD_NUMBER)/' \
+ $(PRIVATE_JAR_MANIFEST) > $(dir $@)/manifest.mf && \
+ jar -cfm $@ $(dir $@)/manifest.mf \
+ -C $(PRIVATE_CLASS_INTERMEDIATES_DIR) ., \
+ $(hide) jar -cf $@ -C $(PRIVATE_CLASS_INTERMEDIATES_DIR) .)
endef
define transform-java-to-classes.jar
@@ -1644,12 +1653,21 @@
-name $(word 1, $(PRIVATE_JAR_EXCLUDE_FILES)) \
$(addprefix -o -name , $(wordlist 2, 999, $(PRIVATE_JAR_EXCLUDE_FILES))) \
| xargs rm -rf)
-$(if $(PRIVATE_JAR_PACKAGES), $(hide) find $(PRIVATE_CLASS_INTERMEDIATES_DIR) -mindepth 1 -type d \
- $(foreach pkg, $(PRIVATE_JAR_PACKAGES), \
- -not -path $(PRIVATE_CLASS_INTERMEDIATES_DIR)/$(subst .,/,$(pkg))) \
- | xargs rm -rf)
-$(hide) jar $(if $(strip $(PRIVATE_JAR_MANIFEST)),-cfm,-cf) \
- $@ $(PRIVATE_JAR_MANIFEST) -C $(PRIVATE_CLASS_INTERMEDIATES_DIR) .
+$(if $(PRIVATE_JAR_PACKAGES), \
+ $(hide) find $(PRIVATE_CLASS_INTERMEDIATES_DIR) -mindepth 1 -type f \
+ $(foreach pkg, $(PRIVATE_JAR_PACKAGES), \
+ -not -path $(PRIVATE_CLASS_INTERMEDIATES_DIR)/$(subst .,/,$(pkg))/\*) -delete ; \
+ find $(PRIVATE_CLASS_INTERMEDIATES_DIR) -empty -delete)
+$(if $(PRIVATE_JAR_EXCLUDE_PACKAGES), $(hide) rm -rf \
+ $(foreach pkg, $(PRIVATE_JAR_EXCLUDE_PACKAGES), \
+ $(PRIVATE_CLASS_INTERMEDIATES_DIR)/$(subst .,/,$(pkg))))
+$(if $(PRIVATE_RMTYPEDEFS), $(hide) $(RMTYPEDEFS) -v $(PRIVATE_CLASS_INTERMEDIATES_DIR))
+$(if $(PRIVATE_JAR_MANIFEST), \
+ $(hide) sed -e 's/%BUILD_NUMBER%/$(BUILD_NUMBER)/' \
+ $(PRIVATE_JAR_MANIFEST) > $(dir $@)/manifest.mf && \
+ jar -cfm $@ $(dir $@)/manifest.mf \
+ -C $(PRIVATE_CLASS_INTERMEDIATES_DIR) ., \
+ $(hide) jar -cf $@ -C $(PRIVATE_CLASS_INTERMEDIATES_DIR) .)
$(hide) mv $(PRIVATE_CLASS_INTERMEDIATES_DIR)/newstamp $(PRIVATE_CLASS_INTERMEDIATES_DIR)/stamp
endef
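# Illustrative Android.mk sketch (hypothetical module, not part of the patch)
# exercising the new jar-packaging knobs handled above:
include $(CLEAR_VARS)
LOCAL_MODULE := example-annotations
LOCAL_SRC_FILES := $(call all-java-files-under, src)
# %BUILD_NUMBER% inside this manifest is substituted when the jar is created.
LOCAL_JAR_MANIFEST := manifest.txt
# Keep only classes under com.example, then drop the com.example.internal subtree.
LOCAL_JAR_PACKAGES := com.example
LOCAL_JAR_EXCLUDE_PACKAGES := com.example.internal
# Run the rmtypedefs tool over the compiled classes before packaging.
LOCAL_RMTYPEDEFS := true
include $(BUILD_STATIC_JAVA_LIBRARY)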
@@ -1671,9 +1689,10 @@
define transform-classes.jar-to-dex
@echo "target Dex: $(PRIVATE_MODULE)"
@mkdir -p $(dir $@)
+$(hide) rm -f $(dir $@)/classes*.dex
$(hide) $(DX) \
$(if $(findstring windows,$(HOST_OS)),,-JXms16M -JXmx2048M) \
- --dex --output=$@ \
+ --dex --output=$(dir $@) \
$(incremental_dex) \
$(if $(NO_OPTIMIZE_DX), \
--no-optimize) \
@@ -1708,7 +1727,7 @@
define add-assets-to-package
$(hide) $(AAPT) package -u $(PRIVATE_AAPT_FLAGS) \
$(addprefix -c , $(PRIVATE_PRODUCT_AAPT_CONFIG)) \
- $(addprefix --preferred-configurations , $(PRIVATE_PRODUCT_AAPT_PREF_CONFIG)) \
+ $(addprefix --preferred-density , $(PRIVATE_PRODUCT_AAPT_PREF_CONFIG)) \
$(addprefix -M , $(PRIVATE_ANDROID_MANIFEST)) \
$(addprefix -S , $(PRIVATE_RESOURCE_DIR)) \
$(addprefix -A , $(PRIVATE_ASSET_DIR)) \
@@ -1743,11 +1762,7 @@
#TODO: update the manifest to point to the dex file
define add-dex-to-package
-$(if $(filter classes.dex,$(notdir $(PRIVATE_DEX_FILE))),\
-$(hide) zip -qj $@ $(PRIVATE_DEX_FILE),\
-$(hide) _adtp_classes_dex=$(dir $(PRIVATE_DEX_FILE))classes.dex; \
-cp $(PRIVATE_DEX_FILE) $$_adtp_classes_dex && \
-zip -qj $@ $$_adtp_classes_dex && rm -f $$_adtp_classes_dex)
+$(hide) zip -qj $@ $(dir $(PRIVATE_DEX_FILE))/classes*.dex
endef
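# Illustrative layout (hypothetical module, not part of the patch): dx now
# writes its output into a directory, so a large app may end up with
#   .../with-local/classes.dex
#   .../with-local/classes2.dex
# and add-dex-to-package stores every classes*.dex next to PRIVATE_DEX_FILE in
# the APK, which lets packaging pick up more than one dex file when dx
# produces them.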
# Add java resources added by the current module.
@@ -1996,8 +2011,8 @@
endef
# $(1): The file(s) to check (often $@)
-# $(2): The maximum total image size, in decimal bytes
-# $(3): the type of filesystem "yaffs" or "raw"
+# $(2): The maximum total image size, in decimal bytes.
+# Make sure to take into account any reserved space needed for the FS.
#
# If $(2) is empty, evaluates to "true"
#
@@ -2010,15 +2025,9 @@
total=$$(( $$( echo "$$size" ) )); \
printname=$$(echo -n "$(1)" | tr " " +); \
img_blocksize=$(call image-size-from-data-size,$(BOARD_FLASH_BLOCK_SIZE)); \
- if [ "$(3)" == "yaffs" ]; then \
- reservedblocks=8; \
- else \
- reservedblocks=0; \
- fi; \
twoblocks=$$((img_blocksize * 2)); \
onepct=$$((((($(2) / 100) - 1) / img_blocksize + 1) * img_blocksize)); \
- reserve=$$(((twoblocks > onepct ? twoblocks : onepct) + \
- reservedblocks * img_blocksize)); \
+ reserve=$$((twoblocks > onepct ? twoblocks : onepct)); \
maxsize=$$(($(2) - reserve)); \
echo "$$printname maxsize=$$maxsize blocksize=$$img_blocksize total=$$total reserve=$$reserve"; \
if [ "$$total" -gt "$$maxsize" ]; then \
@@ -2040,8 +2049,7 @@
# $(2): The partition size.
define assert-max-image-size
$(if $(2), \
- $(call assert-max-file-size,$(1),$(call image-size-from-data-size,$(2))), \
- true)
+ $(call assert-max-file-size,$(1),$(call image-size-from-data-size,$(2))))
endef
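# Worked example (hypothetical numbers, not part of the patch) of the reserve
# computed by assert-max-file-size above, with a 1 GiB limit and a 128 KiB
# flash block size:
#   twoblocks = 2 * 131072                                  = 262144
#   onepct    = ((1073741824/100 - 1)/131072 + 1) * 131072  = 10747904  (~1%, block-aligned)
#   reserve   = max(262144, 10747904)                       = 10747904
#   maxsize   = 1073741824 - 10747904                       = 1062993920
# so the image may use everything except roughly 1% of the partition; the old
# fixed 8-block yaffs reserve is gone along with the dropped third argument.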
@@ -2140,17 +2148,19 @@
# $(1) target
# $(2) stable api file
# $(3) api file to be tested
-# $(4) arguments for apicheck
-# $(5) command to run if apicheck failed
-# $(6) target dependent on this api check
-# $(7) additional dependencies
+# $(4) stable removed api file
+# $(5) removed api file to be tested
+# $(6) arguments for apicheck
+# $(7) command to run if apicheck failed
+# $(8) target dependent on this api check
+# $(9) additional dependencies
define check-api
-$(TARGET_OUT_COMMON_INTERMEDIATES)/PACKAGING/$(strip $(1))-timestamp: $(2) $(3) $(APICHECK) $(7)
+$(TARGET_OUT_COMMON_INTERMEDIATES)/PACKAGING/$(strip $(1))-timestamp: $(2) $(3) $(4) $(APICHECK) $(9)
@echo "Checking API:" $(1)
- $(hide) ( $(APICHECK_COMMAND) $(4) $(2) $(3) || ( $(5) ; exit 38 ) )
+ $(hide) ( $(APICHECK_COMMAND) $(6) $(2) $(3) $(4) $(5) || ( $(7) ; exit 38 ) )
$(hide) mkdir -p $$(dir $$@)
$(hide) touch $$@
-$(6): $(TARGET_OUT_COMMON_INTERMEDIATES)/PACKAGING/$(strip $(1))-timestamp
+$(8): $(TARGET_OUT_COMMON_INTERMEDIATES)/PACKAGING/$(strip $(1))-timestamp
endef
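# Illustrative call shape (hypothetical file names, not part of the patch) for
# the widened check-api signature:
#   $(eval $(call check-api, checkpublicapi-current, \
#       frameworks/base/api/current.txt, $(INTERNAL_PLATFORM_API_FILE), \
#       frameworks/base/api/removed.txt, $(INTERNAL_PLATFORM_REMOVED_API_FILE), \
#       -error 2 -error 3, \
#       cat $(BUILD_SYSTEM)/apicheck_msg_current.txt, \
#       checkapi, ))
# Callers now pass a stable/current pair of removed-API files alongside the
# public API files, and apicheck sees all four plus any extra flags in $(6).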
## Whether to build from source if prebuilt alternative exists
diff --git a/core/droiddoc.mk b/core/droiddoc.mk
index ecfe3dc..655af9b 100644
--- a/core/droiddoc.mk
+++ b/core/droiddoc.mk
@@ -134,15 +134,13 @@
$(full_target): PRIVATE_IN_CUSTOM_ASSET_DIR := $(LOCAL_DROIDDOC_CUSTOM_TEMPLATE_DIR)/$(LOCAL_DROIDDOC_CUSTOM_ASSET_DIR)
$(full_target): PRIVATE_OUT_ASSET_DIR := $(out_dir)/$(LOCAL_DROIDDOC_ASSET_DIR)
$(full_target): PRIVATE_OUT_CUSTOM_ASSET_DIR := $(out_dir)/$(LOCAL_DROIDDOC_CUSTOM_ASSET_DIR)
+
+html_dir_files :=
ifneq ($(strip $(LOCAL_DROIDDOC_HTML_DIR)),)
$(full_target): PRIVATE_DROIDDOC_HTML_DIR := -htmldir $(LOCAL_PATH)/$(LOCAL_DROIDDOC_HTML_DIR)
+html_dir_files := $(shell find $(LOCAL_PATH)/$(LOCAL_DROIDDOC_HTML_DIR) -type f)
else
-$(full_target): PRIVATE_DROIDDOC_HTML_DIR :=
-endif
-ifneq ($(strip $(LOCAL_ADDITIONAL_HTML_DIR)),)
-$(full_target): PRIVATE_ADDITIONAL_HTML_DIR := -htmldir2 $(LOCAL_PATH)/$(LOCAL_ADDITIONAL_HTML_DIR)
-else
-$(full_target): PRIVATE_ADDITIONAL_HTML_DIR :=
+$(full_target): PRIVATE_DROIDDOC_HTML_DIR :=
endif
ifneq ($(strip $(LOCAL_ADDITIONAL_HTML_DIR)),)
$(full_target): PRIVATE_ADDITIONAL_HTML_DIR := -htmldir2 $(LOCAL_PATH)/$(LOCAL_ADDITIONAL_HTML_DIR)
@@ -153,8 +151,6 @@
# TODO: not clear if this is used any more
$(full_target): PRIVATE_LOCAL_PATH := $(LOCAL_PATH)
-html_dir_files := $(shell find $(LOCAL_PATH)/$(LOCAL_DROIDDOC_HTML_DIR) -type f)
-
$(full_target): $(full_src_files) $(droiddoc_templates) $(droiddoc) $(html_dir_files) $(full_java_lib_deps) $(LOCAL_ADDITIONAL_DEPENDENCIES)
@echo Docs droiddoc: $(PRIVATE_OUT_DIR)
$(hide) mkdir -p $(dir $@)
diff --git a/core/envsetup.mk b/core/envsetup.mk
index 9caf7c0..e968fef 100644
--- a/core/envsetup.mk
+++ b/core/envsetup.mk
@@ -126,6 +126,7 @@
TARGET_COPY_OUT_SYSTEM := system
TARGET_COPY_OUT_DATA := data
TARGET_COPY_OUT_VENDOR := system/vendor
+TARGET_COPY_OUT_OEM := oem
TARGET_COPY_OUT_ROOT := root
TARGET_COPY_OUT_RECOVERY := recovery
@@ -231,6 +232,7 @@
HOST_OUT_INTERMEDIATE_LIBRARIES := $(HOST_OUT_INTERMEDIATES)/lib
HOST_OUT_NOTICE_FILES := $(HOST_OUT_INTERMEDIATES)/NOTICE_FILES
HOST_OUT_COMMON_INTERMEDIATES := $(HOST_COMMON_OUT_ROOT)/obj
+HOST_OUT_FAKE := $(HOST_OUT)/fake_packages
HOST_OUT_GEN := $(HOST_OUT)/gen
HOST_OUT_COMMON_GEN := $(HOST_COMMON_OUT_ROOT)/gen
@@ -317,9 +319,26 @@
$(TARGET_2ND_ARCH_VAR_PREFIX)TARGET_OUT_VENDOR_SHARED_LIBRARIES := $(TARGET_OUT_VENDOR)/lib
$(TARGET_2ND_ARCH_VAR_PREFIX)TARGET_OUT_VENDOR_APPS := $(TARGET_OUT_VENDOR_APPS)
+TARGET_OUT_OEM := $(PRODUCT_OUT)/$(TARGET_COPY_OUT_OEM)
+TARGET_OUT_OEM_EXECUTABLES := $(TARGET_OUT_OEM)/bin
+ifneq ($(filter %64,$(TARGET_ARCH)),)
+TARGET_OUT_OEM_SHARED_LIBRARIES := $(TARGET_OUT_OEM)/lib64
+else
+TARGET_OUT_OEM_SHARED_LIBRARIES := $(TARGET_OUT_OEM)/lib
+endif
+# We don't expect Java libraries in the oem.img.
+# TARGET_OUT_OEM_JAVA_LIBRARIES:= $(TARGET_OUT_OEM)/framework
+TARGET_OUT_OEM_APPS := $(TARGET_OUT_OEM)/app
+TARGET_OUT_OEM_ETC := $(TARGET_OUT_OEM)/etc
+
+$(TARGET_2ND_ARCH_VAR_PREFIX)TARGET_OUT_OEM_EXECUTABLES := $(TARGET_OUT_OEM_EXECUTABLES)
+$(TARGET_2ND_ARCH_VAR_PREFIX)TARGET_OUT_OEM_SHARED_LIBRARIES := $(TARGET_OUT_OEM)/lib
+$(TARGET_2ND_ARCH_VAR_PREFIX)TARGET_OUT_OEM_APPS := $(TARGET_OUT_OEM_APPS)
+
TARGET_OUT_UNSTRIPPED := $(PRODUCT_OUT)/symbols
TARGET_OUT_EXECUTABLES_UNSTRIPPED := $(TARGET_OUT_UNSTRIPPED)/system/bin
TARGET_OUT_SHARED_LIBRARIES_UNSTRIPPED := $(TARGET_OUT_UNSTRIPPED)/system/lib
+TARGET_OUT_VENDOR_SHARED_LIBRARIES_UNSTRIPPED := $(TARGET_OUT_UNSTRIPPED)/$(TARGET_COPY_OUT_VENDOR)/lib
TARGET_ROOT_OUT_UNSTRIPPED := $(TARGET_OUT_UNSTRIPPED)
TARGET_ROOT_OUT_SBIN_UNSTRIPPED := $(TARGET_OUT_UNSTRIPPED)/sbin
TARGET_ROOT_OUT_BIN_UNSTRIPPED := $(TARGET_OUT_UNSTRIPPED)/bin
diff --git a/core/executable_prefer_symlink.mk b/core/executable_prefer_symlink.mk
index f66a5f2..e4df92e 100644
--- a/core/executable_prefer_symlink.mk
+++ b/core/executable_prefer_symlink.mk
@@ -8,17 +8,27 @@
# configuration. Note that we require the TARGET_IS_64_BIT
# check because 32 bit targets may not define TARGET_PREFER_32_BIT_APPS
# et al. since those variables make no sense in that context.
-
ifneq ($(LOCAL_IS_HOST_MODULE),true)
my_symlink := $(addprefix $(TARGET_OUT)/bin/, $(LOCAL_MODULE))
ifeq ($(TARGET_IS_64_BIT),true)
- ifneq ($(TARGET_PREFER_32_BIT_APPS),true)
-$(my_symlink): PRIVATE_SRC_BINARY_NAME := $(LOCAL_MODULE_STEM_64)
+ ifeq ($(TARGET_SUPPORTS_64_BIT_APPS)|$(TARGET_SUPPORTS_32_BIT_APPS),true|true)
+ # We support both 32 and 64 bit apps, so we will have to
+ # base our decision on whether the target prefers one or the
+ # other.
+    ifneq ($(TARGET_PREFER_32_BIT_APPS),true)
+      $(my_symlink): PRIVATE_SRC_BINARY_NAME := $(LOCAL_MODULE_STEM_64)
+    else
+      $(my_symlink): PRIVATE_SRC_BINARY_NAME := $(LOCAL_MODULE_STEM_32)
+    endif
+ else ifeq ($(TARGET_SUPPORTS_64_BIT_APPS),true)
+ # We support only 64 bit apps.
+ $(my_symlink): PRIVATE_SRC_BINARY_NAME := $(LOCAL_MODULE_STEM_64)
else
-$(my_symlink): PRIVATE_SRC_BINARY_NAME := $(LOCAL_MODULE_STEM_32)
+ # We support only 32 bit apps.
+ $(my_symlink): PRIVATE_SRC_BINARY_NAME := $(LOCAL_MODULE_STEM_32)
endif
else
-$(my_symlink): PRIVATE_SRC_BINARY_NAME := $(LOCAL_MODULE_STEM_32)
+ $(my_symlink): PRIVATE_SRC_BINARY_NAME := $(LOCAL_MODULE_STEM_32)
endif
else
my_symlink := $(addprefix $(HOST_OUT)/bin/, $(LOCAL_MODULE))
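# Illustrative outcome (hypothetical 64-bit device, not part of the patch):
#   TARGET_SUPPORTS_64_BIT_APPS := true
#   TARGET_SUPPORTS_32_BIT_APPS := true
#   TARGET_PREFER_32_BIT_APPS :=          # unset
# For a module such as app_process with LOCAL_MODULE_STEM_32/_64 set to
# app_process32/app_process64, the system/bin/app_process symlink points at
# app_process64; setting TARGET_PREFER_32_BIT_APPS := true flips it to
# app_process32, and a 64-bit-only target always gets the 64-bit stem.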
diff --git a/core/host_dalvik_java_library.mk b/core/host_dalvik_java_library.mk
index e15bde2..61eb3ff 100644
--- a/core/host_dalvik_java_library.mk
+++ b/core/host_dalvik_java_library.mk
@@ -67,6 +67,7 @@
$(full_classes_compiled_jar): PRIVATE_JAVACFLAGS := $(LOCAL_JAVACFLAGS)
$(full_classes_compiled_jar): PRIVATE_JAR_EXCLUDE_FILES :=
$(full_classes_compiled_jar): PRIVATE_JAR_PACKAGES :=
+$(full_classes_compiled_jar): PRIVATE_JAR_EXCLUDE_PACKAGES :=
$(full_classes_compiled_jar): PRIVATE_RMTYPEDEFS :=
$(full_classes_compiled_jar): $(java_sources) $(java_resource_sources) $(full_java_lib_deps) \
$(jar_manifest_file) $(proto_java_sources_file_stamp) $(LOCAL_ADDITIONAL_DEPENDENCIES)
diff --git a/core/host_java_library.mk b/core/host_java_library.mk
index e5ebb11..7e0e437 100644
--- a/core/host_java_library.mk
+++ b/core/host_java_library.mk
@@ -20,21 +20,65 @@
#######################################
include $(BUILD_SYSTEM)/host_java_library_common.mk
+#######################################
+
+# Enable emma instrumentation only if the module asks so.
+ifeq (true,$(LOCAL_EMMA_INSTRUMENT))
+ifneq (true,$(EMMA_INSTRUMENT))
+LOCAL_EMMA_INSTRUMENT :=
+endif
+endif
+
+full_classes_compiled_jar := $(intermediates.COMMON)/classes-full-debug.jar
+emma_intermediates_dir := $(intermediates.COMMON)/emma_out
+# emma is hardcoded to use the leaf name of its input for the output file --
+# only the output directory can be changed
+full_classes_emma_jar := $(emma_intermediates_dir)/lib/$(notdir $(full_classes_compiled_jar))
+
+LOCAL_INTERMEDIATE_TARGETS += \
+ $(full_classes_compiled_jar) \
+ $(full_classes_emma_jar)
+
+#######################################
include $(BUILD_SYSTEM)/base_rules.mk
#######################################
-$(full_classes_compiled_jar): PRIVATE_JAVAC_DEBUG_FLAGS := -g
+ifeq (true,$(LOCAL_EMMA_INSTRUMENT))
+$(full_classes_emma_jar): PRIVATE_EMMA_COVERAGE_FILE := $(intermediates.COMMON)/coverage.em
+$(full_classes_emma_jar): PRIVATE_EMMA_INTERMEDIATES_DIR := $(emma_intermediates_dir)
+ifdef LOCAL_EMMA_COVERAGE_FILTER
+$(full_classes_emma_jar): PRIVATE_EMMA_COVERAGE_FILTER := $(LOCAL_EMMA_COVERAGE_FILTER)
+else
+# by default, avoid applying emma instrumentation onto emma classes itself,
+# otherwise there will be exceptions thrown
+$(full_classes_emma_jar): PRIVATE_EMMA_COVERAGE_FILTER := *,-emma,-emmarun,-com.vladium.*
+endif
+# this rule will generate both $(PRIVATE_EMMA_COVERAGE_FILE) and
+# $(full_classes_emma_jar)
+$(full_classes_emma_jar) : $(full_classes_compiled_jar) | $(EMMA_JAR)
+ $(transform-classes.jar-to-emma)
-java_alternative_checked_module :=
+$(LOCAL_BUILT_MODULE) : $(full_classes_emma_jar)
+ @echo Copying: $@
+ $(hide) $(ACP) -fp $< $@
+
+else # LOCAL_EMMA_INSTRUMENT
+# Directly build into LOCAL_BUILT_MODULE.
+full_classes_compiled_jar := $(LOCAL_BUILT_MODULE)
+endif # LOCAL_EMMA_INSTRUMENT
+
+$(full_classes_compiled_jar): PRIVATE_JAVAC_DEBUG_FLAGS := -g
# The layers file allows you to enforce a layering between java packages.
# Run build/tools/java-layers.py for more details.
layers_file := $(addprefix $(LOCAL_PATH)/, $(LOCAL_JAVA_LAYERS_FILE))
-$(LOCAL_BUILT_MODULE): PRIVATE_JAVA_LAYERS_FILE := $(layers_file)
-$(LOCAL_BUILT_MODULE): PRIVATE_JAVACFLAGS := $(LOCAL_JAVACFLAGS)
-$(LOCAL_BUILT_MODULE): PRIVATE_JAR_EXCLUDE_FILES :=
-$(LOCAL_BUILT_MODULE): PRIVATE_JAVA_LAYERS_FILE := $(layers_file)
-$(LOCAL_BUILT_MODULE): $(java_sources) $(java_resource_sources) $(full_java_lib_deps) \
+$(full_classes_compiled_jar): PRIVATE_JAVA_LAYERS_FILE := $(layers_file)
+$(full_classes_compiled_jar): PRIVATE_JAVACFLAGS := $(LOCAL_JAVACFLAGS)
+$(full_classes_compiled_jar): PRIVATE_JAR_EXCLUDE_FILES :=
+$(full_classes_compiled_jar): PRIVATE_JAR_PACKAGES :=
+$(full_classes_compiled_jar): PRIVATE_JAR_EXCLUDE_PACKAGES :=
+$(full_classes_compiled_jar): PRIVATE_RMTYPEDEFS :=
+$(full_classes_compiled_jar): $(java_sources) $(java_resource_sources) $(full_java_lib_deps) \
$(jar_manifest_file) $(proto_java_sources_file_stamp) $(LOCAL_ADDITIONAL_DEPENDENCIES)
$(transform-host-java-to-package)
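# Illustrative usage (hypothetical module, not part of the patch): on a
# coverage build (EMMA_INSTRUMENT=true in the environment), a host library can
# now opt in per-module and its classes are instrumented before being copied
# into the final jar:
include $(CLEAR_VARS)
LOCAL_MODULE := example-host-lib
LOCAL_SRC_FILES := $(call all-java-files-under, src)
LOCAL_EMMA_INSTRUMENT := true
include $(BUILD_HOST_JAVA_LIBRARY)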
diff --git a/core/java.mk b/core/java.mk
index 8863ac2..8afa748 100644
--- a/core/java.mk
+++ b/core/java.mk
@@ -72,10 +72,10 @@
# Choose leaf name for the compiled jar file.
ifeq ($(LOCAL_EMMA_INSTRUMENT),true)
full_classes_compiled_jar_leaf := classes-no-debug-var.jar
-built_dex_intermediate_leaf := classes-no-local.dex
+built_dex_intermediate_leaf := no-local
else
full_classes_compiled_jar_leaf := classes-full-debug.jar
-built_dex_intermediate_leaf := classes-with-local.dex
+built_dex_intermediate_leaf := with-local
endif
ifeq ($(LOCAL_PROGUARD_ENABLED),disabled)
@@ -96,7 +96,7 @@
# only the output directory can be changed
full_classes_emma_jar := $(emma_intermediates_dir)/lib/$(jarjar_leaf)
full_classes_proguard_jar := $(intermediates.COMMON)/$(proguard_jar_leaf)
-built_dex_intermediate := $(intermediates.COMMON)/$(built_dex_intermediate_leaf)
+built_dex_intermediate := $(intermediates.COMMON)/$(built_dex_intermediate_leaf)/classes.dex
full_classes_stubs_jar := $(intermediates.COMMON)/stubs.jar
ifeq ($(LOCAL_MODULE_CLASS)$(LOCAL_SRC_FILES)$(LOCAL_STATIC_JAVA_LIBRARIES)$(LOCAL_SOURCE_FILES_ALL_GENERATED),APPS)
@@ -321,6 +321,10 @@
$(full_classes_compiled_jar): PRIVATE_JAVA_LAYERS_FILE := $(layers_file)
$(full_classes_compiled_jar): PRIVATE_WARNINGS_ENABLE := $(LOCAL_WARNINGS_ENABLE)
+ifdef LOCAL_RMTYPEDEFS
+$(full_classes_compiled_jar): | $(RMTYPEDEFS)
+endif
+
# Compile the java files to a .jar file.
# This intentionally depends on java_sources, not all_java_sources.
# Deps for generated source files must be handled separately,
@@ -328,6 +332,8 @@
$(full_classes_compiled_jar): PRIVATE_JAVACFLAGS := $(LOCAL_JAVACFLAGS)
$(full_classes_compiled_jar): PRIVATE_JAR_EXCLUDE_FILES := $(LOCAL_JAR_EXCLUDE_FILES)
$(full_classes_compiled_jar): PRIVATE_JAR_PACKAGES := $(LOCAL_JAR_PACKAGES)
+$(full_classes_compiled_jar): PRIVATE_JAR_EXCLUDE_PACKAGES := $(LOCAL_JAR_EXCLUDE_PACKAGES)
+$(full_classes_compiled_jar): PRIVATE_RMTYPEDEFS := $(LOCAL_RMTYPEDEFS)
$(full_classes_compiled_jar): PRIVATE_DONT_DELETE_JAR_META_INF := $(LOCAL_DONT_DELETE_JAR_META_INF)
$(full_classes_compiled_jar): $(java_sources) $(java_resource_sources) $(full_java_lib_deps) \
$(jar_manifest_file) $(layers_file) $(RenderScript_file_stamp) \
@@ -470,7 +476,9 @@
$(transform-classes.jar-to-dex)
$(built_dex): $(built_dex_intermediate) | $(ACP)
@echo Copying: $@
- $(hide) $(ACP) -fp $< $@
+ $(hide) mkdir -p $(dir $@)
+ $(hide) rm -f $(dir $@)/classes*.dex
+ $(hide) $(ACP) -fp $(dir $<)/classes*.dex $(dir $@)
ifneq ($(GENERATE_DEX_DEBUG),)
$(install-dex-debug)
endif
@@ -497,7 +505,7 @@
$(findbugs_html) : $(findbugs_xml)
@mkdir -p $(dir $@)
@echo ConvertXmlToText: $@
- $(hide) prebuilt/common/findbugs/bin/convertXmlToText -html:fancy.xsl $(PRIVATE_XML_FILE) \
+ $(hide) $(FINDBUGS_DIR)/convertXmlToText -html:fancy.xsl $(PRIVATE_XML_FILE) \
> $@
$(LOCAL_MODULE)-findbugs : $(findbugs_html)
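# Illustrative layout enabled by moving the dex intermediate into a directory
# (module path is hypothetical): dx may now emit secondary dex files next to
# classes.dex, and the classes*.dex glob in the copy rule forwards all of them.
#   .../MyApp_intermediates/with-local/classes.dex
#   .../MyApp_intermediates/with-local/classes2.dex
#   .../MyApp_intermediates/classes.dex     <- $(built_dex)
#   .../MyApp_intermediates/classes2.dex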
diff --git a/core/main.mk b/core/main.mk
index 77efc19..cdf3200 100644
--- a/core/main.mk
+++ b/core/main.mk
@@ -185,12 +185,8 @@
# java version is really openjdk
ifeq ($(shell echo '$(java_version_str)' | grep -i openjdk),)
$(info ************************************************************)
-$(info You are attempting to build with an unsupported JDK.)
-$(info $(space))
-$(info This build requires OpenJDK, but you are using:)
+$(info You asked for an OpenJDK 7 build but your version is)
$(info $(java_version_str).)
-$(info Please follow the machine setup instructions at)
-$(info $(space)$(space)$(space)$(space)https://source.android.com/source/download.html)
$(info ************************************************************)
$(error stop)
endif # java version is not OpenJdk
@@ -391,7 +387,7 @@
sdk_repo_goal := $(strip $(filter sdk_repo,$(MAKECMDGOALS)))
MAKECMDGOALS := $(strip $(filter-out sdk_repo,$(MAKECMDGOALS)))
-ifneq ($(words $(filter-out $(INTERNAL_MODIFIER_TARGETS) checkbuild target-files-package,$(MAKECMDGOALS))),1)
+ifneq ($(words $(filter-out $(INTERNAL_MODIFIER_TARGETS) checkbuild emulator_tests target-files-package,$(MAKECMDGOALS))),1)
$(error The 'sdk' target may not be specified with any other targets)
endif
@@ -965,6 +961,7 @@
$(foreach f,$(INSTALLED_RADIOIMAGE_TARGET), \
$(call dist-for-goals, droidcore, $(f)))
+ ifneq ($(ANDROID_BUILD_EMBEDDED),true)
ifneq ($(TARGET_BUILD_PDK),true)
$(call dist-for-goals, droidcore, \
$(APPS_ZIP) \
@@ -972,6 +969,7 @@
$(PACKAGE_STATS_FILE) \
)
endif
+ endif
ifeq ($(EMMA_INSTRUMENT),true)
$(EMMA_META_ZIP) : $(INSTALLED_SYSTEMIMAGE)
@@ -1011,6 +1009,12 @@
target-native-tests : native-target-tests
tests : host-tests target-tests
+# To catch more build breakage, also build the tests modules in eng and userdebug builds.
+ifneq ($(TARGET_BUILD_PDK),true)
+ifneq ($(filter eng userdebug,$(TARGET_BUILD_VARIANT)),)
+droidcore : target-tests host-tests
+endif
+endif
.PHONY: lintall
@@ -1035,7 +1039,7 @@
.PHONY: clean
clean:
- @rm -rf $(OUT_DIR)
+ @rm -rf $(OUT_DIR)/*
@echo "Entire build directory removed."
.PHONY: clobber
diff --git a/core/package_internal.mk b/core/package_internal.mk
index d52703a..57b7b1f 100644
--- a/core/package_internal.mk
+++ b/core/package_internal.mk
@@ -241,6 +241,7 @@
$(proguard_options_file): $(R_file_stamp)
+resource_export_package :=
ifdef LOCAL_EXPORT_PACKAGE_RESOURCES
# Put this module's resources into a PRODUCT-agnostic package that
# other packages can use to build their own PRODUCT-agnostic R.java (etc.)
@@ -294,9 +295,19 @@
framework_res_package_export_deps := \
$(dir $(framework_res_package_export))src/R.stamp
endif # LOCAL_SDK_RES_VERSION
-$(R_file_stamp): $(framework_res_package_export_deps)
+all_library_res_package_exports := \
+ $(framework_res_package_export) \
+ $(foreach lib,$(LOCAL_RES_LIBRARIES),\
+ $(call intermediates-dir-for,APPS,$(lib),,COMMON)/package-export.apk)
+
+all_library_res_package_export_deps := \
+ $(framework_res_package_export_deps) \
+ $(foreach lib,$(LOCAL_RES_LIBRARIES),\
+ $(call intermediates-dir-for,APPS,$(lib),,COMMON)/src/R.stamp)
+
+$(resource_export_package) $(R_file_stamp) $(LOCAL_BUILT_MODULE): $(all_library_res_package_export_deps)
$(LOCAL_INTERMEDIATE_TARGETS): \
- PRIVATE_AAPT_INCLUDES := $(framework_res_package_export)
+ PRIVATE_AAPT_INCLUDES := $(all_library_res_package_exports)
endif # LOCAL_NO_STANDARD_LIBRARIES
ifneq ($(full_classes_jar),)
diff --git a/core/pathmap.mk b/core/pathmap.mk
index f12d19c..7b09f9a 100644
--- a/core/pathmap.mk
+++ b/core/pathmap.mk
@@ -29,10 +29,8 @@
pathmap_INCL := \
bootloader:bootable/bootloader/legacy/include \
camera:system/media/camera/include \
- corecg:external/skia/include/core \
frameworks-base:frameworks/base/include \
frameworks-native:frameworks/native/include \
- graphics:external/skia/include/core \
libc:bionic/libc/include \
libhardware:hardware/libhardware/include \
libhardware_legacy:hardware/libhardware_legacy/include \
@@ -51,6 +49,7 @@
audio-route:system/media/audio_route/include \
wilhelm:frameworks/wilhelm/include \
wilhelm-ut:frameworks/wilhelm/src/ut \
+ mediandk:frameworks/av/media/ndk/ \
speex:external/speex/include
#
@@ -86,7 +85,9 @@
drm \
opengl \
sax \
+ telecomm \
telephony \
+ phone \
wifi \
keystore \
rs \
@@ -110,19 +111,30 @@
v7/gridlayout \
v7/appcompat \
v7/mediarouter \
+ v7/recyclerview \
v8/renderscript \
- v13
+ v13 \
+ v17/leanback
+
+#
+# A list of all source roots under frameworks/support.
+#
+FRAMEWORKS_MULTIDEX_SUBDIRS := \
+ multidex \
+ multidex/instrumentation
#
# A version of FRAMEWORKS_SUPPORT_SUBDIRS that is expanded to full paths from
# the root of the tree.
#
FRAMEWORKS_SUPPORT_JAVA_SRC_DIRS := \
- $(addprefix frameworks/support/,$(FRAMEWORKS_SUPPORT_SUBDIRS))
+ $(addprefix frameworks/support/,$(FRAMEWORKS_SUPPORT_SUBDIRS)) \
+ $(addprefix frameworks/,$(FRAMEWORKS_MULTIDEX_SUBDIRS))
#
# A list of support library modules.
#
FRAMEWORKS_SUPPORT_JAVA_LIBRARIES := \
- $(foreach dir,$(FRAMEWORKS_SUPPORT_SUBDIRS),android-support-$(subst /,-,$(dir)))
+ $(foreach dir,$(FRAMEWORKS_SUPPORT_SUBDIRS),android-support-$(subst /,-,$(dir))) \
+ $(foreach dir,$(FRAMEWORKS_MULTIDEX_SUBDIRS),android-support-$(subst /,-,$(dir)))
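# Illustrative expansion of the two foreach/subst lines above, using only the
# entries already present in the lists:
#   v7/appcompat             -> android-support-v7-appcompat
#   v17/leanback             -> android-support-v17-leanback
#   multidex                 -> android-support-multidex
#   multidex/instrumentation -> android-support-multidex-instrumentation
# with source roots frameworks/support/<dir> and frameworks/<dir> respectively.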
diff --git a/core/pdk_config.mk b/core/pdk_config.mk
index b0cccc9..fd63820 100644
--- a/core/pdk_config.mk
+++ b/core/pdk_config.mk
@@ -18,9 +18,14 @@
# if PDK_FUSION_PLATFORM_ZIP is specified, do not override.
ifndef PDK_FUSION_PLATFORM_ZIP
+# Most PDK project paths should be using vendor/pdk/TARGET_DEVICE
+# but some legacy ones (e.g. the mini_armv7a_neon generic PDK) were set up
+# with vendor/pdk/TARGET_PRODUCT.
_pdk_fusion_default_platform_zip = $(wildcard \
vendor/pdk/$(TARGET_DEVICE)/$(TARGET_PRODUCT)-$(TARGET_BUILD_VARIANT)/platform/platform.zip \
-vendor/pdk/$(TARGET_DEVICE)/$(patsubst aosp_%,full_%,$(TARGET_PRODUCT))-$(TARGET_BUILD_VARIANT)/platform/platform.zip)
+vendor/pdk/$(TARGET_DEVICE)/$(patsubst aosp_%,full_%,$(TARGET_PRODUCT))-$(TARGET_BUILD_VARIANT)/platform/platform.zip \
+vendor/pdk/$(TARGET_PRODUCT)/$(TARGET_PRODUCT)-$(TARGET_BUILD_VARIANT)/platform/platform.zip \
+vendor/pdk/$(TARGET_PRODUCT)/$(patsubst aosp_%,full_%,$(TARGET_PRODUCT))-$(TARGET_BUILD_VARIANT)/platform/platform.zip)
ifneq (,$(_pdk_fusion_default_platform_zip))
PDK_FUSION_PLATFORM_ZIP := $(word 1, $(_pdk_fusion_default_platform_zip))
TARGET_BUILD_PDK := true
@@ -48,7 +53,7 @@
# all paths under out dir
PDK_PLATFORM_JAVA_ZIP_JAVA_TARGET_LIB_DIR += \
target/common/obj/JAVA_LIBRARIES/android_stubs_current_intermediates \
- target/common/obj/JAVA_LIBRARIES/core_intermediates \
+ target/common/obj/JAVA_LIBRARIES/core-libart_intermediates \
target/common/obj/JAVA_LIBRARIES/core-junit_intermediates \
target/common/obj/JAVA_LIBRARIES/ext_intermediates \
target/common/obj/JAVA_LIBRARIES/framework_intermediates \
@@ -56,6 +61,7 @@
target/common/obj/JAVA_LIBRARIES/android.test.runner_intermediates \
target/common/obj/JAVA_LIBRARIES/telephony-common_intermediates \
target/common/obj/JAVA_LIBRARIES/voip-common_intermediates \
+ target/common/obj/JAVA_LIBRARIES/ims-common_intermediates \
target/common/obj/JAVA_LIBRARIES/mms-common_intermediates \
target/common/obj/JAVA_LIBRARIES/android-ex-camera2_intermediates \
target/common/obj/JAVA_LIBRARIES/android-common_intermediates \
@@ -187,12 +193,16 @@
ifneq (,$(filter platform platform-java, $(MAKECMDGOALS))$(filter true,$(TARGET_BUILD_PDK)))
# files under $(PRODUCT_OUT)/symbols to help debugging.
# Source not included to PDK due to dependency issue, so provide symbols instead.
+
+# We may not be building all of them.
+# The platform.zip just silently ignores the nonexistent ones.
PDK_SYMBOL_FILES_LIST := \
- system/bin/app_process
+ system/bin/app_process32 \
+ system/bin/app_process64
ifdef PDK_FUSION_PLATFORM_ZIP
# symbols should be explicitly pulled for fusion build
-$(foreach f,$(PDK_SYMBOL_FILES_LIST),\
+$(foreach f,$(filter $(PDK_SYMBOL_FILES_LIST), $(_pdk_fusion_file_list)),\
$(eval $(call add-dependency,$(PRODUCT_OUT)/$(f),$(PRODUCT_OUT)/symbols/$(f))))
endif # PLATFORM_ZIP
endif # platform.zip build or PDK
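# Illustrative probe order of the widened platform.zip wildcard above, for a
# hypothetical TARGET_DEVICE=hammerhead, TARGET_PRODUCT=aosp_hammerhead,
# TARGET_BUILD_VARIANT=userdebug (names are examples only; the first match wins):
#   vendor/pdk/hammerhead/aosp_hammerhead-userdebug/platform/platform.zip
#   vendor/pdk/hammerhead/full_hammerhead-userdebug/platform/platform.zip
#   vendor/pdk/aosp_hammerhead/aosp_hammerhead-userdebug/platform/platform.zip
#   vendor/pdk/aosp_hammerhead/full_hammerhead-userdebug/platform/platform.zip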
diff --git a/core/post_clean.mk b/core/post_clean.mk
index 213c43c..9f15b6d 100644
--- a/core/post_clean.mk
+++ b/core/post_clean.mk
@@ -15,6 +15,7 @@
# Clean steps that need global knowledge of individual modules.
# This file must be included after all Android.mks have been loaded.
+#######################################################
# Checks the current build configurations against the previous build,
# clean artifacts in TARGET_COMMON_OUT_ROOT if necessary.
# If a package's resource overlay has been changed, its R class needs to be
@@ -51,3 +52,44 @@
previous_package_overlay_config :=
current_package_overlay_config :=
current_all_packages_config :=
+
+#######################################################
+# Check if we need to delete obsolete aidl-generated java files.
+# When an aidl file gets deleted (or renamed), the generated java file is obsolete.
+previous_aidl_config := $(TARGET_OUT_COMMON_INTERMEDIATES)/previous_aidl_config.mk
+current_aidl_config := $(TARGET_OUT_COMMON_INTERMEDIATES)/current_aidl_config.mk
+
+$(shell rm -rf $(current_aidl_config) \
+ && mkdir -p $(dir $(current_aidl_config))\
+ && touch $(current_aidl_config))
+-include $(previous_aidl_config)
+
+intermediates_to_clean :=
+modules_with_aidl_files :=
+$(foreach p, $(ALL_MODULES), \
+ $(if $(ALL_MODULES.$(p).AIDL_FILES),\
+ $(eval modules_with_aidl_files += $(p))\
+ $(shell echo 'AIDL_FILES.$(p) := $(ALL_MODULES.$(p).AIDL_FILES)' >> $(current_aidl_config)))\
+ $(if $(filter-out $(ALL_MODULES.$(p).AIDL_FILES),$(AIDL_FILES.$(p))),\
+ $(eval intermediates_to_clean += $(ALL_MODULES.$(p).INTERMEDIATE_SOURCE_DIR))))
+ifdef intermediates_to_clean
+$(info *** Obsolete aidl-generated files detected, clean intermediate files...)
+$(info *** rm -rf $(intermediates_to_clean))
+$(shell rm -rf $(intermediates_to_clean))
+intermediates_to_clean :=
+endif
+
+# For modules not loaded by the current build (e.g. when you are running mm/mmm),
+# we copy the info from the previous build.
+$(foreach p, $(filter-out $(modules_with_aidl_files),$(MODULES_WITH_AIDL_FILES)),\
+ $(shell echo 'AIDL_FILES.$(p) := $(AIDL_FILES.$(p))' >> $(current_aidl_config)))
+MODULES_WITH_AIDL_FILES := $(sort $(MODULES_WITH_AIDL_FILES) $(modules_with_aidl_files))
+$(shell echo 'MODULES_WITH_AIDL_FILES := $(MODULES_WITH_AIDL_FILES)' >> $(current_aidl_config))
+
+# Now current becomes previous.
+$(shell mv -f $(current_aidl_config) $(previous_aidl_config))
+
+MODULES_WITH_AIDL_FILES :=
+modules_with_aidl_files :=
+previous_aidl_config :=
+current_aidl_config :=
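# Illustrative contents of the generated previous_aidl_config.mk (module name
# and path are hypothetical; the format mirrors the echo lines above):
#   AIDL_FILES.MyService := frameworks/example/src/com/example/IMyService.aidl
#   MODULES_WITH_AIDL_FILES := MyService
# On the next run, any module whose recorded list differs from
# ALL_MODULES.<module>.AIDL_FILES has its intermediate source dir deleted.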
diff --git a/core/prebuilt_internal.mk b/core/prebuilt_internal.mk
index b5e5189..1ee9f9a 100644
--- a/core/prebuilt_internal.mk
+++ b/core/prebuilt_internal.mk
@@ -106,6 +106,7 @@
endif # LOCAL_STRIP_MODULE not true
+ifeq ($(LOCAL_MODULE_CLASS),APPS)
PACKAGES.$(LOCAL_MODULE).OVERRIDES := $(strip $(LOCAL_OVERRIDES_PACKAGES))
rs_compatibility_jni_libs :=
@@ -125,11 +126,9 @@
$(built_module) : PRIVATE_CERTIFICATE := $(LOCAL_CERTIFICATE).x509.pem
endif
ifeq ($(LOCAL_CERTIFICATE),)
- ifneq ($(filter APPS,$(LOCAL_MODULE_CLASS)),)
- # It is now a build error to add a prebuilt .apk without
- # specifying a key for it.
- $(error No LOCAL_CERTIFICATE specified for prebuilt "$(my_prebuilt_src_file)")
- endif
+ # It is now a build error to add a prebuilt .apk without
+ # specifying a key for it.
+ $(error No LOCAL_CERTIFICATE specified for prebuilt "$(my_prebuilt_src_file)")
else ifeq ($(LOCAL_CERTIFICATE),PRESIGNED)
# The magic string "PRESIGNED" means this package is already checked
# signed with its release key.
@@ -153,8 +152,6 @@
$(built_module) : PRIVATE_CERTIFICATE := $(LOCAL_CERTIFICATE).x509.pem
endif
-ifneq ($(filter APPS,$(LOCAL_MODULE_CLASS)),)
-
# Disable dex-preopt of prebuilts to save space
LOCAL_DEX_PREOPT := false
diff --git a/core/product.mk b/core/product.mk
index 174b429..7eef2e5 100644
--- a/core/product.mk
+++ b/core/product.mk
@@ -100,6 +100,12 @@
PRODUCT_RUNTIMES \
PRODUCT_BOOT_JARS \
PRODUCT_DEX_PREOPT_IMAGE_IN_DATA \
+ PRODUCT_SUPPORTS_VERITY \
+ PRODUCT_OEM_PROPERTIES \
+ PRODUCT_SYSTEM_PROPERTY_BLACKLIST \
+ PRODUCT_VERITY_PARTITION \
+ PRODUCT_VERITY_SIGNING_KEY \
+ PRODUCT_VERITY_MOUNTPOINT
define dump-product
$(info ==== $(1) ====)\
@@ -244,11 +250,13 @@
BOARD_BOOTIMAGE_PARTITION_SIZE \
BOARD_RECOVERYIMAGE_PARTITION_SIZE \
BOARD_SYSTEMIMAGE_PARTITION_SIZE \
+ BOARD_USERDATAIMAGE_FILE_SYSTEM_TYPE \
BOARD_USERDATAIMAGE_PARTITION_SIZE \
BOARD_CACHEIMAGE_FILE_SYSTEM_TYPE \
BOARD_CACHEIMAGE_PARTITION_SIZE \
BOARD_FLASH_BLOCK_SIZE \
- BOARD_SYSTEMIMAGE_PARTITION_SIZE \
+ BOARD_VENDORIMAGE_PARTITION_SIZE \
+ BOARD_VENDORIMAGE_FILE_SYSTEM_TYPE \
BOARD_INSTALLER_CMDLINE \
diff --git a/core/product_config.mk b/core/product_config.mk
index 9468362..261b935 100644
--- a/core/product_config.mk
+++ b/core/product_config.mk
@@ -317,8 +317,10 @@
# Everyone gets nodpi assets which are density-independent.
PRODUCT_AAPT_CONFIG += nodpi
+# Keep a copy of the space-separated config
+PRODUCT_AAPT_CONFIG_SP := $(PRODUCT_AAPT_CONFIG)
+
# Convert spaces to commas.
-comma := ,
PRODUCT_AAPT_CONFIG := \
$(subst $(space),$(comma),$(strip $(PRODUCT_AAPT_CONFIG)))
PRODUCT_AAPT_PREF_CONFIG := \
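# Illustrative before/after for the conversion above (config values are only
# examples): a product that sets
#   PRODUCT_AAPT_CONFIG := normal hdpi xhdpi
# ends up with
#   PRODUCT_AAPT_CONFIG_SP := normal hdpi xhdpi nodpi    # space-separated copy
#   PRODUCT_AAPT_CONFIG    := normal,hdpi,xhdpi,nodpi    # comma form passed to aapt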
diff --git a/core/proguard_basic_keeps.flags b/core/proguard_basic_keeps.flags
index 622d4ee..4a85db0 100644
--- a/core/proguard_basic_keeps.flags
+++ b/core/proguard_basic_keeps.flags
@@ -59,3 +59,6 @@
# platform version. We know about them, and they are safe.
# See proguard-android.txt in the SDK package.
-dontwarn android.support.**
+
+# Less spammy.
+-dontnote
diff --git a/core/tasks/apicheck.mk b/core/tasks/apicheck.mk
index 00b78b9..fc98f5b 100644
--- a/core/tasks/apicheck.mk
+++ b/core/tasks/apicheck.mk
@@ -42,6 +42,8 @@
checkapi-last, \
$(SRC_API_DIR)/$(last_released_sdk_version).txt, \
$(INTERNAL_PLATFORM_API_FILE), \
+ frameworks/base/api/removed.txt, \
+ $(INTERNAL_PLATFORM_REMOVED_API_FILE), \
-hide 2 -hide 3 -hide 4 -hide 5 -hide 6 -hide 24 -hide 25 -hide 26 -hide 27 \
-error 7 -error 8 -error 9 -error 10 -error 11 -error 12 -error 13 -error 14 -error 15 \
-error 16 -error 17 -error 18 , \
@@ -56,11 +58,13 @@
checkapi-current, \
frameworks/base/api/current.txt, \
$(INTERNAL_PLATFORM_API_FILE), \
+ frameworks/base/api/removed.txt, \
+ $(INTERNAL_PLATFORM_REMOVED_API_FILE), \
-error 2 -error 3 -error 4 -error 5 -error 6 \
-error 7 -error 8 -error 9 -error 10 -error 11 -error 12 -error 13 -error 14 -error 15 \
-error 16 -error 17 -error 18 -error 19 -error 20 -error 21 -error 23 -error 24 \
-error 25 -error 26 -error 27, \
- cat $(BUILD_SYSTEM)/apicheck_msg_current.txt, \
+ sed -e 's/%UPDATE_API%/update-api/g' $(BUILD_SYSTEM)/apicheck_msg_current.txt, \
checkapi, \
$(call doc-timestamp-for,api-stubs) \
))
@@ -69,5 +73,51 @@
update-api: $(INTERNAL_PLATFORM_API_FILE) | $(ACP)
@echo Copying current.txt
$(hide) $(ACP) $(INTERNAL_PLATFORM_API_FILE) frameworks/base/api/current.txt
+ @echo Copying removed.txt
+ $(hide) $(ACP) $(INTERNAL_PLATFORM_REMOVED_API_FILE) frameworks/base/api/removed.txt
+
+
+#####################Check System API#####################
+.PHONY: checksystemapi
+
+# Check that the System API we're building hasn't broken the last-released
+# SDK version.
+$(eval $(call check-api, \
+ checksystemapi-last, \
+ $(SRC_SYSTEM_API_DIR)/$(last_released_sdk_version).txt, \
+ $(INTERNAL_PLATFORM_SYSTEM_API_FILE), \
+ frameworks/base/api/system-removed.txt, \
+ $(INTERNAL_PLATFORM_SYSTEM_REMOVED_API_FILE), \
+ -hide 2 -hide 3 -hide 4 -hide 5 -hide 6 -hide 24 -hide 25 -hide 26 -hide 27 \
+ -error 7 -error 8 -error 9 -error 10 -error 11 -error 12 -error 13 -error 14 -error 15 \
+ -error 16 -error 17 -error 18 , \
+ cat $(BUILD_SYSTEM)/apicheck_msg_last.txt, \
+ checksystemapi, \
+ $(call doc-timestamp-for,system-api-stubs) \
+ ))
+
+# Check that the System API we're building hasn't changed from the not-yet-released
+# SDK version.
+$(eval $(call check-api, \
+ checksystemapi-current, \
+ frameworks/base/api/system-current.txt, \
+ $(INTERNAL_PLATFORM_SYSTEM_API_FILE), \
+ frameworks/base/api/system-removed.txt, \
+ $(INTERNAL_PLATFORM_SYSTEM_REMOVED_API_FILE), \
+ -error 2 -error 3 -error 4 -error 5 -error 6 \
+ -error 7 -error 8 -error 9 -error 10 -error 11 -error 12 -error 13 -error 14 -error 15 \
+ -error 16 -error 17 -error 18 -error 19 -error 20 -error 21 -error 23 -error 24 \
+ -error 25 -error 26 -error 27, \
+ sed -e 's/%UPDATE_API%/update-system-api/g' $(BUILD_SYSTEM)/apicheck_msg_current.txt, \
+ checksystemapi, \
+ $(call doc-timestamp-for,system-api-stubs) \
+ ))
+
+.PHONY: update-system-api
+update-system-api: $(INTERNAL_PLATFORM_SYSTEM_API_FILE) | $(ACP)
+ @echo Copying system-current.txt
+ $(hide) $(ACP) $(INTERNAL_PLATFORM_SYSTEM_API_FILE) frameworks/base/api/system-current.txt
+ @echo Copying system-removed.txt
+ $(hide) $(ACP) $(INTERNAL_PLATFORM_SYSTEM_REMOVED_API_FILE) frameworks/base/api/system-removed.txt
endif
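# Typical invocations of the targets wired up above (a sketch; the goal names
# come from this hunk, nothing else is assumed):
#   make checkapi checksystemapi   # verify against current and last-released API
#   make update-api                # refresh current.txt and removed.txt
#   make update-system-api         # refresh system-current.txt and system-removed.txt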
diff --git a/core/tasks/cts.mk b/core/tasks/cts.mk
index c521fa3..58bb235 100644
--- a/core/tasks/cts.mk
+++ b/core/tasks/cts.mk
@@ -18,7 +18,7 @@
cts_name := android-cts
DDMLIB_JAR := $(HOST_OUT_JAVA_LIBRARIES)/ddmlib-prebuilt.jar
-junit_host_jar := $(HOST_OUT_JAVA_LIBRARIES)/junit.jar
+JUNIT_HOST_JAR := $(HOST_OUT_JAVA_LIBRARIES)/junit.jar
HOSTTESTLIB_JAR := $(HOST_OUT_JAVA_LIBRARIES)/hosttestlib.jar
TF_JAR := $(HOST_OUT_JAVA_LIBRARIES)/tradefed-prebuilt.jar
CTS_TF_JAR := $(HOST_OUT_JAVA_LIBRARIES)/cts-tradefed.jar
@@ -48,6 +48,7 @@
android.core.tests.libcore.package.harmony_java_text \
android.core.tests.libcore.package.harmony_java_util \
android.core.tests.libcore.package.harmony_javax_security \
+ android.core.tests.libcore.package.okhttp \
android.core.tests.runner
# The list of test packages that apache-harmony-tests (external/apache-harmony/Android.mk)
@@ -58,17 +59,22 @@
android.core.tests.libcore.package.harmony_prefs \
android.core.tests.libcore.package.harmony_sql
+
+CTS_TEST_JAR_LIST := \
+ CtsJdwp
+
# Depend on the full package paths rather than the phony targets to avoid
# rebuilding the packages every time.
CTS_CORE_CASES := $(foreach pkg,$(CTS_CORE_CASE_LIST),$(call intermediates-dir-for,APPS,$(pkg))/package.apk)
+CTS_TEST_JAR_FILES := $(foreach c,$(CTS_TEST_JAR_LIST),$(call intermediates-dir-for,JAVA_LIBRARIES,$(c))/javalib.jar)
-include cts/CtsTestCaseList.mk
CTS_CASE_LIST := $(CTS_CORE_CASE_LIST) $(CTS_TEST_CASE_LIST)
DEFAULT_TEST_PLAN := $(cts_dir)/$(cts_name)/resource/plans
CTS_TEST_CASE_LIST_FILES := $(foreach c, $(CTS_TEST_CASE_LIST), $(call intermediates-dir-for,APPS,$(c))/package.apk)
-$(cts_dir)/all_cts_files_stamp: PRIVATE_JUNIT_HOST_JAR := $(junit_host_jar)
-$(cts_dir)/all_cts_files_stamp: $(CTS_CORE_CASES) $(CTS_TEST_CASES) $(CTS_TEST_CASE_LIST_FILES) $(junit_host_jar) $(HOSTTESTLIB_JAR) $(CTS_HOST_LIBRARY_JARS) $(TF_JAR) $(VMTESTSTF_JAR) $(CTS_TF_JAR) $(CTS_TF_EXEC_PATH) $(CTS_TF_README_PATH) $(ACP)
+$(cts_dir)/all_cts_files_stamp: PRIVATE_JUNIT_HOST_JAR := $(JUNIT_HOST_JAR)
+$(cts_dir)/all_cts_files_stamp: $(CTS_CORE_CASES) $(CTS_TEST_CASES) $(CTS_TEST_CASE_LIST_FILES) $(JUNIT_HOST_JAR) $(HOSTTESTLIB_JAR) $(CTS_HOST_LIBRARY_JARS) $(TF_JAR) $(VMTESTSTF_JAR) $(CTS_TF_JAR) $(CTS_TF_EXEC_PATH) $(CTS_TF_README_PATH) $(ACP) $(CTS_TEST_JAR_FILES)
# Make necessary directory for CTS
$(hide) rm -rf $(PRIVATE_CTS_DIR)
$(hide) mkdir -p $(TMP_DIR)
@@ -80,6 +86,7 @@
$(hide) $(ACP) -fp $(VMTESTSTF_JAR) $(PRIVATE_DIR)/repository/testcases
$(hide) $(ACP) -fp $(DDMLIB_JAR) $(PRIVATE_JUNIT_HOST_JAR) $(HOSTTESTLIB_JAR) $(CTS_HOST_LIBRARY_JARS) $(TF_JAR) $(CTS_TF_JAR) $(CTS_TF_EXEC_PATH) $(CTS_TF_README_PATH) $(PRIVATE_DIR)/tools
# Change mode of the executables
+ $(foreach jar,$(CTS_TEST_JAR_LIST),$(call copy-testcase-jar,$(jar)))
$(foreach apk,$(CTS_CASE_LIST),$(call copy-testcase-apk,$(apk)))
$(foreach testcase,$(CTS_TEST_CASES),$(call copy-testcase,$(testcase)))
$(hide) touch $@
@@ -95,8 +102,8 @@
define generate-core-test-description
@echo "Generate core-test description ("$(notdir $(1))")"
$(hide) java -Xmx256M \
- -Xbootclasspath/a:$(PRIVATE_CLASSPATH) \
- -classpath $(PRIVATE_CLASSPATH):$(HOST_OUT_JAVA_LIBRARIES)/descGen.jar:$(HOST_OUT_JAVA_LIBRARIES)/junit.jar:$(HOST_JDK_TOOLS_JAR) \
+ -Xbootclasspath/a:$(PRIVATE_CLASSPATH):$(JUNIT_HOST_JAR) \
+ -classpath $(HOST_OUT_JAVA_LIBRARIES)/descGen.jar:$(HOST_JDK_TOOLS_JAR) \
$(PRIVATE_PARAMS) CollectAllTests $(1) $(2) $(3) "$(4)" $(5) $(6)
endef
@@ -104,15 +111,17 @@
CONSCRYPT_INTERMEDIATES :=$(call intermediates-dir-for,JAVA_LIBRARIES,conscrypt,,COMMON)
BOUNCYCASTLE_INTERMEDIATES :=$(call intermediates-dir-for,JAVA_LIBRARIES,bouncycastle,,COMMON)
APACHEXML_INTERMEDIATES :=$(call intermediates-dir-for,JAVA_LIBRARIES,apache-xml,,COMMON)
-OKHTTP_INTERMEDIATES :=$(call intermediates-dir-for,JAVA_LIBRARIES,okhttp,,COMMON)
-APACHEHARMONY_INTERMEDIATES :=$(call intermediates-dir-for,JAVA_LIBRARIES,apache-harmony-tests,,COMMON)
+OKHTTP_INTERMEDIATES :=$(call intermediates-dir-for,JAVA_LIBRARIES,okhttp-nojarjar,,COMMON)
+OKHTTPTESTS_INTERMEDIATES :=$(call intermediates-dir-for,JAVA_LIBRARIES,okhttp-tests-nojarjar,,COMMON)
+OKHTTP_REPACKAGED_INTERMEDIATES :=$(call intermediates-dir-for,JAVA_LIBRARIES,okhttp,,COMMON)
+APACHEHARMONYTESTS_INTERMEDIATES :=$(call intermediates-dir-for,JAVA_LIBRARIES,apache-harmony-tests,,COMMON)
SQLITEJDBC_INTERMEDIATES :=$(call intermediates-dir-for,JAVA_LIBRARIES,sqlite-jdbc,,COMMON)
JUNIT_INTERMEDIATES :=$(call intermediates-dir-for,JAVA_LIBRARIES,core-junit,,COMMON)
CORETESTS_INTERMEDIATES :=$(call intermediates-dir-for,JAVA_LIBRARIES,core-tests,,COMMON)
JSR166TESTS_INTERMEDIATES :=$(call intermediates-dir-for,JAVA_LIBRARIES,jsr166-tests,,COMMON)
CONSCRYPTTESTS_INTERMEDIATES :=$(call intermediates-dir-for,JAVA_LIBRARIES,conscrypt-tests,,COMMON)
-GEN_CLASSPATH := $(CORE_INTERMEDIATES)/classes.jar:$(CONSCRYPT_INTERMEDIATES)/classes.jar:$(BOUNCYCASTLE_INTERMEDIATES)/classes.jar:$(APACHEXML_INTERMEDIATES)/classes.jar:$(APACHEHARMONY_INTERMEDIATES)/classes.jar:$(OKHTTP_INTERMEDIATES)/classes.jar:$(JUNIT_INTERMEDIATES)/classes.jar:$(SQLITEJDBC_INTERMEDIATES)/javalib.jar:$(CORETESTS_INTERMEDIATES)/javalib.jar:$(JSR166TESTS_INTERMEDIATES)/javalib.jar:$(CONSCRYPTTESTS_INTERMEDIATES)/javalib.jar
+GEN_CLASSPATH := $(CORE_INTERMEDIATES)/classes.jar:$(CONSCRYPT_INTERMEDIATES)/classes.jar:$(BOUNCYCASTLE_INTERMEDIATES)/classes.jar:$(APACHEXML_INTERMEDIATES)/classes.jar:$(APACHEHARMONYTESTS_INTERMEDIATES)/classes.jar:$(OKHTTP_INTERMEDIATES)/classes.jar:$(OKHTTPTESTS_INTERMEDIATES)/classes.jar:$(OKHTTP_REPACKAGED_INTERMEDIATES)/classes.jar:$(JUNIT_INTERMEDIATES)/classes.jar:$(SQLITEJDBC_INTERMEDIATES)/javalib.jar:$(CORETESTS_INTERMEDIATES)/javalib.jar:$(JSR166TESTS_INTERMEDIATES)/javalib.jar:$(CONSCRYPTTESTS_INTERMEDIATES)/javalib.jar
CTS_CORE_XMLS := \
$(CTS_TESTCASES_OUT)/android.core.tests.libcore.package.dalvik.xml \
@@ -136,6 +145,7 @@
$(CTS_TESTCASES_OUT)/android.core.tests.libcore.package.harmony_logging.xml \
$(CTS_TESTCASES_OUT)/android.core.tests.libcore.package.harmony_prefs.xml \
$(CTS_TESTCASES_OUT)/android.core.tests.libcore.package.harmony_sql.xml \
+ $(CTS_TESTCASES_OUT)/android.core.tests.libcore.package.okhttp.xml \
$(CTS_CORE_XMLS): PRIVATE_CLASSPATH:=$(GEN_CLASSPATH)
# Why does this depend on javalib.jar instead of classes.jar? Because
@@ -143,7 +153,7 @@
# build system requires that dependencies use javalib.jar. If
# javalib.jar is up-to-date, then classes.jar is as well. Depending
# on classes.jar will build the files incorrectly.
-CTS_CORE_XMLS_DEPS := $(CTS_CORE_CASES) $(HOST_OUT_JAVA_LIBRARIES)/descGen.jar $(HOST_OUT_JAVA_LIBRARIES)/junit.jar $(CORE_INTERMEDIATES)/javalib.jar $(BOUNCYCASTLE_INTERMEDIATES)/javalib.jar $(APACHEXML_INTERMEDIATES)/javalib.jar $(APACHEHARMONY_INTERMEDIATES)/javalib.jar $(OKHTTP_INTERMEDIATES)/javalib.jar $(SQLITEJDBC_INTERMEDIATES)/javalib.jar $(JUNIT_INTERMEDIATES)/javalib.jar $(CORETESTS_INTERMEDIATES)/javalib.jar $(JSR166TESTS_INTERMEDIATES)/javalib.jar $(CONSCRYPTTESTS_INTERMEDIATES)/javalib.jar build/core/tasks/cts.mk | $(ACP)
+CTS_CORE_XMLS_DEPS := $(CTS_CORE_CASES) $(HOST_OUT_JAVA_LIBRARIES)/descGen.jar $(JUNIT_HOST_JAR) $(CORE_INTERMEDIATES)/javalib.jar $(BOUNCYCASTLE_INTERMEDIATES)/javalib.jar $(APACHEXML_INTERMEDIATES)/javalib.jar $(APACHEHARMONYTESTS_INTERMEDIATES)/javalib.jar $(OKHTTP_INTERMEDIATES)/javalib.jar $(OKHTTPTESTS_INTERMEDIATES)/javalib.jar $(OKHTTP_REPACKAGED_INTERMEDIATES)/javalib.jar $(SQLITEJDBC_INTERMEDIATES)/javalib.jar $(JUNIT_INTERMEDIATES)/javalib.jar $(CORETESTS_INTERMEDIATES)/javalib.jar $(JSR166TESTS_INTERMEDIATES)/javalib.jar $(CONSCRYPTTESTS_INTERMEDIATES)/javalib.jar build/core/tasks/cts.mk | $(ACP)
$(CTS_TESTCASES_OUT)/android.core.tests.libcore.package.dalvik.xml: $(CTS_CORE_XMLS_DEPS)
$(hide) mkdir -p $(CTS_TESTCASES_OUT)
@@ -268,31 +278,37 @@
$(hide) mkdir -p $(CTS_TESTCASES_OUT)
$(call generate-core-test-description,$(CTS_TESTCASES_OUT)/android.core.tests.libcore.package.harmony_beans,\
cts/tests/core/libcore/harmony_beans/AndroidManifest.xml,\
- $(APACHEHARMONY_INTERMEDIATES)/javalib.jar,com.android.org.apache.harmony.beans,\
+ $(APACHEHARMONYTESTS_INTERMEDIATES)/javalib.jar,com.android.org.apache.harmony.beans,\
libcore/expectations external/apache-harmony/Android.mk)
$(CTS_TESTCASES_OUT)/android.core.tests.libcore.package.harmony_logging.xml: $(CTS_CORE_XMLS_DEPS)
$(hide) mkdir -p $(CTS_TESTCASES_OUT)
$(call generate-core-test-description,$(CTS_TESTCASES_OUT)/android.core.tests.libcore.package.harmony_logging,\
cts/tests/core/libcore/harmony_logging/AndroidManifest.xml,\
- $(APACHEHARMONY_INTERMEDIATES)/javalib.jar,com.android.org.apache.harmony.logging,\
+ $(APACHEHARMONYTESTS_INTERMEDIATES)/javalib.jar,com.android.org.apache.harmony.logging,\
libcore/expectations external/apache-harmony/Android.mk)
-
$(CTS_TESTCASES_OUT)/android.core.tests.libcore.package.harmony_prefs.xml: $(CTS_CORE_XMLS_DEPS)
$(hide) mkdir -p $(CTS_TESTCASES_OUT)
$(call generate-core-test-description,$(CTS_TESTCASES_OUT)/android.core.tests.libcore.package.harmony_prefs,\
cts/tests/core/libcore/harmony_prefs/AndroidManifest.xml,\
- $(APACHEHARMONY_INTERMEDIATES)/javalib.jar,com.android.org.apache.harmony.prefs,\
+ $(APACHEHARMONYTESTS_INTERMEDIATES)/javalib.jar,com.android.org.apache.harmony.prefs,\
libcore/expectations external/apache-harmony/Android.mk)
$(CTS_TESTCASES_OUT)/android.core.tests.libcore.package.harmony_sql.xml: $(CTS_CORE_XMLS_DEPS)
$(hide) mkdir -p $(CTS_TESTCASES_OUT)
$(call generate-core-test-description,$(CTS_TESTCASES_OUT)/android.core.tests.libcore.package.harmony_sql,\
cts/tests/core/libcore/harmony_sql/AndroidManifest.xml,\
- $(APACHEHARMONY_INTERMEDIATES)/javalib.jar,com.android.org.apache.harmony.sql,\
+ $(APACHEHARMONYTESTS_INTERMEDIATES)/javalib.jar,com.android.org.apache.harmony.sql,\
libcore/expectations external/apache-harmony/Android.mk)
+$(CTS_TESTCASES_OUT)/android.core.tests.libcore.package.okhttp.xml: $(CTS_CORE_XMLS_DEPS)
+ $(hide) mkdir -p $(CTS_TESTCASES_OUT)
+ $(call generate-core-test-description,$(CTS_TESTCASES_OUT)/android.core.tests.libcore.package.okhttp,\
+ cts/tests/core/libcore/okhttp/AndroidManifest.xml,\
+ $(OKHTTPTESTS_INTERMEDIATES)/javalib.jar,,\
+ libcore/expectations)
+
# ----- Generate the test descriptions for the vm-tests-tf -----
#
CORE_VM_TEST_TF_DESC := $(CTS_TESTCASES_OUT)/android.core.vm-tests-tf.xml
@@ -305,7 +321,7 @@
$(CORE_VM_TEST_TF_DESC): PRIVATE_CLASSPATH:=$(GEN_CLASSPATH)
# Please see big comment above on why this line depends on javalib.jar instead of classes.jar
-$(CORE_VM_TEST_TF_DESC): $(HOST_OUT_JAVA_LIBRARIES)/descGen.jar $(HOST_OUT_JAVA_LIBRARIES)/junit.jar $(CORE_INTERMEDIATES)/javalib.jar $(JUNIT_INTERMEDIATES)/javalib.jar $(VMTESTSTF_JAR) $(DDMLIB_JAR) | $(ACP)
+$(CORE_VM_TEST_TF_DESC): $(HOST_OUT_JAVA_LIBRARIES)/descGen.jar $(JUNIT_HOST_JAR) $(CORE_INTERMEDIATES)/javalib.jar $(JUNIT_INTERMEDIATES)/javalib.jar $(VMTESTSTF_JAR) $(DDMLIB_JAR) | $(ACP)
$(hide) mkdir -p $(CTS_TESTCASES_OUT)
$(call generate-core-test-description,$(CTS_TESTCASES_OUT)/android.core.vm-tests-tf,\
cts/tests/vm-tests-tf/AndroidManifest.xml,\
@@ -351,3 +367,11 @@
$(hide) $(ACP) -fp $(1) $(PRIVATE_DIR)/repository/testcases/$(notdir $1)
endef
+
+define copy-testcase-jar
+
+$(hide) $(ACP) -fp $(call intermediates-dir-for,JAVA_LIBRARIES,$(1))/javalib.jar \
+ $(PRIVATE_DIR)/repository/testcases/$(1).jar
+
+endef
+
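# Illustrative expansion of the new copy-testcase-jar define for the single
# entry currently in CTS_TEST_JAR_LIST:
#   $(ACP) -fp $(call intermediates-dir-for,JAVA_LIBRARIES,CtsJdwp)/javalib.jar \
#       $(PRIVATE_DIR)/repository/testcases/CtsJdwp.jar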
diff --git a/core/tasks/oem_image.mk b/core/tasks/oem_image.mk
new file mode 100644
index 0000000..26b9aba
--- /dev/null
+++ b/core/tasks/oem_image.mk
@@ -0,0 +1,46 @@
+#
+# Copyright (C) 2014 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+# We build oem.img only if it's asked for.
+ifneq ($(filter $(MAKECMDGOALS),oem_image),)
+ifndef BOARD_OEMIMAGE_PARTITION_SIZE
+$(error BOARD_OEMIMAGE_PARTITION_SIZE is not set.)
+endif
+
+INTERNAL_OEMIMAGE_FILES := \
+ $(filter $(TARGET_OUT_OEM)/%,$(ALL_DEFAULT_INSTALLED_MODULES))
+
+oemimage_intermediates := \
+ $(call intermediates-dir-for,PACKAGING,oem)
+BUILT_OEMIMAGE_TARGET := $(PRODUCT_OUT)/oem.img
+# We just build this directly to the install location.
+INSTALLED_OEMIMAGE_TARGET := $(BUILT_OEMIMAGE_TARGET)
+
+$(INSTALLED_OEMIMAGE_TARGET) : $(INTERNAL_USERIMAGES_DEPS) $(INTERNAL_OEMIMAGE_FILES)
+ $(call pretty,"Target oem fs image: $@")
+ @mkdir -p $(TARGET_OUT_OEM)
+ @mkdir -p $(oemimage_intermediates) && rm -rf $(oemimage_intermediates)/oem_image_info.txt
+ $(call generate-userimage-prop-dictionary, $(oemimage_intermediates)/oem_image_info.txt, skip_fsck=true)
+ $(hide) PATH=$(foreach p,$(INTERNAL_USERIMAGES_BINARY_PATHS),$(p):)$$PATH \
+ ./build/tools/releasetools/build_image.py \
+ $(TARGET_OUT_OEM) $(oemimage_intermediates)/oem_image_info.txt $@
+ $(hide) $(call assert-max-image-size,$@,$(BOARD_OEMIMAGE_PARTITION_SIZE))
+
+.PHONY: oem_image
+oem_image : $(INSTALLED_OEMIMAGE_TARGET)
+$(call dist-for-goals, oem_image, $(INSTALLED_OEMIMAGE_TARGET))
+
+endif # oem_image in $(MAKECMDGOALS)
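# Minimal sketch of using the new oem_image goal (the size value is a made-up
# example): in the device's BoardConfig.mk set
#   BOARD_OEMIMAGE_PARTITION_SIZE := 67108864
# then build the image, optionally adding it to the dist output:
#   make oem_image
#   make dist oem_image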
diff --git a/core/tasks/vendor_module_check.mk b/core/tasks/vendor_module_check.mk
index 9c19422..793dd16 100644
--- a/core/tasks/vendor_module_check.mk
+++ b/core/tasks/vendor_module_check.mk
@@ -27,11 +27,15 @@
invensense \
intel \
lge \
+ moto \
nvidia \
nxp \
+ nxpsw \
qcom \
+ qti \
samsung \
samsung_arm \
+ synaptics \
ti \
trusted_logic \
widevine
diff --git a/core/version_defaults.mk b/core/version_defaults.mk
index 6690f70..31c3242 100644
--- a/core/version_defaults.mk
+++ b/core/version_defaults.mk
@@ -41,7 +41,7 @@
# which is the version that we reveal to the end user.
# Update this value when the platform version changes (rather
# than overriding it somewhere else). Can be an arbitrary string.
- PLATFORM_VERSION := 4.4W
+ PLATFORM_VERSION := L
endif
ifeq "" "$(PLATFORM_SDK_VERSION)"
@@ -59,7 +59,12 @@
ifeq "" "$(PLATFORM_VERSION_CODENAME)"
# This is the current development code-name, if the build is not a final
# release build. If this is a final release build, it is simply "REL".
- PLATFORM_VERSION_CODENAME := REL
+ PLATFORM_VERSION_CODENAME := L
+
+ # This is all of the development codenames that are active. Should be either
+ # the same as PLATFORM_VERSION_CODENAME or a comma-separated list of additional
+ # codenames after PLATFORM_VERSION_CODENAME.
+ PLATFORM_VERSION_ALL_CODENAMES := $(PLATFORM_VERSION_CODENAME)
endif
ifeq "" "$(DEFAULT_APP_TARGET_SDK)"
diff --git a/envsetup.sh b/envsetup.sh
index e462b71..b63182f 100644
--- a/envsetup.sh
+++ b/envsetup.sh
@@ -2,7 +2,7 @@
cat <<EOF
Invoke ". build/envsetup.sh" from your shell to add the following functions to your environment:
- lunch: lunch <product_name>-<build_variant>
-- tapas: tapas [<App1> <App2> ...] [arm|x86|mips|armv5] [eng|userdebug|user]
+- tapas: tapas [<App1> <App2> ...] [arm|x86|mips|armv5|arm64|x86_64|mips64] [eng|userdebug|user]
- croot: Changes directory to the top of the tree.
- m: Makes from the top of the tree.
- mm: Builds all of the modules in the current directory, but not their dependencies.
@@ -36,7 +36,7 @@
return
fi
(\cd $T; CALLED_FROM_SETUP=true BUILD_SYSTEM=build/core \
- make --no-print-directory -f build/core/config.mk dumpvar-abs-$1)
+ command make --no-print-directory -f build/core/config.mk dumpvar-abs-$1)
}
# Get the exact value of a build variable.
@@ -48,7 +48,7 @@
return
fi
(\cd $T; CALLED_FROM_SETUP=true BUILD_SYSTEM=build/core \
- make --no-print-directory -f build/core/config.mk dumpvar-$1)
+ command make --no-print-directory -f build/core/config.mk dumpvar-$1)
}
# check to see if the supplied product is one we can build
@@ -207,6 +207,24 @@
unset ANDROID_HOST_OUT
export ANDROID_HOST_OUT=$(get_abs_build_var HOST_OUT)
+ # If prebuilts/android-emulator/<system>/ exists, prepend it to our PATH
+ # to ensure that the corresponding 'emulator' binaries are used.
+ case $(uname -s) in
+ Darwin)
+ ANDROID_EMULATOR_PREBUILTS=$T/prebuilts/android-emulator/darwin-x86_64
+ ;;
+ Linux)
+ ANDROID_EMULATOR_PREBUILTS=$T/prebuilts/android-emulator/linux-x86_64
+ ;;
+ *)
+ ANDROID_EMULATOR_PREBUILTS=
+ ;;
+ esac
+ if [ -n "$ANDROID_EMULATOR_PREBUILTS" -a -d "$ANDROID_EMULATOR_PREBUILTS" ]; then
+ PATH=$ANDROID_EMULATOR_PREBUILTS:$PATH
+ export ANDROID_EMULATOR_PREBUILTS
+ fi
+
# needed for building linux on MacOS
# TODO: fix the path
#export HOST_EXTRACFLAGS="-I "$T/system/kernel_headers/host_include
@@ -567,12 +585,12 @@
complete -F _lunch lunch
# Configures the build to build unbundled apps.
-# Run tapas with one ore more app names (from LOCAL_PACKAGE_NAME)
+# Run tapas with one or more app names (from LOCAL_PACKAGE_NAME)
function tapas()
{
- local arch=$(echo -n $(echo $* | xargs -n 1 echo | \grep -E '^(arm|x86|mips|armv5)$'))
- local variant=$(echo -n $(echo $* | xargs -n 1 echo | \grep -E '^(user|userdebug|eng)$'))
- local apps=$(echo -n $(echo $* | xargs -n 1 echo | \grep -E -v '^(user|userdebug|eng|arm|x86|mips|armv5)$'))
+ local arch="$(echo $* | xargs -n 1 echo | \grep -E '^(arm|x86|mips|armv5|arm64|x86_64|mips64)$' | xargs)"
+ local variant="$(echo $* | xargs -n 1 echo | \grep -E '^(user|userdebug|eng)$' | xargs)"
+ local apps="$(echo $* | xargs -n 1 echo | \grep -E -v '^(user|userdebug|eng|arm|x86|mips|armv5|arm64|x86_64|mips64)$' | xargs)"
if [ $(echo $arch | wc -w) -gt 1 ]; then
echo "tapas: Error: Multiple build archs supplied: $arch"
@@ -585,9 +603,12 @@
local product=full
case $arch in
- x86) product=full_x86;;
- mips) product=full_mips;;
- armv5) product=generic_armv5;;
+ x86) product=full_x86;;
+ mips) product=full_mips;;
+ armv5) product=generic_armv5;;
+ arm64) product=aosp_arm64;;
+ x86_64) product=aosp_x86_64;;
+ mips64) product=aosp_mips64;;
esac
if [ -z "$variant" ]; then
variant=eng
@@ -1039,6 +1060,7 @@
fi
OUT_SO_SYMBOLS=$OUT_SO_SYMBOLS$USE64BIT
+ OUT_VENDOR_SO_SYMBOLS=$OUT_VENDOR_SO_SYMBOLS$USE64BIT
echo >|"$OUT_ROOT/gdbclient.cmds" "set solib-absolute-prefix $OUT_SYMBOLS"
echo >>"$OUT_ROOT/gdbclient.cmds" "set solib-search-path $OUT_SO_SYMBOLS:$OUT_SO_SYMBOLS/hw:$OUT_SO_SYMBOLS/ssl/engines:$OUT_SO_SYMBOLS/drm:$OUT_SO_SYMBOLS/egl:$OUT_SO_SYMBOLS/soundfx:$OUT_VENDOR_SO_SYMBOLS:$OUT_VENDOR_SO_SYMBOLS/hw:$OUT_VENDOR_SO_SYMBOLS/egl"
@@ -1454,6 +1476,36 @@
return $retval
}
+function make()
+{
+ local start_time=$(date +"%s")
+ command make "$@"
+ local ret=$?
+ local end_time=$(date +"%s")
+ local tdiff=$(($end_time-$start_time))
+ local hours=$(($tdiff / 3600 ))
+ local mins=$((($tdiff % 3600) / 60))
+ local secs=$(($tdiff % 60))
+ echo
+ if [ $ret -eq 0 ] ; then
+ echo -n -e "#### make completed successfully "
+ else
+ echo -n -e "#### make failed to build some targets "
+ fi
+ if [ $hours -gt 0 ] ; then
+ printf "(%02g:%02g:%02g (hh:mm:ss))" $hours $mins $secs
+ elif [ $mins -gt 0 ] ; then
+ printf "(%02g:%02g (mm:ss))" $mins $secs
+ elif [ $secs -gt 0 ] ; then
+ printf "(%s seconds)" $secs
+ fi
+ echo -e " ####"
+ echo
+ return $ret
+}
+
+
+
if [ "x$SHELL" != "x/bin/bash" ]; then
case `ps -o command -p $$` in
*bash*)
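# Example invocations of the widened tapas() above (app names are examples
# taken from elsewhere in this patch; any LOCAL_PACKAGE_NAME works):
#   tapas Calculator arm64 userdebug   # unbundled build against aosp_arm64
#   tapas Email x86_64                 # variant defaults to eng, product aosp_x86_64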
diff --git a/target/board/generic/device.mk b/target/board/generic/device.mk
index fe64bcb..06a7d8a 100644
--- a/target/board/generic/device.mk
+++ b/target/board/generic/device.mk
@@ -25,6 +25,9 @@
PRODUCT_COPY_FILES := \
device/generic/goldfish/data/etc/apns-conf.xml:system/etc/apns-conf.xml \
device/generic/goldfish/camera/media_profiles.xml:system/etc/media_profiles.xml \
+ frameworks/av/media/libstagefright/data/media_codecs_google_audio.xml:system/etc/media_codecs_google_audio.xml \
+ frameworks/av/media/libstagefright/data/media_codecs_google_telephony.xml:system/etc/media_codecs_google_telephony.xml \
+ frameworks/av/media/libstagefright/data/media_codecs_google_video.xml:system/etc/media_codecs_google_video.xml \
device/generic/goldfish/camera/media_codecs.xml:system/etc/media_codecs.xml \
hardware/libhardware_legacy/audio/audio_policy.conf:system/etc/audio_policy.conf
diff --git a/target/board/generic_arm64/device.mk b/target/board/generic_arm64/device.mk
index 354fb2a..11a6a31 100644
--- a/target/board/generic_arm64/device.mk
+++ b/target/board/generic_arm64/device.mk
@@ -25,6 +25,9 @@
PRODUCT_COPY_FILES := \
device/generic/goldfish/data/etc/apns-conf.xml:system/etc/apns-conf.xml \
device/generic/goldfish/camera/media_profiles.xml:system/etc/media_profiles.xml \
+ frameworks/av/media/libstagefright/data/media_codecs_google_audio.xml:system/etc/media_codecs_google_audio.xml \
+ frameworks/av/media/libstagefright/data/media_codecs_google_telephony.xml:system/etc/media_codecs_google_telephony.xml \
+ frameworks/av/media/libstagefright/data/media_codecs_google_video.xml:system/etc/media_codecs_google_video.xml \
device/generic/goldfish/camera/media_codecs.xml:system/etc/media_codecs.xml
PRODUCT_PACKAGES := \
diff --git a/target/board/generic_mips/device.mk b/target/board/generic_mips/device.mk
index b182c4c..590f422 100644
--- a/target/board/generic_mips/device.mk
+++ b/target/board/generic_mips/device.mk
@@ -25,6 +25,9 @@
PRODUCT_COPY_FILES := \
device/generic/goldfish/data/etc/apns-conf.xml:system/etc/apns-conf.xml \
device/generic/goldfish/camera/media_profiles.xml:system/etc/media_profiles.xml \
+ frameworks/av/media/libstagefright/data/media_codecs_google_audio.xml:system/etc/media_codecs_google_audio.xml \
+ frameworks/av/media/libstagefright/data/media_codecs_google_telephony.xml:system/etc/media_codecs_google_telephony.xml \
+ frameworks/av/media/libstagefright/data/media_codecs_google_video.xml:system/etc/media_codecs_google_video.xml \
device/generic/goldfish/camera/media_codecs.xml:system/etc/media_codecs.xml \
hardware/libhardware_legacy/audio/audio_policy.conf:system/etc/audio_policy.conf
diff --git a/target/board/generic_mips64/device.mk b/target/board/generic_mips64/device.mk
index 58fe976..015686e 100644
--- a/target/board/generic_mips64/device.mk
+++ b/target/board/generic_mips64/device.mk
@@ -25,6 +25,9 @@
PRODUCT_COPY_FILES := \
device/generic/goldfish/data/etc/apns-conf.xml:system/etc/apns-conf.xml \
device/generic/goldfish/camera/media_profiles.xml:system/etc/media_profiles.xml \
+ frameworks/av/media/libstagefright/data/media_codecs_google_audio.xml:system/etc/media_codecs_google_audio.xml \
+ frameworks/av/media/libstagefright/data/media_codecs_google_telephony.xml:system/etc/media_codecs_google_telephony.xml \
+ frameworks/av/media/libstagefright/data/media_codecs_google_video.xml:system/etc/media_codecs_google_video.xml \
device/generic/goldfish/camera/media_codecs.xml:system/etc/media_codecs.xml \
hardware/libhardware_legacy/audio/audio_policy.conf:system/etc/audio_policy.conf
diff --git a/target/board/generic_x86/device.mk b/target/board/generic_x86/device.mk
index 089f584..b5b0faf 100644
--- a/target/board/generic_x86/device.mk
+++ b/target/board/generic_x86/device.mk
@@ -25,6 +25,9 @@
PRODUCT_COPY_FILES := \
device/generic/goldfish/data/etc/apns-conf.xml:system/etc/apns-conf.xml \
device/generic/goldfish/camera/media_profiles.xml:system/etc/media_profiles.xml \
+ frameworks/av/media/libstagefright/data/media_codecs_google_audio.xml:system/etc/media_codecs_google_audio.xml \
+ frameworks/av/media/libstagefright/data/media_codecs_google_telephony.xml:system/etc/media_codecs_google_telephony.xml \
+ frameworks/av/media/libstagefright/data/media_codecs_google_video.xml:system/etc/media_codecs_google_video.xml \
device/generic/goldfish/camera/media_codecs.xml:system/etc/media_codecs.xml
PRODUCT_PACKAGES := \
diff --git a/target/board/generic_x86_64/device.mk b/target/board/generic_x86_64/device.mk
index 089f584..b5b0faf 100755
--- a/target/board/generic_x86_64/device.mk
+++ b/target/board/generic_x86_64/device.mk
@@ -25,6 +25,9 @@
PRODUCT_COPY_FILES := \
device/generic/goldfish/data/etc/apns-conf.xml:system/etc/apns-conf.xml \
device/generic/goldfish/camera/media_profiles.xml:system/etc/media_profiles.xml \
+ frameworks/av/media/libstagefright/data/media_codecs_google_audio.xml:system/etc/media_codecs_google_audio.xml \
+ frameworks/av/media/libstagefright/data/media_codecs_google_telephony.xml:system/etc/media_codecs_google_telephony.xml \
+ frameworks/av/media/libstagefright/data/media_codecs_google_video.xml:system/etc/media_codecs_google_video.xml \
device/generic/goldfish/camera/media_codecs.xml:system/etc/media_codecs.xml
PRODUCT_PACKAGES := \
diff --git a/target/board/vbox_x86/device.mk b/target/board/vbox_x86/device.mk
index a44a87f..b51f801 100644
--- a/target/board/vbox_x86/device.mk
+++ b/target/board/vbox_x86/device.mk
@@ -29,6 +29,9 @@
PRODUCT_COPY_FILES := \
device/generic/goldfish/data/etc/apns-conf.xml:system/etc/apns-conf.xml \
device/generic/goldfish/camera/media_profiles.xml:system/etc/media_profiles.xml \
+ frameworks/av/media/libstagefright/data/media_codecs_google_audio.xml:system/etc/media_codecs_google_audio.xml \
+ frameworks/av/media/libstagefright/data/media_codecs_google_telephony.xml:system/etc/media_codecs_google_telephony.xml \
+ frameworks/av/media/libstagefright/data/media_codecs_google_video.xml:system/etc/media_codecs_google_video.xml \
device/generic/goldfish/camera/media_codecs.xml:system/etc/media_codecs.xml \
build/target/board/vbox_x86/init.vbox_x86.rc:root/init.vbox_x86.rc \
$(LOCAL_KERNEL):kernel
diff --git a/target/product/AndroidProducts.mk b/target/product/AndroidProducts.mk
index fc407d3..bce5bb5 100644
--- a/target/product/AndroidProducts.mk
+++ b/target/product/AndroidProducts.mk
@@ -42,7 +42,8 @@
$(LOCAL_DIR)/aosp_mips.mk \
$(LOCAL_DIR)/full_mips.mk \
$(LOCAL_DIR)/aosp_arm64.mk \
- $(LOCAL_DIR)/aosp_mips64.mk
+ $(LOCAL_DIR)/aosp_mips64.mk \
+ $(LOCAL_DIR)/aosp_x86_64.mk
else
PRODUCT_MAKEFILES := \
$(LOCAL_DIR)/core.mk \
@@ -63,5 +64,7 @@
$(LOCAL_DIR)/sdk.mk \
$(LOCAL_DIR)/sdk_x86.mk \
$(LOCAL_DIR)/sdk_mips.mk \
- $(LOCAL_DIR)/large_emu_hw.mk
+ $(LOCAL_DIR)/sdk_arm64.mk \
+ $(LOCAL_DIR)/sdk_x86_64.mk \
+ $(LOCAL_DIR)/sdk_mips64.mk
endif
diff --git a/target/product/base.mk b/target/product/base.mk
index 4b8326d..981f965 100644
--- a/target/product/base.mk
+++ b/target/product/base.mk
@@ -35,12 +35,15 @@
fsck_msdos \
ime \
input \
+ inputflinger \
javax.obex \
libandroid \
libandroid_runtime \
libandroid_servers \
libaudioeffect_jni \
libaudioflinger \
+ libaudiopolicyservice \
+ libaudiopolicymanager \
libbundlewrapper \
libcamera_client \
libcameraservice \
@@ -48,6 +51,7 @@
libeffectproxy \
libeffects \
libinput \
+ libinputflinger \
libiprouteutil \
libjnigraphics \
libldnhncr \
@@ -55,8 +59,10 @@
libmedia_jni \
libmediaplayerservice \
libmtp \
+ libnetd_client \
libnetlink \
libnetutils \
+ libpdfium \
libreference-ril \
libreverbwrapper \
libril \
@@ -65,6 +71,8 @@
libskia \
libsonivox \
libsoundpool \
+ libsoundtrigger \
+ libsoundtriggerservice \
libsqlite \
libstagefright \
libstagefright_amrnb_common \
@@ -77,6 +85,8 @@
libutils \
libvisualizer \
libvorbisidec \
+ libmediandk \
+ libwifi-service \
media \
media_cmd \
mediaserver \
@@ -93,7 +103,6 @@
racoon \
run-as \
schedtest \
- screenshot \
sdcard \
services \
settings \
diff --git a/target/product/core.mk b/target/product/core.mk
index 8c88b94..875d08c 100644
--- a/target/product/core.mk
+++ b/target/product/core.mk
@@ -22,16 +22,35 @@
PRODUCT_PACKAGES += \
BasicDreams \
Browser \
+ Calculator \
+ Calendar \
+ CalendarProvider \
+ CertInstaller \
Contacts \
+ DeskClock \
DocumentsUI \
DownloadProviderUi \
+ Email \
+ Exchange2 \
ExternalStorageProvider \
+ FusedLocation \
+ InputDevices \
KeyChain \
+ Keyguard \
+ LatinIME \
+ Launcher2 \
+ ManagedProvisioning \
PicoTts \
PacProcessor \
libpac \
+ PrintSpooler \
ProxyHandler \
+ QuickSearchBox \
+ Settings \
SharedStorageBackup \
- VpnDialogs
+ Telecomm \
+ TeleService \
+ VpnDialogs \
+ MmsService
$(call inherit-product, $(SRC_TARGET_DIR)/product/core_base.mk)
diff --git a/target/product/core_base.mk b/target/product/core_base.mk
index d7d74bc..f14e53d 100644
--- a/target/product/core_base.mk
+++ b/target/product/core_base.mk
@@ -30,14 +30,12 @@
libandroidfw \
libaudiopreprocessing \
libaudioutils \
- libbcc \
libfilterpack_imageproc \
libgabi++ \
libmdnssd \
libnfc_ndef \
libpowermanager \
libspeexresampler \
- libstagefright_chromium_http \
libstagefright_soft_aacdec \
libstagefright_soft_aacenc \
libstagefright_soft_amrdec \
@@ -48,6 +46,7 @@
libstagefright_soft_gsmdec \
libstagefright_soft_h264dec \
libstagefright_soft_h264enc \
+ libstagefright_soft_hevcdec \
libstagefright_soft_mp3dec \
libstagefright_soft_mpeg4dec \
libstagefright_soft_mpeg4enc \
@@ -58,26 +57,7 @@
libvariablespeed \
libwebrtc_audio_preprocessing \
mdnsd \
- mms-common \
requestsync \
- telephony-common \
- voip-common
+ wifi-service
$(call inherit-product, $(SRC_TARGET_DIR)/product/core_minimal.mk)
-# Override the PRODUCT_BOOT_JARS set in core_minimal.mk. The order matters.
-PRODUCT_BOOT_JARS := \
- core-libart \
- conscrypt \
- okhttp \
- core-junit \
- bouncycastle \
- ext \
- framework \
- framework2 \
- telephony-common \
- voip-common \
- mms-common \
- android.policy \
- services \
- apache-xml \
- webviewchromium
diff --git a/target/product/core_minimal.mk b/target/product/core_minimal.mk
index c371368..368e468 100644
--- a/target/product/core_minimal.mk
+++ b/target/product/core_minimal.mk
@@ -36,15 +36,18 @@
com.android.media.remotedisplay \
com.android.media.remotedisplay.xml \
drmserver \
+ ethernet-service \
framework-res \
idmap \
installd \
+ ims-common \
ip \
ip-up-vpn \
ip6tables \
iptables \
keystore \
keystore.default \
+ libbcc \
libOpenMAXAL \
libOpenSLES \
libdownmix \
@@ -56,10 +59,17 @@
libwilhelm \
logd \
make_ext4fs \
+ e2fsck \
+ resize2fs \
+ mms-common \
screencap \
sensorservice \
+ telephony-common \
uiautomator \
- webview
+ uncrypt \
+ voip-common \
+ webview \
+ wifi-service
PRODUCT_COPY_FILES += \
frameworks/native/data/etc/android.software.webview.xml:system/etc/permissions/android.software.webview.xml
@@ -74,10 +84,16 @@
ext \
framework \
framework2 \
+ telephony-common \
+ voip-common \
+ ims-common \
+ mms-common \
android.policy \
services \
apache-xml \
- webviewchromium
+ ethernet-service \
+ webviewchromium \
+ wifi-service
PRODUCT_RUNTIMES := runtime_libart_default
diff --git a/target/product/core_tiny.mk b/target/product/core_tiny.mk
index 52244a3..48abfa5 100644
--- a/target/product/core_tiny.mk
+++ b/target/product/core_tiny.mk
@@ -22,21 +22,12 @@
ContactsProvider \
CertInstaller \
FusedLocation \
- InputDevices \
- bluetooth-health \
- hostapd \
- wpa_supplicant.conf
+ InputDevices
PRODUCT_PACKAGES += \
- audio \
clatd \
clatd.conf \
- dhcpcd.conf \
- network \
- pand \
- pppd \
- sdptool \
- wpa_supplicant
+ pppd
PRODUCT_PACKAGES += \
audio.primary.default \
@@ -57,6 +48,7 @@
com.android.location.provider.xml \
framework-res \
installd \
+ ims-common \
ip \
ip-up-vpn \
ip6tables \
@@ -73,17 +65,22 @@
libdrmframework_jni \
libdrmframework \
make_ext4fs \
+ e2fsck \
+ resize2fs \
nullwebview \
screencap \
sensorservice \
uiautomator \
+ uncrypt \
telephony-common \
voip-common \
- mms-common
+ logd \
+ mms-common \
+ wifi-service
# The order matters
PRODUCT_BOOT_JARS := \
- core \
+ core-libart \
conscrypt \
okhttp \
core-junit \
@@ -93,22 +90,28 @@
framework2 \
telephony-common \
voip-common \
+ ims-common \
mms-common \
android.policy \
services \
apache-xml \
- nullwebview
+ nullwebview \
+ wifi-service
-PRODUCT_RUNTIMES := runtime_libdvm_default
+PRODUCT_RUNTIMES := runtime_libart_default
+
+PRODUCT_DEFAULT_PROPERTY_OVERRIDES += \
+ ro.zygote=zygote32
+PRODUCT_COPY_FILES += \
+ system/core/rootdir/init.zygote32.rc:root/init.zygote32.rc
PRODUCT_PROPERTY_OVERRIDES += \
ro.carrier=unknown
+$(call inherit-product, $(SRC_TARGET_DIR)/product/base.mk)
$(call inherit-product-if-exists, frameworks/base/data/fonts/fonts.mk)
# Overrides
PRODUCT_BRAND := tiny
PRODUCT_DEVICE := tiny
PRODUCT_NAME := core_tiny
-
-$(call inherit-product, $(SRC_TARGET_DIR)/product/base.mk)
diff --git a/target/product/full_base.mk b/target/product/full_base.mk
index 059697e..1daa523 100644
--- a/target/product/full_base.mk
+++ b/target/product/full_base.mk
@@ -26,7 +26,6 @@
libWnnEngDic \
libWnnJpnDic \
libwnndict \
- VideoEditor \
WAPPushManager
PRODUCT_PACKAGES += \
diff --git a/target/product/full_base_telephony.mk b/target/product/full_base_telephony.mk
index f98e9a2..2fd2ce8 100644
--- a/target/product/full_base_telephony.mk
+++ b/target/product/full_base_telephony.mk
@@ -27,7 +27,8 @@
ro.com.android.dataroaming=true
PRODUCT_COPY_FILES := \
- device/generic/goldfish/data/etc/apns-conf.xml:system/etc/apns-conf.xml
+ device/generic/goldfish/data/etc/apns-conf.xml:system/etc/apns-conf.xml \
+ frameworks/native/data/etc/handheld_core_hardware.xml:system/etc/permissions/handheld_core_hardware.xml
$(call inherit-product, $(SRC_TARGET_DIR)/product/aosp_base.mk)
$(call inherit-product, $(SRC_TARGET_DIR)/product/telephony.mk)
diff --git a/target/product/generic_no_telephony.mk b/target/product/generic_no_telephony.mk
index 12797f4..3ee6de8 100644
--- a/target/product/generic_no_telephony.mk
+++ b/target/product/generic_no_telephony.mk
@@ -17,33 +17,15 @@
# This is a generic phone product that isn't specialized for a specific device.
# It includes the base Android platform.
-PRODUCT_POLICY := android.policy_phone
-
PRODUCT_PACKAGES := \
- DeskClock \
Bluetooth \
- Calculator \
- Calendar \
Camera2 \
- CertInstaller \
- Email \
- Exchange2 \
- FusedLocation \
Gallery2 \
- InputDevices \
- Keyguard \
- LatinIME \
- Launcher2 \
Music \
MusicFX \
OneTimeInitializer \
- PrintSpooler \
Provision \
- QuickSearchBox \
- Settings \
SystemUI \
- TeleService \
- CalendarProvider \
WallpaperCropper
PRODUCT_PACKAGES += \
@@ -83,7 +65,6 @@
$(call inherit-product-if-exists, external/google-fonts/coming-soon/fonts.mk)
$(call inherit-product-if-exists, external/noto-fonts/fonts.mk)
$(call inherit-product-if-exists, external/naver-fonts/fonts.mk)
-$(call inherit-product-if-exists, external/sil-fonts/fonts.mk)
$(call inherit-product-if-exists, frameworks/base/data/keyboards/keyboards.mk)
$(call inherit-product-if-exists, frameworks/webview/chromium/chromium.mk)
$(call inherit-product, $(SRC_TARGET_DIR)/product/core.mk)
diff --git a/target/product/languages_full.mk b/target/product/languages_full.mk
index 4cddc06..024d6ac 100644
--- a/target/product/languages_full.mk
+++ b/target/product/languages_full.mk
@@ -21,4 +21,4 @@
# These are all the locales that have translations and are displayable
# by TextView in this branch.
-PRODUCT_LOCALES := en_US en_IN fr_FR it_IT es_ES et_EE de_DE nl_NL cs_CZ pl_PL ja_JP zh_TW zh_CN zh_HK ru_RU ko_KR nb_NO es_US da_DK el_GR tr_TR pt_PT pt_BR rm_CH sv_SE bg_BG ca_ES en_GB fi_FI hi_IN hr_HR hu_HU in_ID iw_IL lt_LT lv_LV ro_RO sk_SK sl_SI sr_RS uk_UA vi_VN tl_PH ar_EG fa_IR th_TH sw_TZ ms_MY af_ZA zu_ZA am_ET hi_IN en_XA ar_XB fr_CA km_KH lo_LA ne_NP si_LK mn_MN hy_AM az_AZ ka_GE
+PRODUCT_LOCALES := en_US en_IN fr_FR it_IT es_ES et_EE de_DE nl_NL cs_CZ pl_PL ja_JP zh_TW zh_CN zh_HK ru_RU ko_KR nb_NO es_US da_DK el_GR tr_TR pt_PT pt_BR rm_CH sv_SE bg_BG ca_ES en_GB fi_FI hi_IN hr_HR hu_HU in_ID iw_IL lt_LT lv_LV ro_RO sk_SK sl_SI sr_RS uk_UA vi_VN tl_PH ar_EG fa_IR th_TH sw_TZ ms_MY af_ZA zu_ZA am_ET hi_IN en_XA ar_XB fr_CA km_KH lo_LA ne_NP si_LK mn_MN hy_AM az_AZ ka_GE my_MM mr_MH ml_KL is_IS mk_MK ky_KG eu_PV gl_GA bn_WB ta_TN kn_KA te_AP uz_UZ ur_PK kk_KZ
diff --git a/target/product/large_emu_hw.mk b/target/product/large_emu_hw.mk
deleted file mode 100644
index a918c1d..0000000
--- a/target/product/large_emu_hw.mk
+++ /dev/null
@@ -1,52 +0,0 @@
-#
-# Copyright (C) 2007 The Android Open Source Project
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-# http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-# This is a generic product for devices with large display but not specialized
-# for a specific device. It includes the base Android platform.
-
-PRODUCT_POLICY := android.policy_mid
-
-PRODUCT_PACKAGES := \
- CarHome \
- DeskClock \
- Bluetooth \
- Calculator \
- Calendar \
- CertInstaller \
- Email \
- Exchange2 \
- Gallery2 \
- LatinIME \
- Launcher2 \
- Music \
- Provision \
- QuickSearchBox \
- Settings \
- Sync \
- Updater \
- CalendarProvider \
- SyncProvider \
- bluetooth-health \
- hostapd \
- wpa_supplicant.conf
-
-
-$(call inherit-product, $(SRC_TARGET_DIR)/product/core.mk)
-
-# Overrides
-PRODUCT_BRAND := generic
-PRODUCT_DEVICE := generic
-PRODUCT_NAME := large_emu_hw
diff --git a/target/product/sdk.mk b/target/product/sdk.mk
index 5926f19..d4f2355 100644
--- a/target/product/sdk.mk
+++ b/target/product/sdk.mk
@@ -14,56 +14,38 @@
# limitations under the License.
#
-PRODUCT_POLICY := android.policy_phone
PRODUCT_PROPERTY_OVERRIDES :=
PRODUCT_PACKAGES := \
- Calculator \
- DeskClock \
- Email \
- Exchange2 \
- FusedLocation \
- Gallery \
- Keyguard \
- Music \
- Mms \
- OpenWnn \
- PrintSpooler \
- libWnnEngDic \
- libWnnJpnDic \
- libwnndict \
- TeleService \
- PinyinIME \
- Protips \
- SoftKeyboard \
- SystemUI \
- Launcher2 \
+ ApiDemos \
+ CubeLiveWallpapers \
+ CustomLocale \
Development \
DevelopmentSettings \
+ Dialer \
+ EmulatorSmokeTests \
Fallback \
- Settings \
- SdkSetup \
- CustomLocale \
- sqlite3 \
- InputDevices \
- LatinIME \
- CertInstaller \
- LiveWallpapersPicker \
- ApiDemos \
+ Gallery \
GestureBuilder \
- CubeLiveWallpapers \
- QuickSearchBox \
- WidgetPreview \
+ LegacyCamera \
librs_jni \
- ConnectivityTest \
- GpsLocationTest \
- CalendarProvider \
- Calendar \
+ libwnndict \
+ libWnnEngDic \
+ libWnnJpnDic \
+ LiveWallpapersPicker \
+ Mms \
+ Music \
+ OpenWnn \
+ PinyinIME \
+ Protips \
+ rild \
+ SdkSetup \
SmokeTest \
SmokeTestApp \
- rild \
- LegacyCamera \
- Dialer
+ SoftKeyboard \
+ sqlite3 \
+ SystemUI \
+ WidgetPreview
# Define the host tools and libs that are parts of the SDK.
-include sdk/build/product_sdk.mk
@@ -83,6 +65,9 @@
frameworks/base/data/sounds/effects/VideoRecord.ogg:system/media/audio/ui/VideoRecord.ogg \
frameworks/native/data/etc/handheld_core_hardware.xml:system/etc/permissions/handheld_core_hardware.xml \
device/generic/goldfish/camera/media_profiles.xml:system/etc/media_profiles.xml \
+ frameworks/av/media/libstagefright/data/media_codecs_google_audio.xml:system/etc/media_codecs_google_audio.xml \
+ frameworks/av/media/libstagefright/data/media_codecs_google_telephony.xml:system/etc/media_codecs_google_telephony.xml \
+ frameworks/av/media/libstagefright/data/media_codecs_google_video.xml:system/etc/media_codecs_google_video.xml \
device/generic/goldfish/camera/media_codecs.xml:system/etc/media_codecs.xml \
frameworks/native/data/etc/android.hardware.touchscreen.multitouch.jazzhand.xml:system/etc/permissions/android.hardware.touchscreen.multitouch.jazzhand.xml \
frameworks/native/data/etc/android.hardware.camera.autofocus.xml:system/etc/permissions/android.hardware.camera.autofocus.xml \
@@ -98,19 +83,21 @@
$(call inherit-product-if-exists, external/google-fonts/coming-soon/fonts.mk)
$(call inherit-product-if-exists, external/noto-fonts/fonts.mk)
$(call inherit-product-if-exists, external/naver-fonts/fonts.mk)
-$(call inherit-product-if-exists, external/sil-fonts/fonts.mk)
$(call inherit-product-if-exists, frameworks/base/data/keyboards/keyboards.mk)
$(call inherit-product-if-exists, frameworks/webview/chromium/chromium.mk)
$(call inherit-product, $(SRC_TARGET_DIR)/product/core.mk)
-# Overrides
-PRODUCT_BRAND := generic
-PRODUCT_NAME := sdk
-PRODUCT_DEVICE := generic
+# include available languages for TTS in the system image
+-include external/svox/pico/lang/PicoLangDeDeInSystem.mk
+-include external/svox/pico/lang/PicoLangEnGBInSystem.mk
+-include external/svox/pico/lang/PicoLangEnUsInSystem.mk
+-include external/svox/pico/lang/PicoLangEsEsInSystem.mk
+-include external/svox/pico/lang/PicoLangFrFrInSystem.mk
+-include external/svox/pico/lang/PicoLangItItInSystem.mk
# locale + densities. en_US is both first and in alphabetical order to
# ensure this is the default locale.
-PRODUCT_LOCALES = \
+PRODUCT_LOCALES := \
en_US \
ldpi \
hdpi \
@@ -174,10 +161,7 @@
zh_CN \
zh_TW
-# include available languages for TTS in the system image
--include external/svox/pico/lang/PicoLangDeDeInSystem.mk
--include external/svox/pico/lang/PicoLangEnGBInSystem.mk
--include external/svox/pico/lang/PicoLangEnUsInSystem.mk
--include external/svox/pico/lang/PicoLangEsEsInSystem.mk
--include external/svox/pico/lang/PicoLangFrFrInSystem.mk
--include external/svox/pico/lang/PicoLangItItInSystem.mk
+# Overrides
+PRODUCT_BRAND := generic
+PRODUCT_NAME := sdk
+PRODUCT_DEVICE := generic
diff --git a/target/product/sdk_arm64.mk b/target/product/sdk_arm64.mk
new file mode 100644
index 0000000..9aa58b3
--- /dev/null
+++ b/target/product/sdk_arm64.mk
@@ -0,0 +1,29 @@
+#
+# Copyright (C) 2009 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+# This is a build configuration for a full-featured build of the
+# Open-Source part of the tree. It's geared toward a US-centric
+# build quite specifically for the emulator, and might not be
+# entirely appropriate to inherit from for on-device configurations.
+
+$(call inherit-product, $(SRC_TARGET_DIR)/product/core_64_bit.mk)
+$(call inherit-product, $(SRC_TARGET_DIR)/product/sdk.mk)
+
+# Overrides
+PRODUCT_BRAND := generic_arm64
+PRODUCT_NAME := sdk_arm64
+PRODUCT_DEVICE := generic_arm64
+PRODUCT_MODEL := Android SDK built for arm64
diff --git a/target/product/sdk_mips64.mk b/target/product/sdk_mips64.mk
new file mode 100644
index 0000000..c46eaed
--- /dev/null
+++ b/target/product/sdk_mips64.mk
@@ -0,0 +1,29 @@
+#
+# Copyright (C) 2009 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+# This is a build configuration for a full-featured build of the
+# Open-Source part of the tree. It's geared toward a US-centric
+# build quite specifically for the emulator, and might not be
+# entirely appropriate to inherit from for on-device configurations.
+
+$(call inherit-product, $(SRC_TARGET_DIR)/product/core_64_bit.mk)
+$(call inherit-product, $(SRC_TARGET_DIR)/product/sdk.mk)
+
+# Overrides
+PRODUCT_BRAND := generic_mips64
+PRODUCT_NAME := sdk_mips64
+PRODUCT_DEVICE := generic_mips64
+PRODUCT_MODEL := Android SDK built for mips64
diff --git a/target/product/sdk_x86.mk b/target/product/sdk_x86.mk
index 873d0c0..6c5e746 100644
--- a/target/product/sdk_x86.mk
+++ b/target/product/sdk_x86.mk
@@ -19,7 +19,7 @@
# build quite specifically for the emulator, and might not be
# entirely appropriate to inherit from for on-device configurations.
-include $(SRC_TARGET_DIR)/product/sdk.mk
+$(call inherit-product, $(SRC_TARGET_DIR)/product/sdk.mk)
# Overrides
PRODUCT_BRAND := generic_x86
diff --git a/target/product/sdk_x86_64.mk b/target/product/sdk_x86_64.mk
new file mode 100644
index 0000000..62f2dbb
--- /dev/null
+++ b/target/product/sdk_x86_64.mk
@@ -0,0 +1,29 @@
+#
+# Copyright (C) 2009 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+# This is a build configuration for a full-featured build of the
+# Open-Source part of the tree. It's geared toward a US-centric
+# build quite specifically for the emulator, and might not be
+# entirely appropriate to inherit from for on-device configurations.
+
+$(call inherit-product, $(SRC_TARGET_DIR)/product/core_64_bit.mk)
+$(call inherit-product, $(SRC_TARGET_DIR)/product/sdk.mk)
+
+# Overrides
+PRODUCT_BRAND := generic_x86_64
+PRODUCT_NAME := sdk_x86_64
+PRODUCT_DEVICE := generic_x86_64
+PRODUCT_MODEL := Android SDK built for x86_64
diff --git a/target/product/security/Android.mk b/target/product/security/Android.mk
new file mode 100644
index 0000000..5a40397
--- /dev/null
+++ b/target/product/security/Android.mk
@@ -0,0 +1,12 @@
+LOCAL_PATH:= $(call my-dir)
+
+#######################################
+# verity_key
+include $(CLEAR_VARS)
+
+LOCAL_MODULE := verity_key
+LOCAL_SRC_FILES := $(LOCAL_MODULE)
+LOCAL_MODULE_CLASS := ETC
+LOCAL_MODULE_PATH := $(TARGET_ROOT_OUT)
+
+include $(BUILD_PREBUILT)
diff --git a/target/product/security/verity_key b/target/product/security/verity_key
new file mode 100644
index 0000000..8db965f
--- /dev/null
+++ b/target/product/security/verity_key
Binary files differ
diff --git a/target/product/security/verity_private_dev_key b/target/product/security/verity_private_dev_key
new file mode 100644
index 0000000..92528e9
--- /dev/null
+++ b/target/product/security/verity_private_dev_key
@@ -0,0 +1,28 @@
+-----BEGIN PRIVATE KEY-----
+MIIEvAIBADANBgkqhkiG9w0BAQEFAASCBKYwggSiAgEAAoIBAQDQxdVrH2RB1eg5
+17/gBmLzW1Ds10RG6ctNZMhxppMOLnEZViKGv1VNRhxqK/JKTv2UujgZ94SJcDub
+G+DwAwaGZKQqDYUa0VU2cng8TYPcnYGPdJ7Usckp6tdg64vns7e+VVf0dOyEovR+
+JyeYUz05OhUMYP9xJIhpA2XnXe5Ekb9iTFSYo9uBpoXDD4IY7aOqUxSbv9wMtyIp
+dl+oTm0+kqRRi4KoxGHV0CzDseEUuWG/Kp/7aVF9Sg45NcC6KYvrGysUKA+Bt09O
+feDn/HRpT9SfRElJa5DRms33UBUtnom15F4yd4vvFgubB0nkPOUuwfZhTFfgeuY4
+H2bHkjKbAgMBAAECggEAMpFYqkPGQvQO9cO+ZALoAM4Dgfp6PTrv1WUt7+lLAUpa
+dqqYXk8F2Fu9EjJm03ziix237QI5Bhk7Nsy/5SK2d+L0qILx1JcTrsZ3PRQBdnRo
+J1k2B4qwkQii9oTXNF4hiWaekUWo7E+ULOJLAuhWkf/xjTgJZ1xT9iuuiSYFSnIa
+9ABNH0vCaKEkW/4ri6fdtXmO26C/ltJlnozl86x07PIFh4uBas7/40E8ykFP00CS
+zdhMh+2DGyCb1Q0eJ1IfGILNatkLNEd2BHgQ7qNBkN9yShZfhvIPblr5gSUlZplX
+diV20ZGLAfByKWgZZWKkwl9KzaisL/J/4dr2UlSVEQKBgQDxAYTsgoTkkP0TKzr3
+i3ljT8OuVOj6TwZVBJYe2MIJ3veivS3gWB53FpsKthbib7y8ifIakn15mQkNCK5R
+7H7F5lvZCNnB6shY5Dz7nLJxKLALcAg+d12l3gTbFQeFDs0iQQJF7P8hs/GPF7kY
+Layb7EF0uzYjyHJCKtFdaZaeZwKBgQDdwvCb7NJVeGTcE97etL+8acu9y4GlqKEF
+o0Vkw8TjNKj/KuDkbkAk9hXxU1ZCmDU3y6r8CVHYl0Sqh08plEhkYB/j3sFy81zY
+3xu/rLFysBwjeJHHlPjRTYkdKr9pABmm8NIEShvu9u8i+mpOhjbX72HxZL+i4Fou
+gz58wEdBrQKBgG8CfyKdn+7UJe3tbLTXRquK8xxauhGJ0uXYPfmpZ/8596C7OOVs
+UWQTQoj1hKb6RtolRCIfNbKL3hJl3D2aDG7Fg6r9m6fpqCzhvIE9FShwUF6EVRfI
+zZb4JA5xqkwMnEpZ3V0uI/p3Mx3xFG3ho+8SLLhC/1YOHysBI/y+BQWjAoGAYiqQ
+PkXYWhOAeleleeqDUdF3al3y1zVNimRbLJ7owjcmdEYz5YrUhEgXMIvWjIY6UKes
+2gL6IynbMK3TIjHM1fojQ8jw04TdXfdtnizBJGbHHgCab8IHXwe2oZ2xu7ZapKbI
+ITP5J5BSDabSdk49attB/Qy/NEeiRCK+/5RSNsUCgYAg6vX9VqMEkhPHeoFfdLGD
+EQPPN6QLrQ4Zif0GKxH96znNSv0rXdNp9t0kyapdgzMuCwIEuOkCSiKgmfjTWnYO
+qh5HMUuD2VbfWwI9jVujQMRmqiaFF7VxxA1bP5j1hJlI6cn1Fjlpi+NsNZN4nm3Q
+92SEwX2vDgjrU0NAtFFL1Q==
+-----END PRIVATE KEY-----
diff --git a/target/product/verity.mk b/target/product/verity.mk
new file mode 100644
index 0000000..b14eaa4
--- /dev/null
+++ b/target/product/verity.mk
@@ -0,0 +1,24 @@
+#
+# Copyright (C) 2014 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+# Provides dependencies necessary for verified boot
+
+PRODUCT_SUPPORTS_VERITY := true
+PRODUCT_VERITY_SIGNING_KEY := build/target/product/security/verity_private_dev_key
+PRODUCT_VERITY_MOUNTPOINT := system
+
+PRODUCT_PACKAGES += \
+ verity_key
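
The three product variables above are ultimately surfaced (via the build system's misc_info plumbing, which is outside this diff) as entries in the prop_dict that build_image.py consumes further down. A minimal sketch of the gating check, with hypothetical property values and key names taken from the common_props list added later in this diff, shown in Python 3 for readability:

    # Hypothetical property dictionary as BuildImage() might receive it for /system.
    # Key names match the common_props additions in build_image.py below;
    # the concrete values are invented for illustration.
    prop_dict = {
        "mount_point": "system",
        "partition_size": "1073741824",
        "verity": "true",
        "verity_mountpoint": "system",
        "verity_block_device": "/dev/block/bootdevice/by-name/system",
        "verity_key": "build/target/product/security/verity",
        "verity_signer_cmd": "verity_signer",
    }

    verity_supported = prop_dict.get("verity") == "true"
    is_verity_partition = prop_dict.get("mount_point") == prop_dict.get("verity_mountpoint")
    if verity_supported and is_verity_partition:
        print("partition will be sized down to leave room for the verity hash tree")
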
diff --git a/tools/buildinfo.sh b/tools/buildinfo.sh
index 46a73f8..dfbb9db 100755
--- a/tools/buildinfo.sh
+++ b/tools/buildinfo.sh
@@ -8,6 +8,7 @@
echo "ro.build.version.incremental=$BUILD_NUMBER"
echo "ro.build.version.sdk=$PLATFORM_SDK_VERSION"
echo "ro.build.version.codename=$PLATFORM_VERSION_CODENAME"
+echo "ro.build.version.all_codenames=$PLATFORM_VERSION_ALL_CODENAMES"
echo "ro.build.version.release=$PLATFORM_VERSION"
echo "ro.build.date=`date`"
echo "ro.build.date.utc=`date +%s`"
@@ -25,9 +26,16 @@
# instead (see below).
echo "# ro.product.cpu.abi and ro.product.cpu.abi2 are obsolete,"
echo "# use ro.product.cpu.abilist instead."
-echo "ro.product.cpu.abi=$TARGET_CPU_ABI"
-if [ -n "$TARGET_CPU_ABI2" ] ; then
- echo "ro.product.cpu.abi2=$TARGET_CPU_ABI2"
+if [ -n "$TARGET_2ND_CPU_ABI" ] ; then
+ echo "ro.product.cpu.abi=$TARGET_2ND_CPU_ABI"
+ if [ -n "$TARGET_2ND_CPU_ABI2" ] ; then
+ echo "ro.product.cpu.abi2=$TARGET_2ND_CPU_ABI2"
+ fi
+else
+ echo "ro.product.cpu.abi=$TARGET_CPU_ABI"
+ if [ -n "$TARGET_CPU_ABI2" ] ; then
+ echo "ro.product.cpu.abi2=$TARGET_CPU_ABI2"
+ fi
fi
echo "ro.product.cpu.abilist=$TARGET_CPU_ABI_LIST"
echo "ro.product.cpu.abilist32=$TARGET_CPU_ABI_LIST_32_BIT"
@@ -46,9 +54,10 @@
echo "# ro.build.product is obsolete; use ro.product.device"
echo "ro.build.product=$TARGET_DEVICE"
-echo "# Do not try to parse ro.build.description or .fingerprint"
+echo "# Do not try to parse description, fingerprint, or thumbprint"
echo "ro.build.description=$PRIVATE_BUILD_DESC"
echo "ro.build.fingerprint=$BUILD_FINGERPRINT"
+echo "ro.build.thumbprint=$BUILD_THUMBPRINT"
echo "ro.build.characteristics=$TARGET_AAPT_CHARACTERISTICS"
echo "# end build properties"
diff --git a/tools/droiddoc/templates-ds/package.cs b/tools/droiddoc/templates-ds/package.cs
index ea3e4f4..d67d5d9 100644
--- a/tools/droiddoc/templates-ds/package.cs
+++ b/tools/droiddoc/templates-ds/package.cs
@@ -45,6 +45,7 @@
<?cs /if ?>
<?cs /def ?>
+<?cs call:class_table("Annotations", package.annotations) ?>
<?cs call:class_table("Interfaces", package.interfaces) ?>
<?cs call:class_table("Classes", package.classes) ?>
<?cs call:class_table("Enums", package.enums) ?>
diff --git a/tools/droiddoc/templates-sac/assets/images/sac_logo.png b/tools/droiddoc/templates-sac/assets/images/sac_logo.png
index 54b9a4c..6dc126a 100644
--- a/tools/droiddoc/templates-sac/assets/images/sac_logo.png
+++ b/tools/droiddoc/templates-sac/assets/images/sac_logo.png
Binary files differ
diff --git a/tools/droiddoc/templates-sac/package.cs b/tools/droiddoc/templates-sac/package.cs
index 99eaff2..abd49f1 100644
--- a/tools/droiddoc/templates-sac/package.cs
+++ b/tools/droiddoc/templates-sac/package.cs
@@ -45,6 +45,7 @@
<?cs /if ?>
<?cs /def ?>
+<?cs call:class_table("Annotations", package.annotations) ?>
<?cs call:class_table("Interfaces", package.interfaces) ?>
<?cs call:class_table("Classes", package.classes) ?>
<?cs call:class_table("Enums", package.enums) ?>
diff --git a/tools/droiddoc/templates-sdk/assets/css/default.css b/tools/droiddoc/templates-sdk/assets/css/default.css
index 8c866c1..3b647c2 100644
--- a/tools/droiddoc/templates-sdk/assets/css/default.css
+++ b/tools/droiddoc/templates-sdk/assets/css/default.css
@@ -2774,22 +2774,19 @@
width:100%;
}
#butterbar {
- width:100%;
+ width:940px;
margin:0 auto;
}
#butterbar-message {
- background-color:rgba(255, 187, 51, .4);
- font-size:13px;
- padding: 5px 0;
- text-align:center;
+ background-color:#f80;
+ float:right;
+ font-size:12px;
+ font-weight:bold;
+ padding:0 10px;
+ border-radius: 0 0 5px 5px;
}
-a#butterbar-message {
- cursor:pointer;
- display:block;
-}
-a#butterbar-message:hover {
- text-decoration:underline;
-}
+#butterbar-message a {color:#fff !important}
+#butterbar-message a:hover {text-decoration:underline;}
/* --------------------------------------------------------------------------
Misc
diff --git a/tools/droiddoc/templates-sdk/customizations.cs b/tools/droiddoc/templates-sdk/customizations.cs
index 79cdd89..e0e3ca1 100644
--- a/tools/droiddoc/templates-sdk/customizations.cs
+++ b/tools/droiddoc/templates-sdk/customizations.cs
@@ -353,6 +353,7 @@
<?cs
if:subcount(class.package) ?>
<ul>
+ <?cs call:list("Annotations", class.package.annotations) ?>
<?cs call:list("Interfaces", class.package.interfaces) ?>
<?cs call:list("Classes", class.package.classes) ?>
<?cs call:list("Enums", class.package.enums) ?>
@@ -361,6 +362,7 @@
</ul><?cs
elif:subcount(package) ?>
<ul>
+ <?cs call:class_link_list("Annotations", package.annotations) ?>
<?cs call:class_link_list("Interfaces", package.interfaces) ?>
<?cs call:class_link_list("Classes", package.classes) ?>
<?cs call:class_link_list("Enums", package.enums) ?>
diff --git a/tools/droiddoc/templates-sdk/package.cs b/tools/droiddoc/templates-sdk/package.cs
index 99eaff2..abd49f1 100644
--- a/tools/droiddoc/templates-sdk/package.cs
+++ b/tools/droiddoc/templates-sdk/package.cs
@@ -45,6 +45,7 @@
<?cs /if ?>
<?cs /def ?>
+<?cs call:class_table("Annotations", package.annotations) ?>
<?cs call:class_table("Interfaces", package.interfaces) ?>
<?cs call:class_table("Classes", package.classes) ?>
<?cs call:class_table("Enums", package.enums) ?>
diff --git a/tools/post_process_props.py b/tools/post_process_props.py
index 5d1b350..e43ca59 100755
--- a/tools/post_process_props.py
+++ b/tools/post_process_props.py
@@ -16,6 +16,9 @@
import sys
+# Usage: post_process_props.py file.prop [blacklist_key, ...]
+# Blacklisted keys are removed from the property file, if present
+
# See PROP_VALUE_MAX system_properties.h.
# PROP_VALUE_MAX in system_properties.h includes the termination NUL,
# so we decrease it by 1 here.
@@ -82,8 +85,9 @@
for line in self.lines:
if not line or line.startswith("#"):
continue
- key, value = line.split("=", 1)
- props[key] = value
+ if "=" in line:
+ key, value = line.split("=", 1)
+ props[key] = value
return props
def get(self, name):
@@ -101,6 +105,10 @@
return
self.lines.append(key + value)
+ def delete(self, name):
+ key = name + "="
+ self.lines = [ line for line in self.lines if not line.startswith(key) ]
+
def write(self, f):
f.write("\n".join(self.lines))
f.write("\n")
@@ -124,6 +132,10 @@
if not validate(properties):
sys.exit(1)
+ # Drop any blacklisted keys
+ for key in argv[2:]:
+ properties.delete(key)
+
f = open(filename, 'w+')
properties.write(f)
f.close()
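
A short sketch of the new blacklist behaviour: key names passed after the property file are stripped from it, and lines without "=" are now skipped instead of raising. Python 3, with a made-up property list purely for illustration:

    class PropLines(object):
        """Tiny stand-in for the property container patched above."""
        def __init__(self, lines):
            self.lines = [l.strip() for l in lines]

        def to_dict(self):
            props = {}
            for line in self.lines:
                if not line or line.startswith("#"):
                    continue
                if "=" in line:          # tolerate malformed lines, as in the patch
                    key, value = line.split("=", 1)
                    props[key] = value
            return props

        def delete(self, name):
            key = name + "="
            self.lines = [l for l in self.lines if not l.startswith(key)]

    p = PropLines(["ro.secure=1", "ro.debuggable=1", "not a property line"])
    p.delete("ro.debuggable")            # e.g. a blacklist key passed on the command line
    print(p.to_dict())                   # {'ro.secure': '1'}
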
diff --git a/tools/releasetools/build_image.py b/tools/releasetools/build_image.py
index f8f2ada..f8cba44 100755
--- a/tools/releasetools/build_image.py
+++ b/tools/releasetools/build_image.py
@@ -24,6 +24,13 @@
import os.path
import subprocess
import sys
+import commands
+import shutil
+import tempfile
+
+import simg_map
+
+FIXED_SALT = "aee087a5be3b982978c923f566a94613496b417f2af592639bc80d141e34dfe7"
def RunCommand(cmd):
""" Echo and run the given command
@@ -38,13 +45,182 @@
p.communicate()
return p.returncode
-def BuildImage(in_dir, prop_dict, out_file):
+def GetVerityTreeSize(partition_size):
+ cmd = "build_verity_tree -s %d"
+ cmd %= partition_size
+ status, output = commands.getstatusoutput(cmd)
+ if status:
+ print output
+ return False, 0
+ return True, int(output)
+
+def GetVerityMetadataSize(partition_size):
+ cmd = "system/extras/verity/build_verity_metadata.py -s %d"
+ cmd %= partition_size
+ status, output = commands.getstatusoutput(cmd)
+ if status:
+ print output
+ return False, 0
+ return True, int(output)
+
+def AdjustPartitionSizeForVerity(partition_size):
+ """Modifies the provided partition size to account for the verity metadata.
+
+ This information is used to size the created image appropriately.
+ Args:
+ partition_size: the size of the partition to be verified.
+ Returns:
+ The size of the partition adjusted for verity metadata.
+ """
+ success, verity_tree_size = GetVerityTreeSize(partition_size)
+ if not success:
+ return 0
+ success, verity_metadata_size = GetVerityMetadataSize(partition_size)
+ if not success:
+ return 0
+ return partition_size - verity_tree_size - verity_metadata_size
+
+def BuildVerityTree(sparse_image_path, verity_image_path, prop_dict):
+ cmd = ("build_verity_tree -A %s %s %s" % (FIXED_SALT, sparse_image_path, verity_image_path))
+ print cmd
+ status, output = commands.getstatusoutput(cmd)
+ if status:
+ print "Could not build verity tree! Error: %s" % output
+ return False
+ root, salt = output.split()
+ prop_dict["verity_root_hash"] = root
+ prop_dict["verity_salt"] = salt
+ return True
+
+def BuildVerityMetadata(image_size, verity_metadata_path, root_hash, salt,
+ block_device, signer_path, key):
+ cmd = ("system/extras/verity/build_verity_metadata.py %s %s %s %s %s %s %s" %
+ (image_size,
+ verity_metadata_path,
+ root_hash,
+ salt,
+ block_device,
+ signer_path,
+ key))
+ print cmd
+ status, output = commands.getstatusoutput(cmd)
+ if status:
+ print "Could not build verity metadata! Error: %s" % output
+ return False
+ return True
+
+def Append2Simg(sparse_image_path, unsparse_image_path, error_message):
+ """Appends the unsparse image to the given sparse image.
+
+ Args:
+ sparse_image_path: the path to the (sparse) image
+ unsparse_image_path: the path to the (unsparse) image
+ Returns:
+ True on success, False on failure.
+ """
+ cmd = "append2simg %s %s"
+ cmd %= (sparse_image_path, unsparse_image_path)
+ print cmd
+ status, output = commands.getstatusoutput(cmd)
+ if status:
+ print "%s: %s" % (error_message, output)
+ return False
+ return True
+
+def BuildVerifiedImage(data_image_path, verity_image_path, verity_metadata_path):
+ if not Append2Simg(data_image_path, verity_metadata_path, "Could not append verity metadata!"):
+ return False
+ if not Append2Simg(data_image_path, verity_image_path, "Could not append verity tree!"):
+ return False
+ return True
+
+def UnsparseImage(sparse_image_path, replace=True):
+ img_dir = os.path.dirname(sparse_image_path)
+ unsparse_image_path = "unsparse_" + os.path.basename(sparse_image_path)
+ unsparse_image_path = os.path.join(img_dir, unsparse_image_path)
+ if os.path.exists(unsparse_image_path):
+ if replace:
+ os.unlink(unsparse_image_path)
+ else:
+ return True, unsparse_image_path
+ inflate_command = ["simg2img", sparse_image_path, unsparse_image_path]
+ exit_code = RunCommand(inflate_command)
+ if exit_code != 0:
+ os.remove(unsparse_image_path)
+ return False, None
+ return True, unsparse_image_path
+
+def MappedUnsparseImage(sparse_image_path, unsparse_image_path,
+ map_path, mapped_unsparse_image_path):
+ if simg_map.ComputeMap(sparse_image_path, unsparse_image_path,
+ map_path, mapped_unsparse_image_path):
+ return False
+ return True
+
+def MakeVerityEnabledImage(out_file, prop_dict):
+ """Creates an image that is verifiable using dm-verity.
+
+ Args:
+ out_file: the location to write the verifiable image at
+ prop_dict: a dictionary of properties required for image creation and verification
+ Returns:
+ True on success, False otherwise.
+ """
+ # get properties
+ image_size = prop_dict["partition_size"]
+ block_dev = prop_dict["verity_block_device"]
+ signer_key = prop_dict["verity_key"]
+ signer_path = prop_dict["verity_signer_cmd"]
+
+ # make a tempdir
+ tempdir_name = tempfile.mkdtemp(suffix="_verity_images")
+
+ # get partial image paths
+ verity_image_path = os.path.join(tempdir_name, "verity.img")
+ verity_metadata_path = os.path.join(tempdir_name, "verity_metadata.img")
+
+ # build the verity tree and get the root hash and salt
+ if not BuildVerityTree(out_file, verity_image_path, prop_dict):
+ shutil.rmtree(tempdir_name, ignore_errors=True)
+ return False
+
+ # build the metadata blocks
+ root_hash = prop_dict["verity_root_hash"]
+ salt = prop_dict["verity_salt"]
+ if not BuildVerityMetadata(image_size,
+ verity_metadata_path,
+ root_hash,
+ salt,
+ block_dev,
+ signer_path,
+ signer_key):
+ shutil.rmtree(tempdir_name, ignore_errors=True)
+ return False
+
+ # build the full verified image
+ if not BuildVerifiedImage(out_file,
+ verity_image_path,
+ verity_metadata_path):
+ shutil.rmtree(tempdir_name, ignore_errors=True)
+ return False
+
+ shutil.rmtree(tempdir_name, ignore_errors=True)
+ return True
+
+def BuildImage(in_dir, prop_dict, out_file,
+ fs_config=None,
+ fc_config=None):
"""Build an image to out_file from in_dir with property prop_dict.
Args:
in_dir: path of input directory.
prop_dict: property dictionary.
out_file: path of the output image file.
+ fs_config: path to the fs_config file (typically
+ META/filesystem_config.txt). If None then the configuration in
+ the local client will be used.
+ fc_config: path to the SELinux file_contexts file. If None then
+ the value from prop_dict['selinux_fc'] will be used.
Returns:
True iff the image is built successfully.
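
The verity path added above shrinks the filesystem so that the hash tree and metadata still fit inside the fixed partition. A sketch of that arithmetic with the two tool invocations stubbed out (the real helpers shell out to build_verity_tree and build_verity_metadata.py); the sizes below are invented:

    def adjust_partition_size_for_verity(partition_size, tree_size_fn, metadata_size_fn):
        """Return the space left for the filesystem once verity data is accounted for."""
        ok, tree_size = tree_size_fn(partition_size)
        if not ok:
            return 0
        ok, metadata_size = metadata_size_fn(partition_size)
        if not ok:
            return 0
        return partition_size - tree_size - metadata_size

    # Stub callbacks standing in for the external tools; the numbers are made up.
    fake_tree = lambda size: (True, size // 128)
    fake_meta = lambda size: (True, 32768)
    print(adjust_partition_size_for_verity(1 << 30, fake_tree, fake_meta))
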
@@ -52,6 +228,18 @@
build_command = []
fs_type = prop_dict.get("fs_type", "")
run_fsck = False
+
+ is_verity_partition = prop_dict.get("mount_point") == prop_dict.get("verity_mountpoint")
+ verity_supported = prop_dict.get("verity") == "true"
+ # adjust the partition size to make room for the hashes if this is to be verified
+ if verity_supported and is_verity_partition:
+ partition_size = int(prop_dict.get("partition_size"))
+ adjusted_size = AdjustPartitionSizeForVerity(partition_size)
+ if not adjusted_size:
+ return False
+ prop_dict["partition_size"] = str(adjusted_size)
+ prop_dict["original_partition_size"] = str(partition_size)
+
if fs_type.startswith("ext"):
build_command = ["mkuserimg.sh"]
if "extfs_sparse_flag" in prop_dict:
@@ -59,10 +247,18 @@
run_fsck = True
build_command.extend([in_dir, out_file, fs_type,
prop_dict["mount_point"]])
- if "partition_size" in prop_dict:
- build_command.append(prop_dict["partition_size"])
- if "selinux_fc" in prop_dict:
+ build_command.append(prop_dict["partition_size"])
+ if "timestamp" in prop_dict:
+ build_command.extend(["-T", str(prop_dict["timestamp"])])
+ if fs_config is not None:
+ build_command.extend(["-C", fs_config])
+ if fc_config is not None:
+ build_command.append(fc_config)
+ elif "selinux_fc" in prop_dict:
build_command.append(prop_dict["selinux_fc"])
+ elif fs_type.startswith("f2fs"):
+ build_command = ["mkf2fsuserimg.sh"]
+ build_command.extend([out_file, prop_dict["partition_size"]])
else:
build_command = ["mkyaffs2image", "-f"]
if prop_dict.get("mkyaffs2_extra_flags", None):
@@ -77,14 +273,14 @@
if exit_code != 0:
return False
+ # create the verified image if this is to be verified
+ if verity_supported and is_verity_partition:
+ if not MakeVerityEnabledImage(out_file, prop_dict):
+ return False
+
if run_fsck and prop_dict.get("skip_fsck") != "true":
- # Inflate the sparse image
- unsparse_image = os.path.join(
- os.path.dirname(out_file), "unsparse_" + os.path.basename(out_file))
- inflate_command = ["simg2img", out_file, unsparse_image]
- exit_code = RunCommand(inflate_command)
- if exit_code != 0:
- os.remove(unsparse_image)
+ success, unsparse_image = UnsparseImage(out_file, replace=False)
+ if not success:
return False
# Run e2fsck on the inflated image file
@@ -104,6 +300,10 @@
mount_point: such as "system", "data" etc.
"""
d = {}
+ if "build.prop" in glob_dict:
+ bp = glob_dict["build.prop"]
+ if "ro.build.date.utc" in bp:
+ d["timestamp"] = bp["ro.build.date.utc"]
def copy_prop(src_p, dest_p):
if src_p in glob_dict:
@@ -114,6 +314,11 @@
"mkyaffs2_extra_flags",
"selinux_fc",
"skip_fsck",
+ "verity",
+ "verity_block_device",
+ "verity_key",
+ "verity_signer_cmd",
+ "verity_mountpoint"
)
for p in common_props:
copy_prop(p, p)
@@ -123,7 +328,9 @@
copy_prop("fs_type", "fs_type")
copy_prop("system_size", "partition_size")
elif mount_point == "data":
+ # Copy the generic fs type first, override with specific one if available.
copy_prop("fs_type", "fs_type")
+ copy_prop("userdata_fs_type", "fs_type")
copy_prop("userdata_size", "partition_size")
elif mount_point == "cache":
copy_prop("cache_fs_type", "fs_type")
@@ -131,6 +338,9 @@
elif mount_point == "vendor":
copy_prop("vendor_fs_type", "fs_type")
copy_prop("vendor_size", "partition_size")
+ elif mount_point == "oem":
+ copy_prop("fs_type", "fs_type")
+ copy_prop("oem_size", "partition_size")
return d
@@ -169,6 +379,8 @@
mount_point = "cache"
elif image_filename == "vendor.img":
mount_point = "vendor"
+ elif image_filename == "oem.img":
+ mount_point = "oem"
else:
print >> sys.stderr, "error: unknown image file name ", image_filename
exit(1)
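
ImagePropFromGlobalDict now recognises an oem mount point, lets userdata_fs_type override the generic fs_type for /data, and forwards the build timestamp from build.prop. A condensed Python 3 sketch of that selection, using an invented global dictionary:

    def props_for_mount_point(glob_dict, mount_point):
        d = {}
        bp = glob_dict.get("build.prop", {})
        if "ro.build.date.utc" in bp:
            d["timestamp"] = bp["ro.build.date.utc"]
        if mount_point == "data":
            d["fs_type"] = glob_dict.get("userdata_fs_type", glob_dict.get("fs_type"))
            d["partition_size"] = glob_dict.get("userdata_size")
        elif mount_point == "oem":
            d["fs_type"] = glob_dict.get("fs_type")
            d["partition_size"] = glob_dict.get("oem_size")
        return d

    glob_dict = {                      # invented values
        "fs_type": "ext4",
        "userdata_fs_type": "f2fs",
        "userdata_size": "576716800",
        "oem_size": "67108864",
        "build.prop": {"ro.build.date.utc": "1400000000"},
    }
    print(props_for_mount_point(glob_dict, "data"))   # f2fs overrides the generic ext4
    print(props_for_mount_point(glob_dict, "oem"))
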
diff --git a/tools/releasetools/check_target_files_signatures b/tools/releasetools/check_target_files_signatures
index 45d30a6..b2f46c1 100755
--- a/tools/releasetools/check_target_files_signatures
+++ b/tools/releasetools/check_target_files_signatures
@@ -41,8 +41,8 @@
import sys
-if sys.hexversion < 0x02040000:
- print >> sys.stderr, "Python 2.4 or newer is required."
+if sys.hexversion < 0x02070000:
+ print >> sys.stderr, "Python 2.7 or newer is required."
sys.exit(1)
import os
diff --git a/tools/releasetools/common.py b/tools/releasetools/common.py
index d27be6f..701a9cb 100644
--- a/tools/releasetools/common.py
+++ b/tools/releasetools/common.py
@@ -84,17 +84,24 @@
pass
-def LoadInfoDict(zip):
+def LoadInfoDict(input):
"""Read and parse the META/misc_info.txt key/value pairs from the
input target files and return a dict."""
+ def read_helper(fn):
+ if isinstance(input, zipfile.ZipFile):
+ return input.read(fn)
+ else:
+ path = os.path.join(input, *fn.split("/"))
+ try:
+ with open(path) as f:
+ return f.read()
+ except IOError, e:
+ if e.errno == errno.ENOENT:
+ raise KeyError(fn)
d = {}
try:
- for line in zip.read("META/misc_info.txt").split("\n"):
- line = line.strip()
- if not line or line.startswith("#"): continue
- k, v = line.split("=", 1)
- d[k] = v
+ d = LoadDictionaryFromLines(read_helper("META/misc_info.txt").split("\n"))
except KeyError:
# ok if misc_info.txt doesn't exist
pass
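
LoadInfoDict now accepts either a target-files zip or an already-extracted directory; the nested read_helper hides the difference and turns a missing file into the same KeyError a missing zip member would raise. A standalone sketch of the pattern in Python 3 (the tree's scripts are Python 2.7, hence the "except IOError, e" spelling above):

    import errno
    import os
    import zipfile

    def make_read_helper(source):
        """Return a reader that works for a ZipFile or an extracted directory."""
        def read_helper(fn):
            if isinstance(source, zipfile.ZipFile):
                return source.read(fn)
            path = os.path.join(source, *fn.split("/"))
            try:
                with open(path, "rb") as f:
                    return f.read()
            except IOError as e:
                if e.errno == errno.ENOENT:
                    raise KeyError(fn)   # mimic ZipFile.read() on a missing member
                raise
        return read_helper

    # Usage against an extracted target-files directory (path is hypothetical):
    # read = make_read_helper("/tmp/targetfiles-unzipped")
    # misc = read("META/misc_info.txt")
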
@@ -105,20 +112,20 @@
if "mkyaffs2_extra_flags" not in d:
try:
- d["mkyaffs2_extra_flags"] = zip.read("META/mkyaffs2-extra-flags.txt").strip()
+ d["mkyaffs2_extra_flags"] = read_helper("META/mkyaffs2-extra-flags.txt").strip()
except KeyError:
# ok if flags don't exist
pass
if "recovery_api_version" not in d:
try:
- d["recovery_api_version"] = zip.read("META/recovery-api-version.txt").strip()
+ d["recovery_api_version"] = read_helper("META/recovery-api-version.txt").strip()
except KeyError:
raise ValueError("can't find recovery API version in input target-files")
if "tool_extensions" not in d:
try:
- d["tool_extensions"] = zip.read("META/tool-extensions.txt").strip()
+ d["tool_extensions"] = read_helper("META/tool-extensions.txt").strip()
except KeyError:
# ok if extensions don't exist
pass
@@ -127,7 +134,7 @@
d["fstab_version"] = "1"
try:
- data = zip.read("META/imagesizes.txt")
+ data = read_helper("META/imagesizes.txt")
for line in data.split("\n"):
if not line: continue
name, value = line.split(" ", 1)
@@ -152,33 +159,36 @@
makeint("boot_size")
makeint("fstab_version")
- d["fstab"] = LoadRecoveryFSTab(zip, d["fstab_version"])
- d["build.prop"] = LoadBuildProp(zip)
+ d["fstab"] = LoadRecoveryFSTab(read_helper, d["fstab_version"])
+ d["build.prop"] = LoadBuildProp(read_helper)
return d
-def LoadBuildProp(zip):
+def LoadBuildProp(read_helper):
try:
- data = zip.read("SYSTEM/build.prop")
+ data = read_helper("SYSTEM/build.prop")
except KeyError:
print "Warning: could not find SYSTEM/build.prop in %s" % zip
data = ""
+ return LoadDictionaryFromLines(data.split("\n"))
+def LoadDictionaryFromLines(lines):
d = {}
- for line in data.split("\n"):
+ for line in lines:
line = line.strip()
if not line or line.startswith("#"): continue
- name, value = line.split("=", 1)
- d[name] = value
+ if "=" in line:
+ name, value = line.split("=", 1)
+ d[name] = value
return d
-def LoadRecoveryFSTab(zip, fstab_version):
+def LoadRecoveryFSTab(read_helper, fstab_version):
class Partition(object):
pass
try:
- data = zip.read("RECOVERY/RAMDISK/etc/recovery.fstab")
+ data = read_helper("RECOVERY/RAMDISK/etc/recovery.fstab")
except KeyError:
- print "Warning: could not find RECOVERY/RAMDISK/etc/recovery.fstab in %s." % zip
+ print "Warning: could not find RECOVERY/RAMDISK/etc/recovery.fstab"
data = ""
if fstab_version == 1:
@@ -730,11 +740,14 @@
return result
-def ZipWriteStr(zip, filename, data, perms=0644):
+def ZipWriteStr(zip, filename, data, perms=0644, compression=None):
# use a fixed timestamp so the output is repeatable.
zinfo = zipfile.ZipInfo(filename=filename,
date_time=(2009, 1, 1, 0, 0, 0))
- zinfo.compress_type = zip.compression
+ if compression is None:
+ zinfo.compress_type = zip.compression
+ else:
+ zinfo.compress_type = compression
zinfo.external_attr = perms << 16
zip.writestr(zinfo, data)
@@ -761,6 +774,7 @@
if x == ".py":
f = b
info = imp.find_module(f, [d])
+ print "loaded device-specific extensions from", path
self.module = imp.load_module("device_specific", *info)
except ImportError:
print "unable to load device-specific module; assuming none"
@@ -839,8 +853,8 @@
t.flush()
return t
- def AddToZip(self, z):
- ZipWriteStr(z, self.name, self.data)
+ def AddToZip(self, z, compression=None):
+ ZipWriteStr(z, self.name, self.data, compression=compression)
DIFF_PROGRAM_BY_EXT = {
".gz" : "imgdiff",
@@ -954,7 +968,8 @@
# map recovery.fstab's fs_types to mount/format "partition types"
PARTITION_TYPES = { "yaffs2": "MTD", "mtd": "MTD",
- "ext4": "EMMC", "emmc": "EMMC" }
+ "ext4": "EMMC", "emmc": "EMMC",
+ "f2fs": "EMMC" }
def GetTypeAndDevice(mount_point, info):
fstab = info["fstab"]
@@ -977,3 +992,95 @@
save = True
cert = "".join(cert).decode('base64')
return cert
+
+def XDelta3(source_path, target_path, output_path):
+ diff_program = ["xdelta3", "-0", "-B", str(64<<20), "-e", "-f", "-s"]
+ diff_program.append(source_path)
+ diff_program.append(target_path)
+ diff_program.append(output_path)
+ p = Run(diff_program, stdin=subprocess.PIPE, stdout=subprocess.PIPE)
+ p.communicate()
+ assert p.returncode == 0, "Couldn't produce patch"
+
+def XZ(path):
+ compress_program = ["xz", "-zk", "-9", "--check=crc32"]
+ compress_program.append(path)
+ p = Run(compress_program, stdin=subprocess.PIPE, stdout=subprocess.PIPE)
+ p.communicate()
+ assert p.returncode == 0, "Couldn't compress patch"
+
+def MakePartitionPatch(source_file, target_file, partition):
+ with tempfile.NamedTemporaryFile() as output_file:
+ XDelta3(source_file.name, target_file.name, output_file.name)
+ XZ(output_file.name)
+ with open(output_file.name + ".xz") as patch_file:
+ patch_data = patch_file.read()
+ os.unlink(patch_file.name)
+ return File(partition + ".muimg.p", patch_data)
+
+def MakeRecoveryPatch(input_dir, output_sink, recovery_img, boot_img,
+ info_dict=None):
+ """Generate a binary patch that creates the recovery image starting
+ with the boot image. (Most of the space in these images is just the
+ kernel, which is identical for the two, so the resulting patch
+ should be efficient.) Add it to the output zip, along with a shell
+ script that is run from init.rc on first boot to actually do the
+ patching and install the new recovery image.
+
+ recovery_img and boot_img should be File objects for the
+ corresponding images. info should be the dictionary returned by
+ common.LoadInfoDict() on the input target_files.
+ """
+
+ if info_dict is None:
+ info_dict = OPTIONS.info_dict
+
+ diff_program = ["imgdiff"]
+ path = os.path.join(input_dir, "SYSTEM", "etc", "recovery-resource.dat")
+ if os.path.exists(path):
+ diff_program.append("-b")
+ diff_program.append(path)
+ bonus_args = "-b /system/etc/recovery-resource.dat"
+ else:
+ bonus_args = ""
+
+ d = Difference(recovery_img, boot_img, diff_program=diff_program)
+ _, _, patch = d.ComputePatch()
+ output_sink("recovery-from-boot.p", patch)
+
+ boot_type, boot_device = GetTypeAndDevice("/boot", info_dict)
+ recovery_type, recovery_device = GetTypeAndDevice("/recovery", info_dict)
+
+ sh = """#!/system/bin/sh
+if ! applypatch -c %(recovery_type)s:%(recovery_device)s:%(recovery_size)d:%(recovery_sha1)s; then
+ applypatch %(bonus_args)s %(boot_type)s:%(boot_device)s:%(boot_size)d:%(boot_sha1)s %(recovery_type)s:%(recovery_device)s %(recovery_sha1)s %(recovery_size)d %(boot_sha1)s:/system/recovery-from-boot.p && log -t recovery "Installing new recovery image: succeeded" || log -t recovery "Installing new recovery image: failed"
+else
+ log -t recovery "Recovery image already installed"
+fi
+""" % { 'boot_size': boot_img.size,
+ 'boot_sha1': boot_img.sha1,
+ 'recovery_size': recovery_img.size,
+ 'recovery_sha1': recovery_img.sha1,
+ 'boot_type': boot_type,
+ 'boot_device': boot_device,
+ 'recovery_type': recovery_type,
+ 'recovery_device': recovery_device,
+ 'bonus_args': bonus_args,
+ }
+
+ # The install script location moved from /system/etc to /system/bin
+ # in the L release. Parse the init.rc file to find out where the
+ # target-files expects it to be, and put it there.
+ sh_location = "etc/install-recovery.sh"
+ try:
+ with open(os.path.join(input_dir, "BOOT", "RAMDISK", "init.rc")) as f:
+ for line in f:
+ m = re.match("^service flash_recovery /system/(\S+)\s*$", line)
+ if m:
+ sh_location = m.group(1)
+ print "putting script in", sh_location
+ break
+ except (OSError, IOError), e:
+ print "failed to read init.rc: %s" % (e,)
+
+ output_sink(sh_location, sh)
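
As the comment in MakeRecoveryPatch notes, the install script moved from /system/etc to /system/bin in the L release, so the patch now reads the flash_recovery service line out of init.rc to learn where the target expects it. A minimal sketch of that lookup (Python 3, operating on an in-memory init.rc fragment instead of a file):

    import re

    def install_script_location(init_rc_lines):
        """Find the flash_recovery service path; default to the pre-L location."""
        sh_location = "etc/install-recovery.sh"
        for line in init_rc_lines:
            m = re.match(r"^service flash_recovery /system/(\S+)\s*$", line)
            if m:
                sh_location = m.group(1)
                break
        return sh_location

    sample_init_rc = [
        "on property:sys.boot_completed=1",
        "service flash_recovery /system/bin/install-recovery.sh",
    ]
    print(install_script_location(sample_init_rc))   # bin/install-recovery.sh
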
diff --git a/tools/releasetools/edify_generator.py b/tools/releasetools/edify_generator.py
index 426b713..8620812 100644
--- a/tools/releasetools/edify_generator.py
+++ b/tools/releasetools/edify_generator.py
@@ -68,19 +68,43 @@
with temporary=True) to this one."""
self.script.extend(other.script)
+ def AssertOemProperty(self, name, value):
+ """Assert that a property on the OEM partition matches a value."""
+ if not name:
+ raise ValueError("must specify an OEM property")
+ if not value:
+ raise ValueError("must specify the OEM value")
+ cmd = ('file_getprop("/oem/oem.prop", "%s") == "%s" || '
+ 'abort("This package expects the value \\"%s\\" for '
+ '\\"%s\\" on the OEM partition; '
+ 'this has value \\"" + file_getprop("/oem/oem.prop") + "\\".");'
+ ) % (name, value, name, value)
+ self.script.append(cmd)
+
def AssertSomeFingerprint(self, *fp):
- """Assert that the current system build fingerprint is one of *fp."""
+ """Assert that the current recovery build fingerprint is one of *fp."""
if not fp:
raise ValueError("must specify some fingerprints")
cmd = (
- ' ||\n '.join([('file_getprop("/system/build.prop", '
- '"ro.build.fingerprint") == "%s"')
+ ' ||\n '.join([('getprop("ro.build.fingerprint") == "%s"')
% i for i in fp]) +
' ||\n abort("Package expects build fingerprint of %s; this '
'device has " + getprop("ro.build.fingerprint") + ".");'
) % (" or ".join(fp),)
self.script.append(cmd)
+ def AssertSomeThumbprint(self, *fp):
+ """Assert that the current recovery build thumbprint is one of *fp."""
+ if not fp:
+ raise ValueError("must specify some thumbprints")
+ cmd = (
+ ' ||\n '.join([('getprop("ro.build.thumbprint") == "%s"')
+ % i for i in fp]) +
+ ' ||\n abort("Package expects build thumbprint of %s; this '
+ 'device has " + getprop("ro.build.thumbprint") + ".");'
+ ) % (" or ".join(fp),)
+ self.script.append(cmd)
+
def AssertOlderBuild(self, timestamp, timestamp_text):
"""Assert that the build on the device is older (or the same as)
the given timestamp."""
@@ -178,6 +202,15 @@
(p.fs_type, common.PARTITION_TYPES[p.fs_type],
p.device, p.length, p.mount_point))
+ def WipeBlockDevice(self, partition):
+ if partition not in ("/system", "/vendor"):
+ raise ValueError(("WipeBlockDevice doesn't work on %s\n") % (partition,))
+ fstab = self.info.get("fstab", None)
+ size = self.info.get(partition.lstrip("/") + "_size", None)
+ device = fstab[partition].device
+
+ self.script.append('wipe_block_device("%s", %s);' % (device, size))
+
def DeleteFiles(self, file_list):
"""Delete all files in file_list."""
if not file_list: return
@@ -212,7 +245,7 @@
cmd = "".join(cmd)
self.script.append(self._WordWrap(cmd))
- def WriteRawImage(self, mount_point, fn):
+ def WriteRawImage(self, mount_point, fn, mapfn=None):
"""Write the given package file into the partition for the given
mount point."""
@@ -226,8 +259,13 @@
'write_raw_image(package_extract_file("%(fn)s"), "%(device)s");'
% args)
elif partition_type == "EMMC":
- self.script.append(
- 'package_extract_file("%(fn)s", "%(device)s");' % args)
+ if mapfn:
+ args["map"] = mapfn
+ self.script.append(
+ 'package_extract_file("%(fn)s", "%(device)s", "%(map)s");' % args)
+ else:
+ self.script.append(
+ 'package_extract_file("%(fn)s", "%(device)s");' % args)
else:
raise ValueError("don't know how to write \"%s\" partitions" % (p.fs_type,))
@@ -293,6 +331,13 @@
if input_path is None:
data = input_zip.read("OTA/bin/updater")
else:
- data = open(os.path.join(input_path, "updater")).read()
+ data = open(input_path, "rb").read()
common.ZipWriteStr(output_zip, "META-INF/com/google/android/update-binary",
data, perms=0755)
+
+ def Syspatch(self, filename, target_mapfile, target_sha,
+ source_mapfile, source_sha, patchfile):
+ """Applies a compressed binary patch to a block device."""
+ call = 'syspatch("%s", "%s", "%s", "%s", "%s", "%s");'
+ self.script.append(call % (filename, target_mapfile, target_sha,
+ source_mapfile, source_sha, patchfile))
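
AssertSomeThumbprint mirrors the existing fingerprint assertion but keys off ro.build.thumbprint, the property newly emitted by buildinfo.sh earlier in this diff. A sketch of the edify text it produces, with a made-up thumbprint:

    def assert_some_thumbprint(*thumbprints):
        """Build the edify assertion string the generator appends to its script."""
        if not thumbprints:
            raise ValueError("must specify some thumbprints")
        checks = ' ||\n    '.join(
            'getprop("ro.build.thumbprint") == "%s"' % t for t in thumbprints)
        return (checks +
                ' ||\n    abort("Package expects build thumbprint of %s; this '
                'device has " + getprop("ro.build.thumbprint") + ".");'
                % " or ".join(thumbprints))

    print(assert_some_thumbprint("5.0/LRX21M/1234567:user/release-keys"))
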
diff --git a/tools/releasetools/img_from_target_files b/tools/releasetools/img_from_target_files
new file mode 120000
index 0000000..afaf24b
--- /dev/null
+++ b/tools/releasetools/img_from_target_files
@@ -0,0 +1 @@
+img_from_target_files.py
\ No newline at end of file
diff --git a/tools/releasetools/img_from_target_files b/tools/releasetools/img_from_target_files.py
similarity index 66%
rename from tools/releasetools/img_from_target_files
rename to tools/releasetools/img_from_target_files.py
index d23d465..9aab41c 100755
--- a/tools/releasetools/img_from_target_files
+++ b/tools/releasetools/img_from_target_files.py
@@ -31,8 +31,8 @@
import sys
-if sys.hexversion < 0x02040000:
- print >> sys.stderr, "Python 2.4 or newer is required."
+if sys.hexversion < 0x02070000:
+ print >> sys.stderr, "Python 2.7 or newer is required."
sys.exit(1)
import errno
@@ -53,11 +53,27 @@
OPTIONS = common.OPTIONS
-def AddSystem(output_zip):
+def AddSystem(output_zip, sparse=True):
"""Turn the contents of SYSTEM into a system image and store it in
output_zip."""
+ data = BuildSystem(OPTIONS.input_tmp, OPTIONS.info_dict, sparse=sparse)
+ common.ZipWriteStr(output_zip, "system.img", data)
- print "creating system.img..."
+def BuildSystem(input_dir, info_dict, sparse=True, map_file=None):
+ return CreateImage(input_dir, info_dict, "system",
+ sparse=sparse, map_file=map_file)
+
+def AddVendor(output_zip, sparse=True):
+ data = BuildVendor(OPTIONS.input_tmp, OPTIONS.info_dict, sparse=sparse)
+ common.ZipWriteStr(output_zip, "vendor.img", data)
+
+def BuildVendor(input_dir, info_dict, sparse=True, map_file=None):
+ return CreateImage(input_dir, info_dict, "vendor",
+ sparse=sparse, map_file=map_file)
+
+
+def CreateImage(input_dir, info_dict, what, sparse=True, map_file=None):
+ print "creating " + what + ".img..."
img = tempfile.NamedTemporaryFile()
@@ -65,8 +81,8 @@
# mkyaffs2image. It wants "system" but we have a directory named
# "SYSTEM", so create a symlink.
try:
- os.symlink(os.path.join(OPTIONS.input_tmp, "SYSTEM"),
- os.path.join(OPTIONS.input_tmp, "system"))
+ os.symlink(os.path.join(input_dir, what.upper()),
+ os.path.join(input_dir, what))
except OSError, e:
# bogus error on my mac version?
# File "./build/tools/releasetools/img_from_target_files", line 86, in AddSystem
@@ -75,60 +91,62 @@
if (e.errno == errno.EEXIST):
pass
- image_props = build_image.ImagePropFromGlobalDict(OPTIONS.info_dict,
- "system")
- fstab = OPTIONS.info_dict["fstab"]
+ image_props = build_image.ImagePropFromGlobalDict(info_dict, what)
+ fstab = info_dict["fstab"]
if fstab:
- image_props["fs_type" ] = fstab["/system"].fs_type
- succ = build_image.BuildImage(os.path.join(OPTIONS.input_tmp, "system"),
- image_props, img.name)
- assert succ, "build system.img image failed"
+ image_props["fs_type" ] = fstab["/" + what].fs_type
- img.seek(os.SEEK_SET, 0)
- data = img.read()
- img.close()
+ if what == "system":
+ fs_config_prefix = ""
+ else:
+ fs_config_prefix = what + "_"
- common.CheckSize(data, "system.img", OPTIONS.info_dict)
- common.ZipWriteStr(output_zip, "system.img", data)
+ fs_config = os.path.join(
+ input_dir, "META/" + fs_config_prefix + "filesystem_config.txt")
+ if not os.path.exists(fs_config): fs_config = None
+ fc_config = os.path.join(input_dir, "BOOT/RAMDISK/file_contexts")
+ if not os.path.exists(fc_config): fc_config = None
-def AddVendor(output_zip):
- """Turn the contents of VENDOR into vendor.img and store it in
- output_zip."""
+ succ = build_image.BuildImage(os.path.join(input_dir, what),
+ image_props, img.name,
+ fs_config=fs_config,
+ fc_config=fc_config)
+ assert succ, "build " + what + ".img image failed"
- image_props = build_image.ImagePropFromGlobalDict(OPTIONS.info_dict,
- "vendor")
- # The build system has to explicitly request for vendor.img.
- if "fs_type" not in image_props:
- return
+ mapdata = None
- print "creating vendor.img..."
+ if sparse:
+ data = open(img.name).read()
+ img.close()
+ else:
+ success, name = build_image.UnsparseImage(img.name, replace=False)
+ if not success:
+ assert False, "unsparsing " + what + ".img failed"
- img = tempfile.NamedTemporaryFile()
+ if map_file:
+ mmap = tempfile.NamedTemporaryFile()
+ mimg = tempfile.NamedTemporaryFile(delete=False)
+ success = build_image.MappedUnsparseImage(
+ img.name, name, mmap.name, mimg.name)
+ if not success:
+ assert False, "creating sparse map failed"
+ os.unlink(name)
+ name = mimg.name
- # The name of the directory it is making an image out of matters to
- # mkyaffs2image. It wants "vendor" but we have a directory named
- # "VENDOR", so create a symlink or an empty directory if VENDOR does not
- # exist.
- if not os.path.exists(os.path.join(OPTIONS.input_tmp, "vendor")):
- if os.path.exists(os.path.join(OPTIONS.input_tmp, "VENDOR")):
- os.symlink(os.path.join(OPTIONS.input_tmp, "VENDOR"),
- os.path.join(OPTIONS.input_tmp, "vendor"))
- else:
- os.mkdir(os.path.join(OPTIONS.input_tmp, "vendor"))
+ with open(mmap.name) as f:
+ mapdata = f.read()
- img = tempfile.NamedTemporaryFile()
+ try:
+ with open(name) as f:
+ data = f.read()
+ finally:
+ os.unlink(name)
- fstab = OPTIONS.info_dict["fstab"]
- if fstab:
- image_props["fs_type" ] = fstab["/vendor"].fs_type
- succ = build_image.BuildImage(os.path.join(OPTIONS.input_tmp, "vendor"),
- image_props, img.name)
- assert succ, "build vendor.img image failed"
-
- common.CheckSize(img.name, "vendor.img", OPTIONS.info_dict)
- output_zip.write(img.name, "vendor.img")
- img.close()
+ if mapdata is None:
+ return data
+ else:
+ return mapdata, data
def AddUserdata(output_zip):
@@ -136,8 +154,9 @@
image_props = build_image.ImagePropFromGlobalDict(OPTIONS.info_dict,
"data")
- # If no userdata_size is provided for extfs, skip userdata.img.
- if (image_props.get("fs_type", "").startswith("ext") and
+ # We only allow yaffs to have a 0/missing partition_size.
+ # Extfs, f2fs must have a size. Skip userdata.img if no size.
+ if (not image_props.get("fs_type", "").startswith("yaffs") and
not image_props.get("partition_size")):
return
@@ -242,16 +261,27 @@
boot_image = common.GetBootableImage(
"boot.img", "boot.img", OPTIONS.input_tmp, "BOOT")
if boot_image:
- boot_image.AddToZip(output_zip)
+ boot_image.AddToZip(output_zip)
recovery_image = common.GetBootableImage(
"recovery.img", "recovery.img", OPTIONS.input_tmp, "RECOVERY")
if recovery_image:
recovery_image.AddToZip(output_zip)
+ def banner(s):
+ print "\n\n++++ " + s + " ++++\n\n"
+
if not bootable_only:
+ banner("AddSystem")
AddSystem(output_zip)
- AddVendor(output_zip)
+ try:
+ input_zip.getinfo("VENDOR/")
+ banner("AddVendor")
+ AddVendor(output_zip)
+ except KeyError:
+ pass # no vendor partition for this device
+ banner("AddUserdata")
AddUserdata(output_zip)
+ banner("AddCache")
AddCache(output_zip)
CopyInfo(output_zip)
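
img_from_target_files now builds vendor.img only when the target-files zip actually contains a VENDOR/ entry, instead of calling AddVendor unconditionally. The probe is ZipFile.getinfo wrapped in a KeyError check; a self-contained Python 3 sketch:

    import io
    import zipfile

    def has_vendor_partition(target_files_zip):
        try:
            target_files_zip.getinfo("VENDOR/")
            return True
        except KeyError:
            return False

    # Build a throwaway zip in memory to demonstrate (real input is a target-files zip).
    buf = io.BytesIO()
    with zipfile.ZipFile(buf, "w") as z:
        z.writestr("SYSTEM/build.prop", "ro.build.id=TEST\n")
    with zipfile.ZipFile(buf) as z:
        print(has_vendor_partition(z))   # False -> AddVendor() would be skipped
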
diff --git a/tools/releasetools/make_recovery_patch b/tools/releasetools/make_recovery_patch
new file mode 100755
index 0000000..08d1450
--- /dev/null
+++ b/tools/releasetools/make_recovery_patch
@@ -0,0 +1,53 @@
+#!/usr/bin/env python
+#
+# Copyright (C) 2014 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+import sys
+
+if sys.hexversion < 0x02070000:
+ print >> sys.stderr, "Python 2.7 or newer is required."
+ sys.exit(1)
+
+import os
+import common
+
+OPTIONS = common.OPTIONS
+
+def main(argv):
+ # def option_handler(o, a):
+ # return False
+
+ args = common.ParseOptions(argv, __doc__)
+ input_dir, output_dir = args
+
+ OPTIONS.info_dict = common.LoadInfoDict(input_dir)
+
+ recovery_img = common.GetBootableImage("recovery.img", "recovery.img",
+ input_dir, "RECOVERY")
+ boot_img = common.GetBootableImage("boot.img", "boot.img",
+ input_dir, "BOOT")
+
+ if not recovery_img or not boot_img:
+ sys.exit(0)
+
+ def output_sink(fn, data):
+ with open(os.path.join(output_dir, "SYSTEM", *fn.split("/")), "wb") as f:
+ f.write(data)
+
+ common.MakeRecoveryPatch(input_dir, output_sink, recovery_img, boot_img)
+
+
+if __name__ == '__main__':
+ main(sys.argv[1:])
diff --git a/tools/releasetools/ota_from_target_files b/tools/releasetools/ota_from_target_files
index e695218..5f2354c 100755
--- a/tools/releasetools/ota_from_target_files
+++ b/tools/releasetools/ota_from_target_files
@@ -21,7 +21,7 @@
Usage: ota_from_target_files [flags] input_target_files output_ota_package
- -b (--board_config) <file>
+ --board_config <file>
Deprecated.
-k (--package_key) <key> Key to use to sign the package (default is
@@ -37,6 +37,10 @@
Generate an incremental OTA using the given target-files zip as
the starting build.
+ -o (--oem_settings) <file>
+ Use the file to specify the expected OEM-specific properties
+ on the OEM partition of the intended device.
+
-w (--wipe_user_data)
Generate an OTA package that will wipe the user data partition
when installed.
@@ -57,6 +61,16 @@
first, so that any changes made to the system partition are done
using the new recovery (new kernel, etc.).
+ --block
+ Generate a block-based OTA if possible. Will fall back to a
+ file-based OTA if the target_files is older and doesn't support
+ block-based OTAs.
+
+ -b (--binary) <file>
+ Use the given binary as the update-binary in the output package,
+ instead of the binary in the build's target_files. Use for
+ development only.
+
-t (--worker_threads) <int>
Specifies the number of worker-threads that will be used when
generating patches for incremental updates (defaults to 3).
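
The new -o/--oem_settings flag supplies a small property file describing the OEM partition; when a build declares oem_fingerprint_properties, the code later in this diff loads that file with LoadDictionaryFromLines and composes the package fingerprint from the OEM brand/name/device plus ro.build.thumbprint. A sketch of that composition with invented values:

    def load_dictionary_from_lines(lines):
        d = {}
        for line in lines:
            line = line.strip()
            if not line or line.startswith("#") or "=" not in line:
                continue
            k, v = line.split("=", 1)
            d[k] = v
        return d

    oem_dict = load_dictionary_from_lines([
        "ro.product.brand=SomeBrand",          # invented OEM property file
        "ro.product.name=some_product",
        "ro.product.device=some_device",
    ])
    thumbprint = "5.0/LRX21M/1234567:user/release-keys"   # from the build's info dict
    fingerprint = "%s/%s/%s:%s" % (oem_dict["ro.product.brand"],
                                   oem_dict["ro.product.name"],
                                   oem_dict["ro.product.device"],
                                   thumbprint)
    print(fingerprint)
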
@@ -65,8 +79,8 @@
import sys
-if sys.hexversion < 0x02040000:
- print >> sys.stderr, "Python 2.4 or newer is required."
+if sys.hexversion < 0x02070000:
+ print >> sys.stderr, "Python 2.7 or newer is required."
sys.exit(1)
import copy
@@ -84,7 +98,9 @@
from sha import sha as sha1
import common
+import img_from_target_files
import edify_generator
+import build_image
OPTIONS = common.OPTIONS
OPTIONS.package_key = None
@@ -99,6 +115,9 @@
OPTIONS.worker_threads = 3
OPTIONS.two_step = False
OPTIONS.no_signing = False
+OPTIONS.block_based = False
+OPTIONS.updater_binary = None
+OPTIONS.oem_source = None
def MostPopularKey(d, default):
"""Given a dict, return the key corresponding to the largest
@@ -144,50 +163,21 @@
return result
return None
-class Item:
- """Items represent the metadata (user, group, mode) of files and
- directories in the system image."""
- ITEMS = {}
- def __init__(self, name, dir=False):
- self.name = name
- self.uid = None
- self.gid = None
- self.mode = None
- self.selabel = None
- self.capabilities = None
- self.dir = dir
+class ItemSet:
+ def __init__(self, partition, fs_config):
+ self.partition = partition
+ self.fs_config = fs_config
+ self.ITEMS = {}
- if name:
- self.parent = Item.Get(os.path.dirname(name), dir=True)
- self.parent.children.append(self)
- else:
- self.parent = None
- if dir:
- self.children = []
+ def Get(self, name, dir=False):
+ if name not in self.ITEMS:
+ self.ITEMS[name] = Item(self, name, dir=dir)
+ return self.ITEMS[name]
- def Dump(self, indent=0):
- if self.uid is not None:
- print "%s%s %d %d %o" % (" "*indent, self.name, self.uid, self.gid, self.mode)
- else:
- print "%s%s %s %s %s" % (" "*indent, self.name, self.uid, self.gid, self.mode)
- if self.dir:
- print "%s%s" % (" "*indent, self.descendants)
- print "%s%s" % (" "*indent, self.best_subtree)
- for i in self.children:
- i.Dump(indent=indent+1)
-
- @classmethod
- def Get(cls, name, dir=False):
- if name not in cls.ITEMS:
- cls.ITEMS[name] = Item(name, dir=dir)
- return cls.ITEMS[name]
-
- @classmethod
- def GetMetadata(cls, input_zip):
-
+ def GetMetadata(self, input_zip):
# The target_files contains a record of what the uid,
# gid, and mode are supposed to be.
- output = input_zip.read("META/filesystem_config.txt")
+ output = input_zip.read(self.fs_config)
for line in output.split("\n"):
if not line: continue
@@ -205,7 +195,7 @@
if key == "capabilities":
capabilities = value
- i = cls.ITEMS.get(name, None)
+ i = self.ITEMS.get(name, None)
if i is not None:
i.uid = int(uid)
i.gid = int(gid)
@@ -216,11 +206,44 @@
i.children.sort(key=lambda i: i.name)
# set metadata for the files generated by this script.
- i = cls.ITEMS.get("system/recovery-from-boot.p", None)
+ i = self.ITEMS.get("system/recovery-from-boot.p", None)
if i: i.uid, i.gid, i.mode, i.selabel, i.capabilities = 0, 0, 0644, None, None
- i = cls.ITEMS.get("system/etc/install-recovery.sh", None)
+ i = self.ITEMS.get("system/etc/install-recovery.sh", None)
if i: i.uid, i.gid, i.mode, i.selabel, i.capabilities = 0, 0, 0544, None, None
+
+class Item:
+ """Items represent the metadata (user, group, mode) of files and
+ directories in the system image."""
+ def __init__(self, itemset, name, dir=False):
+ self.itemset = itemset
+ self.name = name
+ self.uid = None
+ self.gid = None
+ self.mode = None
+ self.selabel = None
+ self.capabilities = None
+ self.dir = dir
+
+ if name:
+ self.parent = itemset.Get(os.path.dirname(name), dir=True)
+ self.parent.children.append(self)
+ else:
+ self.parent = None
+ if dir:
+ self.children = []
+
+ def Dump(self, indent=0):
+ if self.uid is not None:
+ print "%s%s %d %d %o" % (" "*indent, self.name, self.uid, self.gid, self.mode)
+ else:
+ print "%s%s %s %s %s" % (" "*indent, self.name, self.uid, self.gid, self.mode)
+ if self.dir:
+ print "%s%s" % (" "*indent, self.descendants)
+ print "%s%s" % (" "*indent, self.best_subtree)
+ for i in self.children:
+ i.Dump(indent=indent+1)
+
def CountChildMetadata(self):
"""Count up the (uid, gid, mode, selabel, capabilities) tuples for
all children and determine the best strategy for using set_perm_recursive and
@@ -305,9 +328,8 @@
recurse(self, (-1, -1, -1, -1, None, None))
-def CopySystemFiles(input_zip, output_zip=None,
- substitute=None):
- """Copies files underneath system/ in the input zip to the output
+def CopyPartitionFiles(itemset, input_zip, output_zip=None, substitute=None):
+ """Copies files for the partition in the input zip to the output
zip. Populates the Item class with their metadata, and returns a
list of symlinks. output_zip may be None, in which case the copy is
skipped (but the other side effects still happen). substitute is an
@@ -317,15 +339,17 @@
symlinks = []
+ partition = itemset.partition
+
for info in input_zip.infolist():
- if info.filename.startswith("SYSTEM/"):
+ if info.filename.startswith(partition.upper() + "/"):
basefilename = info.filename[7:]
if IsSymlink(info):
symlinks.append((input_zip.read(info.filename),
- "/system/" + basefilename))
+ "/" + partition + "/" + basefilename))
else:
info2 = copy.copy(info)
- fn = info2.filename = "system/" + basefilename
+ fn = info2.filename = partition + "/" + basefilename
if substitute and fn in substitute and substitute[fn] is None:
continue
if output_zip is not None:
@@ -335,9 +359,9 @@
data = input_zip.read(info.filename)
output_zip.writestr(info2, data)
if fn.endswith("/"):
- Item.Get(fn[:-1], dir=True)
+ itemset.Get(fn[:-1], dir=True)
else:
- Item.Get(fn, dir=False)
+ itemset.Get(fn, dir=False)
symlinks.sort()
return symlinks
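
CopySystemFiles has been generalised into CopyPartitionFiles: the partition name now drives both the archive prefix being matched (SYSTEM/ or VENDOR/) and the on-device path that symlinks point at. A sketch of the renaming step alone, with a fabricated entry list (note the patched code slices a fixed 7 characters, which happens to fit both "SYSTEM/" and "VENDOR/"):

    def rewrite_entries(partition, archive_names):
        """Map target-files entries for one partition to their in-image paths."""
        prefix = partition.upper() + "/"
        out = []
        for name in archive_names:
            if not name.startswith(prefix):
                continue
            base = name[len(prefix):]            # the patch itself uses name[7:]
            out.append(partition + "/" + base)
        return out

    names = ["SYSTEM/app/Settings.apk", "VENDOR/lib/libfoo.so", "META/misc_info.txt"]
    print(rewrite_entries("vendor", names))      # ['vendor/lib/libfoo.so']
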
@@ -351,64 +375,48 @@
whole_file=True)
-def AppendAssertions(script, info_dict):
- device = GetBuildProp("ro.product.device", info_dict)
- script.AssertDevice(device)
-
-
-def MakeRecoveryPatch(input_tmp, output_zip, recovery_img, boot_img):
- """Generate a binary patch that creates the recovery image starting
- with the boot image. (Most of the space in these images is just the
- kernel, which is identical for the two, so the resulting patch
- should be efficient.) Add it to the output zip, along with a shell
- script that is run from init.rc on first boot to actually do the
- patching and install the new recovery image.
-
- recovery_img and boot_img should be File objects for the
- corresponding images. info should be the dictionary returned by
- common.LoadInfoDict() on the input target_files.
-
- Returns an Item for the shell script, which must be made
- executable.
- """
-
- diff_program = ["imgdiff"]
- path = os.path.join(input_tmp, "SYSTEM", "etc", "recovery-resource.dat")
- if os.path.exists(path):
- diff_program.append("-b")
- diff_program.append(path)
- bonus_args = "-b /system/etc/recovery-resource.dat"
+def AppendAssertions(script, info_dict, oem_dict = None):
+ oem_props = info_dict.get("oem_fingerprint_properties")
+ if oem_props is None:
+ device = GetBuildProp("ro.product.device", info_dict)
+ script.AssertDevice(device)
else:
- bonus_args = ""
+ if oem_dict is None:
+ raise common.ExternalError("No OEM file provided to answer expected assertions")
+ for prop in oem_props.split():
+ if oem_dict.get(prop) is None:
+ raise common.ExternalError("The OEM file is missing the property %s" % prop)
+ script.AssertOemProperty(prop, oem_dict.get(prop))
- d = common.Difference(recovery_img, boot_img, diff_program=diff_program)
- _, _, patch = d.ComputePatch()
- common.ZipWriteStr(output_zip, "recovery/recovery-from-boot.p", patch)
- Item.Get("system/recovery-from-boot.p", dir=False)
- boot_type, boot_device = common.GetTypeAndDevice("/boot", OPTIONS.info_dict)
- recovery_type, recovery_device = common.GetTypeAndDevice("/recovery", OPTIONS.info_dict)
+def HasRecoveryPatch(target_files_zip):
+ try:
+ target_files_zip.getinfo("SYSTEM/recovery-from-boot.p")
+ return True
+ except KeyError:
+ return False
- sh = """#!/system/bin/sh
-if ! applypatch -c %(recovery_type)s:%(recovery_device)s:%(recovery_size)d:%(recovery_sha1)s; then
- log -t recovery "Installing new recovery image"
- applypatch %(bonus_args)s %(boot_type)s:%(boot_device)s:%(boot_size)d:%(boot_sha1)s %(recovery_type)s:%(recovery_device)s %(recovery_sha1)s %(recovery_size)d %(boot_sha1)s:/system/recovery-from-boot.p
-else
- log -t recovery "Recovery image already installed"
-fi
-""" % { 'boot_size': boot_img.size,
- 'boot_sha1': boot_img.sha1,
- 'recovery_size': recovery_img.size,
- 'recovery_sha1': recovery_img.sha1,
- 'boot_type': boot_type,
- 'boot_device': boot_device,
- 'recovery_type': recovery_type,
- 'recovery_device': recovery_device,
- 'bonus_args': bonus_args,
- }
- common.ZipWriteStr(output_zip, "recovery/etc/install-recovery.sh", sh)
- return Item.Get("system/etc/install-recovery.sh", dir=False)
+def HasVendorPartition(target_files_zip):
+ try:
+ target_files_zip.getinfo("VENDOR/")
+ return True
+ except KeyError:
+ return False
+def GetOemProperty(name, oem_props, oem_dict, info_dict):
+ if oem_props is not None and name in oem_props:
+ return oem_dict[name]
+ return GetBuildProp(name, info_dict)
+
+
+def CalculateFingerprint(oem_props, oem_dict, info_dict):
+ if oem_props is None:
+ return GetBuildProp("ro.build.fingerprint", info_dict)
+ return "%s/%s/%s:%s" % (
+ GetOemProperty("ro.product.brand", oem_props, oem_dict, info_dict),
+ GetOemProperty("ro.product.name", oem_props, oem_dict, info_dict),
+ GetOemProperty("ro.product.device", oem_props, oem_dict, info_dict),
+ GetBuildProp("ro.build.thumbprint", info_dict))
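
A minimal sketch of the two cases above, with hypothetical property values (the real values come from the OEM settings file and the build props):

    # Hypothetical values, for illustration only.
    oem_props = "ro.product.brand ro.product.name ro.product.device"
    oem_dict = {"ro.product.brand": "acme",
                "ro.product.name": "falcon",
                "ro.product.device": "falcon"}
    # With OEM properties, a pseudo-fingerprint is assembled from the OEM values
    # plus the build thumbprint, e.g. "acme/falcon/falcon:<ro.build.thumbprint>";
    # without them, ro.build.fingerprint is returned unchanged.
    fp = CalculateFingerprint(oem_props, oem_dict, OPTIONS.info_dict)
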
def WriteFullOTAPackage(input_zip, output_zip):
# TODO: how to determine this? We don't know what version it will
@@ -416,9 +424,17 @@
# change very often.
script = edify_generator.EdifyGenerator(3, OPTIONS.info_dict)
- metadata = {"post-build": GetBuildProp("ro.build.fingerprint",
- OPTIONS.info_dict),
- "pre-device": GetBuildProp("ro.product.device",
+ oem_props = OPTIONS.info_dict.get("oem_fingerprint_properties")
+ oem_dict = None
+ if oem_props is not None:
+ if OPTIONS.oem_source is None:
+ raise common.ExternalError("OEM source required for this build")
+ script.Mount("/oem")
+ oem_dict = common.LoadDictionaryFromLines(open(OPTIONS.oem_source).readlines())
+
+ metadata = {"post-build": CalculateFingerprint(
+ oem_props, oem_dict, OPTIONS.info_dict),
+ "pre-device": GetOemProperty("ro.product.device", oem_props, oem_dict,
OPTIONS.info_dict),
"post-timestamp": GetBuildProp("ro.build.date.utc",
OPTIONS.info_dict),
@@ -433,12 +449,15 @@
metadata=metadata,
info_dict=OPTIONS.info_dict)
+ has_recovery_patch = HasRecoveryPatch(input_zip)
+ block_based = OPTIONS.block_based and has_recovery_patch
+
if not OPTIONS.omit_prereq:
ts = GetBuildProp("ro.build.date.utc", OPTIONS.info_dict)
ts_text = GetBuildProp("ro.build.date", OPTIONS.info_dict)
script.AssertOlderBuild(ts, ts_text)
- AppendAssertions(script, OPTIONS.info_dict)
+ AppendAssertions(script, OPTIONS.info_dict, oem_dict)
device_specific.FullOTA_Assertions()
# Two-step package strategy (in chronological order, which is *not*
@@ -482,37 +501,82 @@
device_specific.FullOTA_InstallBegin()
- script.ShowProgress(0.5, 0)
+ system_progress = 0.75
if OPTIONS.wipe_user_data:
- script.FormatPartition("/data")
+ system_progress -= 0.1
+ if HasVendorPartition(input_zip):
+ system_progress -= 0.1
if "selinux_fc" in OPTIONS.info_dict:
WritePolicyConfig(OPTIONS.info_dict["selinux_fc"], output_zip)
- script.FormatPartition("/system")
- script.Mount("/system")
- script.UnpackPackageDir("recovery", "/system")
- script.UnpackPackageDir("system", "/system")
+ system_items = ItemSet("system", "META/filesystem_config.txt")
+ script.ShowProgress(system_progress, 0)
+ if block_based:
+ mapdata, data = img_from_target_files.BuildSystem(
+ OPTIONS.input_tmp, OPTIONS.info_dict,
+ sparse=False, map_file=True)
- symlinks = CopySystemFiles(input_zip, output_zip)
- script.MakeSymlinks(symlinks)
+ common.ZipWriteStr(output_zip, "system.map", mapdata)
+ common.ZipWriteStr(output_zip, "system.muimg", data)
+ script.WipeBlockDevice("/system")
+ script.WriteRawImage("/system", "system.muimg", mapfn="system.map")
+ else:
+ script.FormatPartition("/system")
+ script.Mount("/system")
+ if not has_recovery_patch:
+ script.UnpackPackageDir("recovery", "/system")
+ script.UnpackPackageDir("system", "/system")
+
+ symlinks = CopyPartitionFiles(system_items, input_zip, output_zip)
+ script.MakeSymlinks(symlinks)
boot_img = common.GetBootableImage("boot.img", "boot.img",
OPTIONS.input_tmp, "BOOT")
- MakeRecoveryPatch(OPTIONS.input_tmp, output_zip, recovery_img, boot_img)
- Item.GetMetadata(input_zip)
- Item.Get("system").SetPermissions(script)
+ if not block_based:
+ def output_sink(fn, data):
+ common.ZipWriteStr(output_zip, "recovery/" + fn, data)
+ system_items.Get("system/" + fn, dir=False)
+
+ common.MakeRecoveryPatch(OPTIONS.input_tmp, output_sink,
+ recovery_img, boot_img)
+
+ system_items.GetMetadata(input_zip)
+ system_items.Get("system").SetPermissions(script)
+
+ if HasVendorPartition(input_zip):
+ vendor_items = ItemSet("vendor", "META/vendor_filesystem_config.txt")
+ script.ShowProgress(0.1, 0)
+
+ if block_based:
+ mapdata, data = img_from_target_files.BuildVendor(
+ OPTIONS.input_tmp, OPTIONS.info_dict,
+ sparse=False, map_file=True)
+
+ common.ZipWriteStr(output_zip, "vendor.map", mapdata)
+ common.ZipWriteStr(output_zip, "vendor.muimg", data)
+ script.WipeBlockDevice("/vendor")
+ script.WriteRawImage("/vendor", "vendor.muimg", mapfn="vendor.map")
+ else:
+ script.FormatPartition("/vendor")
+ script.Mount("/vendor")
+ script.UnpackPackageDir("vendor", "/vendor")
+
+ symlinks = CopyPartitionFiles(vendor_items, input_zip, output_zip)
+ script.MakeSymlinks(symlinks)
+
+ vendor_items.GetMetadata(input_zip)
+ vendor_items.Get("vendor").SetPermissions(script)
common.CheckSize(boot_img.data, "boot.img", OPTIONS.info_dict)
common.ZipWriteStr(output_zip, "boot.img", boot_img.data)
- script.ShowProgress(0.2, 0)
- script.ShowProgress(0.2, 10)
+ script.ShowProgress(0.05, 5)
script.WriteRawImage("/boot", "boot.img")
- script.ShowProgress(0.1, 0)
+ script.ShowProgress(0.2, 10)
device_specific.FullOTA_InstallEnd()
if OPTIONS.extra_script is not None:
@@ -520,6 +584,10 @@
script.UnmountAll()
+ if OPTIONS.wipe_user_data:
+ script.ShowProgress(0.1, 10)
+ script.FormatPartition("/data")
+
if OPTIONS.two_step:
script.AppendExtra("""
set_stage("%(bcb_dev)s", "");
@@ -532,7 +600,7 @@
endif;
endif;
""" % bcb_dev)
- script.AddToZip(input_zip, output_zip)
+ script.AddToZip(input_zip, output_zip, input_path=OPTIONS.updater_binary)
WriteMetadata(metadata, output_zip)
def WritePolicyConfig(file_context, output_zip):
@@ -546,14 +614,15 @@
"".join(["%s=%s\n" % kv
for kv in sorted(metadata.iteritems())]))
-def LoadSystemFiles(z):
- """Load all the files from SYSTEM/... in a given target-files
+def LoadPartitionFiles(z, partition):
+ """Load all the files from the given partition in a given target-files
ZipFile, and return a dict of {filename: File object}."""
out = {}
+ prefix = partition.upper() + "/"
for info in z.infolist():
- if info.filename.startswith("SYSTEM/") and not IsSymlink(info):
+ if info.filename.startswith(prefix) and not IsSymlink(info):
basefilename = info.filename[7:]
- fn = "system/" + basefilename
+ fn = partition + "/" + basefilename
data = z.read(info.filename)
out[fn] = common.File(fn, data)
return out
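
A short sketch of how this is consumed (as in FileDifference further down; target_zip and source_zip are the opened target-files ZipFile objects):

    # Maps OTA-package paths to File objects for one partition, e.g.
    # {"system/build.prop": File(...), "system/bin/sh": File(...), ...}
    target_data = LoadPartitionFiles(target_zip, "system")
    source_data = LoadPartitionFiles(source_zip, "system")
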
@@ -564,7 +633,7 @@
try:
return info_dict.get("build.prop", {})[prop]
except KeyError:
- raise common.ExternalError("couldn't find %s in build.prop" % (property,))
+ raise common.ExternalError("couldn't find %s in build.prop" % (prop,))
def AddToKnownPaths(filename, known_paths):
if filename[-1] == "/":
@@ -577,7 +646,46 @@
known_paths.add(path)
dirs.pop()
-def WriteIncrementalOTAPackage(target_zip, source_zip, output_zip):
+class BlockDifference:
+ def __init__(self, partition, builder, output_zip):
+ with tempfile.NamedTemporaryFile() as src_file:
+ with tempfile.NamedTemporaryFile() as tgt_file:
+ print "building source " + partition + " image..."
+ src_mapdata, src_data = builder(OPTIONS.source_tmp,
+ OPTIONS.source_info_dict,
+ sparse=False, map_file=True)
+
+ self.src_sha1 = sha1(src_data).hexdigest()
+ print "source " + partition + " sha1:", self.src_sha1
+ src_file.write(src_data)
+
+ print "building target " + partition + " image..."
+ tgt_mapdata, tgt_data = builder(OPTIONS.target_tmp,
+ OPTIONS.target_info_dict,
+ sparse=False, map_file=True)
+ self.tgt_sha1 = sha1(tgt_data).hexdigest()
+ print "target " + partition + " sha1:", self.tgt_sha1
+ tgt_len = len(tgt_data)
+ tgt_file.write(tgt_data)
+
+ system_type, self.device = common.GetTypeAndDevice("/" + partition,
+ OPTIONS.info_dict)
+ self.patch = common.MakePartitionPatch(src_file, tgt_file, partition)
+
+ TestBlockPatch(src_data, src_mapdata, self.patch.data,
+ tgt_mapdata, self.tgt_sha1)
+ src_data = None
+ tgt_data = None
+
+ self.patch.AddToZip(output_zip, compression=zipfile.ZIP_STORED)
+ self.src_mapfilename = self.patch.name + ".src.map"
+ common.ZipWriteStr(output_zip, self.src_mapfilename, src_mapdata)
+ self.tgt_mapfilename = self.patch.name + ".tgt.map"
+ common.ZipWriteStr(output_zip, self.tgt_mapfilename, tgt_mapdata)
+
+def WriteBlockIncrementalOTAPackage(target_zip, source_zip, output_zip):
source_version = OPTIONS.source_info_dict["recovery_api_version"]
target_version = OPTIONS.target_info_dict["recovery_api_version"]
@@ -603,81 +711,426 @@
metadata=metadata,
info_dict=OPTIONS.info_dict)
- print "Loading target..."
- target_data = LoadSystemFiles(target_zip)
- print "Loading source..."
- source_data = LoadSystemFiles(source_zip)
-
- verbatim_targets = []
- patch_list = []
- diffs = []
- renames = {}
- known_paths = set()
- largest_source_size = 0
-
- matching_file_cache = {}
- for fn, sf in source_data.items():
- assert fn == sf.name
- matching_file_cache["path:" + fn] = sf
- if fn in target_data.keys():
- AddToKnownPaths(fn, known_paths)
- # Only allow eligibility for filename/sha matching
- # if there isn't a perfect path match.
- if target_data.get(sf.name) is None:
- matching_file_cache["file:" + fn.split("/")[-1]] = sf
- matching_file_cache["sha:" + sf.sha1] = sf
-
- for fn in sorted(target_data.keys()):
- tf = target_data[fn]
- assert fn == tf.name
- sf = ClosestFileMatch(tf, matching_file_cache, renames)
- if sf is not None and sf.name != tf.name:
- print "File has moved from " + sf.name + " to " + tf.name
- renames[sf.name] = tf
-
- if sf is None or fn in OPTIONS.require_verbatim:
- # This file should be included verbatim
- if fn in OPTIONS.prohibit_verbatim:
- raise common.ExternalError("\"%s\" must be sent verbatim" % (fn,))
- print "send", fn, "verbatim"
- tf.AddToZip(output_zip)
- verbatim_targets.append((fn, tf.size))
- if fn in target_data.keys():
- AddToKnownPaths(fn, known_paths)
- elif tf.sha1 != sf.sha1:
- # File is different; consider sending as a patch
- diffs.append(common.Difference(tf, sf))
- else:
- # Target file data identical to source (may still be renamed)
- pass
-
- common.ComputeDifferences(diffs)
-
- for diff in diffs:
- tf, sf, d = diff.GetPatch()
- path = "/".join(tf.name.split("/")[:-1])
- if d is None or len(d) > tf.size * OPTIONS.patch_threshold or \
- path not in known_paths:
- # patch is almost as big as the file; don't bother patching
- # or a patch + rename cannot take place due to the target
- # directory not existing
- tf.AddToZip(output_zip)
- verbatim_targets.append((tf.name, tf.size))
- if sf.name in renames:
- del renames[sf.name]
- AddToKnownPaths(tf.name, known_paths)
- else:
- common.ZipWriteStr(output_zip, "patch/" + sf.name + ".p", d)
- patch_list.append((tf, sf, tf.size, common.sha1(d).hexdigest()))
- largest_source_size = max(largest_source_size, sf.size)
-
source_fp = GetBuildProp("ro.build.fingerprint", OPTIONS.source_info_dict)
target_fp = GetBuildProp("ro.build.fingerprint", OPTIONS.target_info_dict)
metadata["pre-build"] = source_fp
metadata["post-build"] = target_fp
+ source_boot = common.GetBootableImage(
+ "/tmp/boot.img", "boot.img", OPTIONS.source_tmp, "BOOT",
+ OPTIONS.source_info_dict)
+ target_boot = common.GetBootableImage(
+ "/tmp/boot.img", "boot.img", OPTIONS.target_tmp, "BOOT")
+ updating_boot = (not OPTIONS.two_step and
+ (source_boot.data != target_boot.data))
+
+ source_recovery = common.GetBootableImage(
+ "/tmp/recovery.img", "recovery.img", OPTIONS.source_tmp, "RECOVERY",
+ OPTIONS.source_info_dict)
+ target_recovery = common.GetBootableImage(
+ "/tmp/recovery.img", "recovery.img", OPTIONS.target_tmp, "RECOVERY")
+ updating_recovery = (source_recovery.data != target_recovery.data)
+
+ system_diff = BlockDifference("system", img_from_target_files.BuildSystem,
+ output_zip)
+ if HasVendorPartition(target_zip):
+ if not HasVendorPartition(source_zip):
+ raise RuntimeError("can't generate incremental that adds /vendor")
+ vendor_diff = BlockDifference("vendor", img_from_target_files.BuildVendor,
+ output_zip)
+
+ oem_props = OPTIONS.target_info_dict.get("oem_fingerprint_properties")
+ oem_dict = None
+ if oem_props is not None:
+ if OPTIONS.oem_source is None:
+ raise common.ExternalError("OEM source required for this build")
+ script.Mount("/oem")
+ oem_dict = common.LoadDictionaryFromLines(open(OPTIONS.oem_source).readlines())
+
+ AppendAssertions(script, OPTIONS.target_info_dict, oem_dict)
+ device_specific.IncrementalOTA_Assertions()
+
+ # Two-step incremental package strategy (in chronological order,
+ # which is *not* the order in which the generated script has
+ # things):
+ #
+ # if stage is not "2/3" or "3/3":
+ # do verification on current system
+ # write recovery image to boot partition
+ # set stage to "2/3"
+ # reboot to boot partition and restart recovery
+ # else if stage is "2/3":
+ # write recovery image to recovery partition
+ # set stage to "3/3"
+ # reboot to recovery partition and restart recovery
+ # else:
+ # (stage must be "3/3")
+ # perform update:
+ # patch system files, etc.
+ # force full install of new boot image
+ # set up system to update recovery partition on first boot
+ # complete script normally (allow recovery to mark itself finished and reboot)
+
+ if OPTIONS.two_step:
+ if not OPTIONS.info_dict.get("multistage_support", None):
+ assert False, "two-step packages not supported by this build"
+ fs = OPTIONS.info_dict["fstab"]["/misc"]
+ assert fs.fs_type.upper() == "EMMC", \
+ "two-step packages only supported on devices with EMMC /misc partitions"
+ bcb_dev = {"bcb_dev": fs.device}
+ common.ZipWriteStr(output_zip, "recovery.img", target_recovery.data)
+ script.AppendExtra("""
+if get_stage("%(bcb_dev)s", "stage") == "2/3" then
+""" % bcb_dev)
+ script.AppendExtra("sleep(20);\n");
+ script.WriteRawImage("/recovery", "recovery.img")
+ script.AppendExtra("""
+set_stage("%(bcb_dev)s", "3/3");
+reboot_now("%(bcb_dev)s", "recovery");
+else if get_stage("%(bcb_dev)s", "stage") != "3/3" then
+""" % bcb_dev)
+
+ script.Print("Verifying current system...")
+
+ device_specific.IncrementalOTA_VerifyBegin()
+
+ if oem_props is None:
+ script.AssertSomeFingerprint(source_fp, target_fp)
+ else:
+ script.AssertSomeThumbprint(
+ GetBuildProp("ro.build.thumbprint", OPTIONS.target_info_dict),
+ GetBuildProp("ro.build.thumbprint", OPTIONS.source_info_dict))
+
+ if updating_boot:
+ d = common.Difference(target_boot, source_boot)
+ _, _, d = d.ComputePatch()
+ print "boot target: %d source: %d diff: %d" % (
+ target_boot.size, source_boot.size, len(d))
+
+ common.ZipWriteStr(output_zip, "patch/boot.img.p", d)
+
+ boot_type, boot_device = common.GetTypeAndDevice("/boot", OPTIONS.info_dict)
+
+ script.PatchCheck("%s:%s:%d:%s:%d:%s" %
+ (boot_type, boot_device,
+ source_boot.size, source_boot.sha1,
+ target_boot.size, target_boot.sha1))
+
+ device_specific.IncrementalOTA_VerifyEnd()
+
+ if OPTIONS.two_step:
+ script.WriteRawImage("/boot", "recovery.img")
+ script.AppendExtra("""
+set_stage("%(bcb_dev)s", "2/3");
+reboot_now("%(bcb_dev)s", "");
+else
+""" % bcb_dev)
+
+ script.Comment("---- start making changes here ----")
+
+ device_specific.IncrementalOTA_InstallBegin()
+
+ if HasVendorPartition(target_zip):
+ script.Print("Patching vendor image...")
+ script.ShowProgress(0.1, 0)
+ script.Syspatch(vendor_diff.device,
+ vendor_diff.tgt_mapfilename, vendor_diff.tgt_sha1,
+ vendor_diff.src_mapfilename, vendor_diff.src_sha1,
+ vendor_diff.patch.name)
+ sys_progress = 0.8
+ else:
+ sys_progress = 0.9
+
+ script.Print("Patching system image...")
+ script.ShowProgress(sys_progress, 0)
+ script.Syspatch(system_diff.device,
+ system_diff.tgt_mapfilename, system_diff.tgt_sha1,
+ system_diff.src_mapfilename, system_diff.src_sha1,
+ system_diff.patch.name)
+
+ if OPTIONS.two_step:
+ common.ZipWriteStr(output_zip, "boot.img", target_boot.data)
+ script.WriteRawImage("/boot", "boot.img")
+ print "writing full boot image (forced by two-step mode)"
+
+ if not OPTIONS.two_step:
+ if updating_boot:
+ # Produce the boot image by applying a patch to the current
+ # contents of the boot partition, and write it back to the
+ # partition.
+ script.Print("Patching boot image...")
+ script.ShowProgress(0.1, 10)
+ script.ApplyPatch("%s:%s:%d:%s:%d:%s"
+ % (boot_type, boot_device,
+ source_boot.size, source_boot.sha1,
+ target_boot.size, target_boot.sha1),
+ "-",
+ target_boot.size, target_boot.sha1,
+ source_boot.sha1, "patch/boot.img.p")
+ print "boot image changed; including."
+ else:
+ print "boot image unchanged; skipping."
+
+ # Do device-specific installation (eg, write radio image).
+ device_specific.IncrementalOTA_InstallEnd()
+
+ if OPTIONS.extra_script is not None:
+ script.AppendExtra(OPTIONS.extra_script)
+
+ if OPTIONS.wipe_user_data:
+ script.Print("Erasing user data...")
+ script.FormatPartition("/data")
+
+ if OPTIONS.two_step:
+ script.AppendExtra("""
+set_stage("%(bcb_dev)s", "");
+endif;
+endif;
+""" % bcb_dev)
+
+ script.SetProgress(1)
+ script.AddToZip(target_zip, output_zip, input_path=OPTIONS.updater_binary)
+ WriteMetadata(metadata, output_zip)
+
+def ParseMap(map_str):
+ x = map_str.split()
+ assert int(x[0]) == 4096
+ assert int(x[1]) == len(x)-2
+ return int(x[0]), [int(i) for i in x[2:]]
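
The map string parsed here is the block map emitted by simg_map.py later in this change: a block size, the number of region entries, then alternating counts of "care" and "don't care" blocks, starting with a care region. A small worked example:

    # 4096-byte blocks; 6 region entries: 10 blocks of data, skip 2,
    # 5 blocks of data, skip 0, 3 blocks of data, skip 1.
    blksize, regions = ParseMap("4096\n6\n10\n2\n5\n0\n3\n1\n")
    assert blksize == 4096
    assert regions == [10, 2, 5, 0, 3, 1]
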
+
+def TestBlockPatch(src_muimg, src_map, patch_data, tgt_map, tgt_sha1):
+ src_blksize, src_regions = ParseMap(src_map)
+ tgt_blksize, tgt_regions = ParseMap(tgt_map)
+
+ with tempfile.NamedTemporaryFile() as src_file,\
+ tempfile.NamedTemporaryFile() as patch_file,\
+ tempfile.NamedTemporaryFile() as src_map_file,\
+ tempfile.NamedTemporaryFile() as tgt_map_file:
+
+ src_total = sum(src_regions) * src_blksize
+ src_file.truncate(src_total)
+ p = 0
+ for i in range(0, len(src_regions), 2):
+ c, dc = src_regions[i:i+2]
+ src_file.write(src_muimg[p:(p+c*src_blksize)])
+ p += c*src_blksize
+ src_file.seek(dc*src_blksize, 1)
+ assert src_file.tell() == src_total
+
+ patch_file.write(patch_data)
+
+ src_map_file.write(src_map)
+ tgt_map_file.write(tgt_map)
+
+ src_file.flush()
+ src_map_file.flush()
+ patch_file.flush()
+ tgt_map_file.flush()
+
+ p = common.Run(["syspatch_host", src_file.name, src_map_file.name,
+ patch_file.name, src_file.name, tgt_map_file.name],
+ stdout=subprocess.PIPE, stderr=subprocess.STDOUT)
+ stdoutdata, _ = p.communicate()
+ if p.returncode != 0:
+ print stdoutdata
+ raise ValueError("failed to reconstruct target system image from patch")
+
+ h = sha1()
+ src_file.seek(0, 0)
+ for i in range(0, len(tgt_regions), 2):
+ c, dc = tgt_regions[i:i+2]
+ h.update(src_file.read(c*tgt_blksize))
+ src_file.seek(dc*tgt_blksize, 1)
+
+ if h.hexdigest() != tgt_sha1:
+ raise ValueError("patch reconstructed incorrect target system image")
+
+ print "test of system image patch succeeded"
+
+
+class FileDifference:
+ def __init__(self, partition, source_zip, target_zip, output_zip):
+ print "Loading target..."
+ self.target_data = target_data = LoadPartitionFiles(target_zip, partition)
+ print "Loading source..."
+ self.source_data = source_data = LoadPartitionFiles(source_zip, partition)
+
+ self.verbatim_targets = verbatim_targets = []
+ self.patch_list = patch_list = []
+ diffs = []
+ self.renames = renames = {}
+ known_paths = set()
+ largest_source_size = 0
+
+ matching_file_cache = {}
+ for fn, sf in source_data.items():
+ assert fn == sf.name
+ matching_file_cache["path:" + fn] = sf
+ if fn in target_data.keys():
+ AddToKnownPaths(fn, known_paths)
+ # Only allow eligibility for filename/sha matching
+ # if there isn't a perfect path match.
+ if target_data.get(sf.name) is None:
+ matching_file_cache["file:" + fn.split("/")[-1]] = sf
+ matching_file_cache["sha:" + sf.sha1] = sf
+
+ for fn in sorted(target_data.keys()):
+ tf = target_data[fn]
+ assert fn == tf.name
+ sf = ClosestFileMatch(tf, matching_file_cache, renames)
+ if sf is not None and sf.name != tf.name:
+ print "File has moved from " + sf.name + " to " + tf.name
+ renames[sf.name] = tf
+
+ if sf is None or fn in OPTIONS.require_verbatim:
+ # This file should be included verbatim
+ if fn in OPTIONS.prohibit_verbatim:
+ raise common.ExternalError("\"%s\" must be sent verbatim" % (fn,))
+ print "send", fn, "verbatim"
+ tf.AddToZip(output_zip)
+ verbatim_targets.append((fn, tf.size))
+ if fn in target_data.keys():
+ AddToKnownPaths(fn, known_paths)
+ elif tf.sha1 != sf.sha1:
+ # File is different; consider sending as a patch
+ diffs.append(common.Difference(tf, sf))
+ else:
+ # Target file data identical to source (may still be renamed)
+ pass
+
+ common.ComputeDifferences(diffs)
+
+ for diff in diffs:
+ tf, sf, d = diff.GetPatch()
+ path = "/".join(tf.name.split("/")[:-1])
+ if d is None or len(d) > tf.size * OPTIONS.patch_threshold or \
+ path not in known_paths:
+ # patch is almost as big as the file; don't bother patching
+ # or a patch + rename cannot take place due to the target
+ # directory not existing
+ tf.AddToZip(output_zip)
+ verbatim_targets.append((tf.name, tf.size))
+ if sf.name in renames:
+ del renames[sf.name]
+ AddToKnownPaths(tf.name, known_paths)
+ else:
+ common.ZipWriteStr(output_zip, "patch/" + sf.name + ".p", d)
+ patch_list.append((tf, sf, tf.size, common.sha1(d).hexdigest()))
+ largest_source_size = max(largest_source_size, sf.size)
+
+ self.largest_source_size = largest_source_size
+
+ def EmitVerification(self, script):
+ so_far = 0
+ for tf, sf, size, patch_sha in self.patch_list:
+ if tf.name != sf.name:
+ script.SkipNextActionIfTargetExists(tf.name, tf.sha1)
+ script.PatchCheck("/"+sf.name, tf.sha1, sf.sha1)
+ so_far += sf.size
+ return so_far
+
+ def RemoveUnneededFiles(self, script, extras=()):
+ script.DeleteFiles(["/"+i[0] for i in self.verbatim_targets] +
+ ["/"+i for i in sorted(self.source_data)
+ if i not in self.target_data and
+ i not in self.renames] +
+ list(extras))
+
+ def TotalPatchSize(self):
+ return sum(i[1].size for i in self.patch_list)
+
+ def EmitPatches(self, script, total_patch_size, so_far):
+ self.deferred_patch_list = deferred_patch_list = []
+ for item in self.patch_list:
+ tf, sf, size, _ = item
+ if tf.name == "system/build.prop":
+ deferred_patch_list.append(item)
+ continue
+ if (sf.name != tf.name):
+ script.SkipNextActionIfTargetExists(tf.name, tf.sha1)
+ script.ApplyPatch("/"+sf.name, "-", tf.size, tf.sha1, sf.sha1, "patch/"+sf.name+".p")
+ so_far += tf.size
+ script.SetProgress(so_far / total_patch_size)
+ return so_far
+
+ def EmitDeferredPatches(self, script):
+ for item in self.deferred_patch_list:
+ tf, sf, size, _ = item
+ script.ApplyPatch("/"+sf.name, "-", tf.size, tf.sha1, sf.sha1, "patch/"+sf.name+".p")
+ script.SetPermissions("/system/build.prop", 0, 0, 0644, None, None)
+
+ def EmitRenames(self, script):
+ if len(self.renames) > 0:
+ script.Print("Renaming files...")
+ for src, tgt in self.renames.iteritems():
+ print "Renaming " + src + " to " + tgt.name
+ script.RenameFile(src, tgt.name)
+
+
+
+
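
Taken together, the FileDifference methods are driven in a fixed order by the file-based incremental path below; a condensed sketch (script, source_zip, target_zip and output_zip as in WriteIncrementalOTAPackage):

    system_diff = FileDifference("system", source_zip, target_zip, output_zip)
    so_far = system_diff.EmitVerification(script)          # check patch sources on-device
    system_diff.RemoveUnneededFiles(script, ("/system/recovery.img",))
    total = 1.0 + system_diff.TotalPatchSize()
    so_far = system_diff.EmitPatches(script, total, 0)     # apply patches, deferring build.prop
    system_diff.EmitRenames(script)
    system_diff.EmitDeferredPatches(script)                # patch build.prop last
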
+def WriteIncrementalOTAPackage(target_zip, source_zip, output_zip):
+ target_has_recovery_patch = HasRecoveryPatch(target_zip)
+ source_has_recovery_patch = HasRecoveryPatch(source_zip)
+
+ if (OPTIONS.block_based and
+ target_has_recovery_patch and
+ source_has_recovery_patch):
+ return WriteBlockIncrementalOTAPackage(target_zip, source_zip, output_zip)
+
+ source_version = OPTIONS.source_info_dict["recovery_api_version"]
+ target_version = OPTIONS.target_info_dict["recovery_api_version"]
+
+ if source_version == 0:
+ print ("WARNING: generating edify script for a source that "
+ "can't install it.")
+ script = edify_generator.EdifyGenerator(source_version,
+ OPTIONS.target_info_dict)
+
+ oem_props = OPTIONS.info_dict.get("oem_fingerprint_properties")
+ oem_dict = None
+ if oem_props is not None:
+ if OPTIONS.oem_source is None:
+ raise common.ExternalError("OEM source required for this build")
+ script.Mount("/oem")
+ oem_dict = common.LoadDictionaryFromLines(open(OPTIONS.oem_source).readlines())
+
+ metadata = {"pre-device": GetOemProperty("ro.product.device", oem_props, oem_dict,
+ OPTIONS.source_info_dict),
+ "post-timestamp": GetBuildProp("ro.build.date.utc",
+ OPTIONS.target_info_dict),
+ }
+
+ device_specific = common.DeviceSpecificParams(
+ source_zip=source_zip,
+ source_version=source_version,
+ target_zip=target_zip,
+ target_version=target_version,
+ output_zip=output_zip,
+ script=script,
+ metadata=metadata,
+ info_dict=OPTIONS.info_dict)
+
+ system_diff = FileDifference("system", source_zip, target_zip, output_zip)
script.Mount("/system")
- script.AssertSomeFingerprint(source_fp, target_fp)
+ if HasVendorPartition(target_zip):
+ vendor_diff = FileDifference("vendor", source_zip, target_zip, output_zip)
+ script.Mount("/vendor")
+ else:
+ vendor_diff = None
+
+ target_fp = CalculateFingerprint(oem_props, oem_dict, OPTIONS.target_info_dict)
+ source_fp = CalculateFingerprint(oem_props, oem_dict, OPTIONS.source_info_dict)
+
+ if oem_props is None:
+ script.AssertSomeFingerprint(source_fp, target_fp)
+ else:
+ script.AssertSomeThumbprint(
+ GetBuildProp("ro.build.thumbprint", OPTIONS.target_info_dict),
+ GetBuildProp("ro.build.thumbprint", OPTIONS.source_info_dict))
+
+ metadata["pre-build"] = source_fp
+ metadata["post-build"] = target_fp
source_boot = common.GetBootableImage(
"/tmp/boot.img", "boot.img", OPTIONS.source_tmp, "BOOT",
@@ -700,7 +1153,7 @@
# 0.1 for unpacking verbatim files, symlinking, and doing the
# device-specific commands.
- AppendAssertions(script, OPTIONS.target_info_dict)
+ AppendAssertions(script, OPTIONS.target_info_dict, oem_dict)
device_specific.IncrementalOTA_Assertions()
# Two-step incremental package strategy (in chronological order,
@@ -748,17 +1201,9 @@
device_specific.IncrementalOTA_VerifyBegin()
script.ShowProgress(0.1, 0)
- total_verify_size = float(sum([i[1].size for i in patch_list]) + 1)
- if updating_boot:
- total_verify_size += source_boot.size
- so_far = 0
-
- for tf, sf, size, patch_sha in patch_list:
- if tf.name != sf.name:
- script.SkipNextActionIfTargetExists(tf.name, tf.sha1)
- script.PatchCheck("/"+sf.name, tf.sha1, sf.sha1)
- so_far += sf.size
- script.SetProgress(so_far / total_verify_size)
+ so_far = system_diff.EmitVerification(script)
+ if vendor_diff:
+ so_far += vendor_diff.EmitVerification(script)
if updating_boot:
d = common.Difference(target_boot, source_boot)
@@ -775,10 +1220,13 @@
source_boot.size, source_boot.sha1,
target_boot.size, target_boot.sha1))
so_far += source_boot.size
- script.SetProgress(so_far / total_verify_size)
- if patch_list or updating_recovery or updating_boot:
- script.CacheFreeSpaceCheck(largest_source_size)
+ size = []
+ if system_diff.patch_list: size.append(system_diff.largest_source_size)
+ if vendor_diff:
+ if vendor_diff.patch_list: size.append(vendor_diff.largest_source_size)
+ if size or updating_recovery or updating_boot:
+    script.CacheFreeSpaceCheck(max(size) if size else 0)
device_specific.IncrementalOTA_VerifyEnd()
@@ -799,35 +1247,23 @@
script.WriteRawImage("/boot", "boot.img")
print "writing full boot image (forced by two-step mode)"
- if OPTIONS.wipe_user_data:
- script.Print("Erasing user data...")
- script.FormatPartition("/data")
-
script.Print("Removing unneeded files...")
- script.DeleteFiles(["/"+i[0] for i in verbatim_targets] +
- ["/"+i for i in sorted(source_data)
- if i not in target_data and
- i not in renames] +
- ["/system/recovery.img"])
+ system_diff.RemoveUnneededFiles(script, ("/system/recovery.img",))
+ if vendor_diff:
+ vendor_diff.RemoveUnneededFiles(script)
script.ShowProgress(0.8, 0)
- total_patch_size = float(sum([i[1].size for i in patch_list]) + 1)
+ total_patch_size = 1.0 + system_diff.TotalPatchSize()
+ if vendor_diff:
+ total_patch_size += vendor_diff.TotalPatchSize()
if updating_boot:
total_patch_size += target_boot.size
- so_far = 0
script.Print("Patching system files...")
- deferred_patch_list = []
- for item in patch_list:
- tf, sf, size, _ = item
- if tf.name == "system/build.prop":
- deferred_patch_list.append(item)
- continue
- if (sf.name != tf.name):
- script.SkipNextActionIfTargetExists(tf.name, tf.sha1)
- script.ApplyPatch("/"+sf.name, "-", tf.size, tf.sha1, sf.sha1, "patch/"+sf.name+".p")
- so_far += tf.size
- script.SetProgress(so_far / total_patch_size)
+ so_far = system_diff.EmitPatches(script, total_patch_size, 0)
+ if vendor_diff:
+ script.Print("Patching vendor files...")
+ so_far = vendor_diff.EmitPatches(script, total_patch_size, so_far)
if not OPTIONS.two_step:
if updating_boot:
@@ -848,6 +1284,10 @@
else:
print "boot image unchanged; skipping."
+ system_items = ItemSet("system", "META/filesystem_config.txt")
+ if vendor_diff:
+ vendor_items = ItemSet("vendor", "META/vendor_filesystem_config.txt")
+
if updating_recovery:
# Recovery is generated as a patch using both the boot image
# (which contains the same linux kernel as recovery) and the file
@@ -858,26 +1298,39 @@
# For older builds where recovery-resource.dat is not present, we
# use only the boot image as the source.
- MakeRecoveryPatch(OPTIONS.target_tmp, output_zip,
- target_recovery, target_boot)
- script.DeleteFiles(["/system/recovery-from-boot.p",
- "/system/etc/install-recovery.sh"])
+ if not target_has_recovery_patch:
+ def output_sink(fn, data):
+ common.ZipWriteStr(output_zip, "recovery/" + fn, data)
+ system_items.Get("system/" + fn, dir=False)
+
+ common.MakeRecoveryPatch(OPTIONS.target_tmp, output_sink,
+ target_recovery, target_boot)
+ script.DeleteFiles(["/system/recovery-from-boot.p",
+ "/system/etc/install-recovery.sh"])
print "recovery image changed; including as patch from boot."
else:
print "recovery image unchanged; skipping."
script.ShowProgress(0.1, 10)
- target_symlinks = CopySystemFiles(target_zip, None)
+ target_symlinks = CopyPartitionFiles(system_items, target_zip, None)
+ if vendor_diff:
+ target_symlinks.extend(CopyPartitionFiles(vendor_items, target_zip, None))
+
+ temp_script = script.MakeTemporary()
+ system_items.GetMetadata(target_zip)
+ system_items.Get("system").SetPermissions(temp_script)
+ if vendor_diff:
+ vendor_items.GetMetadata(target_zip)
+ vendor_items.Get("vendor").SetPermissions(temp_script)
+
+ # Note that this call will mess up the trees of Items, so make sure
+ # we're done with them.
+ source_symlinks = CopyPartitionFiles(system_items, source_zip, None)
+ if vendor_diff:
+ source_symlinks.extend(CopyPartitionFiles(vendor_items, source_zip, None))
target_symlinks_d = dict([(i[1], i[0]) for i in target_symlinks])
- temp_script = script.MakeTemporary()
- Item.GetMetadata(target_zip)
- Item.Get("system").SetPermissions(temp_script)
-
- # Note that this call will mess up the tree of Items, so make sure
- # we're done with it.
- source_symlinks = CopySystemFiles(source_zip, None)
source_symlinks_d = dict([(i[1], i[0]) for i in source_symlinks])
# Delete all the symlinks in source that aren't in target. This
@@ -889,20 +1342,20 @@
to_delete.append(link)
script.DeleteFiles(to_delete)
- if verbatim_targets:
- script.Print("Unpacking new files...")
+ if system_diff.verbatim_targets:
+ script.Print("Unpacking new system files...")
script.UnpackPackageDir("system", "/system")
+ if vendor_diff and vendor_diff.verbatim_targets:
+ script.Print("Unpacking new vendor files...")
+ script.UnpackPackageDir("vendor", "/vendor")
- if updating_recovery:
+ if updating_recovery and not target_has_recovery_patch:
script.Print("Unpacking new recovery...")
script.UnpackPackageDir("recovery", "/system")
- if len(renames) > 0:
- script.Print("Renaming files...")
-
- for src in renames:
- print "Renaming " + src + " to " + renames[src].name
- script.RenameFile(src, renames[src].name)
+ system_diff.EmitRenames(script)
+ if vendor_diff:
+ vendor_diff.EmitRenames(script)
script.Print("Symlinks and permissions...")
@@ -933,10 +1386,11 @@
# device can still come up, it appears to be the old build and will
# get set the OTA package again to retry.
script.Print("Patching remaining system files...")
- for item in deferred_patch_list:
- tf, sf, size, _ = item
- script.ApplyPatch("/"+sf.name, "-", tf.size, tf.sha1, sf.sha1, "patch/"+sf.name+".p")
- script.SetPermissions("/system/build.prop", 0, 0, 0644, None, None)
+ system_diff.EmitDeferredPatches(script)
+
+ if OPTIONS.wipe_user_data:
+ script.Print("Erasing user data...")
+ script.FormatPartition("/data")
if OPTIONS.two_step:
script.AppendExtra("""
@@ -945,14 +1399,14 @@
endif;
""" % bcb_dev)
- script.AddToZip(target_zip, output_zip)
+ script.AddToZip(target_zip, output_zip, input_path=OPTIONS.updater_binary)
WriteMetadata(metadata, output_zip)
def main(argv):
def option_handler(o, a):
- if o in ("-b", "--board_config"):
+ if o == "--board_config":
pass # deprecated
elif o in ("-k", "--package_key"):
OPTIONS.package_key = a
@@ -962,6 +1416,8 @@
OPTIONS.wipe_user_data = True
elif o in ("-n", "--no_prereq"):
OPTIONS.omit_prereq = True
+ elif o in ("-o", "--oem_settings"):
+ OPTIONS.oem_source = a
elif o in ("-e", "--extra_script"):
OPTIONS.extra_script = a
elif o in ("-a", "--aslr_mode"):
@@ -977,14 +1433,18 @@
"integers are allowed." % (a, o))
elif o in ("-2", "--two_step"):
OPTIONS.two_step = True
- elif o in ("--no_signing"):
+ elif o == "--no_signing":
OPTIONS.no_signing = True
+ elif o == "--block":
+ OPTIONS.block_based = True
+ elif o in ("-b", "--binary"):
+ OPTIONS.updater_binary = a
else:
return False
return True
args = common.ParseOptions(argv, __doc__,
- extra_opts="b:k:i:d:wne:t:a:2",
+ extra_opts="b:k:i:d:wne:t:a:2o:",
extra_long_opts=["board_config=",
"package_key=",
"incremental_from=",
@@ -995,6 +1455,9 @@
"aslr_mode=",
"two_step",
"no_signing",
+ "block",
+ "binary=",
+ "oem_settings=",
],
extra_option_handler=option_handler)
@@ -1023,11 +1486,23 @@
print "--- target info ---"
common.DumpInfoDict(OPTIONS.info_dict)
+ # If the caller explicitly specified the device-specific extensions
+ # path via -s/--device_specific, use that. Otherwise, use
+ # META/releasetools.py if it is present in the target target_files.
+ # Otherwise, take the path of the file from 'tool_extensions' in the
+ # info dict and look for that in the local filesystem, relative to
+ # the current directory.
+
if OPTIONS.device_specific is None:
- OPTIONS.device_specific = OPTIONS.info_dict.get("tool_extensions", None)
+ from_input = os.path.join(OPTIONS.input_tmp, "META", "releasetools.py")
+ if os.path.exists(from_input):
+ print "(using device-specific extensions from target_files)"
+ OPTIONS.device_specific = from_input
+ else:
+ OPTIONS.device_specific = OPTIONS.info_dict.get("tool_extensions", None)
+
if OPTIONS.device_specific is not None:
- OPTIONS.device_specific = os.path.normpath(OPTIONS.device_specific)
- print "using device-specific extensions in", OPTIONS.device_specific
+ OPTIONS.device_specific = os.path.abspath(OPTIONS.device_specific)
if OPTIONS.no_signing:
output_zip = zipfile.ZipFile(args[1], "w", compression=zipfile.ZIP_DEFLATED)
@@ -1047,6 +1522,9 @@
OPTIONS.source_tmp, source_zip = common.UnzipTemp(OPTIONS.incremental_source)
OPTIONS.target_info_dict = OPTIONS.info_dict
OPTIONS.source_info_dict = common.LoadInfoDict(source_zip)
+ if "selinux_fc" in OPTIONS.source_info_dict:
+ OPTIONS.source_info_dict["selinux_fc"] = os.path.join(OPTIONS.source_tmp, "BOOT", "RAMDISK",
+ "file_contexts")
if OPTIONS.package_key is None:
OPTIONS.package_key = OPTIONS.source_info_dict.get(
"default_system_dev_certificate",
diff --git a/tools/releasetools/sign_target_files_apks b/tools/releasetools/sign_target_files_apks
index ab24706..0ecb906 100755
--- a/tools/releasetools/sign_target_files_apks
+++ b/tools/releasetools/sign_target_files_apks
@@ -67,8 +67,8 @@
import sys
-if sys.hexversion < 0x02040000:
- print >> sys.stderr, "Python 2.4 or newer is required."
+if sys.hexversion < 0x02070000:
+ print >> sys.stderr, "Python 2.7 or newer is required."
sys.exit(1)
import base64
@@ -77,6 +77,7 @@
import errno
import os
import re
+import shutil
import subprocess
import tempfile
import zipfile
@@ -139,14 +140,41 @@
return data
-def SignApks(input_tf_zip, output_tf_zip, apk_key_map, key_passwords):
+def ProcessTargetFiles(input_tf_zip, output_tf_zip, misc_info,
+ apk_key_map, key_passwords):
+
maxsize = max([len(os.path.basename(i.filename))
for i in input_tf_zip.infolist()
if i.filename.endswith('.apk')])
+ rebuild_recovery = False
+
+ tmpdir = tempfile.mkdtemp()
+ def write_to_temp(fn, attr, data):
+ fn = os.path.join(tmpdir, fn)
+ if fn.endswith("/"):
+ os.mkdir(fn)
+ else:
+ d = os.path.dirname(fn)
+ if d and not os.path.exists(d):
+ os.makedirs(d)
+
+ if attr >> 16 == 0xa1ff:
+ os.symlink(data, fn)
+ else:
+ with open(fn, "wb") as f:
+ f.write(data)
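
The attribute test above relies on the Unix mode being stored in the top 16 bits of the zip entry's external_attr; 0xa1ff is 0o120777, i.e. a symbolic link (S_IFLNK) with mode 0777, which is how symlinks end up recorded in these target-files zips. A quick sanity check of that constant:

    import stat
    assert stat.S_IFMT(0o120777) == stat.S_IFLNK
    assert stat.S_IMODE(0o120777) == 0o777
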
for info in input_tf_zip.infolist():
data = input_tf_zip.read(info.filename)
out_info = copy.copy(info)
+
+ if (info.filename.startswith("BOOT/") or
+ info.filename.startswith("RECOVERY/") or
+ info.filename.startswith("META/") or
+ info.filename == "SYSTEM/etc/recovery-resource.dat"):
+ write_to_temp(info.filename, info.external_attr, data)
+
if info.filename.endswith(".apk"):
name = os.path.basename(info.filename)
key = apk_key_map[name]
@@ -161,16 +189,45 @@
elif info.filename in ("SYSTEM/build.prop",
"RECOVERY/RAMDISK/default.prop"):
print "rewriting %s:" % (info.filename,)
- new_data = RewriteProps(data)
+ new_data = RewriteProps(data, misc_info)
output_tf_zip.writestr(out_info, new_data)
+ if info.filename == "RECOVERY/RAMDISK/default.prop":
+ write_to_temp(info.filename, info.external_attr, new_data)
elif info.filename.endswith("mac_permissions.xml"):
print "rewriting %s with new keys." % (info.filename,)
new_data = ReplaceCerts(data)
output_tf_zip.writestr(out_info, new_data)
+ elif info.filename in ("SYSTEM/recovery-from-boot.p",
+ "SYSTEM/bin/install-recovery.sh"):
+ rebuild_recovery = True
+ elif (OPTIONS.replace_ota_keys and
+ info.filename in ("RECOVERY/RAMDISK/res/keys",
+ "SYSTEM/etc/security/otacerts.zip")):
+ # don't copy these files if we're regenerating them below
+ pass
else:
# a non-APK file; copy it verbatim
output_tf_zip.writestr(out_info, data)
+ if OPTIONS.replace_ota_keys:
+ new_recovery_keys = ReplaceOtaKeys(input_tf_zip, output_tf_zip, misc_info)
+ if new_recovery_keys:
+ write_to_temp("RECOVERY/RAMDISK/res/keys", 0755 << 16, new_recovery_keys)
+
+ if rebuild_recovery:
+ recovery_img = common.GetBootableImage(
+ "recovery.img", "recovery.img", tmpdir, "RECOVERY", info_dict=misc_info)
+ boot_img = common.GetBootableImage(
+ "boot.img", "boot.img", tmpdir, "BOOT", info_dict=misc_info)
+
+ def output_sink(fn, data):
+ output_tf_zip.writestr("SYSTEM/"+fn, data)
+
+ common.MakeRecoveryPatch(tmpdir, output_sink, recovery_img, boot_img,
+ info_dict=misc_info)
+
+ shutil.rmtree(tmpdir)
+
def ReplaceCerts(data):
"""Given a string of data, replace all occurences of a set
@@ -214,14 +271,20 @@
return ",".join(sorted(tags))
-def RewriteProps(data):
+def RewriteProps(data, misc_info):
output = []
for line in data.split("\n"):
line = line.strip()
original_line = line
- if line and line[0] != '#':
+ if line and line[0] != '#' and "=" in line:
key, value = line.split("=", 1)
- if key == "ro.build.fingerprint":
+ if (key == "ro.build.fingerprint"
+ and misc_info.get("oem_fingerprint_properties") is None):
+ pieces = value.split("/")
+ pieces[-1] = EditTags(pieces[-1])
+ value = "/".join(pieces)
+ elif (key == "ro.build.thumbprint"
+ and misc_info.get("oem_fingerprint_properties") is not None):
pieces = value.split("/")
pieces[-1] = EditTags(pieces[-1])
value = "/".join(pieces)
@@ -235,7 +298,7 @@
elif key == "ro.build.display.id":
# change, eg, "JWR66N dev-keys" to "JWR66N"
value = value.split()
- if len(value) > 1 and value[-1].endswith("-keys"):
+ if len(value) > 1 and value[-1].endswith("-keys"):
value.pop()
value = " ".join(value)
line = key + "=" + value
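
For illustration, the fingerprint/thumbprint handling above only rewrites the trailing tags component; EditTags (only partially visible in this hunk) is assumed to map the comma-separated tag list, e.g. test-keys to release-keys:

    # Hypothetical walk-through of the fingerprint branch above:
    #   value      = "acme/falcon/falcon:5.0/LRX21M/1234:user/test-keys"
    #   pieces     = value.split("/")        # tags are the last "/"-separated piece
    #   pieces[-1] = EditTags("test-keys")   # assumed to yield, e.g., "release-keys"
    #   value      = "/".join(pieces)
    #   -> "acme/falcon/falcon:5.0/LRX21M/1234:user/release-keys"
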
@@ -265,7 +328,8 @@
for k in keylist:
m = re.match(r"^(.*)\.x509\.pem$", k)
if not m:
- raise common.ExternalError("can't parse \"%s\" from META/otakeys.txt" % (k,))
+ raise common.ExternalError(
+ "can't parse \"%s\" from META/otakeys.txt" % (k,))
k = m.group(1)
mapped_keys.append(OPTIONS.key_map.get(k, k) + ".x509.pem")
@@ -287,10 +351,11 @@
os.path.join(OPTIONS.search_path, "framework", "dumpkey.jar")]
+ mapped_keys + extra_recovery_keys,
stdout=subprocess.PIPE)
- data, _ = p.communicate()
+ new_recovery_keys, _ = p.communicate()
if p.returncode != 0:
raise common.ExternalError("failed to run dumpkeys")
- common.ZipWriteStr(output_tf_zip, "RECOVERY/RAMDISK/res/keys", data)
+ common.ZipWriteStr(output_tf_zip, "RECOVERY/RAMDISK/res/keys",
+ new_recovery_keys)
# SystemUpdateActivity uses the x509.pem version of the keys, but
# put into a zipfile system/etc/security/otacerts.zip.
@@ -304,6 +369,8 @@
common.ZipWriteStr(output_tf_zip, "SYSTEM/etc/security/otacerts.zip",
tempfile.getvalue())
+ return new_recovery_keys
+
def BuildKeyMap(misc_info, key_mapping_options):
for s, d in key_mapping_options:
@@ -375,10 +442,8 @@
CheckAllApksSigned(input_zip, apk_key_map)
key_passwords = common.GetKeyPasswords(set(apk_key_map.values()))
- SignApks(input_zip, output_zip, apk_key_map, key_passwords)
-
- if OPTIONS.replace_ota_keys:
- ReplaceOtaKeys(input_zip, output_zip, misc_info)
+ ProcessTargetFiles(input_zip, output_zip, misc_info,
+ apk_key_map, key_passwords)
input_zip.close()
output_zip.close()
diff --git a/tools/releasetools/simg_map.py b/tools/releasetools/simg_map.py
new file mode 100644
index 0000000..22dc863
--- /dev/null
+++ b/tools/releasetools/simg_map.py
@@ -0,0 +1,148 @@
+#! /usr/bin/env python
+
+# Copyright (C) 2012 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+from __future__ import print_function
+import getopt, posixpath, signal, struct, sys
+
+def main():
+  if len(sys.argv) != 5:
+    print("Usage: simg_map.py <sparse_image_file> <unsparse_image_file> "
+          "<map_file> <mapped_unsparse_file>")
+    return 1
+
+ sparse_fn = sys.argv[1]
+ unsparse_fn = sys.argv[2]
+ map_file = sys.argv[3]
+ mapped_unsparse_fn = sys.argv[4]
+
+ return ComputeMap(sparse_fn, unsparse_fn, map_file, mapped_unsparse_fn)
+
+
+def ComputeMap(sparse_fn, unsparse_fn, map_file, mapped_unsparse_fn):
+ care_map = []
+
+ with open(sparse_fn, "rb") as FH:
+ header_bin = FH.read(28)
+ header = struct.unpack("<I4H4I", header_bin)
+
+ magic = header[0]
+ major_version = header[1]
+ minor_version = header[2]
+ file_hdr_sz = header[3]
+ chunk_hdr_sz = header[4]
+ blk_sz = header[5]
+ total_blks = header[6]
+ total_chunks = header[7]
+ image_checksum = header[8]
+
+ if magic != 0xED26FF3A:
+      print("%s: Magic should be 0xED26FF3A but is 0x%08X"
+            % (sparse_fn, magic))
+ return 1
+ if major_version != 1 or minor_version != 0:
+      print("%s: I only know about version 1.0, but this is version %u.%u"
+            % (sparse_fn, major_version, minor_version))
+ return 1
+ if file_hdr_sz != 28:
+      print("%s: The file header size was expected to be 28, but is %u."
+            % (sparse_fn, file_hdr_sz))
+ return 1
+ if chunk_hdr_sz != 12:
+      print("%s: The chunk header size was expected to be 12, but is %u."
+            % (sparse_fn, chunk_hdr_sz))
+ return 1
+
+ print("%s: Total of %u %u-byte output blocks in %u input chunks."
+ % (sparse_fn, total_blks, blk_sz, total_chunks))
+
+ offset = 0
+ for i in range(total_chunks):
+ header_bin = FH.read(12)
+ header = struct.unpack("<2H2I", header_bin)
+ chunk_type = header[0]
+ reserved1 = header[1]
+ chunk_sz = header[2]
+ total_sz = header[3]
+ data_sz = total_sz - 12
+
+ if chunk_type == 0xCAC1:
+ if data_sz != (chunk_sz * blk_sz):
+ print("Raw chunk input size (%u) does not match output size (%u)"
+ % (data_sz, chunk_sz * blk_sz))
+ return 1
+ else:
+ care_map.append((1, chunk_sz))
+ FH.seek(data_sz, 1)
+
+ elif chunk_type == 0xCAC2:
+ print("Fill chunks are not supported")
+ return 1
+
+ elif chunk_type == 0xCAC3:
+ if data_sz != 0:
+ print("Don't care chunk input size is non-zero (%u)" % (data_sz))
+ return 1
+ else:
+ care_map.append((0, chunk_sz))
+
+ elif chunk_type == 0xCAC4:
+ print("CRC32 chunks are not supported")
+
+ else:
+ print("Unknown chunk type 0x%04X not supported" % (chunk_type,))
+ return 1
+
+ offset += chunk_sz
+
+ if total_blks != offset:
+ print("The header said we should have %u output blocks, but we saw %u"
+ % (total_blks, offset))
+
+ junk_len = len(FH.read())
+ if junk_len:
+ print("There were %u bytes of extra data at the end of the file."
+ % (junk_len))
+ return 1
+
+ last_kind = None
+ new_care_map = []
+ for kind, size in care_map:
+ if kind != last_kind:
+ new_care_map.append((kind, size))
+ last_kind = kind
+ else:
+ new_care_map[-1] = (kind, new_care_map[-1][1] + size)
+
+ if new_care_map[0][0] == 0:
+ new_care_map.insert(0, (1, 0))
+ if len(new_care_map) % 2:
+ new_care_map.append((0, 0))
+
+ with open(map_file, "w") as fmap:
+ fmap.write("%d\n%d\n" % (blk_sz, len(new_care_map)))
+ for _, sz in new_care_map:
+ fmap.write("%d\n" % sz)
+
+ with open(unsparse_fn, "rb") as fin:
+ with open(mapped_unsparse_fn, "wb") as fout:
+ for k, sz in care_map:
+ data = fin.read(sz * blk_sz)
+ if k:
+ fout.write(data)
+ else:
+ assert data == "\x00" * len(data)
+
+if __name__ == "__main__":
+ sys.exit(main())
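
The map file written above is the same newline-separated block map that ParseMap()/TestBlockPatch() in ota_from_target_files consume: block size, entry count, then alternating care/don't-care block counts, always starting with a care region and padded to an even number of entries. A small worked example (hypothetical image layout):

    # For a sparse image holding 10 raw blocks, a 3-block "don't care" hole,
    # then 5 more raw blocks (4096-byte blocks), ComputeMap writes a map file of:
    #
    #   4096
    #   4
    #   10
    #   3
    #   5
    #   0
    #
    # and mapped_unsparse_fn receives only the 15 care blocks, concatenated.
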