Merge "Add missing support library projects to pathmap" into mnc-ub-dev
am: 5b30101722

* commit '5b3010172211583034d1cf5eac41583dae79d77a':
  Add missing support library projects to pathmap

Change-Id: Ia5c43c59bd4ff26a2d1069f38205ee7a5de2181b
diff --git a/.gitignore b/.gitignore
index c9b568f..f1f4a52 100644
--- a/.gitignore
+++ b/.gitignore
@@ -1,2 +1,5 @@
 *.pyc
 *.swp
+blueprint/
+kati/
+soong/
diff --git a/CleanSpec.mk b/CleanSpec.mk
index 64d84e3..3c8d6ce 100644
--- a/CleanSpec.mk
+++ b/CleanSpec.mk
@@ -354,7 +354,7 @@
 $(call add-clean-step, rm -rf $(PRODUCT_OUT)/root/default.prop)
 $(call add-clean-step, rm -rf $(PRODUCT_OUT)/recovery/root/default.prop)
 
-# Change PLATFORM_VERSION from MNC to M
+# New York, New York!
 $(call add-clean-step, rm -rf $(PRODUCT_OUT)/system/build.prop)
 $(call add-clean-step, rm -rf $(PRODUCT_OUT)/system/app/*)
 $(call add-clean-step, rm -rf $(PRODUCT_OUT)/obj/APPS/*)
@@ -364,12 +364,15 @@
 $(call add-clean-step, rm -rf $(PRODUCT_OUT)/system/app/*)
 $(call add-clean-step, rm -rf $(PRODUCT_OUT)/obj/APPS/*)
 
-# 23 is becoming more alive!!!
+$(call add-clean-step, rm -rf $(PRODUCT_OUT)/system/build.prop)
+
+# Change PLATFORM_VERSION from NYC to N
 $(call add-clean-step, rm -rf $(PRODUCT_OUT)/system/build.prop)
 $(call add-clean-step, rm -rf $(PRODUCT_OUT)/system/app/*)
 $(call add-clean-step, rm -rf $(PRODUCT_OUT)/obj/APPS/*)
 
-$(call add-clean-step, rm -rf $(PRODUCT_OUT)/system/build.prop)
+# $(PRODUCT_OUT)/recovery/root/sdcard goes from symlink to folder.
+$(call add-clean-step, rm -rf $(PRODUCT_OUT)/recovery/root/sdcard)
 
 # ************************************************
 # NEWER CLEAN STEPS MUST BE AT THE END OF THE LIST
diff --git a/core/LINUX_KERNEL_COPYING b/core/LINUX_KERNEL_COPYING
new file mode 100644
index 0000000..ca442d3
--- /dev/null
+++ b/core/LINUX_KERNEL_COPYING
@@ -0,0 +1,356 @@
+
+   NOTE! This copyright does *not* cover user programs that use kernel
+ services by normal system calls - this is merely considered normal use
+ of the kernel, and does *not* fall under the heading of "derived work".
+ Also note that the GPL below is copyrighted by the Free Software
+ Foundation, but the instance of code that it refers to (the Linux
+ kernel) is copyrighted by me and others who actually wrote it.
+
+ Also note that the only valid version of the GPL as far as the kernel
+ is concerned is _this_ particular version of the license (ie v2, not
+ v2.2 or v3.x or whatever), unless explicitly otherwise stated.
+
+			Linus Torvalds
+
+----------------------------------------
+
+		    GNU GENERAL PUBLIC LICENSE
+		       Version 2, June 1991
+
+ Copyright (C) 1989, 1991 Free Software Foundation, Inc.
+                       51 Franklin St, Fifth Floor, Boston, MA  02110-1301  USA
+ Everyone is permitted to copy and distribute verbatim copies
+ of this license document, but changing it is not allowed.
+
+			    Preamble
+
+  The licenses for most software are designed to take away your
+freedom to share and change it.  By contrast, the GNU General Public
+License is intended to guarantee your freedom to share and change free
+software--to make sure the software is free for all its users.  This
+General Public License applies to most of the Free Software
+Foundation's software and to any other program whose authors commit to
+using it.  (Some other Free Software Foundation software is covered by
+the GNU Library General Public License instead.)  You can apply it to
+your programs, too.
+
+  When we speak of free software, we are referring to freedom, not
+price.  Our General Public Licenses are designed to make sure that you
+have the freedom to distribute copies of free software (and charge for
+this service if you wish), that you receive source code or can get it
+if you want it, that you can change the software or use pieces of it
+in new free programs; and that you know you can do these things.
+
+  To protect your rights, we need to make restrictions that forbid
+anyone to deny you these rights or to ask you to surrender the rights.
+These restrictions translate to certain responsibilities for you if you
+distribute copies of the software, or if you modify it.
+
+  For example, if you distribute copies of such a program, whether
+gratis or for a fee, you must give the recipients all the rights that
+you have.  You must make sure that they, too, receive or can get the
+source code.  And you must show them these terms so they know their
+rights.
+
+  We protect your rights with two steps: (1) copyright the software, and
+(2) offer you this license which gives you legal permission to copy,
+distribute and/or modify the software.
+
+  Also, for each author's protection and ours, we want to make certain
+that everyone understands that there is no warranty for this free
+software.  If the software is modified by someone else and passed on, we
+want its recipients to know that what they have is not the original, so
+that any problems introduced by others will not reflect on the original
+authors' reputations.
+
+  Finally, any free program is threatened constantly by software
+patents.  We wish to avoid the danger that redistributors of a free
+program will individually obtain patent licenses, in effect making the
+program proprietary.  To prevent this, we have made it clear that any
+patent must be licensed for everyone's free use or not licensed at all.
+
+  The precise terms and conditions for copying, distribution and
+modification follow.
+
+		    GNU GENERAL PUBLIC LICENSE
+   TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION
+
+  0. This License applies to any program or other work which contains
+a notice placed by the copyright holder saying it may be distributed
+under the terms of this General Public License.  The "Program", below,
+refers to any such program or work, and a "work based on the Program"
+means either the Program or any derivative work under copyright law:
+that is to say, a work containing the Program or a portion of it,
+either verbatim or with modifications and/or translated into another
+language.  (Hereinafter, translation is included without limitation in
+the term "modification".)  Each licensee is addressed as "you".
+
+Activities other than copying, distribution and modification are not
+covered by this License; they are outside its scope.  The act of
+running the Program is not restricted, and the output from the Program
+is covered only if its contents constitute a work based on the
+Program (independent of having been made by running the Program).
+Whether that is true depends on what the Program does.
+
+  1. You may copy and distribute verbatim copies of the Program's
+source code as you receive it, in any medium, provided that you
+conspicuously and appropriately publish on each copy an appropriate
+copyright notice and disclaimer of warranty; keep intact all the
+notices that refer to this License and to the absence of any warranty;
+and give any other recipients of the Program a copy of this License
+along with the Program.
+
+You may charge a fee for the physical act of transferring a copy, and
+you may at your option offer warranty protection in exchange for a fee.
+
+  2. You may modify your copy or copies of the Program or any portion
+of it, thus forming a work based on the Program, and copy and
+distribute such modifications or work under the terms of Section 1
+above, provided that you also meet all of these conditions:
+
+    a) You must cause the modified files to carry prominent notices
+    stating that you changed the files and the date of any change.
+
+    b) You must cause any work that you distribute or publish, that in
+    whole or in part contains or is derived from the Program or any
+    part thereof, to be licensed as a whole at no charge to all third
+    parties under the terms of this License.
+
+    c) If the modified program normally reads commands interactively
+    when run, you must cause it, when started running for such
+    interactive use in the most ordinary way, to print or display an
+    announcement including an appropriate copyright notice and a
+    notice that there is no warranty (or else, saying that you provide
+    a warranty) and that users may redistribute the program under
+    these conditions, and telling the user how to view a copy of this
+    License.  (Exception: if the Program itself is interactive but
+    does not normally print such an announcement, your work based on
+    the Program is not required to print an announcement.)
+
+These requirements apply to the modified work as a whole.  If
+identifiable sections of that work are not derived from the Program,
+and can be reasonably considered independent and separate works in
+themselves, then this License, and its terms, do not apply to those
+sections when you distribute them as separate works.  But when you
+distribute the same sections as part of a whole which is a work based
+on the Program, the distribution of the whole must be on the terms of
+this License, whose permissions for other licensees extend to the
+entire whole, and thus to each and every part regardless of who wrote it.
+
+Thus, it is not the intent of this section to claim rights or contest
+your rights to work written entirely by you; rather, the intent is to
+exercise the right to control the distribution of derivative or
+collective works based on the Program.
+
+In addition, mere aggregation of another work not based on the Program
+with the Program (or with a work based on the Program) on a volume of
+a storage or distribution medium does not bring the other work under
+the scope of this License.
+
+  3. You may copy and distribute the Program (or a work based on it,
+under Section 2) in object code or executable form under the terms of
+Sections 1 and 2 above provided that you also do one of the following:
+
+    a) Accompany it with the complete corresponding machine-readable
+    source code, which must be distributed under the terms of Sections
+    1 and 2 above on a medium customarily used for software interchange; or,
+
+    b) Accompany it with a written offer, valid for at least three
+    years, to give any third party, for a charge no more than your
+    cost of physically performing source distribution, a complete
+    machine-readable copy of the corresponding source code, to be
+    distributed under the terms of Sections 1 and 2 above on a medium
+    customarily used for software interchange; or,
+
+    c) Accompany it with the information you received as to the offer
+    to distribute corresponding source code.  (This alternative is
+    allowed only for noncommercial distribution and only if you
+    received the program in object code or executable form with such
+    an offer, in accord with Subsection b above.)
+
+The source code for a work means the preferred form of the work for
+making modifications to it.  For an executable work, complete source
+code means all the source code for all modules it contains, plus any
+associated interface definition files, plus the scripts used to
+control compilation and installation of the executable.  However, as a
+special exception, the source code distributed need not include
+anything that is normally distributed (in either source or binary
+form) with the major components (compiler, kernel, and so on) of the
+operating system on which the executable runs, unless that component
+itself accompanies the executable.
+
+If distribution of executable or object code is made by offering
+access to copy from a designated place, then offering equivalent
+access to copy the source code from the same place counts as
+distribution of the source code, even though third parties are not
+compelled to copy the source along with the object code.
+
+  4. You may not copy, modify, sublicense, or distribute the Program
+except as expressly provided under this License.  Any attempt
+otherwise to copy, modify, sublicense or distribute the Program is
+void, and will automatically terminate your rights under this License.
+However, parties who have received copies, or rights, from you under
+this License will not have their licenses terminated so long as such
+parties remain in full compliance.
+
+  5. You are not required to accept this License, since you have not
+signed it.  However, nothing else grants you permission to modify or
+distribute the Program or its derivative works.  These actions are
+prohibited by law if you do not accept this License.  Therefore, by
+modifying or distributing the Program (or any work based on the
+Program), you indicate your acceptance of this License to do so, and
+all its terms and conditions for copying, distributing or modifying
+the Program or works based on it.
+
+  6. Each time you redistribute the Program (or any work based on the
+Program), the recipient automatically receives a license from the
+original licensor to copy, distribute or modify the Program subject to
+these terms and conditions.  You may not impose any further
+restrictions on the recipients' exercise of the rights granted herein.
+You are not responsible for enforcing compliance by third parties to
+this License.
+
+  7. If, as a consequence of a court judgment or allegation of patent
+infringement or for any other reason (not limited to patent issues),
+conditions are imposed on you (whether by court order, agreement or
+otherwise) that contradict the conditions of this License, they do not
+excuse you from the conditions of this License.  If you cannot
+distribute so as to satisfy simultaneously your obligations under this
+License and any other pertinent obligations, then as a consequence you
+may not distribute the Program at all.  For example, if a patent
+license would not permit royalty-free redistribution of the Program by
+all those who receive copies directly or indirectly through you, then
+the only way you could satisfy both it and this License would be to
+refrain entirely from distribution of the Program.
+
+If any portion of this section is held invalid or unenforceable under
+any particular circumstance, the balance of the section is intended to
+apply and the section as a whole is intended to apply in other
+circumstances.
+
+It is not the purpose of this section to induce you to infringe any
+patents or other property right claims or to contest validity of any
+such claims; this section has the sole purpose of protecting the
+integrity of the free software distribution system, which is
+implemented by public license practices.  Many people have made
+generous contributions to the wide range of software distributed
+through that system in reliance on consistent application of that
+system; it is up to the author/donor to decide if he or she is willing
+to distribute software through any other system and a licensee cannot
+impose that choice.
+
+This section is intended to make thoroughly clear what is believed to
+be a consequence of the rest of this License.
+
+  8. If the distribution and/or use of the Program is restricted in
+certain countries either by patents or by copyrighted interfaces, the
+original copyright holder who places the Program under this License
+may add an explicit geographical distribution limitation excluding
+those countries, so that distribution is permitted only in or among
+countries not thus excluded.  In such case, this License incorporates
+the limitation as if written in the body of this License.
+
+  9. The Free Software Foundation may publish revised and/or new versions
+of the General Public License from time to time.  Such new versions will
+be similar in spirit to the present version, but may differ in detail to
+address new problems or concerns.
+
+Each version is given a distinguishing version number.  If the Program
+specifies a version number of this License which applies to it and "any
+later version", you have the option of following the terms and conditions
+either of that version or of any later version published by the Free
+Software Foundation.  If the Program does not specify a version number of
+this License, you may choose any version ever published by the Free Software
+Foundation.
+
+  10. If you wish to incorporate parts of the Program into other free
+programs whose distribution conditions are different, write to the author
+to ask for permission.  For software which is copyrighted by the Free
+Software Foundation, write to the Free Software Foundation; we sometimes
+make exceptions for this.  Our decision will be guided by the two goals
+of preserving the free status of all derivatives of our free software and
+of promoting the sharing and reuse of software generally.
+
+			    NO WARRANTY
+
+  11. BECAUSE THE PROGRAM IS LICENSED FREE OF CHARGE, THERE IS NO WARRANTY
+FOR THE PROGRAM, TO THE EXTENT PERMITTED BY APPLICABLE LAW.  EXCEPT WHEN
+OTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR OTHER PARTIES
+PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY OF ANY KIND, EITHER EXPRESSED
+OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE.  THE ENTIRE RISK AS
+TO THE QUALITY AND PERFORMANCE OF THE PROGRAM IS WITH YOU.  SHOULD THE
+PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF ALL NECESSARY SERVICING,
+REPAIR OR CORRECTION.
+
+  12. IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING
+WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MAY MODIFY AND/OR
+REDISTRIBUTE THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES,
+INCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING
+OUT OF THE USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED
+TO LOSS OF DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY
+YOU OR THIRD PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER
+PROGRAMS), EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE
+POSSIBILITY OF SUCH DAMAGES.
+
+		     END OF TERMS AND CONDITIONS
+
+	    How to Apply These Terms to Your New Programs
+
+  If you develop a new program, and you want it to be of the greatest
+possible use to the public, the best way to achieve this is to make it
+free software which everyone can redistribute and change under these terms.
+
+  To do so, attach the following notices to the program.  It is safest
+to attach them to the start of each source file to most effectively
+convey the exclusion of warranty; and each file should have at least
+the "copyright" line and a pointer to where the full notice is found.
+
+    <one line to give the program's name and a brief idea of what it does.>
+    Copyright (C) <year>  <name of author>
+
+    This program is free software; you can redistribute it and/or modify
+    it under the terms of the GNU General Public License as published by
+    the Free Software Foundation; either version 2 of the License, or
+    (at your option) any later version.
+
+    This program is distributed in the hope that it will be useful,
+    but WITHOUT ANY WARRANTY; without even the implied warranty of
+    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+    GNU General Public License for more details.
+
+    You should have received a copy of the GNU General Public License
+    along with this program; if not, write to the Free Software
+    Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA  02110-1301  USA
+
+
+Also add information on how to contact you by electronic and paper mail.
+
+If the program is interactive, make it output a short notice like this
+when it starts in an interactive mode:
+
+    Gnomovision version 69, Copyright (C) year name of author
+    Gnomovision comes with ABSOLUTELY NO WARRANTY; for details type `show w'.
+    This is free software, and you are welcome to redistribute it
+    under certain conditions; type `show c' for details.
+
+The hypothetical commands `show w' and `show c' should show the appropriate
+parts of the General Public License.  Of course, the commands you use may
+be called something other than `show w' and `show c'; they could even be
+mouse-clicks or menu items--whatever suits your program.
+
+You should also get your employer (if you work as a programmer) or your
+school, if any, to sign a "copyright disclaimer" for the program, if
+necessary.  Here is a sample; alter the names:
+
+  Yoyodyne, Inc., hereby disclaims all copyright interest in the program
+  `Gnomovision' (which makes passes at compilers) written by James Hacker.
+
+  <signature of Ty Coon>, 1 April 1989
+  Ty Coon, President of Vice
+
+This General Public License does not permit incorporating your program into
+proprietary programs.  If your program is a subroutine library, you may
+consider it more useful to permit linking proprietary applications with the
+library.  If this is what you want to do, use the GNU Library General
+Public License instead of this License.
diff --git a/core/Makefile b/core/Makefile
index 07d1c94..f377051 100644
--- a/core/Makefile
+++ b/core/Makefile
@@ -47,6 +47,20 @@
 unique_product_copy_files_destinations :=
 
 # -----------------------------------------------------------------
+# Define rules to copy headers defined in copy_headers.mk
+# If more than one makefile declared a header, print a warning,
+# then copy the last one defined. This matches the previous make
+# behavior.
+$(foreach dest,$(ALL_COPIED_HEADERS), \
+    $(eval _srcs := $(ALL_COPIED_HEADERS.$(dest).SRC)) \
+    $(eval _src := $(word $(words $(_srcs)),$(_srcs))) \
+    $(if $(call streq,$(_src),$(_srcs)),, \
+        $(warning Duplicate header copy: $(dest)) \
+        $(warning Defined in: $(ALL_COPIED_HEADERS.$(dest).MAKEFILE))) \
+    $(eval $(call copy-one-header,$(_src),$(dest))))
+all_copied_headers: $(ALL_COPIED_HEADERS)
+
+# -----------------------------------------------------------------
 # docs/index.html
 ifeq (,$(TARGET_BUILD_APPS))
 gen := $(OUT_DOCS)/index.html
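Note on the header-copy loop above: $(word $(words $(list)),$(list)) is the stock GNU Make way to take the last element of a list, and streq is assumed here to be the single-word string-equality helper from the build system's definitions.mk, so the warning fires only when more than one makefile supplied the same header. A minimal, hypothetical sketch of the idiom:

    _srcs := a/foo.h b/foo.h c/foo.h              # pretend three makefiles copied the same header
    _last := $(word $(words $(_srcs)),$(_srcs))   # $(words) is 3, so this picks c/foo.h
    $(info last definition wins: $(_last))        # matches "copy the last one defined" above
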
@@ -80,9 +94,9 @@
 	$(hide) echo "#" >> $@; \
 	        echo "# BOOTIMAGE_BUILD_PROPERTIES" >> $@; \
 	        echo "#" >> $@;
-	$(hide) echo ro.bootimage.build.date=`date`>>$@
-	$(hide) echo ro.bootimage.build.date.utc=`date +%s`>>$@
-	$(hide) echo ro.bootimage.build.fingerprint="$(BUILD_FINGERPRINT)">>$@
+	$(hide) echo ro.bootimage.build.date=`$(DATE_FROM_FILE)`>>$@
+	$(hide) echo ro.bootimage.build.date.utc=`$(DATE_FROM_FILE) +%s`>>$@
+	$(hide) echo ro.bootimage.build.fingerprint="$(BUILD_FINGERPRINT_FROM_FILE)">>$@
 	$(hide) build/tools/post_process_props.py $@
 
 # -----------------------------------------------------------------
@@ -114,7 +128,7 @@
 BUILD_VERSION_TAGS := $(subst $(space),$(comma),$(sort $(BUILD_VERSION_TAGS)))
 
 # A human-readable string that describes this build in detail.
-build_desc := $(TARGET_PRODUCT)-$(TARGET_BUILD_VARIANT) $(PLATFORM_VERSION) $(BUILD_ID) $(BUILD_NUMBER) $(BUILD_VERSION_TAGS)
+build_desc := $(TARGET_PRODUCT)-$(TARGET_BUILD_VARIANT) $(PLATFORM_VERSION) $(BUILD_ID) $(BUILD_NUMBER_FROM_FILE) $(BUILD_VERSION_TAGS)
 $(intermediate_system_build_prop): PRIVATE_BUILD_DESC := $(build_desc)
 
 # The string used to uniquely identify the combined build and product; used by the OTA server.
@@ -122,7 +136,7 @@
   ifneq ($(filter eng.%,$(BUILD_NUMBER)),)
     # Trim down BUILD_FINGERPRINT: the default BUILD_NUMBER makes it easily exceed
     # the Android system property length limit (PROPERTY_VALUE_MAX=92).
-    BF_BUILD_NUMBER := $(USER)$(shell date +%m%d%H%M)
+    BF_BUILD_NUMBER := $(shell echo $${USER:0:6})$(shell $(DATE) +%m%d%H%M)
   else
     BF_BUILD_NUMBER := $(BUILD_NUMBER)
   endif
@@ -132,6 +146,9 @@
   $(error BUILD_FINGERPRINT cannot contain spaces: "$(BUILD_FINGERPRINT)")
 endif
 
+$(shell mkdir -p $(PRODUCT_OUT) && echo $(BUILD_FINGERPRINT) > $(PRODUCT_OUT)/build_fingerprint.txt)
+BUILD_FINGERPRINT_FROM_FILE := $$(cat $(PRODUCT_OUT)/build_fingerprint.txt)
+
 # The string used to uniquely identify the system build; used by the OTA server.
 # This purposefully excludes any product-specific variables.
 ifeq (,$(strip $(BUILD_THUMBPRINT)))
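The $$(cat ...) assignment above is a deferred-read idiom: with a simply expanded variable, the doubled dollar sign leaves the literal text $(cat ...) in the value, so any recipe that references BUILD_FINGERPRINT_FROM_FILE hands that text to the shell, and the fingerprint is read from build_fingerprint.txt when the rule runs while the recipe text itself stays identical from build to build. A small sketch (path illustrative):

    FP := $$(cat out/target/product/generic/build_fingerprint.txt)
    $(info $(FP))   # prints the literal text: $(cat out/target/product/generic/build_fingerprint.txt)
    # When $(FP) appears in a recipe, the shell performs that command
    # substitution at build time, not at makefile parse time.
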
@@ -155,7 +172,7 @@
 
   # Dev. branches should have DISPLAY_BUILD_NUMBER set
   ifeq "true" "$(DISPLAY_BUILD_NUMBER)"
-    BUILD_DISPLAY_ID := $(BUILD_ID).$(BUILD_NUMBER) $(BUILD_KEYS)
+    BUILD_DISPLAY_ID := $(BUILD_ID).$(BUILD_NUMBER_FROM_FILE) $(BUILD_KEYS)
   else
     BUILD_DISPLAY_ID := $(BUILD_ID) $(BUILD_KEYS)
   endif
@@ -174,6 +191,12 @@
 
 BUILDINFO_SH := build/tools/buildinfo.sh
 
+# TARGET_BUILD_FLAVOR and ro.build.flavor are used only by the test harness to distinguish builds.
+TARGET_BUILD_FLAVOR := $(TARGET_PRODUCT)-$(TARGET_BUILD_VARIANT)
+ifdef SANITIZE_TARGET
+TARGET_BUILD_FLAVOR := $(TARGET_BUILD_FLAVOR)_asan
+endif
+
 ifdef TARGET_SYSTEM_PROP
 system_prop_file := $(TARGET_SYSTEM_PROP)
 else
@@ -191,7 +214,7 @@
 		echo "import /oem/oem.prop $(prop)" >> $@;)
 endif
 	$(hide) TARGET_BUILD_TYPE="$(TARGET_BUILD_VARIANT)" \
-			TARGET_BUILD_FLAVOR="$(TARGET_PRODUCT)-$(TARGET_BUILD_VARIANT)" \
+			TARGET_BUILD_FLAVOR="$(TARGET_BUILD_FLAVOR)" \
 			TARGET_DEVICE="$(TARGET_DEVICE)" \
 			PRODUCT_NAME="$(TARGET_PRODUCT)" \
 			PRODUCT_BRAND="$(PRODUCT_BRAND)" \
@@ -202,7 +225,10 @@
 			PRIVATE_BUILD_DESC="$(PRIVATE_BUILD_DESC)" \
 			BUILD_ID="$(BUILD_ID)" \
 			BUILD_DISPLAY_ID="$(BUILD_DISPLAY_ID)" \
-			BUILD_NUMBER="$(BUILD_NUMBER)" \
+			DATE="$(DATE_FROM_FILE)" \
+			BUILD_NUMBER="$(BUILD_NUMBER_FROM_FILE)" \
+			BOARD_BUILD_SYSTEM_ROOT_IMAGE="$(BOARD_BUILD_SYSTEM_ROOT_IMAGE)" \
+			AB_OTA_UPDATER="$(AB_OTA_UPDATER)" \
 			PLATFORM_VERSION="$(PLATFORM_VERSION)" \
 			PLATFORM_SECURITY_PATCH="$(PLATFORM_SECURITY_PATCH)" \
 			PLATFORM_BASE_OS="$(PLATFORM_BASE_OS)" \
@@ -212,7 +238,7 @@
 			PLATFORM_VERSION_ALL_CODENAMES="$(PLATFORM_VERSION_ALL_CODENAMES)" \
 			BUILD_VERSION_TAGS="$(BUILD_VERSION_TAGS)" \
 			TARGET_BOOTLOADER_BOARD_NAME="$(TARGET_BOOTLOADER_BOARD_NAME)" \
-			BUILD_FINGERPRINT="$(BUILD_FINGERPRINT)" \
+			BUILD_FINGERPRINT="$(BUILD_FINGERPRINT_FROM_FILE)" \
 			$(if $(OEM_THUMBPRINT_PROPERTIES),BUILD_THUMBPRINT="$(BUILD_THUMBPRINT)") \
 			TARGET_BOARD_PLATFORM="$(TARGET_BOARD_PLATFORM)" \
 			TARGET_CPU_ABI_LIST="$(TARGET_CPU_ABI_LIST)" \
@@ -266,9 +292,9 @@
 	@echo Target vendor buildinfo: $@
 	@mkdir -p $(dir $@)
 	$(hide) echo > $@
-	$(hide) echo ro.vendor.build.date=`date`>>$@
-	$(hide) echo ro.vendor.build.date.utc=`date +%s`>>$@
-	$(hide) echo ro.vendor.build.fingerprint="$(BUILD_FINGERPRINT)">>$@
+	$(hide) echo ro.vendor.build.date=`$(DATE_FROM_FILE)`>>$@
+	$(hide) echo ro.vendor.build.date.utc=`$(DATE_FROM_FILE) +%s`>>$@
+	$(hide) echo ro.vendor.build.fingerprint="$(BUILD_FINGERPRINT_FROM_FILE)">>$@
 endif
 
 # ----------------------------------------------------------------
@@ -309,7 +335,12 @@
 	@echo Package stats: $@
 	@mkdir -p $(dir $@)
 	$(hide) rm -f $@
+ifeq ($(PACKAGES_TO_STAT),)
+# Create an empty package stats file if the target builds no jars or apks.
+	$(hide) touch $@
+else
 	$(hide) build/tools/dump-package-stats $^ > $@
+endif
 
 .PHONY: package-stats
 package-stats: $(PACKAGE_STATS_FILE)
@@ -355,20 +386,6 @@
 endif
 
 # -----------------------------------------------------------------
-# module info file
-ifdef CREATE_MODULE_INFO_FILE
-  MODULE_INFO_FILE := $(PRODUCT_OUT)/module-info.txt
-  $(info Generating $(MODULE_INFO_FILE)...)
-  $(shell rm -f $(MODULE_INFO_FILE))
-  $(foreach m,$(ALL_MODULES), \
-    $(shell echo "NAME=\"$(m)\"" \
-	"PATH=\"$(strip $(ALL_MODULES.$(m).PATH))\"" \
-	"TAGS=\"$(strip $(filter-out _%,$(ALL_MODULES.$(m).TAGS)))\"" \
-	"BUILT=\"$(strip $(ALL_MODULES.$(m).BUILT))\"" \
-	"INSTALLED=\"$(strip $(ALL_MODULES.$(m).INSTALLED))\"" >> $(MODULE_INFO_FILE)))
-endif
-
-# -----------------------------------------------------------------
 
 # The dev key is used to sign this package, and as the key required
 # for future OTA packages installed by this system.  Actual product
@@ -452,7 +469,6 @@
 # the ramdisk
 INTERNAL_RAMDISK_FILES := $(filter $(TARGET_ROOT_OUT)/%, \
 	$(ALL_PREBUILT) \
-	$(ALL_COPIED_HEADERS) \
 	$(ALL_GENERATED_SOURCES) \
 	$(ALL_DEFAULT_INSTALLED_MODULES))
 
@@ -481,7 +497,6 @@
 INTERNAL_BOOTIMAGE_ARGS += --ramdisk $(INSTALLED_RAMDISK_TARGET)
 endif
 
-
 INTERNAL_BOOTIMAGE_FILES := $(filter-out --%,$(INTERNAL_BOOTIMAGE_ARGS))
 
 BOARD_KERNEL_CMDLINE := $(strip $(BOARD_KERNEL_CMDLINE))
@@ -499,22 +514,35 @@
   INTERNAL_BOOTIMAGE_ARGS += --pagesize $(BOARD_KERNEL_PAGESIZE)
 endif
 
+INTERNAL_MKBOOTIMG_VERSION_ARGS := \
+    --os_version $(PLATFORM_VERSION) \
+    --os_patch_level $(PLATFORM_SECURITY_PATCH)
+
 INSTALLED_BOOTIMAGE_TARGET := $(PRODUCT_OUT)/boot.img
 
+# BOARD_USES_RECOVERY_AS_BOOT = true requires BOARD_BUILD_SYSTEM_ROOT_IMAGE = true.
+ifeq ($(BOARD_USES_RECOVERY_AS_BOOT),true)
+ifneq ($(BOARD_BUILD_SYSTEM_ROOT_IMAGE),true)
+  $(error BOARD_BUILD_SYSTEM_ROOT_IMAGE must be enabled for BOARD_USES_RECOVERY_AS_BOOT.)
+endif
+endif
+
+# We build recovery as boot image if BOARD_USES_RECOVERY_AS_BOOT is true.
+ifneq ($(BOARD_USES_RECOVERY_AS_BOOT),true)
 ifeq ($(TARGET_BOOTIMAGE_USE_EXT2),true)
 $(error TARGET_BOOTIMAGE_USE_EXT2 is not supported anymore)
 else ifeq (true,$(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_SUPPORTS_BOOT_SIGNER)) # TARGET_BOOTIMAGE_USE_EXT2 != true
 
 $(INSTALLED_BOOTIMAGE_TARGET): $(MKBOOTIMG) $(INTERNAL_BOOTIMAGE_FILES) $(BOOT_SIGNER)
 	$(call pretty,"Target boot image: $@")
-	$(hide) $(MKBOOTIMG) $(INTERNAL_BOOTIMAGE_ARGS) $(BOARD_MKBOOTIMG_ARGS) --output $@
+	$(hide) $(MKBOOTIMG) $(INTERNAL_BOOTIMAGE_ARGS) $(INTERNAL_MKBOOTIMG_VERSION_ARGS) $(BOARD_MKBOOTIMG_ARGS) --output $@
 	$(BOOT_SIGNER) /boot $@ $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_VERITY_SIGNING_KEY).pk8 $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_VERITY_SIGNING_KEY).x509.pem $@
 	$(hide) $(call assert-max-image-size,$@,$(BOARD_BOOTIMAGE_PARTITION_SIZE))
 
 .PHONY: bootimage-nodeps
 bootimage-nodeps: $(MKBOOTIMG) $(BOOT_SIGNER)
 	@echo "make $@: ignoring dependencies"
-	$(hide) $(MKBOOTIMG) $(INTERNAL_BOOTIMAGE_ARGS) $(BOARD_MKBOOTIMG_ARGS) --output $(INSTALLED_BOOTIMAGE_TARGET)
+	$(hide) $(MKBOOTIMG) $(INTERNAL_BOOTIMAGE_ARGS) $(INTERNAL_MKBOOTIMG_VERSION_ARGS) $(BOARD_MKBOOTIMG_ARGS) --output $(INSTALLED_BOOTIMAGE_TARGET)
 	$(BOOT_SIGNER) /boot $(INSTALLED_BOOTIMAGE_TARGET) $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_VERITY_SIGNING_KEY).pk8 $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_VERITY_SIGNING_KEY).x509.pem $(INSTALLED_BOOTIMAGE_TARGET)
 	$(hide) $(call assert-max-image-size,$(INSTALLED_BOOTIMAGE_TARGET),$(BOARD_BOOTIMAGE_PARTITION_SIZE))
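INTERNAL_MKBOOTIMG_VERSION_ARGS, threaded into each mkbootimg call in this file, stamps the platform version and security patch level into the boot image header. Roughly what one invocation expands to (paths and values are placeholders, not taken from a real build):

    mkbootimg --kernel out/target/product/<device>/kernel \
              --ramdisk out/target/product/<device>/ramdisk.img \
              --os_version 7.0 --os_patch_level 2016-07-05 \
              --output out/target/product/<device>/boot.img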
 
@@ -522,14 +550,14 @@
 
 $(INSTALLED_BOOTIMAGE_TARGET): $(MKBOOTIMG) $(INTERNAL_BOOTIMAGE_FILES) $(VBOOT_SIGNER)
 	$(call pretty,"Target boot image: $@")
-	$(hide) $(MKBOOTIMG) $(INTERNAL_BOOTIMAGE_ARGS) $(BOARD_MKBOOTIMG_ARGS) --output $@.unsigned
+	$(hide) $(MKBOOTIMG) $(INTERNAL_BOOTIMAGE_ARGS) $(INTERNAL_MKBOOTIMG_VERSION_ARGS) $(BOARD_MKBOOTIMG_ARGS) --output $@.unsigned
 	$(VBOOT_SIGNER) $(FUTILITY) $@.unsigned $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_VBOOT_SIGNING_KEY).vbpubk $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_VBOOT_SIGNING_KEY).vbprivk $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_VBOOT_SIGNING_SUBKEY).vbprivk $@.keyblock $@
 	$(hide) $(call assert-max-image-size,$@,$(BOARD_BOOTIMAGE_PARTITION_SIZE))
 
 .PHONY: bootimage-nodeps
 bootimage-nodeps: $(MKBOOTIMG) $(VBOOT_SIGNER)
 	@echo "make $@: ignoring dependencies"
-	$(hide) $(MKBOOTIMG) $(INTERNAL_BOOTIMAGE_ARGS) $(BOARD_MKBOOTIMG_ARGS) --output $(INSTALLED_BOOTIMAGE_TARGET).unsigned
+	$(hide) $(MKBOOTIMG) $(INTERNAL_BOOTIMAGE_ARGS) $(INTERNAL_MKBOOTIMG_VERSION_ARGS) $(BOARD_MKBOOTIMG_ARGS) --output $(INSTALLED_BOOTIMAGE_TARGET).unsigned
 	$(VBOOT_SIGNER) $(FUTILITY) $(INSTALLED_BOOTIMAGE_TARGET).unsigned $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_VBOOT_SIGNING_KEY).vbpubk $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_VBOOT_SIGNING_KEY).vbprivk $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_VBOOT_SIGNING_SUBKEY).vbprivk $(INSTALLED_BOOTIMAGE_TARGET).keyblock $(INSTALLED_BOOTIMAGE_TARGET)
 	$(hide) $(call assert-max-image-size,$(INSTALLED_BOOTIMAGE_TARGET),$(BOARD_BOOTIMAGE_PARTITION_SIZE))
 
@@ -537,16 +565,17 @@
 
 $(INSTALLED_BOOTIMAGE_TARGET): $(MKBOOTIMG) $(INTERNAL_BOOTIMAGE_FILES)
 	$(call pretty,"Target boot image: $@")
-	$(hide) $(MKBOOTIMG) $(INTERNAL_BOOTIMAGE_ARGS) $(BOARD_MKBOOTIMG_ARGS) --output $@
+	$(hide) $(MKBOOTIMG) $(INTERNAL_BOOTIMAGE_ARGS) $(INTERNAL_MKBOOTIMG_VERSION_ARGS) $(BOARD_MKBOOTIMG_ARGS) --output $@
 	$(hide) $(call assert-max-image-size,$@,$(BOARD_BOOTIMAGE_PARTITION_SIZE))
 
 .PHONY: bootimage-nodeps
 bootimage-nodeps: $(MKBOOTIMG)
 	@echo "make $@: ignoring dependencies"
-	$(hide) $(MKBOOTIMG) $(INTERNAL_BOOTIMAGE_ARGS) $(BOARD_MKBOOTIMG_ARGS) --output $(INSTALLED_BOOTIMAGE_TARGET)
+	$(hide) $(MKBOOTIMG) $(INTERNAL_BOOTIMAGE_ARGS) $(INTERNAL_MKBOOTIMG_VERSION_ARGS) $(BOARD_MKBOOTIMG_ARGS) --output $(INSTALLED_BOOTIMAGE_TARGET)
 	$(hide) $(call assert-max-image-size,$(INSTALLED_BOOTIMAGE_TARGET),$(BOARD_BOOTIMAGE_PARTITION_SIZE))
 
 endif # TARGET_BOOTIMAGE_USE_EXT2
+endif # BOARD_USES_RECOVERY_AS_BOOT
 
 else	# TARGET_NO_KERNEL
 # HACK: The top-level targets depend on the bootimage.  Not all targets
@@ -648,7 +677,7 @@
 # make the target NOTICE files depend on this particular file too, which will
 # then be in the right directory for the find in combine-notice-files to work.
 $(kernel_notice_file): \
-	    prebuilts/qemu-kernel/arm/LINUX_KERNEL_COPYING \
+	    $(BUILD_SYSTEM)/LINUX_KERNEL_COPYING \
 	    | $(ACP)
 	@echo Copying: $@
 	$(hide) mkdir -p $(dir $@)
@@ -663,10 +692,24 @@
 # before the rules that use that variable to build the image.
 ALL_DEFAULT_INSTALLED_MODULES += $(TARGET_OUT_ETC)/security/otacerts.zip
 $(TARGET_OUT_ETC)/security/otacerts.zip: KEY_CERT_PAIR := $(DEFAULT_KEY_CERT_PAIR)
-$(TARGET_OUT_ETC)/security/otacerts.zip: $(addsuffix .x509.pem,$(DEFAULT_KEY_CERT_PAIR))
+$(TARGET_OUT_ETC)/security/otacerts.zip: $(addsuffix .x509.pem,$(DEFAULT_KEY_CERT_PAIR)) | $(ZIPTIME)
 	$(hide) rm -f $@
 	$(hide) mkdir -p $(dir $@)
-	$(hide) zip -qj $@ $<
+	$(hide) zip -qjX $@ $<
+	$(remove-timestamps-from-package)
+
+# Carry the public key for update_engine if it's a non-Brillo target that
+# uses the AB updater. We use the same key as otacerts but in RSA public key
+# format.
+ifeq ($(AB_OTA_UPDATER),true)
+ifeq ($(BRILLO),)
+ALL_DEFAULT_INSTALLED_MODULES += $(TARGET_OUT_ETC)/update_engine/update-payload-key.pub.pem
+$(TARGET_OUT_ETC)/update_engine/update-payload-key.pub.pem: $(addsuffix .x509.pem,$(DEFAULT_KEY_CERT_PAIR))
+	$(hide) rm -f $@
+	$(hide) mkdir -p $(dir $@)
+	$(hide) openssl x509 -pubkey -noout -in $< > $@
+endif
+endif
 
 .PHONY: otacerts
 otacerts: $(TARGET_OUT_ETC)/security/otacerts.zip
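The update-payload-key rule above simply re-encodes the first OTA certificate as a PEM public key so update_engine can verify A/B payload signatures with the same key that signs OTA packages. Standalone equivalent, assuming the default test key (the actual path depends on DEFAULT_KEY_CERT_PAIR):

    openssl x509 -pubkey -noout \
        -in build/target/product/security/testkey.x509.pem \
        > update-payload-key.pub.pem
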
@@ -711,11 +754,10 @@
 endif
 endif
 
-ifeq ($(BOARD_SYSTEMIMAGE_FILE_SYSTEM_TYPE),squashfs)
-INTERNAL_USERIMAGES_DEPS += $(MAKE_SQUASHFS) $(MKSQUASHFSUSERIMG) $(IMG2SIMG)
+ifneq (true,$(TARGET_USERIMAGES_SPARSE_SQUASHFS_DISABLED))
+  INTERNAL_USERIMAGES_SPARSE_SQUASHFS_FLAG := -s
 endif
-
-ifeq ($(BOARD_SYSTEMIMAGE_FILE_SYSTEM_TYPE),squashfs)
+ifneq ($(filter $(BOARD_VENDORIMAGE_FILE_SYSTEM_TYPE) $(BOARD_SYSTEMIMAGE_FILE_SYSTEM_TYPE),squashfs),)
 INTERNAL_USERIMAGES_DEPS += $(MAKE_SQUASHFS) $(MKSQUASHFSUSERIMG) $(IMG2SIMG)
 endif
 
@@ -723,11 +765,16 @@
 
 ifeq (true,$(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_SUPPORTS_VERITY))
 INTERNAL_USERIMAGES_DEPS += $(BUILD_VERITY_TREE) $(APPEND2SIMG) $(VERITY_SIGNER)
+ifeq (true,$(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_SUPPORTS_VERITY_FEC))
+INTERNAL_USERIMAGES_DEPS += $(FEC)
+endif
 endif
 
-SELINUX_FC := $(TARGET_ROOT_OUT)/file_contexts
+SELINUX_FC := $(TARGET_ROOT_OUT)/file_contexts.bin
 INTERNAL_USERIMAGES_DEPS += $(SELINUX_FC)
 
+INTERNAL_USERIMAGES_DEPS += $(BLK_ALLOC_TO_BASE_FS)
+
 # $(1): the path of the output dictionary file
 # $(2): additional "key=value" pairs to append to the dictionary file.
 define generate-userimage-prop-dictionary
@@ -738,6 +785,7 @@
 $(if $(BOARD_HAS_EXT4_RESERVED_BLOCKS),$(hide) echo "has_ext4_reserved_blocks=$(BOARD_HAS_EXT4_RESERVED_BLOCKS)" >> $(1))
 $(if $(BOARD_SYSTEMIMAGE_SQUASHFS_COMPRESSOR),$(hide) echo "system_squashfs_compressor=$(BOARD_SYSTEMIMAGE_SQUASHFS_COMPRESSOR)" >> $(1))
 $(if $(BOARD_SYSTEMIMAGE_SQUASHFS_COMPRESSOR_OPT),$(hide) echo "system_squashfs_compressor_opt=$(BOARD_SYSTEMIMAGE_SQUASHFS_COMPRESSOR_OPT)" >> $(1))
+$(if $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_SYSTEM_BASE_FS_PATH),$(hide) echo "system_base_fs_file=$(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_SYSTEM_BASE_FS_PATH)" >> $(1))
 $(if $(BOARD_USERDATAIMAGE_FILE_SYSTEM_TYPE),$(hide) echo "userdata_fs_type=$(BOARD_USERDATAIMAGE_FILE_SYSTEM_TYPE)" >> $(1))
 $(if $(BOARD_USERDATAIMAGE_PARTITION_SIZE),$(hide) echo "userdata_size=$(BOARD_USERDATAIMAGE_PARTITION_SIZE)" >> $(1))
 $(if $(BOARD_CACHEIMAGE_FILE_SYSTEM_TYPE),$(hide) echo "cache_fs_type=$(BOARD_CACHEIMAGE_FILE_SYSTEM_TYPE)" >> $(1))
@@ -745,14 +793,19 @@
 $(if $(BOARD_VENDORIMAGE_FILE_SYSTEM_TYPE),$(hide) echo "vendor_fs_type=$(BOARD_VENDORIMAGE_FILE_SYSTEM_TYPE)" >> $(1))
 $(if $(BOARD_VENDORIMAGE_PARTITION_SIZE),$(hide) echo "vendor_size=$(BOARD_VENDORIMAGE_PARTITION_SIZE)" >> $(1))
 $(if $(BOARD_VENDORIMAGE_JOURNAL_SIZE),$(hide) echo "vendor_journal_size=$(BOARD_VENDORIMAGE_JOURNAL_SIZE)" >> $(1))
+$(if $(BOARD_VENDORIMAGE_SQUASHFS_COMPRESSOR),$(hide) echo "vendor_squashfs_compressor=$(BOARD_VENDORIMAGE_SQUASHFS_COMPRESSOR)" >> $(1))
+$(if $(BOARD_VENDORIMAGE_SQUASHFS_COMPRESSOR_OPT),$(hide) echo "vendor_squashfs_compressor_opt=$(BOARD_VENDORIMAGE_SQUASHFS_COMPRESSOR_OPT)" >> $(1))
+$(if $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_VENDOR_BASE_FS_PATH),$(hide) echo "vendor_base_fs_file=$(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_VENDOR_BASE_FS_PATH)" >> $(1))
 $(if $(BOARD_OEMIMAGE_PARTITION_SIZE),$(hide) echo "oem_size=$(BOARD_OEMIMAGE_PARTITION_SIZE)" >> $(1))
 $(if $(BOARD_OEMIMAGE_JOURNAL_SIZE),$(hide) echo "oem_journal_size=$(BOARD_OEMIMAGE_JOURNAL_SIZE)" >> $(1))
 $(if $(INTERNAL_USERIMAGES_SPARSE_EXT_FLAG),$(hide) echo "extfs_sparse_flag=$(INTERNAL_USERIMAGES_SPARSE_EXT_FLAG)" >> $(1))
+$(if $(INTERNAL_USERIMAGES_SPARSE_SQUASHFS_FLAG),$(hide) echo "squashfs_sparse_flag=$(INTERNAL_USERIMAGES_SPARSE_SQUASHFS_FLAG)" >> $(1))
 $(hide) echo "selinux_fc=$(SELINUX_FC)" >> $(1)
 $(if $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_SUPPORTS_BOOT_SIGNER),$(hide) echo "boot_signer=$(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_SUPPORTS_BOOT_SIGNER)" >> $(1))
 $(if $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_SUPPORTS_VERITY),$(hide) echo "verity=$(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_SUPPORTS_VERITY)" >> $(1))
 $(if $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_SUPPORTS_VERITY),$(hide) echo "verity_key=$(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_VERITY_SIGNING_KEY)" >> $(1))
-$(if $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_SUPPORTS_VERITY),$(hide) echo "verity_signer_cmd=$(VERITY_SIGNER)" >> $(1))
+$(if $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_SUPPORTS_VERITY),$(hide) echo "verity_signer_cmd=$(notdir $(VERITY_SIGNER))" >> $(1))
+$(if $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_SUPPORTS_VERITY_FEC),$(hide) echo "verity_fec=$(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_SUPPORTS_VERITY_FEC)" >> $(1))
 $(if $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_SYSTEM_VERITY_PARTITION),$(hide) echo "system_verity_block_device=$(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_SYSTEM_VERITY_PARTITION)" >> $(1))
 $(if $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_VENDOR_VERITY_PARTITION),$(hide) echo "vendor_verity_block_device=$(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_VENDOR_VERITY_PARTITION)" >> $(1))
 $(if $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_SUPPORTS_VBOOT),$(hide) echo "vboot=$(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_SUPPORTS_VBOOT)" >> $(1))
@@ -760,6 +813,8 @@
 $(if $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_SUPPORTS_VBOOT),$(hide) echo "vboot_subkey=$(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_VBOOT_SIGNING_SUBKEY)" >> $(1))
 $(if $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_SUPPORTS_VBOOT),$(hide) echo "futility=$(FUTILITY)" >> $(1))
 $(if $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_SUPPORTS_VBOOT),$(hide) echo "vboot_signer_cmd=$(VBOOT_SIGNER)" >> $(1))
+$(if $(filter true,$(BOARD_USES_RECOVERY_AS_BOOT)),\
+    $(hide) echo "recovery_as_boot=true" >> $(1))
 $(if $(filter true,$(BOARD_BUILD_SYSTEM_ROOT_IMAGE)),\
     $(hide) echo "system_root_image=true" >> $(1);\
     echo "ramdisk_dir=$(TARGET_ROOT_OUT)" >> $(1))
@@ -769,7 +824,8 @@
 # -----------------------------------------------------------------
 # Recovery image
 
-ifdef INSTALLED_RECOVERYIMAGE_TARGET
+# Recovery image exists if we are building recovery, or building recovery as boot.
+ifneq (,$(INSTALLED_RECOVERYIMAGE_TARGET)$(filter true,$(BOARD_USES_RECOVERY_AS_BOOT)))
 
 INTERNAL_RECOVERYIMAGE_FILES := $(filter $(TARGET_RECOVERY_OUT)/%, \
     $(ALL_DEFAULT_INSTALLED_MODULES))
@@ -808,18 +864,37 @@
 recovery_font := $(call include-path-for, recovery)/fonts/12x22.png
 endif
 
-recovery_resources_private := $(strip $(wildcard $(TARGET_DEVICE_DIR)/recovery/res))
+ifndef TARGET_PRIVATE_RES_DIRS
+TARGET_PRIVATE_RES_DIRS := $(wildcard $(TARGET_DEVICE_DIR)/recovery/res)
+endif
 recovery_resource_deps := $(shell find $(recovery_resources_common) \
-  $(recovery_resources_private) -type f)
+  $(TARGET_PRIVATE_RES_DIRS) -type f)
 ifdef TARGET_RECOVERY_FSTAB
 recovery_fstab := $(TARGET_RECOVERY_FSTAB)
 else
 recovery_fstab := $(strip $(wildcard $(TARGET_DEVICE_DIR)/recovery.fstab))
 endif
+
+# Prior to A/B update, we used to have:
+#   boot.img + recovery-from-boot.p + recovery-resource.dat = recovery.img.
+# recovery-resource.dat is needed only if we carry a patch of the boot and
+# recovery images and invoke install-recovery.sh on the first boot after an
+# OTA update.
+#
+# We no longer need that if one of the following conditions holds:
+#   a) We carry a full copy of the recovery image
+#      (BOARD_USES_FULL_RECOVERY_IMAGE = true);
+#   b) We build a single image that contains boot and recovery both
+#      (BOARD_USES_RECOVERY_AS_BOOT = true).
+
+ifeq (,$(filter true, $(BOARD_USES_FULL_RECOVERY_IMAGE) $(BOARD_USES_RECOVERY_AS_BOOT)))
 # Named '.dat' so we don't attempt to use imgdiff for patching it.
 RECOVERY_RESOURCE_ZIP := $(TARGET_OUT)/etc/recovery-resource.dat
+else
+RECOVERY_RESOURCE_ZIP :=
+endif
 
-ifeq ($(recovery_resources_private),)
+ifeq ($(TARGET_PRIVATE_RES_DIRS),)
   $(info No private recovery resources for TARGET_DEVICE $(TARGET_DEVICE))
 endif
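The ifeq (,$(filter true, ...)) test above is the usual Make spelling of "none of these flags is true": $(filter true,...) keeps only the words equal to true, so the result is empty exactly when neither BOARD_USES_FULL_RECOVERY_IMAGE nor BOARD_USES_RECOVERY_AS_BOOT is set. A tiny sketch with made-up flags:

    A := false
    B := true
    ifneq (,$(filter true,$(A) $(B)))
      $(info at least one flag is true)   # printed, because $(filter true,false true) is "true"
    endif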
 
@@ -867,9 +942,9 @@
 define build-recoveryimage-target
   @echo ----- Making recovery image ------
   $(hide) mkdir -p $(TARGET_RECOVERY_OUT)
-  $(hide) mkdir -p $(TARGET_RECOVERY_ROOT_OUT)/etc $(TARGET_RECOVERY_ROOT_OUT)/tmp
+  $(hide) mkdir -p $(TARGET_RECOVERY_ROOT_OUT)/etc $(TARGET_RECOVERY_ROOT_OUT)/sdcard $(TARGET_RECOVERY_ROOT_OUT)/tmp
   @echo Copying baseline ramdisk...
-  $(hide) rsync -a $(TARGET_ROOT_OUT) $(TARGET_RECOVERY_OUT) # "cp -Rf" fails to overwrite broken symlinks on Mac.
+  $(hide) rsync -a --exclude=etc --exclude=sdcard $(TARGET_ROOT_OUT) $(TARGET_RECOVERY_OUT) # "cp -Rf" fails to overwrite broken symlinks on Mac.
   @echo Modifying ramdisk contents...
   $(hide) rm -f $(TARGET_RECOVERY_ROOT_OUT)/init*.rc
   $(hide) cp -f $(recovery_initrc) $(TARGET_RECOVERY_ROOT_OUT)/
@@ -880,25 +955,50 @@
   $(hide) rm -rf $(TARGET_RECOVERY_ROOT_OUT)/res/*
   $(hide) cp -rf $(recovery_resources_common)/* $(TARGET_RECOVERY_ROOT_OUT)/res
   $(hide) cp -f $(recovery_font) $(TARGET_RECOVERY_ROOT_OUT)/res/images/font.png
-  $(hide) $(foreach item,$(recovery_resources_private), \
-    cp -rf $(item) $(TARGET_RECOVERY_ROOT_OUT)/)
+  $(hide) $(foreach item,$(TARGET_PRIVATE_RES_DIRS), \
+    cp -rf $(item) $(TARGET_RECOVERY_ROOT_OUT)/$(newline))
   $(hide) $(foreach item,$(recovery_fstab), \
     cp -f $(item) $(TARGET_RECOVERY_ROOT_OUT)/etc/recovery.fstab)
   $(hide) cp $(RECOVERY_INSTALL_OTA_KEYS) $(TARGET_RECOVERY_ROOT_OUT)/res/keys
   $(hide) cat $(INSTALLED_DEFAULT_PROP_TARGET) $(recovery_build_prop) \
           > $(TARGET_RECOVERY_ROOT_OUT)/default.prop
+  $(if $(filter true,$(BOARD_BUILD_SYSTEM_ROOT_IMAGE)), \
+    $(hide) mkdir -p $(TARGET_RECOVERY_ROOT_OUT)/system_root; \
+            rm -rf $(TARGET_RECOVERY_ROOT_OUT)/system; \
+            ln -sf /system_root/system $(TARGET_RECOVERY_ROOT_OUT)/system) # Mount the system_root_image to /system_root and symlink /system.
   $(hide) $(MKBOOTFS) -d $(TARGET_OUT) $(TARGET_RECOVERY_ROOT_OUT) | $(MINIGZIP) > $(recovery_ramdisk)
   $(if $(filter true,$(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_SUPPORTS_VBOOT)), \
-    $(hide) $(MKBOOTIMG) $(INTERNAL_RECOVERYIMAGE_ARGS) $(BOARD_MKBOOTIMG_ARGS) --output $(1).unsigned, \
-    $(hide) $(MKBOOTIMG) $(INTERNAL_RECOVERYIMAGE_ARGS) $(BOARD_MKBOOTIMG_ARGS) --output $(1) --id > $(RECOVERYIMAGE_ID_FILE))
+    $(hide) $(MKBOOTIMG) $(INTERNAL_RECOVERYIMAGE_ARGS) $(INTERNAL_MKBOOTIMG_VERSION_ARGS) $(BOARD_MKBOOTIMG_ARGS) --output $(1).unsigned, \
+    $(hide) $(MKBOOTIMG) $(INTERNAL_RECOVERYIMAGE_ARGS) $(INTERNAL_MKBOOTIMG_VERSION_ARGS) $(BOARD_MKBOOTIMG_ARGS) --output $(1) --id > $(RECOVERYIMAGE_ID_FILE))
   $(if $(filter true,$(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_SUPPORTS_BOOT_SIGNER)),\
     $(BOOT_SIGNER) /recovery $(1) $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_VERITY_SIGNING_KEY).pk8 $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_VERITY_SIGNING_KEY).x509.pem $(1))
   $(if $(filter true,$(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_SUPPORTS_VBOOT)), \
     $(VBOOT_SIGNER) $(FUTILITY) $(1).unsigned $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_VBOOT_SIGNING_KEY).vbpubk $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_VBOOT_SIGNING_KEY).vbprivk $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_VBOOT_SIGNING_SUBKEY).vbprivk $(1).keyblock $(1))
-  $(hide) $(call assert-max-image-size,$(1),$(BOARD_RECOVERYIMAGE_PARTITION_SIZE))
+  $(if $(filter true,$(BOARD_USES_RECOVERY_AS_BOOT)), \
+    $(hide) $(call assert-max-image-size,$(1),$(BOARD_BOOTIMAGE_PARTITION_SIZE)), \
+    $(hide) $(call assert-max-image-size,$(1),$(BOARD_RECOVERYIMAGE_PARTITION_SIZE)))
   @echo ----- Made recovery image: $(1) --------
 endef
 
+ifeq ($(BOARD_USES_RECOVERY_AS_BOOT),true)
+ifeq (true,$(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_SUPPORTS_BOOT_SIGNER))
+$(INSTALLED_BOOTIMAGE_TARGET) : $(BOOT_SIGNER)
+endif
+ifeq (true,$(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_SUPPORTS_VBOOT))
+$(INSTALLED_BOOTIMAGE_TARGET) : $(VBOOT_SIGNER)
+endif
+$(INSTALLED_BOOTIMAGE_TARGET): $(MKBOOTFS) $(MKBOOTIMG) $(MINIGZIP) \
+		$(INSTALLED_RAMDISK_TARGET) \
+		$(INTERNAL_RECOVERYIMAGE_FILES) \
+		$(recovery_initrc) $(recovery_sepolicy) $(recovery_kernel) \
+		$(INSTALLED_2NDBOOTLOADER_TARGET) \
+		$(recovery_build_prop) $(recovery_resource_deps) \
+		$(recovery_fstab) \
+		$(RECOVERY_INSTALL_OTA_KEYS)
+		$(call pretty,"Target boot image from recovery: $@")
+		$(call build-recoveryimage-target, $@)
+endif
+
 $(INSTALLED_RECOVERYIMAGE_TARGET): $(MKBOOTFS) $(MKBOOTIMG) $(MINIGZIP) \
 		$(INSTALLED_RAMDISK_TARGET) \
 		$(INSTALLED_BOOTIMAGE_TARGET) \
@@ -910,16 +1010,19 @@
 		$(RECOVERY_INSTALL_OTA_KEYS)
 		$(call build-recoveryimage-target, $@)
 
-$(RECOVERY_RESOURCE_ZIP): $(INSTALLED_RECOVERYIMAGE_TARGET)
+ifdef RECOVERY_RESOURCE_ZIP
+$(RECOVERY_RESOURCE_ZIP): $(INSTALLED_RECOVERYIMAGE_TARGET) | $(ZIPTIME)
 	$(hide) mkdir -p $(dir $@)
-	$(hide) find $(TARGET_RECOVERY_ROOT_OUT)/res -type f | sort | zip -0qrj $@ -@
+	$(hide) find $(TARGET_RECOVERY_ROOT_OUT)/res -type f | sort | zip -0qrjX $@ -@
+	$(remove-timestamps-from-package)
+endif
 
 .PHONY: recoveryimage-nodeps
 recoveryimage-nodeps:
 	@echo "make $@: ignoring dependencies"
 	$(call build-recoveryimage-target, $(INSTALLED_RECOVERYIMAGE_TARGET))
 
-else
+else # INSTALLED_RECOVERYIMAGE_TARGET not defined
 RECOVERY_RESOURCE_ZIP :=
 endif
 
@@ -944,7 +1047,6 @@
 
 INTERNAL_SYSTEMIMAGE_FILES := $(filter $(TARGET_OUT)/%, \
     $(ALL_PREBUILT) \
-    $(ALL_COPIED_HEADERS) \
     $(ALL_GENERATED_SOURCES) \
     $(ALL_DEFAULT_INSTALLED_MODULES) \
     $(PDK_FUSION_SYSIMG_FILES) \
@@ -1026,6 +1128,7 @@
 # we can see how big it's going to be, and include that in the system
 # image size check calculation.
 ifneq ($(INSTALLED_RECOVERYIMAGE_TARGET),)
+ifneq ($(BOARD_USES_FULL_RECOVERY_IMAGE),true)
 intermediates := $(call intermediates-dir-for,PACKAGING,recovery_patch)
 RECOVERY_FROM_BOOT_PATCH := $(intermediates)/recovery_from_boot.p
 $(RECOVERY_FROM_BOOT_PATCH): $(INSTALLED_RECOVERYIMAGE_TARGET) \
@@ -1035,6 +1138,9 @@
 	@echo "Construct recovery from boot"
 	mkdir -p $(dir $@)
 	PATH=$(HOST_OUT_EXECUTABLES):$$PATH $(HOST_OUT_EXECUTABLES)/imgdiff $(INSTALLED_BOOTIMAGE_TARGET) $(INSTALLED_RECOVERYIMAGE_TARGET) $@
+else # $(BOARD_USES_FULL_RECOVERY_IMAGE) == true
+RECOVERY_FROM_BOOT_PATCH := $(INSTALLED_RECOVERYIMAGE_TARGET)
+endif
 endif
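recovery_from_boot.p is the binary patch that install-recovery.sh applies on the first boot after an OTA to rebuild recovery.img from boot.img, per the comment earlier in this file; with BOARD_USES_FULL_RECOVERY_IMAGE the full image is carried instead, so RECOVERY_FROM_BOOT_PATCH simply points at it. Standalone form of the rule's command (paths illustrative):

    imgdiff out/target/product/<device>/boot.img \
            out/target/product/<device>/recovery.img \
            out/target/product/<device>/obj/PACKAGING/recovery_patch_intermediates/recovery_from_boot.p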
 
 
@@ -1086,32 +1192,72 @@
 .PHONY: stnod
 stnod: systemtarball-nodeps
 
-#######
+# -----------------------------------------------------------------
 ## platform.zip: system, plus other files to be used in PDK fusion build,
 ## in a zip file
 ##
 ## PDK_PLATFORM_ZIP_PRODUCT_BINARIES is used to store specified files to platform.zip.
 ## The variable will be typically set from BoardConfig.mk.
 ## Files under out dir will be rejected to prevent possible conflicts with other rules.
+pdk_odex_javalibs := $(strip $(foreach m,$(DEXPREOPT.MODULES.JAVA_LIBRARIES),\
+  $(if $(filter $(DEXPREOPT.$(m).INSTALLED),$(ALL_DEFAULT_INSTALLED_MODULES)),$(m))))
+pdk_odex_apps := $(strip $(foreach m,$(DEXPREOPT.MODULES.APPS),\
+  $(if $(filter $(DEXPREOPT.$(m).INSTALLED),$(ALL_DEFAULT_INSTALLED_MODULES)),$(m))))
+pdk_classes_dex := $(strip \
+  $(foreach m,$(pdk_odex_javalibs),$(call intermediates-dir-for,JAVA_LIBRARIES,$(m),,COMMON)/javalib.jar) \
+  $(foreach m,$(pdk_odex_apps),$(call intermediates-dir-for,APPS,$(m))/package.dex.apk))
+
+pdk_odex_config_mk := $(PRODUCT_OUT)/pdk_dexpreopt_config.mk
+$(pdk_odex_config_mk): PRIVATE_JAVA_LIBRARIES := $(pdk_odex_javalibs)
+$(pdk_odex_config_mk): PRIVATE_APPS := $(pdk_odex_apps)
+$(pdk_odex_config_mk) :
+	@echo "PDK odex config makefile: $@"
+	$(hide) mkdir -p $(dir $@)
+	$(hide) echo "# Auto-generated. Do not modify." > $@
+	$(hide) echo "PDK.DEXPREOPT.JAVA_LIBRARIES:=$(PRIVATE_JAVA_LIBRARIES)" >> $@
+	$(hide) echo "PDK.DEXPREOPT.APPS:=$(PRIVATE_APPS)" >> $@
+	$(foreach m,$(PRIVATE_JAVA_LIBRARIES),\
+	  $(hide) echo "PDK.DEXPREOPT.$(m).SRC:=$(patsubst $(OUT_DIR)/%,%,$(call intermediates-dir-for,JAVA_LIBRARIES,$(m),,COMMON)/javalib.jar)" >> $@$(newline)\
+	  $(hide) echo "PDK.DEXPREOPT.$(m).DEX_PREOPT:=$(DEXPREOPT.$(m).DEX_PREOPT)" >> $@$(newline)\
+	  $(hide) echo "PDK.DEXPREOPT.$(m).MULTILIB:=$(DEXPREOPT.$(m).MULTILIB)" >> $@$(newline)\
+	  $(hide) echo "PDK.DEXPREOPT.$(m).DEX_PREOPT_FLAGS:=$(DEXPREOPT.$(m).DEX_PREOPT_FLAGS)" >> $@$(newline)\
+	  )
+	$(foreach m,$(PRIVATE_APPS),\
+	  $(hide) echo "PDK.DEXPREOPT.$(m).SRC:=$(patsubst $(OUT_DIR)/%,%,$(call intermediates-dir-for,APPS,$(m))/package.dex.apk)" >> $@$(newline)\
+	  $(hide) echo "PDK.DEXPREOPT.$(m).DEX_PREOPT:=$(DEXPREOPT.$(m).DEX_PREOPT)" >> $@$(newline)\
+	  $(hide) echo "PDK.DEXPREOPT.$(m).MULTILIB:=$(DEXPREOPT.$(m).MULTILIB)" >> $@$(newline)\
+	  $(hide) echo "PDK.DEXPREOPT.$(m).DEX_PREOPT_FLAGS:=$(DEXPREOPT.$(m).DEX_PREOPT_FLAGS)" >> $@$(newline)\
+	  $(hide) echo "PDK.DEXPREOPT.$(m).PRIVILEGED_MODULE:=$(DEXPREOPT.$(m).PRIVILEGED_MODULE)" >> $@$(newline)\
+	  $(hide) echo "PDK.DEXPREOPT.$(m).PROPRIETARY_MODULE:=$(DEXPREOPT.$(m).PROPRIETARY_MODULE)" >> $@$(newline)\
+	  $(hide) echo "PDK.DEXPREOPT.$(m).TARGET_ARCH:=$(DEXPREOPT.$(m).TARGET_ARCH)" >> $@$(newline)\
+	  $(hide) echo "PDK.DEXPREOPT.$(m).STRIPPED_SRC:=$(patsubst $(PRODUCT_OUT)/%,%,$(DEXPREOPT.$(m).INSTALLED_STRIPPED))" >> $@$(newline)\
+	  )
+
 PDK_PLATFORM_ZIP_PRODUCT_BINARIES := $(filter-out $(OUT_DIR)/%,$(PDK_PLATFORM_ZIP_PRODUCT_BINARIES))
 INSTALLED_PLATFORM_ZIP := $(PRODUCT_OUT)/platform.zip
-$(INSTALLED_PLATFORM_ZIP) : $(INTERNAL_SYSTEMIMAGE_FILES)
+
+$(INSTALLED_PLATFORM_ZIP): PRIVATE_DEX_FILES := $(pdk_classes_dex)
+$(INSTALLED_PLATFORM_ZIP): PRIVATE_ODEX_CONFIG := $(pdk_odex_config_mk)
+$(INSTALLED_PLATFORM_ZIP) : $(INTERNAL_SYSTEMIMAGE_FILES) $(pdk_odex_config_mk)
 	$(call pretty,"Platform zip package: $(INSTALLED_PLATFORM_ZIP)")
 	$(hide) rm -f $@
-	$(hide) cd $(dir $@) && zip -qry $(notdir $@) \
+	$(hide) cd $(dir $@) && zip -qryX $(notdir $@) \
 		$(TARGET_COPY_OUT_SYSTEM) \
 		$(patsubst $(PRODUCT_OUT)/%, %, $(TARGET_OUT_NOTICE_FILES)) \
 		$(addprefix symbols/,$(PDK_SYMBOL_FILES_LIST))
 ifdef BOARD_VENDORIMAGE_FILE_SYSTEM_TYPE
-	$(hide) cd $(dir $@) && zip -qry $(notdir $@) \
+	$(hide) cd $(dir $@) && zip -qryX $(notdir $@) \
 		$(TARGET_COPY_OUT_VENDOR)
 endif
 ifneq ($(PDK_PLATFORM_JAVA_ZIP_CONTENTS),)
-	$(hide) cd $(OUT_DIR) && zip -qry $(patsubst $(OUT_DIR)/%,%,$@) $(PDK_PLATFORM_JAVA_ZIP_CONTENTS)
+	$(hide) cd $(OUT_DIR) && zip -qryX $(patsubst $(OUT_DIR)/%,%,$@) $(PDK_PLATFORM_JAVA_ZIP_CONTENTS)
 endif
 ifneq ($(PDK_PLATFORM_ZIP_PRODUCT_BINARIES),)
-	$(hide) zip -qry $@ $(PDK_PLATFORM_ZIP_PRODUCT_BINARIES)
+	$(hide) zip -qryX $@ $(PDK_PLATFORM_ZIP_PRODUCT_BINARIES)
 endif
+	@# Add dex-preopt files and config.
+	$(if $(PRIVATE_DEX_FILES),$(hide) cd $(OUT_DIR) && zip -qryX $(patsubst $(OUT_DIR)/%,%,$@ $(PRIVATE_DEX_FILES)))
+	$(hide) zip -qryXj $@ $(PRIVATE_ODEX_CONFIG)
 
 .PHONY: platform
 platform: $(INSTALLED_PLATFORM_ZIP)
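For orientation, the generated pdk_dexpreopt_config.mk ends up looking roughly like the excerpt below; the keys mirror the echo statements above, while the module names and paths here are made up for illustration:

    # Auto-generated. Do not modify.
    PDK.DEXPREOPT.JAVA_LIBRARIES:=services telephony-common
    PDK.DEXPREOPT.APPS:=Settings
    PDK.DEXPREOPT.services.SRC:=target/common/obj/JAVA_LIBRARIES/services_intermediates/javalib.jar
    PDK.DEXPREOPT.services.DEX_PREOPT:=true
    PDK.DEXPREOPT.Settings.SRC:=target/product/<device>/obj/APPS/Settings_intermediates/package.dex.apk
    PDK.DEXPREOPT.Settings.PRIVILEGED_MODULE:=true
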
@@ -1124,7 +1270,7 @@
 $(call dist-for-goals, platform platform-java, $(INSTALLED_PLATFORM_ZIP))
 endif
 
-#######
+# -----------------------------------------------------------------
 ## boot tarball
 define build-boottarball-target
     $(hide) echo "Target boot fs tarball: $(INSTALLED_BOOTTARBALL_TARGET)"
@@ -1259,6 +1405,13 @@
 # platform.zip depends on $(INTERNAL_VENDORIMAGE_FILES).
 $(INSTALLED_PLATFORM_ZIP) : $(INTERNAL_VENDORIMAGE_FILES)
 
+INSTALLED_FILES_FILE_VENDOR := $(PRODUCT_OUT)/installed-files-vendor.txt
+$(INSTALLED_FILES_FILE_VENDOR) : $(INTERNAL_VENDORIMAGE_FILES)
+	@echo Installed file list: $@
+	@mkdir -p $(dir $@)
+	@rm -f $@
+	$(hide) build/tools/fileslist.py $(TARGET_OUT_VENDOR) > $@
+
 vendorimage_intermediates := \
     $(call intermediates-dir-for,PACKAGING,vendor)
 BUILT_VENDORIMAGE_TARGET := $(PRODUCT_OUT)/vendor.img
@@ -1276,7 +1429,7 @@
 
 # We just build this directly to the install location.
 INSTALLED_VENDORIMAGE_TARGET := $(BUILT_VENDORIMAGE_TARGET)
-$(INSTALLED_VENDORIMAGE_TARGET): $(INTERNAL_USERIMAGES_DEPS) $(INTERNAL_VENDORIMAGE_FILES)
+$(INSTALLED_VENDORIMAGE_TARGET): $(INTERNAL_USERIMAGES_DEPS) $(INTERNAL_VENDORIMAGE_FILES) $(INSTALLED_FILES_FILE_VENDOR)
 	$(build-vendorimage-target)
 
 .PHONY: vendorimage-nodeps
@@ -1294,7 +1447,35 @@
 # -----------------------------------------------------------------
 # host tools needed to build dist and OTA packages
 
-DISTTOOLS :=  $(HOST_OUT_EXECUTABLES)/minigzip \
+build_ota_package := true
+ifeq ($(TARGET_SKIP_OTA_PACKAGE),true)
+build_ota_package := false
+endif
+ifeq ($(BUILD_OS),darwin)
+build_ota_package := false
+endif
+ifneq ($(strip $(SANITIZE_TARGET)),)
+build_ota_package := false
+endif
+ifeq ($(TARGET_PRODUCT),sdk)
+build_ota_package := false
+endif
+ifneq ($(filter generic%,$(TARGET_DEVICE)),)
+build_ota_package := false
+endif
+ifeq ($(TARGET_NO_KERNEL),true)
+build_ota_package := false
+endif
+ifeq ($(recovery_fstab),)
+build_ota_package := false
+endif
+ifeq ($(TARGET_BUILD_PDK),true)
+build_ota_package := false
+endif
+
+ifeq ($(build_ota_package),true)
+OTATOOLS :=  $(HOST_OUT_EXECUTABLES)/minigzip \
+  $(HOST_OUT_EXECUTABLES)/aapt \
   $(HOST_OUT_EXECUTABLES)/mkbootfs \
   $(HOST_OUT_EXECUTABLES)/mkbootimg \
   $(HOST_OUT_EXECUTABLES)/fs_config \
@@ -1306,31 +1487,50 @@
   $(HOST_OUT_JAVA_LIBRARIES)/BootSignature.jar \
   $(HOST_OUT_EXECUTABLES)/mkuserimg.sh \
   $(HOST_OUT_EXECUTABLES)/make_ext4fs \
+  $(HOST_OUT_EXECUTABLES)/mksquashfsimage.sh \
+  $(HOST_OUT_EXECUTABLES)/mksquashfs \
+  $(HOST_OUT_EXECUTABLES)/mkf2fsuserimg.sh \
+  $(HOST_OUT_EXECUTABLES)/make_f2fs \
   $(HOST_OUT_EXECUTABLES)/simg2img \
   $(HOST_OUT_EXECUTABLES)/e2fsck \
   $(HOST_OUT_EXECUTABLES)/build_verity_tree \
   $(HOST_OUT_EXECUTABLES)/verity_signer \
   $(HOST_OUT_EXECUTABLES)/append2simg \
   $(HOST_OUT_EXECUTABLES)/img2simg \
-  $(HOST_OUT_EXECUTABLES)/boot_signer
+  $(HOST_OUT_EXECUTABLES)/boot_signer \
+  $(HOST_OUT_EXECUTABLES)/fec \
+  $(HOST_OUT_EXECUTABLES)/brillo_update_payload \
+  $(HOST_OUT_EXECUTABLES)/lib/shflags/shflags \
+  $(HOST_OUT_EXECUTABLES)/delta_generator \
+  $(BLK_ALLOC_TO_BASE_FS)
 
 # Shared libraries.
-DISTTOOLS += \
+OTATOOLS += \
   $(HOST_LIBRARY_PATH)/libc++$(HOST_SHLIB_SUFFIX) \
   $(HOST_LIBRARY_PATH)/liblog$(HOST_SHLIB_SUFFIX) \
   $(HOST_LIBRARY_PATH)/libcutils$(HOST_SHLIB_SUFFIX) \
   $(HOST_LIBRARY_PATH)/libselinux$(HOST_SHLIB_SUFFIX) \
   $(HOST_LIBRARY_PATH)/libcrypto-host$(HOST_SHLIB_SUFFIX) \
-  $(HOST_LIBRARY_PATH)/libext2fs_host$(HOST_SHLIB_SUFFIX) \
-  $(HOST_LIBRARY_PATH)/libext2_blkid_host$(HOST_SHLIB_SUFFIX) \
-  $(HOST_LIBRARY_PATH)/libext2_com_err_host$(HOST_SHLIB_SUFFIX) \
-  $(HOST_LIBRARY_PATH)/libext2_e2p_host$(HOST_SHLIB_SUFFIX) \
-  $(HOST_LIBRARY_PATH)/libext2_profile_host$(HOST_SHLIB_SUFFIX) \
-  $(HOST_LIBRARY_PATH)/libext2_quota_host$(HOST_SHLIB_SUFFIX) \
-  $(HOST_LIBRARY_PATH)/libext2_uuid_host$(HOST_SHLIB_SUFFIX)
-
-OTATOOLS := $(DISTTOOLS) \
-  $(HOST_OUT_EXECUTABLES)/aapt
+  $(HOST_LIBRARY_PATH)/libdivsufsort$(HOST_SHLIB_SUFFIX) \
+  $(HOST_LIBRARY_PATH)/libdivsufsort64$(HOST_SHLIB_SUFFIX) \
+  $(HOST_LIBRARY_PATH)/libext2fs-host$(HOST_SHLIB_SUFFIX) \
+  $(HOST_LIBRARY_PATH)/libext2_blkid-host$(HOST_SHLIB_SUFFIX) \
+  $(HOST_LIBRARY_PATH)/libext2_com_err-host$(HOST_SHLIB_SUFFIX) \
+  $(HOST_LIBRARY_PATH)/libext2_e2p-host$(HOST_SHLIB_SUFFIX) \
+  $(HOST_LIBRARY_PATH)/libext2_profile-host$(HOST_SHLIB_SUFFIX) \
+  $(HOST_LIBRARY_PATH)/libext2_quota-host$(HOST_SHLIB_SUFFIX) \
+  $(HOST_LIBRARY_PATH)/libext2_uuid-host$(HOST_SHLIB_SUFFIX) \
+  $(HOST_LIBRARY_PATH)/libconscrypt_openjdk_jni$(HOST_SHLIB_SUFFIX) \
+  $(HOST_LIBRARY_PATH)/libbrillo$(HOST_SHLIB_SUFFIX) \
+  $(HOST_LIBRARY_PATH)/libbrillo-stream$(HOST_SHLIB_SUFFIX) \
+  $(HOST_LIBRARY_PATH)/libbrillo-http$(HOST_SHLIB_SUFFIX) \
+  $(HOST_LIBRARY_PATH)/libchrome$(HOST_SHLIB_SUFFIX) \
+  $(HOST_LIBRARY_PATH)/libcurl-host$(HOST_SHLIB_SUFFIX) \
+  $(HOST_LIBRARY_PATH)/libevent-host$(HOST_SHLIB_SUFFIX) \
+  $(HOST_LIBRARY_PATH)/libprotobuf-cpp-lite$(HOST_SHLIB_SUFFIX) \
+  $(HOST_LIBRARY_PATH)/libssl-host$(HOST_SHLIB_SUFFIX) \
+  $(HOST_LIBRARY_PATH)/libz-host$(HOST_SHLIB_SUFFIX) \
+  $(HOST_LIBRARY_PATH)/libbase$(HOST_SHLIB_SUFFIX)
 
 .PHONY: otatools
 otatools: $(OTATOOLS)
@@ -1341,17 +1541,20 @@
 $(BUILT_OTATOOLS_PACKAGE): $(OTATOOLS) | $(ACP)
 	@echo "Package OTA tools: $@"
 	$(hide) rm -rf $@ $(zip_root)
-	$(hide) mkdir -p $(dir $@) $(zip_root)/bin $(zip_root)/framework $(zip_root)/releasetools
+	$(hide) mkdir -p $(dir $@) $(zip_root)/bin $(zip_root)/framework $(zip_root)/releasetools $(zip_root)/system/extras/verity
 	$(call copy-files-with-structure,$(OTATOOLS),$(HOST_OUT)/,$(zip_root))
+	$(hide) $(ACP) $(HOST_OUT_JAVA_LIBRARIES)/VeritySigner.jar $(zip_root)/framework/
+	$(hide) $(ACP) -p system/extras/verity/build_verity_metadata.py $(zip_root)/system/extras/verity/
 	$(hide) $(ACP) -r -d -p build/tools/releasetools/* $(zip_root)/releasetools
 	$(hide) rm -rf $@ $(zip_root)/releasetools/*.pyc
-	$(hide) (cd $(zip_root) && zip -qry $(abspath $@) *)
-	$(hide) zip -qry $(abspath $@) build/target/product/security/
-	$(hide) find device vendor -name \*.pk8 -o -name \*.x509.pem -o -name oem.prop | xargs zip -qry $(abspath $@)>/dev/null || true
+	$(hide) (cd $(zip_root) && zip -qryX $(abspath $@) *)
+	$(hide) zip -qryX $(abspath $@) build/target/product/security/
+	$(hide) find device vendor -name \*.pk8 -o -name verifiedboot\* -o -name \*.x509.pem -o -name oem.prop | xargs zip -qryX $(abspath $@)>/dev/null || true
 
 .PHONY: otatools-package
 otatools-package: $(BUILT_OTATOOLS_PACKAGE)
 
+endif # build_ota_package
 
 # -----------------------------------------------------------------
 # A zip of the directories that map to the target filesystem.
@@ -1380,12 +1583,14 @@
   fi
 endef
 
-built_ota_tools := \
-	$(call intermediates-dir-for,EXECUTABLES,applypatch,,,$(TARGET_PREFER_32_BIT))/applypatch \
-	$(call intermediates-dir-for,EXECUTABLES,applypatch_static,,,$(TARGET_PREFER_32_BIT))/applypatch_static \
-	$(call intermediates-dir-for,EXECUTABLES,check_prereq,,,$(TARGET_PREFER_32_BIT))/check_prereq \
-	$(call intermediates-dir-for,EXECUTABLES,sqlite3,,,$(TARGET_PREFER_32_BIT))/sqlite3 \
-	$(call intermediates-dir-for,EXECUTABLES,updater,,,$(TARGET_PREFER_32_BIT))/updater
+built_ota_tools :=
+
+# We can't build static executables when SANITIZE_TARGET=address
+ifeq ($(strip $(SANITIZE_TARGET)),)
+built_ota_tools += \
+    $(call intermediates-dir-for,EXECUTABLES,updater,,,$(TARGET_PREFER_32_BIT))/updater
+endif
+
 $(BUILT_TARGET_FILES_PACKAGE): PRIVATE_OTA_TOOLS := $(built_ota_tools)
 
 $(BUILT_TARGET_FILES_PACKAGE): PRIVATE_RECOVERY_API_VERSION := $(RECOVERY_API_VERSION)
@@ -1398,6 +1603,18 @@
 $(BUILT_TARGET_FILES_PACKAGE): tool_extensions := $(TARGET_RELEASETOOLS_EXTENSIONS)
 endif
 
+# Depend on the built OTA tools only when not using the A/B updater.
+ifneq ($(AB_OTA_UPDATER),true)
+$(BUILT_TARGET_FILES_PACKAGE): $(built_ota_tools)
+endif
+
+# If we are using recovery as boot, output recovery files to BOOT/.
+ifeq ($(BOARD_USES_RECOVERY_AS_BOOT),true)
+$(BUILT_TARGET_FILES_PACKAGE): PRIVATE_RECOVERY_OUT := BOOT
+else
+$(BUILT_TARGET_FILES_PACKAGE): PRIVATE_RECOVERY_OUT := RECOVERY
+endif
+
 # Depending on the various images guarantees that the underlying
 # directories are up-to-date.
 $(BUILT_TARGET_FILES_PACKAGE): \
@@ -1410,37 +1627,46 @@
 		$(INSTALLED_VENDORIMAGE_TARGET) \
 		$(INSTALLED_ANDROID_INFO_TXT_TARGET) \
 		$(SELINUX_FC) \
-		$(built_ota_tools) \
 		$(APKCERTS_FILE) \
 		$(HOST_OUT_EXECUTABLES)/fs_config \
 		| $(ACP)
 	@echo "Package target files: $@"
 	$(hide) rm -rf $@ $(zip_root)
 	$(hide) mkdir -p $(dir $@) $(zip_root)
+ifneq (,$(INSTALLED_RECOVERYIMAGE_TARGET)$(filter true,$(BOARD_USES_RECOVERY_AS_BOOT)))
 	@# Components of the recovery image
-	$(hide) mkdir -p $(zip_root)/RECOVERY
+	$(hide) mkdir -p $(zip_root)/$(PRIVATE_RECOVERY_OUT)
 	$(hide) $(call package_files-copy-root, \
-		$(TARGET_RECOVERY_ROOT_OUT),$(zip_root)/RECOVERY/RAMDISK)
+		$(TARGET_RECOVERY_ROOT_OUT),$(zip_root)/$(PRIVATE_RECOVERY_OUT)/RAMDISK)
 ifdef INSTALLED_KERNEL_TARGET
-	$(hide) $(ACP) $(INSTALLED_KERNEL_TARGET) $(zip_root)/RECOVERY/kernel
+	$(hide) $(ACP) $(INSTALLED_KERNEL_TARGET) $(zip_root)/$(PRIVATE_RECOVERY_OUT)/kernel
 endif
 ifdef INSTALLED_2NDBOOTLOADER_TARGET
 	$(hide) $(ACP) \
-		$(INSTALLED_2NDBOOTLOADER_TARGET) $(zip_root)/RECOVERY/second
+		$(INSTALLED_2NDBOOTLOADER_TARGET) $(zip_root)/$(PRIVATE_RECOVERY_OUT)/second
 endif
 ifdef BOARD_KERNEL_CMDLINE
-	$(hide) echo "$(BOARD_KERNEL_CMDLINE)" > $(zip_root)/RECOVERY/cmdline
+	$(hide) echo "$(BOARD_KERNEL_CMDLINE)" > $(zip_root)/$(PRIVATE_RECOVERY_OUT)/cmdline
 endif
 ifdef BOARD_KERNEL_BASE
-	$(hide) echo "$(BOARD_KERNEL_BASE)" > $(zip_root)/RECOVERY/base
+	$(hide) echo "$(BOARD_KERNEL_BASE)" > $(zip_root)/$(PRIVATE_RECOVERY_OUT)/base
 endif
 ifdef BOARD_KERNEL_PAGESIZE
-	$(hide) echo "$(BOARD_KERNEL_PAGESIZE)" > $(zip_root)/RECOVERY/pagesize
+	$(hide) echo "$(BOARD_KERNEL_PAGESIZE)" > $(zip_root)/$(PRIVATE_RECOVERY_OUT)/pagesize
 endif
+endif # INSTALLED_RECOVERYIMAGE_TARGET defined or BOARD_USES_RECOVERY_AS_BOOT is true
 	@# Components of the boot image
 	$(hide) mkdir -p $(zip_root)/BOOT
+ifeq ($(BOARD_BUILD_SYSTEM_ROOT_IMAGE),true)
+	$(hide) mkdir -p $(zip_root)/ROOT
+	$(hide) $(call package_files-copy-root, \
+		$(TARGET_ROOT_OUT),$(zip_root)/ROOT)
+else
 	$(hide) $(call package_files-copy-root, \
 		$(TARGET_ROOT_OUT),$(zip_root)/BOOT/RAMDISK)
+endif
+	@# If we are using recovery as boot, this is already done when processing recovery.
+ifneq ($(BOARD_USES_RECOVERY_AS_BOOT),true)
 ifdef INSTALLED_KERNEL_TARGET
 	$(hide) $(ACP) $(INSTALLED_KERNEL_TARGET) $(zip_root)/BOOT/kernel
 endif
@@ -1457,6 +1683,7 @@
 ifdef BOARD_KERNEL_PAGESIZE
 	$(hide) echo "$(BOARD_KERNEL_PAGESIZE)" > $(zip_root)/BOOT/pagesize
 endif
+endif # BOARD_USES_RECOVERY_AS_BOOT
 	$(hide) $(foreach t,$(INSTALLED_RADIOIMAGE_TARGET),\
 	            mkdir -p $(zip_root)/RADIO; \
 	            $(ACP) $(t) $(zip_root)/RADIO/$(notdir $(t));)
@@ -1472,15 +1699,21 @@
 		$(TARGET_OUT_VENDOR),$(zip_root)/VENDOR)
 endif
 	@# Extra contents of the OTA package
-	$(hide) mkdir -p $(zip_root)/OTA/bin
+	$(hide) mkdir -p $(zip_root)/OTA
 	$(hide) $(ACP) $(INSTALLED_ANDROID_INFO_TXT_TARGET) $(zip_root)/OTA/
+ifneq ($(AB_OTA_UPDATER),true)
+ifneq ($(built_ota_tools),)
+	$(hide) mkdir -p $(zip_root)/OTA/bin
 	$(hide) $(ACP) $(PRIVATE_OTA_TOOLS) $(zip_root)/OTA/bin/
+endif
+endif
 	@# Files that do not end up in any images, but are necessary to
 	@# build them.
 	$(hide) mkdir -p $(zip_root)/META
 	$(hide) $(ACP) $(APKCERTS_FILE) $(zip_root)/META/apkcerts.txt
 	$(hide) if test -e $(tool_extensions)/releasetools.py; then $(ACP) $(tool_extensions)/releasetools.py $(zip_root)/META/; fi
 	$(hide) echo "$(PRODUCT_OTA_PUBLIC_KEYS)" > $(zip_root)/META/otakeys.txt
+	$(hide) $(ACP) $(SELINUX_FC) $(zip_root)/META/file_contexts.bin
 	$(hide) echo "recovery_api_version=$(PRIVATE_RECOVERY_API_VERSION)" > $(zip_root)/META/misc_info.txt
 	$(hide) echo "fstab_version=$(PRIVATE_RECOVERY_FSTAB_VERSION)" >> $(zip_root)/META/misc_info.txt
 ifdef BOARD_FLASH_BLOCK_SIZE
@@ -1489,6 +1722,12 @@
 ifdef BOARD_BOOTIMAGE_PARTITION_SIZE
 	$(hide) echo "boot_size=$(BOARD_BOOTIMAGE_PARTITION_SIZE)" >> $(zip_root)/META/misc_info.txt
 endif
+ifeq ($(BOARD_USES_RECOVERY_AS_BOOT),true)
+	$(hide) echo "recovery_as_boot=true" >> $(zip_root)/META/misc_info.txt
+endif
+ifeq ($(INSTALLED_RECOVERYIMAGE_TARGET),)
+	$(hide) echo "no_recovery=true" >> $(zip_root)/META/misc_info.txt
+endif
 ifdef BOARD_RECOVERYIMAGE_PARTITION_SIZE
 	$(hide) echo "recovery_size=$(BOARD_RECOVERYIMAGE_PARTITION_SIZE)" >> $(zip_root)/META/misc_info.txt
 endif
@@ -1507,28 +1746,75 @@
 	$(hide) echo "extra_recovery_keys=$(PRODUCT_EXTRA_RECOVERY_KEYS)" >> $(zip_root)/META/misc_info.txt
 endif
 	$(hide) echo 'mkbootimg_args=$(BOARD_MKBOOTIMG_ARGS)' >> $(zip_root)/META/misc_info.txt
+	$(hide) echo 'mkbootimg_version_args=$(INTERNAL_MKBOOTIMG_VERSION_ARGS)' >> $(zip_root)/META/misc_info.txt
 	$(hide) echo "use_set_metadata=1" >> $(zip_root)/META/misc_info.txt
 	$(hide) echo "multistage_support=1" >> $(zip_root)/META/misc_info.txt
 	$(hide) echo "update_rename_support=1" >> $(zip_root)/META/misc_info.txt
-	$(hide) echo "blockimgdiff_versions=1,2,3" >> $(zip_root)/META/misc_info.txt
+	$(hide) echo "blockimgdiff_versions=1,2,3,4" >> $(zip_root)/META/misc_info.txt
 ifneq ($(OEM_THUMBPRINT_PROPERTIES),)
 	# OTA scripts are only interested in fingerprint related properties
 	$(hide) echo "oem_fingerprint_properties=$(OEM_THUMBPRINT_PROPERTIES)" >> $(zip_root)/META/misc_info.txt
 endif
+ifneq ($(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_SYSTEM_BASE_FS_PATH),)
+	$(hide) $(ACP) $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_SYSTEM_BASE_FS_PATH) \
+	  $(zip_root)/META/$(notdir $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_SYSTEM_BASE_FS_PATH))
+endif
+ifneq ($(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_VENDOR_BASE_FS_PATH),)
+	$(hide) $(ACP) $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_VENDOR_BASE_FS_PATH) \
+	  $(zip_root)/META/$(notdir $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_VENDOR_BASE_FS_PATH))
+endif
+ifneq ($(strip $(SANITIZE_TARGET)),)
+	# We need to create userdata.img with real data because the instrumented libraries are in userdata.img.
+	$(hide) echo "userdata_img_with_data=true" >> $(zip_root)/META/misc_info.txt
+endif
+ifeq ($(BOARD_USES_FULL_RECOVERY_IMAGE),true)
+	$(hide) echo "full_recovery_image=true" >> $(zip_root)/META/misc_info.txt
+endif
 	$(call generate-userimage-prop-dictionary, $(zip_root)/META/misc_info.txt)
+ifneq ($(INSTALLED_RECOVERYIMAGE_TARGET),)
 	$(hide) PATH=$(foreach p,$(INTERNAL_USERIMAGES_BINARY_PATHS),$(p):)$$PATH MKBOOTIMG=$(MKBOOTIMG) \
 	    ./build/tools/releasetools/make_recovery_patch $(zip_root) $(zip_root)
-	@# Zip everything up, preserving symlinks
-	$(hide) (cd $(zip_root) && zip -qry ../$(notdir $@) .)
+endif
+ifeq ($(AB_OTA_UPDATER),true)
+	@# When using the A/B updater, include the updater config files in the zip.
+	$(hide) $(ACP) $(TOPDIR)system/update_engine/update_engine.conf $(zip_root)/META/update_engine_config.txt
+	$(hide) for part in $(AB_OTA_PARTITIONS); do \
+	  echo "$${part}" >> $(zip_root)/META/ab_partitions.txt; \
+	done
+	$(hide) for conf in $(AB_OTA_POSTINSTALL_CONFIG); do \
+	  echo "$${conf}" >> $(zip_root)/META/postinstall_config.txt; \
+	done
+	@# Include the build type in META/misc_info.txt so the server can easily differentiate production builds.
+	$(hide) echo "build_type=$(TARGET_BUILD_VARIANT)" >> $(zip_root)/META/misc_info.txt
+	$(hide) echo "ab_update=true" >> $(zip_root)/META/misc_info.txt
+ifdef OSRELEASED_DIRECTORY
+	$(hide) $(ACP) $(TARGET_OUT_ETC)/$(OSRELEASED_DIRECTORY)/product_id $(zip_root)/META/product_id.txt
+	$(hide) $(ACP) $(TARGET_OUT_ETC)/$(OSRELEASED_DIRECTORY)/product_version $(zip_root)/META/product_version.txt
+endif
+endif
+ifeq ($(BREAKPAD_GENERATE_SYMBOLS),true)
+	@# If breakpad symbols have been generated, add them to the zip.
+	$(hide) $(ACP) -r $(TARGET_OUT_BREAKPAD) $(zip_root)/BREAKPAD
+endif
+	@# Zip everything up, preserving symlinks and placing META/ files first to
+	@# help early validation of the .zip file while uploading it.
+	$(hide) (cd $(zip_root) && \
+	        zip -qryX ../$(notdir $@) ./META && \
+	        zip -qryXu ../$(notdir $@) .)
 	@# Run fs_config on all the system, vendor, boot ramdisk,
 	@# and recovery ramdisk files in the zip, and save the output
 	$(hide) zipinfo -1 $@ | awk 'BEGIN { FS="SYSTEM/" } /^SYSTEM\// {print "system/" $$2}' | $(HOST_OUT_EXECUTABLES)/fs_config -C -D $(TARGET_OUT) -S $(SELINUX_FC) > $(zip_root)/META/filesystem_config.txt
 	$(hide) zipinfo -1 $@ | awk 'BEGIN { FS="VENDOR/" } /^VENDOR\// {print "vendor/" $$2}' | $(HOST_OUT_EXECUTABLES)/fs_config -C -D $(TARGET_OUT) -S $(SELINUX_FC) > $(zip_root)/META/vendor_filesystem_config.txt
+ifeq ($(BOARD_BUILD_SYSTEM_ROOT_IMAGE),true)
+	$(hide) zipinfo -1 $@ | awk 'BEGIN { FS="ROOT/" } /^ROOT\// {print $$2}' | $(HOST_OUT_EXECUTABLES)/fs_config -C -D $(TARGET_OUT) -S $(SELINUX_FC) > $(zip_root)/META/root_filesystem_config.txt
+endif
 	$(hide) zipinfo -1 $@ | awk 'BEGIN { FS="BOOT/RAMDISK/" } /^BOOT\/RAMDISK\// {print $$2}' | $(HOST_OUT_EXECUTABLES)/fs_config -C -D $(TARGET_OUT) -S $(SELINUX_FC) > $(zip_root)/META/boot_filesystem_config.txt
+ifneq ($(INSTALLED_RECOVERYIMAGE_TARGET),)
 	$(hide) zipinfo -1 $@ | awk 'BEGIN { FS="RECOVERY/RAMDISK/" } /^RECOVERY\/RAMDISK\// {print $$2}' | $(HOST_OUT_EXECUTABLES)/fs_config -C -D $(TARGET_OUT) -S $(SELINUX_FC) > $(zip_root)/META/recovery_filesystem_config.txt
-	$(hide) (cd $(zip_root) && zip -q ../$(notdir $@) META/*filesystem_config.txt)
+endif
+	$(hide) (cd $(zip_root) && zip -qX ../$(notdir $@) META/*filesystem_config.txt)
 	$(hide) PATH=$(foreach p,$(INTERNAL_USERIMAGES_BINARY_PATHS),$(p):)$$PATH MKBOOTIMG=$(MKBOOTIMG) \
-	    ./build/tools/releasetools/add_img_to_target_files -p $(HOST_OUT) $@
+	    ./build/tools/releasetools/add_img_to_target_files -v -p $(HOST_OUT) $@
 
 .PHONY: target-files-package
 target-files-package: $(BUILT_TARGET_FILES_PACKAGE)
@@ -1537,11 +1823,7 @@
 $(call dist-for-goals, target-files-package, $(BUILT_TARGET_FILES_PACKAGE))
 endif
 
-ifneq ($(TARGET_PRODUCT),sdk)
-ifeq ($(filter generic%,$(TARGET_DEVICE)),)
-ifneq ($(TARGET_NO_KERNEL),true)
-ifneq ($(recovery_fstab),)
-
+ifeq ($(build_ota_package),true)
 # -----------------------------------------------------------------
 # OTA update package
 
@@ -1555,7 +1837,7 @@
 
 $(INTERNAL_OTA_PACKAGE_TARGET): KEY_CERT_PAIR := $(DEFAULT_KEY_CERT_PAIR)
 
-$(INTERNAL_OTA_PACKAGE_TARGET): $(BUILT_TARGET_FILES_PACKAGE) $(DISTTOOLS)
+$(INTERNAL_OTA_PACKAGE_TARGET): $(BUILT_TARGET_FILES_PACKAGE)
 	@echo "Package OTA: $@"
 	$(hide) PATH=$(foreach p,$(INTERNAL_USERIMAGES_BINARY_PATHS),$(p):)$$PATH MKBOOTIMG=$(MKBOOTIMG) \
 	   ./build/tools/releasetools/ota_from_target_files -v \
@@ -1568,10 +1850,7 @@
 .PHONY: otapackage
 otapackage: $(INTERNAL_OTA_PACKAGE_TARGET)
 
-endif    # recovery_fstab is defined
-endif    # TARGET_NO_KERNEL != true
-endif    # TARGET_DEVICE != generic*
-endif    # TARGET_PRODUCT != sdk
+endif    # build_ota_package
 
 # -----------------------------------------------------------------
 # The update package
@@ -1584,7 +1863,7 @@
 
 INTERNAL_UPDATE_PACKAGE_TARGET := $(PRODUCT_OUT)/$(name).zip
 
-$(INTERNAL_UPDATE_PACKAGE_TARGET): $(BUILT_TARGET_FILES_PACKAGE) $(DISTTOOLS)
+$(INTERNAL_UPDATE_PACKAGE_TARGET): $(BUILT_TARGET_FILES_PACKAGE)
 	@echo "Package: $@"
 	$(hide) PATH=$(foreach p,$(INTERNAL_USERIMAGES_BINARY_PATHS),$(p):)$$PATH MKBOOTIMG=$(MKBOOTIMG) \
 	   ./build/tools/releasetools/img_from_target_files -v \
@@ -1613,7 +1892,7 @@
 	@echo "Package symbols: $@"
 	$(hide) rm -rf $@
 	$(hide) mkdir -p $(dir $@) $(TARGET_OUT_UNSTRIPPED)
-	$(hide) zip -qr $@ $(TARGET_OUT_UNSTRIPPED)
+	$(hide) zip -qrX $@ $(TARGET_OUT_UNSTRIPPED)
 
 # -----------------------------------------------------------------
 # A zip of the Android Apps. Not keeping full path so that we don't
@@ -1630,8 +1909,13 @@
 	@echo "Package apps: $@"
 	$(hide) rm -rf $@
 	$(hide) mkdir -p $(dir $@)
-	$(hide) zip -qj $@ $(TARGET_OUT_APPS)/*/*.apk $(TARGET_OUT_APPS_PRIVILEGED)/*/*.apk
-
+	$(hide) apps_to_zip=`find $(TARGET_OUT_APPS) $(TARGET_OUT_APPS_PRIVILEGED) -mindepth 2 -maxdepth 3 -name "*.apk"`; \
+	if [ -z "$$apps_to_zip" ]; then \
+		echo "No apps to zip up. Generating empty apps archive." ; \
+		a=$$(mktemp /tmp/XXXXXXX) && touch $$a && zip $@ $$a && zip -d $@ $$a; \
+	else \
+		zip -qjX $@ $$apps_to_zip; \
+	fi
 
 #------------------------------------------------------------------
 # A zip of emma code coverage meta files. Generated for fully emma
@@ -1643,7 +1927,7 @@
 $(EMMA_META_ZIP) :
 	@echo "Collecting Emma coverage meta files."
 	$(hide) find $(TARGET_COMMON_OUT_ROOT) $(HOST_COMMON_OUT_ROOT) -name "coverage.em" | \
-		zip -@ -q $@
+		zip -@ -qX $@
 
 endif # EMMA_INSTRUMENT=true
 
@@ -1659,7 +1943,7 @@
 	$(hide) dict_files=`find $(TARGET_OUT_COMMON_INTERMEDIATES)/APPS -name proguard_dictionary`; \
 		if [ -n "$$dict_files" ]; then \
 		  unobfuscated_jars=$${dict_files//proguard_dictionary/classes.jar}; \
-		  zip -q $@ $$dict_files $$unobfuscated_jars; \
+		  zip -qX $@ $$dict_files $$unobfuscated_jars; \
 		else \
 		  touch $(dir $@)/zipdummy; \
 		  (cd $(dir $@) && zip -q $(notdir $@) zipdummy); \
@@ -1690,7 +1974,7 @@
 
 $(INTERNAL_EMULATOR_PACKAGE_TARGET): $(INTERNAL_EMULATOR_PACKAGE_FILES)
 	@echo "Package: $@"
-	$(hide) zip -qj $@ $(INTERNAL_EMULATOR_PACKAGE_FILES)
+	$(hide) zip -qjX $@ $(INTERNAL_EMULATOR_PACKAGE_FILES)
 
 endif
 # -----------------------------------------------------------------
@@ -1734,7 +2018,6 @@
 ifeq ($(strip $(ATREE_FILES)),)
 ATREE_FILES := \
 	$(ALL_PREBUILT) \
-	$(ALL_COPIED_HEADERS) \
 	$(ALL_DEFAULT_INSTALLED_MODULES) \
 	$(INSTALLED_RAMDISK_TARGET) \
 	$(ALL_DOCS) \
@@ -1818,6 +2101,7 @@
 			-v "TARGET_CPU_ABI=$(TARGET_CPU_ABI)" \
 			-v "DLL_EXTENSION=$(HOST_SHLIB_SUFFIX)" \
 			-v "FONT_OUT=$(SDK_FONT_TEMP)" \
+			-v "JACK_SDKTOOL_VERSION=$(JACK_SDKTOOL_VERSION)" \
 			-o $(PRIVATE_DIR) && \
 		cp -f $(target_notice_file_txt) \
 				$(PRIVATE_DIR)/system-images/android-$(PLATFORM_VERSION)/$(TARGET_CPU_ABI)/NOTICE.txt && \
@@ -1825,7 +2109,7 @@
 		HOST_OUT_EXECUTABLES=$(HOST_OUT_EXECUTABLES) HOST_OS=$(HOST_OS) \
 			development/build/tools/sdk_clean.sh $(PRIVATE_DIR) && \
 		chmod -R ug+rwX $(PRIVATE_DIR) && \
-		cd $(dir $@) && zip -rq $(notdir $@) $(PRIVATE_NAME) \
+		cd $(dir $@) && zip -rqX $(notdir $@) $(PRIVATE_NAME) \
 	) || ( rm -rf $(PRIVATE_DIR) $@ && exit 44 )
 
 
@@ -1860,11 +2144,15 @@
 include $(sort $(wildcard $(BUILD_SYSTEM)/tasks/*.mk))
 -include $(sort $(wildcard vendor/*/build/tasks/*.mk))
 -include $(sort $(wildcard device/*/build/tasks/*.mk))
+-include $(sort $(wildcard product/*/build/tasks/*.mk))
 # Also the project-specific tasks
 -include $(sort $(wildcard vendor/*/*/build/tasks/*.mk))
 -include $(sort $(wildcard device/*/*/build/tasks/*.mk))
+-include $(sort $(wildcard product/*/*/build/tasks/*.mk))
 endif
 
+include $(BUILD_SYSTEM)/product-graph.mk
+
 # -----------------------------------------------------------------
 # Create SDK repository packages. Must be done after tasks/* since
 # we need the addon rules defined.
diff --git a/core/aapt2.mk b/core/aapt2.mk
new file mode 100644
index 0000000..ccc4535
--- /dev/null
+++ b/core/aapt2.mk
@@ -0,0 +1,89 @@
+######################################
+# Compile resource with AAPT2
+# Input variables:
+# full_android_manifest,
+# my_res_resources, my_overlay_resources,
+# my_compiled_res_base_dir, my_res_package,
+# R_file_stamp, proguard_options_file
+# my_generated_res_dirs: resources generated during the build process, which have to be compiled in a single run of aapt2.
+# my_generated_res_dirs_deps: the dependencies to use for my_generated_res_dirs.
+#
+# Output variables:
+# my_res_resources_flat, my_overlay_resources_flat,
+# my_generated_resources_flata
+#
+######################################
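The header above is the whole contract of this file. A heavily simplified, hypothetical caller fragment (the real callers derive these from LOCAL_ variables elsewhere in the build; every path and the stamp variable here are placeholders) would look roughly like:

full_android_manifest := $(intermediates.COMMON)/AndroidManifest.xml
my_res_resources := $(wildcard $(LOCAL_PATH)/res/values/*.xml)
my_overlay_resources :=
my_generated_res_dirs := $(intermediates.COMMON)/gen-res
my_generated_res_dirs_deps := $(my_generated_res_stamp)   # placeholder dependency
my_compiled_res_base_dir := $(intermediates.COMMON)/flat-res
my_res_package := $(intermediates.COMMON)/package-res.apk
R_file_stamp := $(intermediates.COMMON)/src/R.stamp
proguard_options_file := $(intermediates.COMMON)/proguard_options
include $(BUILD_SYSTEM)/aapt2.mk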
+
+
+# Compile all the resource files.
+my_res_resources_flat := \
+  $(foreach r, $(my_res_resources),\
+    $(eval o := $(call aapt2-compiled-resource-out-file,$(r),$(my_compiled_res_base_dir)))\
+    $(eval $(call aapt2-compile-one-resource-file-rule,$(r),$(o)))\
+    $(o))
+
+my_overlay_resources_flat := \
+  $(foreach r, $(my_overlay_resources),\
+    $(eval o := $(call aapt2-compiled-resource-out-file,$(r),$(my_compiled_res_base_dir)))\
+    $(eval $(call aapt2-compile-one-resource-file-rule,$(r),$(o)))\
+    $(o))
+
+my_generated_resources_flata :=
+# Compile generated resources
+ifneq ($(my_generated_res_dirs),)
+my_generated_resources_flata := $(my_compiled_res_base_dir)/gen_res.flata
+$(my_generated_resources_flata): PRIVATE_SOURCE_RES_DIRS := $(my_generated_res_dirs)
+$(my_generated_resources_flata) : $(my_generated_res_dirs_deps)
+	@echo "AAPT2 compile $@ <- $(PRIVATE_SOURCE_RES_DIRS)"
+	$(call aapt2-compile-resource-dirs)
+
+my_generated_resources_flata += $(my_generated_resources_flata)
+endif
+
+$(my_res_resources_flat) $(my_overlay_resources_flat) $(my_generated_resources_flata): \
+  PRIVATE_AAPT2_CFLAGS := $(PRODUCT_AAPT2_CFLAGS)
+
+my_static_library_resources := $(foreach l, $(call reverse-list,$(LOCAL_STATIC_ANDROID_LIBRARIES)),\
+  $(call intermediates-dir-for,JAVA_LIBRARIES,$(l),,COMMON)/package-res.apk)
+my_shared_library_resources := $(foreach l, $(LOCAL_SHARED_ANDROID_LIBRARIES),\
+  $(call intermediates-dir-for,JAVA_LIBRARIES,$(l),,COMMON)/package-res.apk)
+
+ifneq ($(my_static_library_resources),)
+$(my_res_package): PRIVATE_AAPT_FLAGS += --auto-add-overlay
+endif
+
+$(my_res_package): PRIVATE_RES_FLAT := $(my_res_resources_flat)
+$(my_res_package): PRIVATE_OVERLAY_FLAT := $(my_static_library_resources) $(my_generated_resources_flata) $(my_overlay_resources_flat)
+$(my_res_package): PRIVATE_SHARED_ANDROID_LIBRARIES := $(my_shared_library_resources)
+$(my_res_package): PRIVATE_PROGUARD_OPTIONS_FILE := $(proguard_options_file)
+$(my_res_package) : $(full_android_manifest) $(my_static_library_resources) $(my_shared_library_resources)
+$(my_res_package) : $(my_res_resources_flat) $(my_overlay_resources_flat) \
+  $(my_generated_resources_flata) $(my_static_library_resources) \
+  $(AAPT2)
+	@echo "AAPT2 link $@"
+	$(call aapt2-link)
+
+ifdef R_file_stamp
+$(R_file_stamp) : $(my_res_package) | $(ACP)
+	@echo "target R.java/Manifest.java: $(PRIVATE_MODULE) ($@)"
+	@rm -rf $@ && mkdir -p $(dir $@)
+	$(call find-generated-R.java)
+endif
+
+ifdef proguard_options_file
+$(proguard_options_file) : $(my_res_package)
+endif
+
+resource_export_package :=
+ifdef LOCAL_EXPORT_PACKAGE_RESOURCES
+# Put this module's resources into a PRODUCT-agnostic package that
+# other packages can use to build their own PRODUCT-agnostic R.java (etc.)
+# files.
+resource_export_package := $(intermediates.COMMON)/package-export.apk
+$(R_file_stamp) : $(resource_export_package)
+
+$(resource_export_package) : $(my_res_package) | $(ACP)
+	@echo "target Export Resources: $(PRIVATE_MODULE) $(@)"
+	$(copy-file-to-new-target)
+
+endif
diff --git a/core/android_manifest.mk b/core/android_manifest.mk
index 582bad4..0093e02 100644
--- a/core/android_manifest.mk
+++ b/core/android_manifest.mk
@@ -22,9 +22,12 @@
 my_full_libs_manifest_files += $(foreach lib, $(LOCAL_STATIC_JAVA_AAR_LIBRARIES),\
   $(call intermediates-dir-for,JAVA_LIBRARIES,$(lib),,COMMON)/aar/AndroidManifest.xml)
 
+# With aapt2, we'll link in the built resource from the AAR.
+ifndef LOCAL_USE_AAPT2
 LOCAL_RESOURCE_DIR += $(foreach lib, $(LOCAL_STATIC_JAVA_AAR_LIBRARIES),\
   $(call intermediates-dir-for,JAVA_LIBRARIES,$(lib),,COMMON)/aar/res)
-endif
+endif  # LOCAL_USE_AAPT2
+endif  # LOCAL_STATIC_JAVA_AAR_LIBRARIES
 
 # Set up rules to merge library manifest files
 ifdef my_full_libs_manifest_files
diff --git a/core/base_rules.mk b/core/base_rules.mk
index ea64cc6..6722af4 100644
--- a/core/base_rules.mk
+++ b/core/base_rules.mk
@@ -34,14 +34,37 @@
   ifneq ($(LOCAL_IS_HOST_MODULE),true)
     $(error $(LOCAL_PATH): LOCAL_IS_HOST_MODULE must be "true" or empty, not "$(LOCAL_IS_HOST_MODULE)")
   endif
-  my_prefix := HOST_
+  ifeq ($(LOCAL_HOST_PREFIX),)
+    my_prefix := HOST_
+  else
+    my_prefix := $(LOCAL_HOST_PREFIX)
+  endif
   my_host := host-
 else
   my_prefix := TARGET_
   my_host :=
 endif
 
+ifeq ($(my_prefix),HOST_CROSS_)
+  my_host_cross := true
+else
+  my_host_cross :=
+endif
+
 my_module_tags := $(LOCAL_MODULE_TAGS)
+ifeq ($(my_host_cross),true)
+  my_module_tags :=
+endif
+
+ifdef BUILDING_WITH_NINJA
+# Ninja has an implicit dependency on the command being run, and kati will
+# regenerate the ninja manifest if any makefile it has read changes, so there is no
+# need to have dependencies on makefiles.
+# This won't catch all the cases where LOCAL_ADDITIONAL_DEPENDENCIES contains
+# a .mk file, because a few users of LOCAL_ADDITIONAL_DEPENDENCIES don't include
+# base_rules.mk, but it will fix the most common ones.
+LOCAL_ADDITIONAL_DEPENDENCIES := $(filter-out %.mk,$(LOCAL_ADDITIONAL_DEPENDENCIES))
+endif
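As a standalone illustration of the filtering above (file names made up), $(filter-out %.mk,...) simply drops every word ending in .mk and keeps the rest:

deps := device/acme/BoardConfig.mk scripts/gen_version.py prebuilt/firmware.bin
$(info $(filter-out %.mk,$(deps)))   # prints: scripts/gen_version.py prebuilt/firmware.bin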
 
 ###########################################################
 ## Validate and define fallbacks for input LOCAL_* variables.
@@ -145,20 +168,23 @@
 endif
 
 my_register_name := $(LOCAL_MODULE)
+ifeq ($(my_host_cross),true)
+  my_register_name := host_cross_$(LOCAL_MODULE)
+endif
 ifdef LOCAL_2ND_ARCH_VAR_PREFIX
 ifndef LOCAL_NO_2ND_ARCH_MODULE_SUFFIX
-my_register_name := $(LOCAL_MODULE)$($(my_prefix)2ND_ARCH_MODULE_SUFFIX)
+my_register_name := $(my_register_name)$($(my_prefix)2ND_ARCH_MODULE_SUFFIX)
 endif
 endif
 # Make sure that this IS_HOST/CLASS/MODULE combination is unique.
 module_id := MODULE.$(if \
-    $(LOCAL_IS_HOST_MODULE),HOST,TARGET).$(LOCAL_MODULE_CLASS).$(my_register_name)
+    $(LOCAL_IS_HOST_MODULE),$($(my_prefix)OS),TARGET).$(LOCAL_MODULE_CLASS).$(my_register_name)
 ifdef $(module_id)
 $(error $(LOCAL_PATH): $(module_id) already defined by $($(module_id)))
 endif
 $(module_id) := $(LOCAL_PATH)
 
-intermediates := $(call local-intermediates-dir,,$(LOCAL_2ND_ARCH_VAR_PREFIX))
+intermediates := $(call local-intermediates-dir,,$(LOCAL_2ND_ARCH_VAR_PREFIX),$(my_host_cross))
 intermediates.COMMON := $(call local-intermediates-dir,COMMON)
 generated_sources_dir := $(call local-generated-sources-dir)
 
@@ -194,297 +220,42 @@
 # Assemble the list of targets to create PRIVATE_ variables for.
 LOCAL_INTERMEDIATE_TARGETS += $(LOCAL_BUILT_MODULE)
 
-
 ###########################################################
-## AIDL: Compile .aidl files to .java
+## Create .toc files from shared objects to reduce unnecessary rebuilds
+# A .toc file lists a shared object's external dynamic symbols without their addresses.
+# Because .KATI_RESTAT is specified for .toc files and commit-change-for-toc is used,
+# binaries that depend on a .toc file are rebuilt only when the content of
+# the .toc file actually changes.
 ###########################################################
+ifndef LOCAL_IS_HOST_MODULE
+# Disable .toc optimization for host modules: we may run the host binaries during the build process
+# and the libraries' implementation matters.
+ifeq ($(LOCAL_MODULE_CLASS),SHARED_LIBRARIES)
+LOCAL_INTERMEDIATE_TARGETS += $(LOCAL_BUILT_MODULE).toc
+$(LOCAL_BUILT_MODULE).toc: $(LOCAL_BUILT_MODULE)
+	$(call $(PRIVATE_2ND_ARCH_VAR_PREFIX)$(PRIVATE_PREFIX)transform-shared-lib-to-toc,$<,$@.tmp)
+	$(call commit-change-for-toc,$@)
 
-aidl_sources := $(filter %.aidl,$(LOCAL_SRC_FILES))
-
-ifneq ($(strip $(aidl_sources)),)
-
-aidl_java_sources := $(patsubst %.aidl,%.java,$(addprefix $(intermediates.COMMON)/src/, $(aidl_sources)))
-aidl_sources := $(addprefix $(TOP_DIR)$(LOCAL_PATH)/, $(aidl_sources))
-
-aidl_preprocess_import :=
-LOCAL_SDK_VERSION:=$(strip $(LOCAL_SDK_VERSION))
-ifdef LOCAL_SDK_VERSION
-ifneq ($(filter current system_current, $(LOCAL_SDK_VERSION)$(TARGET_BUILD_APPS)),)
-  # LOCAL_SDK_VERSION is current and no TARGET_BUILD_APPS
-  aidl_preprocess_import := $(TARGET_OUT_COMMON_INTERMEDIATES)/framework.aidl
-else
-  aidl_preprocess_import := $(HISTORICAL_SDK_VERSIONS_ROOT)/$(LOCAL_SDK_VERSION)/framework.aidl
-endif # not current or system_current
-else
-# build against the platform.
-LOCAL_AIDL_INCLUDES += $(FRAMEWORKS_BASE_JAVA_SRC_DIRS)
-endif # LOCAL_SDK_VERSION
-$(aidl_java_sources): PRIVATE_AIDL_FLAGS := -b $(addprefix -p,$(aidl_preprocess_import)) -I$(LOCAL_PATH) -I$(LOCAL_PATH)/src $(addprefix -I,$(LOCAL_AIDL_INCLUDES))
-
-$(aidl_java_sources): $(intermediates.COMMON)/src/%.java: \
-        $(TOPDIR)$(LOCAL_PATH)/%.aidl \
-        $(LOCAL_MODULE_MAKEFILE) \
-        $(LOCAL_ADDITIONAL_DEPENDENCIES) \
-        $(AIDL) \
-        $(aidl_preprocess_import)
-	$(transform-aidl-to-java)
--include $(aidl_java_sources:%.java=%.P)
-
-else
-aidl_java_sources :=
+# Kati adds restat=1 to ninja. GNU make does nothing for this.
+.KATI_RESTAT: $(LOCAL_BUILT_MODULE).toc
+# Build .toc file when using mm, mma, or make $(my_register_name)
+$(my_register_name): $(LOCAL_BUILT_MODULE).toc
+endif
 endif
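For illustration, a minimal standalone sketch of the restat pattern this hunk relies on; the file names and the nm-based extraction are stand-ins for the real transform-shared-lib-to-toc and commit-change-for-toc macros, which are defined elsewhere in the build:

out/libfoo.so.toc: out/libfoo.so
	nm -gD --defined-only $< > $@.tmp                            # dump external dynamic symbols
	if cmp -s $@.tmp $@; then rm $@.tmp; else mv $@.tmp $@; fi   # only touch $@ when the list changed
# Kati turns .KATI_RESTAT into restat=1 in the ninja manifest, so an unchanged
# .toc timestamp lets ninja skip re-linking everything that depends on it.
.KATI_RESTAT: out/libfoo.so.toc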
 
 ###########################################################
-## logtags: Add .logtags files to global list, emit java source
+## logtags: Add .logtags files to global list
 ###########################################################
 
 logtags_sources := $(filter %.logtags,$(LOCAL_SRC_FILES))
 
 ifneq ($(strip $(logtags_sources)),)
-
 event_log_tags := $(addprefix $(LOCAL_PATH)/,$(logtags_sources))
-
-# Emit a java source file with constants for the tags, if
-# LOCAL_MODULE_CLASS is "APPS" or "JAVA_LIBRARIES".
-ifneq ($(filter $(LOCAL_MODULE_CLASS),APPS JAVA_LIBRARIES),)
-
-logtags_java_sources := $(patsubst %.logtags,%.java,$(addprefix $(intermediates.COMMON)/src/, $(logtags_sources)))
-logtags_sources := $(addprefix $(TOP_DIR)$(LOCAL_PATH)/, $(logtags_sources))
-
-$(logtags_java_sources): $(intermediates.COMMON)/src/%.java: $(TOPDIR)$(LOCAL_PATH)/%.logtags $(TARGET_OUT_COMMON_INTERMEDIATES)/all-event-log-tags.txt
-	$(transform-logtags-to-java)
-
-endif
-
 else
-logtags_java_sources :=
 event_log_tags :=
 endif
 
 ###########################################################
-## .proto files: Compile proto files to .java
-###########################################################
-proto_sources := $(filter %.proto,$(LOCAL_SRC_FILES))
-# Because names of the .java files compiled from .proto files are unknown until the
-# .proto files are compiled, we use a timestamp file as depedency.
-proto_java_sources_file_stamp :=
-ifneq ($(proto_sources),)
-proto_sources_fullpath := $(addprefix $(TOP_DIR)$(LOCAL_PATH)/, $(proto_sources))
-# By putting the generated java files into $(LOCAL_INTERMEDIATE_SOURCE_DIR), they will be
-# automatically found by the java compiling function transform-java-to-classes.jar.
-ifneq ($(LOCAL_INTERMEDIATE_SOURCE_DIR),)
-proto_java_intemediate_dir := $(LOCAL_INTERMEDIATE_SOURCE_DIR)/proto
-else
-# LOCAL_INTERMEDIATE_SOURCE_DIR may be not defined in non-java modules.
-proto_java_intemediate_dir := $(intermediates)/proto
-endif
-proto_java_sources_file_stamp := $(proto_java_intemediate_dir)/Proto.stamp
-proto_java_sources_dir := $(proto_java_intemediate_dir)/src
-
-$(proto_java_sources_file_stamp): PRIVATE_PROTO_INCLUDES := $(TOP)
-$(proto_java_sources_file_stamp): PRIVATE_PROTO_SRC_FILES := $(proto_sources_fullpath)
-$(proto_java_sources_file_stamp): PRIVATE_PROTO_JAVA_OUTPUT_DIR := $(proto_java_sources_dir)
-ifeq ($(LOCAL_PROTOC_OPTIMIZE_TYPE),micro)
-$(proto_java_sources_file_stamp): PRIVATE_PROTO_JAVA_OUTPUT_OPTION := --javamicro_out
-else
-  ifeq ($(LOCAL_PROTOC_OPTIMIZE_TYPE),nano)
-$(proto_java_sources_file_stamp): PRIVATE_PROTO_JAVA_OUTPUT_OPTION := --javanano_out
-  else
-$(proto_java_sources_file_stamp): PRIVATE_PROTO_JAVA_OUTPUT_OPTION := --java_out
-  endif
-endif
-$(proto_java_sources_file_stamp): PRIVATE_PROTOC_FLAGS := $(LOCAL_PROTOC_FLAGS)
-$(proto_java_sources_file_stamp): PRIVATE_PROTO_JAVA_OUTPUT_PARAMS := $(LOCAL_PROTO_JAVA_OUTPUT_PARAMS)
-$(proto_java_sources_file_stamp) : $(proto_sources_fullpath) $(PROTOC)
-	$(call transform-proto-to-java)
-
-#TODO: protoc should output the dependencies introduced by imports.
-
-LOCAL_INTERMEDIATE_TARGETS += $(proto_java_sources_file_stamp)
-endif # proto_sources
-
-
-###########################################################
-## Java: Compile .java files to .class
-###########################################################
-#TODO: pull this into java.make once host and target are combined
-
-java_sources := $(addprefix $(TOP_DIR)$(LOCAL_PATH)/, $(filter %.java,$(LOCAL_SRC_FILES))) $(aidl_java_sources) $(logtags_java_sources) \
-                $(filter %.java,$(LOCAL_GENERATED_SOURCES))
-all_java_sources := $(java_sources) $(addprefix $($(my_prefix)OUT_COMMON_INTERMEDIATES)/, $(filter %.java,$(LOCAL_INTERMEDIATE_SOURCES)))
-
-## Java resources #########################################
-
-# Look for resource files in any specified directories.
-# Non-java and non-doc files will be picked up as resources
-# and included in the output jar file.
-java_resource_file_groups :=
-
-LOCAL_JAVA_RESOURCE_DIRS := $(strip $(LOCAL_JAVA_RESOURCE_DIRS))
-ifneq ($(LOCAL_JAVA_RESOURCE_DIRS),)
-  # This makes a list of words like
-  #     <dir1>::<file1>:<file2> <dir2>::<file1> <dir3>:
-  # where each of the files is relative to the directory it's grouped with.
-  # Directories that don't contain any resource files will result in groups
-  # that end with a colon, and they are stripped out in the next step.
-  java_resource_file_groups += \
-    $(foreach dir,$(LOCAL_JAVA_RESOURCE_DIRS), \
-	$(subst $(space),:,$(strip \
-		$(TOP_DIR)$(LOCAL_PATH)/$(dir): \
-	    $(patsubst ./%,%,$(shell cd $(TOP_DIR)$(LOCAL_PATH)/$(dir) && \
-		find . \
-		    -type d -a -name ".svn" -prune -o \
-		    -type f \
-			-a \! -name "*.java" \
-			-a \! -name "package.html" \
-			-a \! -name "overview.html" \
-			-a \! -name ".*.swp" \
-			-a \! -name ".DS_Store" \
-			-a \! -name "*~" \
-			-print \
-		    )) \
-	)) \
-    )
-  java_resource_file_groups := $(filter-out %:,$(java_resource_file_groups))
-endif # LOCAL_JAVA_RESOURCE_DIRS
-
-LOCAL_JAVA_RESOURCE_FILES := $(strip $(LOCAL_JAVA_RESOURCE_FILES))
-ifneq ($(LOCAL_JAVA_RESOURCE_FILES),)
-  java_resource_file_groups += \
-    $(foreach f,$(LOCAL_JAVA_RESOURCE_FILES), \
-	$(patsubst %/,%,$(dir $(f)))::$(notdir $(f)) \
-     )
-endif # LOCAL_JAVA_RESOURCE_FILES
-
-ifdef java_resource_file_groups
-  # The full paths to all resources, used for dependencies.
-  java_resource_sources := \
-    $(foreach group,$(java_resource_file_groups), \
-	$(addprefix $(word 1,$(subst :,$(space),$(group)))/, \
-	    $(wordlist 2,9999,$(subst :,$(space),$(group))) \
-	) \
-    )
-  # The arguments to jar that will include these files in a jar file.
-  # Quote the file name to handle special characters (such as #) correctly.
-  extra_jar_args := \
-    $(foreach group,$(java_resource_file_groups), \
-	$(addprefix -C "$(word 1,$(subst :,$(space),$(group)))" , \
-	    $(foreach w, $(wordlist 2,9999,$(subst :,$(space),$(group))), "$(w)" ) \
-	) \
-    )
-  java_resource_file_groups :=
-else
-  java_resource_sources :=
-  extra_jar_args :=
-endif # java_resource_file_groups
-
-## PRIVATE java vars ######################################
-# LOCAL_SOURCE_FILES_ALL_GENERATED is set only if the module does not have static source files,
-# but generated source files in its LOCAL_INTERMEDIATE_SOURCE_DIR.
-# You have to set up the dependency in some other way.
-need_compile_java := $(strip $(all_java_sources)$(all_res_assets)$(java_resource_sources))$(LOCAL_STATIC_JAVA_LIBRARIES)$(filter true,$(LOCAL_SOURCE_FILES_ALL_GENERATED))
-ifdef need_compile_java
-
-full_static_java_libs := \
-    $(foreach lib,$(LOCAL_STATIC_JAVA_LIBRARIES), \
-      $(call intermediates-dir-for, \
-        JAVA_LIBRARIES,$(lib),$(LOCAL_IS_HOST_MODULE),COMMON)/javalib.jar)
-
-$(LOCAL_INTERMEDIATE_TARGETS): PRIVATE_INSTALL_DIR := $(dir $(LOCAL_INSTALLED_MODULE))
-$(LOCAL_INTERMEDIATE_TARGETS): PRIVATE_CLASS_INTERMEDIATES_DIR := $(intermediates)/classes
-$(LOCAL_INTERMEDIATE_TARGETS): PRIVATE_SOURCE_INTERMEDIATES_DIR := $(intermediates)/src
-$(LOCAL_INTERMEDIATE_TARGETS): PRIVATE_JAVA_SOURCES := $(all_java_sources)
-$(LOCAL_INTERMEDIATE_TARGETS): PRIVATE_JAVA_OBJECTS := $(patsubst %.java,%.class,$(LOCAL_SRC_FILES))
-ifeq ($(my_prefix),TARGET_)
-ifeq ($(LOCAL_SDK_VERSION),)
-$(LOCAL_INTERMEDIATE_TARGETS): PRIVATE_BOOTCLASSPATH := -bootclasspath $(call java-lib-files,core-libart)
-else
-ifeq ($(LOCAL_SDK_VERSION)$(TARGET_BUILD_APPS),current)
-# LOCAL_SDK_VERSION is current and no TARGET_BUILD_APPS.
-$(LOCAL_INTERMEDIATE_TARGETS): PRIVATE_BOOTCLASSPATH := -bootclasspath $(call java-lib-files,android_stubs_current)
-else ifeq ($(LOCAL_SDK_VERSION)$(TARGET_BUILD_APPS),system_current)
-$(LOCAL_INTERMEDIATE_TARGETS): PRIVATE_BOOTCLASSPATH := -bootclasspath $(call java-lib-files,android_system_stubs_current)
-else
-$(LOCAL_INTERMEDIATE_TARGETS): PRIVATE_BOOTCLASSPATH := -bootclasspath $(call java-lib-files,sdk_v$(LOCAL_SDK_VERSION))
-endif # current or system_current
-endif # LOCAL_SDK_VERSION
-endif # TARGET_
-$(LOCAL_INTERMEDIATE_TARGETS): PRIVATE_RESOURCE_DIR := $(LOCAL_RESOURCE_DIR)
-$(LOCAL_INTERMEDIATE_TARGETS): PRIVATE_ASSET_DIR := $(LOCAL_ASSET_DIR)
-$(LOCAL_INTERMEDIATE_TARGETS): PRIVATE_STATIC_JAVA_LIBRARIES := $(full_static_java_libs)
-
-# full_java_libs: The list of files that should be used as the classpath.
-#                 Using this list as a dependency list WILL NOT WORK.
-# full_java_lib_deps: Should be specified as a prerequisite of this module
-#                 to guarantee that the files in full_java_libs will
-#                 be up-to-date.
-ifdef LOCAL_IS_HOST_MODULE
-ifeq ($(USE_CORE_LIB_BOOTCLASSPATH),true)
-$(LOCAL_INTERMEDIATE_TARGETS): PRIVATE_BOOTCLASSPATH := -bootclasspath $(call java-lib-files,core-libart-hostdex,$(LOCAL_IS_HOST_MODULE))
-
-full_shared_java_libs := $(call java-lib-files,$(LOCAL_JAVA_LIBRARIES),$(LOCAL_IS_HOST_MODULE))
-full_java_lib_deps := $(call java-lib-deps,$(LOCAL_JAVA_LIBRARIES),$(LOCAL_IS_HOST_MODULE)) \
-    $(full_shared_java_libs)
-else
-$(LOCAL_INTERMEDIATE_TARGETS): PRIVATE_BOOTCLASSPATH :=
-
-full_shared_java_libs := $(addprefix $(HOST_OUT_JAVA_LIBRARIES)/,\
-    $(addsuffix $(COMMON_JAVA_PACKAGE_SUFFIX),$(LOCAL_JAVA_LIBRARIES)))
-full_java_lib_deps := $(full_shared_java_libs)
-endif # USE_CORE_LIB_BOOTCLASSPATH
-else # !LOCAL_IS_HOST_MODULE
-full_shared_java_libs := $(call java-lib-files,$(LOCAL_JAVA_LIBRARIES),$(LOCAL_IS_HOST_MODULE))
-full_java_lib_deps := $(call java-lib-deps,$(LOCAL_JAVA_LIBRARIES),$(LOCAL_IS_HOST_MODULE))
-endif # !LOCAL_IS_HOST_MODULE
-full_java_libs := $(full_shared_java_libs) $(full_static_java_libs) $(LOCAL_CLASSPATH)
-full_java_lib_deps += $(full_static_java_libs) $(LOCAL_CLASSPATH)
-
-# This is set by packages that are linking to other packages that export
-# shared libraries, allowing them to make use of the code in the linked apk.
-apk_libraries := $(sort $(LOCAL_APK_LIBRARIES) $(LOCAL_RES_LIBRARIES))
-ifneq ($(apk_libraries),)
-  link_apk_libraries := \
-      $(foreach lib,$(apk_libraries), \
-        $(call intermediates-dir-for, \
-              APPS,$(lib),,COMMON)/classes.jar)
-
-  # link against the jar with full original names (before proguard processing).
-  full_shared_java_libs += $(link_apk_libraries)
-  full_java_libs += $(link_apk_libraries)
-  full_java_lib_deps += $(link_apk_libraries)
-endif
-
-# This is set by packages that contain instrumentation, allowing them to
-# link against the package they are instrumenting.  Currently only one such
-# package is allowed.
-LOCAL_INSTRUMENTATION_FOR := $(strip $(LOCAL_INSTRUMENTATION_FOR))
-ifdef LOCAL_INSTRUMENTATION_FOR
-  ifneq ($(words $(LOCAL_INSTRUMENTATION_FOR)),1)
-    $(error \
-        $(LOCAL_PATH): Multiple LOCAL_INSTRUMENTATION_FOR members defined)
-  endif
-
-  link_instr_intermediates_dir.COMMON := $(call intermediates-dir-for, \
-      APPS,$(LOCAL_INSTRUMENTATION_FOR),,COMMON)
-  # link against the jar with full original names (before proguard processing).
-  link_instr_classes_jar := $(link_instr_intermediates_dir.COMMON)/classes.jar
-  full_java_libs += $(link_instr_classes_jar)
-  full_java_lib_deps += $(link_instr_classes_jar)
-endif
-
-endif  # need_compile_java
-
-# We may want to add jar manifest or jar resource files even if there is no java code at all.
-$(LOCAL_INTERMEDIATE_TARGETS): PRIVATE_EXTRA_JAR_ARGS := $(extra_jar_args)
-jar_manifest_file :=
-ifneq ($(strip $(LOCAL_JAR_MANIFEST)),)
-jar_manifest_file := $(LOCAL_PATH)/$(LOCAL_JAR_MANIFEST)
-$(LOCAL_INTERMEDIATE_TARGETS): PRIVATE_JAR_MANIFEST := $(jar_manifest_file)
-else
-$(LOCAL_INTERMEDIATE_TARGETS): PRIVATE_JAR_MANIFEST :=
-endif
-
-###########################################################
 ## make clean- targets
 ###########################################################
 cleantarget := clean-$(my_register_name)
@@ -500,35 +271,12 @@
 ###########################################################
 ## Common definitions for module.
 ###########################################################
-
-# aapt doesn't accept multiple --extra-packages flags.
-# We have to collapse them into a single --extra-packages flag here.
-LOCAL_AAPT_FLAGS := $(strip $(LOCAL_AAPT_FLAGS))
-ifdef LOCAL_AAPT_FLAGS
-ifeq ($(filter 0 1,$(words $(filter --extra-packages,$(LOCAL_AAPT_FLAGS)))),)
-aapt_flags := $(subst --extra-packages$(space),--extra-packages@,$(LOCAL_AAPT_FLAGS))
-aapt_flags_extra_packages := $(patsubst --extra-packages@%,%,$(filter --extra-packages@%,$(aapt_flags)))
-aapt_flags_extra_packages := $(sort $(subst :,$(space),$(aapt_flags_extra_packages)))
-LOCAL_AAPT_FLAGS := $(filter-out --extra-packages@%,$(aapt_flags)) \
-    --extra-packages $(subst $(space),:,$(aapt_flags_extra_packages))
-aapt_flags_extra_packages :=
-aapt_flags :=
-endif
-endif
-
-# Propagate local configuration options to this target.
 $(LOCAL_INTERMEDIATE_TARGETS) : PRIVATE_PATH:=$(LOCAL_PATH)
-$(LOCAL_INTERMEDIATE_TARGETS) : PRIVATE_AAPT_FLAGS:= $(LOCAL_AAPT_FLAGS) $(PRODUCT_AAPT_FLAGS)
-$(LOCAL_INTERMEDIATE_TARGETS) : PRIVATE_JAVA_LIBRARIES:= $(LOCAL_JAVA_LIBRARIES)
-$(LOCAL_INTERMEDIATE_TARGETS) : PRIVATE_MANIFEST_PACKAGE_NAME:= $(LOCAL_MANIFEST_PACKAGE_NAME)
-$(LOCAL_INTERMEDIATE_TARGETS) : PRIVATE_MANIFEST_INSTRUMENTATION_FOR:= $(LOCAL_MANIFEST_INSTRUMENTATION_FOR)
-
-$(LOCAL_INTERMEDIATE_TARGETS) : PRIVATE_ALL_JAVA_LIBRARIES:= $(full_java_libs)
 $(LOCAL_INTERMEDIATE_TARGETS) : PRIVATE_IS_HOST_MODULE := $(LOCAL_IS_HOST_MODULE)
 $(LOCAL_INTERMEDIATE_TARGETS) : PRIVATE_HOST:= $(my_host)
+$(LOCAL_INTERMEDIATE_TARGETS) : PRIVATE_PREFIX := $(my_prefix)
 
 $(LOCAL_INTERMEDIATE_TARGETS) : PRIVATE_INTERMEDIATES_DIR:= $(intermediates)
-
 $(LOCAL_INTERMEDIATE_TARGETS) : PRIVATE_2ND_ARCH_VAR_PREFIX := $(LOCAL_2ND_ARCH_VAR_PREFIX)
 
 # Tell the module and all of its sub-modules who it is.
@@ -540,6 +288,15 @@
 .PHONY: $(my_register_name)
 $(my_register_name): $(LOCAL_BUILT_MODULE) $(LOCAL_INSTALLED_MODULE)
 
+# Set up phony targets that cover all modules under the given paths.
+# This allows us to build everything in given paths by running mmma/mma.
+my_path_components := $(subst /,$(space),$(LOCAL_PATH))
+my_path_prefix := MODULES-IN
+$(foreach c, $(my_path_components),\
+  $(eval my_path_prefix := $(my_path_prefix)-$(c))\
+  $(eval .PHONY : $(my_path_prefix))\
+  $(eval $(my_path_prefix) : $(my_register_name)))
+
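For a hypothetical module libfoo registered from LOCAL_PATH := frameworks/base/core, the loop above expands to the equivalent of:

.PHONY: MODULES-IN-frameworks MODULES-IN-frameworks-base MODULES-IN-frameworks-base-core
MODULES-IN-frameworks: libfoo
MODULES-IN-frameworks-base: libfoo
MODULES-IN-frameworks-base-core: libfoo

so, per the comment above, mma/mmma can build everything registered under a directory by invoking the matching MODULES-IN-* goal.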
 ###########################################################
 ## Module installation rule
 ###########################################################
@@ -549,7 +306,7 @@
   LOCAL_ACP_UNAVAILABLE := $(strip $(HOST_ACP_UNAVAILABLE))
 endif
 
-ifndef LOCAL_UNINSTALLABLE_MODULE
+ifneq (true,$(LOCAL_UNINSTALLABLE_MODULE))
   # Define a copy rule to install the module.
   # acp and libraries that it uses can't use acp for
   # installation;  hence, LOCAL_ACP_UNAVAILABLE.
@@ -565,8 +322,25 @@
 	$(copy-file-to-target-with-cp)
 endif
 
-endif # !LOCAL_UNINSTALLABLE_MODULE
+# Rule to install the module's companion init.rc.
+my_init_rc := $(LOCAL_INIT_RC_$(my_32_64_bit_suffix))
+my_init_rc_src :=
+my_init_rc_installed :=
+ifndef my_init_rc
+my_init_rc := $(LOCAL_INIT_RC)
+# Make sure we don't define the rule twice in a multilib module.
+LOCAL_INIT_RC :=
+endif
+ifdef my_init_rc
+my_init_rc_src := $(LOCAL_PATH)/$(my_init_rc)
+my_init_rc_installed := $(TARGET_OUT$(partition_tag)_ETC)/init/$(notdir $(my_init_rc_src))
+$(my_init_rc_installed) : $(my_init_rc_src) | $(ACP)
+	@echo "Install: $@"
+	$(copy-file-to-new-target)
 
+$(my_register_name) : $(my_init_rc_installed)
+endif # my_init_rc
+endif # !LOCAL_UNINSTALLABLE_MODULE
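A hypothetical module exercising the new LOCAL_INIT_RC hook; its foo.rc would be copied to the init/ subdirectory of the module's ETC location and attached to the module's phony goal, as the rule above describes:

include $(CLEAR_VARS)
LOCAL_MODULE := foo
LOCAL_SRC_FILES := foo.cpp
LOCAL_INIT_RC := foo.rc
include $(BUILD_EXECUTABLE)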
 
 ###########################################################
 ## CHECK_BUILD goals
@@ -576,8 +350,6 @@
 # checked modules, use LOCAL_BUILT_MODULE.
 ifdef LOCAL_CHECKED_MODULE
   my_checked_module := $(LOCAL_CHECKED_MODULE)
-else ifdef java_alternative_checked_module
-  my_checked_module := $(java_alternative_checked_module)
 else
   my_checked_module := $(LOCAL_BUILT_MODULE)
 endif
@@ -596,6 +368,50 @@
 endif
 
 ###########################################################
+## Compatibility suite files.
+###########################################################
+ifdef LOCAL_COMPATIBILITY_SUITE
+ifneq ($(words $(LOCAL_COMPATIBILITY_SUITE)),1)
+$(error $(LOCAL_PATH):$(LOCAL_MODULE) LOCAL_COMPATIBILITY_SUITE must be exactly one suite name)
+endif
+
+# The module itself.
+my_compat_dist := \
+  $(LOCAL_BUILT_MODULE):$(COMPATIBILITY_TESTCASES_OUT_$(LOCAL_COMPATIBILITY_SUITE))/$(my_installed_module_stem)
+
+# Make sure we only add the files once for multilib modules.
+ifndef $(my_prefix)$(LOCAL_MODULE_CLASS)_$(LOCAL_MODULE)_compat_files
+$(my_prefix)$(LOCAL_MODULE_CLASS)_$(LOCAL_MODULE)_compat_files := true
+
+# LOCAL_COMPATIBILITY_SUPPORT_FILES is a list of <src>[:<dest>].
+my_compat_dist += $(foreach f, $(LOCAL_COMPATIBILITY_SUPPORT_FILES),\
+  $(eval p := $(subst :,$(space),$(f)))\
+  $(eval s := $(word 1,$(p)))\
+  $(eval d := $(COMPATIBILITY_TESTCASES_OUT_$(LOCAL_COMPATIBILITY_SUITE))/$(or $(word 2,$(p)),$(notdir $(word 1,$(p)))))\
+  $(s):$(d))
+
+ifneq (,$(wildcard $(LOCAL_PATH)/AndroidTest.xml))
+my_compat_dist += \
+  $(LOCAL_PATH)/AndroidTest.xml:$(COMPATIBILITY_TESTCASES_OUT_$(LOCAL_COMPATIBILITY_SUITE))/$(LOCAL_MODULE).config
+endif
+
+ifneq (,$(wildcard $(LOCAL_PATH)/DynamicConfig.xml))
+my_compat_dist += \
+  $(LOCAL_PATH)/DynamicConfig.xml:$(COMPATIBILITY_TESTCASES_OUT_$(LOCAL_COMPATIBILITY_SUITE))/$(LOCAL_MODULE).dynamic
+endif
+endif # $(my_prefix)$(LOCAL_MODULE_CLASS)_$(LOCAL_MODULE)_compat_files
+
+my_compat_files := $(call copy-many-files, $(my_compat_dist))
+
+COMPATIBILITY.$(LOCAL_COMPATIBILITY_SUITE).FILES := \
+  $(COMPATIBILITY.$(LOCAL_COMPATIBILITY_SUITE).FILES) \
+  $(my_compat_files)
+
+# Copy over the compatibility files when user runs mm/mmm.
+$(my_register_name) : $(my_compat_files)
+endif  # LOCAL_COMPATIBILITY_SUITE
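A hypothetical test module showing the inputs this block consumes; the suite and file names are placeholders, and <dest> defaults to the basename of <src> when omitted:

include $(CLEAR_VARS)
LOCAL_MODULE := FooDeviceTests
LOCAL_COMPATIBILITY_SUITE := cts
LOCAL_COMPATIBILITY_SUPPORT_FILES := data/foo_data.txt assets/bar.bin:bar.bin
include $(BUILD_PACKAGE)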
+
+###########################################################
 ## Register with ALL_MODULES
 ###########################################################
 
@@ -615,9 +431,12 @@
     $(ALL_MODULES.$(my_register_name).BUILT) $(LOCAL_BUILT_MODULE)
 ifneq (true,$(LOCAL_UNINSTALLABLE_MODULE))
 ALL_MODULES.$(my_register_name).INSTALLED := \
-    $(strip $(ALL_MODULES.$(my_register_name).INSTALLED) $(LOCAL_INSTALLED_MODULE))
+    $(strip $(ALL_MODULES.$(my_register_name).INSTALLED) \
+    $(LOCAL_INSTALLED_MODULE) $(my_init_rc_installed))
 ALL_MODULES.$(my_register_name).BUILT_INSTALLED := \
-    $(strip $(ALL_MODULES.$(my_register_name).BUILT_INSTALLED) $(LOCAL_BUILT_MODULE):$(LOCAL_INSTALLED_MODULE))
+    $(strip $(ALL_MODULES.$(my_register_name).BUILT_INSTALLED) \
+    $(LOCAL_BUILT_MODULE):$(LOCAL_INSTALLED_MODULE) \
+    $(addprefix $(my_init_rc_src):,$(my_init_rc_installed)))
 endif
 ifdef LOCAL_PICKUP_FILES
 # Files or directories ready to pick up by the build system
@@ -625,13 +444,15 @@
 ALL_MODULES.$(my_register_name).PICKUP_FILES := \
     $(ALL_MODULES.$(my_register_name).PICKUP_FILES) $(LOCAL_PICKUP_FILES)
 endif
+my_required_modules := $(LOCAL_REQUIRED_MODULES) \
+    $(LOCAL_REQUIRED_MODULES_$(TARGET_$(LOCAL_2ND_ARCH_VAR_PREFIX)ARCH))
+ifdef LOCAL_IS_HOST_MODULE
+my_required_modules += $(LOCAL_REQUIRED_MODULES_$($(my_prefix)OS))
+endif
 ALL_MODULES.$(my_register_name).REQUIRED := \
-    $(strip $(ALL_MODULES.$(my_register_name).REQUIRED) $(LOCAL_REQUIRED_MODULES) \
-      $(LOCAL_REQUIRED_MODULES_$(TARGET_$(LOCAL_2ND_ARCH_VAR_PREFIX)ARCH)))
+    $(strip $(ALL_MODULES.$(my_register_name).REQUIRED) $(my_required_modules))
 ALL_MODULES.$(my_register_name).EVENT_LOG_TAGS := \
     $(ALL_MODULES.$(my_register_name).EVENT_LOG_TAGS) $(event_log_tags)
-ALL_MODULES.$(my_register_name).INTERMEDIATE_SOURCE_DIR := \
-    $(ALL_MODULES.$(my_register_name).INTERMEDIATE_SOURCE_DIR) $(LOCAL_INTERMEDIATE_SOURCE_DIR)
 ALL_MODULES.$(my_register_name).MAKEFILE := \
     $(ALL_MODULES.$(my_register_name).MAKEFILE) $(LOCAL_MODULE_MAKEFILE)
 ifdef LOCAL_MODULE_OWNER
@@ -641,9 +462,7 @@
 ifdef LOCAL_2ND_ARCH_VAR_PREFIX
 ALL_MODULES.$(my_register_name).FOR_2ND_ARCH := true
 endif
-ifdef aidl_sources
-ALL_MODULES.$(my_register_name).AIDL_FILES := $(aidl_sources)
-endif
+ALL_MODULES.$(my_register_name).FOR_HOST_CROSS := $(my_host_cross)
 
 INSTALLABLE_FILES.$(LOCAL_INSTALLED_MODULE).MODULE := $(my_register_name)
 
@@ -698,89 +517,7 @@
 endif
 
 ###########################################################
-# JACK
-###########################################################
-ifdef LOCAL_JACK_ENABLED
-ifdef need_compile_java
-
-full_static_jack_libs := \
-    $(foreach lib,$(LOCAL_STATIC_JAVA_LIBRARIES), \
-      $(call intermediates-dir-for, \
-        JAVA_LIBRARIES,$(lib),$(LOCAL_IS_HOST_MODULE),COMMON)/classes.jack)
-
-ifeq ($(my_prefix),TARGET_)
-ifeq ($(LOCAL_SDK_VERSION),)
-$(LOCAL_INTERMEDIATE_TARGETS): PRIVATE_BOOTCLASSPATH_JAVA_LIBRARIES := $(call jack-lib-files,core-libart)
-else
-ifeq ($(LOCAL_SDK_VERSION)$(TARGET_BUILD_APPS),current)
-# LOCAL_SDK_VERSION is current and no TARGET_BUILD_APPS.
-$(LOCAL_INTERMEDIATE_TARGETS): PRIVATE_BOOTCLASSPATH_JAVA_LIBRARIES := $(call jack-lib-files,android_stubs_current)
-else ifeq ($(LOCAL_SDK_VERSION)$(TARGET_BUILD_APPS),system_current)
-$(LOCAL_INTERMEDIATE_TARGETS): PRIVATE_BOOTCLASSPATH_JAVA_LIBRARIES := $(call jack-lib-files,android_system_stubs_current)
-else
-$(LOCAL_INTERMEDIATE_TARGETS): PRIVATE_BOOTCLASSPATH_JAVA_LIBRARIES := $(call jack-lib-files,sdk_v$(LOCAL_SDK_VERSION))
-endif # current or system_current
-endif # LOCAL_SDK_VERSION
-endif # TARGET_
-
-$(LOCAL_INTERMEDIATE_TARGETS): PRIVATE_STATIC_JACK_LIBRARIES := $(full_static_jack_libs)
-$(LOCAL_INTERMEDIATE_TARGETS): PRIVATE_JACK_VM_ARGS := $(LOCAL_JACK_VM_ARGS)
-$(LOCAL_INTERMEDIATE_TARGETS): PRIVATE_JACK_EXTRA_ARGS := $(LOCAL_JACK_EXTRA_ARGS)
-
-ifdef LOCAL_IS_HOST_MODULE
-ifeq ($(USE_CORE_LIB_BOOTCLASSPATH),true)
-$(LOCAL_INTERMEDIATE_TARGETS): PRIVATE_BOOTCLASSPATH_JAVA_LIBRARIES := $(call jack-lib-files,core-libart-hostdex,$(LOCAL_IS_HOST_MODULE))
-full_shared_jack_libs := $(call jack-lib-files,$(LOCAL_JAVA_LIBRARIES),$(LOCAL_IS_HOST_MODULE))
-full_jack_lib_deps := $(call jack-lib-deps,$(LOCAL_JAVA_LIBRARIES),$(LOCAL_IS_HOST_MODULE))
-else
-$(LOCAL_INTERMEDIATE_TARGETS): PRIVATE_BOOTCLASSPATH_JAVA_LIBRARIES :=
-full_shared_jack_libs := $(call jack-lib-deps,$(LOCAL_JAVA_LIBRARIES),$(LOCAL_IS_HOST_MODULE))
-full_jack_lib_deps := $(full_shared_jack_libs)
-endif # USE_CORE_LIB_BOOTCLASSPATH
-else # !LOCAL_IS_HOST_MODULE
-full_shared_jack_libs := $(call jack-lib-files,$(LOCAL_JAVA_LIBRARIES),$(LOCAL_IS_HOST_MODULE))
-full_jack_lib_deps := $(call jack-lib-deps,$(LOCAL_JAVA_LIBRARIES),$(LOCAL_IS_HOST_MODULE))
-endif # !LOCAL_IS_HOST_MODULE
-full_jack_libs := $(full_shared_jack_libs) $(full_static_jack_libs) $(LOCAL_JACK_CLASSPATH)
-full_jack_lib_deps += $(full_static_jack_libs) $(LOCAL_JACK_CLASSPATH)
-
-# This is set by packages that are linking to other packages that export
-# shared libraries, allowing them to make use of the code in the linked apk.
-ifneq ($(apk_libraries),)
-  link_apk_jack_libraries := \
-      $(foreach lib,$(apk_libraries), \
-        $(call intermediates-dir-for, \
-              APPS,$(lib),,COMMON)/classes.jack)
-
-  # link against the jar with full original names (before proguard processing).
-  full_shared_jack_libs += $(link_apk_jack_libraries)
-  full_jack_libs += $(link_apk_jack_libraries)
-  full_jack_lib_deps += $(link_apk_jack_libraries)
-endif
-
-# This is set by packages that contain instrumentation, allowing them to
-# link against the package they are instrumenting.  Currently only one such
-# package is allowed.
-ifdef LOCAL_INSTRUMENTATION_FOR
-
-   # link against the jar with full original names (before proguard processing).
-   link_instr_classes_jack := $(link_instr_intermediates_dir.COMMON)/classes.noshrob.jack
-   full_jack_libs += $(link_instr_classes_jack)
-   full_jack_lib_deps += $(link_instr_classes_jack)
-endif
-
-endif  # need_compile_java
-
-# Propagate local configuration options to this target.
-$(LOCAL_INTERMEDIATE_TARGETS) : PRIVATE_ALL_JACK_LIBRARIES:= $(full_jack_libs)
-$(LOCAL_INTERMEDIATE_TARGETS) : PRIVATE_JARJAR_RULES := $(LOCAL_JARJAR_RULES)
-
-endif # LOCAL_JACK_ENABLED
-
-###########################################################
 ## NOTICE files
 ###########################################################
 
 include $(BUILD_NOTICE_FILE)
-
-#:vi noexpandtab
diff --git a/core/binary.mk b/core/binary.mk
index b8003d7..918a28d 100644
--- a/core/binary.mk
+++ b/core/binary.mk
@@ -24,7 +24,7 @@
   endif
 else
   ifeq ($(LOCAL_SYSTEM_SHARED_LIBRARIES),none)
-      my_system_shared_libraries := $($(LOCAL_2ND_ARCH_VAR_PREFIX)TARGET_DEFAULT_SYSTEM_SHARED_LIBRARIES)
+      my_system_shared_libraries := libc libm
   else
       my_system_shared_libraries := $(LOCAL_SYSTEM_SHARED_LIBRARIES)
   endif
@@ -34,12 +34,15 @@
 # Because the same LOCAL_ variables may be used to define modules for both 1st arch and 2nd arch,
 # we can't modify them in place.
 my_src_files := $(LOCAL_SRC_FILES)
+my_src_files_exclude := $(LOCAL_SRC_FILES_EXCLUDE)
 my_static_libraries := $(LOCAL_STATIC_LIBRARIES)
 my_whole_static_libraries := $(LOCAL_WHOLE_STATIC_LIBRARIES)
 my_shared_libraries := $(LOCAL_SHARED_LIBRARIES)
 my_cflags := $(LOCAL_CFLAGS)
 my_conlyflags := $(LOCAL_CONLYFLAGS)
 my_cppflags := $(LOCAL_CPPFLAGS)
+my_cflags_no_override := $(GLOBAL_CFLAGS_NO_OVERRIDE)
+my_cppflags_no_override := $(GLOBAL_CPPFLAGS_NO_OVERRIDE)
 my_ldflags := $(LOCAL_LDFLAGS)
 my_ldlibs := $(LOCAL_LDLIBS)
 my_asflags := $(LOCAL_ASFLAGS)
@@ -50,7 +53,7 @@
 my_c_includes := $(LOCAL_C_INCLUDES)
 my_generated_sources := $(LOCAL_GENERATED_SOURCES)
 my_native_coverage := $(LOCAL_NATIVE_COVERAGE)
-my_additional_dependencies := $(LOCAL_MODULE_MAKEFILE) $(LOCAL_ADDITIONAL_DEPENDENCIES)
+my_additional_dependencies := $(LOCAL_MODULE_MAKEFILE_DEP) $(LOCAL_ADDITIONAL_DEPENDENCIES)
 my_export_c_include_dirs := $(LOCAL_EXPORT_C_INCLUDE_DIRS)
 
 ifdef LOCAL_IS_HOST_MODULE
@@ -72,7 +75,15 @@
   my_ndk_source_root := $(HISTORICAL_NDK_VERSIONS_ROOT)/current/sources
   my_ndk_sysroot := $(HISTORICAL_NDK_VERSIONS_ROOT)/current/platforms/android-$(LOCAL_SDK_VERSION)/arch-$(TARGET_$(LOCAL_2ND_ARCH_VAR_PREFIX)ARCH)
   my_ndk_sysroot_include := $(my_ndk_sysroot)/usr/include
-  ifeq (x86_64,$(TARGET_$(LOCAL_2ND_ARCH_VAR_PREFIX)ARCH))
+
+  # x86_64 and mips64 are both multilib toolchains, so their libraries are
+  # installed in /usr/lib64. Aarch64, on the other hand, is not a multilib
+  # compiler, so its libraries are in /usr/lib.
+  #
+  # Mips32r6 is yet another variation, with libraries installed in libr6.
+  #
+  # For the rest, the libraries are installed simply to /usr/lib.
+  ifneq (,$(filter x86_64 mips64,$(TARGET_$(LOCAL_2ND_ARCH_VAR_PREFIX)ARCH)))
     my_ndk_sysroot_lib := $(my_ndk_sysroot)/usr/lib64
   else ifeq (mips32r6,$(TARGET_$(LOCAL_2ND_ARCH_VAR_PREFIX)ARCH_VARIANT))
     my_ndk_sysroot_lib := $(my_ndk_sysroot)/usr/libr6
@@ -82,11 +93,16 @@
 
   # The bionic linker now has support for packed relocations and gnu style
   # hashes (which are much faster!), but shipping to older devices requires
-  # the old style hash and disabling packed relocations.
-  #ifeq ($(shell expr $(LOCAL_SDK_VERSION) >= FIRST_SUPPORTED_VERSION),0)
-    my_ldflags += -Wl,--hash-style=sysv
-    LOCAL_PACK_MODULE_RELOCATIONS := false
-  #endif
+  # the old style hash. Fortunately, we can build with both and it'll work
+  # anywhere.
+  #
+  # This is not currently supported on MIPS architectures.
+  ifeq (,$(filter mips mips64,$(TARGET_$(LOCAL_2ND_ARCH_VAR_PREFIX)ARCH)))
+    my_ldflags += -Wl,--hash-style=both
+  endif
+
+  # We don't want to expose the relocation packer to the NDK just yet.
+  LOCAL_PACK_MODULE_RELOCATIONS := false
 
   # Set up the NDK stl variant. Starting from NDK-r5 the c++ stl resides in a separate location.
   # See ndk/docs/CPLUSPLUS-SUPPORT.html
@@ -103,15 +119,16 @@
   ifeq (,$(LOCAL_NDK_STL_VARIANT))
     LOCAL_NDK_STL_VARIANT := system
   endif
-  ifneq (1,$(words $(filter system stlport_static stlport_shared c++_static c++_shared gnustl_static, $(LOCAL_NDK_STL_VARIANT))))
+  ifneq (1,$(words $(filter none system stlport_static stlport_shared c++_static c++_shared gnustl_static, $(LOCAL_NDK_STL_VARIANT))))
     $(error $(LOCAL_PATH): Unknown LOCAL_NDK_STL_VARIANT $(LOCAL_NDK_STL_VARIANT))
   endif
   ifeq (system,$(LOCAL_NDK_STL_VARIANT))
     my_ndk_stl_include_path := $(my_ndk_source_root)/cxx-stl/system/include
-    # for "system" variant, the shared library exists in the system library and -lstdc++ is added by default.
+    my_system_shared_libraries += libstdc++
   else # LOCAL_NDK_STL_VARIANT is not system
   ifneq (,$(filter stlport_%, $(LOCAL_NDK_STL_VARIANT)))
     my_ndk_stl_include_path := $(my_ndk_source_root)/cxx-stl/stlport/stlport
+    my_system_shared_libraries += libstdc++
     ifeq (stlport_static,$(LOCAL_NDK_STL_VARIANT))
       my_ndk_stl_static_lib := $(my_ndk_source_root)/cxx-stl/stlport/libs/$(my_cpu_variant)/libstlport_static.a
     else
@@ -130,11 +147,14 @@
       my_ndk_stl_shared_lib := -lc++_shared
     endif
     my_ndk_stl_cppflags := -std=c++11
-  else
-    # LOCAL_NDK_STL_VARIANT is gnustl_static
+  else # LOCAL_NDK_STL_VARIANT is not c++_* either
+  ifneq (,$(filter gnustl_%, $(LOCAL_NDK_STL_VARIANT)))
     my_ndk_stl_include_path := $(my_ndk_source_root)/cxx-stl/gnu-libstdc++/$($(LOCAL_2ND_ARCH_VAR_PREFIX)TARGET_NDK_GCC_VERSION)/libs/$(my_cpu_variant)/include \
                                $(my_ndk_source_root)/cxx-stl/gnu-libstdc++/$($(LOCAL_2ND_ARCH_VAR_PREFIX)TARGET_NDK_GCC_VERSION)/include
     my_ndk_stl_static_lib := $(my_ndk_source_root)/cxx-stl/gnu-libstdc++/$($(LOCAL_2ND_ARCH_VAR_PREFIX)TARGET_NDK_GCC_VERSION)/libs/$(my_cpu_variant)/libgnustl_static.a
+  else # LOCAL_NDK_STL_VARIANT must be none
+    # Do nothing.
+  endif
   endif
   endif
   endif
@@ -143,7 +163,7 @@
 # MinGW spits out warnings about -fPIC even for -fpie?!) being ignored because
 # all code is position independent, and then those warnings get promoted to
 # errors.
-ifndef USE_MINGW
+ifneq ($($(my_prefix)OS),windows)
 ifeq ($(LOCAL_MODULE_CLASS),EXECUTABLES)
 my_cflags += -fpie
 else
@@ -151,7 +171,21 @@
 endif
 endif
 
+ifdef LOCAL_IS_HOST_MODULE
+my_src_files += $(LOCAL_SRC_FILES_$($(my_prefix)OS)) $(LOCAL_SRC_FILES_$($(my_prefix)OS)_$($(my_prefix)$(LOCAL_2ND_ARCH_VAR_PREFIX)ARCH))
+my_static_libraries += $(LOCAL_STATIC_LIBRARIES_$($(my_prefix)OS))
+my_shared_libraries += $(LOCAL_SHARED_LIBRARIES_$($(my_prefix)OS))
+my_cflags += $(LOCAL_CFLAGS_$($(my_prefix)OS))
+my_cppflags += $(LOCAL_CPPFLAGS_$($(my_prefix)OS))
+my_ldflags += $(LOCAL_LDFLAGS_$($(my_prefix)OS))
+my_ldlibs += $(LOCAL_LDLIBS_$($(my_prefix)OS))
+my_asflags += $(LOCAL_ASFLAGS_$($(my_prefix)OS))
+my_c_includes += $(LOCAL_C_INCLUDES_$($(my_prefix)OS))
+my_generated_sources += $(LOCAL_GENERATED_SOURCES_$($(my_prefix)OS))
+endif
+
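
For host modules, the per-OS variants added above mirror the existing per-arch ones. A hedged sketch of how a hypothetical host tool's Android.mk might use them (module and file names are illustrative, not part of this change):

    include $(CLEAR_VARS)
    LOCAL_MODULE := examplehosttool           # hypothetical module name
    LOCAL_SRC_FILES := main.c                 # built on every host OS
    LOCAL_SRC_FILES_darwin := mac_compat.c    # only added on darwin hosts
    LOCAL_LDLIBS_linux := -lrt                # only linked on linux hosts
    include $(BUILD_HOST_EXECUTABLE)
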
 my_src_files += $(LOCAL_SRC_FILES_$($(my_prefix)$(LOCAL_2ND_ARCH_VAR_PREFIX)ARCH)) $(LOCAL_SRC_FILES_$(my_32_64_bit_suffix))
+my_src_files_exclude += $(LOCAL_SRC_FILES_EXCLUDE_$($(my_prefix)$(LOCAL_2ND_ARCH_VAR_PREFIX)ARCH)) $(LOCAL_SRC_FILES_EXCLUDE_$(my_32_64_bit_suffix))
 my_shared_libraries += $(LOCAL_SHARED_LIBRARIES_$($(my_prefix)$(LOCAL_2ND_ARCH_VAR_PREFIX)ARCH)) $(LOCAL_SHARED_LIBRARIES_$(my_32_64_bit_suffix))
 my_cflags += $(LOCAL_CFLAGS_$($(my_prefix)$(LOCAL_2ND_ARCH_VAR_PREFIX)ARCH)) $(LOCAL_CFLAGS_$(my_32_64_bit_suffix))
 my_cppflags += $(LOCAL_CPPFLAGS_$($(my_prefix)$(LOCAL_2ND_ARCH_VAR_PREFIX)ARCH)) $(LOCAL_CPPFLAGS_$(my_32_64_bit_suffix))
@@ -160,6 +194,13 @@
 my_c_includes += $(LOCAL_C_INCLUDES_$($(my_prefix)$(LOCAL_2ND_ARCH_VAR_PREFIX)ARCH)) $(LOCAL_C_INCLUDES_$(my_32_64_bit_suffix))
 my_generated_sources += $(LOCAL_GENERATED_SOURCES_$($(my_prefix)$(LOCAL_2ND_ARCH_VAR_PREFIX)ARCH)) $(LOCAL_GENERATED_SOURCES_$(my_32_64_bit_suffix))
 
+my_missing_exclude_files := $(filter-out $(my_src_files),$(my_src_files_exclude))
+ifneq ($(my_missing_exclude_files),)
+$(warning Files are listed in LOCAL_SRC_FILES_EXCLUDE but not LOCAL_SRC_FILES)
+$(error $(my_missing_exclude_files))
+endif
+my_src_files := $(filter-out $(my_src_files_exclude),$(my_src_files))
+
 my_clang := $(strip $(LOCAL_CLANG))
 ifdef LOCAL_CLANG_$(my_32_64_bit_suffix)
 my_clang := $(strip $(LOCAL_CLANG_$(my_32_64_bit_suffix)))
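
A small illustration of the LOCAL_SRC_FILES_EXCLUDE handling above: excluded files are filtered out of the source list per arch, and listing a file that is not in LOCAL_SRC_FILES is now a hard error. The file names below are hypothetical:

    LOCAL_SRC_FILES := common.c fast_path_generic.c fast_path_arm.c  # hypothetical files
    LOCAL_SRC_FILES_EXCLUDE_arm := fast_path_generic.c   # arm builds use the hand-tuned file
    LOCAL_SRC_FILES_EXCLUDE_64 := fast_path_arm.c        # 64-bit builds keep the generic one
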
@@ -171,26 +212,49 @@
 # clang is enabled by default for host builds
 # enable it unless we've specifically disabled clang above
 ifdef LOCAL_IS_HOST_MODULE
-    ifneq ($(HOST_OS),windows)
+    ifneq ($($(my_prefix)OS),windows)
     ifeq ($(my_clang),)
         my_clang := true
     endif
     endif
+# Add option to make gcc the default for device build
+else ifeq ($(USE_CLANG_PLATFORM_BUILD),false)
+    ifeq ($(my_clang),)
+        my_clang := false
+    endif
+else ifeq ($(my_clang),)
+    my_clang := true
+endif
+
+my_cpp_std_version := -std=gnu++14
+
+ifneq ($(my_clang),true)
+    # GCC uses an invalid C++14 ABI (emits calls to
+    # __cxa_throw_bad_array_length, which is not a valid C++ RT ABI).
+    # http://b/25022512
+    my_cpp_std_version := -std=gnu++11
+endif
+
+ifdef LOCAL_SDK_VERSION
+    # The NDK handles this itself.
+    my_cpp_std_version :=
+endif
+
+ifdef LOCAL_IS_HOST_MODULE
+    ifneq ($(my_clang),true)
+        # The host GCC doesn't support C++14 (and is deprecated, so likely
+        # never will). Build these modules with C++11.
+        my_cpp_std_version := -std=gnu++11
+    endif
 endif
 
-# Add option to make clang the default for device build
-ifeq ($(USE_CLANG_PLATFORM_BUILD),true)
-    ifeq ($(my_clang),)
-        my_clang := true
-    endif
-endif
+my_cppflags := $(my_cpp_std_version) $(my_cppflags)
+
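
Two knobs govern the compiler and C++ standard selection above; a hedged sketch of how they interact (where USE_CLANG_PLATFORM_BUILD is set, environment or build config, is an assumption here):

    # Make GCC the default again for device modules:
    USE_CLANG_PLATFORM_BUILD := false

    # Or opt a single module out of clang in its Android.mk; per the logic above
    # it is then compiled as -std=gnu++11 instead of -std=gnu++14:
    LOCAL_CLANG := false
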
 
 # arch-specific static libraries go first so that generic ones can depend on them
 my_static_libraries := $(LOCAL_STATIC_LIBRARIES_$($(my_prefix)$(LOCAL_2ND_ARCH_VAR_PREFIX)ARCH)) $(LOCAL_STATIC_LIBRARIES_$(my_32_64_bit_suffix)) $(my_static_libraries)
 my_whole_static_libraries := $(LOCAL_WHOLE_STATIC_LIBRARIES_$($(my_prefix)$(LOCAL_2ND_ARCH_VAR_PREFIX)ARCH)) $(LOCAL_WHOLE_STATIC_LIBRARIES_$(my_32_64_bit_suffix)) $(my_whole_static_libraries)
 
-my_cflags := $(filter-out $($(LOCAL_2ND_ARCH_VAR_PREFIX)$(my_prefix)GLOBAL_UNSUPPORTED_CFLAGS),$(my_cflags))
-
 include $(BUILD_SYSTEM)/cxx_stl_setup.mk
 
 # Add static HAL libraries
@@ -202,12 +266,14 @@
 b_lib :=
 endif
 
-include $(BUILD_SYSTEM)/config_sanitizers.mk
-
-ifeq ($(strip $($(LOCAL_2ND_ARCH_VAR_PREFIX)WITHOUT_$(my_prefix)CLANG)),true)
-  my_clang :=
+ifneq ($(strip $(CUSTOM_$(my_prefix)$(LOCAL_2ND_ARCH_VAR_PREFIX)LINKER)),)
+  my_linker := $(CUSTOM_$(my_prefix)$(LOCAL_2ND_ARCH_VAR_PREFIX)LINKER)
+else
+  my_linker := $($(LOCAL_2ND_ARCH_VAR_PREFIX)TARGET_LINKER)
 endif
 
+include $(BUILD_SYSTEM)/config_sanitizers.mk
+
 # Add in libcompiler_rt for all regular device builds
 ifeq (,$(LOCAL_SDK_VERSION)$(WITHOUT_LIBCOMPILER_RT))
   my_static_libraries += $(COMPILER_RT_CONFIG_EXTRA_STATIC_LIBRARIES)
@@ -228,9 +294,10 @@
     my_cflags += $($(LOCAL_2ND_ARCH_VAR_PREFIX)TARGET_FDO_OPTIMIZE_CFLAGS)
     my_fdo_build := true
   endif
-  # Disable ccache (or other compiler wrapper).
-  my_cc_wrapper :=
-  my_cxx_wrapper :=
+  # Disable ccache (or other compiler wrapper) except gomacc, which
+  # can handle -fprofile-use properly.
+  my_cc_wrapper := $(filter $(GOMA_CC),$(my_cc_wrapper))
+  my_cxx_wrapper := $(filter $(GOMA_CC),$(my_cxx_wrapper))
 endif
 
 ###########################################################
@@ -276,17 +343,17 @@
 else # LOCAL_IS_HOST_MODULE
 
 ifeq ($(my_clang),true)
-my_host_global_cflags := $($(LOCAL_2ND_ARCH_VAR_PREFIX)CLANG_HOST_GLOBAL_CFLAGS)
-my_host_global_conlyflags := $($(LOCAL_2ND_ARCH_VAR_PREFIX)CLANG_HOST_GLOBAL_CONLYFLAGS)
-my_host_global_cppflags := $($(LOCAL_2ND_ARCH_VAR_PREFIX)CLANG_HOST_GLOBAL_CPPFLAGS)
-my_host_global_ldflags := $($(LOCAL_2ND_ARCH_VAR_PREFIX)CLANG_HOST_GLOBAL_LDFLAGS)
-my_host_c_includes := $($(LOCAL_2ND_ARCH_VAR_PREFIX)HOST_C_INCLUDES)
+my_host_global_cflags := $($(LOCAL_2ND_ARCH_VAR_PREFIX)CLANG_$(my_prefix)GLOBAL_CFLAGS)
+my_host_global_conlyflags := $($(LOCAL_2ND_ARCH_VAR_PREFIX)CLANG_$(my_prefix)GLOBAL_CONLYFLAGS)
+my_host_global_cppflags := $($(LOCAL_2ND_ARCH_VAR_PREFIX)CLANG_$(my_prefix)GLOBAL_CPPFLAGS)
+my_host_global_ldflags := $($(LOCAL_2ND_ARCH_VAR_PREFIX)CLANG_$(my_prefix)GLOBAL_LDFLAGS)
+my_host_c_includes := $($(LOCAL_2ND_ARCH_VAR_PREFIX)$(my_prefix)C_INCLUDES)
 else
-my_host_global_cflags := $($(LOCAL_2ND_ARCH_VAR_PREFIX)HOST_GLOBAL_CFLAGS)
-my_host_global_conlyflags := $($(LOCAL_2ND_ARCH_VAR_PREFIX)HOST_GLOBAL_CONLYFLAGS)
-my_host_global_cppflags := $($(LOCAL_2ND_ARCH_VAR_PREFIX)HOST_GLOBAL_CPPFLAGS)
-my_host_global_ldflags := $($(LOCAL_2ND_ARCH_VAR_PREFIX)HOST_GLOBAL_LDFLAGS)
-my_host_c_includes := $($(LOCAL_2ND_ARCH_VAR_PREFIX)HOST_C_INCLUDES)
+my_host_global_cflags := $($(LOCAL_2ND_ARCH_VAR_PREFIX)$(my_prefix)GLOBAL_CFLAGS)
+my_host_global_conlyflags := $($(LOCAL_2ND_ARCH_VAR_PREFIX)$(my_prefix)GLOBAL_CONLYFLAGS)
+my_host_global_cppflags := $($(LOCAL_2ND_ARCH_VAR_PREFIX)$(my_prefix)GLOBAL_CPPFLAGS)
+my_host_global_ldflags := $($(LOCAL_2ND_ARCH_VAR_PREFIX)$(my_prefix)GLOBAL_LDFLAGS)
+my_host_c_includes := $($(LOCAL_2ND_ARCH_VAR_PREFIX)$(my_prefix)C_INCLUDES)
 endif # my_clang
 
 $(LOCAL_INTERMEDIATE_TARGETS): PRIVATE_HOST_C_INCLUDES := $(my_host_c_includes)
@@ -315,6 +382,14 @@
     my_native_coverage := false
 endif
 
+ifeq ($(my_clang),true)
+    my_coverage_lib := $($(LOCAL_2ND_ARCH_VAR_PREFIX)TARGET_LIBPROFILE_RT)
+else
+    my_coverage_lib := $($(LOCAL_2ND_ARCH_VAR_PREFIX)TARGET_LIBGCOV)
+endif
+
+$(LOCAL_INTERMEDIATE_TARGETS): PRIVATE_TARGET_COVERAGE_LIB := $(my_coverage_lib)
+
 ###########################################################
 ## Define PRIVATE_ variables used by multiple module types
 ###########################################################
@@ -329,6 +404,12 @@
   LOCAL_NO_STATIC_ANALYZER := true
 endif
 
+# Clang does not recognize all gcc flags.
+# Use static analyzer only if clang is used.
+ifneq ($(my_clang),true)
+  LOCAL_NO_STATIC_ANALYZER := true
+endif
+
 ifneq ($(strip $(LOCAL_IS_HOST_MODULE)),)
   my_syntax_arch := host
 else
@@ -343,13 +424,16 @@
   endif
   my_cc := $(my_cc_wrapper) $(my_cc)
 endif
+
 ifneq ($(LOCAL_NO_STATIC_ANALYZER),true)
-  my_cc := $(SYNTAX_TOOLS_PREFIX)/ccc-analyzer $(my_syntax_arch) "$(my_cc)"
+  my_cc := CCC_CC=$(CLANG) CLANG=$(CLANG) \
+           $(SYNTAX_TOOLS_PREFIX)/ccc-analyzer
 else
 ifneq ($(LOCAL_NO_SYNTAX_CHECK),true)
-  my_cc := $(SYNTAX_TOOLS_PREFIX)/ccc-syntax $(my_syntax_arch) "$(my_cc)"
+  my_cc := $(my_cc) -fsyntax-only
 endif
 endif
+
 $(LOCAL_INTERMEDIATE_TARGETS): PRIVATE_CC := $(my_cc)
 
 ifeq ($(strip $(my_cxx)),)
@@ -360,13 +444,17 @@
   endif
   my_cxx := $(my_cxx_wrapper) $(my_cxx)
 endif
+
 ifneq ($(LOCAL_NO_STATIC_ANALYZER),true)
-  my_cxx := $(SYNTAX_TOOLS_PREFIX)/cxx-analyzer $(my_syntax_arch) "$(my_cxx)"
+  my_cxx := CCC_CXX=$(CLANG_CXX) CLANG_CXX=$(CLANG_CXX) \
+            $(SYNTAX_TOOLS_PREFIX)/c++-analyzer
 else
 ifneq ($(LOCAL_NO_SYNTAX_CHECK),true)
-  my_cxx := $(SYNTAX_TOOLS_PREFIX)/cxx-syntax $(my_syntax_arch) "$(my_cxx)"
+  my_cxx := $(my_cxx) -fsyntax-only
 endif
 endif
+
+$(LOCAL_INTERMEDIATE_TARGETS): PRIVATE_LINKER := $(my_linker)
 $(LOCAL_INTERMEDIATE_TARGETS): PRIVATE_CXX := $(my_cxx)
 $(LOCAL_INTERMEDIATE_TARGETS): PRIVATE_CLANG := $(my_clang)
 
@@ -375,7 +463,6 @@
 ifeq ($(LOCAL_CPP_EXTENSION),)
   LOCAL_CPP_EXTENSION := .cpp
 endif
-$(LOCAL_INTERMEDIATE_TARGETS): PRIVATE_CPP_EXTENSION := $(LOCAL_CPP_EXTENSION)
 
 # Certain modules like libdl have to have symbols resolved at runtime and blow
 # up if --no-undefined is passed to the linker.
@@ -430,6 +517,34 @@
 endif
 
 ####################################################
+## Keep track of src -> obj mapping
+####################################################
+
+my_tracked_gen_files :=
+my_tracked_src_files :=
+
+###########################################################
+## Stuff source generated from one-off tools
+###########################################################
+$(my_generated_sources): PRIVATE_MODULE := $(my_register_name)
+
+my_gen_sources_copy := $(patsubst $(generated_sources_dir)/%,$(intermediates)/%,$(filter $(generated_sources_dir)/%,$(my_generated_sources)))
+
+$(my_gen_sources_copy): $(intermediates)/% : $(generated_sources_dir)/% | $(ACP)
+	@echo "Copy: $@"
+	$(copy-file-to-target)
+
+my_generated_sources := $(patsubst $(generated_sources_dir)/%,$(intermediates)/%,$(my_generated_sources))
+
+# Generated sources that will actually produce object files.
+# Other files (like headers) are allowed in LOCAL_GENERATED_SOURCES,
+# since other compiled sources may depend on them, and we set up
+# the dependencies.
+my_gen_src_files := $(filter %.c %$(LOCAL_CPP_EXTENSION) %.S %.s,$(my_generated_sources))
+
+ALL_GENERATED_SOURCES += $(my_generated_sources)
+
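
A hedged sketch of a module feeding the copy-and-track logic above (the helper and file names are assumptions, not from this change):

    # Assumption: the local-generated-sources-dir helper is available, as elsewhere in this tree.
    my_gen_dir := $(call local-generated-sources-dir)
    LOCAL_GENERATED_SOURCES += $(my_gen_dir)/tables.c   # hypothetical generated .c, produces an object
    LOCAL_GENERATED_SOURCES += $(my_gen_dir)/tables.h   # headers are allowed too; they produce no object
    # The module's own rule for generating these files is omitted here.
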
+####################################################
 ## Compile RenderScript with reflected C++
 ####################################################
 
@@ -441,6 +556,20 @@
 RenderScript_file_stamp := $(intermediates)/RenderScriptCPP.stamp
 renderscript_intermediate := $(intermediates)/renderscript
 
+renderscript_target_api :=
+
+ifneq (,$(LOCAL_RENDERSCRIPT_TARGET_API))
+renderscript_target_api := $(LOCAL_RENDERSCRIPT_TARGET_API)
+else
+ifneq (,$(LOCAL_SDK_VERSION))
+# Set target-api for LOCAL_SDK_VERSIONs other than current.
+ifneq (,$(filter-out current system_current test_current, $(LOCAL_SDK_VERSION)))
+renderscript_target_api := $(LOCAL_SDK_VERSION)
+endif
+endif  # LOCAL_SDK_VERSION is set
+endif  # LOCAL_RENDERSCRIPT_TARGET_API is set
+
+
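
A quick note on the selection above: an explicit LOCAL_RENDERSCRIPT_TARGET_API wins, a numeric LOCAL_SDK_VERSION is used next, and current/system_current/test_current leave the target api unset. For example (values illustrative):

    LOCAL_RENDERSCRIPT_TARGET_API := 21   # explicit value wins
    # With no explicit value, LOCAL_SDK_VERSION := 19 would make the target api 19,
    # while LOCAL_SDK_VERSION := current leaves renderscript_target_api empty.
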
 ifeq ($(LOCAL_RENDERSCRIPT_CC),)
 LOCAL_RENDERSCRIPT_CC := $(LLVM_RS_CC)
 endif
@@ -469,12 +598,13 @@
 $(RenderScript_file_stamp): PRIVATE_RS_FLAGS := $(renderscript_flags)
 $(RenderScript_file_stamp): PRIVATE_RS_SOURCE_FILES := $(renderscript_sources_fullpath)
 $(RenderScript_file_stamp): PRIVATE_RS_OUTPUT_DIR := $(renderscript_intermediate)
+$(RenderScript_file_stamp): PRIVATE_RS_TARGET_API := $(renderscript_target_api)
 $(RenderScript_file_stamp): PRIVATE_DEP_FILES := $(bc_dep_files)
 $(RenderScript_file_stamp): $(renderscript_sources_fullpath) $(LOCAL_RENDERSCRIPT_CC)
 	$(transform-renderscripts-to-cpp-and-bc)
 
 # include the dependency files (.d/.P) generated by llvm-rs-cc.
--include $(bc_dep_files:%.d=%.P)
+$(call include-depfile,$(RenderScript_file_stamp).P,$(RenderScript_file_stamp))
 
 LOCAL_INTERMEDIATE_TARGETS += $(RenderScript_file_stamp)
 
@@ -482,6 +612,8 @@
     $(renderscript_intermediate)/ScriptC_,$(patsubst %.fs,%.cpp, $(patsubst %.rs,%.cpp, \
     $(notdir $(renderscript_sources)))))
 
+$(call track-src-file-gen,$(renderscript_sources),$(rs_generated_cpps))
+
 # This is just a dummy rule to make sure gmake doesn't skip updating the dependents.
 $(rs_generated_cpps) : $(RenderScript_file_stamp)
 	@echo "Updated RS generated cpp file $@."
@@ -494,21 +626,6 @@
 
 
 ###########################################################
-## Stuff source generated from one-off tools
-###########################################################
-$(my_generated_sources): PRIVATE_MODULE := $(my_register_name)
-
-my_gen_sources_copy := $(patsubst $(generated_sources_dir)/%,$(intermediates)/%,$(filter $(generated_sources_dir)/%,$(my_generated_sources)))
-
-$(my_gen_sources_copy): $(intermediates)/% : $(generated_sources_dir)/% | $(ACP)
-	@echo "Copy: $@"
-	$(copy-file-to-target)
-
-my_generated_sources := $(patsubst $(generated_sources_dir)/%,$(intermediates)/%,$(my_generated_sources))
-
-ALL_GENERATED_SOURCES += $(my_generated_sources)
-
-###########################################################
 ## Compile the .proto files to .cc (or .c) and then to .o
 ###########################################################
 proto_sources := $(filter %.proto,$(my_src_files))
@@ -537,6 +654,7 @@
 proto_generated_headers := $(patsubst %.pb$(my_proto_source_suffix),%.pb.h, $(proto_generated_sources))
 proto_generated_objects := $(addprefix $(proto_generated_obj_dir)/, \
     $(patsubst %.proto,%.pb.o,$(proto_sources_fullpath)))
+$(call track-src-file-obj,$(proto_sources),$(proto_generated_objects))
 
 # Ensure the transform-proto-to-cc rule is only defined once in multilib build.
 ifndef $(my_prefix)_$(LOCAL_MODULE_CLASS)_$(LOCAL_MODULE)_proto_defined
@@ -561,7 +679,7 @@
 else
 	$(transform-$(PRIVATE_HOST)cpp-to-o)
 endif
--include $(proto_generated_objects:%.o=%.P)
+$(call include-depfiles-for-objs, $(proto_generated_objects))
 
 my_c_includes += $(my_proto_c_includes)
 # Auto-export the generated proto source dir.
@@ -573,13 +691,13 @@
     my_static_libraries += libprotobuf-c-nano
 else ifeq ($(LOCAL_PROTOC_OPTIMIZE_TYPE),full)
     ifdef LOCAL_SDK_VERSION
-        my_static_libraries += libprotobuf-cpp-full
+        my_static_libraries += libprotobuf-cpp-full-ndk
     else
         my_shared_libraries += libprotobuf-cpp-full
     endif
 else
     ifdef LOCAL_SDK_VERSION
-        my_static_libraries += libprotobuf-cpp-lite
+        my_static_libraries += libprotobuf-cpp-lite-ndk
     else
         my_shared_libraries += libprotobuf-cpp-lite
     endif
@@ -587,79 +705,179 @@
 endif  # $(proto_sources) non-empty
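
The library selection above now switches to the NDK-compatible static protobuf archives whenever LOCAL_SDK_VERSION is set. A hedged example of a module that would pick up libprotobuf-cpp-full-ndk (sources hypothetical):

    LOCAL_SRC_FILES := foo.proto foo_user.cpp   # hypothetical sources
    LOCAL_PROTOC_OPTIMIZE_TYPE := full
    LOCAL_SDK_VERSION := 21                     # NDK build: statically links libprotobuf-cpp-full-ndk
    # Without LOCAL_SDK_VERSION, the same module would use the shared libprotobuf-cpp-full instead.
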
 
 ###########################################################
-## YACC: Compile .y and .yy files to .cpp and the to .o.
+## Compile the .dbus-xml files to c++ headers
+###########################################################
+dbus_definitions := $(filter %.dbus-xml,$(my_src_files))
+dbus_generated_headers :=
+ifneq ($(dbus_definitions),)
+
+dbus_definition_paths := $(addprefix $(LOCAL_PATH)/,$(dbus_definitions))
+dbus_service_config := $(filter %dbus-service-config.json,$(my_src_files))
+dbus_service_config_path := $(addprefix $(LOCAL_PATH)/,$(dbus_service_config))
+
+# Mark these source files as not producing objects
+$(call track-src-file-obj,$(dbus_definitions) $(dbus_service_config),)
+
+dbus_gen_dir := $(generated_sources_dir)/dbus_bindings
+
+ifdef LOCAL_DBUS_PROXY_PREFIX
+dbus_header_dir := $(dbus_gen_dir)/include/$(LOCAL_DBUS_PROXY_PREFIX)
+dbus_headers := dbus-proxies.h
+else
+dbus_header_dir := $(dbus_gen_dir)
+dbus_headers := $(patsubst %.dbus-xml,%.h,$(dbus_definitions))
+endif
+dbus_generated_headers := $(addprefix $(dbus_header_dir)/,$(dbus_headers))
+
+# Ensure that we only define build rules once in multilib builds.
+ifndef $(my_prefix)_$(LOCAL_MODULE_CLASS)_$(LOCAL_MODULE)_dbus_bindings_defined
+$(my_prefix)_$(LOCAL_MODULE_CLASS)_$(LOCAL_MODULE)_dbus_bindings_defined := true
+
+$(dbus_generated_headers): PRIVATE_MODULE := $(LOCAL_MODULE)
+$(dbus_generated_headers): PRIVATE_DBUS_SERVICE_CONFIG := $(dbus_service_config_path)
+$(dbus_generated_headers) : $(dbus_service_config_path) $(DBUS_GENERATOR)
+ifdef LOCAL_DBUS_PROXY_PREFIX
+$(dbus_generated_headers) : $(dbus_definition_paths)
+	$(generate-dbus-proxies)
+else
+$(dbus_generated_headers) : $(dbus_header_dir)/%.h : $(LOCAL_PATH)/%.dbus-xml
+	$(generate-dbus-adaptors)
+endif  # $(LOCAL_DBUS_PROXY_PREFIX)
+endif  # $(my_prefix)_$(LOCAL_MODULE_CLASS)_$(LOCAL_MODULE)_dbus_bindings_defined
+
+ifdef LOCAL_DBUS_PROXY_PREFIX
+# Auto-export the generated dbus proxy directory.
+my_export_c_include_dirs += $(dbus_gen_dir)/include
+my_c_includes += $(dbus_gen_dir)/include
+else
+my_export_c_include_dirs += $(dbus_header_dir)
+my_c_includes += $(dbus_header_dir)
+endif  # $(LOCAL_DBUS_PROXY_PREFIX)
+
+my_generated_sources += $(dbus_generated_headers)
+
+endif  # $(dbus_definitions) non-empty
+
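
A hedged sketch of a module using the D-Bus binding support above (service and file names are made up):

    LOCAL_SRC_FILES := \
        dbus_bindings/dbus-service-config.json \
        dbus_bindings/org.example.Frobnicator.dbus-xml \
        frobnicator.cc                            # hypothetical files
    # For a proxy consumer, also set:
    LOCAL_DBUS_PROXY_PREFIX := frobnicator        # headers land under .../include/frobnicator/dbus-proxies.h
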
+
+###########################################################
+## AIDL: Compile .aidl files to .cpp and .h files
+###########################################################
+aidl_src := $(strip $(filter %.aidl,$(my_src_files)))
+aidl_gen_cpp :=
+ifneq ($(aidl_src),)
+
+# Use the intermediates directory to avoid writing our own .cpp -> .o rules.
+aidl_gen_cpp_root := $(intermediates)/aidl-generated/src
+aidl_gen_include_root := $(intermediates)/aidl-generated/include
+
+# Multi-architecture builds have distinct intermediates directories.
+# Thus we'll actually generate source for each architecture.
+$(foreach s,$(aidl_src),\
+    $(eval $(call define-aidl-cpp-rule,$(s),$(aidl_gen_cpp_root),aidl_gen_cpp)))
+$(foreach cpp,$(aidl_gen_cpp), \
+    $(call include-depfile,$(addsuffix .aidl.P,$(basename $(cpp))),$(cpp)))
+$(call track-src-file-gen,$(aidl_src),$(aidl_gen_cpp))
+
+$(aidl_gen_cpp) : PRIVATE_MODULE := $(LOCAL_MODULE)
+$(aidl_gen_cpp) : PRIVATE_HEADER_OUTPUT_DIR := $(aidl_gen_include_root)
+$(aidl_gen_cpp) : PRIVATE_AIDL_FLAGS := $(addprefix -I,$(LOCAL_AIDL_INCLUDES))
+
+# Add generated headers to include paths.
+my_c_includes += $(aidl_gen_include_root)
+my_export_c_include_dirs += $(aidl_gen_include_root)
+# Pick up the generated C++ files later for transformation to .o files.
+my_generated_sources += $(aidl_gen_cpp)
+
+endif  # $(aidl_src) non-empty
+
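
And a minimal sketch for the C++ AIDL path above (interface name hypothetical):

    LOCAL_SRC_FILES := android/example/IFrobnicator.aidl impl.cpp   # hypothetical
    LOCAL_AIDL_INCLUDES := $(LOCAL_PATH)          # searched via the -I flags set above
    # The generated .cpp is compiled with the rest of the module; the generated
    # headers are exported automatically through my_export_c_include_dirs.
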
+###########################################################
+## Compile the .vts files to .cc (or .c) and then to .o
+###########################################################
+
+vts_src := $(strip $(filter %.vts,$(my_src_files)))
+vts_gen_cpp :=
+ifneq ($(vts_src),)
+
+# Use the intermediates directory to avoid writing our own .cpp -> .o rules.
+vts_gen_cpp_root := $(intermediates)/vts-generated/src
+vts_gen_include_root := $(intermediates)/vts-generated/include
+
+# Multi-architecture builds have distinct intermediates directories.
+# Thus we'll actually generate source for each architecture.
+$(foreach s,$(vts_src),\
+    $(eval $(call define-vts-cpp-rule,$(s),$(vts_gen_cpp_root),vts_gen_cpp)))
+$(foreach cpp,$(vts_gen_cpp), \
+    $(call include-depfile,$(addsuffix .vts.P,$(basename $(cpp))),$(cpp)))
+$(call track-src-file-gen,$(vts_src),$(vts_gen_cpp))
+
+$(vts_gen_cpp) : PRIVATE_MODULE := $(LOCAL_MODULE)
+$(vts_gen_cpp) : PRIVATE_HEADER_OUTPUT_DIR := $(vts_gen_include_root)
+$(vts_gen_cpp) : PRIVATE_VTS_FLAGS := $(addprefix -I,$(LOCAL_VTS_INCLUDES))
+
+# Add generated headers to include paths.
+my_c_includes += $(vts_gen_include_root)
+my_export_c_include_dirs += $(vts_gen_include_root)
+# Pick up the generated C++ files later for transformation to .o files.
+my_generated_sources += $(vts_gen_cpp)
+
+endif  # $(vts_src) non-empty
+
+###########################################################
+## YACC: Compile .y/.yy files to .c/.cpp and then to .o.
 ###########################################################
 
 y_yacc_sources := $(filter %.y,$(my_src_files))
-y_yacc_cpps := $(addprefix \
-    $(intermediates)/,$(y_yacc_sources:.y=$(LOCAL_CPP_EXTENSION)))
+y_yacc_cs := $(addprefix \
+    $(intermediates)/,$(y_yacc_sources:.y=.c))
+ifneq ($(y_yacc_cs),)
+$(y_yacc_cs): $(intermediates)/%.c: \
+    $(TOPDIR)$(LOCAL_PATH)/%.y \
+    $(my_additional_dependencies)
+	$(call transform-y-to-c-or-cpp)
+$(call track-src-file-gen,$(y_yacc_sources),$(y_yacc_cs))
+
+my_generated_sources += $(y_yacc_cs)
+endif
 
 yy_yacc_sources := $(filter %.yy,$(my_src_files))
 yy_yacc_cpps := $(addprefix \
     $(intermediates)/,$(yy_yacc_sources:.yy=$(LOCAL_CPP_EXTENSION)))
-
-yacc_cpps := $(y_yacc_cpps) $(yy_yacc_cpps)
-yacc_headers := $(yacc_cpps:$(LOCAL_CPP_EXTENSION)=.h)
-yacc_objects := $(yacc_cpps:$(LOCAL_CPP_EXTENSION)=.o)
-
-ifneq ($(strip $(y_yacc_cpps)),)
-$(y_yacc_cpps): $(intermediates)/%$(LOCAL_CPP_EXTENSION): \
-    $(TOPDIR)$(LOCAL_PATH)/%.y \
-    $(lex_cpps) $(my_additional_dependencies)
-	$(call transform-y-to-cpp,$(PRIVATE_CPP_EXTENSION))
-$(yacc_headers): $(intermediates)/%.h: $(intermediates)/%$(LOCAL_CPP_EXTENSION)
-endif
-
-ifneq ($(strip $(yy_yacc_cpps)),)
+ifneq ($(yy_yacc_cpps),)
 $(yy_yacc_cpps): $(intermediates)/%$(LOCAL_CPP_EXTENSION): \
     $(TOPDIR)$(LOCAL_PATH)/%.yy \
-    $(lex_cpps) $(my_additional_dependencies)
-	$(call transform-y-to-cpp,$(PRIVATE_CPP_EXTENSION))
-$(yacc_headers): $(intermediates)/%.h: $(intermediates)/%$(LOCAL_CPP_EXTENSION)
-endif
+    $(my_additional_dependencies)
+	$(call transform-y-to-c-or-cpp)
+$(call track-src-file-gen,$(yy_yacc_sources),$(yy_yacc_cpps))
 
-ifneq ($(strip $(yacc_cpps)),)
-$(yacc_objects): PRIVATE_ARM_MODE := $(normal_objects_mode)
-$(yacc_objects): PRIVATE_ARM_CFLAGS := $(normal_objects_cflags)
-$(yacc_objects): $(intermediates)/%.o: $(intermediates)/%$(LOCAL_CPP_EXTENSION)
-	$(transform-$(PRIVATE_HOST)cpp-to-o)
+my_generated_sources += $(yy_yacc_cpps)
 endif
 
 ###########################################################
-## LEX: Compile .l and .ll files to .cpp and then to .o.
+## LEX: Compile .l/.ll files to .c/.cpp and then to .o.
 ###########################################################
 
 l_lex_sources := $(filter %.l,$(my_src_files))
-l_lex_cpps := $(addprefix \
-    $(intermediates)/,$(l_lex_sources:.l=$(LOCAL_CPP_EXTENSION)))
+l_lex_cs := $(addprefix \
+    $(intermediates)/,$(l_lex_sources:.l=.c))
+ifneq ($(l_lex_cs),)
+$(l_lex_cs): $(intermediates)/%.c: \
+    $(TOPDIR)$(LOCAL_PATH)/%.l
+	$(transform-l-to-c-or-cpp)
+$(call track-src-file-gen,$(l_lex_sources),$(l_lex_cs))
+
+my_generated_sources += $(l_lex_cs)
+endif
 
 ll_lex_sources := $(filter %.ll,$(my_src_files))
 ll_lex_cpps := $(addprefix \
     $(intermediates)/,$(ll_lex_sources:.ll=$(LOCAL_CPP_EXTENSION)))
-
-lex_cpps := $(l_lex_cpps) $(ll_lex_cpps)
-lex_objects := $(lex_cpps:$(LOCAL_CPP_EXTENSION)=.o)
-
-ifneq ($(strip $(l_lex_cpps)),)
-$(l_lex_cpps): $(intermediates)/%$(LOCAL_CPP_EXTENSION): \
-    $(TOPDIR)$(LOCAL_PATH)/%.l
-	$(transform-l-to-cpp)
-endif
-
-ifneq ($(strip $(ll_lex_cpps)),)
+ifneq ($(ll_lex_cpps),)
 $(ll_lex_cpps): $(intermediates)/%$(LOCAL_CPP_EXTENSION): \
     $(TOPDIR)$(LOCAL_PATH)/%.ll
-	$(transform-l-to-cpp)
-endif
+	$(transform-l-to-c-or-cpp)
+$(call track-src-file-gen,$(ll_lex_sources),$(ll_lex_cpps))
 
-ifneq ($(strip $(lex_cpps)),)
-$(lex_objects): PRIVATE_ARM_MODE := $(normal_objects_mode)
-$(lex_objects): PRIVATE_ARM_CFLAGS := $(normal_objects_cflags)
-$(lex_objects): $(intermediates)/%.o: \
-    $(intermediates)/%$(LOCAL_CPP_EXTENSION) \
-    $(my_additional_dependencies) \
-    $(yacc_headers)
-	$(transform-$(PRIVATE_HOST)cpp-to-o)
+my_generated_sources += $(ll_lex_cpps)
 endif
 
 ###########################################################
@@ -672,6 +890,7 @@
 dotdot_arm_sources := $(filter ../%,$(cpp_arm_sources))
 cpp_arm_sources := $(filter-out ../%,$(cpp_arm_sources))
 cpp_arm_objects := $(addprefix $(intermediates)/,$(cpp_arm_sources:$(LOCAL_CPP_EXTENSION)=.o))
+$(call track-src-file-obj,$(patsubst %,%.arm,$(cpp_arm_sources)),$(cpp_arm_objects))
 
 # For source files starting with ../, we remove all the ../ in the object file path,
 # to avoid object file escaping the intermediate directory.
@@ -680,6 +899,7 @@
   $(eval $(call compile-dotdot-cpp-file,$(s),\
   $(yacc_cpps) $(proto_generated_headers) $(my_additional_dependencies),\
   dotdot_arm_objects)))
+$(call track-src-file-obj,$(patsubst %,%.arm,$(dotdot_arm_sources)),$(dotdot_arm_objects))
 
 dotdot_sources := $(filter ../%$(LOCAL_CPP_EXTENSION),$(my_src_files))
 dotdot_objects :=
@@ -687,9 +907,11 @@
   $(eval $(call compile-dotdot-cpp-file,$(s),\
     $(yacc_cpps) $(proto_generated_headers) $(my_additional_dependencies),\
     dotdot_objects)))
+$(call track-src-file-obj,$(dotdot_sources),$(dotdot_objects))
 
 cpp_normal_sources := $(filter-out ../%,$(filter %$(LOCAL_CPP_EXTENSION),$(my_src_files)))
 cpp_normal_objects := $(addprefix $(intermediates)/,$(cpp_normal_sources:$(LOCAL_CPP_EXTENSION)=.o))
+$(call track-src-file-obj,$(cpp_normal_sources),$(cpp_normal_objects))
 
 $(dotdot_arm_objects) $(cpp_arm_objects): PRIVATE_ARM_MODE := $(arm_objects_mode)
 $(dotdot_arm_objects) $(cpp_arm_objects): PRIVATE_ARM_CFLAGS := $(arm_objects_cflags)
@@ -704,7 +926,7 @@
     $(yacc_cpps) $(proto_generated_headers) \
     $(my_additional_dependencies)
 	$(transform-$(PRIVATE_HOST)cpp-to-o)
--include $(cpp_objects:%.o=%.P)
+$(call include-depfiles-for-objs, $(cpp_objects))
 endif
 
 cpp_objects += $(dotdot_arm_objects) $(dotdot_objects)
@@ -715,6 +937,7 @@
 
 gen_cpp_sources := $(filter %$(LOCAL_CPP_EXTENSION),$(my_generated_sources))
 gen_cpp_objects := $(gen_cpp_sources:%$(LOCAL_CPP_EXTENSION)=%.o)
+$(call track-gen-file-obj,$(gen_cpp_sources),$(gen_cpp_objects))
 
 ifneq ($(strip $(gen_cpp_objects)),)
 # Compile all generated files as thumb.
@@ -726,7 +949,7 @@
     $(proto_generated_headers) \
     $(my_additional_dependencies)
 	$(transform-$(PRIVATE_HOST)cpp-to-o)
--include $(gen_cpp_objects:%.o=%.P)
+$(call include-depfiles-for-objs, $(gen_cpp_objects))
 endif
 
 ###########################################################
@@ -735,25 +958,27 @@
 
 gen_S_sources := $(filter %.S,$(my_generated_sources))
 gen_S_objects := $(gen_S_sources:%.S=%.o)
+$(call track-gen-file-obj,$(gen_S_sources),$(gen_S_objects))
 
 ifneq ($(strip $(gen_S_sources)),)
 $(gen_S_objects): $(intermediates)/%.o: $(intermediates)/%.S \
     $(my_additional_dependencies)
 	$(transform-$(PRIVATE_HOST)s-to-o)
--include $(gen_S_objects:%.o=%.P)
+$(call include-depfiles-for-objs, $(gen_S_objects))
 endif
 
 gen_s_sources := $(filter %.s,$(my_generated_sources))
 gen_s_objects := $(gen_s_sources:%.s=%.o)
+$(call track-gen-file-obj,$(gen_s_sources),$(gen_s_objects))
 
 ifneq ($(strip $(gen_s_objects)),)
 $(gen_s_objects): $(intermediates)/%.o: $(intermediates)/%.s \
     $(my_additional_dependencies)
 	$(transform-$(PRIVATE_HOST)s-to-o-no-deps)
--include $(gen_s_objects:%.o=%.P)
 endif
 
 gen_asm_objects := $(gen_S_objects) $(gen_s_objects)
+$(gen_asm_objects): PRIVATE_ARM_CFLAGS := $(normal_objects_cflags)
 
 ###########################################################
 ## o: Include generated .o files in output.
@@ -769,6 +994,7 @@
 dotdot_arm_sources := $(filter ../%,$(c_arm_sources))
 c_arm_sources := $(filter-out ../%,$(c_arm_sources))
 c_arm_objects := $(addprefix $(intermediates)/,$(c_arm_sources:.c=.o))
+$(call track-src-file-obj,$(patsubst %,%.arm,$(c_arm_sources)),$(c_arm_objects))
 
 # For source files starting with ../, we remove all the ../ in the object file path,
 # to avoid object file escaping the intermediate directory.
@@ -777,6 +1003,7 @@
   $(eval $(call compile-dotdot-c-file,$(s),\
     $(yacc_cpps) $(proto_generated_headers) $(my_additional_dependencies),\
     dotdot_arm_objects)))
+$(call track-src-file-obj,$(patsubst %,%.arm,$(dotdot_arm_sources)),$(dotdot_arm_objects))
 
 dotdot_sources := $(filter ../%.c, $(my_src_files))
 dotdot_objects :=
@@ -784,9 +1011,11 @@
   $(eval $(call compile-dotdot-c-file,$(s),\
     $(yacc_cpps) $(proto_generated_headers) $(my_additional_dependencies),\
     dotdot_objects)))
+$(call track-src-file-obj,$(dotdot_sources),$(dotdot_objects))
 
 c_normal_sources := $(filter-out ../%,$(filter %.c,$(my_src_files)))
 c_normal_objects := $(addprefix $(intermediates)/,$(c_normal_sources:.c=.o))
+$(call track-src-file-obj,$(c_normal_sources),$(c_normal_objects))
 
 $(dotdot_arm_objects) $(c_arm_objects): PRIVATE_ARM_MODE := $(arm_objects_mode)
 $(dotdot_arm_objects) $(c_arm_objects): PRIVATE_ARM_CFLAGS := $(arm_objects_cflags)
@@ -799,7 +1028,7 @@
 $(c_objects): $(intermediates)/%.o: $(TOPDIR)$(LOCAL_PATH)/%.c $(yacc_cpps) $(proto_generated_headers) \
     $(my_additional_dependencies)
 	$(transform-$(PRIVATE_HOST)c-to-o)
--include $(c_objects:%.o=%.P)
+$(call include-depfiles-for-objs, $(c_objects))
 endif
 
 c_objects += $(dotdot_arm_objects) $(dotdot_objects)
@@ -810,6 +1039,7 @@
 
 gen_c_sources := $(filter %.c,$(my_generated_sources))
 gen_c_objects := $(gen_c_sources:%.c=%.o)
+$(call track-gen-file-obj,$(gen_c_sources),$(gen_c_objects))
 
 ifneq ($(strip $(gen_c_objects)),)
 # Compile all generated files as thumb.
@@ -819,7 +1049,7 @@
 $(gen_c_objects): $(intermediates)/%.o: $(intermediates)/%.c $(yacc_cpps) $(proto_generated_headers) \
     $(my_additional_dependencies)
 	$(transform-$(PRIVATE_HOST)c-to-o)
--include $(gen_c_objects:%.o=%.P)
+$(call include-depfiles-for-objs, $(gen_c_objects))
 endif
 
 ###########################################################
@@ -828,12 +1058,28 @@
 
 objc_sources := $(filter %.m,$(my_src_files))
 objc_objects := $(addprefix $(intermediates)/,$(objc_sources:.m=.o))
+$(call track-src-file-obj,$(objc_sources),$(objc_objects))
 
 ifneq ($(strip $(objc_objects)),)
 $(objc_objects): $(intermediates)/%.o: $(TOPDIR)$(LOCAL_PATH)/%.m $(yacc_cpps) $(proto_generated_headers) \
     $(my_additional_dependencies)
 	$(transform-$(PRIVATE_HOST)m-to-o)
--include $(objc_objects:%.o=%.P)
+$(call include-depfiles-for-objs, $(objc_objects))
+endif
+
+###########################################################
+## ObjC++: Compile .mm files to .o
+###########################################################
+
+objcpp_sources := $(filter %.mm,$(my_src_files))
+objcpp_objects := $(addprefix $(intermediates)/,$(objcpp_sources:.mm=.o))
+$(call track-src-file-obj,$(objcpp_sources),$(objcpp_objects))
+
+ifneq ($(strip $(objcpp_objects)),)
+$(objcpp_objects): $(intermediates)/%.o: $(TOPDIR)$(LOCAL_PATH)/%.mm $(yacc_cpps) $(proto_generated_headers) \
+    $(my_additional_dependencies)
+	$(transform-$(PRIVATE_HOST)mm-to-o)
+$(call include-depfiles-for-objs, $(objcpp_objects))
 endif
 
 ###########################################################
@@ -844,30 +1090,34 @@
 dotdot_sources := $(filter ../%,$(asm_sources_S))
 asm_sources_S := $(filter-out ../%,$(asm_sources_S))
 asm_objects_S := $(addprefix $(intermediates)/,$(asm_sources_S:.S=.o))
+$(call track-src-file-obj,$(asm_sources_S),$(asm_objects_S))
 
 dotdot_objects_S :=
 $(foreach s,$(dotdot_sources),\
   $(eval $(call compile-dotdot-s-file,$(s),\
     $(my_additional_dependencies),\
     dotdot_objects_S)))
+$(call track-src-file-obj,$(dotdot_sources),$(dotdot_objects_S))
 
 ifneq ($(strip $(asm_objects_S)),)
 $(asm_objects_S): $(intermediates)/%.o: $(TOPDIR)$(LOCAL_PATH)/%.S \
     $(my_additional_dependencies)
 	$(transform-$(PRIVATE_HOST)s-to-o)
--include $(asm_objects_S:%.o=%.P)
+$(call include-depfiles-for-objs, $(asm_objects_S))
 endif
 
 asm_sources_s := $(filter %.s,$(my_src_files))
 dotdot_sources := $(filter ../%,$(asm_sources_s))
 asm_sources_s := $(filter-out ../%,$(asm_sources_s))
 asm_objects_s := $(addprefix $(intermediates)/,$(asm_sources_s:.s=.o))
+$(call track-src-file-obj,$(asm_sources_s),$(asm_objects_s))
 
 dotdot_objects_s :=
 $(foreach s,$(dotdot_sources),\
   $(eval $(call compile-dotdot-s-file-no-deps,$(s),\
     $(my_additional_dependencies),\
     dotdot_objects_s)))
+$(call track-src-file-obj,$(dotdot_sources),$(dotdot_objects_s))
 
 ifneq ($(strip $(asm_objects_s)),)
 $(asm_objects_s): $(intermediates)/%.o: $(TOPDIR)$(LOCAL_PATH)/%.s \
@@ -876,6 +1126,7 @@
 endif
 
 asm_objects := $(dotdot_objects_S) $(dotdot_objects_s) $(asm_objects_S) $(asm_objects_s)
+$(asm_objects): PRIVATE_ARM_CFLAGS := $(normal_objects_cflags)
 
 
 # .asm for x86/x86_64 needs to be compiled with yasm.
@@ -885,6 +1136,7 @@
 $(asm_objects_asm): $(intermediates)/%.o: $(TOPDIR)$(LOCAL_PATH)/%.asm \
     $(my_additional_dependencies)
 	$(transform-asm-to-o)
+$(call track-src-file-obj,$(asm_sources_asm),$(asm_objects_asm))
 
 asm_objects += $(asm_objects_asm)
 endif
@@ -920,11 +1172,11 @@
 import_includes := $(intermediates)/import_includes
 import_includes_deps := $(strip \
     $(foreach l, $(installed_shared_library_module_names), \
-      $(call intermediates-dir-for,SHARED_LIBRARIES,$(l),$(LOCAL_IS_HOST_MODULE),,$(LOCAL_2ND_ARCH_VAR_PREFIX))/export_includes) \
+      $(call intermediates-dir-for,SHARED_LIBRARIES,$(l),$(LOCAL_IS_HOST_MODULE),,$(LOCAL_2ND_ARCH_VAR_PREFIX),$(my_host_cross))/export_includes) \
     $(foreach l, $(my_static_libraries) $(my_whole_static_libraries), \
-      $(call intermediates-dir-for,STATIC_LIBRARIES,$(l),$(LOCAL_IS_HOST_MODULE),,$(LOCAL_2ND_ARCH_VAR_PREFIX))/export_includes))
+      $(call intermediates-dir-for,STATIC_LIBRARIES,$(l),$(LOCAL_IS_HOST_MODULE),,$(LOCAL_2ND_ARCH_VAR_PREFIX),$(my_host_cross))/export_includes))
 $(import_includes): PRIVATE_IMPORT_EXPORT_INCLUDES := $(import_includes_deps)
-$(import_includes) : $(LOCAL_MODULE_MAKEFILE) $(import_includes_deps)
+$(import_includes) : $(LOCAL_MODULE_MAKEFILE_DEP) $(import_includes_deps)
 	@echo Import includes file: $@
 	$(hide) mkdir -p $(dir $@) && rm -f $@
 ifdef import_includes_deps
@@ -939,6 +1191,11 @@
 ## Common object handling.
 ###########################################################
 
+my_unused_src_files := $(filter-out $(logtags_sources) $(my_tracked_src_files),$(my_src_files) $(my_gen_src_files))
+ifneq ($(my_unused_src_files),)
+  $(warning $(LOCAL_MODULE_MAKEFILE): $(LOCAL_MODULE): Unused source files: $(my_unused_src_files))
+endif
+
 # some rules depend on asm_objects being first.  If your code depends on
 # being first, it's reasonable to require it to be assembly
 normal_objects := \
@@ -949,13 +1206,32 @@
     $(c_objects) \
     $(gen_c_objects) \
     $(objc_objects) \
-    $(yacc_objects) \
-    $(lex_objects) \
-    $(proto_generated_objects) \
-    $(addprefix $(TOPDIR)$(LOCAL_PATH)/,$(LOCAL_PREBUILT_OBJ_FILES))
+    $(objcpp_objects) \
+    $(proto_generated_objects)
+
+new_order_normal_objects := $(foreach f,$(my_src_files),$(my_src_file_obj_$(f)))
+new_order_normal_objects += $(foreach f,$(my_gen_src_files),$(my_src_file_obj_$(f)))
+
+ifneq ($(sort $(normal_objects)),$(sort $(new_order_normal_objects)))
+$(warning $(LOCAL_MODULE_MAKEFILE) Internal build system warning: New object list does not match old)
+$(info Only in old: $(filter-out $(new_order_normal_objects),$(sort $(normal_objects))))
+$(info Only in new: $(filter-out $(normal_objects),$(sort $(new_order_normal_objects))))
+endif
+
+ifeq ($(BINARY_OBJECTS_ORDER),soong)
+normal_objects := $(new_order_normal_objects)
+endif
+
+normal_objects += $(addprefix $(TOPDIR)$(LOCAL_PATH)/,$(LOCAL_PREBUILT_OBJ_FILES))
 
 all_objects := $(normal_objects) $(gen_o_objects)
 
+# Cleanup file tracking
+$(foreach f,$(my_tracked_gen_files),$(eval my_src_file_gen_$(f):=))
+my_tracked_gen_files :=
+$(foreach f,$(my_tracked_src_files),$(eval my_src_file_obj_$(f):=))
+my_tracked_src_files :=
+
 my_c_includes += $(TOPDIR)$(LOCAL_PATH) $(intermediates) $(generated_sources_dir)
 
 ifndef LOCAL_SDK_VERSION
@@ -967,7 +1243,11 @@
 # that custom build rules which generate .o files don't consume other generated
 # sources as input (or if they do they take care of that dependency themselves).
 $(normal_objects) : | $(my_generated_sources)
+ifeq ($(BUILDING_WITH_NINJA),true)
+$(all_objects) : $(import_includes)
+else
 $(all_objects) : | $(import_includes)
+endif
 ALL_C_CPP_ETC_OBJECTS += $(all_objects)
 
 
@@ -1009,6 +1289,7 @@
     $(addprefix $($(LOCAL_2ND_ARCH_VAR_PREFIX)$(my_prefix)OUT_INTERMEDIATE_LIBRARIES)/, \
       $(addsuffix $(so_suffix), \
         $(my_shared_libraries)))
+built_shared_library_deps := $(addsuffix .toc, $(built_shared_libraries))
 
 # Add the NDK libraries to the built module dependency
 my_system_shared_libraries_fullpath := \
@@ -1022,12 +1303,20 @@
     $(addprefix $($(LOCAL_2ND_ARCH_VAR_PREFIX)$(my_prefix)OUT_INTERMEDIATE_LIBRARIES)/, \
       $(addsuffix $(so_suffix), \
         $(installed_shared_library_module_names)))
+ifdef LOCAL_IS_HOST_MODULE
+# Disable .toc optimization for host modules: we may run the host binaries during the build process
+# and the libraries' implementation matters.
+built_shared_library_deps := $(built_shared_libraries)
+else
+built_shared_library_deps := $(addsuffix .toc, $(built_shared_libraries))
+endif
+my_system_shared_libraries_fullpath :=
 endif
 
 built_static_libraries := \
     $(foreach lib,$(my_static_libraries), \
       $(call intermediates-dir-for, \
-        STATIC_LIBRARIES,$(lib),$(LOCAL_IS_HOST_MODULE),,$(LOCAL_2ND_ARCH_VAR_PREFIX))/$(lib)$(a_suffix))
+        STATIC_LIBRARIES,$(lib),$(LOCAL_IS_HOST_MODULE),,$(LOCAL_2ND_ARCH_VAR_PREFIX),$(my_host_cross))/$(lib)$(a_suffix))
 
 ifdef LOCAL_SDK_VERSION
 built_static_libraries += $(my_ndk_stl_static_lib)
@@ -1036,7 +1325,7 @@
 built_whole_libraries := \
     $(foreach lib,$(my_whole_static_libraries), \
       $(call intermediates-dir-for, \
-        STATIC_LIBRARIES,$(lib),$(LOCAL_IS_HOST_MODULE),,$(LOCAL_2ND_ARCH_VAR_PREFIX))/$(lib)$(a_suffix))
+        STATIC_LIBRARIES,$(lib),$(LOCAL_IS_HOST_MODULE),,$(LOCAL_2ND_ARCH_VAR_PREFIX),$(my_host_cross))/$(lib)$(a_suffix))
 
 # We don't care about installed static libraries, since the
 # libraries have already been linked into the module at that point.
@@ -1060,6 +1349,8 @@
 my_cflags += $(LOCAL_CLANG_CFLAGS)
 my_conlyflags += $(LOCAL_CLANG_CONLYFLAGS)
 my_cppflags += $(LOCAL_CLANG_CPPFLAGS)
+my_cflags_no_override += $(GLOBAL_CLANG_CFLAGS_NO_OVERRIDE)
+my_cppflags_no_override += $(GLOBAL_CLANG_CPPFLAGS_NO_OVERRIDE)
 my_asflags += $(LOCAL_CLANG_ASFLAGS)
 my_ldflags += $(LOCAL_CLANG_LDFLAGS)
 my_cflags += $(LOCAL_CLANG_CFLAGS_$($(my_prefix)$(LOCAL_2ND_ARCH_VAR_PREFIX)ARCH)) $(LOCAL_CLANG_CFLAGS_$(my_32_64_bit_suffix))
@@ -1079,18 +1370,28 @@
   my_cflags := $(filter-out $(fdo_incompatible_flags),$(my_cflags))
 endif
 
+# No one should ever use this flag. On GCC its mere presence will disable all
+# warnings, even those that are specified after it (contrary to typical warning
+# flag behavior). It would also circumvent CFLAGS_NO_OVERRIDE, which forcibly
+# enables the warnings that catch things that are *always* bugs.
+my_illegal_flags := -w
+my_cflags := $(filter-out $(my_illegal_flags),$(my_cflags))
+my_cppflags := $(filter-out $(my_illegal_flags),$(my_cppflags))
+my_conlyflags := $(filter-out $(my_illegal_flags),$(my_conlyflags))
+
 $(LOCAL_INTERMEDIATE_TARGETS): PRIVATE_YACCFLAGS := $(LOCAL_YACCFLAGS)
 $(LOCAL_INTERMEDIATE_TARGETS): PRIVATE_ASFLAGS := $(my_asflags)
 $(LOCAL_INTERMEDIATE_TARGETS): PRIVATE_CONLYFLAGS := $(my_conlyflags)
 $(LOCAL_INTERMEDIATE_TARGETS): PRIVATE_CFLAGS := $(my_cflags)
 $(LOCAL_INTERMEDIATE_TARGETS): PRIVATE_CPPFLAGS := $(my_cppflags)
+$(LOCAL_INTERMEDIATE_TARGETS): PRIVATE_CFLAGS_NO_OVERRIDE := $(my_cflags_no_override)
+$(LOCAL_INTERMEDIATE_TARGETS): PRIVATE_CPPFLAGS_NO_OVERRIDE := $(my_cppflags_no_override)
 $(LOCAL_INTERMEDIATE_TARGETS): PRIVATE_RTTI_FLAG := $(LOCAL_RTTI_FLAG)
 $(LOCAL_INTERMEDIATE_TARGETS): PRIVATE_DEBUG_CFLAGS := $(debug_cflags)
 $(LOCAL_INTERMEDIATE_TARGETS): PRIVATE_C_INCLUDES := $(my_c_includes)
 $(LOCAL_INTERMEDIATE_TARGETS): PRIVATE_IMPORT_INCLUDES := $(import_includes)
 $(LOCAL_INTERMEDIATE_TARGETS): PRIVATE_LDFLAGS := $(my_ldflags)
 $(LOCAL_INTERMEDIATE_TARGETS): PRIVATE_LDLIBS := $(my_ldlibs)
-$(LOCAL_INTERMEDIATE_TARGETS): PRIVATE_NO_CRT := $(strip $(LOCAL_NO_CRT) $(LOCAL_NO_CRT_$(TARGET_$(LOCAL_2ND_ARCH_VAR_PREFIX)ARCH)))
 
 # this is really the way to get the files onto the command line instead
 # of using $^, because then LOCAL_ADDITIONAL_DEPENDENCIES doesn't work
@@ -1104,7 +1405,8 @@
 ###########################################################
 # all_libraries is used for the dependencies on LOCAL_BUILT_MODULE.
 all_libraries := \
-    $(built_shared_libraries) \
+    $(built_shared_library_deps) \
+    $(my_system_shared_libraries_fullpath) \
     $(built_static_libraries) \
     $(built_whole_libraries)
 
@@ -1119,16 +1421,31 @@
 export_includes := $(intermediates)/export_includes
 $(export_includes): PRIVATE_EXPORT_C_INCLUDE_DIRS := $(my_export_c_include_dirs)
 # Make sure .pb.h are already generated before any dependent source files get compiled.
-$(export_includes) : $(LOCAL_MODULE_MAKEFILE) $(proto_generated_headers)
+# Similarly, the generated DBus headers need to exist before we export their location.
+# People are not going to consume the aidl generated cpp file, but the cpp file is
+# generated after the headers, so this is a convenient way to ensure the headers exist.
+$(export_includes) : $(LOCAL_MODULE_MAKEFILE_DEP) $(proto_generated_headers) $(dbus_generated_headers) $(aidl_gen_cpp) $(vts_gen_cpp)
 	@echo Export includes file: $< -- $@
-	$(hide) mkdir -p $(dir $@) && rm -f $@
+	$(hide) mkdir -p $(dir $@) && rm -f $@.tmp
 ifdef my_export_c_include_dirs
 	$(hide) for d in $(PRIVATE_EXPORT_C_INCLUDE_DIRS); do \
-	        echo "-I $$d" >> $@; \
+	        echo "-I $$d" >> $@.tmp; \
 	        done
 else
-	$(hide) touch $@
+	$(hide) touch $@.tmp
 endif
+ifeq ($(BUILDING_WITH_NINJA),true)
+	$(hide) if cmp -s $@.tmp $@ ; then \
+	  rm $@.tmp ; \
+	else \
+	  mv $@.tmp $@ ; \
+	fi
+else
+	mv $@.tmp $@ ;
+endif
+
+# Kati adds restat=1 to ninja. GNU make does nothing for this.
+.KATI_RESTAT: $(export_includes)
 
 # Make sure export_includes gets generated when you are running mm/mmm
 $(LOCAL_BUILT_MODULE) : | $(export_includes)
diff --git a/core/build-system.html b/core/build-system.html
index caade58..bddde6a 100644
--- a/core/build-system.html
+++ b/core/build-system.html
@@ -270,6 +270,7 @@
         <li>Installs non-APK modules that have no tags specified.
         <li>Installs APKs according to the product definition files; tags
             are ignored for APK modules.
+        <li><code>ro.adb.secure=1</code>
         <li><code>ro.secure=1</code>
         <li><code>ro.debuggable=0</code>
         <li><code>adb</code> is disabled by default.
@@ -462,26 +463,17 @@
 <p>Sometimes you need to set flags specifically for different platforms.  Here
 is a list of which values the different build-system defined variables will be
 set to and some examples.</p>
-<p>For a device build, <code>TARGET_OS</code> is <code>linux</code> (we're using
-linux!), and <code>TARGET_ARCH</code> is <code>arm</code>.</p>
-<p>For a simulator build, <code>TARGET_OS</code> and <code>TARGET_ARCH</code>
-are set to the same as <code>HOST_OS</code> and <code>HOST_ARCH</code> are
-on your platform.  <code>TARGET_PRODUCT</code> is the name of the target
-hardware/product you are building for.  The value <code>sim</code> is used
-for the simulator.  We haven't thought through the full extent of customization
-that will happen here, but likely there will be additional UI configurations
-specified here as well.</p>
 <table cellspacing=25>
 <tr>
     <td valign=top align=center>
         <b>HOST_OS</b><br/>
         linux<br/>
-        darwin<br/>
-        (cygwin)
+        darwin
     </td>
     <td valign=top align=center>
         <b>HOST_ARCH</b><br/>
-        x86
+        x86<br/>
+        x86_64
     </td>
     <td valign=top align=center>
         <b>HOST_BUILD_TYPE</b><br/>
@@ -491,42 +483,40 @@
 </tr>
 <tr>
     <td valign=top align=center>
-        <b>TARGET_OS</b><br/>
-        linux<br/>
-        darwin<br/>
-        (cygwin)
-    </td>
-    <td valign=top align=center>
         <b>TARGET_ARCH</b><br/>
         arm<br/>
-        x86
+        arm64<br/>
+        mips<br/>
+        mips64<br/>
+        x86<br/>
+        x86_64
     </td>
     <td valign=top align=center>
         <b>TARGET_BUILD_TYPE</b><br/>
         release<br/>
         debug
     </td>
-    <td valign=top align=center>
-        <b>TARGET_PRODUCT</b><br/>
-        sim<br/>
-        dream<br/>
-        sooner
-    </td>
 </tr>
 </table>
 
+<p>There are also special variables to use instead of conditionals. Many of the
+normal variables (LOCAL_SRC_FILES, LOCAL_CFLAGS, etc.) can be conditionally added
+to with _{arch}, _{32|64}, and, for the host, _{os}.</p>
+
 <h4>Some Examples</h4>
 <pre>ifeq ($(TARGET_BUILD_TYPE),release)
 LOCAL_CFLAGS += -DNDEBUG=1
 endif
 
+LOCAL_CFLAGS_arm += -DTARGET_IS_ARM
+
+LOCAL_CFLAGS_64 += -DBIG_POINTER
+
 # from libutils
-ifeq ($(TARGET_OS),linux)
 # Use the futex based mutex and condition variable
 # implementation from android-arm because it's shared mem safe
-LOCAL_SRC_FILES += futex_synchro.c
-LOCAL_LDLIBS += -lrt -ldl
-endif
+LOCAL_SRC_FILES_linux += futex_synchro.c
+LOCAL_LDLIBS_linux += -lrt -ldl
 
 </pre>
 
@@ -554,7 +544,7 @@
 the unstripped executables so GDB can find the symbols.
 <code>LOCAL_UNSTRIPPED_PATH</code> is not necessary if you only specified
 <code>LOCAL_MODULE_RELATIVE_PATH</code>.</p>
-<p>Look in <code>config/envsetup.make</code> for all of the variables defining
+<p>Look in <code>core/envsetup.mk</code> for all of the variables defining
 places to build things.</p>
 <p>FYI: If you're installing an executable to /sbin, you probably also want to
 set <code>LOCAL_FORCE_STATIC_EXECUTABLE := true</code> in your Android.mk, which
@@ -587,6 +577,11 @@
     and definitions that are specific to either the host or the target builds.
     Do not set variables that start with HOST_ or TARGET_ in your makefiles.
     </li>
+    <li><b>HOST_CROSS_</b> - These contain the directories and definitions that
+    are specific to cross-building host binaries. The common case is building
+    windows host tools on linux. Do not set variables that start with
+    HOST_CROSS_ in your makefiles.
+    </li>
     <li><b>BUILD_</b> and <b>CLEAR_VARS</b> - These contain the names of
     well-defined template makefiles to include.  Some examples are CLEAR_VARS
     and BUILD_HOST_PACKAGE.</li>
@@ -773,13 +768,13 @@
 </code></p>
 
 <h4>LOCAL_PREBUILT_EXECUTABLES</h4>
-<p>When including $(BUILD_PREBUILT) or $(BUILD_HOST_PREBUILT), set these to
-executables that you want copied.  They're located automatically into the
+<p>When including $(BUILD_MULTI_PREBUILT) or $(BUILD_HOST_PREBUILT), set these
+to executables that you want copied.  They're located automatically into the
 right bin directory.</p>
 
 <h4>LOCAL_PREBUILT_LIBS</h4>
-<p>When including $(BUILD_PREBUILT) or $(BUILD_HOST_PREBUILT), set these to
-libraries that you want copied.  They're located automatically into the
+<p>When including $(BUILD_MULTI_PREBUILT) or $(BUILD_HOST_PREBUILT), set these
+to libraries that you want copied.  They're located automatically into the
 right lib directory.</p>
 
 <h4>LOCAL_SHARED_LIBRARIES</h4>
@@ -836,6 +831,16 @@
 the relative path.</p>
 <p>See <a href="#moving-modules">Putting modules elsewhere</a> for more.</p>
 
+<h4>LOCAL_MODULE_HOST_OS</h4>
+<p>This specifies which OSes are supported by this host module. It is not used
+for target builds. The accepted values here are combinations of
+<code>linux</code>, <code>darwin</code>, and <code>windows</code>. By default,
+linux and darwin (macOS) are considered to be supported. If a module should
+build under windows, you must specify windows along with any other supported OSes.
+Some examples:</p>
+<p><code>LOCAL_MODULE_HOST_OS := linux<br/>
+LOCAL_MODULE_HOST_OS := darwin linux windows</code></p>
+
 <h4>LOCAL_UNSTRIPPED_PATH</h4>
 <p>Instructs the build system to put the unstripped version of the module
 somewhere other than what's normal for its type.  Usually, you override this
@@ -878,10 +883,10 @@
 If you make a change that requires an update, you need to update two places
 so this message will be printed.
 <ul>
-    <li>In config/envsetup.make, increment the
+    <li>In core/envsetup.mk, increment the
         CORRECT_BUILD_ENV_SEQUENCE_NUMBER definition.</li>
     <li>In buildspec.mk.default, update the BUILD_ENV_SEQUENCE_NUMBER
-        definition to match the one in config/envsetup.make</li>
+        definition to match the one in core/envsetup.mk</li>
 </ul>
 The scripts automatically get the value from the build system, so they will
 trigger the warning as well.
@@ -900,53 +905,39 @@
 for some other dependency that isn't created automatically.</p>
 
 <h4>LOCAL_BUILT_MODULE</h4>
+<p class=warning>This should not be used, since multiple binaries are now
+created from a single module definition.</p>
 <p>When a module is built, the module is created in an intermediate
 directory then copied to its final location.  LOCAL_BUILT_MODULE is
 the full path to the intermediate file.  See LOCAL_INSTALLED_MODULE
 for the path to the final installed location of the module.</p>
 
-<h4>LOCAL_HOST</h4>
-<p>Set by the host_xxx.make includes to tell base_rules.make and the other
-includes that we're building for the host.  Kenneth did this as part of
-openbinder, and I would like to clean it up so the rules, includes and
-definitions aren't duplicated for host and target.</p>
+<h4>LOCAL_IS_HOST_MODULE</h4>
+<p>Set by the host_xxx.mk includes to tell base_rules.mk and the other
+includes that we're building for the host.</p>
 
 <h4>LOCAL_INSTALLED_MODULE</h4>
+<p class=warning>This should not be used, since multiple binaries are now
+created from a single module definition.</p>
 <p>The fully qualified path name of the final location of the module.
 See LOCAL_BUILT_MODULE for the location of the intermediate file that
 the make rules should actually be constructing.</p>
 
-<h4>LOCAL_REPLACE_VARS</h4>
-<p>Used in some stuff remaining from the openbinder for building scripts
-with particular values set,</p>
-
-<h4>LOCAL_SCRIPTS</h4>
-<p>Used in some stuff remaining from the openbinder build system that we
-might find handy some day.</p>
-
 <h4>LOCAL_MODULE_CLASS</h4>
 <p>Which kind of module this is.  This variable is used to construct other
-variable names used to locate the modules.  See base_rules.make and
-envsetup.make.</p>
-
-<h4>LOCAL_MODULE_NAME</h4>
-<p>Set to the leaf name of the LOCAL_BUILT_MODULE.  I'm not sure,
-but it looks like it's just used in the WHO_AM_I variable to identify
-in the pretty printing what's being built.</p>
+variable names used to locate the modules.  See base_rules.mk and
+envsetup.mk.</p>
 
 <h4>LOCAL_MODULE_SUFFIX</h4>
 <p>The suffix that will be appended to <code>LOCAL_MODULE</code> to form
 <code>LOCAL_MODULE_NAME</code>.  For example, .so, .a, .dylib.</p>
 
 <h4>LOCAL_STRIP_MODULE</h4>
-<p>Calculated in base_rules.make to determine if this module should actually
-be stripped or not, based on whether <code>LOCAL_STRIPPABLE_MODULE</code>
-is set, and whether the combo is configured to ever strip modules.  With
-Iliyan's stripping tool, this might change.</p>
-
-<h4>LOCAL_STRIPPABLE_MODULE</h4>
-<p>Set by the include makefiles if that type of module is strippable. 
-Executables and shared libraries are.</p>
+<p>If set to true (the default), the binary will be stripped and a debug
+link will be set up so that GDB will still work. If set to no_debuglink,
+the binary will be stripped, but no debug link will be added. If set to
+keep_symbols, it will strip the debug information, but keep the symbol table.
+Any other value will prevent stripping.</p>
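
For instance, the accepted values map to Android.mk settings like these (a sketch; module context omitted, values taken from the paragraph above):

    LOCAL_STRIP_MODULE := true           # strip, keep a GDB debug link (the default)
    LOCAL_STRIP_MODULE := no_debuglink   # strip, add no debug link
    LOCAL_STRIP_MODULE := keep_symbols   # drop debug info, keep the symbol table
    LOCAL_STRIP_MODULE := false          # any other value: do not strip
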
 
 <h4>LOCAL_SYSTEM_SHARED_LIBRARIES</h4>
 <p>Used while building the base libraries: libc, libm, libdl.  Usually
diff --git a/core/build_id.mk b/core/build_id.mk
index 0e8e2cf..5a012b9 100644
--- a/core/build_id.mk
+++ b/core/build_id.mk
@@ -18,4 +18,4 @@
 # (like "CRB01").  It must be a single word, and is
 # capitalized by convention.
 
-export BUILD_ID=MASTER
+export BUILD_ID=NYC
diff --git a/core/ccache.mk b/core/ccache.mk
index 34e5e1c..5c2ae23 100644
--- a/core/ccache.mk
+++ b/core/ccache.mk
@@ -14,7 +14,7 @@
 # limitations under the License.
 #
 
-ifneq ($(USE_CCACHE),)
+ifneq ($(filter-out false,$(USE_CCACHE)),)
   # The default check uses size and modification time, causing false misses
   # since the mtime depends when the repo was checked out
   export CCACHE_COMPILERCHECK := content
@@ -37,11 +37,6 @@
   export CCACHE_CPP2 := true
 
   CCACHE_HOST_TAG := $(HOST_PREBUILT_TAG)
-  # If we are cross-compiling Windows binaries on Linux
-  # then use the linux ccache binary instead.
-  ifeq ($(HOST_OS)-$(BUILD_OS),windows-linux)
-    CCACHE_HOST_TAG := linux-$(HOST_PREBUILT_ARCH)
-  endif
   ccache := prebuilts/misc/$(CCACHE_HOST_TAG)/ccache/ccache
   # Check that the executable is here.
   ccache := $(strip $(wildcard $(ccache)))
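
The guard change above means ccache is now skipped when USE_CCACHE is explicitly false, not only when it is unset. Typical usage (a sketch; whether you set it in the environment or in buildspec.mk is up to you):

    USE_CCACHE := 1       # any non-empty value other than "false" enables ccache
    USE_CCACHE := false   # explicitly disables it, even if inherited from the environment
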
diff --git a/core/clang/HOST_CROSS_x86.mk b/core/clang/HOST_CROSS_x86.mk
new file mode 100644
index 0000000..b78a074
--- /dev/null
+++ b/core/clang/HOST_CROSS_x86.mk
@@ -0,0 +1,56 @@
+
+include $(BUILD_SYSTEM)/clang/x86.mk
+
+CLANG_CONFIG_x86_HOST_CROSS_TRIPLE := i686-pc-mingw32
+
+CLANG_CONFIG_x86_HOST_CROSS_EXTRA_ASFLAGS := \
+  $(CLANG_CONFIG_EXTRA_ASFLAGS) \
+  $(CLANG_CONFIG_HOST_CROSS_EXTRA_ASFLAGS) \
+  $(CLANG_CONFIG_x86_EXTRA_ASFLAGS) \
+  $(CLANG_CONFIG_x86_HOST_CROSS_COMBO_EXTRA_ASFLAGS) \
+  -target $(CLANG_CONFIG_x86_HOST_CROSS_TRIPLE)
+
+CLANG_CONFIG_x86_HOST_CROSS_EXTRA_CFLAGS := \
+  $(CLANG_CONFIG_EXTRA_CFLAGS) \
+  $(CLANG_CONFIG_HOST_CROSS_EXTRA_CFLAGS) \
+  $(CLANG_CONFIG_x86_EXTRA_CFLAGS) \
+  $(CLANG_CONFIG_x86_HOST_CROSS_COMBO_EXTRA_CFLAGS) \
+  $(CLANG_CONFIG_x86_HOST_CROSS_EXTRA_ASFLAGS)
+
+CLANG_CONFIG_x86_HOST_CROSS_EXTRA_CONLYFLAGS := \
+  $(CLANG_CONFIG_EXTRA_CONLYFLAGS) \
+  $(CLANG_CONFIG_HOST_CROSS_EXTRA_CONLYFLAGS) \
+  $(CLANG_CONFIG_x86_EXTRA_CONLYFLAGS) \
+  $(CLANG_CONFIG_x86_HOST_CROSS_COMBO_EXTRA_CONLYFLAGS)
+
+CLANG_CONFIG_x86_HOST_CROSS_EXTRA_CPPFLAGS := \
+  $(CLANG_CONFIG_EXTRA_CPPFLAGS) \
+  $(CLANG_CONFIG_HOST_CROSS_EXTRA_CPPFLAGS) \
+  $(CLANG_CONFIG_x86_EXTRA_CPPFLAGS) \
+  $(CLANG_CONFIG_x86_HOST_CROSS_COMBO_EXTRA_CPPFLAGS) \
+  -target $(CLANG_CONFIG_x86_HOST_CROSS_TRIPLE)
+
+CLANG_CONFIG_x86_HOST_CROSS_EXTRA_LDFLAGS := \
+  $(CLANG_CONFIG_EXTRA_LDFLAGS) \
+  $(CLANG_CONFIG_HOST_CROSS_EXTRA_LDFLAGS) \
+  $(CLANG_CONFIG_x86_EXTRA_LDFLAGS) \
+  $(CLANG_CONFIG_x86_HOST_CROSS_COMBO_EXTRA_LDFLAGS) \
+  -target $(CLANG_CONFIG_x86_HOST_CROSS_TRIPLE)
+
+$(clang_2nd_arch_prefix)CLANG_HOST_CROSS_GLOBAL_CFLAGS := \
+  $(call convert-to-host-clang-flags,$($(clang_2nd_arch_prefix)HOST_CROSS_GLOBAL_CFLAGS)) \
+  $(CLANG_CONFIG_x86_HOST_CROSS_EXTRA_CFLAGS)
+
+$(clang_2nd_arch_prefix)CLANG_HOST_CROSS_GLOBAL_CONLYFLAGS := \
+  $(call convert-to-host-clang-flags,$($(clang_2nd_arch_prefix)HOST_CROSS_GLOBAL_CONLYFLAGS)) \
+  $(CLANG_CONFIG_x86_HOST_CROSS_EXTRA_CONLYFLAGS)
+
+$(clang_2nd_arch_prefix)CLANG_HOST_CROSS_GLOBAL_CPPFLAGS := \
+  $(call convert-to-host-clang-flags,$($(clang_2nd_arch_prefix)HOST_CROSS_GLOBAL_CPPFLAGS)) \
+  $(CLANG_CONFIG_x86_HOST_CROSS_EXTRA_CPPFLAGS)
+
+$(clang_2nd_arch_prefix)CLANG_HOST_CROSS_GLOBAL_LDFLAGS := \
+  $(call convert-to-host-clang-flags,$($(clang_2nd_arch_prefix)HOST_CROSS_GLOBAL_LDFLAGS)) \
+  $(CLANG_CONFIG_x86_HOST_CROSS_EXTRA_LDFLAGS)
+
+$(clang_2nd_arch_prefix)HOST_CROSS_LIBPROFILE_RT := $(LLVM_RTLIB_PATH)/libclang_rt.profile-i686.a
diff --git a/core/clang/HOST_CROSS_x86_64.mk b/core/clang/HOST_CROSS_x86_64.mk
new file mode 100644
index 0000000..b6f2de9
--- /dev/null
+++ b/core/clang/HOST_CROSS_x86_64.mk
@@ -0,0 +1,56 @@
+
+include $(BUILD_SYSTEM)/clang/x86_64.mk
+
+CLANG_CONFIG_x86_64_HOST_CROSS_TRIPLE := x86_64-pc-mingw32
+
+CLANG_CONFIG_x86_64_HOST_CROSS_EXTRA_ASFLAGS := \
+  $(CLANG_CONFIG_EXTRA_ASFLAGS) \
+  $(CLANG_CONFIG_HOST_CROSS_EXTRA_ASFLAGS) \
+  $(CLANG_CONFIG_x86_64_EXTRA_ASFLAGS) \
+  $(CLANG_CONFIG_x86_64_HOST_CROSS_COMBO_EXTRA_ASFLAGS) \
+  -target $(CLANG_CONFIG_x86_64_HOST_CROSS_TRIPLE)
+
+CLANG_CONFIG_x86_64_HOST_CROSS_EXTRA_CFLAGS := \
+  $(CLANG_CONFIG_EXTRA_CFLAGS) \
+  $(CLANG_CONFIG_HOST_CROSS_EXTRA_CFLAGS) \
+  $(CLANG_CONFIG_x86_64_EXTRA_CFLAGS) \
+  $(CLANG_CONFIG_x86_64_HOST_CROSS_COMBO_EXTRA_CFLAGS) \
+  $(CLANG_CONFIG_x86_64_HOST_CROSS_EXTRA_ASFLAGS)
+
+CLANG_CONFIG_x86_64_HOST_CROSS_EXTRA_CONLYFLAGS := \
+  $(CLANG_CONFIG_EXTRA_CONLYFLAGS) \
+  $(CLANG_CONFIG_HOST_CROSS_EXTRA_CONLYFLAGS) \
+  $(CLANG_CONFIG_x86_64_EXTRA_CONLYFLAGS) \
+  $(CLANG_CONFIG_x86_64_HOST_CROSS_COMBO_EXTRA_CONLYFLAGS)
+
+CLANG_CONFIG_x86_64_HOST_CROSS_EXTRA_CPPFLAGS := \
+  $(CLANG_CONFIG_EXTRA_CPPFLAGS) \
+  $(CLANG_CONFIG_HOST_CROSS_EXTRA_CPPFLAGS) \
+  $(CLANG_CONFIG_x86_64_EXTRA_CPPFLAGS) \
+  $(CLANG_CONFIG_x86_64_HOST_CROSS_COMBO_EXTRA_CPPFLAGS) \
+  -target $(CLANG_CONFIG_x86_64_HOST_CROSS_TRIPLE)
+
+CLANG_CONFIG_x86_64_HOST_CROSS_EXTRA_LDFLAGS := \
+  $(CLANG_CONFIG_EXTRA_LDFLAGS) \
+  $(CLANG_CONFIG_HOST_CROSS_EXTRA_LDFLAGS) \
+  $(CLANG_CONFIG_x86_64_EXTRA_LDFLAGS) \
+  $(CLANG_CONFIG_x86_64_HOST_CROSS_COMBO_EXTRA_LDFLAGS) \
+  -target $(CLANG_CONFIG_x86_64_HOST_CROSS_TRIPLE)
+
+$(clang_2nd_arch_prefix)CLANG_HOST_CROSS_GLOBAL_CFLAGS := \
+  $(call convert-to-host-clang-flags,$($(clang_2nd_arch_prefix)HOST_CROSS_GLOBAL_CFLAGS)) \
+  $(CLANG_CONFIG_x86_64_HOST_CROSS_EXTRA_CFLAGS)
+
+$(clang_2nd_arch_prefix)CLANG_HOST_CROSS_GLOBAL_CONLYFLAGS := \
+  $(call convert-to-host-clang-flags,$($(clang_2nd_arch_prefix)HOST_CROSS_GLOBAL_CONLYFLAGS)) \
+  $(CLANG_CONFIG_x86_64_HOST_CROSS_EXTRA_CONLYFLAGS)
+
+$(clang_2nd_arch_prefix)CLANG_HOST_CROSS_GLOBAL_CPPFLAGS := \
+  $(call convert-to-host-clang-flags,$($(clang_2nd_arch_prefix)HOST_CROSS_GLOBAL_CPPFLAGS)) \
+  $(CLANG_CONFIG_x86_64_HOST_CROSS_EXTRA_CPPFLAGS)
+
+$(clang_2nd_arch_prefix)CLANG_HOST_CROSS_GLOBAL_LDFLAGS := \
+  $(call convert-to-host-clang-flags,$($(clang_2nd_arch_prefix)HOST_CROSS_GLOBAL_LDFLAGS)) \
+  $(CLANG_CONFIG_x86_64_HOST_CROSS_EXTRA_LDFLAGS)
+
+$(clang_2nd_arch_prefix)HOST_CROSS_LIBPROFILE_RT := $(LLVM_RTLIB_PATH)/libclang_rt.profile-x86_64.a
diff --git a/core/clang/HOST_x86.mk b/core/clang/HOST_x86.mk
index f14a9c3..0ec64ad 100644
--- a/core/clang/HOST_x86.mk
+++ b/core/clang/HOST_x86.mk
@@ -16,9 +16,6 @@
 CLANG_CONFIG_x86_HOST_COMBO_EXTRA_CPPFLAGS := $(CLANG_CONFIG_x86_DARWIN_HOST_EXTRA_CPPFLAGS)
 CLANG_CONFIG_x86_HOST_COMBO_EXTRA_LDFLAGS := $(CLANG_CONFIG_x86_DARWIN_HOST_EXTRA_LDFLAGS)
 endif
-ifeq ($(HOST_OS),windows)
-CLANG_CONFIG_x86_HOST_TRIPLE := i686-pc-mingw32
-endif
 
 CLANG_CONFIG_x86_HOST_EXTRA_ASFLAGS := \
   $(CLANG_CONFIG_EXTRA_ASFLAGS) \
diff --git a/core/clang/HOST_x86_64.mk b/core/clang/HOST_x86_64.mk
index 1211e73..d46cb67 100644
--- a/core/clang/HOST_x86_64.mk
+++ b/core/clang/HOST_x86_64.mk
@@ -16,9 +16,6 @@
 CLANG_CONFIG_x86_64_HOST_COMBO_EXTRA_CPPFLAGS := $(CLANG_CONFIG_x86_DARWIN_HOST_EXTRA_CPPFLAGS)
 CLANG_CONFIG_x86_64_HOST_COMBO_EXTRA_LDFLAGS := $(CLANG_CONFIG_x86_DARWIN_HOST_EXTRA_LDFLAGS)
 endif
-ifeq ($(HOST_OS),windows)
-CLANG_CONFIG_x86_64_HOST_TRIPLE := x86_64-pc-mingw64
-endif
 
 CLANG_CONFIG_x86_64_HOST_EXTRA_ASFLAGS := \
   $(CLANG_CONFIG_EXTRA_ASFLAGS) \
diff --git a/core/clang/HOST_x86_common.mk b/core/clang/HOST_x86_common.mk
index 74b5a69..9e71750 100644
--- a/core/clang/HOST_x86_common.mk
+++ b/core/clang/HOST_x86_common.mk
@@ -6,27 +6,31 @@
 
 CLANG_CONFIG_x86_DARWIN_HOST_EXTRA_CFLAGS := \
   -integrated-as
+
+CLANG_CONFIG_x86_DARWIN_HOST_EXTRA_CFLAGS += -fstack-protector-strong
 endif
 
 ifeq ($(HOST_OS),linux)
 CLANG_CONFIG_x86_LINUX_HOST_EXTRA_ASFLAGS := \
   --gcc-toolchain=$($(clang_2nd_arch_prefix)HOST_TOOLCHAIN_FOR_CLANG) \
-  --sysroot=$($(clang_2nd_arch_prefix)HOST_TOOLCHAIN_FOR_CLANG)/sysroot
+  --sysroot $($(clang_2nd_arch_prefix)HOST_TOOLCHAIN_FOR_CLANG)/sysroot
 
 CLANG_CONFIG_x86_LINUX_HOST_EXTRA_CFLAGS := \
   --gcc-toolchain=$($(clang_2nd_arch_prefix)HOST_TOOLCHAIN_FOR_CLANG)
 
+CLANG_CONFIG_x86_LINUX_HOST_EXTRA_CFLAGS += -fstack-protector-strong
+
 ifneq ($(strip $($(clang_2nd_arch_prefix)HOST_IS_64_BIT)),)
 CLANG_CONFIG_x86_LINUX_HOST_EXTRA_CPPFLAGS := \
   --gcc-toolchain=$($(clang_2nd_arch_prefix)HOST_TOOLCHAIN_FOR_CLANG) \
-  --sysroot=$($(clang_2nd_arch_prefix)HOST_TOOLCHAIN_FOR_CLANG)/sysroot \
+  --sysroot $($(clang_2nd_arch_prefix)HOST_TOOLCHAIN_FOR_CLANG)/sysroot \
   -isystem $($(clang_2nd_arch_prefix)HOST_TOOLCHAIN_FOR_CLANG)/x86_64-linux/include/c++/4.8 \
   -isystem $($(clang_2nd_arch_prefix)HOST_TOOLCHAIN_FOR_CLANG)/x86_64-linux/include/c++/4.8/x86_64-linux \
   -isystem $($(clang_2nd_arch_prefix)HOST_TOOLCHAIN_FOR_CLANG)/x86_64-linux/include/c++/4.8/backward
 
 CLANG_CONFIG_x86_LINUX_HOST_EXTRA_LDFLAGS := \
   --gcc-toolchain=$($(clang_2nd_arch_prefix)HOST_TOOLCHAIN_FOR_CLANG) \
-  --sysroot=$($(clang_2nd_arch_prefix)HOST_TOOLCHAIN_FOR_CLANG)/sysroot \
+  --sysroot $($(clang_2nd_arch_prefix)HOST_TOOLCHAIN_FOR_CLANG)/sysroot \
   -B$($(clang_2nd_arch_prefix)HOST_TOOLCHAIN_FOR_CLANG)/x86_64-linux/bin \
   -B$($(clang_2nd_arch_prefix)HOST_TOOLCHAIN_FOR_CLANG)/lib/gcc/x86_64-linux/4.8 \
   -L$($(clang_2nd_arch_prefix)HOST_TOOLCHAIN_FOR_CLANG)/lib/gcc/x86_64-linux/4.8 \
@@ -34,21 +38,17 @@
 else
 CLANG_CONFIG_x86_LINUX_HOST_EXTRA_CPPFLAGS := \
   --gcc-toolchain=$($(clang_2nd_arch_prefix)HOST_TOOLCHAIN_FOR_CLANG) \
-  --sysroot=$($(clang_2nd_arch_prefix)HOST_TOOLCHAIN_FOR_CLANG)/sysroot \
+  --sysroot $($(clang_2nd_arch_prefix)HOST_TOOLCHAIN_FOR_CLANG)/sysroot \
   -isystem $($(clang_2nd_arch_prefix)HOST_TOOLCHAIN_FOR_CLANG)/x86_64-linux/include/c++/4.8 \
   -isystem $($(clang_2nd_arch_prefix)HOST_TOOLCHAIN_FOR_CLANG)/x86_64-linux/include/c++/4.8/x86_64-linux/32 \
   -isystem $($(clang_2nd_arch_prefix)HOST_TOOLCHAIN_FOR_CLANG)/x86_64-linux/include/c++/4.8/backward
 
 CLANG_CONFIG_x86_LINUX_HOST_EXTRA_LDFLAGS := \
   --gcc-toolchain=$($(clang_2nd_arch_prefix)HOST_TOOLCHAIN_FOR_CLANG) \
-  --sysroot=$($(clang_2nd_arch_prefix)HOST_TOOLCHAIN_FOR_CLANG)/sysroot \
+  --sysroot $($(clang_2nd_arch_prefix)HOST_TOOLCHAIN_FOR_CLANG)/sysroot \
   -B$($(clang_2nd_arch_prefix)HOST_TOOLCHAIN_FOR_CLANG)/x86_64-linux/bin \
   -B$($(clang_2nd_arch_prefix)HOST_TOOLCHAIN_FOR_CLANG)/lib/gcc/x86_64-linux/4.8/32 \
   -L$($(clang_2nd_arch_prefix)HOST_TOOLCHAIN_FOR_CLANG)/lib/gcc/x86_64-linux/4.8/32 \
   -L$($(clang_2nd_arch_prefix)HOST_TOOLCHAIN_FOR_CLANG)/x86_64-linux/lib32/
 endif
 endif  # Linux
-
-ifeq ($(HOST_OS),windows)
-# nothing required here yet
-endif
diff --git a/core/clang/TARGET_arm.mk b/core/clang/TARGET_arm.mk
index 62ce242..5c1bf6f 100644
--- a/core/clang/TARGET_arm.mk
+++ b/core/clang/TARGET_arm.mk
@@ -61,7 +61,7 @@
   $(call $(clang_2nd_arch_prefix)convert-to-clang-flags,$($(clang_2nd_arch_prefix)TARGET_GLOBAL_LDFLAGS)) \
   $(CLANG_CONFIG_arm_TARGET_EXTRA_LDFLAGS)
 
-$(clang_2nd_arch_prefix)RS_TRIPLE := armv7-none-linux-gnueabi
+$(clang_2nd_arch_prefix)RS_TRIPLE := armv7-linux-androideabi
 $(clang_2nd_arch_prefix)RS_TRIPLE_CFLAGS :=
 $(clang_2nd_arch_prefix)RS_COMPAT_TRIPLE := armv7-none-linux-gnueabi
 
@@ -69,4 +69,4 @@
 
 # Address sanitizer clang config
 $(clang_2nd_arch_prefix)ADDRESS_SANITIZER_RUNTIME_LIBRARY := libclang_rt.asan-arm-android
-$(clang_2nd_arch_prefix)ADDRESS_SANITIZER_RPATH := /system/lib/asan
+$(clang_2nd_arch_prefix)ADDRESS_SANITIZER_LINKER := /system/bin/linker_asan
diff --git a/core/clang/TARGET_arm64.mk b/core/clang/TARGET_arm64.mk
index ea4d937..15b0172 100644
--- a/core/clang/TARGET_arm64.mk
+++ b/core/clang/TARGET_arm64.mk
@@ -66,5 +66,5 @@
 TARGET_LIBPROFILE_RT := $(LLVM_RTLIB_PATH)/libclang_rt.profile-aarch64-android.a
 
 # Address sanitizer clang config
-ADDRESS_SANITIZER_RUNTIME_LIBRARY := libclang_rt.asan-arm64-android
-ADDRESS_SANITIZER_RPATH := /system/lib64/asan
+ADDRESS_SANITIZER_RUNTIME_LIBRARY := libclang_rt.asan-aarch64-android
+ADDRESS_SANITIZER_LINKER := /system/bin/linker_asan64
diff --git a/core/clang/TARGET_mips.mk b/core/clang/TARGET_mips.mk
index 19bbaf2..1a0176a 100644
--- a/core/clang/TARGET_mips.mk
+++ b/core/clang/TARGET_mips.mk
@@ -60,7 +60,7 @@
   $(call $(clang_2nd_arch_prefix)convert-to-clang-flags,$($(clang_2nd_arch_prefix)TARGET_GLOBAL_LDFLAGS)) \
   $(CLANG_CONFIG_mips_TARGET_EXTRA_LDFLAGS)
 
-$(clang_2nd_arch_prefix)RS_TRIPLE := armv7-none-linux-gnueabi
+$(clang_2nd_arch_prefix)RS_TRIPLE := armv7-linux-androideabi
 $(clang_2nd_arch_prefix)RS_TRIPLE_CFLAGS :=
 RS_COMPAT_TRIPLE := mipsel-linux-android
 
diff --git a/core/clang/TARGET_x86.mk b/core/clang/TARGET_x86.mk
index 4e9e8be..741768b 100644
--- a/core/clang/TARGET_x86.mk
+++ b/core/clang/TARGET_x86.mk
@@ -19,11 +19,8 @@
   $(CLANG_CONFIG_TARGET_EXTRA_CFLAGS) \
   $(CLANG_CONFIG_x86_EXTRA_CFLAGS) \
   $(CLANG_CONFIG_x86_TARGET_EXTRA_ASFLAGS) \
-  -fno-optimize-sibling-calls \
   -mstackrealign
 
-# http://llvm.org/bugs/show_bug.cgi?id=15086,
-# llvm tail call optimization is wrong for x86.
 # -mstackrealign is needed to realign stack in native code
 # that could be called from JNI, so that movaps instruction
 # will work on assumed stack aligned local variables.
@@ -69,8 +66,12 @@
   $(call $(clang_2nd_arch_prefix)convert-to-clang-flags,$($(clang_2nd_arch_prefix)TARGET_GLOBAL_LDFLAGS)) \
   $(CLANG_CONFIG_x86_TARGET_EXTRA_LDFLAGS)
 
-$(clang_2nd_arch_prefix)RS_TRIPLE := armv7-none-linux-gnueabi
+$(clang_2nd_arch_prefix)RS_TRIPLE := armv7-linux-androideabi
 $(clang_2nd_arch_prefix)RS_TRIPLE_CFLAGS := -D__i386__
 $(clang_2nd_arch_prefix)RS_COMPAT_TRIPLE := i686-linux-android
 
 $(clang_2nd_arch_prefix)TARGET_LIBPROFILE_RT := $(LLVM_RTLIB_PATH)/libclang_rt.profile-i686-android.a
+
+# Address sanitizer clang config
+$(clang_2nd_arch_prefix)ADDRESS_SANITIZER_RUNTIME_LIBRARY := libclang_rt.asan-i686-android
+$(clang_2nd_arch_prefix)ADDRESS_SANITIZER_LINKER := /system/bin/linker_asan
diff --git a/core/clang/arm.mk b/core/clang/arm.mk
index bf31f51..4053bb2 100644
--- a/core/clang/arm.mk
+++ b/core/clang/arm.mk
@@ -7,12 +7,7 @@
 ifneq (,$(filter krait,$(TARGET_$(combo_2nd_arch_prefix)CPU_VARIANT)))
   # Android's clang supports krait as a CPU whereas GCC doesn't. Specify
   # -mcpu here rather than the more normal core/combo/arch/arm/armv7-a-neon.mk.
-  CLANG_CONFIG_arm_EXTRA_CFLAGS += -mcpu=krait
-endif
-
-ifeq ($(HOST_OS),darwin)
-  # Darwin is really bad at dealing with idiv/sdiv. Don't use krait on Darwin.
-  CLANG_CONFIG_arm_EXTRA_CFLAGS += -mcpu=cortex-a9
+  CLANG_CONFIG_arm_EXTRA_CFLAGS += -mcpu=krait -mfpu=neon-vfpv4
 endif
 
 CLANG_CONFIG_arm_EXTRA_CPPFLAGS :=
@@ -34,8 +29,7 @@
   -fno-partial-inlining \
   -fno-strict-volatile-bitfields \
   -fno-tree-copy-prop \
-  -fno-tree-loop-optimize \
-  -Wa,--noexecstack
+  -fno-tree-loop-optimize
 
 define subst-clang-incompatible-arm-flags
   $(subst -march=armv5te,-march=armv5t,\
diff --git a/core/clang/arm64.mk b/core/clang/arm64.mk
index ab395b3..cad7321 100644
--- a/core/clang/arm64.mk
+++ b/core/clang/arm64.mk
@@ -13,8 +13,7 @@
   -frerun-cse-after-loop \
   -frename-registers \
   -fno-strict-volatile-bitfields \
-  -fno-align-jumps \
-  -Wa,--noexecstack
+  -fno-align-jumps
 
 # We don't have any arm64 flags to substitute yet.
 define subst-clang-incompatible-arm64-flags
diff --git a/core/clang/config.mk b/core/clang/config.mk
index e1bfb01..6cc3446 100644
--- a/core/clang/config.mk
+++ b/core/clang/config.mk
@@ -1,14 +1,7 @@
 ## Clang configurations.
 
-# WITHOUT_CLANG covers both HOST and TARGET
-ifeq ($(WITHOUT_CLANG),true)
-WITHOUT_TARGET_CLANG := true
-WITHOUT_HOST_CLANG := true
-endif
-
-LLVM_PREBUILTS_VERSION := 3.6
-LLVM_PREBUILTS_PATH := prebuilts/clang/$(BUILD_OS)-x86/host/$(LLVM_PREBUILTS_VERSION)/bin
-LLVM_RTLIB_PATH := $(LLVM_PREBUILTS_PATH)/../lib/clang/$(LLVM_PREBUILTS_VERSION)/lib/linux/
+LLVM_PREBUILTS_PATH := $(LLVM_PREBUILTS_BASE)/$(BUILD_OS)-x86/$(LLVM_PREBUILTS_VERSION)/bin
+LLVM_RTLIB_PATH := $(LLVM_PREBUILTS_PATH)/../lib64/clang/$(LLVM_RELEASE_VERSION)/lib/linux/
 
 CLANG := $(LLVM_PREBUILTS_PATH)/clang$(BUILD_EXECUTABLE_SUFFIX)
 CLANG_CXX := $(LLVM_PREBUILTS_PATH)/clang++$(BUILD_EXECUTABLE_SUFFIX)
@@ -18,6 +11,16 @@
 CLANG_TBLGEN := $(BUILD_OUT_EXECUTABLES)/clang-tblgen$(BUILD_EXECUTABLE_SUFFIX)
 LLVM_TBLGEN := $(BUILD_OUT_EXECUTABLES)/llvm-tblgen$(BUILD_EXECUTABLE_SUFFIX)
 
+# RenderScript-specific tools
+# These are tied to the version of LLVM directly in external/, so they might
+# trail the host prebuilts being used for the rest of the build process.
+RS_LLVM_PREBUILTS_VERSION := clang-2690385
+RS_LLVM_PREBUILTS_BASE := prebuilts/clang/host
+RS_LLVM_PREBUILTS_PATH := $(RS_LLVM_PREBUILTS_BASE)/$(BUILD_OS)-x86/$(RS_LLVM_PREBUILTS_VERSION)/bin
+RS_CLANG := $(RS_LLVM_PREBUILTS_PATH)/clang$(BUILD_EXECUTABLE_SUFFIX)
+RS_LLVM_AS := $(RS_LLVM_PREBUILTS_PATH)/llvm-as$(BUILD_EXECUTABLE_SUFFIX)
+RS_LLVM_LINK := $(RS_LLVM_PREBUILTS_PATH)/llvm-link$(BUILD_EXECUTABLE_SUFFIX)
+
 # Clang flags for all host or target rules
 CLANG_CONFIG_EXTRA_ASFLAGS :=
 CLANG_CONFIG_EXTRA_CFLAGS :=
@@ -33,7 +36,8 @@
   -Werror=int-conversion
 
 # Disable overly aggressive warning for macros defined with a leading underscore
-# This happens in AndroidConfig.h, which is included nearly everywhere.
+# This used to happen in AndroidConfig.h, which was included everywhere.
+# TODO: can we remove this now?
 CLANG_CONFIG_EXTRA_CFLAGS += \
   -Wno-reserved-id-macro
 
@@ -52,6 +56,13 @@
 CLANG_CONFIG_EXTRA_CPPFLAGS += \
   -Wno-inconsistent-missing-override
 
+# Force clang to always output color diagnostics.  Ninja will strip the ANSI
+# color codes if it is not running in a terminal.
+ifdef BUILDING_WITH_NINJA
+CLANG_CONFIG_EXTRA_CFLAGS += \
+  -fcolor-diagnostics
+endif
+
 CLANG_CONFIG_UNKNOWN_CFLAGS := \
   -finline-functions \
   -finline-limit=64 \
@@ -77,7 +88,9 @@
   -Wno-unused-but-set-variable \
   -Wno-unused-local-typedefs \
   -Wunused-but-set-parameter \
-  -Wunused-but-set-variable
+  -Wunused-but-set-variable \
+  -fdiagnostics-color \
+  -fdebug-prefix-map=/proc/self/cwd=
 
 # Clang flags for all host rules
 CLANG_CONFIG_HOST_EXTRA_ASFLAGS :=
@@ -85,6 +98,12 @@
 CLANG_CONFIG_HOST_EXTRA_CPPFLAGS :=
 CLANG_CONFIG_HOST_EXTRA_LDFLAGS :=
 
+# Clang flags for all host cross rules
+CLANG_CONFIG_HOST_CROSS_EXTRA_ASFLAGS :=
+CLANG_CONFIG_HOST_CROSS_EXTRA_CFLAGS :=
+CLANG_CONFIG_HOST_CROSS_EXTRA_CPPFLAGS :=
+CLANG_CONFIG_HOST_CROSS_EXTRA_LDFLAGS :=
+
 # Clang flags for all target rules
 CLANG_CONFIG_TARGET_EXTRA_ASFLAGS :=
 CLANG_CONFIG_TARGET_EXTRA_CFLAGS := -nostdlibinc
@@ -127,6 +146,15 @@
 include $(BUILD_SYSTEM)/clang/HOST_$(HOST_2ND_ARCH).mk
 endif
 
+ifdef HOST_CROSS_ARCH
+clang_2nd_arch_prefix :=
+include $(BUILD_SYSTEM)/clang/HOST_CROSS_$(HOST_CROSS_ARCH).mk
+ifdef HOST_CROSS_2ND_ARCH
+clang_2nd_arch_prefix := $(HOST_CROSS_2ND_ARCH_VAR_PREFIX)
+include $(BUILD_SYSTEM)/clang/HOST_CROSS_$(HOST_CROSS_2ND_ARCH).mk
+endif
+endif
+
 # TARGET config
 clang_2nd_arch_prefix :=
 include $(BUILD_SYSTEM)/clang/TARGET_$(TARGET_ARCH).mk
@@ -140,7 +168,7 @@
 ADDRESS_SANITIZER_CONFIG_EXTRA_CFLAGS := -fno-omit-frame-pointer
 ADDRESS_SANITIZER_CONFIG_EXTRA_LDFLAGS := -Wl,-u,__asan_preinit
 
-ADDRESS_SANITIZER_CONFIG_EXTRA_SHARED_LIBRARIES := libdl
+ADDRESS_SANITIZER_CONFIG_EXTRA_SHARED_LIBRARIES :=
 ADDRESS_SANITIZER_CONFIG_EXTRA_STATIC_LIBRARIES := libasan
 
 # This allows us to use the superset of functionality that compiler-rt
diff --git a/core/clang/mips.mk b/core/clang/mips.mk
index 08daf40..4a8f812 100644
--- a/core/clang/mips.mk
+++ b/core/clang/mips.mk
@@ -12,8 +12,14 @@
   -frerun-cse-after-loop \
   -frename-registers \
   -msynci \
+  -mno-synci \
   -mno-fused-madd
 
+# Temporary workaround for Mips clang++ problem creating
+#   relocated ptrs in read-only pic .gcc_exception_table;
+#   permanent fix pending at http://reviews.llvm.org/D9669
+CLANG_CONFIG_mips_UNKNOWN_CFLAGS += -Wl,--warn-shared-textrel
+
 # We don't have any mips flags to substitute yet.
 define subst-clang-incompatible-mips-flags
   $(1)
diff --git a/core/clang/mips64.mk b/core/clang/mips64.mk
index 612175c..1b72e05 100644
--- a/core/clang/mips64.mk
+++ b/core/clang/mips64.mk
@@ -12,8 +12,14 @@
   -frerun-cse-after-loop \
   -frename-registers \
   -msynci \
+  -mno-synci \
   -mno-fused-madd
 
+# Temporary workaround for Mips clang++ problem creating
+#   relocated ptrs in read-only pic .gcc_exception_table;
+#   permanent fix pending at http://reviews.llvm.org/D9669
+CLANG_CONFIG_mips64_UNKNOWN_CFLAGS += -Wl,--warn-shared-textrel
+
 # We don't have any mips64 flags to substitute yet.
 define subst-clang-incompatible-mips64-flags
   $(1)
diff --git a/core/clang/versions.mk b/core/clang/versions.mk
new file mode 100644
index 0000000..81bd3b8
--- /dev/null
+++ b/core/clang/versions.mk
@@ -0,0 +1,5 @@
+## Clang/LLVM release versions.
+
+LLVM_RELEASE_VERSION := 3.8
+LLVM_PREBUILTS_VERSION ?= clang-2690385
+LLVM_PREBUILTS_BASE ?= prebuilts/clang/host
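# Because versions.mk uses ?= for the prebuilts version and base path, a value
# that is already defined (for example from the environment or an earlier
# makefile) is left untouched. A minimal sketch, with a hypothetical version
# string:
#
#   export LLVM_PREBUILTS_VERSION=clang-1234567   # chosen before the build
#   make                                          # versions.mk keeps this value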
diff --git a/core/cleanbuild.mk b/core/cleanbuild.mk
index 801a292..0d6a406 100644
--- a/core/cleanbuild.mk
+++ b/core/cleanbuild.mk
@@ -54,6 +54,7 @@
 # can have permission to touch it.
 include $(BUILD_SYSTEM)/cleanspec.mk
 INTERNAL_CLEAN_BUILD_VERSION := $(strip $(INTERNAL_CLEAN_BUILD_VERSION))
+INTERNAL_CLEAN_STEPS := $(strip $(INTERNAL_CLEAN_STEPS))
 
 # If the clean_steps.mk file is missing (usually after a clean build)
 # then we won't do anything.
@@ -105,20 +106,39 @@
   _crs_new_cmd :=
   steps :=
 endif
-CURRENT_CLEAN_BUILD_VERSION :=
-CURRENT_CLEAN_STEPS :=
 
 # Write the new state to the file.
 #
+rewrite_clean_steps_file :=
+ifneq ($(CURRENT_CLEAN_BUILD_VERSION)-$(CURRENT_CLEAN_STEPS),$(INTERNAL_CLEAN_BUILD_VERSION)-$(INTERNAL_CLEAN_STEPS))
+rewrite_clean_steps_file := true
+endif
+ifeq ($(wildcard $(clean_steps_file)),)
+# This is the first build.
+rewrite_clean_steps_file := true
+endif
+ifeq ($(rewrite_clean_steps_file),true)
 $(shell \
   mkdir -p $(dir $(clean_steps_file)) && \
   echo "CURRENT_CLEAN_BUILD_VERSION := $(INTERNAL_CLEAN_BUILD_VERSION)" > \
       $(clean_steps_file) ;\
-  echo "CURRENT_CLEAN_STEPS := $(INTERNAL_CLEAN_STEPS)" >> \
-      $(clean_steps_file) \
+  echo "CURRENT_CLEAN_STEPS := $(wordlist 1,500,$(INTERNAL_CLEAN_STEPS))" >> $(clean_steps_file) \
  )
+define -cs-write-clean-steps-if-arg1-not-empty
+$(if $(1),$(shell echo "CURRENT_CLEAN_STEPS += $(1)" >> $(clean_steps_file)))
+endef
+$(call -cs-write-clean-steps-if-arg1-not-empty,$(wordlist 501,1000,$(INTERNAL_CLEAN_STEPS)))
+$(call -cs-write-clean-steps-if-arg1-not-empty,$(wordlist 1001,1500,$(INTERNAL_CLEAN_STEPS)))
+$(call -cs-write-clean-steps-if-arg1-not-empty,$(wordlist 1501,2000,$(INTERNAL_CLEAN_STEPS)))
+$(call -cs-write-clean-steps-if-arg1-not-empty,$(wordlist 2001,2500,$(INTERNAL_CLEAN_STEPS)))
+$(call -cs-write-clean-steps-if-arg1-not-empty,$(wordlist 2501,3000,$(INTERNAL_CLEAN_STEPS)))
+$(call -cs-write-clean-steps-if-arg1-not-empty,$(wordlist 3001,99999,$(INTERNAL_CLEAN_STEPS)))
+endif
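# Writing the steps in batches of 500 words bounds the length of each generated
# echo command line. A standalone sketch of how $(wordlist) slices a list
# (hypothetical variable, for illustration only):
#
#   STEPS := a b c d e f
#   $(info $(wordlist 1,3,$(STEPS)))   # prints "a b c"
#   $(info $(wordlist 4,6,$(STEPS)))   # prints "d e f"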
 
+CURRENT_CLEAN_BUILD_VERSION :=
+CURRENT_CLEAN_STEPS :=
 clean_steps_file :=
+rewrite_clean_steps_file :=
 INTERNAL_CLEAN_STEPS :=
 INTERNAL_CLEAN_BUILD_VERSION :=
 
@@ -137,15 +157,23 @@
 
 current_build_config := \
     $(TARGET_PRODUCT)-$(TARGET_BUILD_VARIANT)-{$(aapt_config_list)}
+current_sanitize_target := $(strip $(SANITIZE_TARGET))
+ifeq (,$(current_sanitize_target))
+  current_sanitize_target := false
+endif
 aapt_config_list :=
 force_installclean := false
+force_objclean := false
 
 # Read the current state from the file, if present.
 # Will set PREVIOUS_BUILD_CONFIG.
 #
 PREVIOUS_BUILD_CONFIG :=
+PREVIOUS_SANITIZE_TARGET :=
 -include $(previous_build_config_file)
 PREVIOUS_BUILD_CONFIG := $(strip $(PREVIOUS_BUILD_CONFIG))
+PREVIOUS_SANITIZE_TARGET := $(strip $(PREVIOUS_SANITIZE_TARGET))
+
 ifdef PREVIOUS_BUILD_CONFIG
   ifneq "$(current_build_config)" "$(PREVIOUS_BUILD_CONFIG)"
     $(info *** Build configuration changed: "$(PREVIOUS_BUILD_CONFIG)" -> "$(current_build_config)")
@@ -156,15 +184,27 @@
     endif
   endif
 endif  # else, this is the first build, so no need to clean.
-PREVIOUS_BUILD_CONFIG :=
+
+ifdef PREVIOUS_SANITIZE_TARGET
+  ifneq "$(current_sanitize_target)" "$(PREVIOUS_SANITIZE_TARGET)"
+    $(info *** SANITIZE_TARGET changed: "$(PREVIOUS_SANITIZE_TARGET)" -> "$(current_sanitize_target)")
+    force_objclean := true
+  endif
+endif  # else, this is the first build, so no need to clean.
 
 # Write the new state to the file.
 #
+ifneq ($(PREVIOUS_BUILD_CONFIG)-$(PREVIOUS_SANITIZE_TARGET),$(current_build_config)-$(current_sanitize_target))
 $(shell \
   mkdir -p $(dir $(previous_build_config_file)) && \
   echo "PREVIOUS_BUILD_CONFIG := $(current_build_config)" > \
+      $(previous_build_config_file) && \
+  echo "PREVIOUS_SANITIZE_TARGET := $(current_sanitize_target)" >> \
       $(previous_build_config_file) \
  )
+endif
+PREVIOUS_BUILD_CONFIG :=
+PREVIOUS_SANITIZE_TARGET :=
 previous_build_config_file :=
 current_build_config :=
 
@@ -208,10 +248,12 @@
 	$(PRODUCT_OUT)/obj/JAVA_LIBRARIES \
 	$(PRODUCT_OUT)/obj/FAKE \
 	$(PRODUCT_OUT)/obj/EXECUTABLES/adbd_intermediates \
+	$(PRODUCT_OUT)/obj/EXECUTABLES/logd_intermediates \
 	$(PRODUCT_OUT)/obj/STATIC_LIBRARIES/libfs_mgr_intermediates \
 	$(PRODUCT_OUT)/obj/EXECUTABLES/init_intermediates \
 	$(PRODUCT_OUT)/obj/ETC/mac_permissions.xml_intermediates \
 	$(PRODUCT_OUT)/obj/ETC/sepolicy_intermediates \
+	$(PRODUCT_OUT)/obj/ETC/sepolicy.recovery_intermediates \
 	$(PRODUCT_OUT)/obj/ETC/init.environ.rc_intermediates
 
 # The files/dirs to delete during a dataclean, which removes any files
@@ -221,6 +263,12 @@
 	$(PRODUCT_OUT)/data-qemu/* \
 	$(PRODUCT_OUT)/userdata-qemu.img
 
+# The files/dirs to delete during an objclean, which removes the target
+# intermediate (obj) directories.
+objclean_files := \
+	$(TARGET_OUT_INTERMEDIATES) \
+	$($(TARGET_2ND_ARCH_VAR_PREFIX)TARGET_OUT_INTERMEDIATES)
+
 # make sure *_OUT is set so that we won't result in deleting random parts
 # of the filesystem.
 ifneq (2,$(words $(HOST_OUT) $(PRODUCT_OUT)))
@@ -240,6 +288,12 @@
 	$(hide) rm -rf $(FILES)
 	@echo "Deleted images and staging directories."
 
+.PHONY: objclean
+objclean: FILES := $(objclean_files)
+objclean:
+	$(hide) rm -rf $(FILES)
+	@echo "Deleted images and staging directories."
+
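# With the phony target above, the same cleanup can be run by hand:
#
#   make objclean
#
# force_objclean triggers it automatically below when SANITIZE_TARGET differs
# from the PREVIOUS_SANITIZE_TARGET recorded by the last build.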
 ifeq "$(force_installclean)" "true"
   $(info *** Forcing "make installclean"...)
   $(info *** rm -rf $(dataclean_files) $(installclean_files))
@@ -248,45 +302,13 @@
 endif
 force_installclean :=
 
-###########################################################
-# Clean build tools when swithcing between prebuilt host tools (such as in
-# apps_only build) and tools built from source (platform build).
-previous_prebuilt_tools_config_file := $(HOST_OUT)/previous_prebuilt_tools_config.mk
-ifneq (,$(TARGET_BUILD_APPS)$(filter true,$(TARGET_BUILD_PDK)))
-current_prebuilt_tools := true
-else
-current_prebuilt_tools := false
+ifeq "$(force_objclean)" "true"
+  $(info *** Forcing cleanup of intermediate files...)
+  $(info *** rm -rf $(objclean_files))
+  $(shell rm -rf $(objclean_files))
+  $(info *** Done with the cleaning, now starting the real build.)
 endif
-PREVIOUS_PREBUILT_TOOLS :=
--include $(previous_prebuilt_tools_config_file)
-force_tools_clean :=
-ifdef PREVIOUS_PREBUILT_TOOLS
-ifneq ($(PREVIOUS_PREBUILT_TOOLS),$(current_prebuilt_tools))
-force_tools_clean := true
-endif
-endif # else, this is the first build, so no need to clean.
-
-# Write the new state to the file.
-$(shell \
-  mkdir -p $(dir $(previous_prebuilt_tools_config_file)) && \
-  echo "PREVIOUS_PREBUILT_TOOLS:=$(current_prebuilt_tools)" > \
-    $(previous_prebuilt_tools_config_file))
-
-ifeq ($(force_tools_clean),true)
-# For this list of prebuilt tools, see prebuilts/sdk/tools/Android.mk.
-tools_clean_files := \
-  $(HOST_OUT_COMMON_INTERMEDIATES)/JAVA_LIBRARIES/signapk_intermediates \
-  $(HOST_OUT_COMMON_INTERMEDIATES)/JAVA_LIBRARIES/dx_intermediates \
-  $(HOST_OUT_COMMON_INTERMEDIATES)/JAVA_LIBRARIES/shrinkedAndroid_intermediates \
-  $(HOST_OUT)/obj*/EXECUTABLES/aapt_intermediates \
-  $(HOST_OUT)/obj*/EXECUTABLES/aidl_intermediates \
-  $(HOST_OUT)/obj*/EXECUTABLES/zipalign_intermediates \
-  $(HOST_OUT)/obj*/lib/libc++$(HOST_SHLIB_SUFFIX) \
-
-$(info *** build type changed, clean host tools...)
-$(info *** rm -rf $(tools_clean_files))
-$(shell rm -rf $(tools_clean_files))
-endif
+force_objclean :=
 
 ###########################################################
 
diff --git a/core/clear_vars.mk b/core/clear_vars.mk
index 076fb78..59e907b 100644
--- a/core/clear_vars.mk
+++ b/core/clear_vars.mk
@@ -28,6 +28,7 @@
 LOCAL_ACP_UNAVAILABLE:=
 LOCAL_MODULE_TAGS:=
 LOCAL_SRC_FILES:=
+LOCAL_SRC_FILES_EXCLUDE:=
 LOCAL_PREBUILT_OBJ_FILES:=
 LOCAL_STATIC_JAVA_LIBRARIES:=
 LOCAL_STATIC_JAVA_AAR_LIBRARIES:=
@@ -82,6 +83,7 @@
 LOCAL_DROIDDOC_CUSTOM_ASSET_DIR:=
 LOCAL_DROIDDOC_OPTIONS:=
 LOCAL_DROIDDOC_HTML_DIR:=
+LOCAL_DROIDDOC_STUB_OUT_DIR:=
 LOCAL_ADDITIONAL_HTML_DIR:=
 LOCAL_ASSET_DIR:=
 LOCAL_RESOURCE_DIR:=
@@ -103,17 +105,19 @@
 LOCAL_RES_LIBRARIES:=
 LOCAL_MANIFEST_INSTRUMENTATION_FOR:=
 LOCAL_AIDL_INCLUDES:=
+LOCAL_VTS_INCLUDES:=
 LOCAL_JARJAR_RULES:=
 LOCAL_ADDITIONAL_JAVA_DIR:=
 LOCAL_ALLOW_UNDEFINED_SYMBOLS:=
 LOCAL_DX_FLAGS:=
 LOCAL_JACK_ENABLED:=$(DEFAULT_JACK_ENABLED) # '' (ie disabled), disabled, full, incremental
-LOCAL_JACK_VM_ARGS := $(DEFAULT_JACK_VM_ARGS)
-LOCAL_JACK_EXTRA_ARGS := $(DEFAULT_JACK_EXTRA_ARGS)
 LOCAL_JACK_FLAGS:=
+LOCAL_JACK_COVERAGE_INCLUDE_FILTER:=
+LOCAL_JACK_COVERAGE_EXCLUDE_FILTER:=
 LOCAL_JILL_FLAGS:=
 LOCAL_CERTIFICATE:=
 LOCAL_SDK_VERSION:=
+LOCAL_MIN_SDK_VERSION:=
 LOCAL_SDK_RES_VERSION:=
 LOCAL_NDK_STL_VARIANT:=
 LOCAL_EMMA_INSTRUMENT:=
@@ -132,7 +136,6 @@
 LOCAL_RENDERSCRIPT_CC:=
 LOCAL_RENDERSCRIPT_COMPATIBILITY:=
 LOCAL_RENDERSCRIPT_FLAGS:=
-LOCAL_RENDERSCRIPT_SKIP_INSTALL:=
 LOCAL_RENDERSCRIPT_TARGET_API:=
 LOCAL_DEX_PREOPT:= # '',true,false,nostripping
 LOCAL_DEX_PREOPT_IMAGE_LOCATION:=
@@ -140,6 +143,7 @@
 LOCAL_PROTOC_OPTIMIZE_TYPE:= # lite(default),micro,nano,full,nanopb-c,nanopb-c-enable_malloc
 LOCAL_PROTOC_FLAGS:=
 LOCAL_PROTO_JAVA_OUTPUT_PARAMS:=
+LOCAL_VTSC_FLAGS:=
 LOCAL_NO_CRT:=
 LOCAL_NO_LIBGCC:=
 LOCAL_PROPRIETARY_MODULE:=
@@ -147,14 +151,14 @@
 LOCAL_ODM_MODULE:=
 LOCAL_PRIVILEGED_MODULE:=
 LOCAL_MODULE_OWNER:=
+LOCAL_COMPATIBILITY_SUITE:=
+LOCAL_COMPATIBILITY_SUPPORT_FILES:=
 LOCAL_CTS_TEST_PACKAGE:=
 LOCAL_CTS_TEST_RUNNER:=
 LOCAL_CLANG:=
-LOCAL_ADDRESS_SANITIZER:=
 LOCAL_JAR_EXCLUDE_FILES:=
 LOCAL_JAR_PACKAGES:=
 LOCAL_JAR_EXCLUDE_PACKAGES:=
-LOCAL_LINT_FLAGS:=
 LOCAL_SOURCE_FILES_ALL_GENERATED:= # '',true
 # Don't delete the META_INF dir when merging static Java libraries.
 LOCAL_DONT_DELETE_JAR_META_INF:=
@@ -163,7 +167,6 @@
 LOCAL_PREBUILT_MODULE_FILE:=
 LOCAL_POST_LINK_CMD:=
 LOCAL_POST_INSTALL_CMD:=
-LOCAL_DIST_BUNDLED_BINARIES:=
 LOCAL_HAL_STATIC_LIBRARIES:=
 LOCAL_RMTYPEDEFS:=
 LOCAL_NO_SYNTAX_CHECK:=
@@ -175,15 +178,36 @@
 LOCAL_MODULE_UNSUPPORTED_TARGET_ARCH:=
 LOCAL_MODULE_UNSUPPORTED_TARGET_ARCH_WARN:=
 LOCAL_MODULE_HOST_ARCH:=
+LOCAL_MODULE_HOST_ARCH_WARN:=
+LOCAL_MODULE_UNSUPPORTED_HOST_ARCH:=
+LOCAL_MODULE_UNSUPPORTED_HOST_ARCH_WARN:=
+LOCAL_MODULE_HOST_CROSS_ARCH:=
+LOCAL_MODULE_HOST_CROSS_ARCH_WARN:=
+LOCAL_MODULE_UNSUPPORTED_HOST_CROSS_ARCH:=
+LOCAL_MODULE_UNSUPPORTED_HOST_CROSS_ARCH_WARN:=
 LOCAL_NO_FPIE :=
 LOCAL_CXX_STL := default
 LOCAL_NATIVE_COVERAGE :=
 LOCAL_DPI_VARIANTS:=
 LOCAL_DPI_FILE_STEM:=
 LOCAL_SANITIZE:=
+LOCAL_SANITIZE_RECOVER:=
+LOCAL_DATA_BINDING:=
+LOCAL_DBUS_PROXY_PREFIX:=
+LOCAL_INIT_RC:=
+LOCAL_MODULE_HOST_OS:=
+LOCAL_FINDBUGS_FLAGS:=
+LOCAL_NOTICE_FILE:=
+LOCAL_USE_AAPT2:=$(USE_AAPT2)
+LOCAL_STATIC_ANDROID_LIBRARIES:=
+LOCAL_SHARED_ANDROID_LIBRARIES:=
+# Used to replace the installed file of a presigned prebuilt apk in PDK fusion build,
+# to avoid installing the presigned apks with classes.dex unstripped.
+LOCAL_REPLACE_PREBUILT_APK_INSTALLED:=
 
 # arch specific variables
 LOCAL_SRC_FILES_$(TARGET_ARCH):=
+LOCAL_SRC_FILES_EXCLUDE_$(TARGET_ARCH):=
 LOCAL_CFLAGS_$(TARGET_ARCH):=
 LOCAL_CLANG_CFLAGS_$(TARGET_ARCH):=
 LOCAL_CPPFLAGS_$(TARGET_ARCH):=
@@ -191,7 +215,6 @@
 LOCAL_C_INCLUDES_$(TARGET_ARCH):=
 LOCAL_ASFLAGS_$(TARGET_ARCH):=
 LOCAL_CLANG_ASFLAGS_$(TARGET_ARCH):=
-LOCAL_NO_CRT_$(TARGET_ARCH):=
 LOCAL_LDFLAGS_$(TARGET_ARCH):=
 LOCAL_CLANG_LDFLAGS_$(TARGET_ARCH):=
 LOCAL_SHARED_LIBRARIES_$(TARGET_ARCH):=
@@ -201,8 +224,11 @@
 LOCAL_REQUIRED_MODULES_$(TARGET_ARCH):=
 LOCAL_CLANG_$(TARGET_ARCH):=
 LOCAL_PREBUILT_JNI_LIBS_$(TARGET_ARCH):=
+LOCAL_STRIP_MODULE_$(TARGET_ARCH):=
+LOCAL_PACK_MODULE_RELOCATIONS_$(TARGET_ARCH):=
 ifdef TARGET_2ND_ARCH
 LOCAL_SRC_FILES_$(TARGET_2ND_ARCH):=
+LOCAL_SRC_FILES_EXCLUDE_$(TARGET_2ND_ARCH):=
 LOCAL_CFLAGS_$(TARGET_2ND_ARCH):=
 LOCAL_CLANG_CFLAGS_$(TARGET_2ND_ARCH):=
 LOCAL_CPPFLAGS_$(TARGET_2ND_ARCH):=
@@ -210,7 +236,6 @@
 LOCAL_C_INCLUDES_$(TARGET_2ND_ARCH):=
 LOCAL_ASFLAGS_$(TARGET_2ND_ARCH):=
 LOCAL_CLANG_ASFLAGS_$(TARGET_2ND_ARCH):=
-LOCAL_NO_CRT_$(TARGET_2ND_ARCH):=
 LOCAL_LDFLAGS_$(TARGET_2ND_ARCH):=
 LOCAL_CLANG_LDFLAGS_$(TARGET_2ND_ARCH):=
 LOCAL_SHARED_LIBRARIES_$(TARGET_2ND_ARCH):=
@@ -220,8 +245,11 @@
 LOCAL_REQUIRED_MODULES_$(TARGET_2ND_ARCH):=
 LOCAL_CLANG_$(TARGET_2ND_ARCH):=
 LOCAL_PREBUILT_JNI_LIBS_$(TARGET_2ND_ARCH):=
+LOCAL_STRIP_MODULE_$(TARGET_2ND_ARCH):=
+LOCAL_PACK_MODULE_RELOCATIONS_$(TARGET_2ND_ARCH):=
 endif
 LOCAL_SRC_FILES_$(HOST_ARCH):=
+LOCAL_SRC_FILES_EXCLUDE_$(HOST_ARCH):=
 LOCAL_CFLAGS_$(HOST_ARCH):=
 LOCAL_CLANG_CFLAGS_$(HOST_ARCH):=
 LOCAL_CPPFLAGS_$(HOST_ARCH):=
@@ -229,7 +257,6 @@
 LOCAL_C_INCLUDES_$(HOST_ARCH):=
 LOCAL_ASFLAGS_$(HOST_ARCH):=
 LOCAL_CLANG_ASFLAGS_$(HOST_ARCH):=
-LOCAL_NO_CRT_$(HOST_ARCH):=
 LOCAL_LDFLAGS_$(HOST_ARCH):=
 LOCAL_CLANG_LDFLAGS_$(HOST_ARCH):=
 LOCAL_SHARED_LIBRARIES_$(HOST_ARCH):=
@@ -240,6 +267,7 @@
 LOCAL_CLANG_$(HOST_ARCH):=
 ifdef HOST_2ND_ARCH
 LOCAL_SRC_FILES_$(HOST_2ND_ARCH):=
+LOCAL_SRC_FILES_EXCLUDE_$(HOST_2ND_ARCH):=
 LOCAL_CFLAGS_$(HOST_2ND_ARCH):=
 LOCAL_CLANG_CFLAGS_$(HOST_2ND_ARCH):=
 LOCAL_CPPFLAGS_$(HOST_2ND_ARCH):=
@@ -247,7 +275,6 @@
 LOCAL_C_INCLUDES_$(HOST_2ND_ARCH):=
 LOCAL_ASFLAGS_$(HOST_2ND_ARCH):=
 LOCAL_CLANG_ASFLAGS_$(HOST_2ND_ARCH):=
-LOCAL_NO_CRT_$(HOST_2ND_ARCH):=
 LOCAL_LDFLAGS_$(HOST_2ND_ARCH):=
 LOCAL_CLANG_LDFLAGS_$(HOST_2ND_ARCH):=
 LOCAL_SHARED_LIBRARIES_$(HOST_2ND_ARCH):=
@@ -258,8 +285,47 @@
 LOCAL_CLANG_$(HOST_2ND_ARCH):=
 endif
 
+LOCAL_SRC_FILES_$(HOST_OS):=
+LOCAL_STATIC_LIBRARIES_$(HOST_OS):=
+LOCAL_SHARED_LIBRARIES_$(HOST_OS):=
+LOCAL_CFLAGS_$(HOST_OS):=
+LOCAL_CPPFLAGS_$(HOST_OS):=
+LOCAL_LDFLAGS_$(HOST_OS):=
+LOCAL_LDLIBS_$(HOST_OS):=
+LOCAL_ASFLAGS_$(HOST_OS):=
+LOCAL_C_INCLUDES_$(HOST_OS):=
+LOCAL_GENERATED_SOURCES_$(HOST_OS):=
+LOCAL_REQUIRED_MODULES_$(HOST_OS):=
+
+ifdef HOST_CROSS_OS
+LOCAL_SRC_FILES_$(HOST_CROSS_OS):=
+LOCAL_STATIC_LIBRARIES_$(HOST_CROSS_OS):=
+LOCAL_SHARED_LIBRARIES_$(HOST_CROSS_OS):=
+LOCAL_CFLAGS_$(HOST_CROSS_OS):=
+LOCAL_CPPFLAGS_$(HOST_CROSS_OS):=
+LOCAL_LDFLAGS_$(HOST_CROSS_OS):=
+LOCAL_LDLIBS_$(HOST_CROSS_OS):=
+LOCAL_ASFLAGS_$(HOST_CROSS_OS):=
+LOCAL_C_INCLUDES_$(HOST_CROSS_OS):=
+LOCAL_GENERATED_SOURCES_$(HOST_CROSS_OS):=
+LOCAL_REQUIRED_MODULES_$(HOST_CROSS_OS):=
+endif
+
+LOCAL_SRC_FILES_$(HOST_OS)_$(HOST_ARCH):=
+ifdef HOST_2ND_ARCH
+LOCAL_SRC_FILES_$(HOST_OS)_$(HOST_2ND_ARCH):=
+endif
+ifdef HOST_CROSS_OS
+LOCAL_SRC_FILES_$(HOST_CROSS_OS)_$(HOST_CROSS_ARCH):=
+ifdef HOST_CROSS_2ND_ARCH
+LOCAL_SRC_FILES_$(HOST_CROSS_OS)_$(HOST_CROSS_2ND_ARCH):=
+endif
+endif
+
 LOCAL_SRC_FILES_32:=
 LOCAL_SRC_FILES_64:=
+LOCAL_SRC_FILES_EXCLUDE_32:=
+LOCAL_SRC_FILES_EXCLUDE_64:=
 LOCAL_SHARED_LIBRARIES_32:=
 LOCAL_SHARED_LIBRARIES_64:=
 LOCAL_STATIC_LIBRARIES_32:=
@@ -292,6 +358,9 @@
 LOCAL_MODULE_STEM_64:=
 LOCAL_CLANG_32:=
 LOCAL_CLANG_64:=
+LOCAL_INIT_RC_32:=
+LOCAL_INIT_RC_64:=
+LOCAL_JAVA_LANGUAGE_VERSION:=
 
 # Trim MAKEFILE_LIST so that $(call my-dir) doesn't need to
 # iterate over thousands of entries every time.
diff --git a/core/combo/HOST_CROSS_windows-x86.mk b/core/combo/HOST_CROSS_windows-x86.mk
new file mode 100644
index 0000000..6180a26
--- /dev/null
+++ b/core/combo/HOST_CROSS_windows-x86.mk
@@ -0,0 +1,65 @@
+#
+# Copyright (C) 2006 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+# Settings to use MinGW as a cross-compiler under Linux
+# Included by combo/select.make
+
+$(combo_var_prefix)GLOBAL_CFLAGS += -DUSE_MINGW -DWIN32_LEAN_AND_MEAN
+$(combo_var_prefix)GLOBAL_CFLAGS += -Wno-unused-parameter
+$(combo_var_prefix)GLOBAL_CFLAGS += --sysroot prebuilts/gcc/linux-x86/host/x86_64-w64-mingw32-4.8/x86_64-w64-mingw32
+$(combo_var_prefix)GLOBAL_CFLAGS += -m32
+$(combo_var_prefix)GLOBAL_LDFLAGS += -m32
+TOOLS_PREFIX := prebuilts/gcc/linux-x86/host/x86_64-w64-mingw32-4.8/bin/x86_64-w64-mingw32-
+$(combo_var_prefix)C_INCLUDES += prebuilts/gcc/linux-x86/host/x86_64-w64-mingw32-4.8/x86_64-w64-mingw32/include
+$(combo_var_prefix)C_INCLUDES += prebuilts/gcc/linux-x86/host/x86_64-w64-mingw32-4.8/lib/gcc/x86_64-w64-mingw32/4.8.3/include
+$(combo_var_prefix)GLOBAL_LD_DIRS += -Lprebuilts/gcc/linux-x86/host/x86_64-w64-mingw32-4.8/x86_64-w64-mingw32/lib32
+
+# Workaround differences in inttypes.h between host and target.
+# See bug 12708004.
+$(combo_var_prefix)GLOBAL_CFLAGS += -D__STDC_FORMAT_MACROS -D__STDC_CONSTANT_MACROS
+# Use C99-compliant printf functions (%zd).
+$(combo_var_prefix)GLOBAL_CFLAGS += -D__USE_MINGW_ANSI_STDIO=1
+# Admit to using >= Vista. Both are needed because of <_mingw.h>.
+$(combo_var_prefix)GLOBAL_CFLAGS += -D_WIN32_WINNT=0x0600 -DWINVER=0x0600
+# Get 64-bit off_t and related functions.
+$(combo_var_prefix)GLOBAL_CFLAGS += -D_FILE_OFFSET_BITS=64
+
+$(combo_var_prefix)CC := $(TOOLS_PREFIX)gcc
+$(combo_var_prefix)CXX := $(TOOLS_PREFIX)g++
+$(combo_var_prefix)AR := $(TOOLS_PREFIX)ar
+$(combo_var_prefix)NM := $(TOOLS_PREFIX)nm
+$(combo_var_prefix)OBJDUMP := $(TOOLS_PREFIX)objdump
+
+define $(combo_var_prefix)transform-shared-lib-to-toc
+$(hide) $($(PRIVATE_2ND_ARCH_VAR_PREFIX)$(PRIVATE_PREFIX)OBJDUMP) -x $(1) | grep "^Name" | cut -f3 -d" " > $(2)
+$(hide) $($(PRIVATE_2ND_ARCH_VAR_PREFIX)$(PRIVATE_PREFIX)NM) -g -f p $(1) | cut -f1-2 -d" " >> $(2)
+endef
+
+$(combo_var_prefix)GLOBAL_LDFLAGS += \
+    --enable-stdcall-fixup
+
+ifneq ($(strip $(BUILD_HOST_static)),)
+# Statically-linked binaries are desirable for sandboxed environment
+$(combo_var_prefix)GLOBAL_LDFLAGS += -static
+endif # BUILD_HOST_static
+
+$(combo_var_prefix)SHLIB_SUFFIX := .dll
+$(combo_var_prefix)EXECUTABLE_SUFFIX := .exe
+
+$(combo_var_prefix)IS_64_BIT :=
+
+# The mingw gcc is 4.8, but 4.9 is required for color diagnostics
+$(combo_var_prefix)UNKNOWN_CFLAGS := -fdiagnostics-color
diff --git a/core/combo/HOST_CROSS_windows-x86_64.mk b/core/combo/HOST_CROSS_windows-x86_64.mk
new file mode 100644
index 0000000..e9b19cf
--- /dev/null
+++ b/core/combo/HOST_CROSS_windows-x86_64.mk
@@ -0,0 +1,65 @@
+#
+# Copyright (C) 2006 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+# Settings to use MinGW as a cross-compiler under Linux
+# Included by combo/select.make
+
+$(combo_var_prefix)GLOBAL_CFLAGS += -DUSE_MINGW -DWIN32_LEAN_AND_MEAN
+$(combo_var_prefix)GLOBAL_CFLAGS += -Wno-unused-parameter
+$(combo_var_prefix)GLOBAL_CFLAGS += --sysroot prebuilts/gcc/linux-x86/host/x86_64-w64-mingw32-4.8/x86_64-w64-mingw32
+$(combo_var_prefix)GLOBAL_CFLAGS += -m64
+$(combo_var_prefix)GLOBAL_LDFLAGS += -m64
+TOOLS_PREFIX := prebuilts/gcc/linux-x86/host/x86_64-w64-mingw32-4.8/bin/x86_64-w64-mingw32-
+$(combo_var_prefix)C_INCLUDES += prebuilts/gcc/linux-x86/host/x86_64-w64-mingw32-4.8/x86_64-w64-mingw32/include
+$(combo_var_prefix)C_INCLUDES += prebuilts/gcc/linux-x86/host/x86_64-w64-mingw32-4.8/lib/gcc/x86_64-w64-mingw32/4.8.3/include
+$(combo_var_prefix)GLOBAL_LD_DIRS += -Lprebuilts/gcc/linux-x86/host/x86_64-w64-mingw32-4.8/x86_64-w64-mingw32/lib64
+
+# Workaround differences in inttypes.h between host and target.
+# See bug 12708004.
+$(combo_var_prefix)GLOBAL_CFLAGS += -D__STDC_FORMAT_MACROS -D__STDC_CONSTANT_MACROS
+# Use C99-compliant printf functions (%zd).
+$(combo_var_prefix)GLOBAL_CFLAGS += -D__USE_MINGW_ANSI_STDIO=1
+# Admit to using >= Vista. Both are needed because of <_mingw.h>.
+$(combo_var_prefix)GLOBAL_CFLAGS += -D_WIN32_WINNT=0x0600 -DWINVER=0x0600
+# Get 64-bit off_t and related functions.
+$(combo_var_prefix)GLOBAL_CFLAGS += -D_FILE_OFFSET_BITS=64
+
+$(combo_var_prefix)CC := $(TOOLS_PREFIX)gcc
+$(combo_var_prefix)CXX := $(TOOLS_PREFIX)g++
+$(combo_var_prefix)AR := $(TOOLS_PREFIX)ar
+$(combo_var_prefix)NM := $(TOOLS_PREFIX)nm
+$(combo_var_prefix)OBJDUMP := $(TOOLS_PREFIX)objdump
+
+define $(combo_var_prefix)transform-shared-lib-to-toc
+$(hide) $($(PRIVATE_2ND_ARCH_VAR_PREFIX)$(PRIVATE_PREFIX)OBJDUMP) -x $(1) | grep "^Name" | cut -f3 -d" " > $(2)
+$(hide) $($(PRIVATE_2ND_ARCH_VAR_PREFIX)$(PRIVATE_PREFIX)NM) -g -f p $(1) | cut -f1-2 -d" " >> $(2)
+endef
+
+$(combo_var_prefix)GLOBAL_LDFLAGS += \
+    --enable-stdcall-fixup
+
+ifneq ($(strip $(BUILD_HOST_static)),)
+# Statically-linked binaries are desirable for sandboxed environment
+$(combo_var_prefix)GLOBAL_LDFLAGS += -static
+endif # BUILD_HOST_static
+
+$(combo_var_prefix)SHLIB_SUFFIX := .dll
+$(combo_var_prefix)EXECUTABLE_SUFFIX := .exe
+
+$(combo_var_prefix)IS_64_BIT := true
+
+# The mingw gcc is 4.8, but 4.9 is required for color diagnostics
+$(combo_var_prefix)UNKNOWN_CFLAGS := -fdiagnostics-color
diff --git a/core/combo/HOST_darwin-x86.mk b/core/combo/HOST_darwin-x86.mk
index e77fd21..fc56e52 100644
--- a/core/combo/HOST_darwin-x86.mk
+++ b/core/combo/HOST_darwin-x86.mk
@@ -36,6 +36,10 @@
 $(combo_2nd_arch_prefix)HOST_CC  := $($(combo_2nd_arch_prefix)HOST_TOOLCHAIN_PREFIX)-gcc
 $(combo_2nd_arch_prefix)HOST_CXX := $($(combo_2nd_arch_prefix)HOST_TOOLCHAIN_PREFIX)-g++
 
+define $(combo_var_prefix)transform-shared-lib-to-toc
+$(call _gen_toc_command_for_macho,$(1),$(2))
+endef
+
 # gcc location for clang; to be updated when clang is updated
 # HOST_TOOLCHAIN_ROOT is a Darwin-specific define
 $(combo_2nd_arch_prefix)HOST_TOOLCHAIN_FOR_CLANG := $($(combo_2nd_arch_prefix)HOST_TOOLCHAIN_ROOT)
@@ -52,10 +56,6 @@
 $(combo_2nd_arch_prefix)HOST_SHLIB_SUFFIX := .dylib
 $(combo_2nd_arch_prefix)HOST_JNILIB_SUFFIX := .jnilib
 
-# TODO: add AndroidConfig.h for darwin-x86_64
-$(combo_2nd_arch_prefix)HOST_GLOBAL_CFLAGS += \
-    -include $(call select-android-config-h,darwin-x86)
-
 $(combo_2nd_arch_prefix)HOST_GLOBAL_ARFLAGS := cqs
 
 ############################################################
diff --git a/core/combo/HOST_darwin-x86_64.mk b/core/combo/HOST_darwin-x86_64.mk
index 0efa78f..251455f 100644
--- a/core/combo/HOST_darwin-x86_64.mk
+++ b/core/combo/HOST_darwin-x86_64.mk
@@ -36,6 +36,10 @@
 HOST_CC  := $(HOST_TOOLCHAIN_PREFIX)-gcc
 HOST_CXX := $(HOST_TOOLCHAIN_PREFIX)-g++
 
+define $(combo_var_prefix)transform-shared-lib-to-toc
+$(call _gen_toc_command_for_macho,$(1),$(2))
+endef
+
 # gcc location for clang; to be updated when clang is updated
 # HOST_TOOLCHAIN_ROOT is a Darwin-specific define
 HOST_TOOLCHAIN_FOR_CLANG := $(HOST_TOOLCHAIN_ROOT)
@@ -52,9 +56,6 @@
 HOST_SHLIB_SUFFIX := .dylib
 HOST_JNILIB_SUFFIX := .jnilib
 
-HOST_GLOBAL_CFLAGS += \
-    -include $(call select-android-config-h,darwin-x86)
-
 HOST_GLOBAL_ARFLAGS := cqs
 
 # We Reuse the following functions with the same name from HOST_darwin-x86.mk:
diff --git a/core/combo/HOST_linux-x86.mk b/core/combo/HOST_linux-x86.mk
index 8eda6c0..169e2d2 100644
--- a/core/combo/HOST_linux-x86.mk
+++ b/core/combo/HOST_linux-x86.mk
@@ -23,13 +23,19 @@
 $(combo_2nd_arch_prefix)HOST_CC  := $($(combo_2nd_arch_prefix)HOST_TOOLCHAIN_PREFIX)gcc
 $(combo_2nd_arch_prefix)HOST_CXX := $($(combo_2nd_arch_prefix)HOST_TOOLCHAIN_PREFIX)g++
 $(combo_2nd_arch_prefix)HOST_AR  := $($(combo_2nd_arch_prefix)HOST_TOOLCHAIN_PREFIX)ar
+$(combo_2nd_arch_prefix)HOST_READELF  := $($(combo_2nd_arch_prefix)HOST_TOOLCHAIN_PREFIX)readelf
+$(combo_2nd_arch_prefix)HOST_NM  := $($(combo_2nd_arch_prefix)HOST_TOOLCHAIN_PREFIX)nm
+
+define $(combo_var_prefix)transform-shared-lib-to-toc
+$(call _gen_toc_command_for_elf,$(1),$(2))
+endef
 
 # gcc location for clang; to be updated when clang is updated
-$(combo_2nd_arch_prefix)HOST_TOOLCHAIN_FOR_CLANG := prebuilts/gcc/linux-x86/host/x86_64-linux-glibc2.15-4.8/
+$(combo_2nd_arch_prefix)HOST_TOOLCHAIN_FOR_CLANG := prebuilts/gcc/linux-x86/host/x86_64-linux-glibc2.15-4.8
 
 # We expect SSE3 floating point math.
 $(combo_2nd_arch_prefix)HOST_GLOBAL_CFLAGS += -msse3 -mfpmath=sse -m32 -Wa,--noexecstack -march=prescott
-$(combo_2nd_arch_prefix)HOST_GLOBAL_LDFLAGS += -m32 -Wl,-z,noexecstack -Wl,-z,relro -Wl,-z,now
+$(combo_2nd_arch_prefix)HOST_GLOBAL_LDFLAGS += -m32 -Wl,-z,noexecstack -Wl,-z,relro -Wl,-z,now -Wl,--no-undefined-version
 
 ifneq ($(strip $(BUILD_HOST_static)),)
 # Statically-linked binaries are desirable for sandboxed environment
@@ -38,15 +44,17 @@
 
 $(combo_2nd_arch_prefix)HOST_GLOBAL_CFLAGS += -fPIC \
   -no-canonical-prefixes \
-  -include $(call select-android-config-h,linux-x86)
 
-# TODO: Set _FORTIFY_SOURCE=2. Bug 20558757.
-$(combo_2nd_arch_prefix)HOST_GLOBAL_CFLAGS += -U_FORTIFY_SOURCE -D_FORTIFY_SOURCE=0 -fstack-protector
+$(combo_2nd_arch_prefix)HOST_GLOBAL_CFLAGS += -U_FORTIFY_SOURCE -D_FORTIFY_SOURCE=2 -fstack-protector
 
 # Workaround differences in inttypes.h between host and target.
 # See bug 12708004.
 $(combo_2nd_arch_prefix)HOST_GLOBAL_CFLAGS += -D__STDC_FORMAT_MACROS -D__STDC_CONSTANT_MACROS
 
+# We build a 32-bit host art, and right now that also means building *all* host libraries
+# both 32- and 64-bit (whether art uses them or not --- 9d59f417767991246848c3e101cb27d2dfea5988).
+$(combo_2nd_arch_prefix)HOST_GLOBAL_CFLAGS += -D_FILE_OFFSET_BITS=64 -D_LARGEFILE_SOURCE=1
+
 $(combo_2nd_arch_prefix)HOST_NO_UNDEFINED_LDFLAGS := -Wl,--no-undefined
 
 ############################################################
diff --git a/core/combo/HOST_linux-x86_64.mk b/core/combo/HOST_linux-x86_64.mk
index e268e41..9766f2b 100644
--- a/core/combo/HOST_linux-x86_64.mk
+++ b/core/combo/HOST_linux-x86_64.mk
@@ -23,25 +23,28 @@
 HOST_CC  := $(HOST_TOOLCHAIN_PREFIX)gcc
 HOST_CXX := $(HOST_TOOLCHAIN_PREFIX)g++
 HOST_AR  := $(HOST_TOOLCHAIN_PREFIX)ar
+HOST_READELF  := $(HOST_TOOLCHAIN_PREFIX)readelf
+HOST_NM  := $(HOST_TOOLCHAIN_PREFIX)nm
+
+define $(combo_var_prefix)transform-shared-lib-to-toc
+$(call _gen_toc_command_for_elf,$(1),$(2))
+endef
 
 # gcc location for clang; to be updated when clang is updated
-HOST_TOOLCHAIN_FOR_CLANG := prebuilts/gcc/linux-x86/host/x86_64-linux-glibc2.15-4.8/
+HOST_TOOLCHAIN_FOR_CLANG := prebuilts/gcc/linux-x86/host/x86_64-linux-glibc2.15-4.8
 
 HOST_GLOBAL_CFLAGS += -m64 -Wa,--noexecstack
-HOST_GLOBAL_LDFLAGS += -m64 -Wl,-z,noexecstack -Wl,-z,relro -Wl,-z,now
+HOST_GLOBAL_LDFLAGS += -m64 -Wl,-z,noexecstack -Wl,-z,relro -Wl,-z,now -Wl,--no-undefined-version
 
 ifneq ($(strip $(BUILD_HOST_static)),)
 # Statically-linked binaries are desirable for sandboxed environment
 HOST_GLOBAL_LDFLAGS += -static
 endif # BUILD_HOST_static
 
-# TODO: Add AndroidConfig.h for linux-x86_64
 HOST_GLOBAL_CFLAGS += -fPIC \
   -no-canonical-prefixes \
-  -include $(call select-android-config-h,linux-x86)
 
-# TODO: Set _FORTIFY_SOURCE=2. Bug 20558757.
-HOST_GLOBAL_CFLAGS += -U_FORTIFY_SOURCE -D_FORTIFY_SOURCE=0 -fstack-protector
+HOST_GLOBAL_CFLAGS += -U_FORTIFY_SOURCE -D_FORTIFY_SOURCE=2 -fstack-protector
 
 # Workaround differences in inttypes.h between host and target.
 # See bug 12708004.
diff --git a/core/combo/HOST_windows-x86.mk b/core/combo/HOST_windows-x86.mk
deleted file mode 100644
index b71ac16..0000000
--- a/core/combo/HOST_windows-x86.mk
+++ /dev/null
@@ -1,86 +0,0 @@
-#
-# Copyright (C) 2006 The Android Open Source Project
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-# Configuration for Linux on x86.
-# Included by combo/select.make
-
-# right now we get these from the environment, but we should
-# pick them from the tree somewhere
-TOOLS_PREFIX := #prebuilt/windows/host/bin/
-TOOLS_EXE_SUFFIX := .exe
-
-# Settings to use MinGW has a cross-compiler under Linux
-ifneq ($(findstring Linux,$(UNAME)),)
-ifdef USE_MINGW
-HOST_ACP_UNAVAILABLE := true
-TOOLS_EXE_SUFFIX :=
-$(combo_2nd_arch_prefix)HOST_GLOBAL_CFLAGS += -DUSE_MINGW -DWIN32_LEAN_AND_MEAN
-$(combo_2nd_arch_prefix)HOST_GLOBAL_CFLAGS += -Wno-unused-parameter
-$(combo_2nd_arch_prefix)HOST_GLOBAL_CFLAGS += --sysroot=prebuilts/gcc/linux-x86/host/x86_64-w64-mingw32-4.8/x86_64-w64-mingw32
-$(combo_2nd_arch_prefix)HOST_GLOBAL_CFLAGS += -m32
-$(combo_2nd_arch_prefix)HOST_GLOBAL_LDFLAGS += -m32
-TOOLS_PREFIX := prebuilts/gcc/linux-x86/host/x86_64-w64-mingw32-4.8/bin/x86_64-w64-mingw32-
-$(combo_2nd_arch_prefix)HOST_C_INCLUDES += prebuilts/gcc/linux-x86/host/x86_64-w64-mingw32-4.8/x86_64-w64-mingw32/include
-$(combo_2nd_arch_prefix)HOST_C_INCLUDES += prebuilts/gcc/linux-x86/host/x86_64-w64-mingw32-4.8/lib/gcc/x86_64-w64-mingw32/4.8.3/include
-$(combo_2nd_arch_prefix)HOST_GLOBAL_LD_DIRS += -Lprebuilts/gcc/linux-x86/host/x86_64-w64-mingw32-4.8/x86_64-w64-mingw32/lib32
-endif # USE_MINGW
-endif # Linux
-
-# Workaround differences in inttypes.h between host and target.
-# See bug 12708004.
-$(combo_2nd_arch_prefix)HOST_GLOBAL_CFLAGS += -D__STDC_FORMAT_MACROS -D__STDC_CONSTANT_MACROS -D__USE_MINGW_ANSI_STDIO
-
-$(combo_2nd_arch_prefix)HOST_CC := $(TOOLS_PREFIX)gcc$(TOOLS_EXE_SUFFIX)
-$(combo_2nd_arch_prefix)HOST_CXX := $(TOOLS_PREFIX)g++$(TOOLS_EXE_SUFFIX)
-$(combo_2nd_arch_prefix)HOST_AR := $(TOOLS_PREFIX)ar$(TOOLS_EXE_SUFFIX)
-
-$(combo_2nd_arch_prefix)HOST_GLOBAL_CFLAGS += \
-    -include $(call select-android-config-h,windows)
-$(combo_2nd_arch_prefix)HOST_GLOBAL_LDFLAGS += \
-    --enable-stdcall-fixup
-
-ifneq ($(strip $(BUILD_HOST_static)),)
-# Statically-linked binaries are desirable for sandboxed environment
-$(combo_2nd_arch_prefix)HOST_GLOBAL_LDFLAGS += -static
-endif # BUILD_HOST_static
-
-# when building under Cygwin, ensure that we use Mingw compilation by default.
-# you can disable this (i.e. to generate Cygwin executables) by defining the
-# USE_CYGWIN variable in your environment, e.g.:
-#
-#   export USE_CYGWIN=1
-#
-# note that the -mno-cygwin flags are not needed when cross-compiling the
-# Windows host tools on Linux
-#
-ifneq ($(findstring CYGWIN,$(UNAME)),)
-ifeq ($(strip $(USE_CYGWIN)),)
-$(combo_2nd_arch_prefix)HOST_GLOBAL_CFLAGS += -mno-cygwin
-$(combo_2nd_arch_prefix)HOST_GLOBAL_LDFLAGS += -mno-cygwin -mconsole
-endif
-endif
-
-############################################################
-## Macros after this line are shared by the 64-bit config.
-
-HOST_SHLIB_SUFFIX := .dll
-HOST_EXECUTABLE_SUFFIX := .exe
-
-# $(1): The file to check
-# TODO: find out what format cygwin's stat(1) uses
-define get-file-size
-999999999
-endef
diff --git a/core/combo/HOST_windows-x86_64.mk b/core/combo/HOST_windows-x86_64.mk
deleted file mode 100644
index bd392ea..0000000
--- a/core/combo/HOST_windows-x86_64.mk
+++ /dev/null
@@ -1,66 +0,0 @@
-#
-# Copyright (C) 2006 The Android Open Source Project
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-# Configuration for Windows on x86_64.
-# Included by combo/select.make
-
-# right now we get these from the environment, but we should
-# pick them from the tree somewhere
-TOOLS_PREFIX := #prebuilt/windows/host/bin/
-TOOLS_EXE_SUFFIX := .exe
-
-# Settings to use MinGW has a cross-compiler under Linux
-ifneq ($(findstring Linux,$(UNAME)),)
-ifdef USE_MINGW
-HOST_ACP_UNAVAILABLE := true
-TOOLS_EXE_SUFFIX :=
-HOST_GLOBAL_CFLAGS += -DUSE_MINGW
-TOOLS_PREFIX := /usr/bin/amd64-mingw32msvc-
-HOST_C_INCLUDES += /usr/lib/gcc/amd64-mingw32msvc/4.4.2/include
-HOST_GLOBAL_LD_DIRS += -L/usr/amd64-mingw32msvc/lib
-endif # USE_MINGW
-endif # Linux
-
-# Workaround differences in inttypes.h between host and target.
-# See bug 12708004.
-HOST_GLOBAL_CFLAGS += -D__STDC_FORMAT_MACROS -D__STDC_CONSTANT_MACROS -D__USE_MINGW_ANSI_STDIO
-
-HOST_CC := $(TOOLS_PREFIX)gcc$(TOOLS_EXE_SUFFIX)
-HOST_CXX := $(TOOLS_PREFIX)g++$(TOOLS_EXE_SUFFIX)
-HOST_AR := $(TOOLS_PREFIX)ar$(TOOLS_EXE_SUFFIX)
-
-HOST_GLOBAL_CFLAGS += -include $(call select-android-config-h,windows)
-HOST_GLOBAL_LDFLAGS += --enable-stdcall-fixup
-ifneq ($(strip $(BUILD_HOST_static)),)
-# Statically-linked binaries are desirable for sandboxed environment
-HOST_GLOBAL_LDFLAGS += -static
-endif # BUILD_HOST_static
-
-# when building under Cygwin, ensure that we use Mingw compilation by default.
-# you can disable this (i.e. to generate Cygwin executables) by defining the
-# USE_CYGWIN variable in your environment, e.g.:
-#
-#   export USE_CYGWIN=1
-#
-# note that the -mno-cygwin flags are not needed when cross-compiling the
-# Windows host tools on Linux
-#
-ifneq ($(findstring CYGWIN,$(UNAME)),)
-ifeq ($(strip $(USE_CYGWIN)),)
-HOST_GLOBAL_CFLAGS += -mno-cygwin
-HOST_GLOBAL_LDFLAGS += -mno-cygwin -mconsole
-endif
-endif
diff --git a/core/combo/TARGET_linux-arm.mk b/core/combo/TARGET_linux-arm.mk
index 3651c39..510aae5 100644
--- a/core/combo/TARGET_linux-arm.mk
+++ b/core/combo/TARGET_linux-arm.mk
@@ -20,8 +20,7 @@
 # You can set TARGET_ARCH_VARIANT to use an arch version other
 # than ARMv5TE. Each value should correspond to a file named
 # $(BUILD_COMBOS)/arch/<name>.mk which must contain
-# makefile variable definitions similar to the preprocessor
-# defines in build/core/combo/include/arch/<combo>/AndroidConfig.h. Their
+# makefile variable definitions. Their
 # purpose is to allow module Android.mk files to selectively compile
 # different versions of code based upon the functionality and
 # instructions available in a given architecture version.
@@ -57,13 +56,18 @@
 $(combo_2nd_arch_prefix)TARGET_TOOLS_PREFIX := $($(combo_2nd_arch_prefix)TARGET_TOOLCHAIN_ROOT)/bin/arm-linux-androideabi-
 endif
 
-$(combo_2nd_arch_prefix)TARGET_CC := $($(combo_2nd_arch_prefix)TARGET_TOOLS_PREFIX)gcc$(HOST_EXECUTABLE_SUFFIX)
-$(combo_2nd_arch_prefix)TARGET_CXX := $($(combo_2nd_arch_prefix)TARGET_TOOLS_PREFIX)g++$(HOST_EXECUTABLE_SUFFIX)
-$(combo_2nd_arch_prefix)TARGET_AR := $($(combo_2nd_arch_prefix)TARGET_TOOLS_PREFIX)ar$(HOST_EXECUTABLE_SUFFIX)
-$(combo_2nd_arch_prefix)TARGET_OBJCOPY := $($(combo_2nd_arch_prefix)TARGET_TOOLS_PREFIX)objcopy$(HOST_EXECUTABLE_SUFFIX)
-$(combo_2nd_arch_prefix)TARGET_LD := $($(combo_2nd_arch_prefix)TARGET_TOOLS_PREFIX)ld$(HOST_EXECUTABLE_SUFFIX)
-$(combo_2nd_arch_prefix)TARGET_READELF := $($(combo_2nd_arch_prefix)TARGET_TOOLS_PREFIX)readelf$(HOST_EXECUTABLE_SUFFIX)
-$(combo_2nd_arch_prefix)TARGET_STRIP := $($(combo_2nd_arch_prefix)TARGET_TOOLS_PREFIX)strip$(HOST_EXECUTABLE_SUFFIX)
+$(combo_2nd_arch_prefix)TARGET_CC := $($(combo_2nd_arch_prefix)TARGET_TOOLS_PREFIX)gcc
+$(combo_2nd_arch_prefix)TARGET_CXX := $($(combo_2nd_arch_prefix)TARGET_TOOLS_PREFIX)g++
+$(combo_2nd_arch_prefix)TARGET_AR := $($(combo_2nd_arch_prefix)TARGET_TOOLS_PREFIX)ar
+$(combo_2nd_arch_prefix)TARGET_OBJCOPY := $($(combo_2nd_arch_prefix)TARGET_TOOLS_PREFIX)objcopy
+$(combo_2nd_arch_prefix)TARGET_LD := $($(combo_2nd_arch_prefix)TARGET_TOOLS_PREFIX)ld
+$(combo_2nd_arch_prefix)TARGET_READELF := $($(combo_2nd_arch_prefix)TARGET_TOOLS_PREFIX)readelf
+$(combo_2nd_arch_prefix)TARGET_STRIP := $($(combo_2nd_arch_prefix)TARGET_TOOLS_PREFIX)strip
+$(combo_2nd_arch_prefix)TARGET_NM := $($(combo_2nd_arch_prefix)TARGET_TOOLS_PREFIX)nm
+
+define $(combo_var_prefix)transform-shared-lib-to-toc
+$(call _gen_toc_command_for_elf,$(1),$(2))
+endef
 
 $(combo_2nd_arch_prefix)TARGET_NO_UNDEFINED_LDFLAGS := -Wl,--no-undefined
 
@@ -92,14 +96,12 @@
   $(combo_2nd_arch_prefix)TARGET_thumb_CFLAGS += -marm -fno-omit-frame-pointer
 endif
 
-android_config_h := $(call select-android-config-h,linux-arm)
-
 $(combo_2nd_arch_prefix)TARGET_GLOBAL_CFLAGS += \
 			-msoft-float \
 			-ffunction-sections \
 			-fdata-sections \
 			-funwind-tables \
-			-fstack-protector \
+			-fstack-protector-strong \
 			-Wa,--noexecstack \
 			-Werror=format-security \
 			-D_FORTIFY_SOURCE=2 \
@@ -107,8 +109,6 @@
 			-no-canonical-prefixes \
 			-fno-canonical-system-headers \
 			$(arch_variant_cflags) \
-			-include $(android_config_h) \
-			-I $(dir $(android_config_h))
 
 # The "-Wunused-but-set-variable" option often breaks projects that enable
 # "-Wall -Werror" due to a commom idiom "ALOGV(mesg)" where ALOGV is turned
@@ -138,6 +138,7 @@
 			-Wl,--fatal-warnings \
 			-Wl,--icf=safe \
 			-Wl,--hash-style=gnu \
+			-Wl,--no-undefined-version \
 			$(arch_variant_ldflags)
 
 $(combo_2nd_arch_prefix)TARGET_GLOBAL_CFLAGS += -mthumb-interwork
@@ -171,6 +172,7 @@
 endif
 
 KERNEL_HEADERS_COMMON := $(libc_root)/kernel/uapi
+KERNEL_HEADERS_COMMON += $(libc_root)/kernel/common
 KERNEL_HEADERS_ARCH   := $(libc_root)/kernel/uapi/asm-$(TARGET_$(combo_2nd_arch_prefix)ARCH)
 KERNEL_HEADERS := $(KERNEL_HEADERS_COMMON) $(KERNEL_HEADERS_ARCH)
 
@@ -190,6 +192,4 @@
 
 $(combo_2nd_arch_prefix)TARGET_PACK_MODULE_RELOCATIONS := true
 
-$(combo_2nd_arch_prefix)TARGET_DEFAULT_SYSTEM_SHARED_LIBRARIES := libc libm
-
 $(combo_2nd_arch_prefix)TARGET_LINKER := /system/bin/linker
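Note on the pattern above (it repeats for every target combo that follows): the $(HOST_EXECUTABLE_SUFFIX) suffix is dropped from the cross-tool paths, a TARGET_NM tool is added, and a per-arch transform-shared-lib-to-toc macro is defined on top of _gen_toc_command_for_elf, which is introduced in core/config.mk further down. As a minimal sketch, assuming a hypothetical library libfoo.so and the arm prefix above (toolchain path omitted), the macro roughly expands to:

    (arm-linux-androideabi-readelf -d libfoo.so | grep SONAME \
        || echo "No SONAME for libfoo.so") > libfoo.so.toc
    arm-linux-androideabi-readelf --dyn-syms libfoo.so \
        | awk '{$2=""; $3=""; print}' >> libfoo.so.toc

i.e. the .toc file captures the SONAME plus the dynamic symbol table — the library's interface — which is what downstream link steps can depend on instead of the full .so.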
diff --git a/core/combo/TARGET_linux-arm64.mk b/core/combo/TARGET_linux-arm64.mk
index 3acddc5..6a1d861 100644
--- a/core/combo/TARGET_linux-arm64.mk
+++ b/core/combo/TARGET_linux-arm64.mk
@@ -20,8 +20,7 @@
 # You can set TARGET_ARCH_VARIANT to use an arch version other
 # than ARMv5TE. Each value should correspond to a file named
 # $(BUILD_COMBOS)/arch/<name>.mk which must contain
-# makefile variable definitions similar to the preprocessor
-# defines in build/core/combo/include/arch/<combo>/AndroidConfig.h. Their
+# makefile variable definitions. Their
 # purpose is to allow module Android.mk files to selectively compile
 # different versions of code based upon the functionality and
 # instructions available in a given architecture version.
@@ -57,23 +56,26 @@
 TARGET_TOOLS_PREFIX := $(TARGET_TOOLCHAIN_ROOT)/bin/aarch64-linux-android-
 endif
 
-TARGET_CC := $(TARGET_TOOLS_PREFIX)gcc$(HOST_EXECUTABLE_SUFFIX)
-TARGET_CXX := $(TARGET_TOOLS_PREFIX)g++$(HOST_EXECUTABLE_SUFFIX)
-TARGET_AR := $(TARGET_TOOLS_PREFIX)ar$(HOST_EXECUTABLE_SUFFIX)
-TARGET_OBJCOPY := $(TARGET_TOOLS_PREFIX)objcopy$(HOST_EXECUTABLE_SUFFIX)
-TARGET_LD := $(TARGET_TOOLS_PREFIX)ld$(HOST_EXECUTABLE_SUFFIX)
-TARGET_READELF := $(TARGET_TOOLS_PREFIX)readelf$(HOST_EXECUTABLE_SUFFIX)
-TARGET_STRIP := $(TARGET_TOOLS_PREFIX)strip$(HOST_EXECUTABLE_SUFFIX)
+TARGET_CC := $(TARGET_TOOLS_PREFIX)gcc
+TARGET_CXX := $(TARGET_TOOLS_PREFIX)g++
+TARGET_AR := $(TARGET_TOOLS_PREFIX)ar
+TARGET_OBJCOPY := $(TARGET_TOOLS_PREFIX)objcopy
+TARGET_LD := $(TARGET_TOOLS_PREFIX)ld
+TARGET_READELF := $(TARGET_TOOLS_PREFIX)readelf
+TARGET_STRIP := $(TARGET_TOOLS_PREFIX)strip
+TARGET_NM := $(TARGET_TOOLS_PREFIX)nm
+
+define $(combo_var_prefix)transform-shared-lib-to-toc
+$(call _gen_toc_command_for_elf,$(1),$(2))
+endef
 
 TARGET_NO_UNDEFINED_LDFLAGS := -Wl,--no-undefined
 
 TARGET_GLOBAL_CFLAGS += \
     -fno-strict-aliasing \
 
-android_config_h := $(call select-android-config-h,linux-arm64)
-
 TARGET_GLOBAL_CFLAGS += \
-			-fstack-protector \
+			-fstack-protector-strong \
 			-ffunction-sections \
 			-fdata-sections \
 			-funwind-tables \
@@ -84,8 +86,6 @@
 			-no-canonical-prefixes \
 			-fno-canonical-system-headers \
 			$(arch_variant_cflags) \
-			-include $(android_config_h) \
-			-I $(dir $(android_config_h))
 
 # Help catch common 32/64-bit errors.
 TARGET_GLOBAL_CFLAGS += \
@@ -115,6 +115,9 @@
 			-Wl,-maarch64linux \
 			-Wl,--hash-style=gnu \
 			-Wl,--fix-cortex-a53-843419 \
+			-fuse-ld=gold \
+			-Wl,--icf=safe \
+			-Wl,--no-undefined-version \
 			$(arch_variant_ldflags)
 
 # Disable transitive dependency library symbol resolving.
@@ -142,6 +145,7 @@
 	-print-file-name=libgcov.a)
 
 KERNEL_HEADERS_COMMON := $(libc_root)/kernel/uapi
+KERNEL_HEADERS_COMMON += $(libc_root)/kernel/common
 KERNEL_HEADERS_ARCH   := $(libc_root)/kernel/uapi/asm-$(TARGET_ARCH)
 KERNEL_HEADERS := $(KERNEL_HEADERS_COMMON) $(KERNEL_HEADERS_ARCH)
 
@@ -161,6 +165,4 @@
 
 TARGET_PACK_MODULE_RELOCATIONS := true
 
-TARGET_DEFAULT_SYSTEM_SHARED_LIBRARIES := libc libm
-
 TARGET_LINKER := /system/bin/linker64
diff --git a/core/combo/TARGET_linux-mips.mk b/core/combo/TARGET_linux-mips.mk
index 29e49fb..186d88f 100644
--- a/core/combo/TARGET_linux-mips.mk
+++ b/core/combo/TARGET_linux-mips.mk
@@ -20,8 +20,7 @@
 # You can set TARGET_ARCH_VARIANT to use an arch version other
 # than mips32r2-fp. Each value should correspond to a file named
 # $(BUILD_COMBOS)/arch/<name>.mk which must contain
-# makefile variable definitions similar to the preprocessor
-# defines in build/core/combo/include/arch/<combo>/AndroidConfig.h. Their
+# makefile variable definitions. Their
 # purpose is to allow module Android.mk files to selectively compile
 # different versions of code based upon the functionality and
 # instructions available in a given architecture version.
@@ -57,13 +56,18 @@
 $(combo_2nd_arch_prefix)TARGET_TOOLS_PREFIX := $($(combo_2nd_arch_prefix)TARGET_TOOLCHAIN_ROOT)/bin/mips64el-linux-android-
 endif
 
-$(combo_2nd_arch_prefix)TARGET_CC := $($(combo_2nd_arch_prefix)TARGET_TOOLS_PREFIX)gcc$(HOST_EXECUTABLE_SUFFIX)
-$(combo_2nd_arch_prefix)TARGET_CXX := $($(combo_2nd_arch_prefix)TARGET_TOOLS_PREFIX)g++$(HOST_EXECUTABLE_SUFFIX)
-$(combo_2nd_arch_prefix)TARGET_AR := $($(combo_2nd_arch_prefix)TARGET_TOOLS_PREFIX)ar$(HOST_EXECUTABLE_SUFFIX)
-$(combo_2nd_arch_prefix)TARGET_OBJCOPY := $($(combo_2nd_arch_prefix)TARGET_TOOLS_PREFIX)objcopy$(HOST_EXECUTABLE_SUFFIX)
-$(combo_2nd_arch_prefix)TARGET_LD := $($(combo_2nd_arch_prefix)TARGET_TOOLS_PREFIX)ld$(HOST_EXECUTABLE_SUFFIX)
-$(combo_2nd_arch_prefix)TARGET_READELF := $($(combo_2nd_arch_prefix)TARGET_TOOLS_PREFIX)readelf$(HOST_EXECUTABLE_SUFFIX)
-$(combo_2nd_arch_prefix)TARGET_STRIP := $($(combo_2nd_arch_prefix)TARGET_TOOLS_PREFIX)strip$(HOST_EXECUTABLE_SUFFIX)
+$(combo_2nd_arch_prefix)TARGET_CC := $($(combo_2nd_arch_prefix)TARGET_TOOLS_PREFIX)gcc
+$(combo_2nd_arch_prefix)TARGET_CXX := $($(combo_2nd_arch_prefix)TARGET_TOOLS_PREFIX)g++
+$(combo_2nd_arch_prefix)TARGET_AR := $($(combo_2nd_arch_prefix)TARGET_TOOLS_PREFIX)ar
+$(combo_2nd_arch_prefix)TARGET_OBJCOPY := $($(combo_2nd_arch_prefix)TARGET_TOOLS_PREFIX)objcopy
+$(combo_2nd_arch_prefix)TARGET_LD := $($(combo_2nd_arch_prefix)TARGET_TOOLS_PREFIX)ld
+$(combo_2nd_arch_prefix)TARGET_READELF := $($(combo_2nd_arch_prefix)TARGET_TOOLS_PREFIX)readelf
+$(combo_2nd_arch_prefix)TARGET_STRIP := $($(combo_2nd_arch_prefix)TARGET_TOOLS_PREFIX)strip
+$(combo_2nd_arch_prefix)TARGET_NM := $($(combo_2nd_arch_prefix)TARGET_TOOLS_PREFIX)nm
+
+define $(combo_var_prefix)transform-shared-lib-to-toc
+$(call _gen_toc_command_for_elf,$(1),$(2))
+endef
 
 $(combo_2nd_arch_prefix)TARGET_NO_UNDEFINED_LDFLAGS := -Wl,--no-undefined
 
@@ -79,22 +83,19 @@
   TARGET_mips_CFLAGS += -fno-omit-frame-pointer
 endif
 
-android_config_h := $(call select-android-config-h,linux-mips)
-
 $(combo_2nd_arch_prefix)TARGET_GLOBAL_CFLAGS += \
 			$(TARGET_mips_CFLAGS) \
 			-U__unix -U__unix__ -Umips \
 			-ffunction-sections \
 			-fdata-sections \
 			-funwind-tables \
+			-fstack-protector-strong \
 			-Wa,--noexecstack \
 			-Werror=format-security \
 			-D_FORTIFY_SOURCE=2 \
 			-no-canonical-prefixes \
 			-fno-canonical-system-headers \
 			$(arch_variant_cflags) \
-			-include $(android_config_h) \
-			-I $(dir $(android_config_h))
 
 ifneq ($(ARCH_MIPS_PAGE_SHIFT),)
 $(combo_2nd_arch_prefix)TARGET_GLOBAL_CFLAGS += -DPAGE_SHIFT=$(ARCH_MIPS_PAGE_SHIFT)
@@ -107,6 +108,7 @@
 			-Wl,--build-id=md5 \
 			-Wl,--warn-shared-textrel \
 			-Wl,--fatal-warnings \
+			-Wl,--no-undefined-version \
 			$(arch_variant_ldflags)
 
 # Disable transitive dependency library symbol resolving.
@@ -145,6 +147,7 @@
 endif
 
 KERNEL_HEADERS_COMMON := $(libc_root)/kernel/uapi
+KERNEL_HEADERS_COMMON += $(libc_root)/kernel/common
 KERNEL_HEADERS_ARCH   := $(libc_root)/kernel/uapi/asm-mips # mips covers both mips and mips64.
 KERNEL_HEADERS := $(KERNEL_HEADERS_COMMON) $(KERNEL_HEADERS_ARCH)
 
@@ -164,6 +167,4 @@
 
 $(combo_2nd_arch_prefix)TARGET_PACK_MODULE_RELOCATIONS := true
 
-$(combo_2nd_arch_prefix)TARGET_DEFAULT_SYSTEM_SHARED_LIBRARIES := libc libm
-
 $(combo_2nd_arch_prefix)TARGET_LINKER := /system/bin/linker
diff --git a/core/combo/TARGET_linux-mips64.mk b/core/combo/TARGET_linux-mips64.mk
index b34b7a6..3e1f61a 100644
--- a/core/combo/TARGET_linux-mips64.mk
+++ b/core/combo/TARGET_linux-mips64.mk
@@ -20,8 +20,7 @@
 # You can set TARGET_ARCH_VARIANT to use an arch version other
 # than mips64r6. Each value should correspond to a file named
 # $(BUILD_COMBOS)/arch/<name>.mk which must contain
-# makefile variable definitions similar to the preprocessor
-# defines in build/core/combo/include/arch/<combo>/AndroidConfig.h. Their
+# makefile variable definitions. Their
 # purpose is to allow module Android.mk files to selectively compile
 # different versions of code based upon the functionality and
 # instructions available in a given architecture version.
@@ -57,13 +56,18 @@
 TARGET_TOOLS_PREFIX := $(TARGET_TOOLCHAIN_ROOT)/bin/mips64el-linux-android-
 endif
 
-TARGET_CC := $(TARGET_TOOLS_PREFIX)gcc$(HOST_EXECUTABLE_SUFFIX)
-TARGET_CXX := $(TARGET_TOOLS_PREFIX)g++$(HOST_EXECUTABLE_SUFFIX)
-TARGET_AR := $(TARGET_TOOLS_PREFIX)ar$(HOST_EXECUTABLE_SUFFIX)
-TARGET_OBJCOPY := $(TARGET_TOOLS_PREFIX)objcopy$(HOST_EXECUTABLE_SUFFIX)
-TARGET_LD := $(TARGET_TOOLS_PREFIX)ld$(HOST_EXECUTABLE_SUFFIX)
-TARGET_READELF := $(TARGET_TOOLS_PREFIX)readelf$(HOST_EXECUTABLE_SUFFIX)
-TARGET_STRIP := $(TARGET_TOOLS_PREFIX)strip$(HOST_EXECUTABLE_SUFFIX)
+TARGET_CC := $(TARGET_TOOLS_PREFIX)gcc
+TARGET_CXX := $(TARGET_TOOLS_PREFIX)g++
+TARGET_AR := $(TARGET_TOOLS_PREFIX)ar
+TARGET_OBJCOPY := $(TARGET_TOOLS_PREFIX)objcopy
+TARGET_LD := $(TARGET_TOOLS_PREFIX)ld
+TARGET_READELF := $(TARGET_TOOLS_PREFIX)readelf
+TARGET_STRIP := $(TARGET_TOOLS_PREFIX)strip
+TARGET_NM := $(TARGET_TOOLS_PREFIX)nm
+
+define $(combo_var_prefix)transform-shared-lib-to-toc
+$(call _gen_toc_command_for_elf,$(1),$(2))
+endef
 
 TARGET_NO_UNDEFINED_LDFLAGS := -Wl,--no-undefined
 
@@ -79,22 +83,19 @@
   TARGET_mips_CFLAGS += -fno-omit-frame-pointer
 endif
 
-android_config_h := $(call select-android-config-h,linux-mips64)
-
 TARGET_GLOBAL_CFLAGS += \
 			$(TARGET_mips_CFLAGS) \
 			-U__unix -U__unix__ -Umips \
 			-ffunction-sections \
 			-fdata-sections \
 			-funwind-tables \
+			-fstack-protector-strong \
 			-Wa,--noexecstack \
 			-Werror=format-security \
 			-D_FORTIFY_SOURCE=2 \
 			-no-canonical-prefixes \
 			-fno-canonical-system-headers \
 			$(arch_variant_cflags) \
-			-include $(android_config_h) \
-			-I $(dir $(android_config_h))
 
 # Help catch common 32/64-bit errors.
 TARGET_GLOBAL_CFLAGS += \
@@ -113,8 +114,12 @@
 			-Wl,--build-id=md5 \
 			-Wl,--warn-shared-textrel \
 			-Wl,--fatal-warnings \
+			-Wl,--no-undefined-version \
 			$(arch_variant_ldflags)
 
+# Disable transitive dependency library symbol resolving.
+TARGET_GLOBAL_LDFLAGS += -Wl,--allow-shlib-undefined
+
 TARGET_GLOBAL_CPPFLAGS += -fvisibility-inlines-hidden
 
 # More flags/options can be added here
@@ -128,7 +133,6 @@
 
 libc_root := bionic/libc
 libm_root := bionic/libm
-libthread_db_root := bionic/libthread_db
 
 
 ## on some hosts, the target cross-compiler is not available so do not run this command
@@ -149,6 +153,7 @@
 endif
 
 KERNEL_HEADERS_COMMON := $(libc_root)/kernel/uapi
+KERNEL_HEADERS_COMMON += $(libc_root)/kernel/common
 KERNEL_HEADERS_ARCH   := $(libc_root)/kernel/uapi/asm-mips
 # TODO: perhaps use $(libc_root)/kernel/uapi/asm-$(TARGET_ARCH) instead of asm-mips ?
 KERNEL_HEADERS := $(KERNEL_HEADERS_COMMON) $(KERNEL_HEADERS_ARCH)
@@ -159,7 +164,7 @@
 	$(KERNEL_HEADERS) \
 	$(libm_root)/include \
 	$(libm_root)/include/mips \
-	$(libthread_db_root)/include
+
 # TODO: perhaps use $(libm_root)/include/mips64 instead of mips ?
 
 TARGET_CRTBEGIN_STATIC_O := $(TARGET_OUT_INTERMEDIATE_LIBRARIES)/crtbegin_static.o
@@ -171,6 +176,4 @@
 
 TARGET_PACK_MODULE_RELOCATIONS := true
 
-TARGET_DEFAULT_SYSTEM_SHARED_LIBRARIES := libc libstdc++ libm
-
 TARGET_LINKER := /system/bin/linker64
diff --git a/core/combo/TARGET_linux-x86.mk b/core/combo/TARGET_linux-x86.mk
index 340f306..558ec3b 100644
--- a/core/combo/TARGET_linux-x86.mk
+++ b/core/combo/TARGET_linux-x86.mk
@@ -49,13 +49,18 @@
 $(combo_2nd_arch_prefix)TARGET_TOOLS_PREFIX := $($(combo_2nd_arch_prefix)TARGET_TOOLCHAIN_ROOT)/bin/x86_64-linux-android-
 endif
 
-$(combo_2nd_arch_prefix)TARGET_CC := $($(combo_2nd_arch_prefix)TARGET_TOOLS_PREFIX)gcc$(HOST_EXECUTABLE_SUFFIX)
-$(combo_2nd_arch_prefix)TARGET_CXX := $($(combo_2nd_arch_prefix)TARGET_TOOLS_PREFIX)g++$(HOST_EXECUTABLE_SUFFIX)
-$(combo_2nd_arch_prefix)TARGET_AR := $($(combo_2nd_arch_prefix)TARGET_TOOLS_PREFIX)ar$(HOST_EXECUTABLE_SUFFIX)
-$(combo_2nd_arch_prefix)TARGET_OBJCOPY := $($(combo_2nd_arch_prefix)TARGET_TOOLS_PREFIX)objcopy$(HOST_EXECUTABLE_SUFFIX)
-$(combo_2nd_arch_prefix)TARGET_LD := $($(combo_2nd_arch_prefix)TARGET_TOOLS_PREFIX)ld$(HOST_EXECUTABLE_SUFFIX)
-$(combo_2nd_arch_prefix)TARGET_READELF := $($(combo_2nd_arch_prefix)TARGET_TOOLS_PREFIX)readelf$(HOST_EXECUTABLE_SUFFIX)
-$(combo_2nd_arch_prefix)TARGET_STRIP := $($(combo_2nd_arch_prefix)TARGET_TOOLS_PREFIX)strip$(HOST_EXECUTABLE_SUFFIX)
+$(combo_2nd_arch_prefix)TARGET_CC := $($(combo_2nd_arch_prefix)TARGET_TOOLS_PREFIX)gcc
+$(combo_2nd_arch_prefix)TARGET_CXX := $($(combo_2nd_arch_prefix)TARGET_TOOLS_PREFIX)g++
+$(combo_2nd_arch_prefix)TARGET_AR := $($(combo_2nd_arch_prefix)TARGET_TOOLS_PREFIX)ar
+$(combo_2nd_arch_prefix)TARGET_OBJCOPY := $($(combo_2nd_arch_prefix)TARGET_TOOLS_PREFIX)objcopy
+$(combo_2nd_arch_prefix)TARGET_LD := $($(combo_2nd_arch_prefix)TARGET_TOOLS_PREFIX)ld
+$(combo_2nd_arch_prefix)TARGET_READELF := $($(combo_2nd_arch_prefix)TARGET_TOOLS_PREFIX)readelf
+$(combo_2nd_arch_prefix)TARGET_STRIP := $($(combo_2nd_arch_prefix)TARGET_TOOLS_PREFIX)strip
+$(combo_2nd_arch_prefix)TARGET_NM := $($(combo_2nd_arch_prefix)TARGET_TOOLS_PREFIX)nm
+
+define $(combo_var_prefix)transform-shared-lib-to-toc
+$(call _gen_toc_command_for_elf,$(1),$(2))
+endef
 
 ifneq ($(wildcard $($(combo_2nd_arch_prefix)TARGET_CC)),)
 $(combo_2nd_arch_prefix)TARGET_LIBGCC := \
@@ -72,11 +77,10 @@
 libm_root := bionic/libm
 
 KERNEL_HEADERS_COMMON := $(libc_root)/kernel/uapi
+KERNEL_HEADERS_COMMON += $(libc_root)/kernel/common
 KERNEL_HEADERS_ARCH   := $(libc_root)/kernel/uapi/asm-x86 # x86 covers both x86 and x86_64.
 KERNEL_HEADERS := $(KERNEL_HEADERS_COMMON) $(KERNEL_HEADERS_ARCH)
 
-android_config_h := $(call select-android-config-h,target_linux-x86)
-
 $(combo_2nd_arch_prefix)TARGET_GLOBAL_CFLAGS += \
 			-O2 \
 			-Wa,--noexecstack \
@@ -90,12 +94,10 @@
 			-fstrict-aliasing \
 			-funswitch-loops \
 			-funwind-tables \
-			-fstack-protector \
+			-fstack-protector-strong \
 			-m32 \
 			-no-canonical-prefixes \
 			-fno-canonical-system-headers \
-			-include $(android_config_h) \
-			-I $(dir $(android_config_h))
 
 $(combo_2nd_arch_prefix)TARGET_GLOBAL_CFLAGS += $(arch_variant_cflags)
 
@@ -127,6 +129,7 @@
 $(combo_2nd_arch_prefix)TARGET_GLOBAL_LDFLAGS += -Wl,--fatal-warnings
 $(combo_2nd_arch_prefix)TARGET_GLOBAL_LDFLAGS += -Wl,--gc-sections
 $(combo_2nd_arch_prefix)TARGET_GLOBAL_LDFLAGS += -Wl,--hash-style=gnu
+$(combo_2nd_arch_prefix)TARGET_GLOBAL_LDFLAGS += -Wl,--no-undefined-version
 
 $(combo_2nd_arch_prefix)TARGET_C_INCLUDES := \
 	$(libc_root)/arch-x86/include \
@@ -144,8 +147,6 @@
 
 $(combo_2nd_arch_prefix)TARGET_PACK_MODULE_RELOCATIONS := true
 
-$(combo_2nd_arch_prefix)TARGET_DEFAULT_SYSTEM_SHARED_LIBRARIES := libc libm
-
 $(combo_2nd_arch_prefix)TARGET_LINKER := /system/bin/linker
 
 $(combo_2nd_arch_prefix)TARGET_GLOBAL_YASM_FLAGS := -f elf32 -m x86
diff --git a/core/combo/TARGET_linux-x86_64.mk b/core/combo/TARGET_linux-x86_64.mk
index 53b0572..12166ec 100644
--- a/core/combo/TARGET_linux-x86_64.mk
+++ b/core/combo/TARGET_linux-x86_64.mk
@@ -49,13 +49,18 @@
 TARGET_TOOLS_PREFIX := $(TARGET_TOOLCHAIN_ROOT)/bin/x86_64-linux-android-
 endif
 
-TARGET_CC := $(TARGET_TOOLS_PREFIX)gcc$(HOST_EXECUTABLE_SUFFIX)
-TARGET_CXX := $(TARGET_TOOLS_PREFIX)g++$(HOST_EXECUTABLE_SUFFIX)
-TARGET_AR := $(TARGET_TOOLS_PREFIX)ar$(HOST_EXECUTABLE_SUFFIX)
-TARGET_OBJCOPY := $(TARGET_TOOLS_PREFIX)objcopy$(HOST_EXECUTABLE_SUFFIX)
-TARGET_LD := $(TARGET_TOOLS_PREFIX)ld$(HOST_EXECUTABLE_SUFFIX)
-TARGET_READELF := $(TARGET_TOOLS_PREFIX)readelf$(HOST_EXECUTABLE_SUFFIX)
-TARGET_STRIP := $(TARGET_TOOLS_PREFIX)strip$(HOST_EXECUTABLE_SUFFIX)
+TARGET_CC := $(TARGET_TOOLS_PREFIX)gcc
+TARGET_CXX := $(TARGET_TOOLS_PREFIX)g++
+TARGET_AR := $(TARGET_TOOLS_PREFIX)ar
+TARGET_OBJCOPY := $(TARGET_TOOLS_PREFIX)objcopy
+TARGET_LD := $(TARGET_TOOLS_PREFIX)ld
+TARGET_READELF := $(TARGET_TOOLS_PREFIX)readelf
+TARGET_STRIP := $(TARGET_TOOLS_PREFIX)strip
+TARGET_NM := $(TARGET_TOOLS_PREFIX)nm
+
+define $(combo_var_prefix)transform-shared-lib-to-toc
+$(call _gen_toc_command_for_elf,$(1),$(2))
+endef
 
 ifneq ($(wildcard $(TARGET_CC)),)
 TARGET_LIBGCC := \
@@ -72,6 +77,7 @@
 libm_root := bionic/libm
 
 KERNEL_HEADERS_COMMON := $(libc_root)/kernel/uapi
+KERNEL_HEADERS_COMMON += $(libc_root)/kernel/common
 KERNEL_HEADERS_ARCH   := $(libc_root)/kernel/uapi/asm-x86 # x86 covers both x86 and x86_64.
 KERNEL_HEADERS := $(KERNEL_HEADERS_COMMON) $(KERNEL_HEADERS_ARCH)
 
@@ -88,7 +94,7 @@
 			-fstrict-aliasing \
 			-funswitch-loops \
 			-funwind-tables \
-			-fstack-protector \
+			-fstack-protector-strong \
 			-m64 \
 			-no-canonical-prefixes \
 			-fno-canonical-system-headers
@@ -99,10 +105,6 @@
     -Werror=int-to-pointer-cast \
     -Werror=implicit-function-declaration \
 
-android_config_h := $(call select-android-config-h,target_linux-x86)
-TARGET_ANDROID_CONFIG_CFLAGS := -include $(android_config_h) -I $(dir $(android_config_h))
-TARGET_GLOBAL_CFLAGS += $(TARGET_ANDROID_CONFIG_CFLAGS)
-
 TARGET_GLOBAL_CFLAGS += $(arch_variant_cflags)
 
 ifeq ($(ARCH_X86_HAVE_SSSE3),true)   # yes, really SSSE3, not SSE3!
@@ -136,6 +138,7 @@
 TARGET_GLOBAL_LDFLAGS += -Wl,--fatal-warnings
 TARGET_GLOBAL_LDFLAGS += -Wl,--gc-sections
 TARGET_GLOBAL_LDFLAGS += -Wl,--hash-style=gnu
+TARGET_GLOBAL_LDFLAGS += -Wl,--no-undefined-version
 
 TARGET_C_INCLUDES := \
 	$(libc_root)/arch-x86_64/include \
@@ -151,8 +154,6 @@
 TARGET_CRTBEGIN_SO_O := $(TARGET_OUT_INTERMEDIATE_LIBRARIES)/crtbegin_so.o
 TARGET_CRTEND_SO_O := $(TARGET_OUT_INTERMEDIATE_LIBRARIES)/crtend_so.o
 
-TARGET_DEFAULT_SYSTEM_SHARED_LIBRARIES := libc libm
-
 TARGET_LINKER := /system/bin/linker64
 
 TARGET_GLOBAL_YASM_FLAGS := -f elf64 -m amd64
diff --git a/core/combo/arch/arm/armv7-a-neon.mk b/core/combo/arch/arm/armv7-a-neon.mk
index 99f17aa..5d5b050 100644
--- a/core/combo/arch/arm/armv7-a-neon.mk
+++ b/core/combo/arch/arm/armv7-a-neon.mk
@@ -6,6 +6,8 @@
 ARCH_ARM_HAVE_VFP_D32           := true
 ARCH_ARM_HAVE_NEON              := true
 
+local_arch_has_lpae := false
+
 ifneq (,$(filter cortex-a15 krait denver,$(TARGET_$(combo_2nd_arch_prefix)CPU_VARIANT)))
 	# TODO: krait is not a cortex-a15, we set the variant to cortex-a15 so that
 	#       hardware divide operations are generated. This should be removed and a
@@ -13,9 +15,7 @@
 	#       core/clang/arm.mk.
 	arch_variant_cflags := -mcpu=cortex-a15
 
-	# Fake an ARM compiler flag as these processors support LPAE which GCC/clang
-	# don't advertise.
-	arch_variant_cflags += -D__ARM_FEATURE_LPAE=1
+	local_arch_has_lpae := true
 	arch_variant_ldflags := \
 		-Wl,--no-fix-cortex-a8
 else
@@ -24,8 +24,10 @@
 	arch_variant_ldflags := \
 		-Wl,--fix-cortex-a8
 else
-ifeq ($(strip $(TARGET_$(combo_2nd_arch_prefix)CPU_VARIANT)),cortex-a7)
+ifneq (,$(filter cortex-a7 cortex-a53 cortex-a53.a57,$(TARGET_$(combo_2nd_arch_prefix)CPU_VARIANT)))
 	arch_variant_cflags := -mcpu=cortex-a7
+
+	local_arch_has_lpae := true
 	arch_variant_ldflags := \
 		-Wl,--no-fix-cortex-a8
 else
@@ -37,6 +39,16 @@
 endif
 endif
 
+ifeq (true,$(local_arch_has_lpae))
+	# Fake an ARM compiler flag as these processors support LPAE which GCC/clang
+	# don't advertise.
+	# TODO This is a hack and we need to add it for each processor that supports LPAE until some
+	# better solution comes around. See Bug 27340895
+	arch_variant_cflags += -D__ARM_FEATURE_LPAE=1
+endif
+
+local_arch_has_lpae :=
+
 arch_variant_cflags += \
     -mfloat-abi=softfp \
     -mfpu=neon
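A hedged reading of the LPAE change above: -D__ARM_FEATURE_LPAE=1 is no longer hard-wired to the cortex-a15/krait/denver branch; any branch that sets local_arch_has_lpae := true now picks it up. For instance, with an assumed variant value:

    # TARGET_CPU_VARIANT := cortex-a53
    # $(filter cortex-a7 cortex-a53 cortex-a53.a57,cortex-a53) is non-empty,
    # so local_arch_has_lpae := true and arch_variant_cflags ends up as
    #   -mcpu=cortex-a7 -D__ARM_FEATURE_LPAE=1 -mfloat-abi=softfp -mfpu=neon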
diff --git a/core/combo/arch/mips/mips32-fp.mk b/core/combo/arch/mips/mips32-fp.mk
index 8320e93..912ff63 100644
--- a/core/combo/arch/mips/mips32-fp.mk
+++ b/core/combo/arch/mips/mips32-fp.mk
@@ -7,6 +7,7 @@
     -mips32 \
     -mfp32 \
     -modd-spreg \
+    -mno-synci
 
 arch_variant_ldflags := \
     -Wl,-melf32ltsmip
diff --git a/core/combo/arch/mips/mips32r2-fp-xburst.mk b/core/combo/arch/mips/mips32r2-fp-xburst.mk
index 2b4f714..09b3bc2 100644
--- a/core/combo/arch/mips/mips32r2-fp-xburst.mk
+++ b/core/combo/arch/mips/mips32r2-fp-xburst.mk
@@ -9,7 +9,8 @@
     -mfp32 \
     -modd-spreg \
     -mno-fused-madd \
-    -Wa,-mmxu
+    -Wa,-mmxu \
+    -mno-synci
 
 arch_variant_ldflags := \
     -Wl,-melf32ltsmip
diff --git a/core/combo/include/arch/darwin-x86/AndroidConfig.h b/core/combo/include/arch/darwin-x86/AndroidConfig.h
deleted file mode 100644
index c28a7f8..0000000
--- a/core/combo/include/arch/darwin-x86/AndroidConfig.h
+++ /dev/null
@@ -1,50 +0,0 @@
-/*
- * Copyright (C) 2005 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-/*
- * Android config -- "Darwin".  Used for X86 Mac OS X.
- */
-#ifndef _ANDROID_CONFIG_H
-#define _ANDROID_CONFIG_H
-
-/*
- * ===========================================================================
- *                              !!! IMPORTANT !!!
- * ===========================================================================
- *
- * This file is included by ALL C/C++ source files.  Don't put anything in
- * here unless you are absolutely certain it can't go anywhere else.
- *
- * Any C++ stuff must be wrapped with "#ifdef __cplusplus".  Do not use "//"
- * comments.
- */
-
-/*
- * Define if we have <malloc.h> header
- */
-/* #define HAVE_MALLOC_H 1 */
-
-/*
- * The default path separator for the platform
- */
-#define OS_PATH_SEPARATOR '/'
-
-/*
- * Define if <stdint.h> exists.
- */
-#define HAVE_STDINT_H 1
-
-#endif /*_ANDROID_CONFIG_H*/
diff --git a/core/combo/include/arch/linux-arm/AndroidConfig.h b/core/combo/include/arch/linux-arm/AndroidConfig.h
deleted file mode 100644
index e819535..0000000
--- a/core/combo/include/arch/linux-arm/AndroidConfig.h
+++ /dev/null
@@ -1,55 +0,0 @@
-/*
- * Copyright (C) 2005 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-/*
- * Android config -- "android-arm".  Used for ARM device builds.
- */
-#ifndef _ANDROID_CONFIG_H
-#define _ANDROID_CONFIG_H
-
-/*
- * ===========================================================================
- *                              !!! IMPORTANT !!!
- * ===========================================================================
- *
- * This file is included by ALL C/C++ source files.  Don't put anything in
- * here unless you are absolutely certain it can't go anywhere else.
- *
- * Any C++ stuff must be wrapped with "#ifdef __cplusplus".  Do not use "//"
- * comments.
- */
-
-/*
- * Define if we have <malloc.h> header
- */
-#define HAVE_MALLOC_H 1
-
-/*
- * Define if we're running on *our* linux on device or emulator.
- */
-#define HAVE_ANDROID_OS 1
-
-/*
- * The default path separator for the platform
- */
-#define OS_PATH_SEPARATOR '/'
-
-/*
- * Define if <stdint.h> exists.
- */
-#define HAVE_STDINT_H 1
-
-#endif /* _ANDROID_CONFIG_H */
diff --git a/core/combo/include/arch/linux-arm64/AndroidConfig.h b/core/combo/include/arch/linux-arm64/AndroidConfig.h
deleted file mode 100644
index cee484d..0000000
--- a/core/combo/include/arch/linux-arm64/AndroidConfig.h
+++ /dev/null
@@ -1,55 +0,0 @@
-/*
- * Copyright (C) 2013 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-/*
- * Android config -- "android-aarch64".  Used for ARM aarch64 device builds.
- */
-#ifndef _ANDROID_CONFIG_H
-#define _ANDROID_CONFIG_H
-
-/*
- * ===========================================================================
- *                              !!! IMPORTANT !!!
- * ===========================================================================
- *
- * This file is included by ALL C/C++ source files.  Don't put anything in
- * here unless you are absolutely certain it can't go anywhere else.
- *
- * Any C++ stuff must be wrapped with "#ifdef __cplusplus".  Do not use "//"
- * comments.
- */
-
-/*
- * Define if we have <malloc.h> header
- */
-#define HAVE_MALLOC_H 1
-
-/*
- * Define if we're running on *our* linux on device or emulator.
- */
-#define HAVE_ANDROID_OS 1
-
-/*
- * The default path separator for the platform
- */
-#define OS_PATH_SEPARATOR '/'
-
-/*
- * Define if <stdint.h> exists.
- */
-#define HAVE_STDINT_H 1
-
-#endif /* _ANDROID_CONFIG_H */
diff --git a/core/combo/include/arch/linux-mips/AndroidConfig.h b/core/combo/include/arch/linux-mips/AndroidConfig.h
deleted file mode 100644
index a5dcef1..0000000
--- a/core/combo/include/arch/linux-mips/AndroidConfig.h
+++ /dev/null
@@ -1,55 +0,0 @@
-/*
- * Copyright (C) 2010 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-/*
- * Android config -- "android-mips".  Used for MIPS device builds.
- */
-#ifndef _ANDROID_CONFIG_H
-#define _ANDROID_CONFIG_H
-
-/*
- * ===========================================================================
- *                              !!! IMPORTANT !!!
- * ===========================================================================
- *
- * This file is included by ALL C/C++ source files.  Don't put anything in
- * here unless you are absolutely certain it can't go anywhere else.
- *
- * Any C++ stuff must be wrapped with "#ifdef __cplusplus".  Do not use "//"
- * comments.
- */
-
-/*
- * Define if we have <malloc.h> header
- */
-#define HAVE_MALLOC_H 1
-
-/*
- * Define if we're running on *our* linux on device or emulator.
- */
-#define HAVE_ANDROID_OS 1
-
-/*
- * The default path separator for the platform
- */
-#define OS_PATH_SEPARATOR '/'
-
-/*
- * Define if <stdint.h> exists.
- */
-#define HAVE_STDINT_H 1
-
-#endif /* _ANDROID_CONFIG_H */
diff --git a/core/combo/include/arch/linux-mips64/AndroidConfig.h b/core/combo/include/arch/linux-mips64/AndroidConfig.h
deleted file mode 100644
index 62d569e..0000000
--- a/core/combo/include/arch/linux-mips64/AndroidConfig.h
+++ /dev/null
@@ -1,55 +0,0 @@
-/*
- * Copyright (C) 2013 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-/*
- * Android config -- "android-mips64".  Used for MIPS device builds.
- */
-#ifndef _ANDROID_CONFIG_H
-#define _ANDROID_CONFIG_H
-
-/*
- * ===========================================================================
- *                              !!! IMPORTANT !!!
- * ===========================================================================
- *
- * This file is included by ALL C/C++ source files.  Don't put anything in
- * here unless you are absolutely certain it can't go anywhere else.
- *
- * Any C++ stuff must be wrapped with "#ifdef __cplusplus".  Do not use "//"
- * comments.
- */
-
-/*
- * Define if we have <malloc.h> header
- */
-#define HAVE_MALLOC_H 1
-
-/*
- * Define if we're running on *our* linux on device or emulator.
- */
-#define HAVE_ANDROID_OS 1
-
-/*
- * The default path separator for the platform
- */
-#define OS_PATH_SEPARATOR '/'
-
-/*
- * Define if <stdint.h> exists.
- */
-#define HAVE_STDINT_H 1
-
-#endif /* _ANDROID_CONFIG_H */
diff --git a/core/combo/include/arch/linux-x86/AndroidConfig.h b/core/combo/include/arch/linux-x86/AndroidConfig.h
deleted file mode 100644
index 89b29fe..0000000
--- a/core/combo/include/arch/linux-x86/AndroidConfig.h
+++ /dev/null
@@ -1,58 +0,0 @@
-/*
- * Copyright (C) 2005 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-/*
- * Android config -- "Linux".  Used for desktop x86 Linux.
- */
-#ifndef _ANDROID_CONFIG_H
-#define _ANDROID_CONFIG_H
-
-/*
- * ===========================================================================
- *                              !!! IMPORTANT !!!
- * ===========================================================================
- *
- * This file is included by ALL C/C++ source files.  Don't put anything in
- * here unless you are absolutely certain it can't go anywhere else.
- *
- * Any C++ stuff must be wrapped with "#ifdef __cplusplus".  Do not use "//"
- * comments.
- */
-
-/*
- * We need to choose between 32-bit and 64-bit off_t.  All of our code should
- * agree on the same size.  For desktop systems, use 64-bit values,
- * because some of our libraries (e.g. wxWidgets) expect to be built that way.
- */
-#define _FILE_OFFSET_BITS 64
-#define _LARGEFILE_SOURCE 1
-
-/*
- * Define if we have <malloc.h> header
- */
-#define HAVE_MALLOC_H 1
-
-/*
- * The default path separator for the platform
- */
-#define OS_PATH_SEPARATOR '/'
-
-/*
- * Define if <stdint.h> exists.
- */
-#define HAVE_STDINT_H 1
-
-#endif /*_ANDROID_CONFIG_H*/
diff --git a/core/combo/include/arch/target_linux-x86/AndroidConfig.h b/core/combo/include/arch/target_linux-x86/AndroidConfig.h
deleted file mode 100644
index 41e4df9..0000000
--- a/core/combo/include/arch/target_linux-x86/AndroidConfig.h
+++ /dev/null
@@ -1,41 +0,0 @@
-/*
- * Copyright 2005 The Android Open Source Project
- *
- * Android config -- "target_linux-x86".  Used for x86 linux target devices.
- */
-#ifndef _ANDROID_CONFIG_H
-#define _ANDROID_CONFIG_H
-
-/*
- * ===========================================================================
- *                              !!! IMPORTANT !!!
- * ===========================================================================
- *
- * This file is included by ALL C/C++ source files.  Don't put anything in
- * here unless you are absolutely certain it can't go anywhere else.
- *
- * Any C++ stuff must be wrapped with "#ifdef __cplusplus".  Do not use "//"
- * comments.
- */
-
-/*
- * Define if we have <malloc.h> header
- */
-#define HAVE_MALLOC_H 1
-
-/*
- * Define if we're running on *our* linux on device or emulator.
- */
-#define HAVE_ANDROID_OS 1
-
-/*
- * The default path separator for the platform
- */
-#define OS_PATH_SEPARATOR '/'
-
-/*
- * Define if <stdint.h> exists.
- */
-#define HAVE_STDINT_H 1
-
-#endif /* _ANDROID_CONFIG_H */
diff --git a/core/combo/include/arch/windows/AndroidConfig.h b/core/combo/include/arch/windows/AndroidConfig.h
deleted file mode 100644
index e7eb837..0000000
--- a/core/combo/include/arch/windows/AndroidConfig.h
+++ /dev/null
@@ -1,104 +0,0 @@
-/*
- * Copyright (C) 2005 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-/*
- * Android config -- "CYGWIN_NT-5.1".
- *
- * Cygwin has pthreads, but GDB seems to get confused if you use it to
- * create threads.  By "confused", I mean it freezes up the first time the
- * debugged process creates a thread, even if you use CreateThread.  The
- * mere presence of pthreads linkage seems to cause problems.
- */
-#ifndef _ANDROID_CONFIG_H
-#define _ANDROID_CONFIG_H
-
-/*
- * ===========================================================================
- *                              !!! IMPORTANT !!!
- * ===========================================================================
- *
- * This file is included by ALL C/C++ source files.  Don't put anything in
- * here unless you are absolutely certain it can't go anywhere else.
- *
- * Any C++ stuff must be wrapped with "#ifdef __cplusplus".  Do not use "//"
- * comments.
- */
-
-/* MingW doesn't define __BEGIN_DECLS / __END_DECLS. */
-
-#ifndef __BEGIN_DECLS
-#  ifdef __cplusplus
-#    define __BEGIN_DECLS extern "C" {
-#  else
-#    define __BEGIN_DECLS
-#  endif
-#endif
-
-#ifndef __END_DECLS
-#  ifdef __cplusplus
-#    define __END_DECLS }
-#  else
-#    define __END_DECLS
-#  endif
-#endif
-
-/* TODO: replace references to this. */
-#define HAVE_WIN32_IPC
-
-#ifdef __CYGWIN__
-#error "CYGWIN is unsupported for platform builds"
-#endif
-
-/*
- * Define this if you build against MSVCRT.DLL
- */
-#define HAVE_MS_C_RUNTIME
-
-/*
- * Define this if we want to use WinSock.
- */
-#define HAVE_WINSOCK
-
-/*
- * We need to choose between 32-bit and 64-bit off_t.  All of our code should
- * agree on the same size.  For desktop systems, use 64-bit values,
- * because some of our libraries (e.g. wxWidgets) expect to be built that way.
- */
-#define _FILE_OFFSET_BITS 64
-#define _LARGEFILE_SOURCE 1
-
-/*
- * Add any extra platform-specific defines here.
- */
-#define WIN32 1                 /* stock Cygwin doesn't define these */
-#define _WIN32 1
-#define _WIN32_WINNT 0x0500     /* admit to using >= Win2K */
-
-#define HAVE_WINDOWS_PATHS      /* needed by simulator */
-
-/*
- * The default path separator for the platform
- */
-#define OS_PATH_SEPARATOR '\\'
-
-/*
- * Various definitions missing in MinGW
- */
-#ifdef USE_MINGW
-#define S_IRGRP 0
-#endif
-
-#endif /*_ANDROID_CONFIG_H*/
diff --git a/core/combo/javac.mk b/core/combo/javac.mk
index 82cbb43..7f66ea8 100644
--- a/core/combo/javac.mk
+++ b/core/combo/javac.mk
@@ -14,7 +14,7 @@
 ANDROID_COMPILE_WITH_JACK := true
 endif
 
-common_jdk_flags := -source 1.7 -target 1.7 -Xmaxerrs 9999999
+common_jdk_flags := -Xmaxerrs 9999999
 
 # Use the indexer wrapper to index the codebase instead of the javac compiler
 ifeq ($(ALTERNATE_JAVAC),)
@@ -31,12 +31,7 @@
 endif
 
 # Whatever compiler is on this system.
-ifeq ($(BUILD_OS), windows)
-    COMMON_JAVAC := development/host/windows/prebuilt/javawrap.exe -J-Xmx256m \
-        $(common_jdk_flags)
-else
-    COMMON_JAVAC := $(JAVACC) -J-Xmx1024M $(common_jdk_flags)
-endif
+COMMON_JAVAC := $(JAVACC) -J-Xmx1024M $(common_jdk_flags)
 
 # Eclipse.
 ifeq ($(CUSTOM_JAVA_COMPILER), eclipse)
@@ -45,6 +40,8 @@
     $(info CUSTOM_JAVA_COMPILER=eclipse)
 endif
 
+GLOBAL_JAVAC_DEBUG_FLAGS := -g
+
 HOST_JAVAC ?= $(COMMON_JAVAC)
 TARGET_JAVAC ?= $(COMMON_JAVAC)
 
diff --git a/core/combo/mac_version.mk b/core/combo/mac_version.mk
index 6defba7..51394c6 100644
--- a/core/combo/mac_version.mk
+++ b/core/combo/mac_version.mk
@@ -9,7 +9,7 @@
 
 build_mac_version := $(shell sw_vers -productVersion)
 
-mac_sdk_versions_supported :=  10.6 10.7 10.8 10.9
+mac_sdk_versions_supported :=  10.8 10.9 10.10 10.11
 ifneq ($(strip $(MAC_SDK_VERSION)),)
 mac_sdk_version := $(MAC_SDK_VERSION)
 ifeq ($(filter $(mac_sdk_version),$(mac_sdk_versions_supported)),)
@@ -19,10 +19,11 @@
 $(error Stop.)
 endif
 else
-mac_sdk_versions_installed := $(shell xcodebuild -showsdks | grep macosx | sort | sed -e "s/.*macosx//g")
+mac_sdk_versions_installed := $(shell xcodebuild -showsdks | grep macosx | sed -e "s/.*macosx//g")
 mac_sdk_version := $(firstword $(filter $(mac_sdk_versions_installed), $(mac_sdk_versions_supported)))
 ifeq ($(mac_sdk_version),)
 mac_sdk_version := $(firstword $(mac_sdk_versions_supported))
+$(warning none of the installed SDKs ($(mac_sdk_versions_installed)) match supported versions ($(mac_sdk_versions_supported)), trying $(mac_sdk_version))
 endif
 endif
 
@@ -32,6 +33,7 @@
 mac_sdk_root := $(mac_sdk_path)/Platforms/MacOSX.platform/Developer/SDKs/MacOSX$(mac_sdk_version).sdk
 ifeq ($(wildcard $(mac_sdk_root)),)
 # try legacy /Developer/SDKs/MacOSX10.?.sdk
+$(warning no SDK $(mac_sdk_version) at $(mac_sdk_root), trying legacy dir)
 mac_sdk_root := /Developer/SDKs/MacOSX$(mac_sdk_version).sdk
 endif
 ifeq ($(wildcard $(mac_sdk_root)),)
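Illustrative walk-through of the SDK selection above, with a hypothetical host that only has the 10.11 SDK installed (note that sort is dropped, so xcodebuild's own ordering is kept):

    $ xcodebuild -showsdks | grep macosx | sed -e "s/.*macosx//g"
    10.11
    # mac_sdk_versions_installed = 10.11
    # mac_sdk_version = $(firstword $(filter 10.11,10.8 10.9 10.10 10.11)) = 10.11

If nothing matches, the new $(warning ...) fires and the build falls back to the first supported version, 10.8.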
diff --git a/core/config.mk b/core/config.mk
index 51810aa..94c880f 100644
--- a/core/config.mk
+++ b/core/config.mk
@@ -23,6 +23,10 @@
 
 
 endef
+# The pound character "#"
+define pound
+#
+endef
 # Unfortunately you can't simply define backslash as \ or \\.
 backslash := \a
 backslash := $(patsubst %a,%,$(backslash))
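The $(pound) helper above exists because a literal "#" on a makefile line starts a comment, so it cannot be written directly inside an assignment or function call; routing it through a variable keeps the character. A minimal, hypothetical sketch:

    # BAD:  everything after "#" is treated as a makefile comment
    #   FOO := -DNAME=#value
    # OK:   FOO becomes -DNAME=#value
    FOO := -DNAME=$(pound)value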
@@ -31,6 +35,11 @@
 # only has an effect on python 2.6 and above.
 export PYTHONDONTWRITEBYTECODE := 1
 
+ifneq ($(filter --color=always, $(GREP_OPTIONS)),)
+$(warning The build system needs unmodified output of grep.)
+$(error Please remove --color=always from your  $$GREP_OPTIONS)
+endif
+
 # Standard source directories.
 SRC_DOCS:= $(TOPDIR)docs
 # TODO: Enforce some kind of layering; only add include paths
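The GREP_OPTIONS guard above is there because the build pipes grep output into further text processing; --color=always injects ANSI escape sequences into those pipes. A hypothetical offending setup and its fix:

    # in the user's shell profile (breaks the build):
    export GREP_OPTIONS=--color=always
    # fix: drop it, or use a form that stays clean when piped
    unset GREP_OPTIONS          # or --color=auto, or a shell alias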
@@ -53,6 +62,7 @@
 SRC_TARGET_DIR := $(TOPDIR)build/target
 SRC_API_DIR := $(TOPDIR)prebuilts/sdk/api
 SRC_SYSTEM_API_DIR := $(TOPDIR)prebuilts/sdk/system-api
+SRC_TEST_API_DIR := $(TOPDIR)prebuilts/sdk/test-api
 
 # Some specific paths to tools
 SRC_DROIDDOC_DIR := $(TOPDIR)build/tools/droiddoc
@@ -86,6 +96,8 @@
 BUILD_NATIVE_TEST := $(BUILD_SYSTEM)/native_test.mk
 BUILD_NATIVE_BENCHMARK := $(BUILD_SYSTEM)/native_benchmark.mk
 BUILD_HOST_NATIVE_TEST := $(BUILD_SYSTEM)/host_native_test.mk
+BUILD_FUZZ_TEST := $(BUILD_SYSTEM)/fuzz_test.mk
+BUILD_HOST_FUZZ_TEST := $(BUILD_SYSTEM)/host_fuzz_test.mk
 
 BUILD_SHARED_TEST_LIBRARY := $(BUILD_SYSTEM)/shared_test_lib.mk
 BUILD_HOST_SHARED_TEST_LIBRARY := $(BUILD_SYSTEM)/host_shared_test_lib.mk
@@ -105,33 +117,27 @@
 # lines being executed, instead of a short message about
 # the kind of operation being done.
 SHOW_COMMANDS:= $(filter showcommands,$(MAKECMDGOALS))
+hide := $(if $(SHOW_COMMANDS),,@)
 
+################################################################
+# Tools needed in product configuration makefiles.
+################################################################
+NORMALIZE_PATH := build/tools/normalize_path.py
+
+# $(1): the paths to be normalized
+define normalize-paths
+$(if $(1),$(shell $(NORMALIZE_PATH) $(1)))
+endef
 
 # ###############################################################
 # Set common values
 # ###############################################################
 
-# These can be changed to modify both host and device modules.
-COMMON_GLOBAL_CFLAGS:= -DANDROID -fmessage-length=0 -W -Wall -Wno-unused -Winit-self -Wpointer-arith
-COMMON_RELEASE_CFLAGS:= -DNDEBUG -UDEBUG
-
-COMMON_GLOBAL_CPPFLAGS:= $(COMMON_GLOBAL_CFLAGS) -Wsign-promo -std=gnu++11
-COMMON_RELEASE_CPPFLAGS:= $(COMMON_RELEASE_CFLAGS)
-
-GLOBAL_CFLAGS_NO_OVERRIDE :=  \
-    -Werror=int-to-pointer-cast \
-    -Werror=pointer-to-int-cast \
-
-GLOBAL_CPPFLAGS_NO_OVERRIDE :=
-
 # Set the extensions used for various packages
 COMMON_PACKAGE_SUFFIX := .zip
 COMMON_JAVA_PACKAGE_SUFFIX := .jar
 COMMON_ANDROID_PACKAGE_SUFFIX := .apk
 
-# list of flags to turn specific warnings in to errors
-TARGET_ERROR_FLAGS := -Werror=return-type -Werror=non-virtual-dtor -Werror=address -Werror=sequence-point
-
 ifdef TMPDIR
 JAVA_TMPDIR_ARG := -Djava.io.tmpdir=$(TMPDIR)
 else
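Two helpers land in the hunk above: $(hide) prefixes recipes with "@" unless 'showcommands' is among the goals, and normalize-paths shells out to build/tools/normalize_path.py. Sketch of the first one, with a hypothetical module name:

    make libfoo                # recipes carry the "@" prefix, commands are hidden
    make showcommands libfoo   # SHOW_COMMANDS is non-empty, $(hide) expands to
                               # nothing, every command line is echoed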
@@ -227,9 +233,16 @@
 endif
 TARGET_CPU_ABI2 := $(strip $(TARGET_CPU_ABI2))
 
-# $(1): os/arch
-define select-android-config-h
-build/core/combo/include/arch/$(1)/AndroidConfig.h
+# Commands to generate .toc file common to ELF .so files.
+define _gen_toc_command_for_elf
+$(hide) ($($(PRIVATE_2ND_ARCH_VAR_PREFIX)$(PRIVATE_PREFIX)READELF) -d $(1) | grep SONAME || echo "No SONAME for $1") > $(2)
+$(hide) $($(PRIVATE_2ND_ARCH_VAR_PREFIX)$(PRIVATE_PREFIX)READELF) --dyn-syms $(1) | awk '{$$2=""; $$3=""; print}' >> $(2)
+endef
+
+# Commands to generate .toc file from Darwin dynamic library.
+define _gen_toc_command_for_macho
+$(hide) otool -l $(1) | grep LC_ID_DYLIB -A 5 > $(2)
+$(hide) nm -gP $(1) | cut -f1-2 -d" " | grep -v U$$ >> $(2)
 endef
 
 combo_target := HOST_
@@ -243,6 +256,19 @@
 include $(BUILD_SYSTEM)/combo/select.mk
 endif
 
+# Load the windows cross compiler under Linux
+ifdef HOST_CROSS_OS
+combo_target := HOST_CROSS_
+combo_2nd_arch_prefix :=
+include $(BUILD_SYSTEM)/combo/select.mk
+
+ifdef HOST_CROSS_2ND_ARCH
+combo_target := HOST_CROSS_
+combo_2nd_arch_prefix := $(HOST_CROSS_2ND_ARCH_VAR_PREFIX)
+include $(BUILD_SYSTEM)/combo/select.mk
+endif
+endif
+
 # on windows, the tools have .exe at the end, and we depend on the
 # host config stuff being done first
 
@@ -258,6 +284,7 @@
 endif
 
 include $(BUILD_SYSTEM)/ccache.mk
+include $(BUILD_SYSTEM)/goma.mk
 
 ifdef TARGET_PREFER_32_BIT
 TARGET_PREFER_32_BIT_APPS := true
@@ -328,20 +355,18 @@
   WITH_SYNTAX_CHECK :=
 endif
 
+# define clang/llvm versions and base directory.
+include $(BUILD_SYSTEM)/clang/versions.mk
+
 # Disable WITH_STATIC_ANALYZER and WITH_SYNTAX_CHECK if tool can't be found
-SYNTAX_TOOLS_PREFIX := prebuilts/misc/$(HOST_PREBUILT_TAG)/analyzer/bin
+SYNTAX_TOOLS_PREFIX := \
+    $(LLVM_PREBUILTS_BASE)/$(BUILD_OS)-x86/$(LLVM_PREBUILTS_VERSION)/tools/scan-build/libexec
 ifneq ($(strip $(WITH_STATIC_ANALYZER)),)
   ifeq ($(wildcard $(SYNTAX_TOOLS_PREFIX)/ccc-analyzer),)
     $(warning *** Disable WITH_STATIC_ANALYZER because $(SYNTAX_TOOLS_PREFIX)/ccc-analyzer does not exist)
     WITH_STATIC_ANALYZER :=
   endif
 endif
-ifneq ($(strip $(WITH_SYNTAX_CHECK)),)
-  ifeq ($(wildcard $(SYNTAX_TOOLS_PREFIX)/ccc-syntax),)
-    $(warning *** Disable WITH_SYNTAX_CHECK because $(SYNTAX_TOOLS_PREFIX)/ccc-syntax does not exist)
-    WITH_SYNTAX_CHECK :=
-  endif
-endif
 
 # WITH_STATIC_ANALYZER trumps WITH_SYNTAX_CHECK
 ifneq ($(strip $(WITH_STATIC_ANALYZER)),)
@@ -375,14 +400,104 @@
 endif
 endif
 
+# Set up PDK so we can use TARGET_BUILD_PDK to select prebuilt tools below
+.PHONY: pdk fusion
+pdk fusion: $(DEFAULT_GOAL)
+
+# What to build:
+# pdk fusion if:
+# 1) PDK_FUSION_PLATFORM_ZIP is passed in from the environment
+# or
+# 2) the platform.zip exists in the default location
+# or
+# 3) fusion is a command line build goal,
+#    PDK_FUSION_PLATFORM_ZIP is needed anyway, then do we need the 'fusion' goal?
+# otherwise pdk only if:
+# 1) pdk is a command line build goal
+# or
+# 2) TARGET_BUILD_PDK is passed in from the environment
+
+# if PDK_FUSION_PLATFORM_ZIP is specified, do not override.
+ifndef PDK_FUSION_PLATFORM_ZIP
+# Most PDK project paths should be using vendor/pdk/TARGET_DEVICE
+# but some legacy ones (e.g. mini_armv7a_neon generic PDK) were setup
+# with vendor/pdk/TARGET_PRODUCT.
+_pdk_fusion_default_platform_zip = $(strip \
+  $(wildcard vendor/pdk/$(TARGET_DEVICE)/$(TARGET_PRODUCT)-$(TARGET_BUILD_VARIANT)/platform/platform.zip) \
+  $(wildcard vendor/pdk/$(TARGET_DEVICE)/$(patsubst aosp_%,full_%,$(TARGET_PRODUCT))-$(TARGET_BUILD_VARIANT)/platform/platform.zip) \
+  $(wildcard vendor/pdk/$(TARGET_PRODUCT)/$(TARGET_PRODUCT)-$(TARGET_BUILD_VARIANT)/platform/platform.zip) \
+  $(wildcard vendor/pdk/$(TARGET_PRODUCT)/$(patsubst aosp_%,full_%,$(TARGET_PRODUCT))-$(TARGET_BUILD_VARIANT)/platform/platform.zip))
+ifneq (,$(_pdk_fusion_default_platform_zip))
+PDK_FUSION_PLATFORM_ZIP := $(word 1, $(_pdk_fusion_default_platform_zip))
+TARGET_BUILD_PDK := true
+endif # _pdk_fusion_default_platform_zip
+endif # !PDK_FUSION_PLATFORM_ZIP
+
+ifneq (,$(filter pdk fusion, $(MAKECMDGOALS)))
+TARGET_BUILD_PDK := true
+ifneq (,$(filter fusion, $(MAKECMDGOALS)))
+ifndef PDK_FUSION_PLATFORM_ZIP
+  $(error Specify PDK_FUSION_PLATFORM_ZIP to do a PDK fusion.)
+endif
+endif  # fusion
+endif  # pdk or fusion
+
+ifdef PDK_FUSION_PLATFORM_ZIP
+TARGET_BUILD_PDK := true
+ifeq (,$(wildcard $(PDK_FUSION_PLATFORM_ZIP)))
+  $(error Cannot find file $(PDK_FUSION_PLATFORM_ZIP).)
+endif
+endif
+
+BUILD_PLATFORM_ZIP := $(filter platform platform-java,$(MAKECMDGOALS))
+
+#
+# Tools that are prebuilts for TARGET_BUILD_APPS
+#
+
+ACP := $(HOST_OUT_EXECUTABLES)/acp
+AIDL := $(HOST_OUT_EXECUTABLES)/aidl
+AAPT := $(HOST_OUT_EXECUTABLES)/aapt
+AAPT2 := $(HOST_OUT_EXECUTABLES)/aapt2
+ZIPALIGN := $(HOST_OUT_EXECUTABLES)/zipalign
+SIGNAPK_JAR := $(HOST_OUT_JAVA_LIBRARIES)/signapk$(COMMON_JAVA_PACKAGE_SUFFIX)
+SIGNAPK_JNI_LIBRARY_PATH := $(HOST_OUT_SHARED_LIBRARIES)
+LLVM_RS_CC := $(HOST_OUT_EXECUTABLES)/llvm-rs-cc
+BCC_COMPAT := $(HOST_OUT_EXECUTABLES)/bcc_compat
+
+DX := $(HOST_OUT_EXECUTABLES)/dx
+MAINDEXCLASSES := $(HOST_OUT_EXECUTABLES)/mainDexClasses
+
+USE_PREBUILT_SDK_TOOLS_IN_PLACE := true
+
+# Override the definitions above for unbundled and PDK builds
+ifneq (,$(TARGET_BUILD_APPS)$(filter true,$(TARGET_BUILD_PDK)))
+prebuilt_sdk_tools := prebuilts/sdk/tools
+prebuilt_sdk_tools_bin := $(prebuilt_sdk_tools)/$(HOST_OS)/bin
+
+ACP := $(prebuilt_sdk_tools_bin)/acp
+AIDL := $(prebuilt_sdk_tools_bin)/aidl
+AAPT := $(prebuilt_sdk_tools_bin)/aapt
+AAPT2 := $(prebuilt_sdk_tools_bin)/aapt2
+ZIPALIGN := $(prebuilt_sdk_tools_bin)/zipalign
+SIGNAPK_JAR := $(prebuilt_sdk_tools)/lib/signapk$(COMMON_JAVA_PACKAGE_SUFFIX)
+# Use 64-bit libraries unconditionally because 32-bit JVMs are no longer supported
+SIGNAPK_JNI_LIBRARY_PATH := $(prebuilt_sdk_tools)/$(HOST_OS)/lib64
+
+DX := $(prebuilt_sdk_tools)/dx
+MAINDEXCLASSES := $(prebuilt_sdk_tools)/mainDexClasses
+
+# Don't use prebuilts in PDK
+ifneq ($(TARGET_BUILD_PDK),true)
+LLVM_RS_CC := $(prebuilt_sdk_tools_bin)/llvm-rs-cc
+BCC_COMPAT := $(prebuilt_sdk_tools_bin)/bcc_compat
+endif # TARGET_BUILD_PDK
+endif # TARGET_BUILD_APPS || TARGET_BUILD_PDK
+
 
 # ---------------------------------------------------------------
 # Generic tools.
 JACK := $(HOST_OUT_EXECUTABLES)/jack
-JACK_JAR := $(HOST_OUT_JAVA_LIBRARIES)/jack.jar
-JACK_LAUNCHER_JAR := $(HOST_OUT_JAVA_LIBRARIES)/jack-launcher.jar
-JILL_JAR := $(HOST_OUT_JAVA_LIBRARIES)/jill.jar
-JACK_MULTIDEX_DEFAULT_PREPROCESSOR := frameworks/multidex/library/resources/JACK-INF/legacyMultidexInstallation.jpp
 
 LEX := prebuilts/misc/$(BUILD_OS)-$(HOST_PREBUILT_ARCH)/flex/flex-2.5.39
 # The default PKGDATADIR built in the prebuilt bison is a relative path
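The large hunk above also points SDK tooling at checked-in prebuilts for unbundled (TARGET_BUILD_APPS) and PDK builds rather than freshly built host tools. A hedged example of how one variable resolves, assuming HOST_OS is linux:

    # platform build:
    AAPT := $(HOST_OUT_EXECUTABLES)/aapt
    # unbundled app / PDK build:
    AAPT := prebuilts/sdk/tools/linux/bin/aapt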
@@ -396,10 +511,16 @@
 YASM := prebuilts/misc/$(BUILD_OS)-$(HOST_PREBUILT_ARCH)/yasm/yasm
 
 DOXYGEN:= doxygen
-AAPT := $(HOST_OUT_EXECUTABLES)/aapt$(HOST_EXECUTABLE_SUFFIX)
-AIDL := $(HOST_OUT_EXECUTABLES)/aidl$(HOST_EXECUTABLE_SUFFIX)
+AIDL_CPP := $(HOST_OUT_EXECUTABLES)/aidl-cpp$(HOST_EXECUTABLE_SUFFIX)
+ifeq ($(HOST_OS),linux)
+BREAKPAD_DUMP_SYMS := $(HOST_OUT_EXECUTABLES)/dump_syms
+else
+# For non-supported hosts, do not generate breakpad symbols.
+BREAKPAD_GENERATE_SYMBOLS := false
+endif
 PROTOC := $(HOST_OUT_EXECUTABLES)/aprotoc$(HOST_EXECUTABLE_SUFFIX)
-SIGNAPK_JAR := $(HOST_OUT_JAVA_LIBRARIES)/signapk$(COMMON_JAVA_PACKAGE_SUFFIX)
+VTSC := $(HOST_OUT_EXECUTABLES)/vtsc$(HOST_EXECUTABLE_SUFFIX)
+DBUS_GENERATOR := $(HOST_OUT_EXECUTABLES)/dbus-binding-generator
 MKBOOTFS := $(HOST_OUT_EXECUTABLES)/mkbootfs$(HOST_EXECUTABLE_SUFFIX)
 MINIGZIP := $(HOST_OUT_EXECUTABLES)/minigzip$(HOST_EXECUTABLE_SUFFIX)
 ifeq (,$(strip $(BOARD_CUSTOM_MKBOOTIMG)))
@@ -410,14 +531,10 @@
 APICHECK := $(HOST_OUT_EXECUTABLES)/apicheck$(HOST_EXECUTABLE_SUFFIX)
 FS_GET_STATS := $(HOST_OUT_EXECUTABLES)/fs_get_stats$(HOST_EXECUTABLE_SUFFIX)
 MAKE_EXT4FS := $(HOST_OUT_EXECUTABLES)/make_ext4fs$(HOST_EXECUTABLE_SUFFIX)
+BLK_ALLOC_TO_BASE_FS := $(HOST_OUT_EXECUTABLES)/blk_alloc_to_base_fs$(HOST_EXECUTABLE_SUFFIX)
 MKEXTUSERIMG := $(HOST_OUT_EXECUTABLES)/mkuserimg.sh
-ifeq ($(HOST_OS),linux)
 MAKE_SQUASHFS := $(HOST_OUT_EXECUTABLES)/mksquashfs$(HOST_EXECUTABLE_SUFFIX)
 MKSQUASHFSUSERIMG := $(HOST_OUT_EXECUTABLES)/mksquashfsimage.sh
-else
-MAKE_SQUASHFS :=
-MKSQUASHFSUSERIMG :=
-endif
 MAKE_F2FS := $(HOST_OUT_EXECUTABLES)/make_f2fs$(HOST_EXECUTABLE_SUFFIX)
 MKF2FSUSERIMG := $(HOST_OUT_EXECUTABLES)/mkf2fsuserimg.sh
 SIMG2IMG := $(HOST_OUT_EXECUTABLES)/simg2img$(HOST_EXECUTABLE_SUFFIX)
@@ -427,33 +544,13 @@
 TUNE2FS := $(HOST_OUT_EXECUTABLES)/tune2fs$(HOST_EXECUTABLE_SUFFIX)
 E2FSCK := $(HOST_OUT_EXECUTABLES)/e2fsck$(HOST_EXECUTABLE_SUFFIX)
 JARJAR := $(HOST_OUT_JAVA_LIBRARIES)/jarjar.jar
+DATA_BINDING_COMPILER := $(HOST_OUT_JAVA_LIBRARIES)/databinding-compiler.jar
 
 ifeq ($(ANDROID_COMPILE_WITH_JACK),true)
 DEFAULT_JACK_ENABLED:=full
 else
 DEFAULT_JACK_ENABLED:=
 endif
-ifneq ($(strip $(ANDROID_JACK_VM)),)
-JACK_VM := $(ANDROID_JACK_VM)
-else
-JACK_VM := java
-endif
-# call jack
-#
-# $(1): vm arguments
-# $(2): jack perf arguments
-ifneq (,$(strip $(filter dist,$(MAKECMDGOALS))))
-JACK_SERVER_LOG_COMMAND := mkdir -p $(DIST_DIR)/logs/; SERVER_LOG=$(DIST_DIR)/logs/jack-server.log
-endif
-define call-jack
-$(JACK_SERVER_LOG_COMMAND) JACK_VM_COMMAND="$(JACK_VM) $(1) $(JAVA_TMPDIR_ARG) -jar $(JACK_LAUNCHER_JAR) " JACK_JAR="$(JACK_JAR)" $(JACK) $(2)
-endef
-$(LOCAL_INTERMEDIATE_TARGETS): PRIVATE_JACK_VM_ARGS := $(DEFAULT_JACK_VM_ARGS)
-ifneq ($(ANDROID_JACK_VM_ARGS),)
-DEFAULT_JACK_VM_ARGS := $(ANDROID_JACK_VM_ARGS)
-else
-DEFAULT_JACK_VM_ARGS := -Dfile.encoding=UTF-8 -Xms2560m -XX:+TieredCompilation
-endif
 ifneq ($(ANDROID_JACK_EXTRA_ARGS),)
 DEFAULT_JACK_EXTRA_ARGS := $(ANDROID_JACK_EXTRA_ARGS)
 else
@@ -462,12 +559,8 @@
 # Turn off jack warnings by default.
 DEFAULT_JACK_EXTRA_ARGS += --verbose error
 
-JILL := java -Xmx3500m -jar $(JILL_JAR)
 PROGUARD := external/proguard/bin/proguard.sh
 JAVATAGS := build/tools/java-event-log-tags.py
-LLVM_RS_CC := $(HOST_OUT_EXECUTABLES)/llvm-rs-cc$(HOST_EXECUTABLE_SUFFIX)
-BCC_COMPAT := $(HOST_OUT_EXECUTABLES)/bcc_compat$(HOST_EXECUTABLE_SUFFIX)
-LINT := prebuilts/sdk/tools/lint
 RMTYPEDEFS := $(HOST_OUT_EXECUTABLES)/rmtypedefs
 APPEND2SIMG := $(HOST_OUT_EXECUTABLES)/append2simg
 VERITY_SIGNER := $(HOST_OUT_EXECUTABLES)/verity_signer
@@ -475,13 +568,16 @@
 BOOT_SIGNER := $(HOST_OUT_EXECUTABLES)/boot_signer
 FUTILITY := prebuilts/misc/$(BUILD_OS)-$(HOST_PREBUILT_ARCH)/futility/futility
 VBOOT_SIGNER := prebuilts/misc/scripts/vboot_signer/vboot_signer.sh
+FEC := $(HOST_OUT_EXECUTABLES)/fec
 
-# ACP is always for the build OS, not for the host OS
-ACP := $(BUILD_OUT_EXECUTABLES)/acp$(BUILD_EXECUTABLE_SUFFIX)
+ifndef TARGET_BUILD_APPS
+ZIPTIME := $(HOST_OUT_EXECUTABLES)/ziptime$(HOST_EXECUTABLE_SUFFIX)
+endif
 
-# dx is java behind a shell script; no .exe necessary.
-DX := $(HOST_OUT_EXECUTABLES)/dx
-ZIPALIGN := $(HOST_OUT_EXECUTABLES)/zipalign$(HOST_EXECUTABLE_SUFFIX)
+# ijar converts a .jar file to a smaller .jar file which only has its
+# interfaces.
+IJAR := $(HOST_OUT_EXECUTABLES)/ijar$(BUILD_EXECUTABLE_SUFFIX)
+DEXDUMP := $(HOST_OUT_EXECUTABLES)/dexdump2$(BUILD_EXECUTABLE_SUFFIX)
 
 # relocation packer
 RELOCATION_PACKER := prebuilts/misc/$(BUILD_OS)-$(HOST_PREBUILT_ARCH)/relocation_packer/relocation_packer
@@ -493,20 +589,15 @@
 # Tool to merge AndroidManifest.xmls
 ANDROID_MANIFEST_MERGER := java -classpath prebuilts/devtools/tools/lib/manifest-merger.jar com.android.manifmerger.Main merge
 
-YACC_HEADER_SUFFIX:= .hpp
-
-# Don't use column under Windows, cygwin or not
-ifeq ($(HOST_OS),windows)
-COLUMN:= cat
-else
 COLUMN:= column
-endif
 
+# We may not have the right JAVA_HOME/PATH set up yet when this is run from envsetup.sh.
+ifneq ($(CALLED_FROM_SETUP),true)
 HOST_JDK_TOOLS_JAR:= $(shell $(BUILD_SYSTEM)/find-jdk-tools-jar.sh)
 
 ifneq ($(HOST_JDK_TOOLS_JAR),)
 ifeq ($(wildcard $(HOST_JDK_TOOLS_JAR)),)
-$(error Error: could not find jdk tools.jar, please check if your JDK was installed correctly)
+$(error Error: could not find jdk tools.jar at $(HOST_JDK_TOOLS_JAR), please check if your JDK was installed correctly)
 endif
 endif
 
@@ -515,6 +606,7 @@
 ifneq ($(filter 64-Bit, $(shell java -version 2>&1)),)
 HOST_JDK_IS_64BIT_VERSION := true
 endif
+endif  # CALLED_FROM_SETUP not true
 
 # It's called md5 on Mac OS and md5sum on Linux
 ifeq ($(HOST_OS),darwin)
@@ -539,6 +631,67 @@
 # Set up final options.
 # ###############################################################
 
+ifneq ($(COMMON_GLOBAL_CFLAGS)$(COMMON_GLOBAL_CPPFLAGS),)
+$(warning COMMON_GLOBAL_C(PP)FLAGS changed)
+$(info *** Device configurations are no longer allowed to change the global flags.)
+$(info *** COMMON_GLOBAL_CFLAGS: $(COMMON_GLOBAL_CFLAGS))
+$(info *** COMMON_GLOBAL_CPPFLAGS: $(COMMON_GLOBAL_CPPFLAGS))
+$(error bailing...)
+endif
+
+# These can be changed to modify both host and device modules.
+COMMON_GLOBAL_CFLAGS:= -DANDROID -fmessage-length=0 -W -Wall -Wno-unused -Winit-self -Wpointer-arith
+COMMON_RELEASE_CFLAGS:= -DNDEBUG -UDEBUG
+
+# Force gcc to always output color diagnostics.  Ninja will strip the ANSI
+# color codes if it is not running in a terminal.
+ifdef BUILDING_WITH_NINJA
+COMMON_GLOBAL_CFLAGS += -fdiagnostics-color
+endif
+
+COMMON_GLOBAL_CPPFLAGS:= -Wsign-promo
+COMMON_RELEASE_CPPFLAGS:=
+
+GLOBAL_CFLAGS_NO_OVERRIDE := \
+    -Werror=int-to-pointer-cast \
+    -Werror=pointer-to-int-cast \
+
+GLOBAL_CLANG_CFLAGS_NO_OVERRIDE := \
+    -Werror=address-of-temporary \
+    -Werror=null-dereference \
+    -Werror=return-type \
+
+GLOBAL_CPPFLAGS_NO_OVERRIDE :=
+
+# list of flags to turn specific warnings in to errors
+TARGET_ERROR_FLAGS := -Werror=return-type -Werror=non-virtual-dtor -Werror=address -Werror=sequence-point -Werror=date-time
+
+# We run gcc/clang with PWD=/proc/self/cwd to remove the $TOP
+# from the debug output. That way two builds in two different
+# directories will create the same output.
+# /proc doesn't exist on Darwin.
+ifeq ($(HOST_OS),linux)
+RELATIVE_PWD := PWD=/proc/self/cwd
+# Remove this useless prefix from the debug output.
+COMMON_GLOBAL_CFLAGS += -fdebug-prefix-map=/proc/self/cwd=
+else
+RELATIVE_PWD :=
+endif
+
+# Allow the C/C++ macros __DATE__ and __TIME__ to be set to the
+# build date and time, so that a build may be repeated.
+# Write the date and time to a file so that the command line
+# doesn't change every time, which would cause ninja to rebuild
+# the files.
+$(shell mkdir -p $(OUT_DIR) && \
+    $(DATE) "+%b %_d %Y" > $(OUT_DIR)/build_c_date.txt && \
+    $(DATE) +%T > $(OUT_DIR)/build_c_time.txt)
+BUILD_DATETIME_C_DATE := $$(cat $(OUT_DIR)/build_c_date.txt)
+BUILD_DATETIME_C_TIME := $$(cat $(OUT_DIR)/build_c_time.txt)
+ifeq ($(OVERRIDE_C_DATE_TIME),true)
+COMMON_GLOBAL_CFLAGS += -Wno-builtin-macro-redefined -D__DATE__="\"$(BUILD_DATETIME_C_DATE)\"" -D__TIME__=\"$(BUILD_DATETIME_C_TIME)\"
+endif
+
 HOST_GLOBAL_CFLAGS += $(COMMON_GLOBAL_CFLAGS)
 HOST_RELEASE_CFLAGS += $(COMMON_RELEASE_CFLAGS)
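On the reproducibility additions above: the wall-clock date and time are captured once into files under $(OUT_DIR), and only when OVERRIDE_C_DATE_TIME=true are __DATE__/__TIME__ pinned to those captured strings, so a repeated build can emit identical objects. A hypothetical expansion of the extra flags, assuming the default out/ directory:

    -Wno-builtin-macro-redefined \
        -D__DATE__="\"$(cat out/build_c_date.txt)\"" \
        -D__TIME__=\"$(cat out/build_c_time.txt)\"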
 
@@ -563,7 +716,6 @@
 # sure to only specify them for the target compilers checked in to
 # the source tree.
 TARGET_GLOBAL_CFLAGS += $(TARGET_ERROR_FLAGS)
-TARGET_GLOBAL_CPPFLAGS += $(TARGET_ERROR_FLAGS)
 
 HOST_GLOBAL_CFLAGS += $(HOST_RELEASE_CFLAGS)
 HOST_GLOBAL_CPPFLAGS += $(HOST_RELEASE_CPPFLAGS)
@@ -579,7 +731,6 @@
 $(TARGET_2ND_ARCH_VAR_PREFIX)TARGET_GLOBAL_LD_DIRS += -L$($(TARGET_2ND_ARCH_VAR_PREFIX)TARGET_OUT_INTERMEDIATE_LIBRARIES)
 $(TARGET_2ND_ARCH_VAR_PREFIX)TARGET_PROJECT_INCLUDES := $(TARGET_PROJECT_INCLUDES)
 $(TARGET_2ND_ARCH_VAR_PREFIX)TARGET_GLOBAL_CFLAGS += $(TARGET_ERROR_FLAGS)
-$(TARGET_2ND_ARCH_VAR_PREFIX)TARGET_GLOBAL_CPPFLAGS += $(TARGET_ERROR_FLAGS)
 $(TARGET_2ND_ARCH_VAR_PREFIX)TARGET_GLOBAL_CFLAGS += $($(TARGET_2ND_ARCH_VAR_PREFIX)TARGET_RELEASE_CFLAGS)
 $(TARGET_2ND_ARCH_VAR_PREFIX)TARGET_GLOBAL_CPPFLAGS += $($(TARGET_2ND_ARCH_VAR_PREFIX)TARGET_RELEASE_CPPFLAGS)
 endif
@@ -595,27 +746,54 @@
 $(HOST_2ND_ARCH_VAR_PREFIX)HOST_GLOBAL_CPPFLAGS += $($(HOST_2ND_ARCH_VAR_PREFIX)HOST_RELEASE_CPPFLAGS)
 endif
 
+ifdef HOST_CROSS_OS
+HOST_CROSS_GLOBAL_CFLAGS += $(filter-out $(HOST_CROSS_UNKNOWN_CFLAGS),$(COMMON_GLOBAL_CFLAGS))
+HOST_CROSS_RELEASE_CFLAGS += $(COMMON_RELEASE_CFLAGS)
+HOST_CROSS_GLOBAL_CPPFLAGS += $(COMMON_GLOBAL_CPPFLAGS)
+HOST_CROSS_RELEASE_CPPFLAGS += $(COMMON_RELEASE_CPPFLAGS)
+HOST_CROSS_GLOBAL_LD_DIRS += -L$(HOST_CROSS_OUT_INTERMEDIATE_LIBRARIES)
+HOST_CROSS_PROJECT_INCLUDES:= $(SRC_HEADERS) $(SRC_HOST_HEADERS) $(HOST_CROSS_OUT_HEADERS)
+HOST_CROSS_GLOBAL_CFLAGS += $(HOST_CROSS_RELEASE_CFLAGS)
+HOST_CROSS_GLOBAL_CPPFLAGS += $(HOST_CROSS_RELEASE_CPPFLAGS)
+
+ifdef HOST_CROSS_2ND_ARCH
+$(HOST_CROSS_2ND_ARCH_VAR_PREFIX)HOST_CROSS_GLOBAL_CFLAGS += $(filter-out $($(HOST_CROSS_2ND_ARCH_VAR_PREFIX)HOST_CROSS_UNKNOWN_CFLAGS),$(COMMON_GLOBAL_CFLAGS))
+$(HOST_CROSS_2ND_ARCH_VAR_PREFIX)HOST_CROSS_RELEASE_CFLAGS += $(COMMON_RELEASE_CFLAGS)
+$(HOST_CROSS_2ND_ARCH_VAR_PREFIX)HOST_CROSS_GLOBAL_CPPFLAGS += $(COMMON_GLOBAL_CPPFLAGS)
+$(HOST_CROSS_2ND_ARCH_VAR_PREFIX)HOST_CROSS_RELEASE_CPPFLAGS += $(COMMON_RELEASE_CPPFLAGS)
+$(HOST_CROSS_2ND_ARCH_VAR_PREFIX)HOST_CROSS_GLOBAL_LD_DIRS += -L$($(HOST_CROSS_2ND_ARCH_VAR_PREFIX)HOST_CROSS_OUT_INTERMEDIATE_LIBRARIES)
+$(HOST_CROSS_2ND_ARCH_VAR_PREFIX)HOST_CROSS_PROJECT_INCLUDES:= $(SRC_HEADERS) $(SRC_HOST_HEADERS) $($(HOST_CROSS_2ND_ARCH_VAR_PREFIX)HOST_CROSS_OUT_HEADERS)
+$(HOST_CROSS_2ND_ARCH_VAR_PREFIX)HOST_CROSS_GLOBAL_CFLAGS += $($(HOST_CROSS_2ND_ARCH_VAR_PREFIX)HOST_CROSS_RELEASE_CFLAGS)
+$(HOST_CROSS_2ND_ARCH_VAR_PREFIX)HOST_CROSS_GLOBAL_CPPFLAGS += $($(HOST_CROSS_2ND_ARCH_VAR_PREFIX)HOST_CROSS_RELEASE_CPPFLAGS)
+endif
+endif
+
+ifdef BRILLO
+# Add a C define that identifies Brillo targets. __BRILLO__ should only be used
+# to differentiate between Brillo and non-Brillo-but-Android environments. Use
+# __ANDROID__ instead to test if something is being built in an Android-derived
+# environment (including Brillo) as opposed to an entirely different
+# environment (e.g. Chrome OS).
+TARGET_GLOBAL_CFLAGS += -D__BRILLO__
+ifdef TARGET_2ND_ARCH
+$(TARGET_2ND_ARCH_VAR_PREFIX)TARGET_GLOBAL_CFLAGS += -D__BRILLO__
+endif
+endif
+
 # allow overriding default Java libraries on a per-target basis
 ifeq ($(TARGET_DEFAULT_JAVA_LIBRARIES),)
-  TARGET_DEFAULT_JAVA_LIBRARIES := core-libart core-junit ext framework
+  TARGET_DEFAULT_JAVA_LIBRARIES := core-oj core-libart core-junit ext framework okhttp
 endif
 
 # Flags for DEX2OAT
+first_non_empty_of_three = $(if $(1),$(1),$(if $(2),$(2),$(3)))
 DEX2OAT_TARGET_ARCH := $(TARGET_ARCH)
-ifeq ($(TARGET_CPU_VARIANT),)
-ifeq ($(TARGET_ARCH_VARIANT),)
-DEX2OAT_TARGET_CPU_VARIANT := default
-else
-DEX2OAT_TARGET_CPU_VARIANT := $(TARGET_ARCH_VARIANT)
-endif
-else
-DEX2OAT_TARGET_CPU_VARIANT := $(TARGET_CPU_VARIANT)
-endif
+DEX2OAT_TARGET_CPU_VARIANT := $(call first_non_empty_of_three,$(TARGET_CPU_VARIANT),$(TARGET_ARCH_VARIANT),default)
 DEX2OAT_TARGET_INSTRUCTION_SET_FEATURES := default
 
 ifdef TARGET_2ND_ARCH
 $(TARGET_2ND_ARCH_VAR_PREFIX)DEX2OAT_TARGET_ARCH := $(TARGET_2ND_ARCH)
-$(TARGET_2ND_ARCH_VAR_PREFIX)DEX2OAT_TARGET_CPU_VARIANT := $(TARGET_2ND_CPU_VARIANT)
+$(TARGET_2ND_ARCH_VAR_PREFIX)DEX2OAT_TARGET_CPU_VARIANT := $(call first_non_empty_of_three,$(TARGET_2ND_CPU_VARIANT),$(TARGET_2ND_ARCH_VARIANT),default)
 $(TARGET_2ND_ARCH_VAR_PREFIX)DEX2OAT_TARGET_INSTRUCTION_SET_FEATURES := default
 endif
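
For reference, first_non_empty_of_three simply returns the first of its three arguments that is non-empty, so the CPU variant now falls back from TARGET_CPU_VARIANT to TARGET_ARCH_VARIANT to "default". A minimal sketch, assuming hypothetical variant values:

    # Illustrative only; the variant values are hypothetical.
    TARGET_CPU_VARIANT :=
    TARGET_ARCH_VARIANT := armv7-a-neon
    # Prints "armv7-a-neon"; would print "default" if both variables were empty.
    $(info $(call first_non_empty_of_three,$(TARGET_CPU_VARIANT),$(TARGET_ARCH_VARIANT),default))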
 
@@ -657,13 +835,15 @@
     $(patsubst $(HISTORICAL_SDK_VERSIONS_ROOT)/%/android.jar,%, \
     $(wildcard $(HISTORICAL_SDK_VERSIONS_ROOT)/*/android.jar)))
 
-# We don't have prebuilt system_current SDK yet.
-TARGET_AVAILABLE_SDK_VERSIONS := $(TARGET_AVAILABLE_SDK_VERSIONS)
+# We don't have a prebuilt test_current SDK yet.
+TARGET_AVAILABLE_SDK_VERSIONS := test_current $(TARGET_AVAILABLE_SDK_VERSIONS)
 
 INTERNAL_PLATFORM_API_FILE := $(TARGET_OUT_COMMON_INTERMEDIATES)/PACKAGING/public_api.txt
 INTERNAL_PLATFORM_REMOVED_API_FILE := $(TARGET_OUT_COMMON_INTERMEDIATES)/PACKAGING/removed.txt
 INTERNAL_PLATFORM_SYSTEM_API_FILE := $(TARGET_OUT_COMMON_INTERMEDIATES)/PACKAGING/system-api.txt
 INTERNAL_PLATFORM_SYSTEM_REMOVED_API_FILE := $(TARGET_OUT_COMMON_INTERMEDIATES)/PACKAGING/system-removed.txt
+INTERNAL_PLATFORM_TEST_API_FILE := $(TARGET_OUT_COMMON_INTERMEDIATES)/PACKAGING/test-api.txt
+INTERNAL_PLATFORM_TEST_REMOVED_API_FILE := $(TARGET_OUT_COMMON_INTERMEDIATES)/PACKAGING/test-removed.txt
 
 # This is the standard way to name a directory containing prebuilt target
 # objects. E.g., prebuilt/$(TARGET_PREBUILT_TAG)/libc.so
@@ -677,9 +857,10 @@
 RS_PREBUILT_CLCORE := prebuilts/sdk/renderscript/lib/$(TARGET_ARCH)/librsrt_$(TARGET_ARCH).bc
 RS_PREBUILT_COMPILER_RT := prebuilts/sdk/renderscript/lib/$(TARGET_ARCH)/libcompiler_rt.a
 ifeq (true,$(TARGET_IS_64_BIT))
-RS_PREBUILT_LIBPATH := -L prebuilts/ndk/9/platforms/android-21/arch-$(TARGET_ARCH)/usr/lib
+RS_PREBUILT_LIBPATH := -L prebuilts/ndk/current/platforms/android-21/arch-$(TARGET_ARCH)/usr/lib64 \
+                       -L prebuilts/ndk/current/platforms/android-21/arch-$(TARGET_ARCH)/usr/lib
 else
-RS_PREBUILT_LIBPATH := -L prebuilts/ndk/8/platforms/android-9/arch-$(TARGET_ARCH)/usr/lib
+RS_PREBUILT_LIBPATH := -L prebuilts/ndk/current/platforms/android-9/arch-$(TARGET_ARCH)/usr/lib
 endif
 
 # API Level lists for Renderscript Compat lib.
diff --git a/core/config_sanitizers.mk b/core/config_sanitizers.mk
index 7eb1c89..6e96880 100644
--- a/core/config_sanitizers.mk
+++ b/core/config_sanitizers.mk
@@ -4,43 +4,64 @@
 
 my_sanitize := $(strip $(LOCAL_SANITIZE))
 
-# Keep compatibility for LOCAL_ADDRESS_SANITIZER until all targets have moved to
-# `LOCAL_SANITIZE := address`.
-ifeq ($(strip $(LOCAL_ADDRESS_SANITIZER)),true)
-  my_sanitize += address
+# SANITIZE_HOST is only in effect if the module is already using clang (host
+# modules that haven't set `LOCAL_CLANG := false` and device modules that
+# have set `LOCAL_CLANG := true`).
+my_global_sanitize :=
+ifeq ($(my_clang),true)
+  ifdef LOCAL_IS_HOST_MODULE
+    my_global_sanitize := $(strip $(SANITIZE_HOST))
+
+    # SANITIZE_HOST=true is a deprecated way to say SANITIZE_HOST=address.
+    my_global_sanitize := $(subst true,address,$(my_global_sanitize))
+  else
+    my_global_sanitize := $(strip $(SANITIZE_TARGET))
+  endif
 endif
 
-# And `LOCAL_SANITIZE := never`.
-ifeq ($(strip $(LOCAL_ADDRESS_SANITIZER)),false)
-  my_sanitize := never
+# The sanitizer specified by the environment wins over the module.
+ifneq ($(my_global_sanitize),)
+  my_sanitize := $(my_global_sanitize)
 endif
 
 # Don't apply sanitizers to NDK code.
 ifdef LOCAL_SDK_VERSION
-  my_sanitize := never
+  my_sanitize :=
 endif
 
-# Configure SANITIZE_HOST.
-ifdef LOCAL_IS_HOST_MODULE
-  ifeq ($(my_sanitize),)
-    my_sanitize := $(strip $(SANITIZE_HOST))
+# Never always wins.
+ifeq ($(LOCAL_SANITIZE),never)
+  my_sanitize :=
+endif
 
-    # SANTIZIZE_HOST=true is a deprecated way to say SANITIZE_HOST=address.
-    ifeq ($(my_sanitize),true)
-      my_sanitize := address
-    endif
-
-    # SANITIZE_HOST is only in effect if the module is already using clang (host
-    # modules that haven't set `LOCAL_CLANG := false` and device modules that
-    # have set `LOCAL_CLANG := true`.
-    ifneq ($(my_clang),true)
-      my_sanitize :=
+# TSAN is not supported on 32-bit architectures. For non-multilib cases, make
+# its use an error. For multilib cases, don't use it for the 32-bit case.
+ifneq ($(filter thread,$(my_sanitize)),)
+  ifeq ($(my_32_64_bit_suffix),32)
+    ifeq ($(my_module_multilib),both)
+        my_sanitize := $(filter-out thread,$(my_sanitize))
+    else
+        $(error $(LOCAL_PATH): $(LOCAL_MODULE): TSAN cannot be used for 32-bit modules.)
     endif
   endif
 endif
 
-ifeq ($(my_sanitize),never)
-  my_sanitize :=
+# Undefined symbols can occur if a non-sanitized library links
+# sanitized static libraries. That's OK, because the executable
+# always depends on the ASan runtime library, which defines these
+# symbols.
+ifneq ($(strip $(SANITIZE_TARGET)),)
+  ifndef LOCAL_IS_HOST_MODULE
+    ifeq ($(LOCAL_MODULE_CLASS),SHARED_LIBRARIES)
+      ifeq ($(my_sanitize),)
+        my_allow_undefined_symbols := true
+      endif
+    endif
+    # Workaround for a bug in AddressSanitizer that breaks stack unwinding.
+    # https://code.google.com/p/address-sanitizer/issues/detail?id=387
+    # Revert when external/compiler-rt is updated past r236014.
+    LOCAL_PACK_MODULE_RELOCATIONS := false
+  endif
 endif
 
 # Sanitizers can only be used with clang.
@@ -52,21 +73,30 @@
 
 ifneq ($(filter default-ub,$(my_sanitize)),)
   my_sanitize := $(CLANG_DEFAULT_UB_CHECKS)
-  my_ldlibs += -ldl
+endif
 
-  ifdef LOCAL_IS_HOST_MODULE
-    my_cflags += -fno-sanitize-recover=all
-  else
-    my_cflags += -fsanitize-undefined-trap-on-error
+ifneq ($(filter coverage,$(my_sanitize)),)
+  ifeq ($(filter address,$(my_sanitize)),)
+    $(error $(LOCAL_PATH): $(LOCAL_MODULE): Use of 'coverage' also requires 'address')
   endif
+  my_cflags += -fsanitize-coverage=edge,indirect-calls,8bit-counters,trace-cmp
+  my_sanitize := $(filter-out coverage,$(my_sanitize))
 endif
 
 ifneq ($(my_sanitize),)
-  fsanitize_arg := $(subst $(space),$(comma),$(my_sanitize)),
+  fsanitize_arg := $(subst $(space),$(comma),$(my_sanitize))
   my_cflags += -fsanitize=$(fsanitize_arg)
 
   ifdef LOCAL_IS_HOST_MODULE
+    my_cflags += -fno-sanitize-recover=all
     my_ldflags += -fsanitize=$(fsanitize_arg)
+    my_ldlibs += -lrt -ldl
+  else
+    ifeq ($(filter address,$(my_sanitize)),)
+      my_cflags += -fsanitize-trap=all
+      my_cflags += -ftrap-function=abort
+    endif
+    my_shared_libraries += libdl
   endif
 endif
 
@@ -78,22 +108,29 @@
   ifdef LOCAL_IS_HOST_MODULE
     # -nodefaultlibs (provided with libc++) prevents the driver from linking
     # libraries needed with -fsanitize=address. http://b/18650275 (WAI)
-    my_ldlibs += -lm -ldl -lpthread
+    my_ldlibs += -lm -lpthread
     my_ldflags += -Wl,--no-as-needed
   else
+    my_cflags += -mllvm -asan-globals=0
     # ASan runtime library must be the first in the link order.
     my_shared_libraries := $($(LOCAL_2ND_ARCH_VAR_PREFIX)ADDRESS_SANITIZER_RUNTIME_LIBRARY) \
                            $(my_shared_libraries) \
                            $(ADDRESS_SANITIZER_CONFIG_EXTRA_SHARED_LIBRARIES)
     my_static_libraries += $(ADDRESS_SANITIZER_CONFIG_EXTRA_STATIC_LIBRARIES)
-    my_ldflags += -Wl,-rpath,$($(LOCAL_2ND_ARCH_VAR_PREFIX)ADDRESS_SANITIZER_RPATH)
+
+    my_linker := $($(LOCAL_2ND_ARCH_VAR_PREFIX)ADDRESS_SANITIZER_LINKER)
+    # Make sure linker_asan gets installed.
+    $(LOCAL_INSTALLED_MODULE) : | $(PRODUCT_OUT)$($(LOCAL_2ND_ARCH_VAR_PREFIX)ADDRESS_SANITIZER_LINKER)
   endif
 endif
 
 ifneq ($(filter undefined,$(my_sanitize)),)
-  ifdef LOCAL_IS_HOST_MODULE
-    my_ldlibs += -ldl
-  else
+  ifndef LOCAL_IS_HOST_MODULE
     $(error ubsan is not yet supported on the target)
   endif
 endif
+
+ifneq ($(strip $(LOCAL_SANITIZE_RECOVER)),)
+  recover_arg := $(subst $(space),$(comma),$(LOCAL_SANITIZE_RECOVER)),
+  my_cflags += -fsanitize-recover=$(recover_arg)
+endif
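
Taken together, a module now opts in through LOCAL_SANITIZE rather than the old LOCAL_ADDRESS_SANITIZER switch. A minimal Android.mk sketch, assuming a hypothetical module:

    include $(CLEAR_VARS)
    LOCAL_MODULE := sanitized_example       # hypothetical module name
    LOCAL_SRC_FILES := example.cpp          # hypothetical source
    LOCAL_CLANG := true                     # sanitizers can only be used with clang
    LOCAL_SANITIZE := address coverage      # 'coverage' is only accepted together with 'address'
    LOCAL_SANITIZE_RECOVER := address       # optional: keep running after a report
    # LOCAL_SANITIZE := never               # would opt the module out even when SANITIZE_TARGET is set
    include $(BUILD_EXECUTABLE)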
diff --git a/core/configure_local_jack.mk b/core/configure_local_jack.mk
index 98b13d2..2270c88 100644
--- a/core/configure_local_jack.mk
+++ b/core/configure_local_jack.mk
@@ -17,7 +17,10 @@
 ifdef ANDROID_FORCE_JACK_ENABLED
 LOCAL_JACK_ENABLED := $(ANDROID_FORCE_JACK_ENABLED)
 endif
+
 LOCAL_JACK_ENABLED := $(strip $(LOCAL_JACK_ENABLED))
+LOCAL_MODULE := $(strip $(LOCAL_MODULE))
+
 ifneq ($(LOCAL_JACK_ENABLED),full)
 ifneq ($(LOCAL_JACK_ENABLED),incremental)
 ifdef LOCAL_JACK_ENABLED
@@ -28,3 +31,9 @@
 LOCAL_JACK_ENABLED :=
 endif
 endif
+
+ifdef $(LOCAL_MODULE).JACK_VERSION
+LOCAL_JACK_VERSION := $($(LOCAL_MODULE).JACK_VERSION)
+else
+LOCAL_JACK_VERSION := $(JACK_DEFAULT_VERSION)
+endif
diff --git a/core/copy_headers.mk b/core/copy_headers.mk
index e16560f..7d5a5d9 100644
--- a/core/copy_headers.mk
+++ b/core/copy_headers.mk
@@ -18,8 +18,10 @@
       $(if $(LOCAL_COPY_HEADERS_TO),\
         $($(my_prefix)OUT_HEADERS)/$(LOCAL_COPY_HEADERS_TO)/$(notdir $(header)),\
         $($(my_prefix)OUT_HEADERS)/$(notdir $(header)))) \
-  $(eval $(call copy-one-header,$(_chFrom),$(_chTo))) \
-  $(eval all_copied_headers: $(_chTo)) \
+  $(eval ALL_COPIED_HEADERS.$(_chTo).MAKEFILE += $(LOCAL_MODULE_MAKEFILE)) \
+  $(eval ALL_COPIED_HEADERS.$(_chTo).SRC += $(_chFrom)) \
+  $(if $(filter $(_chTo),$(ALL_COPIED_HEADERS)),, \
+      $(eval ALL_COPIED_HEADERS += $(_chTo))) \
  )
 _chFrom :=
 _chTo :=
diff --git a/core/cxx_stl_setup.mk b/core/cxx_stl_setup.mk
index 3450b2c..37be1f7 100644
--- a/core/cxx_stl_setup.mk
+++ b/core/cxx_stl_setup.mk
@@ -1,6 +1,6 @@
 #############################################################
 ## Set up flags based on LOCAL_CXX_STL.
-## Input variables: LOCAL_CXX_STL
+## Input variables: LOCAL_CXX_STL, my_prefix
 ## Output variables: my_cflags, my_c_includes, my_shared_libraries, etc.
 #############################################################
 
@@ -8,29 +8,53 @@
 ifeq ($(strip $(LOCAL_CXX_STL)),default)
     ifndef LOCAL_SDK_VERSION
         # Platform code. Select the appropriate STL.
-        ifndef USE_MINGW
-            my_cxx_stl := libc++
-            ifdef LOCAL_IS_HOST_MODULE
-                ifneq (,$(BUILD_HOST_static))
-                    my_cxx_stl := libc++_static
-                endif
+        my_cxx_stl := libc++
+        ifdef LOCAL_IS_HOST_MODULE
+            ifneq (,$(BUILD_HOST_static))
+                my_cxx_stl := libc++_static
             endif
-        else
-            # libc++ is not supported on mingw.
-            my_cxx_stl := libstdc++
+
+            ifeq ($($(my_prefix)OS),windows)
+                # libc++ is not supported on mingw.
+                my_cxx_stl := libstdc++
+            endif
         endif
     else
         my_cxx_stl := ndk
     endif
 else
     my_cxx_stl := $(strip $(LOCAL_CXX_STL))
+    ifdef LOCAL_SDK_VERSION
+        # The NDK has historically used LOCAL_NDK_STL_VARIANT to specify the
+        # STL. An Android.mk that specifies both LOCAL_CXX_STL and
+        # LOCAL_SDK_VERSION will incorrectly try (and most likely fail) to use
+        # the platform STL in an NDK binary. Emit an error to direct the user
+        # toward the correct option.
+        #
+        # Note that we could also accept LOCAL_CXX_STL as an alias for
+        # LOCAL_NDK_STL_VARIANT (and in fact soong does use the same name), but
+        # the two options use different names for the STLs.
+        $(error $(LOCAL_PATH): $(LOCAL_MODULE): Must use LOCAL_NDK_STL_VARIANT rather than LOCAL_CXX_STL for NDK binaries)
+    endif
+    ifdef LOCAL_IS_HOST_MODULE
+        ifeq ($($(my_prefix)OS),windows)
+            ifneq ($(filter $(my_cxx_stl),libc++ libc++_static),)
+                # libc++ is not supported on mingw.
+                my_cxx_stl := libstdc++
+            endif
+        endif
+    endif
 endif
 
 # Yes, this is actually what the clang driver does.
-HOST_linux_dynamic_gcclibs := -lgcc_s -lgcc -lc -lgcc_s -lgcc
-HOST_linux_static_gcclibs := -Wl,--start-group -lgcc -lgcc_eh -lc -Wl,--end-group
-HOST_darwin_dynamic_gcclibs := -lc -lSystem
-HOST_darwin_static_gcclibs := NO_STATIC_HOST_BINARIES_ON_DARWIN
+linux_dynamic_gcclibs := -lgcc_s -lgcc -lc -lgcc_s -lgcc
+linux_static_gcclibs := -Wl,--start-group -lgcc -lgcc_eh -lc -Wl,--end-group
+darwin_dynamic_gcclibs := -lc -lSystem
+darwin_static_gcclibs := NO_STATIC_HOST_BINARIES_ON_DARWIN
+windows_dynamic_gcclibs := \
+    -lmsvcr110 -lmingw32 -lgcc -lmoldname -lmingwex -lmsvcrt -ladvapi32 \
+    -lshell32 -luser32 -lkernel32 -lmingw32 -lgcc -lmoldname -lmingwex -lmsvcrt
+windows_static_gcclibs := NO_STATIC_HOST_BINARIES_ON_WINDOWS
 
 my_link_type := dynamic
 ifdef LOCAL_IS_HOST_MODULE
@@ -48,23 +72,24 @@
 
 ifneq ($(filter $(my_cxx_stl),libc++ libc++_static),)
     my_cflags += -D_USING_LIBCXX
-    my_c_includes += external/libcxx/include
-    ifeq ($(my_cxx_stl),libc++)
-        my_shared_libraries += libc++
+
+    # Note that the structure of this means that LOCAL_CXX_STL := libc++ will
+    # use the static libc++ for static executables.
+    ifeq ($(my_link_type),dynamic)
+        ifeq ($(my_cxx_stl),libc++)
+            my_shared_libraries += libc++
+        else
+            my_static_libraries += libc++_static
+        endif
     else
         my_static_libraries += libc++_static
-        ifndef LOCAL_IS_HOST_MODULE
-            ifeq ($(LOCAL_FORCE_STATIC_EXECUTABLE),true)
-                my_static_libraries += libm libc libdl
-            endif
-        endif
     endif
 
     ifdef LOCAL_IS_HOST_MODULE
         my_cppflags += -nostdinc++
         my_ldflags += -nodefaultlibs
         my_ldlibs += -lpthread -lm
-        my_ldlibs += $($(my_prefix)$(HOST_OS)_$(my_link_type)_gcclibs)
+        my_ldlibs += $($($(my_prefix)OS)_$(my_link_type)_gcclibs)
     else
         ifeq (arm,$($(my_prefix)$(LOCAL_2ND_ARCH_VAR_PREFIX)ARCH))
             my_static_libraries += libunwind_llvm
@@ -72,16 +97,13 @@
         endif
 
         ifeq ($(my_link_type),static)
-            my_static_libraries += libdl
+            my_static_libraries += libm libc libdl
         else
             my_shared_libraries += libdl
         endif
     endif
 else ifeq ($(my_cxx_stl),ndk)
     # Using an NDK STL. Handled in binary.mk.
-    ifndef LOCAL_IS_HOST_MODULE
-        my_system_shared_libraries += libstdc++
-    endif
 else ifeq ($(my_cxx_stl),libstdc++)
     # Using bionic's basic libstdc++. Not actually an STL. Only around until the
     # tree is in good enough shape to not need it.
@@ -94,7 +116,7 @@
     ifdef LOCAL_IS_HOST_MODULE
         my_cppflags += -nostdinc++
         my_ldflags += -nodefaultlibs
-        my_ldlibs += $($(my_prefix)$(HOST_OS)_$(my_link_type)_gcclibs)
+        my_ldlibs += $($($(my_prefix)OS)_$(my_link_type)_gcclibs)
     endif
 else
     $(error $(LOCAL_PATH): $(LOCAL_MODULE): $(my_cxx_stl) is not a supported STL.)
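
The practical upshot of the new LOCAL_SDK_VERSION check is that NDK modules pick their STL via LOCAL_NDK_STL_VARIANT while platform modules may still set LOCAL_CXX_STL. A hedged sketch with hypothetical module names and an illustrative variant value:

    # NDK binary: setting LOCAL_CXX_STL here would now trigger the error above.
    include $(CLEAR_VARS)
    LOCAL_MODULE := ndk_stl_example          # hypothetical
    LOCAL_SRC_FILES := example.cpp
    LOCAL_SDK_VERSION := 21
    LOCAL_NDK_STL_VARIANT := c++_static      # illustrative NDK STL choice
    include $(BUILD_EXECUTABLE)

    # Platform binary: LOCAL_CXX_STL may be set explicitly; "default" selects libc++.
    include $(CLEAR_VARS)
    LOCAL_MODULE := platform_stl_example     # hypothetical
    LOCAL_SRC_FILES := example.cpp
    LOCAL_CXX_STL := libc++_static
    include $(BUILD_EXECUTABLE)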
diff --git a/core/definitions.mk b/core/definitions.mk
index 9dea18c..5d24b67 100644
--- a/core/definitions.mk
+++ b/core/definitions.mk
@@ -87,11 +87,18 @@
 $(TARGET_2ND_ARCH_VAR_PREFIX)TARGET_DEPENDENCIES_ON_SHARED_LIBRARIES :=
 HOST_DEPENDENCIES_ON_SHARED_LIBRARIES :=
 $(HOST_2ND_ARCH_VAR_PREFIX)HOST_DEPENDENCIES_ON_SHARED_LIBRARIES :=
+HOST_CROSS_DEPENDENCIES_ON_SHARED_LIBRARIES :=
+$(HOST_CROSS_2ND_ARCH_VAR_PREFIX)HOST_CROSS_DEPENDENCIES_ON_SHARED_LIBRARIES :=
 
 # Generated class file names for Android resource.
 # They are escaped and quoted so can be passed safely to a bash command.
 ANDROID_RESOURCE_GENERATED_CLASSES := 'R.class' 'R$$*.class' 'Manifest.class' 'Manifest$$*.class'
 
+# Display names for various build targets
+TARGET_DISPLAY := target
+HOST_DISPLAY := host
+HOST_CROSS_DISPLAY := host cross
+
 ###########################################################
 ## Debugging; prints a variable list to stdout
 ###########################################################
@@ -126,6 +133,7 @@
 define my-dir
 $(strip \
   $(eval LOCAL_MODULE_MAKEFILE := $$(lastword $$(MAKEFILE_LIST))) \
+  $(eval LOCAL_MODULE_MAKEFILE_DEP := $(if $(BUILDING_WITH_NINJA),,$$(LOCAL_MODULE_MAKEFILE))) \
   $(if $(filter $(BUILD_SYSTEM)/% $(OUT_DIR)/%,$(LOCAL_MODULE_MAKEFILE)), \
     $(error my-dir must be called before including any other makefile.) \
    , \
@@ -135,11 +143,27 @@
 endef
 
 ###########################################################
+## Remove any makefiles that are being handled by soong
+###########################################################
+ifeq ($(USE_SOONG),true)
+define filter-soong-makefiles
+$(foreach mk,$(1),\
+  $(if $(wildcard $(patsubst %/Android.mk,%/Android.bp,$(mk))),\
+    $(info skipping $(mk) ...),\
+    $(mk)))
+endef
+else
+define filter-soong-makefiles
+$(1)
+endef
+endif
+
+###########################################################
 ## Retrieve a list of all makefiles immediately below some directory
 ###########################################################
 
 define all-makefiles-under
-$(wildcard $(1)/*/Android.mk)
+$(sort $(call filter-soong-makefiles,$(wildcard $(1)/*/Android.mk)))
 endef
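
When USE_SOONG is true, filter-soong-makefiles drops every Android.mk that has a sibling Android.bp, so the helpers here only descend into directories Soong does not already cover. A small sketch of the effect, assuming hypothetical paths:

    # Suppose external/foo/ has both Android.mk and Android.bp,
    # while external/bar/ only has an Android.mk.
    example_mks := external/foo/Android.mk external/bar/Android.mk
    # With USE_SOONG=true this keeps only external/bar/Android.mk
    # and prints "skipping external/foo/Android.mk ..." for the other.
    $(info $(call filter-soong-makefiles,$(example_mks)))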
 
 ###########################################################
@@ -150,8 +174,9 @@
 # $(1): directory to search under
 # Ignores $(1)/Android.mk
 define first-makefiles-under
-$(shell build/tools/findleaves.py $(FIND_LEAVES_EXCLUDES) \
-        --mindepth=2 $(1) Android.mk)
+$(call filter-soong-makefiles,\
+  $(shell build/tools/findleaves.py $(FIND_LEAVES_EXCLUDES) \
+        --mindepth=2 $(1) Android.mk))
 endef
 
 ###########################################################
@@ -171,7 +196,8 @@
 
 # $(1): List of directories to look for under this directory
 define all-named-subdir-makefiles
-$(wildcard $(addsuffix /Android.mk, $(addprefix $(call my-dir)/,$(1))))
+$(sort $(call filter-soong-makefiles,\
+  $(wildcard $(addsuffix /Android.mk, $(addprefix $(call my-dir)/,$(1))))))
 endef
 
 ###########################################################
@@ -294,6 +320,24 @@
 endef
 
 ###########################################################
+## Find all files named "*.vts" under the named directories,
+## which must be relative to $(LOCAL_PATH).  The returned list
+## is relative to $(LOCAL_PATH).
+###########################################################
+
+define all-vts-files-under
+$(call all-named-files-under,*.vts,$(1))
+endef
+
+###########################################################
+## Find all of the "*.vts" files under $(LOCAL_PATH).
+###########################################################
+
+define all-subdir-vts-files
+$(call all-vts-files-under,.)
+endef
+
+###########################################################
 ## Find all of the logtags files under the named directories.
 ## Meant to be used like:
 ##    SRC_FILES := $(call all-logtags-files-under,src)
@@ -381,7 +425,7 @@
 
 define find-subdir-assets
 $(sort $(if $(1),$(patsubst ./%,%, \
-	$(shell if [ -d $(1) ] ; then cd $(1) ; find ./ -not -name '.*' -and -type f -and -not -type l ; fi)), \
+	$(shell if [ -d $(1) ] ; then cd $(1) ; find -L ./ -not -name '.*' -and -type f -and -not -type l ; fi)), \
 	$(warning Empty argument supplied to find-subdir-assets) \
 ))
 endef
@@ -424,7 +468,7 @@
 
 define find-parent-file
 $(strip \
-  $(eval _fpf := $(wildcard $(foreach f, $(2), $(strip $(1))/$(f)))) \
+  $(eval _fpf := $(sort $(wildcard $(foreach f, $(2), $(strip $(1))/$(f))))) \
   $(if $(_fpf),$(_fpf), \
        $(if $(filter-out ./ .,$(1)), \
              $(call find-parent-file,$(patsubst %/,%,$(dir $(1))),$(2)) \
@@ -461,7 +505,8 @@
 # $(2): target name, like "NotePad"
 # $(3): if non-empty, this is a HOST target.
 # $(4): if non-empty, force the intermediates to be COMMON
-# $(5): if non-empty, force the intermedistes to be for the 2nd arch
+# $(5): if non-empty, force the intermediates to be for the 2nd arch
+# $(6): if non-empty, force the intermediates to be for the host cross os
 define intermediates-dir-for
 $(strip \
     $(eval _idfClass := $(strip $(1))) \
@@ -470,11 +515,11 @@
     $(eval _idfName := $(strip $(2))) \
     $(if $(_idfName),, \
         $(error $(LOCAL_PATH): Name not defined in call to intermediates-dir-for)) \
-    $(eval _idfPrefix := $(if $(strip $(3)),HOST,TARGET)) \
+    $(eval _idfPrefix := $(if $(strip $(3)),$(if $(strip $(6)),HOST_CROSS,HOST),TARGET)) \
     $(eval _idf2ndArchPrefix := $(if $(strip $(5)),$(TARGET_2ND_ARCH_VAR_PREFIX))) \
     $(if $(filter $(_idfPrefix)-$(_idfClass),$(COMMON_MODULE_CLASSES))$(4), \
         $(eval _idfIntBase := $($(_idfPrefix)_OUT_COMMON_INTERMEDIATES)) \
-      ,$(if $(filter $(_idfClass),SHARED_LIBRARIES STATIC_LIBRARIES EXECUTABLES GYP),\
+      ,$(if $(filter $(_idfClass),$(PER_ARCH_MODULE_CLASSES)),\
           $(eval _idfIntBase := $($(_idf2ndArchPrefix)$(_idfPrefix)_OUT_INTERMEDIATES)) \
        ,$(eval _idfIntBase := $($(_idfPrefix)_OUT_INTERMEDIATES)) \
        ) \
@@ -488,13 +533,14 @@
 #
 # $(1): if non-empty, force the intermediates to be COMMON
 # $(2): if non-empty, force the intermediates to be for the 2nd arch
+# $(3): if non-empty, force the intermediates to be for the host cross os
 define local-intermediates-dir
 $(strip \
     $(if $(strip $(LOCAL_MODULE_CLASS)),, \
         $(error $(LOCAL_PATH): LOCAL_MODULE_CLASS not defined before call to local-intermediates-dir)) \
     $(if $(strip $(LOCAL_MODULE)),, \
         $(error $(LOCAL_PATH): LOCAL_MODULE not defined before call to local-intermediates-dir)) \
-    $(call intermediates-dir-for,$(LOCAL_MODULE_CLASS),$(LOCAL_MODULE),$(LOCAL_IS_HOST_MODULE),$(1),$(2)) \
+    $(call intermediates-dir-for,$(LOCAL_MODULE_CLASS),$(LOCAL_MODULE),$(LOCAL_IS_HOST_MODULE),$(1),$(2),$(3)) \
 )
 endef
 
@@ -521,7 +567,7 @@
         $(error $(LOCAL_PATH): Name not defined in call to generated-sources-dir-for)) \
     $(eval _idfPrefix := $(if $(strip $(3)),HOST,TARGET)) \
     $(if $(filter $(_idfPrefix)-$(_idfClass),$(COMMON_MODULE_CLASSES))$(4), \
-        $(eval _idfIntBase := $($(_idfPrefix)_OUT_GEN_COMMON)) \
+        $(eval _idfIntBase := $($(_idfPrefix)_OUT_COMMON_GEN)) \
       , \
         $(eval _idfIntBase := $($(_idfPrefix)_OUT_GEN)) \
      ) \
@@ -627,25 +673,36 @@
 # $(1): library name
 # $(2): Non-empty if IS_HOST_MODULE
 define _java-lib-full-classes.jar
-$(call _java-lib-dir,$(1),$(2))/classes$(COMMON_JAVA_PACKAGE_SUFFIX)
+$(call _java-lib-dir,$(1),$(2))/$(if $(2),javalib,classes)$(COMMON_JAVA_PACKAGE_SUFFIX)
 endef
 
+# Get the jar files (you can pass them to "javac -classpath") of static or shared
+# Java libraries that you want to link against.
 # $(1): library name list
 # $(2): Non-empty if IS_HOST_MODULE
 define java-lib-files
 $(foreach lib,$(1),$(call _java-lib-full-classes.jar,$(lib),$(2)))
 endef
 
-# $(1): library name
-# $(2): Non-empty if IS_HOST_MODULE
-define _java-lib-full-dep
-$(call _java-lib-dir,$(1),$(2))/$(if $(2),javalib,classes)$(COMMON_JAVA_PACKAGE_SUFFIX)
-endef
-
+# Get the dependency files (you can put them on the right side of "|" in a build rule)
+# of the Java libraries.
 # $(1): library name list
 # $(2): Non-empty if IS_HOST_MODULE
+# Historically for target Java libraries we used a different file (javalib.jar)
+# as the dependency.
+# Now we can use classes.jar as the dependency, so java-lib-deps is the same
+# as java-lib-files.
 define java-lib-deps
-$(foreach lib,$(1),$(call _java-lib-full-dep,$(lib),$(2)))
+$(call java-lib-files,$(1),$(2))
+endef
+
+# Get the jar files (you can pass them to "javac -classpath") of host dalvik Java libraries.
+# You can also use them as dependency files.
+# A host dalvik Java library is different from a host Java library in that
+# the java lib file is classes.jar, not javalib.jar.
+# $(1): library name list
+define host-dex-java-lib-files
+$(foreach lib,$(1),$(call _java-lib-dir,$(lib),true)/classes.jar)
 endef
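
In short, classpath entries and dependency prerequisites now both resolve to classes.jar, and only host dalvik libraries keep an explicit classes.jar helper. A usage sketch with illustrative library names:

    # Classpath entries and dependencies for target Java libraries (same paths now):
    example_java_libs := $(call java-lib-files,core-oj framework,)
    example_java_lib_deps := $(call java-lib-deps,core-oj framework,)
    # Host dalvik Java libraries resolve to their classes.jar explicitly:
    example_hostdex_libs := $(call host-dex-java-lib-files,core-oj-hostdex)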
 
 ###########################################################
@@ -666,16 +723,10 @@
 $(foreach lib,$(1),$(call _jack-lib-full-classes,$(lib),$(2)))
 endef
 
-# $(1): library name
-# $(2): Non-empty if IS_HOST_MODULE
-define _jack-lib-full-dep
-$(call _jack-lib-full-classes,$(1),$(2))
-endef
-
 # $(1): library name list
 # $(2): Non-empty if IS_HOST_MODULE
 define jack-lib-deps
-$(foreach lib,$(1),$(call _jack-lib-full-dep,$(lib),$(2)))
+$(call jack-lib-files,$(1),$(2))
 endef
 
 ###########################################################
@@ -841,52 +892,13 @@
 define pretty
 @echo $1
 endef
-hide := @
 else
 define pretty
 endef
-hide :=
 endif
 
 ###########################################################
-## Dump the variables that are associated with targets
-###########################################################
-
-define dump-module-variables
-@echo all_dependencies=$^
-@echo PRIVATE_YACCFLAGS=$(PRIVATE_YACCFLAGS);
-@echo PRIVATE_CFLAGS=$(PRIVATE_CFLAGS);
-@echo PRIVATE_CPPFLAGS=$(PRIVATE_CPPFLAGS);
-@echo PRIVATE_DEBUG_CFLAGS=$(PRIVATE_DEBUG_CFLAGS);
-@echo PRIVATE_C_INCLUDES=$(PRIVATE_C_INCLUDES);
-@echo PRIVATE_LDFLAGS=$(PRIVATE_LDFLAGS);
-@echo PRIVATE_LDLIBS=$(PRIVATE_LDLIBS);
-@echo PRIVATE_ARFLAGS=$(PRIVATE_ARFLAGS);
-@echo PRIVATE_AAPT_FLAGS=$(PRIVATE_AAPT_FLAGS);
-@echo PRIVATE_DX_FLAGS=$(PRIVATE_DX_FLAGS);
-@echo PRIVATE_JAVACFLAGS=$(PRIVATE_JAVACFLAGS);
-@echo PRIVATE_JAVA_LIBRARIES=$(PRIVATE_JAVA_LIBRARIES);
-@echo PRIVATE_ALL_SHARED_LIBRARIES=$(PRIVATE_ALL_SHARED_LIBRARIES);
-@echo PRIVATE_ALL_STATIC_LIBRARIES=$(PRIVATE_ALL_STATIC_LIBRARIES);
-@echo PRIVATE_ALL_WHOLE_STATIC_LIBRARIES=$(PRIVATE_ALL_WHOLE_STATIC_LIBRARIES);
-@echo PRIVATE_ALL_OBJECTS=$(PRIVATE_ALL_OBJECTS);
-@echo PRIVATE_NO_CRT=$(PRIVATE_NO_CRT);
-endef
-
-###########################################################
-## Commands for using sed to replace given variable values
-###########################################################
-
-define transform-variables
-@mkdir -p $(dir $@)
-@echo "Sed: $(if $(PRIVATE_MODULE),$(PRIVATE_MODULE),$@) <= $<"
-$(hide) sed $(foreach var,$(REPLACE_VARS),-e "s/{{$(var)}}/$(subst /,\/,$(PWD)/$($(var)))/g") $< >$@
-$(hide) if [ "$(suffix $@)" = ".sh" ]; then chmod a+rx $@; fi
-endef
-
-
-###########################################################
-## Commands for munging the dependency files GCC generates
+## Commands for munging the dependency files the compiler generates
 ###########################################################
 # $(1): the input .d file
 # $(2): the output .P file
@@ -902,38 +914,106 @@
 endef
 
 ###########################################################
+## Commands for including the dependency files the compiler generates
+###########################################################
+# $(1): the .P file
+# $(2): the main build target
+ifeq ($(BUILDING_WITH_NINJA),true)
+define include-depfile
+$(eval $(2) : .KATI_DEPFILE := $1)
+endef
+else
+define include-depfile
+$(eval -include $1)
+endef
+endif
+
+# $(1): object files
+define include-depfiles-for-objs
+$(foreach obj, $(1), $(call include-depfile, $(obj:%.o=%.P), $(obj)))
+endef
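+
+Usage sketch, assuming hypothetical object files with .P depfiles written next to them:
+
+    example_objs := $(intermediates)/foo.o $(intermediates)/bar.o   # hypothetical
+    # Under ninja this attaches foo.P/bar.P via .KATI_DEPFILE;
+    # under plain make it falls back to -include.
+    $(call include-depfiles-for-objs,$(example_objs))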
+
+###########################################################
+## Track source files compiled to objects
+###########################################################
+# $(1): list of sources
+# $(2): list of matching objects
+define track-src-file-obj
+$(eval $(call _track-src-file-obj,$(1)))
+endef
+define _track-src-file-obj
+i := w
+$(foreach s,$(1),
+my_tracked_src_files += $(s)
+my_src_file_obj_$(s) := $$(word $$(words $$(i)),$$(2))
+i += w)
+endef
+
+# $(1): list of sources
+# $(2): list of matching generated sources
+define track-src-file-gen
+$(eval $(call _track-src-file-gen,$(2)))
+endef
+define _track-src-file-gen
+i := w
+$(foreach s,$(1),
+my_tracked_gen_files += $(s)
+my_src_file_gen_$(s) := $$(word $$(words $$(i)),$$(1))
+i += w)
+endef
+
+# $(1): list of generated sources
+# $(2): list of matching objects
+define track-gen-file-obj
+$(call track-src-file-obj,$(foreach f,$(1),\
+  $(or $(my_src_file_gen_$(f)),$(f))),$(2))
+endef
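+
+These helpers record which object each source produced; the growing list of w words is used as an index into the second argument. A usage sketch with hypothetical lists:
+
+    example_srcs := src/a.c src/b.c                                 # hypothetical
+    example_objs := $(intermediates)/a.o $(intermediates)/b.o       # hypothetical
+    $(call track-src-file-obj,$(example_srcs),$(example_objs))
+    # my_tracked_src_files now lists both sources, and
+    # $(my_src_file_obj_src/a.c) expands to $(intermediates)/a.o.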
+
+###########################################################
 ## Commands for running lex
 ###########################################################
 
-define transform-l-to-cpp
-@mkdir -p $(dir $@)
+define transform-l-to-c-or-cpp
 @echo "Lex: $(PRIVATE_MODULE) <= $<"
+@mkdir -p $(dir $@)
 $(hide) $(LEX) -o$@ $<
 endef
 
 ###########################################################
 ## Commands for running yacc
 ##
-## Because the extension of c++ files can change, the
-## extension must be specified in $1.
-## E.g, "$(call transform-y-to-cpp,.cpp)"
 ###########################################################
 
-define transform-y-to-cpp
-@mkdir -p $(dir $@)
+define transform-y-to-c-or-cpp
 @echo "Yacc: $(PRIVATE_MODULE) <= $<"
-$(YACC) $(PRIVATE_YACCFLAGS) -o $@ $<
-touch $(@:$1=$(YACC_HEADER_SUFFIX))
-echo '#ifndef '$(@F:$1=_h) > $(@:$1=.h)
-echo '#define '$(@F:$1=_h) >> $(@:$1=.h)
-cat $(@:$1=$(YACC_HEADER_SUFFIX)) >> $(@:$1=.h)
-echo '#endif' >> $(@:$1=.h)
+@mkdir -p $(dir $@)
+$(YACC) $(PRIVATE_YACCFLAGS) \
+  --defines=$(basename $@).h \
+  -o $@ $<
 endef
 
 ###########################################################
 ## Commands to compile RenderScript to Java
 ###########################################################
 
+## Merge multiple .d files generated by llvm-rs-cc. This is necessary
+## because ninja can handle only a single depfile per build target.
+## .d files generated by llvm-rs-cc define .stamp, .bc, and optionally
+## .java as build targets. However, there's no way to let ninja know the
+## dependencies of the .bc and .java files, so we give up the build
+## targets for them. Since we write the .stamp file as the target
+## ourselves, the awk script removes the leading lines up to the colon
+## and appends a backslash to the last line so that the contents of
+## multiple files can be concatenated.
+# $(1): .d files to be merged
+# $(2): merged .d file
+define _merge-renderscript-d
+$(hide) echo '$@: $(backslash)' > $2
+$(foreach d,$1, \
+  $(hide) awk 'start { sub(/( \\)?$$/, " \\"); print } /:/ { start=1 }' < $d >> $2$(newline))
+$(hide) echo >> $2
+endef
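+
+For illustration, given a hypothetical foo.d written by llvm-rs-cc, the merge keeps only the continuation lines and re-targets them at the .stamp file:
+
+    # Hypothetical input foo.d:
+    #   out/.../foo.bc: \
+    #     frameworks/rs/scriptc/rs_core.rsh \
+    #     device/sample/foo.rsh
+    # After $(call _merge-renderscript-d,foo.d,merged.d) with $@ being the .stamp file:
+    #   out/.../RenderScript.stamp: \
+    #     frameworks/rs/scriptc/rs_core.rsh \
+    #     device/sample/foo.rsh \
+    #   (followed by a blank line)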
+
 define transform-renderscripts-to-java-and-bc
 @echo "RenderScript: $(PRIVATE_MODULE) <= $(PRIVATE_RS_SOURCE_FILES)"
 $(hide) rm -rf $(PRIVATE_RS_OUTPUT_DIR)
@@ -948,8 +1028,8 @@
   $(PRIVATE_RS_FLAGS) \
   $(foreach inc,$(PRIVATE_RS_INCLUDES),$(addprefix -I , $(inc))) \
   $(PRIVATE_RS_SOURCE_FILES)
-  $(foreach d,$(PRIVATE_DEP_FILES),\
-    $(call transform-d-to-p-args,$(d),$(d:%.d=%.P))$(newline))
+$(call _merge-renderscript-d,$(PRIVATE_DEP_FILES),$@.d)
+$(call transform-d-to-p-args,$@.d,$@.P)
 $(hide) mkdir -p $(dir $@)
 $(hide) touch $@
 endef
@@ -981,11 +1061,12 @@
   -d $(PRIVATE_RS_OUTPUT_DIR) \
   -a $@ -MD \
   -reflect-c++ \
+  $(addprefix -target-api , $(PRIVATE_RS_TARGET_API)) \
   $(PRIVATE_RS_FLAGS) \
   $(addprefix -I , $(PRIVATE_RS_INCLUDES)) \
   $(PRIVATE_RS_SOURCE_FILES)
-  $(foreach d,$(PRIVATE_DEP_FILES),\
-    $(call transform-d-to-p-args,$(d),$(d:%.d=%.P))$(newline))
+$(call _merge-renderscript-d,$(PRIVATE_DEP_FILES),$@.d)
+$(call transform-d-to-p-args,$@.d,$@.P)
 $(hide) mkdir -p $(dir $@)
 $(hide) touch $@
 endef
@@ -1002,6 +1083,51 @@
 endef
 #$(AIDL) $(PRIVATE_AIDL_FLAGS) $< - | indent -nut -br -npcs -l1000 > $@
 
+define transform-aidl-to-cpp
+@mkdir -p $(dir $@)
+@mkdir -p $(PRIVATE_HEADER_OUTPUT_DIR)
+@echo "Generating C++ from AIDL: $(PRIVATE_MODULE) <= $<"
+$(hide) $(AIDL_CPP) -d$(basename $@).aidl.P $(PRIVATE_AIDL_FLAGS) \
+    $< $(PRIVATE_HEADER_OUTPUT_DIR) $@
+endef
+
+## Given a .aidl file path, generate the rule to compile it to a .cpp file.
+# $(1): a .aidl source file
+# $(2): a directory to place the generated .cpp files in
+# $(3): name of a variable to add the path to the generated source file to
+#
+# You must call this with $(eval).
+define define-aidl-cpp-rule
+define-aidl-cpp-rule-src := $(patsubst %.aidl,%$(LOCAL_CPP_EXTENSION),$(subst ../,dotdot/,$(addprefix $(2)/,$(1))))
+$$(define-aidl-cpp-rule-src) : $(LOCAL_PATH)/$(1) $(AIDL_CPP)
+	$$(transform-aidl-to-cpp)
+$(3) += $$(define-aidl-cpp-rule-src)
+endef
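+
+Usage sketch, mirroring how a module build would be expected to invoke it (the source list, output root, and accumulator variable are hypothetical):
+
+    $(foreach s,$(filter %.aidl,$(example_src_files)),\
+        $(eval $(call define-aidl-cpp-rule,$(s),$(example_aidl_gen_cpp_root),example_gen_cpp_srcs)))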
+
+###########################################################
+## Commands for running vts
+###########################################################
+
+define transform-vts-to-cpp
+@mkdir -p $(dir $@)
+@mkdir -p $(PRIVATE_HEADER_OUTPUT_DIR)
+@echo "Generating C++ from VTS: $(PRIVATE_MODULE) <= $<"
+$(hide) $(VTSC) -d$(basename $@).vts.P $(PRIVATE_VTS_FLAGS) \
+    $< $(PRIVATE_HEADER_OUTPUT_DIR) $@
+endef
+
+## Given a .vts file path, generate the rule to compile it to a .cpp file.
+# $(1): a .vts source file
+# $(2): a directory to place the generated .cpp files in
+# $(3): name of a variable to add the path to the generated source file to
+#
+# You must call this with $(eval).
+define define-vts-cpp-rule
+define-vts-cpp-rule-src := $(patsubst %.vts,%$(LOCAL_CPP_EXTENSION),$(subst ../,dotdot/,$(addprefix $(2)/,$(1))))
+$$(define-vts-cpp-rule-src) : $(LOCAL_PATH)/$(1) $(VTSC)
+	$$(transform-vts-to-cpp)
+$(3) += $$(define-vts-cpp-rule-src)
+endef
 
 ###########################################################
 ## Commands for running java-event-log-tags.py
@@ -1037,22 +1163,48 @@
 ## Commands for running protoc to compile .proto into .pb.cc (or.pb.c) and .pb.h
 ######################################################################
 define transform-proto-to-cc
-@mkdir -p $(dir $@)
 @echo "Protoc: $@ <= $<"
+@mkdir -p $(dir $@)
 $(hide) $(PROTOC) \
 	$(addprefix --proto_path=, $(PRIVATE_PROTO_INCLUDES)) \
 	$(PRIVATE_PROTOC_FLAGS) \
 	$<
 endef
 
+
+######################################################################
+## Commands for generating DBus adaptors from .dbus-xml files.
+######################################################################
+define generate-dbus-adaptors
+@echo "Generating DBus adaptors for $(PRIVATE_MODULE)"
+@mkdir -p $(dir $@)
+$(hide) $(DBUS_GENERATOR) \
+	--service-config=$(PRIVATE_DBUS_SERVICE_CONFIG) \
+	--adaptor=$@ \
+	$<
+endef
+
+######################################################################
+## Commands for generating DBus proxies from .dbus-xml files.
+######################################################################
+define generate-dbus-proxies
+@echo "Generating DBus proxies for $(PRIVATE_MODULE)"
+@mkdir -p $(dir $@)
+$(hide) $(DBUS_GENERATOR) \
+	--service-config=$(PRIVATE_DBUS_SERVICE_CONFIG) \
+	--proxy=$@ \
+	$(filter %.dbus-xml,$^)
+endef
+
+
 ###########################################################
 ## Commands for running gcc to compile a C++ file
 ###########################################################
 
 define transform-cpp-to-o
-@mkdir -p $(dir $@)
 @echo "target $(PRIVATE_ARM_MODE) C++: $(PRIVATE_MODULE) <= $<"
-$(hide) $(PRIVATE_CXX) \
+@mkdir -p $(dir $@)
+$(hide) $(RELATIVE_PWD) $(PRIVATE_CXX) \
 	$(addprefix -I , $(PRIVATE_C_INCLUDES)) \
 	$(shell cat $(PRIVATE_IMPORT_INCLUDES)) \
 	$(addprefix -isystem ,\
@@ -1070,8 +1222,8 @@
 	$(PRIVATE_CFLAGS) \
 	$(PRIVATE_CPPFLAGS) \
 	$(PRIVATE_DEBUG_CFLAGS) \
-	$(GLOBAL_CFLAGS_NO_OVERRIDE) \
-	$(GLOBAL_CPPFLAGS_NO_OVERRIDE) \
+	$(PRIVATE_CFLAGS_NO_OVERRIDE) \
+	$(PRIVATE_CPPFLAGS_NO_OVERRIDE) \
 	-MD -MF $(patsubst %.o,%.d,$@) -o $@ $<
 $(transform-d-to-p)
 endef
@@ -1084,7 +1236,7 @@
 # $(1): extra flags
 define transform-c-or-s-to-o-no-deps
 @mkdir -p $(dir $@)
-$(hide) $(PRIVATE_CC) \
+$(hide) $(RELATIVE_PWD) $(PRIVATE_CC) \
 	$(addprefix -I , $(PRIVATE_C_INCLUDES)) \
 	$(shell cat $(PRIVATE_IMPORT_INCLUDES)) \
 	$(addprefix -isystem ,\
@@ -1108,7 +1260,7 @@
     $(PRIVATE_CFLAGS) \
     $(PRIVATE_CONLYFLAGS) \
     $(PRIVATE_DEBUG_CFLAGS) \
-    $(GLOBAL_CFLAGS_NO_OVERRIDE))
+    $(PRIVATE_CFLAGS_NO_OVERRIDE))
 endef
 
 define transform-s-to-o-no-deps
@@ -1157,15 +1309,15 @@
 ###########################################################
 
 define transform-host-cpp-to-o
+@echo "$($(PRIVATE_PREFIX)DISPLAY) C++: $(PRIVATE_MODULE) <= $<"
 @mkdir -p $(dir $@)
-@echo "host C++: $(PRIVATE_MODULE) <= $<"
-$(hide) $(PRIVATE_CXX) \
+$(hide) $(RELATIVE_PWD) $(PRIVATE_CXX) \
 	$(addprefix -I , $(PRIVATE_C_INCLUDES)) \
 	$(shell cat $(PRIVATE_IMPORT_INCLUDES)) \
 	$(addprefix -isystem ,\
 	    $(if $(PRIVATE_NO_DEFAULT_COMPILER_FLAGS),, \
 	        $(filter-out $(PRIVATE_C_INCLUDES), \
-	            $(HOST_PROJECT_INCLUDES) \
+	            $($(PRIVATE_PREFIX)PROJECT_INCLUDES) \
 	            $(PRIVATE_HOST_C_INCLUDES)))) \
 	-c \
 	$(if $(PRIVATE_NO_DEFAULT_COMPILER_FLAGS),, \
@@ -1175,6 +1327,8 @@
 	$(PRIVATE_CFLAGS) \
 	$(PRIVATE_CPPFLAGS) \
 	$(PRIVATE_DEBUG_CFLAGS) \
+	$(PRIVATE_CFLAGS_NO_OVERRIDE) \
+	$(PRIVATE_CPPFLAGS_NO_OVERRIDE) \
 	-MD -MF $(patsubst %.o,%.d,$@) -o $@ $<
 $(transform-d-to-p)
 endef
@@ -1187,13 +1341,13 @@
 # $(1): extra flags
 define transform-host-c-or-s-to-o-no-deps
 @mkdir -p $(dir $@)
-$(hide) $(PRIVATE_CC) \
+$(hide) $(RELATIVE_PWD) $(PRIVATE_CC) \
 	$(addprefix -I , $(PRIVATE_C_INCLUDES)) \
 	$(shell cat $(PRIVATE_IMPORT_INCLUDES)) \
 	$(addprefix -isystem ,\
 	    $(if $(PRIVATE_NO_DEFAULT_COMPILER_FLAGS),, \
 	        $(filter-out $(PRIVATE_C_INCLUDES), \
-	            $(HOST_PROJECT_INCLUDES) \
+	            $($(PRIVATE_PREFIX)PROJECT_INCLUDES) \
 	            $(PRIVATE_HOST_C_INCLUDES)))) \
 	-c \
 	$(if $(PRIVATE_NO_DEFAULT_COMPILER_FLAGS),, \
@@ -1201,16 +1355,17 @@
 	    $(PRIVATE_HOST_GLOBAL_CONLYFLAGS) \
 	 ) \
 	$(1) \
+	$(PRIVATE_CFLAGS_NO_OVERRIDE) \
 	-MD -MF $(patsubst %.o,%.d,$@) -o $@ $<
 endef
 
 define transform-host-c-to-o-no-deps
-@echo "host C: $(PRIVATE_MODULE) <= $<"
+@echo "$($(PRIVATE_PREFIX)DISPLAY) C: $(PRIVATE_MODULE) <= $<"
 $(call transform-host-c-or-s-to-o-no-deps, $(PRIVATE_CFLAGS) $(PRIVATE_CONLYFLAGS) $(PRIVATE_DEBUG_CFLAGS))
 endef
 
 define transform-host-s-to-o-no-deps
-@echo "host asm: $(PRIVATE_MODULE) <= $<"
+@echo "$($(PRIVATE_PREFIX)DISPLAY) asm: $(PRIVATE_MODULE) <= $<"
 $(call transform-host-c-or-s-to-o-no-deps, $(PRIVATE_ASFLAGS))
 endef
 
@@ -1229,7 +1384,7 @@
 ###########################################################
 
 define transform-host-m-to-o-no-deps
-@echo "host ObjC: $(PRIVATE_MODULE) <= $<"
+@echo "$($(PRIVATE_PREFIX)DISPLAY) ObjC: $(PRIVATE_MODULE) <= $<"
 $(call transform-host-c-or-s-to-o-no-deps, $(PRIVATE_CFLAGS) $(PRIVATE_DEBUG_CFLAGS))
 endef
 
@@ -1238,6 +1393,14 @@
 $(transform-d-to-p)
 endef
 
+###########################################################
+## Commands for running gcc to compile a host Objective-C++ file
+###########################################################
+
+define transform-host-mm-to-o
+$(transform-host-cpp-to-o)
+endef
+
 
 ###########################################################
 ## Rules to compile a single C/C++ source with ../ in the path
@@ -1254,7 +1417,7 @@
 o := $(intermediates)/$(patsubst %$(LOCAL_CPP_EXTENSION),%.o,$(subst ../,$(DOTDOT_REPLACEMENT),$(1)))
 $$(o) : $(TOPDIR)$(LOCAL_PATH)/$(1) $(2)
 	$$(transform-$$(PRIVATE_HOST)cpp-to-o)
--include $$(o:%.o=%.P)
+$$(call include-depfiles-for-objs, $$(o))
 $(3) += $$(o)
 endef
 
@@ -1267,7 +1430,7 @@
 o := $(intermediates)/$(patsubst %.c,%.o,$(subst ../,$(DOTDOT_REPLACEMENT),$(1)))
 $$(o) : $(TOPDIR)$(LOCAL_PATH)/$(1) $(2)
 	$$(transform-$$(PRIVATE_HOST)c-to-o)
--include $$(o:%.o=%.P)
+$$(call include-depfiles-for-objs, $$(o))
 $(3) += $$(o)
 endef
 
@@ -1280,7 +1443,7 @@
 o := $(intermediates)/$(patsubst %.S,%.o,$(subst ../,$(DOTDOT_REPLACEMENT),$(1)))
 $$(o) : $(TOPDIR)$(LOCAL_PATH)/$(1) $(2)
 	$$(transform-$$(PRIVATE_HOST)s-to-o)
--include $$(o:%.o=%.P)
+$$(call include-depfiles-for-objs, $$(o))
 $(3) += $$(o)
 endef
 
@@ -1322,7 +1485,6 @@
 
 # $(1): the full path of the source static library.
 define _extract-and-include-single-target-whole-static-lib
-@echo "preparing StaticLib: $(PRIVATE_MODULE) [including $(strip $(1))]"
 $(hide) ldir=$(PRIVATE_INTERMEDIATES_DIR)/WHOLE/$(basename $(notdir $(1)))_objs;\
     rm -rf $$ldir; \
     mkdir -p $$ldir; \
@@ -1350,7 +1512,6 @@
 # $(1): the full path of the source static library.
 define extract-and-include-whole-static-libs-first
 $(if $(strip $(1)),
-@echo "preparing StaticLib: $(PRIVATE_MODULE) [including $(strip $(1))]"
 $(hide) cp $(1) $@)
 endef
 
@@ -1363,10 +1524,10 @@
 # Explicitly delete the archive first so that ar doesn't
 # try to add to an existing archive.
 define transform-o-to-static-lib
+@echo "target StaticLib: $(PRIVATE_MODULE) ($@)"
 @mkdir -p $(dir $@)
 @rm -f $@
 $(extract-and-include-target-whole-static-libs)
-@echo "target StaticLib: $(PRIVATE_MODULE) ($@)"
 $(call split-long-arguments,$($(PRIVATE_2ND_ARCH_VAR_PREFIX)TARGET_AR) \
     $($(PRIVATE_2ND_ARCH_VAR_PREFIX)TARGET_GLOBAL_ARFLAGS) \
     $(PRIVATE_ARFLAGS) $@,$(PRIVATE_ALL_OBJECTS))
@@ -1378,7 +1539,6 @@
 
 # $(1): the full path of the source static library.
 define _extract-and-include-single-host-whole-static-lib
-@echo "preparing StaticLib: $(PRIVATE_MODULE) [including $(strip $(1))]"
 $(hide) ldir=$(PRIVATE_INTERMEDIATES_DIR)/WHOLE/$(basename $(notdir $(1)))_objs;\
     rm -rf $$ldir; \
     mkdir -p $$ldir; \
@@ -1386,19 +1546,19 @@
     lib_to_include=$$ldir/$(notdir $(1)); \
     filelist=; \
     subdir=0; \
-    for f in `$($(PRIVATE_2ND_ARCH_VAR_PREFIX)HOST_AR) t $(1) | \grep '\.o$$'`; do \
+    for f in `$($(PRIVATE_2ND_ARCH_VAR_PREFIX)$(PRIVATE_PREFIX)AR) t $(1) | \grep '\.o$$'`; do \
         if [ -e $$ldir/$$f ]; then \
            mkdir $$ldir/$$subdir; \
            ext=$$subdir/; \
            subdir=$$((subdir+1)); \
-           $($(PRIVATE_2ND_ARCH_VAR_PREFIX)HOST_AR) m $$lib_to_include $$f; \
+           $($(PRIVATE_2ND_ARCH_VAR_PREFIX)$(PRIVATE_PREFIX)AR) m $$lib_to_include $$f; \
         else \
            ext=; \
         fi; \
-        $($(PRIVATE_2ND_ARCH_VAR_PREFIX)HOST_AR) p $$lib_to_include $$f > $$ldir/$$ext$$f; \
+        $($(PRIVATE_2ND_ARCH_VAR_PREFIX)$(PRIVATE_PREFIX)AR) p $$lib_to_include $$f > $$ldir/$$ext$$f; \
         filelist="$$filelist $$ldir/$$ext$$f"; \
     done ; \
-    $($(PRIVATE_2ND_ARCH_VAR_PREFIX)HOST_AR) $($(PRIVATE_2ND_ARCH_VAR_PREFIX)HOST_GLOBAL_ARFLAGS) \
+    $($(PRIVATE_2ND_ARCH_VAR_PREFIX)$(PRIVATE_PREFIX)AR) $($(PRIVATE_2ND_ARCH_VAR_PREFIX)$(PRIVATE_PREFIX)GLOBAL_ARFLAGS) \
         $(PRIVATE_ARFLAGS) $@ $$filelist
 
 endef
@@ -1412,12 +1572,12 @@
 # Explicitly delete the archive first so that ar doesn't
 # try to add to an existing archive.
 define transform-host-o-to-static-lib
+@echo "$($(PRIVATE_PREFIX)DISPLAY) StaticLib: $(PRIVATE_MODULE) ($@)"
 @mkdir -p $(dir $@)
 @rm -f $@
 $(extract-and-include-host-whole-static-libs)
-@echo "host StaticLib: $(PRIVATE_MODULE) ($@)"
-$(call split-long-arguments,$($(PRIVATE_2ND_ARCH_VAR_PREFIX)HOST_AR) \
-    $($(PRIVATE_2ND_ARCH_VAR_PREFIX)HOST_GLOBAL_ARFLAGS) \
+$(call split-long-arguments,$($(PRIVATE_2ND_ARCH_VAR_PREFIX)$(PRIVATE_PREFIX)AR) \
+    $($(PRIVATE_2ND_ARCH_VAR_PREFIX)$(PRIVATE_PREFIX)GLOBAL_ARFLAGS) \
     $(PRIVATE_ARFLAGS) $@,$(PRIVATE_ALL_OBJECTS))
 endef
 
@@ -1431,11 +1591,11 @@
 ifneq ($(HOST_CUSTOM_LD_COMMAND),true)
 define transform-host-o-to-shared-lib-inner
 $(hide) $(PRIVATE_CXX) \
-	-Wl,-rpath-link=$($(PRIVATE_2ND_ARCH_VAR_PREFIX)HOST_OUT_INTERMEDIATE_LIBRARIES) \
-	-Wl,-rpath,\$$ORIGIN/../$(notdir $($(PRIVATE_2ND_ARCH_VAR_PREFIX)HOST_OUT_SHARED_LIBRARIES)) \
-	-Wl,-rpath,\$$ORIGIN/$(notdir $($(PRIVATE_2ND_ARCH_VAR_PREFIX)HOST_OUT_SHARED_LIBRARIES)) \
+	-Wl,-rpath-link=$($(PRIVATE_2ND_ARCH_VAR_PREFIX)$(PRIVATE_PREFIX)OUT_INTERMEDIATE_LIBRARIES) \
+	-Wl,-rpath,\$$ORIGIN/../$(notdir $($(PRIVATE_2ND_ARCH_VAR_PREFIX)$(PRIVATE_PREFIX)OUT_SHARED_LIBRARIES)) \
+	-Wl,-rpath,\$$ORIGIN/$(notdir $($(PRIVATE_2ND_ARCH_VAR_PREFIX)$(PRIVATE_PREFIX)OUT_SHARED_LIBRARIES)) \
 	-shared -Wl,-soname,$(notdir $@) \
-	$($(PRIVATE_2ND_ARCH_VAR_PREFIX)HOST_GLOBAL_LD_DIRS) \
+	$($(PRIVATE_2ND_ARCH_VAR_PREFIX)$(PRIVATE_PREFIX)GLOBAL_LD_DIRS) \
 	$(if $(PRIVATE_NO_DEFAULT_COMPILER_FLAGS),, \
 	   $(PRIVATE_HOST_GLOBAL_LDFLAGS) \
 	) \
@@ -1456,14 +1616,14 @@
 endif
 
 define transform-host-o-to-shared-lib
+@echo "$($(PRIVATE_PREFIX)DISPLAY) SharedLib: $(PRIVATE_MODULE) ($@)"
 @mkdir -p $(dir $@)
-@echo "host SharedLib: $(PRIVATE_MODULE) ($@)"
 $(transform-host-o-to-shared-lib-inner)
 endef
 
 define transform-host-o-to-package
+@echo "$($(PRIVATE_PREFIX)DISPLAY) Package: $(PRIVATE_MODULE) ($@)"
 @mkdir -p $(dir $@)
-@echo "host Package: $(PRIVATE_MODULE) ($@)"
 $(transform-host-o-to-shared-lib-inner)
 endef
 
@@ -1478,7 +1638,7 @@
 	-Wl,--gc-sections \
 	$(if $(filter true,$(PRIVATE_CLANG)),-shared,-Wl$(comma)-shared) \
 	$(PRIVATE_TARGET_GLOBAL_LD_DIRS) \
-	$(if $(filter true,$(PRIVATE_NO_CRT)),,$(PRIVATE_TARGET_CRTBEGIN_SO_O)) \
+	$(PRIVATE_TARGET_CRTBEGIN_SO_O) \
 	$(PRIVATE_ALL_OBJECTS) \
 	-Wl,--whole-archive \
 	$(call normalize-target-libraries,$(PRIVATE_ALL_WHOLE_STATIC_LIBRARIES)) \
@@ -1486,25 +1646,23 @@
 	$(if $(PRIVATE_GROUP_STATIC_LIBRARIES),-Wl$(comma)--start-group) \
 	$(call normalize-target-libraries,$(PRIVATE_ALL_STATIC_LIBRARIES)) \
 	$(if $(PRIVATE_GROUP_STATIC_LIBRARIES),-Wl$(comma)--end-group) \
-	$(if $(filter true,$(NATIVE_COVERAGE)),$(PRIVATE_TARGET_LIBGCOV)) \
-	$(if $(filter true,$(NATIVE_COVERAGE)),$(PRIVATE_TARGET_LIBPROFILE_RT)) \
+	$(if $(filter true,$(NATIVE_COVERAGE)),$(PRIVATE_TARGET_COVERAGE_LIB)) \
 	$(PRIVATE_TARGET_LIBATOMIC) \
 	$(PRIVATE_TARGET_LIBGCC) \
 	$(call normalize-target-libraries,$(PRIVATE_ALL_SHARED_LIBRARIES)) \
 	-o $@ \
 	$(PRIVATE_TARGET_GLOBAL_LDFLAGS) \
 	$(PRIVATE_LDFLAGS) \
-	$(if $(filter true,$(PRIVATE_NO_CRT)),,$(PRIVATE_TARGET_CRTEND_SO_O)) \
+	$(PRIVATE_TARGET_CRTEND_SO_O) \
 	$(PRIVATE_LDLIBS)
 endef
 
 define transform-o-to-shared-lib
-@mkdir -p $(dir $@)
 @echo "target SharedLib: $(PRIVATE_MODULE) ($@)"
+@mkdir -p $(dir $@)
 $(transform-o-to-shared-lib-inner)
 endef
 
-
 ###########################################################
 ## Commands for filtering a target executable or library
 ###########################################################
@@ -1515,15 +1673,15 @@
 endif
 
 define transform-to-stripped
-@mkdir -p $(dir $@)
 @echo "target Strip: $(PRIVATE_MODULE) ($@)"
+@mkdir -p $(dir $@)
 $(hide) $(PRIVATE_STRIP) --strip-all $< -o $@ \
   $(if $(PRIVATE_NO_DEBUGLINK),,$(TARGET_STRIP_EXTRA))
 endef
 
 define transform-to-stripped-keep-symbols
-@mkdir -p $(dir $@)
 @echo "target Strip (keep symbols): $(PRIVATE_MODULE) ($@)"
+@mkdir -p $(dir $@)
 $(hide) $(PRIVATE_OBJCOPY) \
     `$(PRIVATE_READELF) -S $< | awk '/.debug_/ {print "-R " $$2}' | xargs` \
     $(TARGET_STRIP_KEEP_SYMBOLS_EXTRA) $< $@
@@ -1534,8 +1692,8 @@
 ###########################################################
 
 define pack-elf-relocations
-$(copy-file-to-target)
 @echo "target Pack Relocations: $(PRIVATE_MODULE) ($@)"
+$(copy-file-to-target)
 $(hide) $(RELOCATION_PACKER) $@
 endef
 
@@ -1546,12 +1704,12 @@
 define transform-o-to-executable-inner
 $(hide) $(PRIVATE_CXX) -pie \
 	-nostdlib -Bdynamic \
-	-Wl,-dynamic-linker,$($(PRIVATE_2ND_ARCH_VAR_PREFIX)TARGET_LINKER) \
+	-Wl,-dynamic-linker,$(PRIVATE_LINKER) \
 	-Wl,--gc-sections \
 	-Wl,-z,nocopyreloc \
 	$(PRIVATE_TARGET_GLOBAL_LD_DIRS) \
 	-Wl,-rpath-link=$(PRIVATE_TARGET_OUT_INTERMEDIATE_LIBRARIES) \
-	$(if $(filter true,$(PRIVATE_NO_CRT)),,$(PRIVATE_TARGET_CRTBEGIN_DYNAMIC_O)) \
+	$(PRIVATE_TARGET_CRTBEGIN_DYNAMIC_O) \
 	$(PRIVATE_ALL_OBJECTS) \
 	-Wl,--whole-archive \
 	$(call normalize-target-libraries,$(PRIVATE_ALL_WHOLE_STATIC_LIBRARIES)) \
@@ -1559,21 +1717,20 @@
 	$(if $(PRIVATE_GROUP_STATIC_LIBRARIES),-Wl$(comma)--start-group) \
 	$(call normalize-target-libraries,$(PRIVATE_ALL_STATIC_LIBRARIES)) \
 	$(if $(PRIVATE_GROUP_STATIC_LIBRARIES),-Wl$(comma)--end-group) \
-	$(if $(filter true,$(NATIVE_COVERAGE)),$(PRIVATE_TARGET_LIBGCOV)) \
-	$(if $(filter true,$(NATIVE_COVERAGE)),$(PRIVATE_TARGET_LIBPROFILE_RT)) \
+	$(if $(filter true,$(NATIVE_COVERAGE)),$(PRIVATE_TARGET_COVERAGE_LIB)) \
 	$(PRIVATE_TARGET_LIBATOMIC) \
 	$(PRIVATE_TARGET_LIBGCC) \
 	$(call normalize-target-libraries,$(PRIVATE_ALL_SHARED_LIBRARIES)) \
 	-o $@ \
 	$(PRIVATE_TARGET_GLOBAL_LDFLAGS) \
 	$(PRIVATE_LDFLAGS) \
-	$(if $(filter true,$(PRIVATE_NO_CRT)),,$(PRIVATE_TARGET_CRTEND_O)) \
+	$(PRIVATE_TARGET_CRTEND_O) \
 	$(PRIVATE_LDLIBS)
 endef
 
 define transform-o-to-executable
-@mkdir -p $(dir $@)
 @echo "target Executable: $(PRIVATE_MODULE) ($@)"
+@mkdir -p $(dir $@)
 $(transform-o-to-executable-inner)
 endef
 
@@ -1595,7 +1752,7 @@
 	-Wl,--gc-sections \
 	-o $@ \
 	$(PRIVATE_TARGET_GLOBAL_LD_DIRS) \
-	$(if $(filter true,$(PRIVATE_NO_CRT)),,$(PRIVATE_TARGET_CRTBEGIN_STATIC_O)) \
+	$(PRIVATE_TARGET_CRTBEGIN_STATIC_O) \
 	$(PRIVATE_TARGET_GLOBAL_LDFLAGS) \
 	$(PRIVATE_LDFLAGS) \
 	$(PRIVATE_ALL_OBJECTS) \
@@ -1606,18 +1763,17 @@
 	-Wl,--start-group \
 	$(call normalize-target-libraries,$(filter %libc.a,$(PRIVATE_ALL_STATIC_LIBRARIES))) \
 	$(call normalize-target-libraries,$(filter %libc_nomalloc.a,$(PRIVATE_ALL_STATIC_LIBRARIES))) \
-	$(if $(filter true,$(NATIVE_COVERAGE)),$(PRIVATE_TARGET_LIBGCOV)) \
-	$(if $(filter true,$(NATIVE_COVERAGE)),$(PRIVATE_TARGET_LIBPROFILE_RT)) \
+	$(if $(filter true,$(NATIVE_COVERAGE)),$(PRIVATE_TARGET_COVERAGE_LIB)) \
 	$(PRIVATE_TARGET_LIBATOMIC) \
 	$(call normalize-target-libraries,$(filter %libcompiler_rt.a,$(PRIVATE_ALL_STATIC_LIBRARIES))) \
 	$(PRIVATE_TARGET_LIBGCC) \
 	-Wl,--end-group \
-	$(if $(filter true,$(PRIVATE_NO_CRT)),,$(PRIVATE_TARGET_CRTEND_O))
+	$(PRIVATE_TARGET_CRTEND_O)
 endef
 
 define transform-o-to-static-executable
-@mkdir -p $(dir $@)
 @echo "target StaticExecutable: $(PRIVATE_MODULE) ($@)"
+@mkdir -p $(dir $@)
 $(transform-o-to-static-executable-inner)
 endef
 
@@ -1630,8 +1786,8 @@
 else
 HOST_FPIE_FLAGS := -pie
 # Force the correct entry point to work around a bug in binutils that manifests with -pie
-ifeq ($(HOST_OS),windows)
-HOST_FPIE_FLAGS += -Wl,-e_mainCRTStartup
+ifeq ($(HOST_CROSS_OS),windows)
+HOST_CROSS_FPIE_FLAGS += -Wl,-e_mainCRTStartup
 endif
 endif
 
@@ -1648,10 +1804,10 @@
 	$(if $(filter true,$(NATIVE_COVERAGE)),-lgcov) \
 	$(if $(filter true,$(NATIVE_COVERAGE)),$(PRIVATE_HOST_LIBPROFILE_RT)) \
 	$(call normalize-host-libraries,$(PRIVATE_ALL_SHARED_LIBRARIES)) \
-	-Wl,-rpath-link=$($(PRIVATE_2ND_ARCH_VAR_PREFIX)HOST_OUT_INTERMEDIATE_LIBRARIES) \
-	-Wl,-rpath,\$$ORIGIN/../$(notdir $($(PRIVATE_2ND_ARCH_VAR_PREFIX)HOST_OUT_SHARED_LIBRARIES)) \
-	-Wl,-rpath,\$$ORIGIN/$(notdir $($(PRIVATE_2ND_ARCH_VAR_PREFIX)HOST_OUT_SHARED_LIBRARIES)) \
-	$($(PRIVATE_2ND_ARCH_VAR_PREFIX)HOST_GLOBAL_LD_DIRS) \
+	-Wl,-rpath-link=$($(PRIVATE_2ND_ARCH_VAR_PREFIX)$(PRIVATE_PREFIX)OUT_INTERMEDIATE_LIBRARIES) \
+	-Wl,-rpath,\$$ORIGIN/../$(notdir $($(PRIVATE_2ND_ARCH_VAR_PREFIX)$(PRIVATE_PREFIX)OUT_SHARED_LIBRARIES)) \
+	-Wl,-rpath,\$$ORIGIN/$(notdir $($(PRIVATE_2ND_ARCH_VAR_PREFIX)$(PRIVATE_PREFIX)OUT_SHARED_LIBRARIES)) \
+	$($(PRIVATE_2ND_ARCH_VAR_PREFIX)$(PRIVATE_PREFIX)GLOBAL_LD_DIRS) \
 	$(if $(PRIVATE_NO_DEFAULT_COMPILER_FLAGS),, \
 		$(PRIVATE_HOST_GLOBAL_LDFLAGS) \
 	) \
@@ -1662,8 +1818,8 @@
 endif
 
 define transform-host-o-to-executable
+@echo "$($(PRIVATE_PREFIX)DISPLAY) Executable: $(PRIVATE_MODULE) ($@)"
 @mkdir -p $(dir $@)
-@echo "host Executable: $(PRIVATE_MODULE) ($@)"
 $(transform-host-o-to-executable-inner)
 endef
 
@@ -1674,7 +1830,7 @@
 
 # Add BUILD_NUMBER to apps default version name if it's unbundled build.
 ifdef TARGET_BUILD_APPS
-APPS_DEFAULT_VERSION_NAME := $(PLATFORM_VERSION)-$(BUILD_NUMBER)
+APPS_DEFAULT_VERSION_NAME := $(PLATFORM_VERSION)-$(BUILD_NUMBER_FROM_FILE)
 else
 APPS_DEFAULT_VERSION_NAME := $(PLATFORM_VERSION)
 endif
@@ -1711,11 +1867,89 @@
     --skip-symbols-without-default-localization
 endef
 
-ifeq ($(HOST_OS),windows)
-xlint_unchecked :=
-else
+# Search for generated R.java/Manifest.java, copy the found R.java as $@.
+# Also copy them to a central 'R' directory to make it easier to add the files to an IDE.
+define find-generated-R.java
+$(hide) for GENERATED_MANIFEST_FILE in `find $(PRIVATE_SOURCE_INTERMEDIATES_DIR) \
+  -name Manifest.java 2> /dev/null`; do \
+    dir=`awk '/package/{gsub(/\./,"/",$$2);gsub(/;/,"",$$2);print $$2;exit}' $$GENERATED_MANIFEST_FILE`; \
+    mkdir -p $(TARGET_COMMON_OUT_ROOT)/R/$$dir; \
+    $(ACP) -fp $$GENERATED_MANIFEST_FILE $(TARGET_COMMON_OUT_ROOT)/R/$$dir; \
+  done;
+$(hide) for GENERATED_R_FILE in `find $(PRIVATE_SOURCE_INTERMEDIATES_DIR) \
+  -name R.java 2> /dev/null`; do \
+    dir=`awk '/package/{gsub(/\./,"/",$$2);gsub(/;/,"",$$2);print $$2;exit}' $$GENERATED_R_FILE`; \
+    mkdir -p $(TARGET_COMMON_OUT_ROOT)/R/$$dir; \
+    $(ACP) -fp $$GENERATED_R_FILE $(TARGET_COMMON_OUT_ROOT)/R/$$dir \
+      || exit 31; \
+    $(ACP) -fp $$GENERATED_R_FILE $@ || exit 32; \
+  done;
+@# Ensure that the target file is always created, even if we did not
+@# enter the GENERATED_R_FILE loop above. This avoids unnecessary rebuilding.
+$(hide) touch $@
+endef
+
+###########################################################
+# AAPT2 compilation and link
+###########################################################
+define aapt2-compile-one-resource-file
+@mkdir -p $(dir $@)
+$(hide) $(AAPT2) compile -o $(dir $@) $(PRIVATE_AAPT2_CFLAGS) --legacy $<
+endef
+
+define aapt2-compile-resource-dirs
+@mkdir -p $(dir $@)
+$(hide) $(AAPT2) compile -o $@ $(addprefix --dir ,$(PRIVATE_SOURCE_RES_DIRS)) \
+  $(PRIVATE_AAPT2_CFLAGS) --legacy
+endef
+
+# Set up rule to compile one resource file with aapt2.
+# Must be called with $(eval).
+# $(1): the source file
+# $(2): the output file
+define aapt2-compile-one-resource-file-rule
+$(2) : $(1) $(AAPT2)
+	@echo "AAPT2 compile $$@ <- $$<"
+	$$(call aapt2-compile-one-resource-file)
+endef
+
+# Convert input resource file path to output file path.
+# values-[config]/<file>.xml -> values-[config]_<file>.arsc.flat;
+# for other resource files, just replace the last "/" with "_" and
+# add the .flat extension.
+#
+# $(1): the input resource file path
+# $(2): the base dir of the output file path
+# Returns: the compiled output file path
+define aapt2-compiled-resource-out-file
+$(eval _p_w := $(strip $(subst /,$(space),$(dir $(1)))))$(2)/$(subst $(space),/,$(_p_w))_$(if $(filter values%,$(lastword $(_p_w))),$(patsubst %.xml,%.arsc,$(notdir $(1))),$(notdir $(1))).flat
+endef
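To illustrate the path conversion above (an editor's sketch; the file names and the "out" base directory are hypothetical, not taken from this change), the macro would expand as follows:

    $(call aapt2-compiled-resource-out-file,res/values-en/strings.xml,out)
      -> out/res/values-en_strings.arsc.flat
    $(call aapt2-compiled-resource-out-file,res/layout/main.xml,out)
      -> out/res/layout_main.xml.flat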
+
+define aapt2-link
+@mkdir -p $(dir $@)
+$(call dump-words-to-file,$(PRIVATE_RES_FLAT),$(dir $@)aapt2-flat-list)
+$(hide) $(AAPT2) link -o $@ \
+  $(PRIVATE_AAPT_FLAGS) \
+  $(addprefix --manifest ,$(PRIVATE_ANDROID_MANIFEST)) \
+  $(addprefix -I ,$(PRIVATE_AAPT_INCLUDES)) \
+  $(addprefix -I ,$(PRIVATE_SHARED_ANDROID_LIBRARIES)) \
+  $(addprefix --java ,$(PRIVATE_SOURCE_INTERMEDIATES_DIR)) \
+  $(addprefix --proguard ,$(PRIVATE_PROGUARD_OPTIONS_FILE)) \
+  $(addprefix --min-sdk-version ,$(PRIVATE_DEFAULT_APP_TARGET_SDK)) \
+  $(addprefix --target-sdk-version ,$(PRIVATE_DEFAULT_APP_TARGET_SDK)) \
+  $(if $(filter --product,$(PRIVATE_AAPT_FLAGS)),,$(addprefix --product ,$(PRIVATE_TARGET_AAPT_CHARACTERISTICS))) \
+  $(addprefix -c ,$(PRIVATE_PRODUCT_AAPT_CONFIG)) \
+  $(addprefix --preferred-density ,$(PRIVATE_PRODUCT_AAPT_PREF_CONFIG)) \
+  $(if $(filter --version-code,$(PRIVATE_AAPT_FLAGS)),,--version-code $(PLATFORM_SDK_VERSION)) \
+  $(if $(filter --version-name,$(PRIVATE_AAPT_FLAGS)),,--version-name $(APPS_DEFAULT_VERSION_NAME)) \
+  $(addprefix --rename-manifest-package ,$(PRIVATE_MANIFEST_PACKAGE_NAME)) \
+  $(addprefix --rename-instrumentation-target-package ,$(PRIVATE_MANIFEST_INSTRUMENTATION_FOR)) \
+  $(addprefix -R , $(PRIVATE_OVERLAY_FLAT)) \
+  \@$(dir $@)aapt2-flat-list
+endef
+
+###########################################################
 xlint_unchecked := -Xlint:unchecked
-endif
 
 # emit-line, <word list>, <output file>
 define emit-line
@@ -1725,36 +1959,40 @@
 # dump-words-to-file, <word list>, <output file>
 define dump-words-to-file
         @rm -f $(2)
-        @$(call emit-line,$(wordlist 1,200,$(1)),$(2))
-        @$(call emit-line,$(wordlist 201,400,$(1)),$(2))
-        @$(call emit-line,$(wordlist 401,600,$(1)),$(2))
-        @$(call emit-line,$(wordlist 601,800,$(1)),$(2))
-        @$(call emit-line,$(wordlist 801,1000,$(1)),$(2))
-        @$(call emit-line,$(wordlist 1001,1200,$(1)),$(2))
-        @$(call emit-line,$(wordlist 1201,1400,$(1)),$(2))
-        @$(call emit-line,$(wordlist 1401,1600,$(1)),$(2))
-        @$(call emit-line,$(wordlist 1601,1800,$(1)),$(2))
-        @$(call emit-line,$(wordlist 1801,2000,$(1)),$(2))
-        @$(call emit-line,$(wordlist 2001,2200,$(1)),$(2))
-        @$(call emit-line,$(wordlist 2201,2400,$(1)),$(2))
-        @$(call emit-line,$(wordlist 2401,2600,$(1)),$(2))
-        @$(call emit-line,$(wordlist 2601,2800,$(1)),$(2))
-        @$(call emit-line,$(wordlist 2801,3000,$(1)),$(2))
-        @$(call emit-line,$(wordlist 3001,3200,$(1)),$(2))
-        @$(call emit-line,$(wordlist 3201,3400,$(1)),$(2))
-        @$(call emit-line,$(wordlist 3401,3600,$(1)),$(2))
-        @$(call emit-line,$(wordlist 3601,3800,$(1)),$(2))
-        @$(call emit-line,$(wordlist 3801,4000,$(1)),$(2))
-        @$(call emit-line,$(wordlist 4001,4200,$(1)),$(2))
-        @$(call emit-line,$(wordlist 4201,4400,$(1)),$(2))
-        @$(call emit-line,$(wordlist 4401,4600,$(1)),$(2))
-        @$(call emit-line,$(wordlist 4601,4800,$(1)),$(2))
-        @$(call emit-line,$(wordlist 4801,5000,$(1)),$(2))
-        @$(if $(wordlist 5001,5002,$(1)),$(error Too many words ($(words $(1)))))
+        @touch $(2)
+        @$(call emit-line,$(wordlist 1,500,$(1)),$(2))
+        @$(call emit-line,$(wordlist 501,1000,$(1)),$(2))
+        @$(call emit-line,$(wordlist 1001,1500,$(1)),$(2))
+        @$(call emit-line,$(wordlist 1501,2000,$(1)),$(2))
+        @$(call emit-line,$(wordlist 2001,2500,$(1)),$(2))
+        @$(call emit-line,$(wordlist 2501,3000,$(1)),$(2))
+        @$(call emit-line,$(wordlist 3001,3500,$(1)),$(2))
+        @$(call emit-line,$(wordlist 3501,4000,$(1)),$(2))
+        @$(call emit-line,$(wordlist 4001,4500,$(1)),$(2))
+        @$(call emit-line,$(wordlist 4501,5000,$(1)),$(2))
+        @$(call emit-line,$(wordlist 5001,5500,$(1)),$(2))
+        @$(call emit-line,$(wordlist 5501,6000,$(1)),$(2))
+        @$(call emit-line,$(wordlist 6001,6500,$(1)),$(2))
+        @$(call emit-line,$(wordlist 6501,7000,$(1)),$(2))
+        @$(call emit-line,$(wordlist 7001,7500,$(1)),$(2))
+        @$(call emit-line,$(wordlist 7501,8000,$(1)),$(2))
+        @$(call emit-line,$(wordlist 8001,8500,$(1)),$(2))
+        @$(call emit-line,$(wordlist 8501,9000,$(1)),$(2))
+        @$(call emit-line,$(wordlist 9001,9500,$(1)),$(2))
+        @$(call emit-line,$(wordlist 9501,10000,$(1)),$(2))
+        @$(call emit-line,$(wordlist 10001,10500,$(1)),$(2))
+        @$(call emit-line,$(wordlist 10501,11000,$(1)),$(2))
+        @$(call emit-line,$(wordlist 11001,11500,$(1)),$(2))
+        @$(call emit-line,$(wordlist 11501,12000,$(1)),$(2))
+        @$(call emit-line,$(wordlist 12001,12500,$(1)),$(2))
+        @$(call emit-line,$(wordlist 12501,13000,$(1)),$(2))
+        @$(call emit-line,$(wordlist 13001,13500,$(1)),$(2))
+        @$(if $(wordlist 13501,13502,$(1)),$(error Too many words ($(words $(1)))))
 endef
 
 # For a list of jar files, unzip them to a specified directory,
-# but make sure that no META-INF files come along for the ride.
+# but make sure that no META-INF files come along for the ride,
+# unless PRIVATE_DONT_DELETE_JAR_META_INF is set.
 #
 # $(1): files to unzip
 # $(2): destination directory
@@ -1766,8 +2004,14 @@
       exit 1; \
     fi; \
     unzip -qo $$f -d $(2); \
-  done \
-  $(if $(PRIVATE_DONT_DELETE_JAR_META_INF),,;rm -rf $(2)/META-INF)
+  done
+  $(if $(PRIVATE_DONT_DELETE_JAR_META_INF),,$(hide) rm -rf $(2)/META-INF)
+endef
+
+# Call jack
+#
+define call-jack
+ JACK_VERSION=$(PRIVATE_JACK_VERSION) $(JACK) $(DEFAULT_JACK_EXTRA_ARGS)
 endef
 
 # Common definition to invoke javac on the host and target.
@@ -1788,13 +2032,12 @@
 $(call unzip-jar-files,$(PRIVATE_STATIC_JAVA_LIBRARIES),$(PRIVATE_CLASS_INTERMEDIATES_DIR))
 $(call dump-words-to-file,$(PRIVATE_JAVA_SOURCES),$(PRIVATE_CLASS_INTERMEDIATES_DIR)/java-source-list)
 $(hide) if [ -d "$(PRIVATE_SOURCE_INTERMEDIATES_DIR)" ]; then \
-          find $(PRIVATE_SOURCE_INTERMEDIATES_DIR) -name '*.java' >> $(PRIVATE_CLASS_INTERMEDIATES_DIR)/java-source-list; \
+          find $(PRIVATE_SOURCE_INTERMEDIATES_DIR) -name '*.java' -and -not -name '.*' >> $(PRIVATE_CLASS_INTERMEDIATES_DIR)/java-source-list; \
 fi
 $(hide) tr ' ' '\n' < $(PRIVATE_CLASS_INTERMEDIATES_DIR)/java-source-list \
-    | sort -u > $(PRIVATE_CLASS_INTERMEDIATES_DIR)/java-source-list-uniq
+    | $(NORMALIZE_PATH) | sort -u > $(PRIVATE_CLASS_INTERMEDIATES_DIR)/java-source-list-uniq
 $(hide) if [ -s $(PRIVATE_CLASS_INTERMEDIATES_DIR)/java-source-list-uniq ] ; then \
     $(1) -encoding UTF-8 \
-    $(strip $(PRIVATE_JAVAC_DEBUG_FLAGS)) \
     $(if $(findstring true,$(PRIVATE_WARNINGS_ENABLE)),$(xlint_unchecked),) \
     $(2) \
     $(addprefix -classpath ,$(strip \
@@ -1823,7 +2066,7 @@
         $(PRIVATE_CLASS_INTERMEDIATES_DIR)/$(subst .,/,$(pkg))))
 $(if $(PRIVATE_RMTYPEDEFS), $(hide) $(RMTYPEDEFS) -v $(PRIVATE_CLASS_INTERMEDIATES_DIR))
 $(if $(PRIVATE_JAR_MANIFEST), \
-    $(hide) sed -e 's/%BUILD_NUMBER%/$(BUILD_NUMBER)/' \
+    $(hide) sed -e "s/%BUILD_NUMBER%/$(BUILD_NUMBER_FROM_FILE)/" \
             $(PRIVATE_JAR_MANIFEST) > $(dir $@)/manifest.mf && \
         jar -cfm $@ $(dir $@)/manifest.mf \
             -C $(PRIVATE_CLASS_INTERMEDIATES_DIR) ., \
@@ -1855,7 +2098,7 @@
           find $(PRIVATE_SOURCE_INTERMEDIATES_DIR) -name '*.java' >> $(PRIVATE_JACK_INTERMEDIATES_DIR)/java-source-list; \
 fi
 $(hide) tr ' ' '\n' < $(PRIVATE_JACK_INTERMEDIATES_DIR)/java-source-list \
-    | sort -u > $(PRIVATE_JACK_INTERMEDIATES_DIR)/java-source-list-uniq
+    | $(NORMALIZE_PATH) | sort -u > $(PRIVATE_JACK_INTERMEDIATES_DIR)/java-source-list-uniq
 $(if $(PRIVATE_JACK_PROGUARD_FLAGS), \
     $(hide) echo -basedirectory $(CURDIR) > $@.flags; \
     echo $(PRIVATE_JACK_PROGUARD_FLAGS) >> $@.flags; \
@@ -1864,16 +2107,16 @@
     $(hide) mkdir -p $@.res.tmp
     $(hide) $(call create-empty-package-at,$@.res.tmp.zip)
     $(hide) $(call add-java-resources-to,$@.res.tmp.zip)
-    $(hide) $(call unzip-jar-files,$@.res.tmp.zip,$@.res.tmp)
+    $(hide) unzip -qo $@.res.tmp.zip -d $@.res.tmp
     $(hide) rm $@.res.tmp.zip)
 $(hide) if [ -s $(PRIVATE_JACK_INTERMEDIATES_DIR)/java-source-list-uniq ] ; then \
     export tmpEcjArg="@$(PRIVATE_JACK_INTERMEDIATES_DIR)/java-source-list-uniq"; \
 else \
     export tmpEcjArg=""; \
 fi; \
-$(call call-jack,$(PRIVATE_JACK_VM_ARGS),$(PRIVATE_JACK_EXTRA_ARGS)) \
+$(call call-jack) \
     $(strip $(PRIVATE_JACK_FLAGS)) \
-    $(strip $(PRIVATE_JACK_DEBUG_FLAGS)) \
+    $(strip $(PRIVATE_JACK_COVERAGE_OPTIONS)) \
     $(if $(NO_OPTIMIZE_DX), \
         -D jack.dex.optimize="false") \
     $(if $(PRIVATE_RMTYPEDEFS), \
@@ -1882,16 +2125,16 @@
         $(call normalize-path-list,$(PRIVATE_BOOTCLASSPATH_JAVA_LIBRARIES) $(PRIVATE_ALL_JACK_LIBRARIES)))) \
     $(addprefix --import ,$(call reverse-list,$(PRIVATE_STATIC_JACK_LIBRARIES))) \
     $(if $(PRIVATE_EXTRA_JAR_ARGS),--import-resource $@.res.tmp) \
+    -D jack.android.min-api-level=$(PRIVATE_JACK_MIN_SDK_VERSION) \
     -D jack.import.resource.policy=keep-first \
     -D jack.import.type.policy=keep-first \
     --output-jack $(PRIVATE_CLASSES_JACK) \
-    -D jack.java.source.version=1.7 \
     $(if $(PRIVATE_JACK_INCREMENTAL_DIR),--incremental-folder $(PRIVATE_JACK_INCREMENTAL_DIR)) \
     --output-dex $(PRIVATE_JACK_INTERMEDIATES_DIR) \
     $(addprefix --config-jarjar ,$(strip $(PRIVATE_JARJAR_RULES))) \
     $(if $(PRIVATE_JACK_PROGUARD_FLAGS),--config-proguard $@.flags) \
     $$tmpEcjArg \
-    || ( rm -rf $(PRIVATE_CLASSES_JACK); rm -rf $(PRIVATE_JACK_INTERMEDIATES_DIR); exit 41 )
+    || ( rm -rf $(PRIVATE_CLASSES_JACK); exit 41 )
 $(hide) mv $(PRIVATE_JACK_INTERMEDIATES_DIR)/classes*.dex $(dir $@)
 $(hide) rm -f $(PRIVATE_JACK_INTERMEDIATES_DIR)/java-source-list
 $(if $(PRIVATE_EXTRA_JAR_ARGS),$(hide) rm -rf $@.res.tmp)
@@ -1901,22 +2144,120 @@
 $(if $(PRIVATE_JAR_MANIFEST), $(hide) echo unsupported options JAR_MANIFEST in $@; exit 53)
 endef
 
+# Invoke Jack to compile java source just to check it compiles correctly.
+#
+# Some historical notes:
+# - below we write the list of java files to java-source-list to avoid argument
+#   list length problems with Cygwin
+# - we filter out duplicate java file names because Jack doesn't like them.
+define jack-check-java
+$(hide) rm -f $@
+$(hide) rm -f $@.java-source-list
+$(hide) rm -f $@.java-source-list-uniq
+$(hide) mkdir -p $(dir $@)
+$(if $(PRIVATE_JACK_INCREMENTAL_DIR),$(hide) mkdir -p $(PRIVATE_JACK_INCREMENTAL_DIR))
+$(call dump-words-to-file,$(PRIVATE_JAVA_SOURCES),$@.java-source-list)
+$(hide) if [ -d "$(PRIVATE_SOURCE_INTERMEDIATES_DIR)" ]; then \
+          find $(PRIVATE_SOURCE_INTERMEDIATES_DIR) -name '*.java' >> $@.java-source-list; \
+fi
+$(hide) tr ' ' '\n' < $@.java-source-list \
+    | sort -u > $@.java-source-list-uniq
+$(hide) if [ -s $@.java-source-list-uniq ] ; then \
+	$(call call-jack,$(PRIVATE_JACK_EXTRA_ARGS)) \
+	    $(strip $(PRIVATE_JACK_FLAGS)) \
+	    $(strip $(PRIVATE_JACK_DEBUG_FLAGS)) \
+	    $(addprefix --classpath ,$(strip \
+	        $(call normalize-path-list,$(call reverse-list,$(PRIVATE_STATIC_JACK_LIBRARIES)) $(PRIVATE_BOOTCLASSPATH_JAVA_LIBRARIES) $(PRIVATE_ALL_JACK_LIBRARIES)))) \
+	    -D jack.import.resource.policy=keep-first \
+	    -D jack.android.min-api-level=$(PRIVATE_JACK_MIN_SDK_VERSION) \
+	    -D jack.import.type.policy=keep-first \
+	    $(if $(PRIVATE_JACK_INCREMENTAL_DIR),--incremental-folder $(PRIVATE_JACK_INCREMENTAL_DIR)) \
+	    @$@.java-source-list-uniq; \
+fi
+touch $@
+endef
+
 define transform-jar-to-jack
 	$(hide) mkdir -p $(dir $@)
-	$(JILL) $(PRIVATE_JILL_FLAGS) --output $@.tmpjill.jack $<
 	$(hide) mkdir -p $@.tmpjill.res
-	$(hide) $(call unzip-jar-files,$<,$@.tmpjill.res)
+	$(hide) unzip -qo $< -d $@.tmpjill.res
 	$(hide) find $@.tmpjill.res -iname "*.class" -delete
-	$(hide) $(call call-jack,$(PRIVATE_JACK_VM_ARGS),$(PRIVATE_JACK_EXTRA_ARGS)) \
+	$(hide) $(call call-jack) \
+	    $(PRIVATE_JACK_FLAGS) \
         -D jack.import.resource.policy=keep-first \
         -D jack.import.type.policy=keep-first \
-	    --import $@.tmpjill.jack \
+        -D jack.android.min-api-level=$(PRIVATE_JACK_MIN_SDK_VERSION) \
+	    --import $< \
 	    --import-resource $@.tmpjill.res \
 	    --output-jack $@
 	$(hide) rm -rf $@.tmpjill.res
-	$(hide) rm $@.tmpjill.jack
 endef
 
+# Moves $1.tmp to $1 if necessary. This is designed to be used with
+# .KATI_RESTAT. For kati, this function doesn't update the timestamp
+# of $1 when $1.tmp is identical to $1 so that ninja won't rebuild
+# targets which depend on $1. For GNU make, this function simply
+# renames $1.tmp to $1.
+ifeq ($(BUILDING_WITH_NINJA),true)
+define commit-change-for-toc
+$(hide) if cmp -s $1.tmp $1 ; then \
+ rm $1.tmp ; \
+else \
+ mv $1.tmp $1 ; \
+fi
+endef
+else
+define commit-change-for-toc
+@# make doesn't support restat. We always update .toc files so the dependents will always be updated too.
+$(hide) mv $1.tmp $1
+endef
+endif
+
+## Rule to create a table of contents from a .jar file.
+## Must be called with $(eval).
+# $(1): A .jar file
+define _transform-jar-to-toc
+$1.toc: $1 | $(IJAR)
+	@echo Generating TOC: $$@
+	$(hide) $(IJAR) $$< $$@.tmp
+	$$(call commit-change-for-toc,$$@)
+endef
+
+## Define a rule which generates .jar.toc and mark it as .KATI_RESTAT.
+# $(1): A .jar file
+define define-jar-to-toc-rule
+$(eval $(call _transform-jar-to-toc,$1))\
+$(eval .KATI_RESTAT: $1.toc)
+endef
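A hedged usage sketch for the helper above (the jar path is hypothetical, not taken from this change): a makefile that wants restat-friendly dependencies on a jar could write

    $(call define-jar-to-toc-rule,$(intermediates)/my_library.jar)

and have dependents depend on $(intermediates)/my_library.jar.toc rather than the jar itself, so they only rebuild when the jar's interface summary actually changes.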
+
+ifeq (,$(TARGET_BUILD_APPS))
+
+## Rule to create a table of contents from a .dex file.
+## Must be called with $(eval).
+# $(1): The directory which contains classes*.dex files
+define _transform-dex-to-toc
+$1/classes.dex.toc: PRIVATE_INPUT_DEX_FILES := $1/classes*.dex
+$1/classes.dex.toc: $1/classes.dex $(DEXDUMP)
+	@echo Generating TOC: $$@
+	$(hide) $(DEXDUMP) -l xml $$(PRIVATE_INPUT_DEX_FILES) > $$@.tmp
+	$$(call commit-change-for-toc,$$@)
+endef
+
+## Define a rule which generates .dex.toc and mark it as .KATI_RESTAT.
+# $(1): The directory which contains classes*.dex files
+define define-dex-to-toc-rule
+$(eval $(call _transform-dex-to-toc,$1))\
+$(eval .KATI_RESTAT: $1/classes.dex.toc)
+endef
+
+else
+
+# Turn off .toc optimization for apps builds, as we cannot build dexdump there.
+define define-dex-to-toc-rule
+endef
+
+endif  # TARGET_BUILD_APPS
+
 
 # Invoke Jack to compile java from source to jack files without shrink or obfuscation.
 #
@@ -1935,7 +2276,7 @@
           find $(PRIVATE_SOURCE_INTERMEDIATES_DIR) -name '*.java' >> $(PRIVATE_JACK_INTERMEDIATES_DIR)/java-source-list; \
 fi
 $(hide) tr ' ' '\n' < $(PRIVATE_JACK_INTERMEDIATES_DIR)/java-source-list \
-    | sort -u > $(PRIVATE_JACK_INTERMEDIATES_DIR)/java-source-list-uniq
+    | $(NORMALIZE_PATH) | sort -u > $(PRIVATE_JACK_INTERMEDIATES_DIR)/java-source-list-uniq
 $(if $(PRIVATE_JACK_PROGUARD_FLAGS), \
     $(hide) echo -basedirectory $(CURDIR) > $@.flags; \
     echo $(PRIVATE_JACK_PROGUARD_FLAGS) >> $@.flags; \
@@ -1951,9 +2292,8 @@
 else \
     export tmpEcjArg=""; \
 fi; \
-$(call call-jack,$(PRIVATE_JACK_VM_ARGS),$(PRIVATE_JACK_EXTRA_ARGS)) \
+$(call call-jack) \
     $(strip $(PRIVATE_JACK_FLAGS)) \
-    $(strip $(PRIVATE_JACK_DEBUG_FLAGS)) \
     $(if $(NO_OPTIMIZE_DX), \
         -D jack.dex.optimize="false") \
     $(addprefix --classpath ,$(strip \
@@ -1962,7 +2302,7 @@
     $(if $(PRIVATE_EXTRA_JAR_ARGS),--import-resource $@.res.tmp) \
     -D jack.import.resource.policy=keep-first \
     -D jack.import.type.policy=keep-first \
-    -D jack.java.source.version=1.7 \
+    -D jack.android.min-api-level=$(PRIVATE_JACK_MIN_SDK_VERSION) \
     $(if $(PRIVATE_JACK_INCREMENTAL_DIR),--incremental-folder $(PRIVATE_JACK_INCREMENTAL_DIR)) \
     --output-jack $@ \
     $(addprefix --config-jarjar ,$(strip $(PRIVATE_JARJAR_RULES))) \
@@ -1983,26 +2323,6 @@
     $(addprefix -ix , $(PRIVATE_EMMA_COVERAGE_FILTER))
 endef
 
-#TODO: use a smaller -Xmx value for most libraries;
-#      only core.jar and framework.jar need a heap this big.
-# Avoid the memory arguments on Windows, dx fails to load for some reason with them.
-define transform-classes.jar-to-dex
-@echo "target Dex: $(PRIVATE_MODULE)"
-@mkdir -p $(dir $@)
-$(hide) rm -f $(dir $@)classes*.dex
-$(hide) $(DX) \
-    $(if $(findstring windows,$(HOST_OS)),,-JXms16M -JXmx2048M) \
-    --dex --output=$(dir $@) \
-    $(if $(NO_OPTIMIZE_DX), \
-        --no-optimize) \
-    $(if $(GENERATE_DEX_DEBUG), \
-	    --debug --verbose \
-	    --dump-to=$(@:.dex=.lst) \
-	    --dump-width=1000) \
-    $(PRIVATE_DX_FLAGS) \
-    $<
-endef
-
 # Create a mostly-empty .jar file that we'll add to later.
 # The MacOS jar tool doesn't like creating empty jar files,
 # so we need to give it something.
@@ -2052,7 +2372,7 @@
     $(addprefix -I , $(PRIVATE_AAPT_INCLUDES)) \
     $(addprefix --min-sdk-version , $(PRIVATE_DEFAULT_APP_TARGET_SDK)) \
     $(addprefix --target-sdk-version , $(PRIVATE_DEFAULT_APP_TARGET_SDK)) \
-    $(if $(filter --product,$(PRIVATE_AAPT_FLAGS)),,$(addprefix --product , $(TARGET_AAPT_CHARACTERISTICS))) \
+    $(if $(filter --product,$(PRIVATE_AAPT_FLAGS)),,$(addprefix --product , $(PRIVATE_TARGET_AAPT_CHARACTERISTICS))) \
     $(if $(filter --version-code,$(PRIVATE_AAPT_FLAGS)),,--version-code $(PLATFORM_SDK_VERSION)) \
     $(if $(filter --version-name,$(PRIVATE_AAPT_FLAGS)),,--version-name $(APPS_DEFAULT_VERSION_NAME)) \
     $(addprefix --rename-manifest-package , $(PRIVATE_MANIFEST_PACKAGE_NAME)) \
@@ -2085,13 +2405,18 @@
 $(foreach abi,$(PRIVATE_JNI_SHARED_LIBRARIES_ABI),\
   $(call _add-jni-shared-libs-to-package-per-abi,$(abi),\
     $(patsubst $(abi):%,%,$(filter $(abi):%,$(PRIVATE_JNI_SHARED_LIBRARIES)))))
-$(hide) (cd $(dir $@) && zip -r $(JNI_COMPRESS_FLAGS) $(notdir $@) lib)
+$(hide) (cd $(dir $@) && zip -qrX $(JNI_COMPRESS_FLAGS) $(notdir $@) lib)
 $(hide) rm -rf $(dir $@)lib
 endef
 
 #TODO: update the manifest to point to the dex file
 define add-dex-to-package
-$(hide) zip -qj $@ $(dir $(PRIVATE_DEX_FILE))classes*.dex
+$(call add-dex-to-package-arg,$@)
+endef
+
+# $(1): the package file.
+define add-dex-to-package-arg
+$(hide) find $(dir $(PRIVATE_DEX_FILE)) -maxdepth 1 -name "classes*.dex" | sort | xargs zip -qjX $(1)
 endef
 
 # Add java resources added by the current module.
@@ -2107,37 +2432,68 @@
 #
 define add-carried-jack-resources
  $(hide) if [ -d $(PRIVATE_JACK_INTERMEDIATES_DIR) ] ; then \
-    jack_res_jar_flags=$$(find $(PRIVATE_JACK_INTERMEDIATES_DIR) -type f \
-        | sed -e "s?^$(PRIVATE_JACK_INTERMEDIATES_DIR)/? -C $(PRIVATE_JACK_INTERMEDIATES_DIR) ?"); \
-    if [ -n "$$jack_res_jar_flags" ] ; then \
-        echo $$jack_res_jar_flags >$(dir $@)jack_res_jar_flags; \
-        jar uf $@ $$jack_res_jar_flags; \
+    find $(PRIVATE_JACK_INTERMEDIATES_DIR) -type f | sort \
+        | sed -e "s?^$(PRIVATE_JACK_INTERMEDIATES_DIR)/? -C \"$(PRIVATE_JACK_INTERMEDIATES_DIR)\" \"?" -e "s/$$/\"/" \
+        > $(dir $@)jack_res_jar_flags; \
+    if [ -s $(dir $@)jack_res_jar_flags ] ; then \
+        jar uf $@ @$(dir $@)jack_res_jar_flags; \
     fi; \
 fi
 endef
 
+# Returns the minSdkVersion of the specified APK as a decimal number. If the
+# version is a codename, returns the current platform SDK version (always a
+# decimal number) instead. If the APK does not specify a minSdkVersion, returns
+# 0 to match how the Android platform interprets this situation at runtime.
+#
+# This currently substitutes any version which contains characters other than
+# digits with the current platform's API Level number. This is because I
+# couldn't figure out an easy way to perform the substitution only for the
+# version codes listed in PLATFORM_VERSION_ALL_CODENAMES.
+define get-package-min-sdk-version-int
+$$(($(AAPT) dump badging $(1) 2>&1 | grep '^sdkVersion' || echo "sdkVersion:'0'") \
+    | cut -d"'" -f2 | \
+    sed -e s/^.*[^0-9].*$$/$(PLATFORM_SDK_VERSION)/)
+endef
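To spell out the shell pipeline above with hypothetical badging output (values are illustrative only):

    sdkVersion:'21'    -> 21
    sdkVersion:'N'     -> $(PLATFORM_SDK_VERSION)   (codename, non-numeric)
    (no sdkVersion)    -> 0   (via the || echo fallback)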
+
 # Sign a package using the specified key/cert.
 #
 define sign-package
-$(hide) mv $@ $@.unsigned
-$(hide) java -jar $(SIGNAPK_JAR) \
+$(call sign-package-arg,$@)
+endef
+
+# $(1): the package file we are signing.
+define sign-package-arg
+$(hide) mv $(1) $(1).unsigned
+$(hide) java -Djava.library.path=$(SIGNAPK_JNI_LIBRARY_PATH) -jar $(SIGNAPK_JAR) \
+    --min-sdk-version $(call get-package-min-sdk-version-int,$@.unsigned) \
     $(PRIVATE_CERTIFICATE) $(PRIVATE_PRIVATE_KEY) \
-    $(PRIVATE_ADDITIONAL_CERTIFICATES) $@.unsigned $@.signed
-$(hide) mv $@.signed $@
+    $(PRIVATE_ADDITIONAL_CERTIFICATES) $(1).unsigned $(1).signed
+$(hide) mv $(1).signed $(1)
 endef
 
 # Align STORED entries of a package on 4-byte boundaries to make them easier to mmap.
 #
 define align-package
-$(hide) mv $@ $@.unaligned
-$(hide) $(ZIPALIGN) \
+$(hide) if ! $(ZIPALIGN) -c $(ZIPALIGN_PAGE_ALIGN_FLAGS) 4 $@ >/dev/null ; then \
+  mv $@ $@.unaligned; \
+  $(ZIPALIGN) \
     -f \
     $(ZIPALIGN_PAGE_ALIGN_FLAGS) \
     4 \
-    $@.unaligned $@.aligned
-$(hide) mv $@.aligned $@
+    $@.unaligned $@.aligned; \
+  mv $@.aligned $@; \
+  fi
 endef
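The new guard only re-runs zipalign when the package is not already aligned; a minimal stand-alone sketch of the same check in plain shell (the apk name is hypothetical):

    if ! zipalign -c 4 foo.apk >/dev/null; then
      zipalign -f 4 foo.apk foo.apk.aligned && mv foo.apk.aligned foo.apk
    fi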
 
+# Remove dynamic timestamps from packages
+#
+ifndef TARGET_BUILD_APPS
+define remove-timestamps-from-package
+$(hide) $(ZIPTIME) $@
+endef
+endif
+
 # Uncompress shared libraries embedded in an apk.
 #
 define uncompress-shared-libs
@@ -2145,7 +2501,7 @@
   rm -rf $(dir $@)uncompressedlibs && mkdir $(dir $@)uncompressedlibs; \
   unzip $@ $(PRIVATE_EMBEDDED_JNI_LIBS) -d $(dir $@)uncompressedlibs && \
   zip -d $@ 'lib/*.so' && \
-  ( cd $(dir $@)uncompressedlibs && zip -D -r -0 ../$(notdir $@) lib ) && \
+  ( cd $(dir $@)uncompressedlibs && find lib -type f | sort | zip -D -X -0 ../$(notdir $@) -@ ) && \
   rm -rf $(dir $@)uncompressedlibs; \
   fi
 endef
@@ -2170,7 +2526,7 @@
 # Note: we intentionally don't clean PRIVATE_CLASS_INTERMEDIATES_DIR
 # in transform-java-to-classes for the sake of vm-tests.
 define transform-host-java-to-package
-@echo "host Java: $(PRIVATE_MODULE) ($(PRIVATE_CLASS_INTERMEDIATES_DIR))"
+@echo "$($(PRIVATE_PREFIX)DISPLAY) Java: $(PRIVATE_MODULE) ($(PRIVATE_CLASS_INTERMEDIATES_DIR))"
 $(call compile-java,$(HOST_JAVAC),$(PRIVATE_BOOTCLASSPATH))
 endef
 
@@ -2517,12 +2873,13 @@
 endef
 
 # Return the arch for the source file of a prebuilt
-# Return "none" if no matching arch found, so the result can be passed to
+# Return "none" if no matching arch found and return empty
+# if the input is empty, so the result can be passed to
 # LOCAL_MODULE_TARGET_ARCH.
 # $(1) the list of archs supported by the prebuilt
 define get-prebuilt-src-arch
 $(strip $(if $(filter $(TARGET_ARCH),$(1)),$(TARGET_ARCH),\
-  $(if $(filter $(TARGET_2ND_ARCH),$(1)),$(TARGET_2ND_ARCH),none)))
+  $(if $(filter $(TARGET_2ND_ARCH),$(1)),$(TARGET_2ND_ARCH),$(if $(1),none))))
 endef
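Worked example for the function above, assuming hypothetical values TARGET_ARCH=arm64 and TARGET_2ND_ARCH=arm:

    $(call get-prebuilt-src-arch,arm64 arm)  -> arm64
    $(call get-prebuilt-src-arch,arm)        -> arm
    $(call get-prebuilt-src-arch,x86)        -> none
    $(call get-prebuilt-src-arch,)           -> (empty)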
 
 ###########################################################
@@ -2537,6 +2894,7 @@
 # Include any vendor specific definitions.mk file
 -include $(TOPDIR)vendor/*/build/core/definitions.mk
 -include $(TOPDIR)device/*/build/core/definitions.mk
+-include $(TOPDIR)product/*/build/core/definitions.mk
 
 # broken:
 #	$(foreach file,$^,$(if $(findstring,.a,$(suffix $file)),-l$(file),$(file)))
diff --git a/core/dex_preopt.mk b/core/dex_preopt.mk
index 8b555e0..d182dc0 100644
--- a/core/dex_preopt.mk
+++ b/core/dex_preopt.mk
@@ -19,6 +19,28 @@
 # The default value for LOCAL_DEX_PREOPT
 DEX_PREOPT_DEFAULT ?= true
 
+# The default values for pre-opting: always preopt PIC.
+# Conditional on building on linux, as dex2oat currently does not work on darwin.
+ifeq ($(HOST_OS),linux)
+  WITH_DEXPREOPT_PIC ?= true
+  WITH_DEXPREOPT ?= true
+# For an eng build, only pre-opt the boot image. This gives reasonable performance and still
+# allows a simple workflow: building in frameworks/base and syncing.
+  ifeq (eng,$(TARGET_BUILD_VARIANT))
+    WITH_DEXPREOPT_BOOT_IMG_ONLY ?= true
+  endif
+# Add mini-debug-info to the boot classpath unless explicitly asked not to.
+  ifneq (false,$(WITH_DEXPREOPT_DEBUG_INFO))
+    PRODUCT_DEX_PREOPT_BOOT_FLAGS += --generate-mini-debug-info
+  endif
+endif
+
+GLOBAL_DEXPREOPT_FLAGS :=
+ifeq ($(WITH_DEXPREOPT_PIC),true)
+# Compile boot.oat as position-independent code if WITH_DEXPREOPT_PIC=true
+GLOBAL_DEXPREOPT_FLAGS += --compile-pic
+endif
+
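Since the new defaults use ?=, they can still be overridden from the environment or a product configuration; a hedged sketch of the kind of overrides this enables (settings illustrative, not prescribed by this change):

    WITH_DEXPREOPT := false              # skip dex-preopting entirely
    WITH_DEXPREOPT_PIC := false          # keep non-PIC compilation
    WITH_DEXPREOPT_DEBUG_INFO := false   # omit --generate-mini-debug-info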
 # $(1): the .jar or .apk to remove classes.dex
 define dexpreopt-remove-classes.dex
 $(hide) zip --quiet --delete $(1) classes.dex; \
@@ -35,7 +57,7 @@
 _dbj_jar_no_dex := $(DEXPREOPT_BOOT_JAR_DIR_FULL_PATH)/$(1)_nodex.jar
 _dbj_src_jar := $(call intermediates-dir-for,JAVA_LIBRARIES,$(1),,COMMON)/javalib.jar
 
-$$(_dbj_jar_no_dex) : $$(_dbj_src_jar) | $(ACP) $(AAPT)
+$$(_dbj_jar_no_dex) : $$(_dbj_src_jar) | $(ACP)
 	$$(call copy-file-to-target)
 ifneq ($(DEX_PREOPT_DEFAULT),nostripping)
 	$$(call dexpreopt-remove-classes.dex,$$@)
diff --git a/core/dex_preopt_libart.mk b/core/dex_preopt_libart.mk
index 1de72b7..2a7ffb7 100644
--- a/core/dex_preopt_libart.mk
+++ b/core/dex_preopt_libart.mk
@@ -3,19 +3,16 @@
 #
 ####################################
 
+# Default to the debug version to help find bugs.
+# Set USE_DEX2OAT_DEBUG to false to build only the non-debug version.
+ifeq ($(USE_DEX2OAT_DEBUG),false)
 DEX2OAT := $(HOST_OUT_EXECUTABLES)/dex2oat$(HOST_EXECUTABLE_SUFFIX)
-DEX2OATD := $(HOST_OUT_EXECUTABLES)/dex2oatd$(HOST_EXECUTABLE_SUFFIX)
+else
+DEX2OAT := $(HOST_OUT_EXECUTABLES)/dex2oatd$(HOST_EXECUTABLE_SUFFIX)
+endif
 
-# By default, do not run rerun dex2oat if the tool changes.
-# Comment out the | to force dex2oat to rerun on after all changes.
-DEX2OAT_DEPENDENCY := art/runtime/oat.cc # dependency on oat version number
-DEX2OAT_DEPENDENCY += art/runtime/image.cc # dependency on image version number
-DEX2OAT_DEPENDENCY += |
 DEX2OAT_DEPENDENCY += $(DEX2OAT)
 
-DEX2OATD_DEPENDENCY := $(DEX2OAT_DEPENDENCY)
-DEX2OATD_DEPENDENCY += $(DEX2OATD)
-
 # Use the first preloaded-classes file in PRODUCT_COPY_FILES.
 PRELOADED_CLASSES := $(call word-colon,1,$(firstword \
     $(filter %system/etc/preloaded-classes,$(PRODUCT_COPY_FILES))))
@@ -24,13 +21,6 @@
 COMPILED_CLASSES := $(call word-colon,1,$(firstword \
     $(filter %system/etc/compiled-classes,$(PRODUCT_COPY_FILES))))
 
-# Default to debug version to help find bugs.
-# Set USE_DEX2OAT_DEBUG to false for only building non-debug versions.
-ifneq ($(USE_DEX2OAT_DEBUG), false)
-DEX2OAT = $(DEX2OATD)
-DEX2OAT_DEPENDENCY = $(DEX2OATD_DEPENDENCY)
-endif
-
 # start of image reserved address space
 LIBART_IMG_HOST_BASE_ADDRESS   := 0x60000000
 LIBART_IMG_TARGET_BASE_ADDRESS := 0x70000000
@@ -77,6 +67,13 @@
 LIBART_TARGET_BOOT_DEX_LOCATIONS := $(foreach jar,$(LIBART_TARGET_BOOT_JARS),/$(DEXPREOPT_BOOT_JAR_DIR)/$(jar).jar)
 LIBART_TARGET_BOOT_DEX_FILES := $(foreach jar,$(LIBART_TARGET_BOOT_JARS),$(call intermediates-dir-for,JAVA_LIBRARIES,$(jar),,COMMON)/javalib.jar)
 
+# dex preopt on the bootclasspath produces multiple files.  The first dex file
+# is converted into boot.art (to match the legacy assumption that boot.art
+# exists), and the rest are converted to boot-<name>.art.
+# In addition, each .art file has an associated .oat file.
+LIBART_TARGET_BOOT_ART_EXTRA_FILES := $(foreach jar,$(wordlist 2,999,$(LIBART_TARGET_BOOT_JARS)),boot-$(jar).art boot-$(jar).oat)
+LIBART_TARGET_BOOT_ART_EXTRA_FILES += boot.oat
+
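For illustration (jar list hypothetical): if LIBART_TARGET_BOOT_JARS were core-oj core-libart conscrypt, the wordlist above skips the first jar and the extra files become

    boot-core-libart.art boot-core-libart.oat boot-conscrypt.art boot-conscrypt.oat boot.oat

alongside the boot.art generated for the first jar.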
 my_2nd_arch_prefix :=
 include $(BUILD_SYSTEM)/dex_preopt_libart_boot.mk
 
@@ -95,7 +92,7 @@
 define dex2oat-one-file
 $(hide) rm -f $(2)
 $(hide) mkdir -p $(dir $(2))
-$(hide) $(DEX2OAT) \
+$(hide) ANDROID_LOG_TAGS="*:e" $(DEX2OAT) \
 	--runtime-arg -Xms$(DEX2OAT_XMS) --runtime-arg -Xmx$(DEX2OAT_XMX) \
 	--boot-image=$(PRIVATE_DEX_PREOPT_IMAGE_LOCATION) \
 	--dex-file=$(1) \
@@ -107,5 +104,7 @@
 	--instruction-set-features=$($(PRIVATE_2ND_ARCH_VAR_PREFIX)DEX2OAT_TARGET_INSTRUCTION_SET_FEATURES) \
 	--include-patch-information --runtime-arg -Xnorelocate --no-generate-debug-info \
 	--abort-on-hard-verifier-error \
-	$(PRIVATE_DEX_PREOPT_FLAGS)
+	--no-inline-from=core-oj.jar \
+	$(PRIVATE_DEX_PREOPT_FLAGS) \
+	$(GLOBAL_DEXPREOPT_FLAGS)
 endef
diff --git a/core/dex_preopt_libart_boot.mk b/core/dex_preopt_libart_boot.mk
index afd61eb..1a0dc5b 100644
--- a/core/dex_preopt_libart_boot.mk
+++ b/core/dex_preopt_libart_boot.mk
@@ -28,11 +28,8 @@
 $(my_2nd_arch_prefix)LIBART_TARGET_BOOT_OAT_UNSTRIPPED := $(TARGET_OUT_UNSTRIPPED)$(patsubst %.art,%.oat,$($(my_2nd_arch_prefix)LIBART_BOOT_IMAGE_FILENAME))
 
 $(my_2nd_arch_prefix)DEFAULT_DEX_PREOPT_INSTALLED_IMAGE := $(PRODUCT_OUT)$($(my_2nd_arch_prefix)LIBART_BOOT_IMAGE_FILENAME)
-
-# Compile boot.oat as position-independent code if WITH_DEXPREOPT_PIC=true
-ifeq (true,$(WITH_DEXPREOPT_PIC))
-  PRODUCT_DEX_PREOPT_BOOT_FLAGS += --compile-pic
-endif
+$(my_2nd_arch_prefix)LIBART_TARGET_BOOT_ART_EXTRA_INSTALLED_FILES := $(addprefix $(dir $($(my_2nd_arch_prefix)DEFAULT_DEX_PREOPT_INSTALLED_IMAGE)),\
+    $(LIBART_TARGET_BOOT_ART_EXTRA_FILES))
 
 # If we have a compiled-classes file, create a parameter.
 COMPILED_CLASSES_FLAGS :=
@@ -40,18 +37,30 @@
   COMPILED_CLASSES_FLAGS := --compiled-classes=$(COMPILED_CLASSES)
 endif
 
-# The rule to install boot.art and boot.oat
-$($(my_2nd_arch_prefix)DEFAULT_DEX_PREOPT_INSTALLED_IMAGE) : $($(my_2nd_arch_prefix)DEFAULT_DEX_PREOPT_BUILT_IMAGE_FILENAME) | $(ACP)
-	$(call copy-file-to-target)
-	$(hide) $(ACP) -fp $(patsubst %.art,%.oat,$<) $(patsubst %.art,%.oat,$@)
+# The rule to install boot.art
+# Depends on installed boot.oat, boot-*.art, boot-*.oat
+$($(my_2nd_arch_prefix)DEFAULT_DEX_PREOPT_INSTALLED_IMAGE) : $($(my_2nd_arch_prefix)DEFAULT_DEX_PREOPT_BUILT_IMAGE_FILENAME) | $(ACP) $($(my_2nd_arch_prefix)LIBART_TARGET_BOOT_ART_EXTRA_INSTALLED_FILES)
+	@echo "Install: $@"
+	$(copy-file-to-target)
+
+# The rule to install boot.oat, boot-*.art, boot-*.oat
+# Depends on built-but-not-installed boot.art
+$($(my_2nd_arch_prefix)LIBART_TARGET_BOOT_ART_EXTRA_INSTALLED_FILES) : $($(my_2nd_arch_prefix)DEFAULT_DEX_PREOPT_BUILT_IMAGE_FILENAME)  | $(ACP)
+	@echo "Install: $@"
+	@mkdir -p $(dir $@)
+	$(hide) $(ACP) -fp $(dir $<)$(notdir $@) $@
 
 $($(my_2nd_arch_prefix)DEFAULT_DEX_PREOPT_BUILT_IMAGE_FILENAME): PRIVATE_2ND_ARCH_VAR_PREFIX := $(my_2nd_arch_prefix)
 # Use dex2oat debug version for better error reporting
 $($(my_2nd_arch_prefix)DEFAULT_DEX_PREOPT_BUILT_IMAGE_FILENAME) : $(LIBART_TARGET_BOOT_DEX_FILES) $(DEX2OAT_DEPENDENCY)
-	@echo "target dex2oat: $@ ($?)"
+	@echo "target dex2oat: $@"
 	@mkdir -p $(dir $@)
 	@mkdir -p $(dir $($(PRIVATE_2ND_ARCH_VAR_PREFIX)LIBART_TARGET_BOOT_OAT_UNSTRIPPED))
-	$(hide) $(DEX2OAT) --runtime-arg -Xms$(DEX2OAT_IMAGE_XMS) --runtime-arg -Xmx$(DEX2OAT_IMAGE_XMX) \
+	@rm -f $(dir $@)/*.art $(dir $@)/*.oat
+	@rm -f $(dir $($(PRIVATE_2ND_ARCH_VAR_PREFIX)LIBART_TARGET_BOOT_OAT_UNSTRIPPED))/*.art
+	@rm -f $(dir $($(PRIVATE_2ND_ARCH_VAR_PREFIX)LIBART_TARGET_BOOT_OAT_UNSTRIPPED))/*.oat
+	$(hide) ANDROID_LOG_TAGS="*:e" $(DEX2OAT) --runtime-arg -Xms$(DEX2OAT_IMAGE_XMS) \
+		--runtime-arg -Xmx$(DEX2OAT_IMAGE_XMX) \
 		--image-classes=$(PRELOADED_CLASSES) \
 		$(addprefix --dex-file=,$(LIBART_TARGET_BOOT_DEX_FILES)) \
 		$(addprefix --dex-location=,$(LIBART_TARGET_BOOT_DEX_LOCATIONS)) \
@@ -63,4 +72,5 @@
 		--instruction-set-variant=$($(PRIVATE_2ND_ARCH_VAR_PREFIX)DEX2OAT_TARGET_CPU_VARIANT) \
 		--instruction-set-features=$($(PRIVATE_2ND_ARCH_VAR_PREFIX)DEX2OAT_TARGET_INSTRUCTION_SET_FEATURES) \
 		--android-root=$(PRODUCT_OUT)/system --include-patch-information --runtime-arg -Xnorelocate --no-generate-debug-info \
-		$(PRODUCT_DEX_PREOPT_BOOT_FLAGS) $(COMPILED_CLASSES_FLAGS)
+		--multi-image --no-inline-from=core-oj.jar \
+		$(PRODUCT_DEX_PREOPT_BOOT_FLAGS) $(GLOBAL_DEXPREOPT_FLAGS) $(COMPILED_CLASSES_FLAGS) $(ART_BOOT_IMAGE_EXTRA_ARGS)
diff --git a/core/dex_preopt_odex_install.mk b/core/dex_preopt_odex_install.mk
index 73c8146..4e486d5 100644
--- a/core/dex_preopt_odex_install.mk
+++ b/core/dex_preopt_odex_install.mk
@@ -88,6 +88,9 @@
 endif  # LOCAL_MODULE_CLASS
 endif  # boot jar
 
+built_odex := $(strip $(built_odex))
+installed_odex := $(strip $(installed_odex))
+
 ifdef built_odex
 ifndef LOCAL_DEX_PREOPT_FLAGS
 LOCAL_DEX_PREOPT_FLAGS := $(DEXPREOPT.$(TARGET_PRODUCT).$(LOCAL_MODULE).CONFIG)
@@ -96,11 +99,6 @@
 endif
 endif
 
-# Compile apps with position-independent code if WITH_DEXPREOPT_PIC=true
-ifeq (true,$(WITH_DEXPREOPT_PIC))
-  LOCAL_DEX_PREOPT_FLAGS += --compile-pic
-endif
-
 $(built_odex): PRIVATE_DEX_PREOPT_FLAGS := $(LOCAL_DEX_PREOPT_FLAGS)
 
 # Use pattern rule - we may have multiple installed odex files.
@@ -116,6 +114,19 @@
 ALL_MODULES.$(my_register_name).INSTALLED += $(installed_odex)
 ALL_MODULES.$(my_register_name).BUILT_INSTALLED += $(built_installed_odex)
 
+# Record dex-preopt config.
+DEXPREOPT.$(LOCAL_MODULE).DEX_PREOPT := $(LOCAL_DEX_PREOPT)
+DEXPREOPT.$(LOCAL_MODULE).MULTILIB := $(LOCAL_MULTILIB)
+DEXPREOPT.$(LOCAL_MODULE).DEX_PREOPT_FLAGS := $(LOCAL_DEX_PREOPT_FLAGS)
+DEXPREOPT.$(LOCAL_MODULE).PRIVILEGED_MODULE := $(LOCAL_PRIVILEGED_MODULE)
+DEXPREOPT.$(LOCAL_MODULE).PROPRIETARY_MODULE := $(LOCAL_PROPRIETARY_MODULE)
+DEXPREOPT.$(LOCAL_MODULE).TARGET_ARCH := $(LOCAL_MODULE_TARGET_ARCH)
+DEXPREOPT.$(LOCAL_MODULE).INSTALLED := $(installed_odex)
+DEXPREOPT.$(LOCAL_MODULE).INSTALLED_STRIPPED := $(LOCAL_INSTALLED_MODULE)
+DEXPREOPT.MODULES.$(LOCAL_MODULE_CLASS) := $(sort \
+  $(DEXPREOPT.MODULES.$(LOCAL_MODULE_CLASS)) $(LOCAL_MODULE))
+
+
 # Make sure to install the .odex when you run "make <module_name>"
 $(my_register_name): $(installed_odex)
 
diff --git a/core/dpi_specific_apk.mk b/core/dpi_specific_apk.mk
index 5d0b5bf..6bae25d 100644
--- a/core/dpi_specific_apk.mk
+++ b/core/dpi_specific_apk.mk
@@ -16,7 +16,7 @@
 $(built_dpi_apk): PRIVATE_RESOURCE_DIR := $(LOCAL_RESOURCE_DIR)
 $(built_dpi_apk): PRIVATE_ASSET_DIR := $(LOCAL_ASSET_DIR)
 $(built_dpi_apk): PRIVATE_AAPT_INCLUDES := $(all_library_res_package_exports)
-ifneq (,$(filter-out current system_current, $(LOCAL_SDK_VERSION)))
+ifneq (,$(filter-out current system_current test_current, $(LOCAL_SDK_VERSION)))
 $(built_dpi_apk): PRIVATE_DEFAULT_APP_TARGET_SDK := $(LOCAL_SDK_VERSION)
 else
 $(built_dpi_apk): PRIVATE_DEFAULT_APP_TARGET_SDK := $(DEFAULT_APP_TARGET_SDK)
@@ -48,7 +48,7 @@
 $(built_dpi_apk) : $(R_file_stamp)
 $(built_dpi_apk) : $(all_library_res_package_export_deps)
 $(built_dpi_apk) : $(private_key) $(certificate) $(SIGNAPK_JAR)
-$(built_dpi_apk) : $(AAPT) | $(ZIPALIGN)
+$(built_dpi_apk) : $(AAPT)
 $(built_dpi_apk) : $(all_res_assets) $(jni_shared_libraries) $(full_android_manifest)
 	@echo "target Package: $(PRIVATE_MODULE) ($@)"
 	$(if $(PRIVATE_SOURCE_ARCHIVE),\
@@ -68,7 +68,6 @@
 endif
 endif
 	$(sign-package)
-	$(align-package)
 
 # Set up global variables to register this apk to the higher-level dependency graph.
 ALL_MODULES += $(dpi_apk_name)
diff --git a/core/droiddoc.mk b/core/droiddoc.mk
index 41f135c..f143579 100644
--- a/core/droiddoc.mk
+++ b/core/droiddoc.mk
@@ -23,6 +23,7 @@
 LOCAL_IS_HOST_MODULE := $(call true-or-empty,$(LOCAL_IS_HOST_MODULE))
 ifeq ($(LOCAL_IS_HOST_MODULE),true)
 my_prefix := HOST_
+LOCAL_HOST_PREFIX :=
 else
 my_prefix := TARGET_
 endif
@@ -67,13 +68,16 @@
   else ifeq ($(LOCAL_SDK_VERSION)$(TARGET_BUILD_APPS),system_current)
     LOCAL_JAVA_LIBRARIES := android_system_stubs_current $(LOCAL_JAVA_LIBRARIES)
     $(full_target): PRIVATE_BOOTCLASSPATH := $(call java-lib-files, android_system_stubs_current)
+  else ifeq ($(LOCAL_SDK_VERSION)$(TARGET_BUILD_APPS),test_current)
+    LOCAL_JAVA_LIBRARIES := android_test_stubs_current $(LOCAL_JAVA_LIBRARIES)
+    $(full_target): PRIVATE_BOOTCLASSPATH := $(call java-lib-files, android_test_stubs_current)
   else
     LOCAL_JAVA_LIBRARIES := sdk_v$(LOCAL_SDK_VERSION) $(LOCAL_JAVA_LIBRARIES)
     $(full_target): PRIVATE_BOOTCLASSPATH := $(call java-lib-files, sdk_v$(LOCAL_SDK_VERSION))
   endif
 else
-  LOCAL_JAVA_LIBRARIES := core-libart ext framework $(LOCAL_JAVA_LIBRARIES)
-  $(full_target): PRIVATE_BOOTCLASSPATH := $(call java-lib-files, core-libart)
+  LOCAL_JAVA_LIBRARIES := core-oj core-libart ext framework $(LOCAL_JAVA_LIBRARIES)
+  $(full_target): PRIVATE_BOOTCLASSPATH := $(call java-lib-files, core-oj):$(call java-lib-files, core-libart)
 endif  # LOCAL_SDK_VERSION
 LOCAL_JAVA_LIBRARIES := $(sort $(LOCAL_JAVA_LIBRARIES))
 
@@ -98,6 +102,7 @@
 
 $(full_target): PRIVATE_OUT_DIR := $(out_dir)
 $(full_target): PRIVATE_DROIDDOC_OPTIONS := $(LOCAL_DROIDDOC_OPTIONS)
+$(full_target): PRIVATE_STUB_OUT_DIR := $(LOCAL_DROIDDOC_STUB_OUT_DIR)
 
 # Lists the input files for the doc build into a text file
 # suitable for the @ syntax of javadoc.
@@ -107,7 +112,7 @@
 define prepare-doc-source-list
 $(hide) mkdir -p $(dir $(1))
 $(call dump-words-to-file, $(2), $(1))
-$(hide) for d in $(3) ; do find $$d -name '*.java' >> $(1) 2> /dev/null ; done ; true
+$(hide) for d in $(3) ; do find $$d -name '*.java' -and -not -name '.*' >> $(1) 2> /dev/null ; done ; true
 endef
 
 ifeq (a,b)
@@ -124,15 +129,15 @@
 ##
 
 droiddoc_templates := \
-    $(shell find $(LOCAL_DROIDDOC_CUSTOM_TEMPLATE_DIR) -type f)
+    $(sort $(shell find $(LOCAL_DROIDDOC_CUSTOM_TEMPLATE_DIR) -type f))
 
 droiddoc := \
 	$(HOST_JDK_TOOLS_JAR) \
 	$(HOST_OUT_JAVA_LIBRARIES)/doclava$(COMMON_JAVA_PACKAGE_SUFFIX)
 
 $(full_target): PRIVATE_DOCLETPATH := $(HOST_OUT_JAVA_LIBRARIES)/jsilver$(COMMON_JAVA_PACKAGE_SUFFIX):$(HOST_OUT_JAVA_LIBRARIES)/doclava$(COMMON_JAVA_PACKAGE_SUFFIX)
-$(full_target): PRIVATE_CURRENT_BUILD := -hdf page.build $(BUILD_ID)-$(BUILD_NUMBER)
-$(full_target): PRIVATE_CURRENT_TIME :=  -hdf page.now "$(shell date "+%d %b %Y %k:%M")"
+$(full_target): PRIVATE_CURRENT_BUILD := -hdf page.build $(BUILD_ID)-$(BUILD_NUMBER_FROM_FILE)
+$(full_target): PRIVATE_CURRENT_TIME :=  -hdf page.now "$$($(DATE_FROM_FILE) "+%d %b %Y %k:%M")"
 $(full_target): PRIVATE_CUSTOM_TEMPLATE_DIR := $(LOCAL_DROIDDOC_CUSTOM_TEMPLATE_DIR)
 $(full_target): PRIVATE_IN_CUSTOM_ASSET_DIR := $(LOCAL_DROIDDOC_CUSTOM_TEMPLATE_DIR)/$(LOCAL_DROIDDOC_CUSTOM_ASSET_DIR)
 $(full_target): PRIVATE_OUT_ASSET_DIR := $(out_dir)/$(LOCAL_DROIDDOC_ASSET_DIR)
@@ -141,7 +146,7 @@
 html_dir_files :=
 ifneq ($(strip $(LOCAL_DROIDDOC_HTML_DIR)),)
 $(full_target): PRIVATE_DROIDDOC_HTML_DIR := -htmldir $(LOCAL_PATH)/$(LOCAL_DROIDDOC_HTML_DIR)
-html_dir_files := $(shell find $(LOCAL_PATH)/$(LOCAL_DROIDDOC_HTML_DIR) -type f)
+html_dir_files := $(sort $(shell find $(LOCAL_PATH)/$(LOCAL_DROIDDOC_HTML_DIR) -type f))
 else
 $(full_target): PRIVATE_DROIDDOC_HTML_DIR :=
 endif
@@ -160,17 +165,18 @@
         $(droiddoc) \
         $(html_dir_files) \
         $(full_java_lib_deps) \
-        $(LOCAL_MODULE_MAKEFILE) \
+        $(LOCAL_MODULE_MAKEFILE_DEP) \
         $(LOCAL_ADDITIONAL_DEPENDENCIES)
 	@echo Docs droiddoc: $(PRIVATE_OUT_DIR)
 	$(hide) mkdir -p $(dir $@)
+	$(addprefix $(hide) rm -rf ,$(PRIVATE_STUB_OUT_DIR))
 	$(call prepare-doc-source-list,$(PRIVATE_SRC_LIST_FILE),$(PRIVATE_JAVA_FILES), \
 			$(PRIVATE_SOURCE_INTERMEDIATES_DIR) $(PRIVATE_ADDITIONAL_JAVA_DIR))
 	$(hide) ( \
 		javadoc \
                 -encoding UTF-8 \
                 \@$(PRIVATE_SRC_LIST_FILE) \
-                -J-Xmx1280m \
+                -J-Xmx1600m \
                 -XDignore.symbol.file \
                 $(PRIVATE_PROFILING_OPTIONS) \
                 -quiet \
@@ -185,6 +191,7 @@
                 -d $(PRIVATE_OUT_DIR) \
                 $(PRIVATE_CURRENT_BUILD) $(PRIVATE_CURRENT_TIME) \
                 $(PRIVATE_DROIDDOC_OPTIONS) \
+                $(addprefix -stubs ,$(PRIVATE_STUB_OUT_DIR)) \
         && touch -f $@ \
     ) || (rm -rf $(PRIVATE_OUT_DIR) $(PRIVATE_SRC_LIST_FILE); exit 45)
 
@@ -208,6 +215,7 @@
                 \@$(PRIVATE_SRC_LIST_FILE) \
                 -J-Xmx1024m \
                 -XDignore.symbol.file \
+                $(if $(LEGACY_USE_JAVA7),,-Xdoclint:none) \
                 $(PRIVATE_PROFILING_OPTIONS) \
                 $(addprefix -classpath ,$(PRIVATE_CLASSPATH)) \
                 $(addprefix -bootclasspath ,$(PRIVATE_BOOTCLASSPATH)) \
@@ -240,7 +248,7 @@
 	@echo Package docs: $@
 	@rm -f $@
 	@mkdir -p $(dir $@)
-	$(hide) ( F=$$(pwd)/$@ ; cd $(PRIVATE_DOCS_DIR) && zip -rq $$F * )
+	$(hide) ( F=$$(pwd)/$@ ; cd $(PRIVATE_DOCS_DIR) && zip -rqX $$F * )
 
 $(LOCAL_MODULE)-docs.zip : $(out_zip)
 
diff --git a/core/dumpvar.mk b/core/dumpvar.mk
index 510bc7d..1bd4777 100644
--- a/core/dumpvar.mk
+++ b/core/dumpvar.mk
@@ -1,3 +1,35 @@
+
+# List of variables we want to print in the build banner.
+print_build_config_vars := \
+  PLATFORM_VERSION_CODENAME \
+  PLATFORM_VERSION \
+  TARGET_PRODUCT \
+  TARGET_BUILD_VARIANT \
+  TARGET_BUILD_TYPE \
+  TARGET_BUILD_APPS \
+  TARGET_ARCH \
+  TARGET_ARCH_VARIANT \
+  TARGET_CPU_VARIANT \
+  TARGET_2ND_ARCH \
+  TARGET_2ND_ARCH_VARIANT \
+  TARGET_2ND_CPU_VARIANT \
+  HOST_ARCH \
+  HOST_2ND_ARCH \
+  HOST_OS \
+  HOST_OS_EXTRA \
+  HOST_CROSS_OS \
+  HOST_CROSS_ARCH \
+  HOST_CROSS_2ND_ARCH \
+  HOST_BUILD_TYPE \
+  BUILD_ID \
+  OUT_DIR
+
+ifeq ($(TARGET_BUILD_PDK),true)
+print_build_config_vars += \
+  TARGET_BUILD_PDK \
+  PDK_FUSION_PLATFORM_ZIP
+endif
+
 # ---------------------------------------------------------------
 # the setpath shell function in envsetup.sh uses this to figure out
 # what to add to the path given the config we have chosen.
@@ -38,11 +70,7 @@
   absolute_dumpvar := $(strip $(filter abs-%,$(dumpvar_goals)))
   ifdef absolute_dumpvar
     dumpvar_goals := $(patsubst abs-%,%,$(dumpvar_goals))
-    ifneq ($(filter /%,$($(dumpvar_goals))),)
-      DUMPVAR_VALUE := $($(dumpvar_goals))
-    else
-      DUMPVAR_VALUE := $(PWD)/$($(dumpvar_goals))
-    endif
+    DUMPVAR_VALUE := $(abspath $($(dumpvar_goals)))
     dumpvar_target := dumpvar-abs-$(dumpvar_goals)
   else
     DUMPVAR_VALUE := $($(dumpvar_goals))
@@ -59,29 +87,37 @@
 PRINT_BUILD_CONFIG:=
 endif
 
+ifneq ($(filter report_config,$(DUMP_MANY_VARS)),)
+# Construct the shell commands that print the config banner.
+report_config_sh := echo '============================================';
+report_config_sh += $(foreach v,$(print_build_config_vars),echo '$v=$($(v))';)
+report_config_sh += echo '============================================';
+endif
+
+# Dump multiple variables as "<var>=<value>" pairs, one per line.
+# The output may be executed as a bash script.
+# Input variables:
+#   DUMP_MANY_VARS: the list of variable names.
+#   DUMP_VAR_PREFIX: an optional prefix of the variable name added to the output.
+#   DUMP_MANY_ABS_VARS: the list of abs variable names.
+#   DUMP_ABS_VAR_PREFIX: an optional prefix of the abs variable name added to the output.
+.PHONY: dump-many-vars
+dump-many-vars :
+	@$(foreach v, $(filter-out report_config, $(DUMP_MANY_VARS)),\
+	  echo "$(DUMP_VAR_PREFIX)$(v)='$($(v))'";)
+ifneq ($(filter report_config, $(DUMP_MANY_VARS)),)
+	@# Construct a special variable for report_config.
+	@# Escape \` to defer the execution of report_config_sh to preserve the line breaks.
+	@echo "$(DUMP_VAR_PREFIX)report_config=\`$(report_config_sh)\`"
+endif
+	@$(foreach v, $(sort $(DUMP_MANY_ABS_VARS)),\
+	  echo "$(DUMP_ABS_VAR_PREFIX)$(v)='$(abspath $($(v)))'";)
+
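For illustration only (variable names, prefix, and values hypothetical), a caller that sets DUMP_MANY_VARS='TARGET_PRODUCT TARGET_BUILD_VARIANT' and DUMP_VAR_PREFIX='var_cache_' would receive output shaped like

    var_cache_TARGET_PRODUCT='aosp_arm'
    var_cache_TARGET_BUILD_VARIANT='userdebug'

which the caller can then eval as shell assignments, per the comment above.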
 endif # CALLED_FROM_SETUP
 
-
 ifneq ($(PRINT_BUILD_CONFIG),)
-HOST_OS_EXTRA:=$(shell python -c "import platform; print(platform.platform())")
 $(info ============================================)
-$(info   PLATFORM_VERSION_CODENAME=$(PLATFORM_VERSION_CODENAME))
-$(info   PLATFORM_VERSION=$(PLATFORM_VERSION))
-$(info   TARGET_PRODUCT=$(TARGET_PRODUCT))
-$(info   TARGET_BUILD_VARIANT=$(TARGET_BUILD_VARIANT))
-$(info   TARGET_BUILD_TYPE=$(TARGET_BUILD_TYPE))
-$(info   TARGET_BUILD_APPS=$(TARGET_BUILD_APPS))
-$(info   TARGET_ARCH=$(TARGET_ARCH))
-$(info   TARGET_ARCH_VARIANT=$(TARGET_ARCH_VARIANT))
-$(info   TARGET_CPU_VARIANT=$(TARGET_CPU_VARIANT))
-$(info   TARGET_2ND_ARCH=$(TARGET_2ND_ARCH))
-$(info   TARGET_2ND_ARCH_VARIANT=$(TARGET_2ND_ARCH_VARIANT))
-$(info   TARGET_2ND_CPU_VARIANT=$(TARGET_2ND_CPU_VARIANT))
-$(info   HOST_ARCH=$(HOST_ARCH))
-$(info   HOST_OS=$(HOST_OS))
-$(info   HOST_OS_EXTRA=$(HOST_OS_EXTRA))
-$(info   HOST_BUILD_TYPE=$(HOST_BUILD_TYPE))
-$(info   BUILD_ID=$(BUILD_ID))
-$(info   OUT_DIR=$(OUT_DIR))
+$(foreach v, $(print_build_config_vars),\
+  $(info $v=$($(v))))
 $(info ============================================)
 endif
diff --git a/core/dynamic_binary.mk b/core/dynamic_binary.mk
index 38c0cbe..91fd271 100644
--- a/core/dynamic_binary.mk
+++ b/core/dynamic_binary.mk
@@ -44,7 +44,12 @@
 relocation_packer_input := $(linked_module)
 relocation_packer_output := $(intermediates)/PACKED/$(my_built_module_stem)
 
-my_pack_module_relocations := $(LOCAL_PACK_MODULE_RELOCATIONS)
+my_pack_module_relocations := false
+ifneq ($(DISABLE_RELOCATION_PACKER),true)
+    my_pack_module_relocations := $(firstword \
+      $(LOCAL_PACK_MODULE_RELOCATIONS_$($(my_prefix)$(LOCAL_2ND_ARCH_VAR_PREFIX)ARCH)) \
+      $(LOCAL_PACK_MODULE_RELOCATIONS))
+endif
 
 ifeq ($(my_pack_module_relocations),)
   my_pack_module_relocations := $($(LOCAL_2ND_ARCH_VAR_PREFIX)TARGET_PACK_MODULE_RELOCATIONS)
@@ -86,6 +91,20 @@
 	@echo "target Symbolic: $(PRIVATE_MODULE) ($@)"
 	$(copy-file-to-target)
 
+###########################################################
+## Store breakpad symbols
+###########################################################
+
+ifeq ($(BREAKPAD_GENERATE_SYMBOLS),true)
+my_breakpad_path := $(TARGET_OUT_BREAKPAD)/$(patsubst $(PRODUCT_OUT)/%,%,$(my_module_path))
+breakpad_input := $(relocation_packer_output)
+breakpad_output := $(my_breakpad_path)/$(my_installed_module_stem).sym
+$(breakpad_output) : $(breakpad_input) | $(BREAKPAD_DUMP_SYMS)
+	@echo "target breakpad: $(PRIVATE_MODULE) ($@)"
+	@mkdir -p $(dir $@)
+	$(hide) $(BREAKPAD_DUMP_SYMS) -c $< > $@
+$(LOCAL_BUILT_MODULE) : $(breakpad_output)
+endif
 
 ###########################################################
 ## Strip
@@ -93,7 +112,9 @@
 strip_input := $(symbolic_output)
 strip_output := $(LOCAL_BUILT_MODULE)
 
-my_strip_module := $(LOCAL_STRIP_MODULE)
+my_strip_module := $(firstword \
+  $(LOCAL_STRIP_MODULE_$($(my_prefix)$(LOCAL_2ND_ARCH_VAR_PREFIX)ARCH)) \
+  $(LOCAL_STRIP_MODULE))
 ifeq ($(my_strip_module),)
   my_strip_module := true
 endif
@@ -143,5 +164,6 @@
 
 $(cleantarget): PRIVATE_CLEAN_FILES += \
     $(linked_module) \
+    $(breakpad_output) \
     $(symbolic_output) \
     $(strip_output)
diff --git a/core/envsetup.mk b/core/envsetup.mk
index bf04455..0a72603 100644
--- a/core/envsetup.mk
+++ b/core/envsetup.mk
@@ -27,7 +27,7 @@
 # NOTE: This will be overridden in product_config.mk if make
 # was invoked with a PRODUCT-xxx-yyy goal.
 ifeq ($(TARGET_PRODUCT),)
-TARGET_PRODUCT := full
+TARGET_PRODUCT := aosp_arm
 endif
 
 
@@ -53,19 +53,18 @@
 ifneq (,$(findstring Macintosh,$(UNAME)))
   HOST_OS := darwin
 endif
-ifneq (,$(findstring CYGWIN,$(UNAME)))
-  HOST_OS := windows
-endif
+
+HOST_OS_EXTRA:=$(shell python -c "import platform; print(platform.platform())")
 
 # BUILD_OS is the real host doing the build.
 BUILD_OS := $(HOST_OS)
 
-# Under Linux, if USE_MINGW is set, we change HOST_OS to Windows to build the
-# Windows SDK. Only a subset of tools and SDK will manage to build properly.
+HOST_CROSS_OS :=
+# We can cross-build Windows binaries on Linux
 ifeq ($(HOST_OS),linux)
-ifdef USE_MINGW
-  HOST_OS := windows
-endif
+HOST_CROSS_OS := windows
+HOST_CROSS_ARCH := x86
+HOST_CROSS_2ND_ARCH := x86_64
 endif
 
 ifeq ($(HOST_OS),)
@@ -105,11 +104,7 @@
 HOST_PREBUILT_ARCH := x86
 # This is the standard way to name a directory containing prebuilt host
 # objects. E.g., prebuilt/$(HOST_PREBUILT_TAG)/cc
-ifeq ($(HOST_OS),windows)
-  HOST_PREBUILT_TAG := windows
-else
-  HOST_PREBUILT_TAG := $(HOST_OS)-$(HOST_PREBUILT_ARCH)
-endif
+HOST_PREBUILT_TAG := $(BUILD_OS)-$(HOST_PREBUILT_ARCH)
 
 # TARGET_COPY_OUT_* are all relative to the staging directory, ie PRODUCT_OUT.
 # Define them here so they can be used in product config files.
@@ -140,15 +135,6 @@
 $(error must be empty or one of: eng user userdebug)
 endif
 
-# Build host as 32-bit for SDK build.
-ifneq ($(filter $(MAKECMDGOALS),win_sdk sdk),)
-HOST_PREFER_32_BIT := true
-endif
-ifdef USE_MINGW
-# We only build sdk host tools in the MinGW windows build.
-# Build it as 32-bit as well.
-HOST_PREFER_32_BIT := true
-endif
 SDK_HOST_ARCH := x86
 
 # Boards may be defined under $(SRC_TARGET_DIR)/board/$(TARGET_DEVICE)
@@ -156,11 +142,11 @@
 # make sure only one exists.
 # Real boards should always be associated with an OEM vendor.
 board_config_mk := \
-	$(strip $(wildcard \
+	$(strip $(sort $(wildcard \
 		$(SRC_TARGET_DIR)/board/$(TARGET_DEVICE)/BoardConfig.mk \
-		$(shell test -d device && find device -maxdepth 4 -path '*/$(TARGET_DEVICE)/BoardConfig.mk') \
-		$(shell test -d vendor && find vendor -maxdepth 4 -path '*/$(TARGET_DEVICE)/BoardConfig.mk') \
-	))
+		$(shell test -d device && find -L device -maxdepth 4 -path '*/$(TARGET_DEVICE)/BoardConfig.mk') \
+		$(shell test -d vendor && find -L vendor -maxdepth 4 -path '*/$(TARGET_DEVICE)/BoardConfig.mk') \
+	)))
 ifeq ($(board_config_mk),)
   $(error No config file found for TARGET_DEVICE $(TARGET_DEVICE))
 endif
@@ -171,6 +157,10 @@
 ifeq ($(TARGET_ARCH),)
   $(error TARGET_ARCH not defined by board config: $(board_config_mk))
 endif
+ifneq ($(MALLOC_IMPL),)
+  $(warning *** Unsupported option MALLOC_IMPL defined by board config: $(board_config_mk).)
+  $(error Use `MALLOC_SVELTE := true` to configure jemalloc for low-memory)
+endif
 TARGET_DEVICE_DIR := $(patsubst %/,%,$(dir $(board_config_mk)))
 board_config_mk :=
 
@@ -229,8 +219,12 @@
 HOST_OUT_release := $(HOST_OUT_ROOT_release)/$(HOST_OS)-$(HOST_PREBUILT_ARCH)
 HOST_OUT_debug := $(HOST_OUT_ROOT_debug)/$(HOST_OS)-$(HOST_PREBUILT_ARCH)
 HOST_OUT := $(HOST_OUT_$(HOST_BUILD_TYPE))
+# TODO: remove
+BUILD_OUT := $(HOST_OUT)
 
-BUILD_OUT := $(OUT_DIR)/host/$(BUILD_OS)-$(HOST_PREBUILT_ARCH)
+HOST_CROSS_OUT_release := $(HOST_OUT_ROOT_release)/windows-$(HOST_PREBUILT_ARCH)
+HOST_CROSS_OUT_debug := $(HOST_OUT_ROOT_debug)/windows-$(HOST_PREBUILT_ARCH)
+HOST_CROSS_OUT := $(HOST_CROSS_OUT_$(HOST_BUILD_TYPE))
 
 TARGET_PRODUCT_OUT_ROOT := $(TARGET_OUT_ROOT)/product
 
@@ -245,9 +239,13 @@
 
 HOST_OUT_EXECUTABLES := $(HOST_OUT)/bin
 HOST_OUT_SHARED_LIBRARIES := $(HOST_OUT)/lib64
+HOST_OUT_RENDERSCRIPT_BITCODE := $(HOST_OUT_SHARED_LIBRARIES)
 HOST_OUT_JAVA_LIBRARIES := $(HOST_OUT)/framework
 HOST_OUT_SDK_ADDON := $(HOST_OUT)/sdk_addon
 
+HOST_CROSS_OUT_EXECUTABLES := $(HOST_CROSS_OUT)/bin
+HOST_CROSS_OUT_SHARED_LIBRARIES := $(HOST_CROSS_OUT)/lib
+
 HOST_OUT_INTERMEDIATES := $(HOST_OUT)/obj
 HOST_OUT_HEADERS := $(HOST_OUT_INTERMEDIATES)/include
 HOST_OUT_INTERMEDIATE_LIBRARIES := $(HOST_OUT_INTERMEDIATES)/lib
@@ -255,9 +253,16 @@
 HOST_OUT_COMMON_INTERMEDIATES := $(HOST_COMMON_OUT_ROOT)/obj
 HOST_OUT_FAKE := $(HOST_OUT)/fake_packages
 
+HOST_CROSS_OUT_INTERMEDIATES := $(HOST_CROSS_OUT)/obj
+HOST_CROSS_OUT_HEADERS := $(HOST_CROSS_OUT_INTERMEDIATES)/include
+HOST_CROSS_OUT_INTERMEDIATE_LIBRARIES := $(HOST_CROSS_OUT_INTERMEDIATES)/lib
+HOST_CROSS_OUT_NOTICE_FILES := $(HOST_CROSS_OUT_INTERMEDIATES)/NOTICE_FILES
+
 HOST_OUT_GEN := $(HOST_OUT)/gen
 HOST_OUT_COMMON_GEN := $(HOST_COMMON_OUT_ROOT)/gen
 
+HOST_CROSS_OUT_GEN := $(HOST_CROSS_OUT)/gen
+
 # Out for HOST_2ND_ARCH
 HOST_2ND_ARCH_VAR_PREFIX := 2ND_
 HOST_2ND_ARCH_MODULE_SUFFIX := _32
@@ -265,6 +270,7 @@
 $(HOST_2ND_ARCH_VAR_PREFIX)HOST_OUT_INTERMEDIATE_LIBRARIES := $($(HOST_2ND_ARCH_VAR_PREFIX)HOST_OUT_INTERMEDIATES)/lib
 $(HOST_2ND_ARCH_VAR_PREFIX)HOST_OUT_SHARED_LIBRARIES := $(HOST_OUT)/lib
 $(HOST_2ND_ARCH_VAR_PREFIX)HOST_OUT_EXECUTABLES := $(HOST_OUT_EXECUTABLES)
+$(HOST_2ND_ARCH_VAR_PREFIX)HOST_OUT_JAVA_LIBRARIES := $(HOST_OUT_JAVA_LIBRARIES)
 
 # The default host library path.
 # It always points to the path where we build libraries in the default bitness.
@@ -274,6 +280,14 @@
 HOST_LIBRARY_PATH := $(HOST_OUT_SHARED_LIBRARIES)
 endif
 
+# Out for HOST_CROSS_2ND_ARCH
+HOST_CROSS_2ND_ARCH_VAR_PREFIX := 2ND_
+HOST_CROSS_2ND_ARCH_MODULE_SUFFIX := _64
+$(HOST_CROSS_2ND_ARCH_VAR_PREFIX)HOST_CROSS_OUT_INTERMEDIATES := $(HOST_CROSS_OUT)/obj64
+$(HOST_CROSS_2ND_ARCH_VAR_PREFIX)HOST_CROSS_OUT_INTERMEDIATE_LIBRARIES := $($(HOST_CROSS_2ND_ARCH_VAR_PREFIX)HOST_CROSS_OUT_INTERMEDIATES)/lib
+$(HOST_CROSS_2ND_ARCH_VAR_PREFIX)HOST_CROSS_OUT_SHARED_LIBRARIES := $(HOST_CROSS_OUT)/lib64
+$(HOST_CROSS_2ND_ARCH_VAR_PREFIX)HOST_CROSS_OUT_EXECUTABLES := $(HOST_CROSS_OUT_EXECUTABLES)
+
 TARGET_OUT_INTERMEDIATES := $(PRODUCT_OUT)/obj
 TARGET_OUT_HEADERS := $(TARGET_OUT_INTERMEDIATES)/include
 TARGET_OUT_INTERMEDIATE_LIBRARIES := $(TARGET_OUT_INTERMEDIATES)/lib
@@ -283,15 +297,22 @@
 TARGET_OUT_COMMON_GEN := $(TARGET_COMMON_OUT_ROOT)/gen
 
 TARGET_OUT := $(PRODUCT_OUT)/$(TARGET_COPY_OUT_SYSTEM)
+ifneq ($(filter address,$(SANITIZE_TARGET)),)
+target_out_shared_libraries_base := $(PRODUCT_OUT)/$(TARGET_COPY_OUT_DATA)
+else
+target_out_shared_libraries_base := $(TARGET_OUT)
+endif
+
 TARGET_OUT_EXECUTABLES := $(TARGET_OUT)/bin
 TARGET_OUT_OPTIONAL_EXECUTABLES := $(TARGET_OUT)/xbin
 ifeq ($(TARGET_IS_64_BIT),true)
 # /system/lib always contains 32-bit libraries,
 # and /system/lib64 (if present) always contains 64-bit libraries.
-TARGET_OUT_SHARED_LIBRARIES := $(TARGET_OUT)/lib64
+TARGET_OUT_SHARED_LIBRARIES := $(target_out_shared_libraries_base)/lib64
 else
-TARGET_OUT_SHARED_LIBRARIES := $(TARGET_OUT)/lib
+TARGET_OUT_SHARED_LIBRARIES := $(target_out_shared_libraries_base)/lib
 endif
+TARGET_OUT_RENDERSCRIPT_BITCODE := $(TARGET_OUT_SHARED_LIBRARIES)
 TARGET_OUT_JAVA_LIBRARIES := $(TARGET_OUT)/framework
 TARGET_OUT_APPS := $(TARGET_OUT)/app
 TARGET_OUT_APPS_PRIVILEGED := $(TARGET_OUT)/priv-app
@@ -306,7 +327,8 @@
 TARGET_2ND_ARCH_MODULE_SUFFIX := $(HOST_2ND_ARCH_MODULE_SUFFIX)
 $(TARGET_2ND_ARCH_VAR_PREFIX)TARGET_OUT_INTERMEDIATES := $(PRODUCT_OUT)/obj_$(TARGET_2ND_ARCH)
 $(TARGET_2ND_ARCH_VAR_PREFIX)TARGET_OUT_INTERMEDIATE_LIBRARIES := $($(TARGET_2ND_ARCH_VAR_PREFIX)TARGET_OUT_INTERMEDIATES)/lib
-$(TARGET_2ND_ARCH_VAR_PREFIX)TARGET_OUT_SHARED_LIBRARIES := $(TARGET_OUT)/lib
+$(TARGET_2ND_ARCH_VAR_PREFIX)TARGET_OUT_SHARED_LIBRARIES := $(target_out_shared_libraries_base)/lib
+$(TARGET_2ND_ARCH_VAR_PREFIX)TARGET_OUT_RENDERSCRIPT_BITCODE := $($(TARGET_2ND_ARCH_VAR_PREFIX)TARGET_OUT_SHARED_LIBRARIES)
 $(TARGET_2ND_ARCH_VAR_PREFIX)TARGET_OUT_EXECUTABLES := $(TARGET_OUT_EXECUTABLES)
 $(TARGET_2ND_ARCH_VAR_PREFIX)TARGET_OUT_APPS := $(TARGET_OUT_APPS)
 $(TARGET_2ND_ARCH_VAR_PREFIX)TARGET_OUT_APPS_PRIVILEGED := $(TARGET_OUT_APPS_PRIVILEGED)
@@ -321,8 +343,10 @@
 TARGET_OUT_DATA_ETC := $(TARGET_OUT_ETC)
 ifeq ($(TARGET_IS_64_BIT),true)
 TARGET_OUT_DATA_NATIVE_TESTS := $(TARGET_OUT_DATA)/nativetest64
+TARGET_OUT_DATA_METRIC_TESTS := $(TARGET_OUT_DATA)/benchmarktest64
 else
 TARGET_OUT_DATA_NATIVE_TESTS := $(TARGET_OUT_DATA)/nativetest
+TARGET_OUT_DATA_METRIC_TESTS := $(TARGET_OUT_DATA)/benchmarktest
 endif
 TARGET_OUT_DATA_FAKE := $(TARGET_OUT_DATA)/fake_packages
 
@@ -330,16 +354,23 @@
 $(TARGET_2ND_ARCH_VAR_PREFIX)TARGET_OUT_DATA_SHARED_LIBRARIES := $($(TARGET_2ND_ARCH_VAR_PREFIX)TARGET_OUT_SHARED_LIBRARIES)
 $(TARGET_2ND_ARCH_VAR_PREFIX)TARGET_OUT_DATA_APPS := $(TARGET_OUT_DATA_APPS)
 $(TARGET_2ND_ARCH_VAR_PREFIX)TARGET_OUT_DATA_NATIVE_TESTS := $(TARGET_OUT_DATA)/nativetest
+$(TARGET_2ND_ARCH_VAR_PREFIX)TARGET_OUT_DATA_METRIC_TESTS := $(TARGET_OUT_DATA)/benchmarktest
 
 TARGET_OUT_CACHE := $(PRODUCT_OUT)/cache
 
 TARGET_OUT_VENDOR := $(PRODUCT_OUT)/$(TARGET_COPY_OUT_VENDOR)
+ifneq ($(filter address,$(SANITIZE_TARGET)),)
+target_out_vendor_shared_libraries_base := $(PRODUCT_OUT)/$(TARGET_COPY_OUT_DATA)/vendor
+else
+target_out_vendor_shared_libraries_base := $(TARGET_OUT_VENDOR)
+endif
+
 TARGET_OUT_VENDOR_EXECUTABLES := $(TARGET_OUT_VENDOR)/bin
 TARGET_OUT_VENDOR_OPTIONAL_EXECUTABLES := $(TARGET_OUT_VENDOR)/xbin
 ifeq ($(TARGET_IS_64_BIT),true)
-TARGET_OUT_VENDOR_SHARED_LIBRARIES := $(TARGET_OUT_VENDOR)/lib64
+TARGET_OUT_VENDOR_SHARED_LIBRARIES := $(target_out_vendor_shared_libraries_base)/lib64
 else
-TARGET_OUT_VENDOR_SHARED_LIBRARIES := $(TARGET_OUT_VENDOR)/lib
+TARGET_OUT_VENDOR_SHARED_LIBRARIES := $(target_out_vendor_shared_libraries_base)/lib
 endif
 TARGET_OUT_VENDOR_JAVA_LIBRARIES := $(TARGET_OUT_VENDOR)/framework
 TARGET_OUT_VENDOR_APPS := $(TARGET_OUT_VENDOR)/app
@@ -379,6 +410,8 @@
 $(TARGET_2ND_ARCH_VAR_PREFIX)TARGET_OUT_ODM_SHARED_LIBRARIES := $(TARGET_OUT_ODM)/lib
 $(TARGET_2ND_ARCH_VAR_PREFIX)TARGET_OUT_ODM_APPS := $(TARGET_OUT_ODM_APPS)
 
+TARGET_OUT_BREAKPAD := $(PRODUCT_OUT)/breakpad
+
 TARGET_OUT_UNSTRIPPED := $(PRODUCT_OUT)/symbols
 TARGET_OUT_EXECUTABLES_UNSTRIPPED := $(TARGET_OUT_UNSTRIPPED)/system/bin
 TARGET_OUT_SHARED_LIBRARIES_UNSTRIPPED := $(TARGET_OUT_UNSTRIPPED)/system/lib
@@ -406,6 +439,7 @@
 TARGET_INSTALLER_SYSTEM_OUT := $(TARGET_INSTALLER_OUT)/root/system
 
 COMMON_MODULE_CLASSES := TARGET-NOTICE_FILES HOST-NOTICE_FILES HOST-JAVA_LIBRARIES
+PER_ARCH_MODULE_CLASSES := SHARED_LIBRARIES STATIC_LIBRARIES EXECUTABLES GYP RENDERSCRIPT_BITCODE
 
 ifeq (,$(strip $(DIST_DIR)))
   DIST_DIR := $(OUT_DIR)/dist
@@ -414,3 +448,7 @@
 ifeq ($(PRINT_BUILD_CONFIG),)
 PRINT_BUILD_CONFIG := true
 endif
+
+ifeq ($(USE_CLANG_PLATFORM_BUILD),)
+USE_CLANG_PLATFORM_BUILD := true
+endif
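
The hunks above redirect TARGET_OUT_SHARED_LIBRARIES and TARGET_OUT_VENDOR_SHARED_LIBRARIES to a base directory under $(PRODUCT_OUT)/data when SANITIZE_TARGET includes "address", and they default USE_CLANG_PLATFORM_BUILD to true. Below is a minimal sketch, not AOSP code, of the same $(filter ...) guard shown in isolation; the example_lib_base variable and the value of SANITIZE_TARGET are hypothetical.

# Hypothetical sketch of the ASan path-selection idiom used above.
SANITIZE_TARGET := address coverage
ifneq ($(filter address,$(SANITIZE_TARGET)),)
  # ASan builds keep instrumented shared libraries under /data so they can
  # coexist with the uninstrumented system image.
  example_lib_base := $(PRODUCT_OUT)/data
else
  example_lib_base := $(TARGET_OUT)
endif
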
diff --git a/core/executable.mk b/core/executable.mk
index 4dd9a23..e22ea0e 100644
--- a/core/executable.mk
+++ b/core/executable.mk
@@ -6,6 +6,17 @@
 # LOCAL_MODULE_PATH_32 and LOCAL_MODULE_PATH_64 or LOCAL_MODULE_STEM_32 and
 # LOCAL_MODULE_STEM_64
 
+my_skip_this_target :=
+ifneq ($(filter address,$(SANITIZE_TARGET)),)
+  ifeq (true,$(LOCAL_FORCE_STATIC_EXECUTABLE))
+    my_skip_this_target := true
+  else ifeq (false, $(LOCAL_CLANG))
+    my_skip_this_target := true
+  endif
+endif
+
+ifneq (true,$(my_skip_this_target))
+
 my_prefix := TARGET_
 include $(BUILD_SYSTEM)/multilib.mk
 
@@ -59,9 +70,6 @@
 OVERRIDE_BUILT_MODULE_PATH :=
 LOCAL_BUILT_MODULE :=
 LOCAL_INSTALLED_MODULE :=
-LOCAL_MODULE_STEM :=
-LOCAL_BUILT_MODULE_STEM :=
-LOCAL_INSTALLED_MODULE_STEM :=
 LOCAL_INTERMEDIATE_TARGETS :=
 include $(BUILD_SYSTEM)/executable_internal.mk
 endif
@@ -71,3 +79,5 @@
 LOCAL_NO_2ND_ARCH_MODULE_SUFFIX :=
 
 my_module_arch_supported :=
+
+endif
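
The guard added to core/executable.mk above skips a target executable entirely when SANITIZE_TARGET includes "address" and the module either forces static linking or opts out of clang. A hypothetical Android.mk module that would be skipped under such a build is sketched below; the module name and source file are illustrative only.

LOCAL_PATH := $(call my-dir)
include $(CLEAR_VARS)
# Hypothetical module, not part of this change.
LOCAL_MODULE := example_static_tool
LOCAL_SRC_FILES := example_static_tool.cpp
# Either of the next two settings trips my_skip_this_target under
# SANITIZE_TARGET=address: ASan needs dynamic linking and clang.
LOCAL_FORCE_STATIC_EXECUTABLE := true
#LOCAL_CLANG := false
include $(BUILD_EXECUTABLE)
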
diff --git a/core/executable_internal.mk b/core/executable_internal.mk
index cdaf293..febea98 100644
--- a/core/executable_internal.mk
+++ b/core/executable_internal.mk
@@ -35,17 +35,21 @@
 
 # Define PRIVATE_ variables from global vars
 my_target_global_ld_dirs := $($(LOCAL_2ND_ARCH_VAR_PREFIX)TARGET_GLOBAL_LD_DIRS)
-my_target_libprofile_rt := $($(LOCAL_2ND_ARCH_VAR_PREFIX)TARGET_LIBPROFILE_RT)
-my_target_libgcov := $($(LOCAL_2ND_ARCH_VAR_PREFIX)TARGET_LIBGCOV)
 ifeq ($(LOCAL_NO_LIBGCC),true)
 my_target_libgcc :=
 else
 my_target_libgcc := $($(LOCAL_2ND_ARCH_VAR_PREFIX)TARGET_LIBGCC)
 endif
 my_target_libatomic := $($(LOCAL_2ND_ARCH_VAR_PREFIX)TARGET_LIBATOMIC)
+ifeq ($(LOCAL_NO_CRT),true)
+my_target_crtbegin_dynamic_o :=
+my_target_crtbegin_static_o :=
+my_target_crtend_o :=
+else
 my_target_crtbegin_dynamic_o := $($(LOCAL_2ND_ARCH_VAR_PREFIX)TARGET_CRTBEGIN_DYNAMIC_O)
 my_target_crtbegin_static_o := $($(LOCAL_2ND_ARCH_VAR_PREFIX)TARGET_CRTBEGIN_STATIC_O)
 my_target_crtend_o := $($(LOCAL_2ND_ARCH_VAR_PREFIX)TARGET_CRTEND_O)
+endif
 ifdef LOCAL_SDK_VERSION
 # Make sure the prebuilt NDK paths are put ahead of the TARGET_GLOBAL_LD_DIRS,
 # so we don't have race condition when the system libraries (such as libc, libstdc++) are also built in the tree.
@@ -60,23 +64,22 @@
 endif
 $(linked_module): PRIVATE_TARGET_GLOBAL_LD_DIRS := $(my_target_global_ld_dirs)
 $(linked_module): PRIVATE_TARGET_GLOBAL_LDFLAGS := $(my_target_global_ldflags)
-$(linked_module): PRIVATE_TARGET_LIBPROFILE_RT := $(my_target_libprofile_rt)
-$(linked_module): PRIVATE_TARGET_LIBGCOV := $(my_target_libgcov)
 $(linked_module): PRIVATE_TARGET_LIBGCC := $(my_target_libgcc)
 $(linked_module): PRIVATE_TARGET_LIBATOMIC := $(my_target_libatomic)
 $(linked_module): PRIVATE_TARGET_CRTBEGIN_DYNAMIC_O := $(my_target_crtbegin_dynamic_o)
 $(linked_module): PRIVATE_TARGET_CRTBEGIN_STATIC_O := $(my_target_crtbegin_static_o)
 $(linked_module): PRIVATE_TARGET_CRTEND_O := $(my_target_crtend_o)
 $(linked_module): PRIVATE_TARGET_OUT_INTERMEDIATE_LIBRARIES := $($(LOCAL_2ND_ARCH_VAR_PREFIX)TARGET_OUT_INTERMEDIATE_LIBRARIES)
+$(linked_module): PRIVATE_POST_LINK_CMD := $(LOCAL_POST_LINK_CMD)
 
 ifeq ($(LOCAL_FORCE_STATIC_EXECUTABLE),true)
-$(linked_module): PRIVATE_POST_LINK_CMD := $(LOCAL_POST_LINK_CMD)
 $(linked_module): $(my_target_crtbegin_static_o) $(all_objects) $(all_libraries) $(my_target_crtend_o)
 	$(transform-o-to-static-executable)
 	$(PRIVATE_POST_LINK_CMD)
 else
 $(linked_module): $(my_target_crtbegin_dynamic_o) $(all_objects) $(all_libraries) $(my_target_crtend_o)
 	$(transform-o-to-executable)
+	$(PRIVATE_POST_LINK_CMD)
 endif
 
 endif  # skip_build_from_source
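
Two behavior changes in core/executable_internal.mk are worth noting: LOCAL_NO_CRT := true now drops the crtbegin*/crtend objects from the link, and PRIVATE_POST_LINK_CMD runs for dynamically linked executables as well, not only static ones. A hedged sketch of a module exercising both follows; the module name, source file, and post-link command are made up for illustration.

LOCAL_PATH := $(call my-dir)
include $(CLEAR_VARS)
# Hypothetical module, not taken from this change.
LOCAL_MODULE := example_nocrt_bin
LOCAL_SRC_FILES := start.c
LOCAL_NO_CRT := true                          # link without crtbegin/crtend objects
LOCAL_POST_LINK_CMD := echo post-link step    # now runs after dynamic links too
include $(BUILD_EXECUTABLE)
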
diff --git a/core/executable_prefer_symlink.mk b/core/executable_prefer_symlink.mk
index fc97ad3..931550f 100644
--- a/core/executable_prefer_symlink.mk
+++ b/core/executable_prefer_symlink.mk
@@ -42,7 +42,7 @@
 # $(my_symlink) doesn't need to depend on $(PRIVATE_SRC_BINARY_NAME): we can generate a symlink to a nonexistent file.
 # If you added the dependency, make would compare the timestamp of the file against that of its symlink:
 # they are always equal, because make follows symlinks.
-$(my_symlink): $(LOCAL_MODULE_MAKEFILE)
+$(my_symlink): $(LOCAL_MODULE_MAKEFILE_DEP)
 	@echo "Symlink: $@ -> $(PRIVATE_SRC_BINARY_NAME)"
 	@mkdir -p $(dir $@)
 	@rm -rf $@
diff --git a/core/find-jdk-tools-jar.sh b/core/find-jdk-tools-jar.sh
index 0224829..ac0b3b6 100755
--- a/core/find-jdk-tools-jar.sh
+++ b/core/find-jdk-tools-jar.sh
@@ -16,5 +16,5 @@
         LSLINE=$(ls -l "$JAVAC")
         JAVAC=$(echo -n "$LSLINE" | sed -e "s/.* -> //")
     done
-    echo $JAVAC | sed -e "s:\(.*\)/bin/javac.*:\\1/lib/tools.jar:"
+    echo $JAVAC | sed -e 's:\(.*\)/javac$:\1/../lib/tools.jar:'
 fi
diff --git a/core/fuzz_test.mk b/core/fuzz_test.mk
new file mode 100644
index 0000000..fc582b3
--- /dev/null
+++ b/core/fuzz_test.mk
@@ -0,0 +1,34 @@
+###########################################
+## A thin wrapper around BUILD_EXECUTABLE
+## Common flags for fuzz tests are added.
+###########################################
+
+ifdef LOCAL_SDK_VERSION
+    $(error $(LOCAL_PATH): $(LOCAL_MODULE): NDK fuzz tests are not supported.)
+endif
+
+LOCAL_CFLAGS += -fsanitize-coverage=edge,indirect-calls,8bit-counters,trace-cmp
+LOCAL_STATIC_LIBRARIES += libLLVMFuzzer
+
+ifdef LOCAL_MODULE_PATH
+$(error $(LOCAL_PATH): Do not set LOCAL_MODULE_PATH when building test $(LOCAL_MODULE))
+endif
+
+ifdef LOCAL_MODULE_PATH_32
+$(error $(LOCAL_PATH): Do not set LOCAL_MODULE_PATH_32 when building test $(LOCAL_MODULE))
+endif
+
+ifdef LOCAL_MODULE_PATH_64
+$(error $(LOCAL_PATH): Do not set LOCAL_MODULE_PATH_64 when building test $(LOCAL_MODULE))
+endif
+
+LOCAL_MODULE_PATH_64 := $(TARGET_OUT_DATA_NATIVE_TESTS)/$(LOCAL_MODULE)
+LOCAL_MODULE_PATH_32 := $($(TARGET_2ND_ARCH_VAR_PREFIX)TARGET_OUT_DATA_NATIVE_TESTS)/$(LOCAL_MODULE)
+
+ifndef LOCAL_MULTILIB
+ifndef LOCAL_32_BIT_ONLY
+LOCAL_MULTILIB := both
+endif
+endif
+
+include $(BUILD_EXECUTABLE)
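
core/fuzz_test.mk is a thin wrapper over BUILD_EXECUTABLE: it adds -fsanitize-coverage instrumentation, links libLLVMFuzzer statically, rejects NDK (LOCAL_SDK_VERSION) builds, and forces the install path under $(TARGET_OUT_DATA_NATIVE_TESTS)/$(LOCAL_MODULE) for both architectures. Whether a BUILD_FUZZ_TEST alias is defined for it is not visible in this diff, so the hypothetical sketch below includes the makefile by path; the module name and source file are illustrative.

LOCAL_PATH := $(call my-dir)
include $(CLEAR_VARS)
# Hypothetical fuzz target.
LOCAL_MODULE := example_parser_fuzzer
LOCAL_SRC_FILES := parser_fuzzer.cpp    # expected to define LLVMFuzzerTestOneInput()
LOCAL_CFLAGS := -Wall -Werror
include $(BUILD_SYSTEM)/fuzz_test.mk
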
diff --git a/core/goma.mk b/core/goma.mk
new file mode 100644
index 0000000..6535b3e
--- /dev/null
+++ b/core/goma.mk
@@ -0,0 +1,67 @@
+#
+# Copyright (C) 2015 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+# Notice: this works only with Google's Goma build infrastructure.
+ifneq ($(filter-out false,$(USE_GOMA)),)
+  # Check if USE_NINJA is not false because GNU make won't work well
+  # with goma. Note this file is evaluated twice, once by GNU make and
+  # once by kati with USE_NINJA=false. We do this check in the former
+  # pass.
+  ifndef KATI
+    ifeq ($(USE_NINJA),false)
+      $(error USE_GOMA=true is not compatible with USE_NINJA=false)
+    endif
+  endif
+
+  # Goma requires a lot of processes and file descriptors.
+  ifeq ($(shell echo $$(($$(ulimit -u) < 2500 || $$(ulimit -n) < 16000))),1)
+    $(warning Max user processes and/or open files are insufficient)
+    ifeq ($(shell uname),Darwin)
+      $(error See go/ma/how-to-use-goma/how-to-use-goma-for-android to relax the limit)
+    else
+      $(error Adjust the limit by ulimit -u and ulimit -n)
+    endif
+  endif
+
+  ifdef GOMA_DIR
+    goma_dir := $(GOMA_DIR)
+  else
+    goma_dir := $(HOME)/goma
+  endif
+  goma_ctl := $(goma_dir)/goma_ctl.py
+  GOMA_CC := $(goma_dir)/gomacc
+
+  $(if $(wildcard $(goma_ctl)),, \
+   $(warning You should have goma in $$GOMA_DIR or $(HOME)/goma) \
+   $(error See go/ma/how-to-use-goma/how-to-use-goma-for-android for detail))
+
+  # Append gomacc to existing *_WRAPPER variables so it's possible to
+  # use both ccache and gomacc.
+  CC_WRAPPER := $(strip $(CC_WRAPPER) $(GOMA_CC))
+  CXX_WRAPPER := $(strip $(CXX_WRAPPER) $(GOMA_CC))
+
+  # gomacc can start the goma client's daemon process automatically, but
+  # it is safer and faster to start it up beforehand. We run this as a
+  # background process so it won't slow down the build.
+  # We use the "ensure_start" command when compiler_proxy is already
+  # running with the GOMA_HERMETIC=error flag; otherwise the
+  # compiler_proxy is restarted.
+  # TODO(hamaji): Remove this condition after http://b/25676777 is fixed.
+  $(shell ( if ( curl http://localhost:$$($(GOMA_CC) port)/flagz | grep GOMA_HERMETIC=error ); then cmd=ensure_start; else cmd=restart; fi; GOMA_HERMETIC=error $(goma_ctl) $${cmd} ) &> /dev/null &)
+
+  goma_ctl :=
+  goma_dir :=
+endif
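
In short, goma.mk only takes effect when USE_GOMA is set to something other than "false": it appends gomacc to CC_WRAPPER/CXX_WRAPPER (so it composes with ccache), refuses to run with USE_NINJA=false, checks that ulimit -u is at least 2500 and ulimit -n at least 16000, and warms up compiler_proxy in the background. A hedged sketch of the knobs a developer might set, for example in buildspec.mk or the environment; the values shown are illustrative, not recommendations from this change.

# Illustrative settings only.
USE_GOMA := true
GOMA_DIR := $(HOME)/goma    # also the default location when GOMA_DIR is unset
# Leave USE_NINJA at its default; goma.mk errors out on USE_NINJA=false.
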
diff --git a/core/host_dalvik_java_library.mk b/core/host_dalvik_java_library.mk
index 4814d70..83047d4 100644
--- a/core/host_dalvik_java_library.mk
+++ b/core/host_dalvik_java_library.mk
@@ -28,13 +28,14 @@
 #######################################
 
 ifneq ($(LOCAL_NO_STANDARD_LIBRARIES),true)
-  LOCAL_JAVA_LIBRARIES +=  core-libart-hostdex
+  LOCAL_JAVA_LIBRARIES += core-oj-hostdex core-libart-hostdex
 endif
 
 full_classes_compiled_jar := $(intermediates.COMMON)/classes-full-debug.jar
 full_classes_jarjar_jar := $(intermediates.COMMON)/classes-jarjar.jar
 full_classes_jar := $(intermediates.COMMON)/classes.jar
 full_classes_jack := $(intermediates.COMMON)/classes.jack
+jack_check_timestamp := $(intermediates.COMMON)/jack.check.timestamp
 built_dex := $(intermediates.COMMON)/classes.dex
 
 LOCAL_INTERMEDIATE_TARGETS += \
@@ -42,33 +43,35 @@
     $(full_classes_jarjar_jar) \
     $(full_classes_jack) \
     $(full_classes_jar) \
+    $(jack_check_timestamp) \
     $(built_dex)
 
 # See comment in java.mk
-java_alternative_checked_module := $(full_classes_compiled_jar)
+ifndef LOCAL_CHECKED_MODULE
+ifdef LOCAL_JACK_ENABLED
+LOCAL_CHECKED_MODULE := $(jack_check_timestamp)
+else
+LOCAL_CHECKED_MODULE := $(full_classes_compiled_jar)
+endif
+endif
 
 #######################################
 include $(BUILD_SYSTEM)/base_rules.mk
 #######################################
+java_sources := $(addprefix $(LOCAL_PATH)/, $(filter %.java,$(LOCAL_SRC_FILES))) \
+                $(filter %.java,$(LOCAL_GENERATED_SOURCES))
+all_java_sources := $(java_sources)
 
-$(full_classes_compiled_jar): PRIVATE_JAVAC_DEBUG_FLAGS := -g
-
-java_alternative_checked_module :=
+include $(BUILD_SYSTEM)/java_common.mk
 
 # The layers file allows you to enforce a layering between java packages.
 # Run build/tools/java-layers.py for more details.
 layers_file := $(addprefix $(LOCAL_PATH)/, $(LOCAL_JAVA_LAYERS_FILE))
 
-$(LOCAL_INTERMEDIATE_TARGETS): \
-	PRIVATE_CLASS_INTERMEDIATES_DIR := $(intermediates.COMMON)/classes
-$(LOCAL_INTERMEDIATE_TARGETS): \
-	PRIVATE_SOURCE_INTERMEDIATES_DIR := $(LOCAL_INTERMEDIATE_SOURCE_DIR)
-$(LOCAL_INTERMEDIATE_TARGETS): PRIVATE_RMTYPEDEFS :=
-
 $(cleantarget): PRIVATE_CLEAN_FILES += $(intermediates.COMMON)
 
 $(full_classes_compiled_jar): PRIVATE_JAVA_LAYERS_FILE := $(layers_file)
-$(full_classes_compiled_jar): PRIVATE_JAVACFLAGS := $(LOCAL_JAVACFLAGS)
+$(full_classes_compiled_jar): PRIVATE_JAVACFLAGS := $(GLOBAL_JAVAC_DEBUG_FLAGS) $(LOCAL_JAVACFLAGS)
 $(full_classes_compiled_jar): PRIVATE_JAR_EXCLUDE_FILES :=
 $(full_classes_compiled_jar): PRIVATE_JAR_PACKAGES :=
 $(full_classes_compiled_jar): PRIVATE_JAR_EXCLUDE_PACKAGES :=
@@ -78,7 +81,7 @@
         $(full_java_lib_deps) \
         $(jar_manifest_file) \
         $(proto_java_sources_file_stamp) \
-        $(LOCAL_MODULE_MAKEFILE) \
+        $(LOCAL_MODULE_MAKEFILE_DEP) \
         $(LOCAL_ADDITIONAL_DEPENDENCIES)
 	$(transform-host-java-to-package)
 
@@ -99,10 +102,6 @@
 	$(hide) $(ACP) -fp $< $@
 
 ifndef LOCAL_JACK_ENABLED
-$(built_dex): PRIVATE_INTERMEDIATES_DIR := $(intermediates.COMMON)
-$(built_dex): PRIVATE_DX_FLAGS := $(LOCAL_DX_FLAGS)
-$(built_dex): $(full_classes_jar) $(DX)
-	$(transform-classes.jar-to-dex)
 
 $(LOCAL_BUILT_MODULE): PRIVATE_DEX_FILE := $(built_dex)
 $(LOCAL_BUILT_MODULE): PRIVATE_SOURCE_ARCHIVE := $(full_classes_jarjar_jar)
@@ -123,16 +122,22 @@
 $(LOCAL_INTERMEDIATE_TARGETS): \
 	PRIVATE_JACK_INCREMENTAL_DIR :=
 endif
-$(LOCAL_INTERMEDIATE_TARGETS):  PRIVATE_JACK_DEBUG_FLAGS := -g
+$(LOCAL_INTERMEDIATE_TARGETS): PRIVATE_JACK_FLAGS := $(GLOBAL_JAVAC_DEBUG_FLAGS) $(LOCAL_JACK_FLAGS)
+$(LOCAL_INTERMEDIATE_TARGETS): PRIVATE_JACK_VERSION := $(LOCAL_JACK_VERSION)
+$(LOCAL_INTERMEDIATE_TARGETS): PRIVATE_JACK_MIN_SDK_VERSION := $(PLATFORM_JACK_MIN_SDK_VERSION)
 
+jack_all_deps := $(java_sources) $(java_resource_sources) $(full_jack_deps) \
+        $(jar_manifest_file) $(proto_java_sources_file_stamp) $(LOCAL_MODULE_MAKEFILE_DEP) \
+        $(LOCAL_ADDITIONAL_DEPENDENCIES) $(JACK)
 $(built_dex): PRIVATE_CLASSES_JACK := $(full_classes_jack)
-$(built_dex): PRIVATE_JACK_FLAGS := $(LOCAL_JACK_FLAGS)
-$(built_dex): $(java_sources) $(java_resource_sources) $(full_jack_lib_deps) \
-        $(jar_manifest_file) $(proto_java_sources_file_stamp) $(LOCAL_MODULE_MAKEFILE) \
-        $(LOCAL_MODULE_MAKEFILE) $(LOCAL_ADDITIONAL_DEPENDENCIES) $(JACK_JAR) $(JACK_LAUNCHER_JAR)
+$(built_dex): $(jack_all_deps) | setup-jack-server
 	@echo Building with Jack: $@
 	$(jack-java-to-dex)
 
+$(jack_check_timestamp): $(jack_all_deps) | setup-jack-server
+	@echo Checking build with Jack: $@
+	$(jack-check-java)
+
 # $(full_classes_jack) is just a by-product of $(built_dex).
 # The dummy command was added because, without it, make misses the fact that $(built_dex) also
 # changes $(full_classes_jack).
diff --git a/core/host_dalvik_static_java_library.mk b/core/host_dalvik_static_java_library.mk
index 90bcc12..c296be3 100644
--- a/core/host_dalvik_static_java_library.mk
+++ b/core/host_dalvik_static_java_library.mk
@@ -24,7 +24,7 @@
 LOCAL_UNINSTALLABLE_MODULE := true
 LOCAL_IS_STATIC_JAVA_LIBRARY := true
 USE_CORE_LIB_BOOTCLASSPATH := true
-LOCAL_JAVA_LIBRARIES += core-libart-hostdex
+LOCAL_JAVA_LIBRARIES += core-oj-hostdex core-libart-hostdex
 
 intermediates.COMMON := $(call intermediates-dir-for,JAVA_LIBRARIES,$(LOCAL_MODULE),true,COMMON,)
 full_classes_jack := $(intermediates.COMMON)/classes.jack
@@ -34,7 +34,10 @@
 include $(BUILD_SYSTEM)/host_java_library.mk
 # proguard is not supported
 # *.proto files are not supported
-$(full_classes_jack): PRIVATE_JACK_FLAGS := $(LOCAL_JACK_FLAGS)
+$(LOCAL_INTERMEDIATE_TARGETS): PRIVATE_JACK_FLAGS := $(GLOBAL_JAVAC_DEBUG_FLAGS) $(LOCAL_JACK_FLAGS)
+$(LOCAL_INTERMEDIATE_TARGETS): PRIVATE_JACK_VERSION := $(LOCAL_JACK_VERSION)
+$(LOCAL_INTERMEDIATE_TARGETS): PRIVATE_JACK_MIN_SDK_VERSION := $(PLATFORM_JACK_MIN_SDK_VERSION)
+
 $(full_classes_jack): PRIVATE_JARJAR_RULES := $(LOCAL_JARJAR_RULES)
 $(full_classes_jack): \
 	PRIVATE_JACK_INTERMEDIATES_DIR := $(intermediates.COMMON)/jack-rsc
@@ -45,10 +48,10 @@
 $(full_classes_jack): \
 	PRIVATE_JACK_INCREMENTAL_DIR :=
 endif
-$(full_classes_jack): $(java_sources) $(java_resource_sources) $(full_jack_lib_deps) \
-        $(jar_manifest_file) $(layers_file) $(LOCAL_MODULE_MAKEFILE) \
+$(full_classes_jack): $(java_sources) $(java_resource_sources) $(full_jack_deps) \
+        $(jar_manifest_file) $(layers_file) $(LOCAL_MODULE_MAKEFILE_DEP) \
         $(LOCAL_ADDITIONAL_DEPENDENCIES) $(LOCAL_JARJAR_RULES) \
-        $(JACK_JAR) $(JACK_LAUNCHER_JAR)
+        $(JACK) | setup-jack-server
 	@echo Building with Jack: $@
 	$(java-to-jack)
 
diff --git a/core/host_executable.mk b/core/host_executable.mk
index 0091f3f..6f19bd1 100644
--- a/core/host_executable.mk
+++ b/core/host_executable.mk
@@ -1,5 +1,6 @@
-
+LOCAL_IS_HOST_MODULE := true
 my_prefix := HOST_
+LOCAL_HOST_PREFIX :=
 include $(BUILD_SYSTEM)/multilib.mk
 
 ifndef LOCAL_MODULE_HOST_ARCH
@@ -50,5 +51,57 @@
 LOCAL_2ND_ARCH_VAR_PREFIX :=
 endif  # HOST_2ND_ARCH
 
+ifdef HOST_CROSS_OS
+my_prefix := HOST_CROSS_
+LOCAL_HOST_PREFIX := $(my_prefix)
+include $(BUILD_SYSTEM)/module_arch_supported.mk
+ifeq ($(my_module_arch_supported),true)
+# Build for Windows
+OVERRIDE_BUILT_MODULE_PATH :=
+# we don't want others using the cross compiled version
+saved_LOCAL_BUILT_MODULE := $(LOCAL_BUILT_MODULE)
+saved_LOCAL_INSTALLED_MODULE := $(LOCAL_INSTALLED_MODULE)
+saved_LOCAL_LDFLAGS := $(LOCAL_LDFLAGS)
+LOCAL_BUILT_MODULE :=
+LOCAL_INSTALLED_MODULE :=
+LOCAL_INTERMEDIATE_TARGETS :=
+
+ifeq ($(LOCAL_NO_FPIE),)
+LOCAL_LDFLAGS += $(HOST_CROSS_FPIE_FLAGS)
+endif
+
+include $(BUILD_SYSTEM)/host_executable_internal.mk
+LOCAL_LDFLAGS := $(saved_LOCAL_LDFLAGS)
+LOCAL_BUILT_MODULE := $(saved_LOCAL_BUILT_MODULE)
+LOCAL_INSTALLED_MODULE := $(saved_LOCAL_INSTALLED_MODULE)
+endif
+
+ifdef HOST_CROSS_2ND_ARCH
+LOCAL_2ND_ARCH_VAR_PREFIX := $(HOST_CROSS_2ND_ARCH_VAR_PREFIX)
+include $(BUILD_SYSTEM)/module_arch_supported.mk
+ifeq ($(my_module_arch_supported),true)
+OVERRIDE_BUILT_MODULE_PATH :=
+# we don't want others using the cross compiled version
+saved_LOCAL_BUILT_MODULE := $(LOCAL_BUILT_MODULE)
+saved_LOCAL_INSTALLED_MODULE := $(LOCAL_INSTALLED_MODULE)
+saved_LOCAL_LDFLAGS := $(LOCAL_LDFLAGS)
+LOCAL_BUILT_MODULE :=
+LOCAL_INSTALLED_MODULE :=
+LOCAL_INTERMEDIATE_TARGETS :=
+
+ifeq ($(LOCAL_NO_FPIE),)
+LOCAL_LDFLAGS += $(HOST_CROSS_FPIE_FLAGS)
+endif
+
+include $(BUILD_SYSTEM)/host_executable_internal.mk
+LOCAL_LDFLAGS := $(saved_LOCAL_LDFLAGS)
+LOCAL_BUILT_MODULE := $(saved_LOCAL_BUILT_MODULE)
+LOCAL_INSTALLED_MODULE := $(saved_LOCAL_INSTALLED_MODULE)
+endif
+LOCAL_2ND_ARCH_VAR_PREFIX :=
+endif
+LOCAL_HOST_PREFIX :=
+endif
+
 LOCAL_NO_2ND_ARCH_MODULE_SUFFIX :=
 my_module_arch_supported :=
diff --git a/core/host_executable_internal.mk b/core/host_executable_internal.mk
index 0c0ac3d..b682ffd 100644
--- a/core/host_executable_internal.mk
+++ b/core/host_executable_internal.mk
@@ -5,12 +5,11 @@
 ## None.
 ###########################################################
 
-LOCAL_IS_HOST_MODULE := true
 ifeq ($(strip $(LOCAL_MODULE_CLASS)),)
 LOCAL_MODULE_CLASS := EXECUTABLES
 endif
 ifeq ($(strip $(LOCAL_MODULE_SUFFIX)),)
-LOCAL_MODULE_SUFFIX := $(HOST_EXECUTABLE_SUFFIX)
+LOCAL_MODULE_SUFFIX := $($(my_prefix)EXECUTABLE_SUFFIX)
 endif
 
 $(call host-executable-hook)
@@ -27,7 +26,7 @@
 
 include $(BUILD_SYSTEM)/binary.mk
 
-my_host_libprofile_rt := $($(LOCAL_2ND_ARCH_VAR_PREFIX)HOST_LIBPROFILE_RT)
+my_host_libprofile_rt := $($(LOCAL_2ND_ARCH_VAR_PREFIX)$(my_prefix)LIBPROFILE_RT)
 $(LOCAL_BUILT_MODULE): PRIVATE_HOST_LIBPROFILE_RT := $(my_host_libprofile_rt)
 
 $(LOCAL_BUILT_MODULE): $(all_objects) $(all_libraries)
diff --git a/core/host_fuzz_test.mk b/core/host_fuzz_test.mk
new file mode 100644
index 0000000..cc7baad
--- /dev/null
+++ b/core/host_fuzz_test.mk
@@ -0,0 +1,9 @@
+################################################
+## A thin wrapper around BUILD_HOST_EXECUTABLE
+## Common flags for host fuzz tests are added.
+################################################
+
+LOCAL_CFLAGS += -fsanitize-coverage=edge,indirect-calls,8bit-counters,trace-cmp
+LOCAL_STATIC_LIBRARIES += libLLVMFuzzer
+
+include $(BUILD_HOST_EXECUTABLE)
diff --git a/core/host_java_library.mk b/core/host_java_library.mk
index 47189d7..97079fd 100644
--- a/core/host_java_library.mk
+++ b/core/host_java_library.mk
@@ -30,20 +30,57 @@
 endif
 
 full_classes_compiled_jar := $(intermediates.COMMON)/classes-full-debug.jar
+full_classes_jarjar_jar := $(intermediates.COMMON)/classes-jarjar.jar
 emma_intermediates_dir := $(intermediates.COMMON)/emma_out
 # emma is hardcoded to use the leaf name of its input for the output file --
 # only the output directory can be changed
-full_classes_emma_jar := $(emma_intermediates_dir)/lib/$(notdir $(full_classes_compiled_jar))
+full_classes_emma_jar := $(emma_intermediates_dir)/lib/$(notdir $(full_classes_jarjar_jar))
 
 LOCAL_INTERMEDIATE_TARGETS += \
     $(full_classes_compiled_jar) \
+    $(full_classes_jarjar_jar) \
     $(full_classes_emma_jar)
 
 #######################################
 include $(BUILD_SYSTEM)/base_rules.mk
 #######################################
 
-$(LOCAL_INTERMEDIATE_TARGETS): PRIVATE_RMTYPEDEFS :=
+java_sources := $(addprefix $(LOCAL_PATH)/, $(filter %.java,$(LOCAL_SRC_FILES))) \
+                $(filter %.java,$(LOCAL_GENERATED_SOURCES))
+all_java_sources := $(java_sources)
+
+include $(BUILD_SYSTEM)/java_common.mk
+
+# The layers file allows you to enforce a layering between java packages.
+# Run build/tools/java-layers.py for more details.
+layers_file := $(addprefix $(LOCAL_PATH)/, $(LOCAL_JAVA_LAYERS_FILE))
+
+$(full_classes_compiled_jar): PRIVATE_JAVA_LAYERS_FILE := $(layers_file)
+$(full_classes_compiled_jar): PRIVATE_JAVACFLAGS := $(GLOBAL_JAVAC_DEBUG_FLAGS) $(LOCAL_JAVACFLAGS)
+$(full_classes_compiled_jar): PRIVATE_JAR_EXCLUDE_FILES :=
+$(full_classes_compiled_jar): PRIVATE_JAR_PACKAGES :=
+$(full_classes_compiled_jar): PRIVATE_JAR_EXCLUDE_PACKAGES :=
+$(full_classes_compiled_jar): \
+        $(java_sources) \
+        $(java_resource_sources) \
+        $(full_java_lib_deps) \
+        $(jar_manifest_file) \
+        $(proto_java_sources_file_stamp) \
+        $(LOCAL_MODULE_MAKEFILE_DEP) \
+        $(LOCAL_ADDITIONAL_DEPENDENCIES)
+	$(transform-host-java-to-package)
+
+# Run jarjar if necessary, otherwise just copy the file.
+ifneq ($(strip $(LOCAL_JARJAR_RULES)),)
+$(full_classes_jarjar_jar): PRIVATE_JARJAR_RULES := $(LOCAL_JARJAR_RULES)
+$(full_classes_jarjar_jar): $(full_classes_compiled_jar) $(LOCAL_JARJAR_RULES) | $(JARJAR)
+	@echo JarJar: $@
+	$(hide) java -jar $(JARJAR) process $(PRIVATE_JARJAR_RULES) $< $@
+else
+$(full_classes_jarjar_jar): $(full_classes_compiled_jar) | $(ACP)
+	@echo Copying: $@
+	$(hide) $(ACP) -fp $< $@
+endif
 
 ifeq (true,$(LOCAL_EMMA_INSTRUMENT))
 $(full_classes_emma_jar): PRIVATE_EMMA_COVERAGE_FILE := $(intermediates.COMMON)/coverage.em
@@ -57,7 +94,7 @@
 endif
 # this rule will generate both $(PRIVATE_EMMA_COVERAGE_FILE) and
 # $(full_classes_emma_jar)
-$(full_classes_emma_jar) : $(full_classes_compiled_jar) | $(EMMA_JAR)
+$(full_classes_emma_jar) : $(full_classes_jarjar_jar) | $(EMMA_JAR)
 	$(transform-classes.jar-to-emma)
 
 $(built_javalib_jar) : $(full_classes_emma_jar)
@@ -65,27 +102,8 @@
 	$(hide) $(ACP) -fp $< $@
 
 else # LOCAL_EMMA_INSTRUMENT
-# Directly build into $(built_javalib_jar).
-full_classes_compiled_jar := $(built_javalib_jar)
+$(built_javalib_jar): $(full_classes_jarjar_jar) | $(ACP)
+	@echo Copying: $@
+	$(hide) $(ACP) -fp $< $@
 endif # LOCAL_EMMA_INSTRUMENT
 
-$(full_classes_compiled_jar): PRIVATE_JAVAC_DEBUG_FLAGS := -g
-
-# The layers file allows you to enforce a layering between java packages.
-# Run build/tools/java-layers.py for more details.
-layers_file := $(addprefix $(LOCAL_PATH)/, $(LOCAL_JAVA_LAYERS_FILE))
-
-$(full_classes_compiled_jar): PRIVATE_JAVA_LAYERS_FILE := $(layers_file)
-$(full_classes_compiled_jar): PRIVATE_JAVACFLAGS := $(LOCAL_JAVACFLAGS)
-$(full_classes_compiled_jar): PRIVATE_JAR_EXCLUDE_FILES :=
-$(full_classes_compiled_jar): PRIVATE_JAR_PACKAGES :=
-$(full_classes_compiled_jar): PRIVATE_JAR_EXCLUDE_PACKAGES :=
-$(full_classes_compiled_jar): \
-        $(java_sources) \
-        $(java_resource_sources) \
-        $(full_java_lib_deps) \
-        $(jar_manifest_file) \
-        $(proto_java_sources_file_stamp) \
-        $(LOCAL_MODULE_MAKEFILE) \
-        $(LOCAL_ADDITIONAL_DEPENDENCIES)
-	$(transform-host-java-to-package)
diff --git a/core/host_native_test.mk b/core/host_native_test.mk
index b54bd3a..7cba1ae 100644
--- a/core/host_native_test.mk
+++ b/core/host_native_test.mk
@@ -5,4 +5,22 @@
 
 include $(BUILD_SYSTEM)/host_test_internal.mk
 
+needs_symlink :=
+ifndef LOCAL_MULTILIB
+  ifndef LOCAL_32_BIT_ONLY
+    LOCAL_MULTILIB := both
+
+    ifeq (,$(LOCAL_MODULE_STEM_32)$(LOCAL_MODULE_STEM_64))
+      LOCAL_MODULE_STEM_32 := $(LOCAL_MODULE)32
+      LOCAL_MODULE_STEM_64 := $(LOCAL_MODULE)64
+      needs_symlink := true
+    endif
+  endif
+endif
+
 include $(BUILD_HOST_EXECUTABLE)
+
+ifdef needs_symlink
+include $(BUILD_SYSTEM)/executable_prefer_symlink.mk
+needs_symlink :=
+endif
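
With the hunk above, a host native test that sets neither LOCAL_MULTILIB nor LOCAL_32_BIT_ONLY is now built for both architectures, with "32" and "64" appended to the module stems and a preferred symlink generated via executable_prefer_symlink.mk. A hedged Android.mk sketch follows; the module name and source file are hypothetical.

LOCAL_PATH := $(call my-dir)
include $(CLEAR_VARS)
# Hypothetical host gtest module.
LOCAL_MODULE := example_host_unittests
LOCAL_SRC_FILES := example_unittests.cpp
# Leaving LOCAL_MULTILIB/LOCAL_32_BIT_ONLY unset yields example_host_unittests32,
# example_host_unittests64, and a preferred symlink named example_host_unittests.
include $(BUILD_HOST_NATIVE_TEST)
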
diff --git a/core/host_shared_library.mk b/core/host_shared_library.mk
index e840780..2e0c9f1c 100644
--- a/core/host_shared_library.mk
+++ b/core/host_shared_library.mk
@@ -1,4 +1,6 @@
+LOCAL_IS_HOST_MODULE := true
 my_prefix := HOST_
+LOCAL_HOST_PREFIX :=
 include $(BUILD_SYSTEM)/multilib.mk
 
 ifndef LOCAL_MODULE_HOST_ARCH
@@ -34,6 +36,45 @@
 LOCAL_2ND_ARCH_VAR_PREFIX :=
 endif  # HOST_2ND_ARCH
 
+ifdef HOST_CROSS_OS
+my_prefix := HOST_CROSS_
+LOCAL_HOST_PREFIX := $(my_prefix)
+include $(BUILD_SYSTEM)/module_arch_supported.mk
+ifeq ($(my_module_arch_supported),true)
+# Build for Windows
+OVERRIDE_BUILT_MODULE_PATH :=
+LOCAL_BUILT_MODULE :=
+LOCAL_MODULE_SUFFIX :=
+# We don't want makefiles using the cross-compiled host tool
+saved_LOCAL_INSTALLED_MODULE := $(LOCAL_INSTALLED_MODULE)
+LOCAL_INSTALLED_MODULE :=
+LOCAL_INTERMEDIATE_TARGETS :=
+
+include $(BUILD_SYSTEM)/host_shared_library_internal.mk
+LOCAL_INSTALLED_MODULE := $(saved_LOCAL_INSTALLED_MODULE)
+endif
+
+ifdef HOST_CROSS_2ND_ARCH
+LOCAL_2ND_ARCH_VAR_PREFIX := $(HOST_CROSS_2ND_ARCH_VAR_PREFIX)
+include $(BUILD_SYSTEM)/module_arch_supported.mk
+ifeq ($(my_module_arch_supported),true)
+# Build for HOST_CROSS_2ND_ARCH
+OVERRIDE_BUILT_MODULE_PATH :=
+LOCAL_BUILT_MODULE :=
+LOCAL_MODULE_SUFFIX :=
+# We don't want makefiles using the cross-compiled host tool
+saved_LOCAL_INSTALLED_MODULE := $(LOCAL_INSTALLED_MODULE)
+LOCAL_INSTALLED_MODULE :=
+LOCAL_INTERMEDIATE_TARGETS :=
+
+include $(BUILD_SYSTEM)/host_shared_library_internal.mk
+LOCAL_INSTALLED_MODULE := $(saved_LOCAL_INSTALLED_MODULE)
+endif
+LOCAL_2ND_ARCH_VAR_PREFIX :=
+endif
+LOCAL_HOST_PREFIX :=
+endif
+
 my_module_arch_supported :=
 
 ###########################################################
diff --git a/core/host_shared_library_internal.mk b/core/host_shared_library_internal.mk
index 645098a..272e76f 100644
--- a/core/host_shared_library_internal.mk
+++ b/core/host_shared_library_internal.mk
@@ -7,13 +7,11 @@
 ## LOCAL_MODULE_SUFFIX will be set for you.
 ###########################################################
 
-LOCAL_IS_HOST_MODULE := true
-
 ifeq ($(strip $(LOCAL_MODULE_CLASS)),)
 LOCAL_MODULE_CLASS := SHARED_LIBRARIES
 endif
 ifeq ($(strip $(LOCAL_MODULE_SUFFIX)),)
-LOCAL_MODULE_SUFFIX := $(HOST_SHLIB_SUFFIX)
+LOCAL_MODULE_SUFFIX := $($(my_prefix)SHLIB_SUFFIX)
 endif
 ifneq ($(strip $(OVERRIDE_BUILT_MODULE_PATH)),)
 $(error $(LOCAL_PATH): Illegal use of OVERRIDE_BUILT_MODULE_PATH)
@@ -36,17 +34,17 @@
 
 # Put the built modules of all shared libraries in a common directory
 # to simplify the link line.
-OVERRIDE_BUILT_MODULE_PATH := $($(LOCAL_2ND_ARCH_VAR_PREFIX)HOST_OUT_INTERMEDIATE_LIBRARIES)
+OVERRIDE_BUILT_MODULE_PATH := $($(LOCAL_2ND_ARCH_VAR_PREFIX)$(my_prefix)OUT_INTERMEDIATE_LIBRARIES)
 
 include $(BUILD_SYSTEM)/binary.mk
 
-my_host_libprofile_rt := $($(LOCAL_2ND_ARCH_VAR_PREFIX)HOST_LIBPROFILE_RT)
+my_host_libprofile_rt := $($(LOCAL_2ND_ARCH_VAR_PREFIX)$(my_prefix)LIBPROFILE_RT)
 $(LOCAL_BUILD_MODULE): PRIVATE_HOST_LIBPROFILE_RT := $(my_host_libprofile_rt)
 
 $(LOCAL_BUILT_MODULE): \
         $(all_objects) \
         $(all_libraries) \
-        $(LOCAL_MODULE_MAKEFILE) \
+        $(LOCAL_MODULE_MAKEFILE_DEP) \
         $(LOCAL_ADDITIONAL_DEPENDENCIES)
 	$(transform-host-o-to-shared-lib)
 
diff --git a/core/host_static_library.mk b/core/host_static_library.mk
index 52c42ef..068c702 100644
--- a/core/host_static_library.mk
+++ b/core/host_static_library.mk
@@ -1,4 +1,6 @@
+LOCAL_IS_HOST_MODULE := true
 my_prefix := HOST_
+LOCAL_HOST_PREFIX :=
 include $(BUILD_SYSTEM)/multilib.mk
 
 ifndef LOCAL_MODULE_HOST_ARCH
@@ -34,6 +36,37 @@
 LOCAL_2ND_ARCH_VAR_PREFIX :=
 endif  # HOST_2ND_ARCH
 
+ifdef HOST_CROSS_OS
+my_prefix := HOST_CROSS_
+LOCAL_HOST_PREFIX := $(my_prefix)
+include $(BUILD_SYSTEM)/module_arch_supported.mk
+ifeq ($(my_module_arch_supported),true)
+# Build for Windows
+OVERRIDE_BUILT_MODULE_PATH :=
+LOCAL_BUILT_MODULE :=
+LOCAL_INSTALLED_MODULE :=
+LOCAL_INTERMEDIATE_TARGETS :=
+
+include $(BUILD_SYSTEM)/host_static_library_internal.mk
+endif
+
+ifdef HOST_CROSS_2ND_ARCH
+LOCAL_2ND_ARCH_VAR_PREFIX := $(HOST_CROSS_2ND_ARCH_VAR_PREFIX)
+include $(BUILD_SYSTEM)/module_arch_supported.mk
+ifeq ($(my_module_arch_supported),true)
+# Build for HOST_CROSS_2ND_ARCH
+OVERRIDE_BUILT_MODULE_PATH :=
+LOCAL_BUILT_MODULE :=
+LOCAL_INSTALLED_MODULE :=
+LOCAL_INTERMEDIATE_TARGETS :=
+
+include $(BUILD_SYSTEM)/host_static_library_internal.mk
+endif
+LOCAL_2ND_ARCH_VAR_PREFIX :=
+endif
+LOCAL_HOST_PREFIX :=
+endif
+
 my_module_arch_supported :=
 
 ###########################################################
diff --git a/core/host_static_library_internal.mk b/core/host_static_library_internal.mk
index a533cf5..3946aa7 100644
--- a/core/host_static_library_internal.mk
+++ b/core/host_static_library_internal.mk
@@ -18,8 +18,6 @@
 endif
 LOCAL_UNINSTALLABLE_MODULE := true
 
-LOCAL_IS_HOST_MODULE := true
-
 include $(BUILD_SYSTEM)/binary.mk
 
 $(LOCAL_BUILT_MODULE): $(built_whole_libraries)
diff --git a/core/host_test_internal.mk b/core/host_test_internal.mk
index e0070c1..7f6aff0 100644
--- a/core/host_test_internal.mk
+++ b/core/host_test_internal.mk
@@ -2,14 +2,13 @@
 ## Shared definitions for all host test compilations.
 #####################################################
 
-ifeq ($(HOST_OS),windows)
-LOCAL_CFLAGS += -DGTEST_OS_WINDOWS
-else
-LOCAL_CFLAGS += -DGTEST_OS_LINUX
-LOCAL_LDLIBS += -lpthread
-endif
+LOCAL_CFLAGS_windows += -DGTEST_OS_WINDOWS
+LOCAL_CFLAGS_linux += -DGTEST_OS_LINUX
+LOCAL_LDLIBS_linux += -lpthread
+LOCAL_CFLAGS_darwin += -DGTEST_OS_LINUX
+LOCAL_LDLIBS_darwin += -lpthread
 
 LOCAL_CFLAGS += -DGTEST_HAS_STD_STRING -O0 -g
 LOCAL_C_INCLUDES +=  external/gtest/include
 
-LOCAL_STATIC_LIBRARIES += libgtest_host libgtest_main_host
+LOCAL_STATIC_LIBRARIES += libgtest_main_host libgtest_host
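
The rewrite above replaces the HOST_OS conditional with per-OS flag variables (LOCAL_CFLAGS_windows, LOCAL_CFLAGS_linux, LOCAL_CFLAGS_darwin, LOCAL_LDLIBS_linux, LOCAL_LDLIBS_darwin), letting one module definition serve both the native host build and the Windows cross build; it also reorders the gtest static libraries so libgtest_main_host precedes libgtest_host on the link line. A hedged sketch of the same per-OS convention in a module makefile; the flag values are hypothetical.

# Illustrative per-OS flags (values are made up).
LOCAL_CFLAGS         := -Wall
LOCAL_CFLAGS_linux   := -DUSE_EPOLL
LOCAL_CFLAGS_darwin  := -DUSE_KQUEUE
LOCAL_CFLAGS_windows := -DUSE_IOCP
LOCAL_LDLIBS_linux   := -lpthread
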
diff --git a/core/install_jni_libs_internal.mk b/core/install_jni_libs_internal.mk
index 16d0962..27b9697 100644
--- a/core/install_jni_libs_internal.mk
+++ b/core/install_jni_libs_internal.mk
@@ -55,7 +55,8 @@
 my_jni_filenames := $(notdir $(my_jni_shared_libraries))
 # Make sure the JNI libraries get installed
 my_shared_library_path := $($(my_2nd_arch_prefix)TARGET_OUT$(partition_tag)_SHARED_LIBRARIES)
-$(LOCAL_INSTALLED_MODULE) : | $(addprefix $(my_shared_library_path)/, $(my_jni_filenames))
+# Do not use an order-only dependency, because we want to rebuild the image if a JNI library is updated.
+$(LOCAL_INSTALLED_MODULE) : $(addprefix $(my_shared_library_path)/, $(my_jni_filenames))
 
 # Create symlink in the app specific lib path
 ifdef LOCAL_POST_INSTALL_CMD
@@ -93,7 +94,7 @@
 $(foreach lib, $(my_prebuilt_jni_libs), \
     $(eval $(call copy-one-file, $(lib), $(my_app_lib_path)/$(notdir $(lib)))))
 
-$(LOCAL_INSTALLED_MODULE) : | $(addprefix $(my_app_lib_path)/, $(notdir $(my_prebuilt_jni_libs)))
+$(LOCAL_INSTALLED_MODULE) : $(addprefix $(my_app_lib_path)/, $(notdir $(my_prebuilt_jni_libs)))
 endif  # my_embed_jni
 endif  # inner my_prebuilt_jni_libs
 endif  # outer my_prebuilt_jni_libs
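
The two hunks above turn the installed JNI libraries from order-only prerequisites into normal prerequisites, so an updated .so re-triggers installation of the dependent module. A generic GNU Make illustration of the difference, unrelated to the AOSP tree (recipe lines must begin with a tab):

# app.ok is rebuilt whenever lib.so is newer than it ...
app.ok: lib.so
	touch $@
# ... whereas an order-only prerequisite (after the |) only guarantees that
# lib.so exists before app2.ok is built; a newer lib.so does not cause a rebuild.
app2.ok: | lib.so
	touch $@
lib.so:
	touch $@
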
diff --git a/core/java.mk b/core/java.mk
index bac5ca7..bc8ed64 100644
--- a/core/java.mk
+++ b/core/java.mk
@@ -1,3 +1,4 @@
+# Target Java.
 # Requires:
 # LOCAL_MODULE_SUFFIX
 # LOCAL_MODULE_CLASS
@@ -30,17 +31,34 @@
         LOCAL_JAVA_LIBRARIES := android_stubs_current $(LOCAL_JAVA_LIBRARIES)
       else ifeq ($(LOCAL_SDK_VERSION)$(TARGET_BUILD_APPS),system_current)
         LOCAL_JAVA_LIBRARIES := android_system_stubs_current $(LOCAL_JAVA_LIBRARIES)
+      else ifeq ($(LOCAL_SDK_VERSION)$(TARGET_BUILD_APPS),test_current)
+        LOCAL_JAVA_LIBRARIES := android_test_stubs_current $(LOCAL_JAVA_LIBRARIES)
       else
         LOCAL_JAVA_LIBRARIES := sdk_v$(LOCAL_SDK_VERSION) $(LOCAL_JAVA_LIBRARIES)
       endif
+
+      ifeq ($(LOCAL_SDK_VERSION),current)
+        my_jack_min_sdk_version := $(PLATFORM_JACK_MIN_SDK_VERSION)
+      else ifeq ($(LOCAL_SDK_VERSION),system_current)
+        my_jack_min_sdk_version := $(PLATFORM_JACK_MIN_SDK_VERSION)
+      else ifeq ($(LOCAL_SDK_VERSION),test_current)
+        my_jack_min_sdk_version := $(PLATFORM_JACK_MIN_SDK_VERSION)
+      else
+        my_jack_min_sdk_version := $(LOCAL_SDK_VERSION)
+      endif
     endif
   endif
 else
+  my_jack_min_sdk_version := $(PLATFORM_JACK_MIN_SDK_VERSION)
   ifneq ($(LOCAL_NO_STANDARD_LIBRARIES),true)
     LOCAL_JAVA_LIBRARIES := $(TARGET_DEFAULT_JAVA_LIBRARIES) $(LOCAL_JAVA_LIBRARIES)
   endif
 endif
 
+ifneq (,$(strip $(LOCAL_MIN_SDK_VERSION)))
+  my_jack_min_sdk_version := $(LOCAL_MIN_SDK_VERSION)
+endif
+
 proto_sources := $(filter %.proto,$(LOCAL_SRC_FILES))
 ifneq ($(proto_sources),)
 ifeq ($(LOCAL_PROTOC_OPTIMIZE_TYPE),micro)
@@ -54,10 +72,12 @@
 endif
 endif
 
-# LOCAL_STATIC_JAVA_AAR_LIBRARIES are special LOCAL_STATIC_JAVA_LIBRARIES
-LOCAL_STATIC_JAVA_LIBRARIES := $(strip $(LOCAL_STATIC_JAVA_LIBRARIES) $(LOCAL_STATIC_JAVA_AAR_LIBRARIES))
-
-LOCAL_JAVA_LIBRARIES := $(sort $(LOCAL_JAVA_LIBRARIES))
+# LOCAL_STATIC_JAVA_AAR_LIBRARIES and LOCAL_STATIC_ANDROID_LIBRARIES are also LOCAL_STATIC_JAVA_LIBRARIES.
+LOCAL_STATIC_JAVA_LIBRARIES := $(strip $(LOCAL_STATIC_JAVA_LIBRARIES) \
+    $(LOCAL_STATIC_JAVA_AAR_LIBRARIES) \
+    $(LOCAL_STATIC_ANDROID_LIBRARIES))
+# LOCAL_SHARED_ANDROID_LIBRARIES are also LOCAL_JAVA_LIBRARIES.
+LOCAL_JAVA_LIBRARIES := $(sort $(LOCAL_JAVA_LIBRARIES) $(LOCAL_SHARED_ANDROID_LIBRARIES))
 
 LOCAL_BUILT_MODULE_STEM := $(strip $(LOCAL_BUILT_MODULE_STEM))
 ifeq ($(LOCAL_BUILT_MODULE_STEM),)
@@ -118,6 +138,7 @@
 full_classes_jack := $(intermediates.COMMON)/classes.jack
 # intermediate Jack library without shrink and obfuscation
 noshrob_classes_jack := $(intermediates.COMMON)/classes.noshrob.jack
+jack_check_timestamp := $(intermediates.COMMON)/jack.check.timestamp
 
 LOCAL_INTERMEDIATE_TARGETS += \
     $(full_classes_compiled_jar) \
@@ -128,6 +149,7 @@
     $(built_dex_intermediate) \
     $(full_classes_jack) \
     $(noshrob_classes_jack) \
+    $(jack_check_timestamp) \
     $(built_dex) \
     $(full_classes_stubs_jar)
 
@@ -142,6 +164,7 @@
 # Because names of the java files from RenderScript are unknown until the
 # .rs file(s) are compiled, we have to depend on a timestamp file.
 RenderScript_file_stamp :=
+rs_generated_res_dir :=
 rs_compatibility_jni_libs :=
 ifneq ($(renderscript_sources),)
 renderscript_sources_fullpath := $(addprefix $(LOCAL_PATH)/, $(renderscript_sources))
@@ -156,7 +179,7 @@
 else
   ifneq (,$(LOCAL_SDK_VERSION))
     # Set target-api for LOCAL_SDK_VERSIONs other than current.
-    ifneq (,$(filter-out current system_current, $(LOCAL_SDK_VERSION)))
+    ifneq (,$(filter-out current system_current test_current, $(LOCAL_SDK_VERSION)))
       renderscript_target_api := $(LOCAL_SDK_VERSION)
     endif
   endif  # LOCAL_SDK_VERSION is set
@@ -181,7 +204,7 @@
 renderscript_flags += $(LOCAL_RENDERSCRIPT_FLAGS)
 
 # prepend the RenderScript system include path
-ifneq ($(filter-out current system_current,$(LOCAL_SDK_VERSION))$(if $(TARGET_BUILD_APPS),$(filter current system_current,$(LOCAL_SDK_VERSION))),)
+ifneq ($(filter-out current system_current test_current,$(LOCAL_SDK_VERSION))$(if $(TARGET_BUILD_APPS),$(filter current system_current test_current,$(LOCAL_SDK_VERSION))),)
 # if a numeric LOCAL_SDK_VERSION, or current LOCAL_SDK_VERSION with TARGET_BUILD_APPS
 LOCAL_RENDERSCRIPT_INCLUDES := \
     $(HISTORICAL_SDK_VERSIONS_ROOT)/renderscript/clang-include \
@@ -214,7 +237,7 @@
 	$(transform-renderscripts-to-java-and-bc)
 
 # include the dependency files (.d/.P) generated by llvm-rs-cc.
--include $(bc_dep_files:%.d=%.P)
+$(call include-depfile,$(RenderScript_file_stamp).P,$(RenderScript_file_stamp))
 
 ifneq ($(LOCAL_RENDERSCRIPT_COMPATIBILITY),)
 
@@ -274,29 +297,89 @@
 
 LOCAL_INTERMEDIATE_TARGETS += $(RenderScript_file_stamp)
 # Make sure the generated resource will be added to the apk.
-LOCAL_RESOURCE_DIR := $(LOCAL_INTERMEDIATE_SOURCE_DIR)/renderscript/res $(LOCAL_RESOURCE_DIR)
+rs_generated_res_dir := $(renderscript_intermediate.COMMON)/res
+LOCAL_RESOURCE_DIR := $(rs_generated_res_dir) $(LOCAL_RESOURCE_DIR)
 endif
 
+
+###########################################################
+## AIDL: Compile .aidl files to .java
+###########################################################
+aidl_sources := $(filter %.aidl,$(LOCAL_SRC_FILES))
+
+ifneq ($(strip $(aidl_sources)),)
+aidl_java_sources := $(patsubst %.aidl,%.java,$(addprefix $(intermediates.COMMON)/src/, $(aidl_sources)))
+aidl_sources := $(addprefix $(LOCAL_PATH)/, $(aidl_sources))
+
+aidl_preprocess_import :=
+ifdef LOCAL_SDK_VERSION
+ifneq ($(filter current system_current test_current, $(LOCAL_SDK_VERSION)$(TARGET_BUILD_APPS)),)
+  # LOCAL_SDK_VERSION is current/system_current/test_current and TARGET_BUILD_APPS is not set
+  aidl_preprocess_import := $(TARGET_OUT_COMMON_INTERMEDIATES)/framework.aidl
+else
+  aidl_preprocess_import := $(HISTORICAL_SDK_VERSIONS_ROOT)/$(LOCAL_SDK_VERSION)/framework.aidl
+endif # not current, system_current, or test_current
+else
+# build against the platform.
+LOCAL_AIDL_INCLUDES += $(FRAMEWORKS_BASE_JAVA_SRC_DIRS)
+endif # LOCAL_SDK_VERSION
+$(aidl_java_sources): PRIVATE_AIDL_FLAGS := -b $(addprefix -p,$(aidl_preprocess_import)) -I$(LOCAL_PATH) -I$(LOCAL_PATH)/src $(addprefix -I,$(LOCAL_AIDL_INCLUDES))
+
+$(aidl_java_sources): $(intermediates.COMMON)/src/%.java: \
+        $(LOCAL_PATH)/%.aidl \
+        $(LOCAL_MODULE_MAKEFILE_DEP) \
+        $(LOCAL_ADDITIONAL_DEPENDENCIES) \
+        $(AIDL) \
+        $(aidl_preprocess_import)
+	$(transform-aidl-to-java)
+$(foreach java,$(aidl_java_sources), \
+    $(call include-depfile,$(java:%.java=%.P),$(java)))
+
+else
+aidl_java_sources :=
+endif
+
+##########################################
+
 # All of the rules after full_classes_compiled_jar are very unlikely
 # to fail except for bugs in their respective tools.  If you would
 # like to run these rules, add the "all" modifier goal to the make
 # command line.
+ifndef LOCAL_CHECKED_MODULE
 ifdef full_classes_jar
-java_alternative_checked_module := $(full_classes_compiled_jar)
+ifdef LOCAL_JACK_ENABLED
+LOCAL_CHECKED_MODULE := $(jack_check_timestamp)
 else
-java_alternative_checked_module :=
+LOCAL_CHECKED_MODULE := $(full_classes_compiled_jar)
 endif
-
-# TODO: It looks like the only thing we need from base_rules is
-# all_java_sources.  See if we can get that by adding a
-# common_java.mk, and moving the include of base_rules.mk to
-# after all the declarations.
+endif
+endif
 
 #######################################
 include $(BUILD_SYSTEM)/base_rules.mk
 #######################################
 
-java_alternative_checked_module :=
+###########################################################
+## logtags: emit java source
+###########################################################
+ifneq ($(strip $(logtags_sources)),)
+
+logtags_java_sources := $(patsubst %.logtags,%.java,$(addprefix $(intermediates.COMMON)/src/, $(logtags_sources)))
+logtags_sources := $(addprefix $(LOCAL_PATH)/, $(logtags_sources))
+
+$(logtags_java_sources): $(intermediates.COMMON)/src/%.java: $(LOCAL_PATH)/%.logtags $(TARGET_OUT_COMMON_INTERMEDIATES)/all-event-log-tags.txt
+	$(transform-logtags-to-java)
+
+else
+logtags_java_sources :=
+endif
+
+##########################################
+java_sources := $(addprefix $(LOCAL_PATH)/, $(filter %.java,$(LOCAL_SRC_FILES))) $(aidl_java_sources) $(logtags_java_sources) \
+                $(filter %.java,$(LOCAL_GENERATED_SOURCES))
+all_java_sources := $(java_sources) $(addprefix $(TARGET_OUT_COMMON_INTERMEDIATES)/, $(filter %.java,$(LOCAL_INTERMEDIATE_SOURCES)))
+
+include $(BUILD_SYSTEM)/java_common.mk
 
 #######################################
 # defines built_odex along with rule to install odex
@@ -310,37 +393,8 @@
 endif
 endif
 
-# Install the RS compatibility libraries to /system/lib/ if necessary
-ifdef rs_compatibility_jni_libs
-installed_rs_compatibility_jni_libs := $(addprefix $(TARGET_OUT_SHARED_LIBRARIES)/,\
-    $(notdir $(rs_compatibility_jni_libs)))
-# Provide a way to skip sources included in multiple projects.
-ifdef LOCAL_RENDERSCRIPT_SKIP_INSTALL
-skip_install_rs_libs := $(patsubst %.rs,%.so, \
-    $(addprefix $(TARGET_OUT_SHARED_LIBRARIES)/librs., \
-    $(notdir $(LOCAL_RENDERSCRIPT_SKIP_INSTALL))))
-installed_rs_compatibility_jni_libs := \
-    $(filter-out $(skip_install_rs_libs),$(installed_rs_compatibility_jni_libs))
-endif
-ifneq (,$(strip $(installed_rs_compatibility_jni_libs)))
-$(installed_rs_compatibility_jni_libs) : $(TARGET_OUT_SHARED_LIBRARIES)/lib%.so : \
-    $(renderscript_intermediate)/lib%.so
-	$(hide) mkdir -p $(dir $@) && cp -f $< $@
-
-# Install them only if the current module is installed.
-$(LOCAL_INSTALLED_MODULE) : $(installed_rs_compatibility_jni_libs)
-endif
-endif
-
-# We use intermediates.COMMON because the classes.jar/.dex files will be
-# common even if LOCAL_BUILT_MODULE isn't.
-#
-# Override some target variables that base_rules set up for us.
-$(LOCAL_INTERMEDIATE_TARGETS): \
-	PRIVATE_CLASS_INTERMEDIATES_DIR := $(intermediates.COMMON)/classes
-$(LOCAL_INTERMEDIATE_TARGETS): \
-	PRIVATE_SOURCE_INTERMEDIATES_DIR := $(LOCAL_INTERMEDIATE_SOURCE_DIR)
-$(LOCAL_INTERMEDIATE_TARGETS): PRIVATE_RMTYPEDEFS := $(LOCAL_RMTYPEDEFS)
+$(LOCAL_INTERMEDIATE_TARGETS): PRIVATE_JACK_MIN_SDK_VERSION := $(my_jack_min_sdk_version)
+my_jack_min_sdk_version :=
 
 # Since we're using intermediates.COMMON, make sure that it gets cleaned
 # properly.
@@ -376,7 +430,7 @@
 # This intentionally depends on java_sources, not all_java_sources.
 # Deps for generated source files must be handled separately,
 # via deps on the target that generates the sources.
-$(full_classes_compiled_jar): PRIVATE_JAVACFLAGS := $(LOCAL_JAVACFLAGS)
+$(full_classes_compiled_jar): PRIVATE_JAVACFLAGS := $(GLOBAL_JAVAC_DEBUG_FLAGS) $(LOCAL_JAVACFLAGS)
 $(full_classes_compiled_jar): PRIVATE_JAR_EXCLUDE_FILES := $(LOCAL_JAR_EXCLUDE_FILES)
 $(full_classes_compiled_jar): PRIVATE_JAR_PACKAGES := $(LOCAL_JAR_PACKAGES)
 $(full_classes_compiled_jar): PRIVATE_JAR_EXCLUDE_PACKAGES := $(LOCAL_JAR_EXCLUDE_PACKAGES)
@@ -389,12 +443,10 @@
         $(layers_file) \
         $(RenderScript_file_stamp) \
         $(proto_java_sources_file_stamp) \
-        $(LOCAL_MODULE_MAKEFILE) \
+        $(LOCAL_MODULE_MAKEFILE_DEP) \
         $(LOCAL_ADDITIONAL_DEPENDENCIES)
 	$(transform-java-to-classes.jar)
 
-$(full_classes_compiled_jar): PRIVATE_JAVAC_DEBUG_FLAGS := -g
-
 # Run jarjar if necessary, otherwise just copy the file.
 ifneq ($(strip $(LOCAL_JARJAR_RULES)),)
 $(full_classes_jarjar_jar): PRIVATE_JARJAR_RULES := $(LOCAL_JARJAR_RULES)
@@ -408,7 +460,7 @@
 endif
 
 ifeq ($(LOCAL_EMMA_INSTRUMENT),true)
-$(full_classes_emma_jar): PRIVATE_EMMA_COVERAGE_FILE := $(intermediates.COMMON)/coverage.em
+$(full_classes_emma_jar): PRIVATE_EMMA_COVERAGE_FILE := $(intermediates.COMMON)/coverage.emma.ignore
 $(full_classes_emma_jar): PRIVATE_EMMA_INTERMEDIATES_DIR := $(emma_intermediates_dir)
 # module level coverage filter can be defined using LOCAL_EMMA_COVERAGE_FILTER
 # in Android.mk
@@ -435,6 +487,8 @@
 	@echo Copying: $@
 	$(hide) $(ACP) -fp $< $@
 
+$(call define-jar-to-toc-rule, $(full_classes_jar))
+
 # Run proguard if necessary, otherwise just copy the file.
 ifdef LOCAL_PROGUARD_ENABLED
 ifneq ($(filter-out full custom nosystem obfuscation optimization shrinktests,$(LOCAL_PROGUARD_ENABLED)),)
@@ -442,6 +496,7 @@
     $(error invalid value for LOCAL_PROGUARD_ENABLED: $(LOCAL_PROGUARD_ENABLED))
 endif
 proguard_dictionary := $(intermediates.COMMON)/proguard_dictionary
+jack_dictionary := $(intermediates.COMMON)/jack_dictionary
 
 # Hack: see b/20667396
 # When an app's LOCAL_SDK_VERSION is lower than the support library's LOCAL_SDK_VERSION,
@@ -452,7 +507,7 @@
 ifneq (,$(filter android-support-%,$(LOCAL_STATIC_JAVA_LIBRARIES)))
 ifdef LOCAL_SDK_VERSION
 ifdef TARGET_BUILD_APPS
-ifeq (,$(filter current system_current, $(LOCAL_SDK_VERSION)))
+ifeq (,$(filter current system_current test_current, $(LOCAL_SDK_VERSION)))
   my_support_library_sdk_raise := $(call java-lib-files, sdk_vcurrent)
 endif
 else
@@ -465,14 +520,20 @@
 
 # jack already has the libraries in its classpath and doesn't support jars
 legacy_proguard_flags := $(addprefix -libraryjars ,$(my_support_library_sdk_raise) $(full_shared_java_libs))
-common_proguard_flags :=  \
-                  -forceprocessing \
-                  -printmapping $(proguard_dictionary)
+
+legacy_proguard_flags += -printmapping $(proguard_dictionary)
+jack_proguard_flags := -printmapping $(jack_dictionary)
+
+common_proguard_flags := -forceprocessing
 
 ifeq ($(filter nosystem,$(LOCAL_PROGUARD_ENABLED)),)
 common_proguard_flags += -include $(BUILD_SYSTEM)/proguard.flags
 ifeq ($(LOCAL_EMMA_INSTRUMENT),true)
+ifdef LOCAL_JACK_ENABLED
+common_proguard_flags += -include $(BUILD_SYSTEM)/proguard.jacoco.flags
+else
 common_proguard_flags += -include $(BUILD_SYSTEM)/proguard.emma.flags
+endif # LOCAL_JACK_ENABLED
 endif
 # If this is a test package, add proguard keep flags for tests.
 ifneq ($(LOCAL_INSTRUMENTATION_FOR)$(filter tests,$(LOCAL_MODULE_TAGS)),)
@@ -506,9 +567,9 @@
     -applymapping $(link_instr_intermediates_dir.COMMON)/proguard_dictionary \
     -verbose \
     $(legacy_proguard_flags)
-# not supported with jack
 ifdef LOCAL_JACK_ENABLED
-    $(error $(LOCAL_MODULE): Build with jack of instrumentation when obfuscating is not yet supported)
+jack_proguard_flags += -applymapping $(link_instr_intermediates_dir.COMMON)/jack_dictionary
+full_jack_deps += $(link_instr_intermediates_dir.COMMON)/jack_dictionary
 endif
 
 # Sometimes (test + main app) uses different keep rules from the main app -
@@ -556,8 +617,6 @@
 ifeq ($(LOCAL_EMMA_INSTRUMENT),true)
 $(built_dex_intermediate): PRIVATE_DX_FLAGS += --no-locals
 endif
-$(built_dex_intermediate): $(full_classes_proguard_jar) $(DX)
-	$(transform-classes.jar-to-dex)
 endif # LOCAL_JACK_ENABLED is disabled
 
 $(built_dex): $(built_dex_intermediate) | $(ACP)
@@ -570,13 +629,13 @@
 endif
 
 findbugs_xml := $(intermediates.COMMON)/findbugs.xml
-$(findbugs_xml) : PRIVATE_AUXCLASSPATH := $(addprefix -auxclasspath ,$(strip \
-								$(call normalize-path-list,$(filter %.jar,\
-										$(full_java_libs)))))
-$(findbugs_xml) : $(full_classes_jar)
+$(findbugs_xml): PRIVATE_AUXCLASSPATH := $(addprefix -auxclasspath ,$(strip \
+    $(call normalize-path-list,$(filter %.jar,$(full_java_libs)))))
+$(findbugs_xml): PRIVATE_FINDBUGS_FLAGS := $(LOCAL_FINDBUGS_FLAGS)
+$(findbugs_xml) : $(full_classes_jar) $(filter %.xml, $(LOCAL_FINDBUGS_FLAGS))
 	@echo Findbugs: $@
 	$(hide) $(FINDBUGS) -textui -effort:min -xml:withMessages \
-		$(PRIVATE_AUXCLASSPATH) \
+		$(PRIVATE_AUXCLASSPATH) $(PRIVATE_FINDBUGS_FLAGS) \
 		$< \
 		> $@
 
@@ -601,14 +660,16 @@
 ifeq ($(LOCAL_JACK_ENABLED),incremental)
 $(LOCAL_INTERMEDIATE_TARGETS): \
 	PRIVATE_JACK_INCREMENTAL_DIR := $(intermediates.COMMON)/jack-incremental
+$(noshrob_classes_jack): PRIVATE_JACK_INCREMENTAL_DIR := $(intermediates.COMMON)/jack-noshrob-incremental
+$(jack_check_timestamp): PRIVATE_JACK_INCREMENTAL_DIR := $(intermediates.COMMON)/jack-check-incremental
 else
 $(LOCAL_INTERMEDIATE_TARGETS): \
 	PRIVATE_JACK_INCREMENTAL_DIR :=
+$(noshrob_classes_jack): PRIVATE_JACK_INCREMENTAL_DIR :=
+$(jack_check_timestamp): PRIVATE_JACK_INCREMENTAL_DIR :=
 endif
 
 ifdef full_classes_jar
-$(LOCAL_INTERMEDIATE_TARGETS): PRIVATE_JACK_DEBUG_FLAGS := -g
-
 ifdef LOCAL_PROGUARD_ENABLED
 
 ifndef LOCAL_JACK_PROGUARD_FLAGS
@@ -619,27 +680,54 @@
     $(error $(LOCAL_MODULE): Build with jack when LOCAL_TEST_MODULE_TO_PROGUARD_WITH is defined is not yet implemented)
 endif
 
-$(LOCAL_INTERMEDIATE_TARGETS): PRIVATE_JACK_PROGUARD_FLAGS := $(common_proguard_flags) $(LOCAL_JACK_PROGUARD_FLAGS)
+# $(jack_dictionary) is just a by-product of $(built_dex_intermediate).
+# The dummy command was added because, without it, make misses the fact that $(built_dex) also
+# changes $(jack_dictionary).
+$(jack_dictionary): $(full_classes_jack)
+	$(hide) touch $@
+
+$(LOCAL_INTERMEDIATE_TARGETS): PRIVATE_JACK_PROGUARD_FLAGS := $(common_proguard_flags) $(jack_proguard_flags) $(LOCAL_JACK_PROGUARD_FLAGS)
 else  # LOCAL_PROGUARD_ENABLED not defined
 $(LOCAL_INTERMEDIATE_TARGETS): PRIVATE_JACK_PROGUARD_FLAGS :=
 endif # LOCAL_PROGUARD_ENABLED defined
 
-$(LOCAL_INTERMEDIATE_TARGETS): PRIVATE_JACK_FLAGS := $(LOCAL_JACK_FLAGS)
+$(LOCAL_INTERMEDIATE_TARGETS): PRIVATE_JACK_FLAGS := $(GLOBAL_JAVAC_DEBUG_FLAGS) $(LOCAL_JACK_FLAGS)
+$(LOCAL_INTERMEDIATE_TARGETS): PRIVATE_JACK_VERSION := $(LOCAL_JACK_VERSION)
 
-jack_all_deps := $(java_sources) $(java_resource_sources) $(full_jack_lib_deps) \
+jack_all_deps := $(java_sources) $(java_resource_sources) $(full_jack_deps) \
         $(jar_manifest_file) $(layers_file) $(RenderScript_file_stamp) $(proguard_flag_files) \
         $(proto_java_sources_file_stamp) $(LOCAL_ADDITIONAL_DEPENDENCIES) $(LOCAL_JARJAR_RULES) \
-        $(LOCAL_MODULE_MAKEFILE) $(JACK_JAR) $(JACK_LAUNCHER_JAR)
+        $(LOCAL_MODULE_MAKEFILE_DEP) $(JACK)
+
+$(jack_check_timestamp): $(jack_all_deps) | setup-jack-server
+	@echo Checking build with Jack: $@
+	$(jack-check-java)
 
 ifeq ($(LOCAL_IS_STATIC_JAVA_LIBRARY),true)
-$(full_classes_jack): $(jack_all_deps)
+$(full_classes_jack): $(jack_all_deps) | setup-jack-server
 	@echo Building with Jack: $@
 	$(java-to-jack)
 
+# Update timestamps of .toc files for static java libraries so
+# dependents will always be rebuilt.
+$(built_dex).toc: $(full_classes_jack)
+	touch $@
+
 else #LOCAL_IS_STATIC_JAVA_LIBRARY
 $(built_dex_intermediate): PRIVATE_CLASSES_JACK := $(full_classes_jack)
 
-$(built_dex_intermediate): $(jack_all_deps)
+ifeq ($(LOCAL_EMMA_INSTRUMENT),true)
+$(built_dex_intermediate): PRIVATE_JACK_COVERAGE_OPTIONS := \
+    -D jack.coverage=true \
+    -D jack.coverage.metadata.file=$(intermediates.COMMON)/coverage.em \
+    -D jack.coverage.jacoco.package=$(JACOCO_PACKAGE_NAME) \
+    $(addprefix -D jack.coverage.jacoco.include=,$(LOCAL_JACK_COVERAGE_INCLUDE_FILTER)) \
+    $(addprefix -D jack.coverage.jacoco.exclude=,$(LOCAL_JACK_COVERAGE_EXCLUDE_FILTER))
+else
+$(built_dex_intermediate): PRIVATE_JACK_COVERAGE_OPTIONS :=
+endif
+
+$(built_dex_intermediate): $(jack_all_deps) | setup-jack-server
 	@echo Building with Jack: $@
 	$(jack-java-to-dex)
 
@@ -649,16 +737,13 @@
 $(full_classes_jack): $(built_dex_intermediate)
 	$(hide) touch $@
 
+$(call define-dex-to-toc-rule, $(intermediates.COMMON))
+
 endif #LOCAL_IS_STATIC_JAVA_LIBRARY
 
 $(noshrob_classes_jack): PRIVATE_JACK_INTERMEDIATES_DIR := $(intermediates.COMMON)/jack-noshrob-rsc
-ifeq ($(LOCAL_JACK_ENABLED),incremental)
-$(noshrob_classes_jack): PRIVATE_JACK_INCREMENTAL_DIR := $(intermediates.COMMON)/jack-noshrob-incremental
-else
-$(noshrob_classes_jack): PRIVATE_JACK_INCREMENTAL_DIR :=
-endif
 $(noshrob_classes_jack): PRIVATE_JACK_PROGUARD_FLAGS :=
-$(noshrob_classes_jack): $(jack_all_deps)
+$(noshrob_classes_jack): $(jack_all_deps) | setup-jack-server
 	@echo Building with Jack: $@
 	$(java-to-jack)
 endif  # full_classes_jar is defined
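
Taken together, the java.mk changes add the test_current stub library, let LOCAL_MIN_SDK_VERSION override the Jack min-sdk derived from LOCAL_SDK_VERSION, fold LOCAL_STATIC_ANDROID_LIBRARIES and LOCAL_SHARED_ANDROID_LIBRARIES into the regular Java library lists, add AIDL and logtags-to-Java rules here, and route Jack builds through the jack_check_timestamp file plus the setup-jack-server order-only dependency. A hedged Android.mk sketch of a module touching a few of these knobs; the package name, library name, and versions are illustrative only.

LOCAL_PATH := $(call my-dir)
include $(CLEAR_VARS)
# Hypothetical app module.
LOCAL_PACKAGE_NAME := ExampleApp
LOCAL_SDK_VERSION := test_current              # resolves to android_test_stubs_current
LOCAL_MIN_SDK_VERSION := 21                    # overrides the derived Jack min-sdk
LOCAL_STATIC_ANDROID_LIBRARIES := example-aar-library   # folded into static Java libs
LOCAL_SRC_FILES := $(call all-java-files-under, src)
include $(BUILD_PACKAGE)
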
diff --git a/core/java_common.mk b/core/java_common.mk
new file mode 100644
index 0000000..9b7d10f
--- /dev/null
+++ b/core/java_common.mk
@@ -0,0 +1,401 @@
+# Common to host and target Java modules.
+
+###########################################################
+## Java version
+###########################################################
+# Use the LOCAL_JAVA_LANGUAGE_VERSION if it is set, otherwise
+# use one based on the LOCAL_SDK_VERSION: if it is < 24,
+# pass "1.7" to the tools; if it is unset, >= 24, or "current",
+# pass "1.8".
+#
+# The LOCAL_SDK_VERSION behavior is to ensure that, by default,
+# code that is expected to run on older releases of Android
+# does not use any 1.8 language features that are not supported
+# on earlier runtimes (like default / static interface methods).
+# Modules can override this logic by specifying
+# LOCAL_JAVA_LANGUAGE_VERSION explicitly.
+ifeq (,$(LOCAL_JAVA_LANGUAGE_VERSION))
+  private_sdk_versions_without_any_java_18_support := 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23
+  ifneq (,$(filter $(LOCAL_SDK_VERSION), $(private_sdk_versions_without_any_java_18_support)))
+    LOCAL_JAVA_LANGUAGE_VERSION := 1.7
+  else
+    LOCAL_JAVA_LANGUAGE_VERSION := 1.8
+  endif
+endif
+LOCAL_JAVACFLAGS += -source $(LOCAL_JAVA_LANGUAGE_VERSION) -target $(LOCAL_JAVA_LANGUAGE_VERSION)
+
+###########################################################
+## .proto files: Compile proto files to .java
+###########################################################
+proto_sources := $(filter %.proto,$(LOCAL_SRC_FILES))
+# Because names of the .java files compiled from .proto files are unknown until the
+# .proto files are compiled, we use a timestamp file as a dependency.
+proto_java_sources_file_stamp :=
+ifneq ($(proto_sources),)
+proto_sources_fullpath := $(addprefix $(LOCAL_PATH)/, $(proto_sources))
+
+# By putting the generated java files into $(LOCAL_INTERMEDIATE_SOURCE_DIR), they will be
+# automatically found by the java compiling function transform-java-to-classes.jar.
+proto_java_intemediate_dir := $(LOCAL_INTERMEDIATE_SOURCE_DIR)/proto
+proto_java_sources_file_stamp := $(proto_java_intemediate_dir)/Proto.stamp
+proto_java_sources_dir := $(proto_java_intemediate_dir)/src
+
+$(proto_java_sources_file_stamp): PRIVATE_PROTO_INCLUDES := $(TOP)
+$(proto_java_sources_file_stamp): PRIVATE_PROTO_SRC_FILES := $(proto_sources_fullpath)
+$(proto_java_sources_file_stamp): PRIVATE_PROTO_JAVA_OUTPUT_DIR := $(proto_java_sources_dir)
+ifeq ($(LOCAL_PROTOC_OPTIMIZE_TYPE),micro)
+$(proto_java_sources_file_stamp): PRIVATE_PROTO_JAVA_OUTPUT_OPTION := --javamicro_out
+else
+  ifeq ($(LOCAL_PROTOC_OPTIMIZE_TYPE),nano)
+$(proto_java_sources_file_stamp): PRIVATE_PROTO_JAVA_OUTPUT_OPTION := --javanano_out
+  else
+$(proto_java_sources_file_stamp): PRIVATE_PROTO_JAVA_OUTPUT_OPTION := --java_out
+  endif
+endif
+$(proto_java_sources_file_stamp): PRIVATE_PROTOC_FLAGS := $(LOCAL_PROTOC_FLAGS)
+$(proto_java_sources_file_stamp): PRIVATE_PROTO_JAVA_OUTPUT_PARAMS := $(LOCAL_PROTO_JAVA_OUTPUT_PARAMS)
+$(proto_java_sources_file_stamp) : $(proto_sources_fullpath) $(PROTOC)
+	$(call transform-proto-to-java)
+
+#TODO: protoc should output the dependencies introduced by imports.
+
+ALL_MODULES.$(my_register_name).PROTO_FILES := $(proto_sources_fullpath)
+endif # proto_sources
+
+#########################################
+## Java resources
+
+# Look for resource files in any specified directories.
+# Non-java and non-doc files will be picked up as resources
+# and included in the output jar file.
+java_resource_file_groups :=
+
+LOCAL_JAVA_RESOURCE_DIRS := $(strip $(LOCAL_JAVA_RESOURCE_DIRS))
+ifneq ($(LOCAL_JAVA_RESOURCE_DIRS),)
+  # This makes a list of words like
+  #     <dir1>::<file1>:<file2> <dir2>::<file1> <dir3>:
+  # where each of the files is relative to the directory it's grouped with.
+  # Directories that don't contain any resource files will result in groups
+  # that end with a colon, and they are stripped out in the next step.
+  java_resource_file_groups += \
+    $(foreach dir,$(LOCAL_JAVA_RESOURCE_DIRS), \
+	$(subst $(space),:,$(strip \
+		$(LOCAL_PATH)/$(dir): \
+	    $(patsubst ./%,%,$(sort $(shell cd $(LOCAL_PATH)/$(dir) && \
+		find . \
+		    -type d -a -name ".svn" -prune -o \
+		    -type f \
+			-a \! -name "*.java" \
+			-a \! -name "package.html" \
+			-a \! -name "overview.html" \
+			-a \! -name ".*.swp" \
+			-a \! -name ".DS_Store" \
+			-a \! -name "*~" \
+			-print \
+		    ))) \
+	)) \
+    )
+  java_resource_file_groups := $(filter-out %:,$(java_resource_file_groups))
+endif # LOCAL_JAVA_RESOURCE_DIRS
+
+LOCAL_JAVA_RESOURCE_FILES := $(strip $(LOCAL_JAVA_RESOURCE_FILES))
+ifneq ($(LOCAL_JAVA_RESOURCE_FILES),)
+  java_resource_file_groups += \
+    $(foreach f,$(LOCAL_JAVA_RESOURCE_FILES), \
+	$(patsubst %/,%,$(dir $(f)))::$(notdir $(f)) \
+     )
+endif # LOCAL_JAVA_RESOURCE_FILES
+
+ifdef java_resource_file_groups
+  # The full paths to all resources, used for dependencies.
+  java_resource_sources := \
+    $(foreach group,$(java_resource_file_groups), \
+	$(addprefix $(word 1,$(subst :,$(space),$(group)))/, \
+	    $(wordlist 2,9999,$(subst :,$(space),$(group))) \
+	) \
+    )
+  # The arguments to jar that will include these files in a jar file.
+  # Quote the file name to handle special characters (such as #) correctly.
+  extra_jar_args := \
+    $(foreach group,$(java_resource_file_groups), \
+	$(addprefix -C "$(word 1,$(subst :,$(space),$(group)))" , \
+	    $(foreach w, $(wordlist 2,9999,$(subst :,$(space),$(group))), "$(w)" ) \
+	) \
+    )
+  java_resource_file_groups :=
+else
+  java_resource_sources :=
+  extra_jar_args :=
+endif # java_resource_file_groups
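A standalone sketch (hypothetical paths) of how one <dir>::<file1>:<file2> group expands into jar arguments, runnable on its own with make -f:

empty :=
space := $(empty) $(empty)
group := pkg/res::images/a.png:strings.txt
# Same word/subst pipeline as extra_jar_args above.
$(info -C "$(word 1,$(subst :,$(space),$(group)))" \
    $(foreach w,$(wordlist 2,9999,$(subst :,$(space),$(group))),"$(w)"))
# Prints (modulo spacing): -C "pkg/res" "images/a.png" "strings.txt"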
+
+######################################
+## PRIVATE java vars
+# LOCAL_SOURCE_FILES_ALL_GENERATED is set only if the module has no static source files,
+# only generated source files in its LOCAL_INTERMEDIATE_SOURCE_DIR.
+# In that case you have to set up the dependencies in some other way.
+need_compile_java := $(strip $(all_java_sources)$(all_res_assets)$(java_resource_sources))$(LOCAL_STATIC_JAVA_LIBRARIES)$(filter true,$(LOCAL_SOURCE_FILES_ALL_GENERATED))
+ifdef need_compile_java
+
+full_static_java_libs := \
+    $(foreach lib,$(LOCAL_STATIC_JAVA_LIBRARIES), \
+      $(call intermediates-dir-for, \
+        JAVA_LIBRARIES,$(lib),$(LOCAL_IS_HOST_MODULE),COMMON)/javalib.jar)
+
+$(LOCAL_INTERMEDIATE_TARGETS): PRIVATE_STATIC_JAVA_LIBRARIES := $(full_static_java_libs)
+
+$(LOCAL_INTERMEDIATE_TARGETS): PRIVATE_RESOURCE_DIR := $(LOCAL_RESOURCE_DIR)
+$(LOCAL_INTERMEDIATE_TARGETS): PRIVATE_ASSET_DIR := $(LOCAL_ASSET_DIR)
+
+$(LOCAL_INTERMEDIATE_TARGETS): PRIVATE_CLASS_INTERMEDIATES_DIR := $(intermediates.COMMON)/classes
+$(LOCAL_INTERMEDIATE_TARGETS): PRIVATE_SOURCE_INTERMEDIATES_DIR := $(intermediates.COMMON)/src
+$(LOCAL_INTERMEDIATE_TARGETS): PRIVATE_JAVA_SOURCES := $(all_java_sources)
+
+$(LOCAL_INTERMEDIATE_TARGETS): PRIVATE_RMTYPEDEFS := $(LOCAL_RMTYPEDEFS)
+
+# full_java_libs: The list of files that should be used as the classpath.
+#                 Using this list as a dependency list WILL NOT WORK.
+# full_java_lib_deps: Should be specified as a prerequisite of this module
+#                 to guarantee that the files in full_java_libs will
+#                 be up-to-date.
+ifndef LOCAL_IS_HOST_MODULE
+ifeq ($(LOCAL_SDK_VERSION),)
+ifeq ($(LOCAL_NO_STANDARD_LIBRARIES),true)
+# No bootclasspath. But we still need "" to prevent javac from using default host bootclasspath.
+my_bootclasspath := ""
+else  # LOCAL_NO_STANDARD_LIBRARIES
+my_bootclasspath := $(call java-lib-files,core-oj):$(call java-lib-files,core-libart)
+endif  # LOCAL_NO_STANDARD_LIBRARIES
+else
+ifeq ($(LOCAL_SDK_VERSION)$(TARGET_BUILD_APPS),current)
+# LOCAL_SDK_VERSION is current and no TARGET_BUILD_APPS.
+my_bootclasspath := $(call java-lib-files,android_stubs_current)
+else ifeq ($(LOCAL_SDK_VERSION)$(TARGET_BUILD_APPS),system_current)
+my_bootclasspath := $(call java-lib-files,android_system_stubs_current)
+else ifeq ($(LOCAL_SDK_VERSION)$(TARGET_BUILD_APPS),test_current)
+my_bootclasspath := $(call java-lib-files,android_test_stubs_current)
+else
+my_bootclasspath := $(call java-lib-files,sdk_v$(LOCAL_SDK_VERSION))
+endif # current, system_current, or test_current
+endif # LOCAL_SDK_VERSION
+$(LOCAL_INTERMEDIATE_TARGETS): PRIVATE_BOOTCLASSPATH := -bootclasspath $(my_bootclasspath)
+
+# In order to compile lambda code javac requires various invokedynamic-
+# related classes to be present. This change adds stubs needed for
+# javac to compile lambdas.
+my_additional_javac_libs :=
+ifndef TARGET_BUILD_APPS
+# TODO: support building lambdas using javac in unbundled builds.
+# We may need to check in a prebuilt core-lambda-stubs to prebuilts/sdk.
+ifneq ($(LOCAL_NO_STANDARD_LIBRARIES),true)
+my_additional_javac_libs := core-lambda-stubs
+endif
+endif
+
+full_shared_java_libs := $(call java-lib-files,$(LOCAL_JAVA_LIBRARIES) $(my_additional_javac_libs),$(LOCAL_IS_HOST_MODULE))
+full_java_lib_deps := $(call java-lib-deps,$(LOCAL_JAVA_LIBRARIES) $(my_additional_javac_libs),$(LOCAL_IS_HOST_MODULE))
+full_java_lib_deps := $(addsuffix .toc, $(full_java_lib_deps))
+
+else # LOCAL_IS_HOST_MODULE
+
+ifeq ($(USE_CORE_LIB_BOOTCLASSPATH),true)
+ifeq ($(LOCAL_NO_STANDARD_LIBRARIES),true)
+my_bootclasspath := ""
+else
+my_bootclasspath := $(call normalize-path-list,$(call host-dex-java-lib-files,core-oj-hostdex core-libart-hostdex))
+endif
+$(LOCAL_INTERMEDIATE_TARGETS): PRIVATE_BOOTCLASSPATH := -bootclasspath $(my_bootclasspath)
+
+full_shared_java_libs := $(call host-dex-java-lib-files,$(LOCAL_JAVA_LIBRARIES))
+full_java_lib_deps := $(full_shared_java_libs)
+else # !USE_CORE_LIB_BOOTCLASSPATH
+$(LOCAL_INTERMEDIATE_TARGETS): PRIVATE_BOOTCLASSPATH :=
+
+full_shared_java_libs := $(addprefix $(HOST_OUT_JAVA_LIBRARIES)/,\
+    $(addsuffix $(COMMON_JAVA_PACKAGE_SUFFIX),$(LOCAL_JAVA_LIBRARIES)))
+full_java_lib_deps := $(full_shared_java_libs)
+endif # USE_CORE_LIB_BOOTCLASSPATH
+endif # !LOCAL_IS_HOST_MODULE
+
+full_java_libs := $(full_shared_java_libs) $(full_static_java_libs) $(LOCAL_CLASSPATH)
+full_java_lib_deps := $(full_java_lib_deps) $(full_static_java_libs) $(LOCAL_CLASSPATH)
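A sketch of how the two lists are meant to be used, with hypothetical my_classes_jar, my_classes_dir and my_java_srcs: full_java_libs feeds the classpath, while full_java_lib_deps supplies the actual prerequisites.

$(my_classes_jar): PRIVATE_CLASSPATH := $(call normalize-path-list,$(full_java_libs))
$(my_classes_jar): $(full_java_lib_deps)
	javac -classpath "$(PRIVATE_CLASSPATH)" -d $(my_classes_dir) $(my_java_srcs)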
+
+ifndef LOCAL_IS_HOST_MODULE
+# This is set by packages that are linking to other packages that export
+# shared libraries, allowing them to make use of the code in the linked apk.
+apk_libraries := $(sort $(LOCAL_APK_LIBRARIES) $(LOCAL_RES_LIBRARIES))
+ifneq ($(apk_libraries),)
+  link_apk_libraries := \
+      $(foreach lib,$(apk_libraries), \
+        $(call intermediates-dir-for, \
+              APPS,$(lib),,COMMON)/classes.jar)
+
+  # link against the jar with full original names (before proguard processing).
+  full_shared_java_libs += $(link_apk_libraries)
+  full_java_libs += $(link_apk_libraries)
+  full_java_lib_deps += $(link_apk_libraries)
+endif
+
+# This is set by packages that contain instrumentation, allowing them to
+# link against the package they are instrumenting.  Currently only one such
+# package is allowed.
+LOCAL_INSTRUMENTATION_FOR := $(strip $(LOCAL_INSTRUMENTATION_FOR))
+ifdef LOCAL_INSTRUMENTATION_FOR
+  ifneq ($(words $(LOCAL_INSTRUMENTATION_FOR)),1)
+    $(error \
+        $(LOCAL_PATH): Multiple LOCAL_INSTRUMENTATION_FOR members defined)
+  endif
+
+  link_instr_intermediates_dir.COMMON := $(call intermediates-dir-for, \
+      APPS,$(LOCAL_INSTRUMENTATION_FOR),,COMMON)
+  # link against the jar with full original names (before proguard processing).
+  link_instr_classes_jar := $(link_instr_intermediates_dir.COMMON)/classes.jar
+  full_java_libs += $(link_instr_classes_jar)
+  full_java_lib_deps += $(link_instr_classes_jar)
+endif  # LOCAL_INSTRUMENTATION_FOR
+endif  # LOCAL_IS_HOST_MODULE
+
+endif  # need_compile_java
+
+# We may want to add jar manifest or jar resource files even if there is no java code at all.
+$(LOCAL_INTERMEDIATE_TARGETS): PRIVATE_EXTRA_JAR_ARGS := $(extra_jar_args)
+jar_manifest_file :=
+ifneq ($(strip $(LOCAL_JAR_MANIFEST)),)
+jar_manifest_file := $(LOCAL_PATH)/$(LOCAL_JAR_MANIFEST)
+$(LOCAL_INTERMEDIATE_TARGETS): PRIVATE_JAR_MANIFEST := $(jar_manifest_file)
+else
+$(LOCAL_INTERMEDIATE_TARGETS): PRIVATE_JAR_MANIFEST :=
+endif
+
+##########################################################
+ifndef LOCAL_IS_HOST_MODULE
+## AAPT Flags
+# aapt doesn't accept multiple --extra-packages flags.
+# We have to collapse them into a single --extra-packages flag here.
+LOCAL_AAPT_FLAGS := $(strip $(LOCAL_AAPT_FLAGS))
+ifdef LOCAL_AAPT_FLAGS
+ifeq ($(filter 0 1,$(words $(filter --extra-packages,$(LOCAL_AAPT_FLAGS)))),)
+aapt_flags := $(subst --extra-packages$(space),--extra-packages@,$(LOCAL_AAPT_FLAGS))
+aapt_flags_extra_packages := $(patsubst --extra-packages@%,%,$(filter --extra-packages@%,$(aapt_flags)))
+aapt_flags_extra_packages := $(sort $(subst :,$(space),$(aapt_flags_extra_packages)))
+LOCAL_AAPT_FLAGS := $(filter-out --extra-packages@%,$(aapt_flags)) \
+    --extra-packages $(subst $(space),:,$(aapt_flags_extra_packages))
+aapt_flags_extra_packages :=
+aapt_flags :=
+endif
+endif
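A self-contained sketch (hypothetical package names) of the collapsing performed above:

empty :=
space := $(empty) $(empty)
flags := --extra-packages com.a --extra-packages com.b -v
flags := $(subst --extra-packages$(space),--extra-packages@,$(flags))
pkgs := $(sort $(subst :,$(space),$(patsubst --extra-packages@%,%,$(filter --extra-packages@%,$(flags)))))
$(info $(filter-out --extra-packages@%,$(flags)) --extra-packages $(subst $(space),:,$(pkgs)))
# Prints: -v --extra-packages com.a:com.b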
+
+$(LOCAL_INTERMEDIATE_TARGETS): PRIVATE_AAPT_FLAGS := $(LOCAL_AAPT_FLAGS) $(PRODUCT_AAPT_FLAGS)
+$(LOCAL_INTERMEDIATE_TARGETS): PRIVATE_TARGET_AAPT_CHARACTERISTICS := $(TARGET_AAPT_CHARACTERISTICS)
+$(LOCAL_INTERMEDIATE_TARGETS): PRIVATE_MANIFEST_PACKAGE_NAME := $(LOCAL_MANIFEST_PACKAGE_NAME)
+$(LOCAL_INTERMEDIATE_TARGETS): PRIVATE_MANIFEST_INSTRUMENTATION_FOR := $(LOCAL_MANIFEST_INSTRUMENTATION_FOR)
+
+ifdef aidl_sources
+ALL_MODULES.$(my_register_name).AIDL_FILES := $(aidl_sources)
+endif
+ifdef renderscript_sources
+ALL_MODULES.$(my_register_name).RS_FILES := $(renderscript_sources_fullpath)
+endif
+endif  # !LOCAL_IS_HOST_MODULE
+
+$(LOCAL_INTERMEDIATE_TARGETS): PRIVATE_ALL_JAVA_LIBRARIES := $(full_java_libs)
+
+ALL_MODULES.$(my_register_name).INTERMEDIATE_SOURCE_DIR := \
+    $(ALL_MODULES.$(my_register_name).INTERMEDIATE_SOURCE_DIR) $(LOCAL_INTERMEDIATE_SOURCE_DIR)
+
+###########################################################
+# JACK
+###########################################################
+ifdef LOCAL_JACK_ENABLED
+ifdef need_compile_java
+
+LOCAL_JACK_FLAGS += -D jack.java.source.version=$(LOCAL_JAVA_LANGUAGE_VERSION)
+
+full_static_jack_libs := \
+    $(foreach lib,$(LOCAL_STATIC_JAVA_LIBRARIES), \
+      $(call intermediates-dir-for, \
+        JAVA_LIBRARIES,$(lib),$(LOCAL_IS_HOST_MODULE),COMMON)/classes.jack)
+
+$(LOCAL_INTERMEDIATE_TARGETS): PRIVATE_STATIC_JACK_LIBRARIES := $(full_static_jack_libs)
+
+ifndef LOCAL_IS_HOST_MODULE
+ifeq ($(LOCAL_SDK_VERSION),)
+ifeq ($(LOCAL_NO_STANDARD_LIBRARIES),true)
+my_bootclasspath :=
+else
+my_bootclasspath := $(call jack-lib-files,core-oj core-libart)
+endif
+else  # LOCAL_SDK_VERSION
+ifeq ($(LOCAL_SDK_VERSION)$(TARGET_BUILD_APPS),current)
+# LOCAL_SDK_VERSION is current and no TARGET_BUILD_APPS.
+my_bootclasspath := $(call jack-lib-files,android_stubs_current)
+else ifeq ($(LOCAL_SDK_VERSION)$(TARGET_BUILD_APPS),system_current)
+my_bootclasspath := $(call jack-lib-files,android_system_stubs_current)
+else ifeq ($(LOCAL_SDK_VERSION)$(TARGET_BUILD_APPS),test_current)
+my_bootclasspath := $(call jack-lib-files,android_test_stubs_current)
+else
+my_bootclasspath :=$(call jack-lib-files,sdk_v$(LOCAL_SDK_VERSION))
+endif # current, system_current, or test_current
+endif # LOCAL_SDK_VERSION
+$(LOCAL_INTERMEDIATE_TARGETS): PRIVATE_BOOTCLASSPATH_JAVA_LIBRARIES := $(my_bootclasspath)
+
+full_shared_jack_libs := $(call jack-lib-files,$(LOCAL_JAVA_LIBRARIES),$(LOCAL_IS_HOST_MODULE))
+full_jack_deps := $(call jack-lib-deps,$(LOCAL_JAVA_LIBRARIES),$(LOCAL_IS_HOST_MODULE))
+# Turn off .toc optimization for apps builds, as we cannot build dexdump there.
+ifeq (,$(TARGET_BUILD_APPS))
+full_jack_deps := $(patsubst %.jack, %.dex.toc, $(full_jack_deps))
+endif
+
+else # LOCAL_IS_HOST_MODULE
+
+ifeq ($(USE_CORE_LIB_BOOTCLASSPATH),true)
+ifeq ($(LOCAL_NO_STANDARD_LIBRARIES),true)
+my_bootclasspath :=
+else
+my_bootclasspath := $(call jack-lib-files,core-oj-hostdex core-libart-hostdex,$(LOCAL_IS_HOST_MODULE))
+endif
+$(LOCAL_INTERMEDIATE_TARGETS): PRIVATE_BOOTCLASSPATH_JAVA_LIBRARIES := $(my_bootclasspath)
+# Compile against the final jack library. If we want to add support for obfuscated libraries,
+# we'll need to change this to compile against the non-obfuscated jack library.
+full_shared_jack_libs := $(call jack-lib-files,$(LOCAL_JAVA_LIBRARIES),$(LOCAL_IS_HOST_MODULE))
+full_jack_deps := $(call jack-lib-deps,$(LOCAL_JAVA_LIBRARIES),$(LOCAL_IS_HOST_MODULE))
+else
+$(LOCAL_INTERMEDIATE_TARGETS): PRIVATE_BOOTCLASSPATH_JAVA_LIBRARIES :=
+full_shared_jack_libs := $(call jack-lib-deps,$(LOCAL_JAVA_LIBRARIES),$(LOCAL_IS_HOST_MODULE))
+full_jack_deps := $(full_shared_jack_libs)
+endif # USE_CORE_LIB_BOOTCLASSPATH
+endif # !LOCAL_IS_HOST_MODULE
+full_jack_libs := $(full_shared_jack_libs) $(full_static_jack_libs) $(LOCAL_JACK_CLASSPATH)
+full_jack_deps += $(full_static_jack_libs) $(LOCAL_JACK_CLASSPATH)
+
+ifndef LOCAL_IS_HOST_MODULE
+# This is set by packages that are linking to other packages that export
+# shared libraries, allowing them to make use of the code in the linked apk.
+ifneq ($(apk_libraries),)
+  link_apk_jack_libraries := \
+      $(foreach lib,$(apk_libraries), \
+        $(call intermediates-dir-for, \
+              APPS,$(lib),,COMMON)/classes.jack)
+
+  # link against the jar with full original names (before proguard processing).
+  full_shared_jack_libs += $(link_apk_jack_libraries)
+  full_jack_libs += $(link_apk_jack_libraries)
+  full_jack_deps += $(link_apk_jack_libraries)
+endif
+
+# This is set by packages that contain instrumentation, allowing them to
+# link against the package they are instrumenting.  Currently only one such
+# package is allowed.
+ifdef LOCAL_INSTRUMENTATION_FOR
+   # link against the jar with full original names (before proguard processing).
+   link_instr_classes_jack := $(link_instr_intermediates_dir.COMMON)/classes.noshrob.jack
+   full_jack_libs += $(link_instr_classes_jack)
+   full_jack_deps += $(link_instr_classes_jack)
+endif  # LOCAL_INSTRUMENTATION_FOR
+endif  # !LOCAL_IS_HOST_MODULE
+
+# Propagate local configuration options to this target.
+$(LOCAL_INTERMEDIATE_TARGETS): PRIVATE_ALL_JACK_LIBRARIES:= $(full_jack_libs)
+$(LOCAL_INTERMEDIATE_TARGETS): PRIVATE_JARJAR_RULES := $(LOCAL_JARJAR_RULES)
+
+endif  # need_compile_java
+endif # LOCAL_JACK_ENABLED
diff --git a/core/java_library.mk b/core/java_library.mk
index 5a2d19b..81a4a6a 100644
--- a/core/java_library.mk
+++ b/core/java_library.mk
@@ -47,7 +47,12 @@
 ifeq (true,$(EMMA_INSTRUMENT))
 ifeq (true,$(LOCAL_EMMA_INSTRUMENT))
 ifeq (true,$(EMMA_INSTRUMENT_STATIC))
+ifdef LOCAL_JACK_ENABLED
+# Jack supports coverage with Jacoco
+LOCAL_STATIC_JAVA_LIBRARIES += jacocoagent
+else
 LOCAL_STATIC_JAVA_LIBRARIES += emma
+endif # LOCAL_JACK_ENABLED
 endif # LOCAL_EMMA_INSTRUMENT
 endif # EMMA_INSTRUMENT_STATIC
 else
@@ -81,7 +86,7 @@
 $(common_javalib.jar): PRIVATE_DEX_FILE := $(built_dex)
 $(common_javalib.jar): PRIVATE_SOURCE_ARCHIVE := $(full_classes_jarjar_jar)
 $(common_javalib.jar): PRIVATE_DONT_DELETE_JAR_DIRS := $(LOCAL_DONT_DELETE_JAR_DIRS)
-$(common_javalib.jar) : $(built_dex) $(java_resource_sources)
+$(common_javalib.jar) : $(built_dex) $(java_resource_sources) | $(ZIPTIME)
 	@echo "target Jar: $(PRIVATE_MODULE) ($@)"
 ifdef LOCAL_JACK_ENABLED
 	$(create-empty-package)
@@ -92,6 +97,7 @@
 ifdef LOCAL_JACK_ENABLED
 	$(add-carried-jack-resources)
 endif
+	$(remove-timestamps-from-package)
 
 ifdef LOCAL_DEX_PREOPT
 ifneq ($(dexpreopt_boot_jar_module),) # boot jar
diff --git a/core/legacy_prebuilts.mk b/core/legacy_prebuilts.mk
index c477900..f4633d0 100644
--- a/core/legacy_prebuilts.mk
+++ b/core/legacy_prebuilts.mk
@@ -22,68 +22,11 @@
 # to add any new such module in the system
 
 GRANDFATHERED_ALL_PREBUILT := \
-	akmd2 \
-	ap_gain.bin \
-	AVRCP.kl \
-	batch \
-	bitmap_size.txt \
 	bmgr \
-	bp.img \
-	brcm_guci_drv \
-	bypassfactory \
-	cdt.bin \
-	chat-ril \
-	cpcap-key.kl \
-	egl.cfg \
-	firmware_error.565 \
-	firmware_install.565 \
-	ftmipcd \
-	gps.conf \
-	gpsconfig.xml \
-	gps.stingray.so \
-	gralloc.omap3.so \
-	gralloc.tegra.so \
-	hwcomposer.tegra.so \
 	ime \
-	init.goldfish.rc \
-	init.goldfish.sh \
-	init.olympus.rc \
-	init.sholes.rc \
-	init.stingray.rc \
 	input \
-	kernel \
-	lbl \
-	libEGL_POWERVR_SGX530_121.so \
-	libEGL_tegra.so \
-	libGLESv1_CM_POWERVR_SGX530_121.so \
-	libGLESv1_CM_tegra.so \
-	libGLESv2_POWERVR_SGX530_121.so \
-	libGLESv2_tegra.so \
-	libmoto_ril.so \
-	libpppd_plugin-ril.so \
-	libril_rds.so \
-	location \
-	location.cfg \
-	main.conf \
-	mbm.bin \
-	mbm_consumer.bin \
-	mdm_panicd \
 	monkey \
 	pm \
-	pppd-ril \
-	pppd-ril.options \
-	qwerty.kl \
-	radio.img \
-	rdl.bin \
 	RFFspeed_501.bmd \
 	RFFstd_501.bmd \
-	savebpver \
-	sholes-keypad.kl \
-	suplcerts.bks \
-	svc \
-	tcmd \
-	ueventd.goldfish.rc \
-	ueventd.olympus.rc \
-	ueventd.stingray.rc \
-	vold.fstab \
-	wl1271.bin
+	svc
diff --git a/core/main.mk b/core/main.mk
index 5b6e1e9..f9aad2b 100644
--- a/core/main.mk
+++ b/core/main.mk
@@ -38,8 +38,6 @@
 #endif
 
 # Check for broken versions of make.
-# (Allow any version under Cygwin since we don't actually build the platform there.)
-ifeq (,$(findstring CYGWIN,$(shell uname -sm)))
 ifneq (1,$(strip $(shell expr $(MAKE_VERSION) \>= 3.81)))
 $(warning ********************************************************************************)
 $(warning *  You are using version $(MAKE_VERSION) of make.)
@@ -48,7 +46,6 @@
 $(warning ********************************************************************************)
 $(error stopping)
 endif
-endif
 
 # Absolute path of the present working directory.
 # This overrides the shell variable $PWD, which does not necessarily point to
@@ -60,10 +57,16 @@
 
 BUILD_SYSTEM := $(TOPDIR)build/core
 
+# Ensure JAVA_NOT_REQUIRED is not set externally.
+JAVA_NOT_REQUIRED := false
+
 # This is the default target.  It must be the first declared target.
 .PHONY: droid
 DEFAULT_GOAL := droid
-$(DEFAULT_GOAL):
+$(DEFAULT_GOAL): droid_targets
+
+.PHONY: droid_targets
+droid_targets:
 
 # Used to force goals to build.  Only use for conditionally defined goals.
 .PHONY: FORCE
@@ -80,12 +83,15 @@
     vendorimage-nodeps \
     ramdisk-nodeps \
     bootimage-nodeps \
-    recoveryimage-nodeps
+    recoveryimage-nodeps \
+    product-graph dump-products
 
 ifneq ($(filter $(dont_bother_goals), $(MAKECMDGOALS)),)
 dont_bother := true
 endif
 
+ORIGINAL_MAKECMDGOALS := $(MAKECMDGOALS)
+
 # Targets that provide quick help on the build system.
 include $(BUILD_SYSTEM)/help.mk
 
@@ -93,8 +99,41 @@
 # and host information.
 include $(BUILD_SYSTEM)/config.mk
 
+relaunch_with_ninja :=
+ifneq ($(USE_NINJA),false)
+ifndef BUILDING_WITH_NINJA
+relaunch_with_ninja := true
+endif
+endif
+
+ifeq ($(relaunch_with_ninja),true)
+# Mark this is a ninja build.
+$(shell mkdir -p $(OUT_DIR) && touch $(OUT_DIR)/ninja_build)
+include build/core/ninja.mk
+else # !relaunch_with_ninja
+ifndef BUILDING_WITH_NINJA
+# Remove ninja build mark if it exists.
+$(shell rm -f $(OUT_DIR)/ninja_build)
+endif
+
+# Write the build number to a file so it can be read back in
+# without changing the command line every time.  Avoids rebuilds
+# when using ninja.
+$(shell mkdir -p $(OUT_DIR) && \
+    echo -n $(BUILD_NUMBER) > $(OUT_DIR)/build_number.txt && \
+    echo -n $(BUILD_DATETIME) > $(OUT_DIR)/build_date.txt)
+BUILD_NUMBER_FROM_FILE := $$(cat $(OUT_DIR)/build_number.txt)
+BUILD_DATETIME_FROM_FILE := $$(cat $(OUT_DIR)/build_date.txt)
+ifeq ($(HOST_OS),darwin)
+DATE_FROM_FILE := date -r $(BUILD_DATETIME_FROM_FILE)
+else
+DATE_FROM_FILE := date -d @$(BUILD_DATETIME_FROM_FILE)
+endif
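Since BUILD_NUMBER_FROM_FILE is assigned with a doubled dollar sign, a recipe that references it ends up containing a literal $(cat $(OUT_DIR)/build_number.txt) for the shell to evaluate at execution time, so the generated rules never change with the build number. A sketch with a hypothetical my_build_prop target:

# After expansion, the recipe text contains $(cat $(OUT_DIR)/build_number.txt),
# which the shell evaluates when the rule runs.
$(my_build_prop):
	echo "ro.build.version.incremental=$(BUILD_NUMBER_FROM_FILE)" > $@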
+
 # CTS-specific config.
 -include cts/build/config.mk
+# VTS-specific config.
+-include test/vts/tools/vts-tradefed/build/config.mk
 
 # This allows us to force a clean build - included after the config.mk
 # environment setup is done, but before we generate any dependencies.  This
@@ -105,13 +144,12 @@
 # Include the google-specific config
 -include vendor/google/build/config.mk
 
-VERSION_CHECK_SEQUENCE_NUMBER := 5
+VERSION_CHECK_SEQUENCE_NUMBER := 6
 -include $(OUT_DIR)/versions_checked.mk
 ifneq ($(VERSION_CHECK_SEQUENCE_NUMBER),$(VERSIONS_CHECKED))
 
 $(info Checking build tools versions...)
 
-ifneq ($(HOST_OS),windows)
 # check for a case sensitive file system
 ifneq (a,$(shell mkdir -p $(OUT_DIR) ; \
                 echo a > $(OUT_DIR)/casecheck.txt; \
@@ -123,7 +161,6 @@
 $(warning ************************************************************)
 $(error Case-insensitive filesystems not supported)
 endif
-endif
 
 # Make sure that there are no spaces in the absolute path; the
 # build system can't deal with them.
@@ -140,22 +177,23 @@
 $(error Directory names containing spaces not supported)
 endif
 
+ifeq ($(JAVA_NOT_REQUIRED), false)
 java_version_str := $(shell unset _JAVA_OPTIONS && java -version 2>&1)
 javac_version_str := $(shell unset _JAVA_OPTIONS && javac -version 2>&1)
 
-# Check for the correct version of java, should be 1.7 by
-# default, and 1.8 if EXPERIMENTAL_USE_JAVA8 is set
-ifneq ($(EXPERIMENTAL_USE_JAVA8),)
+# Check for the correct version of java: it should be 1.8 by
+# default, and 1.7 only if LEGACY_USE_JAVA7 is set.
+ifeq ($(LEGACY_USE_JAVA7),) # if LEGACY_USE_JAVA7 == ''
 required_version := "1.8.x"
 required_javac_version := "1.8"
-java_version := $(shell echo '$(java_version_str)' | grep 'openjdk .*[ "]1\.8[\. "$$]')
+java_version := $(shell echo '$(java_version_str)' | grep '[ "]1\.8[\. "$$]')
 javac_version := $(shell echo '$(javac_version_str)' | grep '[ "]1\.8[\. "$$]')
-else # default
+else
 required_version := "1.7.x"
 required_javac_version := "1.7"
 java_version := $(shell echo '$(java_version_str)' | grep '^java .*[ "]1\.7[\. "$$]')
 javac_version := $(shell echo '$(javac_version_str)' | grep '[ "]1\.7[\. "$$]')
-endif # if EXPERIMENTAL_USE_JAVA8
+endif # if LEGACY_USE_JAVA7 == ''
 
 ifeq ($(strip $(java_version)),)
 $(info ************************************************************)
@@ -173,20 +211,20 @@
 
 # Check for the current JDK.
 #
-# For Java 1.7, we require OpenJDK on linux and Oracle JDK on Mac OS.
+# For Java 1.7/1.8, we require OpenJDK on linux and Oracle JDK on Mac OS.
 requires_openjdk := false
-ifeq ($(HOST_OS), linux)
+ifeq ($(BUILD_OS),linux)
 requires_openjdk := true
 endif
 
 
 # Check for the current jdk
 ifeq ($(requires_openjdk), true)
-# The user asked for java7 openjdk, so check that the host
-# java version is really openjdk
+# The user asked for openjdk, so check that the host
+# java version is really openjdk and not some other JDK.
 ifeq ($(shell echo '$(java_version_str)' | grep -i openjdk),)
 $(info ************************************************************)
-$(info You asked for an OpenJDK 7 build but your version is)
+$(info You asked for an OpenJDK based build but your version is)
 $(info $(java_version_str).)
 $(info ************************************************************)
 $(error stop)
@@ -204,6 +242,12 @@
 endif # java version is not Sun Oracle JDK
 endif # if requires_openjdk
 
+KNOWN_INCOMPATIBLE_JAVAC_VERSIONS := google
+incompat_javac := $(foreach v,$(KNOWN_INCOMPATIBLE_JAVAC_VERSIONS),$(findstring $(v),$(javac_version_str)))
+ifneq ($(incompat_javac),)
+javac_version :=
+endif
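A quick sketch (hypothetical javac -version output) of the check above: any known-incompatible marker found in the version string clears javac_version, which sends the build into the error message that follows.

javac_version_str := javac 1.8.0-internal-google    # hypothetical
bad := $(foreach v,google,$(findstring $(v),$(javac_version_str)))
$(info incompat='$(bad)')    # prints incompat='google', so javac_version would be cleared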
+
 # Check for the correct version of javac
 ifeq ($(strip $(javac_version)),)
 $(info ************************************************************)
@@ -211,7 +255,12 @@
 $(info of javac.)
 $(info $(space))
 $(info Your version is: $(javac_version_str).)
+ifneq ($(incompat_javac),)
+$(info This '$(incompat_javac)' version is not supported for Android platform builds.)
+$(info Use a publicly available JDK and make sure you have run envsetup.sh / lunch.)
+else
 $(info The required version is: $(required_javac_version))
+endif
 $(info $(space))
 $(info Please follow the machine setup instructions at)
 $(info $(space)$(space)$(space)$(space)https://source.android.com/source/download.html)
@@ -219,6 +268,7 @@
 $(error stop)
 endif
 
+endif # if JAVA_NOT_REQUIRED
 
 ifndef BUILD_EMULATOR
   # Emulator binaries are now provided under prebuilts/android-emulator/
@@ -281,6 +331,30 @@
 # The pdk (Platform Development Kit) build
 include build/core/pdk_config.mk
 
+#
+# -----------------------------------------------------------------
+# Jack version configuration
+-include $(TOPDIR)prebuilts/sdk/tools/jack_versions.mk
+-include $(TOPDIR)prebuilts/sdk/tools/jack_for_module.mk
+
+#
+# -----------------------------------------------------------------
+# Install and start Jack server
+-include $(TOPDIR)prebuilts/sdk/tools/jack_server_setup.mk
+
+#
+# -----------------------------------------------------------------
+# Jacoco package name for Jack
+-include $(TOPDIR)external/jacoco/config.mk
+
+#
+# -----------------------------------------------------------------
+# Enable dynamic linker developer warnings for all builds except
+# final release.
+ifneq ($(PLATFORM_VERSION_CODENAME),REL)
+  ADDITIONAL_BUILD_PROPERTIES += ro.bionic.ld.warning=1
+endif
+
 # -----------------------------------------------------------------
 ###
 ### In this section we set up the things that are different
@@ -295,7 +369,7 @@
 
 # Add build properties for ART. These define system properties used by installd
 # to pass flags to dex2oat.
-ADDITIONAL_BUILD_PROPERTIES += persist.sys.dalvik.vm.lib.2=libart
+ADDITIONAL_BUILD_PROPERTIES += persist.sys.dalvik.vm.lib.2=libart.so
 ADDITIONAL_BUILD_PROPERTIES += dalvik.vm.isa.$(TARGET_ARCH).variant=$(DEX2OAT_TARGET_CPU_VARIANT)
 ifneq ($(DEX2OAT_TARGET_INSTRUCTION_SET_FEATURES),)
   ADDITIONAL_BUILD_PROPERTIES += dalvik.vm.isa.$(TARGET_ARCH).features=$(DEX2OAT_TARGET_INSTRUCTION_SET_FEATURES)
@@ -317,6 +391,10 @@
   # Target is secure in user builds.
   ADDITIONAL_DEFAULT_PROPERTIES += ro.secure=1
 
+  ifeq ($(user_variant),user)
+    ADDITIONAL_DEFAULT_PROPERTIES += ro.adb.secure=1
+  endif
+
   ifeq ($(user_variant),userdebug)
     # Pick up some extra useful tools
     tags_to_install += debug
@@ -325,18 +403,6 @@
     enable_target_debugging :=
   endif
 
-  # Turn on Dalvik preoptimization for user builds, but only if not
-  # explicitly disabled and the build is running on Linux (since host
-  # Dalvik isn't built for non-Linux hosts).
-  ifeq (,$(WITH_DEXPREOPT))
-    ifeq ($(user_variant),user)
-      ifeq ($(HOST_OS),linux)
-        # TODO: turn on WITH_DEXPREOPT for libart user builds.
-        # WITH_DEXPREOPT := true
-      endif
-    endif
-  endif
-
   # Disallow mock locations by default for user builds
   ADDITIONAL_DEFAULT_PROPERTIES += ro.allow.mock.location=0
 
@@ -372,12 +438,9 @@
           ro.setupwizard.mode=OPTIONAL
 endif
 ifndef is_sdk_build
-  # Don't verify or compile the image on eng builds to speed startup.
+  # To speed up startup of non-preopted builds, don't verify or compile the boot image.
   ADDITIONAL_BUILD_PROPERTIES += dalvik.vm.image-dex2oat-filter=verify-at-runtime
-  # Don't verify or compile apps on eng builds to speed startup.
-  ADDITIONAL_BUILD_PROPERTIES += dalvik.vm.dex2oat-filter=verify-at-runtime
 endif
-  ADDITIONAL_BUILD_PROPERTIES += dalvik.vm.usejit=true
 endif
 
 ## sdk ##
@@ -388,7 +451,7 @@
 sdk_repo_goal := $(strip $(filter sdk_repo,$(MAKECMDGOALS)))
 MAKECMDGOALS := $(strip $(filter-out sdk_repo,$(MAKECMDGOALS)))
 
-ifneq ($(words $(filter-out $(INTERNAL_MODIFIER_TARGETS) checkbuild emulator_tests target-files-package,$(MAKECMDGOALS))),1)
+ifneq ($(words $(sort $(filter-out $(INTERNAL_MODIFIER_TARGETS) checkbuild emulator_tests target-files-package,$(MAKECMDGOALS)))),1)
 $(error The 'sdk' target may not be specified with any other targets)
 endif
 
@@ -434,21 +497,6 @@
 $(INTERNAL_MODIFIER_TARGETS): $(DEFAULT_GOAL)
 endif
 
-# Bring in all modules that need to be built.
-ifeq ($(HOST_OS),windows)
-SDK_ONLY := true
-endif
-
-ifeq ($(SDK_ONLY),true)
-include $(TOPDIR)sdk/build/windows_sdk_whitelist.mk
-include $(TOPDIR)development/build/windows_sdk_whitelist.mk
-
-# Exclude tools/acp when cross-compiling windows under linux
-ifeq ($(findstring Linux,$(UNAME)),)
-subdirs += build/tools/acp
-endif
-
-else	# !SDK_ONLY
 #
 # Typical build; include any Android.mk files we can find.
 #
@@ -456,8 +504,6 @@
 
 FULL_BUILD := true
 
-endif	# !SDK_ONLY
-
 # Before we go and include all of the module makefiles, stash away
 # the PRODUCT_* values so that later we can verify they are not modified.
 stash_product_vars:=true
@@ -484,6 +530,7 @@
 # A helper goal printing out install paths
 .PHONY: GET-INSTALL-PATH
 GET-INSTALL-PATH:
+	@echo "Install paths for modules in $(ONE_SHOT_MAKEFILE):"
 	@$(foreach m, $(ALL_MODULES), $(if $(ALL_MODULES.$(m).INSTALLED), \
 		echo 'INSTALL-PATH: $(m) $(ALL_MODULES.$(m).INSTALLED)';))
 
@@ -499,7 +546,16 @@
 subdir_makefiles := \
 	$(shell build/tools/findleaves.py $(FIND_LEAVES_EXCLUDES) $(subdirs) Android.mk)
 
-$(foreach mk, $(subdir_makefiles), $(info including $(mk) ...)$(eval include $(mk)))
+ifeq ($(USE_SOONG),true)
+subdir_makefiles := $(SOONG_ANDROID_MK) $(call filter-soong-makefiles,$(subdir_makefiles))
+endif
+
+$(foreach mk, $(subdir_makefiles),$(info including $(mk) ...)$(eval include $(mk)))
+
+ifdef PDK_FUSION_PLATFORM_ZIP
+# Bring in the PDK platform.zip modules.
+include $(BUILD_SYSTEM)/pdk_fusion_modules.mk
+endif # PDK_FUSION_PLATFORM_ZIP
 
 endif # dont_bother
 
@@ -569,6 +625,8 @@
     $(m))))
 endef
 
+# If a module is for a cross host os, the required modules must be for
+# that OS too.
 # If a module is built for 32-bit, the required modules must be 32-bit too;
 # Otherwise, if the module is an executable or shared library,
 #   the required modules must be 64-bit;
@@ -576,6 +634,8 @@
 $(foreach m,$(ALL_MODULES),\
   $(eval r := $(ALL_MODULES.$(m).REQUIRED))\
   $(if $(r),\
+    $(if $(ALL_MODULES.$(m).FOR_HOST_CROSS),\
+      $(eval r := $(addprefix host_cross_,$(r))))\
     $(if $(ALL_MODULES.$(m).FOR_2ND_ARCH),\
       $(eval r_r := $(call get-32-bit-modules-if-we-can,$(r))),\
       $(if $(filter EXECUTABLES SHARED_LIBRARIES,$(ALL_MODULES.$(m).CLASS)),\
@@ -598,32 +658,40 @@
     $(eval r := $(call module-installed-files,$(r))) \
     $(eval t_m := $(filter $(TARGET_OUT_ROOT)/%, $(ALL_MODULES.$(m).INSTALLED))) \
     $(eval h_m := $(filter $(HOST_OUT_ROOT)/%, $(ALL_MODULES.$(m).INSTALLED))) \
+    $(eval hc_m := $(filter $(HOST_CROSS_OUT_ROOT)/%, $(ALL_MODULES.$(m).INSTALLED))) \
     $(eval t_r := $(filter $(TARGET_OUT_ROOT)/%, $(r))) \
     $(eval h_r := $(filter $(HOST_OUT_ROOT)/%, $(r))) \
+    $(eval hc_r := $(filter $(HOST_CROSS_OUT_ROOT)/%, $(r))) \
     $(eval t_m := $(filter-out $(t_r), $(t_m))) \
     $(eval h_m := $(filter-out $(h_r), $(h_m))) \
+    $(eval hc_m := $(filter-out $(hc_r), $(hc_m))) \
     $(if $(t_m), $(eval $(call add-required-deps, $(t_m),$(t_r)))) \
     $(if $(h_m), $(eval $(call add-required-deps, $(h_m),$(h_r)))) \
+    $(if $(hc_m), $(eval $(call add-required-deps, $(hc_m),$(hc_r)))) \
    ) \
  )
 
 t_m :=
 h_m :=
+hc_m :=
 t_r :=
 h_r :=
+hc_r :=
 
 # Establish the dependencies on the shared libraries.
 # It also adds the shared library module names to ALL_MODULES.$(m).REQUIRED,
 # so they can be expanded to product_MODULES later.
-# $(1): TARGET_ or HOST_.
+# $(1): TARGET_ or HOST_ or HOST_CROSS_.
 # $(2): non-empty for 2nd arch.
+# $(3): non-empty for host cross compile.
 define resolve-shared-libs-depes
 $(foreach m,$($(if $(2),$($(1)2ND_ARCH_VAR_PREFIX))$(1)DEPENDENCIES_ON_SHARED_LIBRARIES),\
   $(eval p := $(subst :,$(space),$(m)))\
   $(eval mod := $(firstword $(p)))\
   $(eval deps := $(subst $(comma),$(space),$(lastword $(p))))\
   $(if $(2),$(eval deps := $(addsuffix $($(1)2ND_ARCH_MODULE_SUFFIX),$(deps))))\
-  $(eval r := $(filter $($(1)OUT_ROOT)/%,$(call module-installed-files,\
+  $(if $(3),$(eval deps := $(addprefix host_cross_,$(deps))))\
+  $(eval r := $(filter $($(1)OUT)/%,$(call module-installed-files,\
     $(deps))))\
   $(eval $(call add-required-deps,$(word 2,$(p)),$(r)))\
   $(eval ALL_MODULES.$(mod).REQUIRED += $(deps)))
@@ -637,6 +705,9 @@
 ifdef HOST_2ND_ARCH
 $(call resolve-shared-libs-depes,HOST_,true)
 endif
+ifdef HOST_CROSS_OS
+$(call resolve-shared-libs-depes,HOST_CROSS_,,true)
+endif
 
 m :=
 r :=
@@ -729,7 +800,7 @@
 ifdef overridden_packages
 #  old_modules_to_install := $(modules_to_install)
   modules_to_install := \
-      $(filter-out $(foreach p,$(overridden_packages),$(p) %/$(p).apk), \
+      $(filter-out $(foreach p,$(overridden_packages),$(p) %/$(p).apk %/$(p).odex), \
           $(modules_to_install))
 endif
 #$(error filtered out
@@ -823,12 +894,12 @@
 # -------------------------------------------------------------------
 
 .PHONY: checkbuild
-checkbuild: $(modules_to_check)
-ifeq (true,$(ANDROID_BUILD_EVERYTHING_BY_DEFAULT)$(filter $(MAKECMDGOALS),checkbuild))
+checkbuild: $(modules_to_check) droid_targets
+ifeq ($(USE_SOONG),true)
+checkbuild: checkbuild-soong
+endif
+ifeq (true,$(ANDROID_BUILD_EVERYTHING_BY_DEFAULT))
 droid: checkbuild
-else
-# ANDROID_BUILD_EVERYTHING_BY_DEFAULT not set, or checkbuild is one of the cmd goals.
-checkbuild: droid
 endif
 
 .PHONY: ramdisk
@@ -865,7 +936,8 @@
 all_modules: $(ALL_MODULES)
 else
 # BUILD_MODULES_IN_PATHS is a list of paths relative to the top of the tree
-module_path_patterns := $(foreach p, $(BUILD_MODULES_IN_PATHS),\
+build_modules_in_paths := $(patsubst ./%,%,$(BUILD_MODULES_IN_PATHS))
+module_path_patterns := $(foreach p, $(build_modules_in_paths),\
     $(if $(filter %/,$(p)),$(p)%,$(p)/%))
 my_all_modules := $(sort $(foreach m, $(ALL_MODULES),$(if $(filter\
     $(module_path_patterns), $(addsuffix /,$(ALL_MODULES.$(m).PATH))),$(m))))
@@ -882,7 +954,8 @@
 	$(INSTALLED_USERDATAIMAGE_TARGET) \
 	$(INSTALLED_CACHEIMAGE_TARGET) \
 	$(INSTALLED_VENDORIMAGE_TARGET) \
-	$(INSTALLED_FILES_FILE)
+	$(INSTALLED_FILES_FILE) \
+	$(INSTALLED_FILES_FILE_VENDOR)
 
 # dist_files only for putting your library into the dist directory with a full build.
 .PHONY: dist_files
@@ -924,7 +997,7 @@
 .PHONY: apps_only
 apps_only: $(unbundled_build_modules)
 
-droid: apps_only
+droid_targets: apps_only
 
 # Combine the NOTICE files for a apps_only build
 $(eval $(call combine-notice-files, \
@@ -942,6 +1015,7 @@
     $(BUILT_OTATOOLS_PACKAGE) \
     $(SYMBOLS_ZIP) \
     $(INSTALLED_FILES_FILE) \
+    $(INSTALLED_FILES_FILE_VENDOR) \
     $(INSTALLED_BUILD_PROP_TARGET) \
     $(BUILT_TARGET_FILES_PACKAGE) \
     $(INSTALLED_ANDROID_INFO_TXT_TARGET) \
@@ -969,7 +1043,7 @@
   endif
 
 # Building a full system-- the default is to build droidcore
-droid: droidcore dist_files
+droid_targets: droidcore dist_files
 
 endif # TARGET_BUILD_APPS
 
@@ -1001,13 +1075,13 @@
 tests : host-tests target-tests
 
 # To catch more build breakage, check build tests modules in eng and userdebug builds.
+ifneq ($(ANDROID_NO_TEST_CHECK),true)
 ifneq ($(TARGET_BUILD_PDK),true)
 ifneq ($(filter eng userdebug,$(TARGET_BUILD_VARIANT)),)
 droidcore : target-tests host-tests
 endif
 endif
-
-.PHONY: lintall
+endif
 
 ifneq (,$(filter samplecode, $(MAKECMDGOALS)))
 .PHONY: samplecode
@@ -1052,3 +1126,4 @@
 .PHONY: nothing
 nothing:
 	@echo Successfully read the makefiles.
+endif # !relaunch_with_ninja
diff --git a/core/module_arch_supported.mk b/core/module_arch_supported.mk
index a5e4a7c..62e2643 100644
--- a/core/module_arch_supported.mk
+++ b/core/module_arch_supported.mk
@@ -8,9 +8,11 @@
 ## LOCAL_MODULE_$(my_prefix)ARCH_WARN
 ## LOCAL_MODULE_UNSUPPORTED_$(my_prefix)ARCH
 ## LOCAL_MODULE_UNSUPPORTED_$(my_prefix)ARCH_WARN
+## LOCAL_IS_HOST_MODULE
+## LOCAL_MODULE_HOST_OS
 ##
 ## Inputs from build system:
-## $(my_prefix)IS_64_BIT
+## $(LOCAL_2ND_ARCH_VAR_PREFIX)$(my_prefix)IS_64_BIT
 ## LOCAL_2ND_ARCH_VAR_PREFIX
 ##
 ## Outputs:
@@ -23,19 +25,18 @@
 my_module_arch_supported := false
 endif
 
-ifeq ($(LOCAL_2ND_ARCH_VAR_PREFIX),)
-ifeq ($($(my_prefix)IS_64_BIT)|$(my_module_multilib),true|32)
-my_module_arch_supported := false
-else ifeq ($($(my_prefix)IS_64_BIT)|$(my_module_multilib),|64)
+ifeq ($($(LOCAL_2ND_ARCH_VAR_PREFIX)$(my_prefix)IS_64_BIT)|$(my_module_multilib),true|32)
 my_module_arch_supported := false
 endif
-else # LOCAL_2ND_ARCH_VAR_PREFIX
+ifeq ($($(LOCAL_2ND_ARCH_VAR_PREFIX)$(my_prefix)IS_64_BIT)|$(my_module_multilib),|64)
+my_module_arch_supported := false
+endif
+
+ifneq ($(LOCAL_2ND_ARCH_VAR_PREFIX),)
 ifeq ($(my_module_multilib),first)
 my_module_arch_supported := false
-else ifeq ($(my_module_multilib),64)
-my_module_arch_supported := false
 endif
-endif # LOCAL_2ND_ARCH_VAR_PREFIX
+endif
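A tiny standalone sketch of the comparison idiom used above: joining two variables with a separator lets a single ifeq test both at once (values are illustrative).

is64 := true
multilib := 32
ifeq ($(is64)|$(multilib),true|32)
$(info not supported: 32-bit-only module for a 64-bit arch prefix)
endif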
 
 ifneq (,$(LOCAL_MODULE_$(my_prefix)ARCH))
 ifeq (,$(filter $($(my_prefix)$(LOCAL_2ND_ARCH_VAR_PREFIX)ARCH),$(LOCAL_MODULE_$(my_prefix)ARCH)))
@@ -58,3 +59,14 @@
 my_module_arch_supported := false
 $(warning $(LOCAL_MODULE): architecture $($(my_prefix)$(LOCAL_2ND_ARCH_VAR_PREFIX)ARCH) unsupported)
 endif
+
+ifdef LOCAL_IS_HOST_MODULE
+ifneq (,$(LOCAL_MODULE_HOST_OS))
+  ifeq (,$(filter $($(my_prefix)OS),$(LOCAL_MODULE_HOST_OS)))
+    my_module_arch_supported := false
+  endif
+else ifeq ($($(my_prefix)OS),windows)
+  # If LOCAL_MODULE_HOST_OS is empty, only linux and darwin are supported
+  my_module_arch_supported := false
+endif
+endif
diff --git a/core/native_benchmark.mk b/core/native_benchmark.mk
index 431e40a..ac37701 100644
--- a/core/native_benchmark.mk
+++ b/core/native_benchmark.mk
@@ -3,10 +3,15 @@
 ## Common flags for native benchmarks are added.
 ###########################################
 
-LOCAL_STATIC_LIBRARIES += libbenchmark
+LOCAL_STATIC_LIBRARIES += libgoogle-benchmark
 
-ifndef LOCAL_MODULE_PATH
-LOCAL_MODULE_PATH := $(TARGET_OUT_DATA_NATIVE_TESTS)/$(LOCAL_MODULE)
+LOCAL_MODULE_PATH_64 := $(TARGET_OUT_DATA_METRIC_TESTS)/$(LOCAL_MODULE)
+LOCAL_MODULE_PATH_32 := $($(TARGET_2ND_ARCH_VAR_PREFIX)TARGET_OUT_DATA_METRIC_TESTS)/$(LOCAL_MODULE)
+
+ifndef LOCAL_MULTILIB
+ifndef LOCAL_32_BIT_ONLY
+LOCAL_MULTILIB := both
+endif
 endif
 
 include $(BUILD_EXECUTABLE)
diff --git a/core/ninja.mk b/core/ninja.mk
new file mode 100644
index 0000000..9d0ff9a
--- /dev/null
+++ b/core/ninja.mk
@@ -0,0 +1,192 @@
+NINJA ?= prebuilts/ninja/$(HOST_PREBUILT_TAG)/ninja
+
+ifeq ($(USE_SOONG),true)
+USE_SOONG_FOR_KATI := true
+endif
+
+ifeq ($(USE_SOONG_FOR_KATI),true)
+include $(BUILD_SYSTEM)/soong.mk
+else
+KATI ?= $(HOST_OUT_EXECUTABLES)/ckati
+MAKEPARALLEL ?= $(HOST_OUT_EXECUTABLES)/makeparallel
+endif
+
+KATI_OUTPUT_PATTERNS := $(OUT_DIR)/build%.ninja $(OUT_DIR)/ninja%.sh
+
+# Modifier goals we don't need to pass to Ninja.
+NINJA_EXCLUDE_GOALS := showcommands all dist
+.PHONY : $(NINJA_EXCLUDE_GOALS)
+
+# A list of goals that affect makefile parsing and therefore need to be passed to Kati.
+PARSE_TIME_MAKE_GOALS := \
+	$(PARSE_TIME_MAKE_GOALS) \
+	$(dont_bother_goals) \
+	all \
+	APP-% \
+	DUMP_% \
+	ECLIPSE-% \
+	PRODUCT-% \
+	boottarball-nodeps \
+	btnod \
+	build-art% \
+	build_kernel-nodeps \
+	clean-oat% \
+	continuous_instrumentation_tests \
+	continuous_native_tests \
+	cts \
+	custom_images \
+	deps-license \
+	dicttool_aosp \
+	dist \
+	dump-products \
+	dumpvar-% \
+	eng \
+	fusion \
+	oem_image \
+	old-cts \
+	online-system-api-sdk-docs \
+	pdk \
+	platform \
+	platform-java \
+	product-graph \
+	samplecode \
+	sdk \
+	sdk_addon \
+	sdk_repo \
+	snod \
+	stnod \
+	systemimage-nodeps \
+	systemtarball-nodeps \
+	target-files-package \
+	test-art% \
+	user \
+	userdataimage \
+	userdebug \
+	valgrind-test-art% \
+	vts \
+	win_sdk \
+	winsdk-tools
+
+-include vendor/google/build/ninja_config.mk
+
+# Any Android goals that need to be built.
+ANDROID_GOALS := $(filter-out $(KATI_OUTPUT_PATTERNS) $(KATI) $(MAKEPARALLEL),\
+    $(sort $(ORIGINAL_MAKECMDGOALS) $(MAKECMDGOALS)))
+# Goals we need to pass to Ninja.
+NINJA_GOALS := $(filter-out $(NINJA_EXCLUDE_GOALS), $(ANDROID_GOALS))
+# Goals we need to pass to Kati.
+KATI_GOALS := $(filter $(PARSE_TIME_MAKE_GOALS),  $(ANDROID_GOALS))
+
+define replace_space_and_slash
+$(subst /,_,$(subst $(space),_,$(sort $1)))
+endef
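Usage example for the helper above (paths are illustrative; relies on the build system's $(space) definition):

$(info $(call replace_space_and_slash,frameworks/base packages/apps/Settings))
# Prints: frameworks_base_packages_apps_Settings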
+
+KATI_NINJA_SUFFIX := -$(TARGET_PRODUCT)
+ifneq ($(KATI_GOALS),)
+KATI_NINJA_SUFFIX := $(KATI_NINJA_SUFFIX)-$(call replace_space_and_slash,$(KATI_GOALS))
+endif
+ifneq ($(ONE_SHOT_MAKEFILE),)
+KATI_NINJA_SUFFIX := $(KATI_NINJA_SUFFIX)-mmm-$(call replace_space_and_slash,$(ONE_SHOT_MAKEFILE))
+endif
+ifneq ($(BUILD_MODULES_IN_PATHS),)
+KATI_NINJA_SUFFIX := $(KATI_NINJA_SUFFIX)-mmma-$(call replace_space_and_slash,$(BUILD_MODULES_IN_PATHS))
+endif
+
+my_checksum_suffix :=
+my_ninja_suffix_too_long := $(filter 1, $(shell v='$(KATI_NINJA_SUFFIX)' && echo $$(($${$(pound)v} > 64))))
+ifneq ($(my_ninja_suffix_too_long),)
+# Replace the suffix with a checksum if it gets too long.
+my_checksum_suffix := $(KATI_NINJA_SUFFIX)
+KATI_NINJA_SUFFIX := -$(word 1, $(shell echo $(my_checksum_suffix) | $(MD5SUM)))
+endif
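A sketch of the fallback (hypothetical suffix, assuming a coreutils md5sum on the host, as $(MD5SUM) provides): the readable suffix is hashed, and the .suf file written below records the mapping back to it.

long := -aosp_arm-mmma-frameworks_base-packages_apps_Settings-packages_apps_Launcher3
short := -$(word 1,$(shell echo $(long) | md5sum))
$(info build$(short).ninja stands in for build$(long).ninja)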
+
+KATI_BUILD_NINJA := $(OUT_DIR)/build$(KATI_NINJA_SUFFIX).ninja
+KATI_ENV_SH := $(OUT_DIR)/env$(KATI_NINJA_SUFFIX).sh
+
+# Write out a file mapping checksum to the real suffix.
+ifneq ($(my_checksum_suffix),)
+my_ninja_suffix_file := $(basename $(KATI_BUILD_NINJA)).suf
+$(shell mkdir -p $(dir $(my_ninja_suffix_file)) && \
+    echo $(my_checksum_suffix) > $(my_ninja_suffix_file))
+endif
+
+ifeq (,$(NINJA_STATUS))
+NINJA_STATUS := [%p %s/%t]$(space)
+endif
+
+ifneq (,$(filter showcommands,$(ORIGINAL_MAKECMDGOALS)))
+NINJA_ARGS += "-v"
+endif
+
+ifdef USE_GOMA
+KATI_MAKEPARALLEL := $(MAKEPARALLEL)
+# Ninja runs remote jobs (i.e., commands which contain gomacc) with
+# this parallelism. Note the parallelism of all other jobs is still
+# limited by the -j flag passed to GNU make.
+NINJA_REMOTE_NUM_JOBS ?= 500
+NINJA_ARGS += -j$(NINJA_REMOTE_NUM_JOBS)
+else
+NINJA_MAKEPARALLEL := $(MAKEPARALLEL) --ninja
+endif
+
+ifeq ($(USE_SOONG),true)
+COMBINED_BUILD_NINJA := $(OUT_DIR)/combined$(KATI_NINJA_SUFFIX).ninja
+
+$(COMBINED_BUILD_NINJA): $(KATI_BUILD_NINJA) $(SOONG_ANDROID_MK)
+	$(hide) echo "builddir = $(OUT_DIR)" > $(COMBINED_BUILD_NINJA)
+	$(hide) echo "subninja $(SOONG_BUILD_NINJA)" >> $(COMBINED_BUILD_NINJA)
+	$(hide) echo "subninja $(KATI_BUILD_NINJA)" >> $(COMBINED_BUILD_NINJA)
+else
+COMBINED_BUILD_NINJA := $(KATI_BUILD_NINJA)
+endif
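For orientation, the combined file written by the recipe above is only a three-line ninja wrapper; with hypothetical paths its contents look like:

#   builddir = out
#   subninja out/soong/build.ninja
#   subninja out/build-aosp_arm.ninja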
+
+$(sort $(DEFAULT_GOAL) $(ANDROID_GOALS)) : ninja_wrapper
+	@#empty
+
+.PHONY: ninja_wrapper
+ninja_wrapper: $(COMBINED_BUILD_NINJA) $(MAKEPARALLEL)
+	@echo Starting build with ninja
+	+$(hide) export NINJA_STATUS="$(NINJA_STATUS)" && source $(KATI_ENV_SH) && $(NINJA_MAKEPARALLEL) $(NINJA) $(NINJA_GOALS) -C $(TOP) -f $(COMBINED_BUILD_NINJA) $(NINJA_ARGS)
+
+# Dummy Android.mk and CleanSpec.mk files so that kati won't recurse into the
+# out directory
+DUMMY_OUT_MKS := $(OUT_DIR)/Android.mk $(OUT_DIR)/CleanSpec.mk
+$(DUMMY_OUT_MKS):
+	@mkdir -p $(dir $@)
+	$(hide) echo '# This file prevents findleaves.py from traversing this directory further' >$@
+
+KATI_FIND_EMULATOR := --use_find_emulator
+ifeq ($(KATI_EMULATE_FIND),false)
+  KATI_FIND_EMULATOR :=
+endif
+$(KATI_BUILD_NINJA): $(KATI) $(MAKEPARALLEL) $(DUMMY_OUT_MKS) $(SOONG_ANDROID_MK) FORCE
+	@echo Running kati to generate build$(KATI_NINJA_SUFFIX).ninja...
+	+$(hide) $(KATI_MAKEPARALLEL) $(KATI) --ninja --ninja_dir=$(OUT_DIR) --ninja_suffix=$(KATI_NINJA_SUFFIX) --regen --ignore_dirty=$(OUT_DIR)/% --no_ignore_dirty=$(SOONG_ANDROID_MK) --ignore_optional_include=$(OUT_DIR)/%.P --detect_android_echo $(KATI_FIND_EMULATOR) -f build/core/main.mk $(KATI_GOALS) --gen_all_targets BUILDING_WITH_NINJA=true SOONG_ANDROID_MK=$(SOONG_ANDROID_MK)
+
+ifneq ($(USE_SOONG_FOR_KATI),true)
+KATI_CXX := $(CLANG_CXX) $(CLANG_HOST_GLOBAL_CFLAGS) $(CLANG_HOST_GLOBAL_CPPFLAGS)
+KATI_LD := $(CLANG_CXX) $(CLANG_HOST_GLOBAL_LDFLAGS)
+# Build static ckati. Unfortunately Mac OS X doesn't officially support static executables.
+ifeq ($(BUILD_OS),linux)
+# We need everything in libpthread.a, otherwise C++11's threading library will be disabled.
+KATI_LD += -static -Wl,--whole-archive -lpthread -Wl,--no-whole-archive -ldl
+endif
+
+KATI_INTERMEDIATES_PATH := $(HOST_OUT_INTERMEDIATES)/EXECUTABLES/ckati_intermediates
+KATI_BIN_PATH := $(HOST_OUT_EXECUTABLES)
+include build/kati/Makefile.ckati
+
+MAKEPARALLEL_CXX := $(CLANG_CXX) $(CLANG_HOST_GLOBAL_CFLAGS) $(CLANG_HOST_GLOBAL_CPPFLAGS)
+MAKEPARALLEL_LD := $(CLANG_CXX) $(CLANG_HOST_GLOBAL_LDFLAGS)
+# Build static makeparallel. Unfortunately Mac OS X doesn't officially support static executables.
+ifeq ($(BUILD_OS),linux)
+MAKEPARALLEL_LD += -static
+endif
+
+MAKEPARALLEL_INTERMEDIATES_PATH := $(HOST_OUT_INTERMEDIATES)/EXECUTABLES/makeparallel_intermediates
+MAKEPARALLEL_BIN_PATH := $(HOST_OUT_EXECUTABLES)
+include build/tools/makeparallel/Makefile
+endif
+
+.PHONY: FORCE
+FORCE:
diff --git a/core/notice_files.mk b/core/notice_files.mk
index 43a5435..e7f8974 100644
--- a/core/notice_files.mk
+++ b/core/notice_files.mk
@@ -2,7 +2,11 @@
 ## Track NOTICE files
 ###########################################################
 
+ifneq ($(LOCAL_NOTICE_FILE),)
+notice_file:=$(strip $(LOCAL_NOTICE_FILE))
+else
 notice_file:=$(strip $(wildcard $(LOCAL_PATH)/NOTICE))
+endif
 
 ifeq ($(LOCAL_MODULE_CLASS),GYP)
   # We ignore NOTICE files for modules of type GYP.
@@ -14,6 +18,7 @@
 # so my_prefix is not set at this point.
 ifeq ($(LOCAL_IS_HOST_MODULE),true)
   my_prefix := HOST_
+  LOCAL_HOST_PREFIX :=
 else
   my_prefix := TARGET_
 endif
@@ -56,6 +61,7 @@
 
 # In case it's actually a host file
 module_installed_filename := $(patsubst $(HOST_OUT)%,%,$(module_installed_filename))
+module_installed_filename := $(patsubst $(HOST_CROSS_OUT)%,%,$(module_installed_filename))
 
 installed_notice_file := $($(my_prefix)OUT_NOTICE_FILES)/src/$(module_installed_filename).txt
 
diff --git a/core/package_internal.mk b/core/package_internal.mk
index 23648c1..551f18e 100644
--- a/core/package_internal.mk
+++ b/core/package_internal.mk
@@ -57,6 +57,9 @@
 endif
 LOCAL_MODULE_CLASS := APPS
 
+intermediates := $(call local-intermediates-dir)
+intermediates.COMMON := $(call local-intermediates-dir,COMMON)
+
 #################################
 include $(BUILD_SYSTEM)/configure_local_jack.mk
 #################################
@@ -112,6 +115,45 @@
 need_compile_asset := true
 endif
 
+my_res_package :=
+ifdef LOCAL_USE_AAPT2
+# In aapt2 the last takes precedence.
+my_resource_dirs := $(call reverse-list,$(LOCAL_RESOURCE_DIR))
+my_res_dir :=
+my_overlay_res_dirs :=
+
+ifneq ($(LOCAL_STATIC_ANDROID_LIBRARIES),)
+# If we are using static android libraries, every source file becomes an overlay.
+# This is to emulate old AAPT behavior which simulated library support.
+my_res_dir :=
+my_overlay_res_dirs := $(my_resource_dirs)
+else
+# Without static libraries, the first directory is our directory, which can then be
+# overlaid by the rest. (The first directory in my_resource_dirs is the last directory in
+# $(LOCAL_RESOURCE_DIR) due to the list being reversed.)
+my_res_dir := $(firstword $(my_resource_dirs))
+my_overlay_res_dirs := $(wordlist 2,999,$(my_resource_dirs))
+endif
+
+my_overlay_resources := $(strip \
+  $(foreach d,$(my_overlay_res_dirs),\
+    $(addprefix $(d)/, \
+        $(call find-subdir-assets,$(d)))))
+
+my_res_resources := $(strip \
+    $(addprefix $(my_res_dir)/, \
+        $(call find-subdir-assets,$(my_res_dir))))
+
+all_resources := $(strip $(my_res_resources) $(my_overlay_resources))
+
+# The linked resource package.
+my_res_package := $(intermediates)/package-res.apk
+LOCAL_INTERMEDIATE_TARGETS += $(my_res_package)
+
+# Always run aapt2, because we need to at least compile the AndroidManifest.xml.
+need_compile_res := true
+
+else  # LOCAL_USE_AAPT2
 all_resources := $(strip \
     $(foreach dir, $(LOCAL_RESOURCE_DIR), \
       $(addprefix $(dir)/, \
@@ -121,13 +163,14 @@
        ) \
      ))
 
+endif  # LOCAL_USE_AAPT2
+
 ifneq ($(all_resources),)
   need_compile_res := true
 endif
 
 all_res_assets := $(strip $(all_assets) $(all_resources))
 
-intermediates.COMMON := $(call local-intermediates-dir,COMMON)
 
 # If no assets or resources were found, clear the directory variables so
 # we don't try to build them.
@@ -176,7 +219,7 @@
 ifeq (true,$(EMMA_INSTRUMENT))
 ifndef LOCAL_EMMA_INSTRUMENT
 # No emma for test apks.
-ifeq (,$(filer tests,$(LOCAL_MODULE_TAGS))$(LOCAL_INSTRUMENTATION_FOR))
+ifeq (,$(LOCAL_INSTRUMENTATION_FOR))
 LOCAL_EMMA_INSTRUMENT := true
 endif # No test apk
 endif # LOCAL_EMMA_INSTRUMENT is not set
@@ -186,17 +229,46 @@
 
 ifeq (true,$(LOCAL_EMMA_INSTRUMENT))
 ifeq (true,$(EMMA_INSTRUMENT_STATIC))
+ifdef LOCAL_JACK_ENABLED
+# Jack supports coverage with Jacoco
+ifneq ($(LOCAL_SRC_FILES)$(LOCAL_STATIC_JAVA_LIBRARIES)$(LOCAL_SOURCE_FILES_ALL_GENERATED),)
+# Only add jacocoagent if the package contains some java code
+LOCAL_STATIC_JAVA_LIBRARIES += jacocoagent
+endif # Contains java code
+else
 LOCAL_STATIC_JAVA_LIBRARIES += emma
+endif # LOCAL_JACK_ENABLED
 else
 ifdef LOCAL_SDK_VERSION
 ifdef TARGET_BUILD_APPS
-# In unbundled build merge the emma library into the apk.
-LOCAL_STATIC_JAVA_LIBRARIES += emma
+# In unbundled build, merge the coverage library into the apk.
+ifdef LOCAL_JACK_ENABLED
+# Jack supports coverage with Jacoco
+ifneq ($(LOCAL_SRC_FILES)$(LOCAL_STATIC_JAVA_LIBRARIES)$(LOCAL_SOURCE_FILES_ALL_GENERATED),)
+# Only add jacocoagent if the package contains some java code
+LOCAL_STATIC_JAVA_LIBRARIES += jacocoagent
+# Exclude jacoco classes from proguard
+LOCAL_PROGUARD_FLAGS += -include $(BUILD_SYSTEM)/proguard.jacoco.flags
+LOCAL_JACK_PROGUARD_FLAGS += -include $(BUILD_SYSTEM)/proguard.jacoco.flags
+endif # Contains java code
 else
-# If build against the SDK in full build, core.jar is not used,
-# we have to use prebiult emma.jar to make Proguard happy;
+LOCAL_STATIC_JAVA_LIBRARIES += emma
+endif # LOCAL_JACK_ENABLED
+else
+# If build against the SDK in full build, core.jar is not used
+# so coverage classes are not present.
+ifdef LOCAL_JACK_ENABLED
+# Jack needs jacoco on the classpath but we do not want it to be in
+# the final apk. Although it is a static library, we add it to
+# LOCAL_JAVA_LIBRARIES, which is only put on the classpath.
+# Note: we have nothing to do for proguard since jacoco will be
+# on the classpath only, thus not modified during the compilation.
+LOCAL_JAVA_LIBRARIES += jacocoagent
+else
+# We have to use prebuilt emma.jar to make Proguard happy;
 # Otherwise emma classes are included in core.jar.
 LOCAL_PROGUARD_FLAGS += -libraryjars $(EMMA_JAR)
+endif # LOCAL_JACK_ENABLED
 endif # full build
 endif # LOCAL_SDK_VERSION
 endif # EMMA_INSTRUMENT_STATIC
@@ -204,6 +276,24 @@
 
 rs_compatibility_jni_libs :=
 
+ifeq ($(LOCAL_DATA_BINDING),true)
+data_binding_intermediates := $(intermediates.COMMON)/data-binding
+
+LOCAL_JAVACFLAGS += -processorpath $(DATA_BINDING_COMPILER) -s $(data_binding_intermediates)/anno-src
+LOCAL_JACK_FLAGS += --processorpath $(DATA_BINDING_COMPILER)
+
+LOCAL_STATIC_JAVA_LIBRARIES += databinding-baselibrary
+LOCAL_STATIC_JAVA_AAR_LIBRARIES += databinding-library databinding-adapters
+
+data_binding_res_in := $(LOCAL_RESOURCE_DIR)
+data_binding_res_out := $(data_binding_intermediates)/res
+
+# Replace with the processed merged res dir.
+LOCAL_RESOURCE_DIR := $(data_binding_res_out)
+
+LOCAL_AAPT_FLAGS += --auto-add-overlay --extra-packages com.android.databinding.library
+endif  # LOCAL_DATA_BINDING
+
 include $(BUILD_SYSTEM)/android_manifest.mk
 
 #################################
@@ -217,7 +307,7 @@
 
 $(LOCAL_INTERMEDIATE_TARGETS): \
     PRIVATE_ANDROID_MANIFEST := $(full_android_manifest)
-ifneq (,$(filter-out current system_current, $(LOCAL_SDK_VERSION)))
+ifneq (,$(filter-out current system_current test_current, $(LOCAL_SDK_VERSION)))
 $(LOCAL_INTERMEDIATE_TARGETS): \
     PRIVATE_DEFAULT_APP_TARGET_SDK := $(LOCAL_SDK_VERSION)
 else
@@ -225,7 +315,43 @@
     PRIVATE_DEFAULT_APP_TARGET_SDK := $(DEFAULT_APP_TARGET_SDK)
 endif
 
+ifeq ($(LOCAL_DATA_BINDING),true)
+data_binding_stamp := $(data_binding_intermediates)/data-binding.stamp
+$(data_binding_stamp): PRIVATE_INTERMEDIATES := $(data_binding_intermediates)
+$(data_binding_stamp): PRIVATE_MANIFEST := $(full_android_manifest)
+# Generate code into $(LOCAL_INTERMEDIATE_SOURCE_DIR) so that the generated .java files
+# will be automatically picked up by function compile-java.
+$(data_binding_stamp): PRIVATE_SRC_OUT := $(LOCAL_INTERMEDIATE_SOURCE_DIR)/data-binding
+$(data_binding_stamp): PRIVATE_XML_OUT := $(data_binding_intermediates)/xml
+$(data_binding_stamp): PRIVATE_RES_OUT := $(data_binding_res_out)
+$(data_binding_stamp): PRIVATE_RES_IN := $(data_binding_res_in)
+$(data_binding_stamp): PRIVATE_ANNO_SRC_DIR := $(data_binding_intermediates)/anno-src
+
+$(data_binding_stamp) : $(all_res_assets) $(full_android_manifest) \
+    $(DATA_BINDING_COMPILER)
+	@echo "Data-binding process: $@"
+	@rm -rf $(PRIVATE_INTERMEDIATES) $(PRIVATE_SRC_OUT) && \
+	  mkdir -p $(PRIVATE_INTERMEDIATES) $(PRIVATE_SRC_OUT) \
+	      $(PRIVATE_XML_OUT) $(PRIVATE_RES_OUT) $(PRIVATE_ANNO_SRC_DIR)
+	$(hide) java -classpath $(DATA_BINDING_COMPILER) android.databinding.tool.MakeCopy \
+	  $(PRIVATE_MANIFEST) $(PRIVATE_SRC_OUT) $(PRIVATE_XML_OUT) $(PRIVATE_RES_OUT) $(PRIVATE_RES_IN)
+	$(hide) touch $@
+
+# Make sure the data-binding process happens before javac and generation of R.java.
+$(R_file_stamp) $(full_classes_compiled_jar) : $(data_binding_stamp)
+# The dependency path when jack is enabled
+$(built_dex_intermediate) : $(data_binding_stamp)
+endif  # LOCAL_DATA_BINDING
+
 ifeq ($(need_compile_res),true)
+ifdef LOCAL_USE_AAPT2
+my_compiled_res_base_dir := $(intermediates)/flat-res
+my_generated_res_dirs := $(rs_generated_res_dir)
+my_generated_res_dirs_deps := $(RenderScript_file_stamp)
+# Add AAPT2 link specific flags.
+$(my_res_package): PRIVATE_AAPT_FLAGS := $(LOCAL_AAPT_FLAGS) --no-static-lib-packages
+include $(BUILD_SYSTEM)/aapt2.mk
+else  # LOCAL_USE_AAPT2
 
 # Since we don't know where the real R.java file is going to end up,
 # we need to use another file to stand in its place.  We'll just
@@ -241,22 +367,9 @@
 $(R_file_stamp): PRIVATE_PROGUARD_OPTIONS_FILE := $(proguard_options_file)
 $(R_file_stamp): $(all_res_assets) $(full_android_manifest) $(RenderScript_file_stamp) $(AAPT) | $(ACP)
 	@echo "target R.java/Manifest.java: $(PRIVATE_MODULE) ($@)"
-	@rm -f $@
+	@rm -rf $@ && mkdir -p $(dir $@)
 	$(create-resource-java-files)
-	$(hide) for GENERATED_MANIFEST_FILE in `find $(PRIVATE_SOURCE_INTERMEDIATES_DIR) \
-					-name Manifest.java 2> /dev/null`; do \
-		dir=`awk '/package/{gsub(/\./,"/",$$2);gsub(/;/,"",$$2);print $$2;exit}' $$GENERATED_MANIFEST_FILE`; \
-		mkdir -p $(TARGET_COMMON_OUT_ROOT)/R/$$dir; \
-		$(ACP) -fp $$GENERATED_MANIFEST_FILE $(TARGET_COMMON_OUT_ROOT)/R/$$dir; \
-	done;
-	$(hide) for GENERATED_R_FILE in `find $(PRIVATE_SOURCE_INTERMEDIATES_DIR) \
-					-name R.java 2> /dev/null`; do \
-		dir=`awk '/package/{gsub(/\./,"/",$$2);gsub(/;/,"",$$2);print $$2;exit}' $$GENERATED_R_FILE`; \
-		mkdir -p $(TARGET_COMMON_OUT_ROOT)/R/$$dir; \
-		$(ACP) -fp $$GENERATED_R_FILE $(TARGET_COMMON_OUT_ROOT)/R/$$dir \
-			|| exit 31; \
-		$(ACP) -fp $$GENERATED_R_FILE $@ || exit 32; \
-	done; \
+	$(call find-generated-R.java)
 
 $(proguard_options_file): $(R_file_stamp)
 
@@ -278,6 +391,8 @@
 	$(add-assets-to-package)
 endif
 
+endif  # LOCAL_USE_AAPT2
+
 # Other modules should depend on the BUILT module if
 # they want to use this module's R.java file.
 $(LOCAL_BUILT_MODULE): $(R_file_stamp)
@@ -291,6 +406,7 @@
 endif
 ifneq ($(full_classes_jack),)
 $(full_classes_jack): $(R_file_stamp)
+$(jack_check_timestamp): $(R_file_stamp)
 endif
 endif # LOCAL_JACK_ENABLED
 
@@ -312,7 +428,7 @@
 # Most packages should link against the resources defined by framework-res.
 # Even if they don't have their own resources, they may use framework
 # resources.
-ifneq ($(filter-out current system_current,$(LOCAL_SDK_RES_VERSION))$(if $(TARGET_BUILD_APPS),$(filter current system_current,$(LOCAL_SDK_RES_VERSION))),)
+ifneq ($(filter-out current system_current test_current,$(LOCAL_SDK_RES_VERSION))$(if $(TARGET_BUILD_APPS),$(filter current system_current test_current,$(LOCAL_SDK_RES_VERSION))),)
 # for released sdk versions, the platform resources were built into android.jar.
 framework_res_package_export := \
     $(HISTORICAL_SDK_VERSIONS_ROOT)/$(LOCAL_SDK_RES_VERSION)/android.jar
@@ -335,10 +451,13 @@
     $(framework_res_package_export_deps) \
     $(foreach lib,$(LOCAL_RES_LIBRARIES),\
         $(call intermediates-dir-for,APPS,$(lib),,COMMON)/src/R.stamp)
-
 $(resource_export_package) $(R_file_stamp) $(LOCAL_BUILT_MODULE): $(all_library_res_package_export_deps)
 $(LOCAL_INTERMEDIATE_TARGETS): \
     PRIVATE_AAPT_INCLUDES := $(all_library_res_package_exports)
+
+ifdef LOCAL_USE_AAPT2
+$(my_res_package) : $(all_library_res_package_export_deps)
+endif
 endif # LOCAL_NO_STANDARD_LIBRARIES
 
 ifneq ($(full_classes_jar),)
@@ -387,7 +506,6 @@
     $(LOCAL_ADDITIONAL_CERTIFICATES), $(c).x509.pem $(c).pk8)
 
 # Define the rule to build the actual package.
-$(LOCAL_BUILT_MODULE): $(AAPT) | $(ZIPALIGN)
 # PRIVATE_JNI_SHARED_LIBRARIES is a list of <abi>:<path_of_built_lib>.
 $(LOCAL_BUILT_MODULE): PRIVATE_JNI_SHARED_LIBRARIES := $(jni_shared_libraries_with_abis)
 # PRIVATE_JNI_SHARED_LIBRARIES_ABI is a list of ABI names.
@@ -397,19 +515,35 @@
     LOCAL_AAPT_INCLUDE_ALL_RESOURCES := true
 endif
 ifeq ($(LOCAL_AAPT_INCLUDE_ALL_RESOURCES),true)
-    $(LOCAL_BUILT_MODULE): PRIVATE_PRODUCT_AAPT_CONFIG :=
-    $(LOCAL_BUILT_MODULE): PRIVATE_PRODUCT_AAPT_PREF_CONFIG :=
+    $(my_res_package) $(LOCAL_BUILT_MODULE): PRIVATE_PRODUCT_AAPT_CONFIG :=
+    $(my_res_package) $(LOCAL_BUILT_MODULE): PRIVATE_PRODUCT_AAPT_PREF_CONFIG :=
 else
-    $(LOCAL_BUILT_MODULE): PRIVATE_PRODUCT_AAPT_CONFIG := $(PRODUCT_AAPT_CONFIG)
+    $(my_res_package) $(LOCAL_BUILT_MODULE): PRIVATE_PRODUCT_AAPT_CONFIG := $(PRODUCT_AAPT_CONFIG)
 ifdef LOCAL_PACKAGE_SPLITS
-    $(LOCAL_BUILT_MODULE): PRIVATE_PRODUCT_AAPT_PREF_CONFIG :=
+    $(my_res_package) $(LOCAL_BUILT_MODULE): PRIVATE_PRODUCT_AAPT_PREF_CONFIG :=
 else
-    $(LOCAL_BUILT_MODULE): PRIVATE_PRODUCT_AAPT_PREF_CONFIG := $(PRODUCT_AAPT_PREF_CONFIG)
+    $(my_res_package) $(LOCAL_BUILT_MODULE): PRIVATE_PRODUCT_AAPT_PREF_CONFIG := $(PRODUCT_AAPT_PREF_CONFIG)
 endif
 endif
 $(LOCAL_BUILT_MODULE): PRIVATE_DONT_DELETE_JAR_DIRS := $(LOCAL_DONT_DELETE_JAR_DIRS)
-$(LOCAL_BUILT_MODULE): $(all_res_assets) $(jni_shared_libraries) $(full_android_manifest)
+$(LOCAL_BUILT_MODULE) : $(jni_shared_libraries)
+ifdef LOCAL_USE_AAPT2
+$(LOCAL_BUILT_MODULE): PRIVATE_RES_PACKAGE := $(my_res_package)
+$(LOCAL_BUILT_MODULE) : $(my_res_package) $(AAPT2) | $(ACP)
+else
+$(LOCAL_BUILT_MODULE) : $(all_res_assets) $(full_android_manifest) $(AAPT)
+endif
 	@echo "target Package: $(PRIVATE_MODULE) ($@)"
+ifdef LOCAL_USE_AAPT2
+ifdef LOCAL_JACK_ENABLED
+	$(call copy-file-to-new-target)
+else
+	@# TODO: implement merge-two-packages.
+	$(if $(PRIVATE_SOURCE_ARCHIVE),\
+	  $(call merge-two-packages,$(PRIVATE_RES_PACKAGE) $(PRIVATE_SOURCE_ARCHIVE),$@),
+	  $(call copy-file-to-new-target))
+endif
+else  # LOCAL_USE_AAPT2
 ifdef LOCAL_JACK_ENABLED
 	$(create-empty-package)
 else
@@ -418,26 +552,29 @@
 	  $(create-empty-package))
 endif
 	$(add-assets-to-package)
+endif  # LOCAL_USE_AAPT2
 ifneq ($(jni_shared_libraries),)
 	$(add-jni-shared-libs-to-package)
 endif
 ifeq ($(full_classes_jar),)
 # We don't build jar, need to add the Java resources here.
 	$(if $(PRIVATE_EXTRA_JAR_ARGS),$(call add-java-resources-to,$@))
-else
+else  # full_classes_jar
 	$(add-dex-to-package)
-endif
+endif  # full_classes_jar
 ifdef LOCAL_JACK_ENABLED
 	$(add-carried-jack-resources)
 endif
 ifdef LOCAL_DEX_PREOPT
+ifneq ($(BUILD_PLATFORM_ZIP),)
+	@# Keep a copy of apk with classes.dex unstripped
+	$(hide) cp -f $@ $(dir $@)package.dex.apk
+endif  # BUILD_PLATFORM_ZIP
 ifneq (nostripping,$(LOCAL_DEX_PREOPT))
 	$(call dexpreopt-remove-classes.dex,$@)
 endif
 endif
 	$(sign-package)
-	@# Alignment must happen after all other zip operations.
-	$(align-package)
 
 ###############################
 ## Build dpi-specific apks, if it's apps_only build.
@@ -472,7 +609,7 @@
 installed_apk_splits := $(foreach s,$(my_split_suffixes),$(my_module_path)/$(LOCAL_MODULE)_$(s).apk)
 
 # The splits should have been built in the same command building the base apk.
-# This rule just runs signing and zipalign etc.
+# This rule just runs signing.
 # Note that we explicitly check the existence of the split apk and remove the
 # built base apk if the split apk isn't there.
 # That way the build system will rerun the aapt after the user changes the splitting parameters.
@@ -484,7 +621,6 @@
 	  rm $<; exit 1; \
 	fi
 	$(sign-package)
-	$(align-package)
 
 # Rules to install the splits
 $(installed_apk_splits) : $(my_module_path)/$(LOCAL_MODULE)_%.apk : $(built_module_path)/package_%.apk | $(ACP)
@@ -498,6 +634,18 @@
 
 # Make sure to install the splits when you run "make <module_name>".
 $(my_register_name): $(installed_apk_splits)
+
+ifdef LOCAL_COMPATIBILITY_SUITE
+cts_testcase_file := $(foreach s,$(my_split_suffixes),$(COMPATIBILITY_TESTCASES_OUT_$(LOCAL_COMPATIBILITY_SUITE))/$(LOCAL_MODULE)_$(s).apk)
+$(cts_testcase_file) : $(COMPATIBILITY_TESTCASES_OUT_$(LOCAL_COMPATIBILITY_SUITE))/$(LOCAL_MODULE)_%.apk : $(built_module_path)/package_%.apk | $(ACP)
+	$(copy-file-to-new-target)
+
+COMPATIBILITY.$(LOCAL_COMPATIBILITY_SUITE).FILES := \
+  $(COMPATIBILITY.$(LOCAL_COMPATIBILITY_SUITE).FILES) \
+  $(cts_testcase_file)
+
+$(my_register_name) : $(cts_testcase_file)
+endif # LOCAL_COMPATIBILITY_SUITE
 endif # LOCAL_PACKAGE_SPLITS
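The LOCAL_COMPATIBILITY_SUITE block above additionally copies each split apk into the suite's testcases output directory. A minimal sketch of the module side, with invented module and split names (only LOCAL_COMPATIBILITY_SUITE and LOCAL_PACKAGE_SPLITS are taken from this file):

# Hypothetical test package; names and split configs are illustrative only.
include $(CLEAR_VARS)
LOCAL_PACKAGE_NAME := ExampleSuiteTest
LOCAL_COMPATIBILITY_SUITE := cts
LOCAL_PACKAGE_SPLITS := hdpi xhdpi
include $(BUILD_PACKAGE)

Each resulting split (e.g. ExampleSuiteTest_hdpi.apk) would then be copied into $(COMPATIBILITY_TESTCASES_OUT_cts) by the rules above.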
 
 # Save information about this package
@@ -509,39 +657,6 @@
 
 PACKAGES := $(PACKAGES) $(LOCAL_PACKAGE_NAME)
 
-# Dist the files that can be bundled in system.img.
-# They include the jni shared libraries and the apk with jni libraries stripped.
-ifeq ($(LOCAL_DIST_BUNDLED_BINARIES),true)
-ifneq ($(filter $(LOCAL_PACKAGE_NAME),$(TARGET_BUILD_APPS)),)
-ifneq ($(strip $(jni_shared_libraries)),)
-dist_subdir := bundled_$(LOCAL_PACKAGE_NAME)
-$(foreach f, $(jni_shared_libraries), \
-  $(call dist-for-goals, apps_only, $(f):$(dist_subdir)/$(notdir $(f))))
-
-apk_jni_stripped := $(intermediates)/jni_stripped/package.apk
-$(apk_jni_stripped): PRIVATE_JNI_SHARED_LIBRARIES := $(notdir $(jni_shared_libraries))
-$(apk_jni_stripped) : $(LOCAL_BUILT_MODULE) | $(ZIPALIGN)
-	@rm -rf $(dir $@) && mkdir -p $(dir $@)
-	$(hide) cp $< $@
-	$(hide) zip -d $@ $(foreach f,$(PRIVATE_JNI_SHARED_LIBRARIES),\*/$(f))
-	$(align-package)
-
-$(call dist-for-goals, apps_only, $(apk_jni_stripped):$(dist_subdir)/$(LOCAL_PACKAGE_NAME).apk)
-
-endif  # jni_shared_libraries
-endif  # apps_only build
-endif  # LOCAL_DIST_BUNDLED_BINARIES
-
-# Lint phony targets
-.PHONY: lint-$(LOCAL_PACKAGE_NAME)
-lint-$(LOCAL_PACKAGE_NAME): PRIVATE_PATH := $(LOCAL_PATH)
-lint-$(LOCAL_PACKAGE_NAME): PRIVATE_LINT_FLAGS := $(LOCAL_LINT_FLAGS)
-lint-$(LOCAL_PACKAGE_NAME) :
-	@echo lint $(PRIVATE_PATH)
-	$(LINT) $(PRIVATE_LINT_FLAGS) $(PRIVATE_PATH)
-
-lintall : lint-$(LOCAL_PACKAGE_NAME)
-
 endif # skip_definition
 
 # Reset internal variables.
diff --git a/core/pathmap.mk b/core/pathmap.mk
index abbae2f..6161fd3 100644
--- a/core/pathmap.mk
+++ b/core/pathmap.mk
@@ -16,7 +16,14 @@
 
 #
 # A central place to define mappings to paths, to avoid hard-coding
-# them in Android.mk files.
+# them in Android.mk files. Not meant for header file include directories,
+# despite the fact that it was historically used for that!
+#
+# If you want this for a library's header files, use LOCAL_EXPORT_C_INCLUDES
+# instead. Then users of the library don't have to do anything --- they'll
+# have the correct header files added to their include path automatically.
+#
+
 #
 # TODO: Allow each project to define stuff like this before the per-module
 #       Android.mk files are included, so we don't need to have a big central
@@ -30,16 +37,9 @@
     camera:system/media/camera/include \
     frameworks-base:frameworks/base/include \
     frameworks-native:frameworks/native/include \
-    libc:bionic/libc/include \
     libhardware:hardware/libhardware/include \
     libhardware_legacy:hardware/libhardware_legacy/include \
-    libhost:build/libs/host/include \
-    libm:bionic/libm/include \
-    libnativehelper:libnativehelper/include \
-    libpagemap:system/extras/libpagemap/include \
     libril:hardware/ril/include \
-    libstdc++:bionic/libstdc++/include \
-    mkbootimg:system/core/mkbootimg \
     opengl-tests-includes:frameworks/native/opengl/tests/include \
     recovery:bootable/recovery \
     system-core:system/core/include \
@@ -67,7 +67,7 @@
 # Many modules expect to be able to say "#include <jni.h>",
 # so make it easy for them to find the correct path.
 #
-JNI_H_INCLUDE := $(call include-path-for,libnativehelper)/nativehelper
+JNI_H_INCLUDE := libnativehelper/include/nativehelper
 
 #
 # A list of all source roots under frameworks/base, which will be
@@ -158,8 +158,8 @@
 #
 FRAMEWORKS_DATA_BINDING_SUBDIRS := \
         baseLibrary/src/main \
-        library/src/main \
-        library/src/doc
+        extensions/library/src/main \
+        extensions/library/src/doc
 
 #
 # A version of FRAMEWORKS_DATA_BINDING_SUBDIRS that is expanded to full paths from
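The pathmap.mk header comment above steers libraries toward LOCAL_EXPORT_C_INCLUDES instead of pathmap entries. A minimal sketch, with made-up module names, of how that looks in practice:

# Library side (hypothetical module): export the public headers.
include $(CLEAR_VARS)
LOCAL_MODULE := libexample
LOCAL_SRC_FILES := example.cpp
LOCAL_EXPORT_C_INCLUDES := $(LOCAL_PATH)/include
include $(BUILD_SHARED_LIBRARY)

# Client side: no include-path-for/pathmap entry needed; the exported
# include directory is added to the client's compile flags automatically.
include $(CLEAR_VARS)
LOCAL_MODULE := example_client
LOCAL_SRC_FILES := client.cpp
LOCAL_SHARED_LIBRARIES := libexample
include $(BUILD_EXECUTABLE)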
diff --git a/core/pdk_config.mk b/core/pdk_config.mk
index 262b50e..c0aaacd 100644
--- a/core/pdk_config.mk
+++ b/core/pdk_config.mk
@@ -1,47 +1,4 @@
 # This file defines the rule to fuse the platform.zip into the current PDK build.
-
-.PHONY: pdk fusion
-pdk fusion: $(DEFAULT_GOAL)
-
-# What to build:
-# pdk fusion if:
-# 1) PDK_FUSION_PLATFORM_ZIP is passed in from the environment
-# or
-# 2) the platform.zip exists in the default location
-# or
-# 3) fusion is a command line build goal,
-#    PDK_FUSION_PLATFORM_ZIP is needed anyway, then do we need the 'fusion' goal?
-# otherwise pdk only if:
-# 1) pdk is a command line build goal
-# or
-# 2) TARGET_BUILD_PDK is passed in from the environment
-
-# if PDK_FUSION_PLATFORM_ZIP is specified, do not override.
-ifndef PDK_FUSION_PLATFORM_ZIP
-# Most PDK project paths should be using vendor/pdk/TARGET_DEVICE
-# but some legacy ones (e.g. mini_armv7a_neon generic PDK) were setup
-# with vendor/pdk/TARGET_PRODUCT.
-_pdk_fusion_default_platform_zip = $(wildcard \
-vendor/pdk/$(TARGET_DEVICE)/$(TARGET_PRODUCT)-$(TARGET_BUILD_VARIANT)/platform/platform.zip \
-vendor/pdk/$(TARGET_DEVICE)/$(patsubst aosp_%,full_%,$(TARGET_PRODUCT))-$(TARGET_BUILD_VARIANT)/platform/platform.zip \
-vendor/pdk/$(TARGET_PRODUCT)/$(TARGET_PRODUCT)-$(TARGET_BUILD_VARIANT)/platform/platform.zip \
-vendor/pdk/$(TARGET_PRODUCT)/$(patsubst aosp_%,full_%,$(TARGET_PRODUCT))-$(TARGET_BUILD_VARIANT)/platform/platform.zip)
-ifneq (,$(_pdk_fusion_default_platform_zip))
-PDK_FUSION_PLATFORM_ZIP := $(word 1, $(_pdk_fusion_default_platform_zip))
-TARGET_BUILD_PDK := true
-$(info $(PDK_FUSION_PLATFORM_ZIP) found, do a PDK fusion build.)
-endif # _pdk_fusion_default_platform_zip
-endif # !PDK_FUSION_PLATFORM_ZIP
-
-ifneq (,$(filter pdk fusion, $(MAKECMDGOALS)))
-TARGET_BUILD_PDK := true
-ifneq (,$(filter fusion, $(MAKECMDGOALS)))
-ifndef PDK_FUSION_PLATFORM_ZIP
-  $(error Specify PDK_FUSION_PLATFORM_ZIP to do a PDK fusion.)
-endif
-endif  # fusion
-endif  # pdk or fusion
-
 PDK_PLATFORM_JAVA_ZIP_JAVA_TARGET_LIB_DIR :=
 PDK_PLATFORM_JAVA_ZIP_JAVA_HOST_LIB_DIR := \
 	host/common/obj/JAVA_LIBRARIES/bouncycastle-host_intermediates
@@ -58,6 +15,7 @@
   target/common/obj/JAVA_LIBRARIES/android_stubs_current_intermediates \
   target/common/obj/JAVA_LIBRARIES/bouncycastle_intermediates \
   target/common/obj/JAVA_LIBRARIES/conscrypt_intermediates \
+  target/common/obj/JAVA_LIBRARIES/core-oj_intermediates \
   target/common/obj/JAVA_LIBRARIES/core-libart_intermediates \
   target/common/obj/JAVA_LIBRARIES/core-junit_intermediates \
   target/common/obj/JAVA_LIBRARIES/ext_intermediates \
@@ -78,7 +36,9 @@
 	$(PDK_PLATFORM_JAVA_ZIP_JAVA_HOST_LIB_DIR)
 
 PDK_PLATFORM_JAVA_ZIP_CONTENTS += $(foreach lib_dir,$(PDK_PLATFORM_JAVA_ZIP_JAVA_LIB_DIR),\
-    $(lib_dir)/classes.jack $(lib_dir)/classes.jar $(lib_dir)/javalib.jar)
+    $(lib_dir)/classes.jack $(lib_dir)/classes.jar $(lib_dir)/classes.jar.toc \
+    $(lib_dir)/javalib.jar  $(lib_dir)/classes*.dex \
+    $(lib_dir)/classes.dex.toc )
 
 # check and override java support level
 ifneq ($(TARGET_BUILD_PDK)$(PDK_FUSION_PLATFORM_ZIP),)
@@ -91,11 +51,6 @@
 endif # PDK
 
 ifdef PDK_FUSION_PLATFORM_ZIP
-TARGET_BUILD_PDK := true
-ifeq (,$(wildcard $(PDK_FUSION_PLATFORM_ZIP)))
-  $(error Cannot find file $(PDK_FUSION_PLATFORM_ZIP).)
-endif
-
 _pdk_fusion_intermediates := $(call intermediates-dir-for, PACKAGING, pdk_fusion)
 _pdk_fusion_stamp := $(_pdk_fusion_intermediates)/pdk_fusion.stamp
 
@@ -149,10 +104,8 @@
 endif
 
 define JAVA_dependency_template
-$(PDK_FUSION_OUT_DIR)/$(strip $(1)): $(_pdk_fusion_intermediates)/$(strip $(1)) \
-  $(PDK_FUSION_OUT_DIR)/$(strip $(2)) $(_pdk_fusion_stamp)
-	@mkdir -p $$(dir $$@)
-	$(hide) cp -fpPR $$< $$@
+$(call add-dependency,$(PDK_FUSION_OUT_DIR)/$(strip $(1)),\
+  $(foreach d,$(filter $(2),$(_pdk_fusion_java_file_list)),$(PDK_FUSION_OUT_DIR)/$(d)))
 endef
 
 # needs explicit dependency as package-export.apk is not explicitly pulled
@@ -165,6 +118,14 @@
 $(eval $(call JAVA_dependency_template,$(lib_dir)/javalib.jar,\
 $(lib_dir)/classes.jar)))
 
+# pull .jack and .dex files
+$(foreach lib_dir,$(PDK_PLATFORM_JAVA_ZIP_JAVA_TARGET_LIB_DIR),\
+  $(eval $(call JAVA_dependency_template,$(lib_dir)/classes.jar.toc,\
+    $(lib_dir)/classes.jar $(lib_dir)/classes.jack)))
+$(foreach lib_dir,$(PDK_PLATFORM_JAVA_ZIP_JAVA_TARGET_LIB_DIR),\
+  $(eval $(call JAVA_dependency_template,$(lib_dir)/classes.dex.toc,\
+    $(lib_dir)/classes.jar $(lib_dir)/classes.jack $(lib_dir)/classes%.dex)))
+
 # implicit rules for all other target files
 $(TARGET_COMMON_OUT_ROOT)/% : $(_pdk_fusion_intermediates)/target/common/% $(_pdk_fusion_stamp)
 	@mkdir -p $(dir $@)
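The rewritten JAVA_dependency_template above leans on the add-dependency helper from definitions.mk instead of spelling out a copy rule. Conceptually that helper is just a two-argument template that emits a prerequisite line; a rough sketch (see definitions.mk for the real definition):

# $(1): target, $(2): extra prerequisites of that target.
define add-dependency
$(1): $(2)
endef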
diff --git a/core/pdk_fusion_modules.mk b/core/pdk_fusion_modules.mk
new file mode 100644
index 0000000..49b30dc
--- /dev/null
+++ b/core/pdk_fusion_modules.mk
@@ -0,0 +1,78 @@
+# Auto-generate module definitions from platform.zip.
+# We use these rules to rebuild .odex files of the .jar/.apk inside the platform.zip.
+#
+
+pdk_dexpreopt_config_mk := $(TARGET_OUT_INTERMEDIATES)/pdk_dexpreopt_config.mk
+
+$(shell rm -f $(pdk_dexpreopt_config_mk) && mkdir -p $(dir $(pdk_dexpreopt_config_mk)) && \
+        unzip -qo $(PDK_FUSION_PLATFORM_ZIP) -d $(dir $(pdk_dexpreopt_config_mk)) pdk_dexpreopt_config.mk 2>/dev/null)
+
+-include $(pdk_dexpreopt_config_mk)
+
+# Define a PDK prebuilt module that comes from platform.zip.
+# Must be called with $(eval)
+define prebuilt-pdk-java-module
+include $(CLEAR_VARS)
+LOCAL_MODULE:=$(1)
+LOCAL_MODULE_CLASS:=$(2)
+# Use LOCAL_PREBUILT_MODULE_FILE instead of LOCAL_SRC_FILES so we don't need to deal with LOCAL_PATH.
+LOCAL_PREBUILT_MODULE_FILE:=$(3)
+LOCAL_DEX_PREOPT:=$(4)
+LOCAL_MULTILIB:=$(5)
+LOCAL_DEX_PREOPT_FLAGS:=$(6)
+LOCAL_BUILT_MODULE_STEM:=$(7)
+LOCAL_MODULE_SUFFIX:=$(suffix $(7))
+LOCAL_PRIVILEGED_MODULE:=$(8)
+LOCAL_PROPRIETARY_MODULE:=$(9)
+LOCAL_MODULE_TARGET_ARCH:=$(10)
+LOCAL_REPLACE_PREBUILT_APK_INSTALLED:=$(11)
+LOCAL_CERTIFICATE:=PRESIGNED
+include $(BUILD_PREBUILT)
+
+# The source prebuilts are extracted by the rule for _pdk_fusion_stamp.
+# Use a touch rule to establish the dependency.
+$(3) $(11) : $(_pdk_fusion_stamp)
+	$(hide) if [ ! -f $$@ ]; then \
+	  echo 'Error: $$@ does not exist. Check your platform.zip.' 1>&2; \
+	  exit 1; \
+	fi
+	$(hide) touch $$@
+endef
+
+# We don't have a LOCAL_PATH for the auto-generated modules, so let it be the $(BUILD_SYSTEM).
+LOCAL_PATH := $(BUILD_SYSTEM)
+
+##### Java libraries.
+# Only set up rules for modules that aren't built from source.
+pdk_prebuilt_libraries := $(foreach l,$(PDK.DEXPREOPT.JAVA_LIBRARIES),\
+  $(if $(MODULE.TARGET.JAVA_LIBRARIES.$(l)),,$(l)))
+
+$(foreach l,$(pdk_prebuilt_libraries), $(eval \
+  $(call prebuilt-pdk-java-module,\
+    $(l),\
+    JAVA_LIBRARIES,\
+    $(_pdk_fusion_intermediates)/$(PDK.DEXPREOPT.$(l).SRC),\
+    $(PDK.DEXPREOPT.$(l).DEX_PREOPT),\
+    $(PDK.DEXPREOPT.$(l).MULTILIB),\
+    $(PDK.DEXPREOPT.$(l).DEX_PREOPT_FLAGS),\
+    javalib.jar,\
+    )))
+
+###### Apps.
+pdk_prebuilt_apps := $(foreach a,$(PDK.DEXPREOPT.APPS),\
+  $(if $(MODULE.TARGET.APPS.$(a)),,$(a)))
+
+$(foreach a,$(pdk_prebuilt_apps), $(eval \
+  $(call prebuilt-pdk-java-module,\
+    $(a),\
+    APPS,\
+    $(_pdk_fusion_intermediates)/$(PDK.DEXPREOPT.$(a).SRC),\
+    $(PDK.DEXPREOPT.$(a).DEX_PREOPT),\
+    $(PDK.DEXPREOPT.$(a).MULTILIB),\
+    $(PDK.DEXPREOPT.$(a).DEX_PREOPT_FLAGS),\
+    package.apk,\
+    $(PDK.DEXPREOPT.$(a).PRIVILEGED_MODULE),\
+    $(PDK.DEXPREOPT.$(a).PROPRIETARY_MODULE),\
+    $(PDK.DEXPREOPT.$(a).TARGET_ARCH),\
+    $(_pdk_fusion_intermediates)/$(PDK.DEXPREOPT.$(a).STRIPPED_SRC),\
+    )))
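The loops above are driven entirely by the PDK.DEXPREOPT.* variables read from pdk_dexpreopt_config.mk inside platform.zip. As a hedged illustration, with an invented library name and values, that config might carry entries along these lines:

# Hypothetical snippet of pdk_dexpreopt_config.mk for a prebuilt library "foo":
PDK.DEXPREOPT.JAVA_LIBRARIES := foo
PDK.DEXPREOPT.foo.SRC := target/common/obj/JAVA_LIBRARIES/foo_intermediates/javalib.jar
PDK.DEXPREOPT.foo.DEX_PREOPT := true
PDK.DEXPREOPT.foo.MULTILIB := first
PDK.DEXPREOPT.foo.DEX_PREOPT_FLAGS :=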
diff --git a/core/phony_package.mk b/core/phony_package.mk
index 30e198c..866b13c 100644
--- a/core/phony_package.mk
+++ b/core/phony_package.mk
@@ -7,7 +7,7 @@
 
 include $(BUILD_SYSTEM)/base_rules.mk
 
-$(LOCAL_BUILT_MODULE): $(LOCAL_MODULE_MAKEFILE) $(LOCAL_ADDITIONAL_DEPENDENCIES)
+$(LOCAL_BUILT_MODULE): $(LOCAL_MODULE_MAKEFILE_DEP) $(LOCAL_ADDITIONAL_DEPENDENCIES)
 	$(hide) echo "Fake: $@"
 	$(hide) mkdir -p $(dir $@)
 	$(hide) touch $@
diff --git a/core/post_clean.mk b/core/post_clean.mk
index 0273ff2..f08abff 100644
--- a/core/post_clean.mk
+++ b/core/post_clean.mk
@@ -54,27 +54,30 @@
 current_all_packages_config :=
 
 #######################################################
-# Check if we need to delete obsolete aidl-generated java files.
-# When an aidl file gets deleted (or renamed), the generated java file is obsolete.
-previous_aidl_config := $(TARGET_OUT_COMMON_INTERMEDIATES)/previous_aidl_config.mk
-current_aidl_config := $(TARGET_OUT_COMMON_INTERMEDIATES)/current_aidl_config.mk
+# Check if we need to delete obsolete generated java files.
+# When an aidl/proto/etc file gets deleted (or renamed), the generated java file is obsolete.
+previous_gen_java_config := $(TARGET_OUT_COMMON_INTERMEDIATES)/previous_gen_java_config.mk
+current_gen_java_config := $(TARGET_OUT_COMMON_INTERMEDIATES)/current_gen_java_config.mk
 
-$(shell rm -rf $(current_aidl_config) \
-  && mkdir -p $(dir $(current_aidl_config))\
-  && touch $(current_aidl_config))
--include $(previous_aidl_config)
+$(shell rm -rf $(current_gen_java_config) \
+  && mkdir -p $(dir $(current_gen_java_config))\
+  && touch $(current_gen_java_config))
+-include $(previous_gen_java_config)
 
 intermediates_to_clean :=
-modules_with_aidl_files :=
+modules_with_gen_java_files :=
 $(foreach p, $(ALL_MODULES), \
-  $(if $(ALL_MODULES.$(p).AIDL_FILES),\
-    $(eval modules_with_aidl_files += $(p))\
-    $(shell echo 'AIDL_FILES.$(p) := $(ALL_MODULES.$(p).AIDL_FILES)' >> $(current_aidl_config)))\
-  $(if $(filter-out $(ALL_MODULES.$(p).AIDL_FILES),$(AIDL_FILES.$(p))),\
+  $(eval gs := $(strip $(ALL_MODULES.$(p).AIDL_FILES)\
+                       $(ALL_MODULES.$(p).PROTO_FILES)\
+                       $(ALL_MODULES.$(p).RS_FILES)))\
+  $(if $(gs),\
+    $(eval modules_with_gen_java_files += $(p))\
+    $(shell echo 'GEN_SRC_FILES.$(p) := $(gs)' >> $(current_gen_java_config)))\
+  $(if $(filter-out $(gs),$(GEN_SRC_FILES.$(p))),\
     $(eval intermediates_to_clean += $(ALL_MODULES.$(p).INTERMEDIATE_SOURCE_DIR))))
 intermediates_to_clean := $(strip $(intermediates_to_clean))
 ifdef intermediates_to_clean
-$(info *** Obsolete aidl-generated files detected, clean intermediate files...)
+$(info *** Obsolete generated java files detected, clean intermediate files...)
 $(info *** rm -rf $(intermediates_to_clean))
 $(shell rm -rf $(intermediates_to_clean))
 intermediates_to_clean :=
@@ -82,15 +85,15 @@
 
 # For modules not loaded by the current build (e.g. you are running mm/mmm),
 # we copy the info from the previous build.
-$(foreach p, $(filter-out $(ALL_MODULES),$(MODULES_WITH_AIDL_FILES)),\
-  $(shell echo 'AIDL_FILES.$(p) := $(AIDL_FILES.$(p))' >> $(current_aidl_config)))
-MODULES_WITH_AIDL_FILES := $(sort $(MODULES_WITH_AIDL_FILES) $(modules_with_aidl_files))
-$(shell echo 'MODULES_WITH_AIDL_FILES := $(MODULES_WITH_AIDL_FILES)' >> $(current_aidl_config))
+$(foreach p, $(filter-out $(ALL_MODULES),$(MODULES_WITH_GEN_JAVA_FILES)),\
+  $(shell echo 'GEN_SRC_FILES.$(p) := $(GEN_SRC_FILES.$(p))' >> $(current_gen_java_config)))
+MODULES_WITH_GEN_JAVA_FILES := $(sort $(MODULES_WITH_GEN_JAVA_FILES) $(modules_with_gen_java_files))
+$(shell echo 'MODULES_WITH_GEN_JAVA_FILES := $(MODULES_WITH_GEN_JAVA_FILES)' >> $(current_gen_java_config))
 
 # Now current becomes previous.
-$(shell mv -f $(current_aidl_config) $(previous_aidl_config))
+$(shell cmp $(current_gen_java_config) $(previous_gen_java_config) > /dev/null 2>&1 || mv -f $(current_gen_java_config) $(previous_gen_java_config))
 
-MODULES_WITH_AIDL_FILES :=
-modules_with_aidl_files :=
-previous_aidl_config :=
-current_aidl_config :=
+MODULES_WITH_GEN_JAVA_FILES :=
+modules_with_gen_java_files :=
+previous_gen_java_config :=
+current_gen_java_config :=
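The net effect of the post_clean.mk rewrite is that the tracking file now records every kind of generated-java source per module (aidl, proto, renderscript), not just aidl. A hedged example of what the generated previous_gen_java_config.mk might contain, with invented module names:

# Hypothetical contents of previous_gen_java_config.mk:
GEN_SRC_FILES.framework := core/java/android/os/IFoo.aidl core/proto/bar.proto
GEN_SRC_FILES.ExampleApp := src/com/example/IBaz.aidl
MODULES_WITH_GEN_JAVA_FILES := ExampleApp framework

If a previously recorded file disappears from a module's list, that module's INTERMEDIATE_SOURCE_DIR is wiped so stale generated .java files cannot linger.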
diff --git a/core/prebuilt.mk b/core/prebuilt.mk
index d3f9d1a..428922b 100644
--- a/core/prebuilt.mk
+++ b/core/prebuilt.mk
@@ -8,6 +8,7 @@
 
 ifdef LOCAL_IS_HOST_MODULE
   my_prefix := HOST_
+  LOCAL_HOST_PREFIX :=
 else
   my_prefix := TARGET_
 endif
@@ -41,9 +42,6 @@
 OVERRIDE_BUILT_MODULE_PATH :=
 LOCAL_BUILT_MODULE :=
 LOCAL_INSTALLED_MODULE :=
-LOCAL_MODULE_STEM :=
-LOCAL_BUILT_MODULE_STEM :=
-LOCAL_INSTALLED_MODULE_STEM :=
 LOCAL_INTERMEDIATE_TARGETS :=
 include $(BUILD_SYSTEM)/prebuilt_internal.mk
 endif # $(my_module_arch_supported)
@@ -52,4 +50,39 @@
 
 LOCAL_2ND_ARCH_VAR_PREFIX :=
 
+ifdef LOCAL_IS_HOST_MODULE
+ifdef HOST_CROSS_OS
+ifneq (,$(filter EXECUTABLES STATIC_LIBRARIES SHARED_LIBRARIES,$(LOCAL_MODULE_CLASS)))
+my_prefix := HOST_CROSS_
+LOCAL_HOST_PREFIX := $(my_prefix)
+include $(BUILD_SYSTEM)/module_arch_supported.mk
+ifeq ($(my_module_arch_supported),true)
+# host cross compilation is supported
+OVERRIDE_BUILT_MODULE_PATH :=
+LOCAL_BUILT_MODULE :=
+LOCAL_INSTALLED_MODULE :=
+LOCAL_INTERMEDIATE_TARGETS :=
+include $(BUILD_SYSTEM)/prebuilt_internal.mk
+endif
+LOCAL_HOST_PREFIX :=
+endif
+
+ifdef HOST_CROSS_2ND_ARCH
+my_prefix := HOST_CROSS_
+LOCAL_2ND_ARCH_VAR_PREFIX := $($(my_prefix)2ND_ARCH_VAR_PREFIX)
+LOCAL_HOST_PREFIX := $(my_prefix)
+include $(BUILD_SYSTEM)/module_arch_supported.mk
+ifeq ($(my_module_arch_supported),true)
+OVERRIDE_BUILT_MODULE_PATH :=
+LOCAL_BUILT_MODULE :=
+LOCAL_INSTALLED_MODULE :=
+LOCAL_INTERMEDIATE_TARGETS :=
+include $(BUILD_SYSTEM)/prebuilt_internal.mk
+endif
+LOCAL_HOST_PREFIX :=
+LOCAL_2ND_ARCH_VAR_PREFIX :=
+endif
+endif
+endif
+
 my_module_arch_supported :=
diff --git a/core/prebuilt_internal.mk b/core/prebuilt_internal.mk
index 0406353..ee68427 100644
--- a/core/prebuilt_internal.mk
+++ b/core/prebuilt_internal.mk
@@ -26,27 +26,41 @@
 else
   ifdef LOCAL_SRC_FILES_$($(my_prefix)$(LOCAL_2ND_ARCH_VAR_PREFIX)ARCH)
     my_prebuilt_src_file := $(LOCAL_PATH)/$(LOCAL_SRC_FILES_$($(my_prefix)$(LOCAL_2ND_ARCH_VAR_PREFIX)ARCH))
+    LOCAL_SRC_FILES_$($(my_prefix)$(LOCAL_2ND_ARCH_VAR_PREFIX)ARCH) :=
   else
     ifdef LOCAL_SRC_FILES_$(my_32_64_bit_suffix)
       my_prebuilt_src_file := $(LOCAL_PATH)/$(LOCAL_SRC_FILES_$(my_32_64_bit_suffix))
+      LOCAL_SRC_FILES_$(my_32_64_bit_suffix) :=
     else
       my_prebuilt_src_file := $(LOCAL_PATH)/$(LOCAL_SRC_FILES)
+      LOCAL_SRC_FILES :=
     endif
   endif
 endif
 
+my_strip_module := $(firstword \
+  $(LOCAL_STRIP_MODULE_$($(my_prefix)$(LOCAL_2ND_ARCH_VAR_PREFIX)ARCH)) \
+  $(LOCAL_STRIP_MODULE))
+my_pack_module_relocations := $(firstword \
+  $(LOCAL_PACK_MODULE_RELOCATIONS_$($(my_prefix)$(LOCAL_2ND_ARCH_VAR_PREFIX)ARCH)) \
+  $(LOCAL_PACK_MODULE_RELOCATIONS))
+
 ifeq (SHARED_LIBRARIES,$(LOCAL_MODULE_CLASS))
   # Put the built targets of all shared libraries in a common directory
   # to simplify the link line.
   OVERRIDE_BUILT_MODULE_PATH := $($(LOCAL_2ND_ARCH_VAR_PREFIX)$(my_prefix)OUT_INTERMEDIATE_LIBRARIES)
-  ifeq ($(LOCAL_IS_HOST_MODULE)$(LOCAL_STRIP_MODULE),)
+  ifeq ($(LOCAL_IS_HOST_MODULE)$(my_strip_module),)
     # Strip but not try to add debuglink
-    LOCAL_STRIP_MODULE := no_debuglink
+    my_strip_module := no_debuglink
   endif
 
-  ifeq ($(LOCAL_IS_HOST_MODULE)$(LOCAL_PACK_MODULE_RELOCATIONS),)
+  ifeq ($(LOCAL_IS_HOST_MODULE)$(my_pack_module_relocations),)
     # Do not pack relocations by default
-    LOCAL_PACK_MODULE_RELOCATIONS := false
+    my_pack_module_relocations := false
+  endif
+
+  ifeq ($(DISABLE_RELOCATION_PACKER),true)
+    my_pack_module_relocations := false
   endif
 endif
 
@@ -63,12 +77,18 @@
 endif
 endif
 
+ifeq (JAVA_LIBRARIES,$(LOCAL_IS_HOST_MODULE)$(LOCAL_MODULE_CLASS)$(filter true,$(LOCAL_UNINSTALLABLE_MODULE)))
+  prebuilt_module_is_dex_javalib := true
+else
+  prebuilt_module_is_dex_javalib :=
+endif
+
 ifeq ($(LOCAL_MODULE_CLASS),APPS)
 LOCAL_BUILT_MODULE_STEM := package.apk
 LOCAL_INSTALLED_MODULE_STEM := $(LOCAL_MODULE).apk
 endif
 
-ifneq ($(filter true no_debuglink,$(LOCAL_STRIP_MODULE) $(LOCAL_PACK_MODULE_RELOCATIONS)),)
+ifneq ($(filter true no_debuglink,$(my_strip_module) $(my_pack_module_relocations)),)
   ifdef LOCAL_IS_HOST_MODULE
     $(error Cannot strip/pack host module LOCAL_PATH=$(LOCAL_PATH))
   endif
@@ -78,17 +98,20 @@
   ifneq ($(LOCAL_PREBUILT_STRIP_COMMENTS),)
     $(error Cannot strip/pack scripts LOCAL_PATH=$(LOCAL_PATH))
   endif
+  # Set the arch-specific variables to set up the strip/pack rules.
+  LOCAL_STRIP_MODULE_$($(my_prefix)$(LOCAL_2ND_ARCH_VAR_PREFIX)ARCH) := $(my_strip_module)
+  LOCAL_PACK_MODULE_RELOCATIONS_$($(my_prefix)$(LOCAL_2ND_ARCH_VAR_PREFIX)ARCH) := $(my_pack_module_relocations)
   include $(BUILD_SYSTEM)/dynamic_binary.mk
   built_module := $(linked_module)
 
-else  # LOCAL_STRIP_MODULE and LOCAL_PACK_MODULE_RELOCATIONS not true
+else  # my_strip_module and my_pack_module_relocations not true
   include $(BUILD_SYSTEM)/base_rules.mk
   built_module := $(LOCAL_BUILT_MODULE)
 
 ifdef prebuilt_module_is_a_library
 export_includes := $(intermediates)/export_includes
 $(export_includes): PRIVATE_EXPORT_C_INCLUDE_DIRS := $(LOCAL_EXPORT_C_INCLUDE_DIRS)
-$(export_includes) : $(LOCAL_MODULE_MAKEFILE)
+$(export_includes) : $(LOCAL_MODULE_MAKEFILE_DEP)
 	@echo Export includes file: $< -- $@
 	$(hide) mkdir -p $(dir $@) && rm -f $@
 ifdef LOCAL_EXPORT_C_INCLUDE_DIRS
@@ -123,8 +146,8 @@
 endif
 
 # We need to enclose the above export_includes and my_built_shared_libraries in
-# "LOCAL_STRIP_MODULE not true" because otherwise the rules are defined in dynamic_binary.mk.
-endif  # LOCAL_STRIP_MODULE not true
+# "my_strip_module not true" because otherwise the rules are defined in dynamic_binary.mk.
+endif  # my_strip_module not true
 
 ifeq ($(LOCAL_MODULE_CLASS),APPS)
 PACKAGES.$(LOCAL_MODULE).OVERRIDES := $(strip $(LOCAL_OVERRIDES_PACKAGES))
@@ -186,16 +209,23 @@
 endif
 
 # Disable dex-preopt of prebuilts to save space, if requested.
+ifndef LOCAL_DEX_PREOPT
 ifeq ($(DONT_DEXPREOPT_PREBUILTS),true)
 LOCAL_DEX_PREOPT := false
 endif
+endif
 
 #######################################
 # defines built_odex along with rule to install odex
 include $(BUILD_SYSTEM)/dex_preopt_odex_install.mk
 #######################################
-# Sign and align non-presigned .apks.
+ifneq ($(LOCAL_REPLACE_PREBUILT_APK_INSTALLED),)
+# There is a replacement for the prebuilt .apk we can install without any processing.
+$(built_module) : $(LOCAL_REPLACE_PREBUILT_APK_INSTALLED) | $(ACP)
+	$(transform-prebuilt-to-target)
 
+else  # ! LOCAL_REPLACE_PREBUILT_APK_INSTALLED
+# Sign and align non-presigned .apks.
 # The embedded prebuilt jni to uncompress.
 ifeq ($(LOCAL_CERTIFICATE),PRESIGNED)
 # For PRESIGNED apks we must uncompress every .so file:
@@ -209,19 +239,28 @@
 endif
 $(built_module): PRIVATE_EMBEDDED_JNI_LIBS := $(embedded_prebuilt_jni_libs)
 
-$(built_module) : $(my_prebuilt_src_file) | $(ACP) $(ZIPALIGN) $(SIGNAPK_JAR)
+$(built_module) : $(my_prebuilt_src_file) | $(ACP) $(ZIPALIGN) $(SIGNAPK_JAR) $(AAPT)
 	$(transform-prebuilt-to-target)
 	$(uncompress-shared-libs)
+ifdef LOCAL_DEX_PREOPT
+ifneq ($(BUILD_PLATFORM_ZIP),)
+	@# Keep a copy of apk with classes.dex unstripped
+	$(hide) cp -f $@ $(dir $@)package.dex.apk
+endif  # BUILD_PLATFORM_ZIP
+endif  # LOCAL_DEX_PREOPT
 ifneq ($(LOCAL_CERTIFICATE),PRESIGNED)
 	@# Only strip out files if we can re-sign the package.
 ifdef LOCAL_DEX_PREOPT
 ifneq (nostripping,$(LOCAL_DEX_PREOPT))
 	$(call dexpreopt-remove-classes.dex,$@)
-endif
-endif
+endif  # LOCAL_DEX_PREOPT != nostripping
+endif  # LOCAL_DEX_PREOPT
 	$(sign-package)
-endif
+	# No need for align-package because sign-package takes care of alignment
+else  # LOCAL_CERTIFICATE == PRESIGNED
 	$(align-package)
+endif  # LOCAL_CERTIFICATE
+endif  # ! LOCAL_REPLACE_PREBUILT_APK_INSTALLED
 
 ###############################
 ## Rule to build the odex file
@@ -237,7 +276,7 @@
 built_apk_splits := $(addprefix $(built_module_path)/,$(notdir $(LOCAL_PACKAGE_SPLITS)))
 installed_apk_splits := $(addprefix $(my_module_path)/,$(notdir $(LOCAL_PACKAGE_SPLITS)))
 
-# Rules to sign and zipalign the split apks.
+# Rules to sign the split apks.
 my_src_dir := $(sort $(dir $(LOCAL_PACKAGE_SPLITS)))
 ifneq (1,$(words $(my_src_dir)))
 $(error You must put all the split source apks in the same folder: $(LOCAL_PACKAGE_SPLITS))
@@ -246,10 +285,9 @@
 
 $(built_apk_splits) : PRIVATE_PRIVATE_KEY := $(LOCAL_CERTIFICATE).pk8
 $(built_apk_splits) : PRIVATE_CERTIFICATE := $(LOCAL_CERTIFICATE).x509.pem
-$(built_apk_splits) : $(built_module_path)/%.apk : $(my_src_dir)/%.apk | $(ACP)
+$(built_apk_splits) : $(built_module_path)/%.apk : $(my_src_dir)/%.apk | $(ACP) $(AAPT)
 	$(copy-file-to-new-target)
 	$(sign-package)
-	$(align-package)
 
 # Rules to install the split apks.
 $(installed_apk_splits) : $(my_module_path)/%.apk : $(built_module_path)/%.apk | $(ACP)
@@ -267,57 +305,155 @@
 endif # LOCAL_PACKAGE_SPLITS
 
 else # LOCAL_MODULE_CLASS != APPS
+ifeq ($(prebuilt_module_is_dex_javalib),true)
+# This is a target shared library, i.e. a jar with classes.dex.
+#######################################
+# defines built_odex along with rule to install odex
+include $(BUILD_SYSTEM)/dex_preopt_odex_install.mk
+#######################################
+ifdef LOCAL_DEX_PREOPT
+ifneq ($(dexpreopt_boot_jar_module),) # boot jar
+# boot jar's rules are defined in dex_preopt.mk
+dexpreopted_boot_jar := $(DEXPREOPT_BOOT_JAR_DIR_FULL_PATH)/$(dexpreopt_boot_jar_module)_nodex.jar
+$(built_module) : $(dexpreopted_boot_jar) | $(ACP)
+	$(call copy-file-to-target)
+
+# For libart boot jars, we don't have .odex files.
+else # ! boot jar
+$(built_odex): PRIVATE_MODULE := $(LOCAL_MODULE)
+# Use pattern rule - we may have multiple built odex files.
+$(built_odex) : $(dir $(LOCAL_BUILT_MODULE))% : $(my_prebuilt_src_file)
+	@echo "Dexpreopt Jar: $(PRIVATE_MODULE) ($@)"
+	$(call dexpreopt-one-file,$<,$@)
+
+$(built_module) : $(my_prebuilt_src_file) | $(ACP)
+	$(call copy-file-to-target)
+ifneq (nostripping,$(LOCAL_DEX_PREOPT))
+	$(call dexpreopt-remove-classes.dex,$@)
+endif
+endif # boot jar
+else # ! LOCAL_DEX_PREOPT
+$(built_module) : $(my_prebuilt_src_file) | $(ACP)
+	$(call copy-file-to-target)
+endif # LOCAL_DEX_PREOPT
+
+else  # ! prebuilt_module_is_dex_javalib
 ifneq ($(LOCAL_PREBUILT_STRIP_COMMENTS),)
 $(built_module) : $(my_prebuilt_src_file)
 	$(transform-prebuilt-to-target-strip-comments)
-else
+ifeq ($(LOCAL_MODULE_CLASS),EXECUTABLES)
+	$(hide) chmod +x $@
+endif
+else ifneq ($(LOCAL_ACP_UNAVAILABLE),true)
 $(built_module) : $(my_prebuilt_src_file) | $(ACP)
 	$(transform-prebuilt-to-target)
+ifeq ($(LOCAL_MODULE_CLASS),EXECUTABLES)
+	$(hide) chmod +x $@
 endif
+else
+$(built_module) : $(my_prebuilt_src_file)
+	$(copy-file-to-target-with-cp)
+ifeq ($(LOCAL_MODULE_CLASS),EXECUTABLES)
+	$(hide) chmod +x $@
+endif
+endif
+endif # ! prebuilt_module_is_dex_javalib
 endif # LOCAL_MODULE_CLASS != APPS
 
-ifeq ($(LOCAL_IS_HOST_MODULE)$(LOCAL_MODULE_CLASS),JAVA_LIBRARIES)
+ifeq ($(LOCAL_MODULE_CLASS),JAVA_LIBRARIES)
+my_src_jar := $(my_prebuilt_src_file)
+ifeq ($(LOCAL_IS_HOST_MODULE),)
 # for target java libraries, the LOCAL_BUILT_MODULE is in a product-specific dir,
 # while the deps should be in the common dir, so we make a copy in the common dir.
-# For nonstatic library, $(common_javalib_jar) is the dependency file,
-# while $(common_classes_jar) is used to link.
 common_classes_jar := $(intermediates.COMMON)/classes.jar
 common_javalib_jar := $(intermediates.COMMON)/javalib.jar
 
 $(common_classes_jar) $(common_javalib_jar): PRIVATE_MODULE := $(LOCAL_MODULE)
 
-ifneq ($(filter %.aar, $(my_prebuilt_src_file)),)
+ifeq ($(prebuilt_module_is_dex_javalib),true)
+# For prebuilt shared Java library we don't have classes.jar.
+$(common_javalib_jar) : $(my_src_jar) | $(ACP)
+	$(transform-prebuilt-to-target)
+
+else  # ! prebuilt_module_is_dex_javalib
+
+my_src_aar := $(filter %.aar, $(my_prebuilt_src_file))
+ifneq ($(my_src_aar),)
 # This is .aar file, archive of classes.jar and Android resources.
 my_src_jar := $(intermediates.COMMON)/aar/classes.jar
 
-$(my_src_jar) : $(my_prebuilt_src_file)
+$(my_src_jar) : $(my_src_aar)
 	$(hide) rm -rf $(dir $@) && mkdir -p $(dir $@)
 	$(hide) unzip -qo -d $(dir $@) $<
 	# Make sure the extracted classes.jar has a new timestamp.
 	$(hide) touch $@
 
-else
-# This is jar file.
-my_src_jar := $(my_prebuilt_src_file)
 endif
+
 $(common_classes_jar) : $(my_src_jar) | $(ACP)
 	$(transform-prebuilt-to-target)
 
 $(common_javalib_jar) : $(common_classes_jar) | $(ACP)
 	$(transform-prebuilt-to-target)
 
+$(call define-jar-to-toc-rule, $(common_classes_jar))
+
+ifdef LOCAL_USE_AAPT2
+ifneq ($(my_src_aar),)
+my_res_package := $(intermediates.COMMON)/package-res.apk
+
+# We only need a few of the PRIVATE variables and aapt2.mk input variables. Reset the unnecessary ones.
+$(my_res_package): PRIVATE_AAPT2_CFLAGS :=
+$(my_res_package): PRIVATE_ANDROID_MANIFEST := $(intermediates.COMMON)/aar/AndroidManifest.xml
+$(my_res_package): PRIVATE_AAPT_INCLUDES :=
+$(my_res_package): PRIVATE_SOURCE_INTERMEDIATES_DIR :=
+$(my_res_package): PRIVATE_PROGUARD_OPTIONS_FILE :=
+$(my_res_package): PRIVATE_DEFAULT_APP_TARGET_SDK :=
+$(my_res_package): PRIVATE_DEFAULT_APP_TARGET_SDK :=
+$(my_res_package): PRIVATE_PRODUCT_AAPT_CONFIG :=
+$(my_res_package): PRIVATE_PRODUCT_AAPT_PREF_CONFIG :=
+$(my_res_package): PRIVATE_TARGET_AAPT_CHARACTERISTICS :=
+
+full_android_manifest :=
+my_res_resources :=
+my_overlay_resources :=
+my_compiled_res_base_dir :=
+R_file_stamp :=
+proguard_options_file :=
+my_generated_res_dirs := $(intermediates.COMMON)/aar/res
+my_generated_res_dirs_deps := $(my_src_jar)
+include $(BUILD_SYSTEM)/aapt2.mk
+
+# Make sure my_res_package is created when you run mm/mmm.
+$(built_module) : $(my_res_package)
+endif  # $(my_src_aar)
+endif  # LOCAL_USE_AAPT2
 # make sure the classes.jar and javalib.jar are built before $(LOCAL_BUILT_MODULE)
 $(built_module) : $(common_javalib_jar)
-endif # TARGET JAVA_LIBRARIES
 
-ifeq ($(LOCAL_MODULE_CLASS),JAVA_LIBRARIES)
-$(intermediates.COMMON)/classes.jack : PRIVATE_JILL_FLAGS:=$(LOCAL_JILL_FLAGS)
-$(intermediates.COMMON)/classes.jack : $(my_src_jar) $(LOCAL_MODULE_MAKEFILE) \
-        $(LOCAL_ADDITIONAL_DEPENDENCIES) $(JILL_JAR) $(JACK_JAR) $(JACK_LAUNCHER_JAR)
+endif # ! prebuilt_module_is_dex_javalib
+endif # LOCAL_IS_HOST_MODULE is not set
+
+ifneq ($(prebuilt_module_is_dex_javalib),true)
+ifneq ($(LOCAL_JILL_FLAGS),)
+$(error LOCAL_JILL_FLAGS is not supported any more, please use jack options in LOCAL_JACK_FLAGS instead)
+endif
+
+# We may be building classes.jack from a host jar for a host dalvik Java library.
+$(intermediates.COMMON)/classes.jack : PRIVATE_JACK_FLAGS:=$(LOCAL_JACK_FLAGS)
+$(intermediates.COMMON)/classes.jack : PRIVATE_JACK_MIN_SDK_VERSION := 1
+$(intermediates.COMMON)/classes.jack : $(my_src_jar) $(LOCAL_MODULE_MAKEFILE_DEP) \
+        $(LOCAL_ADDITIONAL_DEPENDENCIES) $(JACK) | setup-jack-server
 	$(transform-jar-to-jack)
 
+# Update timestamps of .toc files for prebuilts so dependents will always
+# be rebuilt.
+$(intermediates.COMMON)/classes.dex.toc: $(intermediates.COMMON)/classes.jack
+	touch $@
+
+endif # ! prebuilt_module_is_dex_javalib
 endif # JAVA_LIBRARIES
 
-$(built_module) : $(LOCAL_MODULE_MAKEFILE) $(LOCAL_ADDITIONAL_DEPENDENCIES)
+$(built_module) : $(LOCAL_MODULE_MAKEFILE_DEP) $(LOCAL_ADDITIONAL_DEPENDENCIES)
 
 my_prebuilt_src_file :=
diff --git a/core/tasks/product-graph.mk b/core/product-graph.mk
similarity index 98%
rename from core/tasks/product-graph.mk
rename to core/product-graph.mk
index db2cf71..36e9037 100644
--- a/core/tasks/product-graph.mk
+++ b/core/product-graph.mk
@@ -34,7 +34,7 @@
 endef
 
 
-this_makefile := build/core/tasks/product-graph.mk
+this_makefile := build/core/product-graph.mk
 
 products_svg := $(OUT_DIR)/products.svg
 products_pdf := $(OUT_DIR)/products.pdf
diff --git a/core/product.mk b/core/product.mk
index e97cba4..5b6a86c 100644
--- a/core/product.mk
+++ b/core/product.mk
@@ -23,13 +23,21 @@
 # and the .mk suffix) of the product makefile, "<product_name>:" can be
 # omitted.
 
+# Search for AndroidProducts.mks in the given dir.
+# $(1): the path to the dir
+define _search-android-products-files-in-dir
+$(sort $(shell test -d $(1) && find -L $(1) \
+  -maxdepth 6 \
+  -name .git -prune \
+  -o -name AndroidProducts.mk -print))
+endef
+
 #
 # Returns the list of all AndroidProducts.mk files.
 # $(call ) isn't necessary.
 #
 define _find-android-products-files
-$(shell test -d device && find device -maxdepth 6 -name AndroidProducts.mk) \
-  $(shell test -d vendor && find vendor -maxdepth 6 -name AndroidProducts.mk) \
+$(foreach d, device vendor product,$(call _search-android-products-files-in-dir,$(d))) \
   $(SRC_TARGET_DIR)/product/AndroidProducts.mk
 endef
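The search above now also walks a top-level product/ directory and prunes .git while doing so. Each AndroidProducts.mk it finds is expected to list product makefiles; a hedged sketch of such a file, with invented paths and product names:

# device/acme/rocket/AndroidProducts.mk (hypothetical):
PRODUCT_MAKEFILES := \
    $(LOCAL_DIR)/rocket.mk \
    $(LOCAL_DIR)/rocket_eng.mk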
 
@@ -100,6 +108,7 @@
     PRODUCT_SUPPORTS_BOOT_SIGNER \
     PRODUCT_SUPPORTS_VBOOT \
     PRODUCT_SUPPORTS_VERITY \
+    PRODUCT_SUPPORTS_VERITY_FEC \
     PRODUCT_OEM_PROPERTIES \
     PRODUCT_SYSTEM_PROPERTY_BLACKLIST \
     PRODUCT_SYSTEM_SERVER_JARS \
@@ -111,6 +120,9 @@
     PRODUCT_DEX_PREOPT_MODULE_CONFIGS \
     PRODUCT_DEX_PREOPT_DEFAULT_FLAGS \
     PRODUCT_DEX_PREOPT_BOOT_FLAGS \
+    PRODUCT_SYSTEM_BASE_FS_PATH \
+    PRODUCT_VENDOR_BASE_FS_PATH \
+
 
 
 define dump-product
@@ -133,11 +145,14 @@
 #  3. Records that we've visited this node, in ALL_PRODUCTS
 #
 define inherit-product
+  $(if $(findstring ../,$(1)),\
+    $(eval np := $(call normalize-paths,$(1))),\
+    $(eval np := $(strip $(1))))\
   $(foreach v,$(_product_var_list), \
-      $(eval $(v) := $($(v)) $(INHERIT_TAG)$(strip $(1)))) \
+      $(eval $(v) := $($(v)) $(INHERIT_TAG)$(np))) \
   $(eval inherit_var := \
       PRODUCTS.$(strip $(word 1,$(_include_stack))).INHERITS_FROM) \
-  $(eval $(inherit_var) := $(sort $($(inherit_var)) $(strip $(1)))) \
+  $(eval $(inherit_var) := $(sort $($(inherit_var)) $(np))) \
   $(eval inherit_var:=) \
   $(eval ALL_PRODUCTS := $(sort $(ALL_PRODUCTS) $(word 1,$(_include_stack))))
 endef
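With the normalize-paths call added above, inherit-product tolerates paths containing ../ and records them under one canonical node name (assuming normalize-paths collapses dir/.. pairs). A small invented example:

# Hypothetical inherit via a relative spelling ...
$(call inherit-product, device/acme/rocket/../common/base.mk)
# ... which is recorded as if it had been written as:
#     device/acme/common/base.mk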
@@ -238,7 +253,6 @@
 	TARGET_NO_RECOVERY \
 	TARGET_NO_RADIOIMAGE \
 	TARGET_HARDWARE_3D \
-	TARGET_PROVIDES_INIT_RC \
 	TARGET_CPU_ABI \
 	TARGET_CPU_ABI2 \
 
@@ -274,6 +288,7 @@
 _product_stash_var_list += \
 	GLOBAL_CFLAGS_NO_OVERRIDE \
 	GLOBAL_CPPFLAGS_NO_OVERRIDE \
+	GLOBAL_CLANG_CFLAGS_NO_OVERRIDE \
 
 #
 # Stash values of the variables in _product_stash_var_list.
diff --git a/core/product_config.mk b/core/product_config.mk
index 94449c2..c0a1a9d 100644
--- a/core/product_config.mk
+++ b/core/product_config.mk
@@ -149,7 +149,7 @@
 unbundled_goals := $(strip $(filter APP-%,$(MAKECMDGOALS)))
 ifdef unbundled_goals
   ifneq ($(words $(unbundled_goals)),1)
-    $(error Only one APP-* goal may be specified; saw "$(unbundled_goals)"))
+    $(error Only one APP-* goal may be specified; saw "$(unbundled_goals)")
   endif
   TARGET_BUILD_APPS := $(strip $(subst -, ,$(patsubst APP-%,%,$(unbundled_goals))))
   ifneq ($(filter $(DEFAULT_GOAL),$(MAKECMDGOALS)),)
@@ -213,7 +213,19 @@
 current_product_makefile := $(strip $(current_product_makefile))
 all_product_makefiles := $(strip $(all_product_makefiles))
 
-ifneq (,$(filter product-graph dump-products, $(MAKECMDGOALS)))
+load_all_product_makefiles :=
+ifneq (,$(filter product-graph, $(MAKECMDGOALS)))
+ifeq ($(ANDROID_PRODUCT_GRAPH),--all)
+load_all_product_makefiles := true
+endif
+endif
+ifneq (,$(filter dump-products,$(MAKECMDGOALS)))
+ifeq ($(ANDROID_DUMP_PRODUCTS),all)
+load_all_product_makefiles := true
+endif
+endif
+
+ifeq ($(load_all_product_makefiles),true)
 # Import all product makefiles.
 $(call import-products, $(all_product_makefiles))
 else
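In other words, product-graph and dump-products no longer import every product makefile by default; the old behaviour is opted into with the environment variables checked in the hunk above:

# Graph/dump only the currently selected product (new default):
make product-graph
make dump-products
# Import all product makefiles, as before:
ANDROID_PRODUCT_GRAPH=--all make product-graph
ANDROID_DUMP_PRODUCTS=all make dump-products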
@@ -287,9 +299,11 @@
 
 # product-scoped aapt flags
 PRODUCT_AAPT_FLAGS :=
+PRODUCT_AAPT2_CFLAGS :=
 ifneq ($(filter en_XA ar_XB,$(PRODUCT_LOCALES)),)
-# Force generating resources for pseudo-locales.
-PRODUCT_AAPT_FLAGS += --pseudo-localize
+  # Force generating resources for pseudo-locales.
+  PRODUCT_AAPT2_CFLAGS += --pseudo-localize
+  PRODUCT_AAPT_FLAGS += --pseudo-localize
 endif
 
 PRODUCT_BRAND := $(strip $(PRODUCTS.$(INTERNAL_PRODUCT).PRODUCT_BRAND))
diff --git a/core/proguard.flags b/core/proguard.flags
index 0641627..6ed1f9b 100644
--- a/core/proguard.flags
+++ b/core/proguard.flags
@@ -10,9 +10,9 @@
 #-flattenpackagehierarchy
 
 # Keep classes and methods that have the guava @VisibleForTesting annotation
--keep @com.google.common.annotations.VisibleForTesting class *
+-keep @**.VisibleForTesting class *
 -keepclassmembers class * {
-@com.google.common.annotations.VisibleForTesting *;
+@**.VisibleForTesting *;
 }
 
 -include proguard_basic_keeps.flags
diff --git a/core/proguard.jacoco.flags b/core/proguard.jacoco.flags
new file mode 100644
index 0000000..c3bed94
--- /dev/null
+++ b/core/proguard.jacoco.flags
@@ -0,0 +1,8 @@
+# Keep everything for the emma classes
+-keep class com.vladium.** {
+  *;
+}
+# Keep everything for the jacoco classes
+-keep class org.jacoco.** {
+  *;
+}
diff --git a/core/sdk_font.mk b/core/sdk_font.mk
index 204403d..c10f19f 100644
--- a/core/sdk_font.mk
+++ b/core/sdk_font.mk
@@ -18,7 +18,7 @@
 SDK_FONT_TEMP := $(call intermediates-dir-for,PACKAGING,sdk-fonts,HOST,COMMON)
 
 # The font configuration files - system_fonts.xml, fallback_fonts.xml etc.
-sdk_font_config := $(wildcard frameworks/base/data/fonts/*.xml)
+sdk_font_config := $(sort $(wildcard frameworks/base/data/fonts/*.xml))
 sdk_font_config :=  $(addprefix $(SDK_FONT_TEMP)/, $(notdir $(sdk_font_config)))
 
 $(sdk_font_config): $(SDK_FONT_TEMP)/%.xml: \
diff --git a/core/shared_library.mk b/core/shared_library.mk
index 71a3bb2..2f48341 100644
--- a/core/shared_library.mk
+++ b/core/shared_library.mk
@@ -9,13 +9,13 @@
 ifeq ($(my_module_multilib),both)
 ifneq ($(LOCAL_MODULE_PATH),)
 ifneq ($(TARGET_2ND_ARCH),)
-$(warning $(LOCAL_MODULE): LOCAL_MODULE_PATH for shared libraries is unsupported in multiarch builds, use LOCAL_MODULE_RELATIVE_PATH instead)
+$(error $(LOCAL_MODULE): LOCAL_MODULE_PATH for shared libraries is unsupported in multiarch builds, use LOCAL_MODULE_RELATIVE_PATH instead)
 endif
 endif
 
 ifneq ($(LOCAL_UNSTRIPPED_PATH),)
 ifneq ($(TARGET_2ND_ARCH),)
-$(warning $(LOCAL_MODULE): LOCAL_UNSTRIPPED_PATH for shared libraries is unsupported in multiarch builds)
+$(error $(LOCAL_MODULE): LOCAL_UNSTRIPPED_PATH for shared libraries is unsupported in multiarch builds)
 endif
 endif
 endif # my_module_multilib == both
@@ -38,9 +38,6 @@
 OVERRIDE_BUILT_MODULE_PATH :=
 LOCAL_BUILT_MODULE :=
 LOCAL_INSTALLED_MODULE :=
-LOCAL_MODULE_STEM :=
-LOCAL_BUILT_MODULE_STEM :=
-LOCAL_INSTALLED_MODULE_STEM :=
 LOCAL_INTERMEDIATE_TARGETS :=
 
 include $(BUILD_SYSTEM)/shared_library_internal.mk
diff --git a/core/shared_library_internal.mk b/core/shared_library_internal.mk
index e21cf5d..b9a5e3e 100644
--- a/core/shared_library_internal.mk
+++ b/core/shared_library_internal.mk
@@ -40,16 +40,19 @@
 
 # Define PRIVATE_ variables from global vars
 my_target_global_ld_dirs := $($(LOCAL_2ND_ARCH_VAR_PREFIX)TARGET_GLOBAL_LD_DIRS)
-my_target_libprofile_rt := $($(LOCAL_2ND_ARCH_VAR_PREFIX)TARGET_LIBPROFILE_RT)
-my_target_libgcov := $($(LOCAL_2ND_ARCH_VAR_PREFIX)TARGET_LIBGCOV)
 ifeq ($(LOCAL_NO_LIBGCC),true)
 my_target_libgcc :=
 else
 my_target_libgcc := $($(LOCAL_2ND_ARCH_VAR_PREFIX)TARGET_LIBGCC)
 endif
 my_target_libatomic := $($(LOCAL_2ND_ARCH_VAR_PREFIX)TARGET_LIBATOMIC)
+ifeq ($(LOCAL_NO_CRT),true)
+my_target_crtbegin_so_o :=
+my_target_crtend_so_o :=
+else
 my_target_crtbegin_so_o := $($(LOCAL_2ND_ARCH_VAR_PREFIX)TARGET_CRTBEGIN_SO_O)
 my_target_crtend_so_o := $($(LOCAL_2ND_ARCH_VAR_PREFIX)TARGET_CRTEND_SO_O)
+endif
 ifdef LOCAL_SDK_VERSION
 # Make sure the prebuilt NDK paths are put ahead of the TARGET_GLOBAL_LD_DIRS,
 # so we don't have race condition when the system libraries (such as libc, libstdc++) are also built in the tree.
@@ -63,8 +66,6 @@
 endif
 $(linked_module): PRIVATE_TARGET_GLOBAL_LD_DIRS := $(my_target_global_ld_dirs)
 $(linked_module): PRIVATE_TARGET_GLOBAL_LDFLAGS := $(my_target_global_ldflags)
-$(linked_module): PRIVATE_TARGET_LIBPROFILE_RT := $(my_target_libprofile_rt)
-$(linked_module): PRIVATE_TARGET_LIBGCOV := $(my_target_libgcov)
 $(linked_module): PRIVATE_TARGET_LIBGCC := $(my_target_libgcc)
 $(linked_module): PRIVATE_TARGET_LIBATOMIC := $(my_target_libatomic)
 $(linked_module): PRIVATE_TARGET_CRTBEGIN_SO_O := $(my_target_crtbegin_so_o)
@@ -75,7 +76,7 @@
         $(all_libraries) \
         $(my_target_crtbegin_so_o) \
         $(my_target_crtend_so_o) \
-        $(LOCAL_MODULE_MAKEFILE) \
+        $(LOCAL_MODULE_MAKEFILE_DEP) \
         $(LOCAL_ADDITIONAL_DEPENDENCIES)
 	$(transform-o-to-shared-lib)
 
diff --git a/core/soong.mk b/core/soong.mk
new file mode 100644
index 0000000..2463953
--- /dev/null
+++ b/core/soong.mk
@@ -0,0 +1,80 @@
+SOONG_OUT_DIR := $(OUT_DIR)/soong
+SOONG_HOST_EXECUTABLES := $(SOONG_OUT_DIR)/host/$(HOST_PREBUILT_TAG)/bin
+KATI := $(SOONG_HOST_EXECUTABLES)/ckati
+MAKEPARALLEL := $(SOONG_HOST_EXECUTABLES)/makeparallel
+
+SOONG := $(SOONG_OUT_DIR)/soong
+SOONG_BOOTSTRAP := $(SOONG_OUT_DIR)/.soong.bootstrap
+SOONG_BUILD_NINJA := $(SOONG_OUT_DIR)/build.ninja
+SOONG_ANDROID_MK := $(SOONG_OUT_DIR)/Android.mk
+SOONG_IN_MAKE := $(SOONG_OUT_DIR)/.soong.in_make
+SOONG_VARIABLES := $(SOONG_OUT_DIR)/soong.variables
+
+# We need to rebootstrap soong if SOONG_OUT_DIR or the reverse path from
+# SOONG_OUT_DIR to TOP changes
+SOONG_NEEDS_REBOOTSTRAP :=
+ifneq ($(wildcard $(SOONG_BOOTSTRAP)),)
+  ifneq ($(SOONG_OUT_DIR),$(strip $(shell source $(SOONG_BOOTSTRAP); echo $$BUILDDIR)))
+    SOONG_NEEDS_REBOOTSTRAP := FORCE
+    $(warning soong_out_dir changed)
+  endif
+  ifneq ($(strip $(shell build/soong/reverse_path.py $(SOONG_OUT_DIR))),$(strip $(shell source $(SOONG_BOOTSTRAP); echo $$SRCDIR_FROM_BUILDDIR)))
+    SOONG_NEEDS_REBOOTSTRAP := FORCE
+    $(warning reverse path changed)
+  endif
+endif
+
+# Bootstrap soong.
+$(SOONG_BOOTSTRAP): bootstrap.bash $(SOONG_NEEDS_REBOOTSTRAP)
+	$(hide) mkdir -p $(dir $@)
+	$(hide) BUILDDIR=$(SOONG_OUT_DIR) ./bootstrap.bash
+
+# Create soong.variables with copies of makefile settings.  Runs every build,
+# but only updates soong.variables if it changes
+SOONG_VARIABLES_TMP := $(SOONG_VARIABLES).$$$$
+$(SOONG_VARIABLES): FORCE
+	$(hide) mkdir -p $(dir $@)
+	$(hide) (\
+	echo '{'; \
+	echo '    "Device_uses_jemalloc": $(if $(filter true,$(MALLOC_SVELTE)),false,true),'; \
+	echo '    "Device_uses_dlmalloc": $(if $(filter true,$(MALLOC_SVELTE)),true,false),'; \
+	echo '    "Platform_sdk_version": $(PLATFORM_SDK_VERSION),'; \
+	echo '    "Unbundled_build": $(if $(TARGET_BUILD_APPS),true,false),'; \
+	echo '    "Brillo": $(if $(BRILLO),true,false),'; \
+	echo ''; \
+	echo '    "DeviceName": "$(TARGET_DEVICE)",'; \
+	echo '    "DeviceArch": "$(TARGET_ARCH)",'; \
+	echo '    "DeviceArchVariant": "$(TARGET_ARCH_VARIANT)",'; \
+	echo '    "DeviceCpuVariant": "$(TARGET_CPU_VARIANT)",'; \
+	echo '    "DeviceAbi": ["$(TARGET_CPU_ABI)", "$(TARGET_CPU_ABI2)"],'; \
+	echo '    "DeviceUsesClang": $(if $(USE_CLANG_PLATFORM_BUILD),$(USE_CLANG_PLATFORM_BUILD),false),'; \
+	echo ''; \
+	echo '    "DeviceSecondaryArch": "$(TARGET_2ND_ARCH)",'; \
+	echo '    "DeviceSecondaryArchVariant": "$(TARGET_2ND_ARCH_VARIANT)",'; \
+	echo '    "DeviceSecondaryCpuVariant": "$(TARGET_2ND_CPU_VARIANT)",'; \
+	echo '    "DeviceSecondaryAbi": ["$(TARGET_2ND_CPU_ABI)", "$(TARGET_2ND_CPU_ABI2)"],'; \
+	echo ''; \
+	echo '    "HostArch": "$(HOST_ARCH)",'; \
+	echo '    "HostSecondaryArch": "$(HOST_2ND_ARCH)",'; \
+	echo ''; \
+	echo '    "CrossHost": "$(HOST_CROSS_OS)",'; \
+	echo '    "CrossHostArch": "$(HOST_CROSS_ARCH)",'; \
+	echo '    "CrossHostSecondaryArch": "$(HOST_CROSS_2ND_ARCH)"'; \
+	echo '}') > $(SOONG_VARIABLES_TMP); \
+	if ! cmp -s $(SOONG_VARIABLES_TMP) $(SOONG_VARIABLES); then \
+	  mv $(SOONG_VARIABLES_TMP) $(SOONG_VARIABLES); \
+	else \
+	  rm $(SOONG_VARIABLES_TMP); \
+	fi
+
+# Tell soong that it is embedded in make
+$(SOONG_IN_MAKE):
+	$(hide) mkdir -p $(dir $@)
+	$(hide) touch $@
+
+# Build an Android.mk listing all soong outputs as prebuilts
+$(SOONG_ANDROID_MK): $(SOONG_BOOTSTRAP) $(SOONG_VARIABLES) $(SOONG_IN_MAKE) FORCE
+	$(hide) $(SOONG) $(KATI) $(MAKEPARALLEL) $(NINJA_ARGS)
+
+$(KATI): $(SOONG_ANDROID_MK)
+$(MAKEPARALLEL): $(SOONG_ANDROID_MK)
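For illustration, the soong.variables recipe above renders a small JSON file from the make configuration. On a hypothetical arm64 device it would look roughly like this (values invented, keys taken from the recipe, only the first block shown):

{
    "Device_uses_jemalloc": true,
    "Device_uses_dlmalloc": false,
    "Platform_sdk_version": 23,
    "Unbundled_build": false,
    "Brillo": false,

    "DeviceName": "rocket",
    "DeviceArch": "arm64",
    "DeviceArchVariant": "armv8-a",
    "DeviceCpuVariant": "generic",
    "DeviceAbi": ["arm64-v8a", ""],
    "DeviceUsesClang": true
}

The cmp/mv step at the end only replaces soong.variables when the content actually changed, so its timestamp (and everything depending on it) is left alone on no-op builds.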
diff --git a/core/static_java_library.mk b/core/static_java_library.mk
index 9b7b46a..1279878 100644
--- a/core/static_java_library.mk
+++ b/core/static_java_library.mk
@@ -23,6 +23,14 @@
 LOCAL_IS_STATIC_JAVA_LIBRARY := true
 LOCAL_MODULE_CLASS := JAVA_LIBRARIES
 
+#################################
+include $(BUILD_SYSTEM)/configure_local_jack.mk
+#################################
+
+intermediates.COMMON := $(call local-intermediates-dir,COMMON)
+
+my_res_package :=
+
 # Hack to build static Java library with Android resource
 # See bug 5714516
 all_resources :=
@@ -30,6 +38,14 @@
 # A static Java library needs to explicitly set LOCAL_RESOURCE_DIR.
 ifdef LOCAL_RESOURCE_DIR
 need_compile_res := true
+endif
+ifdef LOCAL_USE_AAPT2
+ifneq ($(LOCAL_STATIC_ANDROID_LIBRARIES),)
+need_compile_res := true
+endif
+endif
+
+ifeq ($(need_compile_res),true)
 all_resources := $(strip \
     $(foreach dir, $(LOCAL_RESOURCE_DIR), \
       $(addprefix $(dir)/, \
@@ -52,17 +68,12 @@
 
 proguard_options_file :=
 
-intermediates.COMMON := $(call local-intermediates-dir,COMMON)
 ifneq ($(LOCAL_PROGUARD_ENABLED),custom)
   proguard_options_file := $(intermediates.COMMON)/proguard_options
 endif
 
 LOCAL_PROGUARD_FLAGS := $(addprefix -include ,$(proguard_options_file)) $(LOCAL_PROGUARD_FLAGS)
 
-#################################
-include $(BUILD_SYSTEM)/configure_local_jack.mk
-#################################
-
 ifdef LOCAL_JACK_ENABLED
 ifndef LOCAL_JACK_PROGUARD_FLAGS
     LOCAL_JACK_PROGUARD_FLAGS := $(LOCAL_PROGUARD_FLAGS)
@@ -70,6 +81,18 @@
 LOCAL_JACK_PROGUARD_FLAGS := $(addprefix -include ,$(proguard_options_file)) $(LOCAL_JACK_PROGUARD_FLAGS)
 endif # LOCAL_JACK_ENABLED
 
+R_file_stamp := $(intermediates.COMMON)/src/R.stamp
+LOCAL_INTERMEDIATE_TARGETS += $(R_file_stamp)
+
+ifdef LOCAL_USE_AAPT2
+# For libraries we treat all the resources as equal, with no overlay.
+my_res_resources := $(all_resources)
+my_overlay_resources :=
+# For libraries put everything in the COMMON intermediate directory.
+my_res_package := $(intermediates.COMMON)/package-res.apk
+
+LOCAL_INTERMEDIATE_TARGETS += $(my_res_package)
+endif  # LOCAL_USE_AAPT2
 endif  # LOCAL_RESOURCE_DIR
 
 all_res_assets := $(all_resources)
@@ -77,8 +100,6 @@
 include $(BUILD_SYSTEM)/java_library.mk
 
 ifeq (true,$(need_compile_res))
-R_file_stamp := $(LOCAL_INTERMEDIATE_SOURCE_DIR)/R.stamp
-
 include $(BUILD_SYSTEM)/android_manifest.mk
 
 LOCAL_SDK_RES_VERSION:=$(strip $(LOCAL_SDK_RES_VERSION))
@@ -90,7 +111,7 @@
 framework_res_package_export_deps :=
 # Please refer to package.mk
 ifneq ($(LOCAL_NO_STANDARD_LIBRARIES),true)
-ifneq ($(filter-out current system_current,$(LOCAL_SDK_RES_VERSION))$(if $(TARGET_BUILD_APPS),$(filter current system_current,$(LOCAL_SDK_RES_VERSION))),)
+ifneq ($(filter-out current system_current test_current,$(LOCAL_SDK_RES_VERSION))$(if $(TARGET_BUILD_APPS),$(filter current system_current test_current,$(LOCAL_SDK_RES_VERSION))),)
 framework_res_package_export := \
     $(HISTORICAL_SDK_VERSIONS_ROOT)/$(LOCAL_SDK_RES_VERSION)/android.jar
 framework_res_package_export_deps := $(framework_res_package_export)
@@ -102,34 +123,53 @@
 endif
 endif
 
-$(R_file_stamp): PRIVATE_MODULE := $(LOCAL_MODULE)
 # add --non-constant-id to prevent inlining constants.
 # AAR needs text symbol file R.txt.
-$(R_file_stamp): PRIVATE_AAPT_FLAGS := $(LOCAL_AAPT_FLAGS) --non-constant-id --output-text-symbols $(LOCAL_INTERMEDIATE_SOURCE_DIR)
-$(R_file_stamp): PRIVATE_SOURCE_INTERMEDIATES_DIR := $(LOCAL_INTERMEDIATE_SOURCE_DIR)
-$(R_file_stamp): PRIVATE_ANDROID_MANIFEST := $(full_android_manifest)
-$(R_file_stamp): PRIVATE_RESOURCE_PUBLICS_OUTPUT := $(intermediates.COMMON)/public_resources.xml
-$(R_file_stamp): PRIVATE_RESOURCE_DIR := $(LOCAL_RESOURCE_DIR)
-$(R_file_stamp): PRIVATE_AAPT_INCLUDES := $(framework_res_package_export)
-ifneq (,$(filter-out current system_current, $(LOCAL_SDK_VERSION)))
-$(R_file_stamp): PRIVATE_DEFAULT_APP_TARGET_SDK := $(LOCAL_SDK_VERSION)
+ifdef LOCAL_USE_AAPT2
+$(LOCAL_INTERMEDIATE_TARGETS): PRIVATE_AAPT_FLAGS := $(LOCAL_AAPT_FLAGS) --static-lib --no-static-lib-packages
+$(LOCAL_INTERMEDIATE_TARGETS): PRIVATE_PRODUCT_AAPT_CONFIG :=
+$(LOCAL_INTERMEDIATE_TARGETS): PRIVATE_PRODUCT_AAPT_PREF_CONFIG :=
+$(LOCAL_INTERMEDIATE_TARGETS): PRIVATE_TARGET_AAPT_CHARACTERISTICS :=
 else
-$(R_file_stamp): PRIVATE_DEFAULT_APP_TARGET_SDK := $(DEFAULT_APP_TARGET_SDK)
+$(LOCAL_INTERMEDIATE_TARGETS): PRIVATE_AAPT_FLAGS := $(LOCAL_AAPT_FLAGS) --non-constant-id --output-text-symbols $(LOCAL_INTERMEDIATE_SOURCE_DIR)
 endif
-$(R_file_stamp): PRIVATE_ASSET_DIR :=
-$(R_file_stamp): PRIVATE_PROGUARD_OPTIONS_FILE := $(proguard_options_file)
-$(R_file_stamp): PRIVATE_MANIFEST_PACKAGE_NAME :=
-$(R_file_stamp): PRIVATE_MANIFEST_INSTRUMENTATION_FOR :=
 
+$(LOCAL_INTERMEDIATE_TARGETS): PRIVATE_SOURCE_INTERMEDIATES_DIR := $(LOCAL_INTERMEDIATE_SOURCE_DIR)
+$(LOCAL_INTERMEDIATE_TARGETS): PRIVATE_ANDROID_MANIFEST := $(full_android_manifest)
+$(LOCAL_INTERMEDIATE_TARGETS): PRIVATE_RESOURCE_PUBLICS_OUTPUT := $(intermediates.COMMON)/public_resources.xml
+$(LOCAL_INTERMEDIATE_TARGETS): PRIVATE_RESOURCE_DIR := $(LOCAL_RESOURCE_DIR)
+$(LOCAL_INTERMEDIATE_TARGETS): PRIVATE_AAPT_INCLUDES := $(framework_res_package_export)
+
+ifneq (,$(filter-out current system_current test_current, $(LOCAL_SDK_VERSION)))
+$(LOCAL_INTERMEDIATE_TARGETS): PRIVATE_DEFAULT_APP_TARGET_SDK := $(LOCAL_SDK_VERSION)
+else
+$(LOCAL_INTERMEDIATE_TARGETS): PRIVATE_DEFAULT_APP_TARGET_SDK := $(DEFAULT_APP_TARGET_SDK)
+endif
+
+$(LOCAL_INTERMEDIATE_TARGETS): PRIVATE_ASSET_DIR :=
+$(LOCAL_INTERMEDIATE_TARGETS): PRIVATE_PROGUARD_OPTIONS_FILE := $(proguard_options_file)
+$(LOCAL_INTERMEDIATE_TARGETS): PRIVATE_MANIFEST_PACKAGE_NAME :=
+$(LOCAL_INTERMEDIATE_TARGETS): PRIVATE_MANIFEST_INSTRUMENTATION_FOR :=
+
+ifdef LOCAL_USE_AAPT2
+# Add one more directory level named res so we can zip up the flat resources that can be linked by apps.
+my_compiled_res_base_dir := $(intermediates.COMMON)/flat-res/res
+my_generated_res_dirs := $(rs_generated_res_dir)
+my_generated_res_dirs_deps := $(RenderScript_file_stamp)
+include $(BUILD_SYSTEM)/aapt2.mk
+$(my_res_package) : $(framework_res_package_export_deps)
+else
 $(R_file_stamp) : $(all_resources) $(full_android_manifest) $(AAPT) $(framework_res_package_export_deps)
 	@echo "target R.java/Manifest.java: $(PRIVATE_MODULE) ($@)"
 	$(create-resource-java-files)
 	$(hide) find $(PRIVATE_SOURCE_INTERMEDIATES_DIR) -name R.java | xargs cat > $@
+endif  # LOCAL_USE_AAPT2
 
 $(LOCAL_BUILT_MODULE): $(R_file_stamp)
 ifdef LOCAL_JACK_ENABLED
 $(noshrob_classes_jack): $(R_file_stamp)
 $(full_classes_jack): $(R_file_stamp)
+$(jack_check_timestamp): $(R_file_stamp)
 endif # LOCAL_JACK_ENABLED
 $(full_classes_compiled_jar): $(R_file_stamp)
 
@@ -137,10 +177,10 @@
 built_aar := $(intermediates.COMMON)/javalib.aar
 $(built_aar): PRIVATE_MODULE := $(LOCAL_MODULE)
 $(built_aar): PRIVATE_ANDROID_MANIFEST := $(full_android_manifest)
-$(built_aar): PRIVATE_CLASSES_JAR := $(LOCAL_BUILT_MODULE)
+$(built_aar): PRIVATE_CLASSES_JAR := $(full_classes_jar)
 $(built_aar): PRIVATE_RESOURCE_DIR := $(LOCAL_RESOURCE_DIR)
 $(built_aar): PRIVATE_R_TXT := $(LOCAL_INTERMEDIATE_SOURCE_DIR)/R.txt
-$(built_aar) : $(LOCAL_BUILT_MODULE)
+$(built_aar) : $(full_classes_jar) $(full_android_manifest)
 	@echo "target AAR:  $(PRIVATE_MODULE) ($@)"
 	$(hide) rm -rf $(dir $@)aar && mkdir -p $(dir $@)aar/res
 	$(hide) cp $(PRIVATE_ANDROID_MANIFEST) $(dir $@)aar/AndroidManifest.xml
@@ -153,7 +193,6 @@
 
 # Register the aar file.
 ALL_MODULES.$(LOCAL_MODULE).AAR := $(built_aar)
-
 endif  # need_compile_res
 
 # Reset internal variables.
diff --git a/core/static_library.mk b/core/static_library.mk
index 854cbfc..a8ae399 100644
--- a/core/static_library.mk
+++ b/core/static_library.mk
@@ -23,9 +23,6 @@
 OVERRIDE_BUILT_MODULE_PATH :=
 LOCAL_BUILT_MODULE :=
 LOCAL_INSTALLED_MODULE :=
-LOCAL_MODULE_STEM :=
-LOCAL_BUILT_MODULE_STEM :=
-LOCAL_INSTALLED_MODULE_STEM :=
 LOCAL_INTERMEDIATE_TARGETS :=
 
 include $(BUILD_SYSTEM)/static_library_internal.mk
diff --git a/core/target_test_internal.mk b/core/target_test_internal.mk
index f8a9f04..4715fe8 100644
--- a/core/target_test_internal.mk
+++ b/core/target_test_internal.mk
@@ -7,9 +7,9 @@
 LOCAL_C_INCLUDES += external/gtest/include
 
 ifndef LOCAL_SDK_VERSION
-LOCAL_STATIC_LIBRARIES += libgtest libgtest_main
+LOCAL_STATIC_LIBRARIES += libgtest_main libgtest
 else
-LOCAL_STATIC_LIBRARIES += libgtest_ndk libgtest_main_ndk
+LOCAL_STATIC_LIBRARIES += libgtest_main_ndk libgtest_ndk
 endif
 
 ifdef LOCAL_MODULE_PATH
diff --git a/core/tasks/apicheck.mk b/core/tasks/apicheck.mk
index 683a075..3975d20 100644
--- a/core/tasks/apicheck.mk
+++ b/core/tasks/apicheck.mk
@@ -129,4 +129,37 @@
 	@echo Copying system-removed.txt
 	$(hide) $(ACP) $(INTERNAL_PLATFORM_SYSTEM_REMOVED_API_FILE) frameworks/base/api/system-removed.txt
 
+#####################Check Test API#####################
+.PHONY: check-test-api
+checkapi : check-test-api
+
+# Check that the Test API we're building hasn't changed from the not-yet-released
+# SDK version. Note that we don't check that we haven't broken the previous
+# SDK's API because the test API is meant only for CTS which is always
+# associated with the current release.
+$(eval $(call check-api, \
+    checktestapi-current, \
+    frameworks/base/api/test-current.txt, \
+    $(INTERNAL_PLATFORM_TEST_API_FILE), \
+    frameworks/base/api/test-removed.txt, \
+    $(INTERNAL_PLATFORM_TEST_REMOVED_API_FILE), \
+    -error 2 -error 3 -error 4 -error 5 -error 6 \
+    -error 7 -error 8 -error 9 -error 10 -error 11 -error 12 -error 13 -error 14 -error 15 \
+    -error 16 -error 17 -error 18 -error 19 -error 20 -error 21 -error 23 -error 24 \
+    -error 25 -error 26 -error 27, \
+    cat $(BUILD_SYSTEM)/apicheck_msg_current.txt, \
+    check-test-api, \
+    $(call doc-timestamp-for,test-api-stubs) \
+    ))
+
+.PHONY: update-test-api
+update-api : update-test-api
+
+update-test-api: $(INTERNAL_PLATFORM_TEST_API_FILE) | $(ACP)
+	@echo Copying test-current.txt
+	$(hide) $(ACP) $(INTERNAL_PLATFORM_TEST_API_FILE) frameworks/base/api/test-current.txt
+	@echo Copying test-removed.txt
+	$(hide) $(ACP) $(INTERNAL_PLATFORM_TEST_REMOVED_API_FILE) frameworks/base/api/test-removed.txt
+
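+# Typical invocations, going by the phony targets declared above (a hedged
+# sketch; "make" stands for whatever build wrapper your tree uses):
+#   make check-test-api    # compare against frameworks/base/api/test-current.txt
+#   make update-test-api   # refresh test-current.txt and test-removed.txt
+# Both are also hooked into the aggregate "checkapi" and "update-api" goals.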
+
 endif
diff --git a/core/tasks/check_boot_jars/package_whitelist.txt b/core/tasks/check_boot_jars/package_whitelist.txt
index 4d62615..3cb6e28 100644
--- a/core/tasks/check_boot_jars/package_whitelist.txt
+++ b/core/tasks/check_boot_jars/package_whitelist.txt
@@ -2,7 +2,7 @@
 # Each line is interpreted as a regular expression.
 
 ###################################################
-# core-libart.jar
+# core-libart.jar & core-oj.jar
 java\.awt\.font
 java\.beans
 java\.io
@@ -13,6 +13,9 @@
 java\.math
 java\.net
 java\.nio
+java\.nio\.file
+java\.nio\.file\.spi
+java\.nio\.file\.attribute
 java\.nio\.channels
 java\.nio\.channels\.spi
 java\.nio\.charset
@@ -24,14 +27,18 @@
 java\.security\.spec
 java\.sql
 java\.text
+java\.text\.spi
 java\.util
 java\.util\.concurrent
 java\.util\.concurrent\.atomic
 java\.util\.concurrent\.locks
+java\.util\.function
 java\.util\.jar
 java\.util\.logging
 java\.util\.prefs
 java\.util\.regex
+java\.util\.spi
+java\.util\.stream
 java\.util\.zip
 javax\.crypto
 javax\.crypto\.interfaces
@@ -54,10 +61,18 @@
 javax\.xml\.transform\.stream
 javax\.xml\.validation
 javax\.xml\.xpath
-sun\.misc
 org\.w3c\.dom
 org\.w3c\.dom\.ls
 org\.w3c\.dom\.traversal
+# OpenJDK internal implementation.
+sun\.misc
+sun\.util.*
+sun\.text.*
+sun\.security.*
+sun\.reflect.*
+sun\.nio.*
+sun\.net.*
+com\.sun\..*
 
 # TODO: Move these internal org.apache.harmony classes to libcore.*
 org\.apache\.harmony\.crypto\.internal
diff --git a/core/tasks/cts.mk b/core/tasks/cts.mk
index 56a7f6f..fdd9591 100644
--- a/core/tasks/cts.mk
+++ b/core/tasks/cts.mk
@@ -1,4 +1,4 @@
-# Copyright (C) 2008 The Android Open Source Project
+# Copyright (C) 2015 The Android Open Source Project
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
@@ -12,384 +12,16 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-cts_dir := $(HOST_OUT)/cts
-cts_tools_src_dir := cts/tools
+test_suite_name := cts
+test_suite_tradefed := cts-tradefed
+test_suite_dynamic_config := cts/tools/cts-tradefed/DynamicConfig.xml
+test_suite_readme := cts/tools/cts-tradefed/README
 
-cts_name := android-cts
-
-JUNIT_HOST_JAR := $(HOST_OUT_JAVA_LIBRARIES)/junit.jar
-HOSTTESTLIB_JAR := $(HOST_OUT_JAVA_LIBRARIES)/hosttestlib.jar
-TF_JAR := $(HOST_OUT_JAVA_LIBRARIES)/tradefed-prebuilt.jar
-CTS_TF_JAR := $(HOST_OUT_JAVA_LIBRARIES)/cts-tradefed.jar
-CTS_TF_EXEC_PATH ?= $(HOST_OUT_EXECUTABLES)/cts-tradefed
-CTS_TF_README_PATH := $(cts_tools_src_dir)/tradefed-host/README
-
-VMTESTSTF_INTERMEDIATES :=$(call intermediates-dir-for,JAVA_LIBRARIES,vm-tests-tf,HOST)
-VMTESTSTF_JAR := $(VMTESTSTF_INTERMEDIATES)/android.core.vm-tests-tf.jar
-
-# The list of test packages that core-tests (libcore/Android.mk)
-# is split into.
-CTS_CORE_CASE_LIST := \
-	android.core.tests.libcore.package.dalvik \
-	android.core.tests.libcore.package.com \
-	android.core.tests.libcore.package.conscrypt \
-	android.core.tests.libcore.package.sun \
-	android.core.tests.libcore.package.tests \
-	android.core.tests.libcore.package.org \
-	android.core.tests.libcore.package.libcore \
-	android.core.tests.libcore.package.jsr166 \
-	android.core.tests.libcore.package.harmony_annotation \
-	android.core.tests.libcore.package.harmony_java_io \
-	android.core.tests.libcore.package.harmony_java_lang \
-	android.core.tests.libcore.package.harmony_java_math \
-	android.core.tests.libcore.package.harmony_java_net \
-	android.core.tests.libcore.package.harmony_java_nio \
-	android.core.tests.libcore.package.harmony_java_text \
-	android.core.tests.libcore.package.harmony_java_util \
-	android.core.tests.libcore.package.harmony_javax_security \
-	android.core.tests.libcore.package.okhttp \
-	android.core.tests.runner
-
-# Additional CTS packages for code under libcore
-CTS_CORE_CASE_LIST += \
-	android.core.tests.libcore.package.tzdata
-
-# The list of test packages that apache-harmony-tests (external/apache-harmony/Android.mk)
-# is split into.
-CTS_CORE_CASE_LIST += \
-	android.core.tests.libcore.package.harmony_beans \
-	android.core.tests.libcore.package.harmony_logging \
-	android.core.tests.libcore.package.harmony_prefs \
-	android.core.tests.libcore.package.harmony_sql
-
-
-CTS_TEST_JAR_LIST := \
-	cts-junit \
-	CtsJdwp
-
-# Depend on the full package paths rather than the phony targets to avoid
-# rebuilding the packages every time.
-CTS_CORE_CASES := $(foreach pkg,$(CTS_CORE_CASE_LIST),$(call intermediates-dir-for,APPS,$(pkg))/package.apk)
-CTS_TEST_JAR_FILES := $(foreach c,$(CTS_TEST_JAR_LIST),$(call intermediates-dir-for,JAVA_LIBRARIES,$(c))/javalib.jar)
-
--include cts/CtsTestCaseList.mk
-
-# A module may have mutliple installed files (e.g. split apks)
-CTS_CASE_LIST_APKS :=
-$(foreach m, $(CTS_TEST_CASE_LIST),\
-  $(foreach fp, $(ALL_MODULES.$(m).BUILT_INSTALLED),\
-    $(eval pair := $(subst :,$(space),$(fp)))\
-    $(eval CTS_CASE_LIST_APKS += $(CTS_TESTCASES_OUT)/$(notdir $(word 2,$(pair))))))\
-$(foreach m, $(CTS_CORE_CASE_LIST),\
-  $(foreach fp, $(ALL_MODULES.$(m).BUILT_INSTALLED),\
-    $(eval pair := $(subst :,$(space),$(fp)))\
-    $(eval built := $(word 1,$(pair)))\
-    $(eval installed := $(CTS_TESTCASES_OUT)/$(notdir $(word 2,$(pair))))\
-    $(eval $(call copy-one-file, $(built), $(installed)))\
-    $(eval CTS_CASE_LIST_APKS += $(installed))))
-
-CTS_CASE_LIST_JARS :=
-$(foreach m, $(CTS_TEST_JAR_LIST),\
-  $(eval CTS_CASE_LIST_JARS += $(CTS_TESTCASES_OUT)/$(m).jar))
-
-CTS_SHARED_LIBS :=
-
-DEFAULT_TEST_PLAN := $(cts_dir)/$(cts_name)/resource/plans
-$(cts_dir)/all_cts_files_stamp: $(CTS_CORE_CASES) $(CTS_TEST_JAR_FILES) $(CTS_TEST_CASES) $(CTS_CASE_LIST_APKS) $(CTS_CASE_LIST_JARS) $(JUNIT_HOST_JAR) $(HOSTTESTLIB_JAR) $(CTS_HOST_LIBRARY_JARS) $(TF_JAR) $(VMTESTSTF_JAR) $(CTS_TF_JAR) $(CTS_TF_EXEC_PATH) $(CTS_TF_README_PATH) $(ADDITIONAL_TF_JARS) $(ACP) $(CTS_SHARED_LIBS)
-
-# Make necessary directory for CTS
-	$(hide) mkdir -p $(TMP_DIR)
-	$(hide) mkdir -p $(PRIVATE_DIR)/docs
-	$(hide) mkdir -p $(PRIVATE_DIR)/tools
-	$(hide) mkdir -p $(PRIVATE_DIR)/repository/testcases
-	$(hide) mkdir -p $(PRIVATE_DIR)/repository/plans
-# Copy executable and JARs to CTS directory
-	$(hide) $(ACP) -fp $(VMTESTSTF_JAR) $(CTS_TESTCASES_OUT)
-	$(hide) $(ACP) -fp $(HOSTTESTLIB_JAR) $(CTS_HOST_LIBRARY_JARS) $(TF_JAR) $(CTS_TF_JAR) $(CTS_TF_EXEC_PATH) $(ADDITIONAL_TF_JARS) $(CTS_TF_README_PATH) $(PRIVATE_DIR)/tools
-	$(hide) $(call copy-files-with-structure, $(CTS_SHARED_LIBS),$(HOST_OUT)/,$(PRIVATE_DIR))
-	$(hide) touch $@
-
-# Generate the test descriptions for the core-tests
-# Parameters:
-# $1 : The output file where the description should be written (without the '.xml' extension)
-# $2 : The AndroidManifest.xml corresponding to the test package
-# $3 : The jar file name on PRIVATE_CLASSPATH containing junit tests to search for
-# $4 : The package prefix of classes to include, possible empty
-# $5 : The architecture of the current build
-# $6 : The directory containing vogar expectations files
-# $7 : The Android.mk corresponding to the test package (required for host-side tests only)
-define generate-core-test-description
-@echo "Generate core-test description ("$(notdir $(1))")"
-$(hide) java -Xmx256M \
-	-Xbootclasspath/a:$(PRIVATE_CLASSPATH):$(JUNIT_HOST_JAR) \
-	-classpath $(HOST_OUT_JAVA_LIBRARIES)/descGen.jar:$(HOST_JDK_TOOLS_JAR) \
-	$(PRIVATE_PARAMS) CollectAllTests $(1) $(2) $(3) "$(4)" $(5) $(6) $(7)
-endef
-
-CORE_INTERMEDIATES :=$(call intermediates-dir-for,JAVA_LIBRARIES,core-libart,,COMMON)
-CONSCRYPT_INTERMEDIATES :=$(call intermediates-dir-for,JAVA_LIBRARIES,conscrypt,,COMMON)
-BOUNCYCASTLE_INTERMEDIATES :=$(call intermediates-dir-for,JAVA_LIBRARIES,bouncycastle,,COMMON)
-APACHEXML_INTERMEDIATES :=$(call intermediates-dir-for,JAVA_LIBRARIES,apache-xml,,COMMON)
-OKHTTP_INTERMEDIATES :=$(call intermediates-dir-for,JAVA_LIBRARIES,okhttp-nojarjar,,COMMON)
-OKHTTPTESTS_INTERMEDIATES :=$(call intermediates-dir-for,JAVA_LIBRARIES,okhttp-tests-nojarjar,,COMMON)
-OKHTTP_REPACKAGED_INTERMEDIATES :=$(call intermediates-dir-for,JAVA_LIBRARIES,okhttp,,COMMON)
-APACHEHARMONYTESTS_INTERMEDIATES :=$(call intermediates-dir-for,JAVA_LIBRARIES,apache-harmony-tests,,COMMON)
-SQLITEJDBC_INTERMEDIATES :=$(call intermediates-dir-for,JAVA_LIBRARIES,sqlite-jdbc,,COMMON)
-JUNIT_INTERMEDIATES :=$(call intermediates-dir-for,JAVA_LIBRARIES,core-junit,,COMMON)
-CORETESTS_INTERMEDIATES :=$(call intermediates-dir-for,JAVA_LIBRARIES,core-tests,,COMMON)
-JSR166TESTS_INTERMEDIATES :=$(call intermediates-dir-for,JAVA_LIBRARIES,jsr166-tests,,COMMON)
-CONSCRYPTTESTS_INTERMEDIATES :=$(call intermediates-dir-for,JAVA_LIBRARIES,conscrypt-tests,,COMMON)
-TZDATAUPDATETESTS_INTERMEDIATES :=$(call intermediates-dir-for,JAVA_LIBRARIES,tzdata_update-tests,,COMMON)
-
-GEN_CLASSPATH := \
-    $(CORE_INTERMEDIATES)/classes.jar:$(CONSCRYPT_INTERMEDIATES)/classes.jar:$(BOUNCYCASTLE_INTERMEDIATES)/classes.jar:$(APACHEXML_INTERMEDIATES)/classes.jar:$(APACHEHARMONYTESTS_INTERMEDIATES)/classes.jar:$(OKHTTP_INTERMEDIATES)/classes.jar:$(OKHTTPTESTS_INTERMEDIATES)/classes.jar:$(OKHTTP_REPACKAGED_INTERMEDIATES)/classes.jar:$(JUNIT_INTERMEDIATES)/classes.jar:$(SQLITEJDBC_INTERMEDIATES)/javalib.jar:$(CORETESTS_INTERMEDIATES)/javalib.jar:$(JSR166TESTS_INTERMEDIATES)/javalib.jar:$(CONSCRYPTTESTS_INTERMEDIATES)/javalib.jar:$(TZDATAUPDATETESTS_INTERMEDIATES)/javalib.jar
-
-CTS_CORE_XMLS := \
-	$(CTS_TESTCASES_OUT)/android.core.tests.libcore.package.dalvik.xml \
-	$(CTS_TESTCASES_OUT)/android.core.tests.libcore.package.com.xml \
-	$(CTS_TESTCASES_OUT)/android.core.tests.libcore.package.conscrypt.xml \
-	$(CTS_TESTCASES_OUT)/android.core.tests.libcore.package.sun.xml \
-	$(CTS_TESTCASES_OUT)/android.core.tests.libcore.package.tests.xml \
-	$(CTS_TESTCASES_OUT)/android.core.tests.libcore.package.org.xml \
-	$(CTS_TESTCASES_OUT)/android.core.tests.libcore.package.libcore.xml \
-	$(CTS_TESTCASES_OUT)/android.core.tests.libcore.package.jsr166.xml \
-	$(CTS_TESTCASES_OUT)/android.core.tests.libcore.package.harmony_annotation.xml \
-	$(CTS_TESTCASES_OUT)/android.core.tests.libcore.package.harmony_java_io.xml \
-	$(CTS_TESTCASES_OUT)/android.core.tests.libcore.package.harmony_java_lang.xml \
-	$(CTS_TESTCASES_OUT)/android.core.tests.libcore.package.harmony_java_math.xml \
-	$(CTS_TESTCASES_OUT)/android.core.tests.libcore.package.harmony_java_net.xml \
-	$(CTS_TESTCASES_OUT)/android.core.tests.libcore.package.harmony_java_nio.xml \
-	$(CTS_TESTCASES_OUT)/android.core.tests.libcore.package.harmony_java_text.xml \
-	$(CTS_TESTCASES_OUT)/android.core.tests.libcore.package.harmony_java_util.xml \
-	$(CTS_TESTCASES_OUT)/android.core.tests.libcore.package.harmony_javax_security.xml \
-	$(CTS_TESTCASES_OUT)/android.core.tests.libcore.package.harmony_beans.xml \
-	$(CTS_TESTCASES_OUT)/android.core.tests.libcore.package.harmony_logging.xml \
-	$(CTS_TESTCASES_OUT)/android.core.tests.libcore.package.harmony_prefs.xml \
-	$(CTS_TESTCASES_OUT)/android.core.tests.libcore.package.harmony_sql.xml \
-	$(CTS_TESTCASES_OUT)/android.core.tests.libcore.package.okhttp.xml \
-	$(CTS_TESTCASES_OUT)/android.core.tests.libcore.package.tzdata.xml \
-
-$(CTS_CORE_XMLS): PRIVATE_CLASSPATH:=$(GEN_CLASSPATH)
-# Why does this depend on javalib.jar instead of classes.jar?  Because
-# even though the tool will operate on the classes.jar files, the
-# build system requires that dependencies use javalib.jar.  If
-# javalib.jar is up-to-date, then classes.jar is as well.  Depending
-# on classes.jar will build the files incorrectly.
-CTS_CORE_XMLS_DEPS := $(CTS_CORE_CASES) $(HOST_OUT_JAVA_LIBRARIES)/descGen.jar $(JUNIT_HOST_JAR) $(CORE_INTERMEDIATES)/javalib.jar $(BOUNCYCASTLE_INTERMEDIATES)/javalib.jar $(APACHEXML_INTERMEDIATES)/javalib.jar $(APACHEHARMONYTESTS_INTERMEDIATES)/javalib.jar $(OKHTTP_INTERMEDIATES)/javalib.jar $(OKHTTPTESTS_INTERMEDIATES)/javalib.jar $(OKHTTP_REPACKAGED_INTERMEDIATES)/javalib.jar $(SQLITEJDBC_INTERMEDIATES)/javalib.jar $(JUNIT_INTERMEDIATES)/javalib.jar $(CORETESTS_INTERMEDIATES)/javalib.jar $(JSR166TESTS_INTERMEDIATES)/javalib.jar $(CONSCRYPTTESTS_INTERMEDIATES)/javalib.jar $(TZDATAUPDATETESTS_INTERMEDIATES)/javalib.jar build/core/tasks/cts.mk | $(ACP)
-
-$(CTS_TESTCASES_OUT)/android.core.tests.libcore.package.dalvik.xml: $(CTS_CORE_XMLS_DEPS)
-	$(hide) mkdir -p $(CTS_TESTCASES_OUT)
-	$(call generate-core-test-description,$(CTS_TESTCASES_OUT)/android.core.tests.libcore.package.dalvik,\
-		cts/tests/core/libcore/dalvik/AndroidManifest.xml,\
-		$(CORETESTS_INTERMEDIATES)/javalib.jar,dalvik,\
-		$(TARGET_ARCH),libcore/expectations)
-
-$(CTS_TESTCASES_OUT)/android.core.tests.libcore.package.com.xml: $(CTS_CORE_XMLS_DEPS)
-	$(hide) mkdir -p $(CTS_TESTCASES_OUT)
-	$(call generate-core-test-description,$(CTS_TESTCASES_OUT)/android.core.tests.libcore.package.com,\
-		cts/tests/core/libcore/com/AndroidManifest.xml,\
-		$(CORETESTS_INTERMEDIATES)/javalib.jar,com,\
-		$(TARGET_ARCH),libcore/expectations)
-
-$(CTS_TESTCASES_OUT)/android.core.tests.libcore.package.conscrypt.xml: $(CTS_CORE_XMLS_DEPS)
-	$(hide) mkdir -p $(CTS_TESTCASES_OUT)
-	$(call generate-core-test-description,$(CTS_TESTCASES_OUT)/android.core.tests.libcore.package.conscrypt,\
-		cts/tests/core/libcore/conscrypt/AndroidManifest.xml,\
-		$(CONSCRYPTTESTS_INTERMEDIATES)/javalib.jar,,\
-		$(TARGET_ARCH),libcore/expectations)
-
-$(CTS_TESTCASES_OUT)/android.core.tests.libcore.package.sun.xml: $(CTS_CORE_XMLS_DEPS)
-	$(hide) mkdir -p $(CTS_TESTCASES_OUT)
-	$(call generate-core-test-description,$(CTS_TESTCASES_OUT)/android.core.tests.libcore.package.sun,\
-		cts/tests/core/libcore/sun/AndroidManifest.xml,\
-		$(CORETESTS_INTERMEDIATES)/javalib.jar,sun,\
-		$(TARGET_ARCH),libcore/expectations)
-
-$(CTS_TESTCASES_OUT)/android.core.tests.libcore.package.tests.xml: $(CTS_CORE_XMLS_DEPS)
-	$(hide) mkdir -p $(CTS_TESTCASES_OUT)
-	$(call generate-core-test-description,$(CTS_TESTCASES_OUT)/android.core.tests.libcore.package.tests,\
-		cts/tests/core/libcore/tests/AndroidManifest.xml,\
-		$(CORETESTS_INTERMEDIATES)/javalib.jar,tests,\
-		$(TARGET_ARCH),libcore/expectations)
-
-$(CTS_TESTCASES_OUT)/android.core.tests.libcore.package.org.xml: $(CTS_CORE_XMLS_DEPS)
-	$(hide) mkdir -p $(CTS_TESTCASES_OUT)
-	$(call generate-core-test-description,$(CTS_TESTCASES_OUT)/android.core.tests.libcore.package.org,\
-		cts/tests/core/libcore/org/AndroidManifest.xml,\
-		$(CORETESTS_INTERMEDIATES)/javalib.jar,\
-		org.w3c.domts:\
-		org.apache.harmony.security.tests:\
-		org.apache.harmony.nio.tests:\
-		org.apache.harmony.crypto.tests:\
-		org.apache.harmony.regex.tests:\
-		org.apache.harmony.luni.tests:\
-		org.apache.harmony.tests.internal.net.www.protocol:\
-		org.apache.harmony.tests.javax.net:\
-		org.json,\
-		$(TARGET_ARCH),libcore/expectations)
-
-$(CTS_TESTCASES_OUT)/android.core.tests.libcore.package.libcore.xml: $(CTS_CORE_XMLS_DEPS)
-	$(hide) mkdir -p $(CTS_TESTCASES_OUT)
-	$(call generate-core-test-description,$(CTS_TESTCASES_OUT)/android.core.tests.libcore.package.libcore,\
-		cts/tests/core/libcore/libcore/AndroidManifest.xml,\
-		$(CORETESTS_INTERMEDIATES)/javalib.jar,libcore,\
-		$(TARGET_ARCH),libcore/expectations)
-
-$(CTS_TESTCASES_OUT)/android.core.tests.libcore.package.jsr166.xml: $(CTS_CORE_XMLS_DEPS)
-	$(hide) mkdir -p $(CTS_TESTCASES_OUT)
-	$(call generate-core-test-description,$(CTS_TESTCASES_OUT)/android.core.tests.libcore.package.jsr166,\
-		cts/tests/core/libcore/jsr166/AndroidManifest.xml,\
-		$(JSR166TESTS_INTERMEDIATES)/javalib.jar,jsr166,\
-		$(TARGET_ARCH),libcore/expectations)
-
-$(CTS_TESTCASES_OUT)/android.core.tests.libcore.package.harmony_annotation.xml: $(CTS_CORE_XMLS_DEPS)
-	$(hide) mkdir -p $(CTS_TESTCASES_OUT)
-	$(call generate-core-test-description,$(CTS_TESTCASES_OUT)/android.core.tests.libcore.package.harmony_annotation,\
-		cts/tests/core/libcore/harmony_annotation/AndroidManifest.xml,\
-		$(CORETESTS_INTERMEDIATES)/javalib.jar,org.apache.harmony.annotation.tests,\
-		$(TARGET_ARCH),libcore/expectations)
-
-$(CTS_TESTCASES_OUT)/android.core.tests.libcore.package.harmony_java_io.xml: $(CTS_CORE_XMLS_DEPS)
-	$(hide) mkdir -p $(CTS_TESTCASES_OUT)
-	$(call generate-core-test-description,$(CTS_TESTCASES_OUT)/android.core.tests.libcore.package.harmony_java_io,\
-		cts/tests/core/libcore/harmony_java_io/AndroidManifest.xml,\
-		$(CORETESTS_INTERMEDIATES)/javalib.jar,org.apache.harmony.tests.java.io,\
-		$(TARGET_ARCH),libcore/expectations)
-
-$(CTS_TESTCASES_OUT)/android.core.tests.libcore.package.harmony_java_lang.xml: $(CTS_CORE_XMLS_DEPS)
-	$(hide) mkdir -p $(CTS_TESTCASES_OUT)
-	$(call generate-core-test-description,$(CTS_TESTCASES_OUT)/android.core.tests.libcore.package.harmony_java_lang,\
-		cts/tests/core/libcore/harmony_java_lang/AndroidManifest.xml,\
-		$(CORETESTS_INTERMEDIATES)/javalib.jar,org.apache.harmony.tests.java.lang,\
-		$(TARGET_ARCH),libcore/expectations)
-
-$(CTS_TESTCASES_OUT)/android.core.tests.libcore.package.harmony_java_math.xml: $(CTS_CORE_XMLS_DEPS)
-	$(hide) mkdir -p $(CTS_TESTCASES_OUT)
-	$(call generate-core-test-description,$(CTS_TESTCASES_OUT)/android.core.tests.libcore.package.harmony_java_math,\
-		cts/tests/core/libcore/harmony_java_math/AndroidManifest.xml,\
-		$(CORETESTS_INTERMEDIATES)/javalib.jar,org.apache.harmony.tests.java.math,\
-		$(TARGET_ARCH),libcore/expectations)
-
-$(CTS_TESTCASES_OUT)/android.core.tests.libcore.package.harmony_java_net.xml: $(CTS_CORE_XMLS_DEPS)
-	$(hide) mkdir -p $(CTS_TESTCASES_OUT)
-	$(call generate-core-test-description,$(CTS_TESTCASES_OUT)/android.core.tests.libcore.package.harmony_java_net,\
-		cts/tests/core/libcore/harmony_java_net/AndroidManifest.xml,\
-		$(CORETESTS_INTERMEDIATES)/javalib.jar,org.apache.harmony.tests.java.net,\
-		$(TARGET_ARCH),libcore/expectations)
-
-$(CTS_TESTCASES_OUT)/android.core.tests.libcore.package.harmony_java_nio.xml: $(CTS_CORE_XMLS_DEPS)
-	$(hide) mkdir -p $(CTS_TESTCASES_OUT)
-	$(call generate-core-test-description,$(CTS_TESTCASES_OUT)/android.core.tests.libcore.package.harmony_java_nio,\
-		cts/tests/core/libcore/harmony_java_nio/AndroidManifest.xml,\
-		$(CORETESTS_INTERMEDIATES)/javalib.jar,org.apache.harmony.tests.java.nio,\
-		$(TARGET_ARCH),libcore/expectations)
-
-$(CTS_TESTCASES_OUT)/android.core.tests.libcore.package.harmony_java_text.xml: $(CTS_CORE_XMLS_DEPS)
-	$(hide) mkdir -p $(CTS_TESTCASES_OUT)
-	$(call generate-core-test-description,$(CTS_TESTCASES_OUT)/android.core.tests.libcore.package.harmony_java_text,\
-		cts/tests/core/libcore/harmony_java_text/AndroidManifest.xml,\
-		$(CORETESTS_INTERMEDIATES)/javalib.jar,org.apache.harmony.tests.java.text,\
-		$(TARGET_ARCH),libcore/expectations)
-
-$(CTS_TESTCASES_OUT)/android.core.tests.libcore.package.harmony_java_util.xml: $(CTS_CORE_XMLS_DEPS)
-	$(hide) mkdir -p $(CTS_TESTCASES_OUT)
-	$(call generate-core-test-description,$(CTS_TESTCASES_OUT)/android.core.tests.libcore.package.harmony_java_util,\
-		cts/tests/core/libcore/harmony_java_util/AndroidManifest.xml,\
-		$(CORETESTS_INTERMEDIATES)/javalib.jar,org.apache.harmony.tests.java.util,\
-		$(TARGET_ARCH),libcore/expectations)
-
-$(CTS_TESTCASES_OUT)/android.core.tests.libcore.package.harmony_javax_security.xml: $(CTS_CORE_XMLS_DEPS)
-	$(hide) mkdir -p $(CTS_TESTCASES_OUT)
-	$(call generate-core-test-description,$(CTS_TESTCASES_OUT)/android.core.tests.libcore.package.harmony_javax_security,\
-		cts/tests/core/libcore/harmony_javax_security/AndroidManifest.xml,\
-		$(CORETESTS_INTERMEDIATES)/javalib.jar,org.apache.harmony.tests.javax.security,\
-		$(TARGET_ARCH),libcore/expectations)
-
-$(CTS_TESTCASES_OUT)/android.core.tests.libcore.package.harmony_beans.xml: $(CTS_CORE_XMLS_DEPS)
-	$(hide) mkdir -p $(CTS_TESTCASES_OUT)
-	$(call generate-core-test-description,$(CTS_TESTCASES_OUT)/android.core.tests.libcore.package.harmony_beans,\
-		cts/tests/core/libcore/harmony_beans/AndroidManifest.xml,\
-		$(APACHEHARMONYTESTS_INTERMEDIATES)/javalib.jar,com.android.org.apache.harmony.beans,\
-		$(TARGET_ARCH),libcore/expectations external/apache-harmony/Android.mk)
-
-$(CTS_TESTCASES_OUT)/android.core.tests.libcore.package.harmony_logging.xml: $(CTS_CORE_XMLS_DEPS)
-	$(hide) mkdir -p $(CTS_TESTCASES_OUT)
-	$(call generate-core-test-description,$(CTS_TESTCASES_OUT)/android.core.tests.libcore.package.harmony_logging,\
-		cts/tests/core/libcore/harmony_logging/AndroidManifest.xml,\
-		$(APACHEHARMONYTESTS_INTERMEDIATES)/javalib.jar,com.android.org.apache.harmony.logging,\
-		$(TARGET_ARCH),libcore/expectations external/apache-harmony/Android.mk)
-
-$(CTS_TESTCASES_OUT)/android.core.tests.libcore.package.harmony_prefs.xml: $(CTS_CORE_XMLS_DEPS)
-	$(hide) mkdir -p $(CTS_TESTCASES_OUT)
-	$(call generate-core-test-description,$(CTS_TESTCASES_OUT)/android.core.tests.libcore.package.harmony_prefs,\
-		cts/tests/core/libcore/harmony_prefs/AndroidManifest.xml,\
-		$(APACHEHARMONYTESTS_INTERMEDIATES)/javalib.jar,com.android.org.apache.harmony.prefs,\
-		$(TARGET_ARCH),libcore/expectations external/apache-harmony/Android.mk)
-
-$(CTS_TESTCASES_OUT)/android.core.tests.libcore.package.harmony_sql.xml: $(CTS_CORE_XMLS_DEPS)
-	$(hide) mkdir -p $(CTS_TESTCASES_OUT)
-	$(call generate-core-test-description,$(CTS_TESTCASES_OUT)/android.core.tests.libcore.package.harmony_sql,\
-		cts/tests/core/libcore/harmony_sql/AndroidManifest.xml,\
-		$(APACHEHARMONYTESTS_INTERMEDIATES)/javalib.jar,com.android.org.apache.harmony.sql,\
-		$(TARGET_ARCH),libcore/expectations external/apache-harmony/Android.mk)
-
-$(CTS_TESTCASES_OUT)/android.core.tests.libcore.package.okhttp.xml: $(CTS_CORE_XMLS_DEPS)
-	$(hide) mkdir -p $(CTS_TESTCASES_OUT)
-	$(call generate-core-test-description,$(CTS_TESTCASES_OUT)/android.core.tests.libcore.package.okhttp,\
-		cts/tests/core/libcore/okhttp/AndroidManifest.xml,\
-		$(OKHTTPTESTS_INTERMEDIATES)/javalib.jar,,\
-		$(TARGET_ARCH),libcore/expectations)
-
-$(CTS_TESTCASES_OUT)/android.core.tests.libcore.package.tzdata.xml: $(CTS_CORE_XMLS_DEPS)
-	$(hide) mkdir -p $(CTS_TESTCASES_OUT)
-	$(call generate-core-test-description,$(CTS_TESTCASES_OUT)/android.core.tests.libcore.package.tzdata,\
-		cts/tests/core/libcore/tzdata/AndroidManifest.xml,\
-		$(TZDATAUPDATETESTS_INTERMEDIATES)/javalib.jar,,\
-		$(TARGET_ARCH),libcore/expectations)
-
-# ----- Generate the test descriptions for the vm-tests-tf -----
-#
-CORE_VM_TEST_TF_DESC := $(CTS_TESTCASES_OUT)/android.core.vm-tests-tf.xml
-
-# core tests only needed to get hold of junit-framework-classes
-CORE_INTERMEDIATES :=$(call intermediates-dir-for,JAVA_LIBRARIES,core-libart,,COMMON)
-JUNIT_INTERMEDIATES :=$(call intermediates-dir-for,JAVA_LIBRARIES,core-junit,,COMMON)
-
-GEN_CLASSPATH := $(CORE_INTERMEDIATES)/classes.jar:$(JUNIT_INTERMEDIATES)/classes.jar:$(VMTESTSTF_JAR):$(TF_JAR)
-
-$(CORE_VM_TEST_TF_DESC): PRIVATE_CLASSPATH:=$(GEN_CLASSPATH)
-# Please see big comment above on why this line depends on javalib.jar instead of classes.jar
-$(CORE_VM_TEST_TF_DESC): $(HOST_OUT_JAVA_LIBRARIES)/descGen.jar $(JUNIT_HOST_JAR) $(CORE_INTERMEDIATES)/javalib.jar $(JUNIT_INTERMEDIATES)/javalib.jar $(VMTESTSTF_JAR) | $(ACP)
-	$(hide) mkdir -p $(CTS_TESTCASES_OUT)
-	$(call generate-core-test-description,$(CTS_TESTCASES_OUT)/android.core.vm-tests-tf,\
-		cts/tests/vm-tests-tf/AndroidManifest.xml,\
-		$(VMTESTSTF_JAR),"",\
-		$(TARGET_ARCH),\
-		libcore/expectations,\
-		cts/tools/vm-tests-tf/Android.mk)
-
-# Generate the default test plan for User.
-# Usage: buildCts.py <testRoot> <ctsOutputDir> <tempDir> <androidRootDir> <docletPath>
-
-$(DEFAULT_TEST_PLAN): $(cts_dir)/all_cts_files_stamp $(cts_tools_src_dir)/utils/buildCts.py $(HOST_OUT_JAVA_LIBRARIES)/descGen.jar $(CTS_CORE_XMLS) $(CTS_TEST_XMLS) $(CORE_VM_TEST_TF_DESC)
-	$(hide) $(cts_tools_src_dir)/utils/buildCts.py cts/tests/tests/ $(PRIVATE_DIR) $(TMP_DIR) \
-		$(TOP) $(HOST_OUT_JAVA_LIBRARIES)/descGen.jar
-	$(hide) mkdir -p $(dir $@) && touch $@
-
-# Package CTS and clean up.
-#
-# TODO:
-#   Pack cts.bat into the same zip file as well. See http://buganizer/issue?id=1656821 for more details
-INTERNAL_CTS_TARGET := $(cts_dir)/$(cts_name).zip
-$(INTERNAL_CTS_TARGET): PRIVATE_NAME := $(cts_name)
-$(INTERNAL_CTS_TARGET): PRIVATE_CTS_DIR := $(cts_dir)
-$(INTERNAL_CTS_TARGET): PRIVATE_DIR := $(cts_dir)/$(cts_name)
-$(INTERNAL_CTS_TARGET): TMP_DIR := $(cts_dir)/temp
-$(INTERNAL_CTS_TARGET): $(cts_dir)/all_cts_files_stamp $(DEFAULT_TEST_PLAN)
-	$(hide) echo "Package CTS: $@"
-	$(hide) cd $(dir $@) && zip -rq $(notdir $@) $(PRIVATE_NAME)
+include $(BUILD_SYSTEM)/tasks/tools/compatibility.mk
 
 .PHONY: cts
-cts: $(INTERNAL_CTS_TARGET) adb
-$(call dist-for-goals,cts,$(INTERNAL_CTS_TARGET))
+cts: $(compatibility_zip)
+$(call dist-for-goals, cts, $(compatibility_zip))
 
+.PHONY: cts_v2
+cts_v2: cts
diff --git a/core/tasks/module-info.mk b/core/tasks/module-info.mk
new file mode 100644
index 0000000..e9b2ac7
--- /dev/null
+++ b/core/tasks/module-info.mk
@@ -0,0 +1,24 @@
+# Print a list of the modules that could be built
+
+MODULE_INFO_JSON := $(PRODUCT_OUT)/module-info.json
+
+$(MODULE_INFO_JSON):
+	@echo Generating $@
+	$(hide) echo -ne '{\n ' > $@
+	$(hide) echo -ne $(foreach m, $(sort $(ALL_MODULES)), \
+		' "$(m)": {' \
+			'"class": [$(foreach w,$(sort $(ALL_MODULES.$(m).CLASS)),"$(w)", )], ' \
+			'"path": [$(foreach w,$(sort $(ALL_MODULES.$(m).PATH)),"$(w)", )], ' \
+			'"tags": [$(foreach w,$(sort $(ALL_MODULES.$(m).TAGS)),"$(w)", )], ' \
+			'"installed": [$(foreach w,$(sort $(ALL_MODULES.$(m).INSTALLED)),"$(w)", )], ' \
+			'},\n' \
+	 ) | sed -e 's/, *\]/]/g' -e 's/, *\}/ }/g' -e '$$s/,$$//' >> $@
+	$(hide) echo '}' >> $@
+
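+# Illustrative output sketch only (the module name and paths below are made
+# up; the real contents come from the ALL_MODULES.* variables collected above):
+#
+# {
+#   "libfoo": {"class": ["SHARED_LIBRARIES"], "path": ["external/foo"],
+#              "tags": ["optional"], "installed": ["out/target/product/generic/system/lib/libfoo.so"] },
+#   ...
+# }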
+
+# If ONE_SHOT_MAKEFILE is set, our view of the world is smaller, so don't
+# rewrite the file in that case.
+ifndef ONE_SHOT_MAKEFILE
+files: $(MODULE_INFO_JSON)
+endif
+
diff --git a/core/tasks/old-cts.mk b/core/tasks/old-cts.mk
new file mode 100644
index 0000000..7024638
--- /dev/null
+++ b/core/tasks/old-cts.mk
@@ -0,0 +1,399 @@
+# Copyright (C) 2008 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+cts_dir := $(HOST_OUT)/old-cts
+cts_tools_src_dir := cts/tools
+
+cts_name := old-android-cts
+
+JUNIT_HOST_JAR := $(HOST_OUT_JAVA_LIBRARIES)/junit.jar
+HOSTTESTLIB_JAR := $(HOST_OUT_JAVA_LIBRARIES)/hosttestlib.jar
+TF_JAR := $(HOST_OUT_JAVA_LIBRARIES)/tradefed-prebuilt.jar
+CTS_TF_JAR := $(HOST_OUT_JAVA_LIBRARIES)/old-cts-tradefed.jar
+CTS_TF_EXEC_PATH ?= $(HOST_OUT_EXECUTABLES)/old-cts-tradefed
+CTS_TF_README_PATH := $(cts_tools_src_dir)/tradefed-host/README
+
+VMTESTSTF_INTERMEDIATES :=$(call intermediates-dir-for,JAVA_LIBRARIES,vm-tests-tf,HOST)
+VMTESTSTF_JAR := $(VMTESTSTF_INTERMEDIATES)/android.core.vm-tests-tf.jar
+
+# The list of test packages that core-tests (libcore/Android.mk)
+# is split into.
+CTS_CORE_CASE_LIST := \
+	android.core.tests.libcore.package.dalvik \
+	android.core.tests.libcore.package.com \
+	android.core.tests.libcore.package.conscrypt \
+	android.core.tests.libcore.package.sun \
+	android.core.tests.libcore.package.tests \
+	android.core.tests.libcore.package.org \
+	android.core.tests.libcore.package.libcore \
+	android.core.tests.libcore.package.jsr166 \
+	android.core.tests.libcore.package.harmony_annotation \
+	android.core.tests.libcore.package.harmony_java_io \
+	android.core.tests.libcore.package.harmony_java_lang \
+	android.core.tests.libcore.package.harmony_java_math \
+	android.core.tests.libcore.package.harmony_java_net \
+	android.core.tests.libcore.package.harmony_java_nio \
+	android.core.tests.libcore.package.harmony_java_text \
+	android.core.tests.libcore.package.harmony_java_util \
+	android.core.tests.libcore.package.harmony_javax_security \
+	android.core.tests.libcore.package.okhttp \
+	android.core.tests.runner
+
+# Additional CTS packages for code under libcore
+CTS_CORE_CASE_LIST += \
+	android.core.tests.libcore.package.tzdata
+
+# The list of test packages that apache-harmony-tests (external/apache-harmony/Android.mk)
+# is split into.
+CTS_CORE_CASE_LIST += \
+	android.core.tests.libcore.package.harmony_beans \
+	android.core.tests.libcore.package.harmony_logging \
+	android.core.tests.libcore.package.harmony_prefs \
+	android.core.tests.libcore.package.harmony_sql
+
+
+CTS_TEST_JAR_LIST := \
+	cts-junit \
+	CtsJdwp \
+	cts-testng \
+	CtsLibcoreOj
+
+# Depend on the full package paths rather than the phony targets to avoid
+# rebuilding the packages every time.
+CTS_CORE_CASES := $(foreach pkg,$(CTS_CORE_CASE_LIST),$(call intermediates-dir-for,APPS,$(pkg))/package.apk)
+CTS_TEST_JAR_FILES := $(foreach c,$(CTS_TEST_JAR_LIST),$(call intermediates-dir-for,JAVA_LIBRARIES,$(c))/javalib.jar)
+
+-include cts/OldCtsTestCaseList.mk
+
+# A module may have multiple installed files (e.g. split apks)
+CTS_CASE_LIST_APKS :=
+$(foreach m, $(CTS_TEST_CASE_LIST),\
+  $(foreach fp, $(ALL_MODULES.$(m).BUILT_INSTALLED),\
+    $(eval pair := $(subst :,$(space),$(fp)))\
+    $(eval CTS_CASE_LIST_APKS += $(CTS_TESTCASES_OUT)/$(notdir $(word 2,$(pair))))))\
+$(foreach m, $(CTS_CORE_CASE_LIST),\
+  $(foreach fp, $(ALL_MODULES.$(m).BUILT_INSTALLED),\
+    $(eval pair := $(subst :,$(space),$(fp)))\
+    $(eval built := $(word 1,$(pair)))\
+    $(eval installed := $(CTS_TESTCASES_OUT)/$(notdir $(word 2,$(pair))))\
+    $(eval $(call copy-one-file, $(built), $(installed)))\
+    $(eval CTS_CASE_LIST_APKS += $(installed))))
+
+CTS_CASE_LIST_JARS :=
+$(foreach m, $(CTS_TEST_JAR_LIST),\
+  $(eval CTS_CASE_LIST_JARS += $(CTS_TESTCASES_OUT)/$(m).jar))
+
+CTS_SHARED_LIBS :=
+
+DEFAULT_TEST_PLAN := $(cts_dir)/$(cts_name)/resource/plans
+$(cts_dir)/all_cts_files_stamp: $(CTS_CORE_CASES) $(CTS_TEST_JAR_FILES) $(CTS_TEST_CASES) $(CTS_CASE_LIST_APKS) $(CTS_CASE_LIST_JARS) $(JUNIT_HOST_JAR) $(HOSTTESTLIB_JAR) $(CTS_HOST_LIBRARY_JARS) $(TF_JAR) $(VMTESTSTF_JAR) $(CTS_TF_JAR) $(CTS_TF_EXEC_PATH) $(CTS_TF_README_PATH) $(ADDITIONAL_TF_JARS) $(ACP) $(CTS_SHARED_LIBS)
+
+# Make the necessary directories for CTS
+	$(hide) mkdir -p $(TMP_DIR)
+	$(hide) mkdir -p $(PRIVATE_DIR)/docs
+	$(hide) mkdir -p $(PRIVATE_DIR)/tools
+	$(hide) mkdir -p $(PRIVATE_DIR)/repository/testcases
+	$(hide) mkdir -p $(PRIVATE_DIR)/repository/plans
+# Copy executable and JARs to CTS directory
+	$(hide) $(ACP) -fp $(VMTESTSTF_JAR) $(CTS_TESTCASES_OUT)
+	$(hide) $(ACP) -fp $(HOSTTESTLIB_JAR) $(CTS_HOST_LIBRARY_JARS) $(TF_JAR) $(CTS_TF_JAR) $(CTS_TF_EXEC_PATH) $(ADDITIONAL_TF_JARS) $(CTS_TF_README_PATH) $(PRIVATE_DIR)/tools
+	$(hide) $(call copy-files-with-structure, $(CTS_SHARED_LIBS),$(HOST_OUT)/,$(PRIVATE_DIR))
+	$(hide) touch $@
+
+# Generate the test descriptions for the core-tests
+# Parameters:
+# $1 : The output file where the description should be written (without the '.xml' extension)
+# $2 : The AndroidManifest.xml corresponding to the test package
+# $3 : The jar file name on PRIVATE_CLASSPATH containing junit tests to search for
+# $4 : The package prefix of classes to include, possibly empty
+# $5 : The architecture of the current build
+# $6 : The directory containing vogar expectations files
+# $7 : The Android.mk corresponding to the test package (required for host-side tests only)
+define generate-core-test-description
+@echo "Generate core-test description ("$(notdir $(1))")"
+$(hide) java -Xmx256M \
+	-Xbootclasspath/a:$(PRIVATE_CLASSPATH):$(JUNIT_HOST_JAR) \
+	-classpath $(HOST_OUT_JAVA_LIBRARIES)/descGen.jar:$(HOST_JDK_TOOLS_JAR) \
+	$(PRIVATE_PARAMS) CollectAllTests $(1) $(2) $(3) "$(4)" $(5) $(6) $(7)
+endef
+
+OJ_INTERMEDIATES :=$(call intermediates-dir-for,JAVA_LIBRARIES,core-oj,,COMMON)
+CORE_INTERMEDIATES :=$(call intermediates-dir-for,JAVA_LIBRARIES,core-libart,,COMMON)
+CONSCRYPT_INTERMEDIATES :=$(call intermediates-dir-for,JAVA_LIBRARIES,conscrypt,,COMMON)
+BOUNCYCASTLE_INTERMEDIATES :=$(call intermediates-dir-for,JAVA_LIBRARIES,bouncycastle,,COMMON)
+APACHEXML_INTERMEDIATES :=$(call intermediates-dir-for,JAVA_LIBRARIES,apache-xml,,COMMON)
+OKHTTP_INTERMEDIATES :=$(call intermediates-dir-for,JAVA_LIBRARIES,okhttp-nojarjar,,COMMON)
+OKHTTPTESTS_INTERMEDIATES :=$(call intermediates-dir-for,JAVA_LIBRARIES,okhttp-tests-nojarjar,,COMMON)
+OKHTTP_REPACKAGED_INTERMEDIATES :=$(call intermediates-dir-for,JAVA_LIBRARIES,okhttp,,COMMON)
+APACHEHARMONYTESTS_INTERMEDIATES :=$(call intermediates-dir-for,JAVA_LIBRARIES,apache-harmony-tests,,COMMON)
+SQLITEJDBC_INTERMEDIATES :=$(call intermediates-dir-for,JAVA_LIBRARIES,sqlite-jdbc,,COMMON)
+JUNIT_INTERMEDIATES :=$(call intermediates-dir-for,JAVA_LIBRARIES,core-junit,,COMMON)
+CORETESTS_INTERMEDIATES :=$(call intermediates-dir-for,JAVA_LIBRARIES,core-tests,,COMMON)
+JSR166TESTS_INTERMEDIATES :=$(call intermediates-dir-for,JAVA_LIBRARIES,jsr166-tests,,COMMON)
+CONSCRYPTTESTS_INTERMEDIATES :=$(call intermediates-dir-for,JAVA_LIBRARIES,conscrypt-tests,,COMMON)
+TZDATAUPDATETESTS_INTERMEDIATES :=$(call intermediates-dir-for,JAVA_LIBRARIES,tzdata_update-tests,,COMMON)
+
+GEN_CLASSPATH := \
+    $(OJ_INTERMEDIATES)/classes.jar:$(CORE_INTERMEDIATES)/classes.jar:$(CONSCRYPT_INTERMEDIATES)/classes.jar:$(BOUNCYCASTLE_INTERMEDIATES)/classes.jar:$(APACHEXML_INTERMEDIATES)/classes.jar:$(APACHEHARMONYTESTS_INTERMEDIATES)/classes.jar:$(OKHTTP_INTERMEDIATES)/classes.jar:$(OKHTTPTESTS_INTERMEDIATES)/classes.jar:$(OKHTTP_REPACKAGED_INTERMEDIATES)/classes.jar:$(JUNIT_INTERMEDIATES)/classes.jar:$(SQLITEJDBC_INTERMEDIATES)/javalib.jar:$(CORETESTS_INTERMEDIATES)/javalib.jar:$(JSR166TESTS_INTERMEDIATES)/javalib.jar:$(CONSCRYPTTESTS_INTERMEDIATES)/javalib.jar:$(TZDATAUPDATETESTS_INTERMEDIATES)/javalib.jar
+
+CTS_CORE_XMLS := \
+	$(CTS_TESTCASES_OUT)/android.core.tests.libcore.package.dalvik.xml \
+	$(CTS_TESTCASES_OUT)/android.core.tests.libcore.package.com.xml \
+	$(CTS_TESTCASES_OUT)/android.core.tests.libcore.package.conscrypt.xml \
+	$(CTS_TESTCASES_OUT)/android.core.tests.libcore.package.sun.xml \
+	$(CTS_TESTCASES_OUT)/android.core.tests.libcore.package.tests.xml \
+	$(CTS_TESTCASES_OUT)/android.core.tests.libcore.package.org.xml \
+	$(CTS_TESTCASES_OUT)/android.core.tests.libcore.package.libcore.xml \
+	$(CTS_TESTCASES_OUT)/android.core.tests.libcore.package.jsr166.xml \
+	$(CTS_TESTCASES_OUT)/android.core.tests.libcore.package.harmony_annotation.xml \
+	$(CTS_TESTCASES_OUT)/android.core.tests.libcore.package.harmony_java_io.xml \
+	$(CTS_TESTCASES_OUT)/android.core.tests.libcore.package.harmony_java_lang.xml \
+	$(CTS_TESTCASES_OUT)/android.core.tests.libcore.package.harmony_java_math.xml \
+	$(CTS_TESTCASES_OUT)/android.core.tests.libcore.package.harmony_java_net.xml \
+	$(CTS_TESTCASES_OUT)/android.core.tests.libcore.package.harmony_java_nio.xml \
+	$(CTS_TESTCASES_OUT)/android.core.tests.libcore.package.harmony_java_text.xml \
+	$(CTS_TESTCASES_OUT)/android.core.tests.libcore.package.harmony_java_util.xml \
+	$(CTS_TESTCASES_OUT)/android.core.tests.libcore.package.harmony_javax_security.xml \
+	$(CTS_TESTCASES_OUT)/android.core.tests.libcore.package.harmony_beans.xml \
+	$(CTS_TESTCASES_OUT)/android.core.tests.libcore.package.harmony_logging.xml \
+	$(CTS_TESTCASES_OUT)/android.core.tests.libcore.package.harmony_prefs.xml \
+	$(CTS_TESTCASES_OUT)/android.core.tests.libcore.package.harmony_sql.xml \
+	$(CTS_TESTCASES_OUT)/android.core.tests.libcore.package.okhttp.xml \
+	$(CTS_TESTCASES_OUT)/android.core.tests.libcore.package.tzdata.xml \
+
+$(CTS_CORE_XMLS): PRIVATE_CLASSPATH:=$(GEN_CLASSPATH)
+# Why does this depend on javalib.jar instead of classes.jar?  Because
+# even though the tool will operate on the classes.jar files, the
+# build system requires that dependencies use javalib.jar.  If
+# javalib.jar is up-to-date, then classes.jar is as well.  Depending
+# on classes.jar will build the files incorrectly.
+CTS_CORE_XMLS_DEPS := $(CTS_CORE_CASES) $(HOST_OUT_JAVA_LIBRARIES)/descGen.jar $(JUNIT_HOST_JAR) $(CORE_INTERMEDIATES)/javalib.jar $(BOUNCYCASTLE_INTERMEDIATES)/javalib.jar $(APACHEXML_INTERMEDIATES)/javalib.jar $(APACHEHARMONYTESTS_INTERMEDIATES)/javalib.jar $(OKHTTP_INTERMEDIATES)/javalib.jar $(OKHTTPTESTS_INTERMEDIATES)/javalib.jar $(OKHTTP_REPACKAGED_INTERMEDIATES)/javalib.jar $(SQLITEJDBC_INTERMEDIATES)/javalib.jar $(JUNIT_INTERMEDIATES)/javalib.jar $(CORETESTS_INTERMEDIATES)/javalib.jar $(JSR166TESTS_INTERMEDIATES)/javalib.jar $(CONSCRYPTTESTS_INTERMEDIATES)/javalib.jar $(TZDATAUPDATETESTS_INTERMEDIATES)/javalib.jar build/core/tasks/cts.mk | $(ACP)
+
+$(CTS_TESTCASES_OUT)/android.core.tests.libcore.package.dalvik.xml: $(CTS_CORE_XMLS_DEPS)
+	$(hide) mkdir -p $(CTS_TESTCASES_OUT)
+	$(call generate-core-test-description,$(CTS_TESTCASES_OUT)/android.core.tests.libcore.package.dalvik,\
+		cts/tests/core/libcore/dalvik/AndroidManifest.xml,\
+		$(CORETESTS_INTERMEDIATES)/javalib.jar,dalvik,\
+		$(TARGET_ARCH),libcore/expectations)
+
+$(CTS_TESTCASES_OUT)/android.core.tests.libcore.package.com.xml: $(CTS_CORE_XMLS_DEPS)
+	$(hide) mkdir -p $(CTS_TESTCASES_OUT)
+	$(call generate-core-test-description,$(CTS_TESTCASES_OUT)/android.core.tests.libcore.package.com,\
+		cts/tests/core/libcore/com/AndroidManifest.xml,\
+		$(CORETESTS_INTERMEDIATES)/javalib.jar,com,\
+		$(TARGET_ARCH),libcore/expectations)
+
+$(CTS_TESTCASES_OUT)/android.core.tests.libcore.package.conscrypt.xml: $(CTS_CORE_XMLS_DEPS)
+	$(hide) mkdir -p $(CTS_TESTCASES_OUT)
+	$(call generate-core-test-description,$(CTS_TESTCASES_OUT)/android.core.tests.libcore.package.conscrypt,\
+		cts/tests/core/libcore/conscrypt/AndroidManifest.xml,\
+		$(CONSCRYPTTESTS_INTERMEDIATES)/javalib.jar,,\
+		$(TARGET_ARCH),libcore/expectations)
+
+$(CTS_TESTCASES_OUT)/android.core.tests.libcore.package.sun.xml: $(CTS_CORE_XMLS_DEPS)
+	$(hide) mkdir -p $(CTS_TESTCASES_OUT)
+	$(call generate-core-test-description,$(CTS_TESTCASES_OUT)/android.core.tests.libcore.package.sun,\
+		cts/tests/core/libcore/sun/AndroidManifest.xml,\
+		$(CORETESTS_INTERMEDIATES)/javalib.jar,sun,\
+		$(TARGET_ARCH),libcore/expectations)
+
+$(CTS_TESTCASES_OUT)/android.core.tests.libcore.package.tests.xml: $(CTS_CORE_XMLS_DEPS)
+	$(hide) mkdir -p $(CTS_TESTCASES_OUT)
+	$(call generate-core-test-description,$(CTS_TESTCASES_OUT)/android.core.tests.libcore.package.tests,\
+		cts/tests/core/libcore/tests/AndroidManifest.xml,\
+		$(CORETESTS_INTERMEDIATES)/javalib.jar,tests,\
+		$(TARGET_ARCH),libcore/expectations)
+
+$(CTS_TESTCASES_OUT)/android.core.tests.libcore.package.org.xml: $(CTS_CORE_XMLS_DEPS)
+	$(hide) mkdir -p $(CTS_TESTCASES_OUT)
+	$(call generate-core-test-description,$(CTS_TESTCASES_OUT)/android.core.tests.libcore.package.org,\
+		cts/tests/core/libcore/org/AndroidManifest.xml,\
+		$(CORETESTS_INTERMEDIATES)/javalib.jar,\
+		org.w3c.domts:\
+		org.apache.harmony.security.tests:\
+		org.apache.harmony.nio.tests:\
+		org.apache.harmony.crypto.tests:\
+		org.apache.harmony.regex.tests:\
+		org.apache.harmony.luni.tests:\
+		org.apache.harmony.tests.internal.net.www.protocol:\
+		org.apache.harmony.tests.javax.net:\
+		org.apache.harmony.tests.javax.xml:\
+		org.json,\
+		$(TARGET_ARCH),libcore/expectations)
+
+$(CTS_TESTCASES_OUT)/android.core.tests.libcore.package.libcore.xml: $(CTS_CORE_XMLS_DEPS)
+	$(hide) mkdir -p $(CTS_TESTCASES_OUT)
+	$(call generate-core-test-description,$(CTS_TESTCASES_OUT)/android.core.tests.libcore.package.libcore,\
+		cts/tests/core/libcore/libcore/AndroidManifest.xml,\
+		$(CORETESTS_INTERMEDIATES)/javalib.jar,libcore,\
+		$(TARGET_ARCH),libcore/expectations)
+
+$(CTS_TESTCASES_OUT)/android.core.tests.libcore.package.jsr166.xml: $(CTS_CORE_XMLS_DEPS)
+	$(hide) mkdir -p $(CTS_TESTCASES_OUT)
+	$(call generate-core-test-description,$(CTS_TESTCASES_OUT)/android.core.tests.libcore.package.jsr166,\
+		cts/tests/core/libcore/jsr166/AndroidManifest.xml,\
+		$(JSR166TESTS_INTERMEDIATES)/javalib.jar,jsr166,\
+		$(TARGET_ARCH),libcore/expectations)
+
+$(CTS_TESTCASES_OUT)/android.core.tests.libcore.package.harmony_annotation.xml: $(CTS_CORE_XMLS_DEPS)
+	$(hide) mkdir -p $(CTS_TESTCASES_OUT)
+	$(call generate-core-test-description,$(CTS_TESTCASES_OUT)/android.core.tests.libcore.package.harmony_annotation,\
+		cts/tests/core/libcore/harmony_annotation/AndroidManifest.xml,\
+		$(CORETESTS_INTERMEDIATES)/javalib.jar,org.apache.harmony.annotation.tests,\
+		$(TARGET_ARCH),libcore/expectations)
+
+$(CTS_TESTCASES_OUT)/android.core.tests.libcore.package.harmony_java_io.xml: $(CTS_CORE_XMLS_DEPS)
+	$(hide) mkdir -p $(CTS_TESTCASES_OUT)
+	$(call generate-core-test-description,$(CTS_TESTCASES_OUT)/android.core.tests.libcore.package.harmony_java_io,\
+		cts/tests/core/libcore/harmony_java_io/AndroidManifest.xml,\
+		$(CORETESTS_INTERMEDIATES)/javalib.jar,org.apache.harmony.tests.java.io,\
+		$(TARGET_ARCH),libcore/expectations)
+
+$(CTS_TESTCASES_OUT)/android.core.tests.libcore.package.harmony_java_lang.xml: $(CTS_CORE_XMLS_DEPS)
+	$(hide) mkdir -p $(CTS_TESTCASES_OUT)
+	$(call generate-core-test-description,$(CTS_TESTCASES_OUT)/android.core.tests.libcore.package.harmony_java_lang,\
+		cts/tests/core/libcore/harmony_java_lang/AndroidManifest.xml,\
+		$(CORETESTS_INTERMEDIATES)/javalib.jar,org.apache.harmony.tests.java.lang,\
+		$(TARGET_ARCH),libcore/expectations)
+
+$(CTS_TESTCASES_OUT)/android.core.tests.libcore.package.harmony_java_math.xml: $(CTS_CORE_XMLS_DEPS)
+	$(hide) mkdir -p $(CTS_TESTCASES_OUT)
+	$(call generate-core-test-description,$(CTS_TESTCASES_OUT)/android.core.tests.libcore.package.harmony_java_math,\
+		cts/tests/core/libcore/harmony_java_math/AndroidManifest.xml,\
+		$(CORETESTS_INTERMEDIATES)/javalib.jar,org.apache.harmony.tests.java.math,\
+		$(TARGET_ARCH),libcore/expectations)
+
+$(CTS_TESTCASES_OUT)/android.core.tests.libcore.package.harmony_java_net.xml: $(CTS_CORE_XMLS_DEPS)
+	$(hide) mkdir -p $(CTS_TESTCASES_OUT)
+	$(call generate-core-test-description,$(CTS_TESTCASES_OUT)/android.core.tests.libcore.package.harmony_java_net,\
+		cts/tests/core/libcore/harmony_java_net/AndroidManifest.xml,\
+		$(CORETESTS_INTERMEDIATES)/javalib.jar,org.apache.harmony.tests.java.net,\
+		$(TARGET_ARCH),libcore/expectations)
+
+$(CTS_TESTCASES_OUT)/android.core.tests.libcore.package.harmony_java_nio.xml: $(CTS_CORE_XMLS_DEPS)
+	$(hide) mkdir -p $(CTS_TESTCASES_OUT)
+	$(call generate-core-test-description,$(CTS_TESTCASES_OUT)/android.core.tests.libcore.package.harmony_java_nio,\
+		cts/tests/core/libcore/harmony_java_nio/AndroidManifest.xml,\
+		$(CORETESTS_INTERMEDIATES)/javalib.jar,org.apache.harmony.tests.java.nio,\
+		$(TARGET_ARCH),libcore/expectations)
+
+$(CTS_TESTCASES_OUT)/android.core.tests.libcore.package.harmony_java_text.xml: $(CTS_CORE_XMLS_DEPS)
+	$(hide) mkdir -p $(CTS_TESTCASES_OUT)
+	$(call generate-core-test-description,$(CTS_TESTCASES_OUT)/android.core.tests.libcore.package.harmony_java_text,\
+		cts/tests/core/libcore/harmony_java_text/AndroidManifest.xml,\
+		$(CORETESTS_INTERMEDIATES)/javalib.jar,org.apache.harmony.tests.java.text,\
+		$(TARGET_ARCH),libcore/expectations)
+
+$(CTS_TESTCASES_OUT)/android.core.tests.libcore.package.harmony_java_util.xml: $(CTS_CORE_XMLS_DEPS)
+	$(hide) mkdir -p $(CTS_TESTCASES_OUT)
+	$(call generate-core-test-description,$(CTS_TESTCASES_OUT)/android.core.tests.libcore.package.harmony_java_util,\
+		cts/tests/core/libcore/harmony_java_util/AndroidManifest.xml,\
+		$(CORETESTS_INTERMEDIATES)/javalib.jar,org.apache.harmony.tests.java.util,\
+		$(TARGET_ARCH),libcore/expectations)
+
+$(CTS_TESTCASES_OUT)/android.core.tests.libcore.package.harmony_javax_security.xml: $(CTS_CORE_XMLS_DEPS)
+	$(hide) mkdir -p $(CTS_TESTCASES_OUT)
+	$(call generate-core-test-description,$(CTS_TESTCASES_OUT)/android.core.tests.libcore.package.harmony_javax_security,\
+		cts/tests/core/libcore/harmony_javax_security/AndroidManifest.xml,\
+		$(CORETESTS_INTERMEDIATES)/javalib.jar,org.apache.harmony.tests.javax.security,\
+		$(TARGET_ARCH),libcore/expectations)
+
+$(CTS_TESTCASES_OUT)/android.core.tests.libcore.package.harmony_beans.xml: $(CTS_CORE_XMLS_DEPS)
+	$(hide) mkdir -p $(CTS_TESTCASES_OUT)
+	$(call generate-core-test-description,$(CTS_TESTCASES_OUT)/android.core.tests.libcore.package.harmony_beans,\
+		cts/tests/core/libcore/harmony_beans/AndroidManifest.xml,\
+		$(APACHEHARMONYTESTS_INTERMEDIATES)/javalib.jar,com.android.org.apache.harmony.beans,\
+		$(TARGET_ARCH),libcore/expectations external/apache-harmony/Android.mk)
+
+$(CTS_TESTCASES_OUT)/android.core.tests.libcore.package.harmony_logging.xml: $(CTS_CORE_XMLS_DEPS)
+	$(hide) mkdir -p $(CTS_TESTCASES_OUT)
+	$(call generate-core-test-description,$(CTS_TESTCASES_OUT)/android.core.tests.libcore.package.harmony_logging,\
+		cts/tests/core/libcore/harmony_logging/AndroidManifest.xml,\
+		$(APACHEHARMONYTESTS_INTERMEDIATES)/javalib.jar,com.android.org.apache.harmony.logging,\
+		$(TARGET_ARCH),libcore/expectations external/apache-harmony/Android.mk)
+
+$(CTS_TESTCASES_OUT)/android.core.tests.libcore.package.harmony_prefs.xml: $(CTS_CORE_XMLS_DEPS)
+	$(hide) mkdir -p $(CTS_TESTCASES_OUT)
+	$(call generate-core-test-description,$(CTS_TESTCASES_OUT)/android.core.tests.libcore.package.harmony_prefs,\
+		cts/tests/core/libcore/harmony_prefs/AndroidManifest.xml,\
+		$(APACHEHARMONYTESTS_INTERMEDIATES)/javalib.jar,com.android.org.apache.harmony.prefs,\
+		$(TARGET_ARCH),libcore/expectations external/apache-harmony/Android.mk)
+
+$(CTS_TESTCASES_OUT)/android.core.tests.libcore.package.harmony_sql.xml: $(CTS_CORE_XMLS_DEPS)
+	$(hide) mkdir -p $(CTS_TESTCASES_OUT)
+	$(call generate-core-test-description,$(CTS_TESTCASES_OUT)/android.core.tests.libcore.package.harmony_sql,\
+		cts/tests/core/libcore/harmony_sql/AndroidManifest.xml,\
+		$(APACHEHARMONYTESTS_INTERMEDIATES)/javalib.jar,com.android.org.apache.harmony.sql,\
+		$(TARGET_ARCH),libcore/expectations external/apache-harmony/Android.mk)
+
+$(CTS_TESTCASES_OUT)/android.core.tests.libcore.package.okhttp.xml: $(CTS_CORE_XMLS_DEPS)
+	$(hide) mkdir -p $(CTS_TESTCASES_OUT)
+	$(call generate-core-test-description,$(CTS_TESTCASES_OUT)/android.core.tests.libcore.package.okhttp,\
+		cts/tests/core/libcore/okhttp/AndroidManifest.xml,\
+		$(OKHTTPTESTS_INTERMEDIATES)/javalib.jar,,\
+		$(TARGET_ARCH),libcore/expectations)
+
+$(CTS_TESTCASES_OUT)/android.core.tests.libcore.package.tzdata.xml: $(CTS_CORE_XMLS_DEPS)
+	$(hide) mkdir -p $(CTS_TESTCASES_OUT)
+	$(call generate-core-test-description,$(CTS_TESTCASES_OUT)/android.core.tests.libcore.package.tzdata,\
+		cts/tests/core/libcore/tzdata/AndroidManifest.xml,\
+		$(TZDATAUPDATETESTS_INTERMEDIATES)/javalib.jar,,\
+		$(TARGET_ARCH),libcore/expectations)
+
+# ----- Generate the test descriptions for the vm-tests-tf -----
+#
+CORE_VM_TEST_TF_DESC := $(CTS_TESTCASES_OUT)/android.core.vm-tests-tf.xml
+
+# core tests are only needed to get hold of the junit framework classes
+OJ_INTERMEDIATES :=$(call intermediates-dir-for,JAVA_LIBRARIES,core-oj,,COMMON)
+CORE_INTERMEDIATES :=$(call intermediates-dir-for,JAVA_LIBRARIES,core-libart,,COMMON)
+JUNIT_INTERMEDIATES :=$(call intermediates-dir-for,JAVA_LIBRARIES,core-junit,,COMMON)
+
+GEN_CLASSPATH := $(OJ_INTERMEDIATES)/classes.jar:$(CORE_INTERMEDIATES)/classes.jar:$(JUNIT_INTERMEDIATES)/classes.jar:$(VMTESTSTF_JAR):$(TF_JAR)
+
+$(CORE_VM_TEST_TF_DESC): PRIVATE_CLASSPATH:=$(GEN_CLASSPATH)
+# Please see big comment above on why this line depends on javalib.jar instead of classes.jar
+$(CORE_VM_TEST_TF_DESC): $(HOST_OUT_JAVA_LIBRARIES)/descGen.jar $(JUNIT_HOST_JAR) $(CORE_INTERMEDIATES)/javalib.jar $(JUNIT_INTERMEDIATES)/javalib.jar $(VMTESTSTF_JAR) | $(ACP)
+	$(hide) mkdir -p $(CTS_TESTCASES_OUT)
+	$(call generate-core-test-description,$(CTS_TESTCASES_OUT)/android.core.vm-tests-tf,\
+		cts/tests/vm-tests-tf/AndroidManifest.xml,\
+		$(VMTESTSTF_JAR),"",\
+		$(TARGET_ARCH),\
+		libcore/expectations,\
+		cts/tools/vm-tests-tf/Android.mk)
+
+# Generate the default test plan for User.
+# Usage: buildCts.py <testRoot> <ctsOutputDir> <tempDir> <androidRootDir> <docletPath>
+
+$(DEFAULT_TEST_PLAN): $(cts_dir)/all_cts_files_stamp $(cts_tools_src_dir)/utils/buildCts.py $(HOST_OUT_JAVA_LIBRARIES)/descGen.jar $(CTS_CORE_XMLS) $(CTS_TEST_XMLS) $(CORE_VM_TEST_TF_DESC)
+	$(hide) $(cts_tools_src_dir)/utils/buildCts.py cts/tests/tests/ $(PRIVATE_DIR) $(TMP_DIR) \
+		$(TOP) $(HOST_OUT_JAVA_LIBRARIES)/descGen.jar
+	$(hide) mkdir -p $(dir $@) && touch $@
+
+# Package CTS and clean up.
+#
+# TODO:
+#   Pack cts.bat into the same zip file as well. See http://buganizer/issue?id=1656821 for more details
+INTERNAL_CTS_TARGET := $(cts_dir)/$(cts_name).zip
+$(INTERNAL_CTS_TARGET): PRIVATE_NAME := $(cts_name)
+$(INTERNAL_CTS_TARGET): PRIVATE_CTS_DIR := $(cts_dir)
+$(INTERNAL_CTS_TARGET): PRIVATE_DIR := $(cts_dir)/$(cts_name)
+$(INTERNAL_CTS_TARGET): TMP_DIR := $(cts_dir)/temp
+$(INTERNAL_CTS_TARGET): $(cts_dir)/all_cts_files_stamp $(DEFAULT_TEST_PLAN)
+	$(hide) echo "Package CTS: $@"
+	$(hide) cd $(dir $@) && zip -rqX $(notdir $@) $(PRIVATE_NAME)
+
+.PHONY: old-cts
+old-cts: $(INTERNAL_CTS_TARGET) adb
+$(call dist-for-goals,old-cts,$(INTERNAL_CTS_TARGET))
diff --git a/core/tasks/sdk-addon.mk b/core/tasks/sdk-addon.mk
index 5ac9b7d..362b229 100644
--- a/core/tasks/sdk-addon.mk
+++ b/core/tasks/sdk-addon.mk
@@ -111,13 +111,13 @@
 	    $(ACP) -r $$d $(PRIVATE_STAGING_DIR)/docs ;\
 	  done
 	$(hide) mkdir -p $(dir $@)
-	$(hide) ( F=$$(pwd)/$@ ; cd $(PRIVATE_STAGING_DIR)/.. && zip -rq $$F $(notdir $(PRIVATE_STAGING_DIR)) )
+	$(hide) ( F=$$(pwd)/$@ ; cd $(PRIVATE_STAGING_DIR)/.. && zip -rqX $$F $(notdir $(PRIVATE_STAGING_DIR)) )
 
 $(full_target_img): PRIVATE_STAGING_DIR := $(call append-path,$(staging),$(addon_dir_img))/images/$(TARGET_CPU_ABI)
 $(full_target_img): $(full_target) $(addon_img_source_prop)
 	@echo Packaging SDK Addon System-Image: $@
 	$(hide) mkdir -p $(dir $@)
-	$(hide) ( F=$$(pwd)/$@ ; cd $(PRIVATE_STAGING_DIR)/.. && zip -rq $$F $(notdir $(PRIVATE_STAGING_DIR)) )
+	$(hide) ( F=$$(pwd)/$@ ; cd $(PRIVATE_STAGING_DIR)/.. && zip -rqX $$F $(notdir $(PRIVATE_STAGING_DIR)) )
 
 
 .PHONY: sdk_addon
diff --git a/core/tasks/tools/compatibility.mk b/core/tasks/tools/compatibility.mk
new file mode 100644
index 0000000..d8f900e
--- /dev/null
+++ b/core/tasks/tools/compatibility.mk
@@ -0,0 +1,52 @@
+# Copyright (C) 2015 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# Package up a compatibility test suite in a zip file.
+#
+# Input variables:
+#   test_suite_name: the name of this test suite eg. cts
+#   test_suite_tradefed: the name of this test suite's tradefed wrapper
+#   test_suite_dynamic_config: the path to this test suite's dynamic configuration file
+#   test_suite_readme: the path to a README file for this test suite
+# Output variables:
+#   compatibility_zip: the path to the output zip file.
+
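+# Minimal usage sketch (hypothetical suite name "xts" and hypothetical paths;
+# cts.mk and vts.mk elsewhere in this change are the real callers):
+#
+#   test_suite_name := xts
+#   test_suite_tradefed := xts-tradefed
+#   test_suite_dynamic_config := test/xts/DynamicConfig.xml
+#   test_suite_readme := test/xts/README
+#   include $(BUILD_SYSTEM)/tasks/tools/compatibility.mk
+#
+#   .PHONY: xts
+#   xts: $(compatibility_zip)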
+out_dir := $(HOST_OUT)/$(test_suite_name)/android-$(test_suite_name)
+test_artifacts := $(COMPATIBILITY.$(test_suite_name).FILES)
+test_tools := $(HOST_OUT_JAVA_LIBRARIES)/hosttestlib.jar \
+  $(HOST_OUT_JAVA_LIBRARIES)/tradefed-prebuilt.jar \
+  $(HOST_OUT_JAVA_LIBRARIES)/compatibility-host-util.jar \
+  $(HOST_OUT_JAVA_LIBRARIES)/$(test_suite_tradefed).jar \
+  $(HOST_OUT_EXECUTABLES)/$(test_suite_tradefed) \
+  $(test_suite_readme)
+
+compatibility_zip := $(out_dir).zip
+$(compatibility_zip): PRIVATE_NAME := android-$(test_suite_name)
+$(compatibility_zip): PRIVATE_OUT_DIR := $(out_dir)
+$(compatibility_zip): PRIVATE_TOOLS := $(test_tools)
+$(compatibility_zip): PRIVATE_SUITE_NAME := $(test_suite_name)
+$(compatibility_zip): PRIVATE_DYNAMIC_CONFIG := $(test_suite_dynamic_config)
+$(compatibility_zip): $(test_artifacts) $(test_tools) $(test_suite_dynamic_config) | $(ADB) $(ACP)
+# Make dir structure
+	$(hide) mkdir -p $(PRIVATE_OUT_DIR)/tools $(PRIVATE_OUT_DIR)/testcases
+# Copy tools
+	$(hide) $(ACP) -fp $(PRIVATE_TOOLS) $(PRIVATE_OUT_DIR)/tools
+	$(if $(PRIVATE_DYNAMIC_CONFIG),$(hide) $(ACP) -fp $(PRIVATE_DYNAMIC_CONFIG) $(PRIVATE_OUT_DIR)/testcases/$(PRIVATE_SUITE_NAME).dynamic)
+	$(hide) cd $(dir $@) && zip -rq $(notdir $@) $(PRIVATE_NAME)
+
+# Reset all input variables
+test_suite_name :=
+test_suite_tradefed :=
+test_suite_dynamic_config :=
+test_suite_readme :=
diff --git a/core/tasks/tools/package-modules.mk b/core/tasks/tools/package-modules.mk
index a70e644..24a7608 100644
--- a/core/tasks/tools/package-modules.mk
+++ b/core/tasks/tools/package-modules.mk
@@ -59,4 +59,4 @@
 	$(call copy-tests-in-batch,$(wordlist 1201,9999,$(PRIVATE_COPY_PAIRS)))
 	$(hide) $(foreach f, $(PRIVATE_PICKUP_FILES),\
 	  cp -RfL $(f) $(dir $@);)
-	$(hide) cd $(dir $@) && zip -rq $(notdir $@) *
+	$(hide) cd $(dir $@) && zip -rqX $(notdir $@) *
diff --git a/tools/check_prereq/Android.mk b/core/tasks/vts.mk
similarity index 62%
copy from tools/check_prereq/Android.mk
copy to core/tasks/vts.mk
index 4329aff..507f22e 100644
--- a/tools/check_prereq/Android.mk
+++ b/core/tasks/vts.mk
@@ -1,4 +1,4 @@
-# Copyright (C) 2009 The Android Open Source Project
+# Copyright (C) 2016 The Android Open Source Project
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
@@ -12,14 +12,12 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-LOCAL_PATH := $(call my-dir)
-include $(CLEAR_VARS)
+test_suite_name := vts
+test_suite_tradefed := vts-tradefed
+test_suite_readme := test/vts/README.md
 
-LOCAL_SRC_FILES := check_prereq.c
-LOCAL_MODULE := check_prereq
-LOCAL_FORCE_STATIC_EXECUTABLE := true
-LOCAL_MODULE_TAGS := eng
-LOCAL_C_INCLUDES +=
-LOCAL_STATIC_LIBRARIES += libcutils libc
+include $(BUILD_SYSTEM)/tasks/tools/compatibility.mk
 
-include $(BUILD_EXECUTABLE)
+.PHONY: vts
+vts: $(compatibility_zip)
+$(call dist-for-goals, vts, $(compatibility_zip))
diff --git a/core/version_defaults.mk b/core/version_defaults.mk
index a67a82e..363a0d4 100644
--- a/core/version_defaults.mk
+++ b/core/version_defaults.mk
@@ -24,6 +24,7 @@
 #     DEFAULT_APP_TARGET_SDK
 #     BUILD_ID
 #     BUILD_NUMBER
+#     BUILD_DATETIME
 #     SECURITY_PATCH
 #
 
@@ -42,7 +43,7 @@
   # which is the version that we reveal to the end user.
   # Update this value when the platform version changes (rather
   # than overriding it somewhere else).  Can be an arbitrary string.
-  PLATFORM_VERSION := 6.0.1
+  PLATFORM_VERSION := N
 endif
 
 ifeq "" "$(PLATFORM_SDK_VERSION)"
@@ -57,10 +58,19 @@
   PLATFORM_SDK_VERSION := 23
 endif
 
+ifeq "" "$(PLATFORM_JACK_MIN_SDK_VERSION)"
+  # This is the definition of the min SDK version given to Jack for the current
+  # platform. For released versions it should be the same as
+  # PLATFORM_SDK_VERSION. During development, this number may be incremented
+  # before PLATFORM_SDK_VERSION if the platform starts to add new Java
+  # language support.
+  PLATFORM_JACK_MIN_SDK_VERSION := 24
+endif
+
 ifeq "" "$(PLATFORM_VERSION_CODENAME)"
   # This is the current development code-name, if the build is not a final
   # release build.  If this is a final release build, it is simply "REL".
-  PLATFORM_VERSION_CODENAME := REL
+  PLATFORM_VERSION_CODENAME := N
 
   # This is all of the development codenames that are active.  Should be either
   # the same as PLATFORM_VERSION_CODENAME or a comma-separated list of additional
@@ -81,7 +91,7 @@
     # assuming the device can only support APIs as of the previous official
     # public release.
     # This value will always be 0 for release builds.
-    PLATFORM_PREVIEW_SDK_VERSION := 0
+    PLATFORM_PREVIEW_SDK_VERSION := 4
   endif
 endif
 
@@ -100,10 +110,10 @@
 
 ifeq "" "$(PLATFORM_SECURITY_PATCH)"
   # Used to indicate the security patch that has been applied to the device.
-  # Can be an arbitrary string, but must be a single word.
+  # Must be of the form "YYYY-MM-DD" on production devices.
   #
   # If there is no $PLATFORM_SECURITY_PATCH set, keep it empty.
-  PLATFORM_SECURITY_PATCH := 2015-12-01
+  PLATFORM_SECURITY_PATCH := 2016-06-01
 endif
 
 ifeq "" "$(PLATFORM_BASE_OS)"
@@ -123,6 +133,18 @@
   BUILD_ID := UNKNOWN
 endif
 
+ifeq "" "$(BUILD_DATETIME)"
+  # Used to reproduce builds by setting the same time. Must be the number
+  # of seconds since the Epoch.
+  BUILD_DATETIME := $(shell date +%s)
+endif
+
+ifneq (,$(findstring Darwin,$(shell uname -sm)))
+DATE := date -r $(BUILD_DATETIME)
+else
+DATE := date -d @$(BUILD_DATETIME)
+endif
+
 ifeq "" "$(BUILD_NUMBER)"
   # BUILD_NUMBER should be set to the source control value that
   # represents the current state of the source code.  E.g., a
@@ -133,5 +155,5 @@
   # If no BUILD_NUMBER is set, create a useful "I am an engineering build
   # from this date/time" value.  Make it start with a non-digit so that
   # anyone trying to parse it as an integer will probably get "0".
-  BUILD_NUMBER := eng.$(USER).$(shell date +%Y%m%d.%H%M%S)
+  BUILD_NUMBER := eng.$(shell echo $${USER:0:6}).$(shell $(DATE) +%Y%m%d.%H%M%S)
 endif
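
Because BUILD_DATETIME now feeds $(DATE) and, through it, the eng BUILD_NUMBER stamp, pinning it makes repeated builds of the same source stamp identically. A minimal sketch, assuming a git checkout and that BUILD_DATETIME is taken from the environment:

    # Pin the timestamp to the last commit instead of "now".
    export BUILD_DATETIME=$(git log -1 --pretty=%ct)

    # Mirror of the Darwin/Linux branch above:
    case "$(uname -s)" in
        Darwin) date -r "$BUILD_DATETIME" +%Y%m%d.%H%M%S ;;
        *)      date -d "@$BUILD_DATETIME" +%Y%m%d.%H%M%S ;;
    esac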
diff --git a/envsetup.sh b/envsetup.sh
index 6ad3a9e..35df2d5 100644
--- a/envsetup.sh
+++ b/envsetup.sh
@@ -1,25 +1,27 @@
 function hmm() {
 cat <<EOF
 Invoke ". build/envsetup.sh" from your shell to add the following functions to your environment:
-- lunch:   lunch <product_name>-<build_variant>
-- tapas:   tapas [<App1> <App2> ...] [arm|x86|mips|armv5|arm64|x86_64|mips64] [eng|userdebug|user]
-- croot:   Changes directory to the top of the tree.
-- m:       Makes from the top of the tree.
-- mm:      Builds all of the modules in the current directory, but not their dependencies.
-- mmm:     Builds all of the modules in the supplied directories, but not their dependencies.
-           To limit the modules being built use the syntax: mmm dir/:target1,target2.
-- mma:     Builds all of the modules in the current directory, and their dependencies.
-- mmma:    Builds all of the modules in the supplied directories, and their dependencies.
-- cgrep:   Greps on all local C/C++ files.
-- ggrep:   Greps on all local Gradle files.
-- jgrep:   Greps on all local Java files.
-- resgrep: Greps on all local res/*.xml files.
-- mangrep: Greps on all local AndroidManifest.xml files.
-- sepgrep: Greps on all local sepolicy files.
-- sgrep:   Greps on all local source files.
-- godir:   Go to the directory containing a file.
+- lunch:     lunch <product_name>-<build_variant>
+- tapas:     tapas [<App1> <App2> ...] [arm|x86|mips|armv5|arm64|x86_64|mips64] [eng|userdebug|user]
+- croot:     Changes directory to the top of the tree.
+- m:         Makes from the top of the tree.
+- mm:        Builds all of the modules in the current directory, but not their dependencies.
+- mmm:       Builds all of the modules in the supplied directories, but not their dependencies.
+             To limit the modules being built, use the syntax: mmm dir/:target1,target2.
+- mma:       Builds all of the modules in the current directory, and their dependencies.
+- mmma:      Builds all of the modules in the supplied directories, and their dependencies.
+- provision: Flash device with all required partitions. Options will be passed on to fastboot.
+- cgrep:     Greps on all local C/C++ files.
+- ggrep:     Greps on all local Gradle files.
+- jgrep:     Greps on all local Java files.
+- resgrep:   Greps on all local res/*.xml files.
+- mangrep:   Greps on all local AndroidManifest.xml files.
+- mgrep:     Greps on all local Makefiles.
+- sepgrep:   Greps on all local sepolicy files.
+- sgrep:     Greps on all local source files.
+- godir:     Go to the directory containing a file.
 
-Environemnt options:
+Environment options:
 - SANITIZE_HOST: Set to 'true' to use ASAN for all host modules. Note that
                  ASAN_OPTIONS=detect_leaks=0 will be set by default until the
                  build is leak-check clean.
@@ -29,15 +31,68 @@
     T=$(gettop)
     local A
     A=""
-    for i in `cat $T/build/envsetup.sh | sed -n "/^[ \t]*function /s/function \([a-z_]*\).*/\1/p" | sort | uniq`; do
+    for i in `cat $T/build/envsetup.sh | sed -n "/^[[:blank:]]*function /s/function \([a-z_]*\).*/\1/p" | sort | uniq`; do
       A="$A $i"
     done
     echo $A
 }
 
+# Get all the build variables needed by this script in a single call to the build system.
+function build_build_var_cache()
+{
+    T=$(gettop)
+    # Grep out the variable names from the script.
+    cached_vars=`cat $T/build/envsetup.sh | tr '()' '  ' | awk '{for(i=1;i<=NF;i++) if($i~/get_build_var/) print $(i+1)}' | sort -u | tr '\n' ' '`
+    cached_abs_vars=`cat $T/build/envsetup.sh | tr '()' '  ' | awk '{for(i=1;i<=NF;i++) if($i~/get_abs_build_var/) print $(i+1)}' | sort -u | tr '\n' ' '`
+    # Call the build system to dump the "<val>=<value>" pairs as a shell script.
+    build_dicts_script=`\cd $T; CALLED_FROM_SETUP=true BUILD_SYSTEM=build/core \
+                        command make --no-print-directory -f build/core/config.mk \
+                        dump-many-vars \
+                        DUMP_MANY_VARS="$cached_vars" \
+                        DUMP_MANY_ABS_VARS="$cached_abs_vars" \
+                        DUMP_VAR_PREFIX="var_cache_" \
+                        DUMP_ABS_VAR_PREFIX="abs_var_cache_"`
+    local ret=$?
+    if [ $ret -ne 0 ]
+    then
+        unset build_dicts_script
+        return $ret
+    fi
+    # Execute the script to store the "<val>=<value>" pairs as shell variables.
+    eval "$build_dicts_script"
+    ret=$?
+    unset build_dicts_script
+    if [ $ret -ne 0 ]
+    then
+        return $ret
+    fi
+    BUILD_VAR_CACHE_READY="true"
+}
+
+# Delete the build var cache, so that we can still call into the build system
+# to get build variables not listed in this script.
+function destroy_build_var_cache()
+{
+    unset BUILD_VAR_CACHE_READY
+    for v in $cached_vars; do
+      unset var_cache_$v
+    done
+    unset cached_vars
+    for v in $cached_abs_vars; do
+      unset abs_var_cache_$v
+    done
+    unset cached_abs_vars
+}
+
 # Get the value of a build variable as an absolute path.
 function get_abs_build_var()
 {
+    if [ "$BUILD_VAR_CACHE_READY" = "true" ]
+    then
+        eval echo \"\${abs_var_cache_$1}\"
+        return
+    fi
+
     T=$(gettop)
     if [ ! "$T" ]; then
         echo "Couldn't locate the top of the tree.  Try setting TOP." >&2
@@ -50,6 +105,12 @@
 # Get the exact value of a build variable.
 function get_build_var()
 {
+    if [ "$BUILD_VAR_CACHE_READY" = "true" ]
+    then
+        eval echo \"\${var_cache_$1}\"
+        return
+    fi
+
     T=$(gettop)
     if [ ! "$T" ]; then
         echo "Couldn't locate the top of the tree.  Try setting TOP." >&2
@@ -160,23 +221,8 @@
         export ANDROID_TOOLCHAIN_2ND_ARCH=$gccprebuiltdir/$toolchaindir2
     fi
 
-    unset ANDROID_KERNEL_TOOLCHAIN_PATH
-    case $ARCH in
-        arm)
-            # Legacy toolchain configuration used for ARM kernel compilation
-            toolchaindir=arm/arm-eabi-$targetgccversion/bin
-            if [ -d "$gccprebuiltdir/$toolchaindir" ]; then
-                 export ARM_EABI_TOOLCHAIN="$gccprebuiltdir/$toolchaindir"
-                 ANDROID_KERNEL_TOOLCHAIN_PATH="$ARM_EABI_TOOLCHAIN":
-            fi
-            ;;
-        *)
-            # No need to set ARM_EABI_TOOLCHAIN for other ARCHs
-            ;;
-    esac
-
     export ANDROID_DEV_SCRIPTS=$T/development/scripts:$T/prebuilts/devtools/tools:$T/external/selinux/prebuilts/bin
-    export ANDROID_BUILD_PATHS=$(get_build_var ANDROID_BUILD_PATHS):$ANDROID_TOOLCHAIN:$ANDROID_TOOLCHAIN_2ND_ARCH:$ANDROID_KERNEL_TOOLCHAIN_PATH$ANDROID_DEV_SCRIPTS:
+    export ANDROID_BUILD_PATHS=$(get_build_var ANDROID_BUILD_PATHS):$ANDROID_TOOLCHAIN:$ANDROID_TOOLCHAIN_2ND_ARCH:$ANDROID_DEV_SCRIPTS:
 
     # If prebuilts/android-emulator/<system>/ exists, prepend it to our PATH
     # to ensure that the corresponding 'emulator' binaries are used.
@@ -197,6 +243,7 @@
     fi
 
     export PATH=$ANDROID_BUILD_PATHS$PATH
+    export PYTHONPATH=$T/development/python-packages:$PYTHONPATH
 
     unset ANDROID_JAVA_TOOLCHAIN
     unset ANDROID_PRE_BUILD_PATHS
@@ -333,7 +380,9 @@
         fi
     done
 
+    build_build_var_cache
     set_stuff_for_environment
+    destroy_build_var_cache
 }
 
 #
@@ -347,9 +396,10 @@
     if [ "x$TARGET_PRODUCT" != x ] ; then
         default_value=$TARGET_PRODUCT
     else
-        default_value=full
+        default_value=aosp_arm
     fi
 
+    export TARGET_BUILD_APPS=
     export TARGET_PRODUCT=
     local ANSWER
     while [ -z "$TARGET_PRODUCT" ]
@@ -377,7 +427,9 @@
         fi
     done
 
+    build_build_var_cache
     set_stuff_for_environment
+    destroy_build_var_cache
 }
 
 function choosevariant()
@@ -440,8 +492,10 @@
     choosevariant $3
 
     echo
+    build_build_var_cache
     set_stuff_for_environment
     printconfig
+    destroy_build_var_cache
 }
 
 # Clear this variable.  It will be built up again when the vendorsetup.sh
@@ -523,16 +577,6 @@
 
     export TARGET_BUILD_APPS=
 
-    local product=$(echo -n $selection | sed -e "s/-.*$//")
-    check_product $product
-    if [ $? -ne 0 ]
-    then
-        echo
-        echo "** Don't have a product spec for: '$product'"
-        echo "** Do you have the right repo manifest?"
-        product=
-    fi
-
     local variant=$(echo -n $selection | sed -e "s/^[^\-]*-//")
     check_variant $variant
     if [ $? -ne 0 ]
@@ -543,6 +587,18 @@
         variant=
     fi
 
+    local product=$(echo -n $selection | sed -e "s/-.*$//")
+    TARGET_PRODUCT=$product \
+    TARGET_BUILD_VARIANT=$variant \
+    build_build_var_cache
+    if [ $? -ne 0 ]
+    then
+        echo
+        echo "** Don't have a product spec for: '$product'"
+        echo "** Do you have the right repo manifest?"
+        product=
+    fi
+
     if [ -z "$product" -o -z "$variant" ]
     then
         echo
@@ -557,6 +613,7 @@
 
     set_stuff_for_environment
     printconfig
+    destroy_build_var_cache
 }
 
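
For reference, this is the path a normal invocation takes; the product/variant combination below is only an example:

    . build/envsetup.sh
    lunch aosp_arm-eng      # builds the var cache, validates the combo, prints the config
    # or pick interactively from the menu:
    lunch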
 # Tab completion for lunch.
@@ -594,10 +651,10 @@
         return
     fi
 
-    local product=full
+    local product=aosp_arm
     case $arch in
-      x86)    product=full_x86;;
-      mips)   product=full_mips;;
+      x86)    product=aosp_x86;;
+      mips)   product=aosp_mips;;
       armv5)  product=generic_armv5;;
       arm64)  product=aosp_arm64;;
       x86_64) product=aosp_x86_64;;
@@ -619,8 +676,10 @@
     export TARGET_BUILD_TYPE=release
     export TARGET_BUILD_APPS=$apps
 
+    build_build_var_cache
     set_stuff_for_environment
     printconfig
+    destroy_build_var_cache
 }
 
 function gettop
@@ -764,9 +823,14 @@
                 MAKEFILE="$MAKEFILE $MFILE"
             else
                 case $DIR in
-                  showcommands | snod | dist | incrementaljavac | *=*) ARGS="$ARGS $DIR";;
+                  showcommands | snod | dist | *=*) ARGS="$ARGS $DIR";;
                   GET-INSTALL-PATH) GET_INSTALL_PATH=$DIR;;
-                  *) echo "No Android.mk in $DIR."; return 1;;
+                  *) if [ -d $DIR ]; then
+                         echo "No Android.mk in $DIR.";
+                     else
+                         echo "Couldn't locate the directory $DIR";
+                     fi
+                     return 1;;
                 esac
             fi
         done
@@ -793,7 +857,10 @@
       return 1
     fi
     local MY_PWD=`PWD= /bin/pwd|sed 's:'$T'/::'`
-    $DRV make -C $T -f build/core/main.mk $@ all_modules BUILD_MODULES_IN_PATHS="$MY_PWD"
+    local MODULES_IN_PATHS=MODULES-IN-$MY_PWD
+    # Convert "/" to "-".
+    MODULES_IN_PATHS=${MODULES_IN_PATHS//\//-}
+    $DRV make -C $T -f build/core/main.mk $@ $MODULES_IN_PATHS
   fi
 }
 
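
The substitution above turns a source directory into a per-directory build goal; a quick illustration with a made-up path:

    MY_PWD=frameworks/base/core
    MODULES_IN_PATHS=MODULES-IN-$MY_PWD
    echo "${MODULES_IN_PATHS//\//-}"    # MODULES-IN-frameworks-base-core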
@@ -811,23 +878,27 @@
       MY_PWD=`echo $MY_PWD|sed 's:'$T'/::'`
     fi
     local DIR=
-    local MODULE_PATHS=
+    local MODULES_IN_PATHS=
     local ARGS=
     for DIR in $DIRS ; do
       if [ -d $DIR ]; then
-        if [ "$MY_PWD" = "" ]; then
-          MODULE_PATHS="$MODULE_PATHS $DIR"
-        else
-          MODULE_PATHS="$MODULE_PATHS $MY_PWD/$DIR"
+        # Remove the leading ./ and trailing / if any exists.
+        DIR=${DIR#./}
+        DIR=${DIR%/}
+        if [ "$MY_PWD" != "" ]; then
+          DIR=$MY_PWD/$DIR
         fi
+        MODULES_IN_PATHS="$MODULES_IN_PATHS MODULES-IN-$DIR"
       else
         case $DIR in
-          showcommands | snod | dist | incrementaljavac | *=*) ARGS="$ARGS $DIR";;
+          showcommands | snod | dist | *=*) ARGS="$ARGS $DIR";;
           *) echo "Couldn't find directory $DIR"; return 1;;
         esac
       fi
     done
-    $DRV make -C $T -f build/core/main.mk $DASH_ARGS $ARGS all_modules BUILD_MODULES_IN_PATHS="$MODULE_PATHS"
+    # Convert "/" to "-".
+    MODULES_IN_PATHS=${MODULES_IN_PATHS//\//-}
+    $DRV make -C $T -f build/core/main.mk $DASH_ARGS $ARGS $MODULES_IN_PATHS
   else
     echo "Couldn't locate the top of the tree.  Try setting TOP."
     return 1
@@ -871,18 +942,18 @@
         append='$'
         shift
     elif [ "$1" = "--help" -o "$1" = "-h" ]; then
-		echo "usage: qpid [[--exact] <process name|pid>"
-		return 255
-	fi
+        echo "usage: qpid [[--exact] <process name|pid>"
+        return 255
+    fi
 
     local EXE="$1"
     if [ "$EXE" ] ; then
-		qpid | \grep "$prepend$EXE$append"
-	else
-		adb shell ps \
-			| tr -d '\r' \
-			| sed -e 1d -e 's/^[^ ]* *\([0-9]*\).* \([^ ]*\)$/\1 \2/'
-	fi
+        qpid | \grep "$prepend$EXE$append"
+    else
+        adb shell ps \
+            | tr -d '\r' \
+            | sed -e 1d -e 's/^[^ ]* *\([0-9]*\).* \([^ ]*\)$/\1 \2/'
+    fi
 }
 
 function pid()
@@ -903,7 +974,7 @@
         echo "$PID"
     else
         echo "usage: pid [--exact] <process name>"
-		return 255
+        return 255
     fi
 }
 
@@ -916,25 +987,25 @@
 
 function coredump_setup()
 {
-	echo "Getting root...";
-	adb root;
-	adb wait-for-device;
+    echo "Getting root...";
+    adb root;
+    adb wait-for-device;
 
-	echo "Remounting root parition read-write...";
-	adb shell mount -w -o remount -t rootfs rootfs;
-	sleep 1;
-	adb wait-for-device;
-	adb shell mkdir -p /cores;
-	adb shell mount -t tmpfs tmpfs /cores;
-	adb shell chmod 0777 /cores;
+    echo "Remounting root partition read-write...";
+    adb shell mount -w -o remount -t rootfs rootfs;
+    sleep 1;
+    adb wait-for-device;
+    adb shell mkdir -p /cores;
+    adb shell mount -t tmpfs tmpfs /cores;
+    adb shell chmod 0777 /cores;
 
-	echo "Granting SELinux permission to dump in /cores...";
-	adb shell restorecon -R /cores;
+    echo "Granting SELinux permission to dump in /cores...";
+    adb shell restorecon -R /cores;
 
-	echo "Set core pattern.";
-	adb shell 'echo /cores/core.%p > /proc/sys/kernel/core_pattern';
+    echo "Set core pattern.";
+    adb shell 'echo /cores/core.%p > /proc/sys/kernel/core_pattern';
 
-	echo "Done."
+    echo "Done."
 }
 
 # coredump_enable - enable core dumps for the specified process
@@ -945,13 +1016,13 @@
 
 function coredump_enable()
 {
-	local PID=$1;
-	if [ -z "$PID" ]; then
-		printf "Expecting a PID!\n";
-		return;
-	fi;
-	echo "Setting core limit for $PID to infinite...";
-	adb shell prlimit $PID 4 -1 -1
+    local PID=$1;
+    if [ -z "$PID" ]; then
+        printf "Expecting a PID!\n";
+        return;
+    fi;
+    echo "Setting core limit for $PID to infinite...";
+    adb shell prlimit $PID 4 -1 -1
 }
 
 # core - send SIGSEGV and pull the core for process
@@ -962,28 +1033,28 @@
 
 function core()
 {
-	local PID=$1;
+    local PID=$1;
 
-	if [ -z "$PID" ]; then
-		printf "Expecting a PID!\n";
-		return;
-	fi;
+    if [ -z "$PID" ]; then
+        printf "Expecting a PID!\n";
+        return;
+    fi;
 
-	local CORENAME=core.$PID;
-	local COREPATH=/cores/$CORENAME;
-	local SIG=SEGV;
+    local CORENAME=core.$PID;
+    local COREPATH=/cores/$CORENAME;
+    local SIG=SEGV;
 
-	coredump_enable $1;
+    coredump_enable $1;
 
-	local done=0;
-	while [ $(adb shell "[ -d /proc/$PID ] && echo -n yes") ]; do
-		printf "\tSending SIG%s to %d...\n" $SIG $PID;
-		adb shell kill -$SIG $PID;
-		sleep 1;
-	done;
+    local done=0;
+    while [ $(adb shell "[ -d /proc/$PID ] && echo -n yes") ]; do
+        printf "\tSending SIG%s to %d...\n" $SIG $PID;
+        adb shell kill -$SIG $PID;
+        sleep 1;
+    done;
 
-	adb shell "while [ ! -f $COREPATH ] ; do echo waiting for $COREPATH to be generated; sleep 1; done"
-	echo "Done: core is under $COREPATH on device.";
+    adb shell "while [ ! -f $COREPATH ] ; do echo waiting for $COREPATH to be generated; sleep 1; done"
+    echo "Done: core is under $COREPATH on device.";
 }
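
Taken together, the helpers above form a short on-device core dump workflow; a hedged example where the PID and pull destination are placeholders:

    coredump_setup                     # root, tmpfs on /cores, core_pattern set
    coredump_enable 1234               # lift the core size limit for the target PID
    core 1234                          # send SIGSEGV until the process exits, wait for the core
    adb pull /cores/core.1234 /tmp/core.1234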
 
 # systemstack - dump the current stack trace of all threads in the system process
@@ -1061,14 +1132,16 @@
     Darwin)
         function sgrep()
         {
-            find -E . -name .repo -prune -o -name .git -prune -o  -type f -iregex '.*\.(c|h|cc|cpp|S|java|xml|sh|mk|aidl)' -print0 | xargs -0 grep --color -n "$@"
+            find -E . -name .repo -prune -o -name .git -prune -o  -type f -iregex '.*\.(c|h|cc|cpp|S|java|xml|sh|mk|aidl|vts)' \
+                -exec grep --color -n "$@" {} +
         }
 
         ;;
     *)
         function sgrep()
         {
-            find . -name .repo -prune -o -name .git -prune -o  -type f -iregex '.*\.\(c\|h\|cc\|cpp\|S\|java\|xml\|sh\|mk\|aidl\)' -print0 | xargs -0 grep --color -n "$@"
+            find . -name .repo -prune -o -name .git -prune -o  -type f -iregex '.*\.\(c\|h\|cc\|cpp\|S\|java\|xml\|sh\|mk\|aidl\|vts\)' \
+                -exec grep --color -n "$@" {} +
         }
         ;;
 esac
@@ -1080,61 +1153,73 @@
 
 function ggrep()
 {
-    find . -name .repo -prune -o -name .git -prune -o -name out -prune -o -type f -name "*\.gradle" -print0 | xargs -0 grep --color -n "$@"
+    find . -name .repo -prune -o -name .git -prune -o -name out -prune -o -type f -name "*\.gradle" \
+        -exec grep --color -n "$@" {} +
 }
 
 function jgrep()
 {
-    find . -name .repo -prune -o -name .git -prune -o -name out -prune -o -type f -name "*\.java" -print0 | xargs -0 grep --color -n "$@"
+    find . -name .repo -prune -o -name .git -prune -o -name out -prune -o -type f -name "*\.java" \
+        -exec grep --color -n "$@" {} +
 }
 
 function cgrep()
 {
-    find . -name .repo -prune -o -name .git -prune -o -name out -prune -o -type f \( -name '*.c' -o -name '*.cc' -o -name '*.cpp' -o -name '*.h' -o -name '*.hpp' \) -print0 | xargs -0 grep --color -n "$@"
+    find . -name .repo -prune -o -name .git -prune -o -name out -prune -o -type f \( -name '*.c' -o -name '*.cc' -o -name '*.cpp' -o -name '*.h' -o -name '*.hpp' \) \
+        -exec grep --color -n "$@" {} +
 }
 
 function resgrep()
 {
-    for dir in `find . -name .repo -prune -o -name .git -prune -o -name out -prune -o -name res -type d`; do find $dir -type f -name '*\.xml' -print0 | xargs -0 grep --color -n "$@"; done;
+    for dir in `find . -name .repo -prune -o -name .git -prune -o -name out -prune -o -name res -type d`; do
+        find $dir -type f -name '*\.xml' -exec grep --color -n "$@" {} +
+    done
 }
 
 function mangrep()
 {
-    find . -name .repo -prune -o -name .git -prune -o -path ./out -prune -o -type f -name 'AndroidManifest.xml' -print0 | xargs -0 grep --color -n "$@"
+    find . -name .repo -prune -o -name .git -prune -o -path ./out -prune -o -type f -name 'AndroidManifest.xml' \
+        -exec grep --color -n "$@" {} +
 }
 
 function sepgrep()
 {
-    find . -name .repo -prune -o -name .git -prune -o -path ./out -prune -o -name sepolicy -type d -print0 | xargs -0 grep --color -n -r --exclude-dir=\.git "$@"
+    find . -name .repo -prune -o -name .git -prune -o -path ./out -prune -o -name sepolicy -type d \
+        -exec grep --color -n -r --exclude-dir=\.git "$@" {} +
 }
 
 function rcgrep()
 {
-    find . -name .repo -prune -o -name .git -prune -o -name out -prune -o -type f -name "*\.rc*" -print0 | xargs -0 grep --color -n "$@"
+    find . -name .repo -prune -o -name .git -prune -o -name out -prune -o -type f -name "*\.rc*" \
+        -exec grep --color -n "$@" {} +
 }
 
 case `uname -s` in
     Darwin)
         function mgrep()
         {
-            find -E . -name .repo -prune -o -name .git -prune -o -path ./out -prune -o -type f -iregex '.*/(Makefile|Makefile\..*|.*\.make|.*\.mak|.*\.mk)' -print0 | xargs -0 grep --color -n "$@"
+            find -E . -name .repo -prune -o -name .git -prune -o -path ./out -prune -o -type f -iregex '.*/(Makefile|Makefile\..*|.*\.make|.*\.mak|.*\.mk)' \
+                -exec grep --color -n "$@" {} +
         }
 
         function treegrep()
         {
-            find -E . -name .repo -prune -o -name .git -prune -o -type f -iregex '.*\.(c|h|cpp|S|java|xml)' -print0 | xargs -0 grep --color -n -i "$@"
+            find -E . -name .repo -prune -o -name .git -prune -o -type f -iregex '.*\.(c|h|cpp|S|java|xml)' \
+                -exec grep --color -n -i "$@" {} +
         }
 
         ;;
     *)
         function mgrep()
         {
-            find . -name .repo -prune -o -name .git -prune -o -path ./out -prune -o -regextype posix-egrep -iregex '(.*\/Makefile|.*\/Makefile\..*|.*\.make|.*\.mak|.*\.mk)' -type f -print0 | xargs -0 grep --color -n "$@"
+            find . -name .repo -prune -o -name .git -prune -o -path ./out -prune -o -regextype posix-egrep -iregex '(.*\/Makefile|.*\/Makefile\..*|.*\.make|.*\.mak|.*\.mk)' -type f \
+                -exec grep --color -n "$@" {} +
         }
 
         function treegrep()
         {
-            find . -name .repo -prune -o -name .git -prune -o -regextype posix-egrep -iregex '.*\.(c|h|cpp|S|java|xml)' -type f -print0 | xargs -0 grep --color -n -i "$@"
+            find . -name .repo -prune -o -name .git -prune -o -regextype posix-egrep -iregex '.*\.(c|h|cpp|S|java|xml)' -type f \
+                -exec grep --color -n -i "$@" {} +
         }
 
         ;;
@@ -1399,11 +1484,7 @@
     \cd $T/$pathname
 }
 
-# Force JAVA_HOME to point to java 1.7 if it isn't already set.
-#
-# Note that the MacOS path for java 1.7 includes a minor revision number (sigh).
-# For some reason, installing the JDK doesn't make it show up in the
-# JavaVM.framework/Versions/1.7/ folder.
+# Force JAVA_HOME to point to java 1.7/1.8 if it isn't already set.
 function set_java_home() {
     # Clear the existing JAVA_HOME value if we set it ourselves, so that
     # we can reset it later, depending on the version of java the build
@@ -1416,14 +1497,26 @@
     fi
 
     if [ ! "$JAVA_HOME" ]; then
-      case `uname -s` in
-          Darwin)
-              export JAVA_HOME=$(/usr/libexec/java_home -v 1.7)
-              ;;
-          *)
-              export JAVA_HOME=/usr/lib/jvm/java-7-openjdk-amd64
-              ;;
-      esac
+      if [ -n "$LEGACY_USE_JAVA7" ]; then
+        echo Warning: Support for JDK 7 will be dropped. Switch to JDK 8.
+        case `uname -s` in
+            Darwin)
+                export JAVA_HOME=$(/usr/libexec/java_home -v 1.7)
+                ;;
+            *)
+                export JAVA_HOME=/usr/lib/jvm/java-7-openjdk-amd64
+                ;;
+        esac
+      else
+        case `uname -s` in
+            Darwin)
+                export JAVA_HOME=$(/usr/libexec/java_home -v 1.8)
+                ;;
+            *)
+                export JAVA_HOME=/usr/lib/jvm/java-8-openjdk-amd64
+                ;;
+        esac
+      fi
 
       # Keep track of the fact that we set JAVA_HOME ourselves, so that
       # we can change it on the next envsetup.sh, if required.
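
JDK 8 is now the default; a hedged example of temporarily keeping the old behaviour during the transition (the path in the last comment is the Linux default from the case above):

    export LEGACY_USE_JAVA7=1           # prints the deprecation warning above
    . build/envsetup.sh
    lunch aosp_arm-eng                  # set_java_home runs as part of the environment setup
    echo "$JAVA_HOME"                   # e.g. /usr/lib/jvm/java-7-openjdk-amd64 on Linux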
@@ -1437,9 +1530,9 @@
     local retval=$?
     if [ $retval -ne 0 ]
     then
-        echo -e "\e[0;31mFAILURE\e[00m"
+        echo $'\E'"[0;31mFAILURE\e[00m"
     else
-        echo -e "\e[0;32mSUCCESS\e[00m"
+        echo $'\E'"[0;32mSUCCESS\e[00m"
     fi
     return $retval
 }
@@ -1461,9 +1554,9 @@
     local secs=$(($tdiff % 60))
     local ncolors=$(tput colors 2>/dev/null)
     if [ -n "$ncolors" ] && [ $ncolors -ge 8 ]; then
-        color_failed="\e[0;31m"
-        color_success="\e[0;32m"
-        color_reset="\e[00m"
+        color_failed=$'\E'"[0;31m"
+        color_success=$'\E'"[0;32m"
+        color_reset=$'\E'"[00m"
     else
         color_failed=""
         color_success=""
@@ -1471,9 +1564,9 @@
     fi
     echo
     if [ $ret -eq 0 ] ; then
-        echo -n -e "${color_success}#### make completed successfully "
+        echo -n "${color_success}#### make completed successfully "
     else
-        echo -n -e "${color_failed}#### make failed to build some targets "
+        echo -n "${color_failed}#### make failed to build some targets "
     fi
     if [ $hours -gt 0 ] ; then
         printf "(%02g:%02g:%02g (hh:mm:ss))" $hours $mins $secs
@@ -1482,11 +1575,40 @@
     elif [ $secs -gt 0 ] ; then
         printf "(%s seconds)" $secs
     fi
-    echo -e " ####${color_reset}"
+    echo " ####${color_reset}"
     echo
     return $ret
 }
 
+function provision()
+{
+    if [ ! "$ANDROID_PRODUCT_OUT" ]; then
+        echo "Couldn't locate output files.  Try running 'lunch' first." >&2
+        return 1
+    fi
+    if [ ! -e "$ANDROID_PRODUCT_OUT/provision-device" ]; then
+        echo "There is no provisioning script for the device." >&2
+        return 1
+    fi
+
+    # Check if user really wants to do this.
+    if [ "$1" = "--no-confirmation" ]; then
+        shift 1
+    else
+        echo "This action will reflash your device."
+        echo ""
+        echo "ALL DATA ON THE DEVICE WILL BE IRREVOCABLY ERASED."
+        echo ""
+        echo -n "Are you sure you want to do this (yes/no)? "
+        read
+        if [[ "${REPLY}" != "yes" ]] ; then
+            echo "Not taking any action. Exiting." >&2
+            return 1
+        fi
+    fi
+    "$ANDROID_PRODUCT_OUT/provision-device" "$@"
+}
+
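+
A hedged usage example for the new helper; it only applies to products that install a provision-device script into $ANDROID_PRODUCT_OUT:

    lunch aosp_arm64-userdebug && m -j8   # build so $ANDROID_PRODUCT_OUT is populated
    provision                             # interactive: requires typing "yes" before flashing
    provision --no-confirmation           # skip the prompt, e.g. for lab automation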
 if [ "x$SHELL" != "x/bin/bash" ]; then
     case `ps -o command -p $$` in
         *bash*)
@@ -1499,7 +1621,8 @@
 
 # Execute the contents of any vendorsetup.sh files we can find.
 for f in `test -d device && find -L device -maxdepth 4 -name 'vendorsetup.sh' 2> /dev/null | sort` \
-         `test -d vendor && find -L vendor -maxdepth 4 -name 'vendorsetup.sh' 2> /dev/null | sort`
+         `test -d vendor && find -L vendor -maxdepth 4 -name 'vendorsetup.sh' 2> /dev/null | sort` \
+         `test -d product && find -L product -maxdepth 4 -name 'vendorsetup.sh' 2> /dev/null | sort`
 do
     echo "including $f"
     . $f
diff --git a/libs/host/Android.mk b/libs/host/Android.mk
index cab878b..bc25e4b 100644
--- a/libs/host/Android.mk
+++ b/libs/host/Android.mk
@@ -1,28 +1,21 @@
 LOCAL_PATH:= $(call my-dir)
 include $(CLEAR_VARS)
-LOCAL_ADDITIONAL_DEPENDENCIES := $(LOCAL_PATH)/Android.mk
 
 LOCAL_SRC_FILES:= \
     CopyFile.c
 
-ifeq ($(HOST_OS),cygwin)
-LOCAL_CFLAGS += -DWIN32_EXE
-endif
-ifeq ($(HOST_OS),darwin)
-LOCAL_CFLAGS += -DMACOSX_RSRC
-endif
-ifeq ($(HOST_OS),linux)
-endif
+LOCAL_CFLAGS := -Werror -Wall
 
 LOCAL_MODULE:= libhost
+LOCAL_MODULE_HOST_OS := darwin linux windows
 LOCAL_C_INCLUDES := $(LOCAL_PATH)/include
+LOCAL_EXPORT_C_INCLUDE_DIRS := $(LOCAL_PATH)/include
+LOCAL_CXX_STL := none
 
 # acp uses libhost, so we can't use
 # acp to install libhost.
 LOCAL_ACP_UNAVAILABLE:= true
 
-LOCAL_ADDRESS_SANITIZER := false
-
 include $(BUILD_HOST_STATIC_LIBRARY)
 
 # Include toolchain prebuilt modules if they exist.
diff --git a/libs/host/CopyFile.c b/libs/host/CopyFile.c
index 855a565..5be012c 100644
--- a/libs/host/CopyFile.c
+++ b/libs/host/CopyFile.c
@@ -24,11 +24,9 @@
 #include <errno.h>
 #include <assert.h>
 
-#ifdef HAVE_MS_C_RUNTIME
-#  define  mkdir(path,mode)   _mkdir(path)
-#endif
-
 #if defined(_WIN32)
+#include <direct.h>  /* For _mkdir() */
+#  define mkdir(path,mode)   _mkdir(path)
 #  define S_ISLNK(s) 0
 #  define lstat stat
 #  ifndef EACCESS   /* seems to be missing from the Mingw headers */
@@ -69,9 +67,9 @@
  */
 static bool isHiresMtime(const struct stat* pSrcStat)
 {
-#if defined(__CYGWIN__) || defined(__MINGW32__)
-  return 0;
-#elif defined(MACOSX_RSRC)
+#if defined(_WIN32)
+    return 0;
+#elif defined(__APPLE__)
     return pSrcStat->st_mtimespec.tv_nsec > 0;
 #else
     return pSrcStat->st_mtim.tv_nsec > 0;
@@ -85,7 +83,9 @@
  */
 static bool isSameFile(const struct stat* pSrcStat, const struct stat* pDstStat)
 {
-#ifndef HAVE_VALID_STAT_ST_INO
+#if defined(_WIN32)
+  (void)pSrcStat;
+  (void)pDstStat;
     /* with MSVCRT.DLL, stat always sets st_ino to 0, and there is no simple way to */
 	/* get the equivalent information with Win32 (Cygwin does some weird stuff in   */
 	/* its winsup/cygwin/fhandler_disk_file.cc to emulate this, too complex for us) */
@@ -103,6 +103,7 @@
 
 static void printNotNewerMsg(const char* src, const char* dst, unsigned int options)
 {
+    (void)src;
     if ((options & COPY_VERBOSE_MASK) > 1)
         printf("    '%s' is up-to-date\n", dst);
 }
@@ -183,7 +184,7 @@
             DBUG(("---   unable to set perms on '%s' to 0%o: %s\n",
                 dst, pSrcStat->st_mode & ~(S_IFMT), strerror(errno)));
         }
-#ifndef HAVE_MS_C_RUNTIME
+#ifndef _WIN32
         /*
          * Set the owner.
          */
@@ -261,7 +262,7 @@
         /* if "force" is set, try removing the destination file and retry */
         if (options & COPY_FORCE) {
             if (unlink(dst) != 0) {
-#ifdef HAVE_MS_C_RUNTIME
+#ifdef _WIN32
 				/* MSVCRT.DLL unlink will fail with EACCESS if the file is set read-only */
 				/* so try to change its mode, and unlink again                           */
 				if (errno == EACCESS) {
@@ -274,7 +275,7 @@
                 (void) close(srcFd);
                 return -1;
             }
-#ifdef HAVE_MS_C_RUNTIME
+#ifdef _WIN32
         Open_File:
 #endif			
             dstFd = open(dst, O_CREAT | O_TRUNC | O_WRONLY | O_BINARY, 0644);
@@ -294,7 +295,8 @@
     if (copyResult != 0)
         return -1;
 
-#ifdef MACOSX_RSRC
+#if defined(__APPLE__)
+    // Copy Mac OS X resource forks too.
     {
         char* srcRsrcName = NULL;
         char* dstRsrcName = NULL;
@@ -536,6 +538,7 @@
     struct stat srcStat;
     int retVal = 0;
     int statResult, statErrno;
+    (void)isCmdLine;
 
     /*
      * Stat the source file.  If it doesn't exist, fail.
@@ -546,57 +549,6 @@
         statResult = stat(src, &srcStat);
     statErrno = errno;        /* preserve across .exe attempt */
 
-#ifdef WIN32_EXE
-    /*
-     * Here's the interesting part.  Under Cygwin, if you have a file
-     * called "foo.exe", stat("foo", ...) will succeed, but open("foo", ...)
-     * will fail.  We need to figure out what its name is supposed to be
-     * so we can create the correct destination file.
-     *
-     * If we don't have the "-e" flag set, we want "acp foo bar" to fail,
-     * not automatically find "foo.exe".  That way, if we really were
-     * trying to copy "foo", it doesn't grab something we don't want.
-     */
-    if (isCmdLine && statResult == 0) {
-        int tmpFd;
-        tmpFd = open(src, O_RDONLY | O_BINARY, 0);
-        if (tmpFd < 0) {
-            statResult = -1;
-            statErrno = ENOENT;
-        } else {
-            (void) close(tmpFd);
-        }
-    }
-
-    /*
-     * If we didn't find the file, try it again with ".exe".
-     */
-    if (isCmdLine && statResult < 0 && statErrno == ENOENT && (options & COPY_TRY_EXE)) {
-        srcExe = malloc(strlen(src) + 4 +1);
-        strcpy(srcExe, src);
-        strcat(srcExe, ".exe");
-
-        if (options & COPY_NO_DEREFERENCE)
-            statResult = lstat(srcExe, &srcStat);
-        else
-            statResult = stat(srcExe, &srcStat);
-
-        if (statResult == 0 && !S_ISREG(srcStat.st_mode))
-            statResult = -1;        /* fail, use original statErrno below */
-
-        if (statResult == 0) {
-            /* found a .exe, copy that instead */
-            dstExe = malloc(strlen(dst) + 4 +1);
-            strcpy(dstExe, dst);
-            strcat(dstExe, ".exe");
-
-            src = srcExe;
-            dst = dstExe;
-        } else {
-            DBUG(("---  couldn't find '%s' either\n", srcExe));
-        }
-    }
-#endif
     if (statResult < 0) {
         if (statErrno == ENOENT)
             fprintf(stderr, "acp: file '%s' does not exist\n", src);
diff --git a/libs/host/include/host/Directories.h b/libs/host/include/host/Directories.h
deleted file mode 100644
index fccce46..0000000
--- a/libs/host/include/host/Directories.h
+++ /dev/null
@@ -1,10 +0,0 @@
-#ifndef HOST_MKDIRS_H
-#define HOST_MKDIRS_H
-
-#include <string>
-
-std::string parent_dir(const std::string& path);
-
-extern "C" int mkdirs(const char* path);
-
-#endif // HOST_MKDIRS_H
diff --git a/target/board/generic/BoardConfig.mk b/target/board/generic/BoardConfig.mk
index 4e5504e..325b0ce 100644
--- a/target/board/generic/BoardConfig.mk
+++ b/target/board/generic/BoardConfig.mk
@@ -68,7 +68,7 @@
 VSYNC_EVENT_PHASE_OFFSET_NS := 0
 
 TARGET_USERIMAGES_USE_EXT4 := true
-BOARD_SYSTEMIMAGE_PARTITION_SIZE := 1610612736
+BOARD_SYSTEMIMAGE_PARTITION_SIZE := 1879048192  # 1.75 GB
 BOARD_USERDATAIMAGE_PARTITION_SIZE := 576716800
 BOARD_CACHEIMAGE_PARTITION_SIZE := 69206016
 BOARD_CACHEIMAGE_FILE_SYSTEM_TYPE := ext4
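
For the record, the new value is 1.75 * 2^30 bytes (the comment rounds this to "1.75 GB"); a one-line check:

    echo $(( 175 * 1024 * 1024 * 1024 / 100 ))   # 1879048192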
diff --git a/target/board/generic/sepolicy/bootanim.te b/target/board/generic/sepolicy/bootanim.te
index d6506e1..b4b1eef 100644
--- a/target/board/generic/sepolicy/bootanim.te
+++ b/target/board/generic/sepolicy/bootanim.te
@@ -1,2 +1,4 @@
 allow bootanim self:process execmem;
 allow bootanim ashmem_device:chr_file execute;
+
+set_prop(bootanim, qemu_prop)
diff --git a/target/board/generic/sepolicy/domain.te b/target/board/generic/sepolicy/domain.te
index c28ca74..5d5e4ac 100644
--- a/target/board/generic/sepolicy/domain.te
+++ b/target/board/generic/sepolicy/domain.te
@@ -1,3 +1,6 @@
 # For /sys/qemu_trace files in the emulator.
+allow domain sysfs_writable:dir search;
 allow domain sysfs_writable:file rw_file_perms;
 allow domain qemu_device:chr_file rw_file_perms;
+
+get_prop(domain, qemu_prop)
diff --git a/target/board/generic/sepolicy/file_contexts b/target/board/generic/sepolicy/file_contexts
index d057dc3..e8d32f7 100644
--- a/target/board/generic/sepolicy/file_contexts
+++ b/target/board/generic/sepolicy/file_contexts
@@ -1,11 +1,19 @@
-/dev/block/mtdblock0       u:object_r:system_block_device:s0
-/dev/block/mtdblock1       u:object_r:userdata_block_device:s0
-/dev/block/mtdblock2       u:object_r:cache_block_device:s0
-/dev/goldfish_pipe	u:object_r:qemu_device:s0
-/dev/qemu_.*		u:object_r:qemu_device:s0
-/dev/socket/qemud	u:object_r:qemud_socket:s0
-/dev/ttyGF[0-9]*	u:object_r:serial_device:s0
-/system/bin/qemud	u:object_r:qemud_exec:s0
-/sys/qemu_trace(/.*)?	--	u:object_r:sysfs_writable:s0
+# goldfish
+/dev/block/mtdblock0         u:object_r:system_block_device:s0
+/dev/block/mtdblock1         u:object_r:userdata_block_device:s0
+/dev/block/mtdblock2         u:object_r:cache_block_device:s0
+
+# ranchu
+/dev/block/vda               u:object_r:system_block_device:s0
+/dev/block/vdb               u:object_r:cache_block_device:s0
+/dev/block/vdc               u:object_r:userdata_block_device:s0
+
+/dev/goldfish_pipe           u:object_r:qemu_device:s0
+/dev/qemu_.*                 u:object_r:qemu_device:s0
+/dev/socket/qemud            u:object_r:qemud_socket:s0
+/dev/ttyGF[0-9]*             u:object_r:serial_device:s0
+/dev/ttyS2                   u:object_r:console_device:s0
+/system/bin/qemud            u:object_r:qemud_exec:s0
+/sys/qemu_trace(/.*)?        u:object_r:sysfs_writable:s0
 /system/etc/init.goldfish.sh u:object_r:goldfish_setup_exec:s0
-/system/bin/qemu-props	     u:object_r:qemu_props_exec:s0
+/system/bin/qemu-props       u:object_r:qemu_props_exec:s0
diff --git a/target/board/generic/sepolicy/goldfish_setup.te b/target/board/generic/sepolicy/goldfish_setup.te
index 3fb79e7..bce196a 100644
--- a/target/board/generic/sepolicy/goldfish_setup.te
+++ b/target/board/generic/sepolicy/goldfish_setup.te
@@ -1,5 +1,5 @@
 # goldfish-setup service: runs init.goldfish.sh script
-type goldfish_setup, domain;
+type goldfish_setup, domain, domain_deprecated;
 type goldfish_setup_exec, exec_type, file_type;
 
 init_daemon_domain(goldfish_setup)
@@ -9,9 +9,12 @@
 
 # Run ifconfig, route commands to configure interfaces and routes.
 allow goldfish_setup system_file:file execute_no_trans;
+allow goldfish_setup toolbox_exec:file rx_file_perms;
 allow goldfish_setup self:capability { net_admin net_raw };
 allow goldfish_setup self:udp_socket create_socket_perms;
 
+net_domain(goldfish_setup)
+
 # Set net.eth0.dns*, debug.sf.nobootanimation
 set_prop(goldfish_setup, system_prop)
 set_prop(goldfish_setup, debug_prop)
diff --git a/target/board/generic/sepolicy/netd.te b/target/board/generic/sepolicy/netd.te
new file mode 100644
index 0000000..2b002ec
--- /dev/null
+++ b/target/board/generic/sepolicy/netd.te
@@ -0,0 +1 @@
+dontaudit netd self:capability sys_module;
diff --git a/target/board/generic/sepolicy/property.te b/target/board/generic/sepolicy/property.te
index b316d08..22d580a 100644
--- a/target/board/generic/sepolicy/property.te
+++ b/target/board/generic/sepolicy/property.te
@@ -1,2 +1,3 @@
 type qemu_prop, property_type;
 type radio_noril_prop, property_type;
+type opengles_prop, property_type;
diff --git a/target/board/generic/sepolicy/property_contexts b/target/board/generic/sepolicy/property_contexts
index a0a4020..142b062 100644
--- a/target/board/generic/sepolicy/property_contexts
+++ b/target/board/generic/sepolicy/property_contexts
@@ -1,4 +1,5 @@
 qemu.                   u:object_r:qemu_prop:s0
-emu.                    u:object_r:qemu_prop:s0
-emulator.               u:object_r:qemu_prop:s0
-radio.noril             u:object_r:radio_noril_prop:s0
+ro.emu.                 u:object_r:qemu_prop:s0
+ro.emulator.            u:object_r:qemu_prop:s0
+ro.radio.noril          u:object_r:radio_noril_prop:s0
+ro.opengles.            u:object_r:opengles_prop:s0
diff --git a/target/board/generic/sepolicy/qemu_props.te b/target/board/generic/sepolicy/qemu_props.te
index 4a91c4c..6768ce7 100644
--- a/target/board/generic/sepolicy/qemu_props.te
+++ b/target/board/generic/sepolicy/qemu_props.te
@@ -1,5 +1,5 @@
 # qemu-props service:  Sets system properties on boot.
-type qemu_props, domain;
+type qemu_props, domain, domain_deprecated;
 type qemu_props_exec, exec_type, file_type;
 
 init_daemon_domain(qemu_props)
@@ -8,3 +8,4 @@
 set_prop(qemu_props, qemu_prop)
 set_prop(qemu_props, dalvik_prop)
 set_prop(qemu_props, config_prop)
+set_prop(qemu_props, opengles_prop)
diff --git a/target/board/generic/sepolicy/qemud.te b/target/board/generic/sepolicy/qemud.te
index eee21c4..797cf5c 100644
--- a/target/board/generic/sepolicy/qemud.te
+++ b/target/board/generic/sepolicy/qemud.te
@@ -1,5 +1,5 @@
 # qemu support daemon
-type qemud, domain;
+type qemud, domain, domain_deprecated;
 type qemud_exec, exec_type, file_type;
 
 init_daemon_domain(qemud)
diff --git a/target/board/generic/sepolicy/surfaceflinger.te b/target/board/generic/sepolicy/surfaceflinger.te
index 4c35469..e03d07e 100644
--- a/target/board/generic/sepolicy/surfaceflinger.te
+++ b/target/board/generic/sepolicy/surfaceflinger.te
@@ -1,2 +1,4 @@
 allow surfaceflinger self:process execmem;
 allow surfaceflinger ashmem_device:chr_file execute;
+
+set_prop(surfaceflinger, qemu_prop)
diff --git a/target/board/generic/sepolicy/system_server.te b/target/board/generic/sepolicy/system_server.te
index d0fb79d..f9e277b 100644
--- a/target/board/generic/sepolicy/system_server.te
+++ b/target/board/generic/sepolicy/system_server.te
@@ -1 +1,3 @@
 unix_socket_connect(system_server, qemud, qemud)
+get_prop(system_server, opengles_prop)
+get_prop(system_server, radio_noril_prop)
diff --git a/target/board/generic/sepolicy/zygote.te b/target/board/generic/sepolicy/zygote.te
new file mode 100644
index 0000000..a90f02b
--- /dev/null
+++ b/target/board/generic/sepolicy/zygote.te
@@ -0,0 +1 @@
+set_prop(zygote, qemu_prop)
diff --git a/target/board/generic_arm64/BoardConfig.mk b/target/board/generic_arm64/BoardConfig.mk
index b757da1..02d0a6f 100644
--- a/target/board/generic_arm64/BoardConfig.mk
+++ b/target/board/generic_arm64/BoardConfig.mk
@@ -76,7 +76,7 @@
 USE_OPENGL_RENDERER := true
 
 TARGET_USERIMAGES_USE_EXT4 := true
-BOARD_SYSTEMIMAGE_PARTITION_SIZE := 1610612736
+BOARD_SYSTEMIMAGE_PARTITION_SIZE := 1879048192  # 1.75 GB
 BOARD_USERDATAIMAGE_PARTITION_SIZE := 576716800
 BOARD_CACHEIMAGE_PARTITION_SIZE := 69206016
 BOARD_CACHEIMAGE_FILE_SYSTEM_TYPE := ext4
diff --git a/target/board/generic_mips/BoardConfig.mk b/target/board/generic_mips/BoardConfig.mk
index 76a2ef4..1152105 100644
--- a/target/board/generic_mips/BoardConfig.mk
+++ b/target/board/generic_mips/BoardConfig.mk
@@ -53,7 +53,7 @@
 USE_OPENGL_RENDERER := true
 
 TARGET_USERIMAGES_USE_EXT4 := true
-BOARD_SYSTEMIMAGE_PARTITION_SIZE := 1342177280
+BOARD_SYSTEMIMAGE_PARTITION_SIZE := 1879048192  # 1.75 GB
 BOARD_USERDATAIMAGE_PARTITION_SIZE := 734003200
 BOARD_CACHEIMAGE_PARTITION_SIZE := 69206016
 BOARD_CACHEIMAGE_FILE_SYSTEM_TYPE := ext4
diff --git a/target/board/generic_mips64/BoardConfig.mk b/target/board/generic_mips64/BoardConfig.mk
index 5c13447..ebc1675 100644
--- a/target/board/generic_mips64/BoardConfig.mk
+++ b/target/board/generic_mips64/BoardConfig.mk
@@ -65,11 +65,13 @@
 USE_OPENGL_RENDERER := true
 
 TARGET_USERIMAGES_USE_EXT4 := true
-BOARD_SYSTEMIMAGE_PARTITION_SIZE := 1342177280  # 1.25 GB swag, 20% more than before
-BOARD_USERDATAIMAGE_PARTITION_SIZE := 734003200
+BOARD_SYSTEMIMAGE_PARTITION_SIZE := 1879048192  # 1.75 GB
+BOARD_USERDATAIMAGE_PARTITION_SIZE := 1610612736  # 1.5 GB, lots of space for running tests
 BOARD_CACHEIMAGE_PARTITION_SIZE := 69206016
 BOARD_CACHEIMAGE_FILE_SYSTEM_TYPE := ext4
 BOARD_FLASH_BLOCK_SIZE := 512
 TARGET_USERIMAGES_SPARSE_EXT_DISABLED := true
 
 BOARD_SEPOLICY_DIRS += build/target/board/generic/sepolicy
+
+DEX_PREOPT_DEFAULT := nostripping
diff --git a/target/board/generic_x86/BoardConfig.mk b/target/board/generic_x86/BoardConfig.mk
index 0b8e1cd..50ecb98 100644
--- a/target/board/generic_x86/BoardConfig.mk
+++ b/target/board/generic_x86/BoardConfig.mk
@@ -18,9 +18,6 @@
 # no hardware camera
 USE_CAMERA_STUB := true
 
-# customize the malloced address to be 16-byte aligned
-BOARD_MALLOC_ALIGNMENT := 16
-
 # Enable dex-preoptimization to speed up the first boot sequence
 # of an SDK AVD. Note that this operation only works on Linux for now
 ifeq ($(HOST_OS),linux)
@@ -35,7 +32,7 @@
 USE_OPENGL_RENDERER := true
 
 TARGET_USERIMAGES_USE_EXT4 := true
-BOARD_SYSTEMIMAGE_PARTITION_SIZE := 1342177280  # 1.25 GB
+BOARD_SYSTEMIMAGE_PARTITION_SIZE := 1610612736
 BOARD_USERDATAIMAGE_PARTITION_SIZE := 576716800
 BOARD_CACHEIMAGE_PARTITION_SIZE := 69206016
 BOARD_CACHEIMAGE_FILE_SYSTEM_TYPE := ext4
diff --git a/target/board/generic_x86/sepolicy/system_server.te b/target/board/generic_x86/sepolicy/system_server.te
deleted file mode 100644
index 5d98a14..0000000
--- a/target/board/generic_x86/sepolicy/system_server.te
+++ /dev/null
@@ -1 +0,0 @@
-allow system_server self:process execmem;
diff --git a/target/board/generic_x86_64/BoardConfig.mk b/target/board/generic_x86_64/BoardConfig.mk
index 5105161..553bec9 100755
--- a/target/board/generic_x86_64/BoardConfig.mk
+++ b/target/board/generic_x86_64/BoardConfig.mk
@@ -24,9 +24,6 @@
 # no hardware camera
 USE_CAMERA_STUB := true
 
-# customize the malloced address to be 16-byte aligned
-BOARD_MALLOC_ALIGNMENT := 16
-
 # Enable dex-preoptimization to speed up the first boot sequence
 # of an SDK AVD. Note that this operation only works on Linux for now
 ifeq ($(HOST_OS),linux)
@@ -41,7 +38,7 @@
 USE_OPENGL_RENDERER := true
 
 TARGET_USERIMAGES_USE_EXT4 := true
-BOARD_SYSTEMIMAGE_PARTITION_SIZE := 1342177280  # 1.25 GB
+BOARD_SYSTEMIMAGE_PARTITION_SIZE := 1879048192  # 1.75 GB
 BOARD_USERDATAIMAGE_PARTITION_SIZE := 576716800
 BOARD_CACHEIMAGE_PARTITION_SIZE := 69206016
 BOARD_CACHEIMAGE_FILE_SYSTEM_TYPE := ext4
diff --git a/target/product/AndroidProducts.mk b/target/product/AndroidProducts.mk
index ac5902c..69edc72 100644
--- a/target/product/AndroidProducts.mk
+++ b/target/product/AndroidProducts.mk
@@ -59,7 +59,6 @@
     $(LOCAL_DIR)/aosp_arm64.mk \
     $(LOCAL_DIR)/aosp_mips64.mk \
     $(LOCAL_DIR)/aosp_x86_64.mk \
-    $(LOCAL_DIR)/full_x86_64.mk \
     $(LOCAL_DIR)/sdk_phone_armv7.mk \
     $(LOCAL_DIR)/sdk_phone_x86.mk \
     $(LOCAL_DIR)/sdk_phone_mips.mk \
diff --git a/target/product/aosp_arm.mk b/target/product/aosp_arm.mk
index 86b715c..781cae6 100644
--- a/target/product/aosp_arm.mk
+++ b/target/product/aosp_arm.mk
@@ -13,6 +13,6 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 #
-$(call inherit-product, $(SRC_TARGET_DIR)/product/full.mk)
+include $(SRC_TARGET_DIR)/product/full.mk
 
 PRODUCT_NAME := aosp_arm
diff --git a/target/product/aosp_arm64.mk b/target/product/aosp_arm64.mk
index ea8ec06..98afe5f 100644
--- a/target/product/aosp_arm64.mk
+++ b/target/product/aosp_arm64.mk
@@ -19,6 +19,10 @@
 # build quite specifically for the emulator, and might not be
 # entirely appropriate to inherit from for on-device configurations.
 
+# This is for enabling ethernet support for ranchu.
+# Consider removing this after RIL support is provided in ranchu.
+PRODUCT_COPY_FILES += frameworks/native/data/etc/android.hardware.ethernet.xml:system/etc/permissions/android.hardware.ethernet.xml
+
 $(call inherit-product, $(SRC_TARGET_DIR)/product/core_64_bit.mk)
 $(call inherit-product, $(SRC_TARGET_DIR)/product/aosp_base_telephony.mk)
 $(call inherit-product, $(SRC_TARGET_DIR)/board/generic_arm64/device.mk)
diff --git a/target/product/aosp_mips.mk b/target/product/aosp_mips.mk
index ceeb433..a76b93a 100644
--- a/target/product/aosp_mips.mk
+++ b/target/product/aosp_mips.mk
@@ -13,6 +13,6 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 #
-$(call inherit-product, $(SRC_TARGET_DIR)/product/full_mips.mk)
+include $(SRC_TARGET_DIR)/product/full_mips.mk
 
 PRODUCT_NAME := aosp_mips
diff --git a/target/product/aosp_mips64.mk b/target/product/aosp_mips64.mk
index 3a6026c..f606858 100644
--- a/target/product/aosp_mips64.mk
+++ b/target/product/aosp_mips64.mk
@@ -13,6 +13,24 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 #
-$(call inherit-product, $(SRC_TARGET_DIR)/product/full_mips64.mk)
 
+# This is a build configuration for a full-featured build of the
+# Open-Source part of the tree. It's geared toward a US-centric
+# mips64 build quite specifically for the emulator, and might not be
+# entirely appropriate to inherit from for on-device configurations.
+
+# This is for enabling ethernet support for ranchu.
+# Consider removing this after RIL support is provided in ranchu.
+PRODUCT_COPY_FILES += frameworks/native/data/etc/android.hardware.ethernet.xml:system/etc/permissions/android.hardware.ethernet.xml
+
+$(call inherit-product, $(SRC_TARGET_DIR)/product/core_64_bit.mk)
+$(call inherit-product, $(SRC_TARGET_DIR)/product/aosp_base_telephony.mk)
+$(call inherit-product, $(SRC_TARGET_DIR)/board/generic_mips64/device.mk)
+
+include $(SRC_TARGET_DIR)/product/emulator.mk
+
+# Overrides
 PRODUCT_NAME := aosp_mips64
+PRODUCT_DEVICE := generic_mips64
+PRODUCT_BRAND := Android
+PRODUCT_MODEL := AOSP on MIPS64 Emulator
diff --git a/target/product/aosp_x86.mk b/target/product/aosp_x86.mk
index 3e9b018..cba43c4 100644
--- a/target/product/aosp_x86.mk
+++ b/target/product/aosp_x86.mk
@@ -13,6 +13,6 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 #
-$(call inherit-product, $(SRC_TARGET_DIR)/product/full_x86.mk)
+include $(SRC_TARGET_DIR)/product/full_x86.mk
 
 PRODUCT_NAME := aosp_x86
diff --git a/target/product/aosp_x86_64.mk b/target/product/aosp_x86_64.mk
index 5a12c08..4006346 100644
--- a/target/product/aosp_x86_64.mk
+++ b/target/product/aosp_x86_64.mk
@@ -13,6 +13,37 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 #
-$(call inherit-product, $(SRC_TARGET_DIR)/product/full_x86_64.mk)
 
+# This is a build configuration for a full-featured build of the
+# Open-Source part of the tree. It's geared toward a US-centric
+# build quite specifically for the emulator, and might not be
+# entirely appropriate to inherit from for on-device configurations.
+
+# If running on an emulator or some other device that has a LAN connection
+# that isn't a Wi-Fi connection, this will instruct init.rc to enable the
+# network connection so that you can use it with ADB.
+
+# This is for enabling ethernet support for ranchu.
+# Consider removing this after RIL support is provided in ranchu.
+PRODUCT_COPY_FILES += frameworks/native/data/etc/android.hardware.ethernet.xml:system/etc/permissions/android.hardware.ethernet.xml
+
+$(call inherit-product, $(SRC_TARGET_DIR)/product/core_64_bit.mk)
+$(call inherit-product, $(SRC_TARGET_DIR)/product/aosp_base_telephony.mk)
+$(call inherit-product, $(SRC_TARGET_DIR)/board/generic_x86_64/device.mk)
+
+include $(SRC_TARGET_DIR)/product/emulator.mk
+
+ifdef NET_ETH0_STARTONBOOT
+  PRODUCT_PROPERTY_OVERRIDES += net.eth0.startonboot=1
+endif
+
+# Ensure we package the BIOS files too.
+PRODUCT_PACKAGES += \
+	bios.bin \
+	vgabios-cirrus.bin \
+
+# Overrides
 PRODUCT_NAME := aosp_x86_64
+PRODUCT_DEVICE := generic_x86_64
+PRODUCT_BRAND := Android
+PRODUCT_MODEL := AOSP on IA x86_64 Emulator
diff --git a/target/product/base.mk b/target/product/base.mk
index 4c49e86..4d70664 100644
--- a/target/product/base.mk
+++ b/target/product/base.mk
@@ -26,12 +26,13 @@
     android.test.runner \
     app_process \
     applypatch \
+    audioserver \
     blkid \
     bmgr \
     bugreport \
+    bugreportz \
+    cameraserver \
     content \
-    dhcpcd \
-    dhcpcd-run-hooks \
     dnsmasq \
     dpm \
     framework \
@@ -50,6 +51,7 @@
     libbundlewrapper \
     libcamera_client \
     libcameraservice \
+    libcamera2ndk \
     libdl \
     libdrmclearkeyplugin \
     libeffectproxy \
@@ -94,10 +96,14 @@
     libvisualizer \
     libvorbisidec \
     libmediandk \
+    libvulkan \
     libwifi-service \
     media \
     media_cmd \
+    mediadrmserver \
     mediaserver \
+    mediacodec \
+    mediaextractor \
     monkey \
     mtpd \
     ndc \
diff --git a/target/product/core.mk b/target/product/core.mk
index d453303..75cf649 100644
--- a/target/product/core.mk
+++ b/target/product/core.mk
@@ -22,7 +22,9 @@
 
 PRODUCT_PACKAGES += \
     BasicDreams \
-    Browser \
+    BlockedNumberProvider \
+    BookmarkProvider \
+    Browser2 \
     Calendar \
     CalendarProvider \
     CaptivePortalLogin \
@@ -33,7 +35,6 @@
     DownloadProviderUi \
     Email \
     ExactCalculator \
-    Exchange2 \
     ExternalStorageProvider \
     FusedLocation \
     InputDevices \
@@ -42,10 +43,12 @@
     LatinIME \
     Launcher2 \
     ManagedProvisioning \
+    MtpDocumentsProvider \
     PicoTts \
     PacProcessor \
     libpac \
     PrintSpooler \
+    PrintRecommendationService \
     ProxyHandler \
     QuickSearchBox \
     Settings \
diff --git a/target/product/core_minimal.mk b/target/product/core_minimal.mk
index 27c10af..627d496 100644
--- a/target/product/core_minimal.mk
+++ b/target/product/core_minimal.mk
@@ -24,13 +24,17 @@
 
 PRODUCT_PACKAGES += \
     BackupRestoreConfirmation \
+    CtsShimPrivPrebuilt \
     DownloadProvider \
+    ExtShared \
+    ExtServices \
     HTMLViewer \
     MediaProvider \
     PackageInstaller \
     SettingsProvider \
     Shell \
     StatementService \
+    WallpaperBackup \
     bcc \
     bu \
     com.android.future.usb.accessory \
@@ -63,7 +67,6 @@
     libfilterfw \
     libkeystore \
     libgatekeeper \
-    libsqlite_jni \
     libwilhelm \
     logd \
     make_ext4fs \
@@ -83,6 +86,7 @@
 
 # The order of PRODUCT_BOOT_JARS matters.
 PRODUCT_BOOT_JARS := \
+    core-oj \
     core-libart \
     conscrypt \
     okhttp \
@@ -114,5 +118,27 @@
 PRODUCT_COPY_FILES += \
     system/core/rootdir/init.zygote32.rc:root/init.zygote32.rc
 
+PRODUCT_COPY_FILES += \
+    system/core/rootdir/etc/public.libraries.android.txt:system/etc/public.libraries.txt
+
+# Different dexopt types for different package update/install times.
+# On eng builds, make "boot" reasons do pure JIT for faster turnaround.
+ifeq (eng,$(TARGET_BUILD_VARIANT))
+    PRODUCT_DEFAULT_PROPERTY_OVERRIDES += \
+        pm.dexopt.first-boot=verify-at-runtime \
+        pm.dexopt.boot=verify-at-runtime
+else
+    PRODUCT_DEFAULT_PROPERTY_OVERRIDES += \
+        pm.dexopt.first-boot=interpret-only \
+        pm.dexopt.boot=verify-profile
+endif
+PRODUCT_DEFAULT_PROPERTY_OVERRIDES += \
+    pm.dexopt.install=interpret-only \
+    pm.dexopt.bg-dexopt=speed-profile \
+    pm.dexopt.ab-ota=speed-profile \
+    pm.dexopt.nsys-library=speed \
+    pm.dexopt.shared-apk=speed \
+    pm.dexopt.forced-dexopt=speed
+
 $(call inherit-product, $(SRC_TARGET_DIR)/product/runtime_libart.mk)
 $(call inherit-product, $(SRC_TARGET_DIR)/product/base.mk)
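
These defaults land in the image's property files; one way to confirm what a running build picked up (property names taken from the block above, values depend on the build variant):

    adb shell getprop pm.dexopt.install       # interpret-only
    adb shell getprop pm.dexopt.bg-dexopt     # speed-profile
    adb shell getprop pm.dexopt.first-boot    # verify-at-runtime on eng builds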
diff --git a/target/product/core_tiny.mk b/target/product/core_tiny.mk
index 2be0507..58b75ea 100644
--- a/target/product/core_tiny.mk
+++ b/target/product/core_tiny.mk
@@ -36,13 +36,14 @@
     power.default
 
 PRODUCT_PACKAGES += \
-    local_time.default
-
-PRODUCT_PACKAGES += \
     BackupRestoreConfirmation \
+    CtsShimPrivPrebuilt \
     DefaultContainerService \
+    ExtShared \
+    ExtServices \
     SettingsProvider \
     Shell \
+    WallpaperBackup \
     bcc \
     bu \
     com.android.location.provider \
@@ -64,7 +65,6 @@
     libfilterfw \
     libgatekeeper \
     libkeystore \
-    libsqlite_jni \
     libwilhelm \
     libdrmframework_jni \
     libdrmframework \
@@ -83,6 +83,7 @@
 
 # The order matters
 PRODUCT_BOOT_JARS := \
+    core-oj \
     core-libart \
     conscrypt \
     okhttp \
@@ -110,6 +111,25 @@
 PRODUCT_PROPERTY_OVERRIDES += \
     ro.carrier=unknown
 
+# Different dexopt types for different package update/install times.
+# On eng builds, make "boot" reasons do pure JIT for faster turnaround.
+ifeq (eng,$(TARGET_BUILD_VARIANT))
+    PRODUCT_DEFAULT_PROPERTY_OVERRIDES += \
+        pm.dexopt.first-boot=verify-at-runtime \
+        pm.dexopt.boot=verify-at-runtime
+else
+    PRODUCT_DEFAULT_PROPERTY_OVERRIDES += \
+        pm.dexopt.first-boot=interpret-only \
+        pm.dexopt.boot=verify-profile
+endif
+PRODUCT_DEFAULT_PROPERTY_OVERRIDES += \
+    pm.dexopt.install=interpret-only \
+    pm.dexopt.bg-dexopt=speed-profile \
+    pm.dexopt.ab-ota=speed-profile \
+    pm.dexopt.nsys-library=speed \
+    pm.dexopt.shared-apk=speed \
+    pm.dexopt.forced-dexopt=speed
+
 $(call inherit-product, $(SRC_TARGET_DIR)/product/runtime_libart.mk)
 $(call inherit-product, $(SRC_TARGET_DIR)/product/base.mk)
 $(call inherit-product-if-exists, frameworks/base/data/fonts/fonts.mk)
diff --git a/target/product/embedded.mk b/target/product/embedded.mk
index 25a8975..55de3b9 100644
--- a/target/product/embedded.mk
+++ b/target/product/embedded.mk
@@ -22,6 +22,8 @@
     adbd \
     atrace \
     bootanimation \
+    bootstat \
+    cmd \
     debuggerd \
     dumpstate \
     dumpsys \
@@ -38,8 +40,10 @@
     libFFTEm \
     libGLESv1_CM \
     libGLESv2 \
+    libGLESv3 \
     libbinder \
     libc \
+    libc_malloc_debug \
     libcutils \
     libdl \
     libgui \
@@ -75,7 +79,7 @@
 # SELinux packages
 PRODUCT_PACKAGES += \
     sepolicy \
-    file_contexts \
+    file_contexts.bin \
     seapp_contexts \
     property_contexts \
     mac_permissions.xml \
@@ -90,6 +94,5 @@
 PRODUCT_COPY_FILES += \
     system/core/rootdir/init.usb.rc:root/init.usb.rc \
     system/core/rootdir/init.usb.configfs.rc:root/init.usb.configfs.rc \
-    system/core/rootdir/init.trace.rc:root/init.trace.rc \
     system/core/rootdir/ueventd.rc:root/ueventd.rc \
     system/core/rootdir/etc/hosts:system/etc/hosts
diff --git a/target/product/emulator.mk b/target/product/emulator.mk
index 7394d4f..b08a28a 100644
--- a/target/product/emulator.mk
+++ b/target/product/emulator.mk
@@ -26,6 +26,7 @@
 PRODUCT_PACKAGES += \
     egl.cfg \
     gralloc.goldfish \
+    gralloc.ranchu \
     libGLESv1_CM_emulation \
     lib_renderControl_enc \
     libEGL_emulation \
@@ -38,8 +39,11 @@
     qemud \
     camera.goldfish \
     camera.goldfish.jpeg \
+    camera.ranchu \
+    camera.ranchu.jpeg \
     lights.goldfish \
     gps.goldfish \
+    gps.ranchu \
     fingerprint.goldfish \
     sensors.goldfish \
     audio.primary.goldfish \
@@ -50,11 +54,15 @@
     sensors.ranchu
 
 PRODUCT_COPY_FILES += \
-    frameworks/native/data/etc/android.hardware.ethernet.xml:system/etc/permissions/android.hardware.ethernet.xml \
     device/generic/goldfish/fstab.goldfish:root/fstab.goldfish \
     device/generic/goldfish/init.goldfish.rc:root/init.goldfish.rc \
     device/generic/goldfish/init.goldfish.sh:system/etc/init.goldfish.sh \
     device/generic/goldfish/ueventd.goldfish.rc:root/ueventd.goldfish.rc \
     device/generic/goldfish/init.ranchu.rc:root/init.ranchu.rc \
     device/generic/goldfish/fstab.ranchu:root/fstab.ranchu \
-    device/generic/goldfish/ueventd.ranchu.rc:root/ueventd.ranchu.rc
+    device/generic/goldfish/ueventd.ranchu.rc:root/ueventd.ranchu.rc \
+    frameworks/native/data/etc/android.hardware.usb.accessory.xml:system/etc/permissions/android.hardware.usb.accessory.xml
+
+PRODUCT_PACKAGE_OVERLAYS := device/generic/goldfish/overlay
+
+PRODUCT_CHARACTERISTICS := emulator
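
PRODUCT_CHARACTERISTICS is what ends up in ro.build.characteristics, and the overlay directory has to exist in the tree for PRODUCT_PACKAGE_OVERLAYS to take effect. A hedged sanity check against an emulator image built from this file:

    $ adb shell getprop ro.build.characteristics
    emulator
    $ ls device/generic/goldfish/overlay
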
diff --git a/target/product/full_mips64.mk b/target/product/full_mips64.mk
deleted file mode 100644
index e813e41..0000000
--- a/target/product/full_mips64.mk
+++ /dev/null
@@ -1,32 +0,0 @@
-#
-# Copyright (C) 2013 The Android Open Source Project
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-# This is a build configuration for a full-featured build of the
-# Open-Source part of the tree. It's geared toward a US-centric
-# mips64 build quite specifically for the emulator, and might not be
-# entirely appropriate to inherit from for on-device configurations.
-
-$(call inherit-product, $(SRC_TARGET_DIR)/product/core_64_bit.mk)
-$(call inherit-product, $(SRC_TARGET_DIR)/product/aosp_base_telephony.mk)
-$(call inherit-product, $(SRC_TARGET_DIR)/board/generic_mips64/device.mk)
-
-include $(SRC_TARGET_DIR)/product/emulator.mk
-
-# Overrides
-PRODUCT_NAME := full_mips64
-PRODUCT_DEVICE := generic_mips64
-PRODUCT_BRAND := Android
-PRODUCT_MODEL := AOSP on MIPS64 Emulator
diff --git a/target/product/full_x86_64.mk b/target/product/full_x86_64.mk
deleted file mode 100755
index 051a86e..0000000
--- a/target/product/full_x86_64.mk
+++ /dev/null
@@ -1,45 +0,0 @@
-#
-# Copyright (C) 2009 The Android Open Source Project
-#
-# Licensed under the Apache License, Version 2.0 (the "License");
-# you may not use this file except in compliance with the License.
-# You may obtain a copy of the License at
-#
-#      http://www.apache.org/licenses/LICENSE-2.0
-#
-# Unless required by applicable law or agreed to in writing, software
-# distributed under the License is distributed on an "AS IS" BASIS,
-# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-# See the License for the specific language governing permissions and
-# limitations under the License.
-#
-
-# This is a build configuration for a full-featured build of the
-# Open-Source part of the tree. It's geared toward a US-centric
-# build quite specifically for the emulator, and might not be
-# entirely appropriate to inherit from for on-device configurations.
-
-# If running on an emulator or some other device that has a LAN connection
-# that isn't a wifi connection. This will instruct init.rc to enable the
-# network connection so that you can use it with ADB
-
-$(call inherit-product, $(SRC_TARGET_DIR)/product/core_64_bit.mk)
-$(call inherit-product, $(SRC_TARGET_DIR)/product/aosp_base_telephony.mk)
-$(call inherit-product, $(SRC_TARGET_DIR)/board/generic_x86_64/device.mk)
-
-include $(SRC_TARGET_DIR)/product/emulator.mk
-
-ifdef NET_ETH0_STARTONBOOT
-  PRODUCT_PROPERTY_OVERRIDES += net.eth0.startonboot=1
-endif
-
-# Ensure we package the BIOS files too.
-PRODUCT_PACKAGES += \
-	bios.bin \
-	vgabios-cirrus.bin \
-
-# Overrides
-PRODUCT_NAME := full_x86_64
-PRODUCT_DEVICE := generic_x86_64
-PRODUCT_BRAND := Android
-PRODUCT_MODEL := AOSP on IA x86_64 Emulator
diff --git a/target/product/languages_full.mk b/target/product/languages_full.mk
index 9d80b0e..98d8c3c 100644
--- a/target/product/languages_full.mk
+++ b/target/product/languages_full.mk
@@ -21,4 +21,4 @@
 
 # These are all the locales that have translations and are displayable
 # by TextView in this branch.
-PRODUCT_LOCALES := en_US en_AU en_IN fr_FR it_IT es_ES et_EE de_DE nl_NL cs_CZ pl_PL ja_JP zh_TW zh_CN zh_HK ru_RU ko_KR nb_NO es_US da_DK el_GR tr_TR pt_PT pt_BR rm_CH sv_SE bg_BG ca_ES en_GB fi_FI hi_IN hr_HR hu_HU in_ID iw_IL lt_LT lv_LV ro_RO sk_SK sl_SI sr_RS uk_UA vi_VN tl_PH ar_EG fa_IR th_TH sw_TZ ms_MY af_ZA zu_ZA am_ET hi_IN en_XA ar_XB fr_CA km_KH lo_LA ne_NP si_LK mn_MN hy_AM az_AZ ka_GE my_MM mr_IN ml_IN is_IS mk_MK ky_KG eu_ES gl_ES bn_BD ta_IN kn_IN te_IN uz_UZ ur_PK kk_KZ sq_AL gu_IN pa_IN
+PRODUCT_LOCALES := en_US en_AU en_IN fr_FR it_IT es_ES et_EE de_DE nl_NL cs_CZ pl_PL ja_JP zh_TW zh_CN zh_HK ru_RU ko_KR nb_NO es_US da_DK el_GR tr_TR pt_PT pt_BR sv_SE bg_BG ca_ES en_GB fi_FI hi_IN hr_HR hu_HU in_ID iw_IL lt_LT lv_LV ro_RO sk_SK sl_SI sr_RS uk_UA vi_VN tl_PH ar_EG fa_IR th_TH sw_TZ ms_MY af_ZA zu_ZA am_ET en_XA ar_XB fr_CA km_KH lo_LA ne_NP si_LK mn_MN hy_AM az_AZ ka_GE my_MM mr_IN ml_IN is_IS mk_MK ky_KG eu_ES gl_ES bn_BD ta_IN kn_IN te_IN uz_UZ ur_PK kk_KZ sq_AL gu_IN pa_IN be_BY bs_BA
diff --git a/target/product/runtime_libart.mk b/target/product/runtime_libart.mk
index a35122b..5fd4d7e 100644
--- a/target/product/runtime_libart.mk
+++ b/target/product/runtime_libart.mk
@@ -18,9 +18,11 @@
 
 PRODUCT_PACKAGES += \
     apache-xml \
+    ahat \
     bouncycastle \
     cacerts \
     conscrypt \
+    core-oj \
     core-junit \
     core-libart \
     dalvikvm \
@@ -33,17 +35,21 @@
     ext \
     hprof-conv \
     libart \
+    libart_fake \
     libcrypto \
     libexpat \
     libicui18n \
     libicuuc \
     libjavacore \
+    libopenjdk \
+    libopenjdkjvm \
     libnativehelper \
     libssl \
     libz \
     oatdump \
     okhttp \
-    patchoat
+    patchoat \
+    profman
 
 PRODUCT_DEFAULT_PROPERTY_OVERRIDES += \
     dalvik.vm.image-dex2oat-Xms=64m \
@@ -51,3 +57,6 @@
     dalvik.vm.dex2oat-Xms=64m \
     dalvik.vm.dex2oat-Xmx=512m \
     ro.dalvik.vm.native.bridge=0 \
+    dalvik.vm.usejit=true \
+    dalvik.vm.usejitprofiles=true \
+    dalvik.vm.appimageformat=lz4
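
core-oj (the OpenJDK-derived core library) joins the runtime packages here, and the new dalvik.vm.* lines enable the JIT, JIT profile collection, and LZ4 app images. Both are easy to verify on a booted device; the expected values are the ones set in this hunk, though a product makefile may override them:

    $ adb shell 'echo $BOOTCLASSPATH' | tr ':' '\n' | grep -E 'core-oj|core-libart'
    $ adb shell getprop dalvik.vm.usejit
    true
    $ adb shell getprop dalvik.vm.appimageformat
    lz4
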
diff --git a/target/product/sdk_base.mk b/target/product/sdk_base.mk
index 73c2524..2945f8c 100644
--- a/target/product/sdk_base.mk
+++ b/target/product/sdk_base.mk
@@ -61,8 +61,10 @@
 
 PRODUCT_COPY_FILES := \
 	device/generic/goldfish/data/etc/apns-conf.xml:system/etc/apns-conf.xml \
+	device/sample/etc/old-apns-conf.xml:system/etc/old-apns-conf.xml \
 	frameworks/base/data/sounds/effects/camera_click.ogg:system/media/audio/ui/camera_click.ogg \
 	frameworks/base/data/sounds/effects/VideoRecord.ogg:system/media/audio/ui/VideoRecord.ogg \
+	frameworks/base/data/sounds/effects/VideoStop.ogg:system/media/audio/ui/VideoStop.ogg \
 	device/generic/goldfish/data/etc/handheld_core_hardware.xml:system/etc/permissions/handheld_core_hardware.xml \
 	device/generic/goldfish/camera/media_profiles.xml:system/etc/media_profiles.xml \
 	frameworks/av/media/libstagefright/data/media_codecs_google_audio.xml:system/etc/media_codecs_google_audio.xml \
@@ -71,6 +73,7 @@
 	device/generic/goldfish/camera/media_codecs.xml:system/etc/media_codecs.xml \
 	frameworks/native/data/etc/android.hardware.touchscreen.multitouch.jazzhand.xml:system/etc/permissions/android.hardware.touchscreen.multitouch.jazzhand.xml \
 	frameworks/native/data/etc/android.hardware.camera.autofocus.xml:system/etc/permissions/android.hardware.camera.autofocus.xml \
+	frameworks/native/data/etc/android.hardware.fingerprint.xml:system/etc/permissions/android.hardware.fingerprint.xml \
 	frameworks/av/media/libeffects/data/audio_effects.conf:system/etc/audio_effects.conf \
 	hardware/libhardware_legacy/audio/audio_policy.conf:system/etc/audio_policy.conf
 
diff --git a/target/product/sdk_phone_arm64.mk b/target/product/sdk_phone_arm64.mk
index a0cf6c1..a689475 100644
--- a/target/product/sdk_phone_arm64.mk
+++ b/target/product/sdk_phone_arm64.mk
@@ -23,8 +23,14 @@
 $(call inherit-product, $(SRC_TARGET_DIR)/product/sdk_base.mk)
 $(call inherit-product, $(SRC_TARGET_DIR)/board/generic_arm64/device.mk)
 
+# AOSP emulator images build the AOSP messaging app.
+# Google API images override with the Google API app.
+# See vendor/google/products/sdk_google_phone_*.mk
+PRODUCT_PACKAGES += \
+    messaging
+
 # Overrides
-PRODUCT_BRAND := generic_arm64
+PRODUCT_BRAND := Android
 PRODUCT_NAME := sdk_phone_arm64
 PRODUCT_DEVICE := generic_arm64
 PRODUCT_MODEL := Android SDK built for arm64
diff --git a/target/product/sdk_phone_armv7.mk b/target/product/sdk_phone_armv7.mk
index aeb4940..ebdd0e7 100644
--- a/target/product/sdk_phone_armv7.mk
+++ b/target/product/sdk_phone_armv7.mk
@@ -16,7 +16,13 @@
 
 $(call inherit-product, $(SRC_TARGET_DIR)/product/sdk_base.mk)
 
+# AOSP emulator images build the AOSP messaging app.
+# Google API images override with the Google API app.
+# See vendor/google/products/sdk_google_phone_*.mk
+PRODUCT_PACKAGES += \
+    messaging
+
 # Overrides
-PRODUCT_BRAND := generic
+PRODUCT_BRAND := Android
 PRODUCT_NAME := sdk_phone_armv7
 PRODUCT_DEVICE := generic
diff --git a/target/product/sdk_phone_mips.mk b/target/product/sdk_phone_mips.mk
index 818491f..1cc2fe4 100644
--- a/target/product/sdk_phone_mips.mk
+++ b/target/product/sdk_phone_mips.mk
@@ -21,8 +21,14 @@
 
 $(call inherit-product, $(SRC_TARGET_DIR)/product/sdk_base.mk)
 
+# AOSP emulator images build the AOSP messaging app.
+# Google API images override with the Google API app.
+# See vendor/google/products/sdk_google_phone_*.mk
+PRODUCT_PACKAGES += \
+    messaging
+
 # Overrides
-PRODUCT_BRAND := generic_mips
+PRODUCT_BRAND := Android
 PRODUCT_NAME := sdk_phone_mips
 PRODUCT_DEVICE := generic_mips
 PRODUCT_MODEL := Android SDK for Mips
diff --git a/target/product/sdk_phone_mips64.mk b/target/product/sdk_phone_mips64.mk
index afdb2a8..e45d71b 100644
--- a/target/product/sdk_phone_mips64.mk
+++ b/target/product/sdk_phone_mips64.mk
@@ -22,8 +22,14 @@
 $(call inherit-product, $(SRC_TARGET_DIR)/product/core_64_bit.mk)
 $(call inherit-product, $(SRC_TARGET_DIR)/product/sdk_base.mk)
 
+# AOSP emulator images build the AOSP messaging app.
+# Google API images override with the Google API app.
+# See vendor/google/products/sdk_google_phone_*.mk
+PRODUCT_PACKAGES += \
+    messaging
+
 # Overrides
-PRODUCT_BRAND := generic_mips64
+PRODUCT_BRAND := Android
 PRODUCT_NAME := sdk_phone_mips64
 PRODUCT_DEVICE := generic_mips64
 PRODUCT_MODEL := Android SDK built for mips64
diff --git a/target/product/sdk_phone_x86.mk b/target/product/sdk_phone_x86.mk
index 95c49ab..01c2e83 100644
--- a/target/product/sdk_phone_x86.mk
+++ b/target/product/sdk_phone_x86.mk
@@ -21,8 +21,14 @@
 
 $(call inherit-product, $(SRC_TARGET_DIR)/product/sdk_base.mk)
 
+# AOSP emulator images build the AOSP messaging app.
+# Google API images override with the Google API app.
+# See vendor/google/products/sdk_google_phone_*.mk
+PRODUCT_PACKAGES += \
+    messaging
+
 # Overrides
-PRODUCT_BRAND := generic_x86
+PRODUCT_BRAND := Android
 PRODUCT_NAME := sdk_phone_x86
 PRODUCT_DEVICE := generic_x86
 PRODUCT_MODEL := Android SDK built for x86
diff --git a/target/product/sdk_phone_x86_64.mk b/target/product/sdk_phone_x86_64.mk
index 69e37af..c3bc5e9 100644
--- a/target/product/sdk_phone_x86_64.mk
+++ b/target/product/sdk_phone_x86_64.mk
@@ -22,8 +22,14 @@
 $(call inherit-product, $(SRC_TARGET_DIR)/product/core_64_bit.mk)
 $(call inherit-product, $(SRC_TARGET_DIR)/product/sdk_base.mk)
 
+# AOSP emulator images build the AOSP messaging app.
+# Google API images override with the Google API app.
+# See vendor/google/products/sdk_google_phone_*.mk
+PRODUCT_PACKAGES += \
+    messaging
+
 # Overrides
-PRODUCT_BRAND := generic_x86_64
+PRODUCT_BRAND := Android
 PRODUCT_NAME := sdk_phone_x86_64
 PRODUCT_DEVICE := generic_x86_64
 PRODUCT_MODEL := Android SDK built for x86_64
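
The same messaging hunk is applied to every sdk_phone_* product above, so any of them can be used to confirm that the AOSP messaging app lands in the emulator image. A minimal local build, assuming a synced AOSP tree (the install path in the final grep is an assumption; module install locations can vary):

    $ source build/envsetup.sh
    $ lunch sdk_phone_x86_64-userdebug
    $ make -j8
    $ ls $OUT/system/app | grep -i messaging
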
diff --git a/target/product/security/README b/target/product/security/README
index 24f984c..15f2e93 100644
--- a/target/product/security/README
+++ b/target/product/security/README
@@ -1,3 +1,14 @@
+For detailed information on key types and image signing, please see:
+
+https://source.android.com/devices/tech/ota/sign_builds.html
+
+The test keys in this directory are used in development only and should
+NEVER be used to sign packages in publicly released images (as that would
+open a major security hole).
+
+key generation
+--------------
+
 The following commands were used to generate the test key pairs:
 
   development/tools/make_key testkey  '/C=US/ST=California/L=Mountain View/O=Android/OU=Android/CN=Android/emailAddress=android@android.com'
@@ -5,18 +16,6 @@
   development/tools/make_key shared   '/C=US/ST=California/L=Mountain View/O=Android/OU=Android/CN=Android/emailAddress=android@android.com'
   development/tools/make_key media    '/C=US/ST=California/L=Mountain View/O=Android/OU=Android/CN=Android/emailAddress=android@android.com'
 
-The following standard test keys are currently included:
-
-testkey -- a generic key for packages that do not otherwise specify a key.
-platform -- a test key for packages that are part of the core platform.
-shared -- a test key for things that are shared in the home/contacts process.
-media -- a test key for packages that are part of the media/download system.
-
-These test keys are used strictly in development, and should never be assumed
-to convey any sort of validity.  When $BUILD_SECURE=true, the code should not
-honor these keys in any context.
-
-
 signing using the openssl commandline (for boot/system images)
 --------------------------------------------------------------
 
@@ -28,7 +27,12 @@
 
 extracting public keys for embedding
 ------------------------------------
-it's a Java tool
-but it generates C code
-take a look at commands/recovery/Android.mk
-you'll see it running $(HOST_OUT_JAVA_LIBRARIES)/dumpkey.jar
+
+dumpkey.jar is a Java tool that takes an x.509 certificate in PEM format as
+input and prints a C structure to standard output:
+
+    $ java -jar out/host/linux-x86/framework/dumpkey.jar build/target/product/security/testkey.x509.pem
+    {64,0xc926ad21,{1795090719,2141396315,950055447,2581568430,4268923165,1920809988,546586521,3498997798,1776797858,3740060814,1805317999,1429410244,129622599,1422441418,1783893377,1222374759,2563319927,323993566,28517732,609753416,1826472888,215237850,4261642700,4049082591,3228462402,774857746,154822455,2497198897,2758199418,3019015328,2794777644,87251430,2534927978,120774784,571297800,3695899472,2479925187,3811625450,3401832990,2394869647,3267246207,950095497,555058928,414729973,1136544882,3044590084,465547824,4058146728,2731796054,1689838846,3890756939,1048029507,895090649,247140249,178744550,3547885223,3165179243,109881576,3944604415,1044303212,3772373029,2985150306,3737520932,3599964420},{3437017481,3784475129,2800224972,3086222688,251333580,2131931323,512774938,325948880,2657486437,2102694287,3820568226,792812816,1026422502,2053275343,2800889200,3113586810,165549746,4273519969,4065247892,1902789247,772932719,3941848426,3652744109,216871947,3164400649,1942378755,3996765851,1055777370,964047799,629391717,2232744317,3910558992,191868569,2758883837,3682816752,2997714732,2702529250,3570700455,3776873832,3924067546,3555689545,2758825434,1323144535,61311905,1997411085,376844204,213777604,4077323584,9135381,1625809335,2804742137,2952293945,1117190829,4237312782,1825108855,3013147971,1111251351,2568837572,1684324211,2520978805,367251975,810756730,2353784344,1175080310}}
+
+This is called by build/core/Makefile to incorporate the OTA signing keys
+into the recovery image.
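
As a side note, the PEM certificates referenced in the README can be inspected with stock openssl; this is generic tooling rather than an AOSP-specific step:

    $ openssl x509 -in build/target/product/security/testkey.x509.pem -noout -subject -dates -fingerprint
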
diff --git a/target/product/telephony.mk b/target/product/telephony.mk
index 919d024..e840ba1 100644
--- a/target/product/telephony.mk
+++ b/target/product/telephony.mk
@@ -21,6 +21,8 @@
     CarrierConfig \
     Dialer \
     CallLogBackup \
+    CellBroadcastReceiver \
+    EmergencyInfo \
     rild
 
 PRODUCT_COPY_FILES := \
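
To confirm the two new telephony apps made it into an image built from this product, a loose check over adb works; the installed package IDs differ from the module names above, so the grep is deliberately broad:

    $ adb shell pm list packages | grep -iE 'cellbroadcast|emergency'
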
diff --git a/target/product/verity.mk b/target/product/verity.mk
index 3e00b49..0badb9f 100644
--- a/target/product/verity.mk
+++ b/target/product/verity.mk
@@ -14,15 +14,20 @@
 # limitations under the License.
 #
 
-# Provides dependencies necessary for verified boot
+# Provides dependencies necessary for verified boot (only for user and
+# userdebug builds)
 
-PRODUCT_SUPPORTS_BOOT_SIGNER := true
-PRODUCT_SUPPORTS_VERITY := true
+user_variant := $(filter user userdebug,$(TARGET_BUILD_VARIANT))
+ifneq (,$(user_variant))
+    PRODUCT_SUPPORTS_BOOT_SIGNER := true
+    PRODUCT_SUPPORTS_VERITY := true
+    PRODUCT_SUPPORTS_VERITY_FEC := true
 
-# The dev key is used to sign boot and recovery images, and the verity
-# metadata table. Actual product deliverables will be re-signed by hand.
-# We expect this file to exist with the suffixes ".x509.pem" and ".pk8".
-PRODUCT_VERITY_SIGNING_KEY := build/target/product/security/verity
+    # The dev key is used to sign boot and recovery images, and the verity
+    # metadata table. Actual product deliverables will be re-signed by hand.
+    # We expect this file to exist with the suffixes ".x509.pem" and ".pk8".
+    PRODUCT_VERITY_SIGNING_KEY := build/target/product/security/verity
 
-PRODUCT_PACKAGES += \
-        verity_key
+    PRODUCT_PACKAGES += \
+            verity_key
+endif
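
With this change, verified-boot support is only configured for user and userdebug builds; eng builds skip it. A hedged way to see what a given device ended up with (partition.system.verified is the property stock fs_mgr sets when dm-verity is active, assuming no vendor changes):

    $ adb shell getprop ro.build.type
    userdebug
    $ adb shell getprop partition.system.verified
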
diff --git a/tools/Android.mk b/tools/Android.mk
index 30febd6..9073ac3 100644
--- a/tools/Android.mk
+++ b/tools/Android.mk
@@ -16,17 +16,4 @@
 
 LOCAL_PATH := $(call my-dir)
 
-ifeq (,$(TARGET_BUILD_APPS))
-
-ifeq ($(TARGET_BUILD_PDK),true)
-include $(filter-out %/acp/Android.mk %/signapk/Android.mk %/zipalign/Android.mk,\
-  $(call all-makefiles-under,$(LOCAL_PATH)))
-else # !PDK
 include $(call all-makefiles-under,$(LOCAL_PATH))
-endif # PDK
-
-else # TARGET_BUILD_APPS
-
-include $(LOCAL_PATH)/apicheck/Android.mk
-
-endif
diff --git a/tools/acp/Android.mk b/tools/acp/Android.mk
index 2b41bc1..eec9c9d 100644
--- a/tools/acp/Android.mk
+++ b/tools/acp/Android.mk
@@ -3,26 +3,15 @@
 # Custom version of cp.
 
 LOCAL_PATH:= $(call my-dir)
+
 include $(CLEAR_VARS)
-LOCAL_ADDITIONAL_DEPENDENCIES := $(LOCAL_PATH)/Android.mk
 
 LOCAL_SRC_FILES := \
     acp.c
 
-ifeq ($(HOST_OS),cygwin)
-LOCAL_CFLAGS += -DWIN32_EXE
-endif
-ifeq ($(HOST_OS),darwin)
-LOCAL_CFLAGS += -DMACOSX_RSRC
-endif
-ifeq ($(HOST_OS),linux)
-endif
-
 LOCAL_STATIC_LIBRARIES := libhost
-LOCAL_C_INCLUDES := build/libs/host/include
 LOCAL_MODULE := acp
 LOCAL_ACP_UNAVAILABLE := true
 LOCAL_CXX_STL := none
-LOCAL_ADDRESS_SANITIZER := false
 
 include $(BUILD_HOST_EXECUTABLE)
diff --git a/tools/apicheck/Android.mk b/tools/apicheck/Android.mk
index 1674a17..b547058 100644
--- a/tools/apicheck/Android.mk
+++ b/tools/apicheck/Android.mk
@@ -15,26 +15,15 @@
 ifneq ($(TARGET_BUILD_PDK),true)
 LOCAL_PATH := $(call my-dir)
 
-# We use copy-file-to-new-target so that the installed
-# script file's timestamp is at least as new as the
-# .jar file it wraps.
-
-#TODO(dbort): add a template to do this stuff; share with jx
-
 # the hat script
 # ============================================================
 include $(CLEAR_VARS)
 LOCAL_IS_HOST_MODULE := true
 LOCAL_MODULE_CLASS := EXECUTABLES
 LOCAL_MODULE := apicheck
-
-include $(BUILD_SYSTEM)/base_rules.mk
-
-$(LOCAL_BUILT_MODULE): $(HOST_OUT_JAVA_LIBRARIES)/doclava$(COMMON_JAVA_PACKAGE_SUFFIX)
-$(LOCAL_BUILT_MODULE): $(LOCAL_PATH)/etc/apicheck | $(ACP)
-	@echo "Copy: $(PRIVATE_MODULE) ($@)"
-	$(copy-file-to-new-target)
-	$(hide) chmod 755 $@
+LOCAL_SRC_FILES := etc/apicheck
+LOCAL_REQUIRED_MODULES := doclava
+include $(BUILD_PREBUILT)
 
 # Apicheck is now part of Doclava -- See external/doclava.
 endif
diff --git a/tools/atree/Android.mk b/tools/atree/Android.mk
index d895810..f598db5 100644
--- a/tools/atree/Android.mk
+++ b/tools/atree/Android.mk
@@ -12,7 +12,6 @@
 
 LOCAL_STATIC_LIBRARIES := \
 	libhost
-LOCAL_C_INCLUDES := build/libs/host/include
 
 LOCAL_MODULE := atree
 
diff --git a/tools/buildinfo.sh b/tools/buildinfo.sh
index 5c199b8..dcb66bf 100755
--- a/tools/buildinfo.sh
+++ b/tools/buildinfo.sh
@@ -13,13 +13,19 @@
 echo "ro.build.version.release=$PLATFORM_VERSION"
 echo "ro.build.version.security_patch=$PLATFORM_SECURITY_PATCH"
 echo "ro.build.version.base_os=$PLATFORM_BASE_OS"
-echo "ro.build.date=`date`"
-echo "ro.build.date.utc=`date +%s`"
+echo "ro.build.date=`$DATE`"
+echo "ro.build.date.utc=`$DATE +%s`"
 echo "ro.build.type=$TARGET_BUILD_TYPE"
 echo "ro.build.user=$USER"
 echo "ro.build.host=`hostname`"
 echo "ro.build.tags=$BUILD_VERSION_TAGS"
 echo "ro.build.flavor=$TARGET_BUILD_FLAVOR"
+if [ -n "$BOARD_BUILD_SYSTEM_ROOT_IMAGE" ] ; then
+  echo "ro.build.system_root_image=$BOARD_BUILD_SYSTEM_ROOT_IMAGE"
+fi
+if [ -n "$AB_OTA_UPDATER" ] ; then
+  echo "ro.build.ab_update=$AB_OTA_UPDATER"
+fi
 echo "ro.product.model=$PRODUCT_MODEL"
 echo "ro.product.brand=$PRODUCT_BRAND"
 echo "ro.product.name=$PRODUCT_NAME"
diff --git a/tools/check_prereq/check_prereq.c b/tools/check_prereq/check_prereq.c
deleted file mode 100644
index d7b8918..0000000
--- a/tools/check_prereq/check_prereq.c
+++ /dev/null
@@ -1,46 +0,0 @@
-/*
- * Copyright (C) 2009 The Android Open Source Project
- *
- * Licensed under the Apache License, Version 2.0 (the "License");
- * you may not use this file except in compliance with the License.
- * You may obtain a copy of the License at
- *
- *      http://www.apache.org/licenses/LICENSE-2.0
- *
- * Unless required by applicable law or agreed to in writing, software
- * distributed under the License is distributed on an "AS IS" BASIS,
- * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
- * See the License for the specific language governing permissions and
- * limitations under the License.
- */
-
-#include <stdio.h>
-#include <stdlib.h>
-#include <sys/system_properties.h>
-#include <cutils/properties.h>
-
-// Compare the timestamp of the new build (passed on the command line)
-// against the current value of ro.build.date.utc.  Exit successfully
-// if the new build is newer than the current build (or if the
-// timestamps are the same).
-int main(int argc, char** argv) {
-  if (argc != 2) {
- usage:
-    fprintf(stderr, "usage: %s <timestamp>\n", argv[0]);
-    return 2;
-  }
-
-  char value[PROPERTY_VALUE_MAX];
-  char* default_value = "0";
-
-  property_get("ro.build.date.utc", value, default_value);
-
-  long current = strtol(value, NULL, 10);
-  char* end;
-  long install = strtol(argv[1], &end, 10);
-
-  printf("current build time: [%ld]  new build time: [%ld]\n",
-         current, install);
-
-  return (*end == 0 && current > 0 && install >= current) ? 0 : 1;
-}
diff --git a/tools/droiddoc/templates-ds/class.cs b/tools/droiddoc/templates-ds/class.cs
index d82f1c1..ffd8dcd 100644
--- a/tools/droiddoc/templates-ds/class.cs
+++ b/tools/droiddoc/templates-ds/class.cs
@@ -184,8 +184,9 @@
     <tr class="<?cs if:count % #2 ?>alt-color<?cs /if ?> api apilevel-<?cs var:method.since ?>" >
         <td class="jd-typecol"><nobr>
             <?cs var:method.abstract ?>
-            <?cs var:method.final ?>
+            <?cs var:method.default ?>
             <?cs var:method.static ?>
+            <?cs var:method.final ?>
             <?cs call:type_link(method.generic) ?>
             <?cs call:type_link(method.returnType) ?></nobr>
         </td>
@@ -530,10 +531,11 @@
 <div class="jd-details api apilevel-<?cs var:method.since ?>"> 
     <h4 class="jd-details-title">
       <span class="normal">
-        <?cs var:method.scope ?> 
-        <?cs var:method.static ?> 
-        <?cs var:method.final ?> 
-        <?cs var:method.abstract ?> 
+        <?cs var:method.scope ?>
+        <?cs var:method.abstract ?>
+        <?cs var:method.default ?>
+        <?cs var:method.static ?>
+        <?cs var:method.final ?>
         <?cs call:type_link(method.returnType) ?>
       </span>
       <span class="sympad"><?cs var:method.name ?></span>
diff --git a/tools/droiddoc/templates-ndk/class.cs b/tools/droiddoc/templates-ndk/class.cs
index e033fa2..e55ac31 100644
--- a/tools/droiddoc/templates-ndk/class.cs
+++ b/tools/droiddoc/templates-ndk/class.cs
@@ -188,8 +188,9 @@
     <tr class="<?cs if:count % #2 ?>alt-color<?cs /if ?> api apilevel-<?cs var:method.since ?>" >
         <td class="jd-typecol"><nobr>
             <?cs var:method.abstract ?>
-            <?cs var:method.final ?>
+            <?cs var:method.default ?>
             <?cs var:method.static ?>
+            <?cs var:method.final ?>
             <?cs call:type_link(method.generic) ?>
             <?cs call:type_link(method.returnType) ?></nobr>
         </td>
@@ -553,10 +554,11 @@
 <div class="jd-details api apilevel-<?cs var:method.since ?>"> 
     <h4 class="jd-details-title">
       <span class="normal">
-        <?cs var:method.scope ?> 
-        <?cs var:method.static ?> 
-        <?cs var:method.final ?> 
-        <?cs var:method.abstract ?> 
+        <?cs var:method.scope ?>
+        <?cs var:method.abstract ?>
+        <?cs var:method.default ?>
+        <?cs var:method.static ?>
+        <?cs var:method.final ?>
         <?cs call:type_link(method.returnType) ?>
       </span>
       <span class="sympad"><?cs var:method.name ?></span>
diff --git a/tools/droiddoc/templates-sac/assets/css/default.css b/tools/droiddoc/templates-sac/assets/css/default.css
index f5dd624..9b1fe5a 100644
--- a/tools/droiddoc/templates-sac/assets/css/default.css
+++ b/tools/droiddoc/templates-sac/assets/css/default.css
@@ -408,7 +408,7 @@
 .content-header.just-links {
   margin-bottom:0;
   padding-bottom:0;}
-    
+ 
 .content-header h1 {
   color:#000;
   margin:0;
@@ -420,8 +420,12 @@
   border-top: 1px solid #ccc;
   margin-top: 10px;
   padding-top:10px;
-  height: 30px; }
+  height: 30px;
+}
 
+.content-footer-sac {
+  border-top: 1px solid #ccc;
+}
 .content-footer .col-9 {
   margin-left:0;
 }
@@ -1507,7 +1511,7 @@
 .devices a.selected {
     color: #F80;
 }
-.accessories a.selected {
+.security a.selected {
     color: #9C0;
 }
 .compatibility a.selected {
@@ -2204,6 +2208,7 @@
 #nav-swap {
   height:30px;
   border-top:1px solid #ccc;
+  display: none;
 }
 #nav-swap a {
   display:inline-block;
@@ -2238,6 +2243,10 @@
   margin-left:0;
 }
 
+#nav-tree, #swapper {
+  display: none;
+}
+
 #nav-tree ul {
   list-style:none;
   padding:0;
@@ -2332,11 +2341,13 @@
   border-bottom: 1px solid #CCC;
   background:#e9e9e9;
   background: rgba(0, 0, 0, 0.05); /* matches #nav li.expanded */
+  display: none;
 
 }
 #api-nav-title {
   padding:0 5px;
   white-space:nowrap;
+  display: none;
 }
 
 #api-level-toggle {
@@ -3576,8 +3587,9 @@
   border-color: #33b5e5;
 }
 
+// Fudging this so SAC looks OK
 .develop #header .wrap {
-  border-color: #F80;
+  border-color: #CCC;
 }
 
 .distribute #header .wrap {
@@ -4461,6 +4473,11 @@
   float:right;
 }
 
+.feedback {
+  float:right !important;
+  margin: 0 0 0 10px;
+  font-size: 14px;
+}
 
 
 /************* HOME/LANDING PAGE *****************/
@@ -4559,3 +4576,83 @@
 .annotation-message {
   display: block;
 }
+
+.dac-custom-search {
+  background: #fff;
+  margin: 0 -10px;
+  padding: 20px 10px;
+  z-index: 1;
+}
+
+.dac-custom-search .dac-fab, .dac-custom-search .dac-button-social {
+  top: -48px;
+}
+
+.dac-custom-search-section-title {
+  color: #505050;
+}
+
+.dac-custom-search-entry {
+  margin-bottom: 36px;
+  margin-top: 24px;
+  margin-left: 0em;
+}
+
+.dac-custom-search-image {
+  background-size: cover;
+  height: 112px;
+}
+
+.dac-custom-search-title {
+  color: #333;
+  font-size: 14px;
+  font-weight: 700;
+  line-height: 0px;
+  padding: 0;
+  margin: 1em 0em 1em 0em;
+}
+
+.dac-custom-search-title a {
+  color: inherit;
+}
+
+.dac-custom-search-section {
+  color: #999;
+  font-size: 16px;
+  font-variant: small-caps;
+  font-weight: 700;
+  margin: -5px 0 0 0;
+}
+
+.dac-custom-search-snippet {
+  color: #666;
+  margin: 0em 0em .25em 0em;
+}
+
+.dac-custom-search-link {
+  font-weight: 500;
+  word-wrap: break-word;
+  width: 100%;
+}
+
+.dac-custom-search-load-more {
+  background: none;
+  border: none;
+  color: #333;
+  cursor: pointer;
+  display: block;
+  font-size: 14px;
+  font-weight: 700;
+  margin: 75px auto;
+  outline: none;
+  padding: 10px;
+}
+
+.dac-custom-search-load-more:hover {
+  opacity: 0.7;
+}
+
+.dac-custom-search-no-results {
+  color: #999;
+}
+
diff --git a/tools/droiddoc/templates-sac/assets/js/docs.js b/tools/droiddoc/templates-sac/assets/js/docs.js
index d3a9223..d0c12a8 100644
--- a/tools/droiddoc/templates-sac/assets/js/docs.js
+++ b/tools/droiddoc/templates-sac/assets/js/docs.js
@@ -1630,7 +1630,10 @@
             } else {
               // otherwise, results are already showing, so allow ajax to auto refresh the results
               // and ignore this Enter press to avoid the reload.
-              return false;
+              // return false;
+              //
+              // For now, we're not using AJAX so we respond to every Enter.
+              return true;
             }
         } else if (kd && gSelectedIndex >= 0) {
             window.location = $("a",$('#search_filtered li')[gSelectedIndex]).attr("href");
@@ -1691,6 +1694,11 @@
 
 
         // Search for Google matches
+        /*
+         *  Commented this out because GOOGLE_DATA not defined for us and code
+         *  causes an error.  This probably has to do with the missing
+         *  gms_lists.js file in SAC. TODO figure it all out.
+         *
         for (var i=0; i<GOOGLE_DATA.length; i++) {
             var s = GOOGLE_DATA[i];
             if (text.length != 0 &&
@@ -1703,6 +1711,7 @@
         for (var i=0; i<gGoogleMatches.length; i++) {
             var s = gGoogleMatches[i];
         }
+        */
 
         highlight_autocomplete_result_labels(text);
         sync_selection_table(toroot);
@@ -1830,81 +1839,138 @@
   $("#search_autocomplete").val("").blur();
 
   // reset the ajax search callback to nothing, so results don't appear unless ENTER
-  searchControl.setSearchStartingCallback(this, function(control, searcher, query) {});
+  //  searchControl.setSearchStartingCallback(this, function(control, searcher, query) {});
   return false;
 }
 
 
-
 /* ########################################################## */
 /* ################  CUSTOM SEARCH ENGINE  ################## */
 /* ########################################################## */
 
-google.load('search', '1');
-var searchControl;
+// TODO, add localized search.
+function getLangPref() {
+  return "en";
+}
+
+// Package of functions that does custom search, from DAC redesign.
+(function($) {
+  var LANG;
+
+  function getSearchLang() {
+    if (!LANG) {
+      LANG = getLangPref();
+
+      // Fix zh-cn to be zh-CN.
+      LANG = LANG.replace(/-\w+/, function(m) { return m.toUpperCase(); });
+    }
+    return LANG;
+  }
+
+  function customSearch(query, start) {
+    var searchParams = {
+      // Keys for SAC
+      cx:'016258643462168859875:qqpm8fiwgc0',
+      key: 'AIzaSyBOWHD3JAF6Q9LIJ4NiahGAF70W7iDAI9M',
+
+      // Keys for DAC
+      // cx: '000521750095050289010:zpcpi1ea4s8',
+      // key: 'AIzaSyCFhbGnjW06dYwvRCU8h_zjdpS4PYYbEe8',
+
+      q: query,
+      start: start || 1,
+      num: 6,
+      hl: getSearchLang(),
+      fields: 'queries,items(pagemap,link,title,htmlSnippet,formattedUrl)'
+    };
+
+    return $.get('https://content.googleapis.com/customsearch/v1?' +  $.param(searchParams));
+  }
+
+  function renderResults(el, results) {
+    if (!results.items) {
+      el.append($('<div>').text('No results'));
+      return;
+    }
+
+    for (var i = 0; i < results.items.length; i++) {
+      var item = results.items[i];
+      // No thumbnail images in SAC.
+      // var hasImage = item.pagemap && item.pagemap.cse_thumbnail;
+      var sectionMatch = item.link.match(/source\.android\.com\/(\w*)/);
+      var section = (sectionMatch && sectionMatch[1]) || 'blog';
+
+      var entry = $('<div>').addClass('dac-custom-search-entry cols');
+
+// No thumbnail images in SAC.
+//      if (hasImage) {
+//        var image = item.pagemap.cse_thumbnail[0];
+//        entry.append($('<div>').addClass('col-1of6')
+//          .append($('<div>').addClass('dac-custom-search-image').css('background-image', 'url(' + image.src + ')')));
+//      }
+// entry.append($('<div>').addClass(hasImage ? 'col-5of6' : 'col-6of6')
+      entry.append($('<div>')
+        .append($('<p>').addClass('dac-custom-search-section').text(section))
+        .append(
+          $('<a>').text(item.title).attr('href', item.link).wrap('<h2>').parent().addClass('dac-custom-search-title')
+        )
+        .append($('<p>').addClass('dac-custom-search-snippet').html(item.htmlSnippet.replace(/<br>/g, '')))
+        .append($('<a>').addClass('dac-custom-search-link').text(item.formattedUrl).attr('href', item.link)));
+
+      el.append(entry);
+    }
+
+    if (results.queries.nextPage) {
+      var loadMoreButton = $('<button id="dac-custom-search-load-more">')
+        .addClass('dac-custom-search-load-more')
+        .text('Load more')
+        .click(function() {
+          loadMoreResults(el, results);
+        });
+
+      el.append(loadMoreButton);
+    }
+  }
+
+  function loadMoreResults(el, results) {
+    var query = results.queries.request.searchTerms;
+    var start = results.queries.nextPage.startIndex;
+    var loadMoreButton = el.find('#dac-custom-search-load-more');
+
+    loadMoreButton.text('Loading more...');
+
+    customSearch(query, start).then(function(results) {
+      loadMoreButton.remove();
+      renderResults(el, results);
+    });
+  }
+
+  $.fn.customSearch = function(query) {
+    var el = $(this);
+
+    customSearch(query).then(function(results) {
+      el.empty();
+      renderResults(el, results);
+    });
+  };
+})(jQuery);
+
 
 function loadSearchResults() {
-  document.getElementById("search_autocomplete").style.color = "#000";
 
-  // create search control
-  searchControl = new google.search.SearchControl();
-
-  // use our existing search form and use tabs when multiple searchers are used
-  drawOptions = new google.search.DrawOptions();
-  drawOptions.setDrawMode(google.search.SearchControl.DRAW_MODE_TABBED);
-  drawOptions.setInput(document.getElementById("search_autocomplete"));
-
-  // configure search result options
-  searchOptions = new google.search.SearcherOptions();
-  searchOptions.setExpandMode(GSearchControl.EXPAND_MODE_OPEN);
-
-  // Configure s.a.c searchers
-  sacSiteSearcher = new google.search.WebSearch();
-  sacSiteSearcher.setUserDefinedLabel("All");
-  sacSiteSearcher.setSiteRestriction("http://source.android.com/");
-
-  sourceSearcher = new google.search.WebSearch();
-  sourceSearcher.setUserDefinedLabel("Source");
-  sourceSearcher.setSiteRestriction("http://source.android.com/source/");
-
-  devicesSearcher = new google.search.WebSearch();
-  devicesSearcher.setUserDefinedLabel("Devices");
-  devicesSearcher.setSiteRestriction("http://source.android.com/devices/");
-
-  accessoriesSearcher = new google.search.WebSearch();
-  accessoriesSearcher.setUserDefinedLabel("Accessories");
-  accessoriesSearcher.setSiteRestriction("http://source.android.com/accessories/");
-
-  compatibilitySearcher = new google.search.WebSearch();
-  compatibilitySearcher.setUserDefinedLabel("Compatibility");
-  compatibilitySearcher.setSiteRestriction("http://source.android.com/compatibility/");
-
-  // add each searcher to the search control
-  searchControl.addSearcher(sacSiteSearcher, searchOptions);
-  searchControl.addSearcher(sourceSearcher, searchOptions);
-  searchControl.addSearcher(devicesSearcher, searchOptions);
-  searchControl.addSearcher(accessoriesSearcher, searchOptions);
-  searchControl.addSearcher(compatibilitySearcher, searchOptions);
+  // Draw the search results box
+  //searchControl.draw(document.getElementById("leftSearchControl"), drawOptions);
+  $(searchResults).append('<div class="leftSearchControl"></div>');
 
 
-  // configure result options
-  searchControl.setResultSetSize(google.search.Search.LARGE_RESULTSET);
-  searchControl.setLinkTarget(google.search.Search.LINK_TARGET_SELF);
-  searchControl.setTimeoutInterval(google.search.SearchControl.TIMEOUT_SHORT);
-  searchControl.setNoResultsString(google.search.SearchControl.NO_RESULTS_DEFAULT_STRING);
-
-  // upon ajax search, refresh the url and search title
-  searchControl.setSearchStartingCallback(this, function(control, searcher, query) {
-    updateResultTitle(query);
-    var query = document.getElementById('search_autocomplete').value;
-    location.hash = 'q=' + query;
-  });
-
-  // draw the search results box
-  searchControl.draw(document.getElementById("leftSearchControl"), drawOptions);
+  // Refresh the url and search title
+  var query = document.getElementById('search_autocomplete').value || getQuery(location.hash);
+  updateResultTitle(query);
+  location.hash = 'q=' + query;
 
   // get query and execute the search
-  searchControl.execute(decodeURI(getQuery(location.hash)));
+  //searchControl.execute(decodeURI(getQuery(location.hash)));
+  $(leftSearchControl).customSearch(getQuery(location.hash));
 
   document.getElementById("search_autocomplete").focus();
   addTabListeners();
@@ -1912,7 +1978,9 @@
 // End of loadSearchResults
 
 google.setOnLoadCallback(function(){
-  if (location.hash.indexOf("q=") == -1) {
+
+  var query = decodeURI(getQuery(location.hash));
+  if (location.hash.indexOf("q=") == -1 || query == '') {
     // if there's no query in the url, don't search and make sure results are hidden
     $('#searchResults').hide();
     return;
@@ -1937,7 +2005,8 @@
 
   // Otherwise, we have a search to do
   var query = decodeURI(getQuery(location.hash));
-  searchControl.execute(query);
+  //searchControl.execute(query);
+  $('#leftSearchControl').customSearch(query);
   $('#searchResults').slideDown('slow');
   $("#search_autocomplete").focus();
   $(".search .close").removeClass("hide");
@@ -1946,7 +2015,7 @@
 });
 
 function updateResultTitle(query) {
-  $("#searchTitle").html("Results for <em>" + escapeHTML(query) + "</em>");
+  $("#searchTitle").html("Results for <em>" + encodeURIComponent(query) + "</em>");
 }
 
 // forcefully regain key-up event control (previously jacked by search api)
@@ -1983,18 +2052,6 @@
   return queryParts[1];
 }
 
-/* returns the given string with all HTML brackets converted to entities
-    TODO: move this to the site's JS library */
-function escapeHTML(string) {
-  return string.replace(/</g,"&lt;")
-                .replace(/>/g,"&gt;");
-}
-
-
-
-
-
-
 
 /* ######################################################## */
 /* #################  JAVADOC REFERENCE ################### */
@@ -2077,7 +2134,9 @@
 
   // get the DOM element and use setAttribute cuz IE6 fails when using jquery .attr('selected',true)
   var selectedLevelItem = $("#apiLevelSelector option[value='"+userApiLevel+"']").get(0);
-  selectedLevelItem.setAttribute('selected',true);
+//  Another piece of functionality that we don't use that produces an error.
+//  TODO figure it all out.
+//  selectedLevelItem.setAttribute('selected',true);
 }
 
 function changeApiLevel() {
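
The customSearch() helper added above is a plain Custom Search JSON API request, so it can be reproduced from the command line when debugging result rendering; cx and key are the SAC values taken verbatim from the hunk, and the query string is only an example:

    $ curl 'https://content.googleapis.com/customsearch/v1?cx=016258643462168859875:qqpm8fiwgc0&key=AIzaSyBOWHD3JAF6Q9LIJ4NiahGAF70W7iDAI9M&q=verified+boot&num=6&hl=en'
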
diff --git a/tools/droiddoc/templates-sac/class.cs b/tools/droiddoc/templates-sac/class.cs
index 440e705..98633fb 100644
--- a/tools/droiddoc/templates-sac/class.cs
+++ b/tools/droiddoc/templates-sac/class.cs
@@ -185,8 +185,9 @@
     <tr class="<?cs if:count % #2 ?>alt-color<?cs /if ?> api apilevel-<?cs var:method.since ?>" >
         <td class="jd-typecol"><nobr>
             <?cs var:method.abstract ?>
-            <?cs var:method.final ?>
+            <?cs var:method.default ?>
             <?cs var:method.static ?>
+            <?cs var:method.final ?>
             <?cs call:type_link(method.generic) ?>
             <?cs call:type_link(method.returnType) ?></nobr>
         </td>
@@ -534,10 +535,11 @@
 <div class="jd-details api apilevel-<?cs var:method.since ?>"> 
     <h4 class="jd-details-title">
       <span class="normal">
-        <?cs var:method.scope ?> 
-        <?cs var:method.static ?> 
-        <?cs var:method.final ?> 
-        <?cs var:method.abstract ?> 
+        <?cs var:method.scope ?>
+        <?cs var:method.abstract ?>
+        <?cs var:method.default ?>
+        <?cs var:method.static ?>
+        <?cs var:method.final ?>
         <?cs call:type_link(method.returnType) ?>
       </span>
       <span class="sympad"><?cs var:method.name ?></span>
diff --git a/tools/droiddoc/templates-sac/components/masthead.cs b/tools/droiddoc/templates-sac/components/masthead.cs
index a8618c0..ee2582f 100644
--- a/tools/droiddoc/templates-sac/components/masthead.cs
+++ b/tools/droiddoc/templates-sac/components/masthead.cs
@@ -255,7 +255,7 @@
           <a href="<?cs var:toroot ?>index.html">
             <img src="<?cs var:toroot ?>assets/images/sac_logo.png"
                 srcset="<?cs var:toroot ?>assets/images/sac_logo@2x.png 2x"
-                width="123" height="25" alt="Android Developers" />
+                width="123" height="25" alt="Android Open Source Project" />
           </a>
           </div>
             <ul class="nav-x col-9">
@@ -266,9 +266,9 @@
                 <li class="devices"><a href="<?cs var:toroot ?>devices/index.html" <?cs
                   if:devices ?>class="selected"<?cs /if ?>
                   >Devices</a></li>
-                <li class="accessories"><a href="<?cs var:toroot ?>accessories/index.html" <?cs
-                  if:accessories ?>class="selected"<?cs /if ?>
-                  >Accessories</a></li>
+                <li class="security"><a href="<?cs var:toroot ?>security/index.html" <?cs
+                  if:security ?>class="selected"<?cs /if ?>
+                  >Security</a></li>
                 <li class="compatibility last"><a href="<?cs var:toroot ?>compatibility/index.html" <?cs
                   if:compatibility ?>class="selected"<?cs /if ?>
                   >Compatibility</a></li>
diff --git a/tools/droiddoc/templates-sac/customizations.cs b/tools/droiddoc/templates-sac/customizations.cs
index 6bdfe36..1120e70 100644
--- a/tools/droiddoc/templates-sac/customizations.cs
+++ b/tools/droiddoc/templates-sac/customizations.cs
@@ -355,7 +355,7 @@
 def:custom_footerlinks() ?>
   <p>
     <a href="<?cs var:toroot ?>source/index.html">About Android</a>&nbsp;&nbsp;|&nbsp;
-    <a href="<?cs var:toroot ?>source/community/index.html">Community</a>&nbsp;&nbsp;|&nbsp;
+    <a href="<?cs var:toroot ?>source/community.html">Community</a>&nbsp;&nbsp;|&nbsp;
     <a href="<?cs var:toroot ?>legal.html">Legal</a>&nbsp;&nbsp;|&nbsp;
   </p><?cs 
 /def ?>
@@ -382,8 +382,8 @@
     <?cs call:compatibility_nav() ?>
   <?cs elif:source ?>
     <?cs call:source_nav() ?>
-  <?cs elif:accessories ?>
-    <?cs call:accessories_nav() ?>
+  <?cs elif:security ?>
+    <?cs call:security_nav() ?>
   <?cs elif:reference ?>
     <?cs call:default_left_nav() ?>
   <?cs /if ?>
@@ -450,14 +450,14 @@
 <?cs /def ?>
 
 <?cs
-def:accessories_nav() ?>
+def:security_nav() ?>
   <div class="wrap clearfix" id="body-content">
     <div class="col-4" id="side-nav" itemscope itemtype="http://schema.org/SiteNavigationElement">
       <div id="devdoc-nav" class="scroll-pane">
 <a class="totop" href="#top" data-g-event="left-nav-top">to top</a>
 
 <?cs 
-        include:"../../../../docs/source.android.com/src/accessories/accessories_toc.cs" ?>
+        include:"../../../../docs/source.android.com/src/security/security_toc.cs" ?>
 
       </div>
     </div> <!-- end side-nav -->
diff --git a/tools/droiddoc/templates-sac/docpage.cs b/tools/droiddoc/templates-sac/docpage.cs
index 3e5c24f..3d120b6 100644
--- a/tools/droiddoc/templates-sac/docpage.cs
+++ b/tools/droiddoc/templates-sac/docpage.cs
@@ -34,7 +34,7 @@
             ru-lang="Предыдущий"
             ko-lang="이전"
             ja-lang="前へ"
-            es-lang="Anterior"               
+            es-lang="Anterior"
             >Previous</a>
         <a href="#" class="next-page-link hide"
             zh-TW-lang="下一堂課"
@@ -42,7 +42,7 @@
             ru-lang="Следующий"
             ko-lang="다음"
             ja-lang="次へ"
-            es-lang="Siguiente"               
+            es-lang="Siguiente"
             >Next</a>
         <a href="#" class="start-class-link hide"
             zh-TW-lang="開始上課"
@@ -50,7 +50,7 @@
             ru-lang="Начало работы"
             ko-lang="시작하기"
             ja-lang="開始する"
-            es-lang="Empezar"               
+            es-lang="Empezar"
             >Get started</a>
       </div>
     <?cs elif:!page.trainingcourse ?>
@@ -61,7 +61,7 @@
             ru-lang="Предыдущий"
             ko-lang="이전"
             ja-lang="前へ"
-            es-lang="Anterior"               
+            es-lang="Anterior"
             >Previous</a>
         <a href="#" class="next-page-link hide"
             zh-TW-lang="下一堂課"
@@ -69,7 +69,7 @@
             ru-lang="Следующий"
             ko-lang="다음"
             ja-lang="次へ"
-            es-lang="Siguiente"               
+            es-lang="Siguiente"
             >Next</a>
       </div>
     <?cs /if ?><?cs # end if training ?>
@@ -87,7 +87,6 @@
         <?cs /if ?>
           <h1 itemprop="name" style="margin-bottom:0;"><?cs var:page.title ?></h1>
           <p itemprop="description"><?cs var:page.landing.intro ?></p>
-          
           <p><a class="next-page-link topic-start-link"></a></p>
         <?cs if:page.landing.image ?>
         </div>
@@ -112,22 +111,16 @@
   <?cs # THIS IS THE MAIN DOC CONTENT ?>
   <div id="jd-content">
 
-
     <div class="jd-descr" itemprop="articleBody">
     <?cs call:tag_list(root.descr) ?>
     </div>
-      
-      <div class="content-footer <?cs 
-                    if:fullpage ?>wrap<?cs
-                    else ?>layout-content-row<?cs /if ?>" 
+
+      <div class="content-footer-sac"
                     itemscope itemtype="http://schema.org/SiteNavigationElement">
         <div class="layout-content-col <?cs 
                     if:fullpage ?>col-16<?cs 
                     elif:training||guide ?>col-8<?cs 
                     else ?>col-9<?cs /if ?>" style="padding-top:4px">
-          <?cs if:!page.noplus ?><?cs if:fullpage ?><style>#___plusone_0 {float:right !important;}</style><?cs /if ?>
-            <div class="g-plusone" data-size="medium"></div>
-          <?cs /if ?>
         </div>
         <?cs if:!fullscreen ?>
         <div class="paging-links layout-content-col col-4">
@@ -138,7 +131,7 @@
                 ru-lang="Предыдущий"
                 ko-lang="이전"
                 ja-lang="前へ"
-                es-lang="Anterior"               
+                es-lang="Anterior"
                 >Previous</a>
             <a href="#" class="next-page-link hide"
                 zh-TW-lang="下一堂課"
@@ -146,23 +139,20 @@
                 ru-lang="Следующий"
                 ko-lang="다음"
                 ja-lang="次へ"
-                es-lang="Siguiente"               
+                es-lang="Siguiente"
                 >Next</a>
           <?cs /if ?>
         </div>
         <?cs /if ?>
       </div>
-      
       <?cs # for training classes, provide a different kind of link when the next page is a different class ?>
       <?cs if:training && !page.article ?>
       <div class="layout-content-row content-footer next-class" style="display:none" itemscope itemtype="http://schema.org/SiteNavigationElement">
           <a href="#" class="next-class-link hide">Next class: </a>
       </div>
       <?cs /if ?>
-
-  </div> <!-- end jd-content -->
-
-<?cs include:"footer.cs" ?>
+    </div> <!-- end jd-content -->
+  <?cs include:"footer.cs" ?>
 </div><!-- end doc-content -->
 
 <?cs include:"trailer.cs" ?>
diff --git a/tools/droiddoc/templates-sac/footer.cs b/tools/droiddoc/templates-sac/footer.cs
index 6979ca0..e43adb0 100644
--- a/tools/droiddoc/templates-sac/footer.cs
+++ b/tools/droiddoc/templates-sac/footer.cs
@@ -1,4 +1,9 @@
 <div id="footer" class="wrap" <?cs if:fullpage ?>style="width:940px"<?cs /if ?>>
+  <style>.feedback { float: right !Important }</style>
+  <div class="feedback">
+    <a href="#" class="button" onclick=" try {
+      userfeedback.api.startFeedback({'productId':'715571','authuser':'1'});return false;}catch(e){}">Site Feedback</a>
+  </div>
   <div id="copyright">
     <?cs call:custom_cc_copyright() ?>
   </div>
diff --git a/tools/droiddoc/templates-sac/head_tag.cs b/tools/droiddoc/templates-sac/head_tag.cs
index 5cee68c..b672b25 100644
--- a/tools/droiddoc/templates-sac/head_tag.cs
+++ b/tools/droiddoc/templates-sac/head_tag.cs
@@ -46,6 +46,8 @@
 <script src="<?cs var:toroot ?>navtree_data.js" async type="text/javascript"></script>
 <?cs /if ?>
 
+<script type="text/javascript" src="//www.gstatic.com/feedback/api.js"></script>
+
 <script type="text/javascript">
   var _gaq = _gaq || [];
   _gaq.push(['_setAccount', 'UA-45455297-1']);
diff --git a/tools/droiddoc/templates-sdk-dev/class.cs b/tools/droiddoc/templates-sdk-dev/class.cs
index 86cdc25..dee7a4c 100644
--- a/tools/droiddoc/templates-sdk-dev/class.cs
+++ b/tools/droiddoc/templates-sdk-dev/class.cs
@@ -19,8 +19,9 @@
   <?cs if:subcount(method.returnType) ?>
     <td><code>
         <?cs var:method.abstract ?>
-        <?cs var:method.final ?>
+        <?cs var:method.default ?>
         <?cs var:method.static ?>
+        <?cs var:method.final ?>
         <?cs call:type_link(method.generic) ?>
         <?cs call:type_link(method.returnType) ?></code>
     </td>
diff --git a/tools/droiddoc/templates-sdk/class.cs b/tools/droiddoc/templates-sdk/class.cs
index 93fcf88..8312b25 100644
--- a/tools/droiddoc/templates-sdk/class.cs
+++ b/tools/droiddoc/templates-sdk/class.cs
@@ -188,8 +188,9 @@
     <tr class="<?cs if:count % #2 ?>alt-color<?cs /if ?> api apilevel-<?cs var:method.since ?>" >
         <td class="jd-typecol"><nobr>
             <?cs var:method.abstract ?>
-            <?cs var:method.final ?>
+            <?cs var:method.default ?>
             <?cs var:method.static ?>
+            <?cs var:method.final ?>
             <?cs call:type_link(method.generic) ?>
             <?cs call:type_link(method.returnType) ?></nobr>
         </td>
@@ -553,10 +554,11 @@
 <div class="jd-details api apilevel-<?cs var:method.since ?>"> 
     <h4 class="jd-details-title">
       <span class="normal">
-        <?cs var:method.scope ?> 
-        <?cs var:method.static ?> 
-        <?cs var:method.final ?> 
-        <?cs var:method.abstract ?> 
+        <?cs var:method.scope ?>
+        <?cs var:method.abstract ?>
+        <?cs var:method.default ?>
+        <?cs var:method.static ?>
+        <?cs var:method.final ?>
         <?cs call:type_link(method.returnType) ?>
       </span>
       <span class="sympad"><?cs var:method.name ?></span>
diff --git a/tools/droiddoc/templates-sdk/components/masthead.cs b/tools/droiddoc/templates-sdk/components/masthead.cs
index 04fee65..e17612d 100644
--- a/tools/droiddoc/templates-sdk/components/masthead.cs
+++ b/tools/droiddoc/templates-sdk/components/masthead.cs
@@ -80,7 +80,7 @@
         <?cs call:header_search_widget() ?>
         <?cs /if ?>
 
-        <?cs if:ndk ?><a class="dac-header-console-btn" href="http://developer.android.com">
+        <?cs if:ndk ?><a class="dac-header-console-btn" href="//developer.android.com">
           <span class="dac-visible-desktop-inline">Back to Android Developers</span>
         </a><?cs else ?><a class="dac-header-console-btn" href="https://play.google.com/apps/publish/">
           <span class="dac-sprite dac-google-play"></span>
@@ -140,7 +140,7 @@
   </nav>
   <!-- end NDK navigation-->
   <?cs else ?>
-  <!-- Navigation-->
+  <!-- Standard DAC Navigation-->
   <nav class="dac-nav">
     <div class="dac-nav-dimmer" data-dac-toggle-nav></div>
 
@@ -151,8 +151,32 @@
                srcset="<?cs var:toroot ?>assets/images/android_logo@2x.png 2x"
                width="32" height="36" alt="Android" /> Developers
         </a>
+      </li><?cs
+      #
+      # For the reference only docs, include just one tab
+      #
+      ?><?cs if:referenceonly ?>
+      <li class="dac-nav-item develop">
+        <a class="dac-nav-link has-subnav" href="<?cs var:toroot ?>reference/packages.html" zh-tw-lang="參考資源"
+        zh-cn-lang="参考" ru-lang="Справочник" ko-lang="참조문서" ja-lang="リファレンス"
+        es-lang="Referencia">API Reference</a>
+      <div class="dac-nav-secondary develop">
+        <h1 style="font-size: 20px; line-height: 24px; margin: 20px; color:#444"
+          ><?cs
+          if:sdk.preview
+            ?>Android <?cs var:sdk.codename ?>
+              Preview <?cs var:sdk.preview.version ?><?cs
+          else
+            ?>Android <?cs var:sdk.version ?>
+              r<?cs var:sdk.rel.id ?><?cs
+          /if ?></h1>
+      </div>
       </li>
-      <li class="dac-nav-item home">
+      <?cs else ?><?cs
+      #
+      # End reference only docs, now the online DAC tabs...
+      #
+      ?><li class="dac-nav-item home">
         <a class="dac-nav-link dac-visible-mobile-block" href="<?cs var:toroot ?>index.html">Home</a>
         <ul class="dac-nav-secondary about">
           <li class="dac-nav-item about">
@@ -261,7 +285,12 @@
             <a class="dac-nav-link" href="<?cs var:toroot ?>distribute/stories/index.html">Stories</a>
           </li>
         </ul>
-      </li>
+      </li><?cs
+      /if ?><?cs
+      #
+      # End if/else reference only docs
+      #
+      ?>
     </ul>
   </nav>
   <!-- end navigation-->
@@ -280,7 +309,7 @@
     <div style="height:20px"><!-- spacer to bump header down --></div>
     <div id="butterbar-wrapper">
       <div id="butterbar">
-        <a href="http://googleblog.blogspot.com/" id="butterbar-message">
+        <a href="//googleblog.blogspot.com/" id="butterbar-message">
           The Android 5.0 SDK will be available on October 17th!
         </a>
       </div>
diff --git a/tools/droiddoc/templates-sdk/customizations.cs b/tools/droiddoc/templates-sdk/customizations.cs
index 16469ac..4cf5abb 100644
--- a/tools/droiddoc/templates-sdk/customizations.cs
+++ b/tools/droiddoc/templates-sdk/customizations.cs
@@ -585,7 +585,7 @@
 <?cs # appears at the bottom of every page ?><?cs
 def:custom_cc_copyright() ?>
   Except as noted, this content is
-  licensed under <a href="http://creativecommons.org/licenses/by/2.5/">
+  licensed under <a href="//creativecommons.org/licenses/by/2.5/">
   Creative Commons Attribution 2.5</a>. For details and
   restrictions, see the <a href="<?cs var:toroot ?>license.html">Content
   License</a>.<?cs
@@ -594,7 +594,7 @@
 <?cs
 def:custom_copyright() ?>
   Except as noted, this content is licensed under <a
-  href="http://www.apache.org/licenses/LICENSE-2.0">Apache 2.0</a>.
+  href="//www.apache.org/licenses/LICENSE-2.0">Apache 2.0</a>.
   For details and restrictions, see the <a href="<?cs var:toroot ?>license.html">
   Content License</a>.<?cs
 /def ?>
diff --git a/tools/droiddoc/templates-sdk/designpage.cs b/tools/droiddoc/templates-sdk/designpage.cs
index 9393b64..d9c2681 100644
--- a/tools/droiddoc/templates-sdk/designpage.cs
+++ b/tools/droiddoc/templates-sdk/designpage.cs
@@ -7,7 +7,7 @@
       Android Design<?cs if:page.title ?> - <?cs var:page.title ?><?cs /if ?>
     </title>
     <link rel="shortcut icon" type="image/x-icon" href="/favicon.ico">
-    <link rel="stylesheet" href="http://fonts.googleapis.com/css?family=Roboto:regular,medium,thin,italic,mediumitalic">
+    <link rel="stylesheet" href="https://fonts.googleapis.com/css?family=Roboto:regular,medium,thin,italic,mediumitalic">
     <link rel="stylesheet" href="<?cs var:toroot ?>assets/yui-3.3.0-reset-min.css">
     <link rel="stylesheet" href="<?cs var:toroot ?>assets/design/default.css?v=19">
     <script src="<?cs var:toroot ?>assets/jquery-1.6.2.min.js"></script>
@@ -70,16 +70,16 @@
 
         <p id="copyright">
           Except as noted, this content is licensed under
-          <a href="http://creativecommons.org/licenses/by/2.5/">
+          <a href="//creativecommons.org/licenses/by/2.5/">
           Creative Commons Attribution 2.5</a>.<br>
           For details and restrictions, see the
-          <a href="http://developer.android.com/license.html">Content License</a>.
+          <a href="//developer.android.com/license.html">Content License</a>.
         </p>
 
         <p>
-          <a href="http://www.android.com/terms.html">Site Terms of Service</a> &ndash;
-          <a href="http://www.android.com/privacy.html">Privacy Policy</a> &ndash;
-          <a href="http://www.android.com/branding.html">Brand Guidelines</a>
+          <a href="//www.android.com/terms.html">Site Terms of Service</a> &ndash;
+          <a href="//www.android.com/privacy.html">Privacy Policy</a> &ndash;
+          <a href="//www.android.com/branding.html">Brand Guidelines</a>
         </p>
 
       </div>
diff --git a/tools/droiddoc/templates-sdk/footer.cs b/tools/droiddoc/templates-sdk/footer.cs
index 666f594..095c7fd 100644
--- a/tools/droiddoc/templates-sdk/footer.cs
+++ b/tools/droiddoc/templates-sdk/footer.cs
@@ -7,7 +7,7 @@
       </div>
       <div class="col-1of2 dac-footer-reachout">
         <div class="dac-footer-contact">
-          <a class="dac-footer-contact-link" href="http://android-developers.blogspot.com/">Blog</a>
+          <a class="dac-footer-contact-link" href="//android-developers.blogspot.com/">Blog</a>
           <a class="dac-footer-contact-link" href="/support.html">Support</a>
         </div>
         <div class="dac-footer-social">
diff --git a/tools/droiddoc/templates-sdk/head_tag.cs b/tools/droiddoc/templates-sdk/head_tag.cs
index 27a66cb..1dbb3c3 100644
--- a/tools/droiddoc/templates-sdk/head_tag.cs
+++ b/tools/droiddoc/templates-sdk/head_tag.cs
@@ -41,15 +41,15 @@
 <!-- STYLESHEETS -->
 <link rel="stylesheet"
 href="<?cs
-if:android.whichdoc != 'online' ?>http:<?cs
+if:android.whichdoc != 'online' ?>https:<?cs
 /if ?>//fonts.googleapis.com/css?family=Roboto+Condensed">
 <link rel="stylesheet" href="<?cs
-if:android.whichdoc != 'online' ?>http:<?cs
+if:android.whichdoc != 'online' ?>https:<?cs
 /if ?>//fonts.googleapis.com/css?family=Roboto:light,regular,medium,thin,italic,mediumitalic,bold"
   title="roboto">
 <?cs
   if:ndk ?><link rel="stylesheet" href="<?cs
-  if:android.whichdoc != 'online' ?>http:<?cs
+  if:android.whichdoc != 'online' ?>https:<?cs
   /if ?>//fonts.googleapis.com/css?family=Roboto+Mono:400,500,700" title="roboto-mono" type="text/css"><?cs
 /if ?>
 <link href="<?cs var:toroot ?>assets/css/default.css?v=19" rel="stylesheet" type="text/css">
@@ -61,7 +61,7 @@
 <?cs /if ?>
 
 <!-- JAVASCRIPT -->
-<script src="<?cs if:android.whichdoc != 'online' ?>http:<?cs /if ?>//www.google.com/jsapi" type="text/javascript"></script>
+<script src="<?cs if:android.whichdoc != 'online' ?>https:<?cs /if ?>//www.google.com/jsapi" type="text/javascript"></script>
 <?cs
 if:devsite
   ?><script src="<?cs var:toroot ?>_static/js/android_3p-bundle.js" type="text/javascript"></script><?cs
diff --git a/tools/droiddoc/templates-sdk/macros_override.cs b/tools/droiddoc/templates-sdk/macros_override.cs
index 0a94598..9d146b1 100644
--- a/tools/droiddoc/templates-sdk/macros_override.cs
+++ b/tools/droiddoc/templates-sdk/macros_override.cs
@@ -33,4 +33,12 @@
       <?cs set:count = count + #1 ?>
     <?cs /each ?>
   </table>
-<?cs /def ?>
\ No newline at end of file
+<?cs /def ?>
+
+<?cs def:doc_root_override() ?><?cs
+  if:referenceonly
+    ?>https://developer.android.com/<?cs
+  else ?><?cs
+    var:toroot ?><?cs
+  /if ?><?cs
+/def ?>
\ No newline at end of file
diff --git a/tools/droiddoc/templates-sdk/sdkpage.cs b/tools/droiddoc/templates-sdk/sdkpage.cs
index bf96a5f..c6679a6 100644
--- a/tools/droiddoc/templates-sdk/sdkpage.cs
+++ b/tools/droiddoc/templates-sdk/sdkpage.cs
@@ -87,7 +87,7 @@
     <td>Windows 32-bit</td>
     <td>
   <a onClick="return onDownload(this)" data-modal-toggle="ndk_tos"
-     href="http://dl.google.com/android/repository/<?cs var:ndk.win32_download ?>"><?cs var:ndk.win32_download ?></a>
+     href="//dl.google.com/android/repository/<?cs var:ndk.win32_download ?>"><?cs var:ndk.win32_download ?></a>
     </td>
     <td><?cs var:ndk.win32_bytes ?></td>
     <td><?cs var:ndk.win32_checksum ?></td>
@@ -95,7 +95,7 @@
  <!-- <tr>
    <td>
   <a onClick="return onDownload(this)" data-modal-toggle="ndk_tos"
-     href="http://dl.google.com/android/ndk/<?cs var:ndk.win32.legacy_download ?>"><?cs var:ndk.win32.legacy_download ?></a>
+     href="//dl.google.com/android/ndk/<?cs var:ndk.win32.legacy_download ?>"><?cs var:ndk.win32.legacy_download ?></a>
     </td>
     <td><?cs var:ndk.win32.legacy_bytes ?></td>
     <td><?cs var:ndk.win32.legacy_checksum ?></td>
@@ -104,7 +104,7 @@
     <td>Windows 64-bit</td>
     <td>
   <a onClick="return onDownload(this)" data-modal-toggle="ndk_tos"
-     href="http://dl.google.com/android/repository/<?cs var:ndk.win64_download ?>"><?cs var:ndk.win64_download ?></a>
+     href="//dl.google.com/android/repository/<?cs var:ndk.win64_download ?>"><?cs var:ndk.win64_download ?></a>
     </td>
     <td><?cs var:ndk.win64_bytes ?></td>
     <td><?cs var:ndk.win64_checksum ?></td>
@@ -112,7 +112,7 @@
  <!--  <tr>
     <td>
   <a onClick="return onDownload(this)" data-modal-toggle="ndk_tos"
-     href="http://dl.google.com/android/repository/<?cs var:ndk.win64.legacy_download ?>"><?cs var:ndk.win64.legacy_download ?></a>
+     href="//dl.google.com/android/repository/<?cs var:ndk.win64.legacy_download ?>"><?cs var:ndk.win64.legacy_download ?></a>
     </td>
     <td><?cs var:ndk.win64.legacy_bytes ?></td>
     <td><?cs var:ndk.win64.legacy_checksum ?></td>
@@ -122,7 +122,7 @@
     <td>Mac OS X 32-bit</td>
     <td>
   <a onClick="return onDownload(this)" data-modal-toggle="ndk_tos"
-     href="http://dl.google.com/android/repository/<?cs var:ndk.mac32_download ?>"><?cs var:ndk.mac32_download ?></a>
+     href="//dl.google.com/android/repository/<?cs var:ndk.mac32_download ?>"><?cs var:ndk.mac32_download ?></a>
     </td>
     <td><?cs var:ndk.mac32_bytes ?></td>
     <td><?cs var:ndk.mac32_checksum ?></td>
@@ -131,7 +131,7 @@
   <tr>
     <td>
   <a onClick="return onDownload(this)" data-modal-toggle="ndk_tos"
-     href="http://dl.google.com/android/repository/<?cs var:ndk.mac32.legacy_download ?>"><?cs var:ndk.mac32.legacy_download ?></a>
+     href="//dl.google.com/android/repository/<?cs var:ndk.mac32.legacy_download ?>"><?cs var:ndk.mac32.legacy_download ?></a>
     </td>
     <td><?cs var:ndk.mac32.legacy_bytes ?></td>
     <td><?cs var:ndk.mac32.legacy_checksum ?></td>
@@ -139,7 +139,7 @@
     <td>Mac OS X 64-bit</td>
     <td>
   <a onClick="return onDownload(this)" data-modal-toggle="ndk_tos"
-     href="http://dl.google.com/android/repository/<?cs var:ndk.mac64_download ?>"><?cs var:ndk.mac64_download ?></a>
+     href="//dl.google.com/android/repository/<?cs var:ndk.mac64_download ?>"><?cs var:ndk.mac64_download ?></a>
     </td>
     <td><?cs var:ndk.mac64_bytes ?></td>
     <td><?cs var:ndk.mac64_checksum ?></td>
@@ -147,7 +147,7 @@
  <!--  <tr>
     <td>
   <a onClick="return onDownload(this)" data-modal-toggle="ndk_tos"
-     href="http://dl.google.com/android/repository/<?cs var:ndk.mac64.legacy_download ?>"><?cs var:ndk.mac64.legacy_download ?></a>
+     href="//dl.google.com/android/repository/<?cs var:ndk.mac64.legacy_download ?>"><?cs var:ndk.mac64.legacy_download ?></a>
     </td>
     <td><?cs var:ndk.mac64.legacy_bytes ?></td>
     <td><?cs var:ndk.mac64.legacy_checksum ?></td>
@@ -155,7 +155,7 @@
  <!--  <tr>
     <td>
   <a onClick="return onDownload(this)" data-modal-toggle="ndk_tos"
-     href="http://dl.google.com/android/repository/<?cs var:ndk.linux32.legacy_download ?>"><?cs var:ndk.linux32.legacy_download ?></a>
+     href="//dl.google.com/android/repository/<?cs var:ndk.linux32.legacy_download ?>"><?cs var:ndk.linux32.legacy_download ?></a>
     </td>
     <td><?cs var:ndk.linux32.legacy_bytes ?></td>
     <td><?cs var:ndk.linux32.legacy_checksum ?></td>
@@ -164,7 +164,7 @@
     <td>Linux 64-bit (x86)</td>
     <td>
   <a onClick="return onDownload(this)" data-modal-toggle="ndk_tos"
-     href="http://dl.google.com/android/repository/<?cs var:ndk.linux64_download ?>"><?cs var:ndk.linux64_download ?></a>
+     href="//dl.google.com/android/repository/<?cs var:ndk.linux64_download ?>"><?cs var:ndk.linux64_download ?></a>
     </td>
     <td><?cs var:ndk.linux64_bytes ?></td>
     <td><?cs var:ndk.linux64_checksum ?></td>
@@ -172,7 +172,7 @@
   <!--  <tr>
     <td>
   <a onClick="return onDownload(this)" data-modal-toggle="ndk_tos"
-     href="http://dl.google.com/android/repository/<?cs var:ndk.linux64.legacy_download ?>"><?cs var:ndk.linux64.legacy_download ?></a>
+     href="//dl.google.com/android/repository/<?cs var:ndk.linux64.legacy_download ?>"><?cs var:ndk.linux64.legacy_download ?></a>
     </td>
     <td><?cs var:ndk.linux64.legacy_bytes ?></td>
     <td><?cs var:ndk.linux64.legacy_checksum ?></td>
diff --git a/tools/fs_config/fs_config.c b/tools/fs_config/fs_config.c
index b9a14e1..e797957 100644
--- a/tools/fs_config/fs_config.c
+++ b/tools/fs_config/fs_config.c
@@ -24,7 +24,6 @@
 
 #include <selinux/selinux.h>
 #include <selinux/label.h>
-#include <selinux/android.h>
 
 #include "private/android_filesystem_config.h"
 
diff --git a/tools/generate-notice-files.py b/tools/generate-notice-files.py
index 4571b70..5b13bf5 100755
--- a/tools/generate-notice-files.py
+++ b/tools/generate-notice-files.py
@@ -99,7 +99,7 @@
     # most browsers, but href's to table row ids do)
     id_table = {}
     id_count = 0
-    for value in file_hash.values():
+    for value in file_hash:
         for filename in value:
              id_table[filename] = id_count
         id_count += 1
@@ -116,7 +116,7 @@
     print >> output_file, "<ul>"
 
     # Flatten the list of lists into a single list of filenames
-    sorted_filenames = sorted(itertools.chain.from_iterable(file_hash.values()))
+    sorted_filenames = sorted(itertools.chain.from_iterable(file_hash))
 
     # Print out a nice table of contents
     for filename in sorted_filenames:
@@ -127,11 +127,11 @@
     print >> output_file, "</div><!-- table of contents -->"
     # Output the individual notice file lists
     print >>output_file, '<table cellpadding="0" cellspacing="0" border="0">'
-    for value in file_hash.values():
+    for value in file_hash:
         print >> output_file, '<tr id="id%d"><td class="same-license">' % id_table.get(value[0])
         print >> output_file, '<div class="label">Notices for file(s):</div>'
         print >> output_file, '<div class="file-list">'
-        for filename in sorted(value):
+        for filename in value:
             print >> output_file, "%s <br/>" % (SRC_DIR_STRIP_RE.sub(r"\1", filename))
         print >> output_file, "</div><!-- file-list -->"
         print >> output_file
@@ -154,10 +154,10 @@
     SRC_DIR_STRIP_RE = re.compile(input_dir + "(/.*).txt")
     output_file = open(output_filename, "wb")
     print >> output_file, file_title
-    for value in file_hash.values():
+    for value in file_hash:
       print >> output_file, "============================================================"
       print >> output_file, "Notices for file(s):"
-      for filename in sorted(value):
+      for filename in value:
         print >> output_file, SRC_DIR_STRIP_RE.sub(r"\1", filename)
       print >> output_file, "------------------------------------------------------------"
       print >> output_file, open(value[0]).read()
@@ -178,11 +178,12 @@
                 file_md5sum = md5sum(filename)
                 files_with_same_hash[file_md5sum].append(filename)
 
+    filesets = [sorted(files_with_same_hash[md5]) for md5 in sorted(files_with_same_hash.keys())]
 
     print "Combining NOTICE files into HTML"
-    combine_notice_files_html(files_with_same_hash, input_dir, html_output_file)
+    combine_notice_files_html(filesets, input_dir, html_output_file)
     print "Combining NOTICE files into text"
-    combine_notice_files_text(files_with_same_hash, input_dir, txt_output_file, file_title)
+    combine_notice_files_text(filesets, input_dir, txt_output_file, file_title)
 
 if __name__ == "__main__":
     main(args)
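
For context on the generate-notice-files.py hunks above: the script now pre-groups the NOTICE files into filesets, a list of sorted file lists ordered by sorted MD5 digest, and iterates that list instead of the raw dict values, which makes the generated HTML and text output deterministic. A minimal sketch of that grouping, assuming files_with_same_hash maps each digest to the list of files sharing that content (the digests and paths below are fake, purely illustrative):

  # Illustrative only: two fake digests mapping to fake NOTICE paths.
  files_with_same_hash = {
      "b2b2": ["/src/b/NOTICE.txt", "/src/a/NOTICE.txt"],
      "a1a1": ["/src/c/NOTICE.txt"],
  }

  # Sort the digests and each file list so iteration order is stable.
  filesets = [sorted(files_with_same_hash[md5])
              for md5 in sorted(files_with_same_hash.keys())]

  for value in filesets:  # mirrors the "for value in file_hash" loops above
      print(value[0], len(value))
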
diff --git a/tools/ijar/Android.mk b/tools/ijar/Android.mk
new file mode 100644
index 0000000..8b2a02c
--- /dev/null
+++ b/tools/ijar/Android.mk
@@ -0,0 +1,16 @@
+# Copyright 2015 The Android Open Source Project
+#
+# The rest of the files in this directory come from
+# https://github.com/bazelbuild/bazel/tree/master/third_party/ijar
+
+LOCAL_PATH:= $(call my-dir)
+
+include $(CLEAR_VARS)
+LOCAL_CPP_EXTENSION := .cc
+LOCAL_SRC_FILES := classfile.cc ijar.cc zip.cc
+LOCAL_CFLAGS += -Wall -Werror
+LOCAL_SHARED_LIBRARIES := libz-host
+LOCAL_MODULE := ijar
+# libc++ is not supported for TARGET_BUILD_APPS builds
+LOCAL_CXX_STL := libstdc++
+include $(BUILD_HOST_EXECUTABLE)
diff --git a/tools/ijar/LICENSE b/tools/ijar/LICENSE
new file mode 100644
index 0000000..6b0b127
--- /dev/null
+++ b/tools/ijar/LICENSE
@@ -0,0 +1,203 @@
+
+                                 Apache License
+                           Version 2.0, January 2004
+                        http://www.apache.org/licenses/
+
+   TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+
+   1. Definitions.
+
+      "License" shall mean the terms and conditions for use, reproduction,
+      and distribution as defined by Sections 1 through 9 of this document.
+
+      "Licensor" shall mean the copyright owner or entity authorized by
+      the copyright owner that is granting the License.
+
+      "Legal Entity" shall mean the union of the acting entity and all
+      other entities that control, are controlled by, or are under common
+      control with that entity. For the purposes of this definition,
+      "control" means (i) the power, direct or indirect, to cause the
+      direction or management of such entity, whether by contract or
+      otherwise, or (ii) ownership of fifty percent (50%) or more of the
+      outstanding shares, or (iii) beneficial ownership of such entity.
+
+      "You" (or "Your") shall mean an individual or Legal Entity
+      exercising permissions granted by this License.
+
+      "Source" form shall mean the preferred form for making modifications,
+      including but not limited to software source code, documentation
+      source, and configuration files.
+
+      "Object" form shall mean any form resulting from mechanical
+      transformation or translation of a Source form, including but
+      not limited to compiled object code, generated documentation,
+      and conversions to other media types.
+
+      "Work" shall mean the work of authorship, whether in Source or
+      Object form, made available under the License, as indicated by a
+      copyright notice that is included in or attached to the work
+      (an example is provided in the Appendix below).
+
+      "Derivative Works" shall mean any work, whether in Source or Object
+      form, that is based on (or derived from) the Work and for which the
+      editorial revisions, annotations, elaborations, or other modifications
+      represent, as a whole, an original work of authorship. For the purposes
+      of this License, Derivative Works shall not include works that remain
+      separable from, or merely link (or bind by name) to the interfaces of,
+      the Work and Derivative Works thereof.
+
+      "Contribution" shall mean any work of authorship, including
+      the original version of the Work and any modifications or additions
+      to that Work or Derivative Works thereof, that is intentionally
+      submitted to Licensor for inclusion in the Work by the copyright owner
+      or by an individual or Legal Entity authorized to submit on behalf of
+      the copyright owner. For the purposes of this definition, "submitted"
+      means any form of electronic, verbal, or written communication sent
+      to the Licensor or its representatives, including but not limited to
+      communication on electronic mailing lists, source code control systems,
+      and issue tracking systems that are managed by, or on behalf of, the
+      Licensor for the purpose of discussing and improving the Work, but
+      excluding communication that is conspicuously marked or otherwise
+      designated in writing by the copyright owner as "Not a Contribution."
+
+      "Contributor" shall mean Licensor and any individual or Legal Entity
+      on behalf of whom a Contribution has been received by Licensor and
+      subsequently incorporated within the Work.
+
+   2. Grant of Copyright License. Subject to the terms and conditions of
+      this License, each Contributor hereby grants to You a perpetual,
+      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+      copyright license to reproduce, prepare Derivative Works of,
+      publicly display, publicly perform, sublicense, and distribute the
+      Work and such Derivative Works in Source or Object form.
+
+   3. Grant of Patent License. Subject to the terms and conditions of
+      this License, each Contributor hereby grants to You a perpetual,
+      worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+      (except as stated in this section) patent license to make, have made,
+      use, offer to sell, sell, import, and otherwise transfer the Work,
+      where such license applies only to those patent claims licensable
+      by such Contributor that are necessarily infringed by their
+      Contribution(s) alone or by combination of their Contribution(s)
+      with the Work to which such Contribution(s) was submitted. If You
+      institute patent litigation against any entity (including a
+      cross-claim or counterclaim in a lawsuit) alleging that the Work
+      or a Contribution incorporated within the Work constitutes direct
+      or contributory patent infringement, then any patent licenses
+      granted to You under this License for that Work shall terminate
+      as of the date such litigation is filed.
+
+   4. Redistribution. You may reproduce and distribute copies of the
+      Work or Derivative Works thereof in any medium, with or without
+      modifications, and in Source or Object form, provided that You
+      meet the following conditions:
+
+      (a) You must give any other recipients of the Work or
+          Derivative Works a copy of this License; and
+
+      (b) You must cause any modified files to carry prominent notices
+          stating that You changed the files; and
+
+      (c) You must retain, in the Source form of any Derivative Works
+          that You distribute, all copyright, patent, trademark, and
+          attribution notices from the Source form of the Work,
+          excluding those notices that do not pertain to any part of
+          the Derivative Works; and
+
+      (d) If the Work includes a "NOTICE" text file as part of its
+          distribution, then any Derivative Works that You distribute must
+          include a readable copy of the attribution notices contained
+          within such NOTICE file, excluding those notices that do not
+          pertain to any part of the Derivative Works, in at least one
+          of the following places: within a NOTICE text file distributed
+          as part of the Derivative Works; within the Source form or
+          documentation, if provided along with the Derivative Works; or,
+          within a display generated by the Derivative Works, if and
+          wherever such third-party notices normally appear. The contents
+          of the NOTICE file are for informational purposes only and
+          do not modify the License. You may add Your own attribution
+          notices within Derivative Works that You distribute, alongside
+          or as an addendum to the NOTICE text from the Work, provided
+          that such additional attribution notices cannot be construed
+          as modifying the License.
+
+      You may add Your own copyright statement to Your modifications and
+      may provide additional or different license terms and conditions
+      for use, reproduction, or distribution of Your modifications, or
+      for any such Derivative Works as a whole, provided Your use,
+      reproduction, and distribution of the Work otherwise complies with
+      the conditions stated in this License.
+
+   5. Submission of Contributions. Unless You explicitly state otherwise,
+      any Contribution intentionally submitted for inclusion in the Work
+      by You to the Licensor shall be under the terms and conditions of
+      this License, without any additional terms or conditions.
+      Notwithstanding the above, nothing herein shall supersede or modify
+      the terms of any separate license agreement you may have executed
+      with Licensor regarding such Contributions.
+
+   6. Trademarks. This License does not grant permission to use the trade
+      names, trademarks, service marks, or product names of the Licensor,
+      except as required for reasonable and customary use in describing the
+      origin of the Work and reproducing the content of the NOTICE file.
+
+   7. Disclaimer of Warranty. Unless required by applicable law or
+      agreed to in writing, Licensor provides the Work (and each
+      Contributor provides its Contributions) on an "AS IS" BASIS,
+      WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+      implied, including, without limitation, any warranties or conditions
+      of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+      PARTICULAR PURPOSE. You are solely responsible for determining the
+      appropriateness of using or redistributing the Work and assume any
+      risks associated with Your exercise of permissions under this License.
+
+   8. Limitation of Liability. In no event and under no legal theory,
+      whether in tort (including negligence), contract, or otherwise,
+      unless required by applicable law (such as deliberate and grossly
+      negligent acts) or agreed to in writing, shall any Contributor be
+      liable to You for damages, including any direct, indirect, special,
+      incidental, or consequential damages of any character arising as a
+      result of this License or out of the use or inability to use the
+      Work (including but not limited to damages for loss of goodwill,
+      work stoppage, computer failure or malfunction, or any and all
+      other commercial damages or losses), even if such Contributor
+      has been advised of the possibility of such damages.
+
+   9. Accepting Warranty or Additional Liability. While redistributing
+      the Work or Derivative Works thereof, You may choose to offer,
+      and charge a fee for, acceptance of support, warranty, indemnity,
+      or other liability obligations and/or rights consistent with this
+      License. However, in accepting such obligations, You may act only
+      on Your own behalf and on Your sole responsibility, not on behalf
+      of any other Contributor, and only if You agree to indemnify,
+      defend, and hold each Contributor harmless for any liability
+      incurred by, or claims asserted against, such Contributor by reason
+      of your accepting any such warranty or additional liability.
+
+   END OF TERMS AND CONDITIONS
+
+   APPENDIX: How to apply the Apache License to your work.
+
+      To apply the Apache License to your work, attach the following
+      boilerplate notice, with the fields enclosed by brackets "[]"
+      replaced with your own identifying information. (Don't include
+      the brackets!)  The text should be enclosed in the appropriate
+      comment syntax for the file format. We also recommend that a
+      file or class name and description of purpose be included on the
+      same "printed page" as the copyright notice for easier
+      identification within third-party archives.
+
+   Copyright [yyyy] [name of copyright owner]
+
+   Licensed under the Apache License, Version 2.0 (the "License");
+   you may not use this file except in compliance with the License.
+   You may obtain a copy of the License at
+
+       http://www.apache.org/licenses/LICENSE-2.0
+
+   Unless required by applicable law or agreed to in writing, software
+   distributed under the License is distributed on an "AS IS" BASIS,
+   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+   See the License for the specific language governing permissions and
+   limitations under the License.
+
diff --git a/tools/ijar/README.txt b/tools/ijar/README.txt
new file mode 100644
index 0000000..d5a6a0f
--- /dev/null
+++ b/tools/ijar/README.txt
@@ -0,0 +1,120 @@
+
+ijar: A tool for generating interface .jars from normal .jars
+=============================================================
+
+Alan Donovan, 26 May 2007.
+
+Rationale:
+
+  In order to improve the speed of compilation of Java programs in
+  Bazel, the output of build steps is cached.
+
+  This works very nicely for C++ compilation: a compilation unit
+  includes a .cc source file and typically dozens of header files.
+  Header files change relatively infrequently, so the need for a
+  rebuild is usually driven by a change in the .cc file.  Even after
+  syncing a slightly newer version of the tree and doing a rebuild,
+  many hits in the cache are still observed.
+
+  In Java, by contrast, a compilation unit involves a set of .java
+  source files, plus a set of .jar files containing already-compiled
+  JVM .class files.  Class files serve a dual purpose: from the JVM's
+  perspective, they are containers of executable code, but from the
+  compiler's perspective, they are interface definitions.  The problem
+  here is that .jar files are very much more sensitive to change than
+  C++ header files, so even a change that is insignificant to the
+  compiler (such as the addition of a print statement to a method in a
+  prerequisite class) will cause the jar to change, and any code that
+  depends on this jar's interface will be recompiled unnecessarily.
+
+  The purpose of ijar is to produce, from a .jar file, a much smaller,
+  simpler .jar file containing only the parts that are significant for
+  the purposes of compilation.  In other words, an interface .jar
+  file.  By changing one's compilation dependencies to be the interface
+  jar files, unnecessary recompilation is avoided when upstream
+  changes don't affect the interface.
+
+Details:
+
+  ijar is a tool that reads a .jar file and emits a .jar file
+  containing only the parts that are relevant to Java compilation.
+  For example, it throws away:
+
+  - Files whose name does not end in ".class".
+  - All executable method code.
+  - All private methods and fields.
+  - All constants and attributes except the minimal set necessary to
+    describe the class interface.
+  - All debugging information
+    (LineNumberTable, SourceFile, LocalVariableTables attributes).
+
+  It also sets to zero the file modification times in the index of the
+  .jar file.
+
+Implementation:
+
+  ijar is implemented in C++, and runs very quickly.  For example
+  (when optimized) it takes only 530ms to process a 42MB
+  .jar file containing 5878 classes, resulting in an interface .jar
+  file of only 11.4MB in size.  For more usual .jar sizes of a few
+  megabytes, a runtime of 50ms is typical.
+
+  The implementation strategy is to mmap both the input jar and the
+  newly-created _interface.jar, and to scan through the former and
+  emit the latter in a single pass. There are a couple of locations
+  where some kind of "backpatching" is required:
+
+  - in the .zip file format, for each file, the size field precedes
+    the data.  We emit a zero but note its location, generate and emit
+    the stripped classfile, then poke the correct size into the
+    location.
+
+  - for JVM .class files, the header (including the constant table)
+    precedes the body, but cannot be emitted before it because it's
+    not until we emit the body that we know which constants are
+    referenced and which are garbage.  So we emit the body into a
+    temporary buffer, then emit the header to the output jar, followed
+    by the contents of the temp buffer.
+
+  Also note that the zip file format has unnecessary duplication of
+  the index metadata: it has header+data for each file, then another
+  set of (similar) headers at the end.  Rather than save the metadata
+  explicitly in some data structure, we just record the addresses of
+  the already-emitted zip metadata entries in the output file, and
+  then read from there as necessary.
+
+Notes:
+
+  This code has no dependency except on the STL and on zlib.
+
+  Almost all of the getX/putX/ReadX/WriteX functions in the code
+  advance their first argument pointer, which is passed by reference.
+
+  It's tempting to discard package-private classes and class members.
+  However, this would be incorrect because they are a necessary part
+  of the package interface, as a Java package is often compiled in
+  multiple stages.  For example: in Bazel, both Java tests and Java
+  code inhabit the same Java package but are compiled separately.
+
+Assumptions:
+
+  We assume that jar files are uncompressed v1.0 zip files (created
+  with 'jar c0f') with a zero general_purpose_bit_flag.
+
+  We assume that javap/javac don't need the correct CRC checksums in
+  the .jar file.
+
+  We assume that it's better simply to abort in the face of unknown
+  input than to risk leaving out something important from the output
+  (although in the case of annotations, it should be safe to ignore
+  ones we don't understand).
+
+TODO:
+  Maybe: ensure a canonical sort order is used for every list (jar
+  entries, class members, attributes, etc.)  This isn't essential
+  because we can assume the compiler is deterministic and the order in
+  the source files changes little.  Also, it would require two passes. :(
+
+  Maybe: delete dynamically-allocated memory.
+
+  Add (a lot) more tests.  Include a test of idempotency.
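
One of the Implementation notes above (the zip size field preceding the data) describes a backpatching pattern: write a zero placeholder, note its offset, emit the stripped classfile, then poke the correct size into that offset. A minimal, purely illustrative sketch of the same pattern, in Python rather than the mmap-based C++ that ijar actually uses; the bytearray and payload here are stand-ins:

  import struct

  def emit_entry(out, payload):
      # Note the offset of the 4-byte size field and emit a zero placeholder.
      size_offset = len(out)
      out += struct.pack("<I", 0)
      start = len(out)
      out += payload  # stands in for the stripped classfile
      # Backpatch: poke the real size into the location noted earlier.
      out[size_offset:size_offset + 4] = struct.pack("<I", len(out) - start)

  buf = bytearray()
  emit_entry(buf, b"stripped classfile bytes")
  print(struct.unpack_from("<I", buf, 0)[0])  # prints 24
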
diff --git a/tools/ijar/classfile.cc b/tools/ijar/classfile.cc
new file mode 100644
index 0000000..e0cf42e
--- /dev/null
+++ b/tools/ijar/classfile.cc
@@ -0,0 +1,1788 @@
+// Copyright 2001,2007 Alan Donovan. All rights reserved.
+//
+// Author: Alan Donovan <adonovan@google.com>
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//    http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+//
+// classfile.cc -- classfile parsing and stripping.
+//
+
+// TODO(adonovan) don't pass pointers by reference; this is not
+// compatible with Google C++ style.
+
+// See README.txt for details.
+//
+// For definition of JVM class file format, see:
+// Java SE 8 Edition:
+// http://docs.oracle.com/javase/specs/jvms/se8/html/jvms-4.html#jvms-4
+
+#define __STDC_FORMAT_MACROS 1
+#define __STDC_LIMIT_MACROS 1
+#include <inttypes.h> // for PRIx32
+#include <stddef.h>
+#include <stdio.h>
+#include <stdlib.h>
+#include <string.h>
+
+#include <set>
+#include <string>
+#include <vector>
+
+#include "common.h"
+
+namespace devtools_ijar {
+
+// See Table 4.3 in JVM Spec.
+enum CONSTANT {
+  CONSTANT_Class              = 7,
+  CONSTANT_FieldRef           = 9,
+  CONSTANT_Methodref          = 10,
+  CONSTANT_Interfacemethodref = 11,
+  CONSTANT_String             = 8,
+  CONSTANT_Integer            = 3,
+  CONSTANT_Float              = 4,
+  CONSTANT_Long               = 5,
+  CONSTANT_Double             = 6,
+  CONSTANT_NameAndType        = 12,
+  CONSTANT_Utf8               = 1,
+  CONSTANT_MethodHandle       = 15,
+  CONSTANT_MethodType         = 16,
+  CONSTANT_InvokeDynamic      = 18
+};
+
+// See Tables 4.1, 4.4, 4.5 in JVM Spec.
+enum ACCESS  {
+  ACC_PUBLIC          = 0x0001,
+  ACC_PRIVATE         = 0x0002,
+  ACC_PROTECTED       = 0x0004,
+  ACC_STATIC          = 0x0008,
+  ACC_FINAL           = 0x0010,
+  ACC_SYNCHRONIZED    = 0x0020,
+  ACC_VOLATILE        = 0x0040,
+  ACC_TRANSIENT       = 0x0080,
+  ACC_INTERFACE       = 0x0200,
+  ACC_ABSTRACT        = 0x0400
+};
+
+// See Table 4.7.20-A in Java 8 JVM Spec.
+enum TARGET_TYPE {
+  // Targets for type parameter declarations (ElementType.TYPE_PARAMETER):
+  CLASS_TYPE_PARAMETER        = 0x00,
+  METHOD_TYPE_PARAMETER       = 0x01,
+
+  // Targets for type uses that may be externally visible in classes and members
+  // (ElementType.TYPE_USE):
+  CLASS_EXTENDS               = 0x10,
+  CLASS_TYPE_PARAMETER_BOUND  = 0x11,
+  METHOD_TYPE_PARAMETER_BOUND = 0x12,
+  FIELD                       = 0x13,
+  METHOD_RETURN               = 0x14,
+  METHOD_RECEIVER             = 0x15,
+  METHOD_FORMAL_PARAMETER     = 0x16,
+  THROWS                      = 0x17,
+
+  // TARGET_TYPE >= 0x40 is reserved for type uses that occur only within code
+  // blocks. Ijar doesn't need to know about these.
+};
+
+struct Constant;
+
+// TODO(adonovan) these globals are unfortunate
+static std::vector<Constant*>        const_pool_in; // input constant pool
+static std::vector<Constant*>        const_pool_out; // output constant_pool
+static std::set<std::string>         used_class_names;
+static Constant *                    class_name;
+
+// Returns the Constant object, given an index into the input constant pool.
+// Note: constant(0) == NULL; this invariant is exploited by the
+// InnerClassesAttribute, inter alia.
+inline Constant *constant(int idx) {
+  if (idx < 0 || (unsigned)idx >= const_pool_in.size()) {
+    fprintf(stderr, "Illegal constant pool index: %d\n", idx);
+    abort();
+  }
+  return const_pool_in[idx];
+}
+
+/**********************************************************************
+ *                                                                    *
+ *                             Constants                              *
+ *                                                                    *
+ **********************************************************************/
+
+// See sec.4.4 of JVM spec.
+struct Constant {
+
+  Constant(u1 tag) :
+      slot_(0),
+      tag_(tag) {}
+
+  virtual ~Constant() {}
+
+  // For UTF-8 string constants, returns the encoded string.
+  // Otherwise, returns an undefined string value suitable for debugging.
+  virtual std::string Display() = 0;
+
+  virtual void Write(u1 *&p) = 0;
+
+  // Called by slot() when a constant has been identified as required
+  // in the output classfile's constant pool.  This is a hook allowing
+  // constants to register their dependency on other constants, by
+  // calling slot() on them in turn.
+  virtual void Keep() {}
+
+  bool Kept() {
+    return slot_ != 0;
+  }
+
+  // Returns the index of this constant in the output class's constant
+  // pool, assigning a slot if not already done.
+  u2 slot() {
+    if (slot_ == 0) {
+      Keep();
+      slot_ = const_pool_out.size(); // BugBot's "narrowing" warning
+                                     // is bogus.  The number of
+                                     // output constants can't exceed
+                                     // the number of input constants.
+      if (slot_ == 0) {
+        fprintf(stderr, "Constant::slot() called before output phase.\n");
+        abort();
+      }
+      const_pool_out.push_back(this);
+      if (tag_ == CONSTANT_Long || tag_ == CONSTANT_Double) {
+        const_pool_out.push_back(NULL);
+      }
+    }
+    return slot_;
+  }
+
+  u2 slot_; // zero => "this constant is unreachable garbage"
+  u1 tag_;
+};
+
+// Extracts class names from a signature and puts them into the global
+// variable used_class_names.
+//
+// desc: the descriptor class names should be extracted from.
+// p: the position where the extraction should start.
+void ExtractClassNames(const std::string& desc, size_t* p);
+
+// See sec.4.4.1 of JVM spec.
+struct Constant_Class : Constant
+{
+  Constant_Class(u2 name_index) :
+      Constant(CONSTANT_Class),
+      name_index_(name_index) {}
+
+  void Write(u1 *&p) {
+    put_u1(p, tag_);
+    put_u2be(p, constant(name_index_)->slot());
+  }
+
+  std::string Display() {
+    return constant(name_index_)->Display();
+  }
+
+  void Keep() { constant(name_index_)->slot(); }
+
+  u2 name_index_;
+};
+
+// See sec.4.4.2 of JVM spec.
+struct Constant_FMIref : Constant
+{
+  Constant_FMIref(u1 tag,
+                  u2 class_index,
+                  u2 name_type_index) :
+      Constant(tag),
+      class_index_(class_index),
+      name_type_index_(name_type_index) {}
+
+  void Write(u1 *&p) {
+    put_u1(p, tag_);
+    put_u2be(p, constant(class_index_)->slot());
+    put_u2be(p, constant(name_type_index_)->slot());
+  }
+
+  std::string Display() {
+    return constant(class_index_)->Display() + "::" +
+        constant(name_type_index_)->Display();
+  }
+
+  void Keep() {
+    constant(class_index_)->slot();
+    constant(name_type_index_)->slot();
+  }
+
+  u2 class_index_;
+  u2 name_type_index_;
+};
+
+// See sec.4.4.3 of JVM spec.
+struct Constant_String : Constant
+{
+  Constant_String(u2 string_index) :
+      Constant(CONSTANT_String),
+      string_index_(string_index) {}
+
+  void Write(u1 *&p) {
+    put_u1(p, tag_);
+    put_u2be(p, constant(string_index_)->slot());
+  }
+
+  std::string Display() {
+    return "\"" + constant(string_index_)->Display() + "\"";
+  }
+
+  void Keep() { constant(string_index_)->slot(); }
+
+  u2 string_index_;
+};
+
+// See sec.4.4.4 of JVM spec.
+struct Constant_IntegerOrFloat : Constant
+{
+  Constant_IntegerOrFloat(u1 tag, u4 bytes) :
+      Constant(tag),
+      bytes_(bytes) {}
+
+  void Write(u1 *&p) {
+    put_u1(p, tag_);
+    put_u4be(p, bytes_);
+  }
+
+  std::string Display() { return "int/float"; }
+
+  u4 bytes_;
+};
+
+// See sec.4.4.5 of JVM spec.
+struct Constant_LongOrDouble : Constant_IntegerOrFloat
+{
+  Constant_LongOrDouble(u1 tag, u4 high_bytes, u4 low_bytes) :
+      Constant_IntegerOrFloat(tag, high_bytes),
+      low_bytes_(low_bytes) {}
+
+  void Write(u1 *&p) {
+    put_u1(p, tag_);
+    put_u4be(p, bytes_);
+    put_u4be(p, low_bytes_);
+  }
+
+  std::string Display() { return "long/double"; }
+
+  u4 low_bytes_;
+};
+
+// See sec.4.4.6 of JVM spec.
+struct Constant_NameAndType : Constant
+{
+  Constant_NameAndType(u2 name_index, u2 descr_index) :
+      Constant(CONSTANT_NameAndType),
+      name_index_(name_index),
+      descr_index_(descr_index) {}
+
+  void Write(u1 *&p) {
+    put_u1(p, tag_);
+    put_u2be(p, constant(name_index_)->slot());
+    put_u2be(p, constant(descr_index_)->slot());
+  }
+
+  std::string Display() {
+    return constant(name_index_)->Display() + "::" +
+        constant(descr_index_)->Display();
+  }
+
+  void Keep() {
+    constant(name_index_)->slot();
+    constant(descr_index_)->slot();
+  }
+
+  u2 name_index_;
+  u2 descr_index_;
+};
+
+// See sec.4.4.7 of JVM spec.
+struct Constant_Utf8 : Constant
+{
+  Constant_Utf8(u4 length, const u1 *utf8) :
+      Constant(CONSTANT_Utf8),
+      length_(length),
+      utf8_(utf8) {}
+
+  void Write(u1 *&p) {
+    put_u1(p, tag_);
+    put_u2be(p, length_);
+    put_n(p, utf8_, length_);
+  }
+
+  std::string Display() {
+    return std::string((const char*) utf8_, length_);
+  }
+
+  u4 length_;
+  const u1 *utf8_;
+};
+
+// See sec.4.4.8 of JVM spec.
+struct Constant_MethodHandle : Constant
+{
+  Constant_MethodHandle(u1 reference_kind, u2 reference_index) :
+      Constant(CONSTANT_MethodHandle),
+      reference_kind_(reference_kind),
+      reference_index_(reference_index) {}
+
+  void Write(u1 *&p) {
+    put_u1(p, tag_);
+    put_u1(p, reference_kind_);
+    put_u2be(p, reference_index_);
+  }
+
+  std::string Display() {
+    return "Constant_MethodHandle::" + std::to_string(reference_kind_) + "::"
+        + constant(reference_index_)->Display();
+  }
+
+  u1 reference_kind_;
+  u2 reference_index_;
+};
+
+// See sec.4.4.9 of JVM spec.
+struct Constant_MethodType : Constant
+{
+  Constant_MethodType(u2 descriptor_index) :
+      Constant(CONSTANT_MethodType),
+      descriptor_index_(descriptor_index) {}
+
+  void Write(u1 *&p) {
+    put_u1(p, tag_);
+    put_u2be(p, descriptor_index_);
+  }
+
+  std::string Display() {
+    return  "Constant_MethodType::" + constant(descriptor_index_)->Display();
+  }
+
+  u2 descriptor_index_;
+};
+
+// See sec.4.4.10 of JVM spec.
+struct Constant_InvokeDynamic : Constant
+{
+  Constant_InvokeDynamic(u2 bootstrap_method_attr_index, u2 name_and_type_index) :
+      Constant(CONSTANT_InvokeDynamic),
+      bootstrap_method_attr_index_(bootstrap_method_attr_index),
+      name_and_type_index_(name_and_type_index) {}
+
+  void Write(u1 *&p) {
+    put_u1(p, tag_);
+    put_u2be(p, bootstrap_method_attr_index_);
+    put_u2be(p, name_and_type_index_);
+  }
+
+  std::string Display() {
+    return  "Constant_InvokeDynamic::"
+        + std::to_string(bootstrap_method_attr_index_) + "::"
+        + constant(name_and_type_index_)->Display();
+  }
+
+  u2 bootstrap_method_attr_index_;
+  u2 name_and_type_index_;
+};
+
+/**********************************************************************
+ *                                                                    *
+ *                             Attributes                             *
+ *                                                                    *
+ **********************************************************************/
+
+// See sec.4.7 of JVM spec.
+struct Attribute {
+
+  virtual ~Attribute() {}
+  virtual void Write(u1 *&p) = 0;
+  virtual void ExtractClassNames() {}
+
+  void WriteProlog(u1 *&p, u2 length) {
+    put_u2be(p, attribute_name_->slot());
+    put_u4be(p, length);
+  }
+
+  Constant *attribute_name_;
+};
+
+// See sec.4.7.5 of JVM spec.
+struct ExceptionsAttribute : Attribute {
+
+  static ExceptionsAttribute* Read(const u1 *&p, Constant *attribute_name) {
+    ExceptionsAttribute *attr = new ExceptionsAttribute;
+    attr->attribute_name_ = attribute_name;
+    u2 number_of_exceptions = get_u2be(p);
+    for (int ii = 0; ii < number_of_exceptions; ++ii) {
+      attr->exceptions_.push_back(constant(get_u2be(p)));
+    }
+    return attr;
+  }
+
+  void Write(u1 *&p) {
+    WriteProlog(p, exceptions_.size() * 2 + 2);
+    put_u2be(p, exceptions_.size());
+    for (size_t ii = 0; ii < exceptions_.size(); ++ii) {
+      put_u2be(p, exceptions_[ii]->slot());
+    }
+  }
+
+  std::vector<Constant*> exceptions_;
+};
+
+// See sec.4.7.6 of JVM spec.
+struct InnerClassesAttribute : Attribute {
+
+  struct Entry {
+    Constant *inner_class_info;
+    Constant *outer_class_info;
+    Constant *inner_name;
+    u2 inner_class_access_flags;
+  };
+
+  virtual ~InnerClassesAttribute() {
+    for (size_t i = 0; i < entries_.size(); i++) {
+      delete entries_[i];
+    }
+  }
+
+  static InnerClassesAttribute* Read(const u1 *&p, Constant *attribute_name) {
+    InnerClassesAttribute *attr = new InnerClassesAttribute;
+    attr->attribute_name_ = attribute_name;
+
+    u2 number_of_classes = get_u2be(p);
+    for (int ii = 0; ii < number_of_classes; ++ii) {
+      Entry *entry = new Entry;
+      entry->inner_class_info = constant(get_u2be(p));
+      entry->outer_class_info = constant(get_u2be(p));
+      entry->inner_name = constant(get_u2be(p));
+      entry->inner_class_access_flags = get_u2be(p);
+
+      attr->entries_.push_back(entry);
+    }
+    return attr;
+  }
+
+  void Write(u1 *&p) {
+    std::set<int> kept_entries;
+    // We keep an entry if the constant referring to the inner class is already
+    // kept. Then we mark its outer class and its class name as kept, too, then
+    // iterate until a fixed point is reached.
+    size_t entry_count;
+    int iteration = 0;
+
+    do {
+      entry_count = kept_entries.size();
+      for (size_t i_entry = 0; i_entry < entries_.size(); ++i_entry) {
+        Entry* entry = entries_[i_entry];
+        if (entry->inner_class_info->Kept() ||
+            used_class_names.find(entry->inner_class_info->Display())
+                != used_class_names.end() ||
+            entry->outer_class_info == class_name ||
+            entry->outer_class_info == NULL ||
+            entry->inner_name == NULL) {
+          kept_entries.insert(i_entry);
+
+          // These are zero for anonymous inner classes
+          if (entry->outer_class_info != NULL) {
+            entry->outer_class_info->slot();
+          }
+
+          if (entry->inner_name != NULL) {
+            entry->inner_name->slot();
+          }
+        }
+      }
+      iteration += 1;
+    } while (entry_count != kept_entries.size());
+
+    if (kept_entries.size() == 0) {
+      return;
+    }
+
+    WriteProlog(p, 2 + kept_entries.size() * 8);
+    put_u2be(p, kept_entries.size());
+
+    for (std::set<int>::iterator it = kept_entries.begin();
+         it != kept_entries.end();
+         ++it) {
+      Entry *entry = entries_[*it];
+      put_u2be(p, entry->inner_class_info == NULL
+               ? 0
+               : entry->inner_class_info->slot());
+      put_u2be(p, entry->outer_class_info == NULL
+               ? 0
+               : entry->outer_class_info->slot());
+      put_u2be(p, entry->inner_name == NULL
+               ? 0
+               : entry->inner_name->slot());
+      put_u2be(p, entry->inner_class_access_flags);
+    }
+  }
+
+  std::vector<Entry*> entries_;
+};
+
+// See sec.4.7.7 of JVM spec.
+// We preserve EnclosingMethod attributes to be able to identify local and
+// anonymous classes. These classes will be stripped of most content, as they
+// represent implementation details that shouldn't leak into the ijars. Omitting
+// EnclosingMethod attributes can lead to type-checking failures in the presence
+// of generics (see b/9070939).
+struct EnclosingMethodAttribute : Attribute {
+
+  static EnclosingMethodAttribute* Read(const u1 *&p,
+                                        Constant *attribute_name) {
+    EnclosingMethodAttribute *attr = new EnclosingMethodAttribute;
+    attr->attribute_name_ = attribute_name;
+    attr->class_ = constant(get_u2be(p));
+    attr->method_ = constant(get_u2be(p));
+    return attr;
+  }
+
+  void Write(u1 *&p) {
+    WriteProlog(p, 4);
+    put_u2be(p, class_->slot());
+    put_u2be(p, method_ == NULL ? 0 : method_->slot());
+  }
+
+  Constant *class_;
+  Constant *method_;
+};
+
+// See sec.4.7.16.1 of JVM spec.
+// Used by AnnotationDefault and other attributes.
+struct ElementValue {
+  virtual ~ElementValue() {}
+  virtual void Write(u1 *&p) = 0;
+  virtual void ExtractClassNames() {}
+  static ElementValue* Read(const u1 *&p);
+  u1 tag_;
+  u4 length_;
+};
+
+struct BaseTypeElementValue : ElementValue {
+  void Write(u1 *&p) {
+    put_u1(p, tag_);
+    put_u2be(p, const_value_->slot());
+  }
+  static BaseTypeElementValue *Read(const u1 *&p) {
+    BaseTypeElementValue *value = new BaseTypeElementValue;
+    value->const_value_ = constant(get_u2be(p));
+    return value;
+  }
+  Constant *const_value_;
+};
+
+struct EnumTypeElementValue : ElementValue {
+  void Write(u1 *&p) {
+    put_u1(p, tag_);
+    put_u2be(p, type_name_->slot());
+    put_u2be(p, const_name_->slot());
+  }
+  static EnumTypeElementValue *Read(const u1 *&p) {
+    EnumTypeElementValue *value = new EnumTypeElementValue;
+    value->type_name_ = constant(get_u2be(p));
+    value->const_name_ = constant(get_u2be(p));
+    return value;
+  }
+  Constant *type_name_;
+  Constant *const_name_;
+};
+
+struct ClassTypeElementValue : ElementValue {
+  void Write(u1 *&p) {
+    put_u1(p, tag_);
+    put_u2be(p, class_info_->slot());
+  }
+
+  virtual void ExtractClassNames() {
+    size_t idx = 0;
+    devtools_ijar::ExtractClassNames(class_info_->Display(), &idx);
+  }
+
+  static ClassTypeElementValue *Read(const u1 *&p) {
+    ClassTypeElementValue *value = new ClassTypeElementValue;
+    value->class_info_ = constant(get_u2be(p));
+    return value;
+  }
+  Constant *class_info_;
+};
+
+struct ArrayTypeElementValue : ElementValue {
+  virtual ~ArrayTypeElementValue() {
+    for (size_t i = 0; i < values_.size(); i++) {
+      delete values_[i];
+    }
+  }
+
+  virtual void ExtractClassNames() {
+    for (size_t i = 0; i < values_.size(); i++) {
+      values_[i]->ExtractClassNames();
+    }
+  }
+
+  void Write(u1 *&p) {
+    put_u1(p, tag_);
+    put_u2be(p, values_.size());
+    for (size_t ii = 0; ii < values_.size(); ++ii) {
+      values_[ii]->Write(p);
+    }
+  }
+  static ArrayTypeElementValue *Read(const u1 *&p) {
+    ArrayTypeElementValue *value = new ArrayTypeElementValue;
+    u2 num_values = get_u2be(p);
+    for (int ii = 0; ii < num_values; ++ii) {
+      value->values_.push_back(ElementValue::Read(p));
+    }
+    return value;
+  }
+  std::vector<ElementValue*> values_;
+};
+
+// See sec.4.7.16 of JVM spec.
+struct Annotation {
+  virtual ~Annotation() {
+    for (size_t i = 0; i < element_value_pairs_.size(); i++) {
+      delete element_value_pairs_[i]->element_value_;
+      delete element_value_pairs_[i];
+    }
+  }
+
+  void ExtractClassNames() {
+    for (size_t i = 0; i < element_value_pairs_.size(); i++) {
+      element_value_pairs_[i]->element_value_->ExtractClassNames();
+    }
+  }
+
+  void Write(u1 *&p) {
+    put_u2be(p, type_->slot());
+    put_u2be(p, element_value_pairs_.size());
+    for (size_t ii = 0; ii < element_value_pairs_.size(); ++ii) {
+      put_u2be(p, element_value_pairs_[ii]->element_name_->slot());
+      element_value_pairs_[ii]->element_value_->Write(p);
+    }
+  }
+  static Annotation *Read(const u1 *&p) {
+    Annotation *value = new Annotation;
+    value->type_ = constant(get_u2be(p));
+    u2 num_element_value_pairs = get_u2be(p);
+    for (int ii = 0; ii < num_element_value_pairs; ++ii) {
+      ElementValuePair *pair = new ElementValuePair;
+      pair->element_name_ = constant(get_u2be(p));
+      pair->element_value_ = ElementValue::Read(p);
+      value->element_value_pairs_.push_back(pair);
+    }
+    return value;
+  }
+  Constant *type_;
+  struct ElementValuePair {
+    Constant *element_name_;
+    ElementValue *element_value_;
+  };
+  std::vector<ElementValuePair*> element_value_pairs_;
+};
+
+// See sec 4.7.20 of Java 8 JVM Spec
+//
+// Each entry in the annotations table represents a single run-time visible
+// annotation on a type used in a declaration or expression. The type_annotation
+// structure has the following format:
+//
+// type_annotation {
+//   u1 target_type;
+//   union {
+//     type_parameter_target;
+//     supertype_target;
+//     type_parameter_bound_target;
+//     empty_target;
+//     method_formal_parameter_target;
+//     throws_target;
+//     localvar_target;
+//     catch_target;
+//     offset_target;
+//     type_argument_target;
+//   } target_info;
+//   type_path target_path;
+//   u2        type_index;
+//   u2        num_element_value_pairs;
+//   {
+//     u2            element_name_index;
+//     element_value value;
+//   }
+//   element_value_pairs[num_element_value_pairs];
+// }
+//
+struct TypeAnnotation {
+  virtual ~TypeAnnotation() {
+    delete target_info_;
+    delete type_path_;
+    delete annotation_;
+  }
+
+  void ExtractClassNames() {
+    annotation_->ExtractClassNames();
+  }
+
+  void Write(u1 *&p) {
+    put_u1(p, target_type_);
+    target_info_->Write(p);
+    type_path_->Write(p);
+    annotation_->Write(p);
+  }
+
+  static TypeAnnotation *Read(const u1 *&p) {
+    TypeAnnotation *value = new TypeAnnotation;
+    value->target_type_ = get_u1(p);
+    value->target_info_ = ReadTargetInfo(p, value->target_type_);
+    value->type_path_ = TypePath::Read(p);
+    value->annotation_ = Annotation::Read(p);
+    return value;
+  }
+
+  struct TargetInfo {
+    virtual ~TargetInfo() {}
+    virtual void Write(u1 *&p) = 0;
+  };
+
+  struct TypeParameterTargetInfo : TargetInfo {
+    void Write(u1 *&p) {
+      put_u1(p, type_parameter_index_);
+    }
+    static TypeParameterTargetInfo *Read(const u1 *&p) {
+      TypeParameterTargetInfo *value = new TypeParameterTargetInfo;
+      value->type_parameter_index_ = get_u1(p);
+      return value;
+    }
+    u1 type_parameter_index_;
+  };
+
+  struct ClassExtendsInfo : TargetInfo {
+    void Write(u1 *&p) {
+      put_u2be(p, supertype_index_);
+    }
+    static ClassExtendsInfo *Read(const u1 *&p) {
+      ClassExtendsInfo *value = new ClassExtendsInfo;
+      value->supertype_index_ = get_u2be(p);
+      return value;
+    }
+    u2 supertype_index_;
+  };
+
+  struct TypeParameterBoundInfo : TargetInfo {
+    void Write(u1 *&p) {
+      put_u1(p, type_parameter_index_);
+      put_u1(p, bound_index_);
+    }
+    static TypeParameterBoundInfo *Read(const u1 *&p) {
+      TypeParameterBoundInfo *value = new TypeParameterBoundInfo;
+      value->type_parameter_index_ = get_u1(p);
+      value->bound_index_ = get_u1(p);
+      return value;
+    }
+    u1 type_parameter_index_;
+    u1 bound_index_;
+  };
+
+  struct EmptyInfo : TargetInfo {
+    void Write(u1 *&) {}
+    static EmptyInfo *Read(const u1 *&) {
+      return new EmptyInfo;
+    }
+  };
+
+  struct MethodFormalParameterInfo : TargetInfo {
+    void Write(u1 *&p) {
+      put_u1(p, method_formal_parameter_index_);
+    }
+    static MethodFormalParameterInfo *Read(const u1 *&p) {
+      MethodFormalParameterInfo *value = new MethodFormalParameterInfo;
+      value->method_formal_parameter_index_ = get_u1(p);
+      return value;
+    }
+    u1 method_formal_parameter_index_;
+  };
+
+  struct ThrowsTypeInfo : TargetInfo {
+    void Write(u1 *&p) {
+      put_u2be(p, throws_type_index_);
+    }
+    static ThrowsTypeInfo *Read(const u1 *&p) {
+      ThrowsTypeInfo *value = new ThrowsTypeInfo;
+      value->throws_type_index_ = get_u2be(p);
+      return value;
+    }
+    u2 throws_type_index_;
+  };
+
+  static TargetInfo *ReadTargetInfo(const u1 *&p, u1 target_type) {
+    switch (target_type) {
+      case CLASS_TYPE_PARAMETER:
+      case METHOD_TYPE_PARAMETER:
+        return TypeParameterTargetInfo::Read(p);
+      case CLASS_EXTENDS:
+        return ClassExtendsInfo::Read(p);
+      case CLASS_TYPE_PARAMETER_BOUND:
+      case METHOD_TYPE_PARAMETER_BOUND:
+        return TypeParameterBoundInfo::Read(p);
+      case FIELD:
+      case METHOD_RETURN:
+      case METHOD_RECEIVER:
+        return new EmptyInfo;
+      case METHOD_FORMAL_PARAMETER:
+        return MethodFormalParameterInfo::Read(p);
+      case THROWS:
+        return ThrowsTypeInfo::Read(p);
+      default:
+        fprintf(stderr, "Illegal type annotation target type: %d\n",
+                target_type);
+        abort();
+    }
+  }
+
+  struct TypePath {
+    void Write(u1 *&p) {
+      put_u1(p, path_.size());
+      for (TypePathEntry entry : path_) {
+        put_u1(p, entry.type_path_kind_);
+        put_u1(p, entry.type_argument_index_);
+      }
+    }
+    static TypePath *Read(const u1 *&p) {
+      TypePath *value = new TypePath;
+      u1 path_length = get_u1(p);
+      for (int ii = 0; ii < path_length; ++ii) {
+        TypePathEntry entry;
+        entry.type_path_kind_ = get_u1(p);
+        entry.type_argument_index_ = get_u1(p);
+        value->path_.push_back(entry);
+      }
+      return value;
+    }
+
+    struct TypePathEntry {
+      u1 type_path_kind_;
+      u1 type_argument_index_;
+    };
+    std::vector<TypePathEntry> path_;
+  };
+
+  u1 target_type_;
+  TargetInfo *target_info_;
+  TypePath *type_path_;
+  Annotation *annotation_;
+};
+
+struct AnnotationTypeElementValue : ElementValue {
+  virtual ~AnnotationTypeElementValue() {
+    delete annotation_;
+  }
+
+  void Write(u1 *&p) {
+    put_u1(p, tag_);
+    annotation_->Write(p);
+  }
+  static AnnotationTypeElementValue *Read(const u1 *&p) {
+    AnnotationTypeElementValue *value = new AnnotationTypeElementValue;
+    value->annotation_ = Annotation::Read(p);
+    return value;
+  }
+
+  Annotation *annotation_;
+};
+
+ElementValue* ElementValue::Read(const u1 *&p) {
+  const u1* start = p;
+  ElementValue *result;
+  u1 tag = get_u1(p);
+  if (tag != 0 && strchr("BCDFIJSZs", (char) tag) != NULL) {
+    result = BaseTypeElementValue::Read(p);
+  } else if ((char) tag == 'e') {
+    result = EnumTypeElementValue::Read(p);
+  } else if ((char) tag == 'c') {
+    result = ClassTypeElementValue::Read(p);
+  } else if ((char) tag == '[') {
+    result = ArrayTypeElementValue::Read(p);
+  } else if ((char) tag == '@') {
+    result = AnnotationTypeElementValue::Read(p);
+  } else {
+    fprintf(stderr, "Illegal element_value::tag: %d\n", tag);
+    abort();
+  }
+  result->tag_ = tag;
+  result->length_ = p - start;
+  return result;
+}
+
+// See sec.4.7.20 of JVM spec.
+// We preserve AnnotationDefault attributes because they are required
+// in order to make use of an annotation in new code.
+struct AnnotationDefaultAttribute : Attribute {
+  virtual ~AnnotationDefaultAttribute() {
+    delete default_value_;
+  }
+
+  static AnnotationDefaultAttribute* Read(const u1 *&p,
+                                          Constant *attribute_name) {
+    AnnotationDefaultAttribute *attr = new AnnotationDefaultAttribute;
+    attr->attribute_name_ = attribute_name;
+    attr->default_value_ = ElementValue::Read(p);
+    return attr;
+  }
+
+  void Write(u1 *&p) {
+    WriteProlog(p, default_value_->length_);
+    default_value_->Write(p);
+  }
+
+  virtual void ExtractClassNames() {
+    default_value_->ExtractClassNames();
+  }
+
+  ElementValue *default_value_;
+};
+
+// See sec.4.7.2 of JVM spec.
+// We preserve ConstantValue attributes because they are required for
+// compile-time constant propagation.
+struct ConstantValueAttribute : Attribute {
+
+  static ConstantValueAttribute* Read(const u1 *&p, Constant *attribute_name) {
+    ConstantValueAttribute *attr = new ConstantValueAttribute;
+    attr->attribute_name_ = attribute_name;
+    attr->constantvalue_ = constant(get_u2be(p));
+    return attr;
+  }
+
+  void Write(u1 *&p) {
+    WriteProlog(p, 2);
+    put_u2be(p, constantvalue_->slot());
+  }
+
+  Constant *constantvalue_;
+};
+
+// See sec.4.7.9 of JVM spec.
+// We preserve Signature attributes because they are required by the
+// compiler for type-checking of generics.
+struct SignatureAttribute : Attribute {
+
+  static SignatureAttribute* Read(const u1 *&p, Constant *attribute_name) {
+    SignatureAttribute *attr = new SignatureAttribute;
+    attr->attribute_name_ = attribute_name;
+    attr->signature_  = constant(get_u2be(p));
+    return attr;
+  }
+
+  void Write(u1 *&p) {
+    WriteProlog(p, 2);
+    put_u2be(p, signature_->slot());
+  }
+
+  virtual void ExtractClassNames() {
+    size_t signature_idx = 0;
+    devtools_ijar::ExtractClassNames(signature_->Display(), &signature_idx);
+  }
+
+  Constant *signature_;
+};
+
+// See sec.4.7.15 of JVM spec.
+// We preserve Deprecated attributes because they are required by the
+// compiler to generate warning messages.
+struct DeprecatedAttribute : Attribute {
+
+  static DeprecatedAttribute* Read(const u1 *&, Constant *attribute_name) {
+    DeprecatedAttribute *attr = new DeprecatedAttribute;
+    attr->attribute_name_ = attribute_name;
+    return attr;
+  }
+
+  void Write(u1 *&p) {
+    WriteProlog(p, 0);
+  }
+};
+
+// See sec.4.7.16-17 of JVM spec v3.  Includes RuntimeVisible and
+// RuntimeInvisible.
+//
+// We preserve all annotations.
+struct AnnotationsAttribute : Attribute {
+  virtual ~AnnotationsAttribute() {
+    for (size_t i = 0; i < annotations_.size(); i++) {
+      delete annotations_[i];
+    }
+  }
+
+  static AnnotationsAttribute* Read(const u1 *&p, Constant *attribute_name) {
+    AnnotationsAttribute *attr = new AnnotationsAttribute;
+    attr->attribute_name_ = attribute_name;
+    u2 num_annotations = get_u2be(p);
+    for (int ii = 0; ii < num_annotations; ++ii) {
+      Annotation *annotation = Annotation::Read(p);
+      attr->annotations_.push_back(annotation);
+    }
+    return attr;
+  }
+
+  virtual void ExtractClassNames() {
+    for (size_t i = 0; i < annotations_.size(); i++) {
+      annotations_[i]->ExtractClassNames();
+    }
+  }
+
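+  // The attribute length is not known until the annotations are serialized,
+  // so Write() passes a placeholder to WriteProlog() and backpatches the
+  // real value afterwards: payload_start points at the u4 length field, the
+  // last four bytes written by WriteProlog().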
+  void Write(u1 *&p) {
+    WriteProlog(p, -1);
+    u1 *payload_start = p - 4;
+    put_u2be(p, annotations_.size());
+    for (size_t ii = 0; ii < annotations_.size(); ++ii) {
+      annotations_[ii]->Write(p);
+    }
+    put_u4be(payload_start, p - 4 - payload_start);  // backpatch length
+  }
+
+  std::vector<Annotation*> annotations_;
+};
+
+// See sec.4.7.18-19 of JVM spec.  Includes RuntimeVisible and
+// RuntimeInvisible.
+//
+// We preserve all annotations.
+struct ParameterAnnotationsAttribute : Attribute {
+
+  static ParameterAnnotationsAttribute* Read(const u1 *&p,
+                                             Constant *attribute_name) {
+    ParameterAnnotationsAttribute *attr = new ParameterAnnotationsAttribute;
+    attr->attribute_name_ = attribute_name;
+    u1 num_parameters = get_u1(p);
+    for (int ii = 0; ii < num_parameters; ++ii) {
+      std::vector<Annotation*> annotations;
+      u2 num_annotations = get_u2be(p);
+      for (int ii = 0; ii < num_annotations; ++ii) {
+        Annotation *annotation = Annotation::Read(p);
+        annotations.push_back(annotation);
+      }
+      attr->parameter_annotations_.push_back(annotations);
+    }
+    return attr;
+  }
+
+  virtual void ExtractClassNames() {
+    for (size_t i = 0; i < parameter_annotations_.size(); i++) {
+      const std::vector<Annotation*>& annotations = parameter_annotations_[i];
+      for (size_t j = 0; j < annotations.size(); j++) {
+        annotations[j]->ExtractClassNames();
+      }
+    }
+  }
+
+  void Write(u1 *&p) {
+    WriteProlog(p, -1);
+    u1 *payload_start = p - 4;
+    put_u1(p, parameter_annotations_.size());
+    for (size_t ii = 0; ii < parameter_annotations_.size(); ++ii) {
+      std::vector<Annotation *> &annotations = parameter_annotations_[ii];
+      put_u2be(p, annotations.size());
+      for (size_t jj = 0; jj < annotations.size(); ++jj) {
+        annotations[jj]->Write(p);
+      }
+    }
+    put_u4be(payload_start, p - 4 - payload_start);  // backpatch length
+  }
+
+  std::vector<std::vector<Annotation*> > parameter_annotations_;
+};
+
+// See sec.4.7.20 of Java 8 JVM spec. Includes RuntimeVisibleTypeAnnotations
+// and RuntimeInvisibleTypeAnnotations.
+struct TypeAnnotationsAttribute : Attribute {
+  static TypeAnnotationsAttribute* Read(const u1 *&p, Constant *attribute_name,
+                                        u4) {
+    auto attr = new TypeAnnotationsAttribute;
+    attr->attribute_name_ = attribute_name;
+    u2 num_annotations = get_u2be(p);
+    for (int ii = 0; ii < num_annotations; ++ii) {
+      TypeAnnotation *annotation = TypeAnnotation::Read(p);
+      attr->type_annotations_.push_back(annotation);
+    }
+    return attr;
+  }
+
+  virtual void ExtractClassNames() {
+    for (size_t i = 0; i < type_annotations_.size(); i++) {
+      type_annotations_[i]->ExtractClassNames();
+    }
+  }
+
+  void Write(u1 *&p) {
+    WriteProlog(p, -1);
+    u1 *payload_start = p - 4;
+    put_u2be(p, type_annotations_.size());
+    for (TypeAnnotation *annotation : type_annotations_) {
+      annotation->Write(p);
+    }
+    put_u4be(payload_start, p - 4 - payload_start);  // backpatch length
+  }
+
+  std::vector<TypeAnnotation*> type_annotations_;
+};
+
+struct GeneralAttribute : Attribute {
+  static GeneralAttribute* Read(const u1 *&p, Constant *attribute_name,
+                                u4 attribute_length) {
+    auto attr = new GeneralAttribute;
+    attr->attribute_name_ = attribute_name;
+    attr->attribute_length_ = attribute_length;
+    attr->attribute_content_ = p;
+    p += attribute_length;
+    return attr;
+  }
+
+  void Write(u1 *&p) {
+    WriteProlog(p, attribute_length_);
+    put_n(p, attribute_content_, attribute_length_);
+  }
+
+  u4 attribute_length_;
+  const u1 *attribute_content_;
+};
+
+/**********************************************************************
+ *                                                                    *
+ *                             ClassFile                              *
+ *                                                                    *
+ **********************************************************************/
+
+struct HasAttrs {
+  std::vector<Attribute*> attributes;
+
+  void WriteAttrs(u1 *&p);
+  void ReadAttrs(const u1 *&p);
+
+  virtual ~HasAttrs() {
+    for (size_t i = 0; i < attributes.size(); i++) {
+      delete attributes[i];
+    }
+  }
+
+  void ExtractClassNames() {
+    for (size_t i = 0; i < attributes.size(); i++) {
+      attributes[i]->ExtractClassNames();
+    }
+  }
+};
+
+// A field or method.
+// See sec.4.5 and 4.6 of JVM spec.
+struct Member : HasAttrs {
+  u2 access_flags;
+  Constant *name;
+  Constant *descriptor;
+
+  static Member* Read(const u1 *&p) {
+    Member *m = new Member;
+    m->access_flags = get_u2be(p);
+    m->name = constant(get_u2be(p));
+    m->descriptor = constant(get_u2be(p));
+    m->ReadAttrs(p);
+    return m;
+  }
+
+  void Write(u1 *&p) {
+    put_u2be(p, access_flags);
+    put_u2be(p, name->slot());
+    put_u2be(p, descriptor->slot());
+    WriteAttrs(p);
+  }
+};
+
+// See sec.4.1 of JVM spec.
+struct ClassFile : HasAttrs {
+
+  size_t length;
+
+  // Header:
+  u4 magic;
+  u2 major;
+  u2 minor;
+
+  // Body:
+  u2 access_flags;
+  Constant *this_class;
+  Constant *super_class;
+  std::vector<Constant*> interfaces;
+  std::vector<Member*> fields;
+  std::vector<Member*> methods;
+
+  virtual ~ClassFile() {
+    for (size_t i = 0; i < fields.size(); i++) {
+      delete fields[i];
+    }
+
+    for (size_t i = 0; i < methods.size(); i++) {
+      delete methods[i];
+    }
+
+    // Constants do not need to be deleted; they are owned by the constant pool.
+  }
+
+  void WriteClass(u1 *&p);
+
+  bool ReadConstantPool(const u1 *&p);
+
+  void StripIfAnonymous();
+
+  void WriteHeader(u1 *&p) {
+    put_u4be(p, magic);
+    put_u2be(p, major);
+    put_u2be(p, minor);
+
+    put_u2be(p, const_pool_out.size());
+    for (u2 ii = 1; ii < const_pool_out.size(); ++ii) {
+      if (const_pool_out[ii] != NULL) { // NB: NULLs appear after long/double.
+        const_pool_out[ii]->Write(p);
+      }
+    }
+  }
+
+  void WriteBody(u1 *&p) {
+    put_u2be(p, access_flags);
+    put_u2be(p, this_class->slot());
+    put_u2be(p, super_class == NULL ? 0 : super_class->slot());
+    put_u2be(p, interfaces.size());
+    for (size_t ii = 0; ii < interfaces.size(); ++ii) {
+      put_u2be(p, interfaces[ii]->slot());
+    }
+    put_u2be(p, fields.size());
+    for (size_t ii = 0; ii < fields.size(); ++ii) {
+      fields[ii]->Write(p);
+    }
+    put_u2be(p, methods.size());
+    for (size_t ii = 0; ii < methods.size(); ++ii) {
+      methods[ii]->Write(p);
+    }
+
+    Attribute* inner_classes = NULL;
+
+    // Move the InnerClasses attribute to the end, so that when it is written
+    // it already knows which constants were needed by the rest of the class.
+    for (size_t ii = 0; ii < attributes.size(); ii++) {
+      if (attributes[ii]->attribute_name_->Display() == "InnerClasses") {
+        inner_classes = attributes[ii];
+        attributes.erase(attributes.begin() + ii);
+        break;
+      }
+    }
+
+    if (inner_classes != NULL) {
+      attributes.push_back(inner_classes);
+    }
+
+    WriteAttrs(p);
+  }
+
+};
+
+void HasAttrs::ReadAttrs(const u1 *&p) {
+  u2 attributes_count = get_u2be(p);
+  for (int ii = 0; ii < attributes_count; ii++) {
+    Constant *attribute_name = constant(get_u2be(p));
+    u4 attribute_length = get_u4be(p);
+
+    std::string attr_name = attribute_name->Display();
+    if (attr_name == "SourceFile" ||
+        attr_name == "LineNumberTable" ||
+        attr_name == "LocalVariableTable" ||
+        attr_name == "LocalVariableTypeTable" ||
+        attr_name == "Code" ||
+        attr_name == "Synthetic" ||
+        attr_name == "BootstrapMethods") {
+      p += attribute_length; // drop these attributes
+    } else if (attr_name == "Exceptions") {
+      attributes.push_back(ExceptionsAttribute::Read(p, attribute_name));
+    } else if (attr_name == "Signature") {
+      attributes.push_back(SignatureAttribute::Read(p, attribute_name));
+    } else if (attr_name == "Deprecated") {
+      attributes.push_back(DeprecatedAttribute::Read(p, attribute_name));
+    } else if (attr_name == "EnclosingMethod") {
+      attributes.push_back(EnclosingMethodAttribute::Read(p, attribute_name));
+    } else if (attr_name == "InnerClasses") {
+      // TODO(bazel-team): omit private inner classes
+      attributes.push_back(InnerClassesAttribute::Read(p, attribute_name));
+    } else if (attr_name == "AnnotationDefault") {
+      attributes.push_back(AnnotationDefaultAttribute::Read(p, attribute_name));
+    } else if (attr_name == "ConstantValue") {
+      attributes.push_back(ConstantValueAttribute::Read(p, attribute_name));
+    } else if (attr_name == "RuntimeVisibleAnnotations" ||
+               attr_name == "RuntimeInvisibleAnnotations") {
+      attributes.push_back(AnnotationsAttribute::Read(p, attribute_name));
+    } else if (attr_name == "RuntimeVisibleParameterAnnotations" ||
+               attr_name == "RuntimeInvisibleParameterAnnotations") {
+      attributes.push_back(
+          ParameterAnnotationsAttribute::Read(p, attribute_name));
+    } else if (attr_name == "Scala" ||
+               attr_name == "ScalaSig" ||
+               attr_name == "ScalaInlineInfo") {
+      // These are opaque blobs, so can be handled with a general
+      // attribute handler
+      attributes.push_back(GeneralAttribute::Read(p, attribute_name,
+                                                  attribute_length));
+    } else if (attr_name == "RuntimeVisibleTypeAnnotations" ||
+               attr_name == "RuntimeInvisibleTypeAnnotations") {
+      // JSR 308: annotations on types. JDK 7 has no use for these yet, but the
+      // Checkers Framework relies on them.
+      attributes.push_back(TypeAnnotationsAttribute::Read(p, attribute_name,
+                                                          attribute_length));
+    } else {
+      // Skip over unknown attributes with a warning.  The JVM spec
+      // says this is ok, so long as we handle the mandatory attributes.
+      fprintf(stderr, "ijar: skipping unknown attribute: \"%s\".\n",
+              attr_name.c_str());
+      p += attribute_length;
+    }
+  }
+}
+
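+// The number of attributes actually written is not known up front (an
+// attribute's Write() may emit nothing at all), so a zero count is written
+// first and backpatched at p_size once we know how many attributes produced
+// output.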
+void HasAttrs::WriteAttrs(u1 *&p) {
+  u1* p_size = p;
+
+  put_u2be(p, 0);
+  int n_written_attrs = 0;
+  for (size_t ii = 0; ii < attributes.size(); ii++) {
+    u1* before = p;
+    attributes[ii]->Write(p);
+    if (p != before) {
+      n_written_attrs++;
+    }
+  }
+
+  put_u2be(p_size, n_written_attrs);
+}
+
+// See sec.4.4 of JVM spec.
+bool ClassFile::ReadConstantPool(const u1 *&p) {
+
+  const_pool_in.clear();
+  const_pool_in.push_back(NULL); // dummy first item
+
+  u2 cp_count = get_u2be(p);
+  for (int ii = 1; ii < cp_count; ++ii) {
+    u1 tag = get_u1(p);
+
+    if (devtools_ijar::verbose) {
+      fprintf(stderr, "cp[%d/%d] = tag %d\n", ii, cp_count, tag);
+    }
+
+    switch(tag) {
+      case CONSTANT_Class: {
+        u2 name_index = get_u2be(p);
+        const_pool_in.push_back(new Constant_Class(name_index));
+        break;
+      }
+      case CONSTANT_FieldRef:
+      case CONSTANT_Methodref:
+      case CONSTANT_Interfacemethodref: {
+        u2 class_index = get_u2be(p);
+        u2 nti = get_u2be(p);
+        const_pool_in.push_back(new Constant_FMIref(tag, class_index, nti));
+        break;
+      }
+      case CONSTANT_String: {
+        u2 string_index = get_u2be(p);
+        const_pool_in.push_back(new Constant_String(string_index));
+        break;
+      }
+      case CONSTANT_NameAndType: {
+        u2 name_index = get_u2be(p);
+        u2 descriptor_index = get_u2be(p);
+        const_pool_in.push_back(
+            new Constant_NameAndType(name_index, descriptor_index));
+        break;
+      }
+      case CONSTANT_Utf8: {
+        u2 length = get_u2be(p);
+        if (devtools_ijar::verbose) {
+          fprintf(stderr, "Utf8: \"%s\" (%d)\n",
+                  std::string((const char*) p, length).c_str(), length);
+        }
+
+        const_pool_in.push_back(new Constant_Utf8(length, p));
+        p += length;
+        break;
+      }
+      case CONSTANT_Integer:
+      case CONSTANT_Float: {
+        u4 bytes = get_u4be(p);
+        const_pool_in.push_back(new Constant_IntegerOrFloat(tag, bytes));
+        break;
+      }
+      case CONSTANT_Long:
+      case CONSTANT_Double: {
+        u4 high_bytes = get_u4be(p);
+        u4 low_bytes = get_u4be(p);
+        const_pool_in.push_back(
+            new Constant_LongOrDouble(tag, high_bytes, low_bytes));
+        // Longs and doubles occupy two constant pool slots.
+        // ("In retrospect, making 8-byte constants take two "constant
+        // pool entries was a poor choice." --JVM Spec.)
+        const_pool_in.push_back(NULL);
+        ii++;
+        break;
+      }
+      case CONSTANT_MethodHandle: {
+        u1 reference_kind = get_u1(p);
+        u2 reference_index = get_u2be(p);
+        const_pool_in.push_back(
+            new Constant_MethodHandle(reference_kind, reference_index));
+        break;
+      }
+      case CONSTANT_MethodType: {
+        u2 descriptor_index = get_u2be(p);
+        const_pool_in.push_back(new Constant_MethodType(descriptor_index));
+        break;
+      }
+      case CONSTANT_InvokeDynamic: {
+        u2 bootstrap_method_attr = get_u2be(p);
+        u2 name_name_type_index = get_u2be(p);
+        const_pool_in.push_back(new Constant_InvokeDynamic(
+            bootstrap_method_attr, name_name_type_index));
+        break;
+      }
+      default: {
+        fprintf(stderr, "Unknown constant: %02x. Passing class through.\n",
+                tag);
+        return false;
+      }
+    }
+  }
+
+  return true;
+}
+
+// Anonymous inner classes are stripped to opaque classes that only extend
+// Object. None of their methods or fields are accessible anyway.
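+// For example, an anonymous class Foo$1 loses its superclass, interfaces,
+// fields and methods; only its InnerClasses attribute survives.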
+void ClassFile::StripIfAnonymous() {
+  int enclosing_index = -1;
+  int inner_classes_index = -1;
+
+  for (size_t ii = 0; ii < attributes.size(); ++ii) {
+    if (attributes[ii]->attribute_name_->Display() == "EnclosingMethod") {
+      enclosing_index = ii;
+    } else if (attributes[ii]->attribute_name_->Display() == "InnerClasses") {
+      inner_classes_index = ii;
+    }
+  }
+
+  // Presence of an EnclosingMethod attribute indicates a local or anonymous
+  // class, which can be stripped.
+  if (enclosing_index > -1) {
+    // Clear the signature to only extend java.lang.Object.
+    super_class = NULL;
+    interfaces.clear();
+
+    // Clear away all fields (implementation details).
+    for (size_t ii = 0; ii < fields.size(); ++ii) {
+      delete fields[ii];
+    }
+    fields.clear();
+
+    // Clear away all methods (implementation details).
+    for (size_t ii = 0; ii < methods.size(); ++ii) {
+      delete methods[ii];
+    }
+    methods.clear();
+
+    // Only preserve the InnerClasses attribute to comply with the spec.
+    Attribute *attr = NULL;
+    for (size_t ii = 0; ii < attributes.size(); ++ii) {
+      if (static_cast<int>(ii) != inner_classes_index) {
+        delete attributes[ii];
+      } else {
+        attr = attributes[ii];
+      }
+    }
+    attributes.clear();
+    if (attr != NULL) {
+      attributes.push_back(attr);
+    }
+  }
+}
+
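+// Reads a class file from memory, dropping everything that is not needed to
+// compile against it: private fields and methods, class initializers, and
+// (via ReadAttrs) implementation-only attributes such as Code and
+// LineNumberTable.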
+static ClassFile *ReadClass(const void *classdata, size_t length) {
+  const u1 *p = (u1*) classdata;
+
+  ClassFile *clazz = new ClassFile;
+
+  clazz->length = length;
+
+  clazz->magic = get_u4be(p);
+  if (clazz->magic != 0xCAFEBABE) {
+    fprintf(stderr, "Bad magic %" PRIx32 "\n", clazz->magic);
+    abort();
+  }
+  clazz->major = get_u2be(p);
+  clazz->minor = get_u2be(p);
+
+  if (!clazz->ReadConstantPool(p)) {
+    delete clazz;
+    return NULL;
+  }
+
+  clazz->access_flags = get_u2be(p);
+  clazz->this_class = constant(get_u2be(p));
+  class_name = clazz->this_class;
+
+  u2 super_class_id = get_u2be(p);
+  clazz->super_class = super_class_id == 0 ? NULL : constant(super_class_id);
+
+  u2 interfaces_count = get_u2be(p);
+  for (int ii = 0; ii < interfaces_count; ++ii) {
+    clazz->interfaces.push_back(constant(get_u2be(p)));
+  }
+
+  u2 fields_count = get_u2be(p);
+  for (int ii = 0; ii < fields_count; ++ii) {
+    Member *field = Member::Read(p);
+
+    if (!(field->access_flags & ACC_PRIVATE)) { // drop private fields
+      clazz->fields.push_back(field);
+    }
+  }
+
+  u2 methods_count = get_u2be(p);
+  for (int ii = 0; ii < methods_count; ++ii) {
+    Member *method = Member::Read(p);
+
+    // drop class initializers
+    if (method->name->Display() == "<clinit>") continue;
+
+    if (!(method->access_flags & ACC_PRIVATE)) { // drop private methods
+      clazz->methods.push_back(method);
+    }
+  }
+
+  clazz->ReadAttrs(p);
+  clazz->StripIfAnonymous();
+
+  return clazz;
+}
+
+// In theory, '/' is also reserved, but it's okay if we just parse package
+// identifiers as part of the class name. Note that signatures are stored in
+// (modified) UTF-8, but this byte-oriented parsing works just as well as it
+// would for plain ASCII.
+static const char *SIGNATURE_NON_IDENTIFIER_CHARS = ".;[<>:";
+
+void Expect(const std::string& desc, size_t* p, char expected) {
+  if (desc[*p] != expected) {
+    fprintf(stderr, "Expected '%c' in '%s' at %zd in signature\n",
+            expected, desc.substr(*p).c_str(), *p);
+    exit(1);
+  }
+
+  *p += 1;
+}
+
+// These functions form a crude recursive descent parser for descriptors and
+// signatures in class files (see JVM spec 4.3).
+//
+// This parser is a bit more liberal than the spec, but this should be fine,
+// because it accepts all valid class files and croaks only on invalid ones.
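+//
+// For example, parsing the signature "Ljava/util/List<Ljava/lang/String;>;"
+// records "java/util/List" and "java/lang/String" in used_class_names; '/' is
+// treated as part of the identifier, so package-qualified names are captured
+// whole.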
+void ParseClassTypeSignature(const std::string& desc, size_t* p);
+void ParseSimpleClassTypeSignature(const std::string& desc, size_t* p);
+void ParseClassTypeSignatureSuffix(const std::string& desc, size_t* p);
+void ParseIdentifier(const std::string& desc, size_t* p);
+void ParseTypeArgumentsOpt(const std::string& desc, size_t* p);
+void ParseMethodDescriptor(const std::string& desc, size_t* p);
+
+void ParseClassTypeSignature(const std::string& desc, size_t* p) {
+  Expect(desc, p, 'L');
+  ParseSimpleClassTypeSignature(desc, p);
+  ParseClassTypeSignatureSuffix(desc, p);
+  Expect(desc, p, ';');
+}
+
+void ParseSimpleClassTypeSignature(const std::string& desc, size_t* p) {
+  ParseIdentifier(desc, p);
+  ParseTypeArgumentsOpt(desc, p);
+}
+
+void ParseClassTypeSignatureSuffix(const std::string& desc, size_t* p) {
+  while (desc[*p] == '.') {
+    *p += 1;
+    ParseSimpleClassTypeSignature(desc, p);
+  }
+}
+
+void ParseIdentifier(const std::string& desc, size_t* p) {
+  size_t next = desc.find_first_of(SIGNATURE_NON_IDENTIFIER_CHARS, *p);
+  std::string id = desc.substr(*p, next - *p);
+  used_class_names.insert(id);
+  *p = next;
+}
+
+void ParseTypeArgumentsOpt(const std::string& desc, size_t* p) {
+  if (desc[*p] != '<') {
+    return;
+  }
+
+  *p += 1;
+  while (desc[*p] != '>') {
+    switch (desc[*p]) {
+      case '*':
+        *p += 1;
+        break;
+
+      case '+':
+      case '-':
+        *p += 1;
+        ExtractClassNames(desc, p);
+        break;
+
+      default:
+        ExtractClassNames(desc, p);
+        break;
+    }
+  }
+
+  *p += 1;
+}
+
+void ParseMethodDescriptor(const std::string& desc, size_t* p) {
+  Expect(desc, p, '(');
+  while (desc[*p] != ')') {
+    ExtractClassNames(desc, p);
+  }
+
+  Expect(desc, p, ')');
+  ExtractClassNames(desc, p);
+}
+
+void ParseFormalTypeParameters(const std::string& desc, size_t* p) {
+  Expect(desc, p, '<');
+  while (desc[*p] != '>') {
+    ParseIdentifier(desc, p);
+    Expect(desc, p, ':');
+    if (desc[*p] != ':' && desc[*p] != '>') {
+      ExtractClassNames(desc, p);
+    }
+
+    while (desc[*p] == ':') {
+      Expect(desc, p, ':');
+      ExtractClassNames(desc, p);
+    }
+  }
+
+  Expect(desc, p, '>');
+}
+
+void ExtractClassNames(const std::string& desc, size_t* p) {
+  switch (desc[*p]) {
+    case '<':
+      ParseFormalTypeParameters(desc, p);
+      ExtractClassNames(desc, p);
+      break;
+
+    case 'L':
+      ParseClassTypeSignature(desc, p);
+      break;
+
+    case '[':
+      *p += 1;
+      ExtractClassNames(desc, p);
+      break;
+
+    case 'T':
+      *p += 1;
+      ParseIdentifier(desc, p);
+      Expect(desc, p, ';');
+      break;
+
+    case '(':
+      ParseMethodDescriptor(desc, p);
+      break;
+
+    case 'B':
+    case 'C':
+    case 'D':
+    case 'F':
+    case 'I':
+    case 'J':
+    case 'S':
+    case 'Z':
+    case 'V':
+      *p += 1;
+      break;
+
+    default:
+      fprintf(stderr, "Invalid signature %s\n", desc.substr(*p).c_str());
+  }
+}
+
+void ClassFile::WriteClass(u1 *&p) {
+  used_class_names.clear();
+  std::vector<Member *> members;
+  members.insert(members.end(), fields.begin(), fields.end());
+  members.insert(members.end(), methods.begin(), methods.end());
+  ExtractClassNames();
+  for (size_t i = 0; i < members.size(); i++) {
+    Member *member = members[i];
+    size_t idx = 0;
+    devtools_ijar::ExtractClassNames(member->descriptor->Display(), &idx);
+    member->ExtractClassNames();
+  }
+
+  // We have to write the body out before the header in order to reference
+  // the essential constants and populate the output constant pool:
+  u1 *body = new u1[length];
+  u1 *q = body;
+  WriteBody(q); // advances q
+  u4 body_length = q - body;
+
+  WriteHeader(p); // advances p
+  put_n(p, body, body_length);
+  delete[] body;
+}
+
+
+void StripClass(u1 *&classdata_out, const u1 *classdata_in, size_t in_length) {
+  ClassFile *clazz = ReadClass(classdata_in, in_length);
+  if (clazz == NULL) {
+    // Class is invalid. Simply copy it to the output and call it a day.
+    put_n(classdata_out, classdata_in, in_length);
+  } else {
+
+    // Constant pool item zero is a dummy entry.  Setting it marks the
+    // beginning of the output phase; calls to Constant::slot() will
+    // fail if called prior to this.
+    const_pool_out.push_back(NULL);
+    clazz->WriteClass(classdata_out);
+
+    delete clazz;
+  }
+
+  // Now clean up all the mess we left behind.
+
+  for (size_t i = 0; i < const_pool_in.size(); i++) {
+    delete const_pool_in[i];
+  }
+
+  const_pool_in.clear();
+  const_pool_out.clear();
+}
+
+}  // namespace devtools_ijar
diff --git a/tools/ijar/common.h b/tools/ijar/common.h
new file mode 100644
index 0000000..118041b
--- /dev/null
+++ b/tools/ijar/common.h
@@ -0,0 +1,102 @@
+// Copyright 2001,2007 Alan Donovan. All rights reserved.
+//
+// Author: Alan Donovan <adonovan@google.com>
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//    http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+//
+// common.h -- common definitions.
+//
+
+#ifndef INCLUDED_DEVTOOLS_IJAR_COMMON_H
+#define INCLUDED_DEVTOOLS_IJAR_COMMON_H
+
+#include <stddef.h>
+#include <stdint.h>
+#include <string.h>
+
+namespace devtools_ijar {
+
+typedef unsigned long long u8;
+typedef uint32_t u4;
+typedef uint16_t u2;
+typedef uint8_t  u1;
+
+// be = big endian, le = little endian
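+// For example, the byte pair {0x12, 0x34} reads as 0x1234 with get_u2be and
+// as 0x3412 with get_u2le.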
+
+inline u1 get_u1(const u1 *&p) {
+    return *p++;
+}
+
+inline u2 get_u2be(const u1 *&p) {
+    u4 x = (p[0] << 8) | p[1];
+    p += 2;
+    return x;
+}
+
+inline u2 get_u2le(const u1 *&p) {
+    u4 x = (p[1] << 8) | p[0];
+    p += 2;
+    return x;
+}
+
+inline u4 get_u4be(const u1 *&p) {
+    u4 x = (p[0] << 24) | (p[1] << 16) | (p[2] << 8) | p[3];
+    p += 4;
+    return x;
+}
+
+inline u4 get_u4le(const u1 *&p) {
+    u4 x = (p[3] << 24) | (p[2] << 16) | (p[1] << 8) | p[0];
+    p += 4;
+    return x;
+}
+
+inline void put_u1(u1 *&p, u1 x) {
+    *p++ = x;
+}
+
+inline void put_u2be(u1 *&p, u2 x) {
+    *p++ = x >> 8;
+    *p++ = x & 0xff;
+}
+
+inline void put_u2le(u1 *&p, u2 x) {
+    *p++ = x & 0xff;
+    *p++ = x >> 8;
+}
+
+inline void put_u4be(u1 *&p, u4 x) {
+    *p++ = x >> 24;
+    *p++ = (x >> 16) & 0xff;
+    *p++ = (x >> 8) & 0xff;
+    *p++ = x & 0xff;
+}
+
+inline void put_u4le(u1 *&p, u4 x) {
+    *p++ = x & 0xff;
+    *p++ = (x >> 8) & 0xff;
+    *p++ = (x >> 16) & 0xff;
+    *p++ = x >> 24;
+}
+
+// Copy n bytes from src to p, and advance p.
+inline void put_n(u1 *&p, const u1 *src, size_t n) {
+  memcpy(p, src, n);
+  p += n;
+}
+
+extern bool verbose;
+
+}  // namespace devtools_ijar
+
+#endif // INCLUDED_DEVTOOLS_IJAR_COMMON_H
diff --git a/tools/ijar/ijar.cc b/tools/ijar/ijar.cc
new file mode 100644
index 0000000..1925b48
--- /dev/null
+++ b/tools/ijar/ijar.cc
@@ -0,0 +1,182 @@
+// Copyright 2001,2007 Alan Donovan. All rights reserved.
+//
+// Author: Alan Donovan <adonovan@google.com>
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//    http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+//
+// ijar.cc -- .jar -> -interface.jar tool.
+//
+
+#include <stdio.h>
+#include <string.h>
+#include <stdlib.h>
+#include <limits.h>
+#include <errno.h>
+#include <memory>
+
+#include "zip.h"
+
+namespace devtools_ijar {
+
+bool verbose = false;
+
+// Reads a JVM class from classdata_in (of the specified length), and
+// writes out a simplified class to classdata_out, advancing the
+// pointer.
+void StripClass(u1 *&classdata_out, const u1 *classdata_in, size_t in_length);
+
+const char* CLASS_EXTENSION = ".class";
+const size_t CLASS_EXTENSION_LENGTH = strlen(CLASS_EXTENSION);
+
+// ZipExtractorProcessor that selects only .class files and uses
+// StripClass to generate interface classes, storing them as new files
+// in the specified ZipBuilder.
+class JarStripperProcessor : public ZipExtractorProcessor {
+ public:
+  JarStripperProcessor() {}
+  virtual ~JarStripperProcessor() {}
+
+  virtual void Process(const char* filename, const u4 attr,
+                       const u1* data, const size_t size);
+  virtual bool Accept(const char* filename, const u4 attr);
+
+ private:
+  // Not owned by JarStripperProcessor, see SetZipBuilder().
+  ZipBuilder* builder;
+
+ public:
+  // Sets the ZipBuilder used to add the stripped classes to the output zip
+  // file. The builder must not be deleted while this class is still in use,
+  // and it must be set before any call to the Process() method.
+  void SetZipBuilder(ZipBuilder* builder) {
+    this->builder = builder;
+  }
+};
+
+bool JarStripperProcessor::Accept(const char* filename, const u4) {
+  ssize_t offset = strlen(filename) - CLASS_EXTENSION_LENGTH;
+  if (offset >= 0) {
+    return strcmp(filename + offset, CLASS_EXTENSION) == 0;
+  }
+  return false;
+}
+
+void JarStripperProcessor::Process(const char* filename, const u4,
+                                   const u1* data, const size_t size) {
+  if (verbose) {
+    fprintf(stderr, "INFO: StripClass: %s\n", filename);
+  }
+  u1 *q = builder->NewFile(filename, 0);
+  u1 *classdata_out = q;
+  StripClass(q, data, size);  // actually process it
+  size_t out_length = q - classdata_out;
+  builder->FinishFile(out_length);
+}
+
+// Opens "file_in" (a .jar file) for reading, and writes an interface
+// .jar to "file_out".
+void OpenFilesAndProcessJar(const char *file_out, const char *file_in) {
+  JarStripperProcessor processor;
+  std::unique_ptr<ZipExtractor> in(ZipExtractor::Create(file_in, &processor));
+  if (in.get() == NULL) {
+    fprintf(stderr, "Unable to open Zip file %s: %s\n", file_in,
+            strerror(errno));
+    abort();
+  }
+  u8 output_length = in->CalculateOutputLength();
+  std::unique_ptr<ZipBuilder> out(ZipBuilder::Create(file_out, output_length));
+  if (out.get() == NULL) {
+    fprintf(stderr, "Unable to open output file %s: %s\n", file_out,
+            strerror(errno));
+    abort();
+  }
+  processor.SetZipBuilder(out.get());
+
+  // Process all files in the zip
+  if (in->ProcessAll() < 0) {
+    fprintf(stderr, "%s\n", in->GetError());
+    abort();
+  }
+
+  // Add dummy file, since javac doesn't like truly empty jars.
+  if (out->GetNumberFiles() == 0) {
+    out->WriteEmptyFile("dummy");
+  }
+  // Finish writing the output file
+  if (out->Finish() < 0) {
+    fprintf(stderr, "%s\n", out->GetError());
+    abort();
+  }
+  // Get the input and output file sizes
+  size_t in_length = in->GetSize();
+  size_t out_length = out->GetSize();
+  if (verbose) {
+    fprintf(stderr, "INFO: produced interface jar: %s -> %s (%d%%).\n",
+            file_in, file_out,
+            static_cast<int>(100.0 * out_length / in_length));
+  }
+}
+
+}  // namespace devtools_ijar
+
+//
+// main method
+//
+static void usage() {
+  fprintf(stderr, "Usage: ijar [-v] x.jar [x_interface.jar>]\n");
+  fprintf(stderr, "Creates an interface jar from the specified jar file.\n");
+  exit(1);
+}
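+// For example, "ijar libfoo.jar" writes the interface jar to
+// "libfoo-interface.jar" (the default output name derived in main() below).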
+
+int main(int argc, char **argv) {
+  const char *filename_in = NULL;
+  const char *filename_out = NULL;
+
+  for (int ii = 1; ii < argc; ++ii) {
+    if (strcmp(argv[ii], "-v") == 0) {
+      devtools_ijar::verbose = true;
+    } else if (filename_in == NULL) {
+      filename_in = argv[ii];
+    } else if (filename_out == NULL) {
+      filename_out = argv[ii];
+    } else {
+      usage();
+    }
+  }
+
+  if (filename_in == NULL) {
+    usage();
+  }
+
+  // Guess output filename from input:
+  char filename_out_buf[PATH_MAX];
+  if (filename_out == NULL) {
+    size_t len = strlen(filename_in);
+    if (len > 4 && strncmp(filename_in + len - 4, ".jar", 4) == 0) {
+      strcpy(filename_out_buf, filename_in);
+      strcpy(filename_out_buf + len - 4, "-interface.jar");
+      filename_out = filename_out_buf;
+    } else {
+      fprintf(stderr, "Can't determine output filename since input filename "
+              "doesn't end with '.jar'.\n");
+      return 1;
+    }
+  }
+
+  if (devtools_ijar::verbose) {
+    fprintf(stderr, "INFO: writing to '%s'.\n", filename_out);
+  }
+
+  devtools_ijar::OpenFilesAndProcessJar(filename_out, filename_in);
+  return 0;
+}
diff --git a/tools/ijar/zip.cc b/tools/ijar/zip.cc
new file mode 100644
index 0000000..ca5f396
--- /dev/null
+++ b/tools/ijar/zip.cc
@@ -0,0 +1,1031 @@
+// Copyright 2007 Alan Donovan. All rights reserved.
+//
+// Author: Alan Donovan <adonovan@google.com>
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//    http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+//
+// zip.cc -- .zip (.jar) file reading/writing routines.
+//
+
+// See README.txt for details.
+//
+// See http://www.pkware.com/documents/casestudies/APPNOTE.TXT
+// for definition of PKZIP file format.
+
+#define _FILE_OFFSET_BITS 64  // Support zip files larger than 2GB
+
+#include <errno.h>
+#include <fcntl.h>
+#include <stddef.h>
+#include <stdint.h>
+#include <stdarg.h>
+#include <stdio.h>
+#include <stdlib.h>
+#include <string.h>
+#include <sys/mman.h>
+#include <unistd.h>
+#include <limits.h>
+#include <limits>
+#include <vector>
+
+#include "zip.h"
+#include <zlib.h>
+
+#define LOCAL_FILE_HEADER_SIGNATURE           0x04034b50
+#define CENTRAL_FILE_HEADER_SIGNATURE         0x02014b50
+#define END_OF_CENTRAL_DIR_SIGNATURE          0x06054b50
+#define DATA_DESCRIPTOR_SIGNATURE             0x08074b50
+
+// version to extract: 1.0 - default value from APPNOTE.TXT.
+// Output JAR files contain no extra ZIP features, so this is enough.
+#define ZIP_VERSION_TO_EXTRACT                10
+#define COMPRESSION_METHOD_STORED             0   // no compression
+#define COMPRESSION_METHOD_DEFLATED           8
+
+#define GENERAL_PURPOSE_BIT_FLAG_COMPRESSED (1 << 3)
+#define GENERAL_PURPOSE_BIT_FLAG_UTF8_ENCODED (1 << 11)
+#define GENERAL_PURPOSE_BIT_FLAG_COMPRESSION_SPEED ((1 << 2) | (1 << 1))
+#define GENERAL_PURPOSE_BIT_FLAG_SUPPORTED \
+  (GENERAL_PURPOSE_BIT_FLAG_COMPRESSED \
+  | GENERAL_PURPOSE_BIT_FLAG_UTF8_ENCODED \
+  | GENERAL_PURPOSE_BIT_FLAG_COMPRESSION_SPEED)
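+// Bit 3 means the CRC and sizes are zero in the local header and are written
+// in a data descriptor after the file data; bit 11 marks UTF-8 encoded file
+// names; bits 1-2 merely encode the deflate compression level.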
+
+namespace devtools_ijar {
+// In the absence of ZIP64 support, zip files are limited to 4GB.
+// http://www.info-zip.org/FAQ.html#limits
+static const u8 kMaximumOutputSize = std::numeric_limits<uint32_t>::max();
+
+static bool ProcessCentralDirEntry(const u1 *&p,
+                                   size_t *compressed_size,
+                                   size_t *uncompressed_size,
+                                   char *filename,
+                                   size_t filename_size,
+                                   u4 *attr,
+                                   u4 *offset);
+
+//
+// A class representing a ZipFile for reading. Its public API is exposed
+// using the ZipExtractor abstract class.
+//
+class InputZipFile : public ZipExtractor {
+ public:
+  InputZipFile(ZipExtractorProcessor *processor, int fd, off_t in_length,
+               off_t in_offset, const u1* zipdata_in, const u1* central_dir);
+  virtual ~InputZipFile();
+
+  virtual const char* GetError() {
+    if (errmsg[0] == 0) {
+      return NULL;
+    }
+    return errmsg;
+  }
+
+  virtual bool ProcessNext();
+  virtual void Reset();
+  virtual size_t GetSize() {
+    return in_length_;
+  }
+
+  virtual u8 CalculateOutputLength();
+
+ private:
+  ZipExtractorProcessor *processor;
+
+  int fd_in;  // Input file descriptor
+
+  // InputZipFile is responsible for maintaining the following
+  // pointers. They are mapped with mmap() by the Create() method before
+  // the object itself is constructed.
+  const u1 * const zipdata_in_;   // start of input file mmap
+  const u1 * zipdata_in_mapped_;  // start of still mapped region
+  const u1 * const central_dir_;  // central directory in input file
+
+  size_t in_length_;  // size of the input file
+  size_t in_offset_;  // offset at which the zip data starts in the input file
+
+  const u1 *p;  // input cursor
+
+  const u1* central_dir_current_;  // central dir input cursor
+
+  // Buffer size is initially INITIAL_BUFFER_SIZE. It doubles in size every
+  // time it is found too small, until it reaches MAX_BUFFER_SIZE. If that is
+  // not enough, we bail out. We only decompress class files, so they should
+  // be smaller than 64K anyway, but we give a little leeway.
+  // MAX_BUFFER_SIZE must be bigger than the size of the biggest file in the
+  // ZIP. It is set to 128M here so we can uncompress the Bazel server with
+  // this library.
+  static const size_t INITIAL_BUFFER_SIZE = 256 * 1024;  // 256K
+  static const size_t MAX_BUFFER_SIZE = 128 * 1024 * 1024;
+  static const size_t MAX_MAPPED_REGION = 32 * 1024 * 1024;
+
+  // These metadata fields are the fields of the ZIP header of the file being
+  // processed.
+  u2 extract_version_;
+  u2 general_purpose_bit_flag_;
+  u2 compression_method_;
+  u4 uncompressed_size_;
+  u4 compressed_size_;
+  u2 file_name_length_;
+  u2 extra_field_length_;
+  const u1 *file_name_;
+  const u1 *extra_field_;
+
+  // Administration of memory reserved for decompressed data. We use the same
+  // buffer for each file to avoid some malloc()/free() calls and free the
+  // memory only in the dtor. C-style memory management is used so that we
+  // can call realloc.
+  u1 *uncompressed_data_;
+  size_t uncompressed_data_allocated_;
+
+  // Copy of the last filename entry - Null-terminated.
+  char filename[PATH_MAX];
+  // The external file attribute field
+  u4 attr;
+
+  // last error
+  char errmsg[4*PATH_MAX];
+
+  int error(const char *fmt, ...) {
+    va_list ap;
+    va_start(ap, fmt);
+    vsnprintf(errmsg, 4*PATH_MAX, fmt, ap);
+    va_end(ap);
+    return -1;
+  }
+
+  // Check that at least n bytes remain in the input file, otherwise
+  // abort with an error message.  "state" is the name of the field
+  // we're about to read, for diagnostics.
+  int EnsureRemaining(size_t n, const char *state) {
+    size_t in_offset = p - zipdata_in_;
+    size_t remaining = in_length_ - in_offset;
+    if (n > remaining) {
+      return error("Premature end of file (at offset %zd, state=%s); "
+                   "expected %zd more bytes but found %zd.\n",
+                   in_offset, state, n, remaining);
+    }
+    return 0;
+  }
+
+  // Read one entry from input zip file
+  int ProcessLocalFileEntry(size_t compressed_size, size_t uncompressed_size);
+
+  // Uncompress a file from the archive using zlib. The pointer returned
+  // is owned by InputZipFile, so it must not be freed. Advances the input
+  // cursor to the first byte after the compressed data.
+  u1* UncompressFile();
+
+  // Skip a file
+  int SkipFile(const bool compressed);
+
+  // Process a file
+  int ProcessFile(const bool compressed);
+};
+
+//
+// A class implementing ZipBuilder that represents an open zip file for writing.
+//
+class OutputZipFile : public ZipBuilder {
+ public:
+  OutputZipFile(int fd, u1 * const zipdata_out) :
+      fd_out(fd),
+      zipdata_out_(zipdata_out),
+      q(zipdata_out) {
+    errmsg[0] = 0;
+  }
+
+  virtual const char* GetError() {
+    if (errmsg[0] == 0) {
+      return NULL;
+    }
+    return errmsg;
+  }
+
+  virtual ~OutputZipFile() { Finish(); }
+  virtual u1* NewFile(const char* filename, const u4 attr);
+  virtual int FinishFile(size_t filelength, bool compress = false,
+                         bool compute_crc = false);
+  virtual int WriteEmptyFile(const char *filename);
+  virtual size_t GetSize() {
+    return Offset(q);
+  }
+  virtual int GetNumberFiles() {
+    return entries_.size();
+  }
+  virtual int Finish();
+
+ private:
+  struct LocalFileEntry {
+    // Start of the local header (in the output buffer).
+    size_t local_header_offset;
+
+    // Sizes of the file entry
+    size_t uncompressed_length;
+    size_t compressed_length;
+
+    // Compression method
+    u2 compression_method;
+
+    // CRC32
+    u4 crc32;
+
+    // external attributes field
+    u4 external_attr;
+
+    // Start/length of the file_name in the local header.
+    u1 *file_name;
+    u2 file_name_length;
+
+    // Start/length of the extra_field in the local header.
+    const u1 *extra_field;
+    u2 extra_field_length;
+  };
+
+  int fd_out;  // file descriptor for the output file
+
+  // OutputZipFile is responsible for maintaining the following
+  // pointers. They are mapped with mmap() by the Create() method before
+  // the object itself is constructed.
+  u1 * const zipdata_out_;        // start of output file mmap
+  u1 *q;  // output cursor
+
+  u1 *header_ptr;  // Current pointer to "compression method" entry.
+
+  // List of entries to write the central directory
+  std::vector<LocalFileEntry*> entries_;
+
+  // last error
+  char errmsg[4*PATH_MAX];
+
+  int error(const char *fmt, ...) {
+    va_list ap;
+    va_start(ap, fmt);
+    vsnprintf(errmsg, 4*PATH_MAX, fmt, ap);
+    va_end(ap);
+    return -1;
+  }
+
+  // Write the ZIP central directory structure for each local file
+  // entry in "entries".
+  void WriteCentralDirectory();
+
+  // Returns the offset of the pointer relative to the start of the
+  // output zip file.
+  size_t Offset(const u1 *const x) {
+    return x - zipdata_out_;
+  }
+
+  // Write ZIP file header in the output. Since the compressed size is not
+  // known in advance, it must be recorded later. This method returns a pointer
+  // to "compressed size" in the file header that should be passed to
+  // WriteFileSizeInLocalFileHeader() later.
+  u1* WriteLocalFileHeader(const char *filename, const u4 attr);
+
+  // Fill in the "compressed size" and "uncompressed size" fields in a local
+  // file header previously written by WriteLocalFileHeader().
+  size_t WriteFileSizeInLocalFileHeader(u1 *header_ptr,
+                                        size_t out_length,
+                                        bool compress = false,
+                                        const u4 crc = 0);
+};
+
+//
+// Implementation of InputZipFile
+//
+bool InputZipFile::ProcessNext() {
+  // Process the next entry in the central directory. Also make sure that the
+  // content pointer is in sync.
+  size_t compressed, uncompressed;
+  u4 offset;
+  if (!ProcessCentralDirEntry(central_dir_current_, &compressed, &uncompressed,
+                              filename, PATH_MAX, &attr, &offset)) {
+    return false;
+  }
+
+  // There might be an offset specified in the central directory that does
+  // not match the file offset; if so, correct the pointer.
+  if (offset != 0 && (p != (zipdata_in_ + in_offset_ + offset))) {
+    p = zipdata_in_ + offset;
+  }
+
+  if (EnsureRemaining(4, "signature") < 0) {
+    return false;
+  }
+  u4 signature = get_u4le(p);
+  if (signature == LOCAL_FILE_HEADER_SIGNATURE) {
+    if (ProcessLocalFileEntry(compressed, uncompressed) < 0) {
+      return false;
+    }
+  } else {
+    error("local file header signature for file %s not found\n", filename);
+    return false;
+  }
+
+  return true;
+}
+
+int InputZipFile::ProcessLocalFileEntry(
+    size_t compressed_size, size_t uncompressed_size) {
+  if (EnsureRemaining(26, "extract_version") < 0) {
+    return -1;
+  }
+  extract_version_ = get_u2le(p);
+  general_purpose_bit_flag_ = get_u2le(p);
+
+  if ((general_purpose_bit_flag_ & ~GENERAL_PURPOSE_BIT_FLAG_SUPPORTED) != 0) {
+    return error("Unsupported value (0x%04x) in general purpose bit flag.\n",
+                 general_purpose_bit_flag_);
+  }
+
+  compression_method_ = get_u2le(p);
+
+  if (compression_method_ != COMPRESSION_METHOD_DEFLATED &&
+      compression_method_ != COMPRESSION_METHOD_STORED) {
+    return error("Unsupported compression method (%d).\n",
+                 compression_method_);
+  }
+
+  // skip over: last_mod_file_time, last_mod_file_date, crc32
+  p += 2 + 2 + 4;
+  compressed_size_ = get_u4le(p);
+  uncompressed_size_ = get_u4le(p);
+  file_name_length_ = get_u2le(p);
+  extra_field_length_ = get_u2le(p);
+
+  if (EnsureRemaining(file_name_length_, "file_name") < 0) {
+    return -1;
+  }
+  file_name_ = p;
+  p += file_name_length_;
+
+  if (EnsureRemaining(extra_field_length_, "extra_field") < 0) {
+    return -1;
+  }
+  extra_field_ = p;
+  p += extra_field_length_;
+
+  bool is_compressed = compression_method_ == COMPRESSION_METHOD_DEFLATED;
+
+  // The local file header may record zero for the compressed and uncompressed
+  // sizes (they were not known when the header was written); in that case,
+  // take the sizes from the central directory. If the header does record
+  // sizes, they must agree with the central directory.
+  if (compressed_size_ == 0) {
+    compressed_size_ = compressed_size;
+  } else {
+    if (compressed_size_ != compressed_size) {
+      return error("central directory and file header inconsistent\n");
+    }
+  }
+
+  if (uncompressed_size_ == 0) {
+    uncompressed_size_ = uncompressed_size;
+  } else {
+    if (uncompressed_size_ != uncompressed_size) {
+      return error("central directory and file header inconsistent\n");
+    }
+  }
+
+  if (processor->Accept(filename, attr)) {
+    if (ProcessFile(is_compressed) < 0) {
+      return -1;
+    }
+  } else {
+    if (SkipFile(is_compressed) < 0) {
+      return -1;
+    }
+  }
+
+  if (general_purpose_bit_flag_ & GENERAL_PURPOSE_BIT_FLAG_COMPRESSED) {
+    // Skip the data descriptor. Some implementations do not put the signature
+    // here, so check if the next 4 bytes are a signature, and if so, skip the
+    // next 12 bytes (for CRC, compressed/uncompressed size), otherwise skip
+    // the next 8 bytes (because the value just read was the CRC).
+    u4 signature = get_u4le(p);
+    if (signature == DATA_DESCRIPTOR_SIGNATURE) {
+      p += 4 * 3;
+    } else {
+      p += 4 * 2;
+    }
+  }
+
+  if (p > zipdata_in_mapped_ + MAX_MAPPED_REGION) {
+    munmap(const_cast<u1 *>(zipdata_in_mapped_), MAX_MAPPED_REGION);
+    zipdata_in_mapped_ += MAX_MAPPED_REGION;
+  }
+
+  return 0;
+}
+
+int InputZipFile::SkipFile(const bool compressed) {
+  if (!compressed) {
+    // In this case, compressed_size_ == uncompressed_size_ (since the file is
+    // uncompressed), so we can use either.
+    if (compressed_size_ != uncompressed_size_) {
+      return error("compressed size != uncompressed size, although the file "
+                   "is uncompressed.\n");
+    }
+  }
+
+  if (EnsureRemaining(compressed_size_, "file_data") < 0) {
+    return -1;
+  }
+  p += compressed_size_;
+  return 0;
+}
+
+u1* InputZipFile::UncompressFile() {
+  size_t in_offset = p - zipdata_in_;
+  size_t remaining = in_length_ - in_offset;
+  z_stream stream;
+
+  stream.zalloc = Z_NULL;
+  stream.zfree = Z_NULL;
+  stream.opaque = Z_NULL;
+  stream.avail_in = remaining;
+  stream.next_in = (Bytef *) p;
+
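+  // Negative window bits tell zlib to expect a raw deflate stream without the
+  // zlib header and trailer, which is how data is stored inside zip entries.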
+  int ret = inflateInit2(&stream, -MAX_WBITS);
+  if (ret != Z_OK) {
+    error("inflateInit: %d\n", ret);
+    return NULL;
+  }
+
+  int uncompressed_until_now = 0;
+
+  while (true) {
+    stream.avail_out = uncompressed_data_allocated_ - uncompressed_until_now;
+    stream.next_out = uncompressed_data_ + uncompressed_until_now;
+    int old_avail_out = stream.avail_out;
+
+    ret = inflate(&stream, Z_SYNC_FLUSH);
+    int uncompressed_now = old_avail_out - stream.avail_out;
+    uncompressed_until_now += uncompressed_now;
+
+    switch (ret) {
+      case Z_STREAM_END: {
+        // zlib said that there is no more data to decompress.
+
+        u1 *new_p = reinterpret_cast<u1*>(stream.next_in);
+        compressed_size_ = new_p - p;
+        uncompressed_size_ = uncompressed_until_now;
+        p = new_p;
+        inflateEnd(&stream);
+        return uncompressed_data_;
+      }
+
+      case Z_OK: {
+        // zlib said that there is no more room in the buffer allocated for
+        // the decompressed data. Enlarge that buffer and try again.
+
+        if (uncompressed_data_allocated_ == MAX_BUFFER_SIZE) {
+          error("ijar does not support decompressing files "
+                "larger than %dMB.\n",
+                (int) (MAX_BUFFER_SIZE/(1024*1024)));
+          return NULL;
+        }
+
+        uncompressed_data_allocated_ *= 2;
+        if (uncompressed_data_allocated_ > MAX_BUFFER_SIZE) {
+          uncompressed_data_allocated_ = MAX_BUFFER_SIZE;
+        }
+
+        uncompressed_data_ = reinterpret_cast<u1*>(
+            realloc(uncompressed_data_, uncompressed_data_allocated_));
+        break;
+      }
+
+      case Z_DATA_ERROR:
+      case Z_BUF_ERROR:
+      case Z_STREAM_ERROR:
+      case Z_NEED_DICT:
+      default: {
+        error("zlib returned error code %d during inflate.\n", ret);
+        return NULL;
+      }
+    }
+  }
+}
+
+int InputZipFile::ProcessFile(const bool compressed) {
+  const u1 *file_data;
+  if (compressed) {
+    file_data = UncompressFile();
+    if (file_data == NULL) {
+      return -1;
+    }
+  } else {
+    // In this case, compressed_size_ == uncompressed_size_ (since the file is
+    // uncompressed), so we can use either.
+    if (compressed_size_ != uncompressed_size_) {
+      return error("compressed size != uncompressed size, although the file "
+                   "is uncompressed.\n");
+    }
+
+    if (EnsureRemaining(compressed_size_, "file_data") < 0) {
+      return -1;
+    }
+    file_data = p;
+    p += compressed_size_;
+  }
+  processor->Process(filename, attr, file_data, uncompressed_size_);
+  return 0;
+}
+
+
+// Reads and returns some metadata of the next file from the central directory:
+// - compressed and uncompressed size
+// - file name
+// - external file attributes
+// - offset of the local file header.
+// Precondition: p points to the beginning of an entry in the central dir
+// Postcondition: p points to the beginning of the next entry in the central dir
+// Returns true if the central directory contains another file and false if not.
+// Of course, in the latter case, the size output variables are not changed.
+// Note that the central directory is always followed by another data structure
+// that has a signature, so parsing it this way is safe.
+static bool ProcessCentralDirEntry(
+    const u1 *&p, size_t *compressed_size, size_t *uncompressed_size,
+    char *filename, size_t filename_size, u4 *attr, u4 *offset) {
+  u4 signature = get_u4le(p);
+  if (signature != CENTRAL_FILE_HEADER_SIGNATURE) {
+    return false;
+  }
+
+  p += 16;  // skip to 'compressed size' field
+  *compressed_size = get_u4le(p);
+  *uncompressed_size = get_u4le(p);
+  u2 file_name_length = get_u2le(p);
+  u2 extra_field_length = get_u2le(p);
+  u2 file_comment_length = get_u2le(p);
+  p += 4;  // skip to external file attributes field
+  *attr = get_u4le(p);
+  *offset = get_u4le(p);
+  {
+    size_t len = (file_name_length < filename_size)
+      ? file_name_length
+      : (filename_size - 1);
+    memcpy(reinterpret_cast<void*>(filename), p, len);
+    filename[len] = 0;
+  }
+  p += file_name_length;
+  p += extra_field_length;
+  p += file_comment_length;
+  return true;
+}
+
+// Gives an upper bound on the size of the interface JAR: it adds the
+// difference between the uncompressed and compressed sizes of the kept
+// entries to the size of the input file, minus the compressed size of the
+// skipped entries.
+u8 InputZipFile::CalculateOutputLength() {
+  const u1* current = central_dir_;
+
+  u8 compressed_size = 0;
+  u8 uncompressed_size = 0;
+  u8 skipped_compressed_size = 0;
+  u4 attr;
+  u4 offset;
+  char filename[PATH_MAX];
+
+  while (true) {
+    size_t file_compressed, file_uncompressed;
+    if (!ProcessCentralDirEntry(current,
+                                &file_compressed, &file_uncompressed,
+                                filename, PATH_MAX, &attr, &offset)) {
+      break;
+    }
+
+    if (processor->Accept(filename, attr)) {
+      compressed_size += (u8) file_compressed;
+      uncompressed_size += (u8) file_uncompressed;
+    } else {
+      skipped_compressed_size += file_compressed;
+    }
+  }
+
+  // The worst case is when the output is simply the input uncompressed. The
+  // metadata in the zip file will stay the same, so the file will grow by the
+  // difference between the compressed and uncompressed sizes.
+  return (u8) in_length_ - skipped_compressed_size
+      + (uncompressed_size - compressed_size);
+}
+
+// Given the data in the zip file, returns the offset of the central directory
+// (as recorded in the end-of-central-directory record) and a pointer to its
+// first entry.
+bool FindZipCentralDirectory(const u1* bytes, size_t in_length,
+                             u4* offset, const u1** central_dir) {
+  static const int MAX_COMMENT_LENGTH = 0xffff;
+  static const int CENTRAL_DIR_LOCATOR_SIZE = 22;
+  // Maximum distance of start of central dir locator from end of file
+  static const int MAX_DELTA = MAX_COMMENT_LENGTH + CENTRAL_DIR_LOCATOR_SIZE;
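+  // The fixed part of the end-of-central-directory record is
+  // CENTRAL_DIR_LOCATOR_SIZE (22) bytes and may only be followed by the
+  // archive comment (at most 0xffff bytes), so scanning the last MAX_DELTA
+  // bytes backwards is guaranteed to find it.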
+  const u1* last_pos_to_check = in_length < MAX_DELTA
+      ? bytes
+      : bytes + (in_length - MAX_DELTA);
+  const u1* current;
+  bool found = false;
+
+  for (current = bytes + in_length - CENTRAL_DIR_LOCATOR_SIZE;
+       current >= last_pos_to_check;
+       current-- ) {
+    const u1* p = current;
+    if (get_u4le(p) != END_OF_CENTRAL_DIR_SIGNATURE) {
+      continue;
+    }
+
+    p += 16;  // skip to comment length field
+    u2 comment_length = get_u2le(p);
+
+    // Does the comment go exactly till the end of the file?
+    if (current + comment_length + CENTRAL_DIR_LOCATOR_SIZE
+        != bytes + in_length) {
+      continue;
+    }
+
+    // Hooray, we found it!
+    found = true;
+    break;
+  }
+
+  if (!found) {
+    fprintf(stderr, "file is invalid or corrupted (missing end of central "
+                    "directory record)\n");
+    return false;
+  }
+
+  const u1* end_of_central_dir = current;
+  get_u4le(current);  // end-of-central-directory signature, already checked
+  u2 number_of_this_disk = get_u2le(current);
+  u2 disk_with_central_dir = get_u2le(current);
+  u2 central_dir_entries_on_this_disk = get_u2le(current);
+  u2 central_dir_entries = get_u2le(current);
+  u4 central_dir_size = get_u4le(current);
+  u4 central_dir_offset = get_u4le(current);
+  u2 file_comment_length = get_u2le(current);
+  current += file_comment_length;  // set current to the end of the central dir
+
+  if (number_of_this_disk != 0
+    || disk_with_central_dir != 0
+    || central_dir_entries_on_this_disk != central_dir_entries) {
+    fprintf(stderr, "multi-disk JAR files are not supported\n");
+    return false;
+  }
+
+  // Do not change output values before determining that they are OK.
+  *offset = central_dir_offset;
+  // The central directory start can then be used to determine the actual
+  // start of the zip file (which can differ in the case of a non-zip
+  // header, e.g. for self-extracting binaries).
+  *central_dir = end_of_central_dir - central_dir_size;
+  return true;
+}
+
+void InputZipFile::Reset() {
+  central_dir_current_ = central_dir_;
+  zipdata_in_mapped_ = zipdata_in_;
+  p = zipdata_in_ + in_offset_;
+}
+
+int ZipExtractor::ProcessAll() {
+  while (ProcessNext()) {}
+  if (GetError() != NULL) {
+    return -1;
+  }
+  return 0;
+}
+
+ZipExtractor* ZipExtractor::Create(const char* filename,
+                                   ZipExtractorProcessor *processor) {
+  int fd_in = open(filename, O_RDONLY);
+  if (fd_in < 0) {
+    return NULL;
+  }
+
+  off_t length = lseek(fd_in, 0, SEEK_END);
+  if (length < 0) {
+    return NULL;
+  }
+
+  void *zipdata_in = mmap(NULL, length, PROT_READ, MAP_PRIVATE, fd_in, 0);
+  if (zipdata_in == MAP_FAILED) {
+    return NULL;
+  }
+
+  u4 central_dir_offset;
+  const u1 *central_dir = NULL;
+
+  if (!devtools_ijar::FindZipCentralDirectory(
+          static_cast<const u1*>(zipdata_in), length,
+          &central_dir_offset, &central_dir)) {
+    errno = EIO;  // we don't really have a good error number
+    return NULL;
+  }
+  const u1 *zipdata_start = static_cast<const u1*>(zipdata_in);
+  off_t offset = - static_cast<off_t>(zipdata_start
+                                      + central_dir_offset
+                                      - central_dir);
+
+  return new InputZipFile(processor, fd_in, length, offset,
+                          zipdata_start, central_dir);
+}
+
+InputZipFile::InputZipFile(ZipExtractorProcessor *processor, int fd,
+                           off_t in_length, off_t in_offset,
+                           const u1* zipdata_in, const u1* central_dir)
+  : processor(processor), fd_in(fd),
+    zipdata_in_(zipdata_in), zipdata_in_mapped_(zipdata_in),
+    central_dir_(central_dir), in_length_(in_length), in_offset_(in_offset),
+    p(zipdata_in + in_offset), central_dir_current_(central_dir) {
+  uncompressed_data_allocated_ = INITIAL_BUFFER_SIZE;
+  uncompressed_data_ =
+    reinterpret_cast<u1*>(malloc(uncompressed_data_allocated_));
+  errmsg[0] = 0;
+}
+
+InputZipFile::~InputZipFile() {
+  free(uncompressed_data_);
+  close(fd_in);
+}
+
+
+//
+// Implementation of OutputZipFile
+//
+int OutputZipFile::WriteEmptyFile(const char *filename) {
+  const u1* file_name = (const u1*) filename;
+  size_t file_name_length = strlen(filename);
+
+  LocalFileEntry *entry = new LocalFileEntry;
+  entry->local_header_offset = Offset(q);
+  entry->external_attr = 0;
+  entry->crc32 = 0;
+
+  // Output the ZIP local_file_header:
+  put_u4le(q, LOCAL_FILE_HEADER_SIGNATURE);
+  put_u2le(q, 10);  // extract_version
+  put_u2le(q, 0);  // general_purpose_bit_flag
+  put_u2le(q, 0);  // compression_method
+  put_u2le(q, 0);  // last_mod_file_time
+  put_u2le(q, 0);  // last_mod_file_date
+  put_u4le(q, entry->crc32);  // crc32
+  put_u4le(q, 0);  // compressed_size
+  put_u4le(q, 0);  // uncompressed_size
+  put_u2le(q, file_name_length);
+  put_u2le(q, 0);  // extra_field_length
+  put_n(q, file_name, file_name_length);
+
+  entry->file_name_length = file_name_length;
+  entry->extra_field_length = 0;
+  entry->compressed_length = 0;
+  entry->uncompressed_length = 0;
+  entry->compression_method = 0;
+  entry->extra_field = (const u1 *)"";
+  entry->file_name = (u1*) strdup((const char *) file_name);
+  entries_.push_back(entry);
+
+  return 0;
+}
+
+void OutputZipFile::WriteCentralDirectory() {
+  // central directory:
+  const u1 *central_directory_start = q;
+  for (size_t ii = 0; ii < entries_.size(); ++ii) {
+    LocalFileEntry *entry = entries_[ii];
+    put_u4le(q, CENTRAL_FILE_HEADER_SIGNATURE);
+    put_u2le(q, 0);  // version made by
+
+    put_u2le(q, ZIP_VERSION_TO_EXTRACT);  // version to extract
+    put_u2le(q, 0);  // general purpose bit flag
+    put_u2le(q, entry->compression_method);  // compression method:
+    put_u2le(q, 0);                          // last_mod_file_time
+    put_u2le(q, 0);  // last_mod_file_date
+    put_u4le(q, entry->crc32);  // crc32
+    put_u4le(q, entry->compressed_length);    // compressed_size
+    put_u4le(q, entry->uncompressed_length);  // uncompressed_size
+    put_u2le(q, entry->file_name_length);
+    put_u2le(q, entry->extra_field_length);
+
+    put_u2le(q, 0);  // file comment length
+    put_u2le(q, 0);  // disk number start
+    put_u2le(q, 0);  // internal file attributes
+    put_u4le(q, entry->external_attr);  // external file attributes
+    // relative offset of local header:
+    put_u4le(q, entry->local_header_offset);
+
+    put_n(q, entry->file_name, entry->file_name_length);
+    put_n(q, entry->extra_field, entry->extra_field_length);
+  }
+  u4 central_directory_size = q - central_directory_start;
+
+  put_u4le(q, END_OF_CENTRAL_DIR_SIGNATURE);
+  put_u2le(q, 0);  // number of this disk
+  put_u2le(q, 0);  // number of the disk with the start of the central directory
+  put_u2le(q, entries_.size());  // # central dir entries on this disk
+  put_u2le(q, entries_.size());  // total # entries in the central directory
+  put_u4le(q, central_directory_size);  // size of the central directory
+  put_u4le(q, Offset(central_directory_start));  // offset of start of central
+                                                 // directory wrt starting disk
+  put_u2le(q, 0);  // .ZIP file comment length
+}
+
+u1* OutputZipFile::WriteLocalFileHeader(const char* filename, const u4 attr) {
+  off_t file_name_length_ = strlen(filename);
+  LocalFileEntry *entry = new LocalFileEntry;
+  entry->local_header_offset = Offset(q);
+  entry->file_name_length = file_name_length_;
+  entry->file_name = new u1[file_name_length_];
+  entry->external_attr = attr;
+  memcpy(entry->file_name, filename, file_name_length_);
+  entry->extra_field_length = 0;
+  entry->extra_field = (const u1 *)"";
+
+  // Output the ZIP local_file_header:
+  put_u4le(q, LOCAL_FILE_HEADER_SIGNATURE);
+  put_u2le(q, ZIP_VERSION_TO_EXTRACT);     // version to extract
+  put_u2le(q, 0);                          // general purpose bit flag
+  u1 *header_ptr = q;
+  put_u2le(q, COMPRESSION_METHOD_STORED);  // compression method = placeholder
+  put_u2le(q, 0);                          // last_mod_file_time
+  put_u2le(q, 0);                          // last_mod_file_date
+  put_u4le(q, entry->crc32);               // crc32
+  put_u4le(q, 0);  // compressed_size = placeholder
+  put_u4le(q, 0);  // uncompressed_size = placeholder
+  put_u2le(q, entry->file_name_length);
+  put_u2le(q, entry->extra_field_length);
+
+  put_n(q, entry->file_name, entry->file_name_length);
+  put_n(q, entry->extra_field, entry->extra_field_length);
+  entries_.push_back(entry);
+
+  return header_ptr;
+}
+
+// Try to compress a file entry in memory using the deflate algorithm.
+// It will compress buf (of size length) unless the compressed size is bigger
+// than the input size. The result will overwrite the content of buf and the
+// final size is returned.
+size_t TryDeflate(u1 *buf, size_t length) {
+  u1 *outbuf = reinterpret_cast<u1 *>(malloc(length));
+  z_stream stream;
+
+  // Initialize the z_stream struct for reading from buf and writing to outbuf.
+  stream.zalloc = Z_NULL;
+  stream.zfree = Z_NULL;
+  stream.opaque = Z_NULL;
+  stream.total_in = length;
+  stream.avail_in = length;
+  stream.total_out = length;
+  stream.avail_out = length;
+  stream.next_in = buf;
+  stream.next_out = outbuf;
+
+  // Passing a negative window size to deflateInit2 omits the zlib wrapper.
+  if (deflateInit2(&stream, Z_DEFAULT_COMPRESSION, Z_DEFLATED,
+                  -MAX_WBITS, 8, Z_DEFAULT_STRATEGY) != Z_OK) {
+    // Failure to compress => return the buffer uncompressed
+    free(outbuf);
+    return length;
+  }
+
+  if (deflate(&stream, Z_FINISH) == Z_STREAM_END) {
+    // Compression succeeded and fits in outbuf; copy the result into buf.
+    length = stream.total_out;
+    memcpy(buf, outbuf, length);
+  }
+
+  deflateEnd(&stream);
+  free(outbuf);
+
+  // Return the length of the resulting buffer
+  return length;
+}
+
+size_t OutputZipFile::WriteFileSizeInLocalFileHeader(u1 *header_ptr,
+                                                     size_t out_length,
+                                                     bool compress,
+                                                     const u4 crc) {
+  size_t compressed_size = out_length;
+  if (compress) {
+    compressed_size = TryDeflate(q, out_length);
+  }
+  // compression method
+  if (compressed_size < out_length) {
+    put_u2le(header_ptr, COMPRESSION_METHOD_DEFLATED);
+  } else {
+    put_u2le(header_ptr, COMPRESSION_METHOD_STORED);
+  }
+  header_ptr += 4;
+  put_u4le(header_ptr, crc);              // crc32
+  put_u4le(header_ptr, compressed_size);  // compressed_size
+  put_u4le(header_ptr, out_length);       // uncompressed_size
+  return compressed_size;
+}
+
+int OutputZipFile::Finish() {
+  if (fd_out > 0) {
+    WriteCentralDirectory();
+    if (ftruncate(fd_out, GetSize()) < 0) {
+      return error("ftruncate(fd_out, GetSize()): %s", strerror(errno));
+    }
+    if (close(fd_out) < 0) {
+      return error("close(fd_out): %s", strerror(errno));
+    }
+    fd_out = -1;
+  }
+  return 0;
+}
+
+u1* OutputZipFile::NewFile(const char* filename, const u4 attr) {
+  header_ptr = WriteLocalFileHeader(filename, attr);
+  return q;
+}
+
+int OutputZipFile::FinishFile(size_t filelength, bool compress,
+                              bool compute_crc) {
+  u4 crc = 0;
+  if (compute_crc) {
+    crc = crc32(crc, q, filelength);
+  }
+  size_t compressed_size =
+      WriteFileSizeInLocalFileHeader(header_ptr, filelength, compress, crc);
+  entries_.back()->crc32 = crc;
+  entries_.back()->compressed_length = compressed_size;
+  entries_.back()->uncompressed_length = filelength;
+  if (compressed_size < filelength) {
+    entries_.back()->compression_method = COMPRESSION_METHOD_DEFLATED;
+  } else {
+    entries_.back()->compression_method = COMPRESSION_METHOD_STORED;
+  }
+  q += compressed_size;
+  return 0;
+}
+
+ZipBuilder* ZipBuilder::Create(const char* zip_file, u8 estimated_size) {
+  if (estimated_size > kMaximumOutputSize) {
+    fprintf(stderr,
+            "Uncompressed input jar has size %llu, "
+            "which exceeds the maximum supported output size %llu.\n"
+            "Assuming that ijar will be smaller and hoping for the best.\n",
+            estimated_size, kMaximumOutputSize);
+    estimated_size = kMaximumOutputSize;
+  }
+
+  int fd_out = open(zip_file, O_CREAT|O_RDWR|O_TRUNC, 0644);
+  if (fd_out < 0) {
+    return NULL;
+  }
+
+  // Create mmap-able sparse file
+  if (ftruncate(fd_out, estimated_size) < 0) {
+    return NULL;
+  }
+
+  // Ensure that any buffer overflow in JarStripper will result in
+  // SIGSEGV or SIGBUS by over-allocating beyond the end of the file.
+  size_t mmap_length = std::min(estimated_size + sysconf(_SC_PAGESIZE),
+                                (u8) std::numeric_limits<size_t>::max());
+
+  void *zipdata_out = mmap(NULL, mmap_length, PROT_WRITE,
+                           MAP_SHARED, fd_out, 0);
+  if (zipdata_out == MAP_FAILED) {
+    fprintf(stderr, "output_length=%llu\n", estimated_size);
+    return NULL;
+  }
+
+  return new OutputZipFile(fd_out, (u1*) zipdata_out);
+}
+
+u8 ZipBuilder::EstimateSize(char **files) {
+  struct stat statst;
+  // Digital signature field size = 6, End of central directory = 22, Total = 28
+  u8 size = 28;
+  // Count the size of all the files in the input to estimate the size of the
+  // output.
+  for (int i = 0; files[i] != NULL; i++) {
+    if (stat(files[i], &statst) != 0) {
+      fprintf(stderr, "File %s does not seem to exist.", files[i]);
+      return 0;
+    }
+    size += statst.st_size;
+    // Add sizes of Zip meta data
+    // local file header = 30 bytes
+    // data descriptor = 12 bytes
+    // central directory descriptor = 46 bytes
+    //    Total: 88 bytes
+    size += 88;
+    // The filename is stored twice (once in the central directory
+    // and once in the local file header).
+    size += strlen(files[i]) * 2;
+  }
+  return size;
+}
+
+}  // namespace devtools_ijar
diff --git a/tools/ijar/zip.h b/tools/ijar/zip.h
new file mode 100644
index 0000000..dda2c6e
--- /dev/null
+++ b/tools/ijar/zip.h
@@ -0,0 +1,173 @@
+// Copyright 2015 Google Inc. All rights reserved.
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//    http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+//
+// zip.h -- .zip (.jar) file reading/writing routines.
+//
+// This file specifies the interface to use the ZIP implementation of ijar.
+//
+
+#ifndef INCLUDED_THIRD_PARTY_IJAR_ZIP_H
+#define INCLUDED_THIRD_PARTY_IJAR_ZIP_H
+
+#include <sys/stat.h>
+
+#include "common.h"
+
+namespace devtools_ijar {
+
+// Tells whether a ZIP entry is a directory, based on its external attributes.
+// This is safer than checking zipattr_to_mode(attr) & S_IFDIR because the
+// Unix mode might not be set in DOS zip files.
+inline bool zipattr_is_dir(u4 attr) { return (attr & 0x10) != 0; }
+
+// Convert a Unix file mode to a ZIP file attribute
+inline u4 mode_to_zipattr(mode_t m) {
+  return (((u4) m) << 16) + ((m & S_IFDIR) != 0 ? 0x10 : 0);
+}
+
+// Convert a ZIP file attribute to a Unix file mode
+inline mode_t zipattr_to_mode(u4 attr) {
+  return ((mode_t) ((attr >> 16) & 0xffff));
+}
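+
+// For example (illustrative values): a regular file with mode 0100644 maps to
+// the external attribute 0x81A40000, while a directory with mode 040755 maps
+// to 0x41ED0010 (the Unix mode lives in the upper 16 bits, and bit 0x10 is
+// the DOS directory flag that zipattr_is_dir() checks).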
+
+//
+// Class interface for building ZIP files
+//
+class ZipBuilder {
+ public:
+  virtual ~ZipBuilder() {}
+
+  // Returns the text of the last error, or null if no error has occurred.
+  virtual const char* GetError() = 0;
+
+  // Add a new file to the ZIP. The file will have path "filename"
+  // and external attributes "attr". This function returns a pointer
+  // to a memory buffer to write the data of the file into. This buffer
+  // is owned by ZipBuilder and should not be freed by the caller. The
+  // file length is specified later, when the file is finished, using
+  // the FinishFile(size_t) function.
+  // On failure, returns NULL and GetError() will return a non-empty message.
+  virtual u1* NewFile(const char* filename, const u4 attr) = 0;
+
+  // Finish writing a file and specify its length. After calling this method
+  // one should not reuse the pointer given by NewFile. The file can be
+  // compressed using the deflate algorithm by setting `compress` to true.
+  // By default, the CRC32 is not computed, as Java tooling doesn't require it,
+  // but computing it can be enabled by setting `compute_crc` to true.
+  // On failure, returns -1 and GetError() will return a non-empty message.
+  virtual int FinishFile(size_t filelength,
+                         bool compress = false,
+                         bool compute_crc = false) = 0;
+
+  // Write an empty file; it is equivalent to:
+  //   NewFile(filename, 0);
+  //   FinishFile(0);
+  // On failure, returns -1 and GetError() will return a non-empty message.
+  virtual int WriteEmptyFile(const char* filename) = 0;
+
+  // Finish writing the ZIP file. This method can be called only once
+  // (subsequent calls will do nothing), and none of
+  // NewFile/FinishFile/WriteEmptyFile should be called after calling Finish.
+  // If this method has not been called by the time the object is destroyed,
+  // it will be called then. It is exposed as a convenience to get information
+  // on the final generated ZIP file.
+  // On failure, returns -1 and GetError() will return a non-empty message.
+  virtual int Finish() = 0;
+
+  // Get the current size of the ZIP file. This size will not match the final
+  // ZIP file until Finish() has been called, because Finish() writes the
+  // central directory of the ZIP file.
+  virtual size_t GetSize() = 0;
+
+  // Returns the current number of files stored in the ZIP.
+  virtual int GetNumberFiles() = 0;
+
+  // Create a new ZipBuilder that writes the file zip_file; the size of the
+  // output will be at most estimated_size. Use ZipBuilder::EstimateSize() or
+  // ZipExtractor::CalculateOutputLength() to obtain an estimated_size based
+  // on the list of files to store.
+  // On failure, returns NULL; refer to errno for the error code.
+  static ZipBuilder* Create(const char* zip_file, u8 estimated_size);
+
+  // Estimate the maximum size of a ZIP file containing the files in the
+  // null-terminated "files" array.
+  // Returns 0 on error.
+  static u8 EstimateSize(char **files);
+};
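+
+// A minimal usage sketch (illustrative only; names like "files", "data" and
+// "data_size" are placeholders, and error handling is omitted):
+//
+//   u8 estimate = ZipBuilder::EstimateSize(files);
+//   ZipBuilder* builder = ZipBuilder::Create("out.zip", estimate);
+//   u1* buffer = builder->NewFile("a/b.txt", mode_to_zipattr(0100644));
+//   memcpy(buffer, data, data_size);
+//   builder->FinishFile(data_size, /*compress=*/true, /*compute_crc=*/true);
+//   builder->Finish();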
+
+//
+// An abstract interface for processing data from a ZipExtractor. Derive from
+// this class to decide which files to process and to receive their contents.
+//
+class ZipExtractorProcessor {
+ public:
+  virtual ~ZipExtractorProcessor() {}
+
+  // Tells whether to skip or process the file "filename". "attr" is the
+  // external file attributes, which can be converted to a Unix mode using the
+  // zipattr_to_mode() function. This method should return true if the file
+  // should be processed and false if it should be skipped.
+  virtual bool Accept(const char* filename, const u4 attr) = 0;
+
+  // Process a file accepted by Accept. The file "filename" has external
+  // attributes "attr" and length "size". The file content is accessible
+  // in the buffer pointed to by "data".
+  virtual void Process(const char* filename, const u4 attr,
+                       const u1* data, const size_t size) = 0;
+};
+
+//
+// Class interface for reading ZIP files
+//
+class ZipExtractor {
+ public:
+  virtual ~ZipExtractor() {}
+
+  // Returns the text of the last error, or null if no error has occurred.
+  virtual const char* GetError() = 0;
+
+  // Process the next file; returns false if the end of the ZIP file has been
+  // reached. The processor provided to the Create method will be called
+  // if a file is encountered. If false is returned, check the return value
+  // of GetError() for potential errors.
+  virtual bool ProcessNext() = 0;
+
+  // Process all files; returns -1 on error (in which case GetError() will
+  // return a non-empty message).
+  virtual int ProcessAll();
+
+  // Reset the file pointer to the beginning.
+  virtual void Reset() = 0;
+
+  // Return the size of the ZIP file.
+  virtual size_t GetSize() = 0;
+
+  // Return the size of the resulting zip file obtained by keeping only the
+  // files accepted by the processor and storing them uncompressed. This
+  // method can be used to size a ZipBuilder for storing a subset
+  // of the input files.
+  // On error, 0 is returned and GetError() returns a non-empty message.
+  virtual u8 CalculateOutputLength() = 0;
+
+  // Create a ZipExtractor that extracts the zip file "filename" and processes
+  // it with "processor".
+  // On error, a null pointer is returned and the value of errno should be
+  // checked.
+  static ZipExtractor* Create(const char* filename,
+                              ZipExtractorProcessor *processor);
+};
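+
+// A minimal usage sketch (illustrative only): a processor that lists every
+// entry, driven by a ZipExtractor over an existing archive "in.zip".
+//
+//   class ListProcessor : public ZipExtractorProcessor {
+//    public:
+//     virtual bool Accept(const char* filename, const u4 attr) { return true; }
+//     virtual void Process(const char* filename, const u4 attr,
+//                          const u1* data, const size_t size) {
+//       printf("%s: %zu bytes\n", filename, size);
+//     }
+//   };
+//
+//   ListProcessor processor;
+//   ZipExtractor* extractor = ZipExtractor::Create("in.zip", &processor);
+//   if (extractor != NULL && extractor->ProcessAll() == 0) { /* success */ }
+//   delete extractor;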
+
+}  // namespace devtools_ijar
+
+#endif  // INCLUDED_THIRD_PARTY_IJAR_ZIP_H
diff --git a/tools/ijar/zip_main.cc b/tools/ijar/zip_main.cc
new file mode 100644
index 0000000..3f4a50c
--- /dev/null
+++ b/tools/ijar/zip_main.cc
@@ -0,0 +1,312 @@
+// Copyright 2015 Google Inc. All rights reserved.
+//
+// Author: Alan Donovan <adonovan@google.com>
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//    http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+//
+// Zip / Unzip file using ijar zip implementation.
+//
+// Note that this Zip implementation intentionally doesn't compute CRC-32,
+// because that computation is unnecessary for jars (the Java tooling doesn't
+// require it). The CRC-32 of all files in the zip file will be set to 0.
+//
+
+#include <stdio.h>
+#include <string.h>
+#include <stdlib.h>
+#include <limits.h>
+#include <fcntl.h>
+#include <unistd.h>
+#include <sys/mman.h>
+#include <errno.h>
+#include <memory>
+
+#include "zip.h"
+
+namespace devtools_ijar {
+
+#define SYSCALL(expr)  do { \
+                         if ((expr) < 0) { \
+                           perror(#expr); \
+                           abort(); \
+                         } \
+                       } while (0)
+
+//
+// A ZipExtractorProcessor that extracts all files in the ZIP file.
+//
+class UnzipProcessor : public ZipExtractorProcessor {
+ public:
+  // Create a processor that will extract the files into output_root
+  // if "extract" is true, and will print the list of files and
+  // their Unix modes if "verbose" is true.
+  UnzipProcessor(const char *output_root, bool verbose, bool extract)
+    : output_root_(output_root), verbose_(verbose), extract_(extract) {}
+  virtual ~UnzipProcessor() {}
+
+  virtual void Process(const char* filename, const u4 attr,
+                       const u1* data, const size_t size);
+  virtual bool Accept(const char* filename, const u4 attr) {
+    return true;
+  }
+
+ private:
+  const char *output_root_;
+  const bool verbose_;
+  const bool extract_;
+};
+
+// Concatenate two paths, path1 and path2, using / as the directory separator,
+// and put the result in "out". "size" specifies the size of the output buffer.
+void concat_path(char* out, const size_t size,
+                 const char *path1, const char *path2) {
+  int len1 = strlen(path1);
+  size_t l = len1;
+  strncpy(out, path1, size - 1);
+  out[size-1] = 0;
+  if (l < size - 1 && path1[len1] != '/' && path2[0] != '/') {
+    out[l] = '/';
+    l++;
+    out[l] = 0;
+  }
+  if (l < size - 1) {
+    strncat(out, path2, size - 1 - l);
+  }
+}
+
+// Do a recursive mkdir of all folders of path except the last path
+// segment (if path ends with a / then the last path segment is empty).
+// All folders are created using "mode" for creation mode.
+void mkdirs(const char *path, mode_t mode) {
+  char path_[PATH_MAX];
+  struct stat statst;
+  strncpy(path_, path, PATH_MAX);
+  path_[PATH_MAX-1] = 0;
+  char *pointer = path_;
+  while ((pointer = strchr(pointer, '/')) != NULL) {
+    if (path_ != pointer) {  // skip leading slash
+      *pointer = 0;
+      if (stat(path_, &statst) != 0) {
+        if (mkdir(path_, mode) < 0) {
+          fprintf(stderr, "Cannot create folder %s: %s\n",
+                  path_, strerror(errno));
+          abort();
+        }
+      }
+      *pointer = '/';
+    }
+    pointer++;
+  }
+}
+
+void UnzipProcessor::Process(const char* filename, const u4 attr,
+                             const u1* data, const size_t size) {
+  mode_t mode = zipattr_to_mode(attr);
+  mode_t perm = mode & 0777;
+  bool isdir = (mode & S_IFDIR) != 0;
+  if (attr == 0) {
+    // Fallback when the external attribute is not set.
+    isdir = filename[strlen(filename)-1] == '/';
+    perm = 0777;
+  }
+  if (verbose_) {
+    printf("%c %o %s\n", isdir ? 'd' : 'f', perm, filename);
+  }
+  if (extract_) {
+    char path[PATH_MAX];
+    int fd;
+    concat_path(path, PATH_MAX, output_root_, filename);
+    mkdirs(path, perm);
+    if (!isdir) {
+      fd = open(path, O_CREAT | O_WRONLY, perm);
+      if (fd < 0) {
+        fprintf(stderr, "Cannot open file %s for writing: %s\n",
+                path, strerror(errno));
+        abort();
+      }
+      SYSCALL(write(fd, data, size));
+      SYSCALL(close(fd));
+    }
+  }
+}
+
+// Get the basename of path and store it in output. output_size
+// is the size of the output buffer.
+void basename(const char *path, char *output, size_t output_size) {
+  const char *pointer = strrchr(path, '/');
+  if (pointer == NULL) {
+    pointer = path;
+  } else {
+    pointer++;  // Skip past the final '/' separator.
+  }
+  strncpy(output, pointer, output_size);
+  output[output_size-1] = 0;
+}
+
+
+// Execute the extraction (or just a listing if only 'v' is provided)
+int extract(char *zipfile, bool verbose, bool extract) {
+  char output_root[PATH_MAX];
+  getcwd(output_root, PATH_MAX);
+
+  UnzipProcessor processor(output_root, verbose, extract);
+  std::unique_ptr<ZipExtractor> extractor(ZipExtractor::Create(zipfile,
+                                                               &processor));
+  if (extractor.get() == NULL) {
+    fprintf(stderr, "Unable to open zip file %s: %s.\n", zipfile,
+            strerror(errno));
+    return -1;
+  }
+
+  if (extractor->ProcessAll() < 0) {
+    fprintf(stderr, "%s.\n", extractor->GetError());
+    return -1;
+  }
+  return 0;
+}
+
+// Execute the create operation
+int create(char *zipfile, char **files, bool flatten, bool verbose,
+           bool compress) {
+  struct stat statst;
+  u8 size = ZipBuilder::EstimateSize(files);
+  if (size == 0) {
+    return -1;
+  }
+  std::unique_ptr<ZipBuilder> builder(ZipBuilder::Create(zipfile, size));
+  if (builder.get() == NULL) {
+    fprintf(stderr, "Unable to create zip file %s: %s.\n",
+            zipfile, strerror(errno));
+    return -1;
+  }
+  for (int i = 0; files[i] != NULL; i++) {
+    stat(files[i], &statst);
+    char path[PATH_MAX];
+    bool isdir = (statst.st_mode & S_IFDIR) != 0;
+
+    if (flatten && isdir) {
+      continue;
+    }
+
+    // Compute the path, flattening it if requested
+    if (flatten) {
+      basename(files[i], path, PATH_MAX);
+    } else {
+      strncpy(path, files[i], PATH_MAX);
+      path[PATH_MAX-1] = 0;
+      size_t len = strlen(path);
+      if (isdir && len < PATH_MAX - 1) {
+        // Add the trailing slash for folders
+        path[len] = '/';
+        path[len+1] = 0;
+      }
+    }
+
+    if (verbose) {
+      mode_t perm = statst.st_mode & 0777;
+      printf("%c %o %s\n", isdir ? 'd' : 'f', perm, path);
+    }
+
+    u1 *buffer = builder->NewFile(path, mode_to_zipattr(statst.st_mode));
+    if (isdir || statst.st_size == 0) {
+      builder->FinishFile(0);
+    } else {
+      // mmap the input file and memcpy
+      int fd = open(files[i], O_RDONLY);
+      if (fd < 0) {
+        fprintf(stderr, "Can't open file %s for reading: %s.\n",
+                files[i], strerror(errno));
+        return -1;
+      }
+      void *data = mmap(NULL, statst.st_size, PROT_READ, MAP_PRIVATE, fd, 0);
+      if (data == MAP_FAILED) {
+        fprintf(stderr, "Can't mmap file %s for reading: %s.\n",
+                files[i], strerror(errno));
+        return -1;
+      }
+      memcpy(buffer, data, statst.st_size);
+      munmap(data, statst.st_size);
+      builder->FinishFile(statst.st_size, compress, true);
+    }
+  }
+  if (builder->Finish() < 0) {
+    fprintf(stderr, "%s\n", builder->GetError());
+    return -1;
+  }
+  return 0;
+}
+
+}  // namespace devtools_ijar
+
+//
+// main method
+//
+static void usage(char *progname) {
+  fprintf(stderr, "Usage: %s [vxc[fC]] x.zip [file1...filen]\n", progname);
+  fprintf(stderr, "  v verbose - list all files in x.zip\n");
+  fprintf(stderr, "  x extract - extract files from x.zip into the current directory\n");
+  fprintf(stderr, "  c create  - add files to x.zip\n");
+  fprintf(stderr, "  f flatten - flatten file paths; use with the create operation\n");
+  fprintf(stderr,
+          "  C compress - compress files when using the create operation\n");
+  fprintf(stderr, "x and c cannot be used in the same command line.\n");
+  exit(1);
+}
+
+int main(int argc, char **argv) {
+  bool extract = false;
+  bool verbose = false;
+  bool create = false;
+  bool compress = false;
+  bool flatten = false;
+
+  if (argc < 3) {
+    usage(argv[0]);
+  }
+
+  for (int i = 0; argv[1][i] != 0; i++) {
+    switch (argv[1][i]) {
+    case 'x':
+      extract = true;
+      break;
+    case 'v':
+      verbose = true;
+      break;
+    case 'c':
+      create = true;
+      break;
+    case 'f':
+      flatten = true;
+      break;
+    case 'C':
+      compress = true;
+      break;
+    default:
+      usage(argv[0]);
+    }
+  }
+  if (create) {
+    if (extract) {
+      usage(argv[0]);
+    }
+    // Create a zip
+    return devtools_ijar::create(argv[2], argv + 3, flatten, verbose, compress);
+  } else {
+    if (flatten) {
+      usage(argv[0]);
+    }
+    // Extraction / list mode
+    return devtools_ijar::extract(argv[2], verbose, extract);
+  }
+}
diff --git a/tools/makeparallel/.gitignore b/tools/makeparallel/.gitignore
new file mode 100644
index 0000000..a7d6181
--- /dev/null
+++ b/tools/makeparallel/.gitignore
@@ -0,0 +1,4 @@
+makeparallel
+*.o
+*.d
+test.out
diff --git a/tools/makeparallel/Android.bp b/tools/makeparallel/Android.bp
new file mode 100644
index 0000000..cb81817
--- /dev/null
+++ b/tools/makeparallel/Android.bp
@@ -0,0 +1,26 @@
+// Copyright 2016 Google Inc. All rights reserved
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+cc_binary_host {
+    name: "makeparallel",
+    srcs: [
+        "makeparallel.cpp",
+    ],
+    cflags: ["-Wall", "-Werror"],
+    target: {
+        linux: {
+            host_ldlibs: ["-lrt", "-lpthread"],
+        },
+    },
+}
diff --git a/tools/makeparallel/Makefile b/tools/makeparallel/Makefile
new file mode 100644
index 0000000..4e12b10
--- /dev/null
+++ b/tools/makeparallel/Makefile
@@ -0,0 +1,94 @@
+# Copyright 2015 Google Inc. All rights reserved
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# Find source file location from path to this Makefile
+MAKEPARALLEL_SRC_PATH := $(patsubst %/,%,$(dir $(lastword $(MAKEFILE_LIST))))
+ifndef MAKEPARALLEL_SRC_PATH
+  MAKEPARALLEL_SRC_PATH := .
+endif
+
+# Set defaults if they weren't set by the including Makefile
+MAKEPARALLEL_CXX ?= $(CXX)
+MAKEPARALLEL_LD ?= $(CXX)
+MAKEPARALLEL_INTERMEDIATES_PATH ?= .
+MAKEPARALLEL_BIN_PATH ?= .
+
+MAKEPARALLEL_CXX_SRCS := \
+	makeparallel.cpp
+
+MAKEPARALLEL_CXXFLAGS := -Wall -Werror -MMD -MP
+
+MAKEPARALLEL_CXX_SRCS := $(addprefix $(MAKEPARALLEL_SRC_PATH)/,\
+	$(MAKEPARALLEL_CXX_SRCS))
+
+MAKEPARALLEL_CXX_OBJS := $(patsubst $(MAKEPARALLEL_SRC_PATH)/%.cpp,$(MAKEPARALLEL_INTERMEDIATES_PATH)/%.o,$(MAKEPARALLEL_CXX_SRCS))
+
+MAKEPARALLEL := $(MAKEPARALLEL_BIN_PATH)/makeparallel
+
+ifeq ($(shell uname),Linux)
+MAKEPARALLEL_LIBS := -lrt -lpthread
+endif
+
+# Rule to build makeparallel into MAKEPARALLEL_BIN_PATH
+$(MAKEPARALLEL): $(MAKEPARALLEL_CXX_OBJS)
+	@mkdir -p $(dir $@)
+	$(MAKEPARALLEL_LD) -std=c++11 $(MAKEPARALLEL_CXXFLAGS) -o $@ $^ $(MAKEPARALLEL_LIBS)
+
+# Rule to build source files into object files in MAKEPARALLEL_INTERMEDIATES_PATH
+$(MAKEPARALLEL_CXX_OBJS): $(MAKEPARALLEL_INTERMEDIATES_PATH)/%.o: $(MAKEPARALLEL_SRC_PATH)/%.cpp
+	@mkdir -p $(dir $@)
+	$(MAKEPARALLEL_CXX) -c -std=c++11 $(MAKEPARALLEL_CXXFLAGS) -o $@ $<
+
+makeparallel_clean:
+	rm -rf $(MAKEPARALLEL)
+	rm -rf $(MAKEPARALLEL_INTERMEDIATES_PATH)/*.o
+	rm -rf $(MAKEPARALLEL_INTERMEDIATES_PATH)/*.d
+
+.PHONY: makeparallel_clean
+
+-include $(MAKEPARALLEL_INTERMEDIATES_PATH)/*.d
+
+.PHONY: makeparallel_test
+MAKEPARALLEL_TEST := MAKEFLAGS= MAKELEVEL= MAKEPARALLEL=$(MAKEPARALLEL) $(MAKE) -f Makefile.test test
+MAKEPARALLEL_NINJA_TEST := MAKEFLAGS= MAKELEVEL= MAKEPARALLEL="$(MAKEPARALLEL) --ninja" $(MAKE) -f Makefile.test test
+makeparallel_test: $(MAKEPARALLEL)
+	@EXPECTED="-j1234" $(MAKEPARALLEL_TEST) -j1234
+	@EXPECTED="-j123"  $(MAKEPARALLEL_TEST) -j123
+	@EXPECTED="-j1"    $(MAKEPARALLEL_TEST) -j1
+	@EXPECTED="-j1"    $(MAKEPARALLEL_TEST)
+
+	@EXPECTED="-j1234" $(MAKEPARALLEL_NINJA_TEST) -j1234
+	@EXPECTED="-j123"  $(MAKEPARALLEL_NINJA_TEST) -j123
+	@EXPECTED="-j1"    $(MAKEPARALLEL_NINJA_TEST) -j1
+	@EXPECTED="-j1"    $(MAKEPARALLEL_NINJA_TEST)
+	@EXPECTED=""       $(MAKEPARALLEL_NINJA_TEST) -j
+	@EXPECTED=""       $(MAKEPARALLEL_NINJA_TEST) -j -l
+
+	@EXPECTED="-j1234" $(MAKEPARALLEL_TEST) --no-print-directory -j1234
+	@EXPECTED="-j1234" $(MAKEPARALLEL_TEST) --no-print-directory -k -j1234
+	@EXPECTED="-j1234" $(MAKEPARALLEL_TEST) -k -j1234
+	@EXPECTED="-j1234" $(MAKEPARALLEL_TEST) -j1234 -k
+	@EXPECTED="-j1234" $(MAKEPARALLEL_TEST) -kt -j1234
+
+	@EXPECTED="-j1234"     $(MAKEPARALLEL_NINJA_TEST) --no-print-directory -j1234
+	@EXPECTED="-j1234 -k0" $(MAKEPARALLEL_NINJA_TEST) --no-print-directory -k -j1234
+	@EXPECTED="-j1234 -k0" $(MAKEPARALLEL_NINJA_TEST) -k -j1234
+	@EXPECTED="-j1234 -k0" $(MAKEPARALLEL_NINJA_TEST) -j1234 -k
+	@EXPECTED="-j1234 -k0" $(MAKEPARALLEL_NINJA_TEST) -kt -j1234
+
+	@EXPECTED="-j1"    $(MAKEPARALLEL_TEST) A=-j1234
+	@EXPECTED="-j1"    $(MAKEPARALLEL_TEST) A\ -j1234=-j1234
+	@EXPECTED="-j1234" $(MAKEPARALLEL_TEST) A\ -j1234=-j1234 -j1234
+
+	@EXPECTED="-j1234 args" ARGS="args" $(MAKEPARALLEL_TEST) -j1234
diff --git a/tools/makeparallel/Makefile.test b/tools/makeparallel/Makefile.test
new file mode 100644
index 0000000..cf53684
--- /dev/null
+++ b/tools/makeparallel/Makefile.test
@@ -0,0 +1,12 @@
+MAKEPARALLEL ?= ./makeparallel
+
+.PHONY: test
+test:
+	@+echo MAKEFLAGS=$${MAKEFLAGS};              \
+	result=$$($(MAKEPARALLEL) echo $(ARGS));     \
+	echo result: $${result};                     \
+	if [ "$${result}" = "$(EXPECTED)" ]; then    \
+	  echo SUCCESS && echo;                      \
+	else                                         \
+	  echo FAILED expected $(EXPECTED) && false; \
+	fi
diff --git a/tools/makeparallel/README.md b/tools/makeparallel/README.md
new file mode 100644
index 0000000..2e5fbf9
--- /dev/null
+++ b/tools/makeparallel/README.md
@@ -0,0 +1,54 @@
+<!---
+Copyright (C) 2015 The Android Open Source Project
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+     http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+-->
+
+makeparallel
+============
+makeparallel communicates with the [GNU make jobserver](http://make.mad-scientist.net/papers/jobserver-implementation/)
+in order to claim all available jobs, and then passes the number of jobs
+claimed to a subprocess with `-j<jobs>`.
+
+The number of available jobs is determined by reading tokens from the jobserver
+until a read would block.  If the makeparallel rule is the only one running,
+the number of jobs will be the total size of the jobserver pool, i.e. the value
+passed to make with `-j`.  Any jobs running in parallel with the makeparallel
+rule will reduce the measured value, and thus reduce the parallelism available
+to the subprocess.
+
+To run a multi-thread or multi-process binary inside GNU make using
+makeparallel, add
+```Makefile
+	+makeparallel subprocess arguments
+```
+to a rule.  For example, to wrap ninja in make, use something like:
+```Makefile
+	+makeparallel ninja -f build.ninja
+```
+
+To determine the size of the jobserver pool, add
+```Makefile
+	+makeparallel echo > make.jobs
+```
+to a rule that is guaranteed to run alone (i.e. all other rules are either
+dependencies of the makeparallel rule, or they depend on the makeparallel
+rule).  The output file will contain the `-j<num>` flag passed to the parent
+make process, or `-j1` if no flag was found.  Since GNU make will run
+makeparallel during the execution phase, after all variables have been
+set and evaluated, it is not possible to get the output of makeparallel
+into a make variable.  Instead, use a shell substitution to read the output
+file directly in a recipe.  For example:
+```Makefile
+	echo Make was started with $$(cat make.jobs)
+```
diff --git a/tools/makeparallel/makeparallel.cpp b/tools/makeparallel/makeparallel.cpp
new file mode 100644
index 0000000..3c39846
--- /dev/null
+++ b/tools/makeparallel/makeparallel.cpp
@@ -0,0 +1,369 @@
+// Copyright (C) 2015 The Android Open Source Project
+//
+// Licensed under the Apache License, Version 2.0 (the "License");
+// you may not use this file except in compliance with the License.
+// You may obtain a copy of the License at
+//
+//      http://www.apache.org/licenses/LICENSE-2.0
+//
+// Unless required by applicable law or agreed to in writing, software
+// distributed under the License is distributed on an "AS IS" BASIS,
+// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+// See the License for the specific language governing permissions and
+// limitations under the License.
+
+// makeparallel communicates with the GNU make jobserver
+// (http://make.mad-scientist.net/papers/jobserver-implementation/)
+// in order to claim all available jobs, and then passes the number of jobs
+// claimed to a subprocess with -j<jobs>.
+
+#include <errno.h>
+#include <fcntl.h>
+#include <getopt.h>
+#include <poll.h>
+#include <signal.h>
+#include <stdio.h>
+#include <stdlib.h>
+#include <string.h>
+#include <unistd.h>
+#include <sys/time.h>
+#include <sys/types.h>
+#include <sys/wait.h>
+
+#include <string>
+#include <vector>
+
+#ifdef __linux__
+#include <error.h>
+#endif
+
+#ifdef __APPLE__
+#include <err.h>
+#define error(code, eval, fmt, ...) errc(eval, code, fmt, ##__VA_ARGS__)
+// Darwin does not interrupt syscalls by default.
+#define TEMP_FAILURE_RETRY(exp) (exp)
+#endif
+
+// Throw an error if fd is not valid.
+static void CheckFd(int fd) {
+  int ret = fcntl(fd, F_GETFD);
+  if (ret < 0) {
+    if (errno == EBADF) {
+      error(errno, 0, "no jobserver pipe, prefix recipe command with '+'");
+    } else {
+      error(errno, errno, "fcntl failed");
+    }
+  }
+}
+
+// Extract flags from MAKEFLAGS that need to be propagated to the subprocess
+static std::vector<std::string> ReadMakeflags() {
+  std::vector<std::string> args;
+
+  const char* makeflags_env = getenv("MAKEFLAGS");
+  if (makeflags_env == nullptr) {
+    return args;
+  }
+
+  // The MAKEFLAGS format is pretty useless.  The first argument might be empty
+  // (starts with a leading space), or it might be a set of one-character flags
+  // merged together with no leading space, or it might be a variable
+  // definition.
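+  //
+  // For example (illustrative values): a parallel sub-make typically sees
+  // something like MAKEFLAGS=" --jobserver-fds=3,4 -j", a plain "make -k"
+  // sees MAKEFLAGS="k", and "make FOO=bar" appends the definition "FOO=bar".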
+
+  std::string makeflags = makeflags_env;
+
+  // Split makeflags into individual args on spaces.  Multiple spaces are
+  // elided, but an initial space will result in a blank arg.
+  size_t base = 0;
+  size_t found;
+  do {
+    found = makeflags.find_first_of(" ", base);
+    args.push_back(makeflags.substr(base, found - base));
+    base = found + 1;
+  } while (found != makeflags.npos);
+
+  // Drop the first argument if it is empty
+  while (args.size() > 0 && args[0].size() == 0) {
+	  args.erase(args.begin());
+  }
+
+  // Prepend a - to the first argument if it does not have one and is not a
+  // variable definition
+  if (args.size() > 0 && args[0][0] != '-') {
+    if (args[0].find('=') == makeflags.npos) {
+      args[0] = '-' + args[0];
+    }
+  }
+
+  return args;
+}
+
+static bool ParseMakeflags(std::vector<std::string>& args,
+    int* in_fd, int* out_fd, bool* parallel, bool* keep_going) {
+
+  std::vector<char*> getopt_argv;
+  // getopt starts reading at argv[1]
+  getopt_argv.reserve(args.size() + 1);
+  getopt_argv.push_back(strdup(""));
+  for (std::string& v : args) {
+    getopt_argv.push_back(strdup(v.c_str()));
+  }
+
+  opterr = 0;
+  optind = 1;
+  while (1) {
+    const static option longopts[] = {
+        {"jobserver-fds", required_argument, 0, 0},
+        {0, 0, 0, 0},
+    };
+    int longopt_index = 0;
+
+    int c = getopt_long(getopt_argv.size(), getopt_argv.data(), "kj",
+        longopts, &longopt_index);
+
+    if (c == -1) {
+      break;
+    }
+
+    switch (c) {
+    case 0:
+      switch (longopt_index) {
+      case 0:
+      {
+        // jobserver-fds
+        if (sscanf(optarg, "%d,%d", in_fd, out_fd) != 2) {
+          error(EXIT_FAILURE, 0, "incorrect format for --jobserver-fds: %s", optarg);
+        }
+        // TODO: propagate in_fd, out_fd
+        break;
+      }
+      default:
+        abort();
+      }
+      break;
+    case 'j':
+      *parallel = true;
+      break;
+    case 'k':
+      *keep_going = true;
+      break;
+    case '?':
+      // ignore unknown arguments
+      break;
+    default:
+      abort();
+    }
+  }
+
+  for (char *v : getopt_argv) {
+    free(v);
+  }
+
+  return true;
+}
+
+// Read a single byte from fd, with timeout in milliseconds.  Returns true if
+// a byte was read, false on timeout.  Throws away the read value.
+// Non-reentrant, uses timer and signal handler global state, plus static
+// variable to communicate with signal handler.
+//
+// Uses a SIGALRM timer to fire a signal after timeout_ms that will interrupt
+// the read syscall if it hasn't yet completed.  If the timer fires before the
+// read starts, the read could block forever, so read from a dup'd fd and close
+// it from the signal handler, which will cause the read to return EBADF if it
+// occurs after the signal.
+// The dup/read/close combo is very similar to the approach described at
+// http://make.mad-scientist.net/papers/jobserver-implementation/
+// to avoid a deadlock between SIGCHLD and read.
+static bool ReadByteTimeout(int fd, int timeout_ms) {
+  // global variable to communicate with the signal handler
+  static int dup_fd = -1;
+
+  // dup the fd so the signal handler can close it without losing the real one
+  dup_fd = dup(fd);
+  if (dup_fd < 0) {
+    error(errno, errno, "dup failed");
+  }
+
+  // set up a signal handler that closes dup_fd on SIGALRM
+  struct sigaction action = {};
+  action.sa_flags = SA_SIGINFO;
+  action.sa_sigaction = [](int, siginfo_t*, void*) {
+    close(dup_fd);
+  };
+  struct sigaction oldaction = {};
+  int ret = sigaction(SIGALRM, &action, &oldaction);
+  if (ret < 0) {
+    error(errno, errno, "sigaction failed");
+  }
+
+  // queue a SIGALRM after timeout_ms
+  const struct itimerval timeout = {{}, {0, timeout_ms * 1000}};
+  ret = setitimer(ITIMER_REAL, &timeout, NULL);
+  if (ret < 0) {
+    error(errno, errno, "setitimer failed");
+  }
+
+  // start the blocking read
+  char buf;
+  int read_ret = read(dup_fd, &buf, 1);
+  int read_errno = errno;
+
+  // cancel the alarm in case it hasn't fired yet
+  const struct itimerval cancel = {};
+  ret = setitimer(ITIMER_REAL, &cancel, NULL);
+  if (ret < 0) {
+    error(errno, errno, "reset setitimer failed");
+  }
+
+  // remove the signal handler
+  ret = sigaction(SIGALRM, &oldaction, NULL);
+  if (ret < 0) {
+    error(errno, errno, "reset sigaction failed");
+  }
+
+  // clean up the dup'd fd in case the signal never fired
+  close(dup_fd);
+  dup_fd = -1;
+
+  if (read_ret == 0) {
+    error(EXIT_FAILURE, 0, "EOF on jobserver pipe");
+  } else if (read_ret > 0) {
+    return true;
+  } else if (read_errno == EINTR || read_errno == EBADF) {
+    return false;
+  } else {
+    error(read_errno, read_errno, "read failed");
+  }
+  abort();
+}
+
+// Measure the size of the jobserver pool by reading from in_fd until it blocks
+static int GetJobserverTokens(int in_fd) {
+  int tokens = 0;
+  pollfd pollfds[] = {{in_fd, POLLIN, 0}};
+  int ret;
+  while ((ret = TEMP_FAILURE_RETRY(poll(pollfds, 1, 0))) != 0) {
+    if (ret < 0) {
+      error(errno, errno, "poll failed");
+    } else if (pollfds[0].revents != POLLIN) {
+      error(EXIT_FAILURE, 0, "unexpected event %d\n", pollfds[0].revents);
+    }
+
+    // There is probably a job token in the jobserver pipe.  There is a chance
+    // another process reads it first, which would cause a blocking read to
+    // block forever (or until another process puts a token back in the pipe).
+    // The file descriptor can't be set to O_NONBLOCK as that would affect
+    // all users of the pipe, including the parent make process.
+    // ReadByteTimeout emulates a non-blocking read on a pipe that is not
+    // O_NONBLOCK, using a SIGALRM that fires after a short timeout.
+    bool got_token = ReadByteTimeout(in_fd, 10);
+    if (!got_token) {
+      // No more tokens
+      break;
+    } else {
+      tokens++;
+    }
+  }
+
+  // This process implicitly gets a token, so pool size is measured size + 1
+  return tokens;
+}
+
+// Return tokens to the jobserver pool.
+static void PutJobserverTokens(int out_fd, int tokens) {
+  // Return all the tokens to the pipe
+  char buf = '+';
+  for (int i = 0; i < tokens; i++) {
+    int ret = TEMP_FAILURE_RETRY(write(out_fd, &buf, 1));
+    if (ret < 0) {
+      error(errno, errno, "write failed");
+    } else if (ret == 0) {
+      error(EXIT_FAILURE, 0, "EOF on jobserver pipe");
+    }
+  }
+}
+
+int main(int argc, char* argv[]) {
+  int in_fd = -1;
+  int out_fd = -1;
+  bool parallel = false;
+  bool keep_going = false;
+  bool ninja = false;
+  int tokens = 0;
+
+  if (argc > 1 && strcmp(argv[1], "--ninja") == 0) {
+    ninja = true;
+    argv++;
+    argc--;
+  }
+
+  if (argc < 2) {
+    error(EXIT_FAILURE, 0, "expected command to run");
+  }
+
+  const char* path = argv[1];
+  std::vector<char*> args({argv[1]});
+
+  std::vector<std::string> makeflags = ReadMakeflags();
+  if (ParseMakeflags(makeflags, &in_fd, &out_fd, &parallel, &keep_going)) {
+    if (in_fd >= 0 && out_fd >= 0) {
+      CheckFd(in_fd);
+      CheckFd(out_fd);
+      fcntl(in_fd, F_SETFD, FD_CLOEXEC);
+      fcntl(out_fd, F_SETFD, FD_CLOEXEC);
+      tokens = GetJobserverTokens(in_fd);
+    }
+  }
+
+  std::string jarg = "-j" + std::to_string(tokens + 1);
+
+  if (ninja) {
+    if (!parallel) {
+      // ninja is parallel by default, pass -j1 to disable parallelism if make wasn't parallel
+      args.push_back(strdup("-j1"));
+    } else if (tokens > 0) {
+      args.push_back(strdup(jarg.c_str()));
+    }
+    if (keep_going) {
+      args.push_back(strdup("-k0"));
+    }
+  } else {
+    args.push_back(strdup(jarg.c_str()));
+  }
+
+  args.insert(args.end(), &argv[2], &argv[argc]);
+
+  args.push_back(nullptr);
+
+  pid_t pid = fork();
+  if (pid < 0) {
+    error(errno, errno, "fork failed");
+  } else if (pid == 0) {
+    // child
+    unsetenv("MAKEFLAGS");
+    unsetenv("MAKELEVEL");
+    int ret = execvp(path, args.data());
+    if (ret < 0) {
+      error(errno, errno, "exec %s failed", path);
+    }
+    abort();
+  }
+
+  // parent
+  siginfo_t status = {};
+  int exit_status = 0;
+  int ret = waitid(P_PID, pid, &status, WEXITED);
+  if (ret < 0) {
+    error(errno, errno, "waitid failed");
+  } else if (status.si_code == CLD_EXITED) {
+    exit_status = status.si_status;
+  } else {
+    exit_status = -(status.si_status);
+  }
+
+  if (tokens > 0) {
+    PutJobserverTokens(out_fd, tokens);
+  }
+  exit(exit_status);
+}
diff --git a/tools/check_prereq/Android.mk b/tools/normalize_path.py
old mode 100644
new mode 100755
similarity index 61%
copy from tools/check_prereq/Android.mk
copy to tools/normalize_path.py
index 4329aff..6c4d548
--- a/tools/check_prereq/Android.mk
+++ b/tools/normalize_path.py
@@ -1,4 +1,6 @@
-# Copyright (C) 2009 The Android Open Source Project
+#!/usr/bin/env python
+#
+# Copyright (C) 2015 The Android Open Source Project
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
@@ -11,15 +13,17 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
+"""
+Normalize and output paths from arguments, or stdin if no arguments provided.
+"""
 
-LOCAL_PATH := $(call my-dir)
-include $(CLEAR_VARS)
+import os.path
+import sys
 
-LOCAL_SRC_FILES := check_prereq.c
-LOCAL_MODULE := check_prereq
-LOCAL_FORCE_STATIC_EXECUTABLE := true
-LOCAL_MODULE_TAGS := eng
-LOCAL_C_INCLUDES +=
-LOCAL_STATIC_LIBRARIES += libcutils libc
+if len(sys.argv) > 1:
+  for p in sys.argv[1:]:
+    print os.path.normpath(p)
+  sys.exit(0)
 
-include $(BUILD_EXECUTABLE)
+for line in sys.stdin:
+  print os.path.normpath(line.strip())
diff --git a/tools/post_process_props.py b/tools/post_process_props.py
index fa6106f..9dcaadf 100755
--- a/tools/post_process_props.py
+++ b/tools/post_process_props.py
@@ -37,11 +37,12 @@
   # (this is for userdebug builds)
   if prop.get("ro.debuggable") == "1":
     val = prop.get("persist.sys.usb.config")
-    if val == "":
-      val = "adb"
-    else:
-      val = val + ",adb"
-    prop.put("persist.sys.usb.config", val)
+    if "adb" not in val:
+      if val == "":
+        val = "adb"
+      else:
+        val = val + ",adb"
+      prop.put("persist.sys.usb.config", val)
   # UsbDeviceManager expects a value here.  If it doesn't get it, it will
   # default to "adb". That might not the right policy there, but it's better
   # to be explicit.
diff --git a/tools/releasetools/add_img_to_target_files.py b/tools/releasetools/add_img_to_target_files.py
index f2bf1e1..7cb9072 100755
--- a/tools/releasetools/add_img_to_target_files.py
+++ b/tools/releasetools/add_img_to_target_files.py
@@ -31,6 +31,7 @@
 import datetime
 import errno
 import os
+import shutil
 import tempfile
 import zipfile
 
@@ -139,21 +140,11 @@
   if not os.path.exists(fs_config):
     fs_config = None
 
-  fc_config = os.path.join(input_dir, "BOOT/RAMDISK/file_contexts")
-  if not os.path.exists(fc_config):
-    fc_config = None
-
   # Override values loaded from info_dict.
   if fs_config:
     image_props["fs_config"] = fs_config
-  if fc_config:
-    image_props["selinux_fc"] = fc_config
   if block_list:
     image_props["block_list"] = block_list
-  if image_props.get("system_root_image") == "true":
-    image_props["ramdisk_dir"] = os.path.join(input_dir, "BOOT/RAMDISK")
-    image_props["ramdisk_fs_config"] = os.path.join(
-        input_dir, "META/boot_filesystem_config.txt")
 
   succ = build_image.BuildImage(os.path.join(input_dir, what),
                                 image_props, img)
@@ -163,15 +154,20 @@
 
 
 def AddUserdata(output_zip, prefix="IMAGES/"):
-  """Create an empty userdata image and store it in output_zip."""
+  """Create a userdata image and store it in output_zip.
+
+  In most cases we just create and store an empty userdata.img;
+  but the invoker can also request a userdata.img populated with real
+  data from the target files, by setting "userdata_img_with_data=true"
+  in OPTIONS.info_dict.
+  """
 
   prebuilt_path = os.path.join(OPTIONS.input_tmp, prefix, "userdata.img")
   if os.path.exists(prebuilt_path):
     print "userdata.img already exists in %s, no need to rebuild..." % (prefix,)
     return
 
-  image_props = build_image.ImagePropFromGlobalDict(OPTIONS.info_dict,
-                                                    "data")
+  image_props = build_image.ImagePropFromGlobalDict(OPTIONS.info_dict, "data")
   # We only allow yaffs to have a 0/missing partition_size.
   # Extfs, f2fs must have a size. Skip userdata.img if no size.
   if (not image_props.get("fs_type", "").startswith("yaffs") and
@@ -188,10 +184,19 @@
 
   # The name of the directory it is making an image out of matters to
   # mkyaffs2image.  So we create a temp dir, and within it we create an
-  # empty dir named "data", and build the image from that.
+  # empty dir named "data", or a symlink to the DATA dir,
+  # and build the image from that.
   temp_dir = tempfile.mkdtemp()
   user_dir = os.path.join(temp_dir, "data")
-  os.mkdir(user_dir)
+  empty = (OPTIONS.info_dict.get("userdata_img_with_data") != "true")
+  if empty:
+    # Create an empty dir.
+    os.mkdir(user_dir)
+  else:
+    # Symlink to the DATA dir.
+    os.symlink(os.path.join(OPTIONS.input_tmp, "DATA"),
+               user_dir)
+
   img = tempfile.NamedTemporaryFile()
 
   fstab = OPTIONS.info_dict["fstab"]
@@ -203,8 +208,7 @@
   common.CheckSize(img.name, "userdata.img", OPTIONS.info_dict)
   common.ZipWrite(output_zip, img.name, prefix + "userdata.img")
   img.close()
-  os.rmdir(user_dir)
-  os.rmdir(temp_dir)
+  shutil.rmtree(temp_dir)
 
 
 def AddCache(output_zip, prefix="IMAGES/"):
@@ -215,8 +219,7 @@
     print "cache.img already exists in %s, no need to rebuild..." % (prefix,)
     return
 
-  image_props = build_image.ImagePropFromGlobalDict(OPTIONS.info_dict,
-                                                    "cache")
+  image_props = build_image.ImagePropFromGlobalDict(OPTIONS.info_dict, "cache")
   # The build system has to explicitly request for cache.img.
   if "fs_type" not in image_props:
     return
@@ -265,15 +268,14 @@
   except KeyError:
     has_vendor = False
 
-  OPTIONS.info_dict = common.LoadInfoDict(input_zip)
-  if "selinux_fc" in OPTIONS.info_dict:
-    OPTIONS.info_dict["selinux_fc"] = os.path.join(
-        OPTIONS.input_tmp, "BOOT", "RAMDISK", "file_contexts")
+  OPTIONS.info_dict = common.LoadInfoDict(input_zip, OPTIONS.input_tmp)
 
   common.ZipClose(input_zip)
   output_zip = zipfile.ZipFile(filename, "a",
                                compression=zipfile.ZIP_DEFLATED)
 
+  has_recovery = (OPTIONS.info_dict.get("no_recovery") != "true")
+
   def banner(s):
     print "\n\n++++ " + s + " ++++\n\n"
 
@@ -291,19 +293,21 @@
     if boot_image:
       boot_image.AddToZip(output_zip)
 
-  banner("recovery")
   recovery_image = None
-  prebuilt_path = os.path.join(OPTIONS.input_tmp, "IMAGES", "recovery.img")
-  if os.path.exists(prebuilt_path):
-    print "recovery.img already exists in IMAGES/, no need to rebuild..."
-    if OPTIONS.rebuild_recovery:
+  if has_recovery:
+    banner("recovery")
+    prebuilt_path = os.path.join(OPTIONS.input_tmp, "IMAGES", "recovery.img")
+    if os.path.exists(prebuilt_path):
+      print "recovery.img already exists in IMAGES/, no need to rebuild..."
+      if OPTIONS.rebuild_recovery:
+        recovery_image = common.GetBootableImage(
+            "IMAGES/recovery.img", "recovery.img", OPTIONS.input_tmp,
+            "RECOVERY")
+    else:
       recovery_image = common.GetBootableImage(
           "IMAGES/recovery.img", "recovery.img", OPTIONS.input_tmp, "RECOVERY")
-  else:
-    recovery_image = common.GetBootableImage(
-        "IMAGES/recovery.img", "recovery.img", OPTIONS.input_tmp, "RECOVERY")
-    if recovery_image:
-      recovery_image.AddToZip(output_zip)
+      if recovery_image:
+        recovery_image.AddToZip(output_zip)
 
   banner("system")
   AddSystem(output_zip, recovery_img=recovery_image, boot_img=boot_image)
@@ -315,6 +319,23 @@
   banner("cache")
   AddCache(output_zip)
 
+  # For devices using A/B update, copy over images from RADIO/ to IMAGES/ and
+  # make sure we have all the needed images ready under IMAGES/.
+  ab_partitions = os.path.join(OPTIONS.input_tmp, "META", "ab_partitions.txt")
+  if os.path.exists(ab_partitions):
+    with open(ab_partitions, 'r') as f:
+      lines = f.readlines()
+    for line in lines:
+      img_name = line.strip() + ".img"
+      img_radio_path = os.path.join(OPTIONS.input_tmp, "RADIO", img_name)
+      if os.path.exists(img_radio_path):
+        common.ZipWrite(output_zip, img_radio_path,
+                        os.path.join("IMAGES", img_name))
+
+      # Zip spec says: All slashes MUST be forward slashes.
+      img_path = 'IMAGES/' + img_name
+      assert img_path in output_zip.namelist(), "cannot find " + img_name
+
   common.ZipClose(output_zip)
 
 def main(argv):
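A rough standalone sketch of the A/B image-copying step added above, with illustrative paths and the standard zipfile module standing in for common.ZipWrite; it assumes an unpacked target_files layout with META/ab_partitions.txt and RADIO/*.img:

import os
import zipfile

def copy_ab_images(input_tmp, output_zip_path):
  ab_list = os.path.join(input_tmp, "META", "ab_partitions.txt")
  if not os.path.exists(ab_list):
    return
  with zipfile.ZipFile(output_zip_path, "a", zipfile.ZIP_DEFLATED) as z:
    with open(ab_list) as f:
      for line in f:
        name = line.strip()
        if not name:
          continue
        img = os.path.join(input_tmp, "RADIO", name + ".img")
        if os.path.exists(img):
          # Zip spec: archive names always use forward slashes.
          z.write(img, "IMAGES/" + name + ".img")
        # Mirror the original check: every A/B partition must end up with an
        # image under IMAGES/ one way or another.
        assert "IMAGES/" + name + ".img" in z.namelist(), name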
diff --git a/tools/releasetools/blockimgdiff.py b/tools/releasetools/blockimgdiff.py
index cb6fc85..82d5397 100644
--- a/tools/releasetools/blockimgdiff.py
+++ b/tools/releasetools/blockimgdiff.py
@@ -16,7 +16,9 @@
 
 from collections import deque, OrderedDict
 from hashlib import sha1
+import array
 import common
+import functools
 import heapq
 import itertools
 import multiprocessing
@@ -24,6 +26,7 @@
 import re
 import subprocess
 import threading
+import time
 import tempfile
 
 from rangelib import RangeSet
@@ -128,10 +131,10 @@
     # verification if it has non-zero contents in the padding bytes.
     # Bug: 23828506
     if padded:
-      self.clobbered_blocks = RangeSet(
-          data=(self.total_blocks-1, self.total_blocks))
+      clobbered_blocks = [self.total_blocks-1, self.total_blocks]
     else:
-      self.clobbered_blocks = RangeSet()
+      clobbered_blocks = []
+    self.clobbered_blocks = clobbered_blocks
     self.extended = RangeSet()
 
     zero_blocks = []
@@ -147,11 +150,15 @@
         nonzero_blocks.append(i)
         nonzero_blocks.append(i+1)
 
-    self.file_map = {"__ZERO": RangeSet(zero_blocks),
-                     "__NONZERO": RangeSet(nonzero_blocks)}
+    assert zero_blocks or nonzero_blocks or clobbered_blocks
 
-    if self.clobbered_blocks:
-      self.file_map["__COPY"] = self.clobbered_blocks
+    self.file_map = dict()
+    if zero_blocks:
+      self.file_map["__ZERO"] = RangeSet(data=zero_blocks)
+    if nonzero_blocks:
+      self.file_map["__NONZERO"] = RangeSet(data=nonzero_blocks)
+    if clobbered_blocks:
+      self.file_map["__COPY"] = RangeSet(data=clobbered_blocks)
 
   def ReadRangeSet(self, ranges):
     return [self.data[s*self.blocksize:e*self.blocksize] for (s, e) in ranges]
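The __ZERO/__NONZERO classification above builds RangeSet data as flat [start, end) pairs; a minimal sketch of the same run-building pattern on plain block strings (block contents and sizes are made up):

def classify_blocks(blocks, blocksize=4096):
  zero = "\0" * blocksize
  zero_runs, nonzero_runs = [], []
  for i, blk in enumerate(blocks):
    runs = zero_runs if blk == zero else nonzero_runs
    if runs and runs[-1] == i:     # block extends the previous run
      runs[-1] = i + 1
    else:
      runs.extend([i, i + 1])      # start a new [i, i+1) run
  return zero_runs, nonzero_runs

blocks = ["\0" * 4096, "\0" * 4096, "x" * 4096]
print(classify_blocks(blocks))     # ([0, 2], [2, 3])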
@@ -200,6 +207,23 @@
             " to " + str(self.tgt_ranges) + ">")
 
 
+@functools.total_ordering
+class HeapItem(object):
+  def __init__(self, item):
+    self.item = item
+    # Negate the score since python's heap is a min-heap and we want
+    # the maximum score.
+    self.score = -item.score
+  def clear(self):
+    self.item = None
+  def __bool__(self):
+    return self.item is not None
+  def __eq__(self, other):
+    return self.score == other.score
+  def __le__(self, other):
+    return self.score <= other.score
+
+
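HeapItem above follows the usual heapq lazy-deletion pattern: a stale entry is marked invalid and skipped on pop rather than being removed from the heap. A self-contained sketch of that pattern (names differ slightly from HeapItem; the payloads and scores are made up):

import functools
import heapq

@functools.total_ordering
class Entry(object):
  def __init__(self, payload, score):
    self.payload = payload
    self.score = -score        # heapq is a min-heap; negate for max-first
  def clear(self):
    self.payload = None        # mark stale instead of removing from the heap
  def valid(self):
    return self.payload is not None
  def __eq__(self, other):
    return self.score == other.score
  def __le__(self, other):
    return self.score <= other.score

heap = [Entry("a", 3), Entry("b", 7), Entry("c", 5)]
heapq.heapify(heap)
heap[0].clear()                # lazily invalidate the current best ("b")
while heap:
  e = heapq.heappop(heap)
  if e.valid():
    print(e.payload)           # skips the cleared "b" and prints "c"
    break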
 # BlockImageDiff works on two image objects.  An image object is
 # anything that provides the following attributes:
 #
@@ -237,7 +261,7 @@
 # original image.
 
 class BlockImageDiff(object):
-  def __init__(self, tgt, src=None, threads=None, version=3):
+  def __init__(self, tgt, src=None, threads=None, version=4):
     if threads is None:
       threads = multiprocessing.cpu_count() // 2
       if threads == 0:
@@ -247,8 +271,11 @@
     self.transfers = []
     self.src_basenames = {}
     self.src_numpatterns = {}
+    self._max_stashed_size = 0
+    self.touched_src_ranges = RangeSet()
+    self.touched_src_sha1 = None
 
-    assert version in (1, 2, 3)
+    assert version in (1, 2, 3, 4)
 
     self.tgt = tgt
     if src is None:
@@ -264,6 +291,10 @@
     self.AssertPartition(src.care_map, src.file_map.values())
     self.AssertPartition(tgt.care_map, tgt.file_map.values())
 
+  @property
+  def max_stashed_size(self):
+    return self._max_stashed_size
+
   def Compute(self, prefix):
     # When looking for a source file to use as the diff input for a
     # target file, we try:
@@ -334,8 +365,8 @@
           sid = next_stash_id
           next_stash_id += 1
         stashes[s] = sid
-        stashed_blocks += sr.size()
         if self.version == 2:
+          stashed_blocks += sr.size()
           out.append("stash %d %s\n" % (sid, sr.to_string_raw()))
         else:
           sh = self.HashBlocks(self.src, sr)
@@ -343,15 +374,18 @@
             stashes[sh] += 1
           else:
             stashes[sh] = 1
+            stashed_blocks += sr.size()
+            self.touched_src_ranges = self.touched_src_ranges.union(sr)
             out.append("stash %s %s\n" % (sh, sr.to_string_raw()))
 
       if stashed_blocks > max_stashed_blocks:
         max_stashed_blocks = stashed_blocks
 
       free_string = []
+      free_size = 0
 
       if self.version == 1:
-        src_str = xf.src_ranges.to_string_raw()
+        src_str = xf.src_ranges.to_string_raw() if xf.src_ranges else ""
       elif self.version >= 2:
 
         #   <# blocks> <src ranges>
@@ -367,18 +401,25 @@
         mapped_stashes = []
         for s, sr in xf.use_stash:
           sid = stashes.pop(s)
-          stashed_blocks -= sr.size()
           unstashed_src_ranges = unstashed_src_ranges.subtract(sr)
           sh = self.HashBlocks(self.src, sr)
           sr = xf.src_ranges.map_within(sr)
           mapped_stashes.append(sr)
           if self.version == 2:
             src_str.append("%d:%s" % (sid, sr.to_string_raw()))
+            # A stash will be used only once. Free the stash immediately after
+            # use instead of waiting for the automatic clean-up at the end;
+            # otherwise it may take up extra space and lead to OTA failures.
+            # Bug: 23119955
+            free_string.append("free %d\n" % (sid,))
+            free_size += sr.size()
           else:
             assert sh in stashes
             src_str.append("%s:%s" % (sh, sr.to_string_raw()))
             stashes[sh] -= 1
             if stashes[sh] == 0:
+              free_size += sr.size()
               free_string.append("free %s\n" % (sh))
               stashes.pop(sh)
           heapq.heappush(free_stash_ids, sid)
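The version 3 bookkeeping above reference-counts stashes by hash and frees a stash only when its last user has consumed it; a toy model of that accounting (the hash and sizes are made up):

stashes = {}                   # hash -> reference count
out = []
stashed_blocks = [0]           # list so the helpers below can mutate it

def stash(sh, size):
  if sh in stashes:
    stashes[sh] += 1           # duplicate stash: no extra space needed
  else:
    stashes[sh] = 1
    stashed_blocks[0] += size
    out.append("stash %s ...\n" % sh)

def use_stash(sh, size):
  stashes[sh] -= 1
  if stashes[sh] == 0:         # last user: free the space immediately
    stashes.pop(sh)
    stashed_blocks[0] -= size
    out.append("free %s\n" % sh)

stash("ab12", 10)
stash("ab12", 10)
use_stash("ab12", 10)          # still referenced, nothing freed
use_stash("ab12", 10)          # last use frees it
print(stashed_blocks[0])       # 0
print(out)                     # ['stash ab12 ...\n', 'free ab12\n']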
@@ -441,6 +482,9 @@
               if temp_stash_usage > max_stashed_blocks:
                 max_stashed_blocks = temp_stash_usage
 
+            self.touched_src_ranges = self.touched_src_ranges.union(
+                xf.src_ranges)
+
             out.append("%s %s %s %s\n" % (
                 xf.style,
                 self.HashBlocks(self.tgt, xf.tgt_ranges),
@@ -464,6 +508,9 @@
             if temp_stash_usage > max_stashed_blocks:
               max_stashed_blocks = temp_stash_usage
 
+          self.touched_src_ranges = self.touched_src_ranges.union(
+              xf.src_ranges)
+
           out.append("%s %d %d %s %s %s %s\n" % (
               xf.style,
               xf.patch_start, xf.patch_len,
@@ -482,8 +529,9 @@
 
       if free_string:
         out.append("".join(free_string))
+        stashed_blocks -= free_size
 
-      if self.version >= 2:
+      if self.version >= 2 and common.OPTIONS.cache_size is not None:
         # Sanity check: abort if we're going to need more stash space than
         # the allowed size (cache_size * threshold). There are two purposes
         # of having a threshold here. a) Part of the cache may have been
@@ -498,20 +546,35 @@
                    self.tgt.blocksize, max_allowed, cache_size,
                    stash_threshold)
 
+    if self.version >= 3:
+      self.touched_src_sha1 = self.HashBlocks(
+          self.src, self.touched_src_ranges)
+
     # Zero out extended blocks as a workaround for bug 20881595.
     if self.tgt.extended:
       out.append("zero %s\n" % (self.tgt.extended.to_string_raw(),))
+      total += self.tgt.extended.size()
 
     # We erase all the blocks on the partition that a) don't contain useful
-    # data in the new image and b) will not be touched by dm-verity.
+    # data in the new image; b) will not be touched by dm-verity. Of those
+    # blocks, the ones that won't be read as source data by this update are
+    # erased at the beginning of the update; the rest are erased at the end.
+    # This works around an eMMC issue observed on some devices, which may
+    # otherwise be starved of clean blocks and fail the update. (b/28347095)
     all_tgt = RangeSet(data=(0, self.tgt.total_blocks))
     all_tgt_minus_extended = all_tgt.subtract(self.tgt.extended)
     new_dontcare = all_tgt_minus_extended.subtract(self.tgt.care_map)
-    if new_dontcare:
-      out.append("erase %s\n" % (new_dontcare.to_string_raw(),))
+
+    erase_first = new_dontcare.subtract(self.touched_src_ranges)
+    if erase_first:
+      out.insert(0, "erase %s\n" % (erase_first.to_string_raw(),))
+
+    erase_last = new_dontcare.subtract(erase_first)
+    if erase_last:
+      out.append("erase %s\n" % (erase_last.to_string_raw(),))
 
     out.insert(0, "%d\n" % (self.version,))   # format version number
-    out.insert(1, str(total) + "\n")
+    out.insert(1, "%d\n" % (total,))
     if self.version >= 2:
       # version 2 only: after the total block count, we give the number
       # of stash slots needed, and the maximum size needed (in blocks)
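The two-phase erase added above splits the don't-care blocks by whether this update still reads them as source data; the set arithmetic, shown with plain sets of block numbers in place of RangeSet (the numbers are made up):

all_tgt = set(range(0, 16))
extended = set(range(14, 16))
care_map = set(range(0, 10))
touched_src = set(range(8, 13))            # blocks this update reads as source

new_dontcare = (all_tgt - extended) - care_map
erase_first = new_dontcare - touched_src   # safe to erase before the update
erase_last = new_dontcare - erase_first    # erased only after the update

print(sorted(erase_first))                 # [13]
print(sorted(erase_last))                  # [10, 11, 12]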
@@ -523,11 +586,17 @@
         f.write(i)
 
     if self.version >= 2:
-      max_stashed_size = max_stashed_blocks * self.tgt.blocksize
-      max_allowed = common.OPTIONS.cache_size * common.OPTIONS.stash_threshold
-      print("max stashed blocks: %d  (%d bytes), limit: %d bytes (%.2f%%)\n" % (
-          max_stashed_blocks, max_stashed_size, max_allowed,
-          max_stashed_size * 100.0 / max_allowed))
+      self._max_stashed_size = max_stashed_blocks * self.tgt.blocksize
+      OPTIONS = common.OPTIONS
+      if OPTIONS.cache_size is not None:
+        max_allowed = OPTIONS.cache_size * OPTIONS.stash_threshold
+        print("max stashed blocks: %d  (%d bytes), "
+              "limit: %d bytes (%.2f%%)\n" % (
+              max_stashed_blocks, self._max_stashed_size, max_allowed,
+              self._max_stashed_size * 100.0 / max_allowed))
+      else:
+        print("max stashed blocks: %d  (%d bytes), limit: <unknown>\n" % (
+              max_stashed_blocks, self._max_stashed_size))
 
   def ReviseStashSize(self):
     print("Revising stash size...")
@@ -592,12 +661,15 @@
           def_cmd = stashes[idx][1]
           assert (idx, sr) in def_cmd.stash_before
           def_cmd.stash_before.remove((idx, sr))
-          new_blocks += sr.size()
 
+        # Add up the blocks that violate the space limit; the total is
+        # printed to screen below.
+        new_blocks += cmd.tgt_ranges.size()
         cmd.ConvertToNew()
 
-    print("  Total %d blocks are packed as new blocks due to insufficient "
-          "cache size." % (new_blocks,))
+    num_of_bytes = new_blocks * self.tgt.blocksize
+    print("  Total %d blocks (%d bytes) are packed as new blocks due to "
+          "insufficient cache size." % (new_blocks, num_of_bytes))
 
   def ComputePatches(self, prefix):
     print("Reticulating splines...")
@@ -710,7 +782,7 @@
     # - we write every block we care about exactly once.
 
     # Start with no blocks having been touched yet.
-    touched = RangeSet()
+    touched = array.array("B", "\0" * self.tgt.total_blocks)
 
     # Imagine processing the transfers in order.
     for xf in self.transfers:
@@ -721,14 +793,25 @@
         for _, sr in xf.use_stash:
           x = x.subtract(sr)
 
-      assert not touched.overlaps(x)
-      # Check that the output blocks for this transfer haven't yet been touched.
-      assert not touched.overlaps(xf.tgt_ranges)
-      # Touch all the blocks written by this transfer.
-      touched = touched.union(xf.tgt_ranges)
+      for s, e in x:
+        # The source image could be larger. Don't check blocks that exist only
+        # in the source image: they are not in 'touched' and will never be
+        # touched.
+        for i in range(s, min(e, self.tgt.total_blocks)):
+          assert touched[i] == 0
+
+      # Check that the output blocks for this transfer haven't yet
+      # been touched, and touch all the blocks written by this
+      # transfer.
+      for s, e in xf.tgt_ranges:
+        for i in range(s, e):
+          assert touched[i] == 0
+          touched[i] = 1
 
     # Check that we've written every target block.
-    assert touched == self.tgt.care_map
+    for s, e in self.tgt.care_map:
+      for i in range(s, e):
+        assert touched[i] == 1
 
   def ImproveVertexSequence(self):
     print("Improving vertex order...")
@@ -865,6 +948,7 @@
     for xf in self.transfers:
       xf.incoming = xf.goes_after.copy()
       xf.outgoing = xf.goes_before.copy()
+      xf.score = sum(xf.outgoing.values()) - sum(xf.incoming.values())
 
     # We use an OrderedDict instead of just a set so that the output
     # is repeatable; otherwise it would depend on the hash values of
@@ -875,52 +959,67 @@
     s1 = deque()  # the left side of the sequence, built from left to right
     s2 = deque()  # the right side of the sequence, built from right to left
 
-    while G:
+    heap = []
+    for xf in self.transfers:
+      xf.heap_item = HeapItem(xf)
+      heap.append(xf.heap_item)
+    heapq.heapify(heap)
 
+    sinks = set(u for u in G if not u.outgoing)
+    sources = set(u for u in G if not u.incoming)
+
+    def adjust_score(iu, delta):
+      iu.score += delta
+      iu.heap_item.clear()
+      iu.heap_item = HeapItem(iu)
+      heapq.heappush(heap, iu.heap_item)
+
+    while G:
       # Put all sinks at the end of the sequence.
-      while True:
-        sinks = [u for u in G if not u.outgoing]
-        if not sinks:
-          break
+      while sinks:
+        new_sinks = set()
         for u in sinks:
+          if u not in G: continue
           s2.appendleft(u)
           del G[u]
           for iu in u.incoming:
-            del iu.outgoing[u]
+            adjust_score(iu, -iu.outgoing.pop(u))
+            if not iu.outgoing: new_sinks.add(iu)
+        sinks = new_sinks
 
       # Put all the sources at the beginning of the sequence.
-      while True:
-        sources = [u for u in G if not u.incoming]
-        if not sources:
-          break
+      while sources:
+        new_sources = set()
         for u in sources:
+          if u not in G: continue
           s1.append(u)
           del G[u]
           for iu in u.outgoing:
-            del iu.incoming[u]
+            adjust_score(iu, +iu.incoming.pop(u))
+            if not iu.incoming: new_sources.add(iu)
+        sources = new_sources
 
-      if not G:
-        break
+      if not G: break
 
       # Find the "best" vertex to put next.  "Best" is the one that
       # maximizes the net difference in source blocks saved we get by
       # pretending it's a source rather than a sink.
 
-      max_d = None
-      best_u = None
-      for u in G:
-        d = sum(u.outgoing.values()) - sum(u.incoming.values())
-        if best_u is None or d > max_d:
-          max_d = d
-          best_u = u
+      while True:
+        u = heapq.heappop(heap)
+        if u and u.item in G:
+          u = u.item
+          break
 
-      u = best_u
       s1.append(u)
       del G[u]
       for iu in u.outgoing:
-        del iu.incoming[u]
+        adjust_score(iu, +iu.incoming.pop(u))
+        if not iu.incoming: sources.add(iu)
+
       for iu in u.incoming:
-        del iu.outgoing[u]
+        adjust_score(iu, -iu.outgoing.pop(u))
+        if not iu.outgoing: sinks.add(iu)
 
     # Now record the sequence in the 'order' field of each transfer,
     # and by rearranging self.transfers to be in the chosen sequence.
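The reworked loop above keeps incremental sink/source sets instead of rescanning G on every pass; a cut-down sketch of just the sink-peeling half on a three-node chain a -> b -> c (the toy graph and weights are illustrative only):

from collections import deque

outgoing = {"a": {"b": 2}, "b": {"c": 1}, "c": {}}
incoming = {"a": {}, "b": {"a": 2}, "c": {"b": 1}}
G = set(outgoing)

s2 = deque()                         # right side of the final sequence
sinks = set(u for u in G if not outgoing[u])
while sinks:
  new_sinks = set()
  for u in sinks:
    if u not in G:
      continue
    s2.appendleft(u)
    G.remove(u)
    for iu in incoming[u]:
      del outgoing[iu][u]            # u no longer blocks iu
      if not outgoing[iu]:
        new_sinks.add(iu)
  sinks = new_sinks

print(list(s2))                      # ['a', 'b', 'c']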
@@ -936,10 +1035,38 @@
 
   def GenerateDigraph(self):
     print("Generating digraph...")
+
+    # Each item of source_ranges will be:
+    #   - None, if that block is not used as a source,
+    #   - a transfer, if one transfer uses it as a source, or
+    #   - a set of transfers.
+    source_ranges = []
+    for b in self.transfers:
+      for s, e in b.src_ranges:
+        if e > len(source_ranges):
+          source_ranges.extend([None] * (e-len(source_ranges)))
+        for i in range(s, e):
+          if source_ranges[i] is None:
+            source_ranges[i] = b
+          else:
+            if not isinstance(source_ranges[i], set):
+              source_ranges[i] = set([source_ranges[i]])
+            source_ranges[i].add(b)
+
     for a in self.transfers:
-      for b in self.transfers:
-        if a is b:
-          continue
+      intersections = set()
+      for s, e in a.tgt_ranges:
+        for i in range(s, e):
+          if i >= len(source_ranges): break
+          b = source_ranges[i]
+          if b is not None:
+            if isinstance(b, set):
+              intersections.update(b)
+            else:
+              intersections.add(b)
+
+      for b in intersections:
+        if a is b: continue
 
         # If the blocks written by A are read by B, then B needs to go before A.
         i = a.tgt_ranges.intersect(b.src_ranges)
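The source_ranges index built above replaces the old pairwise scan with a per-block lookup; a simplified sketch that always stores a set per block (the transfer names and ranges are made up):

transfers = {
    "A": [(0, 4)],                   # src_ranges as [start, end) pairs
    "B": [(2, 6)],
    "C": [(8, 10)],
}

source_of = []
for name, ranges in transfers.items():
  for s, e in ranges:
    if e > len(source_of):
      source_of.extend([None] * (e - len(source_of)))
    for i in range(s, e):
      if source_of[i] is None:
        source_of[i] = {name}
      else:
        source_of[i].add(name)

# Any transfer writing block 3 must be ordered against both A and B.
print(sorted(source_of[3]))          # ['A', 'B']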
@@ -966,29 +1093,36 @@
       too many blocks (greater than MAX_BLOCKS_PER_DIFF_TRANSFER), we split it
       into smaller pieces by getting multiple Transfer()s.
 
-      The downside is that after splitting, we can no longer use imgdiff but
-      only bsdiff."""
-
-      MAX_BLOCKS_PER_DIFF_TRANSFER = 1024
+      The downside is that after splitting, we may increase the package size
+      since the split pieces don't align well. According to our experiments,
+      1/8 of the cache size as the per-piece limit appears to be optimal.
+      Compared to the fixed 1024-block limit, it reduces the overall package
+      size by 30% volantis, and 20% for angler and bullhead."""
 
       # We care about diff transfers only.
       if style != "diff" or not split:
         Transfer(tgt_name, src_name, tgt_ranges, src_ranges, style, by_id)
         return
 
+      pieces = 0
+      cache_size = common.OPTIONS.cache_size
+      split_threshold = 0.125
+      max_blocks_per_transfer = int(cache_size * split_threshold /
+                                    self.tgt.blocksize)
+
       # Change nothing for small files.
-      if (tgt_ranges.size() <= MAX_BLOCKS_PER_DIFF_TRANSFER and
-          src_ranges.size() <= MAX_BLOCKS_PER_DIFF_TRANSFER):
+      if (tgt_ranges.size() <= max_blocks_per_transfer and
+          src_ranges.size() <= max_blocks_per_transfer):
         Transfer(tgt_name, src_name, tgt_ranges, src_ranges, style, by_id)
         return
 
-      pieces = 0
-      while (tgt_ranges.size() > MAX_BLOCKS_PER_DIFF_TRANSFER and
-             src_ranges.size() > MAX_BLOCKS_PER_DIFF_TRANSFER):
+      while (tgt_ranges.size() > max_blocks_per_transfer and
+             src_ranges.size() > max_blocks_per_transfer):
         tgt_split_name = "%s-%d" % (tgt_name, pieces)
         src_split_name = "%s-%d" % (src_name, pieces)
-        tgt_first = tgt_ranges.first(MAX_BLOCKS_PER_DIFF_TRANSFER)
-        src_first = src_ranges.first(MAX_BLOCKS_PER_DIFF_TRANSFER)
+        tgt_first = tgt_ranges.first(max_blocks_per_transfer)
+        src_first = src_ranges.first(max_blocks_per_transfer)
+
         Transfer(tgt_split_name, src_split_name, tgt_first, src_first, style,
                  by_id)
 
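With the 1/8 ratio described in the docstring above, the per-piece limit works out as follows (the cache size here is hypothetical):

cache_size = 128 * 1024 * 1024       # bytes, hypothetical device cache
blocksize = 4096
split_threshold = 0.125

max_blocks_per_transfer = int(cache_size * split_threshold / blocksize)
print(max_blocks_per_transfer)       # 4096 blocks, i.e. 16 MiB per piece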
@@ -1061,6 +1195,7 @@
     """Assert that all the RangeSets in 'seq' form a partition of the
     'total' RangeSet (ie, they are nonintersecting and their union
     equals 'total')."""
+
     so_far = RangeSet()
     for i in seq:
       assert not so_far.overlaps(i)
diff --git a/tools/releasetools/build_image.py b/tools/releasetools/build_image.py
index 357a666..ce60667 100755
--- a/tools/releasetools/build_image.py
+++ b/tools/releasetools/build_image.py
@@ -28,11 +28,13 @@
 import commands
 import common
 import shutil
+import sparse_img
 import tempfile
 
 OPTIONS = common.OPTIONS
 
 FIXED_SALT = "aee087a5be3b982978c923f566a94613496b417f2af592639bc80d141e34dfe7"
+BLOCK_SIZE = 4096
 
 def RunCommand(cmd):
   """Echo and run the given command.
@@ -48,6 +50,14 @@
   print "%s" % (output.rstrip(),)
   return (output, p.returncode)
 
+def GetVerityFECSize(partition_size):
+  cmd = "fec -s %d" % partition_size
+  status, output = commands.getstatusoutput(cmd)
+  if status:
+    print output
+    return False, 0
+  return True, int(output)
+
 def GetVerityTreeSize(partition_size):
   cmd = "build_verity_tree -s %d"
   cmd %= partition_size
@@ -67,7 +77,32 @@
     return False, 0
   return True, int(output)
 
-def AdjustPartitionSizeForVerity(partition_size):
+def GetVeritySize(partition_size, fec_supported):
+  success, verity_tree_size = GetVerityTreeSize(partition_size)
+  if not success:
+    return 0
+  success, verity_metadata_size = GetVerityMetadataSize(partition_size)
+  if not success:
+    return 0
+  verity_size = verity_tree_size + verity_metadata_size
+  if fec_supported:
+    success, fec_size = GetVerityFECSize(partition_size + verity_size)
+    if not success:
+      return 0
+    return verity_size + fec_size
+  return verity_size
+
+def GetSimgSize(image_file):
+  simg = sparse_img.SparseImage(image_file, build_map=False)
+  return simg.blocksize * simg.total_blocks
+
+def ZeroPadSimg(image_file, pad_size):
+  blocks = pad_size // BLOCK_SIZE
+  print("Padding %d blocks (%d bytes)" % (blocks, pad_size))
+  simg = sparse_img.SparseImage(image_file, mode="r+b", build_map=False)
+  simg.AppendFillChunk(0, blocks)
+
+def AdjustPartitionSizeForVerity(partition_size, fec_supported):
   """Modifies the provided partition size to account for the verity metadata.
 
   This information is used to size the created image appropriately.
@@ -76,13 +111,43 @@
   Returns:
     The size of the partition adjusted for verity metadata.
   """
-  success, verity_tree_size = GetVerityTreeSize(partition_size)
-  if not success:
-    return 0
-  success, verity_metadata_size = GetVerityMetadataSize(partition_size)
-  if not success:
-    return 0
-  return partition_size - verity_tree_size - verity_metadata_size
+  key = "%d %d" % (partition_size, fec_supported)
+  if key in AdjustPartitionSizeForVerity.results:
+    return AdjustPartitionSizeForVerity.results[key]
+
+  hi = partition_size
+  if hi % BLOCK_SIZE != 0:
+    hi = (hi // BLOCK_SIZE) * BLOCK_SIZE
+
+  # The verity tree and FEC sizes depend on the partition size, so this first
+  # estimate is always conservative (smaller than necessary).
+  lo = partition_size - GetVeritySize(hi, fec_supported)
+  result = lo
+
+  # do a binary search for the optimal size
+  while lo < hi:
+    i = ((lo + hi) // (2 * BLOCK_SIZE)) * BLOCK_SIZE
+    size = i + GetVeritySize(i, fec_supported)
+    if size <= partition_size:
+      if result < i:
+        result = i
+      lo = i + BLOCK_SIZE
+    else:
+      hi = i
+
+  AdjustPartitionSizeForVerity.results[key] = result
+  return result
+
+AdjustPartitionSizeForVerity.results = {}
+
+def BuildVerityFEC(sparse_image_path, verity_path, verity_fec_path):
+  cmd = "fec -e %s %s %s" % (sparse_image_path, verity_path, verity_fec_path)
+  print cmd
+  status, output = commands.getstatusoutput(cmd)
+  if status:
+    print "Could not build FEC data! Error: %s" % output
+    return False
+  return True
 
 def BuildVerityTree(sparse_image_path, verity_image_path, prop_dict):
   cmd = "build_verity_tree -A %s %s %s" % (
@@ -128,13 +193,33 @@
     return False
   return True
 
-def BuildVerifiedImage(data_image_path, verity_image_path,
-                       verity_metadata_path):
-  if not Append2Simg(data_image_path, verity_metadata_path,
-                     "Could not append verity metadata!"):
+def Append(target, file_to_append, error_message):
+  cmd = 'cat %s >> %s' % (file_to_append, target)
+  print cmd
+  status, output = commands.getstatusoutput(cmd)
+  if status:
+    print "%s: %s" % (error_message, output)
     return False
+  return True
+
+def BuildVerifiedImage(data_image_path, verity_image_path,
+                       verity_metadata_path, verity_fec_path,
+                       fec_supported):
+  if not Append(verity_image_path, verity_metadata_path,
+                "Could not append verity metadata!"):
+    return False
+
+  if fec_supported:
+    # build FEC for the entire partition, including metadata
+    if not BuildVerityFEC(data_image_path, verity_image_path,
+                          verity_fec_path):
+      return False
+
+    if not Append(verity_image_path, verity_fec_path, "Could not append FEC!"):
+      return False
+
   if not Append2Simg(data_image_path, verity_image_path,
-                     "Could not append verity tree!"):
+                     "Could not append verity data!"):
     return False
   return True
 
@@ -154,7 +239,7 @@
     return False, None
   return True, unsparse_image_path
 
-def MakeVerityEnabledImage(out_file, prop_dict):
+def MakeVerityEnabledImage(out_file, fec_supported, prop_dict):
   """Creates an image that is verifiable using dm-verity.
 
   Args:
@@ -180,6 +265,7 @@
   # get partial image paths
   verity_image_path = os.path.join(tempdir_name, "verity.img")
   verity_metadata_path = os.path.join(tempdir_name, "verity_metadata.img")
+  verity_fec_path = os.path.join(tempdir_name, "verity_fec.img")
 
   # build the verity tree and get the root hash and salt
   if not BuildVerityTree(out_file, verity_image_path, prop_dict):
@@ -197,13 +283,27 @@
   # build the full verified image
   if not BuildVerifiedImage(out_file,
                             verity_image_path,
-                            verity_metadata_path):
+                            verity_metadata_path,
+                            verity_fec_path,
+                            fec_supported):
     shutil.rmtree(tempdir_name, ignore_errors=True)
     return False
 
   shutil.rmtree(tempdir_name, ignore_errors=True)
   return True
 
+def ConvertBlockMapToBaseFs(block_map_file):
+  fd, base_fs_file = tempfile.mkstemp(prefix="script_gen_",
+                                      suffix=".base_fs")
+  os.close(fd)
+
+  convert_command = ["blk_alloc_to_base_fs", block_map_file, base_fs_file]
+  (_, exit_code) = RunCommand(convert_command)
+  if exit_code != 0:
+    os.remove(base_fs_file)
+    return None
+  return base_fs_file
+
 def BuildImage(in_dir, prop_dict, out_file, target_out=None):
   """Build an image to out_file from in_dir with property prop_dict.
 
@@ -220,6 +320,7 @@
   # /system and the ramdisk, and can be mounted at the root of the file system.
   origin_in = in_dir
   fs_config = prop_dict.get("fs_config")
+  base_fs_file = None
   if (prop_dict.get("system_root_image") == "true"
       and prop_dict["mount_point"] == "system"):
     in_dir = tempfile.mkdtemp()
@@ -248,12 +349,14 @@
 
   is_verity_partition = "verity_block_device" in prop_dict
   verity_supported = prop_dict.get("verity") == "true"
+  verity_fec_supported = prop_dict.get("verity_fec") == "true"
+
   # Adjust the partition size to make room for the hashes if this is to be
   # verified.
-  if verity_supported and is_verity_partition and fs_spans_partition:
+  if verity_supported and is_verity_partition:
     partition_size = int(prop_dict.get("partition_size"))
-
-    adjusted_size = AdjustPartitionSizeForVerity(partition_size)
+    adjusted_size = AdjustPartitionSizeForVerity(partition_size,
+                                                 verity_fec_supported)
     if not adjusted_size:
       return False
     prop_dict["partition_size"] = str(adjusted_size)
@@ -277,16 +380,24 @@
       build_command.extend(["-D", target_out])
     if "block_list" in prop_dict:
       build_command.extend(["-B", prop_dict["block_list"]])
+    if "base_fs_file" in prop_dict:
+      base_fs_file = ConvertBlockMapToBaseFs(prop_dict["base_fs_file"])
+      if base_fs_file is None:
+        return False
+      build_command.extend(["-d", base_fs_file])
     build_command.extend(["-L", prop_dict["mount_point"]])
     if "selinux_fc" in prop_dict:
       build_command.append(prop_dict["selinux_fc"])
   elif fs_type.startswith("squash"):
     build_command = ["mksquashfsimage.sh"]
     build_command.extend([in_dir, out_file])
-    build_command.extend(["-s"])
+    if "squashfs_sparse_flag" in prop_dict:
+      build_command.extend([prop_dict["squashfs_sparse_flag"]])
     build_command.extend(["-m", prop_dict["mount_point"]])
     if target_out:
       build_command.extend(["-d", target_out])
+    if fs_config:
+      build_command.extend(["-C", fs_config])
     if "selinux_fc" in prop_dict:
       build_command.extend(["-c", prop_dict["selinux_fc"]])
     if "squashfs_compressor" in prop_dict:
@@ -330,6 +441,8 @@
       shutil.rmtree(in_dir, ignore_errors=True)
       if fs_config:
         os.remove(fs_config)
+    if base_fs_file is not None:
+      os.remove(base_fs_file)
   if exit_code != 0:
     return False
 
@@ -360,21 +473,17 @@
   if not fs_spans_partition:
     mount_point = prop_dict.get("mount_point")
     partition_size = int(prop_dict.get("partition_size"))
-    image_size = os.stat(out_file).st_size
+    image_size = GetSimgSize(out_file)
     if image_size > partition_size:
       print("Error: %s image size of %d is larger than partition size of "
             "%d" % (mount_point, image_size, partition_size))
       return False
     if verity_supported and is_verity_partition:
-      if 2 * image_size - AdjustPartitionSizeForVerity(image_size) > partition_size:
-        print "Error: No more room on %s to fit verity data" % mount_point
-        return False
-    prop_dict["original_partition_size"] = prop_dict["partition_size"]
-    prop_dict["partition_size"] = str(image_size)
+      ZeroPadSimg(out_file, partition_size - image_size)
 
   # create the verified image if this is to be verified
   if verity_supported and is_verity_partition:
-    if not MakeVerityEnabledImage(out_file, prop_dict):
+    if not MakeVerityEnabledImage(out_file, verity_fec_supported, prop_dict):
       return False
 
   if run_fsck and prop_dict.get("skip_fsck") != "true":
@@ -411,12 +520,14 @@
 
   common_props = (
       "extfs_sparse_flag",
+      "squashfs_sparse_flag",
       "mkyaffs2_extra_flags",
       "selinux_fc",
       "skip_fsck",
       "verity",
       "verity_key",
-      "verity_signer_cmd"
+      "verity_signer_cmd",
+      "verity_fec"
       )
   for p in common_props:
     copy_prop(p, p)
@@ -432,9 +543,11 @@
     copy_prop("system_verity_block_device", "verity_block_device")
     copy_prop("system_root_image", "system_root_image")
     copy_prop("ramdisk_dir", "ramdisk_dir")
+    copy_prop("ramdisk_fs_config", "ramdisk_fs_config")
     copy_prop("has_ext4_reserved_blocks", "has_ext4_reserved_blocks")
     copy_prop("system_squashfs_compressor", "squashfs_compressor")
     copy_prop("system_squashfs_compressor_opt", "squashfs_compressor_opt")
+    copy_prop("system_base_fs_file", "base_fs_file")
   elif mount_point == "data":
     # Copy the generic fs type first, override with specific one if available.
     copy_prop("fs_type", "fs_type")
@@ -449,6 +562,9 @@
     copy_prop("vendor_journal_size", "journal_size")
     copy_prop("vendor_verity_block_device", "verity_block_device")
     copy_prop("has_ext4_reserved_blocks", "has_ext4_reserved_blocks")
+    copy_prop("vendor_squashfs_compressor", "squashfs_compressor")
+    copy_prop("vendor_squashfs_compressor_opt", "squashfs_compressor_opt")
+    copy_prop("vendor_base_fs_file", "base_fs_file")
   elif mount_point == "oem":
     copy_prop("fs_type", "fs_type")
     copy_prop("oem_size", "partition_size")
diff --git a/tools/releasetools/common.py b/tools/releasetools/common.py
index 2965fa8..60f44db 100644
--- a/tools/releasetools/common.py
+++ b/tools/releasetools/common.py
@@ -30,7 +30,6 @@
 import zipfile
 
 import blockimgdiff
-import rangelib
 
 from hashlib import sha1 as sha1
 
@@ -44,6 +43,7 @@
 
     self.search_path = platform_search_path.get(sys.platform, None)
     self.signapk_path = "framework/signapk.jar"  # Relative to search_path
+    self.signapk_shared_library_path = "lib64"   # Relative to search_path
     self.extra_signapk_args = []
     self.java_path = "java"  # Use the one on the path by default.
     self.java_args = "-Xmx2048m" # JVM Args
@@ -62,6 +62,9 @@
     self.source_info_dict = None
     self.target_info_dict = None
     self.worker_threads = None
+    # Stash size cannot exceed cache_size * threshold.
+    self.cache_size = None
+    self.stash_threshold = 0.8
 
 
 OPTIONS = Options()
@@ -99,7 +102,7 @@
       pass
 
 
-def LoadInfoDict(input_file):
+def LoadInfoDict(input_file, input_dir=None):
   """Read and parse the META/misc_info.txt key/value pairs from the
   input target files and return a dict."""
 
@@ -150,6 +153,56 @@
   if "fstab_version" not in d:
     d["fstab_version"] = "1"
 
+  # A few properties are stored as links to files in the out/ directory. That
+  # works fine with the build system, but those links are no longer available
+  # when (re)generating images from a target_files zip. If input_dir is not
+  # None, we are repacking: redirect those properties to the actual files in
+  # the unzipped directory.
+  if input_dir is not None:
+    # We carry a copy of file_contexts.bin under META/. If not available,
+    # search BOOT/RAMDISK/. Note that sometimes we may need a different file
+    # to build images than the one running on device, such as when enabling
+    # system_root_image. In that case, we must have the one for image
+    # generation copied to META/.
+    fc_basename = os.path.basename(d.get("selinux_fc", "file_contexts"))
+    fc_config = os.path.join(input_dir, "META", fc_basename)
+    if d.get("system_root_image") == "true":
+      assert os.path.exists(fc_config)
+    if not os.path.exists(fc_config):
+      fc_config = os.path.join(input_dir, "BOOT", "RAMDISK", fc_basename)
+      if not os.path.exists(fc_config):
+        fc_config = None
+
+    if fc_config:
+      d["selinux_fc"] = fc_config
+
+    # Similarly we need to redirect "ramdisk_dir" and "ramdisk_fs_config".
+    if d.get("system_root_image") == "true":
+      d["ramdisk_dir"] = os.path.join(input_dir, "ROOT")
+      d["ramdisk_fs_config"] = os.path.join(
+          input_dir, "META", "root_filesystem_config.txt")
+
+    # Redirect {system,vendor}_base_fs_file.
+    if "system_base_fs_file" in d:
+      basename = os.path.basename(d["system_base_fs_file"])
+      system_base_fs_file = os.path.join(input_dir, "META", basename)
+      if os.path.exists(system_base_fs_file):
+        d["system_base_fs_file"] = system_base_fs_file
+      else:
+        print "Warning: failed to find system base fs file: %s" % (
+            system_base_fs_file,)
+        del d["system_base_fs_file"]
+
+    if "vendor_base_fs_file" in d:
+      basename = os.path.basename(d["vendor_base_fs_file"])
+      vendor_base_fs_file = os.path.join(input_dir, "META", basename)
+      if os.path.exists(vendor_base_fs_file):
+        d["vendor_base_fs_file"] = vendor_base_fs_file
+      else:
+        print "Warning: failed to find vendor base fs file: %s" % (
+            vendor_base_fs_file,)
+        del d["vendor_base_fs_file"]
+
   try:
     data = read_helper("META/imagesizes.txt")
     for line in data.split("\n"):
@@ -179,7 +232,11 @@
   makeint("boot_size")
   makeint("fstab_version")
 
-  d["fstab"] = LoadRecoveryFSTab(read_helper, d["fstab_version"])
+  if d.get("no_recovery", False) == "true":
+    d["fstab"] = None
+  else:
+    d["fstab"] = LoadRecoveryFSTab(read_helper, d["fstab_version"],
+                                   d.get("system_root_image", False))
   d["build.prop"] = LoadBuildProp(read_helper)
   return d
 
@@ -202,7 +259,7 @@
       d[name] = value
   return d
 
-def LoadRecoveryFSTab(read_helper, fstab_version):
+def LoadRecoveryFSTab(read_helper, fstab_version, system_root_image=False):
   class Partition(object):
     def __init__(self, mount_point, fs_type, device, length, device2, context):
       self.mount_point = mount_point
@@ -294,6 +351,12 @@
   else:
     raise ValueError("Unknown fstab_version: \"%d\"" % (fstab_version,))
 
+  # / is used for the system mount point when the root directory is included in
+  # system. Other areas assume system is always at "/system" so point /system
+  # at /.
+  if system_root_image:
+    assert not d.has_key("/system") and d.has_key("/")
+    d["/system"] = d["/"]
   return d
 
 
@@ -302,34 +365,46 @@
     print "%-25s = (%s) %s" % (k, type(v).__name__, v)
 
 
-def BuildBootableImage(sourcedir, fs_config_file, info_dict=None):
-  """Take a kernel, cmdline, and ramdisk directory from the input (in
-  'sourcedir'), and turn them into a boot image.  Return the image
-  data, or None if sourcedir does not appear to contains files for
-  building the requested image."""
+def _BuildBootableImage(sourcedir, fs_config_file, info_dict=None,
+                        has_ramdisk=False):
+  """Build a bootable image from the specified sourcedir.
 
-  if (not os.access(os.path.join(sourcedir, "RAMDISK"), os.F_OK) or
-      not os.access(os.path.join(sourcedir, "kernel"), os.F_OK)):
+  Take a kernel, cmdline, and optionally a ramdisk directory from the input (in
+  'sourcedir'), and turn them into a boot image.  Return the image data, or
+  None if sourcedir does not appear to contain files for building the
+  requested image."""
+
+  def make_ramdisk():
+    ramdisk_img = tempfile.NamedTemporaryFile()
+
+    if os.access(fs_config_file, os.F_OK):
+      cmd = ["mkbootfs", "-f", fs_config_file,
+             os.path.join(sourcedir, "RAMDISK")]
+    else:
+      cmd = ["mkbootfs", os.path.join(sourcedir, "RAMDISK")]
+    p1 = Run(cmd, stdout=subprocess.PIPE)
+    p2 = Run(["minigzip"], stdin=p1.stdout, stdout=ramdisk_img.file.fileno())
+
+    p2.wait()
+    p1.wait()
+    assert p1.returncode == 0, "mkbootfs of %s ramdisk failed" % (sourcedir,)
+    assert p2.returncode == 0, "minigzip of %s ramdisk failed" % (sourcedir,)
+
+    return ramdisk_img
+
+  if not os.access(os.path.join(sourcedir, "kernel"), os.F_OK):
+    return None
+
+  if has_ramdisk and not os.access(os.path.join(sourcedir, "RAMDISK"), os.F_OK):
     return None
 
   if info_dict is None:
     info_dict = OPTIONS.info_dict
 
-  ramdisk_img = tempfile.NamedTemporaryFile()
   img = tempfile.NamedTemporaryFile()
 
-  if os.access(fs_config_file, os.F_OK):
-    cmd = ["mkbootfs", "-f", fs_config_file, os.path.join(sourcedir, "RAMDISK")]
-  else:
-    cmd = ["mkbootfs", os.path.join(sourcedir, "RAMDISK")]
-  p1 = Run(cmd, stdout=subprocess.PIPE)
-  p2 = Run(["minigzip"],
-           stdin=p1.stdout, stdout=ramdisk_img.file.fileno())
-
-  p2.wait()
-  p1.wait()
-  assert p1.returncode == 0, "mkbootfs of %s ramdisk failed" % (sourcedir,)
-  assert p2.returncode == 0, "minigzip of %s ramdisk failed" % (sourcedir,)
+  if has_ramdisk:
+    ramdisk_img = make_ramdisk()
 
   # use MKBOOTIMG from environ, or "mkbootimg" if empty or not set
   mkbootimg = os.getenv('MKBOOTIMG') or "mkbootimg"
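make_ramdisk() above is a classic two-process pipeline (mkbootfs piped into minigzip, captured in a temp file). A generic sketch of the same wiring using stock Unix tools, so it runs without the AOSP binaries (ls | gzip stands in for mkbootfs | minigzip):

import subprocess
import tempfile

out = tempfile.NamedTemporaryFile(suffix=".gz")
p1 = subprocess.Popen(["ls"], stdout=subprocess.PIPE)
p2 = subprocess.Popen(["gzip", "-c"], stdin=p1.stdout, stdout=out)
p1.stdout.close()        # let p1 receive SIGPIPE if p2 exits early
p2.wait()
p1.wait()
assert p1.returncode == 0, "ls failed"
assert p2.returncode == 0, "gzip failed"
print(out.name)          # the compressed listing lives in this temp file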
@@ -360,14 +435,19 @@
   if args and args.strip():
     cmd.extend(shlex.split(args))
 
+  args = info_dict.get("mkbootimg_version_args", None)
+  if args and args.strip():
+    cmd.extend(shlex.split(args))
+
+  if has_ramdisk:
+    cmd.extend(["--ramdisk", ramdisk_img.name])
+
   img_unsigned = None
   if info_dict.get("vboot", None):
     img_unsigned = tempfile.NamedTemporaryFile()
-    cmd.extend(["--ramdisk", ramdisk_img.name,
-                "--output", img_unsigned.name])
+    cmd.extend(["--output", img_unsigned.name])
   else:
-    cmd.extend(["--ramdisk", ramdisk_img.name,
-                "--output", img.name])
+    cmd.extend(["--output", img.name])
 
   p = Run(cmd, stdout=subprocess.PIPE)
   p.communicate()
@@ -407,7 +487,8 @@
   img.seek(os.SEEK_SET, 0)
   data = img.read()
 
-  ramdisk_img.close()
+  if has_ramdisk:
+    ramdisk_img.close()
   img.close()
 
   return data
@@ -415,11 +496,11 @@
 
 def GetBootableImage(name, prebuilt_name, unpack_dir, tree_subdir,
                      info_dict=None):
-  """Return a File object (with name 'name') with the desired bootable
-  image.  Look for it in 'unpack_dir'/BOOTABLE_IMAGES under the name
-  'prebuilt_name', otherwise look for it under 'unpack_dir'/IMAGES,
-  otherwise construct it from the source files in
-  'unpack_dir'/'tree_subdir'."""
+  """Return a File object with the desired bootable image.
+
+  Look for it in 'unpack_dir'/BOOTABLE_IMAGES under the name 'prebuilt_name',
+  otherwise look for it under 'unpack_dir'/IMAGES, otherwise construct it from
+  the source files in 'unpack_dir'/'tree_subdir'."""
 
   prebuilt_path = os.path.join(unpack_dir, "BOOTABLE_IMAGES", prebuilt_name)
   if os.path.exists(prebuilt_path):
@@ -432,10 +513,21 @@
     return File.FromLocalFile(name, prebuilt_path)
 
   print "building image from target_files %s..." % (tree_subdir,)
+
+  if info_dict is None:
+    info_dict = OPTIONS.info_dict
+
+  # With system_root_image == "true", we don't pack ramdisk into the boot image.
+  # Unless "recovery_as_boot" is specified, in which case we carry the ramdisk
+  # for recovery.
+  has_ramdisk = (info_dict.get("system_root_image") != "true" or
+                 prebuilt_name != "boot.img" or
+                 info_dict.get("recovery_as_boot") == "true")
+
   fs_config = "META/" + tree_subdir.lower() + "_filesystem_config.txt"
-  data = BuildBootableImage(os.path.join(unpack_dir, tree_subdir),
-                            os.path.join(unpack_dir, fs_config),
-                            info_dict)
+  data = _BuildBootableImage(os.path.join(unpack_dir, tree_subdir),
+                             os.path.join(unpack_dir, fs_config),
+                             info_dict, has_ramdisk)
   if data:
     return File(name, data)
   return None
@@ -524,37 +616,84 @@
   return key_passwords
 
 
-def SignFile(input_name, output_name, key, password, align=None,
-             whole_file=False):
+def GetMinSdkVersion(apk_name):
+  """Get the minSdkVersion delared in the APK. This can be both a decimal number
+  (API Level) or a codename.
+  """
+
+  p = Run(["aapt", "dump", "badging", apk_name], stdout=subprocess.PIPE)
+  output, err = p.communicate()
+  if err:
+    raise ExternalError("Failed to obtain minSdkVersion: aapt return code %s"
+        % (p.returncode,))
+
+  for line in output.split("\n"):
+    # Looking for lines such as sdkVersion:'23' or sdkVersion:'M'
+    m = re.match(r'sdkVersion:\'([^\']*)\'', line)
+    if m:
+      return m.group(1)
+  raise ExternalError("No minSdkVersion returned by aapt")
+
+
+def GetMinSdkVersionInt(apk_name, codename_to_api_level_map):
+  """Get the minSdkVersion declared in the APK as a number (API Level). If
+  minSdkVersion is set to a codename, it is translated to a number using the
+  provided map.
+  """
+
+  version = GetMinSdkVersion(apk_name)
+  try:
+    return int(version)
+  except ValueError:
+    # Not a decimal number. Codename?
+    if version in codename_to_api_level_map:
+      return codename_to_api_level_map[version]
+    else:
+      raise ExternalError("Unknown minSdkVersion: '%s'. Known codenames: %s"
+                          % (version, codename_to_api_level_map))
+
+
+def SignFile(input_name, output_name, key, password, min_api_level=None,
+    codename_to_api_level_map=dict(),
+    whole_file=False):
   """Sign the input_name zip/jar/apk, producing output_name.  Use the
   given key and password (the latter may be None if the key does not
   have a password).
 
-  If align is an integer > 1, zipalign is run to align stored files in
-  the output zip on 'align'-byte boundaries.
-
   If whole_file is true, use the "-w" option to SignApk to embed a
   signature that covers the whole file in the archive comment of the
   zip file.
+
+  min_api_level is the API Level (int) of the oldest platform this file may end
+  up on. If not specified for an APK, the API Level is obtained by interpreting
+  the minSdkVersion attribute of the APK's AndroidManifest.xml.
+
+  codename_to_api_level_map is needed to translate the codename which may be
+  encountered as the APK's minSdkVersion.
   """
 
-  if align == 0 or align == 1:
-    align = None
+  java_library_path = os.path.join(
+      OPTIONS.search_path, OPTIONS.signapk_shared_library_path)
 
-  if align:
-    temp = tempfile.NamedTemporaryFile()
-    sign_name = temp.name
-  else:
-    sign_name = output_name
-
-  cmd = [OPTIONS.java_path, OPTIONS.java_args, "-jar",
+  cmd = [OPTIONS.java_path, OPTIONS.java_args,
+         "-Djava.library.path=" + java_library_path,
+         "-jar",
          os.path.join(OPTIONS.search_path, OPTIONS.signapk_path)]
   cmd.extend(OPTIONS.extra_signapk_args)
   if whole_file:
     cmd.append("-w")
+
+  min_sdk_version = min_api_level
+  if min_sdk_version is None:
+    if not whole_file:
+      min_sdk_version = GetMinSdkVersionInt(
+          input_name, codename_to_api_level_map)
+  if min_sdk_version is not None:
+    cmd.extend(["--min-sdk-version", str(min_sdk_version)])
+
   cmd.extend([key + OPTIONS.public_key_suffix,
               key + OPTIONS.private_key_suffix,
-              input_name, sign_name])
+              input_name, output_name])
 
   p = Run(cmd, stdin=subprocess.PIPE, stdout=subprocess.PIPE)
   if password is not None:
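The minSdkVersion lookup in GetMinSdkVersion() above shells out to aapt and scans its badging output with a one-line regex; a sketch run against canned aapt-style output, so it works without aapt installed:

import re

sample_output = """package: name='com.example.app' versionCode='1'
sdkVersion:'23'
targetSdkVersion:'25'
"""

def parse_min_sdk(badging_output):
  for line in badging_output.split("\n"):
    m = re.match(r"sdkVersion:'([^']*)'", line)
    if m:
      return m.group(1)
  raise ValueError("no sdkVersion line in aapt output")

print(parse_min_sdk(sample_output))   # prints: 23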
@@ -563,13 +702,6 @@
   if p.returncode != 0:
     raise ExternalError("signapk.jar failed: return code %s" % (p.returncode,))
 
-  if align:
-    p = Run(["zipalign", "-f", "-p", str(align), sign_name, output_name])
-    p.communicate()
-    if p.returncode != 0:
-      raise ExternalError("zipalign failed: return code %s" % (p.returncode,))
-    temp.close()
-
 
 def CheckSize(data, target, info_dict):
   """Check the data string passed against the max size limit, if
@@ -674,7 +806,8 @@
   try:
     opts, args = getopt.getopt(
         argv, "hvp:s:x:" + extra_opts,
-        ["help", "verbose", "path=", "signapk_path=", "extra_signapk_args=",
+        ["help", "verbose", "path=", "signapk_path=",
+         "signapk_shared_library_path=", "extra_signapk_args=",
          "java_path=", "java_args=", "public_key_suffix=",
          "private_key_suffix=", "boot_signer_path=", "boot_signer_args=",
          "verity_signer_path=", "verity_signer_args=", "device_specific=",
@@ -695,6 +828,8 @@
       OPTIONS.search_path = a
     elif o in ("--signapk_path",):
       OPTIONS.signapk_path = a
+    elif o in ("--signapk_shared_library_path",):
+      OPTIONS.signapk_shared_library_path = a
     elif o in ("--extra_signapk_args",):
       OPTIONS.extra_signapk_args = shlex.split(a)
     elif o in ("--java_path",):
@@ -908,7 +1043,7 @@
     zinfo = zipfile.ZipInfo(filename=zinfo_or_arcname)
     zinfo.compress_type = zip_file.compression
     if perms is None:
-      perms = 0o644
+      perms = 0o100644
   else:
     zinfo = zinfo_or_arcname
 
@@ -918,6 +1053,9 @@
 
   # If perms is given, it has a priority.
   if perms is not None:
+    # If perms doesn't set the file type, mark it as a regular file.
+    if perms & 0o770000 == 0:
+      perms |= 0o100000
     zinfo.external_attr = perms << 16
 
   # Use a fixed timestamp so the output is repeatable.
@@ -1021,6 +1159,9 @@
     processor."""
     return self._DoCall("IncrementalOTA_InstallEnd")
 
+  def VerifyOTA_Assertions(self):
+    return self._DoCall("VerifyOTA_Assertions")
+
 class File(object):
   def __init__(self, name, data):
     self.name = name
@@ -1179,9 +1320,6 @@
     self.partition = partition
     self.check_first_block = check_first_block
 
-    # Due to http://b/20939131, check_first_block is disabled temporarily.
-    assert not self.check_first_block
-
     if version is None:
       version = 1
       if OPTIONS.info_dict:
@@ -1196,6 +1334,9 @@
     OPTIONS.tempfiles.append(tmpdir)
     self.path = os.path.join(tmpdir, partition)
     b.Compute(self.path)
+    self._required_cache = b.max_stashed_size
+    self.touched_src_ranges = b.touched_src_ranges
+    self.touched_src_sha1 = b.touched_src_sha1
 
     if src is None:
       _, self.device = GetTypeAndDevice("/" + partition, OPTIONS.info_dict)
@@ -1203,6 +1344,10 @@
       _, self.device = GetTypeAndDevice("/" + partition,
                                         OPTIONS.source_info_dict)
 
+  @property
+  def required_cache(self):
+    return self._required_cache
+
   def WriteScript(self, script, output_zip, progress=None):
     if not self.src:
       # write the output unconditionally
@@ -1213,21 +1358,62 @@
     if progress:
       script.ShowProgress(progress, 0)
     self._WriteUpdate(script, output_zip)
-    self._WritePostInstallVerifyScript(script)
+    if OPTIONS.verify:
+      self._WritePostInstallVerifyScript(script)
 
-  def WriteVerifyScript(self, script):
+  def WriteStrictVerifyScript(self, script):
+    """Verify all the blocks in the care_map, including clobbered blocks.
+
+    This differs from the WriteVerifyScript() function: a) it prints different
+    error messages; b) it doesn't allow half-way updated images to pass the
+    verification."""
+
     partition = self.partition
+    script.Print("Verifying %s..." % (partition,))
+    ranges = self.tgt.care_map
+    ranges_str = ranges.to_string_raw()
+    script.AppendExtra('range_sha1("%s", "%s") == "%s" && '
+                       'ui_print("    Verified.") || '
+                       'ui_print("\\"%s\\" has unexpected contents.");' % (
+                       self.device, ranges_str,
+                       self.tgt.TotalSha1(include_clobbered_blocks=True),
+                       self.device))
+    script.AppendExtra("")
+
+  def WriteVerifyScript(self, script, touched_blocks_only=False):
+    partition = self.partition
+
+    # full OTA
     if not self.src:
       script.Print("Image %s will be patched unconditionally." % (partition,))
+
+    # incremental OTA
     else:
-      ranges = self.src.care_map.subtract(self.src.clobbered_blocks)
+      if touched_blocks_only and self.version >= 3:
+        ranges = self.touched_src_ranges
+        expected_sha1 = self.touched_src_sha1
+      else:
+        ranges = self.src.care_map.subtract(self.src.clobbered_blocks)
+        expected_sha1 = self.src.TotalSha1()
+
+      # No blocks to be checked, skipping.
+      if not ranges:
+        return
+
       ranges_str = ranges.to_string_raw()
-      if self.version >= 3:
+      if self.version >= 4:
         script.AppendExtra(('if (range_sha1("%s", "%s") == "%s" || '
                             'block_image_verify("%s", '
                             'package_extract_file("%s.transfer.list"), '
                             '"%s.new.dat", "%s.patch.dat")) then') % (
-                            self.device, ranges_str, self.src.TotalSha1(),
+                            self.device, ranges_str, expected_sha1,
+                            self.device, partition, partition, partition))
+      elif self.version == 3:
+        script.AppendExtra(('if (range_sha1("%s", "%s") == "%s" || '
+                            'block_image_verify("%s", '
+                            'package_extract_file("%s.transfer.list"), '
+                            '"%s.new.dat", "%s.patch.dat")) then') % (
+                            self.device, ranges_str, expected_sha1,
                             self.device, partition, partition, partition))
       else:
         script.AppendExtra('if range_sha1("%s", "%s") == "%s" then' % (
@@ -1235,22 +1421,36 @@
       script.Print('Verified %s image...' % (partition,))
       script.AppendExtra('else')
 
-      # When generating incrementals for the system and vendor partitions,
-      # explicitly check the first block (which contains the superblock) of
-      # the partition to see if it's what we expect. If this check fails,
-      # give an explicit log message about the partition having been
-      # remounted R/W (the most likely explanation) and the need to flash to
-      # get OTAs working again.
-      if self.check_first_block:
-        self._CheckFirstBlock(script)
+      if self.version >= 4:
+
+        # Bug: 21124327
+        # When generating incrementals for the system and vendor partitions in
+        # version 4 or newer, explicitly check the first block (which contains
+        # the superblock) of the partition to see if it's what we expect. If
+        # this check fails, give an explicit log message about the partition
+        # having been remounted R/W (the most likely explanation).
+        if self.check_first_block:
+          script.AppendExtra('check_first_block("%s");' % (self.device,))
+
+        # If version >= 4, try block recovery before abort update
+        script.AppendExtra((
+            'ifelse (block_image_recover("{device}", "{ranges}") && '
+            'block_image_verify("{device}", '
+            'package_extract_file("{partition}.transfer.list"), '
+            '"{partition}.new.dat", "{partition}.patch.dat"), '
+            'ui_print("{partition} recovered successfully."), '
+            'abort("{partition} partition fails to recover"));\n'
+            'endif;').format(device=self.device, ranges=ranges_str,
+                             partition=partition))
 
       # Abort the OTA update. Note that the incremental OTA cannot be applied
       # even if it may match the checksum of the target partition.
       # a) If version < 3, operations like move and erase will make changes
       #    unconditionally and damage the partition.
       # b) If version >= 3, it won't even reach here.
-      script.AppendExtra(('abort("%s partition has unexpected contents");\n'
-                          'endif;') % (partition,))
+      else:
+        script.AppendExtra(('abort("%s partition has unexpected contents");\n'
+                            'endif;') % (partition,))
 
   def _WritePostInstallVerifyScript(self, script):
     partition = self.partition
@@ -1297,7 +1497,8 @@
 
     call = ('block_image_update("{device}", '
             'package_extract_file("{partition}.transfer.list"), '
-            '"{partition}.new.dat", "{partition}.patch.dat");\n'.format(
+            '"{partition}.new.dat", "{partition}.patch.dat") ||\n'
+            '    abort("Failed to update {partition} image.");'.format(
                 device=self.device, partition=self.partition))
     script.AppendExtra(script.WordWrap(call))
 
@@ -1319,22 +1520,9 @@
 
     return ctx.hexdigest()
 
-  # TODO(tbao): Due to http://b/20939131, block 0 may be changed without
-  # remounting R/W. Will change the checking to a finer-grained way to
-  # mask off those bits.
-  def _CheckFirstBlock(self, script):
-    r = rangelib.RangeSet((0, 1))
-    srchash = self._HashBlocks(self.src, r)
-
-    script.AppendExtra(('(range_sha1("%s", "%s") == "%s") || '
-                        'abort("%s has been remounted R/W; '
-                        'reflash device to reenable OTA updates");')
-                       % (self.device, r.to_string_raw(), srchash,
-                          self.device))
 
 DataImage = blockimgdiff.DataImage
 
-
 # map recovery.fstab's fs_types to mount/format "partition types"
 PARTITION_TYPES = {
     "yaffs2": "MTD",
@@ -1385,18 +1573,25 @@
   if info_dict is None:
     info_dict = OPTIONS.info_dict
 
-  diff_program = ["imgdiff"]
-  path = os.path.join(input_dir, "SYSTEM", "etc", "recovery-resource.dat")
-  if os.path.exists(path):
-    diff_program.append("-b")
-    diff_program.append(path)
-    bonus_args = "-b /system/etc/recovery-resource.dat"
-  else:
-    bonus_args = ""
+  full_recovery_image = info_dict.get("full_recovery_image", None) == "true"
+  system_root_image = info_dict.get("system_root_image", None) == "true"
 
-  d = Difference(recovery_img, boot_img, diff_program=diff_program)
-  _, _, patch = d.ComputePatch()
-  output_sink("recovery-from-boot.p", patch)
+  if full_recovery_image:
+    output_sink("etc/recovery.img", recovery_img.data)
+
+  else:
+    diff_program = ["imgdiff"]
+    path = os.path.join(input_dir, "SYSTEM", "etc", "recovery-resource.dat")
+    if os.path.exists(path):
+      diff_program.append("-b")
+      diff_program.append(path)
+      bonus_args = "-b /system/etc/recovery-resource.dat"
+    else:
+      bonus_args = ""
+
+    d = Difference(recovery_img, boot_img, diff_program=diff_program)
+    _, _, patch = d.ComputePatch()
+    output_sink("recovery-from-boot.p", patch)
 
   try:
     # The following GetTypeAndDevice()s need to use the path in the target
@@ -1406,7 +1601,19 @@
   except KeyError:
     return
 
-  sh = """#!/system/bin/sh
+  if full_recovery_image:
+    sh = """#!/system/bin/sh
+if ! applypatch -c %(type)s:%(device)s:%(size)d:%(sha1)s; then
+  applypatch /system/etc/recovery.img %(type)s:%(device)s %(sha1)s %(size)d && log -t recovery "Installing new recovery image: succeeded" || log -t recovery "Installing new recovery image: failed"
+else
+  log -t recovery "Recovery image already installed"
+fi
+""" % {'type': recovery_type,
+       'device': recovery_device,
+       'sha1': recovery_img.sha1,
+       'size': recovery_img.size}
+  else:
+    sh = """#!/system/bin/sh
 if ! applypatch -c %(recovery_type)s:%(recovery_device)s:%(recovery_size)d:%(recovery_sha1)s; then
   applypatch %(bonus_args)s %(boot_type)s:%(boot_device)s:%(boot_size)d:%(boot_sha1)s %(recovery_type)s:%(recovery_device)s %(recovery_sha1)s %(recovery_size)d %(boot_sha1)s:/system/recovery-from-boot.p && log -t recovery "Installing new recovery image: succeeded" || log -t recovery "Installing new recovery image: failed"
 else
@@ -1427,7 +1634,10 @@
   # target-files expects it to be, and put it there.
   sh_location = "etc/install-recovery.sh"
   found = False
-  init_rc_dir = os.path.join(input_dir, "BOOT", "RAMDISK")
+  if system_root_image:
+    init_rc_dir = os.path.join(input_dir, "ROOT")
+  else:
+    init_rc_dir = os.path.join(input_dir, "BOOT", "RAMDISK")
   init_rc_files = os.listdir(init_rc_dir)
   for init_rc_file in init_rc_files:
     if (not init_rc_file.startswith('init.') or
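A minimal sketch of the branch introduced in the MakeRecoveryPatch hunks above, assuming a zipfile-backed sink; the helper name pack_recovery_artifacts is made up and this is not the releasetools implementation. With full_recovery_image set, the whole recovery image is shipped as SYSTEM/etc/recovery.img; otherwise only an imgdiff patch against boot is packed.

# Hedged sketch only; the helper name and the zipfile-based sink are
# illustrative assumptions, not the releasetools code.
import zipfile

def pack_recovery_artifacts(output_zip_path, recovery_data, patch_data,
                            full_recovery_image):
    # full_recovery_image mirrors info_dict.get("full_recovery_image") == "true".
    with zipfile.ZipFile(output_zip_path, "a") as z:
        if full_recovery_image:
            # Ship the whole image; install-recovery.sh flashes it with applypatch.
            z.writestr("SYSTEM/etc/recovery.img", recovery_data)
        else:
            # Ship only the imgdiff patch that rebuilds recovery from boot.img.
            z.writestr("SYSTEM/recovery-from-boot.p", patch_data)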
diff --git a/tools/releasetools/edify_generator.py b/tools/releasetools/edify_generator.py
index a52e328..57f8cda 100644
--- a/tools/releasetools/edify_generator.py
+++ b/tools/releasetools/edify_generator.py
@@ -23,6 +23,7 @@
   def __init__(self, version, info, fstab=None):
     self.script = []
     self.mounts = set()
+    self._required_cache = 0
     self.version = version
     self.info = info
     if fstab is None:
@@ -38,6 +39,11 @@
     x.mounts = self.mounts
     return x
 
+  @property
+  def required_cache(self):
+    """Return the minimum cache size to apply the update."""
+    return self._required_cache
+
   @staticmethod
   def WordWrap(cmd, linelen=80):
     """'cmd' should be a function call with null characters after each
@@ -77,11 +83,17 @@
       raise ValueError("must specify an OEM property")
     if not value:
       raise ValueError("must specify the OEM value")
-    cmd = ('file_getprop("/oem/oem.prop", "{name}") == "{value}" || '
-           'abort("This package expects the value \\"{value}\\" for '
-           '\\"{name}\\" on the OEM partition; this has value \\"" + '
-           'file_getprop("/oem/oem.prop", "{name}") + "\\".");').format(
-               name=name, value=value)
+    if common.OPTIONS.oem_no_mount:
+      cmd = ('getprop("{name}") == "{value}" || '
+             'abort("This package expects the value \\"{value}\\" for '
+             '\\"{name}\\"; this has value \\"" + '
+             'getprop("{name}") + "\\".");').format(name=name, value=value)
+    else:
+      cmd = ('file_getprop("/oem/oem.prop", "{name}") == "{value}" || '
+             'abort("This package expects the value \\"{value}\\" for '
+             '\\"{name}\\" on the OEM partition; this has value \\"" + '
+             'file_getprop("/oem/oem.prop", "{name}") + "\\".");').format(
+                 name=name, value=value)
     self.script.append(cmd)
 
   def AssertSomeFingerprint(self, *fp):
@@ -152,6 +164,15 @@
         "".join([', "%s"' % (i,) for i in sha1]) +
         ') || abort("\\"%s\\" has unexpected contents.");' % (filename,))
 
+  def Verify(self, filename):
+    """Check that the given file (or MTD reference) has one of the
+    given hashes (encoded in the filename)."""
+    self.script.append(
+        'apply_patch_check("{filename}") && '
+        'ui_print("    Verified.") || '
+        'ui_print("\\"{filename}\\" has unexpected contents.");'.format(
+            filename=filename))
+
   def FileCheck(self, filename, *sha1):
     """Check that the given file (or MTD reference) has one of the
     given *sha1 hashes."""
@@ -162,8 +183,9 @@
   def CacheFreeSpaceCheck(self, amount):
     """Check that there's at least 'amount' space that can be made
     available on /cache."""
+    self._required_cache = max(self._required_cache, amount)
     self.script.append(('apply_patch_space(%d) || abort("Not enough free space '
-                        'on /system to apply patches.");') % (amount,))
+                        'on /cache to apply patches.");') % (amount,))
 
   def Mount(self, mount_point, mount_options_by_format=""):
     """Mount the partition with the given mount_point.
@@ -275,8 +297,8 @@
     cmd = ['apply_patch("%s",\0"%s",\0%s,\0%d'
            % (srcfile, tgtfile, tgtsha1, tgtsize)]
     for i in range(0, len(patchpairs), 2):
-      cmd.append(',\0%s, package_extract_file("%s")' % patchpairs[i:i+2])
-    cmd.append(');')
+      cmd.append(',\0%s,\0package_extract_file("%s")' % patchpairs[i:i+2])
+    cmd.append(') ||\n    abort("Failed to apply patch to %s");' % (srcfile,))
     cmd = "".join(cmd)
     self.script.append(self.WordWrap(cmd))
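A hedged sketch of the cache bookkeeping added to EdifyGenerator above; the class name CacheTracker is illustrative. Each CacheFreeSpaceCheck() call raises the tracked minimum, and the OTA writers later copy the value into the "ota-required-cache" metadata field.

class CacheTracker(object):
    """Illustrative stand-in for the EdifyGenerator fields added above."""

    def __init__(self):
        self._required_cache = 0
        self.script = []

    @property
    def required_cache(self):
        """Minimum free space on /cache needed to apply the update."""
        return self._required_cache

    def CacheFreeSpaceCheck(self, amount):
        # Keep the largest requirement seen so far.
        self._required_cache = max(self._required_cache, amount)
        self.script.append(
            'apply_patch_space(%d) || abort("Not enough free space '
            'on /cache to apply patches.");' % (amount,))

The real generator exposes the same value through script.required_cache, which the full and incremental OTA writers record in the package metadata.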
 
diff --git a/tools/releasetools/img_from_target_files.py b/tools/releasetools/img_from_target_files.py
index c486992..aa21d7e 100755
--- a/tools/releasetools/img_from_target_files.py
+++ b/tools/releasetools/img_from_target_files.py
@@ -95,25 +95,18 @@
       # images, so build them.
       import add_img_to_target_files
 
-      OPTIONS.info_dict = common.LoadInfoDict(input_zip)
-
-      # If this image was originally labelled with SELinux contexts,
-      # make sure we also apply the labels in our new image. During
-      # building, the "file_contexts" is in the out/ directory tree,
-      # but for repacking from target-files.zip it's in the root
-      # directory of the ramdisk.
-      if "selinux_fc" in OPTIONS.info_dict:
-        OPTIONS.info_dict["selinux_fc"] = os.path.join(
-            OPTIONS.input_tmp, "BOOT", "RAMDISK", "file_contexts")
+      OPTIONS.info_dict = common.LoadInfoDict(input_zip, OPTIONS.input_tmp)
 
       boot_image = common.GetBootableImage(
           "boot.img", "boot.img", OPTIONS.input_tmp, "BOOT")
       if boot_image:
         boot_image.AddToZip(output_zip)
-      recovery_image = common.GetBootableImage(
-          "recovery.img", "recovery.img", OPTIONS.input_tmp, "RECOVERY")
-      if recovery_image:
-        recovery_image.AddToZip(output_zip)
+
+      if OPTIONS.info_dict.get("no_recovery") != "true":
+        recovery_image = common.GetBootableImage(
+            "recovery.img", "recovery.img", OPTIONS.input_tmp, "RECOVERY")
+        if recovery_image:
+          recovery_image.AddToZip(output_zip)
 
       def banner(s):
         print "\n\n++++ " + s + " ++++\n\n"
diff --git a/tools/releasetools/ota_from_target_files.py b/tools/releasetools/ota_from_target_files.py
index c5c16b4..861c485 100755
--- a/tools/releasetools/ota_from_target_files.py
+++ b/tools/releasetools/ota_from_target_files.py
@@ -42,10 +42,9 @@
       radio image. This option is only meaningful when -i is specified,
       because a full radio is always included in a full OTA if applicable.
 
- --full_bootloader
-      When generating an incremental OTA, always include a full copy of
-      bootloader image. This option is only meaningful when -i is specified,
-      because a full bootloader is always included in a full OTA if applicable.
+  --full_bootloader
+      Similar to --full_radio. When generating an incremental OTA, always
+      include a full copy of the bootloader image.
 
   -v  (--verify)
       Remount and verify the checksums of the files written to the
@@ -55,6 +54,12 @@
       Use the file to specify the expected OEM-specific properties
       on the OEM partition of the intended device.
 
+  --oem_no_mount
+      For devices with OEM-specific properties but without an OEM partition,
+      do not mount the OEM partition in the updater-script. This should
+      rarely be needed, since devices are expected to have a dedicated OEM
+      partition for OEM-specific properties. Only meaningful when -o is specified.
+
   -w  (--wipe_user_data)
       Generate an OTA package that will wipe the user data partition
       when installed.
@@ -64,6 +69,14 @@
       the build scripts (used for developer OTA packages which
       legitimately need to go back and forth).
 
+  --downgrade
+      Intentionally generate an incremental OTA that updates from a newer
+      build to an older one (based on timestamp comparison). "post-timestamp"
+      will be replaced by "ota-downgrade=yes" in the metadata file. A data
+      wipe will always be enforced, so "ota-wipe=yes" will also be included in
+      the metadata file. The update-binary in the source build will be used in
+      the OTA package, unless the --binary flag is specified.
+
   -e  (--extra_script)  <file>
       Insert the contents of file at the end of the update script.
 
@@ -92,6 +105,14 @@
   --stash_threshold <float>
       Specifies the threshold that will be used to compute the maximum
       allowed stash size (defaults to 0.8).
+
+  --gen_verify
+      Generate an OTA package that verifies the partitions.
+
+  --log_diff <file>
+      Generate a log file that shows the differences between the source and
+      target builds for an incremental package. This option is only
+      meaningful when -i is specified.
 """
 
 import sys
@@ -102,6 +123,7 @@
 
 import multiprocessing
 import os
+import subprocess
 import tempfile
 import zipfile
 
@@ -118,6 +140,7 @@
 OPTIONS.patch_threshold = 0.95
 OPTIONS.wipe_user_data = False
 OPTIONS.omit_prereq = False
+OPTIONS.downgrade = False
 OPTIONS.extra_script = None
 OPTIONS.aslr_mode = True
 OPTIONS.worker_threads = multiprocessing.cpu_count() // 2
@@ -128,12 +151,15 @@
 OPTIONS.block_based = False
 OPTIONS.updater_binary = None
 OPTIONS.oem_source = None
+OPTIONS.oem_no_mount = False
 OPTIONS.fallback_to_full = True
 OPTIONS.full_radio = False
 OPTIONS.full_bootloader = False
 # Stash size cannot exceed cache_size * threshold.
 OPTIONS.cache_size = None
 OPTIONS.stash_threshold = 0.8
+OPTIONS.gen_verify = False
+OPTIONS.log_diff = None
 
 def MostPopularKey(d, default):
   """Given a dict, return the key corresponding to the largest
@@ -148,12 +174,12 @@
 def IsSymlink(info):
   """Return true if the zipfile.ZipInfo object passed in represents a
   symlink."""
-  return (info.external_attr >> 16) == 0o120777
+  return (info.external_attr >> 16) & 0o770000 == 0o120000
 
 def IsRegular(info):
   """Return true if the zipfile.ZipInfo object passed in represents a
-  symlink."""
-  return (info.external_attr >> 28) == 0o10
+  regular file."""
+  return (info.external_attr >> 16) & 0o770000 == 0o100000
 
 def ClosestFileMatch(src, tgtfiles, existing):
   """Returns the closest file match between a source file and list
@@ -223,7 +249,8 @@
         if i.is_dir:
           i.children.sort(key=lambda i: i.name)
 
-    # set metadata for the files generated by this script.
+    # Set metadata for the files generated by this script. For the full
+    # recovery image at system/etc/recovery.img, it is taken care of by fs_config.
     i = self.ITEMS.get("system/recovery-from-boot.p", None)
     if i:
       i.uid, i.gid, i.mode, i.selabel, i.capabilities = 0, 0, 0o644, None, None
@@ -427,11 +454,9 @@
 
 
 def HasRecoveryPatch(target_files_zip):
-  try:
-    target_files_zip.getinfo("SYSTEM/recovery-from-boot.p")
-    return True
-  except KeyError:
-    return False
+  namelist = [name for name in target_files_zip.namelist()]
+  return ("SYSTEM/recovery-from-boot.p" in namelist or
+          "SYSTEM/etc/recovery.img" in namelist)
 
 def HasVendorPartition(target_files_zip):
   try:
@@ -509,7 +534,8 @@
   if oem_props is not None and len(oem_props) > 0:
     if OPTIONS.oem_source is None:
       raise common.ExternalError("OEM source required for this build")
-    script.Mount("/oem", recovery_mount_options)
+    if not OPTIONS.oem_no_mount:
+      script.Mount("/oem", recovery_mount_options)
     oem_dict = common.LoadDictionaryFromLines(
         open(OPTIONS.oem_source).readlines())
 
@@ -533,6 +559,8 @@
   has_recovery_patch = HasRecoveryPatch(input_zip)
   block_based = OPTIONS.block_based and has_recovery_patch
 
+  metadata["ota-type"] = "BLOCK" if block_based else "FILE"
+
   if not OPTIONS.omit_prereq:
     ts = GetBuildProp("ro.build.date.utc", OPTIONS.info_dict)
     ts_text = GetBuildProp("ro.build.date", OPTIONS.info_dict)
@@ -594,6 +622,8 @@
   if HasVendorPartition(input_zip):
     system_progress -= 0.1
 
+  # Place a copy of file_contexts.bin into the OTA package; it will be used
+  # by the recovery program.
   if "selinux_fc" in OPTIONS.info_dict:
     WritePolicyConfig(OPTIONS.info_dict["selinux_fc"], output_zip)
 
@@ -621,8 +651,8 @@
     symlinks = CopyPartitionFiles(system_items, input_zip, output_zip)
     script.MakeSymlinks(symlinks)
 
-  boot_img = common.GetBootableImage("boot.img", "boot.img",
-                                     OPTIONS.input_tmp, "BOOT")
+  boot_img = common.GetBootableImage(
+      "boot.img", "boot.img", OPTIONS.input_tmp, "BOOT")
 
   if not block_based:
     def output_sink(fn, data):
@@ -685,7 +715,10 @@
 endif;
 endif;
 """ % bcb_dev)
+
+  script.SetProgress(1)
   script.AddToZip(input_zip, output_zip, input_path=OPTIONS.updater_binary)
+  metadata["ota-required-cache"] = str(script.required_cache)
   WriteMetadata(metadata, output_zip)
 
 
@@ -734,6 +767,8 @@
 
 
 def WriteBlockIncrementalOTAPackage(target_zip, source_zip, output_zip):
+  # TODO(tbao): We should factor out the common parts between
+  # WriteBlockIncrementalOTAPackage() and WriteIncrementalOTAPackage().
   source_version = OPTIONS.source_info_dict["recovery_api_version"]
   target_version = OPTIONS.target_info_dict["recovery_api_version"]
 
@@ -744,13 +779,45 @@
       source_version, OPTIONS.target_info_dict,
       fstab=OPTIONS.source_info_dict["fstab"])
 
+  oem_props = OPTIONS.info_dict.get("oem_fingerprint_properties")
+  recovery_mount_options = OPTIONS.source_info_dict.get(
+      "recovery_mount_options")
+  oem_dict = None
+  if oem_props is not None and len(oem_props) > 0:
+    if OPTIONS.oem_source is None:
+      raise common.ExternalError("OEM source required for this build")
+    if not OPTIONS.oem_no_mount:
+      script.Mount("/oem", recovery_mount_options)
+    oem_dict = common.LoadDictionaryFromLines(
+        open(OPTIONS.oem_source).readlines())
+
   metadata = {
-      "pre-device": GetBuildProp("ro.product.device",
-                                 OPTIONS.source_info_dict),
-      "post-timestamp": GetBuildProp("ro.build.date.utc",
-                                     OPTIONS.target_info_dict),
+      "pre-device": GetOemProperty("ro.product.device", oem_props, oem_dict,
+                                   OPTIONS.source_info_dict),
+      "ota-type": "BLOCK",
   }
 
+  post_timestamp = GetBuildProp("ro.build.date.utc", OPTIONS.target_info_dict)
+  pre_timestamp = GetBuildProp("ro.build.date.utc", OPTIONS.source_info_dict)
+  is_downgrade = long(post_timestamp) < long(pre_timestamp)
+
+  if OPTIONS.downgrade:
+    metadata["ota-downgrade"] = "yes"
+    if not is_downgrade:
+      raise RuntimeError("--downgrade specified but no downgrade detected: "
+                         "pre: %s, post: %s" % (pre_timestamp, post_timestamp))
+  else:
+    if is_downgrade:
+      # Non-fatal here to allow generating such a package, which may require
+      # manual work to adjust the post-timestamp. A legit use case is that we
+      # cut a new build C (after having A and B), but want to enforce the
+      # update path of A -> C -> B. Specifying --downgrade may not help since
+      # that would enforce a data wipe for C -> B update.
+      print("\nWARNING: downgrade detected: pre: %s, post: %s.\n"
+            "The package may not be deployed properly. "
+            "Try --downgrade?\n" % (pre_timestamp, post_timestamp))
+    metadata["post-timestamp"] = post_timestamp
+
   device_specific = common.DeviceSpecificParams(
       source_zip=source_zip,
       source_version=source_version,
@@ -761,14 +828,10 @@
       metadata=metadata,
       info_dict=OPTIONS.source_info_dict)
 
-  # TODO: Currently this works differently from WriteIncrementalOTAPackage().
-  # This function doesn't consider thumbprints when writing
-  # metadata["pre/post-build"]. One possible reason is that the current
-  # devices with thumbprints are all using file-based OTAs. Long term we
-  # should factor out the common parts into a shared one to avoid further
-  # divergence.
-  source_fp = GetBuildProp("ro.build.fingerprint", OPTIONS.source_info_dict)
-  target_fp = GetBuildProp("ro.build.fingerprint", OPTIONS.target_info_dict)
+  source_fp = CalculateFingerprint(oem_props, oem_dict,
+                                   OPTIONS.source_info_dict)
+  target_fp = CalculateFingerprint(oem_props, oem_dict,
+                                   OPTIONS.target_info_dict)
   metadata["pre-build"] = source_fp
   metadata["post-build"] = target_fp
 
@@ -792,7 +855,12 @@
         int(i) for i in
         OPTIONS.info_dict.get("blockimgdiff_versions", "1").split(","))
 
+  # Check the first block of the system partition for an R/W remount only
+  # if the disk type is ext4.
+  system_partition = OPTIONS.source_info_dict["fstab"]["/system"]
+  check_first_block = system_partition.fs_type == "ext4"
   system_diff = common.BlockDifference("system", system_tgt, system_src,
+                                       check_first_block,
                                        version=blockimgdiff_version)
 
   if HasVendorPartition(target_zip):
@@ -802,22 +870,17 @@
                           OPTIONS.source_info_dict)
     vendor_tgt = GetImage("vendor", OPTIONS.target_tmp,
                           OPTIONS.target_info_dict)
+
+    # Check the first block of the vendor partition for an R/W remount only
+    # if the disk type is ext4.
+    vendor_partition = OPTIONS.source_info_dict["fstab"]["/vendor"]
+    check_first_block = vendor_partition.fs_type == "ext4"
     vendor_diff = common.BlockDifference("vendor", vendor_tgt, vendor_src,
+                                         check_first_block,
                                          version=blockimgdiff_version)
   else:
     vendor_diff = None
 
-  oem_props = OPTIONS.target_info_dict.get("oem_fingerprint_properties")
-  recovery_mount_options = OPTIONS.source_info_dict.get(
-      "recovery_mount_options")
-  oem_dict = None
-  if oem_props is not None and len(oem_props) > 0:
-    if OPTIONS.oem_source is None:
-      raise common.ExternalError("OEM source required for this build")
-    script.Mount("/oem", recovery_mount_options)
-    oem_dict = common.LoadDictionaryFromLines(
-        open(OPTIONS.oem_source).readlines())
-
   AppendAssertions(script, OPTIONS.target_info_dict, oem_dict)
   device_specific.IncrementalOTA_Assertions()
 
@@ -890,6 +953,13 @@
           GetBuildProp("ro.build.thumbprint", OPTIONS.target_info_dict),
           GetBuildProp("ro.build.thumbprint", OPTIONS.source_info_dict))
 
+  # Check the required cache size (i.e. stashed blocks).
+  size = []
+  if system_diff:
+    size.append(system_diff.required_cache)
+  if vendor_diff:
+    size.append(vendor_diff.required_cache)
+
   if updating_boot:
     boot_type, boot_device = common.GetTypeAndDevice(
         "/boot", OPTIONS.source_info_dict)
@@ -910,6 +980,10 @@
                         (boot_type, boot_device,
                          source_boot.size, source_boot.sha1,
                          target_boot.size, target_boot.sha1))
+      size.append(target_boot.size)
+
+  if size:
+    script.CacheFreeSpaceCheck(max(size))
 
   device_specific.IncrementalOTA_VerifyEnd()
 
@@ -922,9 +996,9 @@
 """ % bcb_dev)
 
   # Verify the existing partitions.
-  system_diff.WriteVerifyScript(script)
+  system_diff.WriteVerifyScript(script, touched_blocks_only=True)
   if vendor_diff:
-    vendor_diff.WriteVerifyScript(script)
+    vendor_diff.WriteVerifyScript(script, touched_blocks_only=True)
 
   script.Comment("---- start making changes here ----")
 
@@ -932,6 +1006,7 @@
 
   system_diff.WriteScript(script, output_zip,
                           progress=0.8 if vendor_diff else 0.9)
+
   if vendor_diff:
     vendor_diff.WriteScript(script, output_zip, progress=0.1)
 
@@ -972,6 +1047,7 @@
   if OPTIONS.wipe_user_data:
     script.Print("Erasing user data...")
     script.FormatPartition("/data")
+    metadata["ota-wipe"] = "yes"
 
   if OPTIONS.two_step:
     script.AppendExtra("""
@@ -981,10 +1057,221 @@
 """ % bcb_dev)
 
   script.SetProgress(1)
-  script.AddToZip(target_zip, output_zip, input_path=OPTIONS.updater_binary)
+  # For downgrade OTAs, we prefer to use the update-binary in the source
+  # build that is actually newer than the one in the target build.
+  if OPTIONS.downgrade:
+    script.AddToZip(source_zip, output_zip, input_path=OPTIONS.updater_binary)
+  else:
+    script.AddToZip(target_zip, output_zip, input_path=OPTIONS.updater_binary)
+  metadata["ota-required-cache"] = str(script.required_cache)
   WriteMetadata(metadata, output_zip)
 
 
+def WriteVerifyPackage(input_zip, output_zip):
+  script = edify_generator.EdifyGenerator(3, OPTIONS.info_dict)
+
+  oem_props = OPTIONS.info_dict.get("oem_fingerprint_properties")
+  recovery_mount_options = OPTIONS.info_dict.get(
+      "recovery_mount_options")
+  oem_dict = None
+  if oem_props is not None and len(oem_props) > 0:
+    if OPTIONS.oem_source is None:
+      raise common.ExternalError("OEM source required for this build")
+    script.Mount("/oem", recovery_mount_options)
+    oem_dict = common.LoadDictionaryFromLines(
+        open(OPTIONS.oem_source).readlines())
+
+  target_fp = CalculateFingerprint(oem_props, oem_dict, OPTIONS.info_dict)
+  metadata = {
+      "post-build": target_fp,
+      "pre-device": GetOemProperty("ro.product.device", oem_props, oem_dict,
+                                   OPTIONS.info_dict),
+      "post-timestamp": GetBuildProp("ro.build.date.utc", OPTIONS.info_dict),
+  }
+
+  device_specific = common.DeviceSpecificParams(
+      input_zip=input_zip,
+      input_version=OPTIONS.info_dict["recovery_api_version"],
+      output_zip=output_zip,
+      script=script,
+      input_tmp=OPTIONS.input_tmp,
+      metadata=metadata,
+      info_dict=OPTIONS.info_dict)
+
+  AppendAssertions(script, OPTIONS.info_dict, oem_dict)
+
+  script.Print("Verifying device images against %s..." % target_fp)
+  script.AppendExtra("")
+
+  script.Print("Verifying boot...")
+  boot_img = common.GetBootableImage(
+      "boot.img", "boot.img", OPTIONS.input_tmp, "BOOT")
+  boot_type, boot_device = common.GetTypeAndDevice(
+      "/boot", OPTIONS.info_dict)
+  script.Verify("%s:%s:%d:%s" % (
+      boot_type, boot_device, boot_img.size, boot_img.sha1))
+  script.AppendExtra("")
+
+  script.Print("Verifying recovery...")
+  recovery_img = common.GetBootableImage(
+      "recovery.img", "recovery.img", OPTIONS.input_tmp, "RECOVERY")
+  recovery_type, recovery_device = common.GetTypeAndDevice(
+      "/recovery", OPTIONS.info_dict)
+  script.Verify("%s:%s:%d:%s" % (
+      recovery_type, recovery_device, recovery_img.size, recovery_img.sha1))
+  script.AppendExtra("")
+
+  system_tgt = GetImage("system", OPTIONS.input_tmp, OPTIONS.info_dict)
+  system_tgt.ResetFileMap()
+  system_diff = common.BlockDifference("system", system_tgt, src=None)
+  system_diff.WriteStrictVerifyScript(script)
+
+  if HasVendorPartition(input_zip):
+    vendor_tgt = GetImage("vendor", OPTIONS.input_tmp, OPTIONS.info_dict)
+    vendor_tgt.ResetFileMap()
+    vendor_diff = common.BlockDifference("vendor", vendor_tgt, src=None)
+    vendor_diff.WriteStrictVerifyScript(script)
+
+  # Device-specific partitions, such as radio and bootloader.
+  device_specific.VerifyOTA_Assertions()
+
+  script.SetProgress(1.0)
+  script.AddToZip(input_zip, output_zip, input_path=OPTIONS.updater_binary)
+  metadata["ota-required-cache"] = str(script.required_cache)
+  WriteMetadata(metadata, output_zip)
+
+
+def WriteABOTAPackageWithBrilloScript(target_file, output_file,
+                                      source_file=None):
+  """Generate an Android OTA package that has A/B update payload."""
+
+  # Setup signing keys.
+  if OPTIONS.package_key is None:
+    OPTIONS.package_key = OPTIONS.info_dict.get(
+        "default_system_dev_certificate",
+        "build/target/product/security/testkey")
+
+  # A/B updater expects key in RSA format.
+  cmd = ["openssl", "pkcs8",
+         "-in", OPTIONS.package_key + OPTIONS.private_key_suffix,
+         "-inform", "DER", "-nocrypt"]
+  rsa_key = common.MakeTempFile(prefix="key-", suffix=".key")
+  cmd.extend(["-out", rsa_key])
+  p1 = common.Run(cmd, stdout=subprocess.PIPE)
+  p1.wait()
+  assert p1.returncode == 0, "openssl pkcs8 failed"
+
+  # Stage the output zip package for signing.
+  temp_zip_file = tempfile.NamedTemporaryFile()
+  output_zip = zipfile.ZipFile(temp_zip_file, "w",
+                               compression=zipfile.ZIP_DEFLATED)
+
+  # Metadata to comply with Android OTA package format.
+  oem_props = OPTIONS.info_dict.get("oem_fingerprint_properties", None)
+  oem_dict = None
+  if oem_props:
+    if OPTIONS.oem_source is None:
+      raise common.ExternalError("OEM source required for this build")
+    oem_dict = common.LoadDictionaryFromLines(
+        open(OPTIONS.oem_source).readlines())
+
+  metadata = {
+      "post-build": CalculateFingerprint(oem_props, oem_dict,
+                                         OPTIONS.info_dict),
+      "pre-device": GetOemProperty("ro.product.device", oem_props, oem_dict,
+                                   OPTIONS.info_dict),
+      "post-timestamp": GetBuildProp("ro.build.date.utc", OPTIONS.info_dict),
+      "ota-required-cache": "0",
+      "ota-type": "AB",
+  }
+
+  if source_file is not None:
+    metadata["pre-build"] = CalculateFingerprint(oem_props, oem_dict,
+                                                 OPTIONS.source_info_dict)
+
+  # 1. Generate payload.
+  payload_file = common.MakeTempFile(prefix="payload-", suffix=".bin")
+  cmd = ["brillo_update_payload", "generate",
+         "--payload", payload_file,
+         "--target_image", target_file]
+  if source_file is not None:
+    cmd.extend(["--source_image", source_file])
+  p1 = common.Run(cmd, stdout=subprocess.PIPE)
+  p1.wait()
+  assert p1.returncode == 0, "brillo_update_payload generate failed"
+
+  # 2. Generate hashes of the payload and metadata files.
+  payload_sig_file = common.MakeTempFile(prefix="sig-", suffix=".bin")
+  metadata_sig_file = common.MakeTempFile(prefix="sig-", suffix=".bin")
+  cmd = ["brillo_update_payload", "hash",
+         "--unsigned_payload", payload_file,
+         "--signature_size", "256",
+         "--metadata_hash_file", metadata_sig_file,
+         "--payload_hash_file", payload_sig_file]
+  p1 = common.Run(cmd, stdout=subprocess.PIPE)
+  p1.wait()
+  assert p1.returncode == 0, "brillo_update_payload hash failed"
+
+  # 3. Sign the hashes and insert them back into the payload file.
+  signed_payload_sig_file = common.MakeTempFile(prefix="signed-sig-",
+                                                suffix=".bin")
+  signed_metadata_sig_file = common.MakeTempFile(prefix="signed-sig-",
+                                                 suffix=".bin")
+  # 3a. Sign the payload hash.
+  cmd = ["openssl", "pkeyutl", "-sign",
+         "-inkey", rsa_key,
+         "-pkeyopt", "digest:sha256",
+         "-in", payload_sig_file,
+         "-out", signed_payload_sig_file]
+  p1 = common.Run(cmd, stdout=subprocess.PIPE)
+  p1.wait()
+  assert p1.returncode == 0, "openssl sign payload failed"
+
+  # 3b. Sign the metadata hash.
+  cmd = ["openssl", "pkeyutl", "-sign",
+         "-inkey", rsa_key,
+         "-pkeyopt", "digest:sha256",
+         "-in", metadata_sig_file,
+         "-out", signed_metadata_sig_file]
+  p1 = common.Run(cmd, stdout=subprocess.PIPE)
+  p1.wait()
+  assert p1.returncode == 0, "openssl sign metadata failed"
+
+  # 3c. Insert the signatures back into the payload file.
+  signed_payload_file = common.MakeTempFile(prefix="signed-payload-",
+                                            suffix=".bin")
+  cmd = ["brillo_update_payload", "sign",
+         "--unsigned_payload", payload_file,
+         "--payload", signed_payload_file,
+         "--signature_size", "256",
+         "--metadata_signature_file", signed_metadata_sig_file,
+         "--payload_signature_file", signed_payload_sig_file]
+  p1 = common.Run(cmd, stdout=subprocess.PIPE)
+  p1.wait()
+  assert p1.returncode == 0, "brillo_update_payload sign failed"
+
+  # 4. Dump the signed payload properties.
+  properties_file = common.MakeTempFile(prefix="payload-properties-",
+                                        suffix=".txt")
+  cmd = ["brillo_update_payload", "properties",
+         "--payload", signed_payload_file,
+         "--properties_file", properties_file]
+  p1 = common.Run(cmd, stdout=subprocess.PIPE)
+  p1.wait()
+  assert p1.returncode == 0, "brillo_update_payload properties failed"
+
+  # Add the signed payload file and properties into the zip.
+  common.ZipWrite(output_zip, properties_file, arcname="payload_properties.txt")
+  common.ZipWrite(output_zip, signed_payload_file, arcname="payload.bin",
+                  compress_type=zipfile.ZIP_STORED)
+  WriteMetadata(metadata, output_zip)
+
+  # Sign the whole package to comply with the Android OTA package format.
+  common.ZipClose(output_zip)
+  SignOutput(temp_zip_file.name, output_file)
+  temp_zip_file.close()
+
+
 class FileDifference(object):
   def __init__(self, partition, source_zip, target_zip, output_zip):
     self.deferred_patch_list = None
@@ -1075,11 +1362,13 @@
       script.FileCheck(tf.name, tf.sha1)
 
   def RemoveUnneededFiles(self, script, extras=()):
-    script.DeleteFiles(
-        ["/" + i[0] for i in self.verbatim_targets] +
-        ["/" + i for i in sorted(self.source_data)
-         if i not in self.target_data and i not in self.renames] +
-        list(extras))
+    file_list = ["/" + i[0] for i in self.verbatim_targets]
+    file_list += ["/" + i for i in self.source_data
+                  if i not in self.target_data and i not in self.renames]
+    file_list += list(extras)
+    # Sort the list in descending order, so that files are removed before
+    # the folders that contain them. (Bug: 22960996)
+    script.DeleteFiles(sorted(file_list, reverse=True))
 
   def TotalPatchSize(self):
     return sum(i[1].size for i in self.patch_list)
@@ -1140,17 +1429,38 @@
   if oem_props is not None and len(oem_props) > 0:
     if OPTIONS.oem_source is None:
       raise common.ExternalError("OEM source required for this build")
-    script.Mount("/oem", recovery_mount_options)
+    if not OPTIONS.oem_no_mount:
+      script.Mount("/oem", recovery_mount_options)
     oem_dict = common.LoadDictionaryFromLines(
         open(OPTIONS.oem_source).readlines())
 
   metadata = {
       "pre-device": GetOemProperty("ro.product.device", oem_props, oem_dict,
                                    OPTIONS.source_info_dict),
-      "post-timestamp": GetBuildProp("ro.build.date.utc",
-                                     OPTIONS.target_info_dict),
+      "ota-type": "FILE",
   }
 
+  post_timestamp = GetBuildProp("ro.build.date.utc", OPTIONS.target_info_dict)
+  pre_timestamp = GetBuildProp("ro.build.date.utc", OPTIONS.source_info_dict)
+  is_downgrade = long(post_timestamp) < long(pre_timestamp)
+
+  if OPTIONS.downgrade:
+    metadata["ota-downgrade"] = "yes"
+    if not is_downgrade:
+      raise RuntimeError("--downgrade specified but no downgrade detected: "
+                         "pre: %s, post: %s" % (pre_timestamp, post_timestamp))
+  else:
+    if is_downgrade:
+      # Non-fatal here to allow generating such a package, which may require
+      # manual work to adjust the post-timestamp. A legit use case is that we
+      # cut a new build C (after having A and B), but want to enforce the
+      # update path of A -> C -> B. Specifying --downgrade may not help since
+      # that would enforce a data wipe for C -> B update.
+      print("\nWARNING: downgrade detected: pre: %s, post: %s.\n"
+            "The package may not be deployed properly. "
+            "Try --downgrade?\n" % (pre_timestamp, post_timestamp))
+    metadata["post-timestamp"] = post_timestamp
+
   device_specific = common.DeviceSpecificParams(
       source_zip=source_zip,
       source_version=source_version,
@@ -1262,6 +1572,13 @@
   if vendor_diff:
     so_far += vendor_diff.EmitVerification(script)
 
+  size = []
+  if system_diff.patch_list:
+    size.append(system_diff.largest_source_size)
+  if vendor_diff:
+    if vendor_diff.patch_list:
+      size.append(vendor_diff.largest_source_size)
+
   if updating_boot:
     d = common.Difference(target_boot, source_boot)
     _, _, d = d.ComputePatch()
@@ -1278,14 +1595,9 @@
                        source_boot.size, source_boot.sha1,
                        target_boot.size, target_boot.sha1))
     so_far += source_boot.size
+    size.append(target_boot.size)
 
-  size = []
-  if system_diff.patch_list:
-    size.append(system_diff.largest_source_size)
-  if vendor_diff:
-    if vendor_diff.patch_list:
-      size.append(vendor_diff.largest_source_size)
-  if size or updating_recovery or updating_boot:
+  if size:
     script.CacheFreeSpaceCheck(max(size))
 
   device_specific.IncrementalOTA_VerifyEnd()
@@ -1366,6 +1678,7 @@
       common.MakeRecoveryPatch(OPTIONS.target_tmp, output_sink,
                                target_recovery, target_boot)
       script.DeleteFiles(["/system/recovery-from-boot.p",
+                          "/system/etc/recovery.img",
                           "/system/etc/install-recovery.sh"])
     print "recovery image changed; including as patch from boot."
   else:
@@ -1476,6 +1789,7 @@
   if OPTIONS.wipe_user_data:
     script.Print("Erasing user data...")
     script.FormatPartition("/data")
+    metadata["ota-wipe"] = "yes"
 
   if OPTIONS.two_step:
     script.AppendExtra("""
@@ -1487,16 +1801,23 @@
   if OPTIONS.verify and system_diff:
     script.Print("Remounting and verifying system partition files...")
     script.Unmount("/system")
-    script.Mount("/system")
+    script.Mount("/system", recovery_mount_options)
     system_diff.EmitExplicitTargetVerification(script)
 
   if OPTIONS.verify and vendor_diff:
     script.Print("Remounting and verifying vendor partition files...")
     script.Unmount("/vendor")
-    script.Mount("/vendor")
+    script.Mount("/vendor", recovery_mount_options)
     vendor_diff.EmitExplicitTargetVerification(script)
-  script.AddToZip(target_zip, output_zip, input_path=OPTIONS.updater_binary)
 
+  # For downgrade OTAs, we prefer to use the update-binary in the source
+  # build that is actually newer than the one in the target build.
+  if OPTIONS.downgrade:
+    script.AddToZip(source_zip, output_zip, input_path=OPTIONS.updater_binary)
+  else:
+    script.AddToZip(target_zip, output_zip, input_path=OPTIONS.updater_binary)
+
+  metadata["ota-required-cache"] = str(script.required_cache)
   WriteMetadata(metadata, output_zip)
 
 
@@ -1517,8 +1838,13 @@
       OPTIONS.wipe_user_data = True
     elif o in ("-n", "--no_prereq"):
       OPTIONS.omit_prereq = True
+    elif o == "--downgrade":
+      OPTIONS.downgrade = True
+      OPTIONS.wipe_user_data = True
     elif o in ("-o", "--oem_settings"):
       OPTIONS.oem_source = a
+    elif o == "--oem_no_mount":
+      OPTIONS.oem_no_mount = True
     elif o in ("-e", "--extra_script"):
       OPTIONS.extra_script = a
     elif o in ("-a", "--aslr_mode"):
@@ -1550,6 +1876,10 @@
       except ValueError:
         raise ValueError("Cannot parse value %r for option %r - expecting "
                          "a float" % (a, o))
+    elif o == "--gen_verify":
+      OPTIONS.gen_verify = True
+    elif o == "--log_diff":
+      OPTIONS.log_diff = a
     else:
       return False
     return True
@@ -1564,6 +1894,7 @@
                                  "full_bootloader",
                                  "wipe_user_data",
                                  "no_prereq",
+                                 "downgrade",
                                  "extra_script=",
                                  "worker_threads=",
                                  "aslr_mode=",
@@ -1572,15 +1903,61 @@
                                  "block",
                                  "binary=",
                                  "oem_settings=",
+                                 "oem_no_mount",
                                  "verify",
                                  "no_fallback_to_full",
                                  "stash_threshold=",
+                                 "gen_verify",
+                                 "log_diff=",
                              ], extra_option_handler=option_handler)
 
   if len(args) != 2:
     common.Usage(__doc__)
     sys.exit(1)
 
+  if OPTIONS.downgrade:
+    # Sanity check to enforce a data wipe.
+    if not OPTIONS.wipe_user_data:
+      raise ValueError("Cannot downgrade without a data wipe")
+
+    # We should only allow downgrading incrementals (as opposed to full).
+    # Otherwise the device may go back from an arbitrary build with this
+    # full OTA package.
+    if OPTIONS.incremental_source is None:
+      raise ValueError("Cannot generate downgradable full OTAs - consider"
+                       "using --omit_prereq?")
+
+  # Load the dict file from the zip directly to have a peek at the OTA type.
+  # For packages using A/B update, unzipping is not needed.
+  input_zip = zipfile.ZipFile(args[0], "r")
+  OPTIONS.info_dict = common.LoadInfoDict(input_zip)
+  common.ZipClose(input_zip)
+
+  ab_update = OPTIONS.info_dict.get("ab_update") == "true"
+
+  if ab_update:
+    if OPTIONS.incremental_source is not None:
+      OPTIONS.target_info_dict = OPTIONS.info_dict
+      source_zip = zipfile.ZipFile(OPTIONS.incremental_source, "r")
+      OPTIONS.source_info_dict = common.LoadInfoDict(source_zip)
+      common.ZipClose(source_zip)
+
+    if OPTIONS.verbose:
+      print "--- target info ---"
+      common.DumpInfoDict(OPTIONS.info_dict)
+
+      if OPTIONS.incremental_source is not None:
+        print "--- source info ---"
+        common.DumpInfoDict(OPTIONS.source_info_dict)
+
+    WriteABOTAPackageWithBrilloScript(
+        target_file=args[0],
+        output_file=args[1],
+        source_file=OPTIONS.incremental_source)
+
+    print "done."
+    return
+
   if OPTIONS.extra_script is not None:
     OPTIONS.extra_script = open(OPTIONS.extra_script).read()
 
@@ -1588,15 +1965,7 @@
   OPTIONS.input_tmp, input_zip = common.UnzipTemp(args[0])
 
   OPTIONS.target_tmp = OPTIONS.input_tmp
-  OPTIONS.info_dict = common.LoadInfoDict(input_zip)
-
-  # If this image was originally labelled with SELinux contexts, make sure we
-  # also apply the labels in our new image. During building, the "file_contexts"
-  # is in the out/ directory tree, but for repacking from target-files.zip it's
-  # in the root directory of the ramdisk.
-  if "selinux_fc" in OPTIONS.info_dict:
-    OPTIONS.info_dict["selinux_fc"] = os.path.join(
-        OPTIONS.input_tmp, "BOOT", "RAMDISK", "file_contexts")
+  OPTIONS.info_dict = common.LoadInfoDict(input_zip, OPTIONS.target_tmp)
 
   if OPTIONS.verbose:
     print "--- target info ---"
@@ -1620,59 +1989,74 @@
   if OPTIONS.device_specific is not None:
     OPTIONS.device_specific = os.path.abspath(OPTIONS.device_specific)
 
-  while True:
+  if OPTIONS.info_dict.get("no_recovery") == "true":
+    raise common.ExternalError(
+        "--- target build has specified no recovery ---")
 
-    if OPTIONS.no_signing:
-      if os.path.exists(args[1]):
-        os.unlink(args[1])
-      output_zip = zipfile.ZipFile(args[1], "w",
-                                   compression=zipfile.ZIP_DEFLATED)
-    else:
-      temp_zip_file = tempfile.NamedTemporaryFile()
-      output_zip = zipfile.ZipFile(temp_zip_file, "w",
-                                   compression=zipfile.ZIP_DEFLATED)
+  # Use the default key to sign the package if not specified with package_key.
+  if not OPTIONS.no_signing:
+    if OPTIONS.package_key is None:
+      OPTIONS.package_key = OPTIONS.info_dict.get(
+          "default_system_dev_certificate",
+          "build/target/product/security/testkey")
 
-    cache_size = OPTIONS.info_dict.get("cache_size", None)
-    if cache_size is None:
-      raise RuntimeError("can't determine the cache partition size")
-    OPTIONS.cache_size = cache_size
+  # Set up the output zip. Create a temporary zip file if signing is needed.
+  if OPTIONS.no_signing:
+    if os.path.exists(args[1]):
+      os.unlink(args[1])
+    output_zip = zipfile.ZipFile(args[1], "w",
+                                 compression=zipfile.ZIP_DEFLATED)
+  else:
+    temp_zip_file = tempfile.NamedTemporaryFile()
+    output_zip = zipfile.ZipFile(temp_zip_file, "w",
+                                 compression=zipfile.ZIP_DEFLATED)
 
-    if OPTIONS.incremental_source is None:
+  # Non-A/B OTAs rely on the /cache partition to store temporary files.
+  cache_size = OPTIONS.info_dict.get("cache_size", None)
+  if cache_size is None:
+    print "--- can't determine the cache partition size ---"
+  OPTIONS.cache_size = cache_size
+
+  # Generate a verify package.
+  if OPTIONS.gen_verify:
+    WriteVerifyPackage(input_zip, output_zip)
+
+  # Generate a full OTA.
+  elif OPTIONS.incremental_source is None:
+    WriteFullOTAPackage(input_zip, output_zip)
+
+  # Generate an incremental OTA. It will fall back to generating a full OTA
+  # on failure unless no_fallback_to_full is specified.
+  else:
+    print "unzipping source target-files..."
+    OPTIONS.source_tmp, source_zip = common.UnzipTemp(
+        OPTIONS.incremental_source)
+    OPTIONS.target_info_dict = OPTIONS.info_dict
+    OPTIONS.source_info_dict = common.LoadInfoDict(source_zip,
+                                                   OPTIONS.source_tmp)
+    if OPTIONS.verbose:
+      print "--- source info ---"
+      common.DumpInfoDict(OPTIONS.source_info_dict)
+    try:
+      WriteIncrementalOTAPackage(input_zip, source_zip, output_zip)
+      if OPTIONS.log_diff:
+        out_file = open(OPTIONS.log_diff, 'w')
+        import target_files_diff
+        target_files_diff.recursiveDiff('',
+                                        OPTIONS.source_tmp,
+                                        OPTIONS.input_tmp,
+                                        out_file)
+        out_file.close()
+    except ValueError:
+      if not OPTIONS.fallback_to_full:
+        raise
+      print "--- failed to build incremental; falling back to full ---"
+      OPTIONS.incremental_source = None
       WriteFullOTAPackage(input_zip, output_zip)
-      if OPTIONS.package_key is None:
-        OPTIONS.package_key = OPTIONS.info_dict.get(
-            "default_system_dev_certificate",
-            "build/target/product/security/testkey")
-      common.ZipClose(output_zip)
-      break
 
-    else:
-      print "unzipping source target-files..."
-      OPTIONS.source_tmp, source_zip = common.UnzipTemp(
-          OPTIONS.incremental_source)
-      OPTIONS.target_info_dict = OPTIONS.info_dict
-      OPTIONS.source_info_dict = common.LoadInfoDict(source_zip)
-      if "selinux_fc" in OPTIONS.source_info_dict:
-        OPTIONS.source_info_dict["selinux_fc"] = os.path.join(
-            OPTIONS.source_tmp, "BOOT", "RAMDISK", "file_contexts")
-      if OPTIONS.package_key is None:
-        OPTIONS.package_key = OPTIONS.source_info_dict.get(
-            "default_system_dev_certificate",
-            "build/target/product/security/testkey")
-      if OPTIONS.verbose:
-        print "--- source info ---"
-        common.DumpInfoDict(OPTIONS.source_info_dict)
-      try:
-        WriteIncrementalOTAPackage(input_zip, source_zip, output_zip)
-        common.ZipClose(output_zip)
-        break
-      except ValueError:
-        if not OPTIONS.fallback_to_full:
-          raise
-        print "--- failed to build incremental; falling back to full ---"
-        OPTIONS.incremental_source = None
-        common.ZipClose(output_zip)
+  common.ZipClose(output_zip)
 
+  # Sign the generated zip package unless no_signing is specified.
   if not OPTIONS.no_signing:
     SignOutput(temp_zip_file.name, args[1])
     temp_zip_file.close()
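Both incremental writers above share the same downgrade decision; a minimal sketch, assuming the timestamps are the ro.build.date.utc values and using a made-up helper name:

# Hedged sketch of the shared timestamp logic, not the actual
# Write*IncrementalOTAPackage() code.
def fill_downgrade_metadata(metadata, pre_timestamp, post_timestamp, downgrade):
    is_downgrade = int(post_timestamp) < int(pre_timestamp)
    if downgrade:
        # A data wipe is enforced separately ("ota-wipe=yes" via --wipe_user_data).
        metadata["ota-downgrade"] = "yes"
        if not is_downgrade:
            raise RuntimeError("--downgrade specified but no downgrade detected: "
                               "pre: %s, post: %s" % (pre_timestamp, post_timestamp))
    else:
        if is_downgrade:
            # Non-fatal, e.g. to allow an enforced A -> C -> B update path.
            print("WARNING: downgrade detected: pre: %s, post: %s. "
                  "Try --downgrade?" % (pre_timestamp, post_timestamp))
        metadata["post-timestamp"] = post_timestamp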
diff --git a/tools/releasetools/rangelib.py b/tools/releasetools/rangelib.py
index 373bbed..aa572cc 100644
--- a/tools/releasetools/rangelib.py
+++ b/tools/releasetools/rangelib.py
@@ -24,12 +24,13 @@
   lots of runs."""
 
   def __init__(self, data=None):
-    # TODO(tbao): monotonic is broken when passing in a tuple.
     self.monotonic = False
     if isinstance(data, str):
       self._parse_internal(data)
     elif data:
+      assert len(data) % 2 == 0
       self.data = tuple(self._remove_pairs(data))
+      self.monotonic = all(x < y for x, y in zip(self.data, self.data[1:]))
     else:
       self.data = ()
 
@@ -39,8 +40,10 @@
 
   def __eq__(self, other):
     return self.data == other.data
+
   def __ne__(self, other):
     return self.data != other.data
+
   def __nonzero__(self):
     return bool(self.data)
 
@@ -74,9 +77,9 @@
     monotonic = True
     for p in text.split():
       if "-" in p:
-        s, e = p.split("-")
-        data.append(int(s))
-        data.append(int(e)+1)
+        s, e = (int(x) for x in p.split("-"))
+        data.append(s)
+        data.append(e+1)
         if last <= s <= e:
           last = e
         else:
@@ -88,13 +91,16 @@
         if last <= s:
           last = s+1
         else:
-          monotonic = True
+          monotonic = False
     data.sort()
     self.data = tuple(self._remove_pairs(data))
     self.monotonic = monotonic
 
   @staticmethod
   def _remove_pairs(source):
+    """Remove consecutive duplicate items to simplify the result.
+
+    [1, 2, 2, 5, 5, 10] will become [1, 10]."""
     last = None
     for i in source:
       if i == last:
@@ -117,6 +123,7 @@
     return " ".join(out)
 
   def to_string_raw(self):
+    assert self.data
     return str(len(self.data)) + "," + ",".join(str(i) for i in self.data)
 
   def union(self, other):
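The rangelib changes above hinge on two small behaviours: _remove_pairs() coalesces touching ranges by dropping duplicated boundary values, and monotonic is recomputed as a strict ordering of those boundaries. A hedged, self-contained sketch; remove_pairs here only reproduces the documented behaviour and is not the module itself.

def remove_pairs(source):
    """[1, 2, 2, 5, 5, 10] becomes [1, 10]: touching ranges are coalesced."""
    last = None
    for i in source:
        if i == last:
            last = None          # drop both copies of the duplicate boundary
        else:
            if last is not None:
                yield last
            last = i
    if last is not None:
        yield last

data = tuple(remove_pairs([1, 2, 2, 5, 5, 10]))
monotonic = all(x < y for x, y in zip(data, data[1:]))
print("%s monotonic=%s" % (data, monotonic))   # (1, 10) monotonic=True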
diff --git a/tools/releasetools/sign_target_files_apks.py b/tools/releasetools/sign_target_files_apks.py
index 60d62c2..f758ae0 100755
--- a/tools/releasetools/sign_target_files_apks.py
+++ b/tools/releasetools/sign_target_files_apks.py
@@ -127,14 +127,34 @@
     sys.exit(1)
 
 
-def SignApk(data, keyname, pw):
+def SignApk(data, keyname, pw, platform_api_level, codename_to_api_level_map):
   unsigned = tempfile.NamedTemporaryFile()
   unsigned.write(data)
   unsigned.flush()
 
   signed = tempfile.NamedTemporaryFile()
 
-  common.SignFile(unsigned.name, signed.name, keyname, pw, align=4)
+  # For pre-N builds, don't upgrade to SHA-256 JAR signatures based on the
+  # APK's minSdkVersion, to avoid increasing incremental OTA update sizes. If
+  # an APK didn't change, we don't want its signature to change just because
+  # of the switch from SHA-1 to SHA-256.
+  # By default, the APK signer chooses SHA-256 signatures if the APK's
+  # minSdkVersion is 18 or higher. For pre-N builds we disable this mechanism
+  # by pretending that the APK's minSdkVersion is 1. For N+ builds, we let the
+  # APK signer rely on the APK's minSdkVersion to determine whether to use
+  # SHA-256.
+  min_api_level = None
+  if platform_api_level > 23:
+    # Let APK signer choose whether to use SHA-1 or SHA-256, based on the APK's
+    # minSdkVersion attribute
+    min_api_level = None
+  else:
+    # Force APK signer to use SHA-1
+    min_api_level = 1
+
+  common.SignFile(unsigned.name, signed.name, keyname, pw,
+      min_api_level=min_api_level,
+      codename_to_api_level_map=codename_to_api_level_map)
 
   data = signed.read()
   unsigned.close()
@@ -144,7 +164,8 @@
 
 
 def ProcessTargetFiles(input_tf_zip, output_tf_zip, misc_info,
-                       apk_key_map, key_passwords):
+                       apk_key_map, key_passwords, platform_api_level,
+                       codename_to_api_level_map):
 
   maxsize = max([len(os.path.basename(i.filename))
                  for i in input_tf_zip.infolist()
@@ -175,27 +196,33 @@
     data = input_tf_zip.read(info.filename)
     out_info = copy.copy(info)
 
+    # Replace keys if requested.
     if (info.filename == "META/misc_info.txt" and
         OPTIONS.replace_verity_private_key):
       ReplaceVerityPrivateKey(input_tf_zip, output_tf_zip, misc_info,
                               OPTIONS.replace_verity_private_key[1])
-    elif (info.filename == "BOOT/RAMDISK/verity_key" and
+    elif (info.filename in ("BOOT/RAMDISK/verity_key",
+                            "BOOT/verity_key") and
           OPTIONS.replace_verity_public_key):
-      new_data = ReplaceVerityPublicKey(output_tf_zip,
+      new_data = ReplaceVerityPublicKey(output_tf_zip, info.filename,
                                         OPTIONS.replace_verity_public_key[1])
       write_to_temp(info.filename, info.external_attr, new_data)
+    # Copy BOOT/, RECOVERY/, META/, ROOT/ to rebuild recovery patch.
     elif (info.filename.startswith("BOOT/") or
           info.filename.startswith("RECOVERY/") or
           info.filename.startswith("META/") or
+          info.filename.startswith("ROOT/") or
           info.filename == "SYSTEM/etc/recovery-resource.dat"):
       write_to_temp(info.filename, info.external_attr, data)
 
+    # Sign APKs.
     if info.filename.endswith(".apk"):
       name = os.path.basename(info.filename)
       key = apk_key_map[name]
       if key not in common.SPECIAL_CERT_STRINGS:
         print "    signing: %-*s (%s)" % (maxsize, name, key)
-        signed_data = SignApk(data, key, key_passwords[key])
+        signed_data = SignApk(data, key, key_passwords[key], platform_api_level,
+            codename_to_api_level_map)
         common.ZipWriteStr(output_tf_zip, out_info, signed_data)
       else:
         # an APK we're not supposed to sign.
@@ -216,6 +243,7 @@
       new_data = ReplaceCerts(data)
       common.ZipWriteStr(output_tf_zip, out_info, new_data)
     elif info.filename in ("SYSTEM/recovery-from-boot.p",
+                           "SYSTEM/etc/recovery.img",
                            "SYSTEM/bin/install-recovery.sh"):
       rebuild_recovery = True
     elif (OPTIONS.replace_ota_keys and
@@ -227,7 +255,8 @@
           info.filename == "META/misc_info.txt"):
       pass
     elif (OPTIONS.replace_verity_public_key and
-          info.filename == "BOOT/RAMDISK/verity_key"):
+          info.filename in ("BOOT/RAMDISK/verity_key",
+                            "BOOT/verity_key")):
       pass
     else:
       # a non-APK file; copy it verbatim
@@ -392,18 +421,18 @@
   temp_file = cStringIO.StringIO()
   certs_zip = zipfile.ZipFile(temp_file, "w")
   for k in mapped_keys:
-    certs_zip.write(k)
-  certs_zip.close()
+    common.ZipWrite(certs_zip, k)
+  common.ZipClose(certs_zip)
   common.ZipWriteStr(output_tf_zip, "SYSTEM/etc/security/otacerts.zip",
                      temp_file.getvalue())
 
   return new_recovery_keys
 
-def ReplaceVerityPublicKey(targetfile_zip, key_path):
+def ReplaceVerityPublicKey(targetfile_zip, filename, key_path):
   print "Replacing verity public key with %s" % key_path
   with open(key_path) as f:
     data = f.read()
-  common.ZipWriteStr(targetfile_zip, "BOOT/RAMDISK/verity_key", data)
+  common.ZipWriteStr(targetfile_zip, filename, data)
   return data
 
 def ReplaceVerityPrivateKey(targetfile_input_zip, targetfile_output_zip,
@@ -433,6 +462,57 @@
       OPTIONS.key_map[s] = d
 
 
+def GetApiLevelAndCodename(input_tf_zip):
+  data = input_tf_zip.read("SYSTEM/build.prop")
+  api_level = None
+  codename = None
+  for line in data.split("\n"):
+    line = line.strip()
+    original_line = line
+    if line and line[0] != '#' and "=" in line:
+      key, value = line.split("=", 1)
+      key = key.strip()
+      if key == "ro.build.version.sdk":
+        api_level = int(value.strip())
+      elif key == "ro.build.version.codename":
+        codename = value.strip()
+
+  if api_level is None:
+    raise ValueError("No ro.build.version.sdk in SYSTEM/build.prop")
+  if codename is None:
+    raise ValueError("No ro.build.version.codename in SYSTEM/build.prop")
+
+  return (api_level, codename)
+
+
+def GetCodenameToApiLevelMap(input_tf_zip):
+  data = input_tf_zip.read("SYSTEM/build.prop")
+  api_level = None
+  codenames = None
+  for line in data.split("\n"):
+    line = line.strip()
+    original_line = line
+    if line and line[0] != '#' and "=" in line:
+      key, value = line.split("=", 1)
+      key = key.strip()
+      if key == "ro.build.version.sdk":
+        api_level = int(value.strip())
+      elif key == "ro.build.version.all_codenames":
+        codenames = value.strip().split(",")
+
+  if api_level is None:
+    raise ValueError("No ro.build.version.sdk in SYSTEM/build.prop")
+  if codenames is None:
+    raise ValueError("No ro.build.version.all_codenames in SYSTEM/build.prop")
+
+  result = dict()
+  for codename in codenames:
+    codename = codename.strip()
+    if len(codename) > 0:
+      result[codename] = api_level
+  return result
+
+
 def main(argv):
 
   key_mapping_options = []
@@ -491,8 +571,17 @@
   CheckAllApksSigned(input_zip, apk_key_map)
 
   key_passwords = common.GetKeyPasswords(set(apk_key_map.values()))
+  platform_api_level, platform_codename = GetApiLevelAndCodename(input_zip)
+  codename_to_api_level_map = GetCodenameToApiLevelMap(input_zip)
+  # Android N will be API Level 24, but isn't yet.
+  # TODO: Remove this workaround once Android N is officially API Level 24.
+  if platform_api_level == 23 and platform_codename == "N":
+    platform_api_level = 24
+
   ProcessTargetFiles(input_zip, output_zip, misc_info,
-                     apk_key_map, key_passwords)
+                     apk_key_map, key_passwords,
+                     platform_api_level,
+                     codename_to_api_level_map)
 
   common.ZipClose(input_zip)
   common.ZipClose(output_zip)
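The signing changes above feed SignApk() two new inputs derived from SYSTEM/build.prop: the platform API level and a codename-to-API-level map. A hedged sketch of the map construction, mirroring GetCodenameToApiLevelMap(); the standalone function and the sample input are illustrative.

def codename_to_api_level_map(build_prop_text):
    # Every codename in ro.build.version.all_codenames maps to the current
    # ro.build.version.sdk value.
    api_level = None
    codenames = []
    for line in build_prop_text.splitlines():
        line = line.strip()
        if not line or line.startswith("#") or "=" not in line:
            continue
        key, value = line.split("=", 1)
        if key.strip() == "ro.build.version.sdk":
            api_level = int(value.strip())
        elif key.strip() == "ro.build.version.all_codenames":
            codenames = [c.strip() for c in value.split(",") if c.strip()]
    return dict((c, api_level) for c in codenames)

print(codename_to_api_level_map("ro.build.version.sdk=23\n"
                                "ro.build.version.all_codenames=N\n"))
# {'N': 23}

SignApk() then pins min_api_level to 1 on pre-N platforms so unchanged APKs keep SHA-1 signatures, and leaves it unset on N+ so the signer decides from each APK's own minSdkVersion.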
diff --git a/tools/releasetools/sparse_img.py b/tools/releasetools/sparse_img.py
index 013044f..4ba7560 100644
--- a/tools/releasetools/sparse_img.py
+++ b/tools/releasetools/sparse_img.py
@@ -31,8 +31,9 @@
   the form of a string like "0" or "0 1-5 8".
   """
 
-  def __init__(self, simg_fn, file_map_fn=None, clobbered_blocks=None):
-    self.simg_f = f = open(simg_fn, "rb")
+  def __init__(self, simg_fn, file_map_fn=None, clobbered_blocks=None,
+               mode="rb", build_map=True):
+    self.simg_f = f = open(simg_fn, mode)
 
     header_bin = f.read(28)
     header = struct.unpack("<I4H4I", header_bin)
@@ -44,7 +45,7 @@
     chunk_hdr_sz = header[4]
     self.blocksize = blk_sz = header[5]
     self.total_blocks = total_blks = header[6]
-    total_chunks = header[7]
+    self.total_chunks = total_chunks = header[7]
 
     if magic != 0xED26FF3A:
       raise ValueError("Magic should be 0xED26FF3A but is 0x%08X" % (magic,))
@@ -61,6 +62,9 @@
     print("Total of %u %u-byte output blocks in %u input chunks."
           % (total_blks, blk_sz, total_chunks))
 
+    if not build_map:
+      return
+
     pos = 0   # in blocks
     care_data = []
     self.offset_map = offset_map = []
@@ -126,6 +130,20 @@
     else:
       self.file_map = {"__DATA": self.care_map}
 
+  def AppendFillChunk(self, data, blocks):
+    f = self.simg_f
+
+    # Append a fill chunk
+    f.seek(0, os.SEEK_END)
+    f.write(struct.pack("<2H3I", 0xCAC2, 0, blocks, 16, data))
+
+    # Update the sparse header
+    self.total_blocks += blocks
+    self.total_chunks += 1
+
+    f.seek(16, os.SEEK_SET)
+    f.write(struct.pack("<2I", self.total_blocks, self.total_chunks))
+
   def ReadRangeSet(self, ranges):
     return [d for d in self._GetRangeData(ranges)]
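The AppendFillChunk() addition above writes a standard Android sparse-image fill chunk: a 12-byte chunk header (type 0xCAC2, reserved field, block count, total chunk size of 16 bytes) followed by the 4-byte fill word, and then bumps total_blocks/total_chunks in the file header. A hedged illustration of just the packing, with a made-up helper name:

import struct

def pack_fill_chunk(fill_value, blocks):
    # chunk_type, reserved, chunk_sz (in blocks), total_sz (bytes), fill data
    return struct.pack("<2H3I", 0xCAC2, 0, blocks, 16, fill_value)

chunk = pack_fill_chunk(0, 8)
print(len(chunk))   # 16: 12-byte chunk header plus the 32-bit fill pattern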
 
diff --git a/tools/releasetools/target_files_diff.py b/tools/releasetools/target_files_diff.py
new file mode 100755
index 0000000..0f717e0
--- /dev/null
+++ b/tools/releasetools/target_files_diff.py
@@ -0,0 +1,238 @@
+#!/usr/bin/env python
+#
+# Copyright (C) 2009 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+#
+# Finds differences between two target files packages
+#
+
+from __future__ import print_function
+
+import argparse
+import contextlib
+import os
+import re
+import subprocess
+import sys
+import tempfile
+
+def ignore(name):
+  """
+  Files to ignore when diffing
+
+  These are packages that we're already diffing elsewhere,
+  or files that we expect to be different for every build,
+  or known problems.
+  """
+
+  # We're looking at the files that make the images, so no need to search them
+  if name in ['IMAGES']:
+    return True
+  # These are packages of the recovery partition, which we're already diffing
+  if name in ['SYSTEM/etc/recovery-resource.dat',
+              'SYSTEM/recovery-from-boot.p']:
+    return True
+
+  # These files are just the BUILD_NUMBER, and will always be different
+  if name in ['BOOT/RAMDISK/selinux_version',
+              'RECOVERY/RAMDISK/selinux_version']:
+    return True
+
+  # b/26956807 .odex files are not deterministic
+  if name.endswith('.odex'):
+    return True
+
+  return False
+
+
+def rewrite_build_property(original, new):
+  """
+  Rewrite property files to remove values known to change for every build
+  """
+
+  skipped = ['ro.bootimage.build.date=',
+             'ro.bootimage.build.date.utc=',
+             'ro.bootimage.build.fingerprint=',
+             'ro.build.id=',
+             'ro.build.display.id=',
+             'ro.build.version.incremental=',
+             'ro.build.date=',
+             'ro.build.date.utc=',
+             'ro.build.host=',
+             'ro.build.user=',
+             'ro.build.description=',
+             'ro.build.fingerprint=',
+             'ro.expect.recovery_id=',
+             'ro.vendor.build.date=',
+             'ro.vendor.build.date.utc=',
+             'ro.vendor.build.fingerprint=']
+
+  for line in original:
+    skip = False
+    for s in skipped:
+      if line.startswith(s):
+        skip = True
+        break
+    if not skip:
+      new.write(line)
+
+
+def trim_install_recovery(original, new):
+  """
+  Rewrite the install-recovery script to remove the hash of the recovery
+  partition.
+  """
+  for line in original:
+    new.write(re.sub(r'[0-9a-f]{40}', '0'*40, line))
+
+def sort_file(original, new):
+  """
+  Sort the file. Some OTA metadata files are not in a deterministic order
+  currently.
+  """
+  lines = original.readlines()
+  lines.sort()
+  for line in lines:
+    new.write(line)
+
+# Map files to the functions that will modify them for diffing
+REWRITE_RULES = {
+    'BOOT/RAMDISK/default.prop': rewrite_build_property,
+    'RECOVERY/RAMDISK/default.prop': rewrite_build_property,
+    'SYSTEM/build.prop': rewrite_build_property,
+    'VENDOR/build.prop': rewrite_build_property,
+
+    'SYSTEM/bin/install-recovery.sh': trim_install_recovery,
+
+    'META/boot_filesystem_config.txt': sort_file,
+    'META/filesystem_config.txt': sort_file,
+    'META/recovery_filesystem_config.txt': sort_file,
+    'META/vendor_filesystem_config.txt': sort_file,
+}
+
+@contextlib.contextmanager
+def preprocess(name, filename):
+  """
+  Optionally rewrite files before diffing them, stripping out information that
+  is known to vary between builds.
+  """
+  if name in REWRITE_RULES:
+    with tempfile.NamedTemporaryFile() as newfp:
+      with open(filename, 'r') as oldfp:
+        REWRITE_RULES[name](oldfp, newfp)
+      newfp.flush()
+      yield newfp.name
+  else:
+    yield filename
+
+def diff(name, file1, file2, out_file):
+  """
+  Diff a file pair with diff, running preprocess() on the arguments first.
+  """
+  with preprocess(name, file1) as f1:
+    with preprocess(name, file2) as f2:
+      proc = subprocess.Popen(['diff', f1, f2], stdout=subprocess.PIPE,
+                              stderr=subprocess.STDOUT)
+      (stdout, _) = proc.communicate()
+      if proc.returncode == 0:
+        return
+      stdout = stdout.strip()
+      if stdout == 'Binary files %s and %s differ' % (f1, f2):
+        print("%s: Binary files differ" % name, file=out_file)
+      else:
+        for line in stdout.strip().split('\n'):
+          print("%s: %s" % (name, line), file=out_file)
+
+def recursiveDiff(prefix, dir1, dir2, out_file):
+  """
+  Recursively diff two directories, comparing metadata and then calling diff().
+  """
+  list1 = sorted(os.listdir(dir1))
+  list2 = sorted(os.listdir(dir2))
+
+  for entry in list1:
+    name = os.path.join(prefix, entry)
+    name1 = os.path.join(dir1, entry)
+    name2 = os.path.join(dir2, entry)
+
+    if ignore(name):
+      continue
+
+    if entry in list2:
+      if os.path.islink(name1) and os.path.islink(name2):
+        link1 = os.readlink(name1)
+        link2 = os.readlink(name2)
+        if link1 != link2:
+          print("%s: Symlinks differ: %s vs %s" % (name, link1, link2),
+                file=out_file)
+        continue
+      elif os.path.islink(name1) or os.path.islink(name2):
+        print("%s: File types differ, skipping compare" % name, file=out_file)
+        continue
+
+      stat1 = os.stat(name1)
+      stat2 = os.stat(name2)
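+      # Mask off the permission bits so only the file-type bits are compared;
+      # permission (mode) differences are reported separately below.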
+      type1 = stat1.st_mode & ~0o777
+      type2 = stat2.st_mode & ~0o777
+
+      if type1 != type2:
+        print("%s: File types differ, skipping compare" % name, file=out_file)
+        continue
+
+      if stat1.st_mode != stat2.st_mode:
+        print("%s: Modes differ: %o vs %o" %
+            (name, stat1.st_mode, stat2.st_mode), file=out_file)
+
+      if os.path.isdir(name1):
+        recursiveDiff(name, name1, name2, out_file)
+      elif os.path.isfile(name1):
+        diff(name, name1, name2, out_file)
+      else:
+        print("%s: Unknown file type, skipping compare" % name, file=out_file)
+    else:
+      print("%s: Only in base package" % name, file=out_file)
+
+  for entry in list2:
+    name = os.path.join(prefix, entry)
+    name1 = os.path.join(dir1, entry)
+    name2 = os.path.join(dir2, entry)
+
+    if ignore(name):
+      continue
+
+    if entry not in list1:
+      print("%s: Only in new package" % name, file=out_file)
+
+def main():
+  parser = argparse.ArgumentParser()
+  parser.add_argument('dir1', help='The base target files package (extracted)')
+  parser.add_argument('dir2', help='The new target files package (extracted)')
+  parser.add_argument('--output',
+      help='The output file, otherwise it prints to stdout')
+  args = parser.parse_args()
+
+  if args.output:
+    out_file = open(args.output, 'w')
+  else:
+    out_file = sys.stdout
+
+  recursiveDiff('', args.dir1, args.dir2, out_file)
+
+  if args.output:
+    out_file.close()
+
+if __name__ == '__main__':
+  main()
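Note: the new script is normally run as "target_files_diff.py <base-dir> <new-dir>
[--output FILE]" on two extracted target-files packages. Below is a minimal sketch
of driving it from Python instead; the directory names are hypothetical and the
module is assumed to be importable from tools/releasetools.

    import sys

    import target_files_diff

    # Walk both extracted trees and print one line per differing file,
    # mirroring what main() does without going through argparse.
    target_files_diff.recursiveDiff('', 'base_target_files', 'new_target_files',
                                    sys.stdout)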
diff --git a/tools/releasetools/test_rangelib.py b/tools/releasetools/test_rangelib.py
new file mode 100644
index 0000000..a61a64e
--- /dev/null
+++ b/tools/releasetools/test_rangelib.py
@@ -0,0 +1,126 @@
+#
+# Copyright (C) 2015 The Android Open Source Project
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#      http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+
+import unittest
+
+from rangelib import RangeSet
+
+class RangeSetTest(unittest.TestCase):
+
+  def test_union(self):
+    self.assertEqual(RangeSet("10-19 30-34").union(RangeSet("18-29")),
+                     RangeSet("10-34"))
+    self.assertEqual(RangeSet("10-19 30-34").union(RangeSet("22 32")),
+                     RangeSet("10-19 22 30-34"))
+
+  def test_intersect(self):
+    self.assertEqual(RangeSet("10-19 30-34").intersect(RangeSet("18-32")),
+                     RangeSet("18-19 30-32"))
+    self.assertEqual(RangeSet("10-19 30-34").intersect(RangeSet("22-28")),
+                     RangeSet(""))
+
+  def test_subtract(self):
+    self.assertEqual(RangeSet("10-19 30-34").subtract(RangeSet("18-32")),
+                     RangeSet("10-17 33-34"))
+    self.assertEqual(RangeSet("10-19 30-34").subtract(RangeSet("22-28")),
+                     RangeSet("10-19 30-34"))
+
+  def test_overlaps(self):
+    self.assertTrue(RangeSet("10-19 30-34").overlaps(RangeSet("18-32")))
+    self.assertFalse(RangeSet("10-19 30-34").overlaps(RangeSet("22-28")))
+
+  def test_size(self):
+    self.assertEqual(RangeSet("10-19 30-34").size(), 15)
+    self.assertEqual(RangeSet("").size(), 0)
+
+  def test_map_within(self):
+    self.assertEqual(RangeSet("0-9").map_within(RangeSet("3-4")),
+                     RangeSet("3-4"))
+    self.assertEqual(RangeSet("10-19").map_within(RangeSet("13-14")),
+                     RangeSet("3-4"))
+    self.assertEqual(
+        RangeSet("10-19 30-39").map_within(RangeSet("17-19 30-32")),
+        RangeSet("7-12"))
+    self.assertEqual(
+        RangeSet("10-19 30-39").map_within(RangeSet("12-13 17-19 30-32")),
+        RangeSet("2-3 7-12"))
+
+  def test_first(self):
+    self.assertEqual(RangeSet("0-9").first(1), RangeSet("0"))
+    self.assertEqual(RangeSet("10-19").first(5), RangeSet("10-14"))
+    self.assertEqual(RangeSet("10-19").first(15), RangeSet("10-19"))
+    self.assertEqual(RangeSet("10-19 30-39").first(3), RangeSet("10-12"))
+    self.assertEqual(RangeSet("10-19 30-39").first(15),
+                     RangeSet("10-19 30-34"))
+    self.assertEqual(RangeSet("10-19 30-39").first(30),
+                     RangeSet("10-19 30-39"))
+    self.assertEqual(RangeSet("0-9").first(0), RangeSet(""))
+
+  def test_extend(self):
+    self.assertEqual(RangeSet("0-9").extend(1), RangeSet("0-10"))
+    self.assertEqual(RangeSet("10-19").extend(15), RangeSet("0-34"))
+    self.assertEqual(RangeSet("10-19 30-39").extend(4), RangeSet("6-23 26-43"))
+    self.assertEqual(RangeSet("10-19 30-39").extend(10), RangeSet("0-49"))
+
+  def test_equality(self):
+    self.assertTrue(RangeSet("") == RangeSet(""))
+    self.assertTrue(RangeSet("3") == RangeSet("3"))
+    self.assertTrue(RangeSet("3 5") == RangeSet("5 3"))
+    self.assertTrue(
+        RangeSet("10-19 30-39") == RangeSet("30-32 10-14 33-39 15-19"))
+    self.assertTrue(RangeSet("") != RangeSet("3"))
+    self.assertTrue(RangeSet("10-19") != RangeSet("10-19 20"))
+
+    self.assertFalse(RangeSet(""))
+    self.assertTrue(RangeSet("3"))
+
+  def test_init(self):
+    self.assertIsNotNone(RangeSet(""))
+    self.assertIsNotNone(RangeSet("3"))
+    self.assertIsNotNone(RangeSet("3 5"))
+    self.assertIsNotNone(RangeSet("10 19 30-39"))
+
+    with self.assertRaises(AssertionError):
+      RangeSet(data=[0])
+
+  def test_str(self):
+    self.assertEqual(str(RangeSet("0-9")), "0-9")
+    self.assertEqual(str(RangeSet("2-10 12")), "2-10 12")
+    self.assertEqual(str(RangeSet("11 2-10 12 1 0")), "0-12")
+    self.assertEqual(str(RangeSet("")), "empty")
+
+  def test_to_string_raw(self):
+    self.assertEqual(RangeSet("0-9").to_string_raw(), "2,0,10")
+    self.assertEqual(RangeSet("2-10 12").to_string_raw(), "4,2,11,12,13")
+    self.assertEqual(RangeSet("11 2-10 12 1 0").to_string_raw(), "2,0,13")
+
+    with self.assertRaises(AssertionError):
+      RangeSet("").to_string_raw()
+
+  def test_monotonic(self):
+    self.assertTrue(RangeSet("0-9").monotonic)
+    self.assertTrue(RangeSet("2-9").monotonic)
+    self.assertTrue(RangeSet("2-9 30 31 35").monotonic)
+    self.assertTrue(RangeSet("").monotonic)
+    self.assertTrue(RangeSet("0-4 5-9").monotonic)
+    self.assertFalse(RangeSet("5-9 0-4").monotonic)
+    self.assertFalse(RangeSet("258768-259211 196604").monotonic)
+
+    self.assertTrue(RangeSet(data=[0, 10]).monotonic)
+    self.assertTrue(RangeSet(data=[0, 10, 15, 20]).monotonic)
+    self.assertTrue(RangeSet(data=[2, 9, 30, 31, 31, 32, 35, 36]).monotonic)
+    self.assertTrue(RangeSet(data=[0, 5, 5, 10]).monotonic)
+    self.assertFalse(RangeSet(data=[5, 10, 0, 5]).monotonic)
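Note: a minimal sketch of running the new RangeSet tests on their own, assuming
tools/releasetools (which provides rangelib.py) is on the Python path.

    import unittest

    import test_rangelib

    # Load every RangeSetTest case from the module and run it verbosely.
    suite = unittest.defaultTestLoader.loadTestsFromModule(test_rangelib)
    unittest.TextTestRunner(verbosity=2).run(suite)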
diff --git a/tools/signapk/Android.mk b/tools/signapk/Android.mk
index 620ccb1..ac217c7 100644
--- a/tools/signapk/Android.mk
+++ b/tools/signapk/Android.mk
@@ -19,13 +19,20 @@
 # ============================================================
 include $(CLEAR_VARS)
 LOCAL_MODULE := signapk
-LOCAL_SRC_FILES := SignApk.java
+LOCAL_SRC_FILES := $(call all-java-files-under, src)
 LOCAL_JAR_MANIFEST := SignApk.mf
-LOCAL_STATIC_JAVA_LIBRARIES := bouncycastle-host bouncycastle-bcpkix-host
+LOCAL_STATIC_JAVA_LIBRARIES := bouncycastle-host bouncycastle-bcpkix-host conscrypt-host
+LOCAL_REQUIRED_MODULES := libconscrypt_openjdk_jni
 include $(BUILD_HOST_JAVA_LIBRARY)
 
 ifeq ($(TARGET_BUILD_APPS),)
-# The post-build signing tools need signapk.jar, but we don't
-# need this if we're just doing unbundled apps.
-$(call dist-for-goals,droidcore,$(LOCAL_INSTALLED_MODULE))
+ifeq ($(BRILLO),)
+# The post-build signing tools need signapk.jar and its shared libraries,
+# but we don't need this if we're just doing unbundled apps.
+my_dist_files := $(LOCAL_INSTALLED_MODULE) \
+    $(HOST_OUT_SHARED_LIBRARIES)/libconscrypt_openjdk_jni$(HOST_SHLIB_SUFFIX)
+
+$(call dist-for-goals,droidcore,$(my_dist_files))
+my_dist_files :=
+endif
 endif
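Note: a rough sketch of how the dist'd pieces fit together at signing time: the
JVM's java.library.path has to include the directory holding
libconscrypt_openjdk_jni so the Conscrypt provider can load. The host-out paths
and the usual signapk argument order (certificate, private key, input, output)
are assumptions here, not part of this change.

    import subprocess

    # Hypothetical host output locations; adjust to the actual dist layout.
    subprocess.check_call([
        "java",
        "-Djava.library.path=out/host/linux-x86/lib64",
        "-jar", "out/host/linux-x86/framework/signapk.jar",
        "testkey.x509.pem", "testkey.pk8", "unsigned.apk", "signed.apk",
    ])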
diff --git a/tools/signapk/src/com/android/signapk/ApkSignerV2.java b/tools/signapk/src/com/android/signapk/ApkSignerV2.java
new file mode 100644
index 0000000..46cd11e
--- /dev/null
+++ b/tools/signapk/src/com/android/signapk/ApkSignerV2.java
@@ -0,0 +1,729 @@
+/*
+ * Copyright (C) 2016 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.android.signapk;
+
+import java.nio.BufferUnderflowException;
+import java.nio.ByteBuffer;
+import java.nio.ByteOrder;
+import java.security.DigestException;
+import java.security.InvalidAlgorithmParameterException;
+import java.security.InvalidKeyException;
+import java.security.KeyFactory;
+import java.security.MessageDigest;
+import java.security.NoSuchAlgorithmException;
+import java.security.PrivateKey;
+import java.security.PublicKey;
+import java.security.Signature;
+import java.security.SignatureException;
+import java.security.cert.CertificateEncodingException;
+import java.security.cert.X509Certificate;
+import java.security.spec.AlgorithmParameterSpec;
+import java.security.spec.InvalidKeySpecException;
+import java.security.spec.MGF1ParameterSpec;
+import java.security.spec.PSSParameterSpec;
+import java.security.spec.X509EncodedKeySpec;
+import java.util.ArrayList;
+import java.util.HashMap;
+import java.util.HashSet;
+import java.util.List;
+import java.util.Map;
+import java.util.Set;
+
+/**
+ * APK Signature Scheme v2 signer.
+ *
+ * <p>APK Signature Scheme v2 is a whole-file signature scheme which aims to protect every single
+ * bit of the APK, as opposed to the JAR Signature Scheme which protects only the names and
+ * uncompressed contents of ZIP entries.
+ */
+public abstract class ApkSignerV2 {
+    /*
+     * The two main goals of APK Signature Scheme v2 are:
+     * 1. Detect any unauthorized modifications to the APK. This is achieved by making the signature
+     *    cover every byte of the APK being signed.
+     * 2. Enable much faster signature and integrity verification. This is achieved by requiring
+     *    only a minimal amount of APK parsing before the signature is verified, thus completely
+     *    bypassing ZIP entry decompression and by making integrity verification parallelizable by
+     *    employing a hash tree.
+     *
+     * The generated signature block is wrapped into an APK Signing Block and inserted into the
+     * original APK immediately before the start of ZIP Central Directory. This is to ensure that
+     * JAR and ZIP parsers continue to work on the signed APK. The APK Signing Block is designed for
+     * extensibility. For example, a future signature scheme could insert its signatures there as
+     * well. The contract of the APK Signing Block is that all contents outside of the block must be
+     * protected by signatures inside the block.
+     */
+
+    public static final int SIGNATURE_RSA_PSS_WITH_SHA256 = 0x0101;
+    public static final int SIGNATURE_RSA_PSS_WITH_SHA512 = 0x0102;
+    public static final int SIGNATURE_RSA_PKCS1_V1_5_WITH_SHA256 = 0x0103;
+    public static final int SIGNATURE_RSA_PKCS1_V1_5_WITH_SHA512 = 0x0104;
+    public static final int SIGNATURE_ECDSA_WITH_SHA256 = 0x0201;
+    public static final int SIGNATURE_ECDSA_WITH_SHA512 = 0x0202;
+    public static final int SIGNATURE_DSA_WITH_SHA256 = 0x0301;
+    public static final int SIGNATURE_DSA_WITH_SHA512 = 0x0302;
+
+    /**
+     * {@code .SF} file header section attribute indicating that the APK is signed not just with
+     * JAR signature scheme but also with APK Signature Scheme v2 or newer. This attribute
+     * facilitates v2 signature stripping detection.
+     *
+     * <p>The attribute contains a comma-separated set of signature scheme IDs.
+     */
+    public static final String SF_ATTRIBUTE_ANDROID_APK_SIGNED_NAME = "X-Android-APK-Signed";
+    public static final String SF_ATTRIBUTE_ANDROID_APK_SIGNED_VALUE = "2";
+
+    private static final int CONTENT_DIGEST_CHUNKED_SHA256 = 0;
+    private static final int CONTENT_DIGEST_CHUNKED_SHA512 = 1;
+
+    private static final int CONTENT_DIGESTED_CHUNK_MAX_SIZE_BYTES = 1024 * 1024;
+
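+    // The 16 magic bytes spell out "APK Sig Block 42" in ASCII.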
+    private static final byte[] APK_SIGNING_BLOCK_MAGIC =
+          new byte[] {
+              0x41, 0x50, 0x4b, 0x20, 0x53, 0x69, 0x67, 0x20,
+              0x42, 0x6c, 0x6f, 0x63, 0x6b, 0x20, 0x34, 0x32,
+          };
+    private static final int APK_SIGNATURE_SCHEME_V2_BLOCK_ID = 0x7109871a;
+
+    private ApkSignerV2() {}
+
+    /**
+     * Signer configuration.
+     */
+    public static final class SignerConfig {
+        /** Private key. */
+        public PrivateKey privateKey;
+
+        /**
+         * Certificates, with the first certificate containing the public key corresponding to
+         * {@link #privateKey}.
+         */
+        public List<X509Certificate> certificates;
+
+        /**
+         * List of signature algorithms with which to sign (see {@code SIGNATURE_...} constants).
+         */
+        public List<Integer> signatureAlgorithms;
+    }
+
+    /**
+     * Signs the provided APK using APK Signature Scheme v2 and returns the signed APK as a list of
+     * consecutive chunks.
+     *
+     * <p>NOTE: To enable the APK signature verifier to detect v2 signature stripping, the header
+     * sections of the META-INF/*.SF files of the APK being signed must contain the
+     * {@code X-Android-APK-Signed} attribute.
+     *
+     * @param inputApk contents of the APK to be signed. The APK starts at the current position
+     *        of the buffer and ends at the limit of the buffer.
+     * @param signerConfigs signer configurations, one for each signer.
+     *
+     * @throws ApkParseException if the APK cannot be parsed.
+     * @throws InvalidKeyException if a signing key is not suitable for this signature scheme or
+     *         cannot be used in general.
+     * @throws SignatureException if an error occurs when computing digests or generating
+     *         signatures.
+     */
+    public static ByteBuffer[] sign(
+            ByteBuffer inputApk,
+            List<SignerConfig> signerConfigs)
+                    throws ApkParseException, InvalidKeyException, SignatureException {
+        // Slice/create a view in the inputApk to make sure that:
+        // 1. inputApk is what's between position and limit of the original inputApk, and
+        // 2. changes to position, limit, and byte order are not reflected in the original.
+        ByteBuffer originalInputApk = inputApk;
+        inputApk = originalInputApk.slice();
+        inputApk.order(ByteOrder.LITTLE_ENDIAN);
+
+        // Locate ZIP End of Central Directory (EoCD), Central Directory, and check that Central
+        // Directory is immediately followed by the ZIP End of Central Directory.
+        int eocdOffset = ZipUtils.findZipEndOfCentralDirectoryRecord(inputApk);
+        if (eocdOffset == -1) {
+            throw new ApkParseException("Failed to locate ZIP End of Central Directory");
+        }
+        if (ZipUtils.isZip64EndOfCentralDirectoryLocatorPresent(inputApk, eocdOffset)) {
+            throw new ApkParseException("ZIP64 format not supported");
+        }
+        inputApk.position(eocdOffset);
+        long centralDirSizeLong = ZipUtils.getZipEocdCentralDirectorySizeBytes(inputApk);
+        if (centralDirSizeLong > Integer.MAX_VALUE) {
+            throw new ApkParseException(
+                    "ZIP Central Directory size out of range: " + centralDirSizeLong);
+        }
+        int centralDirSize = (int) centralDirSizeLong;
+        long centralDirOffsetLong = ZipUtils.getZipEocdCentralDirectoryOffset(inputApk);
+        if (centralDirOffsetLong > Integer.MAX_VALUE) {
+            throw new ApkParseException(
+                    "ZIP Central Directory offset in file out of range: " + centralDirOffsetLong);
+        }
+        int centralDirOffset = (int) centralDirOffsetLong;
+        int expectedEocdOffset = centralDirOffset + centralDirSize;
+        if (expectedEocdOffset < centralDirOffset) {
+            throw new ApkParseException(
+                    "ZIP Central Directory extent too large. Offset: " + centralDirOffset
+                            + ", size: " + centralDirSize);
+        }
+        if (eocdOffset != expectedEocdOffset) {
+            throw new ApkParseException(
+                    "ZIP Central Directory not immeiately followed by ZIP End of"
+                            + " Central Directory. CD end: " + expectedEocdOffset
+                            + ", EoCD start: " + eocdOffset);
+        }
+
+        // Create ByteBuffers holding the contents of everything before ZIP Central Directory,
+        // ZIP Central Directory, and ZIP End of Central Directory.
+        inputApk.clear();
+        ByteBuffer beforeCentralDir = getByteBuffer(inputApk, centralDirOffset);
+        ByteBuffer centralDir = getByteBuffer(inputApk, eocdOffset - centralDirOffset);
+        // Copy the End of Central Directory because we'll need to modify its contents later.
+        byte[] eocdBytes = new byte[inputApk.remaining()];
+        inputApk.get(eocdBytes);
+        ByteBuffer eocd = ByteBuffer.wrap(eocdBytes);
+        eocd.order(inputApk.order());
+
+        // Figure out which digests to use for the APK contents.
+        Set<Integer> contentDigestAlgorithms = new HashSet<>();
+        for (SignerConfig signerConfig : signerConfigs) {
+            for (int signatureAlgorithm : signerConfig.signatureAlgorithms) {
+                contentDigestAlgorithms.add(
+                        getSignatureAlgorithmContentDigestAlgorithm(signatureAlgorithm));
+            }
+        }
+
+        // Compute digests of APK contents.
+        Map<Integer, byte[]> contentDigests; // digest algorithm ID -> digest
+        try {
+            contentDigests =
+                    computeContentDigests(
+                            contentDigestAlgorithms,
+                            new ByteBuffer[] {beforeCentralDir, centralDir, eocd});
+        } catch (DigestException e) {
+            throw new SignatureException("Failed to compute digests of APK", e);
+        }
+
+        // Sign the digests and wrap the signatures and signer info into an APK Signing Block.
+        ByteBuffer apkSigningBlock =
+                ByteBuffer.wrap(generateApkSigningBlock(signerConfigs, contentDigests));
+
+        // Update Central Directory Offset in End of Central Directory Record. Central Directory
+        // follows the APK Signing Block and thus is shifted by the size of the APK Signing Block.
+        centralDirOffset += apkSigningBlock.remaining();
+        eocd.clear();
+        ZipUtils.setZipEocdCentralDirectoryOffset(eocd, centralDirOffset);
+
+        // Follow the Java NIO pattern for ByteBuffer whose contents have been consumed.
+        originalInputApk.position(originalInputApk.limit());
+
+        // Reset positions (to 0) and limits (to capacity) in the ByteBuffers below to follow the
+        // Java NIO pattern for ByteBuffers whose contents are ready to be read by the caller.
+        // Contrary to the name, this does not clear the contents of these ByteBuffers.
+        beforeCentralDir.clear();
+        centralDir.clear();
+        eocd.clear();
+
+        // Insert APK Signing Block immediately before the ZIP Central Directory.
+        return new ByteBuffer[] {
+            beforeCentralDir,
+            apkSigningBlock,
+            centralDir,
+            eocd,
+        };
+    }
+
+    private static Map<Integer, byte[]> computeContentDigests(
+            Set<Integer> digestAlgorithms,
+            ByteBuffer[] contents) throws DigestException {
+        // For each digest algorithm the result is computed as follows:
+        // 1. Each segment of contents is split into consecutive chunks of 1 MB in size.
+        //    The final chunk will be shorter iff the length of segment is not a multiple of 1 MB.
+        //    No chunks are produced for empty (zero length) segments.
+        // 2. The digest of each chunk is computed over the concatenation of byte 0xa5, the chunk's
+        //    length in bytes (uint32 little-endian) and the chunk's contents.
+        // 3. The output digest is computed over the concatenation of the byte 0x5a, the number of
+        //    chunks (uint32 little-endian) and the concatenation of digests of chunks of all
+        //    segments in-order.
+
+        int chunkCount = 0;
+        for (ByteBuffer input : contents) {
+            chunkCount += getChunkCount(input.remaining(), CONTENT_DIGESTED_CHUNK_MAX_SIZE_BYTES);
+        }
+
+        final Map<Integer, byte[]> digestsOfChunks = new HashMap<>(digestAlgorithms.size());
+        for (int digestAlgorithm : digestAlgorithms) {
+            int digestOutputSizeBytes = getContentDigestAlgorithmOutputSizeBytes(digestAlgorithm);
+            byte[] concatenationOfChunkCountAndChunkDigests =
+                    new byte[5 + chunkCount * digestOutputSizeBytes];
+            concatenationOfChunkCountAndChunkDigests[0] = 0x5a;
+            setUnsignedInt32LittleEndian(
+                    chunkCount, concatenationOfChunkCountAndChunkDigests, 1);
+            digestsOfChunks.put(digestAlgorithm, concatenationOfChunkCountAndChunkDigests);
+        }
+
+        int chunkIndex = 0;
+        byte[] chunkContentPrefix = new byte[5];
+        chunkContentPrefix[0] = (byte) 0xa5;
+        // Optimization opportunity: digests of chunks can be computed in parallel.
+        for (ByteBuffer input : contents) {
+            while (input.hasRemaining()) {
+                int chunkSize =
+                        Math.min(input.remaining(), CONTENT_DIGESTED_CHUNK_MAX_SIZE_BYTES);
+                final ByteBuffer chunk = getByteBuffer(input, chunkSize);
+                for (int digestAlgorithm : digestAlgorithms) {
+                    String jcaAlgorithmName =
+                            getContentDigestAlgorithmJcaDigestAlgorithm(digestAlgorithm);
+                    MessageDigest md;
+                    try {
+                        md = MessageDigest.getInstance(jcaAlgorithmName);
+                    } catch (NoSuchAlgorithmException e) {
+                        throw new DigestException(
+                                jcaAlgorithmName + " MessageDigest not supported", e);
+                    }
+                    // Reset position to 0 and limit to capacity. Position would've been modified
+                    // by the preceding iteration of this loop. NOTE: Contrary to the method name,
+                    // this does not modify the contents of the chunk.
+                    chunk.clear();
+                    setUnsignedInt32LittleEndian(chunk.remaining(), chunkContentPrefix, 1);
+                    md.update(chunkContentPrefix);
+                    md.update(chunk);
+                    byte[] concatenationOfChunkCountAndChunkDigests =
+                            digestsOfChunks.get(digestAlgorithm);
+                    int expectedDigestSizeBytes =
+                            getContentDigestAlgorithmOutputSizeBytes(digestAlgorithm);
+                    int actualDigestSizeBytes =
+                            md.digest(
+                                    concatenationOfChunkCountAndChunkDigests,
+                                    5 + chunkIndex * expectedDigestSizeBytes,
+                                    expectedDigestSizeBytes);
+                    if (actualDigestSizeBytes != expectedDigestSizeBytes) {
+                        throw new DigestException(
+                                "Unexpected output size of " + md.getAlgorithm()
+                                        + " digest: " + actualDigestSizeBytes);
+                    }
+                }
+                chunkIndex++;
+            }
+        }
+
+        Map<Integer, byte[]> result = new HashMap<>(digestAlgorithms.size());
+        for (Map.Entry<Integer, byte[]> entry : digestsOfChunks.entrySet()) {
+            int digestAlgorithm = entry.getKey();
+            byte[] concatenationOfChunkCountAndChunkDigests = entry.getValue();
+            String jcaAlgorithmName = getContentDigestAlgorithmJcaDigestAlgorithm(digestAlgorithm);
+            MessageDigest md;
+            try {
+                md = MessageDigest.getInstance(jcaAlgorithmName);
+            } catch (NoSuchAlgorithmException e) {
+                throw new DigestException(jcaAlgorithmName + " MessageDigest not supported", e);
+            }
+            result.put(digestAlgorithm, md.digest(concatenationOfChunkCountAndChunkDigests));
+        }
+        return result;
+    }
+
+    private static int getChunkCount(int inputSize, int chunkSize) {
+        return (inputSize + chunkSize - 1) / chunkSize;
+    }
+
+    private static void setUnsignedInt32LittleEndian(int value, byte[] result, int offset) {
+        result[offset] = (byte) (value & 0xff);
+        result[offset + 1] = (byte) ((value >> 8) & 0xff);
+        result[offset + 2] = (byte) ((value >> 16) & 0xff);
+        result[offset + 3] = (byte) ((value >> 24) & 0xff);
+    }
+
+    private static byte[] generateApkSigningBlock(
+            List<SignerConfig> signerConfigs,
+            Map<Integer, byte[]> contentDigests) throws InvalidKeyException, SignatureException {
+        byte[] apkSignatureSchemeV2Block =
+                generateApkSignatureSchemeV2Block(signerConfigs, contentDigests);
+        return generateApkSigningBlock(apkSignatureSchemeV2Block);
+    }
+
+    private static byte[] generateApkSigningBlock(byte[] apkSignatureSchemeV2Block) {
+        // FORMAT:
+        // uint64:  size (excluding this field)
+        // repeated ID-value pairs:
+        //     uint64:           size (excluding this field)
+        //     uint32:           ID
+        //     (size - 4) bytes: value
+        // uint64:  size (same as the one above)
+        // uint128: magic
+
+        int resultSize =
+                8 // size
+                + 8 + 4 + apkSignatureSchemeV2Block.length // v2Block as ID-value pair
+                + 8 // size
+                + 16 // magic
+                ;
+        ByteBuffer result = ByteBuffer.allocate(resultSize);
+        result.order(ByteOrder.LITTLE_ENDIAN);
+        long blockSizeFieldValue = resultSize - 8;
+        result.putLong(blockSizeFieldValue);
+
+        long pairSizeFieldValue = 4 + apkSignatureSchemeV2Block.length;
+        result.putLong(pairSizeFieldValue);
+        result.putInt(APK_SIGNATURE_SCHEME_V2_BLOCK_ID);
+        result.put(apkSignatureSchemeV2Block);
+
+        result.putLong(blockSizeFieldValue);
+        result.put(APK_SIGNING_BLOCK_MAGIC);
+
+        return result.array();
+    }
+
+    private static byte[] generateApkSignatureSchemeV2Block(
+            List<SignerConfig> signerConfigs,
+            Map<Integer, byte[]> contentDigests) throws InvalidKeyException, SignatureException {
+        // FORMAT:
+        // * length-prefixed sequence of length-prefixed signer blocks.
+
+        List<byte[]> signerBlocks = new ArrayList<>(signerConfigs.size());
+        int signerNumber = 0;
+        for (SignerConfig signerConfig : signerConfigs) {
+            signerNumber++;
+            byte[] signerBlock;
+            try {
+                signerBlock = generateSignerBlock(signerConfig, contentDigests);
+            } catch (InvalidKeyException e) {
+                throw new InvalidKeyException("Signer #" + signerNumber + " failed", e);
+            } catch (SignatureException e) {
+                throw new SignatureException("Signer #" + signerNumber + " failed", e);
+            }
+            signerBlocks.add(signerBlock);
+        }
+
+        return encodeAsSequenceOfLengthPrefixedElements(
+                new byte[][] {
+                    encodeAsSequenceOfLengthPrefixedElements(signerBlocks),
+                });
+    }
+
+    private static byte[] generateSignerBlock(
+            SignerConfig signerConfig,
+            Map<Integer, byte[]> contentDigests) throws InvalidKeyException, SignatureException {
+        if (signerConfig.certificates.isEmpty()) {
+            throw new SignatureException("No certificates configured for signer");
+        }
+        PublicKey publicKey = signerConfig.certificates.get(0).getPublicKey();
+
+        byte[] encodedPublicKey = encodePublicKey(publicKey);
+
+        V2SignatureSchemeBlock.SignedData signedData = new V2SignatureSchemeBlock.SignedData();
+        try {
+            signedData.certificates = encodeCertificates(signerConfig.certificates);
+        } catch (CertificateEncodingException e) {
+            throw new SignatureException("Failed to encode certificates", e);
+        }
+
+        List<Pair<Integer, byte[]>> digests =
+                new ArrayList<>(signerConfig.signatureAlgorithms.size());
+        for (int signatureAlgorithm : signerConfig.signatureAlgorithms) {
+            int contentDigestAlgorithm =
+                    getSignatureAlgorithmContentDigestAlgorithm(signatureAlgorithm);
+            byte[] contentDigest = contentDigests.get(contentDigestAlgorithm);
+            if (contentDigest == null) {
+                throw new RuntimeException(
+                        getContentDigestAlgorithmJcaDigestAlgorithm(contentDigestAlgorithm)
+                        + " content digest for "
+                        + getSignatureAlgorithmJcaSignatureAlgorithm(signatureAlgorithm)
+                        + " not computed");
+            }
+            digests.add(Pair.create(signatureAlgorithm, contentDigest));
+        }
+        signedData.digests = digests;
+
+        V2SignatureSchemeBlock.Signer signer = new V2SignatureSchemeBlock.Signer();
+        // FORMAT:
+        // * length-prefixed sequence of length-prefixed digests:
+        //   * uint32: signature algorithm ID
+        //   * length-prefixed bytes: digest of contents
+        // * length-prefixed sequence of certificates:
+        //   * length-prefixed bytes: X.509 certificate (ASN.1 DER encoded).
+        // * length-prefixed sequence of length-prefixed additional attributes:
+        //   * uint32: ID
+        //   * (length - 4) bytes: value
+        signer.signedData = encodeAsSequenceOfLengthPrefixedElements(new byte[][] {
+            encodeAsSequenceOfLengthPrefixedPairsOfIntAndLengthPrefixedBytes(signedData.digests),
+            encodeAsSequenceOfLengthPrefixedElements(signedData.certificates),
+            // additional attributes
+            new byte[0],
+        });
+        signer.publicKey = encodedPublicKey;
+        signer.signatures = new ArrayList<>();
+        for (int signatureAlgorithm : signerConfig.signatureAlgorithms) {
+            Pair<String, ? extends AlgorithmParameterSpec> signatureParams =
+                    getSignatureAlgorithmJcaSignatureAlgorithm(signatureAlgorithm);
+            String jcaSignatureAlgorithm = signatureParams.getFirst();
+            AlgorithmParameterSpec jcaSignatureAlgorithmParams = signatureParams.getSecond();
+            byte[] signatureBytes;
+            try {
+                Signature signature = Signature.getInstance(jcaSignatureAlgorithm);
+                signature.initSign(signerConfig.privateKey);
+                if (jcaSignatureAlgorithmParams != null) {
+                    signature.setParameter(jcaSignatureAlgorithmParams);
+                }
+                signature.update(signer.signedData);
+                signatureBytes = signature.sign();
+            } catch (InvalidKeyException e) {
+                throw new InvalidKeyException("Failed sign using " + jcaSignatureAlgorithm, e);
+            } catch (NoSuchAlgorithmException | InvalidAlgorithmParameterException
+                    | SignatureException e) {
+                throw new SignatureException("Failed sign using " + jcaSignatureAlgorithm, e);
+            }
+
+            try {
+                Signature signature = Signature.getInstance(jcaSignatureAlgorithm);
+                signature.initVerify(publicKey);
+                if (jcaSignatureAlgorithmParams != null) {
+                    signature.setParameter(jcaSignatureAlgorithmParams);
+                }
+                signature.update(signer.signedData);
+                if (!signature.verify(signatureBytes)) {
+                    throw new SignatureException("Signature did not verify");
+                }
+            } catch (InvalidKeyException e) {
+                throw new InvalidKeyException("Failed to verify generated " + jcaSignatureAlgorithm
+                        + " signature using public key from certificate", e);
+            } catch (NoSuchAlgorithmException | InvalidAlgorithmParameterException
+                    | SignatureException e) {
+                throw new SignatureException("Failed to verify generated " + jcaSignatureAlgorithm
+                        + " signature using public key from certificate", e);
+            }
+
+            signer.signatures.add(Pair.create(signatureAlgorithm, signatureBytes));
+        }
+
+        // FORMAT:
+        // * length-prefixed signed data
+        // * length-prefixed sequence of length-prefixed signatures:
+        //   * uint32: signature algorithm ID
+        //   * length-prefixed bytes: signature of signed data
+        // * length-prefixed bytes: public key (X.509 SubjectPublicKeyInfo, ASN.1 DER encoded)
+        return encodeAsSequenceOfLengthPrefixedElements(
+                new byte[][] {
+                    signer.signedData,
+                    encodeAsSequenceOfLengthPrefixedPairsOfIntAndLengthPrefixedBytes(
+                            signer.signatures),
+                    signer.publicKey,
+                });
+    }
+
+    private static final class V2SignatureSchemeBlock {
+        private static final class Signer {
+            public byte[] signedData;
+            public List<Pair<Integer, byte[]>> signatures;
+            public byte[] publicKey;
+        }
+
+        private static final class SignedData {
+            public List<Pair<Integer, byte[]>> digests;
+            public List<byte[]> certificates;
+        }
+    }
+
+    private static byte[] encodePublicKey(PublicKey publicKey) throws InvalidKeyException {
+        byte[] encodedPublicKey = null;
+        if ("X.509".equals(publicKey.getFormat())) {
+            encodedPublicKey = publicKey.getEncoded();
+        }
+        if (encodedPublicKey == null) {
+            try {
+                encodedPublicKey =
+                        KeyFactory.getInstance(publicKey.getAlgorithm())
+                                .getKeySpec(publicKey, X509EncodedKeySpec.class)
+                                .getEncoded();
+            } catch (NoSuchAlgorithmException | InvalidKeySpecException e) {
+                throw new InvalidKeyException(
+                        "Failed to obtain X.509 encoded form of public key " + publicKey
+                                + " of class " + publicKey.getClass().getName(),
+                        e);
+            }
+        }
+        if ((encodedPublicKey == null) || (encodedPublicKey.length == 0)) {
+            throw new InvalidKeyException(
+                    "Failed to obtain X.509 encoded form of public key " + publicKey
+                            + " of class " + publicKey.getClass().getName());
+        }
+        return encodedPublicKey;
+    }
+
+    public static List<byte[]> encodeCertificates(List<X509Certificate> certificates)
+            throws CertificateEncodingException {
+        List<byte[]> result = new ArrayList<>();
+        for (X509Certificate certificate : certificates) {
+            result.add(certificate.getEncoded());
+        }
+        return result;
+    }
+
+    private static byte[] encodeAsSequenceOfLengthPrefixedElements(List<byte[]> sequence) {
+        return encodeAsSequenceOfLengthPrefixedElements(
+                sequence.toArray(new byte[sequence.size()][]));
+    }
+
+    private static byte[] encodeAsSequenceOfLengthPrefixedElements(byte[][] sequence) {
+        int payloadSize = 0;
+        for (byte[] element : sequence) {
+            payloadSize += 4 + element.length;
+        }
+        ByteBuffer result = ByteBuffer.allocate(payloadSize);
+        result.order(ByteOrder.LITTLE_ENDIAN);
+        for (byte[] element : sequence) {
+            result.putInt(element.length);
+            result.put(element);
+        }
+        return result.array();
+    }
+
+    private static byte[] encodeAsSequenceOfLengthPrefixedPairsOfIntAndLengthPrefixedBytes(
+            List<Pair<Integer, byte[]>> sequence) {
+        int resultSize = 0;
+        for (Pair<Integer, byte[]> element : sequence) {
+            resultSize += 12 + element.getSecond().length;
+        }
+        ByteBuffer result = ByteBuffer.allocate(resultSize);
+        result.order(ByteOrder.LITTLE_ENDIAN);
+        for (Pair<Integer, byte[]> element : sequence) {
+            byte[] second = element.getSecond();
+            result.putInt(8 + second.length);
+            result.putInt(element.getFirst());
+            result.putInt(second.length);
+            result.put(second);
+        }
+        return result.array();
+    }
+
+    /**
+     * Relative <em>get</em> method for reading {@code size} number of bytes from the current
+     * position of this buffer.
+     *
+     * <p>This method reads the next {@code size} bytes at this buffer's current position,
+     * returning them as a {@code ByteBuffer} with start set to 0, limit and capacity set to
+     * {@code size}, byte order set to this buffer's byte order; and then increments the position by
+     * {@code size}.
+     */
+    private static ByteBuffer getByteBuffer(ByteBuffer source, int size) {
+        if (size < 0) {
+            throw new IllegalArgumentException("size: " + size);
+        }
+        int originalLimit = source.limit();
+        int position = source.position();
+        int limit = position + size;
+        if ((limit < position) || (limit > originalLimit)) {
+            throw new BufferUnderflowException();
+        }
+        source.limit(limit);
+        try {
+            ByteBuffer result = source.slice();
+            result.order(source.order());
+            source.position(limit);
+            return result;
+        } finally {
+            source.limit(originalLimit);
+        }
+    }
+
+    private static Pair<String, ? extends AlgorithmParameterSpec>
+            getSignatureAlgorithmJcaSignatureAlgorithm(int sigAlgorithm) {
+        switch (sigAlgorithm) {
+            case SIGNATURE_RSA_PSS_WITH_SHA256:
+                return Pair.create(
+                        "SHA256withRSA/PSS",
+                        new PSSParameterSpec(
+                                "SHA-256", "MGF1", MGF1ParameterSpec.SHA256, 256 / 8, 1));
+            case SIGNATURE_RSA_PSS_WITH_SHA512:
+                return Pair.create(
+                        "SHA512withRSA/PSS",
+                        new PSSParameterSpec(
+                                "SHA-512", "MGF1", MGF1ParameterSpec.SHA512, 512 / 8, 1));
+            case SIGNATURE_RSA_PKCS1_V1_5_WITH_SHA256:
+                return Pair.create("SHA256withRSA", null);
+            case SIGNATURE_RSA_PKCS1_V1_5_WITH_SHA512:
+                return Pair.create("SHA512withRSA", null);
+            case SIGNATURE_ECDSA_WITH_SHA256:
+                return Pair.create("SHA256withECDSA", null);
+            case SIGNATURE_ECDSA_WITH_SHA512:
+                return Pair.create("SHA512withECDSA", null);
+            case SIGNATURE_DSA_WITH_SHA256:
+                return Pair.create("SHA256withDSA", null);
+            case SIGNATURE_DSA_WITH_SHA512:
+                return Pair.create("SHA512withDSA", null);
+            default:
+                throw new IllegalArgumentException(
+                        "Unknown signature algorithm: 0x"
+                                + Long.toHexString(sigAlgorithm & 0xffffffff));
+        }
+    }
+
+    private static int getSignatureAlgorithmContentDigestAlgorithm(int sigAlgorithm) {
+        switch (sigAlgorithm) {
+            case SIGNATURE_RSA_PSS_WITH_SHA256:
+            case SIGNATURE_RSA_PKCS1_V1_5_WITH_SHA256:
+            case SIGNATURE_ECDSA_WITH_SHA256:
+            case SIGNATURE_DSA_WITH_SHA256:
+                return CONTENT_DIGEST_CHUNKED_SHA256;
+            case SIGNATURE_RSA_PSS_WITH_SHA512:
+            case SIGNATURE_RSA_PKCS1_V1_5_WITH_SHA512:
+            case SIGNATURE_ECDSA_WITH_SHA512:
+            case SIGNATURE_DSA_WITH_SHA512:
+                return CONTENT_DIGEST_CHUNKED_SHA512;
+            default:
+                throw new IllegalArgumentException(
+                        "Unknown signature algorithm: 0x"
+                                + Long.toHexString(sigAlgorithm & 0xffffffff));
+        }
+    }
+
+    private static String getContentDigestAlgorithmJcaDigestAlgorithm(int digestAlgorithm) {
+        switch (digestAlgorithm) {
+            case CONTENT_DIGEST_CHUNKED_SHA256:
+                return "SHA-256";
+            case CONTENT_DIGEST_CHUNKED_SHA512:
+                return "SHA-512";
+            default:
+                throw new IllegalArgumentException(
+                        "Unknown content digest algorthm: " + digestAlgorithm);
+        }
+    }
+
+    private static int getContentDigestAlgorithmOutputSizeBytes(int digestAlgorithm) {
+        switch (digestAlgorithm) {
+            case CONTENT_DIGEST_CHUNKED_SHA256:
+                return 256 / 8;
+            case CONTENT_DIGEST_CHUNKED_SHA512:
+                return 512 / 8;
+            default:
+                throw new IllegalArgumentException(
+                        "Unknown content digest algorthm: " + digestAlgorithm);
+        }
+    }
+
+    /**
+     * Indicates that the APK file could not be parsed.
+     */
+    public static class ApkParseException extends Exception {
+        private static final long serialVersionUID = 1L;
+
+        public ApkParseException(String message) {
+            super(message);
+        }
+
+        public ApkParseException(String message, Throwable cause) {
+            super(message, cause);
+        }
+    }
+}
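Note: a minimal Python sketch of the chunked content digest that
computeContentDigests() describes above: each input segment is cut into 1 MiB
chunks, every chunk is hashed over the byte 0xa5, its uint32 little-endian
length and its contents, and the final digest covers the byte 0x5a, the uint32
little-endian chunk count and the concatenated chunk digests (SHA-256 variant
only).

    import hashlib
    import struct

    CHUNK_SIZE = 1024 * 1024

    def chunked_sha256(segments):
        # segments would be [before_central_dir, central_dir, eocd] for an APK.
        chunk_digests = []
        for segment in segments:
            for off in range(0, len(segment), CHUNK_SIZE):
                chunk = segment[off:off + CHUNK_SIZE]
                md = hashlib.sha256()
                md.update(b"\xa5" + struct.pack("<I", len(chunk)))
                md.update(chunk)
                chunk_digests.append(md.digest())
        top = hashlib.sha256()
        top.update(b"\x5a" + struct.pack("<I", len(chunk_digests)))
        top.update(b"".join(chunk_digests))
        return top.digest()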
diff --git a/tools/signapk/src/com/android/signapk/Pair.java b/tools/signapk/src/com/android/signapk/Pair.java
new file mode 100644
index 0000000..e4a6c92
--- /dev/null
+++ b/tools/signapk/src/com/android/signapk/Pair.java
@@ -0,0 +1,81 @@
+/*
+ * Copyright (C) 2016 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.android.signapk;
+
+/**
+ * Pair of two elements.
+ */
+public final class Pair<A, B> {
+    private final A mFirst;
+    private final B mSecond;
+
+    private Pair(A first, B second) {
+        mFirst = first;
+        mSecond = second;
+    }
+
+    public static <A, B> Pair<A, B> create(A first, B second) {
+        return new Pair<A, B>(first, second);
+    }
+
+    public A getFirst() {
+        return mFirst;
+    }
+
+    public B getSecond() {
+        return mSecond;
+    }
+
+    @Override
+    public int hashCode() {
+        final int prime = 31;
+        int result = 1;
+        result = prime * result + ((mFirst == null) ? 0 : mFirst.hashCode());
+        result = prime * result + ((mSecond == null) ? 0 : mSecond.hashCode());
+        return result;
+    }
+
+    @Override
+    public boolean equals(Object obj) {
+        if (this == obj) {
+            return true;
+        }
+        if (obj == null) {
+            return false;
+        }
+        if (getClass() != obj.getClass()) {
+            return false;
+        }
+        @SuppressWarnings("rawtypes")
+        Pair other = (Pair) obj;
+        if (mFirst == null) {
+            if (other.mFirst != null) {
+                return false;
+            }
+        } else if (!mFirst.equals(other.mFirst)) {
+            return false;
+        }
+        if (mSecond == null) {
+            if (other.mSecond != null) {
+                return false;
+            }
+        } else if (!mSecond.equals(other.mSecond)) {
+            return false;
+        }
+        return true;
+    }
+}
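Note: a minimal Python sketch of the little-endian, length-prefixed encodings
that ApkSignerV2 builds out of these pairs: a plain sequence of length-prefixed
byte strings, and the (signature algorithm ID, bytes) pairs used for the
digests and signatures lists.

    import struct

    def encode_length_prefixed(elements):
        # Each element is emitted as a uint32 length followed by its bytes.
        out = bytearray()
        for element in elements:
            out += struct.pack("<I", len(element)) + element
        return bytes(out)

    def encode_id_value_pairs(pairs):
        # Each pair is itself length-prefixed; the inner layout is a uint32
        # algorithm ID followed by a length-prefixed value, so the pair length
        # is 8 + len(value).
        out = bytearray()
        for alg_id, value in pairs:
            out += struct.pack("<I", 8 + len(value))
            out += struct.pack("<I", alg_id)
            out += struct.pack("<I", len(value)) + value
        return bytes(out)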
diff --git a/tools/signapk/SignApk.java b/tools/signapk/src/com/android/signapk/SignApk.java
similarity index 68%
rename from tools/signapk/SignApk.java
rename to tools/signapk/src/com/android/signapk/SignApk.java
index 88f486a..ba84b42 100644
--- a/tools/signapk/SignApk.java
+++ b/tools/signapk/src/com/android/signapk/SignApk.java
@@ -34,6 +34,7 @@
 import org.bouncycastle.operator.jcajce.JcaContentSignerBuilder;
 import org.bouncycastle.operator.jcajce.JcaDigestCalculatorProviderBuilder;
 import org.bouncycastle.util.encoders.Base64;
+import org.conscrypt.OpenSSLProvider;
 
 import java.io.Console;
 import java.io.BufferedReader;
@@ -50,13 +51,16 @@
 import java.io.OutputStream;
 import java.io.PrintStream;
 import java.lang.reflect.Constructor;
+import java.nio.ByteBuffer;
 import java.security.DigestOutputStream;
 import java.security.GeneralSecurityException;
+import java.security.InvalidKeyException;
 import java.security.Key;
 import java.security.KeyFactory;
 import java.security.MessageDigest;
 import java.security.PrivateKey;
 import java.security.Provider;
+import java.security.PublicKey;
 import java.security.Security;
 import java.security.cert.CertificateEncodingException;
 import java.security.cert.CertificateFactory;
@@ -66,8 +70,11 @@
 import java.util.ArrayList;
 import java.util.Collections;
 import java.util.Enumeration;
+import java.util.Iterator;
+import java.util.List;
 import java.util.Locale;
 import java.util.Map;
+import java.util.TimeZone;
 import java.util.TreeMap;
 import java.util.jar.Attributes;
 import java.util.jar.JarEntry;
@@ -100,7 +107,8 @@
 /**
  * Command line tool to sign JAR files (including APKs and OTA updates) in a way
  * compatible with the mincrypt verifier, using EC or RSA keys and SHA1 or
- * SHA-256 (see historical note).
+ * SHA-256 (see historical note). The tool can additionally sign APKs using
+ * APK Signature Scheme v2.
  */
 class SignApk {
     private static final String CERT_SF_NAME = "META-INF/CERT.SF";
@@ -110,21 +118,32 @@
 
     private static final String OTACERT_NAME = "META-INF/com/android/otacert";
 
-    private static Provider sBouncyCastleProvider;
-
     // bitmasks for which hash algorithms we need the manifest to include.
     private static final int USE_SHA1 = 1;
     private static final int USE_SHA256 = 2;
 
+    /** Digest algorithm used when signing the APK using APK Signature Scheme v2. */
+    private static final String APK_SIG_SCHEME_V2_DIGEST_ALGORITHM = "SHA-256";
+
+    /**
+     * Minimum Android SDK API Level that accepts JAR signatures using SHA-256. Older platform
+     * versions accept only SHA-1 signatures.
+     */
+    private static final int MIN_API_LEVEL_FOR_SHA256_JAR_SIGNATURES = 18;
+
     /**
      * Return one of USE_SHA1 or USE_SHA256 according to the signature
      * algorithm specified in the cert.
      */
-    private static int getDigestAlgorithm(X509Certificate cert) {
+    private static int getDigestAlgorithm(X509Certificate cert, int minSdkVersion) {
         String sigAlg = cert.getSigAlgName().toUpperCase(Locale.US);
-        if ("SHA1WITHRSA".equals(sigAlg) ||
-            "MD5WITHRSA".equals(sigAlg)) {     // see "HISTORICAL NOTE" above.
-            return USE_SHA1;
+        if ("SHA1WITHRSA".equals(sigAlg) || "MD5WITHRSA".equals(sigAlg)) {
+            // see "HISTORICAL NOTE" above.
+            if (minSdkVersion < MIN_API_LEVEL_FOR_SHA256_JAR_SIGNATURES) {
+                return USE_SHA1;
+            } else {
+                return USE_SHA256;
+            }
         } else if (sigAlg.startsWith("SHA256WITH")) {
             return USE_SHA256;
         } else {
@@ -134,11 +153,11 @@
     }
 
     /** Returns the expected signature algorithm for this key type. */
-    private static String getSignatureAlgorithm(X509Certificate cert) {
-        String sigAlg = cert.getSigAlgName().toUpperCase(Locale.US);
+    private static String getSignatureAlgorithm(X509Certificate cert, int minSdkVersion) {
         String keyType = cert.getPublicKey().getAlgorithm().toUpperCase(Locale.US);
         if ("RSA".equalsIgnoreCase(keyType)) {
-            if (getDigestAlgorithm(cert) == USE_SHA256) {
+            if ((minSdkVersion >= MIN_API_LEVEL_FOR_SHA256_JAR_SIGNATURES)
+                    || (getDigestAlgorithm(cert, minSdkVersion) == USE_SHA256)) {
                 return "SHA256withRSA";
             } else {
                 return "SHA1withRSA";
@@ -167,18 +186,29 @@
     }
 
     /**
-     * Reads the password from console and returns it as a string.
+     * Reads the password from the console if one exists, otherwise reads it
+     * from stdin, and returns it as a string.
      *
      * @param keyFile The file containing the private key.  Used to prompt the user.
      */
     private static String readPassword(File keyFile) {
         Console console;
         char[] pwd;
-        if((console = System.console()) != null &&
-           (pwd = console.readPassword("[%s]", "Enter password for " + keyFile)) != null){
-            return String.valueOf(pwd);
+        if ((console = System.console()) == null) {
+            System.out.print("Enter password for " + keyFile + " (password will not be hidden): ");
+            System.out.flush();
+            BufferedReader stdin = new BufferedReader(new InputStreamReader(System.in));
+            try {
+                return stdin.readLine();
+            } catch (IOException ex) {
+                return null;
+            }
         } else {
-            return null;
+            if ((pwd = console.readPassword("[%s]", "Enter password for " + keyFile)) != null) {
+                return String.valueOf(pwd);
+            } else {
+                return null;
+            }
         }
     }
 
@@ -235,8 +265,11 @@
              * Now it's in a PKCS#8 PrivateKeyInfo structure. Read its Algorithm
              * OID and use that to construct a KeyFactory.
              */
-            ASN1InputStream bIn = new ASN1InputStream(new ByteArrayInputStream(spec.getEncoded()));
-            PrivateKeyInfo pki = PrivateKeyInfo.getInstance(bIn.readObject());
+            PrivateKeyInfo pki;
+            try (ASN1InputStream bIn =
+                    new ASN1InputStream(new ByteArrayInputStream(spec.getEncoded()))) {
+                pki = PrivateKeyInfo.getInstance(bIn.readObject());
+            }
             String algOid = pki.getPrivateKeyAlgorithm().getAlgorithm().getId();
 
             return KeyFactory.getInstance(algOid).generatePrivate(spec);
@@ -297,10 +330,24 @@
                 Attributes attr = null;
                 if (input != null) attr = input.getAttributes(name);
                 attr = attr != null ? new Attributes(attr) : new Attributes();
+                // Remove any previously computed digests from this entry's attributes.
+                for (Iterator<Object> i = attr.keySet().iterator(); i.hasNext();) {
+                    Object key = i.next();
+                    if (!(key instanceof Attributes.Name)) {
+                        continue;
+                    }
+                    String attributeNameLowerCase =
+                            ((Attributes.Name) key).toString().toLowerCase(Locale.US);
+                    if (attributeNameLowerCase.endsWith("-digest")) {
+                        i.remove();
+                    }
+                }
+                // Add SHA-1 digest if requested
                 if (md_sha1 != null) {
                     attr.putValue("SHA1-Digest",
                                   new String(Base64.encode(md_sha1.digest()), "ASCII"));
                 }
+                // Add SHA-256 digest if requested
                 if (md_sha256 != null) {
                     attr.putValue("SHA-256-Digest",
                                   new String(Base64.encode(md_sha256.digest()), "ASCII"));
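
A sketch of how a single manifest entry ends up with a fresh digest attribute: stale *-Digest values are dropped as above, then the new digest is recorded. java.util.Base64 stands in here for the Bouncy Castle Base64 encoder that the diff uses.

    import java.security.MessageDigest;
    import java.security.NoSuchAlgorithmException;
    import java.util.Base64;
    import java.util.Iterator;
    import java.util.Locale;
    import java.util.jar.Attributes;

    // Sketch: drop any stale *-Digest attributes, then record a SHA-256 digest of the entry data.
    class ManifestDigestSketch {
        static void refreshDigest(Attributes attr, byte[] entryData) throws NoSuchAlgorithmException {
            for (Iterator<Object> i = attr.keySet().iterator(); i.hasNext();) {
                Object key = i.next();
                if (key instanceof Attributes.Name
                        && key.toString().toLowerCase(Locale.US).endsWith("-digest")) {
                    i.remove();
                }
            }
            MessageDigest md = MessageDigest.getInstance("SHA-256");
            attr.putValue("SHA-256-Digest",
                    Base64.getEncoder().encodeToString(md.digest(entryData)));
        }
    }
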
@@ -376,12 +423,22 @@
 
     /** Write a .SF file with a digest of the specified manifest. */
     private static void writeSignatureFile(Manifest manifest, OutputStream out,
-                                           int hash)
+            int hash, boolean additionallySignedUsingAnApkSignatureScheme)
         throws IOException, GeneralSecurityException {
         Manifest sf = new Manifest();
         Attributes main = sf.getMainAttributes();
         main.putValue("Signature-Version", "1.0");
         main.putValue("Created-By", "1.0 (Android SignApk)");
+        if (additionallySignedUsingAnApkSignatureScheme) {
+            // Add APK Signature Scheme v2 signature stripping protection.
+            // This attribute indicates that this APK is supposed to have been signed using one or
+            // more APK-specific signature schemes in addition to the standard JAR signature scheme
+            // used by this code. APK signature verifier should reject the APK if it does not
+            // contain a signature for the signature scheme the verifier prefers out of this set.
+            main.putValue(
+                    ApkSignerV2.SF_ATTRIBUTE_ANDROID_APK_SIGNED_NAME,
+                    ApkSignerV2.SF_ATTRIBUTE_ANDROID_APK_SIGNED_VALUE);
+        }
 
         MessageDigest md = MessageDigest.getInstance(
             hash == USE_SHA256 ? "SHA256" : "SHA1");
@@ -406,7 +463,7 @@
             print.flush();
 
             Attributes sfAttr = new Attributes();
-            sfAttr.putValue(hash == USE_SHA256 ? "SHA-256-Digest" : "SHA1-Digest-Manifest",
+            sfAttr.putValue(hash == USE_SHA256 ? "SHA-256-Digest" : "SHA1-Digest",
                             new String(Base64.encode(md.digest()), "ASCII"));
             sf.getEntries().put(entry.getKey(), sfAttr);
         }
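
A sketch of the .SF main-attribute header this method produces when v2 signing is also requested. The attribute name "X-Android-APK-Signed" and value "2" are assumptions; the authoritative strings are the ApkSignerV2.SF_ATTRIBUTE_ANDROID_APK_SIGNED_* constants, which are not shown in this diff.

    import java.util.jar.Attributes;
    import java.util.jar.Manifest;

    // Sketch: main attributes of the .SF file, with the v2 stripping-protection marker.
    class SignatureFileHeaderSketch {
        static Manifest buildHeader(boolean alsoSignedWithApkSignatureSchemeV2) {
            Manifest sf = new Manifest();
            Attributes main = sf.getMainAttributes();
            main.putValue("Signature-Version", "1.0");
            main.putValue("Created-By", "1.0 (Android SignApk)");
            if (alsoSignedWithApkSignatureSchemeV2) {
                // Assumed name/value: a v2-aware verifier refuses the APK if this marker is
                // present but the corresponding v2 signature has been stripped.
                main.putValue("X-Android-APK-Signed", "2");
            }
            return sf;
        }
    }
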
@@ -426,7 +483,7 @@
 
     /** Sign data and write the digital signature to 'out'. */
     private static void writeSignatureBlock(
-        CMSTypedData data, X509Certificate publicKey, PrivateKey privateKey,
+        CMSTypedData data, X509Certificate publicKey, PrivateKey privateKey, int minSdkVersion,
         OutputStream out)
         throws IOException,
                CertificateEncodingException,
@@ -437,22 +494,22 @@
         JcaCertStore certs = new JcaCertStore(certList);
 
         CMSSignedDataGenerator gen = new CMSSignedDataGenerator();
-        ContentSigner signer = new JcaContentSignerBuilder(getSignatureAlgorithm(publicKey))
-            .setProvider(sBouncyCastleProvider)
-            .build(privateKey);
+        ContentSigner signer =
+                new JcaContentSignerBuilder(getSignatureAlgorithm(publicKey, minSdkVersion))
+                        .build(privateKey);
         gen.addSignerInfoGenerator(
             new JcaSignerInfoGeneratorBuilder(
                 new JcaDigestCalculatorProviderBuilder()
-                .setProvider(sBouncyCastleProvider)
                 .build())
             .setDirectSignature(true)
             .build(signer, publicKey));
         gen.addCertificates(certs);
         CMSSignedData sigData = gen.generate(data, false);
 
-        ASN1InputStream asn1 = new ASN1InputStream(sigData.getEncoded());
-        DEROutputStream dos = new DEROutputStream(out);
-        dos.writeObject(asn1.readObject());
+        try (ASN1InputStream asn1 = new ASN1InputStream(sigData.getEncoded())) {
+            DEROutputStream dos = new DEROutputStream(out);
+            dos.writeObject(asn1.readObject());
+        }
     }
 
     /**
@@ -462,7 +519,7 @@
      * more efficient.
      */
     private static void copyFiles(Manifest manifest, JarFile in, JarOutputStream out,
-                                  long timestamp, int alignment) throws IOException {
+                                  long timestamp, int defaultAlignment) throws IOException {
         byte[] buffer = new byte[4096];
         int num;
 
@@ -487,6 +544,11 @@
             // Preserve the STORED method of the input entry.
             outEntry = new JarEntry(inEntry);
             outEntry.setTime(timestamp);
+            // Discard comment and extra fields of this entry to
+            // simplify alignment logic below and for consistency with
+            // how compressed entries are handled later.
+            outEntry.setComment(null);
+            outEntry.setExtra(null);
 
             // 'offset' is the offset into the file at which we expect
             // the file data to begin.  This is the value we need to
@@ -501,6 +563,7 @@
                 offset += 4;
                 firstEntry = false;
             }
+            int alignment = getStoredEntryDataAlignment(name, defaultAlignment);
             if (alignment > 0 && (offset % alignment != 0)) {
                 // Set the "extra data" of the entry to between 1 and
                 // alignment-1 bytes, to make the file data begin at
@@ -541,6 +604,24 @@
         }
     }
 
+    /**
+     * Returns the multiple (in bytes) at which the provided {@code STORED} entry's data must start
+     * relative to the start of the file, or {@code 0} if alignment of this entry's data is not important.
+     */
+    private static int getStoredEntryDataAlignment(String entryName, int defaultAlignment) {
+        if (defaultAlignment <= 0) {
+            return 0;
+        }
+
+        if (entryName.endsWith(".so")) {
+            // Align .so contents to memory page boundary to enable memory-mapped
+            // execution.
+            return 4096;
+        } else {
+            return defaultAlignment;
+        }
+    }
+
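
A sketch of the padding arithmetic that the alignment logic above relies on: given the offset at which a STORED entry's data would begin, it computes how many extra-field pad bytes are needed to reach the next alignment boundary.

    // Sketch: pad bytes needed so that 'dataOffset' lands on a multiple of 'alignment'.
    class AlignmentSketch {
        static int padBytesNeeded(long dataOffset, int alignment) {
            if (alignment <= 0) {
                return 0;
            }
            int remainder = (int) (dataOffset % alignment);
            return remainder == 0 ? 0 : alignment - remainder;
        }

        public static void main(String[] args) {
            // A .so entry whose data would start at offset 5000 needs 3192 pad bytes
            // to begin on a 4096-byte page boundary (5000 + 3192 == 8192).
            System.out.println(padBytesNeeded(5000, 4096)); // prints 3192
        }
    }
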
     private static class WholeFileSignerOutputStream extends FilterOutputStream {
         private boolean closing = false;
         private ByteArrayOutputStream footer = new ByteArrayOutputStream();
@@ -601,22 +682,25 @@
     }
 
     private static class CMSSigner implements CMSTypedData {
-        private JarFile inputJar;
-        private File publicKeyFile;
-        private X509Certificate publicKey;
-        private PrivateKey privateKey;
-        private String outputFile;
-        private OutputStream outputStream;
+        private final JarFile inputJar;
+        private final File publicKeyFile;
+        private final X509Certificate publicKey;
+        private final PrivateKey privateKey;
+        private final long timestamp;
+        private final int minSdkVersion;
+        private final OutputStream outputStream;
         private final ASN1ObjectIdentifier type;
         private WholeFileSignerOutputStream signer;
 
         public CMSSigner(JarFile inputJar, File publicKeyFile,
-                         X509Certificate publicKey, PrivateKey privateKey,
-                         OutputStream outputStream) {
+                         X509Certificate publicKey, PrivateKey privateKey, long timestamp,
+                         int minSdkVersion, OutputStream outputStream) {
             this.inputJar = inputJar;
             this.publicKeyFile = publicKeyFile;
             this.publicKey = publicKey;
             this.privateKey = privateKey;
+            this.timestamp = timestamp;
+            this.minSdkVersion = minSdkVersion;
             this.outputStream = outputStream;
             this.type = new ASN1ObjectIdentifier(CMSObjectIdentifiers.data.getId());
         }
@@ -625,31 +709,34 @@
          * This should actually return byte[] or something similar, but nothing
          * actually checks it currently.
          */
+        @Override
         public Object getContent() {
             return this;
         }
 
+        @Override
         public ASN1ObjectIdentifier getContentType() {
             return type;
         }
 
+        @Override
         public void write(OutputStream out) throws IOException {
             try {
                 signer = new WholeFileSignerOutputStream(out, outputStream);
                 JarOutputStream outputJar = new JarOutputStream(signer);
 
-                int hash = getDigestAlgorithm(publicKey);
-
-                // Assume the certificate is valid for at least an hour.
-                long timestamp = publicKey.getNotBefore().getTime() + 3600L * 1000;
+                int hash = getDigestAlgorithm(publicKey, minSdkVersion);
 
                 Manifest manifest = addDigestsToManifest(inputJar, hash);
                 copyFiles(manifest, inputJar, outputJar, timestamp, 0);
                 addOtacert(outputJar, publicKeyFile, timestamp, manifest, hash);
 
-                signFile(manifest, inputJar,
+                signFile(manifest,
                          new X509Certificate[]{ publicKey },
                          new PrivateKey[]{ privateKey },
+                         timestamp,
+                         minSdkVersion,
+                         false, // Don't sign using APK Signature Scheme v2
                          outputJar);
 
                 signer.notifyClosing();
@@ -666,7 +753,7 @@
                    CertificateEncodingException,
                    OperatorCreationException,
                    CMSException {
-            SignApk.writeSignatureBlock(this, publicKey, privateKey, temp);
+            SignApk.writeSignatureBlock(this, publicKey, privateKey, minSdkVersion, temp);
         }
 
         public WholeFileSignerOutputStream getSigner() {
@@ -676,9 +763,10 @@
 
     private static void signWholeFile(JarFile inputJar, File publicKeyFile,
                                       X509Certificate publicKey, PrivateKey privateKey,
+                                      long timestamp, int minSdkVersion,
                                       OutputStream outputStream) throws Exception {
         CMSSigner cmsOut = new CMSSigner(inputJar, publicKeyFile,
-                                         publicKey, privateKey, outputStream);
+                publicKey, privateKey, timestamp, minSdkVersion, outputStream);
 
         ByteArrayOutputStream temp = new ByteArrayOutputStream();
 
@@ -742,12 +830,13 @@
         temp.writeTo(outputStream);
     }
 
-    private static void signFile(Manifest manifest, JarFile inputJar,
+    private static void signFile(Manifest manifest,
                                  X509Certificate[] publicKey, PrivateKey[] privateKey,
+                                 long timestamp,
+                                 int minSdkVersion,
+                                 boolean additionallySignedUsingAnApkSignatureScheme,
                                  JarOutputStream outputJar)
         throws Exception {
-        // Assume the certificate is valid for at least an hour.
-        long timestamp = publicKey[0].getNotBefore().getTime() + 3600L * 1000;
 
         // MANIFEST.MF
         JarEntry je = new JarEntry(JarFile.MANIFEST_NAME);
@@ -763,7 +852,11 @@
             je.setTime(timestamp);
             outputJar.putNextEntry(je);
             ByteArrayOutputStream baos = new ByteArrayOutputStream();
-            writeSignatureFile(manifest, baos, getDigestAlgorithm(publicKey[k]));
+            writeSignatureFile(
+                    manifest,
+                    baos,
+                    getDigestAlgorithm(publicKey[k], minSdkVersion),
+                    additionallySignedUsingAnApkSignatureScheme);
             byte[] signedData = baos.toByteArray();
             outputJar.write(signedData);
 
@@ -775,7 +868,7 @@
             je.setTime(timestamp);
             outputJar.putNextEntry(je);
             writeSignatureBlock(new CMSProcessableByteArray(signedData),
-                                publicKey[k], privateKey[k], outputJar);
+                                publicKey[k], privateKey[k], minSdkVersion, outputJar);
         }
     }
 
@@ -831,10 +924,89 @@
         Security.insertProviderAt((Provider) o, 1);
     }
 
+    /**
+     * Converts the provided lists of private keys, their X.509 certificates, and digest algorithms
+     * into a list of APK Signature Scheme v2 {@code SignerConfig} instances.
+     */
+    public static List<ApkSignerV2.SignerConfig> createV2SignerConfigs(
+            PrivateKey[] privateKeys, X509Certificate[] certificates, String[] digestAlgorithms)
+                    throws InvalidKeyException {
+        if (privateKeys.length != certificates.length) {
+            throw new IllegalArgumentException(
+                    "The number of private keys must match the number of certificates: "
+                            + privateKeys.length + " vs " + certificates.length);
+        }
+        List<ApkSignerV2.SignerConfig> result = new ArrayList<>(privateKeys.length);
+        for (int i = 0; i < privateKeys.length; i++) {
+            PrivateKey privateKey = privateKeys[i];
+            X509Certificate certificate = certificates[i];
+            PublicKey publicKey = certificate.getPublicKey();
+            String keyAlgorithm = privateKey.getAlgorithm();
+            if (!keyAlgorithm.equalsIgnoreCase(publicKey.getAlgorithm())) {
+                throw new InvalidKeyException(
+                        "Key algorithm of private key #" + (i + 1) + " does not match key"
+                        + " algorithm of public key #" + (i + 1) + ": " + keyAlgorithm
+                        + " vs " + publicKey.getAlgorithm());
+            }
+            ApkSignerV2.SignerConfig signerConfig = new ApkSignerV2.SignerConfig();
+            signerConfig.privateKey = privateKey;
+            signerConfig.certificates = Collections.singletonList(certificate);
+            List<Integer> signatureAlgorithms = new ArrayList<>(digestAlgorithms.length);
+            for (String digestAlgorithm : digestAlgorithms) {
+                try {
+                    signatureAlgorithms.add(
+                            getV2SignatureAlgorithm(keyAlgorithm, digestAlgorithm));
+                } catch (IllegalArgumentException e) {
+                    throw new InvalidKeyException(
+                            "Unsupported key and digest algorithm combination for signer #"
+                                    + (i + 1),
+                            e);
+                }
+            }
+            signerConfig.signatureAlgorithms = signatureAlgorithms;
+            result.add(signerConfig);
+        }
+        return result;
+    }
+
+    private static int getV2SignatureAlgorithm(String keyAlgorithm, String digestAlgorithm) {
+        if ("SHA-256".equalsIgnoreCase(digestAlgorithm)) {
+            if ("RSA".equalsIgnoreCase(keyAlgorithm)) {
+                // Use RSASSA-PKCS1-v1_5 signature scheme instead of RSASSA-PSS to guarantee
+                // deterministic signatures which make life easier for OTA updates (fewer files
+                // changed when deterministic signature schemes are used).
+                return ApkSignerV2.SIGNATURE_RSA_PKCS1_V1_5_WITH_SHA256;
+            } else if ("EC".equalsIgnoreCase(keyAlgorithm)) {
+                return ApkSignerV2.SIGNATURE_ECDSA_WITH_SHA256;
+            } else if ("DSA".equalsIgnoreCase(keyAlgorithm)) {
+                return ApkSignerV2.SIGNATURE_DSA_WITH_SHA256;
+            } else {
+                throw new IllegalArgumentException("Unsupported key algorithm: " + keyAlgorithm);
+            }
+        } else if ("SHA-512".equalsIgnoreCase(digestAlgorithm)) {
+            if ("RSA".equalsIgnoreCase(keyAlgorithm)) {
+                // Use RSASSA-PKCS1-v1_5 signature scheme instead of RSASSA-PSS to guarantee
+                // deterministic signatures which make life easier for OTA updates (fewer files
+                // changed when deterministic signature schemes are used).
+                return ApkSignerV2.SIGNATURE_RSA_PKCS1_V1_5_WITH_SHA512;
+            } else if ("EC".equalsIgnoreCase(keyAlgorithm)) {
+                return ApkSignerV2.SIGNATURE_ECDSA_WITH_SHA512;
+            } else if ("DSA".equalsIgnoreCase(keyAlgorithm)) {
+                return ApkSignerV2.SIGNATURE_DSA_WITH_SHA512;
+            } else {
+                throw new IllegalArgumentException("Unsupported key algorithm: " + keyAlgorithm);
+            }
+        } else {
+            throw new IllegalArgumentException("Unsupported digest algorithm: " + digestAlgorithm);
+        }
+    }
+
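
A self-contained sketch of the per-signer consistency check that createV2SignerConfigs performs before mapping digests to v2 signature algorithms: the private key's algorithm must match the algorithm of the certificate's public key, otherwise the pair cannot produce a verifiable signature.

    import java.security.InvalidKeyException;
    import java.security.PrivateKey;
    import java.security.cert.X509Certificate;

    // Sketch: the consistency check applied to each (private key, certificate) pair.
    class SignerConfigCheckSketch {
        static void requireMatchingAlgorithms(PrivateKey privateKey, X509Certificate certificate)
                throws InvalidKeyException {
            String privateAlg = privateKey.getAlgorithm();
            String publicAlg = certificate.getPublicKey().getAlgorithm();
            if (!privateAlg.equalsIgnoreCase(publicAlg)) {
                throw new InvalidKeyException(
                        "Key algorithm mismatch: private=" + privateAlg + ", public=" + publicAlg);
            }
        }
    }
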
     private static void usage() {
         System.err.println("Usage: signapk [-w] " +
                            "[-a <alignment>] " +
                            "[-providerClass <className>] " +
+                           "[--min-sdk-version <n>] " +
+                           "[--disable-v2] " +
                            "publickey.x509[.pem] privatekey.pk8 " +
                            "[publickey2.x509[.pem] privatekey2.pk8 ...] " +
                            "input.jar output.jar");
@@ -844,13 +1016,19 @@
     public static void main(String[] args) {
         if (args.length < 4) usage();
 
-        sBouncyCastleProvider = new BouncyCastleProvider();
-        Security.addProvider(sBouncyCastleProvider);
+        // Install Conscrypt as the highest-priority provider. Its crypto primitives are faster than
+        // the standard or Bouncy Castle ones.
+        Security.insertProviderAt(new OpenSSLProvider(), 1);
+        // Install Bouncy Castle (as the lowest-priority provider) because Conscrypt does not offer
+        // DSA which may still be needed.
+        // TODO: Stop installing Bouncy Castle provider once DSA is no longer needed.
+        Security.addProvider(new BouncyCastleProvider());
 
         boolean signWholeFile = false;
         String providerClass = null;
-        String providerArg = null;
         int alignment = 4;
+        int minSdkVersion = 0;
+        boolean signUsingApkSignatureSchemeV2 = true;
 
         int argstart = 0;
         while (argstart < args.length && args[argstart].startsWith("-")) {
@@ -866,6 +1044,18 @@
             } else if ("-a".equals(args[argstart])) {
                 alignment = Integer.parseInt(args[++argstart]);
                 ++argstart;
+            } else if ("--min-sdk-version".equals(args[argstart])) {
+                String minSdkVersionString = args[++argstart];
+                try {
+                    minSdkVersion = Integer.parseInt(minSdkVersionString);
+                } catch (NumberFormatException e) {
+                    throw new IllegalArgumentException(
+                            "--min-sdk-version must be a decimal number: " + minSdkVersionString);
+                }
+                ++argstart;
+            } else if ("--disable-v2".equals(args[argstart])) {
+                signUsingApkSignatureSchemeV2 = false;
+                ++argstart;
             } else {
                 usage();
             }
@@ -895,17 +1085,19 @@
                 for (int i = 0; i < numKeys; ++i) {
                     int argNum = argstart + i*2;
                     publicKey[i] = readPublicKey(new File(args[argNum]));
-                    hashes |= getDigestAlgorithm(publicKey[i]);
+                    hashes |= getDigestAlgorithm(publicKey[i], minSdkVersion);
                 }
             } catch (IllegalArgumentException e) {
                 System.err.println(e);
                 System.exit(1);
             }
 
-            // Set the ZIP file timestamp to the starting valid time
-            // of the 0th certificate plus one hour (to match what
-            // we've historically done).
-            long timestamp = publicKey[0].getNotBefore().getTime() + 3600L * 1000;
+            // Set all ZIP file timestamps to Jan 1 2009 00:00:00.
+            long timestamp = 1230768000000L;
+            // The Java ZipEntry API we're using converts milliseconds since epoch into MS-DOS
+            // timestamp using the current timezone. We thus adjust the milliseconds since epoch
+            // value to end up with MS-DOS timestamp of Jan 1 2009 00:00:00.
+            timestamp -= TimeZone.getDefault().getOffset(timestamp);
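
A sketch of the timestamp adjustment above: ZipEntry.setTime() interprets milliseconds-since-epoch in the local time zone when producing the MS-DOS date/time fields, so the UTC value for Jan 1 2009 00:00:00 is shifted by the local offset to make the stored DOS timestamp come out the same regardless of where the build runs.

    import java.util.TimeZone;
    import java.util.zip.ZipEntry;

    // Sketch: reproduce the fixed ZIP timestamp computation.
    class FixedZipTimestampSketch {
        static long fixedZipTimestamp() {
            long timestamp = 1230768000000L; // Jan 1 2009 00:00:00 UTC, in ms since the epoch
            // Compensate for ZipEntry's local-time conversion so the DOS fields read
            // 2009-01-01 00:00:00 on every build machine.
            return timestamp - TimeZone.getDefault().getOffset(timestamp);
        }

        public static void main(String[] args) {
            ZipEntry entry = new ZipEntry("example.txt");
            entry.setTime(fixedZipTimestamp());
            System.out.println(entry.getTime());
        }
    }
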
 
             PrivateKey[] privateKey = new PrivateKey[numKeys];
             for (int i = 0; i < numKeys; ++i) {
@@ -916,25 +1108,59 @@
 
             outputFile = new FileOutputStream(outputFilename);
 
-
+            // NOTE: Signing currently recompresses any compressed entries using Deflate (default
+            // compression level for OTA update files and maximum compression level for APKs).
             if (signWholeFile) {
                 SignApk.signWholeFile(inputJar, firstPublicKeyFile,
-                                      publicKey[0], privateKey[0], outputFile);
+                                      publicKey[0], privateKey[0],
+                                      timestamp, minSdkVersion,
+                                      outputFile);
             } else {
-                JarOutputStream outputJar = new JarOutputStream(outputFile);
-
-                // For signing .apks, use the maximum compression to make
-                // them as small as possible (since they live forever on
-                // the system partition).  For OTA packages, use the
-                // default compression level, which is much much faster
-                // and produces output that is only a tiny bit larger
-                // (~0.1% on full OTA packages I tested).
+                // Generate, in memory, an APK signed using standard JAR Signature Scheme.
+                ByteArrayOutputStream v1SignedApkBuf = new ByteArrayOutputStream();
+                JarOutputStream outputJar = new JarOutputStream(v1SignedApkBuf);
+                // Use maximum compression for compressed entries because the APK lives forever on
+                // the system partition.
                 outputJar.setLevel(9);
-
                 Manifest manifest = addDigestsToManifest(inputJar, hashes);
                 copyFiles(manifest, inputJar, outputJar, timestamp, alignment);
-                signFile(manifest, inputJar, publicKey, privateKey, outputJar);
+                signFile(
+                        manifest,
+                        publicKey, privateKey,
+                        timestamp, minSdkVersion, signUsingApkSignatureSchemeV2,
+                        outputJar);
                 outputJar.close();
+                ByteBuffer v1SignedApk = ByteBuffer.wrap(v1SignedApkBuf.toByteArray());
+                v1SignedApkBuf.reset();
+
+                ByteBuffer[] outputChunks;
+                if (signUsingApkSignatureSchemeV2) {
+                    // Additionally sign the APK using the APK Signature Scheme v2.
+                    ByteBuffer apkContents = v1SignedApk;
+                    List<ApkSignerV2.SignerConfig> signerConfigs =
+                            createV2SignerConfigs(
+                                    privateKey,
+                                    publicKey,
+                                    new String[] {APK_SIG_SCHEME_V2_DIGEST_ALGORITHM});
+                    outputChunks = ApkSignerV2.sign(apkContents, signerConfigs);
+                } else {
+                    // Output the JAR-signed APK as is.
+                    outputChunks = new ByteBuffer[] {v1SignedApk};
+                }
+
+                // This assumes outputChunks are array-backed. To avoid this assumption, the
+                // code could be rewritten to use FileChannel.
+                for (ByteBuffer outputChunk : outputChunks) {
+                    outputFile.write(
+                            outputChunk.array(),
+                            outputChunk.arrayOffset() + outputChunk.position(),
+                            outputChunk.remaining());
+                    outputChunk.position(outputChunk.limit());
+                }
+
+                outputFile.close();
+                outputFile = null;
+                return;
             }
         } catch (Exception e) {
             e.printStackTrace();
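
The chunk-writing loop above notes that it assumes array-backed ByteBuffers; a sketch of the FileChannel alternative it alludes to, which handles direct buffers as well (the output path handling here is hypothetical):

    import java.io.IOException;
    import java.nio.ByteBuffer;
    import java.nio.channels.FileChannel;
    import java.nio.file.Paths;
    import java.nio.file.StandardOpenOption;

    // Sketch: write signed-APK chunks without assuming the ByteBuffers are array-backed.
    class ChunkWriterSketch {
        static void writeChunks(String outputPath, ByteBuffer[] chunks) throws IOException {
            try (FileChannel channel = FileChannel.open(
                    Paths.get(outputPath),
                    StandardOpenOption.CREATE, StandardOpenOption.TRUNCATE_EXISTING,
                    StandardOpenOption.WRITE)) {
                for (ByteBuffer chunk : chunks) {
                    while (chunk.hasRemaining()) {
                        channel.write(chunk); // advances the buffer's position as bytes are written
                    }
                }
            }
        }
    }
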
diff --git a/tools/signapk/src/com/android/signapk/ZipUtils.java b/tools/signapk/src/com/android/signapk/ZipUtils.java
new file mode 100644
index 0000000..7575a77
--- /dev/null
+++ b/tools/signapk/src/com/android/signapk/ZipUtils.java
@@ -0,0 +1,162 @@
+/*
+ * Copyright (C) 2016 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+package com.android.signapk;
+
+import java.nio.ByteBuffer;
+import java.nio.ByteOrder;
+
+/**
+ * Assorted ZIP format helpers.
+ *
+ * <p>NOTE: Most helper methods operating on {@code ByteBuffer} instances expect that the byte
+ * order of these buffers is little-endian.
+ */
+public abstract class ZipUtils {
+    private ZipUtils() {}
+
+    private static final int ZIP_EOCD_REC_MIN_SIZE = 22;
+    private static final int ZIP_EOCD_REC_SIG = 0x06054b50;
+    private static final int ZIP_EOCD_CENTRAL_DIR_SIZE_FIELD_OFFSET = 12;
+    private static final int ZIP_EOCD_CENTRAL_DIR_OFFSET_FIELD_OFFSET = 16;
+    private static final int ZIP_EOCD_COMMENT_LENGTH_FIELD_OFFSET = 20;
+
+    private static final int ZIP64_EOCD_LOCATOR_SIZE = 20;
+    private static final int ZIP64_EOCD_LOCATOR_SIG = 0x07064b50;
+
+    private static final int UINT16_MAX_VALUE = 0xffff;
+
+    /**
+     * Returns the position at which ZIP End of Central Directory record starts in the provided
+     * buffer or {@code -1} if the record is not present.
+     *
+     * <p>NOTE: Byte order of {@code zipContents} must be little-endian.
+     */
+    public static int findZipEndOfCentralDirectoryRecord(ByteBuffer zipContents) {
+        assertByteOrderLittleEndian(zipContents);
+
+        // ZIP End of Central Directory (EOCD) record is located at the very end of the ZIP archive.
+        // The record can be identified by its 4-byte signature/magic which is located at the very
+        // beginning of the record. A complication is that the record is variable-length because of
+        // the comment field.
+        // The algorithm for locating the ZIP EOCD record is as follows. We search backwards from
+        // end of the buffer for the EOCD record signature. Whenever we find a signature, we check
+        // the candidate record's comment length is such that the remainder of the record takes up
+        // exactly the remaining bytes in the buffer. The search is bounded because the maximum
+        // size of the comment field is 65535 bytes (the field is an unsigned 16-bit number).
+
+        int archiveSize = zipContents.capacity();
+        if (archiveSize < ZIP_EOCD_REC_MIN_SIZE) {
+            return -1;
+        }
+        int maxCommentLength = Math.min(archiveSize - ZIP_EOCD_REC_MIN_SIZE, UINT16_MAX_VALUE);
+        int eocdWithEmptyCommentStartPosition = archiveSize - ZIP_EOCD_REC_MIN_SIZE;
+        for (int expectedCommentLength = 0; expectedCommentLength < maxCommentLength;
+                expectedCommentLength++) {
+            int eocdStartPos = eocdWithEmptyCommentStartPosition - expectedCommentLength;
+            if (zipContents.getInt(eocdStartPos) == ZIP_EOCD_REC_SIG) {
+                int actualCommentLength =
+                        getUnsignedInt16(
+                                zipContents, eocdStartPos + ZIP_EOCD_COMMENT_LENGTH_FIELD_OFFSET);
+                if (actualCommentLength == expectedCommentLength) {
+                    return eocdStartPos;
+                }
+            }
+        }
+
+        return -1;
+    }
+
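
A short usage sketch for the EOCD search above, assuming the whole archive fits in memory; the buffer must be switched to little-endian before the lookup.

    import com.android.signapk.ZipUtils;

    import java.io.IOException;
    import java.nio.ByteBuffer;
    import java.nio.ByteOrder;
    import java.nio.file.Files;
    import java.nio.file.Paths;

    // Sketch: locate the End of Central Directory record of an APK/ZIP held in memory.
    class EocdLookupSketch {
        public static void main(String[] args) throws IOException {
            byte[] apkBytes = Files.readAllBytes(Paths.get(args[0]));
            ByteBuffer apk = ByteBuffer.wrap(apkBytes);
            apk.order(ByteOrder.LITTLE_ENDIAN); // ZipUtils helpers require little-endian buffers
            int eocdOffset = ZipUtils.findZipEndOfCentralDirectoryRecord(apk);
            System.out.println(eocdOffset >= 0 ? "EOCD at offset " + eocdOffset : "No EOCD found");
        }
    }
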
+    /**
+     * Returns {@code true} if the provided buffer contains a ZIP64 End of Central Directory
+     * Locator.
+     *
+     * <p>NOTE: Byte order of {@code zipContents} must be little-endian.
+     */
+    public static final boolean isZip64EndOfCentralDirectoryLocatorPresent(
+            ByteBuffer zipContents, int zipEndOfCentralDirectoryPosition) {
+        assertByteOrderLittleEndian(zipContents);
+
+        // ZIP64 End of Central Directory Locator immediately precedes the ZIP End of Central
+        // Directory Record.
+
+        int locatorPosition = zipEndOfCentralDirectoryPosition - ZIP64_EOCD_LOCATOR_SIZE;
+        if (locatorPosition < 0) {
+            return false;
+        }
+
+        return zipContents.getInt(locatorPosition) == ZIP64_EOCD_LOCATOR_SIG;
+    }
+
+    /**
+     * Returns the offset of the start of the ZIP Central Directory in the archive.
+     *
+     * <p>NOTE: Byte order of {@code zipEndOfCentralDirectory} must be little-endian.
+     */
+    public static long getZipEocdCentralDirectoryOffset(ByteBuffer zipEndOfCentralDirectory) {
+        assertByteOrderLittleEndian(zipEndOfCentralDirectory);
+        return getUnsignedInt32(
+                zipEndOfCentralDirectory,
+                zipEndOfCentralDirectory.position() + ZIP_EOCD_CENTRAL_DIR_OFFSET_FIELD_OFFSET);
+    }
+
+    /**
+     * Sets the offset of the start of the ZIP Central Directory in the archive.
+     *
+     * <p>NOTE: Byte order of {@code zipEndOfCentralDirectory} must be little-endian.
+     */
+    public static void setZipEocdCentralDirectoryOffset(
+            ByteBuffer zipEndOfCentralDirectory, long offset) {
+        assertByteOrderLittleEndian(zipEndOfCentralDirectory);
+        setUnsignedInt32(
+                zipEndOfCentralDirectory,
+                zipEndOfCentralDirectory.position() + ZIP_EOCD_CENTRAL_DIR_OFFSET_FIELD_OFFSET,
+                offset);
+    }
+
+    /**
+     * Returns the size (in bytes) of the ZIP Central Directory.
+     *
+     * <p>NOTE: Byte order of {@code zipEndOfCentralDirectory} must be little-endian.
+     */
+    public static long getZipEocdCentralDirectorySizeBytes(ByteBuffer zipEndOfCentralDirectory) {
+        assertByteOrderLittleEndian(zipEndOfCentralDirectory);
+        return getUnsignedInt32(
+                zipEndOfCentralDirectory,
+                zipEndOfCentralDirectory.position() + ZIP_EOCD_CENTRAL_DIR_SIZE_FIELD_OFFSET);
+    }
+
+    private static void assertByteOrderLittleEndian(ByteBuffer buffer) {
+        if (buffer.order() != ByteOrder.LITTLE_ENDIAN) {
+            throw new IllegalArgumentException("ByteBuffer byte order must be little endian");
+        }
+    }
+
+    private static int getUnsignedInt16(ByteBuffer buffer, int offset) {
+        return buffer.getShort(offset) & 0xffff;
+    }
+
+    private static long getUnsignedInt32(ByteBuffer buffer, int offset) {
+        return buffer.getInt(offset) & 0xffffffffL;
+    }
+
+    private static void setUnsignedInt32(ByteBuffer buffer, int offset, long value) {
+        if ((value < 0) || (value > 0xffffffffL)) {
+            throw new IllegalArgumentException("uint32 value of out range: " + value);
+        }
+        buffer.putInt(buffer.position() + offset, (int) value);
+    }
+}
diff --git a/tools/zipalign/Android.mk b/tools/zipalign/Android.mk
index 4194f81..8c0240a 100644
--- a/tools/zipalign/Android.mk
+++ b/tools/zipalign/Android.mk
@@ -22,20 +22,17 @@
 	liblog \
 	libzopfli
 
-ifeq ($(HOST_OS),linux)
-LOCAL_LDLIBS += -lrt
-endif
+LOCAL_LDLIBS_linux += -lrt
 
-ifdef USE_MINGW
-LOCAL_STATIC_LIBRARIES += libz
-else
-LOCAL_LDLIBS += -lz
-endif
+LOCAL_STATIC_LIBRARIES_windows += libz
+LOCAL_LDLIBS_linux += -lz
+LOCAL_LDLIBS_darwin += -lz
 
 ifneq ($(strip $(BUILD_HOST_static)),)
 LOCAL_LDLIBS += -lpthread
 endif # BUILD_HOST_static
 
 LOCAL_MODULE := zipalign
+LOCAL_MODULE_HOST_OS := darwin linux windows
 
 include $(BUILD_HOST_EXECUTABLE)
diff --git a/tools/zipalign/ZipEntry.cpp b/tools/zipalign/ZipEntry.cpp
index b2270cb..2f33e23 100644
--- a/tools/zipalign/ZipEntry.cpp
+++ b/tools/zipalign/ZipEntry.cpp
@@ -26,6 +26,7 @@
 #include <stdio.h>
 #include <string.h>
 #include <assert.h>
+#include <inttypes.h>
 
 using namespace android;
 
@@ -56,7 +57,7 @@
     /* using the info in the CDE, go load up the LFH */
     posn = ftell(fp);
     if (fseek(fp, mCDE.mLocalHeaderRelOffset, SEEK_SET) != 0) {
-        ALOGD("local header seek failed (%ld)\n",
+        ALOGD("local header seek failed (%" PRIu32 ")\n",
             mCDE.mLocalHeaderRelOffset);
         return UNKNOWN_ERROR;
     }
@@ -123,12 +124,12 @@
     mCDE.mExternalAttrs = 0x81b60020;   // matches what WinZip does
 
     if (mCDE.mFileNameLength > 0) {
-        mCDE.mFileName = new unsigned char[mCDE.mFileNameLength+1];
+        mCDE.mFileName = new uint8_t[mCDE.mFileNameLength+1];
         strcpy((char*) mCDE.mFileName, fileName);
     }
     if (mCDE.mFileCommentLength > 0) {
         /* TODO: stop assuming null-terminated ASCII here? */
-        mCDE.mFileComment = new unsigned char[mCDE.mFileCommentLength+1];
+        mCDE.mFileComment = new uint8_t[mCDE.mFileCommentLength+1];
         strcpy((char*) mCDE.mFileComment, comment);
     }
 
@@ -141,8 +142,7 @@
  *
  * Initializes the CDE and the LFH.
  */
-status_t ZipEntry::initFromExternal(const ZipFile* pZipFile,
-    const ZipEntry* pEntry)
+status_t ZipEntry::initFromExternal(const ZipEntry* pEntry)
 {
     /*
      * Copy everything in the CDE over, then fix up the hairy bits.
@@ -150,20 +150,20 @@
     memcpy(&mCDE, &pEntry->mCDE, sizeof(mCDE));
 
     if (mCDE.mFileNameLength > 0) {
-        mCDE.mFileName = new unsigned char[mCDE.mFileNameLength+1];
+        mCDE.mFileName = new uint8_t[mCDE.mFileNameLength+1];
         if (mCDE.mFileName == NULL)
             return NO_MEMORY;
         strcpy((char*) mCDE.mFileName, (char*)pEntry->mCDE.mFileName);
     }
     if (mCDE.mFileCommentLength > 0) {
-        mCDE.mFileComment = new unsigned char[mCDE.mFileCommentLength+1];
+        mCDE.mFileComment = new uint8_t[mCDE.mFileCommentLength+1];
         if (mCDE.mFileComment == NULL)
             return NO_MEMORY;
         strcpy((char*) mCDE.mFileComment, (char*)pEntry->mCDE.mFileComment);
     }
     if (mCDE.mExtraFieldLength > 0) {
         /* we null-terminate this, though it may not be a string */
-        mCDE.mExtraField = new unsigned char[mCDE.mExtraFieldLength+1];
+        mCDE.mExtraField = new uint8_t[mCDE.mExtraFieldLength+1];
         if (mCDE.mExtraField == NULL)
             return NO_MEMORY;
         memcpy(mCDE.mExtraField, pEntry->mCDE.mExtraField,
@@ -180,7 +180,7 @@
     assert(mLFH.mExtraField == NULL);
     mLFH.mExtraFieldLength = pEntry->mLFH.mExtraFieldLength;
     if (mLFH.mExtraFieldLength > 0) {
-        mLFH.mExtraField = new unsigned char[mLFH.mExtraFieldLength+1];
+        mLFH.mExtraField = new uint8_t[mLFH.mExtraFieldLength+1];
         if (mLFH.mExtraField == NULL)
             return NO_MEMORY;
         memcpy(mLFH.mExtraField, pEntry->mLFH.mExtraField,
@@ -205,9 +205,9 @@
 
     if (mLFH.mExtraFieldLength > 0) {
         /* extend existing field */
-        unsigned char* newExtra;
+        uint8_t* newExtra;
 
-        newExtra = new unsigned char[mLFH.mExtraFieldLength + padding];
+        newExtra = new uint8_t[mLFH.mExtraFieldLength + padding];
         if (newExtra == NULL)
             return NO_MEMORY;
         memset(newExtra + mLFH.mExtraFieldLength, 0, padding);
@@ -218,7 +218,7 @@
         mLFH.mExtraFieldLength += padding;
     } else {
         /* create new field */
-        mLFH.mExtraField = new unsigned char[padding];
+        mLFH.mExtraField = new uint8_t[padding];
         memset(mLFH.mExtraField, 0, padding);
         mLFH.mExtraFieldLength = padding;
     }
@@ -246,7 +246,7 @@
 
     delete[] mLFH.mFileName;
     if (mLFH.mFileNameLength > 0) {
-        mLFH.mFileName = new unsigned char[mLFH.mFileNameLength+1];
+        mLFH.mFileName = new uint8_t[mLFH.mFileNameLength+1];
         strcpy((char*) mLFH.mFileName, (const char*) mCDE.mFileName);
     } else {
         mLFH.mFileName = NULL;
@@ -256,7 +256,7 @@
 /*
  * Set some information about a file after we add it.
  */
-void ZipEntry::setDataInfo(long uncompLen, long compLen, unsigned long crc32,
+void ZipEntry::setDataInfo(long uncompLen, long compLen, uint32_t crc32,
     int compressionMethod)
 {
     mCDE.mCompressionMethod = compressionMethod;
@@ -360,7 +360,7 @@
     struct tm tmResult;
 #endif
     time_t even;
-    unsigned short zdate, ztime;
+    uint16_t zdate, ztime;
 
     struct tm* ptm;
 
@@ -402,7 +402,7 @@
 status_t ZipEntry::LocalFileHeader::read(FILE* fp)
 {
     status_t result = NO_ERROR;
-    unsigned char buf[kLFHLen];
+    uint8_t buf[kLFHLen];
 
     assert(mFileName == NULL);
     assert(mExtraField == NULL);
@@ -433,7 +433,7 @@
 
     /* grab filename */
     if (mFileNameLength != 0) {
-        mFileName = new unsigned char[mFileNameLength+1];
+        mFileName = new uint8_t[mFileNameLength+1];
         if (mFileName == NULL) {
             result = NO_MEMORY;
             goto bail;
@@ -447,7 +447,7 @@
 
     /* grab extra field */
     if (mExtraFieldLength != 0) {
-        mExtraField = new unsigned char[mExtraFieldLength+1];
+        mExtraField = new uint8_t[mExtraFieldLength+1];
         if (mExtraField == NULL) {
             result = NO_MEMORY;
             goto bail;
@@ -468,7 +468,7 @@
  */
 status_t ZipEntry::LocalFileHeader::write(FILE* fp)
 {
-    unsigned char buf[kLFHLen];
+    uint8_t buf[kLFHLen];
 
     ZipEntry::putLongLE(&buf[0x00], kSignature);
     ZipEntry::putShortLE(&buf[0x04], mVersionToExtract);
@@ -507,13 +507,13 @@
 void ZipEntry::LocalFileHeader::dump(void) const
 {
     ALOGD(" LocalFileHeader contents:\n");
-    ALOGD("  versToExt=%u gpBits=0x%04x compression=%u\n",
+    ALOGD("  versToExt=%" PRIu16 " gpBits=0x%04" PRIx16 " compression=%" PRIu16 "\n",
         mVersionToExtract, mGPBitFlag, mCompressionMethod);
-    ALOGD("  modTime=0x%04x modDate=0x%04x crc32=0x%08lx\n",
+    ALOGD("  modTime=0x%04" PRIx16 " modDate=0x%04" PRIx16 " crc32=0x%08" PRIx32 "\n",
         mLastModFileTime, mLastModFileDate, mCRC32);
-    ALOGD("  compressedSize=%lu uncompressedSize=%lu\n",
+    ALOGD("  compressedSize=%" PRIu32 " uncompressedSize=%" PRIu32 "\n",
         mCompressedSize, mUncompressedSize);
-    ALOGD("  filenameLen=%u extraLen=%u\n",
+    ALOGD("  filenameLen=%" PRIu16 " extraLen=%" PRIu16 "\n",
         mFileNameLength, mExtraFieldLength);
     if (mFileName != NULL)
         ALOGD("  filename: '%s'\n", mFileName);
@@ -536,7 +536,7 @@
 status_t ZipEntry::CentralDirEntry::read(FILE* fp)
 {
     status_t result = NO_ERROR;
-    unsigned char buf[kCDELen];
+    uint8_t buf[kCDELen];
 
     /* no re-use */
     assert(mFileName == NULL);
@@ -575,7 +575,7 @@
 
     /* grab filename */
     if (mFileNameLength != 0) {
-        mFileName = new unsigned char[mFileNameLength+1];
+        mFileName = new uint8_t[mFileNameLength+1];
         if (mFileName == NULL) {
             result = NO_MEMORY;
             goto bail;
@@ -589,7 +589,7 @@
 
     /* read "extra field" */
     if (mExtraFieldLength != 0) {
-        mExtraField = new unsigned char[mExtraFieldLength+1];
+        mExtraField = new uint8_t[mExtraFieldLength+1];
         if (mExtraField == NULL) {
             result = NO_MEMORY;
             goto bail;
@@ -604,7 +604,7 @@
 
     /* grab comment, if any */
     if (mFileCommentLength != 0) {
-        mFileComment = new unsigned char[mFileCommentLength+1];
+        mFileComment = new uint8_t[mFileCommentLength+1];
         if (mFileComment == NULL) {
             result = NO_MEMORY;
             goto bail;
@@ -626,7 +626,7 @@
  */
 status_t ZipEntry::CentralDirEntry::write(FILE* fp)
 {
-    unsigned char buf[kCDELen];
+    uint8_t buf[kCDELen];
 
     ZipEntry::putLongLE(&buf[0x00], kSignature);
     ZipEntry::putShortLE(&buf[0x04], mVersionMadeBy);
@@ -676,15 +676,15 @@
 void ZipEntry::CentralDirEntry::dump(void) const
 {
     ALOGD(" CentralDirEntry contents:\n");
-    ALOGD("  versMadeBy=%u versToExt=%u gpBits=0x%04x compression=%u\n",
+    ALOGD("  versMadeBy=%" PRIu16 " versToExt=%" PRIu16 " gpBits=0x%04" PRIx16 " compression=%" PRIu16 "\n",
         mVersionMadeBy, mVersionToExtract, mGPBitFlag, mCompressionMethod);
-    ALOGD("  modTime=0x%04x modDate=0x%04x crc32=0x%08lx\n",
+    ALOGD("  modTime=0x%04" PRIx16 " modDate=0x%04" PRIx16 " crc32=0x%08" PRIx32 "\n",
         mLastModFileTime, mLastModFileDate, mCRC32);
-    ALOGD("  compressedSize=%lu uncompressedSize=%lu\n",
+    ALOGD("  compressedSize=%" PRIu32 " uncompressedSize=%" PRIu32 "\n",
         mCompressedSize, mUncompressedSize);
-    ALOGD("  filenameLen=%u extraLen=%u commentLen=%u\n",
+    ALOGD("  filenameLen=%" PRIu16 " extraLen=%" PRIu16 " commentLen=%" PRIu16 "\n",
         mFileNameLength, mExtraFieldLength, mFileCommentLength);
-    ALOGD("  diskNumStart=%u intAttr=0x%04x extAttr=0x%08lx relOffset=%lu\n",
+    ALOGD("  diskNumStart=%" PRIu16 " intAttr=0x%04" PRIx16 " extAttr=0x%08" PRIx32 " relOffset=%" PRIu32 "\n",
         mDiskNumberStart, mInternalAttrs, mExternalAttrs,
         mLocalHeaderRelOffset);
 
diff --git a/tools/zipalign/ZipEntry.h b/tools/zipalign/ZipEntry.h
index 7f721b4..e06567d 100644
--- a/tools/zipalign/ZipEntry.h
+++ b/tools/zipalign/ZipEntry.h
@@ -25,6 +25,7 @@
 #include <utils/Errors.h>
 
 #include <stdlib.h>
+#include <stdint.h>
 #include <stdio.h>
 
 namespace android {
@@ -85,7 +86,7 @@
     /*
      * Return the data CRC.
      */
-    unsigned long getCRC32(void) const { return mCDE.mCRC32; }
+    uint32_t getCRC32(void) const { return mCDE.mCRC32; }
 
     /*
      * Return file modification time in UNIX seconds-since-epoch.
@@ -108,21 +109,21 @@
      * Some basic functions for raw data manipulation.  "LE" means
      * Little Endian.
      */
-    static inline unsigned short getShortLE(const unsigned char* buf) {
+    static inline uint16_t getShortLE(const uint8_t* buf) {
         return buf[0] | (buf[1] << 8);
     }
-    static inline unsigned long getLongLE(const unsigned char* buf) {
+    static inline uint32_t getLongLE(const uint8_t* buf) {
         return buf[0] | (buf[1] << 8) | (buf[2] << 16) | (buf[3] << 24);
     }
-    static inline void putShortLE(unsigned char* buf, short val) {
-        buf[0] = (unsigned char) val;
-        buf[1] = (unsigned char) (val >> 8);
+    static inline void putShortLE(uint8_t* buf, uint16_t val) {
+        buf[0] = (uint8_t) val;
+        buf[1] = (uint8_t) (val >> 8);
     }
-    static inline void putLongLE(unsigned char* buf, long val) {
-        buf[0] = (unsigned char) val;
-        buf[1] = (unsigned char) (val >> 8);
-        buf[2] = (unsigned char) (val >> 16);
-        buf[3] = (unsigned char) (val >> 24);
+    static inline void putLongLE(uint8_t* buf, uint32_t val) {
+        buf[0] = (uint8_t) val;
+        buf[1] = (uint8_t) (val >> 8);
+        buf[2] = (uint8_t) (val >> 16);
+        buf[3] = (uint8_t) (val >> 24);
     }
 
     /* defined for Zip archives */
@@ -166,7 +167,7 @@
      * Initialize the structure with the contents of a ZipEntry from
      * another file.
      */
-    status_t initFromExternal(const ZipFile* pZipFile, const ZipEntry* pEntry);
+    status_t initFromExternal(const ZipEntry* pEntry);
 
     /*
      * Add some pad bytes to the LFH.  We do this by adding or resizing
@@ -177,7 +178,7 @@
     /*
      * Set information about the data for this entry.
      */
-    void setDataInfo(long uncompLen, long compLen, unsigned long crc32,
+    void setDataInfo(long uncompLen, long compLen, uint32_t crc32,
         int compressionMethod);
 
     /*
@@ -195,7 +196,7 @@
      * the current file.
      */
     void setLFHOffset(off_t offset) {
-        mCDE.mLocalHeaderRelOffset = (long) offset;
+        mCDE.mLocalHeaderRelOffset = (uint32_t) offset;
     }
 
     /* mark for deletion; used by ZipFile::remove() */
@@ -240,19 +241,19 @@
         status_t read(FILE* fp);
         status_t write(FILE* fp);
 
-        // unsigned long mSignature;
-        unsigned short  mVersionToExtract;
-        unsigned short  mGPBitFlag;
-        unsigned short  mCompressionMethod;
-        unsigned short  mLastModFileTime;
-        unsigned short  mLastModFileDate;
-        unsigned long   mCRC32;
-        unsigned long   mCompressedSize;
-        unsigned long   mUncompressedSize;
-        unsigned short  mFileNameLength;
-        unsigned short  mExtraFieldLength;
-        unsigned char*  mFileName;
-        unsigned char*  mExtraField;
+        // uint32_t mSignature;
+        uint16_t mVersionToExtract;
+        uint16_t mGPBitFlag;
+        uint16_t mCompressionMethod;
+        uint16_t mLastModFileTime;
+        uint16_t mLastModFileDate;
+        uint32_t mCRC32;
+        uint32_t mCompressedSize;
+        uint32_t mUncompressedSize;
+        uint16_t mFileNameLength;
+        uint16_t mExtraFieldLength;
+        uint8_t* mFileName;
+        uint8_t* mExtraField;
 
         enum {
             kSignature      = 0x04034b50,
@@ -298,26 +299,26 @@
         status_t read(FILE* fp);
         status_t write(FILE* fp);
 
-        // unsigned long mSignature;
-        unsigned short  mVersionMadeBy;
-        unsigned short  mVersionToExtract;
-        unsigned short  mGPBitFlag;
-        unsigned short  mCompressionMethod;
-        unsigned short  mLastModFileTime;
-        unsigned short  mLastModFileDate;
-        unsigned long   mCRC32;
-        unsigned long   mCompressedSize;
-        unsigned long   mUncompressedSize;
-        unsigned short  mFileNameLength;
-        unsigned short  mExtraFieldLength;
-        unsigned short  mFileCommentLength;
-        unsigned short  mDiskNumberStart;
-        unsigned short  mInternalAttrs;
-        unsigned long   mExternalAttrs;
-        unsigned long   mLocalHeaderRelOffset;
-        unsigned char*  mFileName;
-        unsigned char*  mExtraField;
-        unsigned char*  mFileComment;
+        // uint32_t mSignature;
+        uint16_t mVersionMadeBy;
+        uint16_t mVersionToExtract;
+        uint16_t mGPBitFlag;
+        uint16_t mCompressionMethod;
+        uint16_t mLastModFileTime;
+        uint16_t mLastModFileDate;
+        uint32_t mCRC32;
+        uint32_t mCompressedSize;
+        uint32_t mUncompressedSize;
+        uint16_t mFileNameLength;
+        uint16_t mExtraFieldLength;
+        uint16_t mFileCommentLength;
+        uint16_t mDiskNumberStart;
+        uint16_t mInternalAttrs;
+        uint32_t mExternalAttrs;
+        uint32_t mLocalHeaderRelOffset;
+        uint8_t* mFileName;
+        uint8_t* mExtraField;
+        uint8_t* mFileComment;
 
         void dump(void) const;
 
diff --git a/tools/zipalign/ZipFile.cpp b/tools/zipalign/ZipFile.cpp
index 3c5ec15..4edf0aa 100644
--- a/tools/zipalign/ZipFile.cpp
+++ b/tools/zipalign/ZipFile.cpp
@@ -34,6 +34,7 @@
 #include <sys/stat.h>
 #include <errno.h>
 #include <assert.h>
+#include <inttypes.h>
 
 using namespace android;
 
@@ -206,7 +207,7 @@
 status_t ZipFile::readCentralDir(void)
 {
     status_t result = NO_ERROR;
-    unsigned char* buf = NULL;
+    uint8_t* buf = NULL;
     off_t fileLength, seekStart;
     long readAmount;
     int i;
@@ -222,7 +223,7 @@
         goto bail;
     }
 
-    buf = new unsigned char[EndOfCentralDir::kMaxEOCDSearch];
+    buf = new uint8_t[EndOfCentralDir::kMaxEOCDSearch];
     if (buf == NULL) {
         ALOGD("Failure allocating %d bytes for EOCD search",
              EndOfCentralDir::kMaxEOCDSearch);
@@ -296,7 +297,7 @@
      * we're hoping to preserve.
      */
     if (fseek(mZipFp, mEOCD.mCentralDirOffset, SEEK_SET) != 0) {
-        ALOGD("Failure seeking to central dir offset %ld\n",
+        ALOGD("Failure seeking to central dir offset %" PRIu32 "\n",
              mEOCD.mCentralDirOffset);
         result = UNKNOWN_ERROR;
         goto bail;
@@ -305,7 +306,7 @@
     /*
      * Loop through and read the central dir entries.
      */
-    ALOGV("Scanning %d entries...\n", mEOCD.mTotalNumEntries);
+    ALOGV("Scanning %" PRIu16 " entries...\n", mEOCD.mTotalNumEntries);
     int entry;
     for (entry = 0; entry < mEOCD.mTotalNumEntries; entry++) {
         ZipEntry* pEntry = new ZipEntry;
@@ -325,7 +326,7 @@
      * If all went well, we should now be back at the EOCD.
      */
     {
-        unsigned char checkBuf[4];
+        uint8_t checkBuf[4];
         if (fread(checkBuf, 1, 4, mZipFp) != 4) {
             ALOGD("EOCD check read failed\n");
             result = INVALID_OPERATION;
@@ -365,7 +366,7 @@
     status_t result = NO_ERROR;
     long lfhPosn, startPosn, endPosn, uncompressedLen;
     FILE* inputFp = NULL;
-    unsigned long crc;
+    uint32_t crc;
     time_t modWhen;
 
     if (mReadOnly)
@@ -466,14 +467,16 @@
         bool scanResult;
         int method;
         long compressedLen;
+        unsigned long longcrc;
 
         scanResult = ZipUtils::examineGzip(inputFp, &method, &uncompressedLen,
-                        &compressedLen, &crc);
+                        &compressedLen, &longcrc);
         if (!scanResult || method != ZipEntry::kCompressDeflated) {
             ALOGD("this isn't a deflated gzip file?");
             result = UNKNOWN_ERROR;
             goto bail;
         }
+        crc = longcrc;
 
         result = copyPartialFpToFp(mZipFp, inputFp, compressedLen, NULL);
         if (result != NO_ERROR) {
@@ -563,7 +566,7 @@
         goto bail;
     }
 
-    result = pEntry->initFromExternal(pSourceZip, pSourceEntry);
+    result = pEntry->initFromExternal(pSourceEntry);
     if (result != NO_ERROR)
         goto bail;
     if (padding != 0) {
@@ -670,7 +673,7 @@
         goto bail;
     }
 
-    result = pEntry->initFromExternal(pSourceZip, pSourceEntry);
+    result = pEntry->initFromExternal(pSourceEntry);
     if (result != NO_ERROR)
         goto bail;
 
@@ -710,7 +713,7 @@
             goto bail;
         }
         long startPosn = ftell(mZipFp);
-        unsigned long crc;
+        uint32_t crc;
         if (compressFpToFp(mZipFp, NULL, buf, uncompressedLen, &crc) != NO_ERROR) {
             ALOGW("recompress of '%s' failed\n", pEntry->mCDE.mFileName);
             result = UNKNOWN_ERROR;
@@ -780,9 +783,9 @@
  * On exit, "srcFp" will be seeked to the end of the file, and "dstFp"
  * will be seeked immediately past the data.
  */
-status_t ZipFile::copyFpToFp(FILE* dstFp, FILE* srcFp, unsigned long* pCRC32)
+status_t ZipFile::copyFpToFp(FILE* dstFp, FILE* srcFp, uint32_t* pCRC32)
 {
-    unsigned char tmpBuf[32768];
+    uint8_t tmpBuf[32768];
     size_t count;
 
     *pCRC32 = crc32(0L, Z_NULL, 0);
@@ -811,7 +814,7 @@
  * On exit, "dstFp" will be seeked immediately past the data.
  */
 status_t ZipFile::copyDataToFp(FILE* dstFp,
-    const void* data, size_t size, unsigned long* pCRC32)
+    const void* data, size_t size, uint32_t* pCRC32)
 {
     size_t count;
 
@@ -836,9 +839,9 @@
  * will be seeked immediately past the data just written.
  */
 status_t ZipFile::copyPartialFpToFp(FILE* dstFp, FILE* srcFp, long length,
-    unsigned long* pCRC32)
+    uint32_t* pCRC32)
 {
-    unsigned char tmpBuf[32768];
+    uint8_t tmpBuf[32768];
     size_t count;
 
     if (pCRC32 != NULL)
@@ -846,7 +849,7 @@
 
     while (length) {
         long readSize;
-        
+
         readSize = sizeof(tmpBuf);
         if (readSize > length)
             readSize = length;
@@ -878,15 +881,15 @@
  * will be seeked immediately past the compressed data.
  */
 status_t ZipFile::compressFpToFp(FILE* dstFp, FILE* srcFp,
-    const void* data, size_t size, unsigned long* pCRC32)
+    const void* data, size_t size, uint32_t* pCRC32)
 {
     status_t result = NO_ERROR;
     const size_t kBufSize = 1024 * 1024;
-    unsigned char* inBuf = NULL;
-    unsigned char* outBuf = NULL;
+    uint8_t* inBuf = NULL;
+    uint8_t* outBuf = NULL;
     size_t outSize = 0;
     bool atEof = false;     // no feof() available yet
-    unsigned long crc;
+    uint32_t crc;
     ZopfliOptions options;
     unsigned char bp = 0;
 
@@ -902,7 +905,7 @@
         /*
          * Create an input buffer and an output buffer.
          */
-        inBuf = new unsigned char[kBufSize];
+        inBuf = new uint8_t[kBufSize];
         if (inBuf == NULL) {
             result = NO_MEMORY;
             goto bail;
@@ -1128,7 +1131,7 @@
     if (dst == src || n <= 0)
         return NO_ERROR;
 
-    unsigned char readBuf[32768];
+    uint8_t readBuf[32768];
 
     if (dst < src) {
         /* shift stuff toward start of file; must read from start */
@@ -1294,7 +1297,7 @@
  * "buf" should be positioned at the EOCD signature, and should contain
  * the entire EOCD area including the comment.
  */
-status_t ZipFile::EndOfCentralDir::readBuf(const unsigned char* buf, int len)
+status_t ZipFile::EndOfCentralDir::readBuf(const uint8_t* buf, int len)
 {
     /* don't allow re-use */
     assert(mComment == NULL);
@@ -1322,11 +1325,11 @@
 
     if (mCommentLen > 0) {
         if (kEOCDLen + mCommentLen > len) {
-            ALOGD("EOCD(%d) + comment(%d) exceeds len (%d)\n",
+            ALOGD("EOCD(%d) + comment(%" PRIu16 ") exceeds len (%d)\n",
                 kEOCDLen, mCommentLen, len);
             return UNKNOWN_ERROR;
         }
-        mComment = new unsigned char[mCommentLen];
+        mComment = new uint8_t[mCommentLen];
         memcpy(mComment, buf + kEOCDLen, mCommentLen);
     }
 
@@ -1338,7 +1341,7 @@
  */
 status_t ZipFile::EndOfCentralDir::write(FILE* fp)
 {
-    unsigned char buf[kEOCDLen];
+    uint8_t buf[kEOCDLen];
 
     ZipEntry::putLongLE(&buf[0x00], kSignature);
     ZipEntry::putShortLE(&buf[0x04], mDiskNumber);
@@ -1366,9 +1369,9 @@
 void ZipFile::EndOfCentralDir::dump(void) const
 {
     ALOGD(" EndOfCentralDir contents:\n");
-    ALOGD("  diskNum=%u diskWCD=%u numEnt=%u totalNumEnt=%u\n",
+    ALOGD("  diskNum=%" PRIu16 " diskWCD=%" PRIu16 " numEnt=%" PRIu16 " totalNumEnt=%" PRIu16 "\n",
         mDiskNumber, mDiskWithCentralDir, mNumEntries, mTotalNumEntries);
-    ALOGD("  centDirSize=%lu centDirOff=%lu commentLen=%u\n",
+    ALOGD("  centDirSize=%" PRIu32 " centDirOff=%" PRIu32 " commentLen=%" PRIu32 "\n",
         mCentralDirSize, mCentralDirOffset, mCommentLen);
 }
 
diff --git a/tools/zipalign/ZipFile.h b/tools/zipalign/ZipFile.h
index b99cda5..b0bafe9 100644
--- a/tools/zipalign/ZipFile.h
+++ b/tools/zipalign/ZipFile.h
@@ -194,18 +194,18 @@
             delete[] mComment;
         }
 
-        status_t readBuf(const unsigned char* buf, int len);
+        status_t readBuf(const uint8_t* buf, int len);
         status_t write(FILE* fp);
 
-        //unsigned long   mSignature;
-        unsigned short  mDiskNumber;
-        unsigned short  mDiskWithCentralDir;
-        unsigned short  mNumEntries;
-        unsigned short  mTotalNumEntries;
-        unsigned long   mCentralDirSize;
-        unsigned long   mCentralDirOffset;      // offset from first disk
-        unsigned short  mCommentLen;
-        unsigned char*  mComment;
+        //uint32_t mSignature;
+        uint16_t mDiskNumber;
+        uint16_t mDiskWithCentralDir;
+        uint16_t mNumEntries;
+        uint16_t mTotalNumEntries;
+        uint32_t mCentralDirSize;
+        uint32_t mCentralDirOffset;      // offset from first disk
+        uint16_t mCommentLen;
+        uint8_t* mComment;
 
         enum {
             kSignature      = 0x06054b50,
@@ -235,18 +235,18 @@
         ZipEntry** ppEntry);
 
     /* copy all of "srcFp" into "dstFp" */
-    status_t copyFpToFp(FILE* dstFp, FILE* srcFp, unsigned long* pCRC32);
+    status_t copyFpToFp(FILE* dstFp, FILE* srcFp, uint32_t* pCRC32);
     /* copy all of "data" into "dstFp" */
     status_t copyDataToFp(FILE* dstFp,
-        const void* data, size_t size, unsigned long* pCRC32);
+        const void* data, size_t size, uint32_t* pCRC32);
     /* copy some of "srcFp" into "dstFp" */
     status_t copyPartialFpToFp(FILE* dstFp, FILE* srcFp, long length,
-        unsigned long* pCRC32);
+        uint32_t* pCRC32);
     /* like memmove(), but on parts of a single file */
     status_t filemove(FILE* fp, off_t dest, off_t src, size_t n);
     /* compress all of "srcFp" into "dstFp", using Deflate */
     status_t compressFpToFp(FILE* dstFp, FILE* srcFp,
-        const void* data, size_t size, unsigned long* pCRC32);
+        const void* data, size_t size, uint32_t* pCRC32);
 
     /* get modification date from a file descriptor */
     time_t getModTime(int fd);
diff --git a/tools/check_prereq/Android.mk b/tools/ziptime/Android.mk
similarity index 65%
rename from tools/check_prereq/Android.mk
rename to tools/ziptime/Android.mk
index 4329aff..3575229 100644
--- a/tools/check_prereq/Android.mk
+++ b/tools/ziptime/Android.mk
@@ -1,4 +1,5 @@
-# Copyright (C) 2009 The Android Open Source Project
+#
+# Copyright 2015 The Android Open Source Project
 #
 # Licensed under the Apache License, Version 2.0 (the "License");
 # you may not use this file except in compliance with the License.
@@ -11,15 +12,21 @@
 # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 # See the License for the specific language governing permissions and
 # limitations under the License.
+#
 
-LOCAL_PATH := $(call my-dir)
+#
+# Zip timestamp removal tool
+#
+
+LOCAL_PATH:= $(call my-dir)
 include $(CLEAR_VARS)
 
-LOCAL_SRC_FILES := check_prereq.c
-LOCAL_MODULE := check_prereq
-LOCAL_FORCE_STATIC_EXECUTABLE := true
-LOCAL_MODULE_TAGS := eng
-LOCAL_C_INCLUDES +=
-LOCAL_STATIC_LIBRARIES += libcutils libc
+LOCAL_SRC_FILES := \
+	ZipTime.cpp \
+	ZipEntry.cpp \
+	ZipFile.cpp
 
-include $(BUILD_EXECUTABLE)
+LOCAL_MODULE := ziptime
+LOCAL_MODULE_HOST_OS := darwin linux windows
+
+include $(BUILD_HOST_EXECUTABLE)
diff --git a/tools/ziptime/README.txt b/tools/ziptime/README.txt
new file mode 100644
index 0000000..8a101e9
--- /dev/null
+++ b/tools/ziptime/README.txt
@@ -0,0 +1,10 @@
+ziptime -- zip timestamp tool
+
+usage: ziptime file.zip
+
+  file.zip is an existing Zip archive to rewrite
+
+
+This tool replaces the timestamps in the zip headers with a static time
+(Jan 1 2008). The extra fields are not changed, so you'll need to use the
+-X option to zip so that it doesn't create the 'universal time' extra field.
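
The "Jan 1 2008" value is written in the 16-bit MS-DOS date format that zip headers use: bits 15-9 hold the year minus 1980, bits 8-5 the month, bits 4-0 the day. A small sketch of that packing (illustrative only; it reproduces the STATIC_DATE constant defined in ZipEntry.cpp below):

    // Sketch: pack 2008-01-01 into the MS-DOS date format the tool writes.
    #include <cstdint>
    #include <cstdio>

    static uint16_t dosDate(int year, int month, int day) {
        return static_cast<uint16_t>(((year - 1980) << 9) | (month << 5) | day);
    }

    int main() {
        // (28 << 9 | 1 << 5 | 1) == 0x3821 == Jan 1 2008
        std::printf("0x%04x\n", static_cast<unsigned>(dosDate(2008, 1, 1)));
        return 0;
    }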
diff --git a/tools/ziptime/ZipEntry.cpp b/tools/ziptime/ZipEntry.cpp
new file mode 100644
index 0000000..51ce09f
--- /dev/null
+++ b/tools/ziptime/ZipEntry.cpp
@@ -0,0 +1,157 @@
+/*
+ * Copyright (C) 2006 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//
+// Access to entries in a Zip archive.
+//
+
+#include "ZipEntry.h"
+
+#include <stdio.h>
+#include <string.h>
+#include <assert.h>
+#include <inttypes.h>
+
+using namespace android;
+
+#define LOG(...) fprintf(stderr, __VA_ARGS__)
+
+/* Jan 01 2008 */
+#define STATIC_DATE (28 << 9 | 1 << 5 | 1)
+#define STATIC_TIME 0
+
+/*
+ * Initialize a new ZipEntry structure from a FILE* positioned at a
+ * CentralDirectoryEntry. Rewrites the headers to remove the dynamic
+ * timestamps.
+ *
+ * On exit, the file pointer will be at the start of the next CDE or
+ * at the EOCD.
+ */
+status_t ZipEntry::initAndRewriteFromCDE(FILE* fp)
+{
+    status_t result;
+    long posn;
+
+    /* read the CDE */
+    result = mCDE.rewrite(fp);
+    if (result != 0) {
+        LOG("mCDE.rewrite failed\n");
+        return result;
+    }
+
+    /* using the info in the CDE, go load up the LFH */
+    posn = ftell(fp);
+    if (fseek(fp, mCDE.mLocalHeaderRelOffset, SEEK_SET) != 0) {
+        LOG("local header seek failed (%" PRIu32 ")\n",
+            mCDE.mLocalHeaderRelOffset);
+        return -1;
+    }
+
+    result = mLFH.rewrite(fp);
+    if (result != 0) {
+        LOG("mLFH.rewrite failed\n");
+        return result;
+    }
+
+    if (fseek(fp, posn, SEEK_SET) != 0)
+        return -1;
+
+    return 0;
+}
+
+/*
+ * ===========================================================================
+ *      ZipEntry::LocalFileHeader
+ * ===========================================================================
+ */
+
+/*
+ * Rewrite a local file header.
+ *
+ * On entry, "fp" points to the signature at the start of the header.
+ */
+status_t ZipEntry::LocalFileHeader::rewrite(FILE* fp)
+{
+    status_t result = 0;
+    uint8_t buf[kLFHLen];
+
+    if (fread(buf, 1, kLFHLen, fp) != kLFHLen)
+        return -1;
+
+    if (ZipEntry::getLongLE(&buf[0x00]) != kSignature) {
+        LOG("whoops: didn't find expected signature\n");
+        return -1;
+    }
+
+    ZipEntry::putShortLE(&buf[0x0a], STATIC_TIME);
+    ZipEntry::putShortLE(&buf[0x0c], STATIC_DATE);
+
+    if (fseek(fp, -kLFHLen, SEEK_CUR) != 0)
+        return -1;
+
+    if (fwrite(buf, 1, kLFHLen, fp) != kLFHLen)
+        return -1;
+
+    return 0;
+}
+
+/*
+ * ===========================================================================
+ *      ZipEntry::CentralDirEntry
+ * ===========================================================================
+ */
+
+/*
+ * Read and rewrite the central dir entry that appears next in the file.
+ *
+ * On entry, "fp" should be positioned on the signature bytes for the
+ * entry.  On exit, "fp" will point at the signature word for the next
+ * entry or for the EOCD.
+ */
+status_t ZipEntry::CentralDirEntry::rewrite(FILE* fp)
+{
+    status_t result = 0;
+    uint8_t buf[kCDELen];
+    uint16_t fileNameLength, extraFieldLength, fileCommentLength;
+
+    if (fread(buf, 1, kCDELen, fp) != kCDELen)
+        return -1;
+
+    if (ZipEntry::getLongLE(&buf[0x00]) != kSignature) {
+        LOG("Whoops: didn't find expected signature\n");
+        return -1;
+    }
+
+    ZipEntry::putShortLE(&buf[0x0c], STATIC_TIME);
+    ZipEntry::putShortLE(&buf[0x0e], STATIC_DATE);
+
+    fileNameLength = ZipEntry::getShortLE(&buf[0x1c]);
+    extraFieldLength = ZipEntry::getShortLE(&buf[0x1e]);
+    fileCommentLength = ZipEntry::getShortLE(&buf[0x20]);
+    mLocalHeaderRelOffset = ZipEntry::getLongLE(&buf[0x2a]);
+
+    if (fseek(fp, -kCDELen, SEEK_CUR) != 0)
+        return -1;
+
+    if (fwrite(buf, 1, kCDELen, fp) != kCDELen)
+        return -1;
+
+    if (fseek(fp, fileNameLength + extraFieldLength + fileCommentLength, SEEK_CUR) != 0)
+        return -1;
+
+    return 0;
+}
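
The magic offsets in the two rewrite() methods come straight from the zip format: in the 30-byte local file header the modification time and date sit at 0x0a and 0x0c, and in the 46-byte central directory entry they sit at 0x0c and 0x0e, with the name/extra/comment lengths at 0x1c/0x1e/0x20 and the local-header offset at 0x2a. The same layout as named constants (hypothetical, purely for reference; the tool itself uses the literals):

    // Reference only: the header offsets hard-coded in rewrite() above.
    #include <cstddef>

    namespace ziplayout {                           // hypothetical namespace
        // Local file header (kLFHLen == 30 bytes, excluding variable fields)
        constexpr std::size_t kLFHModTime = 0x0a;
        constexpr std::size_t kLFHModDate = 0x0c;

        // Central directory entry (kCDELen == 46 bytes, excl. variable fields)
        constexpr std::size_t kCDEModTime        = 0x0c;
        constexpr std::size_t kCDEModDate        = 0x0e;
        constexpr std::size_t kCDENameLen        = 0x1c;
        constexpr std::size_t kCDEExtraLen       = 0x1e;
        constexpr std::size_t kCDECommentLen     = 0x20;
        constexpr std::size_t kCDELocalHdrOffset = 0x2a;
    }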
diff --git a/tools/ziptime/ZipEntry.h b/tools/ziptime/ZipEntry.h
new file mode 100644
index 0000000..26bf596
--- /dev/null
+++ b/tools/ziptime/ZipEntry.h
@@ -0,0 +1,117 @@
+/*
+ * Copyright (C) 2006 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//
+// Zip archive entries.
+//
+// The ZipEntry class is tightly meshed with the ZipFile class.
+//
+#ifndef __LIBS_ZIPENTRY_H
+#define __LIBS_ZIPENTRY_H
+
+#include <stdlib.h>
+#include <stdint.h>
+#include <stdio.h>
+
+typedef int status_t;
+
+namespace android {
+
+class ZipFile;
+
+/*
+ * ZipEntry objects represent a single entry in a Zip archive.
+ *
+ * File information is stored in two places: next to the file data (the Local
+ * File Header, and possibly a Data Descriptor), and at the end of the file
+ * (the Central Directory Entry).  The two must be kept in sync.
+ */
+class ZipEntry {
+public:
+    friend class ZipFile;
+
+    ZipEntry(void) {}
+    ~ZipEntry(void) {}
+
+    /*
+     * Some basic functions for raw data manipulation.  "LE" means
+     * Little Endian.
+     */
+    static inline uint16_t getShortLE(const uint8_t* buf) {
+        return buf[0] | (buf[1] << 8);
+    }
+    static inline uint32_t getLongLE(const uint8_t* buf) {
+        return buf[0] | (buf[1] << 8) | (buf[2] << 16) | (buf[3] << 24);
+    }
+    static inline void putShortLE(uint8_t* buf, uint16_t val) {
+        buf[0] = (uint8_t) val;
+        buf[1] = (uint8_t) (val >> 8);
+    }
+
+protected:
+    /*
+     * Initialize the structure from the file, which is pointing at
+     * our Central Directory entry, and rewrite it.
+     */
+    status_t initAndRewriteFromCDE(FILE* fp);
+
+private:
+    /* these are private and not defined */
+    ZipEntry(const ZipEntry& src);
+    ZipEntry& operator=(const ZipEntry& src);
+
+    /*
+     * Every entry in the Zip archive starts off with one of these.
+     */
+    class LocalFileHeader {
+    public:
+        LocalFileHeader(void) {}
+
+        status_t rewrite(FILE* fp);
+
+        enum {
+            kSignature      = 0x04034b50,
+            kLFHLen         = 30,       // LocalFileHdr len, excl. var fields
+        };
+    };
+
+    /*
+     * Every entry in the Zip archive has one of these in the "central
+     * directory" at the end of the file.
+     */
+    class CentralDirEntry {
+    public:
+        CentralDirEntry(void) :
+            mLocalHeaderRelOffset(0)
+        {}
+
+        status_t rewrite(FILE* fp);
+
+        uint32_t mLocalHeaderRelOffset;
+
+        enum {
+            kSignature      = 0x02014b50,
+            kCDELen         = 46,       // CentralDirEnt len, excl. var fields
+        };
+    };
+
+    LocalFileHeader     mLFH;
+    CentralDirEntry     mCDE;
+};
+
+}; // namespace android
+
+#endif // __LIBS_ZIPENTRY_H
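
The getShortLE/getLongLE/putShortLE helpers above assemble the little-endian header fields byte by byte, so they behave identically on big- and little-endian hosts. A quick round-trip sketch of the same arithmetic (illustrative only):

    // Sketch: the byte-by-byte LE encoding round-trips regardless of host order.
    #include <cassert>
    #include <cstdint>

    int main() {
        const uint16_t val = 0x3821;               // the STATIC_DATE value
        uint8_t buf[2];
        buf[0] = static_cast<uint8_t>(val);        // low byte first (putShortLE)
        buf[1] = static_cast<uint8_t>(val >> 8);   // then the high byte
        const uint16_t back =
            static_cast<uint16_t>(buf[0] | (buf[1] << 8));   // getShortLE
        assert(back == val);
        return 0;
    }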
diff --git a/tools/ziptime/ZipFile.cpp b/tools/ziptime/ZipFile.cpp
new file mode 100644
index 0000000..1d111af
--- /dev/null
+++ b/tools/ziptime/ZipFile.cpp
@@ -0,0 +1,241 @@
+/*
+ * Copyright (C) 2006 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//
+// Access to Zip archives.
+//
+
+#include "ZipFile.h"
+
+#include <memory.h>
+#include <sys/stat.h>
+#include <errno.h>
+#include <assert.h>
+#include <inttypes.h>
+
+using namespace android;
+
+#define LOG(...) fprintf(stderr, __VA_ARGS__)
+
+/*
+ * Open a file and rewrite the headers
+ */
+status_t ZipFile::rewrite(const char* zipFileName)
+{
+    assert(mZipFp == NULL);     // no reopen
+
+    /* open the file */
+    mZipFp = fopen(zipFileName, "r+b");
+    if (mZipFp == NULL) {
+        int err = errno;
+        LOG("fopen failed: %d\n", err);
+        return -1;
+    }
+
+    /*
+     * Load the central directory.  If that fails, then this probably
+     * isn't a Zip archive.
+     */
+    return rewriteCentralDir();
+}
+
+/*
+ * Find the central directory, read and rewrite the contents.
+ *
+ * The fun thing about ZIP archives is that they may or may not be
+ * readable from start to end.  In some cases, notably for archives
+ * that were written to stdout, the only length information is in the
+ * central directory at the end of the file.
+ *
+ * Of course, the central directory can be followed by a variable-length
+ * comment field, so we have to scan through it backwards.  The comment
+ * is at most 64K, plus we have 18 bytes for the end-of-central-dir stuff
+ * itself, plus apparently sometimes people throw random junk on the end
+ * just for the fun of it.
+ *
+ * This is all a little wobbly.  If the wrong value ends up in the EOCD
+ * area, we're hosed.  This appears to be the way that everybody handles
+ * it though, so we're in pretty good company if this fails.
+ */
+status_t ZipFile::rewriteCentralDir(void)
+{
+    status_t result = 0;
+    uint8_t* buf = NULL;
+    off_t fileLength, seekStart;
+    long readAmount;
+    int i;
+
+    fseek(mZipFp, 0, SEEK_END);
+    fileLength = ftell(mZipFp);
+    rewind(mZipFp);
+
+    /* too small to be a ZIP archive? */
+    if (fileLength < EndOfCentralDir::kEOCDLen) {
+        LOG("Length is %ld -- too small\n", (long)fileLength);
+        result = -1;
+        goto bail;
+    }
+
+    buf = new uint8_t[EndOfCentralDir::kMaxEOCDSearch];
+    if (buf == NULL) {
+        LOG("Failure allocating %d bytes for EOCD search\n",
+             EndOfCentralDir::kMaxEOCDSearch);
+        result = -1;
+        goto bail;
+    }
+
+    if (fileLength > EndOfCentralDir::kMaxEOCDSearch) {
+        seekStart = fileLength - EndOfCentralDir::kMaxEOCDSearch;
+        readAmount = EndOfCentralDir::kMaxEOCDSearch;
+    } else {
+        seekStart = 0;
+        readAmount = (long) fileLength;
+    }
+    if (fseek(mZipFp, seekStart, SEEK_SET) != 0) {
+        LOG("Failure seeking to end of zip at %ld\n", (long) seekStart);
+        result = -1;
+        goto bail;
+    }
+
+    /* read the last part of the file into the buffer */
+    if (fread(buf, 1, readAmount, mZipFp) != (size_t) readAmount) {
+        LOG("short file? wanted %ld\n", readAmount);
+        result = -1;
+        goto bail;
+    }
+
+    /* find the end-of-central-dir magic */
+    for (i = readAmount - 4; i >= 0; i--) {
+        if (buf[i] == 0x50 &&
+            ZipEntry::getLongLE(&buf[i]) == EndOfCentralDir::kSignature)
+        {
+            break;
+        }
+    }
+    if (i < 0) {
+        LOG("EOCD not found, not Zip\n");
+        result = -1;
+        goto bail;
+    }
+
+    /* extract eocd values */
+    result = mEOCD.readBuf(buf + i, readAmount - i);
+    if (result != 0) {
+        LOG("Failure reading %ld bytes of EOCD values\n", readAmount - i);
+        goto bail;
+    }
+
+    /*
+     * So far so good.  "mCentralDirSize" is the size in bytes of the
+     * central directory, so we can just seek back that far to find it.
+     * We can also seek forward mCentralDirOffset bytes from the
+     * start of the file.
+     *
+     * We're not guaranteed to have the rest of the central dir in the
+     * buffer, nor are we guaranteed that the central dir will have any
+     * sort of convenient size.  We need to skip to the start of it and
+     * read the header, then the other goodies.
+     *
+     * The only thing we really need right now is the file comment, which
+     * we're hoping to preserve.
+     */
+    if (fseek(mZipFp, mEOCD.mCentralDirOffset, SEEK_SET) != 0) {
+        LOG("Failure seeking to central dir offset %" PRIu32 "\n",
+             mEOCD.mCentralDirOffset);
+        result = -1;
+        goto bail;
+    }
+
+    /*
+     * Loop through and read the central dir entries.
+     */
+    int entry;
+    for (entry = 0; entry < mEOCD.mTotalNumEntries; entry++) {
+        ZipEntry* pEntry = new ZipEntry;
+
+        result = pEntry->initAndRewriteFromCDE(mZipFp);
+        if (result != 0) {
+            LOG("initAndRewriteFromCDE failed\n");
+            delete pEntry;
+            goto bail;
+        }
+
+        delete pEntry;
+    }
+
+
+    /*
+     * If all went well, we should now be back at the EOCD.
+     */
+    uint8_t checkBuf[4];
+    if (fread(checkBuf, 1, 4, mZipFp) != 4) {
+        LOG("EOCD check read failed\n");
+        result = -1;
+        goto bail;
+    }
+    if (ZipEntry::getLongLE(checkBuf) != EndOfCentralDir::kSignature) {
+        LOG("EOCD read check failed\n");
+        result = -1;
+        goto bail;
+    }
+
+bail:
+    delete[] buf;
+    return result;
+}
+
+/*
+ * ===========================================================================
+ *      ZipFile::EndOfCentralDir
+ * ===========================================================================
+ */
+
+/*
+ * Read the end-of-central-dir fields.
+ *
+ * "buf" should be positioned at the EOCD signature, and should contain
+ * the entire EOCD area including the comment.
+ */
+status_t ZipFile::EndOfCentralDir::readBuf(const uint8_t* buf, int len)
+{
+    uint16_t diskNumber, diskWithCentralDir, numEntries;
+
+    if (len < kEOCDLen) {
+        /* looks like ZIP file got truncated */
+        LOG(" Zip EOCD: expected >= %d bytes, found %d\n",
+            kEOCDLen, len);
+        return -1;
+    }
+
+    /* this should probably be an assert() */
+    if (ZipEntry::getLongLE(&buf[0x00]) != kSignature)
+        return -1;
+
+    diskNumber = ZipEntry::getShortLE(&buf[0x04]);
+    diskWithCentralDir = ZipEntry::getShortLE(&buf[0x06]);
+    numEntries = ZipEntry::getShortLE(&buf[0x08]);
+    mTotalNumEntries = ZipEntry::getShortLE(&buf[0x0a]);
+    mCentralDirOffset = ZipEntry::getLongLE(&buf[0x10]);
+
+    if (diskNumber != 0 || diskWithCentralDir != 0 ||
+        numEntries != mTotalNumEntries)
+    {
+        LOG("Archive spanning not supported\n");
+        return -1;
+    }
+
+    return 0;
+}
diff --git a/tools/ziptime/ZipFile.h b/tools/ziptime/ZipFile.h
new file mode 100644
index 0000000..b049e05
--- /dev/null
+++ b/tools/ziptime/ZipFile.h
@@ -0,0 +1,84 @@
+/*
+ * Copyright (C) 2006 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+//
+// Class to rewrite zip file headers to remove dynamic timestamps.
+//
+#ifndef __LIBS_ZIPFILE_H
+#define __LIBS_ZIPFILE_H
+
+#include <stdio.h>
+
+#include "ZipEntry.h"
+
+namespace android {
+
+/*
+ * Manipulate a Zip archive.
+ */
+class ZipFile {
+public:
+    ZipFile(void) : mZipFp(NULL) {}
+    ~ZipFile(void) {
+        if (mZipFp != NULL)
+            fclose(mZipFp);
+    }
+
+    /*
+     * Rewrite an archive's headers to remove dynamic timestamps.
+     */
+    status_t rewrite(const char* zipFileName);
+
+private:
+    /* these are private and not defined */
+    ZipFile(const ZipFile& src);
+    ZipFile& operator=(const ZipFile& src);
+
+    class EndOfCentralDir {
+    public:
+        EndOfCentralDir(void) : mTotalNumEntries(0), mCentralDirOffset(0) {}
+
+        status_t readBuf(const uint8_t* buf, int len);
+
+        uint16_t mTotalNumEntries;
+        uint32_t mCentralDirOffset;      // offset from first disk
+
+        enum {
+            kSignature      = 0x06054b50,
+            kEOCDLen        = 22,       // EndOfCentralDir len, excl. comment
+
+            kMaxCommentLen  = 65535,    // longest possible in ushort
+            kMaxEOCDSearch  = kMaxCommentLen + EndOfCentralDir::kEOCDLen,
+
+        };
+    };
+
+    /* read all entries in the central dir */
+    status_t rewriteCentralDir(void);
+
+    /*
+     * We use stdio FILE*, which gives us buffering but makes dealing
+     * with files >2GB awkward.  Until we support Zip64, we're fine.
+     */
+    FILE*           mZipFp;             // Zip file pointer
+
+    /* one of these per file */
+    EndOfCentralDir mEOCD;
+};
+
+}; // namespace android
+
+#endif // __LIBS_ZIPFILE_H
diff --git a/tools/ziptime/ZipTime.cpp b/tools/ziptime/ZipTime.cpp
new file mode 100644
index 0000000..99d3231
--- /dev/null
+++ b/tools/ziptime/ZipTime.cpp
@@ -0,0 +1,48 @@
+/*
+ * Copyright (C) 2015 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *      http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+/*
+ * Zip tool to remove dynamic timestamps
+ */
+#include "ZipFile.h"
+
+#include <stdlib.h>
+#include <stdio.h>
+
+using namespace android;
+
+static void usage(void)
+{
+    fprintf(stderr, "Zip timestamp utility\n");
+    fprintf(stderr, "Copyright (C) 2015 The Android Open Source Project\n\n");
+    fprintf(stderr, "Usage: ziptime file.zip\n");
+}
+
+int main(int argc, char* const argv[])
+{
+    if (argc != 2) {
+        usage();
+        return 2;
+    }
+
+    ZipFile zip;
+    if (zip.rewrite(argv[1]) != 0) {
+        fprintf(stderr, "Unable to rewrite '%s' as zip archive\n", argv[1]);
+        return 1;
+    }
+
+    return 0;
+}
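
A hedged way to spot-check the result (not part of the tool): after ziptime rewrites an archive that begins with an ordinary local file header, the first header's time and date fields should equal the static values. checkFirstEntry below is a hypothetical helper written against that assumption:

    // Hypothetical verification helper, not part of ziptime: read the first
    // local file header and compare its mod time/date with the static values.
    #include <cstdint>
    #include <cstdio>

    static uint16_t getShortLE(const uint8_t* buf) {
        return static_cast<uint16_t>(buf[0] | (buf[1] << 8));
    }
    static uint32_t getLongLE(const uint8_t* buf) {
        return buf[0] | (buf[1] << 8) | (buf[2] << 16)
             | (static_cast<uint32_t>(buf[3]) << 24);
    }

    int checkFirstEntry(const char* path) {
        FILE* fp = std::fopen(path, "rb");
        if (fp == nullptr)
            return -1;
        uint8_t lfh[30];                            // fixed part of the LFH
        size_t got = std::fread(lfh, 1, sizeof(lfh), fp);
        std::fclose(fp);
        if (got != sizeof(lfh) || getLongLE(lfh) != 0x04034b50)
            return -1;                              // no local file header here
        // STATIC_TIME == 0, STATIC_DATE == (28 << 9 | 1 << 5 | 1) == Jan 1 2008
        return (getShortLE(&lfh[0x0a]) == 0 &&
                getShortLE(&lfh[0x0c]) == (28 << 9 | 1 << 5 | 1)) ? 0 : 1;
    }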