am 1cda92ea: am 2999ca2c: Merge "Docs: Adding dm-verity to Security section. Bug: 10706838 Staging location: http://claym.mtv.corp.google.com:8091/devices/tech/security/dm-verity.html"

* commit '1cda92ea9d19d43ddab71c7f7605710a6a9ee806':
diff --git a/src/accessories/accessories_toc.cs b/src/accessories/accessories_toc.cs
index 273977b..d1badbe 100644
--- a/src/accessories/accessories_toc.cs
+++ b/src/accessories/accessories_toc.cs
@@ -1,5 +1,5 @@
 <!--
-    Copyright 2010 The Android Open Source Project
+    Copyright 2013 The Android Open Source Project
 
     Licensed under the Apache License, Version 2.0 (the "License");
     you may not use this file except in compliance with the License.
@@ -38,4 +38,4 @@
       <li><a href="<?cs var:toroot ?>accessories/custom.html">Building Custom Accessories</a></li>
   </li>
   <!-- End Accessories -->
-</ul>
\ No newline at end of file
+</ul>
diff --git a/src/accessories/aoa.jd b/src/accessories/aoa.jd
index 3d0c29e..7388d54 100644
--- a/src/accessories/aoa.jd
+++ b/src/accessories/aoa.jd
@@ -2,7 +2,7 @@
 @jd:body
 
 <!--
-    Copyright 2010 The Android Open Source Project
+    Copyright 2013 The Android Open Source Project
 
     Licensed under the Apache License, Version 2.0 (the "License");
     you may not use this file except in compliance with the License.
diff --git a/src/accessories/aoa2.jd b/src/accessories/aoa2.jd
index 81a0b23..c48bf25 100644
--- a/src/accessories/aoa2.jd
+++ b/src/accessories/aoa2.jd
@@ -2,7 +2,7 @@
 @jd:body
 
 <!--
-    Copyright 2010 The Android Open Source Project
+    Copyright 2013 The Android Open Source Project
 
     Licensed under the Apache License, Version 2.0 (the "License");
     you may not use this file except in compliance with the License.
diff --git a/src/accessories/audio.jd b/src/accessories/audio.jd
index cb7a669..240f4ea 100644
--- a/src/accessories/audio.jd
+++ b/src/accessories/audio.jd
@@ -2,7 +2,7 @@
 @jd:body
 
 <!--
-    Copyright 2010 The Android Open Source Project
+    Copyright 2013 The Android Open Source Project
 
     Licensed under the Apache License, Version 2.0 (the "License");
     you may not use this file except in compliance with the License.
diff --git a/src/accessories/custom.jd b/src/accessories/custom.jd
index 032aaa2..3f84d50 100644
--- a/src/accessories/custom.jd
+++ b/src/accessories/custom.jd
@@ -2,7 +2,7 @@
 @jd:body
 
 <!--
-    Copyright 2010 The Android Open Source Project
+    Copyright 2013 The Android Open Source Project
 
     Licensed under the Apache License, Version 2.0 (the "License");
     you may not use this file except in compliance with the License.
diff --git a/src/accessories/index.jd b/src/accessories/index.jd
index e374f72..adf3f5c 100644
--- a/src/accessories/index.jd
+++ b/src/accessories/index.jd
@@ -2,7 +2,7 @@
 @jd:body
 
 <!--
-    Copyright 2010 The Android Open Source Project
+    Copyright 2013 The Android Open Source Project
 
     Licensed under the Apache License, Version 2.0 (the "License");
     you may not use this file except in compliance with the License.
diff --git a/src/accessories/protocol.jd b/src/accessories/protocol.jd
index 23122a1..7ce3bb4 100644
--- a/src/accessories/protocol.jd
+++ b/src/accessories/protocol.jd
@@ -2,7 +2,7 @@
 @jd:body
 
 <!--
-    Copyright 2013The Android Open Source Project     
+    Copyright 2013 The Android Open Source Project     
 
     Licensed under the Apache License, Version 2.0 (the "License");    
     you may not use this file except in compliance with the License.   
diff --git a/src/compatibility/4.3/versions.jd b/src/compatibility/4.3/versions.jd
index 8d2009d..321a4fc 100644
--- a/src/compatibility/4.3/versions.jd
+++ b/src/compatibility/4.3/versions.jd
@@ -14,4 +14,5 @@
 <code>android.os.Build.VERSION.RELEASE</code> for Android 4.3 are:</p>
 <ul>
 <li>4.3</li>
+<li>4.3.1</li>
 </ul>
diff --git a/src/compatibility/4.4/android-4.4-cdd.pdf b/src/compatibility/4.4/android-4.4-cdd.pdf
new file mode 100644
index 0000000..730f634
--- /dev/null
+++ b/src/compatibility/4.4/android-4.4-cdd.pdf
Binary files differ
diff --git a/src/compatibility/4.4/versions.jd b/src/compatibility/4.4/versions.jd
new file mode 100644
index 0000000..d2118df
--- /dev/null
+++ b/src/compatibility/4.4/versions.jd
@@ -0,0 +1,19 @@
+page.title=Permitted Version Strings for Android 4.4
+@jd:body
+
+<p>As described in Section 3.2.2 of the <a
+href="/compatibility/android-4.4-cdd.pdf">Android 4.4 Compatibility Definition</a>, 
+only certain strings are allowable for the system property
+<code>android.os.Build.VERSION.RELEASE</code>. The reason for this is that
+applications and web sites may rely on predictable values for this string, and
+so that end users can easily and reliably identify the version of Android
+running on their devices.</p>
+<p>Because subsequent releases of the Android software may revise this string,
+but not change any API behavior, such releases may not be accompanied by a new
+Compatibility Definition Document. This page lists the versions that are
+allowable by an Android 4.4-based system. The only permitted values for
+<code>android.os.Build.VERSION.RELEASE</code> for Android 4.4 are:</p>
+<ul>
+<li>4.4</li>
+<li>4.4.1</li>
+</ul>
diff --git a/src/compatibility/android-4.4-cdd.pdf b/src/compatibility/android-4.4-cdd.pdf
new file mode 100644
index 0000000..730f634
--- /dev/null
+++ b/src/compatibility/android-4.4-cdd.pdf
Binary files differ
diff --git a/src/compatibility/android-cdd.pdf b/src/compatibility/android-cdd.pdf
new file mode 100644
index 0000000..730f634
--- /dev/null
+++ b/src/compatibility/android-cdd.pdf
Binary files differ
diff --git a/src/compatibility/android-cts-manual-r6.pdf b/src/compatibility/android-cts-manual-r6.pdf
deleted file mode 100644
index 5f4173b..0000000
--- a/src/compatibility/android-cts-manual-r6.pdf
+++ /dev/null
Binary files differ
diff --git a/src/compatibility/android-cts-manual.pdf b/src/compatibility/android-cts-manual.pdf
new file mode 100644
index 0000000..2c77b5c
--- /dev/null
+++ b/src/compatibility/android-cts-manual.pdf
Binary files differ
diff --git a/src/compatibility/calibration-pattern.pdf b/src/compatibility/calibration-pattern.pdf
new file mode 100644
index 0000000..1800fa0
--- /dev/null
+++ b/src/compatibility/calibration-pattern.pdf
Binary files differ
diff --git a/src/compatibility/compatibility_toc.cs b/src/compatibility/compatibility_toc.cs
index e32c071..6d4b69e 100644
--- a/src/compatibility/compatibility_toc.cs
+++ b/src/compatibility/compatibility_toc.cs
@@ -1,5 +1,5 @@
 <!--
-    Copyright 2010 The Android Open Source Project
+    Copyright 2013 The Android Open Source Project
 
     Licensed under the Apache License, Version 2.0 (the "License");
     you may not use this file except in compliance with the License.
@@ -26,7 +26,7 @@
       <li><a href="<?cs var:toroot ?>compatibility/overview.html">Overview</a></li>
       <li><a href="<?cs var:toroot ?>compatibility/cts-intro.html">Compatibility Test Suite</a></li>
       <li><a href="<?cs var:toroot ?>compatibility/cts-development.html">CTS Development</a></li>
-      <li><a href="<?cs var:toroot ?>compatibility/android-4.3-cdd.pdf">Compatibility Definition Document (CDD)</a></li>
+      <li><a href="<?cs var:toroot ?>compatibility/android-cdd.pdf">Compatibility Definition Document (CDD)</a></li>
       <li><a href="<?cs var:toroot ?>compatibility/downloads.html">Downloads</a></li>
       <li><a href="<?cs var:toroot ?>compatibility/contact-us.html">Contact Us</a></li>
     </ul>
diff --git a/src/compatibility/contact-us.jd b/src/compatibility/contact-us.jd
index b9e969f..585c2a8 100644
--- a/src/compatibility/contact-us.jd
+++ b/src/compatibility/contact-us.jd
@@ -2,7 +2,7 @@
 @jd:body
 
 <!--
-    Copyright 2010 The Android Open Source Project     
+    Copyright 2013 The Android Open Source Project     
 
     Licensed under the Apache License, Version 2.0 (the "License");    
     you may not use this file except in compliance with the License.   
@@ -16,25 +16,35 @@
     See the License for the specific language governing permissions and    
     limitations under the License.   
 -->
+<p>Thanks for your interest in Android compatibility! This page describes the
+contact methods for inquiries regarding the Android compatibility program,
+including the Compatibility Definition Document (CDD) and Compatibility Test
+Suite (CTS). See the <a href="{@docRoot}community/index.html">Community</a>
+page for communication channels regarding other topics.</p>
 
-<p>Thanks for your interest in Android compatibility!</p>
-<p>If you have questions about Android compatibility that aren't covered in
-this site, you can reach us in one of a few different ways. To get the most
-out of any of these options, please first read "Getting the Most from Our
-Lists" on the <a href="{@docRoot}community/index.html">Community page</a></p>
-<h2 id="for-android-compatibility-definition-and-compatibility-test-suite-technical-questions">For Android Compatibility Definition and Compatibility Test Suite Technical Questions</h2>
-<p>If you have questions about Android compatibility that aren't covered in this site, you can reach
-us in one of a few different ways. To get the most out of any of these options, please first read "Getting the Most from Our
-Lists" on the <a href="{@docRoot}community/index.html">Community page</a>. If you have specific issues with the Compatibility Test Suite or the Compatibility Definition
-<a href="https://groups.google.com/forum/?fromgroups#!forum/android-compatibility">android-compatibility list.</a> is the discussion forum for you.</p>
+<h2
+id="for-android-compatibility-definition-and-compatibility-test-suite-technical-questions">For
+CDD and CTS technical questions</h2>
+<p>If you have technical questions about Android compatibility that aren't covered in
+this site, you can seek help from your peers on the <a
+href="https://groups.google.com/forum/?fromgroups#!forum/android-compatibility">android-compatibility</a>
+list.</p>
+
 <ul>
-<li>Subscribe using Google Groups: <a href="https://groups.google.com/forum/?fromgroups#!forum/android-compatibility">android-compatibility</a></li>
-<li>Subscribe via email: <a href="mailto:android-compatibility+subscribe@googlegroups.com">android-compatibility</a></li>
+<li>Subscribe using Google Groups: <a
+href="https://groups.google.com/forum/?fromgroups#!forum/android-compatibility">android-compatibility</a></li>
+<li>Subscribe via email: <a
+href="mailto:android-compatibility+subscribe@googlegroups.com">android-compatibility</a></li>
 </ul>
-<p>Note that if you're a user looking for help with your Android device, this page probably isn't for you;
-you should contact your carrier or manufacturer for help with your Android device.</p>
-<h2 id="for-business-inquiries">For Business Inquiries</h2>
+
+<p>To make best use of this list, please first read <em>Getting the Most from
+Our Lists</em> on the <a href="{@docRoot}community/index.html">Community</a>
+page. Users looking for help with Android devices should contact their carrier
+or manufacturer for help.</p>
+
+<h2 id="for-business-inquiries">For business inquiries</h2>
 <p>Finally, business inquiries about the compatibility program, including
-requests to use branding elements and so on, can be sent to the address <a href="mailto:android-partnerships@google.com">android-partnerships@google.com</a>. Like
-the CTS address, this address is for specific, private inquiries; general
-questions will be directed back to the android-compatibility list.</p>
+requests to use branding elements and similar, can be sent to the address <a
+href="mailto:android-partnerships@google.com">android-partnerships@google.com</a>.
+This address is for specific, private inquiries; general questions will be
+directed back to the android-compatibility list.</p>
diff --git a/src/compatibility/cts-development.jd b/src/compatibility/cts-development.jd
index 68c4891..4631aca 100644
--- a/src/compatibility/cts-development.jd
+++ b/src/compatibility/cts-development.jd
@@ -2,7 +2,7 @@
 @jd:body
 
 <!--
-    Copyright 2010 The Android Open Source Project     
+    Copyright 2013 The Android Open Source Project     
 
     Licensed under the Apache License, Version 2.0 (the "License");    
     you may not use this file except in compliance with the License.   
diff --git a/src/compatibility/cts-intro.jd b/src/compatibility/cts-intro.jd
index b1458e6..2813aef 100644
--- a/src/compatibility/cts-intro.jd
+++ b/src/compatibility/cts-intro.jd
@@ -2,7 +2,7 @@
 @jd:body
 
 <!--
-    Copyright 2010 The Android Open Source Project     
+    Copyright 2013 The Android Open Source Project     
 
     Licensed under the Apache License, Version 2.0 (the "License");    
     you may not use this file except in compliance with the License.   
@@ -34,6 +34,8 @@
 </li>
 </ul>
 <h2 id="workflow">Workflow</h2>
+<p>This section summarizes CTS setup. Please refer to the <a href="android-cts-manual.pdf">
+CTS User Manual</a> for detailed instructions.</p>
 <ol>
 <li>
 <p><a href="downloads.html">Download</a> the CTS and CTS media files.</p>
@@ -45,7 +47,7 @@
 <p>For CTS versions 2.1 R2 through 4.2 R4, set up your device (or emulator) to run the accessibility tests:</p>
 <ol>
 <li>
-<p>adb install -r android-cts/repository/testcases/CtsDelegatingAccessibilityService.apk</p>
+<code>adb install -r android-cts/repository/testcases/CtsDelegatingAccessibilityService.apk</code>
 </li>
 <li>
 <p>On the device, enable Settings &gt; Accessibility &gt; Accessibility &gt; Delegating Accessibility Service</p>
@@ -56,10 +58,13 @@
 <p>For CTS 2.3 R4 and beyond, set up your device to run the device administration tests:</p>
 <ol>
 <li>
-<p>adb install -r android-cts/repository/testcases/CtsDeviceAdmin.apk</p>
+<code>adb install -r android-cts/repository/testcases/CtsDeviceAdmin.apk</code>
 </li>
 <li>
-<p>On the device, enable the two android.deviceadmin.cts.CtsDeviceAdminReceiver* device administrators under Settings &gt; Location &amp; security &gt; Select device administrators</p>
+<p>On the device, enable the two <code>android.deviceadmin.cts.CtsDeviceAdminReceiver*</code> device
+administrators under Settings &gt; Location &amp; security &gt; Select device administrators</p>
+<p><strong>Note</strong>: Make sure the <code>android.deviceadmin.cts.CtsDeviceAdminDeactivatedReceiver</code>
+stays disabled in the same menu.</p>
 </li>
 </ol>
 </li>
@@ -70,7 +75,9 @@
 <p>Unzip the CTS Media zip file.</p>
 </li>
 <li>
-<p>Run copy_media.sh [720x480|1280x720|1920x1080|all] [-s serial]. If no resolution is specified, the default maximum resolution of 480x360 is assumed.</p>
+<p>Run the following command. If no resolution is specified, the default maximum resolution of
+480x360 is assumed:</p>
+<code>copy_media.sh [720x480|1280x720|1920x1080|all] [-s serial]</code>
 </li>
 </ol>
 </li>
@@ -78,7 +85,7 @@
 <p>Launch the CTS. The CTS test harness loads the test plan onto the attached devices. For each test in the test harness:</p>
 <ul>
 <li>
-<p>The test harness pushes a .apk file to each device, executes the test through instrumentation, and records test results.</p>
+<p>The test harness pushes an .apk file to each device, executes the test through instrumentation, and records test results.</p>
 </li>
 <li>
 <p>The test harness removes the .apk file from each device.</p>
@@ -86,7 +93,8 @@
 </ul>
 </li>
 <li>
-<p>Once all the tests are executed, you can view the test results in your browser and use the results to adjust your design. You can continue to run the CTS throughout your development process.</p>
+<p>Once all the tests are executed, view the test results in your browser and
+use them to adjust your design. You can continue to run the CTS throughout your development process.</p>
 </li>
 </ol>
 <h2 id="types-of-test-cases">Types of test cases</h2>
@@ -99,7 +107,8 @@
 <p><em>Functional tests</em> test a combination of APIs together in a higher-level use-case.</p>
 </li>
 <li>
-<p><em>Reference application tests</em> instrument a complete sample application to exercise a full set of APIs and Android runtime services</p>
+<p><em>Reference application tests</em> instrument a complete sample application
+to exercise a full set of APIs and Android runtime services.</p>
 </li>
 </ul>
 <p>Future versions of the CTS will include the following types of test cases:</p>
@@ -131,7 +140,7 @@
 </tr>
 <tr>
 <td>Dalvik VM Tests</td>
-<td>The tests focus on testing the Dalvik VM</td>
+<td>The tests focus on testing the Dalvik VM.</td>
 </tr>
 <tr>
 <td>Platform Data Model</td>
diff --git a/src/compatibility/downloads.jd b/src/compatibility/downloads.jd
index d86be34..8d1cbbd 100644
--- a/src/compatibility/downloads.jd
+++ b/src/compatibility/downloads.jd
@@ -2,7 +2,7 @@
 @jd:body
 
 <!--
-    Copyright 2010 The Android Open Source Project     
+    Copyright 2013 The Android Open Source Project     
 
     Licensed under the Apache License, Version 2.0 (the "License");    
     you may not use this file except in compliance with the License.   
@@ -17,10 +17,23 @@
     limitations under the License.   
 -->
 
-<p>Thanks for your interest in Android Compatibility! The links below allow
-you to access the key documents and information.</p>
-<p>Thanks for your interest in Android Compatibility! The links below allow
-you to access the key documents and information.</p>
+<p>Thank you for your interest in Android Compatibility! The links below give
+you access to key documents and information about the program.</p>
+
+<h2 id="android-44">Android 4.4</h2>
+<p>Android 4.4 is the release of the development milestone code-named
+KitKat. Source code for Android 4.4 is found in the
+'android-cts-4.4_r1' branch in the open-source tree.</p>
+<ul>
+<li><a href="4.4/android-4.4-cdd.pdf">Android 4.4 Compatibility Definition
+Document (CDD)</a></li>
+<li><a
+href="https://dl.google.com/dl/android/cts/android-cts-4.4_r1-linux_x86-arm.zip">Android
+4.4 R1 Compatibility Test Suite (CTS)</a></li>
+<li><a
+href="https://dl.google.com/dl/android/cts/android-cts-verifier-4.4_r1-linux_x86-arm.zip">Android
+4.4 R1 CTS Verifier</a></li>
+</ul>
 
 <h2 id="android-43">Android 4.3</h2>
 <p>Android 4.3 is the release of the development milestone code-named
@@ -98,7 +111,7 @@
 <p>The CTS user manual is applicable to any CTS version, but CTS 2.1 R2 and
 beyond require <a href="cts-intro.html">additional steps</a> to run the accessibility tests.</p>
 <ul>
-<li><a href="android-cts-manual-r6.pdf">Compatibility Test Suite (CTS) User Manual</a></li>
+<li><a href="android-cts-manual.pdf">Compatibility Test Suite (CTS) User Manual</a></li>
 </ul>
 <h2 id="cts-media-files">CTS Media Files</h2>
 <p>These media files are required for the CTS media stress tests.</p>
diff --git a/src/compatibility/index.jd b/src/compatibility/index.jd
index 13ee12e..60c2951 100644
--- a/src/compatibility/index.jd
+++ b/src/compatibility/index.jd
@@ -2,7 +2,7 @@
 @jd:body
 
 <!--
-    Copyright 2010 The Android Open Source Project     
+    Copyright 2013 The Android Open Source Project     
 
     Licensed under the Apache License, Version 2.0 (the "License");    
     you may not use this file except in compliance with the License.   
@@ -18,7 +18,7 @@
 -->
 
 <p>Android's purpose is to establish an open platform for developers to build innovative apps.
-The Android Compatibility program defines the technical details of Android platform and provides
+The Android Compatibility program defines the technical details of the Android platform and provides
 tools used by OEMs to ensure that developers' apps run on a variety of devices. The Android SDK
 provides built-in tools that developers use to clearly state the device features their apps
 require. And Google Play shows apps only to those devices that can properly run them.
@@ -29,10 +29,10 @@
 <p>A mobile phone is a highly personal, always-on, always-present gateway to
 the Internet. We haven't met a user yet who didn't want to customize it by
 extending its functionality. That's why Android was designed as a robust
-platform for running after-market applications.</p>
+platform for running aftermarket applications.</p>
 <h3 id="developers-outnumber-us-all">Developers outnumber us all.</h3>
 <p>No device manufacturer can hope to write all the software that a person could
-conceivably need. We need third-party developers to write the apps users want,
+conceivably need. We need third-party developers to write the apps users want;
 so the Android Open Source Project aims to make it as easy and open as
 possible for developers to build apps.</p>
 <h3 id="everyone-needs-a-common-ecosystem">Everyone needs a common ecosystem.</h3>
@@ -48,22 +48,25 @@
 <p>Building a compatible device is a three-step process:</p>
 <ol>
 <li>
-<p><em>Obtain the Android software source code</em>.
-    This is <a href="{@docRoot}source/index.html">the source code for the Android platform</a>, that you port to your hardware.</p>
+<p><em>Obtain the <a href="{@docRoot}source/index.html">Android software source
+code</a></em>.
+    This is the source code for the Android platform that you port to your hardware.</p>
 </li>
 <li>
-<p><em>Comply with Android Compatibility Definition Document (CDD)</em>.
+<p><em>Comply with the <a href="{@docRoot}compatibility/android-cdd.pdf">Android
+Compatibility Definition Document (CDD)</a></em>.
     The CDD enumerates the software and hardware requirements of a compatible Android device.</p>
 </li>
 <li>
-<p><em>Pass the Compatibility Test Suite (CTS)</em>.
-    You can use the CTS (included in the Android source code) as an ongoing aid to compatibility during the development process.</p>
+<p><em>Pass the <a href="{@docRoot}compatibility/cts-intro.html">Compatibility
+Test Suite (CTS)</a></em>.
+    Use the CTS as an ongoing aid to compatibility during the development process.</p>
 </li>
 </ol>
 
-<h2 id="joining-the-ecosystem">Joining the Ecosystem</h2>
+<h2 id="joining-the-ecosystem">Joining the ecosystem</h2>
 <p>Once you've built a compatible device, you may wish to include Google
 Play to provide your users access to the third-party app ecosystem.
 Unfortunately, for a variety of legal and business reasons, we aren't able to
 automatically license Google Play to all compatible devices. To inquire
-about access about Google Play, you can <a href="contact-us.html">contact us</a>.</p>
+about access to Google Play, you can <a href="contact-us.html">contact us</a>.</p>
diff --git a/src/compatibility/overview.jd b/src/compatibility/overview.jd
index befc946..ac0fb9f 100644
--- a/src/compatibility/overview.jd
+++ b/src/compatibility/overview.jd
@@ -2,7 +2,7 @@
 @jd:body
 
 <!--
-    Copyright 2010 The Android Open Source Project     
+    Copyright 2013 The Android Open Source Project     
 
     Licensed under the Apache License, Version 2.0 (the "License");    
     you may not use this file except in compliance with the License.   
@@ -46,8 +46,8 @@
 any other device that is compatible with the same Android platform version.
 Android devices will differ in hardware and software capabilities, so the
 compatibility program also provides the tools needed for distribution systems
-such as Google Play to implement appropriate filtering. This means that
-users can only see applications which they can actually run.</p>
+such as Google Play to implement appropriate filtering. This means
+users see only the applications they can actually run.</p>
 </li>
 <li>
 <p><em>Enable device manufacturers to differentiate while being
@@ -60,9 +60,9 @@
 <li>
 <p><em>Minimize costs and overhead associated with compatibility.</em>
     Ensuring compatibility should be easy and inexpensive to
-device manufacturers. The testing tool (CTS) is free, open source, and
+device manufacturers. The testing tool is free, open source, and
 available for <a href="downloads.html">download</a>. 
-CTS is designed to be used for continuous self-testing
+It is designed to be used for continuous self-testing
 during the device development process to eliminate the cost of changing your
 workflow or sending your device to a third party for testing. Meanwhile, there
 are no required certifications, and thus no corresponding costs and
@@ -72,35 +72,34 @@
 <p>The Android compatibility program consists of three key components:</p>
 <ul>
 <li>The source code to the Android software stack</li>
-<li>The Compatilbility Definition Document, representing the "policy" aspect of compatibility</li>
-<li>The Compatilbility Test Suite, representing the "mechanism" of compatibility</li>
+<li>The Compatibility Definition Document (CDD), representing the "policy" aspect of compatibility</li>
+<li>The Compatibility Test Suite (CTS), representing the "mechanism" of compatibility</li>
 </ul>
 <p>Just as each version of the Android platform exists in a separate branch in
 the source code tree, there is a separate CTS and CDD for each version as
 well. The CDD, CTS, and source code are -- along with your hardware and your
 software customizations -- everything you need to create a compatible device.</p>
-<h1 id="compatibility-definition-document-cdd">Compatibility Definition Document (CDD)</h1>
-<p>For each release of the Android platform, a detailed Compatibility
-Definition Document (CDD) will be provided. The CDD represents the "policy"
+<h1 id="compatibility-definition-document-cdd">Compatibility Definition Document</h1>
+<p>For each release of the Android platform, a detailed CDD will be provided. The CDD represents the "policy"
 aspect of Android compatibility.</p>
 <p>No test suite, including CTS, can truly be comprehensive. For instance, the
 CTS includes a test that checks for the presence and correct behavior of
 OpenGL graphics APIs, but no software test can verify that the graphics
 actually appear correctly on the screen. More generally, it's impossible to
 test the presence of hardware features such as keyboards, display density,
-WiFi, and Bluetooth.</p>
+Wi-Fi, and Bluetooth.</p>
 <p>The CDD's role is to codify and clarify specific requirements, and
 eliminate ambiguity.  The CDD does not attempt to be comprehensive. Since
 Android is a single corpus of open-source code, the code itself is the
 comprehensive "specification" of the platform and its APIs. The CDD acts as a
-"hub", referencing other content (such as SDK API documentation) that provides
+"hub" referencing other content (such as SDK API documentation) that provides
 a framework in which the Android source code may be used so that the end
 result is a compatible system.</p>
 <p>If you want to build a device compatible with a given Android version,
 start by checking out the source code for that version, and then read the
 corresponding CDD and stay within its guidelines. For additional details,
-simply examine <a href="/compatibility/android-4.3-cdd.pdf">the latest CDD</a>.</p>
-<h1 id="compatibility-test-suite-cts">Compatibility Test Suite (CTS)</h1>
+simply examine <a href="/compatibility/android-cdd.pdf">the latest CDD</a>.</p>
+<h1 id="compatibility-test-suite-cts">Compatibility Test Suite</h1>
 <p>The CTS is a free, commercial-grade test suite, available for
 <a href="downloads.html">download</a>.
 The CTS represents the "mechanism" of compatibility.</p>
@@ -112,7 +111,7 @@
 development process.</p>
 <h1 id="compatibility-test-suite-verifier-cts-verifier">Compatibility Test Suite Verifier (CTS Verifier)</h1>
 <p>The Compatibility Test Suite Verifier (CTS Verifier) is a supplement to the
-Compatibility Test Suite (CTS), available for <a href="downloads.html">download</a>.
+CTS, available for <a href="downloads.html">download</a>.
 CTS Verifier provides tests for APIs and functions that cannot be tested on a
 stationary device without manual input (e.g. audio quality, accelerometer, etc).</p>
 <p>For details on the CTS, consult the <a href="cts-intro.html">CTS introduction</a>.</p>
diff --git a/src/devices/audio.jd b/src/devices/audio.jd
index 9f6338d..4435494 100644
--- a/src/devices/audio.jd
+++ b/src/devices/audio.jd
@@ -2,7 +2,7 @@
 @jd:body
 
 <!--
-    Copyright 2010 The Android Open Source Project
+    Copyright 2013 The Android Open Source Project
 
     Licensed under the Apache License, Version 2.0 (the "License");
     you may not use this file except in compliance with the License.
@@ -16,14 +16,6 @@
     See the License for the specific language governing permissions and
     limitations under the License.
 -->
-<div id="qv-wrapper">
-  <div id="qv">
-    <h2>In this document</h2>
-    <ol id="auto-toc">
-    </ol>
-  </div>
-</div>
-
 <p>
   Android's audio Hardware Abstraction Layer (HAL) connects the higher level, audio-specific
   framework APIs in <a href="http://developer.android.com/reference/android/media/package-summary.html">android.media</a>
diff --git a/src/devices/audio_implement.jd b/src/devices/audio_implement.jd
index 2007b2c..a4e6b7f 100644
--- a/src/devices/audio_implement.jd
+++ b/src/devices/audio_implement.jd
@@ -2,7 +2,7 @@
 @jd:body
 
 <!--
-    Copyright 2010 The Android Open Source Project
+    Copyright 2013 The Android Open Source Project
 
     Licensed under the Apache License, Version 2.0 (the "License");
     you may not use this file except in compliance with the License.
diff --git a/src/devices/audio_latency.jd b/src/devices/audio_latency.jd
index 2d3623e..25865bc 100644
--- a/src/devices/audio_latency.jd
+++ b/src/devices/audio_latency.jd
@@ -2,7 +2,7 @@
 @jd:body
 
 <!--
-    Copyright 2010 The Android Open Source Project
+    Copyright 2013 The Android Open Source Project
 
     Licensed under the Apache License, Version 2.0 (the "License");
     you may not use this file except in compliance with the License.
diff --git a/src/devices/audio_latency_measure.jd b/src/devices/audio_latency_measure.jd
index d5d1c17..7bb6ac5 100644
--- a/src/devices/audio_latency_measure.jd
+++ b/src/devices/audio_latency_measure.jd
@@ -2,7 +2,7 @@
 @jd:body
 
 <!--
-    Copyright 2010 The Android Open Source Project
+    Copyright 2013 The Android Open Source Project
 
     Licensed under the Apache License, Version 2.0 (the "License");
     you may not use this file except in compliance with the License.
diff --git a/src/devices/audio_warmup.jd b/src/devices/audio_warmup.jd
index ba1217c..d2b5c28 100644
--- a/src/devices/audio_warmup.jd
+++ b/src/devices/audio_warmup.jd
@@ -2,7 +2,7 @@
 @jd:body
 
 <!--
-    Copyright 2010 The Android Open Source Project
+    Copyright 2013 The Android Open Source Project
 
     Licensed under the Apache License, Version 2.0 (the "License");
     you may not use this file except in compliance with the License.
diff --git a/src/devices/bluetooth.jd b/src/devices/bluetooth.jd
index c974227..62cf79d 100644
--- a/src/devices/bluetooth.jd
+++ b/src/devices/bluetooth.jd
@@ -2,7 +2,7 @@
 @jd:body
 
 <!--
-    Copyright 2010 The Android Open Source Project
+    Copyright 2013 The Android Open Source Project
 
     Licensed under the Apache License, Version 2.0 (the "License");
     you may not use this file except in compliance with the License.
diff --git a/src/devices/camera.jd b/src/devices/camera/camera.jd
similarity index 97%
rename from src/devices/camera.jd
rename to src/devices/camera/camera.jd
index e85a23d..4b4b22c 100644
--- a/src/devices/camera.jd
+++ b/src/devices/camera/camera.jd
@@ -1,8 +1,8 @@
-page.title=Camera Version 1
+page.title=Camera HAL overview
 @jd:body
 
 <!--
-    Copyright 2010 The Android Open Source Project
+    Copyright 2013 The Android Open Source Project
 
     Licensed under the Apache License, Version 2.0 (the "License");
     you may not use this file except in compliance with the License.
@@ -138,7 +138,7 @@
 <li>Declare your camera’s media codec, format, and resolution capabilities in
 <code>device/&lt;company_name&gt;/&lt;device_name&gt;/media_profiles.xml</code> and
 <code>device/&lt;company_name&gt;/&lt;device_name&gt;/media_codecs.xml</code> XML files.
- For more information, see <a href="media.html#expose"> Exposing
+ For more information, see <a href="{@docRoot}devices/media.html#expose"> Exposing
  Codecs and Profiles to the Framework</a> for information on how to do this.
 </p></code>
 
diff --git a/src/devices/camera/camera3.jd b/src/devices/camera/camera3.jd
new file mode 100644
index 0000000..6fe9770
--- /dev/null
+++ b/src/devices/camera/camera3.jd
@@ -0,0 +1,184 @@
+page.title=Camera HAL v3 overview
+@jd:body
+
+<!--
+    Copyright 2013 The Android Open Source Project
+
+    Licensed under the Apache License, Version 2.0 (the "License");
+    you may not use this file except in compliance with the License.
+    You may obtain a copy of the License at
+
+        http://www.apache.org/licenses/LICENSE-2.0
+
+    Unless required by applicable law or agreed to in writing, software
+    distributed under the License is distributed on an "AS IS" BASIS,
+    WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+    See the License for the specific language governing permissions and
+    limitations under the License.
+-->
+<div id="qv-wrapper">
+  <div id="qv">
+    <h2>In this document</h2>
+    <ol id="auto-toc">
+    </ol>
+  </div>
+</div>
+
+<p>
+Android's camera Hardware Abstraction Layer (HAL) connects the higher level 
+camera framework APIs in 
+<a
+href="http://developer.android.com/reference/android/hardware/Camera.html">android.hardware.Camera</a> 
+to your underlying camera driver and hardware. The latest version of Android 
+introduces a new, underlying implementation of the camera stack. If you have 
+previously developed a camera HAL module and driver for other versions of 
+Android, be aware that there are significant changes in the camera pipeline.</p>
+<p>Version 1 of the camera HAL is still supported for future releases of Android 
+  because many devices still rely on it. Implementing both HALs is also supported 
+  by the Android camera service, which is useful when you want to support a less 
+  capable front-facing camera with version 1 of the HAL and a more advanced 
+  back-facing camera with version 3 of the HAL. Version 2 was a stepping stone to 
+  version 3 and is not supported.</p>
+<p>
+There is only one camera HAL module (with its own version number, currently 1, 2,
+or 2.1), which lists multiple independent camera devices that each have
+their own version. Camera module v2 or newer is required to support devices v2 or newer, and such
+camera modules can have a mix of camera device versions. This is what we mean
+when we say Android supports implementing both HALs.
+</p>
+<p><strong>Note:</strong> The new camera HAL is in active development and can change at any 
+  time. This document describes at a high level the design of the camera subsystem 
+  and omits many details. Stay tuned for more updates to the PDK repository and 
+  look out for updates to the Camera HAL and reference implementation for more 
+  information.</p>
+
+<h2 id="overview">Overview</h2>
+
+<p>
+Version 1 of the camera subsystem was designed as a black box with high-level 
+controls. Roughly speaking, the old subsystem has three operating modes:</p>
+
+<ul>
+<li>Preview</li>
+<li>Video Record</li>
+<li>Still Capture</li>
+</ul>
+
+<p>Each mode has slightly different and overlapping capabilities. This made it hard 
+to implement new types of features, such as burst mode, since it would fall 
+between two of these modes.<br/>
+<img src="images/camera_block.png" alt="Camera block diagram"/><br/>
+<strong>Figure 1.</strong> Camera components</p>
+
+<h2 id="v3-enhance">Version 3 enhancements</h2>
+
+<p>The aim of the Android Camera API redesign is to substantially increase the 
+ability of applications to control the camera subsystem on Android devices while 
+reorganizing the API to make it more efficient and maintainable.</p>
+
+<p>The additional control makes it easier to build high-quality camera applications 
+on Android devices that can operate reliably across multiple products while 
+still using device-specific algorithms whenever possible to maximize quality and 
+performance.</p>
+
+<p>Version 3 of the camera subsystem structures the operation modes into a single 
+unified view, which can be used to implement any of the previous modes and 
+several others, such as burst mode. This results in better user control for 
+focus and exposure and more post-processing, such as noise reduction, contrast 
+and sharpening. Further, this simplified view makes it easier for application 
+developers to use the camera's various functions.<br/>
+The API models the camera subsystem as a pipeline that converts incoming 
+requests for frame captures into frames, on a 1:1 basis. The requests 
+encapsulate all configuration information about the capture and processing of a 
+frame. This includes: resolution and pixel format; manual sensor, lens and flash 
+control; 3A operating modes; RAW->YUV processing control; statistics generation; 
+and so on.</p>
+
+<p>In simple terms, the application framework requests a frame from the camera 
+subsystem, and the camera subsystem returns results to an output stream. In 
+addition, metadata that contains information such as color spaces and lens 
+shading is generated for each set of results. The following sections and 
+diagrams give you more detail about each component.<br/>
+You can think of camera version 3 as a pipeline to camera version 1's one-way 
+stream. It converts each capture request into one image captured by the sensor, 
+which is processed into: </p>
+
+<ul>
+<li>A Result object with metadata about the capture.</li>
+<li>One to N buffers of image data, each into its own destination Surface.</li>
+</ul>
+
+<p>The set of possible output Surfaces is preconfigured:</p>
+
+<ul>
+<li>Each Surface is a destination for a stream of image buffers of a fixed 
+resolution.</li>
+<li>Only a small number of Surfaces can be configured as outputs at once (~3).</li>
+</ul>
+
+<p>A request contains all desired capture settings and the list of output Surfaces 
+to push image buffers into for this request (out of the total configured set). A 
+request can be one-shot ( with capture() ), or it may be repeated indefinitely 
+(with setRepeatingRequest() ). Captures have priority over repeating
+requests.</p>
+<img src="images/camera_simple_model.png" alt="Camera data model"/>
+<p><strong>Figure 2.</strong> Camera core operation model</p>
+
+<h2 id="supported-version">Supported version</h2>
+
+<p>Camera devices that support this version of the HAL must return 
+CAMERA_DEVICE_API_VERSION_3_1 in camera_device_t.common.version and in 
+camera_info_t.device_version (from camera_module_t.get_camera_info).<br/>
+Camera modules that may contain version 3.1 devices must implement at least 
+version 2.0 of the camera module interface (as defined by 
+camera_module_t.common.module_api_version).<br/>
+See camera_common.h for more versioning details.</p>
+
+<h2 id="version-history">Version history</h2>
+
+<h4><strong>1.0</strong></h4>
+
+<p>Initial Android camera HAL (Android 4.0) [camera.h]:</p>
+
+<ul>
+<li>Converted from C++ CameraHardwareInterface abstraction layer.</li>
+<li>Supports android.hardware.Camera API.</li>
+</ul>
+
+<h4><strong>2.0</strong></h4>
+
+<p>Initial release of expanded-capability HAL (Android 4.2) [camera2.h]:</p>
+
+<ul>
+<li>Sufficient for implementing existing android.hardware.Camera API.</li>
+<li>Allows for ZSL queue in camera service layer</li>
+<li>Not tested for any new features such as manual capture control, Bayer RAW 
+capture, reprocessing of RAW data.</li>
+</ul>
+
+<h4><strong>3.0</strong></h4>
+
+<p>First revision of expanded-capability HAL:</p>
+
+<ul>
+<li>Major version change since the ABI is completely different. No change to the 
+required hardware capabilities or operational model from 2.0.</li>
+<li>Reworked input request and stream queue interfaces: Framework calls into HAL 
+with next request and stream buffers already dequeued. Sync framework support 
+is included, necessary for efficient implementations.</li>
+<li>Moved triggers into requests, most notifications into results.</li>
+<li>Consolidated all callbacks into framework into one structure, and all setup 
+methods into a single initialize() call.</li>
+<li>Made stream configuration into a single call to simplify stream management. 
+Bidirectional streams replace STREAM_FROM_STREAM construct.</li>
+<li>Limited mode semantics for older/limited hardware devices.</li>
+</ul>
+
+<h4><strong>3.1</strong></h4>
+
+<p>Minor revision of expanded-capability HAL:</p>
+
+<ul>
+<li>configure_streams passes consumer usage flags to the HAL.</li>
+<li>flush call to drop all in-flight requests/buffers as fast as possible.</li>
+</ul>
diff --git a/src/devices/camera/camera3_3Amodes.jd b/src/devices/camera/camera3_3Amodes.jd
new file mode 100644
index 0000000..89d9841
--- /dev/null
+++ b/src/devices/camera/camera3_3Amodes.jd
@@ -0,0 +1,662 @@
+page.title=3A Modes and State Transition
+@jd:body
+
+<!--
+    Copyright 2013 The Android Open Source Project
+
+    Licensed under the Apache License, Version 2.0 (the "License");
+    you may not use this file except in compliance with the License.
+    You may obtain a copy of the License at
+
+        http://www.apache.org/licenses/LICENSE-2.0
+
+    Unless required by applicable law or agreed to in writing, software
+    distributed under the License is distributed on an "AS IS" BASIS,
+    WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+    See the License for the specific language governing permissions and
+    limitations under the License.
+-->
+<div id="qv-wrapper">
+  <div id="qv">
+    <h2>In this document</h2>
+    <ol id="auto-toc">
+    </ol>
+  </div>
+</div>
+
+<p>
+  While the actual 3A algorithms are up to the HAL implementation, a high-level 
+  state machine description is defined by the HAL interface to allow the HAL 
+  device and the framework to communicate about the current state of 3A and 
+  trigger 3A events.</p>
+<p>When the device is opened, all the individual 3A states must be STATE_INACTIVE. 
+  Stream configuration does not reset 3A. For example, locked focus must be 
+  maintained across the configure() call.</p>
+<p>Triggering a 3A action involves simply setting the relevant trigger entry in the 
+  settings for the next request to indicate start of trigger. For example, the 
+  trigger for starting an autofocus scan is setting the entry 
+  ANDROID_CONTROL_AF_TRIGGER to ANDROID_CONTROL_AF_TRIGGER_START for one request; 
+  and cancelling an autofocus scan is triggered by setting 
+  ANDROID_CONTROL_AF_TRIGGER to ANDROID_CONTROL_AF_TRIGGER_CANCEL. Otherwise, the 
+  entry will not exist or be set to ANDROID_CONTROL_AF_TRIGGER_IDLE. Each request 
+  with a trigger entry set to a non-IDLE value will be treated as an independent 
+  triggering event.</p>
+<p>At the top level, 3A is controlled by the ANDROID_CONTROL_MODE setting. It 
+  selects between no 3A (ANDROID_CONTROL_MODE_OFF), normal AUTO mode 
+  (ANDROID_CONTROL_MODE_AUTO), and using the scene mode setting 
+  (ANDROID_CONTROL_USE_SCENE_MODE):</p>
+<ul>
+  <li>In OFF mode, each of the individual Auto-focus(AF), auto-exposure (AE), and 
+    auto-whitebalance (AWB) modes are effectively OFF, and none of the capture 
+    controls may be overridden by the 3A routines.</li>
+  <li>In AUTO mode, AF, AE, and AWB modes all run their own independent algorithms, 
+    and have their own mode, state, and trigger metadata entries, as listed in the 
+    next section.</li>
+  <li>In USE_SCENE_MODE, the value of the ANDROID_CONTROL_SCENE_MODE entry must be 
+    used to determine the behavior of 3A routines. In SCENE_MODEs other than 
+    FACE_PRIORITY, the HAL must override the values of 
+    ANDROID_CONTROL_AE/AWB/AF_MODE to be the mode it prefers for the selected 
+    SCENE_MODE. For example, the HAL may prefer SCENE_MODE_NIGHT to use 
+    CONTINUOUS_FOCUS AF mode. Any user selection of AE/AWB/AF_MODE must 
+    be ignored for these scene modes.</li>
+  <li>For SCENE_MODE_FACE_PRIORITY, the AE/AWB/AF_MODE controls work as in 
+    ANDROID_CONTROL_MODE_AUTO, but the 3A routines must bias toward metering and 
+    focusing on any detected faces in the scene.</li>
+</ul>
+<h2 id="auto-focus">Auto-focus settings and result entries</h2>
+<p>Main metadata entries:<br/>
+  ANDROID_CONTROL_AF_MODE: Control for selecting the current autofocus mode. Set 
+  by the framework in the request settings.<br/>
+  AF_MODE_OFF: AF is disabled; the framework/app directly controls lens position.<br/>
+  AF_MODE_AUTO: Single-sweep autofocus. No lens movement unless AF is triggered.<br/>
+  AF_MODE_MACRO: Single-sweep up-close autofocus. No lens movement unless AF is 
+  triggered.<br/>
+  AF_MODE_CONTINUOUS_VIDEO: Smooth continuous focusing, for recording video. 
+  Triggering immediately locks focus in current position. Canceling resumes 
+  continuous focusing.<br/>
+  AF_MODE_CONTINUOUS_PICTURE: Fast continuous focusing, for zero-shutter-lag still 
+  capture. Triggering locks focus once currently active sweep concludes. Canceling 
+  resumes continuous focusing.<br/>
+  AF_MODE_EDOF: Advanced extended depth of field focusing. There is no autofocus 
+  scan, so triggering one or canceling one has no effect. Images are focused 
+  automatically by the HAL.<br/>
+  ANDROID_CONTROL_AF_STATE: Dynamic metadata describing the current AF algorithm 
+  state, reported by the HAL in the result metadata.<br/>
+  AF_STATE_INACTIVE: No focusing has been done, or algorithm was reset. Lens is 
+  not moving. Always the state for MODE_OFF or MODE_EDOF. When the device is 
+  opened, it must start in this state.<br/>
+  AF_STATE_PASSIVE_SCAN: A continuous focus algorithm is currently scanning for 
+  good focus. The lens is moving.<br/>
+  AF_STATE_PASSIVE_FOCUSED: A continuous focus algorithm believes it is well 
+  focused. The lens is not moving. The HAL may spontaneously leave this state.<br/>
+  AF_STATE_PASSIVE_UNFOCUSED: A continuous focus algorithm believes it is not well 
+  focused. The lens is not moving. The HAL may spontaneously leave this state.<br/>
+  AF_STATE_ACTIVE_SCAN: A scan triggered by the user is underway.<br/>
+  AF_STATE_FOCUSED_LOCKED: The AF algorithm believes it is focused. The lens is 
+  not moving.<br/>
+  AF_STATE_NOT_FOCUSED_LOCKED: The AF algorithm has been unable to focus. The lens 
+  is not moving.<br/>
+  ANDROID_CONTROL_AF_TRIGGER: Control for starting an autofocus scan, the meaning 
+  of which depends on mode and state. Set by the framework in the request 
+  settings.<br/>
+  AF_TRIGGER_IDLE: No current trigger.<br/>
+  AF_TRIGGER_START: Trigger start of AF scan. Effect depends on mode and state.<br/>
+  AF_TRIGGER_CANCEL: Cancel current AF scan if any, and reset algorithm to 
+  default.<br/>
+  Additional metadata entries:<br/>
+  ANDROID_CONTROL_AF_REGIONS: Control for selecting the regions of the field of 
+  view (FOV) that should be used to determine good focus. This applies to all AF 
+  modes that scan for focus. Set by the framework in the request settings.</p>
+<h2 id="auto-exposure">Auto-exposure settings and result entries</h2>
+<p>Main metadata entries:<br/>
+  ANDROID_CONTROL_AE_MODE: Control for selecting the current auto-exposure mode. 
+  Set by the framework in the request settings.<br/>
+  AE_MODE_OFF: Autoexposure is disabled; the user controls exposure, gain, frame 
+  duration, and flash.<br/>
+  AE_MODE_ON: Standard autoexposure, with flash control disabled. User may set 
+  flash to fire or to torch mode.<br/>
+  AE_MODE_ON_AUTO_FLASH: Standard autoexposure, with flash on at HAL's discretion 
+  for precapture and still capture. User control of flash disabled.<br/>
+  AE_MODE_ON_ALWAYS_FLASH: Standard autoexposure, with flash always fired for 
+  capture, and at HAL's discretion for precapture. User control of flash disabled.<br/>
+  AE_MODE_ON_AUTO_FLASH_REDEYE: Standard autoexposure, with flash on at HAL's 
+  discretion for precapture and still capture. Use a flash burst at end of 
+  precapture sequence to reduce redeye in the final picture. User control of flash 
+  disabled.<br/>
+  ANDROID_CONTROL_AE_STATE: Dynamic metadata describing the current AE algorithm 
+  state, reported by the HAL in the result metadata.<br/>
+  AE_STATE_INACTIVE: Initial AE state after mode switch. When the device is 
+  opened, it must start in this state.<br/>
+  AE_STATE_SEARCHING: AE is not converged to a good value and is adjusting 
+  exposure parameters.<br/>
+  AE_STATE_CONVERGED: AE has found good exposure values for the current scene, and 
+  the exposure parameters are not changing. HAL may spontaneously leave this state 
+  to search for a better solution.<br/>
+  AE_STATE_LOCKED: AE has been locked with the AE_LOCK control. Exposure values 
+  are not changing.<br/>
+  AE_STATE_FLASH_REQUIRED: The HAL has converged exposure but believes flash is 
+  required for a sufficiently bright picture. Used for determining if a 
+  zero-shutter-lag frame can be used.<br/>
+  AE_STATE_PRECAPTURE: The HAL is in the middle of a precapture sequence. 
+  Depending on AE mode, this mode may involve firing the flash for metering or a 
+  burst of flash pulses for redeye reduction.<br/>
+  ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER: Control for starting a metering sequence 
+  before capturing a high-quality image. Set by the framework in the request 
+  settings.<br/>
+  PRECAPTURE_TRIGGER_IDLE: No current trigger.<br/>
+  PRECAPTURE_TRIGGER_START: Start a precapture sequence. The HAL should use the 
+  subsequent requests to measure good exposure/white balance for an upcoming 
+  high-resolution capture.<br/>
+  Additional metadata entries:<br/>
+  ANDROID_CONTROL_AE_LOCK: Control for locking AE controls to their current 
+  values.<br/>
+  ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION: Control for adjusting AE algorithm 
+  target brightness point.<br/>
+  ANDROID_CONTROL_AE_TARGET_FPS_RANGE: Control for selecting the target frame rate 
+  range for the AE algorithm. The AE routine cannot change the frame rate to be 
+  outside these bounds.<br/>
+  ANDROID_CONTROL_AE_REGIONS: Control for selecting the regions of the FOV that 
+  should be used to determine good exposure levels. This applies to all AE modes 
+  besides OFF.</p>
+<h2 id="auto-wb">Auto-whitebalance settings and result entries</h2>
+<p>Main metadata entries:<br/>
+  ANDROID_CONTROL_AWB_MODE: Control for selecting the current white-balance mode.<br/>
+  AWB_MODE_OFF: Auto-whitebalance is disabled. User controls color matrix.<br/>
+  AWB_MODE_AUTO: Automatic white balance is enabled; 3A controls color transform, 
+  possibly using more complex transforms than a simple matrix.<br/>
+  AWB_MODE_INCANDESCENT: Fixed white balance settings good for indoor incandescent 
+  (tungsten) lighting, roughly 2700K.<br/>
+  AWB_MODE_FLUORESCENT: Fixed white balance settings good for fluorescent 
+  lighting, roughly 5000K.<br/>
+  AWB_MODE_WARM_FLUORESCENT: Fixed white balance settings good for fluorescent 
+  lighting, roughly 3000K.<br/>
+  AWB_MODE_DAYLIGHT: Fixed white balance settings good for daylight, roughly 
+  5500K.<br/>
+  AWB_MODE_CLOUDY_DAYLIGHT: Fixed white balance settings good for clouded 
+  daylight, roughly 6500K.<br/>
+  AWB_MODE_TWILIGHT: Fixed white balance settings good for near-sunset/sunrise, 
+  roughly 15000K.<br/>
+  AWB_MODE_SHADE: Fixed white balance settings good for areas indirectly lit by 
+  the sun, roughly 7500K.<br/>
+  ANDROID_CONTROL_AWB_STATE: Dynamic metadata describing the current AWB algorithm 
+  state, reported by the HAL in the result metadata.<br/>
+  AWB_STATE_INACTIVE: Initial AWB state after mode switch. When the device is 
+  opened, it must start in this state.<br/>
+  AWB_STATE_SEARCHING: AWB is not converged to a good value and is changing color 
+  adjustment parameters.<br/>
+  AWB_STATE_CONVERGED: AWB has found good color adjustment values for the current 
+  scene, and the parameters are not changing. HAL may spontaneously leave this 
+  state to search for a better solution.<br/>
+  AWB_STATE_LOCKED: AWB has been locked with the AWB_LOCK control. Color 
+  adjustment values are not changing.<br/>
+  Additional metadata entries:<br/>
+  ANDROID_CONTROL_AWB_LOCK: Control for locking AWB color adjustments to their 
+  current values.<br/>
+  ANDROID_CONTROL_AWB_REGIONS: Control for selecting the regions of the FOV that 
+  should be used to determine good color balance. This applies only to 
+  auto-whitebalance mode.</p>
+<h2 id="state-transition">General state machine transition notes</h2>
+<p>Switching between AF, AE, or AWB modes always resets the algorithm's state to 
+  INACTIVE. Similarly, switching between CONTROL_MODE or CONTROL_SCENE_MODE if 
+  CONTROL_MODE == USE_SCENE_MODE resets all the algorithm states to INACTIVE.<br/>
+  The tables below are per-mode.</p>
+<h2 id="af-state">AF state machines</h2>
+<table>
+  <tr>
+    <td><strong>mode = AF_MODE_OFF or AF_MODE_EDOF</strong></td>
+    <td></td>
+    <td></td>
+    <td></td>
+  </tr>
+  <tr>
+    <th>State</th>
+    <th>Transformation cause</th>
+    <th>New state</th>
+    <th>Notes</th>
+  </tr>
+  <tr>
+    <td>INACTIVE</td>
+    <td></td>
+    <td></td>
+    <td>AF is disabled</td>
+  </tr>
+  <tr>
+    <td><strong>mode = AF_MODE_AUTO or AF_MODE_MACRO</strong></td>
+    <td></td>
+    <td></td>
+    <td></td>
+  </tr>
+  <tr>
+    <th>State</th>
+    <th>Transformation cause</th>
+    <th>New state</th>
+    <th>Notes</th>
+  </tr>
+  <tr>
+    <td>INACTIVE</td>
+    <td>AF_TRIGGER</td>
+    <td>ACTIVE_SCAN</td>
+    <td>Start AF sweep
+      Lens now moving</td>
+  </tr>
+  <tr>
+    <td>ACTIVE_SCAN</td>
+    <td>AF sweep done</td>
+    <td>FOCUSED_LOCKED</td>
+    <td>If AF successful
+      Lens now locked</td>
+  </tr>
+  <tr>
+    <td>ACTIVE_SCAN</td>
+    <td>AF sweep done</td>
+    <td>NOT_FOCUSED_LOCKED</td>
+    <td>If AF unsuccessful
+      Lens now locked</td>
+  </tr>
+  <tr>
+    <td>ACTIVE_SCAN</td>
+    <td>AF_CANCEL</td>
+    <td>INACTIVE</td>
+    <td>Cancel/reset AF
+      Lens now locked</td>
+  </tr>
+  <tr>
+    <td>FOCUSED_LOCKED</td>
+    <td>AF_CANCEL</td>
+    <td>INACTIVE</td>
+    <td>Cancel/reset AF</td>
+  </tr>
+  <tr>
+    <td>FOCUSED_LOCKED</td>
+    <td>AF_TRIGGER</td>
+    <td>ACTIVE_SCAN</td>
+    <td>Start new sweep
+      Lens now moving</td>
+  </tr>
+  <tr>
+    <td>NOT_FOCUSED_LOCKED</td>
+    <td>AF_CANCEL</td>
+    <td>INACTIVE</td>
+    <td>Cancel/reset AF</td>
+  </tr>
+  <tr>
+    <td>NOT_FOCUSED_LOCKED</td>
+    <td>AF_TRIGGER</td>
+    <td>ACTIVE_SCAN</td>
+    <td>Start new sweep
+      Lens now moving</td>
+  </tr>
+  <tr>
+    <td>All states</td>
+    <td>mode change</td>
+    <td>INACTIVE</td>
+    <td></td>
+  </tr>
+  <tr>
+    <td><strong>mode = AF_MODE_CONTINUOUS_VIDEO</strong></td>
+    <td></td>
+    <td></td>
+    <td></td>
+  </tr>
+  <tr>
+    <th>State</th>
+    <th>Transformation cause</th>
+    <th>New state</th>
+    <th>Notes</th>
+  </tr>
+  <tr>
+    <td>INACTIVE</td>
+    <td>HAL initiates new scan</td>
+    <td>PASSIVE_SCAN</td>
+    <td>Start AF sweep
+      Lens now moving</td>
+  </tr>
+  <tr>
+    <td>INACTIVE</td>
+    <td>AF_TRIGGER</td>
+    <td>NOT_FOCUSED_LOCKED</td>
+    <td>AF state query 
+      Lens now locked</td>
+  </tr>
+  <tr>
+    <td>PASSIVE_SCAN</td>
+    <td>HAL completes current scan</td>
+    <td>PASSIVE_FOCUSED</td>
+    <td>End AF scan
+      Lens now locked </td>
+  </tr>
+  <tr>
+    <td>PASSIVE_SCAN</td>
+    <td>AF_TRIGGER</td>
+    <td>FOCUSED_LOCKED</td>
+    <td>Immediate transformation
+      if focus is good
+      Lens now locked</td>
+  </tr>
+  <tr>
+    <td>PASSIVE_SCAN</td>
+    <td>AF_TRIGGER</td>
+    <td>NOT_FOCUSED_LOCKED</td>
+    <td>Immediate transformation
+      if focus is bad
+      Lens now locked</td>
+  </tr>
+  <tr>
+    <td>PASSIVE_SCAN</td>
+    <td>AF_CANCEL</td>
+    <td>INACTIVE</td>
+    <td>Reset lens position
+      Lens now locked</td>
+  </tr>
+  <tr>
+    <td>PASSIVE_FOCUSED</td>
+    <td>HAL initiates new scan</td>
+    <td>PASSIVE_SCAN</td>
+    <td>Start AF scan
+      Lens now moving</td>
+  </tr>
+  <tr>
+    <td>PASSIVE_FOCUSED</td>
+    <td>AF_TRIGGER</td>
+    <td>FOCUSED_LOCKED</td>
+    <td>Immediate transformation
+      if focus is good
+      Lens now locked</td>
+  </tr>
+  <tr>
+    <td>PASSIVE_FOCUSED</td>
+    <td>AF_TRIGGER</td>
+    <td>NOT_FOCUSED_LOCKED</td>
+    <td>Immediate transformation
+      if focus is bad
+      Lens now locked</td>
+  </tr>
+  <tr>
+    <td>FOCUSED_LOCKED</td>
+    <td>AF_TRIGGER</td>
+    <td>FOCUSED_LOCKED</td>
+    <td>No effect</td>
+  </tr>
+  <tr>
+    <td>FOCUSED_LOCKED</td>
+    <td>AF_CANCEL</td>
+    <td>INACTIVE</td>
+    <td>Restart AF scan</td>
+  </tr>
+  <tr>
+    <td>NOT_FOCUSED_LOCKED</td>
+    <td>AF_TRIGGER</td>
+    <td>NOT_FOCUSED_LOCKED</td>
+    <td>No effect</td>
+  </tr>
+  <tr>
+    <td>NOT_FOCUSED_LOCKED</td>
+    <td>AF_CANCEL</td>
+    <td>INACTIVE</td>
+    <td>Restart AF scan</td>
+  </tr>
+  <tr>
+    <td><strong>mode = AF_MODE_CONTINUOUS_PICTURE</strong></td>
+    <td></td>
+    <td></td>
+    <td></td>
+  </tr>
+  <tr>
+    <th>State</th>
+    <th>Transformation cause</th>
+    <th>New state</th>
+    <th>Notes</th>
+  </tr>
+  <tr>
+    <td>INACTIVE</td>
+    <td>HAL initiates new scan</td>
+    <td>PASSIVE_SCAN</td>
+    <td>Start AF scan
+      Lens now moving</td>
+  </tr>
+  <tr>
+    <td>INACTIVE</td>
+    <td>AF_TRIGGER</td>
+    <td>NOT_FOCUSED_LOCKED</td>
+    <td>AF state query
+      Lens now locked</td>
+  </tr>
+  <tr>
+    <td>PASSIVE_SCAN</td>
+    <td>HAL completes current scan</td>
+    <td>PASSIVE_FOCUSED</td>
+    <td>End AF scan
+      Lens now locked</td>
+  </tr>
+  <tr>
+    <td>PASSIVE_SCAN</td>
+    <td>AF_TRIGGER</td>
+    <td>FOCUSED_LOCKED</td>
+    <td>Eventual transformation once focus good
+      Lens now locked</td>
+  </tr>
+  <tr>
+    <td>PASSIVE_SCAN</td>
+    <td>AF_TRIGGER</td>
+    <td>NOT_FOCUSED_LOCKED</td>
+    <td>Eventual transformation if cannot focus
+      Lens now locked</td>
+  </tr>
+  <tr>
+    <td>PASSIVE_SCAN</td>
+    <td>AF_CANCEL</td>
+    <td>INACTIVE</td>
+    <td>Reset lens position
+      Lens now locked</td>
+  </tr>
+  <tr>
+    <td>PASSIVE_FOCUSED</td>
+    <td>HAL initiates new scan</td>
+    <td>PASSIVE_SCAN</td>
+    <td>Start AF scan
+      Lens now moving</td>
+  </tr>
+  <tr>
+    <td>PASSIVE_FOCUSED</td>
+    <td>AF_TRIGGER</td>
+    <td>FOCUSED_LOCKED</td>
+    <td>Immediate transformation if focus is good
+      Lens now locked</td>
+  </tr>
+  <tr>
+    <td>PASSIVE_FOCUSED</td>
+    <td>AF_TRIGGER</td>
+    <td>NOT_FOCUSED_LOCKED</td>
+    <td>Immediate transformation if focus is bad
+      Lens now locked</td>
+  </tr>
+  <tr>
+    <td>FOCUSED_LOCKED</td>
+    <td>AF_TRIGGER</td>
+    <td>FOCUSED_LOCKED</td>
+    <td>No effect</td>
+  </tr>
+  <tr>
+    <td>FOCUSED_LOCKED</td>
+    <td>AF_CANCEL</td>
+    <td>INACTIVE</td>
+    <td>Restart AF scan</td>
+  </tr>
+  <tr>
+    <td>NOT_FOCUSED_LOCKED</td>
+    <td>AF_TRIGGER</td>
+    <td>NOT_FOCUSED_LOCKED</td>
+    <td>No effect</td>
+  </tr>
+  <tr>
+    <td>NOT_FOCUSED_LOCKED</td>
+    <td>AF_CANCEL</td>
+    <td>INACTIVE</td>
+    <td>Restart AF scan</td>
+  </tr>
+</table>
+<h2 id="ae-wb">AE and AWB state machines</h2>
+<p>The AE and AWB state machines are mostly identical. AE has additional 
+  FLASH_REQUIRED and PRECAPTURE states. So rows below that refer to those two 
+  states should be ignored for the AWB state machine.</p>
+<table>
+  <tr>
+    <td><strong>mode = AE_MODE_OFF / AWB mode not AUTO</strong></td>
+    <td></td>
+    <td></td>
+    <td></td>
+  </tr>
+  <tr>
+    <th>State</th>
+    <th>Transformation cause</th>
+    <th>New state</th>
+    <th>Notes</th>
+  </tr>
+  <tr>
+    <td>INACTIVE</td>
+    <td></td>
+    <td></td>
+    <td>AE/AWB disabled</td>
+  </tr>
+  <tr>
+    <td><strong>mode = AE_MODE_ON_* / AWB_MODE_AUTO</strong></td>
+    <td></td>
+    <td></td>
+    <td></td>
+  </tr>
+  <tr>
+    <th>State</th>
+    <th>Transformation cause</th>
+    <th>New state</th>
+    <th>Notes</th>
+  </tr>
+  <tr>
+    <td>INACTIVE</td>
+    <td>HAL initiates AE/AWB scan</td>
+    <td>SEARCHING</td>
+    <td></td>
+  </tr>
+  <tr>
+    <td>INACTIVE</td>
+    <td>AE/AWB_LOCK on</td>
+    <td>LOCKED</td>
+    <td>Values locked</td>
+  </tr>
+  <tr>
+    <td>SEARCHING</td>
+    <td>HAL finishes AE/AWB scan</td>
+    <td>CONVERGED</td>
+    <td>Good values, not changing</td>
+  </tr>
+  <tr>
+    <td>SEARCHING</td>
+    <td>HAL finishes AE scan</td>
+    <td>FLASH_REQUIRED</td>
+    <td>Converged but too dark without flash</td>
+  </tr>
+  <tr>
+    <td>SEARCHING</td>
+    <td>AE/AWB_LOCK on</td>
+    <td>LOCKED</td>
+    <td>Values locked</td>
+  </tr>
+  <tr>
+    <td>CONVERGED</td>
+    <td>HAL initiates AE/AWB scan</td>
+    <td>SEARCHING</td>
+    <td>Values locked</td>
+  </tr>
+  <tr>
+    <td>CONVERGED</td>
+    <td>AE/AWB_LOCK on</td>
+    <td>LOCKED</td>
+    <td>Values locked</td>
+  </tr>
+  <tr>
+    <td>FLASH_REQUIRED</td>
+    <td>HAL initiates AE/AWB scan</td>
+    <td>SEARCHING</td>
+    <td>Values locked</td>
+  </tr>
+  <tr>
+    <td>FLASH_REQUIRED</td>
+    <td>AE/AWB_LOCK on</td>
+    <td>LOCKED</td>
+    <td>Values locked</td>
+  </tr>
+  <tr>
+    <td>LOCKED</td>
+    <td>AE/AWB_LOCK off</td>
+    <td>SEARCHING</td>
+    <td>Values not good after unlock</td>
+  </tr>
+  <tr>
+    <td>LOCKED</td>
+    <td>AE/AWB_LOCK off</td>
+    <td>CONVERGED</td>
+    <td>Values good after unlock</td>
+  </tr>
+  <tr>
+    <td>LOCKED</td>
+    <td>AE_LOCK off</td>
+    <td>FLASH_REQUIRED</td>
+    <td>Exposure good, but too dark</td>
+  </tr>
+  <tr>
+    <td>All AE states</td>
+    <td>PRECAPTURE_START</td>
+    <td>PRECAPTURE</td>
+    <td>Start precapture sequence</td>
+  </tr>
+  <tr>
+    <td>PRECAPTURE</td>
+    <td>Sequence done, AE_LOCK off</td>
+    <td>CONVERGED</td>
+    <td>Ready for high-quality capture</td>
+  </tr>
+  <tr>
+    <td>PRECAPTURE</td>
+    <td>Sequence done, AE_LOCK on</td>
+    <td>LOCKED</td>
+    <td>Ready for high-quality capture</td>
+  </tr>
+</table>
+<h2 id="manual-control">Enabling manual control</h2>
+<p>Several controls are also involved in configuring the device 3A blocks to allow 
+  for direct application control.</p>
+<p>The HAL model for 3A control is that for each request, the HAL inspects the 
+  state of the 3A control fields. If any 3A routine is enabled, then that routine 
+  overrides the control variables that relate to that routine, and these override 
+  values are then available in the result metadata for that capture.  So for 
+  example, if auto-exposure is enabled in a request, the HAL should overwrite the 
+  exposure, gain, and frame duration fields (and potentially the flash fields, 
+  depending on AE mode) of the request. The list of relevant controls is:</p>
+<table>
+  <tr>
+    <th>Control name</th>
+    <th>Unit</th>
+    <th>Notes</th>
+  </tr>
+  <tr>
+    <td>android.control.mode</td>
+    <td>enum: OFF, AUTO, USE_SCENE_MODE</td>
+    <td>High-level 3A control. When set to OFF, all 3A control by the HAL is disabled. The application must set the fields for capture parameters itself.
+      When set to AUTO, the individual algorithm controls in android.control.* are in effect, such as android.control.afMode.
+      When set to USE_SCENE_MODE, the individual controls in android.control.* are mostly disabled, and the HAL implements one of the scene mode settings (such as ACTION, SUNSET, or PARTY) as it wishes.</td>
+  </tr>
+  <tr>
+    <td>android.control.afMode</td>
+    <td>enum</td>
+    <td>OFF means manual control of lens focusing through android.lens.focusDistance.</td>
+  </tr>
+  <tr>
+    <td>android.control.aeMode</td>
+    <td>enum</td>
+    <td>OFF means manual control of exposure/gain/frame duration through android.sensor.exposureTime / .sensitivity / .frameDuration</td>
+  </tr>
+  <tr>
+    <td>android.control.awbMode</td>
+    <td>enum</td>
+    <td>OFF means manual control of white balance. </td>
+  </tr>
+</table>
diff --git a/src/devices/camera/camera3_crop_reprocess.jd b/src/devices/camera/camera3_crop_reprocess.jd
new file mode 100644
index 0000000..e617e1e
--- /dev/null
+++ b/src/devices/camera/camera3_crop_reprocess.jd
@@ -0,0 +1,125 @@
+page.title=Output streams and cropping
+@jd:body
+
+<!--
+    Copyright 2013 The Android Open Source Project
+
+    Licensed under the Apache License, Version 2.0 (the "License");
+    you may not use this file except in compliance with the License.
+    You may obtain a copy of the License at
+
+        http://www.apache.org/licenses/LICENSE-2.0
+
+    Unless required by applicable law or agreed to in writing, software
+    distributed under the License is distributed on an "AS IS" BASIS,
+    WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+    See the License for the specific language governing permissions and
+    limitations under the License.
+-->
+<div id="qv-wrapper">
+  <div id="qv">
+    <h2>In this document</h2>
+    <ol id="auto-toc">
+    </ol>
+  </div>
+</div>
+
+<h2 id="output-stream">Output streams</h2>
+<p> Unlike the old camera subsystem, which has 3-4 different ways of producing data 
+  from the camera (ANativeWindow-based preview operations, preview callbacks, 
+  video callbacks, and takePicture callbacks), the new subsystem operates solely 
+  on the ANativeWindow-based pipeline for all resolutions and output formats. 
+  Multiple such streams can be configured at once, to send a single frame to many 
+  targets such as the GPU, the video encoder, RenderScript, or app-visible buffers 
+  (RAW Bayer, processed YUV buffers, or JPEG-encoded buffers).</p>
+<p>As an optimization, these output streams must be configured ahead of time, and 
+  only a limited number may exist at once. This allows for pre-allocation of 
+  memory buffers and configuration of the camera hardware, so that when requests 
+  are submitted with multiple or varying output pipelines listed, there won't be 
+  delays or latency in fulfilling the request.</p>
+<p>To support backwards compatibility with the current camera API, at least 3 
+  simultaneous YUV output streams must be supported, plus one JPEG stream. This is 
+  required for video snapshot support with the application also receiving YUV 
+  buffers:</p>
+<ul>
+  <li>One stream to the GPU/SurfaceView (opaque YUV format) for preview</li>
+  <li>One stream to the video encoder (opaque YUV format) for recording</li>
+  <li>One stream to the application (known YUV format) for preview frame callbacks</li>
+  <li>One stream to the application (JPEG) for video snapshots.</li>
+</ul>
+<p>The exact requirements are still being defined since the corresponding API
+isn't yet finalized.</p>
+<h2>Cropping</h2>
+<p>Cropping of the full pixel array (for digital zoom and other use cases where a 
+  smaller FOV is desirable) is communicated through the ANDROID_SCALER_CROP_REGION 
+  setting. This is a per-request setting, and can change on a per-request basis, 
+  which is critical for implementing smooth digital zoom.</p>
+<p>The region is defined as a rectangle (x, y, width, height), with (x, y) 
+  describing the top-left corner of the rectangle. The rectangle is defined on the 
+  coordinate system of the sensor active pixel array, with (0,0) being the 
+  top-left pixel of the active pixel array. Therefore, the width and height cannot 
+  be larger than the dimensions reported in the ANDROID_SENSOR_ACTIVE_PIXEL_ARRAY 
+  static info field. The minimum allowed width and height are reported by the HAL 
+  through the ANDROID_SCALER_MAX_DIGITAL_ZOOM static info field, which describes 
+  the maximum supported zoom factor. Therefore, the minimum crop region width and 
+  height are:</p>
+<pre>
+  {width, height} =
+   { floor(ANDROID_SENSOR_ACTIVE_PIXEL_ARRAY[0] /
+       ANDROID_SCALER_MAX_DIGITAL_ZOOM),
+     floor(ANDROID_SENSOR_ACTIVE_PIXEL_ARRAY[1] /
+       ANDROID_SCALER_MAX_DIGITAL_ZOOM) }
+  </pre>
+<p>If the crop region needs to fulfill specific requirements (for example, it needs 
+  to start on even coordinates, and its width/height needs to be even), the HAL 
+  must do the necessary rounding and write out the final crop region used in the 
+  output result metadata. Similarly, if the HAL implements video stabilization, it 
+  must adjust the result crop region to describe the region actually included in 
+  the output after video stabilization is applied. In general, a camera-using 
+  application must be able to determine the field of view it is receiving based on 
+  the crop region, the dimensions of the image sensor, and the lens focal length.</p>
+<p>Since the crop region applies to all streams, which may have different aspect 
+  ratios than the crop region, the exact sensor region used for each stream may be 
+  smaller than the crop region. Specifically, each stream should maintain square 
+  pixels and its aspect ratio by minimally further cropping the defined crop 
+  region. If the stream's aspect ratio is wider than the crop region, the stream 
+  should be further cropped vertically, and if the stream's aspect ratio is 
+  narrower than the crop region, the stream should be further cropped 
+  horizontally.</p>
+<p>In all cases, the stream crop must be centered within the full crop region, and 
+  each stream is only either cropped horizontally or vertically relative to the full 
+  crop region, never both.</p>
+<p>For example, if two streams are defined, a 640x480 stream (4:3 aspect), and a 
+  1280x720 stream (16:9 aspect), below demonstrates the expected output regions 
+  for each stream for a few sample crop regions, on a hypothetical 3 MP (2000 x 
+  1500 pixel array) sensor.</p>
+<p>
+  Crop region: (500, 375, 1000, 750) (4:3 aspect ratio)<br/>
+  640x480 stream crop: (500, 375, 1000, 750) (equal to crop region)<br/>
+  1280x720 stream crop: (500, 469, 1000, 562)<br/>
+  <img src="images/crop-region-43-ratio.png" alt="crop-region-43-ratio"/>
+</p>
+<p>Crop region: (500, 375, 1333, 750) (16:9 aspect ratio)<br/>
+  640x480 stream crop: (666, 375, 1000, 750)<br/>
+  1280x720 stream crop: (500, 375, 1333, 750) (equal to crop region)<br/>
+  <img src="images/crop-region-169-ratio.png" alt="crop-region-169-ratio"/>
+  <!-- TODO: Fix alt text and URL -->
+</p>
+<p>Crop region: (500, 375, 750, 750) (1:1 aspect ratio)<br/>
+  640x480 stream crop: (500, 469, 750, 562)<br/>
+  1280x720 stream crop: (500, 543, 750, 414)<br/>
+  <img src="images/crop-region-11-ratio.png" alt="crop-region-11-ratio"/>
+  <br/>
+  And a final example, a 1024x1024 square aspect ratio stream instead of the 480p 
+  stream:<br/>
+  Crop region: (500, 375, 1000, 750) (4:3 aspect ratio)<br/>
+  1024x1024 stream crop: (625, 375, 750, 750)<br/>
+  1280x720 stream crop: (500, 469, 1000, 562)<br/>
+  <img src="images/crop-region-43-square-ratio.png"
+alt="crop-region-43-square-ratio"/>
+</p>
+<h2 id="reprocessing">Reprocessing</h2>
+<p> Additional support for raw image files is provided by reprocessing support for RAW Bayer 
+  data. This support allows the camera pipeline to process a previously captured 
+  RAW buffer and metadata (an entire frame that was recorded previously), to 
+  produce a new rendered YUV or JPEG output.</p>
diff --git a/src/devices/camera/camera3_error_stream.jd b/src/devices/camera/camera3_error_stream.jd
new file mode 100644
index 0000000..c1a1610
--- /dev/null
+++ b/src/devices/camera/camera3_error_stream.jd
@@ -0,0 +1,160 @@
+page.title=Error and stream handling
+@jd:body
+
+<!--
+    Copyright 2013 The Android Open Source Project
+
+    Licensed under the Apache License, Version 2.0 (the "License");
+    you may not use this file except in compliance with the License.
+    You may obtain a copy of the License at
+
+        http://www.apache.org/licenses/LICENSE-2.0
+
+    Unless required by applicable law or agreed to in writing, software
+    distributed under the License is distributed on an "AS IS" BASIS,
+    WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+    See the License for the specific language governing permissions and
+    limitations under the License.
+-->
+<div id="qv-wrapper">
+  <div id="qv">
+    <h2>In this document</h2>
+    <ol id="auto-toc">
+    </ol>
+  </div>
+</div>
+
+<h2 id="error-mgmt">Error management</h2>
+<p>Camera HAL device ops functions that have a return value will all return -ENODEV 
+  / NULL in case of a serious error. This means the device cannot continue 
+  operation, and must be closed by the framework. Once this error is returned by 
+  some method, or if notify() is called with ERROR_DEVICE, only the close() method 
+  can be called successfully. All other methods will return -ENODEV / NULL.<br/>
+  If a device op is called in the wrong sequence, for example if the framework 
+  calls configure_streams() before initialize(), the device must return 
+  -ENOSYS from the call, and do nothing.<br/>
+  Transient errors in image capture must be reported through notify() as follows:</p>
+<ul>
+  <li>The failure of an entire capture to occur must be reported by the HAL by 
+    calling notify() with ERROR_REQUEST. Individual errors for the result metadata 
+    or the output buffers must not be reported in this case.</li>
+  <li>If the metadata for a capture cannot be produced, but some image buffers were 
+    filled, the HAL must call notify() with ERROR_RESULT.</li>
+  <li>If an output image buffer could not be filled, but either the metadata was 
+    produced or some other buffers were filled, the HAL must call notify() with 
+    ERROR_BUFFER for each failed buffer.</li>
+</ul>
+<p>In each of these transient failure cases, the HAL must still call 
+  process_capture_result, with valid output buffer_handle_t. If the result 
+  metadata could not be produced, it should be NULL. If some buffers could not be 
+  filled, their sync fences must be set to the error state.<br/>
+  Invalid input arguments result in -EINVAL from the appropriate methods. In that 
+  case, the framework must act as if that call had never been made.</p>
+<h2 id="stream-mgmt">Stream management</h2>
+<h3 id="configure_streams">configure_streams</h3>
+<p>Reset the HAL camera device processing pipeline and set up new input and output 
+  streams. This call replaces any existing stream configuration with the streams 
+  defined in the stream_list. This method will be called at least once after 
+  initialize() before a request is submitted with process_capture_request().<br/>
+  The stream_list must contain at least one output-capable stream, and may not 
+  contain more than one input-capable stream.<br/>
+  The stream_list may contain streams that are also in the currently-active set of 
+  streams (from the previous call to configure_streams()). These streams will 
+  already have valid values for usage, max_buffers, and the private pointer. If 
+  such a stream has already had its buffers registered, register_stream_buffers() 
+  will not be called again for the stream, and buffers from the stream can be 
+  immediately included in input requests.<br/>
+  If the HAL needs to change the stream configuration for an existing stream due 
+  to the new configuration, it may rewrite the values of usage and/or maxbuffers 
+  during the configure call. The framework will detect such a change, and will 
+  then reallocate the stream buffers, and call register_stream_buffers() again 
+  before using buffers from that stream in a request.<br/>
+  If a currently-active stream is not included in stream_list, the HAL may safely 
+  remove any references to that stream. It will not be reused in a later 
+  configure() call by the framework, and all the gralloc buffers for it will be 
+  freed after the configure_streams() call returns.<br/>
+  The stream_list structure is owned by the framework, and may not be accessed 
+  once this call completes. The address of an individual camera3_stream_t 
+  structure will remain valid for access by the HAL until the end of the first 
+  configure_streams() call which no longer includes that camera3_stream_t in the 
+  stream_list argument. The HAL may not change values in the stream structure 
+  outside of the private pointer, except for the usage and max_buffers members 
+  during the configure_streams() call itself.<br/>
+  If the stream is new, the usage, max_buffers, and private pointer fields of the 
+  stream structure will all be set to 0. The HAL device must set these fields 
+  before the configure_streams() call returns. These fields are then used by the 
+  framework and the platform gralloc module to allocate the gralloc buffers for 
+  each stream.<br/>
+  Before such a new stream can have its buffers included in a capture request, the 
+  framework will call register_stream_buffers() with that stream. However, the 
+  framework is not required to register buffers for all streams before 
+  submitting a request. This allows for quick startup of (for example) a preview 
+  stream, with allocation for other streams happening later or concurrently.</p>
+<h4><strong>Preconditions</strong></h4>
+<p>The framework will only call this method when no captures are being processed. 
+  That is, all results have been returned to the framework, and all in-flight 
+  input and output buffers have been returned and their release sync fences have 
+  been signaled by the HAL. The framework will not submit new requests for capture 
+  while the configure_streams() call is underway.</p>
+<h4><strong>Postconditions</strong></h4>
+<p>The HAL device must configure itself to provide maximum possible output frame 
+  rate given the sizes and formats of the output streams, as documented in the 
+  camera device's static metadata.</p>
+<h4><strong>Performance expectations</strong></h4>
+<p>This call is expected to be heavyweight and possibly take several hundred 
+  milliseconds to complete, since it may require resetting and reconfiguring the 
+  image sensor and the camera processing pipeline. Nevertheless, the HAL device 
+  should attempt to minimize the reconfiguration delay to minimize the 
+  user-visible pauses during application operational mode changes (such as 
+  switching from still capture to video recording).</p>
+<h4><strong>Return values</strong></h4>
+<ul>
+  <li>0: On successful stream configuration</li>
+  <li>undefined</li>
+  <li>-EINVAL: If the requested stream configuration is invalid. Some examples of 
+    invalid stream configurations include:
+    <ul>
+      <li>Including more than 1 input-capable stream (INPUT or BIDIRECTIONAL)</li>
+      <li>Not including any output-capable streams (OUTPUT or BIDIRECTIONAL)</li>
+      <li>Including streams with unsupported formats, or an unsupported size for 
+        that format.</li>
+      <li>Including too many output streams of a certain format.</li>
+      <li>Note that the framework submitting an invalid stream configuration is not 
+        normal operation, since stream configurations are checked before 
+        configure. An invalid configuration means that a bug exists in the 
+        framework code, or there is a mismatch between the HAL's static metadata 
+        and the requirements on streams.</li>
+    </ul>
+  </li>
+  <li>-ENODEV: If there has been a fatal error and the device is no longer 
+    operational. Only close() can be called successfully by the framework after 
+    this error is returned.</li>
+</ul>
+<h3 id="register-stream">register_stream_buffers</h3>
+<p>Register buffers for a given stream with the HAL device. This method is called 
+  by the framework after a new stream is defined by configure_streams, and before 
+  buffers from that stream are included in a capture request. If the same stream 
+  is listed in a subsequent configure_streams() call, register_stream_buffers will 
+  not be called again for that stream.<br/>
+  The framework does not need to register buffers for all configured streams 
+  before it submits the first capture request. This allows quick startup for 
+  preview (or similar use cases) while other streams are still being allocated.<br/>
+  This method is intended to allow the HAL device to map or otherwise prepare the 
+  buffers for later use. The buffers passed in will already be locked for use. At 
+  the end of the call, all the buffers must be ready to be returned to the stream. 
+  The buffer_set argument is only valid for the duration of this call.<br/>
+  If the stream format was set to HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED, the 
+  camera HAL should inspect the passed-in buffers here to determine any 
+  platform-private pixel format information.</p>
+<h4><strong>Return values</strong></h4>
+<ul>
+  <li>0: On successful registration of the new stream buffers</li>
+  <li>-EINVAL: If the stream_buffer_set does not refer to a valid active stream, or 
+    if the buffers array is invalid.</li>
+  <li>-ENOMEM: If there was a failure in registering the buffers. The framework must 
+    consider all the stream buffers to be unregistered, and can try to register 
+    again later.</li>
+  <li>-ENODEV: If there is a fatal error, and the device is no longer operational. 
+    Only close() can be called successfully by the framework after this error is 
+    returned.</li>
+</ul>
diff --git a/src/devices/camera/camera3_metadata.jd b/src/devices/camera/camera3_metadata.jd
new file mode 100644
index 0000000..9e43512
--- /dev/null
+++ b/src/devices/camera/camera3_metadata.jd
@@ -0,0 +1,65 @@
+page.title=Metadata and Controls
+@jd:body
+
+<!--
+    Copyright 2013 The Android Open Source Project
+
+    Licensed under the Apache License, Version 2.0 (the "License");
+    you may not use this file except in compliance with the License.
+    You may obtain a copy of the License at
+
+        http://www.apache.org/licenses/LICENSE-2.0
+
+    Unless required by applicable law or agreed to in writing, software
+    distributed under the License is distributed on an "AS IS" BASIS,
+    WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+    See the License for the specific language governing permissions and
+    limitations under the License.
+-->
+<div id="qv-wrapper">
+  <div id="qv">
+    <h2>In this document</h2>
+    <ol id="auto-toc">
+    </ol>
+  </div>
+</div>
+
+<h2 id="metadata">Metadata support</h2>
+<p> To support the saving of raw image files by the Android framework, substantial 
+  metadata is required about the sensor's characteristics. This includes 
+  information such as color spaces and lens shading functions.</p>
+<p>Most of this information is a static property of the camera subsystem and can 
+  therefore be queried before configuring any output pipelines or submitting any 
+  requests. The new camera APIs greatly expand the information provided by the 
+  getCameraInfo() method to provide this information to the application.</p>
+<p>In addition, manual control of the camera subsystem requires feedback from the 
+  assorted devices about their current state, and the actual parameters used in 
+  capturing a given frame. The actual values of the controls (exposure time, frame 
+  duration, and sensitivity) as actually used by the hardware must be included in 
+  the output metadata. This is essential so that applications know when either 
+  clamping or rounding took place, and so that the application can compensate for 
+  the real settings used for image capture.</p>
+<p>For example, if an application sets frame duration to 0 in a request, the HAL 
+  must clamp the frame duration to the real minimum frame duration for that 
+  request, and report that clamped minimum duration in the output result metadata.</p>
+<p>So if an application needs to implement a custom 3A routine (for example, to 
+  properly meter for an HDR burst), it needs to know the settings used to capture 
+  the latest set of results it has received in order to update the settings for 
+  the next request. Therefore, the new camera API adds a substantial amount of 
+  dynamic metadata to each captured frame. This includes the requested and actual 
+  parameters used for the capture, as well as additional per-frame metadata such 
+  as timestamps and statistics generator output.</p>
+<h2 id="per-setting">Per-setting control</h2>
+<p> For most settings, the expectation is that they can be changed every frame, 
+  without introducing significant stutter or delay to the output frame stream. 
+  Ideally, the output frame rate should solely be controlled by the capture 
+  request's frame duration field, and be independent of any changes to processing 
+  blocks' configuration.  In reality, some specific controls are known to be slow 
+  to change; these include the output resolution and output format of the camera 
+  pipeline, as well as controls that affect physical devices, such as lens focus 
+  distance. The exact requirements for each control set are detailed later.</p>
+<h2 id="raw-sensor">Raw sensor data support</h2>
+<p>In addition to the pixel formats supported by 
+  the old API, the new API adds a requirement for support for raw sensor data 
+  (Bayer RAW), both for advanced camera applications as well as to support raw
+  image files.</p>
diff --git a/src/devices/camera/camera3_requests_hal.jd b/src/devices/camera/camera3_requests_hal.jd
new file mode 100644
index 0000000..9bd4f28
--- /dev/null
+++ b/src/devices/camera/camera3_requests_hal.jd
@@ -0,0 +1,428 @@
+page.title=HAL subsystem
+@jd:body
+
+<!--
+    Copyright 2013 The Android Open Source Project
+
+    Licensed under the Apache License, Version 2.0 (the "License");
+    you may not use this file except in compliance with the License.
+    You may obtain a copy of the License at
+
+        http://www.apache.org/licenses/LICENSE-2.0
+
+    Unless required by applicable law or agreed to in writing, software
+    distributed under the License is distributed on an "AS IS" BASIS,
+    WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+    See the License for the specific language governing permissions and
+    limitations under the License.
+-->
+<div id="qv-wrapper">
+  <div id="qv">
+    <h2>In this document</h2>
+    <ol id="auto-toc">
+    </ol>
+  </div>
+</div>
+
+<h2 id="requests">Requests</h2>
+<p> The app framework issues requests for captured results to the camera subsystem. 
+  One request corresponds to one set of results. A request encapsulates all 
+  configuration information about the capturing and processing of those results. 
+  This includes things such as resolution and pixel format; manual sensor, lens, 
+  and flash control; 3A operating modes; RAW to YUV processing control; and 
+  statistics generation. This allows for much more control over the results' 
+  output and processing. Multiple requests can be in flight at once, and 
+  submitting requests is non-blocking. And the requests are always processed in 
+  the order they are received.<br/>
+  <img src="images/camera_model.png" alt="Camera request model"/>
+  <br/>
+  <strong>Figure 3.</strong> Camera model</p>
+<h2 id="hal-subsystem">The HAL and camera subsystem</h2>
+<p> The camera subsystem includes the implementations for components in the camera 
+  pipeline such as the 3A algorithm and processing controls. The camera HAL 
+  provides interfaces for you to implement your versions of these components. To 
+  maintain cross-platform compatibility between multiple device manufacturers and 
+  Image Signal Processor (ISP, or camera sensor) vendors, the camera pipeline 
+  model is virtual and does not directly correspond to any real ISP. However, it 
+  is similar enough to real processing pipelines so that you can map it to your 
+  hardware efficiently. In addition, it is abstract enough to allow for multiple 
+  different algorithms and orders of operation without compromising either 
+  quality, efficiency, or cross-device compatibility.<br/>
+  The camera pipeline also supports triggers that the app framework can initiate 
+  to turn on things such as auto-focus. It also sends notifications back to the 
+  app framework, notifying apps of events such as an auto-focus lock or errors.<br/>
+  <img src="images/camera_hal.png" alt="Camera hardware abstraction layer"/>
+  <br/>
+  <strong>Figure 4.</strong> Camera pipeline<br/>
+  Please note, some image processing blocks shown in the diagram above are not 
+  well-defined in the initial release.<br/>
+  The camera pipeline makes the following assumptions:</p>
+<ul>
+  <li>RAW Bayer output undergoes no processing inside the ISP.</li>
+  <li>Statistics are generated based off the raw sensor data.</li>
+  <li>The various processing blocks that convert raw sensor data to YUV are in an 
+    arbitrary order.</li>
+  <li>While multiple scale and crop units are shown, all scaler units share the 
+    output region controls (digital zoom). However, each unit may have a different 
+    output resolution and pixel format.</li>
+</ul>
+<p><strong>Summary of API use</strong><br/>
+  This is a brief summary of the steps for using the Android camera API. See the 
+  Startup and expected operation sequence section for a detailed breakdown of 
+  these steps, including API calls.</p>
+<ol>
+  <li>Listen for and enumerate camera devices.</li>
+  <li>Open device and connect listeners.</li>
+  <li>Configure outputs for target use case (such as still capture, recording, 
+    etc.).</li>
+  <li>Create request(s) for target use case.</li>
+  <li>Capture/repeat requests and bursts.</li>
+  <li>Receive result metadata and image data.</li>
+  <li>When switching use cases, return to step 3.</li>
+</ol>
+<p><strong>HAL operation summary</strong></p>
+<ul>
+  <li>Asynchronous requests for captures come from the framework.</li>
+  <li>HAL device must process requests in order. And for each request, produce 
+    output result metadata, and one or more output image buffers.</li>
+  <li>First-in, first-out for requests and results, and for streams referenced by 
+    subsequent requests. </li>
+  <li>Timestamps must be identical for all outputs from a given request, so that the 
+    framework can match them together if needed. </li>
+  <li>All capture configuration and state (except for the 3A routines) is 
+    encapsulated in the requests and results.</li>
+</ul>
+<p><img src="images/camera-hal-overview.png" alt="Camera HAL overview"/>
+  <br/>
+  <strong>Figure 5.</strong> Camera HAL overview</p>
+<h2 id="startup">Startup and expected operation sequence</h2>
+<p>This section contains a detailed explanation of the steps expected when using 
+  the camera API. Please see <a href="https://android.googlesource.com/platform/hardware/libhardware/+/master/include/hardware/camera3.h">platform/hardware/libhardware/include/hardware/camera3.h</a> for definitions of these structures and methods.</p>
+<ol>
+  <li>Framework calls camera_module_t-&gt;common.open(), which returns a 
+    hardware_device_t structure.</li>
+  <li>Framework inspects the hardware_device_t-&gt;version field, and instantiates the 
+    appropriate handler for that version of the camera hardware device. In case 
+    the version is CAMERA_DEVICE_API_VERSION_3_0, the device is cast to a 
+    camera3_device_t.</li>
+  <li>Framework calls camera3_device_t-&gt;ops-&gt;initialize() with the framework 
+    callback function pointers. This will only be called this one time after 
+    open(), before any other functions in the ops structure are called.</li>
+  <li>The framework calls camera3_device_t-&gt;ops-&gt;configure_streams() with a list of 
+    input/output streams to the HAL device.</li>
+  <li>The framework allocates gralloc buffers and calls 
+    camera3_device_t-&gt;ops-&gt;register_stream_buffers() for at least one of the 
+    output streams listed in configure_streams. The same stream is registered 
+    only once.</li>
+  <li>The framework requests default settings for some number of use cases with 
+    calls to camera3_device_t-&gt;ops-&gt;construct_default_request_settings(). This 
+    may occur any time after step 3.</li>
+  <li>The framework constructs and sends the first capture request to the HAL with 
+    settings based on one of the sets of default settings, and with at least one 
+    output stream that has been registered earlier by the framework. This is sent 
+    to the HAL with camera3_device_t-&gt;ops-&gt;process_capture_request(). The HAL 
+    must block the return of this call until it is ready for the next request to 
+    be sent.</li>
+  <li>The framework continues to submit requests, and possibly call 
+    register_stream_buffers() for not-yet-registered streams, and call 
+    construct_default_request_settings to get default settings buffers for other 
+    use cases.</li>
+  <li>When the capture of a request begins (sensor starts exposing for the 
+    capture), the HAL calls camera3_callback_ops_t-&gt;notify() with the SHUTTER 
+    event, including the frame number and the timestamp for start of exposure. 
+    This notify call must be made before the first call to 
+    process_capture_result() for that frame number.</li>
+  <li>After some pipeline delay, the HAL begins to return completed captures to 
+    the framework with camera3_callback_ops_t-&gt;process_capture_result(). These 
+    are returned in the same order as the requests were submitted. Multiple 
+    requests can be in flight at once, depending on the pipeline depth of the 
+    camera HAL device.</li>
+  <li>After some time, the framework may stop submitting new requests, wait for 
+    the existing captures to complete (all buffers filled, all results 
+    returned), and then call configure_streams() again. This resets the camera 
+    hardware and pipeline for a new set of input/output streams. Some streams 
+    may be reused from the previous configuration; if these streams' buffers had 
+    already been registered with the HAL, they will not be registered again. The 
+    framework then continues from step 7, if at least one registered output 
+    stream remains. (Otherwise, step 5 is required first.)</li>
+  <li>Alternatively, the framework may call camera3_device_t-&gt;common-&gt;close() to 
+    end the camera session. This may be called at any time when no other calls 
+    from the framework are active, although the call may block until all 
+    in-flight captures have completed (all results returned, all buffers 
+    filled). After the close call returns, no more calls to the 
+    camera3_callback_ops_t functions are allowed from the HAL. Once the close() 
+    call is underway, the framework may not call any other HAL device functions.</li>
+  <li>In case of an error or other asynchronous event, the HAL must call 
+    camera3_callback_ops_t-&gt;notify() with the appropriate error/event message. 
+    After returning from a fatal device-wide error notification, the HAL should 
+    act as if close() had been called on it. However, the HAL must either cancel 
+    or complete all outstanding captures before calling notify(), so that once 
+    notify() is called with a fatal error, the framework will not receive 
+    further callbacks from the device. Methods besides close() should return 
+    -ENODEV or NULL after the notify() method returns from a fatal error 
+    message.</li>
+</ol>
+<p><img src="images/camera-ops-flow.png" width="600" height="434" alt="Camera operations flow" />
+</p>
+<p><strong>Figure 6.</strong> Camera operational flow</p>
+<h2 id="ops-modes">Operational modes</h2>
+<p>The camera 3 HAL device can implement one of two possible operational modes: 
+  limited and full. Full support is expected from new higher-end devices. Limited 
+  mode has hardware requirements roughly in line with those for a camera HAL 
+  device v1 implementation, and is expected from older or inexpensive devices. 
+  Full is a strict superset of limited, and they share the same essential 
+  operational flow, as documented above.</p>
+<p>The HAL must indicate its level of support with the 
+  android.info.supportedHardwareLevel static metadata entry, with 0 indicating 
+  limited mode, and 1 indicating full mode support.</p>
+<p>Roughly speaking, limited-mode devices do not allow for application control of 
+  capture settings (3A control only), high-rate capture of high-resolution images, 
+  raw sensor readout, or support for YUV output streams above maximum recording 
+  resolution (JPEG only for large images).<br/>
+  Here are the details of limited-mode behavior:</p>
+<ul>
+  <li>Limited-mode devices do not need to implement accurate synchronization between 
+    capture request settings and the actual image data captured. Instead, changes 
+    to settings may take effect some time in the future, and possibly not for the 
+    same output frame for each settings entry. Rapid changes in settings may 
+    result in some settings never being used for a capture. However, captures that 
+    include high-resolution output buffers (&gt; 1080p) have to use the settings as 
+    specified (but see below for processing rate).</li>
+  <li>Captures in limited mode that include high-resolution (&gt; 1080p) output buffers 
+    may block in process_capture_request() until all the output buffers have been 
+    filled. A full-mode HAL device must process sequences of high-resolution 
+    requests at the rate indicated in the static metadata for that pixel format. 
+    The HAL must still call process_capture_result() to provide the output; the 
+    framework must simply be prepared for process_capture_request() to block until 
+    after process_capture_result() for that request completes for high-resolution 
+    captures for limited-mode devices.</li>
+  <li>Limited-mode devices do not need to support most of the settings/result/static 
+    info metadata. Only the following settings are expected to be consumed or 
+    produced by a limited-mode HAL device:
+    <ul>
+      <li>android.control.aeAntibandingMode (controls)</li>
+      <li>android.control.aeExposureCompensation (controls)</li>
+      <li>android.control.aeLock (controls)</li>
+      <li>android.control.aeMode (controls)</li>
+      <li>[OFF means ON_FLASH_TORCH]</li>
+      <li>android.control.aeRegions (controls)</li>
+      <li>android.control.aeTargetFpsRange (controls)</li>
+      <li>android.control.afMode (controls)</li>
+      <li>[OFF means infinity focus]</li>
+      <li>android.control.afRegions (controls)</li>
+      <li>android.control.awbLock (controls)</li>
+      <li>android.control.awbMode (controls)</li>
+      <li>[OFF not supported]</li>
+      <li>android.control.awbRegions (controls)</li>
+      <li>android.control.captureIntent (controls)</li>
+      <li>android.control.effectMode (controls)</li>
+      <li>android.control.mode (controls)</li>
+      <li>[OFF not supported]</li>
+      <li>android.control.sceneMode (controls)</li>
+      <li>android.control.videoStabilizationMode (controls)</li>
+      <li>android.control.aeAvailableAntibandingModes (static)</li>
+      <li>android.control.aeAvailableModes (static)</li>
+      <li>android.control.aeAvailableTargetFpsRanges (static)</li>
+      <li>android.control.aeCompensationRange (static)</li>
+      <li>android.control.aeCompensationStep (static)</li>
+      <li>android.control.afAvailableModes (static)</li>
+      <li>android.control.availableEffects (static)</li>
+      <li>android.control.availableSceneModes (static)</li>
+      <li>android.control.availableVideoStabilizationModes (static)</li>
+      <li>android.control.awbAvailableModes (static)</li>
+      <li>android.control.maxRegions (static)</li>
+      <li>android.control.sceneModeOverrides (static)</li>
+      <li>android.control.aeRegions (dynamic)</li>
+      <li>android.control.aeState (dynamic)</li>
+      <li>android.control.afMode (dynamic)</li>
+      <li>android.control.afRegions (dynamic)</li>
+      <li>android.control.afState (dynamic)</li>
+      <li>android.control.awbMode (dynamic)</li>
+      <li>android.control.awbRegions (dynamic)</li>
+      <li>android.control.awbState (dynamic)</li>
+      <li>android.control.mode (dynamic)</li>
+      <li>android.flash.info.available (static)</li>
+      <li>android.info.supportedHardwareLevel (static)</li>
+      <li>android.jpeg.gpsCoordinates (controls)</li>
+      <li>android.jpeg.gpsProcessingMethod (controls)</li>
+      <li>android.jpeg.gpsTimestamp (controls)</li>
+      <li>android.jpeg.orientation (controls)</li>
+      <li>android.jpeg.quality (controls)</li>
+      <li>android.jpeg.thumbnailQuality (controls)</li>
+      <li>android.jpeg.thumbnailSize (controls)</li>
+      <li>android.jpeg.availableThumbnailSizes (static)</li>
+      <li>android.jpeg.maxSize (static)</li>
+      <li>android.jpeg.gpsCoordinates (dynamic)</li>
+      <li>android.jpeg.gpsProcessingMethod (dynamic)</li>
+      <li>android.jpeg.gpsTimestamp (dynamic)</li>
+      <li>android.jpeg.orientation (dynamic)</li>
+      <li>android.jpeg.quality (dynamic)</li>
+      <li>android.jpeg.size (dynamic)</li>
+      <li>android.jpeg.thumbnailQuality (dynamic)</li>
+      <li>android.jpeg.thumbnailSize (dynamic)</li>
+      <li>android.lens.info.minimumFocusDistance (static)</li>
+      <li>android.request.id (controls)</li>
+      <li>android.request.id (dynamic)</li>
+      <li>android.scaler.cropRegion (controls)</li>
+      <li>[ignores (x,y), assumes center-zoom]</li>
+      <li>android.scaler.availableFormats (static)</li>
+      <li>[RAW not supported]</li>
+      <li>android.scaler.availableJpegMinDurations (static)</li>
+      <li>android.scaler.availableJpegSizes (static)</li>
+      <li>android.scaler.availableMaxDigitalZoom (static)</li>
+      <li>android.scaler.availableProcessedMinDurations (static)</li>
+      <li>android.scaler.availableProcessedSizes (static)</li>
+      <li>[full resolution not supported]</li>
+      <li>android.scaler.maxDigitalZoom (static)</li>
+      <li>android.scaler.cropRegion (dynamic)</li>
+      <li>android.sensor.orientation (static)</li>
+      <li>android.sensor.timestamp (dynamic)</li>
+      <li>android.statistics.faceDetectMode (controls)</li>
+      <li>android.statistics.info.availableFaceDetectModes (static)</li>
+      <li>android.statistics.faceDetectMode (dynamic)</li>
+      <li>android.statistics.faceIds (dynamic)</li>
+      <li>android.statistics.faceLandmarks (dynamic)</li>
+      <li>android.statistics.faceRectangles (dynamic)</li>
+      <li>android.statistics.faceScores (dynamic)</li>
+    </ul>
+  </li>
+</ul>
+<h2 id="interaction">Interaction between the application capture request, 3A
+control, and the processing pipeline</h2>
+<p>Depending on the settings in the 3A control block, the camera pipeline ignores 
+  some of the parameters in the application's capture request and uses the values 
+  provided by the 3A control routines instead. For example, when auto-exposure is 
+  active, the exposure time, frame duration, and sensitivity parameters of the 
+  sensor are controlled by the platform 3A algorithm, and any app-specified values 
+  are ignored. The values chosen for the frame by the 3A routines must be reported 
+  in the output metadata. The following table describes the different modes of the 
+  3A control block and the properties that are controlled by these modes. See 
+  the <a href="https://android.googlesource.com/platform/system/media/+/master/camera/docs/docs.html">platform/system/media/camera/docs/docs.html</a> file for definitions of these properties.</p>
+<table>
+  <tr>
+    <th>Parameter</th>
+    <th>State</th>
+    <th>Properties controlled</th>
+  </tr>
+  <tr>
+    <td>android.control.aeMode</td>
+    <td>OFF</td>
+    <td>None</td>
+  </tr>
+  <tr>
+    <td></td>
+    <td>ON</td>
+    <td>android.sensor.exposureTime
+      android.sensor.frameDuration
+      android.sensor.sensitivity
+      android.lens.aperture (if supported)
+      android.lens.filterDensity (if supported)</td>
+  </tr>
+  <tr>
+    <td></td>
+    <td>ON_AUTO_FLASH</td>
+    <td>Everything is ON, plus android.flash.firingPower, android.flash.firingTime, and android.flash.mode</td>
+  </tr>
+  <tr>
+    <td></td>
+    <td>ON_ALWAYS_FLASH</td>
+    <td>Same as ON_AUTO_FLASH</td>
+  </tr>
+  <tr>
+    <td></td>
+    <td>ON_AUTO_FLASH_RED_EYE</td>
+    <td>Same as ON_AUTO_FLASH</td>
+  </tr>
+  <tr>
+    <td>android.control.awbMode</td>
+    <td>OFF</td>
+    <td>None</td>
+  </tr>
+  <tr>
+    <td></td>
+    <td>WHITE_BALANCE_*</td>
+    <td>android.colorCorrection.transform. Platform-specific adjustments if android.colorCorrection.mode is FAST or HIGH_QUALITY.</td>
+  </tr>
+  <tr>
+    <td>android.control.afMode</td>
+    <td>OFF</td>
+    <td>None</td>
+  </tr>
+  <tr>
+    <td></td>
+    <td>FOCUS_MODE_*</td>
+    <td>android.lens.focusDistance</td>
+  </tr>
+  <tr>
+    <td>android.control.videoStabilization</td>
+    <td>OFF</td>
+    <td>None</td>
+  </tr>
+  <tr>
+    <td></td>
+    <td>ON</td>
+    <td>Can adjust android.scaler.cropRegion to implement video stabilization</td>
+  </tr>
+  <tr>
+    <td>android.control.mode</td>
+    <td>OFF</td>
+    <td>AE, AWB, and AF are disabled</td>
+  </tr>
+  <tr>
+    <td></td>
+    <td>AUTO</td>
+    <td>Individual AE, AWB, and AF settings are used</td>
+  </tr>
+  <tr>
+    <td></td>
+    <td>SCENE_MODE_*</td>
+    <td>Can override all parameters listed above. Individual 3A controls are disabled.</td>
+  </tr>
+</table>
+<p>The controls exposed for the 3A algorithm mostly map 1:1 to the old API's 
+  parameters (such as exposure compensation, scene mode, or white balance mode).<br/>
+  The controls in the Image Processing block in Figure 2 all 
+  operate on a similar principle, and generally each block has three modes:</p>
+<ul>
+  <li>OFF: This processing block is disabled. The demosaic, color correction, and 
+    tone curve adjustment blocks cannot be disabled.</li>
+  <li>FAST: In this mode, the processing block may not slow down the output frame 
+    rate compared to OFF mode, but should otherwise produce the best-quality 
+    output it can given that restriction. Typically, this would be used for 
+    preview or video recording modes, or burst capture for still images. On some 
+    devices, this may be equivalent to OFF mode (no processing can be done without 
+    slowing down the frame rate), and on some devices, this may be equivalent to 
+    HIGH_QUALITY mode (best quality still does not slow down frame rate).</li>
+  <li>HIGH_QUALITY: In this mode, the processing block should produce the best 
+    quality result possible, slowing down the output frame rate as needed. 
+    Typically, this would be used for high-quality still capture. Some blocks 
+    include a manual control which can be optionally selected instead of FAST or 
+    HIGH_QUALITY. For example, the color correction block supports a color 
+    transform matrix, while the tone curve adjustment supports an arbitrary global 
+    tone mapping curve.</li>
+</ul>
+  <p>The maximum frame rate that can be supported by a camera subsystem is a function 
+  of many factors:</p>
+<ul>
+  <li>Requested resolutions of output image streams</li>
+  <li>Availability of binning / skipping modes on the imager</li>
+  <li>The bandwidth of the imager interface</li>
+  <li>The bandwidth of the various ISP processing blocks</li>
+</ul>
+<p>Since these factors can vary greatly between different ISPs and sensors, the 
+  camera HAL interface tries to abstract the bandwidth restrictions into as simple 
+  a model as possible. The model presented has the following characteristics:</p>
+<ul>
+  <li>The image sensor is always configured to output the smallest resolution 
+    possible given the application's requested output stream sizes.  The smallest 
+    resolution is defined as being at least as large as the largest requested 
+    output stream size.</li>
+  <li>Since any request may use any or all the currently configured output streams, 
+    the sensor and ISP must be configured to support scaling a single capture to 
+    all the streams at the same time. </li>
+  <li>JPEG streams act like processed YUV streams for requests for which they are 
+    not included; in requests in which they are directly referenced, they act as 
+    JPEG streams.</li>
+  <li>The JPEG processor can run concurrently to the rest of the camera pipeline but 
+    cannot process more than one capture at a time.</li>
+</ul>
diff --git a/src/devices/camera/camera3_requests_methods.jd b/src/devices/camera/camera3_requests_methods.jd
new file mode 100644
index 0000000..bde2e44
--- /dev/null
+++ b/src/devices/camera/camera3_requests_methods.jd
@@ -0,0 +1,118 @@
+page.title=Request creation and submission
+@jd:body
+
+<!--
+    Copyright 2013 The Android Open Source Project
+
+    Licensed under the Apache License, Version 2.0 (the "License");
+    you may not use this file except in compliance with the License.
+    You may obtain a copy of the License at
+
+        http://www.apache.org/licenses/LICENSE-2.0
+
+    Unless required by applicable law or agreed to in writing, software
+    distributed under the License is distributed on an "AS IS" BASIS,
+    WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+    See the License for the specific language governing permissions and
+    limitations under the License.
+-->
+<div id="qv-wrapper">
+  <div id="qv">
+    <h2>In this document</h2>
+    <ol id="auto-toc">
+    </ol>
+  </div>
+</div>
+
+<h2 id="request-creation">Request creation and submission</h2>
+<h3 id="default-settings">construct_default_request_settings</h3>
+<p>Create capture settings for standard camera use cases. The device must return a 
+  settings buffer that is configured to meet the requested use case, which must be 
+  one of the CAMERA3_TEMPLATE_* enums. All request control fields must be 
+  included.<br/>
+  The HAL retains ownership of this structure, but the pointer to the structure 
+  must be valid until the device is closed. The framework and the HAL may not 
+  modify the buffer once it is returned by this call. The same buffer may be 
+  returned for subsequent calls for the same template, or for other templates.</p>
+<h4><strong>Return values</strong></h4>
+<ul>
+  <li>Valid metadata: On successful creation of a default settings buffer.</li>
+  <li>NULL: In case of a fatal error. After this is returned, only the close() 
+    method can be called successfully by the framework.</li>
+</ul>
+<h3 id="process-request">process_capture_request</h3>
+<p>Send a new capture request to the HAL. The HAL should not return from this call 
+  until it is ready to accept the next request to process. Only one call to 
+  process_capture_request() will be made at a time by the framework, and the calls 
+  will all be from the same thread. The next call to process_capture_request() 
+  will be made as soon as a new request and its associated buffers are available. 
+  In a normal preview scenario, this means the function will be called again by 
+  the framework almost instantly.<br/>
+  The actual request processing is asynchronous, with the results of capture being 
+  returned by the HAL through the process_capture_result() call. This call 
+  requires the result metadata to be available, but output buffers may simply 
+  provide sync fences to wait on. Multiple requests are expected to be in flight 
+  at once, to maintain full output frame rate.<br/>
+  The framework retains ownership of the request structure. It is only guaranteed 
+  to be valid during this call. The HAL device must make copies of the information 
+  it needs to retain for the capture processing. The HAL is responsible for 
+  waiting on and closing the buffers' fences and returning the buffer handles to 
+  the framework.<br/>
+  The HAL must write the file descriptor for the input buffer's release sync fence 
+  into input_buffer-&gt;release_fence, if input_buffer is not NULL. If the HAL 
+  returns -1 for the input buffer release sync fence, the framework is free to 
+  immediately reuse the input buffer. Otherwise, the framework will wait on the 
+  sync fence before refilling and reusing the input buffer.</p>
+<h4><strong>Return values</strong></h4>
+<ul>
+  <li>0: On a successful start to processing the capture request</li>
+  <li>-EINVAL: If the input is malformed (the settings are NULL when not allowed, 
+    there are 0 output buffers, etc) and capture processing cannot start. Failures 
+    during request processing should be handled by calling 
+    camera3_callback_ops_t.notify(). In case of this error, the framework will 
+    retain responsibility for the stream buffers' fences and the buffer handles; 
+    the HAL should not close the fences or return these buffers with 
+    process_capture_result.</li>
+  <li>-ENODEV: If the camera device has encountered a serious error. After this 
+    error is returned, only the close() method can be successfully called by the 
+    framework.</li>
+</ul>
+<h2 id="misc-methods">Miscellaneous methods</h2>
+<h3 id="get-metadata">get_metadata_vendor_tag_ops</h3>
+<p>Get methods to query for vendor extension metadata tag information. The HAL 
+  should fill in all the vendor tag operation methods, or leave ops unchanged if 
+  no vendor tags are defined. The definition of vendor_tag_query_ops_t can be 
+  found in system/media/camera/include/system/camera_metadata.h.</p>
+<h3 id="dump">dump</h3>
+<p>Print out debugging state for the camera device. This will be called by the 
+  framework when the camera service is asked for a debug dump, which happens when 
+  using the dumpsys tool, or when capturing a bugreport. The passed-in file 
+  descriptor can be used to write debugging text using dprintf() or write(). The 
+  text should be in ASCII encoding only.</p>
+<h3 id="flush">flush</h3>
+<p>Flush all currently in-process captures and all buffers in the pipeline on the 
+  given device. The framework will use this to dump all state as quickly as 
+  possible in order to prepare for a configure_streams() call.<br/>
+  No buffers are required to be successfully returned, so every buffer held at the 
+  time of flush() (whether successfully filled or not) may be returned with 
+  CAMERA3_BUFFER_STATUS_ERROR. Note the HAL is still allowed to return valid 
+  (STATUS_OK) buffers during this call, provided they are successfully filled.<br/>
+  All requests currently in the HAL are expected to be returned as soon as 
+  possible. Not-in-process requests should return errors immediately. Any 
+  interruptible hardware blocks should be stopped, and any uninterruptible blocks 
+  should be waited on.<br/>
+  flush() should only return when there are no more outstanding buffers or 
+  requests left in the HAL. The framework may call configure_streams (as the HAL 
+  state is now quiesced) or may issue new requests.<br/>
+  A flush() call should only take 100ms or less. The maximum time it can take is 1 
+  second.</p>
+<h4><strong>Version information</strong></h4>
+<p>This is available only if device version &gt;= CAMERA_DEVICE_API_VERSION_3_1.</p>
+<h4><strong>Return values</strong></h4>
+<ul>
+  <li>0: On a successful flush of the camera HAL.</li>
+  <li>-EINVAL: If the input is malformed (the device is not valid).</li>
+  <li>-ENODEV: If the camera device has encountered a serious error. After this 
+    error is returned, only the close() method can be successfully called by the 
+    framework.</li>
+</ul>
diff --git a/src/devices/camera/images/camera-hal-overview.png b/src/devices/camera/images/camera-hal-overview.png
new file mode 100644
index 0000000..fed29e7
--- /dev/null
+++ b/src/devices/camera/images/camera-hal-overview.png
Binary files differ
diff --git a/src/devices/camera/images/camera-ops-flow.png b/src/devices/camera/images/camera-ops-flow.png
new file mode 100644
index 0000000..7326782
--- /dev/null
+++ b/src/devices/camera/images/camera-ops-flow.png
Binary files differ
diff --git a/src/devices/images/camera2_block.png b/src/devices/camera/images/camera2_block.png
similarity index 100%
rename from src/devices/images/camera2_block.png
rename to src/devices/camera/images/camera2_block.png
Binary files differ
diff --git a/src/devices/images/camera2_hal.png b/src/devices/camera/images/camera2_hal.png
similarity index 100%
rename from src/devices/images/camera2_hal.png
rename to src/devices/camera/images/camera2_hal.png
Binary files differ
diff --git a/src/devices/images/camera2_block.png b/src/devices/camera/images/camera_block.png
similarity index 100%
copy from src/devices/images/camera2_block.png
copy to src/devices/camera/images/camera_block.png
Binary files differ
diff --git a/src/devices/images/camera2_hal.png b/src/devices/camera/images/camera_hal.png
similarity index 100%
copy from src/devices/images/camera2_hal.png
copy to src/devices/camera/images/camera_hal.png
Binary files differ
diff --git a/src/devices/camera/images/camera_model.png b/src/devices/camera/images/camera_model.png
new file mode 100644
index 0000000..50cbabc
--- /dev/null
+++ b/src/devices/camera/images/camera_model.png
Binary files differ
diff --git a/src/devices/camera/images/camera_simple_model.png b/src/devices/camera/images/camera_simple_model.png
new file mode 100644
index 0000000..fd0fac0
--- /dev/null
+++ b/src/devices/camera/images/camera_simple_model.png
Binary files differ
diff --git a/src/devices/camera/images/crop-region-11-ratio.png b/src/devices/camera/images/crop-region-11-ratio.png
new file mode 100644
index 0000000..8e28230
--- /dev/null
+++ b/src/devices/camera/images/crop-region-11-ratio.png
Binary files differ
diff --git a/src/devices/camera/images/crop-region-169-ratio.png b/src/devices/camera/images/crop-region-169-ratio.png
new file mode 100644
index 0000000..62837e2
--- /dev/null
+++ b/src/devices/camera/images/crop-region-169-ratio.png
Binary files differ
diff --git a/src/devices/camera/images/crop-region-43-ratio.png b/src/devices/camera/images/crop-region-43-ratio.png
new file mode 100644
index 0000000..f48046b
--- /dev/null
+++ b/src/devices/camera/images/crop-region-43-ratio.png
Binary files differ
diff --git a/src/devices/camera/images/crop-region-43-square-ratio.png b/src/devices/camera/images/crop-region-43-square-ratio.png
new file mode 100644
index 0000000..3794dbe
--- /dev/null
+++ b/src/devices/camera/images/crop-region-43-square-ratio.png
Binary files differ
diff --git a/src/devices/camera3.jd b/src/devices/camera3.jd
deleted file mode 100644
index 6ebcfed..0000000
--- a/src/devices/camera3.jd
+++ /dev/null
@@ -1,1570 +0,0 @@
-page.title=Camera Version 3 
-@jd:body
-
-<!--
-    Copyright 2010 The Android Open Source Project
-
-    Licensed under the Apache License, Version 2.0 (the "License");
-    you may not use this file except in compliance with the License.
-    You may obtain a copy of the License at
-
-        http://www.apache.org/licenses/LICENSE-2.0
-
-    Unless required by applicable law or agreed to in writing, software
-    distributed under the License is distributed on an "AS IS" BASIS,
-    WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-    See the License for the specific language governing permissions and
-    limitations under the License.
--->
-<div id="qv-wrapper">
-  <div id="qv">
-    <h2>In this document</h2>
-    <ol id="auto-toc">
-    </ol>
-  </div>
-</div>
-
-<p>Android's camera HAL connects the higher level
-camera framework APIs in <a
-href="http://developer.android.com/reference/android/hardware/package-summary.html">android.hardware</a>
-to your underlying camera driver and hardware. The latest version of Android introduces a new, underlying 
-implementation of the camera stack. If you have previously developed a camera HAL module and driver for
-other versions of Android, be aware that there are significant changes in the camera pipeline.</p>
-
-<p>Version 1 of the camera HAL is still supported for future releases of Android, because many devices
-still rely on it. Implementing both HALs is also supported by
-the Android camera service, which is useful when you want to support a
-less capable front-facing camera with version 1 of HAL and a more advanced
-back-facing camera with the version 3 of HAL. Version 2 was a stepping stone to
-version 3 and is not supported.</p>
-
-<p class="note"><strong>Note:</strong> The new camera HAL is in active development and can change
-  at any time. This document describes at a high level the design of the camera subsystem and
-  omits many details. Stay tuned for more updates to the PDK repository and look out for updates
-  to the HAL and reference implementation of the HAL for more information.
-</p>
-
-
-<h2 id="overview">Overview</h2>
-<p>Version 1 of the camera subsystem was designed as a black box with high-level controls.
-  Roughly speaking, the old subsystem has three operating modes:
-</p>
-
-<ul>
-<li>Preview</li>
-<li>Video Record</li>
-<li>Still Capture</li>
-</ul>
- 
-<p>Each mode has slightly different capabilities and overlapping capabilities.
-This made it hard to implement new types of features, such as burst mode,
-since it would fall between two of these modes.
-</p>
-
-<p>
-Version 3 of the camera subsystem structures the operation modes into a single unified view,
-which can be used to implement any of the previous modes and several others, such as burst mode.
-In simple terms, the app framework requests a frame from the camera subsystem,
-and the camera subsystem returns results to an output stream.
-In addition, metadata that contains information such as
-color spaces and lens shading is generated for each set of results.
-The following sections and diagram give you more detail about each component.</p>
-
- <img src="images/camera2_block.png" />
-
- <p class="img-caption"><strong>Figure 1.</strong> Camera block diagram</p>
- <h3 id="supported-version">Supported version</h3>
- <p>Camera devices that support this version of the HAL must return
-   CAMERA_DEVICE_API_VERSION_3_1 in camera_device_t.common.version and in
-   camera_info_t.device_version (from camera_module_t.get_camera_info).</p>
-<p>Camera modules that may contain version 3.1 devices must implement at least
-   version 2.0 of the camera module interface (as defined by
-   camera_module_t.common.module_api_version).</p>
- <p>See camera_common.h for more versioning details. </p>
- <h3 id="version-history">Version history</h3>
-<h4>1.0</h4>
-<p>Initial Android camera HAL (Android 4.0) [camera.h]: </p>
- <ul>
-   <li> Converted from C++ CameraHardwareInterface abstraction layer.</li>
-   <li> Supports android.hardware.Camera API.</li>
-</ul>
- <h4>2.0</h4>
- <p>Initial release of expanded-capability HAL (Android 4.2) [camera2.h]:</p>
- <ul>
-   <li> Sufficient for implementing existing android.hardware.Camera API.</li>
-   <li> Allows for ZSL queue in camera service layer</li>
-   <li> Not tested for any new features such manual capture control, Bayer RAW
-     capture, reprocessing of RAW data.</li>
- </ul>
- <h4>3.0</h4>
- <p>First revision of expanded-capability HAL:</p>
- <ul>
-   <li> Major version change since the ABI is completely different. No change to
-     the required hardware capabilities or operational model from 2.0.</li>
-   <li> Reworked input request and stream queue interfaces: Framework calls into
-     HAL with next request and stream buffers already dequeued. Sync framework
-     support is included, necessary for efficient implementations.</li>
-   <li> Moved triggers into requests, most notifications into results.</li>
-   <li> Consolidated all callbacks into framework into one structure, and all
-     setup methods into a single initialize() call.</li>
-   <li> Made stream configuration into a single call to simplify stream
-     management. Bidirectional streams replace STREAM_FROM_STREAM construct.</li>
-   <li> Limited mode semantics for older/limited hardware devices.</li>
- </ul>
- <h4>3.1</h4>
- <p>Minor revision of expanded-capability HAL:</p>
- <ul>
-   <li> configure_streams passes consumer usage flags to the HAL.</li>
-   <li> flush call to drop all in-flight requests/buffers as fast as possible.
-   </li>
- </ul>
-<h2 id="requests">Requests</h2>
-<p>
-The app framework issues requests for captured results to the
-camera subsystem. One request corresponds to one set of results. A request encapsulates
-all configuration information about the capturing
-and processing of those results. This includes things such as resolution and pixel format; manual
-sensor, lens, and flash control; 3A operating modes; RAW to YUV processing control; and statistics
-generation. This allows for much more control over the results' output and processing. Multiple
-requests can be in flight at once and submitting requests is non-blocking. And the requests are always 
-processed in the order they are received.
-</p>
-
-
-<h2 id="hal">The HAL and camera subsystem</h2>
-<p>
-The camera subsystem includes the implementations for components in the camera pipeline such as the 3A algorithm and processing controls. The camera HAL
-provides interfaces for you to implement your versions of these components. To maintain cross-platform compatibility between
-multiple device manufacturers and ISP vendors, the camera pipeline model is virtual and does not directly correspond to any real ISP.
-However, it is similar enough to real processing pipelines so that you can map it to your hardware efficiently. 
-In addition, it is abstract enough to allow for multiple different algorithms and orders of operation
-without compromising either quality, efficiency, or cross-device compatibility.<p>
-
-<p>
- The camera pipeline also supports triggers
-that the app framework can initiate to turn on things such as auto-focus. It also sends notifications back
-to the app framework, notifying apps of events such as an auto-focus lock or errors. </p>
-
- <img id="figure2" src="images/camera2_hal.png" /> <p class="img-caption"><strong>Figure 2.</strong> Camera pipeline
-
-<p>
-Please note, some image processing blocks shown in the diagram above are not
-well-defined in the initial release.
-</p>
-
-<p>
-The camera pipeline makes the following assumptions:
-</p>
-
-<ul>
-  <li>RAW Bayer output undergoes no processing inside the ISP.</li>
-  <li>Statistics are generated based off the raw sensor data.</li>
-  <li>The various processing blocks that convert raw sensor data to YUV are in
-an arbitrary order.</li>
-  <li>While multiple scale and crop units are shown, all scaler units share the output region controls (digital zoom).
-    However, each unit may have a different output resolution and pixel format.</li>
-</ul>
-
-<h3 id="startup">Startup and expected operation sequence</h3>
-<p>Please see <a
-href="https://android.googlesource.com/platform/hardware/libhardware/+/master/include/hardware/camera3.h">platform/hardware/libhardware/include/hardware/camera3.h</a> 
-for definitions of these structures and methods.</p>
-<ol>
-  <li>Framework calls camera_module_t-&gt;common.open(), which returns a
-    hardware_device_t structure.</li>
-  <li>Framework inspects the hardware_device_t-&gt;version field, and
-instantiates
-    the appropriate handler for that version of the camera hardware device. In
-    case the version is CAMERA_DEVICE_API_VERSION_3_0, the device is cast to
-    a camera3_device_t.</li>
-  <li>Framework calls camera3_device_t-&gt;ops-&gt;initialize() with the
-framework
-    callback function pointers. This will only be called this one time after
-    open(), before any other functions in the ops structure are called.</li>
-  <li>The framework calls camera3_device_t-&gt;ops-&gt;configure_streams() with
-a list
-    of input/output streams to the HAL device.</li>
-  <li>The framework allocates gralloc buffers and calls
-    camera3_device_t-&gt;ops-&gt;register_stream_buffers() for at least one of
-the
-    output streams listed in configure_streams. The same stream is registered
-    only once.</li>
-  <li>The framework requests default settings for some number of use cases with
-    calls to camera3_device_t-&gt;ops-&gt;construct_default_request_settings().
-This
-    may occur any time after step 3.</li>
-  <li>The framework constructs and sends the first capture request to the HAL
-    with settings based on one of the sets of default settings, and with at
-    least one output stream that has been registered earlier by the
-    framework. This is sent to the HAL with
-    camera3_device_t-&gt;ops-&gt;process_capture_request(). The HAL must block
-the
-    return of this call until it is ready for the next request to be sent.</li>
-  <li>The framework continues to submit requests, and possibly call
-    register_stream_buffers() for not-yet-registered streams, and call
-    construct_default_request_settings to get default settings buffers for
-    other use cases.</li>
-  <li>When the capture of a request begins (sensor starts exposing for the
-    capture), the HAL calls camera3_callback_ops_t-&gt;notify() with the SHUTTER
-    event, including the frame number and the timestamp for start of exposure.
-    This notify call must be made before the first call to
-    process_capture_result() for that frame number.</li>
-  <li>After some pipeline delay, the HAL begins to return completed captures to
-    the framework with camera3_callback_ops_t-&gt;process_capture_result().
-These
-    are returned in the same order as the requests were submitted. Multiple
-    requests can be in flight at once, depending on the pipeline depth of the
-    camera HAL device.</li>
-  <li>After some time, the framework may stop submitting new requests, wait for
-    the existing captures to complete (all buffers filled, all results
-    returned), and then call configure_streams() again. This resets the camera
-    hardware and pipeline for a new set of input/output streams. Some streams
-    may be reused from the previous configuration; if these streams' buffers
-    had already been registered with the HAL, they will not be registered
-    again. The framework then continues from step 7, if at least one
-    registered output stream remains. (Otherwise, step 5 is required
-first.)</li>
-  <li>Alternatively, the framework may call
-camera3_device_t-&gt;common-&gt;close()
-    to end the camera session. This may be called at any time when no other
-    calls from the framework are active, although the call may block until all
-    in-flight captures have completed (all results returned, all buffers
-    filled). After the close call returns, no more calls to the
-    camera3_callback_ops_t functions are allowed from the HAL. Once the
-    close() call is underway, the framework may not call any other HAL device
-    functions.</li>
-  <li>In case of an error or other asynchronous event, the HAL must call
-    camera3_callback_ops_t-&gt;notify() with the appropriate error/event
-    message. After returning from a fatal device-wide error notification, the
-    HAL should act as if close() had been called on it. However, the HAL must
-    either cancel or complete all outstanding captures before calling
-    notify(), so that once notify() is called with a fatal error, the
-    framework will not receive further callbacks from the device. Methods
-    besides close() should return -ENODEV or NULL after the notify() method
-    returns from a fatal error message.
-  </li>
-</ol>
-<h3>Operational modes</h3>
-<p>The camera 3 HAL device can implement one of two possible operational modes:
-  limited and full. Full support is expected from new higher-end
-  devices. Limited mode has hardware requirements roughly in line with those
-  for a camera HAL device v1 implementation, and is expected from older or
-  inexpensive devices. Full is a strict superset of limited, and they share the
-  same essential operational flow, as documented above.</p>
-<p>The HAL must indicate its level of support with the
-  android.info.supportedHardwareLevel static metadata entry, with 0 indicating
-  limited mode, and 1 indicating full mode support.</p>
-<p>Roughly speaking, limited-mode devices do not allow for application control
-  of capture settings (3A control only), high-rate capture of high-resolution
-  images, raw sensor readout, or support for YUV output streams above maximum
-  recording resolution (JPEG only for large images).</p>
-<p>Here are the details of limited-mode behavior:</p>
-<ul>
-  <li>Limited-mode devices do not need to implement accurate synchronization
-    between capture request settings and the actual image data
-    captured. Instead, changes to settings may take effect some time in the
-    future, and possibly not for the same output frame for each settings
-    entry. Rapid changes in settings may result in some settings never being
-    used for a capture. However, captures that include high-resolution output
-    buffers ( &gt; 1080p ) have to use the settings as specified (but see below
-  for processing rate).<br />
-  <br />
-  </li>
-  <li>(TODO: Is this reference properly located? It was after the settings list below.) Captures in limited mode that include high-resolution (&gt; 1080p) output
-  buffers may block in process_capture_request() until all the output buffers
-  have been filled. A full-mode HAL device must process sequences of
-  high-resolution requests at the rate indicated in the static metadata for
-  that pixel format. The HAL must still call process_capture_result() to
-  provide the output; the framework must simply be prepared for
-  process_capture_request() to block until after process_capture_result() for
-  that request completes for high-resolution captures for limited-mode
-  devices.<br />
-    <br />
-  </li>
-  <li>Limited-mode devices do not need to support most of the
-    settings/result/static info metadata. Full-mode devices must support all
-    metadata fields listed in TODO. Specifically, only the following settings
-    are expected to be consumed or produced by a limited-mode HAL device:
-    <blockquote>
-      <p> android.control.aeAntibandingMode (controls)<br />
-android.control.aeExposureCompensation (controls)<br />
-android.control.aeLock (controls)<br />
-android.control.aeMode (controls)<br />
-&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;[OFF means ON_FLASH_TORCH - TODO]<br />
-android.control.aeRegions (controls)<br />
-android.control.aeTargetFpsRange (controls)<br />
-android.control.afMode (controls)<br />
-&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;[OFF means infinity focus]<br />
-android.control.afRegions (controls)<br />
-android.control.awbLock (controls)<br />
-android.control.awbMode (controls)<br />
-&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;[OFF not supported]<br />
-android.control.awbRegions (controls)<br />
-android.control.captureIntent (controls)<br />
-android.control.effectMode (controls)<br />
-android.control.mode (controls)<br />
-&nbsp;&nbsp;&nbsp;&nbsp;&nbsp;[OFF not supported]<br />
-android.control.sceneMode (controls)<br />
-android.control.videoStabilizationMode (controls)<br />
-android.control.aeAvailableAntibandingModes (static)<br />
-android.control.aeAvailableModes (static)<br />
-android.control.aeAvailableTargetFpsRanges (static)<br />
-android.control.aeCompensationRange (static)<br />
-android.control.aeCompensationStep (static)<br />
-android.control.afAvailableModes (static)<br />
-android.control.availableEffects (static)<br />
-android.control.availableSceneModes (static)<br />
-android.control.availableVideoStabilizationModes (static)<br />
-android.control.awbAvailableModes (static)<br />
-android.control.maxRegions (static)<br />
-android.control.sceneModeOverrides (static)<br />
-android.control.aeRegions (dynamic)<br />
-android.control.aeState (dynamic)<br />
-android.control.afMode (dynamic)<br />
-android.control.afRegions (dynamic)<br />
-android.control.afState (dynamic)<br />
-android.control.awbMode (dynamic)<br />
-android.control.awbRegions (dynamic)<br />
-android.control.awbState (dynamic)<br />
-android.control.mode (dynamic)</p>
-      <p> android.flash.info.available (static)</p>
-      <p> android.info.supportedHardwareLevel (static)</p>
-      <p> android.jpeg.gpsCoordinates (controls)<br />
-        android.jpeg.gpsProcessingMethod (controls)<br />
-        android.jpeg.gpsTimestamp (controls)<br />
-        android.jpeg.orientation (controls)<br />
-        android.jpeg.quality (controls)<br />
-        android.jpeg.thumbnailQuality (controls)<br />
-        android.jpeg.thumbnailSize (controls)<br />
-        android.jpeg.availableThumbnailSizes (static)<br />
-        android.jpeg.maxSize (static)<br />
-        android.jpeg.gpsCoordinates (dynamic)<br />
-        android.jpeg.gpsProcessingMethod (dynamic)<br />
-        android.jpeg.gpsTimestamp (dynamic)<br />
-        android.jpeg.orientation (dynamic)<br />
-        android.jpeg.quality (dynamic)<br />
-        android.jpeg.size (dynamic)<br />
-        android.jpeg.thumbnailQuality (dynamic)<br />
-        android.jpeg.thumbnailSize (dynamic)</p>
-      <p> android.lens.info.minimumFocusDistance (static)</p>
-      <p> android.request.id (controls)<br />
-        android.request.id (dynamic)</p>
-      <p> android.scaler.cropRegion (controls)<br />
-        &nbsp;&nbsp;&nbsp;&nbsp;&nbsp;[ignores (x,y), assumes center-zoom]<br />
-        android.scaler.availableFormats (static)<br />
-        &nbsp;&nbsp;&nbsp;&nbsp;&nbsp;[RAW not supported]<br />
-        android.scaler.availableJpegMinDurations (static)<br />
-        android.scaler.availableJpegSizes (static)<br />
-        android.scaler.availableMaxDigitalZoom (static)<br />
-        android.scaler.availableProcessedMinDurations (static)<br />
-        android.scaler.availableProcessedSizes (static)<br />
-        &nbsp;&nbsp;&nbsp;&nbsp;&nbsp;[full resolution not supported]<br />
-        android.scaler.maxDigitalZoom (static)<br />
-        android.scaler.cropRegion (dynamic)</p>
-      <p> android.sensor.orientation (static)<br />
-        android.sensor.timestamp (dynamic)</p>
-      <p> android.statistics.faceDetectMode (controls)<br />
-        android.statistics.info.availableFaceDetectModes (static)<br />
-        android.statistics.faceDetectMode (dynamic)<br />
-        android.statistics.faceIds (dynamic)<br />
-        android.statistics.faceLandmarks (dynamic)<br />
-        android.statistics.faceRectangles (dynamic)<br />
-        android.statistics.faceScores (dynamic)</p>
-    </blockquote>
-  </li>
-</ul>
-<h3 id="interaction">Interaction between the application capture request, 3A control, and the
-processing pipeline</h3>
-
-<p>
-Depending on the settings in the 3A control block, the camera pipeline ignores some of the parameters in the application’s capture request
-and uses the values provided by the 3A control routines instead. For example, when auto-exposure is active, the exposure time,
-frame duration, and sensitivity parameters of the sensor are controlled by the platform 3A algorithm,
-and any app-specified values are ignored. The values chosen for the frame by the 3A routines must be
-reported in the output metadata. The following table describes the different modes of the 3A control block
-and the properties that are controlled by these modes. See the
-platform/system/media/camera/docs/docs.html file for definitions of these
-properties. 
-</p>
-
-
-<table>
-  <tr>
-    <th>Parameter</th>
-    <th>State</th>
-    <th>Properties controlled</th>
-  </tr>
-
-  <tr>
-    <td rowspan="5">android.control.aeMode</td>
-    <td>OFF</td>
-    <td>None</td>
-  </tr>
-  <tr>
-    <td>ON</td>
-    <td>
-      <ul>
-        <li>android.sensor.exposureTime</li>
-        <li>android.sensor.frameDuration</li>
-        <li>android.sensor.sensitivity</li>
-        <li>android.lens.aperture (if supported)</li>
-        <li>android.lens.filterDensity (if supported)</li>
-      </ul>
-    </td>
-  </tr>
-  <tr>
-    <td>ON_AUTO_FLASH</td>
-    <td>Everything is ON, plus android.flash.firingPower,  android.flash.firingTime, and android.flash.mode</td>
-  </tr>
-
-  <tr>
-    <td>ON_ALWAYS_FLASH</td>
-    <td>Same as ON_AUTO_FLASH</td>
-  </tr>
-
-  <tr>
-    <td>ON_AUTO_FLASH_RED_EYE</td>
-    <td>Same as ON_AUTO_FLASH</td>
-  </tr>
-
-  <tr>
-    <td rowspan="2">android.control.awbMode</td>
-    <td>OFF</td>
-    <td>None</td>
-  </tr>
-
- <tr>
-    <td>WHITE_BALANCE_*</td>
-    <td>android.colorCorrection.transform. Platform-specific adjustments if android.colorCorrection.mode is FAST or HIGH_QUALITY.</td>
-  </tr>
-
-  <tr>
-    <td rowspan="2">android.control.afMode</td>
-    <td>OFF</td>
-    <td>None</td>
-  </tr>
-
-  <tr>
-    <td>FOCUS_MODE_*</td>
-    <td>android.lens.focusDistance</td>
-  </tr>
-
-  <tr>
-    <td rowspan="2">android.control.videoStabilization</td>
-    <td>OFF</td>
-    <td>None</td>
-  </tr>
-
-  <tr>
-    <td>ON</td>
-    <td>Can adjust android.scaler.cropRegion to implement video stabilization</td>
-  </tr>
-
-  <tr>
-    <td rowspan="3">android.control.mode</td>
-    <td>OFF</td>
-    <td>AE, AWB, and AF are disabled</td>
-  </tr>
-
-  <tr>
-    <td>AUTO</td>
-    <td>Individual AE, AWB, and AF settings are used</td>
-  </tr>
-
-  <tr>
-    <td>SCENE_MODE_*</td>
-    <td>Can override all parameters listed above. Individual 3A controls are disabled.</td>
-  </tr>
-
-</table>
-
-<p>The controls exposed for the 3A algorithm mostly map 1:1 to the old API’s parameters
-  (such as exposure compensation, scene mode, or white balance mode).
-</p>
-
-
-<p>
-The controls in the Image Processing block in <a href="#figure2">Figure 2</a> all operate on a similar principle, and generally each block has three modes:
-</p>
-
-<ul>
-  <li>
-    OFF: This processing block is disabled. The demosaic, color correction, and tone curve adjustment blocks cannot be disabled.
-  </li>
-  <li>
-    FAST: In this mode, the processing block may not slow down the output frame rate compared to OFF mode, but should otherwise produce the best-quality output it can given that restriction. Typically, this would be used for preview or video recording modes, or burst capture for still images. On some devices, this may be equivalent to OFF mode (no processing can be done without slowing down the frame rate), and on some devices, this may be equivalent to HIGH_QUALITY mode (best quality still does not slow down frame rate).
-  </li>
-  <li>
-    HIGH_QUALITY: In this mode, the processing block should produce the best quality result possible, slowing down the output frame rate as needed. Typically, this would be used for high-quality still capture. Some blocks include a manual control which can be optionally selected instead of FAST or HIGH_QUALITY. For example, the color correction block supports a color transform matrix, while the tone curve adjustment supports an arbitrary global tone mapping curve.
-  </li>
-</ul>
-
-<p>See the <a href="">Android Camera Processing Pipeline Properties</a> spreadsheet for more information on all available properties.</p>
-
-<h2 id="metadata">Metadata support</h2>
-
-<p>To support the saving of DNG files by the Android framework, substantial metadata is required about the sensor’s characteristics. This includes information such as color spaces and lens shading functions.</p>
-<p>
-Most of this information is a static property of the camera subsystem, and can therefore be queried before configuring any output pipelines or submitting any requests. The new camera APIs greatly expand the information provided by the <code>getCameraInfo()</code> method to provide this information to the application.
-</p>
-<p>
-In addition, manual control of the camera subsystem requires feedback from the
-assorted devices about their current state, and the actual parameters used in
-capturing a given frame. If an application needs to implement a custom 3A
-routine (for example, to properly meter for an HDR burst), it needs to know the settings used to capture the latest set of results it has received in order to update the settings for the next request. Therefore, the new camera API adds a substantial amount of dynamic metadata to each captured frame. This includes the requested and actual parameters used for the capture, as well as additional per-frame metadata such as timestamps and statistics generator output.
-</p>
-
-<h2 id="3amodes">3A modes and state machines</h2>
-<p>While the actual 3A algorithms are up to the HAL implementation, a high-level
-  state machine description is defined by the HAL interface to allow the HAL
-  device and the framework to communicate about the current state of 3A and
-trigger 3A events.</p>
-<p>When the device is opened, all the individual 3A states must be
-  STATE_INACTIVE. Stream configuration does not reset 3A. For example, locked
-  focus must be maintained across the configure() call.</p>
-<p>Triggering a 3A action involves simply setting the relevant trigger entry in
-  the settings for the next request to indicate start of trigger. For example,
-  the trigger for starting an autofocus scan is setting the entry
-  ANDROID_CONTROL_AF_TRIGGER to ANDROID_CONTROL_AF_TRIGGER_START for one
-  request; and cancelling an autofocus scan is triggered by setting
-  ANDROID_CONTROL_AF_TRIGGER to ANDROID_CONTROL_AF_TRIGGER_CANCEL. Otherwise,
-  the entry will not exist or be set to ANDROID_CONTROL_AF_TRIGGER_IDLE. Each
-  request with a trigger entry set to a non-IDLE value will be treated as an
-  independent triggering event.</p>
-<p>At the top level, 3A is controlled by the ANDROID_CONTROL_MODE setting. It
-  selects between no 3A (ANDROID_CONTROL_MODE_OFF), normal AUTO mode
-  (ANDROID_CONTROL_MODE_AUTO), and using the scene mode setting
-  (ANDROID_CONTROL_MODE_USE_SCENE_MODE):</p>
-<ul>
-  <li>In OFF mode, each of the individual auto-focus (AF), auto-exposure (AE),
-and auto-whitebalance (AWB) modes are effectively OFF,
-    and none of the capture controls may be overridden by the 3A routines.</li>
-  <li>In AUTO mode, AF, AE, and AWB modes all run
-    their own independent algorithms, and have their own mode, state, and
-    trigger metadata entries, as listed in the next section.</li>
-  <li>In USE_SCENE_MODE, the value of the ANDROID_CONTROL_SCENE_MODE entry must
-    be used to determine the behavior of 3A routines. In SCENE_MODEs other than
-    FACE_PRIORITY, the HAL must override the values of
-    ANDROID_CONTROL_AE/AWB/AF_MODE to be the mode it prefers for the selected
-    SCENE_MODE. For example, the HAL may prefer SCENE_MODE_NIGHT to use
-    CONTINUOUS_FOCUS AF mode. Any user selection of AE/AWB/AF_MODE must be
-    ignored for these scene modes.</li>
-  <li>For SCENE_MODE_FACE_PRIORITY, the AE/AWB/AF_MODE controls work as in
-    ANDROID_CONTROL_MODE_AUTO, but the 3A routines must bias toward metering
-    and focusing on any detected faces in the scene.
-  </li>
-</ul>
-
-<h3 id="autofocus">Auto-focus settings and result entries</h3>
-<p>Main metadata entries:</p>
-<p>ANDROID_CONTROL_AF_MODE: Control for selecting the current autofocus
-mode. Set by the framework in the request settings.</p>
-<p>AF_MODE_OFF: AF is disabled; the framework/app directly controls lens
-position.</p>
-<p>AF_MODE_AUTO: Single-sweep autofocus. No lens movement unless AF is
-triggered.</p>
-<p>AF_MODE_MACRO: Single-sweep up-close autofocus. No lens movement unless
-AF is triggered.</p>
-<p>AF_MODE_CONTINUOUS_VIDEO: Smooth continuous focusing, for recording
-  video. Triggering immediately locks focus in current
-position. Canceling resumes continuous focusing.</p>
-<p>AF_MODE_CONTINUOUS_PICTURE: Fast continuous focusing, for
-  zero-shutter-lag still capture. Triggering locks focus once currently
-active sweep concludes. Canceling resumes continuous focusing.</p>
-<p>AF_MODE_EDOF: Advanced extended depth of field focusing. There is no
-  autofocus scan, so triggering one or canceling one has no effect.
-Images are focused automatically by the HAL.</p>
-<p>ANDROID_CONTROL_AF_STATE: Dynamic metadata describing the current AF
-algorithm state, reported by the HAL in the result metadata.</p>
-<p>AF_STATE_INACTIVE: No focusing has been done, or algorithm was
-  reset. Lens is not moving. Always the state for MODE_OFF or MODE_EDOF.
-When the device is opened, it must start in this state.</p>
-<p>AF_STATE_PASSIVE_SCAN: A continuous focus algorithm is currently scanning
-for good focus. The lens is moving.</p>
-<p>AF_STATE_PASSIVE_FOCUSED: A continuous focus algorithm believes it is
-  well focused. The lens is not moving. The HAL may spontaneously leave
-this state.</p>
-<p>AF_STATE_ACTIVE_SCAN: A scan triggered by the user is underway.</p>
-<p>AF_STATE_FOCUSED_LOCKED: The AF algorithm believes it is focused. The
-lens is not moving.</p>
-<p>AF_STATE_NOT_FOCUSED_LOCKED: The AF algorithm has been unable to
-focus. The lens is not moving.</p>
-<p>ANDROID_CONTROL_AF_TRIGGER: Control for starting an autofocus scan, the
-  meaning of which depends on mode and state. Set by the framework in
-the request settings.</p>
-<p>AF_TRIGGER_IDLE: No current trigger.</p>
-<p>AF_TRIGGER_START: Trigger start of AF scan. Effect depends on mode and
-state.</p>
-<p>AF_TRIGGER_CANCEL: Cancel current AF scan if any, and reset algorithm to
-default.</p>
-<p>Additional metadata entries:</p>
-<p>ANDROID_CONTROL_AF_REGIONS: Control for selecting the regions of the field of
-view (FOV)
-  that should be used to determine good focus. This applies to all AF
-  modes that scan for focus. Set by the framework in the request
-settings.</p>
-
-<h3 id="autoexpose">Auto-exposure settings and result entries</h3>
-<p>Main metadata entries:</p>
-<p>ANDROID_CONTROL_AE_MODE: Control for selecting the current auto-exposure
-mode. Set by the framework in the request settings.</p>
-<p>
-  AE_MODE_OFF: Autoexposure is disabled; the user controls exposure, gain,
-  frame duration, and flash.
-</p>
-<p>AE_MODE_ON: Standard autoexposure, with flash control disabled. User may
-  set flash to fire or to torch mode.
-</p>
-<p>AE_MODE_ON_AUTO_FLASH: Standard autoexposure, with flash on at HAL's
-  discretion for precapture and still capture. User control of flash
-  disabled.
-</p>
-<p>AE_MODE_ON_ALWAYS_FLASH: Standard autoexposure, with flash always fired
-  for capture, and at HAL's discretion for precapture. User control of
-  flash disabled.
-</p>
-<p>AE_MODE_ON_AUTO_FLASH_REDEYE: Standard autoexposure, with flash on at
-  HAL's discretion for precapture and still capture. Use a flash burst
-  at end of precapture sequence to reduce redeye in the final
-  picture. User control of flash disabled.
-</p>
-<p>ANDROID_CONTROL_AE_STATE: Dynamic metadata describing the current AE
-  algorithm state, reported by the HAL in the result metadata.
-</p>
-<p>AE_STATE_INACTIVE: Initial AE state after mode switch. When the device is
-  opened, it must start in this state.
-</p>
-<p>AE_STATE_SEARCHING: AE is not converged to a good value and is adjusting
-  exposure parameters.
-</p>
-<p>AE_STATE_CONVERGED: AE has found good exposure values for the current
-  scene, and the exposure parameters are not changing. HAL may
-  spontaneously leave this state to search for a better solution.
-</p>
-<p>AE_STATE_LOCKED: AE has been locked with the AE_LOCK control. Exposure
-  values are not changing.
-</p>
-<p>AE_STATE_FLASH_REQUIRED: The HAL has converged exposure but believes
-  flash is required for a sufficiently bright picture. Used for
-  determining if a zero-shutter-lag frame can be used.
-</p>
-<p>AE_STATE_PRECAPTURE: The HAL is in the middle of a precapture
-  sequence. Depending on AE mode, this mode may involve firing the
-  flash for metering or a burst of flash pulses for redeye reduction.
-</p>
-<p>ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER: Control for starting a metering
-  sequence before capturing a high-quality image. Set by the framework in
-  the request settings.
-</p>
-<p>PRECAPTURE_TRIGGER_IDLE: No current trigger.
-</p>
-<p>PRECAPTURE_TRIGGER_START: Start a precapture sequence. The HAL should
-  use the subsequent requests to measure good exposure/white balance
-  for an upcoming high-resolution capture.
-</p>
-<p>Additional metadata entries:
-</p>
-<p>ANDROID_CONTROL_AE_LOCK: Control for locking AE controls to their current
-  values.</p>
-<p>ANDROID_CONTROL_AE_EXPOSURE_COMPENSATION: Control for adjusting AE
-  algorithm target brightness point.</p>
-<p>ANDROID_CONTROL_AE_TARGET_FPS_RANGE: Control for selecting the target frame
-  rate range for the AE algorithm. The AE routine cannot change the frame
-  rate to be outside these bounds.</p>
-<p>ANDROID_CONTROL_AE_REGIONS: Control for selecting the regions of the FOV
-  that should be used to determine good exposure levels. This applies to
-  all AE modes besides OFF.
-</p>
-
-<h3 id="autowb">Auto-whitebalance settings and result entries</h3>
-<p>Main metadata entries:</p>
-<p>ANDROID_CONTROL_AWB_MODE: Control for selecting the current white-balance
-  mode.
-</p>
-<p>AWB_MODE_OFF: Auto-whitebalance is disabled. User controls color matrix.
-</p>
-<p>AWB_MODE_AUTO: Automatic white balance is enabled; 3A controls color
-  transform, possibly using more complex transforms than a simple
-  matrix.
-</p>
-<p>AWB_MODE_INCANDESCENT: Fixed white balance settings good for indoor
-  incandescent (tungsten) lighting, roughly 2700K.
-</p>
-<p>AWB_MODE_FLUORESCENT: Fixed white balance settings good for fluorescent
-  lighting, roughly 5000K.
-</p>
-<p>AWB_MODE_WARM_FLUORESCENT: Fixed white balance settings good for
-  fluorescent lighting, roughly 3000K.
-</p>
-<p>AWB_MODE_DAYLIGHT: Fixed white balance settings good for daylight,
-  roughly 5500K.
-</p>
-<p>AWB_MODE_CLOUDY_DAYLIGHT: Fixed white balance settings good for clouded
-  daylight, roughly 6500K.
-</p>
-<p>AWB_MODE_TWILIGHT: Fixed white balance settings good for
-  near-sunset/sunrise, roughly 15000K.
-</p>
-<p>AWB_MODE_SHADE: Fixed white balance settings good for areas indirectly
-  lit by the sun, roughly 7500K.
-</p>
-<p>ANDROID_CONTROL_AWB_STATE: Dynamic metadata describing the current AWB
-  algorithm state, reported by the HAL in the result metadata.
-</p>
-<p>AWB_STATE_INACTIVE: Initial AWB state after mode switch. When the device
-  is opened, it must start in this state.
-</p>
-<p>AWB_STATE_SEARCHING: AWB is not converged to a good value and is
-  changing color adjustment parameters.
-</p>
-<p>AWB_STATE_CONVERGED: AWB has found good color adjustment values for the
-  current scene, and the parameters are not changing. HAL may
-  spontaneously leave this state to search for a better solution.
-</p>
-<p>AWB_STATE_LOCKED: AWB has been locked with the AWB_LOCK control. Color
-  adjustment values are not changing.
-</p>
-<p>Additional metadata entries:
-</p>
-<p>ANDROID_CONTROL_AWB_LOCK: Control for locking AWB color adjustments to
-  their current values.
-</p>
-<p>ANDROID_CONTROL_AWB_REGIONS: Control for selecting the regions of the FOV
-  that should be used to determine good color balance. This applies only
-  to auto-whitebalance mode.
-</p>
-
-<h3 id="genstate">General state machine transition notes
-</h3>
-<p>Switching between AF, AE, or AWB modes always resets the algorithm's state
-  to INACTIVE. Similarly, changing CONTROL_MODE, or changing
-  CONTROL_SCENE_MODE while CONTROL_MODE == USE_SCENE_MODE, resets all the
-  algorithm states to INACTIVE.
-</p>
-<p>The tables below are per-mode.
-</p>
-
-<h3 id="af-state">AF state machines</h3>
-<table width="100%" border="1">
-  <tr>
-    <td colspan="4" scope="col"><h4>mode = AF_MODE_OFF or AF_MODE_EDOF</h4></td>
-  </tr>
-  <tr>
-    <th scope="col">State</th>
-    <th scope="col">Transformation cause</th>
-    <th scope="col">New state</th>
-    <th scope="col">Notes</th>
-  </tr>
-  <tr>
-    <td>INACTIVE</td>
-    <td>&nbsp;</td>
-    <td>&nbsp;</td>
-    <td>AF is disabled</td>
-  </tr>
-  <tr>
-    <td colspan="4"><h4>mode = AF_MODE_AUTO or AF_MODE_MACRO</h4></td>
-  </tr>
-  <tr>
-    <th scope="col">State</th>
-    <th scope="col">Transformation cause</th>
-    <th scope="col">New state</th>
-    <th scope="col">Notes</th>
-  </tr>
-  <tr>
-    <td>INACTIVE</td>
-    <td>AF_TRIGGER</td>
-    <td>ACTIVE_SCAN</td>
-    <td>Start AF sweep<br />
-  Lens now moving</td>
-  </tr>
-  <tr>
-    <td>ACTIVE_SCAN</td>
-    <td>AF sweep done</td>
-    <td>FOCUSED_LOCKED</td>
-    <td>If AF successful<br />
-  Lens now locked </td>
-  </tr>
-  <tr>
-    <td>ACTIVE_SCAN</td>
-    <td>AF sweep done</td>
-    <td>NOT_FOCUSED_LOCKED</td>
-    <td>If AF unsuccessful<br />
-Lens now locked </td>
-  </tr>
-  <tr>
-    <td>ACTIVE_SCAN</td>
-    <td>AF_CANCEL</td>
-    <td>INACTIVE</td>
-    <td>Cancel/reset AF<br />
-  Lens now locked</td>
-  </tr>
-  <tr>
-    <td>FOCUSED_LOCKED</td>
-    <td>AF_CANCEL</td>
-    <td>INACTIVE</td>
-    <td>Cancel/reset AF</td>
-  </tr>
-  <tr>
-    <td>FOCUSED_LOCKED</td>
-    <td>AF_TRIGGER</td>
-    <td>ACTIVE_SCAN </td>
-    <td>Start new sweep<br />
-  Lens now moving</td>
-  </tr>
-  <tr>
-    <td>NOT_FOCUSED_LOCKED</td>
-    <td>AF_CANCEL</td>
-    <td>INACTIVE</td>
-    <td>Cancel/reset AF</td>
-  </tr>
-  <tr>
-    <td>NOT_FOCUSED_LOCKED</td>
-    <td>AF_TRIGGER</td>
-    <td>ACTIVE_SCAN</td>
-    <td>Start new sweep<br />
-Lens now moving</td>
-  </tr>
-  <tr>
-    <td>All states</td>
-    <td>mode change </td>
-    <td>INACTIVE</td>
-    <td>&nbsp;</td>
-  </tr>
-  <tr>
-    <td colspan="4"><h4>mode = AF_MODE_CONTINUOUS_VIDEO</h4></td>
-  </tr>
-  <tr>
-    <th scope="col">State</th>
-    <th scope="col">Transformation cause</th>
-    <th scope="col">New state</th>
-    <th scope="col">Notes</th>
-  </tr>
-  <tr>
-    <td>INACTIVE</td>
-    <td>HAL initiates new scan</td>
-    <td>PASSIVE_SCAN</td>
-    <td>Start AF sweep<br />
-Lens now moving</td>
-  </tr>
-  <tr>
-    <td>INACTIVE</td>
-    <td>AF_TRIGGER</td>
-    <td>NOT_FOCUSED_LOCKED</td>
-    <td>AF state query <br />
-    Lens now locked</td>
-  </tr>
-  <tr>
-    <td>PASSIVE_SCAN</td>
-    <td>HAL completes current scan</td>
-    <td>PASSIVE_FOCUSED</td>
-    <td>End AF scan<br />
-    Lens now locked <br /></td>
-  </tr>
-  <tr>
-    <td>PASSIVE_SCAN</td>
-    <td>AF_TRIGGER</td>
-    <td>FOCUSED_LOCKED</td>
-    <td>Immediate transformation<br />
-      if focus is good<br />
-Lens now locked</td>
-  </tr>
-  <tr>
-    <td>PASSIVE_SCAN</td>
-    <td>AF_TRIGGER</td>
-    <td>NOT_FOCUSED_LOCKED</td>
-    <td>Immediate transformation<br />
-if focus is bad<br />
-Lens now locked</td>
-  </tr>
-  <tr>
-    <td>PASSIVE_SCAN</td>
-    <td>AF_CANCEL</td>
-    <td>INACTIVE</td>
-    <td>Reset lens position<br />
-    Lens now locked</td>
-  </tr>
-  <tr>
-    <td>PASSIVE_FOCUSED</td>
-    <td>HAL initiates new scan</td>
-    <td>PASSIVE_SCAN</td>
-    <td>Start AF scan<br />
-      Lens now moving</td>
-  </tr>
-  <tr>
-    <td>PASSIVE_FOCUSED</td>
-    <td>AF_TRIGGER</td>
-    <td>FOCUSED_LOCKED</td>
-    <td>Immediate transformation<br />
-if focus is good<br />
-Lens now locked</td>
-  </tr>
-  <tr>
-    <td>PASSIVE_FOCUSED</td>
-    <td>AF_TRIGGER</td>
-    <td>NOT_FOCUSED_LOCKED</td>
-    <td>Immediate transformation<br />
-if focus is bad<br />
-Lens now locked</td>
-  </tr>
-  <tr>
-    <td>FOCUSED_LOCKED</td>
-    <td>AF_TRIGGER</td>
-    <td>FOCUSED_LOCKED</td>
-    <td>No effect</td>
-  </tr>
-  <tr>
-    <td>FOCUSED_LOCKED</td>
-    <td>AF_CANCEL</td>
-    <td>INACTIVE</td>
-    <td>Restart AF scan</td>
-  </tr>
-  <tr>
-    <td>NOT_FOCUSED_LOCKED</td>
-    <td>AF_TRIGGER</td>
-    <td>NOT_FOCUSED_LOCKED</td>
-    <td>No effect</td>
-  </tr>
-  <tr>
-    <td>NOT_FOCUSED_LOCKED</td>
-    <td>AF_CANCEL</td>
-    <td>INACTIVE</td>
-    <td>Restart AF scan</td>
-  </tr>
-  <tr>
-    <td colspan="4"><h4>mode = AF_MODE_CONTINUOUS_PICTURE</h4></td>
-  </tr>
-  <tr>
-    <th scope="col">State</th>
-    <th scope="col">Transformation cause</th>
-    <th scope="col">New state</th>
-    <th scope="col">Notes</th>
-  </tr>
-  <tr>
-    <td>INACTIVE</td>
-    <td>HAL initiates new scan</td>
-    <td>PASSIVE_SCAN</td>
-    <td>Start AF scan<br />
-      Lens now moving</td>
-  </tr>
-  <tr>
-    <td>INACTIVE</td>
-    <td>AF_TRIGGER</td>
-    <td>NOT_FOCUSED_LOCKED</td>
-    <td>AF state query<br />
-    Lens now locked</td>
-  </tr>
-  <tr>
-    <td>PASSIVE_SCAN</td>
-    <td>HAL completes current scan</td>
-    <td>PASSIVE_FOCUSED</td>
-    <td>End AF scan<br />
-      Lens now locked</td>
-  </tr>
-  <tr>
-    <td>PASSIVE_SCAN</td>
-    <td>AF_TRIGGER</td>
-    <td>FOCUSED_LOCKED</td>
-    <td>Eventual transformation once focus good<br />
-    Lens now locked</td>
-  </tr>
-  <tr>
-    <td>PASSIVE_SCAN</td>
-    <td>AF_TRIGGER</td>
-    <td>NOT_FOCUSED_LOCKED</td>
-    <td>Eventual transformation if cannot focus<br />
-Lens now locked</td>
-  </tr>
-  <tr>
-    <td>PASSIVE_SCAN</td>
-    <td>AF_CANCEL</td>
-    <td>INACTIVE</td>
-    <td>Reset lens position<br />
-      Lens now locked</td>
-  </tr>
-  <tr>
-    <td>PASSIVE_FOCUSED</td>
-    <td>HAL initiates new scan</td>
-    <td>PASSIVE_SCAN</td>
-    <td>Start AF scan<br />
-Lens now moving</td>
-  </tr>
-  <tr>
-    <td>PASSIVE_FOCUSED</td>
-    <td>AF_TRIGGER</td>
-    <td>FOCUSED_LOCKED</td>
-    <td>Immediate transformation if focus is good<br />
-Lens now locked</td>
-  </tr>
-  <tr>
-    <td>PASSIVE_FOCUSED</td>
-    <td>AF_TRIGGER</td>
-    <td>NOT_FOCUSED_LOCKED</td>
-    <td>Immediate transformation if focus is bad<br />
-Lens now locked</td>
-  </tr>
-  <tr>
-    <td>FOCUSED_LOCKED</td>
-    <td>AF_TRIGGER</td>
-    <td>FOCUSED_LOCKED</td>
-    <td>No effect</td>
-  </tr>
-  <tr>
-    <td>FOCUSED_LOCKED</td>
-    <td>AF_CANCEL</td>
-    <td>INACTIVE</td>
-    <td>Restart AF scan</td>
-  </tr>
-  <tr>
-    <td>NOT_FOCUSED_LOCKED</td>
-    <td>AF_TRIGGER</td>
-    <td>NOT_FOCUSED_LOCKED</td>
-    <td>No effect</td>
-  </tr>
-  <tr>
-    <td>NOT_FOCUSED_LOCKED</td>
-    <td>AF_CANCEL</td>
-    <td>INACTIVE</td>
-    <td>Restart AF scan</td>
-  </tr>
-</table>
-<h3 id="aeawb-state">AE and AWB state machines</h3>
-<p>The AE and AWB state machines are mostly identical. AE has additional
-FLASH_REQUIRED and PRECAPTURE states. So rows below that refer to those two
-states should be ignored for the AWB state machine.</p>
-<table width="100%" border="1">
-  <tr>
-    <td colspan="4" scope="col"><h4>mode = AE_MODE_OFF / AWB mode not
-AUTO</h4></td>
-  </tr>
-  <tr>
-    <th scope="col">State</th>
-    <th scope="col">Transformation cause</th>
-    <th scope="col">New state</th>
-    <th scope="col">Notes</th>
-  </tr>
-  <tr>
-    <td>INACTIVE</td>
-    <td>&nbsp;</td>
-    <td>&nbsp;</td>
-    <td>AE/AWB disabled</td>
-  </tr>
-  <tr>
-    <td colspan="4"><h4>mode = AE_MODE_ON_* / AWB_MODE_AUTO</h4></td>
-  </tr>
-  <tr>
-    <th scope="col">State</th>
-    <th scope="col">Transformation cause</th>
-    <th scope="col">New state</th>
-    <th scope="col">Notes</th>
-  </tr>
-  <tr>
-    <td>INACTIVE</td>
-    <td>HAL initiates AE/AWB scan</td>
-    <td>SEARCHING</td>
-    <td>&nbsp;</td>
-  </tr>
-  <tr>
-    <td>INACTIVE</td>
-    <td>AE/AWB_LOCK on</td>
-    <td>LOCKED</td>
-    <td>Values locked</td>
-  </tr>
-  <tr>
-    <td>SEARCHING</td>
-    <td>HAL finishes AE/AWB scan</td>
-    <td>CONVERGED</td>
-    <td>Good values, not changing</td>
-  </tr>
-  <tr>
-    <td>SEARCHING</td>
-    <td>HAL finishes AE scan</td>
-    <td>FLASH_REQUIRED</td>
-    <td>Converged but too dark without flash</td>
-  </tr>
-  <tr>
-    <td>SEARCHING</td>
-    <td>AE/AWB_LOCK on</td>
-    <td>LOCKED</td>
-    <td>Values locked</td>
-  </tr>
-  <tr>
-    <td>CONVERGED</td>
-    <td>HAL initiates AE/AWB scan</td>
-    <td>SEARCHING</td>
-    <td>Values locked</td>
-  </tr>
-  <tr>
-    <td>CONVERGED</td>
-    <td>AE/AWB_LOCK on</td>
-    <td>LOCKED</td>
-    <td>Values locked</td>
-  </tr>
-  <tr>
-    <td>FLASH_REQUIRED</td>
-    <td>HAL initiates AE/AWB scan</td>
-    <td>SEARCHING</td>
-    <td>Values locked</td>
-  </tr>
-  <tr>
-    <td>FLASH_REQUIRED</td>
-    <td>AE/AWB_LOCK on</td>
-    <td>LOCKED</td>
-    <td>Values locked</td>
-  </tr>
-  <tr>
-    <td>LOCKED</td>
-    <td>AE/AWB_LOCK off</td>
-    <td>SEARCHING</td>
-    <td>Values not good after unlock</td>
-  </tr>
-  <tr>
-    <td>LOCKED</td>
-    <td>AE/AWB_LOCK off</td>
-    <td>CONVERGED</td>
-    <td>Values  good after unlock</td>
-  </tr>
-  <tr>
-    <td>LOCKED</td>
-    <td>AE_LOCK off</td>
-    <td>FLASH_REQUIRED</td>
-    <td>Exposure good, but too dark</td>
-  </tr>
-  <tr>
-    <td>All AE states </td>
-    <td> PRECAPTURE_START</td>
-    <td>PRECAPTURE</td>
-    <td>Start precapture sequence</td>
-  </tr>
-  <tr>
-    <td>PRECAPTURE</td>
-    <td>Sequence done, AE_LOCK off </td>
-    <td>CONVERGED</td>
-    <td>Ready for high-quality capture</td>
-  </tr>
-  <tr>
-    <td>PRECAPTURE</td>
-    <td>Sequence done, AE_LOCK on </td>
-    <td>LOCKED</td>
-    <td>Ready for high-quality capture</td>
-  </tr>
-</table>
-
-<h2 id="output">Output streams</h2>
-
-<p>Unlike the old camera subsystem, which has 3-4 different ways of producing data from the camera (ANativeWindow-based preview operations, preview callbacks, video callbacks, and takePicture callbacks), the new subsystem operates solely on the ANativeWindow-based pipeline for all resolutions and output formats.  Multiple such streams can be configured at once, to send a single frame to many targets such as the GPU, the video encoder, RenderScript, or app-visible buffers (RAW Bayer, processed YUV buffers, or JPEG-encoded buffers).
-</p>
-
-<p>As an optimization, these output streams must be configured ahead of time, and only a limited number may exist at once. This allows for pre-allocation of memory buffers and configuration of the camera hardware, so that when requests are submitted with multiple or varying output pipelines listed, there won’t be delays or latency in fulfilling the request.
-</p>
-
-<p>
-To support backwards compatibility with the current camera API, at least 3 simultaneous YUV output streams must be supported, plus one JPEG stream. This is required for video snapshot support with the application also receiving YUV buffers: 
-
-<ul>
-  <li>One stream to the GPU/SurfaceView (opaque YUV format) for preview</li>
-  <li>One stream to the video encoder (opaque YUV format) for recording</li>
-  <li>One stream to the application (known YUV format) for preview frame callbacks
-  <li>One stream to the application (JPEG) for video snapshots.</li>
-</ul>
-
-<p> In addition, at least one RAW Bayer output must be supported at the same time for the new camera subsystem.
-This means that the minimum output stream count is five (one RAW, three YUV, and one JPEG).
-</p>
-<h2 id="cropping">Cropping</h2>
-<p>Cropping of the full pixel array (for digital zoom and other use cases where
-  a smaller FOV is desirable) is communicated through the
-  ANDROID_SCALER_CROP_REGION setting. This is a per-request setting, and can
-  change on a per-request basis, which is critical for implementing smooth
-  digital zoom.</p>
-<p>The region is defined as a rectangle (x, y, width, height), with (x, y)
-  describing the top-left corner of the rectangle. The rectangle is defined on
-  the coordinate system of the sensor active pixel array, with (0,0) being the
-  top-left pixel of the active pixel array. Therefore, the width and height
-  cannot be larger than the dimensions reported in the
-  ANDROID_SENSOR_ACTIVE_PIXEL_ARRAY static info field. The minimum allowed
-  width and height are reported by the HAL through the
-  ANDROID_SCALER_MAX_DIGITAL_ZOOM static info field, which describes the
-  maximum supported zoom factor. Therefore, the minimum crop region width and
-  height are:</p>
-<pre>
-{width, height} =
-   { floor(ANDROID_SENSOR_ACTIVE_PIXEL_ARRAY[0] /
-       ANDROID_SCALER_MAX_DIGITAL_ZOOM),
-     floor(ANDROID_SENSOR_ACTIVE_PIXEL_ARRAY[1] /
-       ANDROID_SCALER_MAX_DIGITAL_ZOOM) }
-</pre>
-<p>If the crop region needs to fulfill specific requirements (for example, it
-  needs to start on even coordinates, and its width/height needs to be even),
-  the HAL must do the necessary rounding and write out the final crop region
-  used in the output result metadata. Similarly, if the HAL implements video
-  stabilization, it must adjust the result crop region to describe the region
-  actually included in the output after video stabilization is applied. In
-  general, a camera-using application must be able to determine the field of
-  view it is receiving based on the crop region, the dimensions of the image
-  sensor, and the lens focal length.</p>
-<p>Since the crop region applies to all streams, which may have different aspect
-  ratios than the crop region, the exact sensor region used for each stream may
-  be smaller than the crop region. Specifically, each stream should maintain
-  square pixels and its aspect ratio by minimally further cropping the defined
-  crop region. If the stream's aspect ratio is wider than the crop region, the
-  stream should be further cropped vertically, and if the stream's aspect ratio
-  is narrower than the crop region, the stream should be further cropped
-  horizontally.</p>
-<p>In all cases, the stream crop must be centered within the full crop region,
-  and each stream is only either cropped horizontally or vertical relative to
-  the full crop region, never both.</p>
-<p>For example, if two streams are defined, a 640x480 stream (4:3 aspect), and a
-  1280x720 stream (16:9 aspect), below demonstrates the expected output regions
-  for each stream for a few sample crop regions, on a hypothetical 3 MP (2000 x
-  1500 pixel array) sensor.</p>
-<p>Crop region: (500, 375, 1000, 750) (4:3 aspect ratio)</p>
-<blockquote>
-  <p> 640x480 stream crop: (500, 375, 1000, 750) (equal to crop region)<br />
-  1280x720 stream crop: (500, 469, 1000, 562) (marked with =)</p>
-</blockquote>
-<pre>0                   1000               2000
-  +---------+---------+---------+----------+
-  | Active pixel array                     |
-  |                                        |
-  |                                        |
-  +         +-------------------+          + 375
-  |         |                   |          |
-  |         O===================O          |
-  |         I 1280x720 stream   I          |
-  +         I                   I          + 750
-  |         I                   I          |
-  |         O===================O          |
-  |         |                   |          |
-  +         +-------------------+          + 1125
-  |          Crop region, 640x480 stream   |
-  |                                        |
-  |                                        |
-  +---------+---------+---------+----------+ 1500</pre>
-<p>(TODO: Recreate these in Omnigraffle and replace.)</p>
-<p>Crop region: (500, 375, 1333, 750) (16:9 aspect ratio)</p>
-<blockquote>
-  <p> 640x480 stream crop: (666, 375, 1000, 750) (marked with =)<br />
-    1280x720 stream crop: (500, 375, 1333, 750) (equal to crop region)</p>
-</blockquote>
-<pre>0                   1000               2000
-  +---------+---------+---------+----------+
-  | Active pixel array                     |
-  |                                        |
-  |                                        |
-  +         +---O==================O---+   + 375
-  |         |   I 640x480 stream   I   |   |
-  |         |   I                  I   |   |
-  |         |   I                  I   |   |
-  +         |   I                  I   |   + 750
-  |         |   I                  I   |   |
-  |         |   I                  I   |   |
-  |         |   I                  I   |   |
-  +         +---O==================O---+   + 1125
-  |          Crop region, 1280x720 stream  |
-  |                                        |
-  |                                        |
-  +---------+---------+---------+----------+ 1500
-</pre>
-<p>Crop region: (500, 375, 750, 750) (1:1 aspect ratio)</p>
-<blockquote>
-  <p> 640x480 stream crop: (500, 469, 750, 562) (marked with =)<br />
-    1280x720 stream crop: (500, 543, 750, 414) (marged with #)</p>
-</blockquote>
-<pre>0                   1000               2000
-  +---------+---------+---------+----------+
-  | Active pixel array                     |
-  |                                        |
-  |                                        |
-  +         +--------------+               + 375
-  |         O==============O               |
-  |         ################               |
-  |         #              #               |
-  +         #              #               + 750
-  |         #              #               |
-  |         ################ 1280x720      |
-  |         O==============O 640x480       |
-  +         +--------------+               + 1125
-  |          Crop region                   |
-  |                                        |
-  |                                        |
-  +---------+---------+---------+----------+ 1500
-</pre>
-<p>And a final example, a 1024x1024 square aspect ratio stream instead of the
-  480p stream:</p>
-<p>Crop region: (500, 375, 1000, 750) (4:3 aspect ratio)</p>
-<blockquote>
-  <p> 1024x1024 stream crop: (625, 375, 750, 750) (marked with #)<br />
-    1280x720 stream crop: (500, 469, 1000, 562) (marked with =)</p>
-</blockquote>
-<pre>0                   1000               2000
-  +---------+---------+---------+----------+
-  | Active pixel array                     |
-  |                                        |
-  |              1024x1024 stream          |
-  +         +--###############--+          + 375
-  |         |  #             #  |          |
-  |         O===================O          |
-  |         I 1280x720 stream   I          |
-  +         I                   I          + 750
-  |         I                   I          |
-  |         O===================O          |
-  |         |  #             #  |          |
-  +         +--###############--+          + 1125
-  |          Crop region                   |
-  |                                        |
-  |                                        |
-  +---------+---------+---------+----------+ 1500
-</pre>
-<h2 id="reprocessing">Reprocessing</h2>
-
-<p>Additional support for DNGs is provided by reprocessing support for RAW Bayer data.
-This support allows the camera pipeline to process a previously captured RAW buffer and metadata
-(an entire frame that was recorded previously), to produce a new rendered YUV or JPEG output.
-</p>
-<h2 id="errors">Error management</h2>
-<p>Camera HAL device ops functions that have a return value will all return
-  -ENODEV / NULL in case of a serious error. This means the device cannot
-  continue operation, and must be closed by the framework. Once this error is
-  returned by some method, or if notify() is called with ERROR_DEVICE, only
-  the close() method can be called successfully. All other methods will return
-  -ENODEV / NULL.</p>
-<p>If a device op is called in the wrong sequence, for example if the framework
-  calls configure_streams() is called before initialize(), the device must
-  return -ENOSYS from the call, and do nothing.</p>
-<p>Transient errors in image capture must be reported through notify() as follows:</p>
-<ul>
-  <li>The failure of an entire capture to occur must be reported by the HAL by
-    calling notify() with ERROR_REQUEST. Individual errors for the result
-    metadata or the output buffers must not be reported in this case.</li>
-  <li>If the metadata for a capture cannot be produced, but some image buffers
-    were filled, the HAL must call notify() with ERROR_RESULT.</li>
-  <li>If an output image buffer could not be filled, but either the metadata was
-    produced or some other buffers were filled, the HAL must call notify() with
-    ERROR_BUFFER for each failed buffer.</li>
-</ul>
-<p>In each of these transient failure cases, the HAL must still call
-  process_capture_result, with valid output buffer_handle_t. If the result
-  metadata could not be produced, it should be NULL. If some buffers could not
-  be filled, their sync fences must be set to the error state.</p>
-<p>Invalid input arguments result in -EINVAL from the appropriate methods. In
-  that case, the framework must act as if that call had never been made.</p>
-<h2 id="stream-mgmt">Stream management</h2>
-<h3 id="configure-streams">configure_streams</h3>
-<p>Reset the HAL camera device processing pipeline and set up new input and
-  output streams. This call replaces any existing stream configuration with
-  the streams defined in the stream_list. This method will be called at
-  least once after initialize() before a request is submitted with
-  process_capture_request().</p>
-<p>The stream_list must contain at least one output-capable stream, and may
-  not contain more than one input-capable stream.</p>
-<p>The stream_list may contain streams that are also in the currently-active
-  set of streams (from the previous call to configure_stream()). These
-  streams will already have valid values for usage, max_buffers, and the
-  private pointer. If such a stream has already had its buffers registered,
-  register_stream_buffers() will not be called again for the stream, and
-  buffers from the stream can be immediately included in input requests.</p>
-<p>If the HAL needs to change the stream configuration for an existing
-  stream due to the new configuration, it may rewrite the values of usage
-  and/or max_buffers during the configure call. The framework will detect
-  such a change, and will then reallocate the stream buffers, and call
-  register_stream_buffers() again before using buffers from that stream in
-  a request.</p>
-<p>If a currently-active stream is not included in stream_list, the HAL may
-  safely remove any references to that stream. It will not be reused in a
-  later configure() call by the framework, and all the gralloc buffers for
-  it will be freed after the configure_streams() call returns.</p>
-<p>The stream_list structure is owned by the framework, and may not be
-  accessed once this call completes. The address of an individual
-  camera3_stream_t structure will remain valid for access by the HAL until
-  the end of the first configure_stream() call which no longer includes
-  that camera3_stream_t in the stream_list argument. The HAL may not change
-  values in the stream structure outside of the private pointer, except for
-  the usage and max_buffers members during the configure_streams() call
-  itself.</p>
-<p>If the stream is new, the usage, max_buffer, and private pointer fields
-  of the stream structure will all be set to 0. The HAL device must set
-  these fields before the configure_streams() call returns. These fields
-  are then used by the framework and the platform gralloc module to
-  allocate the gralloc buffers for each stream.</p>
-<p>Before such a new stream can have its buffers included in a capture
-  request, the framework will call register_stream_buffers() with that
-  stream. However, the framework is not required to register buffers for
-  _all_ streams before submitting a request. This allows for quick startup
-  of (for example) a preview stream, with allocation for other streams
-  happening later or concurrently.</p>
-<h4>Preconditions</h4>
-<p>The framework will only call this method when no captures are being
-  processed. That is, all results have been returned to the framework, and
-  all in-flight input and output buffers have been returned and their
-  release sync fences have been signaled by the HAL. The framework will not
-  submit new requests for capture while the configure_streams() call is
-  underway.</p>
-<h4>Postconditions</h4>
-<p>The HAL device must configure itself to provide maximum possible output
-  frame rate given the sizes and formats of the output streams, as
-  documented in the camera device's static metadata.</p>
-<h4>Performance expectations</h4>
-<p>This call is expected to be heavyweight and possibly take several hundred
-  milliseconds to complete, since it may require resetting and
-  reconfiguring the image sensor and the camera processing pipeline.
-  Nevertheless, the HAL device should attempt to minimize the
-  reconfiguration delay to minimize the user-visible pauses during
-  application operational mode changes (such as switching from still
-  capture to video recording).</p>
-<h4>Return values</h4>
-<ul>
-  <li>0:      On successful stream configuration<br />
-  </li>
-  <li>-EINVAL: If the requested stream configuration is invalid. Some examples
-    of invalid stream configurations include:
-    <ul>
-      <li>Including more than 1 input-capable stream (INPUT or
-        BIDIRECTIONAL)</li>
-      <li>Not including any output-capable streams (OUTPUT or
-        BIDIRECTIONAL)</li>
-      <li>Including streams with unsupported formats, or an unsupported
-        size for that format.</li>
-      <li>Including too many output streams of a certain format.<br />
-        Note that the framework submitting an invalid stream
-        configuration is not normal operation, since stream
-        configurations are checked before configure. An invalid
-        configuration means that a bug exists in the framework code, or
-        there is a mismatch between the HAL's static metadata and the
-        requirements on streams.</li>
-    </ul>
-  </li>
-  <li>-ENODEV: If there has been a fatal error and the device is no longer
-    operational. Only close() can be called successfully by the
-    framework after this error is returned.</li>
-</ul>
-<h3 id="register-buffers">register_stream_buffers</h3>
-<p>Register buffers for a given stream with the HAL device. This method is
-  called by the framework after a new stream is defined by
-  configure_streams, and before buffers from that stream are included in a
-  capture request. If the same stream is listed in a subsequent
-  configure_streams() call, register_stream_buffers will _not_ be called
-  again for that stream.</p>
-<p>The framework does not need to register buffers for all configured
-  streams before it submits the first capture request. This allows quick
-  startup for preview (or similar use cases) while other streams are still
-  being allocated.</p>
-<p>This method is intended to allow the HAL device to map or otherwise
-  prepare the buffers for later use. The buffers passed in will already be
-  locked for use. At the end of the call, all the buffers must be ready to
-  be returned to the stream.  The buffer_set argument is only valid for the
-  duration of this call.</p>
-<p>If the stream format was set to HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED,
-  the camera HAL should inspect the passed-in buffers here to determine any
-  platform-private pixel format information.</p>
-<h4>Return values</h4>
-<ul>
-  <li>0:      On successful registration of the new stream buffers</li>
-  <li>-EINVAL: If the stream_buffer_set does not refer to a valid active
-    stream, or if the buffers array is invalid.</li>
-  <li>-ENOMEM: If there was a failure in registering the buffers. The framework
-    must consider all the stream buffers to be unregistered, and can
-    try to register again later.</li>
-  <li>-ENODEV: If there is a fatal error, and the device is no longer
-    operational. Only close() can be called successfully by the
-    framework after this error is returned.</li>
-</ul>
-<h2 id="request-creation">Request creation and submission</h2>
-<h3 id="default-settings">construct_default_request_settings</h3>
-<p>Create capture settings for standard camera use cases. The device must return a settings buffer that is configured to meet the
-  requested use case, which must be one of the CAMERA3_TEMPLATE_*
-enums. All request control fields must be included.</p>
-<p>The HAL retains ownership of this structure, but the pointer to the
-  structure must be valid until the device is closed. The framework and the
-  HAL may not modify the buffer once it is returned by this call. The same
-  buffer may be returned for subsequent calls for the same template, or for
-  other templates.</p>
-<h4>Return values</h4>
-<ul>
-  <li>Valid metadata: On successful creation of a default settings
-    buffer.</li>
-  <li>NULL:           In case of a fatal error. After this is returned, only
-    the close() method can be called successfully by the
-    framework.  </li>
-</ul>
-<h3 id="process-capture">process_capture_request</h3>
-<p>Send a new capture request to the HAL. The HAL should not return from
-  this call until it is ready to accept the next request to process. Only
-  one call to process_capture_request() will be made at a time by the
-  framework, and the calls will all be from the same thread. The next call
-  to process_capture_request() will be made as soon as a new request and
-  its associated buffers are available. In a normal preview scenario, this
-  means the function will be called again by the framework almost
-  instantly.</p>
-<p>The actual request processing is asynchronous, with the results of
-  capture being returned by the HAL through the process_capture_result()
-  call. This call requires the result metadata to be available, but output
-  buffers may simply provide sync fences to wait on. Multiple requests are
-  expected to be in flight at once, to maintain full output frame rate.</p>
-<p>The framework retains ownership of the request structure. It is only
-  guaranteed to be valid during this call. The HAL device must make copies
-  of the information it needs to retain for the capture processing. The HAL
-  is responsible for waiting on and closing the buffers' fences and
-  returning the buffer handles to the framework.</p>
-<p>The HAL must write the file descriptor for the input buffer's release
-  sync fence into input_buffer-&gt;release_fence, if input_buffer is not
-  NULL. If the HAL returns -1 for the input buffer release sync fence, the
-  framework is free to immediately reuse the input buffer. Otherwise, the
-  framework will wait on the sync fence before refilling and reusing the
-  input buffer.</p>
-<h4>Return values</h4>
-<ul>
-  <li>0:      On a successful start to processing the capture request</li>
-  <li>-EINVAL: If the input is malformed (the settings are NULL when not
-    allowed, there are 0 output buffers, etc) and capture processing
-    cannot start. Failures during request processing should be
-    handled by calling camera3_callback_ops_t.notify(). In case of
-    this error, the framework will retain responsibility for the
-    stream buffers' fences and the buffer handles; the HAL should
-    not close the fences or return these buffers with
-    process_capture_result.</li>
-  <li>-ENODEV: If the camera device has encountered a serious error. After this
-    error is returned, only the close() method can be successfully
-    called by the framework.</li>
-</ul>
-<h2 id="misc-methods">Miscellaneous methods</h2>
-<h3 id="get-metadata">get_metadata_vendor_tag_ops</h3>
-<p>Get methods to query for vendor extension metadata tag information. The
-  HAL should fill in all the vendor tag operation methods, or leave ops
-  unchanged if no vendor tags are defined.
-  
-  The definition of vendor_tag_query_ops_t can be found in
-  system/media/camera/include/system/camera_metadata.h.</p>
-<h3 id="dump">dump</h3>
-<p>Print out debugging state for the camera device. This will be called by
-  the framework when the camera service is asked for a debug dump, which
-  happens when using the dumpsys tool, or when capturing a bugreport.
-  
-  The passed-in file descriptor can be used to write debugging text using
-  dprintf() or write(). The text should be in ASCII encoding only.</p>
-<h3 id="flush">flush</h3>
-<p>Flush all currently in-process captures and all buffers in the pipeline
-  on the given device. The framework will use this to dump all state as
-  quickly as possible in order to prepare for a configure_streams() call.</p>
-<p>No buffers are required to be successfully returned, so every buffer
-  held at the time of flush() (whether sucessfully filled or not) may be
-  returned with CAMERA3_BUFFER_STATUS_ERROR. Note the HAL is still allowed
-  to return valid (STATUS_OK) buffers during this call, provided they are
-  succesfully filled.</p>
-<p>All requests currently in the HAL are expected to be returned as soon as
-  possible.  Not-in-process requests should return errors immediately. Any
-  interruptible hardware blocks should be stopped, and any uninterruptible
-  blocks should be waited on.</p>
-<p>flush() should only return when there are no more outstanding buffers or
-  requests left in the HAL.  The framework may call configure_streams (as
-  the HAL state is now quiesced) or may issue new requests.</p>
-<p>A flush() call should only take 100ms or less. The maximum time it can
-  take is 1 second.</p>
-<h4>Version information</h4>
-<p>This is available only if device version &gt;= CAMERA_DEVICE_API_VERSION_3_1.</p>
-<h4>Return values</h4>
-<ul>
-  <li>0:      On a successful flush of the camera HAL.</li>
-  <li>-EINVAL: If the input is malformed (the device is not valid).<br />
-    -ENODEV: If the camera device has encountered a serious error. After this
-    error is returned, only the close() method can be successfully
-    called by the framework.</li>
-</ul>
diff --git a/src/devices/debugtune.jd b/src/devices/debugtune.jd
index c852073..50020cc 100644
--- a/src/devices/debugtune.jd
+++ b/src/devices/debugtune.jd
@@ -2,7 +2,7 @@
 @jd:body
 
 <!--
-    Copyright 2010 The Android Open Source Project
+    Copyright 2013 The Android Open Source Project
 
     Licensed under the Apache License, Version 2.0 (the "License");
     you may not use this file except in compliance with the License.
@@ -18,4 +18,4 @@
 -->
 <p>The following sections contain information, documentation, tips and tricks
 about debugging Android at the platform level, typically during development
-of platform-level features.</p>
\ No newline at end of file
+of platform-level features.</p>
diff --git a/src/devices/devices_toc.cs b/src/devices/devices_toc.cs
index 42f58ea..9de25e1 100644
--- a/src/devices/devices_toc.cs
+++ b/src/devices/devices_toc.cs
@@ -1,5 +1,5 @@
 <!--
-    Copyright 2010 The Android Open Source Project
+    Copyright 2013 The Android Open Source Project
 
     Licensed under the Apache License, Version 2.0 (the "License");
     you may not use this file except in compliance with the License.
@@ -50,7 +50,23 @@
         </ul>
       </li>
       <li><a href="<?cs var:toroot ?>devices/bluetooth.html">Bluetooth</a></li>
-      <li><a href="<?cs var:toroot ?>devices/camera.html">Camera</a></li>
+      <li class="nav-section">
+        <div class="nav-section-header">
+          <a href="<?cs var:toroot ?>devices/camera/camera.html">
+            <span class="en">Camera</span>
+          </a>
+        </div>
+        <ul>
+          <li><a href="<?cs var:toroot ?>devices/camera/camera3.html">Camera HAL3</a></li>
+          <li><a href="<?cs var:toroot ?>devices/camera/camera3_requests_hal.html">HAL Subsystem</a></li>
+          <li><a href="<?cs var:toroot ?>devices/camera/camera3_metadata.html">Metadata and Controls</a></li>
+          <li><a href="<?cs var:toroot ?>devices/camera/camera3_3Amodes.html">3A Modes and State</a></li>
+          <li><a href="<?cs var:toroot ?>devices/camera/camera3_crop_reprocess.html">Output and Cropping</a></li>
+          <li><a href="<?cs var:toroot ?>devices/camera/camera3_error_stream.html">Errors and Streams</a></li>
+          <li><a href="<?cs var:toroot ?>devices/camera/camera3_requests_methods.html">Request Creation</a></li>
+        </ul>
+      </li>
+
       <li><a href="<?cs var:toroot ?>devices/drm.html">DRM</a></li>
       <li><a href="<?cs var:toroot ?>devices/tech/encryption/index.html">Encryption</a></li>
       <li class="nav-section">
@@ -124,6 +140,30 @@
 
           </ul>
       </li>
+     <li class="nav-section">
+          <div class="nav-section-header">
+            <a href="<?cs var:toroot ?>devices/sensors/index.html">
+              <span class="en">Sensors</span>
+            </a>
+          </div>
+          <ul>
+            <li>
+              <a href="<?cs var:toroot ?>devices/sensors/base_triggers.html">
+                <span class="en">Base sensors</span>
+              </a>
+            </li>
+            <li>
+              <a href="<?cs var:toroot ?>devices/sensors/composite_sensors.html">
+                <span class="en">Composite sensors</span>
+              </a>
+            </li>
+            <li>
+              <a href="<?cs var:toroot ?>devices/sensors/batching.html">
+                <span class="en">Batching results</span>
+              </a>
+            </li>
+          </ul>
+      </li>
     </ul>
   </li>
 <!-- End Porting Android -->
diff --git a/src/devices/drm.jd b/src/devices/drm.jd
index 92beaa0..828f41b 100644
--- a/src/devices/drm.jd
+++ b/src/devices/drm.jd
@@ -2,7 +2,7 @@
 @jd:body
 
 <!--
-    Copyright 2010 The Android Open Source Project     
+    Copyright 2013 The Android Open Source Project     
 
     Licensed under the Apache License, Version 2.0 (the "License");    
     you may not use this file except in compliance with the License.   
diff --git a/src/devices/graphics.jd b/src/devices/graphics.jd
index 357e8f9..a906188 100644
--- a/src/devices/graphics.jd
+++ b/src/devices/graphics.jd
@@ -2,7 +2,7 @@
 @jd:body
 
 <!--
-    Copyright 2010 The Android Open Source Project
+    Copyright 2013 The Android Open Source Project
 
     Licensed under the Apache License, Version 2.0 (the "License");
     you may not use this file except in compliance with the License.
diff --git a/src/devices/hal.jd b/src/devices/hal.jd
index 5a53ef7..e464a88 100644
--- a/src/devices/hal.jd
+++ b/src/devices/hal.jd
@@ -2,7 +2,7 @@
 @jd:body
 
 <!--
-    Copyright 2010 The Android Open Source Project
+    Copyright 2013 The Android Open Source Project
 
     Licensed under the Apache License, Version 2.0 (the "License");
     you may not use this file except in compliance with the License.
@@ -121,4 +121,4 @@
 
   <p>For more information about setting up the build for each HAL, see its respective documentation.</p>
 
-</p>
\ No newline at end of file
+</p>
diff --git a/src/devices/images/camera_hal.png b/src/devices/images/camera_hal.png
deleted file mode 100644
index 48b3b69..0000000
--- a/src/devices/images/camera_hal.png
+++ /dev/null
Binary files differ
diff --git a/src/devices/index.jd b/src/devices/index.jd
index d19f23c..f0b4e42 100644
--- a/src/devices/index.jd
+++ b/src/devices/index.jd
@@ -2,7 +2,7 @@
 @jd:body
 
 <!--
-    Copyright 2010 The Android Open Source Project
+    Copyright 2013 The Android Open Source Project
 
     Licensed under the Apache License, Version 2.0 (the "License");
     you may not use this file except in compliance with the License.
@@ -93,4 +93,4 @@
   These additions have less to do with driver development than with the system's functionality. You can use any version of the kernel that you want as long as it supports the required features, such as the binder driver. However, we recommend
   using the latest version of the Android kernel. For the latest Android kernel, see
   <a href="{@docRoot}source/building-kernels.html" >Building Kernels</a>.
-</p>
\ No newline at end of file
+</p>
diff --git a/src/devices/low-ram.jd b/src/devices/low-ram.jd
index 711fadc..19845a9 100644
--- a/src/devices/low-ram.jd
+++ b/src/devices/low-ram.jd
@@ -2,7 +2,7 @@
 @jd:body
 
 <!--
-    Copyright 2010 The Android Open Source Project
+    Copyright 2013 The Android Open Source Project
 
     Licensed under the Apache License, Version 2.0 (the "License");
     you may not use this file except in compliance with the License.
@@ -26,15 +26,10 @@
 
 <h2 id="intro">Introduction</h2>
 
-<p>In release 4.4, Android begins explicit support for devices with 512MB
-memory. As of Q3 2013, more than 30% of all Android devices run on
-platform versions older than 4.0. Most of these devices tend to be low-end with
-512MB or less RAM. One of the primary goals of this release 
-is to ensure Android can continue to run on a 512MB device.</p>
-
-<p>This documentation is intended to help OEMs optimize and configure Android 4.4 for 
-  low-RAM devices. Several of these optimizations are generic enough that 
-they can be applied to previous releases as well. </p>
+<p>Android now supports devices with 512MB of RAM. This documentation is intended 
+to help OEMs optimize and configure Android 4.4 for low-memory devices. Several 
+of these optimizations are generic enough that they can be applied to previous 
+releases as well.</p>
 
 <h2 id="optimizations">Android 4.4 platform optimizations</h2>
 
@@ -60,7 +55,6 @@
 <li>Reduced per-process font cache overhead.</li>
 <li>Introduced ArrayMap/ArraySet and used extensively in framework as a
 lighter-footprint replacement for HashMap/HashSet.</li>
-levels, smaller graphics caches, etc.</li>
 </ul>
 
 <h3 id="opt-proc">Procstats</h3>
@@ -86,7 +80,7 @@
 <h2 id="build-time">Build-time configuration</h2>
 <h3 id="flag">Enable Low Ram Device flag</h3>
 <p>We are introducing a new API called <code>ActivityManager.isLowRamDevice()</code> for applications to  determine if they should turn off specific memory-intensive 
-  features that work poorly on low-end devices.</p>
+  features that work poorly on low-memory devices.</p>
 <p>For 512MB devices, this API is expected to return: "true" It can be enabled by 
   the following system property in the device makefile.<br/>
 <code>PRODUCT_PROPERTY_OVERRIDES += ro.config.low_ram=true</code></p>
@@ -111,7 +105,7 @@
 
 
   <p>Ensure the default wallpaper setup on launcher is <strong>not</strong>
-using live-wallpaper. Low-end devices should not pre-install any live wallpapers. </p>
+using live-wallpaper. Low-memory devices should not pre-install any live wallpapers. </p>
 
 
 <h2 id="kernel">Kernel configuration</h2>
diff --git a/src/devices/media.jd b/src/devices/media.jd
index eed62c9..69ba702 100644
--- a/src/devices/media.jd
+++ b/src/devices/media.jd
@@ -2,7 +2,7 @@
 @jd:body
 
 <!--
-    Copyright 2010 The Android Open Source Project
+    Copyright 2013 The Android Open Source Project
 
     Licensed under the Apache License, Version 2.0 (the "License");
     you may not use this file except in compliance with the License.
@@ -111,4 +111,4 @@
   <code>device/samsung/tuna/media_profiles.xml</code> file for complete examples.</p>
 
 <p class="note"><strong>Note:</strong> The <code>&lt;Quirk&gt;</code> element for media codecs is no longer supported
-  by Android starting in Jelly Bean.</p>
\ No newline at end of file
+  by Android starting in Jelly Bean.</p>
diff --git a/src/devices/native-memory.jd b/src/devices/native-memory.jd
index d3beed7..d149ec1 100644
--- a/src/devices/native-memory.jd
+++ b/src/devices/native-memory.jd
@@ -2,7 +2,7 @@
 @jd:body
 
 <!--
-    Copyright 2010 The Android Open Source Project
+    Copyright 2013 The Android Open Source Project
 
     Licensed under the Apache License, Version 2.0 (the "License");
     you may not use this file except in compliance with the License.
diff --git a/src/devices/sensors/base_triggers.jd b/src/devices/sensors/base_triggers.jd
new file mode 100644
index 0000000..1d2bd78
--- /dev/null
+++ b/src/devices/sensors/base_triggers.jd
@@ -0,0 +1,143 @@
+page.title=Base sensors and trigger modes
+@jd:body
+
+<!--
+    Copyright 2013 The Android Open Source Project
+
+    Licensed under the Apache License, Version 2.0 (the "License");
+    you may not use this file except in compliance with the License.
+    You may obtain a copy of the License at
+
+        http://www.apache.org/licenses/LICENSE-2.0
+
+    Unless required by applicable law or agreed to in writing, software
+    distributed under the License is distributed on an "AS IS" BASIS,
+    WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+    See the License for the specific language governing permissions and
+    limitations under the License.
+-->
+<div id="qv-wrapper">
+  <div id="qv">
+    <h2>In this document</h2>
+    <ol id="auto-toc">
+    </ol>
+  </div>
+</div>
+
+<h2 id="triggers">Trigger modes</h2>
+<p>Sensors can report events in different ways called trigger modes; each sensor 
+  type has one and only one trigger mode associated to it. Four trigger modes 
+  exist:</p>
+<h3 id="continuous">Continuous</h3>
+<p>Events are reported at a constant rate defined by setDelay(). Example sensors 
+  using the continuous trigger mode are accelerometers and gyroscopes.</p>
+<h3 id="on-change">On-change</h3>
+<p>Events are reported only if the sensor's value has changed. setDelay() is used 
+  to set a lower limit to the reporting period, meaning the minimum time between 
+  consecutive events. Activating the sensor also triggers an event. The HAL must 
+  return an event immediately when an on-change sensor is activated. Example 
+  sensors using the on-change trigger mode are the step counter and proximity 
+  sensor types.</p>
+<h3 id="one-shot">One-shot</h3>
+<p>Upon detection of an event, the sensor deactivates itself and then sends a 
+  single event. Order matters to avoid race conditions. No other event is sent 
+  until the sensor is reactivated. setDelay() is ignored. 
+<a
+href="{@docRoot}devices/sensors/composite_sensors.html#Significant">Significant
+motion</a> is an example of this kind of sensor.</p>
+<h3 id="special">Special</h3>
+<p>See the individual sensor type descriptions for details.</p>
+<h2 id="categories">Categories</h2>
+<p>Sensors fall into four primary categories:</p>
+<blockquote>
+  <p><em>Base</em> - records core measurements from which all other sensors are derived <br/>
+    <em>Activity</em> - detects user or device movement<br/>
+    <em>Attitude</em> - measures the orientation of the device<br/>
+    <em>Uncalibrated</em> - is identical to the corresponding base sensor except the 
+    dynamic calibration is reported separately rather than applied to the results</p>
+</blockquote>
+<h2 id="base">Base sensors</h2>
+<p>These sensor types are listed first because they are the fundamental sensors 
+  upon which all other sensor types are based.</p>
+<h3 id="Accelerometer">Accelerometer</h3>
+<p><em>Trigger-mode: Continuous<br/>
+Wake-up sensor: No</em></p>
+<p>All values are in SI units (m/s^2) and measure the acceleration of the device 
+  minus the force of gravity.</p>
+<p>Acceleration sensors return sensor events for all three axes at a constant rate 
+  defined by setDelay().</p>
+<ul>
+  <li>x: Acceleration on the x-axis</li>
+  <li>y: Acceleration on the y-axis</li>
+  <li>z: Acceleration on the z-axis</li>
+</ul>
+<p>Note the readings from the accelerometer include the acceleration due to gravity 
+  (which is opposite the direction of the gravity vector).</p>
+<p>Here are examples:</p>
+<ul>
+  <li>The norm of (x, y, z)  should be close to 0 when in free fall.</li>
+  <li>When the device lies flat on a table and is pushed on its left side toward the 
+    right, the x acceleration value is positive.</li>
+  <li>When the device lies flat on a table, the acceleration value is +9.81, which 
+    corresponds to the acceleration of the device (0 m/s^2) minus the force of 
+    gravity (-9.81 m/s^2).</li>
+  <li>When the device lies flat on a table and is pushed toward the sky, the 
+    acceleration value is greater than +9.81, which corresponds to the 
+    acceleration of the device (+A m/s^2) minus the force of gravity (-9.81 
+    m/s^2).</li>
+</ul>
+<h3 id="Ambient">Ambient temperature</h3>
+<p><em>Trigger-mode: On-change<br/>
+Wake-up sensor: No</em></p>
+<p>This sensor provides the ambient (room) temperature in degrees Celsius.</p>
+<h3 id="Geomagnetic">Geomagnetic field</h3>
+<p><em>Trigger-mode: Continuous<br/>
+Wake-up sensor: No</em></p>
+<p>All values are in micro-Tesla (uT) and measure the geomagnetic field in the X, Y 
+  and Z axis.</p>
+<p>Returned values include calibration mechanisms so the vector is aligned with the 
+  magnetic declination and heading of the earth's geomagnetic field.</p>
+<p>Magnetic field sensors return sensor events for all three axes at a constant 
+  rate defined by setDelay().</p>
+<h3 id="Gyroscope">Gyroscope</h3>
+<p><em>Trigger-mode: Continuous<br/>
+Wake-up sensor: No</em></p>
+<p>All values are in radians/second and measure the rate of rotation around the X, 
+  Y and Z axis.  The coordinate system is the same as is used for the acceleration 
+  sensor. Rotation is positive in the counter-clockwise direction (right-hand 
+  rule).</p>
+<p>That is, an observer looking from some positive location on the x, y or z axis 
+  at a device positioned on the origin would report positive rotation if the 
+  device appeared to be rotating counter clockwise. Note that this is the standard 
+  mathematical definition of positive rotation and does not agree with the 
+  definition of roll given elsewhere.</p>
+<p>The range should at least be 17.45 rad/s (ie: ~1000 deg/s).</p>
+<p>Automatic gyro-drift compensation is required.</p>
+<h3 id="Light">Light</h3>
+<p><em>Trigger-mode: On-change<br/>
+Wake-up sensor: No</em></p>
+<p>The light sensor value is returned in SI lux units.</p>
+<h3 id="Proximity">Proximity</h3>
+<p><em>Trigger-mode: On-change<br/>
+Wake-up sensor: Yes</em></p>
+<p>Measures the distance from the sensor to the closest visible surface. As this is 
+  a wake-up sensor, it should wake up the SoC when it is running and detects a 
+  change in proximity. The distance value is measured in centimeters. Note that 
+  some proximity sensors only support a binary &quot;near&quot; or &quot;far&quot; measurement. In 
+  this case, the sensor should report its maxRange value in the &quot;far&quot; state and a 
+  value less than maxRange in the &quot;near&quot; state.</p>
+<p>To ensure the applications have the time to receive the event before the 
+  application processor goes back to sleep, the driver must hold a &quot;timeout wake 
+  lock&quot; for 200 milliseconds for every wake-up sensor. That is, the application 
+  processor should not be allowed to go back to sleep in the 200 milliseconds 
+  following a wake-up interrupt.</p>
+<h3 id="Pressure">Pressure</h3>
+<p><em>Trigger-mode: Continuous<br/>
+Wake-up sensor: No</em></p>
+<p>The pressure sensor uses a barometer to return the atmospheric pressure in 
+  hectopascal (hPa).</p>
+<h3 id="humidity">Relative humidity</h3>
+<p><em>Trigger-mode: On-change<br/>
+Wake-up sensor: No</em></p>
+<p>A relative humidity sensor measures relative ambient air humidity and returns a 
+  value in percent.</p>
diff --git a/src/devices/sensors/batching.jd b/src/devices/sensors/batching.jd
new file mode 100644
index 0000000..405df88
--- /dev/null
+++ b/src/devices/sensors/batching.jd
@@ -0,0 +1,201 @@
+page.title=Batching sensor results
+@jd:body
+
+<!--
+    Copyright 2013 The Android Open Source Project
+
+    Licensed under the Apache License, Version 2.0 (the "License");
+    you may not use this file except in compliance with the License.
+    You may obtain a copy of the License at
+
+        http://www.apache.org/licenses/LICENSE-2.0
+
+    Unless required by applicable law or agreed to in writing, software
+    distributed under the License is distributed on an "AS IS" BASIS,
+    WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+    See the License for the specific language governing permissions and
+    limitations under the License.
+-->
+<div id="qv-wrapper">
+  <div id="qv">
+    <h2>In this document</h2>
+    <ol id="auto-toc">
+    </ol>
+  </div>
+</div>
+
+<h2 id="Why">Why batch?</h2>
+<p>This page presents the specificities of Batch mode and the expected behaviors
+  of sensors while in batch mode. Batching can enable significant power savings by 
+  preventing the application processor from waking up to receive each event. Instead, these 
+  events can be grouped and processed together.</p>
+<h2 id="batch-function">batch(int handle, int flags, int64_t period_ns, int64_t
+  max_report_latency)</h2>
+<p>Enabling batch mode for a given sensor sets the delay between events.
+  <code>max_report_latency</code> sets the maximum time by which events can be delayed and
+  batched together before being reported to the applications. A value of zero 
+  disables batch mode for the given sensor. The <code>period_ns</code> parameter is equivalent
+  to calling setDelay() -- this function both enables or disables the batch mode 
+  AND sets the event's period in nanoseconds. See setDelay() for a detailed 
+  explanation of the <code>period_ns</code> parameter.</p>
+<p>In non-batch mode, all sensor events must be reported as soon as they are 
+  detected. For example, an accelerometer activated at 50Hz will trigger 
+  interrupts 50 times per second.<br/>
+  While in batch mode, sensor events do not need to be reported as soon as they 
+  are detected. They can be temporarily stored and reported in batches, as long as 
+  no event is delayed by more than <code>maxReportingLatency</code> nanoseconds. That is, all events 
+  since the previous batch are recorded and returned at once. This reduces the 
+  amount of interrupts sent to the SoC and allows the SoC to switch to a lower 
+  power mode (idle) while the sensor is capturing and batching data.</p>
+<p>setDelay() is not affected and it behaves as usual. <br/>
+  <br/>
+  Each event has a timestamp associated with it. The timestamp must be accurate 
+  and correspond to the time at which the event physically happened.</p>
+<p>Batching does not modify the behavior of poll(): batches from different sensors 
+  can be interleaved and split. As usual, all events from the same sensor are 
+  time-ordered.</p>
+<h2 id="Suspend">Behavior outside of suspend mode</h2>
+<p>These are the power modes of the application processor: on, idle, and suspend. 
+  The sensors behave differently in each of these modes. As you would imagine, on 
+  mode is when the application processor is running. Idle mode is a medium power mode 
+  where the application processor is powered but doesn't perform any tasks.
+  Suspend is a low-power mode where the application processor is not powered. The
+  power consumption of the device in this mode is usually 100 times less than in the On
+  mode.</p>
+<p>When the SoC is awake (not in suspend mode), events must be reported in batches 
+  at least every maxReportingLatency. No event shall be dropped or lost. If internal 
+  hardware FIFOs fill up before the maxReportingLatency, then events are reported at that 
+  point to ensure no event is lost.</p>
+<h2 id="Normal">Normal behavior in suspend mode</h2>
+<p>By default, batch mode doesn't significantly change the interaction with suspend 
+  mode. That is, sensors must continue to allow the SoC to go into suspend mode 
+  and sensors must stay active to fill their internal FIFO. In this mode, when the 
+  FIFO fills up, it shall wrap around and behave like a circular buffer, 
+  overwriting older events.<br/>
+  <br/>
+  As soon as the SoC comes out of suspend mode, a batch is produced with as much 
+as the recent history as possible, and batch operation resumes as usual.</p>
+<p>The behavior described above allows applications to record the recent history of 
+  a set of sensor types while keeping the SoC in suspend. It also allows the 
+  hardware to not have to rely on a wake-up interrupt line.</p>
+<h2 id="WAKE_UPON_FIFO_FULL">WAKE_UPON_FIFO_FULL behavior in suspend mode</h2>
+<p>There are cases, however, where an application cannot afford to lose any events, 
+  even when the device goes into suspend mode.</p>
+<p>For a given rate, if a sensor has the capability to store at least 10 seconds 
+  worth of events in its FIFO and is able to wake up the SoC, it can implement an 
+  optional secondary mode: the <code>WAKE_UPON_FIFO_FULL</code> mode.</p>
+<p>The caller will set the <code>SENSORS_BATCH_WAKE_UPON_FIFO_FULL</code> flag to activate this
+  mode. If the sensor does not support this mode, batch() will fail when the flag 
+  is set.</p>
+<p>In batch mode, and only when the flag
+<code>SENSORS_BATCH_WAKE_UPON_FIFO_FULL</code> is
+  set and supported, the specified sensor must be able to wake-up the SoC and be
+  able to buffer at least 10 seconds worth of the requested sensor events.</p>
+<p>When running with the <code>WAKE_UPON_FIFO_FULL</code> flag set, no events can be lost. When
+  the FIFO is getting full, the sensor must wake up the SoC from suspend and 
+  return a batch before the FIFO fills-up.</p>
+<p>Depending on the device, it might take a few milliseconds for the SoC to 
+  entirely come out of suspend and start flushing the FIFO. Enough head room must 
+  be allocated in the FIFO to allow the device to entirely come out of suspend 
+  without the FIFO overflowing (no events shall be lost).</p>
+<p>Implementing the <code>WAKE_UPON_FIFO_FULL</code> mode is optional. If the hardware cannot
+  support this mode, or if the physical FIFO is so small that the device would 
+  never be allowed to go into suspend for at least 10 seconds, then this function 
+  <strong>must</strong> fail when the flag
+<code>SENSORS_BATCH_WAKE_UPON_FIFO_FULL</code> is set, regardless
+  of the value of the maxReportingLatency parameter.</p>
+<h2 id="Implementing">Implementing batching</h2>
+<p>Batch mode, if supported, should happen at the hardware level, typically using 
+  hardware FIFOs. In particular, it SHALL NOT be implemented in the HAL, as this 
+  would be counter productive. The goal here is to save significant amounts of 
+  power. Batching should be implemented without the aid of the SoC, which should
+  be allowed to be in suspend mode during batching.</p>
+<p>In some implementations, events from several sensors can share the same physical 
+  FIFO. In that case, all events in the FIFO can be sent and processed by the HAL 
+  as soon as one batch must be reported.</p>
+<p>For example, if the following sensors are activated:</p>
+<ul>
+  <li>accelerometer batched with <code>maxReportingLatency</code> = 20s</li>
+  <li>gyroscope batched with <code>maxReportingLatency</code> = 5s</li>
+</ul>
+<p>Then the accelerometer batches can be reported at the same time the gyroscope 
+  batches are reported (every 5 seconds).<br/>
+  <br/>
+  Batch mode can be enabled or disabled at any time, in particular while the 
+  specified sensor is already enabled; and this shall not result in the loss of 
+  events.</p>
+<h2 id="fifo-allocation">FiFo allocation priority</h2>
+<p>On platforms in which hardware FIFO size is limited, the system designers may 
+  have to choose how much FIFO to reserve for each sensor. To help with this 
+  choice, here is a list of applications made possible when batching is 
+  implemented on the different sensors.</p>
+<p><strong>High value: Low power pedestrian dead reckoning</strong><br/>
+  Target batching time: 20 seconds to 1 minute<br/>
+  Sensors to batch:<br/>
+  - Step detector<br/>
+  - Rotation vector or game rotation vector at 5Hz<br/>
+  Gives us step and heading while letting the SoC go to Suspend.<br/>
+  <br/>
+  <strong>High value: Medium power activity/gesture recognition</strong><br/>
+  Target batching time: 3 seconds<br/>
+  Sensors to batch: accelerometer between 20Hz and 50Hz<br/>
+  Allows recognizing arbitrary activities and gestures without having<br/>
+  to keep the SoC fully awake while the data is collected.<br/>
+  <br/>
+  <strong>Medium-high value: Interrupt load reduction</strong><br/>
+  Target batching time: &lt; 1 second<br/>
+  Sensors to batch: any high frequency sensor.<br/>
+  If the gyroscope is set at 240Hz, even batching just 10 gyro events can<br/>
+  reduce the number of interrupts from 240/second to 24/second.<br/>
+  <br/>
+  <strong>Medium value: Continuous low frequency data collection</strong><br/>
+  Target batching time: &gt; 1 minute<br/>
+  Sensors to batch: barometer, humidity sensor, other low frequency<br/>
+  sensors.<br/>
+  Allows creating monitoring applications at low power.<br/>
+  <br/>
+  <strong>Medium value: Continuous full-sensors collection</strong><br/>
+  Target batching time: &gt; 1 minute<br/>
+  Sensors to batch: all, at high frequencies<br/>
+  Allows full collection of sensor data while leaving the SoC in<br/>
+  suspend mode. Only to consider if fifo space is not an issue.<br/>
+  <br/>
+  In each of the cases above, if <code>WAKE_UPON_FIFO_FULL</code> is implemented, the<br/>
+  applications might decide to let the SoC go to suspend, allowing for even<br/>
+  more power savings.</p>
+<h2 id="Dry-run">Dry run</h2>
+<p>If the flag <code>SENSORS_BATCH_DRY_RUN</code> is set, this function returns without
+  modifying the batch mode or the event period and has no side effects, but 
+  returns errors as usual (as it would if this flag was not set). This flag is 
+  used to check if batch mode is available for a given configuration, in 
+  particular for a given sensor at a given rate.</p>
+<h2 id="Return-values">Return values</h2>
+<p>Because sensors must be independent, the return value must not depend on the 
+  state of the system (whether another sensor is on or not), nor on whether the 
+  flag <code>SENSORS_BATCH_DRY_RUN</code> is set (in other words, if a batch call with
+  <code>SENSORS_BATCH_DRY_RUN</code> is successful, the same call without
+<code>SENSORS_BATCH_DRY_RUN</code>
+  must succeed as well).</p>
+<p>If successful, 0 is returned.</p>
+<p>If the specified sensor doesn't support batch mode, -EINVAL is returned.<br/>
+  If the specified sensor's trigger-mode is one-shot, -EINVAL is returned.</p>
+<p>If WAKE UPON FIFO_FULL is specified and the specified sensor's internal FIFO is 
+  too small to store at least 10 seconds worth of data at the given rate, -EINVAL 
+  is returned. Note that as stated above, this has to be determined at compile 
+  time and not based on the state of the system.</p>
+<p>If some other constraints above cannot be satisfied, -EINVAL is returned.<br/>
+  <br/>
+  Note: The <code>maxReportingLatency</code> parameter when &gt; 0 has no impact on
+  whether this function succeeds or fails.<br/>
+  <br/>
+  If <code>maxReportingLatency</code> is set to 0, this function must succeed.</p>
+<h2 id="Supporting-docs">Supporting documentation</h2>
+<p><a href="http://developer.android.com/guide/topics/sensors/index.html">Developer - Location and Sensors 
+  APIs</a></p>
+<p><a href="http://developer.android.com/guide/topics/sensors/sensors_overview.html">Developer - Sensors 
+  Overview</a></p>
+<p><a href="http://developer.android.com/reference/android/hardware/Sensor.html">Sensors SDK API 
+  reference</a></p>
+<p><a href="{@docRoot}devices/reference/sensors_8h_source.html">Android 
+  Hardware Abstraction Layer - sensors.h</a></p>
+<p><a href="http://developer.android.com/reference/android/hardware/SensorManager.html">SensorManager</a></p>
diff --git a/src/devices/sensors/composite_sensors.jd b/src/devices/sensors/composite_sensors.jd
new file mode 100644
index 0000000..d3fbed2
--- /dev/null
+++ b/src/devices/sensors/composite_sensors.jd
@@ -0,0 +1,534 @@
+page.title=Composite sensors
+@jd:body
+
+<!--
+    Copyright 2013 The Android Open Source Project
+
+    Licensed under the Apache License, Version 2.0 (the "License");
+    you may not use this file except in compliance with the License.
+    You may obtain a copy of the License at
+
+        http://www.apache.org/licenses/LICENSE-2.0
+
+    Unless required by applicable law or agreed to in writing, software
+    distributed under the License is distributed on an "AS IS" BASIS,
+    WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+    See the License for the specific language governing permissions and
+    limitations under the License.
+-->
+<div id="qv-wrapper">
+  <div id="qv">
+    <h2>In this document</h2>
+    <ol id="auto-toc">
+    </ol>
+  </div>
+</div>
+
+<h2 id="summary">Composite sensor type summary</h2>
+
+<p>The following table lists the composite sensor types and their categories, 
+underlying base sensors, and trigger modes. Certain base sensors are required of 
+each sensor for accuracy. Using other tools to approximate results should be 
+avoided as they will invariably provide a poor user experience.</p>
+
+<p>When there is no gyroscope on the device, and
+only when there is no gyroscope, you may implement the rotation vector and
+other composite sensors without using the gyroscope.</p>
+<table>
+  <tr>
+<th>Sensor type</th>
+<th>Category</th>
+<th>Underlying base sensor</th>
+<th>Trigger mode</th>
+</tr>
+<tr>
+<td>Game rotation vector</td>
+<td>Attitude</td>
+<td>Accelerometer, Gyroscope
+MUST NOT USE Magnetometer</td>
+<td>Continuous</td>
+</tr>
+<tr>
+<td>Geomagnetic rotation vector (Magnetometer) <img src="images/battery_icon.png" width="20" height="20" alt="Low power sensor" /></td>
+<td>Attitude</td>
+<td>Accelerometer, Magnetometer
+NOT Gyroscope</td>
+<td>Continuous</td>
+</tr>
+<tr>
+<td>Gravity</td>
+<td>Attitude</td>
+<td>Accelerometer, Gyroscope</td>
+<td>Continuous</td>
+</tr>
+<tr>
+<td>Gyroscope uncalibrated</td>
+<td>Uncalibrated</td>
+<td>Gyroscope</td>
+<td>Continuous</td>
+</tr>
+<tr>
+<td>Linear acceleration</td>
+<td>Activity</td>
+<td>Accelerometer, Gyroscope
+AND Magnetometer</td>
+<td>Continuous</td>
+</tr>
+<tr>
+<td>Magnetic field uncalibrated</td>
+<td>Uncalibrated</td>
+<td>Magnetometer</td>
+<td>Continuous</td>
+</tr>
+<tr>
+<td>Orientation</td>
+<td>Attitude</td>
+<td>Accelerometer, Magnetometer
+PREFERRED Gyroscope</td>
+<td>Continuous</td>
+</tr>
+<tr>
+<td>Rotation vector</td>
+<td>Attitude</td>
+<td>Accelerometer, Gyroscope
+AND Magnetometer</td>
+<td>Continuous</td>
+</tr>
+<tr>
+<td>Significant motion
+  <img src="images/battery_icon.png" width="20" height="20" alt="Low power sensor" /></td>
+<td>Activity</td>
+<td>Accelerometer (or another as long as very low power)</td>
+<td>One-shot</td>
+</tr>
+<tr>
+<td>Step counter
+  <img src="images/battery_icon.png" width="20" height="20" alt="Low power sensor" /></td>
+<td>Activity</td>
+<td>Accelerometer</td>
+<td>On-
+change</td>
+</tr>
+<tr>
+<td>Step detector
+  <img src="images/battery_icon.png" width="20" height="20" alt="Low power sensor" /></td>
+<td>Activity</td>
+<td>Accelerometer</td>
+<td>Special</td>
+</tr>
+</table>
+
+<p><img src="images/battery_icon.png" alt="low power icon"/> = 
+Low power sensor</p>
+
+<h2 id="Activity">Activity sensors</h2>
+
+<h3 id="acceleration">Linear acceleration</h3>
+
+<p><em>Underlying base sensor(s): Accelerometer, Gyroscope AND Magnetometer<br/>
+Trigger-mode: Continuous<br/>
+Wake-up sensor: No</em></p>
+
+<p>Indicates the linear acceleration of the device in device coordinates, not 
+including gravity. The output is conceptually:<br/>
+output of <code>TYPE_ACCELERATION</code> minus output of
+<code>TYPE_GRAVITY</code>.</p>
+
+<p>Readings on all axes should be close to 0 when the device is immobile. Units are 
+m/s^2. The coordinate system is the same as is used for the acceleration sensor.</p>
+
+<h3 id="Significant">Significant motion</h3>
+
+<p><em>Underlying base sensor(s): Accelerometer (or another as long as low power)<br/>
+Trigger-mode: One-shot<br/>
+Wake-up sensor: Yes</em></p>
+
+<p>Significant motion allows a device to stay in suspend and idle modes longer and 
+save power. It does this by relying upon last known location until the device 
+experiences "significant motion." Such a movement would trigger on mode and a 
+call to retrieve new location.</p>
+
+<p>Here is an example on how the platform can use significant motion to save
+power. When users are moving, their locations are updated frequently. After some period 
+of inactivity, significant motion presumes the device is static and stops 
+seeking location updates. It instead registers the last known location as valid. 
+The device is then allowed to go into idle and then suspend mode.</p>
+
+<p>This sensor exists to save power by keeping the SoC in suspend mode when the 
+device is at rest. A sensor of this type triggers an event each time significant 
+motion is detected and automatically disables itself. The only allowed value to 
+return is 1.0.</p>
+
+<p>A significant motion is a motion that might lead to a change in the user 
+location. Examples of such significant motions are:</p>
+
+<ul>
+<li>walking or biking</li>
+<li>sitting in a moving car, coach or train</li>
+</ul>
+
+<p>Examples of situations that should not trigger significant motion:</p>
+
+<ul>
+<li>phone in pocket and person is not moving</li>
+<li>phone is on a table and the table shakes a bit due to nearby traffic or 
+washing machine</li>
+</ul>
+
+<p>This sensor makes a tradeoff for power consumption that may result in a small 
+amount of false negatives. This is done for a few reasons:</p>
+
+<ol>
+<li>The goal of this sensor is to save power.</li>
+<li>Triggering an event when the user is not moving (false positive) is costly in 
+terms of power, so it should be avoided.</li>
+<li>Not triggering an event when the user is moving (false negative) is 
+acceptable as long as it is not done repeatedly. If the user has been walking 
+for 10 seconds, not triggering an event within those 10 seconds is not 
+acceptable.</li>
+</ol>
+
+<p>To ensure the applications have the time to receive the significant motion event 
+before the application processor goes back to sleep, the driver must hold a 
+"timeout wake lock" for 200 milliseconds for every wake-up sensor. That is, the 
+application processor should not be allowed to go back to sleep in the 200 
+milliseconds following a wake-up interrupt.</p>
+
+<p><strong>Important</strong>: This sensor is very different from the other types in that it
+must work when the screen is off without the need for holding a partial wake 
+lock (other than the timeout wake lock) and MUST allow the SoC to go into 
+suspend. When significant motion is detected, the sensor must awaken the SoC and 
+the event be reported.</p>
+
+<p>If a particular device cannot support this mode of operation, then this sensor 
+type <strong>must not</strong> be reported by the HAL. I.e., it is not acceptable to "emulate" 
+this sensor in the HAL.</p>
+
+<p>When the sensor is not activated, it must also be deactivated in the hardware; 
+it must not wake up the SoC anymore, even in case of significant motion.</p>
+
+<p>setDelay() has no effect and is ignored.</p>
+
+<p>Once a "significant motion" event is returned, a sensor of this type must 
+disable itself automatically, as if activate(..., 0) had been called.</p>
+
+<h3 id="detector">Step detector</h3>
+
+<p><em>Underlying base sensor(s): Accelerometer<br/>
+Trigger-mode: Special<br/>
+Wake-up sensor: No</em></p>
+
+<p>A sensor of this type triggers an event each time a step is taken by the user. 
+The only allowed value to return is 1.0 and an event is generated for each step. 
+Like with any other event, the timestamp indicates when the event (here the 
+step) occurred. This corresponds to when the foot hit the ground, generating a 
+high variation in acceleration.</p>
+
+<p>Compared to the step counter, the step detector should have a lower latency 
+(less than 2 seconds). Both the step detector and the step counter detect when 
+the user is walking, running and walking up the stairs. They should not trigger 
+when the user is biking, driving or in other vehicles.</p>
+
+<p>While this sensor operates, it shall not disrupt any other sensors, in 
+particular, the accelerometer; it might very well be in use.</p>
+
+<p>This sensor must be low power. That is, if the step detection cannot be done in 
+hardware, this sensor should not be defined. Also, when the step detector is 
+activated and the accelerometer is not, only steps should trigger interrupts 
+(not accelerometer data).</p>
+
+<p>setDelay() has no impact on this sensor type.</p>
+
+<h3 id="counter">Step counter</h3>
+
+<p><em>Underlying base sensor(s): Accelerometer<br/>
+Trigger-mode: On-change<br/>
+Wake-up sensor: No</em></p>
+
+<p>A sensor of this type returns the number of steps taken by the user since the 
+last reboot while activated. The value is returned as a uint64_t and is reset to 
+zero only on a system reboot.</p>
+
+<p>The timestamp of the event is set to the time when the last step for that event 
+was taken.<br/>
+See the <a href="#detector">Step detector</a> 
+sensor type for the signification of the time of a step.</p>
+
+<p>Compared to the step detector, the step counter can have a higher latency (less 
+than 10 seconds).  Thanks to this latency, this sensor has a high accuracy; the 
+step count after a full day of measures should be within 10% of the real step 
+count. Both the step detector and the step counter detect when the user is 
+walking, running and walking up the stairs. They should not trigger when the 
+user is biking, driving or in other vehicles.</p>
+
+<p><strong>Important note</strong>: This sensor is different from other types in that it must work 
+when the screen is off without the need of holding a partial wake-lock and MUST 
+allow the SoC to go into suspend.</p>
+
+<p>While in suspend mode this sensor must stay active. No events are reported 
+during that time but steps continue to be accounted for; an event will be 
+reported as soon as the SoC resumes if the timeout has expired.</p>
+
+<p>In other words, when the screen is off and the device is allowed to go into 
+suspend mode, it should not be woken up, regardless of the setDelay() value. But 
+the steps shall continue to be counted.</p>
+
+<p>The driver must however ensure the internal step count never overflows. The 
+minimum size of the hardware's internal counter shall be 16 bits. (This 
+restriction is here to avoid too frequent wake-ups when the delay is very 
+large.) It is allowed in this situation to wake the SoC up so the driver can do 
+the counter maintenance.</p>
+
+<p>While this sensor operates, it shall not disrupt any other sensors, in 
+particular, the accelerometer; it might very well be in use.</p>
+
+<p>If a particular device cannot support these modes of operation, then this sensor 
+type <strong>must not</strong> be reported by the HAL. I.e., it is not acceptable to "emulate" 
+this sensor in the HAL.</p>
+
+<p>This sensor must be low power. That is, if the step detection cannot be done in 
+hardware, this sensor should not be defined. Also, when the step counter is 
+activated and the accelerometer is not, only steps should trigger interrupts 
+(not accelerometer data).</p>
+
+<h2 id="Attitude">Attitude sensors</h2>
+
+<h3 id="Rotation-vector">Rotation vector</h3>
+
+<p><em>Underlying base sensor(s): Accelerometer, Gyroscope AND Magnetometer<br/>
+Trigger-mode: Continuous<br/>
+Wake-up sensor: No</em></p>
+
+<p>The rotation vector symbolizes the orientation of the device relative to the 
+East-North-Up coordinates frame. It is usually obtained by integration of 
+accelerometer, gyroscope and magnetometer readings.</p>
+
+<p>The East-North-Up coordinate system is defined as a direct orthonormal basis 
+where:</p>
+
+<ul>
+<li>X points east and is tangential to the ground.</li>
+<li>Y points north and is tangential to the ground.</li>
+<li>Z points towards the sky and is perpendicular to the ground.</li>
+</ul>
+
+<p>The orientation of the phone is represented by the rotation necessary to align 
+the East-North-Up coordinates with the phone's coordinates. That is, applying 
+the rotation to the world frame (X,Y,Z) would align them with the phone 
+coordinates (x,y,z).</p>
+
+<p>The rotation can be seen as rotating the phone by an angle theta around an axis 
+rot_axis to go from the reference (East-North-Up aligned) device orientation to 
+the current device orientation.</p>
+
+<p>The rotation is encoded as the four (reordered) components of a unit quaternion:</p>
+
+<ul>
+<li><code>sensors_event_t.data[0]</code> = rot_axis.x*sin(theta/2)</li>
+<li><code>sensors_event_t.data[1]</code> = rot_axis.y*sin(theta/2)</li>
+<li><code>sensors_event_t.data[2]</code> = rot_axis.z*sin(theta/2)</li>
+<li><code>sensors_event_t.data[3]</code> = cos(theta/2)</li>
+</ul>
+
+<p>Where:</p>
+
+<ul>
+<li>rot_axis.x,y,z are the East-North-Up coordinates of a unit length vector 
+representing the rotation axis</li>
+<li>theta is the rotation angle</li>
+</ul>
+
+<p>The quaternion must be of norm 1. (It is a unit quaternion.) Failure to ensure 
+this will cause erratic client behaviour.</p>
+
+<p>In addition, this sensor reports an estimated heading accuracy:<br/>
+<code>sensors_event_t.data[4]</code> = estimated_accuracy (in radians)</p>
+
+<p>The heading error must be less than estimated_accuracy 95% of the time. This 
+sensor must use a gyroscope and an accelerometer as main orientation change 
+input.</p>
+
+<p>This sensor should also include magnetometer input to make up for gyro drift, 
+but it cannot be implemented using only a magnetometer.</p>
+
+<h3 id="Game-rotation">Game rotation vector</h3>
+
+<p><em>Underlying base sensor(s): Accelerometer, Gyroscope NOT Magnetometer<br/>
+Trigger-mode: Continuous<br/>
+Wake-up sensor: No</em></p>
+
+<p>Similar to the <a href="#Rotation-vector">rotation vector</a> sensor but not using 
+the geomagnetic field. Therefore the Y axis doesn't point north but instead to 
+some other reference. That reference is allowed to drift by the same order of 
+magnitude as the gyroscope drifts around the Z axis.</p>
+
+<p>This sensor does not report an estimated heading accuracy:<br/>
+<code>sensors_event_t.data[4]</code> is reserved and should be set to 0</p>
+
+<p>In an ideal case, a phone rotated and returned to the same real-world 
+orientation should report the same game rotation vector (without using the 
+earth's geomagnetic field).</p>
+
+<p>This sensor must be based on a gyroscope. It cannot be implemented using a 
+magnetometer.</p>
+
+<h3 id="Gravity">Gravity</h3>
+
+<p><em>Underlying base sensor(s): Accelerometer, Gyroscope NOT Magnetometer<br/>
+Trigger-mode: Continuous<br/>
+Wake-up sensor: No</em></p>
+
+<p>The gravity output of this sensor indicates the direction and magnitude of 
+gravity in the device's coordinates. Units are m/s^2. On Earth, the magnitude is 
+9.8 m/s^2. The coordinate system is the same as is used for the acceleration 
+sensor. When the device is at rest, the output of the gravity sensor should be 
+identical to that of the accelerometer.</p>
+
+<h3 id="Magnetometer">Geomagnetic rotation vector (Magnetometer)</h3>
+
+<p><em>Underlying base sensor(s): Accelerometer, Magnetometer NOT Gyroscope<br/>
+Trigger-mode: Continuous<br/>
+Wake-up sensor: No</em></p>
+
+<p>This sensor is similar to the <a href="#Rotation-vector">rotation vector</a> sensor 
+but using a magnetometer instead of a gyroscope.</p>
+
+<p>This sensor must be based on a magnetometer. It cannot be implemented using a 
+gyroscope, and gyroscope input cannot be used by this sensor.</p>
+
+<p>Just like the rotation vector sensor, this sensor reports an estimated heading 
+accuracy:<br/>
+<code>sensors_event_t.data[4]</code> = estimated_accuracy (in radians)</p>
+
+<p>The heading error must be less than estimated_accuracy 95% of the time.</p>
+
+<p>See the <a href="#Rotation-vector">rotation vector</a> sensor description for more 
+details.</p>
+
+<h3 id="Orientation">Orientation</h3>
+
+<p><em>Underlying base sensor(s): Accelerometer, Magnetometer PREFERRED Gyroscope<br/>
+Trigger-mode: Continuous<br/>
+Wake-up sensor: No</em></p>
+
+<p><strong>Note</strong>: This is an older sensor type that has been 
+deprecated in the Android SDK although not yet in the HAL. It has been replaced 
+by the rotation vector sensor, which is more clearly defined, requires a 
+gyroscope, and therefore provides more accurate results. Use the rotation vector 
+sensor over the orientation sensor whenever possible.</p>
+
+<p>The orientation sensor tracks the attitude of the device. All values are angles 
+in degrees. Orientation sensors return sensor events for all three axes at a 
+constant rate defined by setDelay().</p>
+
+<ul>
+<li>azimuth: angle between the magnetic north direction and the Y axis, around 
+the Z axis (0&lt;=azimuth&lt;360). 0=North, 90=East, 180=South, 270=West</li>
+<li>pitch: Rotation around X axis (-180&lt;=pitch&lt;=180), with positive values when 
+the z-axis moves toward the y-axis.</li>
+<li>roll: Rotation around Y axis (-90&lt;=roll&lt;=90), with positive values when the 
+x-axis moves towards the z-axis.</li>
+</ul>
+
+<p>Please note, for historical reasons the roll angle is positive in the clockwise 
+direction. (Mathematically speaking, it should be positive in the 
+counter-clockwise direction):</p>
+
+<div class="figure" style="width:264px">
+  <img src="images/axis_positive_roll.png" alt="Depiction of orientation relative to a device" height="253" />
+  <p class="img-caption">
+    <strong>Figure 2.</strong> Orientation relative to a device.
+  </p>
+</div>
+
+<p>This definition is different from yaw, pitch and roll used in aviation where the 
+X axis is along the long side of the plane (tail to nose).</p>
+
+<h2 id="Uncalibrated">Uncalibrated sensors</h2>
+
+<p>Uncalibrated sensors provide more raw results and may include some bias but also 
+contain fewer "jumps" from corrections applied through calibration. Some 
+applications may prefer these uncalibrated results as smoother and more 
+reliable. For instance, if an application is attempting to conduct its own 
+sensor fusion, introducing calibrations can actually distort results.</p>
+
+<h3 id="Gyroscope-uncalibrated">Gyroscope uncalibrated</h3>
+
+<p><em>Underlying base sensor(s): Gyroscope<br/>
+Trigger-mode: Continuous<br/>
+Wake-up sensor: No</em></p>
+
+<p>The uncalibrated gyroscope is useful for post-processing and melding orientation 
+data. All values are in radians/second and measure the rate of rotation around 
+the X, Y and Z axis. An estimation of the drift on each axis is reported as 
+well.</p>
+
+<p>No gyro-drift compensation shall be performed. Factory calibration and 
+temperature compensation should still be applied to the rate of rotation 
+(angular speeds).</p>
+
+<p>The coordinate system is the same as is used for the acceleration sensor. 
+Rotation is positive in the counter-clockwise direction (right-hand rule). That 
+is, an observer looking from some positive location on the x, y or z axis at a 
+device positioned on the origin would report positive rotation if the device 
+appeared to be rotating counter clockwise. Note that this is the standard 
+mathematical definition of positive rotation and does not agree with the 
+definition of roll given elsewhere.</p>
+
+<p>The range should at least be 17.45 rad/s (ie: ~1000 deg/s).</p>
+
+<p>Content of an uncalibrated_gyro event (units are rad/sec):</p>
+
+<ul>
+<li>x_uncalib : angular speed (w/o drift compensation) around the X axis</li>
+<li>y_uncalib : angular speed (w/o drift compensation) around the Y axis</li>
+<li>z_uncalib : angular speed (w/o drift compensation) around the Z axis</li>
+<li>x_bias : estimated drift around X axis in rad/s</li>
+<li>y_bias : estimated drift around Y axis in rad/s</li>
+<li>z_bias : estimated drift around Z axis in rad/s</li>
+</ul>
+
+<p>If the implementation is not able to estimate the drift, then this sensor <strong>must 
+not</strong> be reported by this HAL. Instead, the regular 
+<a href="{@docRoot}devices/sensors/base_triggers.html#Gyroscope">Gyroscope</a> sensor is used without drift compensation.</p>
+
+<p>If this sensor is present, then the corresponding Gyroscope sensor must be 
+present and both must return the same <code>sensor_t::name</code> and
+<code>sensor_t::vendor</code>.</p>
+
+<h3 id="Magnetic-field-uncalibrated">Magnetic field uncalibrated</h3>
+
+<p><em>Underlying base sensor(s): Magnetometer<br/>
+Trigger-mode: Continuous<br/>
+Wake-up sensor: No</em></p>
+
+<p>Similar to <a href="{@docRoot}devices/sensors/base_triggers.html#Geomagnetic">Geomagnetic field</a> sensor, but the hard 
+iron calibration is reported separately instead of being included in the 
+measurement. The uncalibrated magnetometer allows the system to handle bad hard 
+iron estimation.</p>
+
+<p>Factory calibration and temperature compensation should still be applied to the 
+"uncalibrated" measurement. Separating away the hard iron calibration estimation 
+allows the system to better recover from bad hard iron estimation.</p>
+
+<p>All values are in micro-Tesla (uT) and measure the ambient magnetic field in the 
+X, Y and Z axis. Assumptions that the magnetic field is due to the Earth's poles 
+should be avoided.</p>
+
+<p>The uncalibrated_magnetic event contains three fields for uncalibrated measurement: x_uncalib, y_uncalib, z_uncalib. Each is a component of the 
+measured magnetic field, with soft iron and temperature compensation applied, 
+but not hard iron calibration. These values should be continuous (no 
+re-calibration should cause a jump).</p>
+
+<p>The uncalibrated_magnetic event contains three fields for hard iron bias estimates: x_bias, y_bias, z_bias. Each field is a component of the estimated 
+hard iron calibration. They represent the offsets to apply to the calibrated 
+readings to obtain uncalibrated readings (x_uncalib ~= x_calibrated + x_bias). 
+These values are expected to jump as soon as the estimate of the hard iron 
+changes, and they should be stable the rest of the time.</p>
+
+<p>If this sensor is present, then the corresponding Geomagnetic field sensor must 
+be present and both must return the same  <code>sensor_t::name</code> and
+<code>sensor_t::vendor</code>.</p>
+
+<p>See the <a href="{@docRoot}devices/sensors/base_triggers.html#Geomagnetic">geomagnetic field</a> sensor description for more 
+information.<br/></p>
diff --git a/src/devices/sensors/images/axis_positive_roll.png b/src/devices/sensors/images/axis_positive_roll.png
new file mode 100644
index 0000000..c2bf6ba
--- /dev/null
+++ b/src/devices/sensors/images/axis_positive_roll.png
Binary files differ
diff --git a/src/devices/sensors/images/battery_icon.png b/src/devices/sensors/images/battery_icon.png
new file mode 100644
index 0000000..4cd15b8
--- /dev/null
+++ b/src/devices/sensors/images/battery_icon.png
Binary files differ
diff --git a/src/devices/sensors/index.jd b/src/devices/sensors/index.jd
new file mode 100644
index 0000000..e5fa438
--- /dev/null
+++ b/src/devices/sensors/index.jd
@@ -0,0 +1,261 @@
+page.title=Sensors HAL overview
+@jd:body
+
+<!--
+    Copyright 2013 The Android Open Source Project
+
+    Licensed under the Apache License, Version 2.0 (the "License");
+    you may not use this file except in compliance with the License.
+    You may obtain a copy of the License at
+
+        http://www.apache.org/licenses/LICENSE-2.0
+
+    Unless required by applicable law or agreed to in writing, software
+    distributed under the License is distributed on an "AS IS" BASIS,
+    WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+    See the License for the specific language governing permissions and
+    limitations under the License.
+-->
+<div id="qv-wrapper">
+  <div id="qv">
+    <h2>In this document</h2>
+    <ol id="auto-toc">
+    </ol>
+  </div>
+</div>
+
+<h2 id="intro">Introduction</h2>
+<p><a href="http://developer.android.com/guide/topics/sensors/sensors_overview.html">Android 
+  sensors</a> give applications access to a mobile device's underlying base sensor(s): 
+  accelerometer, gyroscope, and magnetometer. Manufacturers develop the drivers 
+  that define additional composite sensor types from those base sensors. For 
+  instance, Android offers both calibrated and uncalibrated gyroscopes, a 
+  geomagnetic rotation vector and a game rotation vector. This variety gives 
+  developers some flexibility in tuning applications for battery life optimization 
+  and accuracy.</p>
+<p>The <a href="{@docRoot}devices/reference/sensors_8h_source.html">Sensors
+Hardware Abstraction Layer (HAL) API</a> is the interface between the hardware drivers 
+and the Android framework; the <a href="http://developer.android.com/reference/android/hardware/Sensor.html">Sensors Software Development Kit (SDK) 
+  API</a> is the interface between the Android framework and the Java applications. Please note, 
+  the Sensors HAL API described in this documentation is not identical to the 
+  Sensors SDK API described on <a href="http://developer.android.com/reference/android/hardware/Sensor.html">developer.android.com</a>. 
+  For example, some sensors that are deprecated in the SDK may still exist in the 
+  HAL, and vice versa.</p>
+<p>Similarly, audio recorders, Global Positioning System (GPS) devices, and 
+  accessory (pluggable) sensors are not supported by the Android Sensors HAL API 
+  described here. This API covers sensors that are physically part of the device 
+  only. Please see the <a href="{@docRoot}devices/audio.html">Audio</a>, <a href="{@docRoot}devices/reference/gps_8h.html">Location Strategies</a>, 
+  and the <a href="{@docRoot}accessories/index.html">Accessories</a> section 
+  for information on those devices.</p>
+<p>Application framework<br/>
+  At the application framework level is the app code, which utilizes the <a href="http://developer.android.com/reference/android/hardware/package-summary.html">android.hardware</a> APIs to interact with the sensors hardware. Internally, this code calls 
+  corresponding JNI glue classes to access the native code that interacts with the 
+  sensors hardware.</p>
+<p>JNI<br/>
+  The JNI code associated with <a href="http://developer.android.com/reference/android/hardware/package-summary.html">android.hardware</a> is located in the frameworks/base/core/jni/ directory. This code calls the lower 
+  level native code to obtain access to the sensor hardware.</p>
+<p>Native framework<br/>
+  The native framework is defined in <code>frameworks/native/</code> and provides a native
+  equivalent to the <a href="http://developer.android.com/reference/android/hardware/package-summary.html">android.hardware</a> package. The native framework calls the Binder IPC proxies to obtain access to 
+  sensor-specific services.</p>
+<p>Binder IPC<br/>
+  The Binder IPC proxies facilitate communication over process boundaries.</p>
+<p>HAL<br/>
+  The Hardware Abstraction Layer (HAL) defines the standard interface that sensor 
+  services call into and that you must implement to have your sensor hardware 
+  function correctly. The sensor HAL interfaces are located in 
+  <code>hardware/libhardware/include/hardware</code>. See <a
+href="http://source.android.com/devices/reference/sensors_8h.html">sensors.h</a> for
+additional details.</p>
+<p>Kernel Driver<br/>
+  The sensors driver interacts with the hardware and your implementation of the 
+  HAL. The HAL is driver-agnostic.</p>
+<h3 id="axis-def">Sensor axis definition</h3>
+<p>The sensor event values are expressed in a specific frame that is static 
+  relative to the phone. This API is relative only to the NATURAL orientation of 
+  the screen. In other words:</p>
+<ul>
+  <li>the axes are not swapped when the device's screen orientation changes.</li>
+  <li>higher level services <em>may</em> perform this transformation.</li>
+</ul>
+<div class="figure" style="width:269px"> <img src="http://developer.android.com/images/axis_device.png" alt="Coordinate system relative to device for Sensor
+    API" height="225" />
+  <p class="img-caption"> <strong>Figure 1.</strong> Coordinate system (relative to a device) that's used by the Sensor
+    API. </p>
+</div>
+<h3 id="accuracy">Accuracy</h3>
+<p>The sensors included by the manufacturer must be accurate and precise to meet
+the expectations of application developers. The sensors included in Android devices are 
+  tested for sensor interaction and accuracy as part of the <a href="{@docRoot}compatibility/index.html">Android Compatibility 
+    program</a> starting in the 
+  Android 4.4 release. Testing will continue to be improved in future releases. 
+  See the <em>Sensors</em> section of the Android Compatibility Definition Document (CDD) 
+  for the exact requirements.</p>
+<h3 id="power">Power consumption</h3>
+<p>Some defined sensors are higher power than others. Others are lower power by 
+  design and should be implemented as such with their processing done in the 
+  hardware. This means they should not require the application processor to be 
+  running. Here are the low-power sensors:</p>
+<ul>
+  <li><a href="{@docRoot}devices/sensors/composite_sensors.html#Magnetometer">Geomagnetic rotation vector</a></li>
+  <li><a href="{@docRoot}devices/sensors/composite_sensors.html#Significant">Significant motion</a></li>
+  <li><a href="{@docRoot}devices/sensors/composite_sensors.html#counter">Step counter</a></li>
+  <li><a href="{@docRoot}devices/sensors/composite_sensors.html#detector">Step detector</a></li>
+</ul>
+<p>They are accompanied by a low-power <img src="images/battery_icon.png"
+alt="low-power sensors"/>
+  icon in the <a href="{@docRoot}devices/sensors/composite_sensors.html#summary">Sensor summary</a> table. </p>
+<p>These sensor types cannot be implemented at high power as their primary benefit 
+  is low battery use. It is better to not implement a low-power sensor at all 
+  rather than implement it as high power.</p>
+<p>Composite low-power sensor types, such as the step detector, must have their 
+  processing conducted in the hardware; power use is much lower than if done in 
+  the software. Power use is low on small microprocessors and even lower still on 
+  application-specific integrated circuits (ASICs). A hardware implementation of 
+  composite sensor types can also make use of more raw sensor data and a better 
+  synchronization between sensors.</p>
+<h3 id="release">HAL release cycle</h3>
+<p>Functionality is tied to versions of the API. Android maintains two versions of 
+  the Sensors HAL API per release. For instance, if version 1 was the latest and 
+  version 1.1 is released, the version prior to 1 will no longer be supported upon 
+  that release. Only the two latest versions of the Sensors HAL API are supported.</p>
+<h2 id="interaction">Interaction</h2>
+<h3 id="concurrent">Concurrent running</h3>
+<p>Android sensors must work independently of one another. Activating one sensor 
+  shall not deactivate another sensor. Activating one shall not reduce the rate of 
+  another. This is a key element of compatibility testing.</p>
+<h3 id="suspend">Interaction with suspend mode</h3>
+<p>Unless otherwise noted, an enabled sensor shall not prevent the system on a chip 
+  (SoC) from going into suspend mode. It is the responsibility of applications to keep a 
+  partial <a href="http://developer.android.com/reference/android/os/PowerManager.WakeLock.html">wake 
+    lock</a> should they wish to receive sensor events while the screen is off. While in 
+  suspend mode, and unless otherwise noted (<a
+href="{@docRoot}devices/sensors/batching.html">batch</a> mode 
+  and sensor particularities), enabled sensors' events are lost.</p>
+<p>Note that conceptually, the sensor itself is not deactivated while in suspend 
+  mode. Instead, the data it returns is missing. The oldest data is dropped to 
+  accommodate the latest data. As soon as the SoC gets out of suspend mode, 
+  operations resume as normal.</p>
+<p>Most applications should either hold a wake lock to ensure the system doesn't go 
+  to suspend, or unregister from the sensors when they do not need them, unless 
+  batch mode is active. When batching, sensors must continue to fill their 
+  internal FIFO. (See the documentation of <a
+href="{@docRoot}devices/sensors/batching.html">batch</a> mode 
+  to learn how suspend interacts with batch mode.)</p>
+<p>Wake-up sensors are a notable exception to the above. Wake-up sensors must
+wake up the SoC to deliver events. They must still let the SoC go into suspend
+mode, but must also wake it up when an event is triggered.</p>
+<h3 id="fusion">Sensor fusion and virtual sensors</h3>
+<p>Many composite sensor types are or can be implemented as virtual sensors from 
+  underlying base sensors on the device. Examples of composite sensors types 
+  include the rotation vector sensor, orientation sensor, step detector and step 
+  counter.</p>
+<p>From the point of view of this API, these virtual sensors <strong>must</strong> appear as 
+  real, individual sensors. It is the responsibility of the driver and HAL to make 
+  sure this is the case.</p>
+<p>In particular, all sensors must be able to function concurrently. For example, 
+  if defining both an accelerometer and a step counter, then both must be able to 
+  work concurrently.</p>
+<h3 id="hal">HAL interface</h3>
+<p>These are the common sensor calls expected at the HAL level:</p>
+<ol>
+  <li><em>getSensorList()</em> - Gets the list of all sensors.</li>
+  <li><em>activate()</em> - Starts or stops the specified sensor.</li>
+  <li><em>batch()</em> - Sets parameters to group event data collection and optimize power use.</li>
+  <li><em>setDelay()</em> - Sets the event's period in 
+    nanoseconds for a given sensor.</li>
+  <li><em>flush()</em> - Flush adds an event to the end of the 
+    &quot;batch mode&quot; FIFO for the specified sensor and flushes the FIFO.</li>
+  <li><em>poll()</em> - Returns an array of sensor data. </li>
+</ol>
+<p>Please note, the implementation must be thread safe and allow these values to be 
+  called from different threads.</p>
+<h4 id="getSensorList">getSensorList(sensor_type)</h4>
+<p>Provide the list of sensors implemented by the HAL for the given sensor type. </p>
+<p>Developers may then make multiple calls to get sensors of different types or use 
+  <code>Sensor.TYPE_ALL</code> to get all the sensors. See getSensorList() defined on
+  developer.android.com for more details.</p>
+<h4 id="activate">activate(sensor, true/false)</h4>
+<pre>
+            int (*activate)(struct sensors_poll_device_t *dev,
+                    int handle, int enabled);</pre>
+<p>Activates or deactivates the sensor with the specified handle. Handles must be 
+  higher than <code>SENSORS_HANDLE_BASE</code> and must be unique. A handle identifies a given
+  sensor. The handle is used to activate and/or deactivate sensors. In this 
+  version of the API, there can only be 256 handles.</p>
+<p>The handle is the handle of the sensor to change. The enabled argument is set to 
+  1 to enable or 0 to disable the sensor.</p>
+<p>Unless otherwise noted in the individual sensor type descriptions, an activated 
+  sensor never prevents the SoC from going into suspend mode; that is, the HAL 
+  shall not hold a partial wake lock on behalf of applications.<br/>
+  <br/>
+  One-shot sensors deactivate themselves automatically upon receiving an event, 
+  and they must still accept to be deactivated through a call to activate(..., 
+  ..., 0).<br/>
+  <br/>
+  If &quot;enabled&quot; is 1 and the sensor is already activated, this function is a no-op 
+  and succeeds. If &quot;enabled&quot; is 0 and the sensor is already deactivated, this 
+  function is a no-op and succeeds. This returns 0 on success and a negative errno 
+  code otherwise.</p>
+<h4 id="batch">batch(sensor, batching parameters)</h4>
+<pre>
+            int (*batch)(struct sensors_poll_device_1* dev,
+                   int handle, int flags, int64_t period_ns, int64_t timeout);
+</pre>
+<p>Sets parameters to group event data collection and reduce power use. Batching 
+  can enable significant power savings by allowing the application processor to 
+  sleep rather than awake for each notification. Instead, these notifications can 
+  be grouped and processed together. See the <a
+href="{@docRoot}devices/sensors/batching.html">Batching</a> section for details.</p>
+<h4 id="setDelay">setDelay(sensor, delay)</h4>
+<pre>
+            int (*setDelay)(struct sensors_poll_device_t *dev,
+                    int handle, int64_t period_ns);
+</pre>
+<p>Sets the event's period in nanoseconds for a given sensor. What the
+<code>period_ns</code> parameter means depends on the specified sensor's trigger mode:</p>
+<ul>
+  <li>Continuous: setDelay() sets the sampling rate.</li>
+  <li>On-change: setDelay() limits the delivery rate of events.</li>
+  <li>One-shot: setDelay() is ignored. It has no effect.</li>
+  <li>Special: See specific sensor type descriptions.</li>
+</ul>
+<p>For continuous and on-change sensors, if the requested value is less than
+<code>sensor_t::minDelay</code>, then it's silently clamped to
+<code>sensor_t::minDelay</code> unless <code>sensor_t::minDelay</code> is 0,
+in which case it is clamped to &gt;= 1ms. setDelay will not be called when the sensor is
+in batching mode. In this case, batch() will be called with the new period. Return 0 if successful, 
+&lt; 0 on error.</p>
+<p>When calculating the sampling period T in setDelay (or batch), the actual period
+should be smaller than T and no smaller than T/2. Finer granularity is not
+necessary.</p>
+<h4 id="flush">flush()</h4>
+<pre>
+            int (*flush)(struct sensors_poll_device_1* dev, int handle);
+</pre>
+<p>Flush adds a <code>META_DATA_FLUSH_COMPLETE</code> event
+(<code>sensors_event_meta_data_t</code>) to the
+  end of the &quot;batch mode&quot; FIFO for the specified sensor and flushes the FIFO; 
+  those events are delivered as usual (i.e.: as if the batch timeout had expired) 
+  and removed from the FIFO.<br/>
+  <br/>
+  The flush happens asynchronously (i.e.: this function must return immediately). 
+  If the implementation uses a single FIFO for several sensors, that FIFO is 
+  flushed and the <code>META_DATA_FLUSH_COMPLETE</code> event is added only for the specified
+  sensor.<br/>
+  <br/>
+  If the specified sensor wasn't in batch mode, flush succeeds and promptly sends 
+  a <code>META_DATA_FLUSH_COMPLETE</code> event for that sensor.</p>
+<p>If the FIFO was empty at the time of the call, flush returns 0 (success) and 
+  promptly sends a <code>META_DATA_FLUSH_COMPLETE</code> event for that sensor.<br/>
+  <br/>
+  If the specified sensor wasn't enabled, flush returns -EINVAL. Returns 0 on 
+  success, negative errno code otherwise.</p>
+<h4 id="poll">poll()</h4>
+<pre>            int (*poll)(struct sensors_poll_device_t *dev,
+                    sensors_event_t* data, int count);</pre>
+<p>Returns an array of sensor data. This function must block until events are 
+  available. It will return the number of events read on success, or -errno in 
+  case of an error.</p>
+<p>The number of events returned in data must be less than or equal to the &quot;count&quot; 
+  argument. This function shall never return 0 (no event).</p>
diff --git a/src/devices/tech/dalvik/dalvik-bytecode.jd b/src/devices/tech/dalvik/dalvik-bytecode.jd
index 959cfbd..8d4f52b 100644
--- a/src/devices/tech/dalvik/dalvik-bytecode.jd
+++ b/src/devices/tech/dalvik/dalvik-bytecode.jd
@@ -2,7 +2,7 @@
 @jd:body
 
 <!--
-    Copyright 2010 The Android Open Source Project
+    Copyright 2013 The Android Open Source Project
 
     Licensed under the Apache License, Version 2.0 (the "License");
     you may not use this file except in compliance with the License.
@@ -16,8 +16,6 @@
     See the License for the specific language governing permissions and
     limitations under the License.
 -->
-<p>Copyright &copy; 2007 The Android Open Source Project
-
 <h2>General Design</h2>
 
 <ul>
diff --git a/src/devices/tech/dalvik/dex-format.jd b/src/devices/tech/dalvik/dex-format.jd
index e78d18b..744eb86 100644
--- a/src/devices/tech/dalvik/dex-format.jd
+++ b/src/devices/tech/dalvik/dex-format.jd
@@ -2,7 +2,7 @@
 @jd:body
 
 <!--
-    Copyright 2010 The Android Open Source Project
+    Copyright 2013 The Android Open Source Project
 
     Licensed under the Apache License, Version 2.0 (the "License");
     you may not use this file except in compliance with the License.
@@ -16,8 +16,6 @@
     See the License for the specific language governing permissions and
     limitations under the License.
 -->
-<p>Copyright &copy; 2007 The Android Open Source Project
-
 <p>This document describes the layout and contents of <code>.dex</code>
 files, which are used to hold a set of class definitions and their associated
 adjunct data.</p>
diff --git a/src/devices/tech/dalvik/index.jd b/src/devices/tech/dalvik/index.jd
index 7bc11bb..71324d8 100644
--- a/src/devices/tech/dalvik/index.jd
+++ b/src/devices/tech/dalvik/index.jd
@@ -2,7 +2,7 @@
 @jd:body
 
 <!--
-    Copyright 2010 The Android Open Source Project
+    Copyright 2013 The Android Open Source Project
 
     Licensed under the Apache License, Version 2.0 (the "License");
     you may not use this file except in compliance with the License.
diff --git a/src/devices/tech/dalvik/instruction-formats.jd b/src/devices/tech/dalvik/instruction-formats.jd
index 37a7438..37640da 100644
--- a/src/devices/tech/dalvik/instruction-formats.jd
+++ b/src/devices/tech/dalvik/instruction-formats.jd
@@ -2,7 +2,7 @@
 @jd:body
 
 <!--
-    Copyright 2010 The Android Open Source Project
+    Copyright 2013 The Android Open Source Project
 
     Licensed under the Apache License, Version 2.0 (the "License");
     you may not use this file except in compliance with the License.
@@ -16,8 +16,6 @@
     See the License for the specific language governing permissions and
     limitations under the License.
 -->
-<p>Copyright &copy; 2007 The Android Open Source Project
-
 <h2>Introduction and Overview</h2>
 
 <p>This document lists the instruction formats used by Dalvik bytecode
@@ -461,4 +459,4 @@
   <td>const-wide</td>
 </tr>
 </tbody>
-</table>
\ No newline at end of file
+</table>
diff --git a/src/devices/tech/datausage/excluding-network-types.jd b/src/devices/tech/datausage/excluding-network-types.jd
index 528b402..9bc5abc 100644
--- a/src/devices/tech/datausage/excluding-network-types.jd
+++ b/src/devices/tech/datausage/excluding-network-types.jd
@@ -2,7 +2,7 @@
 @jd:body
 
 <!--
-    Copyright 2010 The Android Open Source Project
+    Copyright 2013 The Android Open Source Project
 
     Licensed under the Apache License, Version 2.0 (the "License");
     you may not use this file except in compliance with the License.
@@ -27,4 +27,4 @@
 APNs to coexist on a single interface.  Android can collect network
 statistics from both designs, but <code>config_data_usage_network_types</code> is
 not be effective at excluding APNs forced to coexist on a single
-interface.</p>
\ No newline at end of file
+interface.</p>
diff --git a/src/devices/tech/datausage/iface-overview.jd b/src/devices/tech/datausage/iface-overview.jd
index 7608aa8..db537f5 100644
--- a/src/devices/tech/datausage/iface-overview.jd
+++ b/src/devices/tech/datausage/iface-overview.jd
@@ -2,7 +2,7 @@
 @jd:body
 
 <!--
-    Copyright 2010 The Android Open Source Project
+    Copyright 2013 The Android Open Source Project
 
     Licensed under the Apache License, Version 2.0 (the "License");
     you may not use this file except in compliance with the License.
diff --git a/src/devices/tech/datausage/index.jd b/src/devices/tech/datausage/index.jd
index adce77c..c7250db 100644
--- a/src/devices/tech/datausage/index.jd
+++ b/src/devices/tech/datausage/index.jd
@@ -2,7 +2,7 @@
 @jd:body
 
 <!--
-    Copyright 2010 The Android Open Source Project
+    Copyright 2013 The Android Open Source Project
 
     Licensed under the Apache License, Version 2.0 (the "License");
     you may not use this file except in compliance with the License.
diff --git a/src/devices/tech/datausage/kernel-changes.jd b/src/devices/tech/datausage/kernel-changes.jd
index 98624ed..efc6c26 100644
--- a/src/devices/tech/datausage/kernel-changes.jd
+++ b/src/devices/tech/datausage/kernel-changes.jd
@@ -2,7 +2,7 @@
 @jd:body
 
 <!--
-    Copyright 2010 The Android Open Source Project
+    Copyright 2013 The Android Open Source Project
 
     Licensed under the Apache License, Version 2.0 (the "License");
     you may not use this file except in compliance with the License.
diff --git a/src/devices/tech/datausage/kernel-overview.jd b/src/devices/tech/datausage/kernel-overview.jd
index 7d4248b..12966c5 100644
--- a/src/devices/tech/datausage/kernel-overview.jd
+++ b/src/devices/tech/datausage/kernel-overview.jd
@@ -2,7 +2,7 @@
 @jd:body
 
 <!--
-    Copyright 2010 The Android Open Source Project
+    Copyright 2013 The Android Open Source Project
 
     Licensed under the Apache License, Version 2.0 (the "License");
     you may not use this file except in compliance with the License.
diff --git a/src/devices/tech/datausage/tags-explained.jd b/src/devices/tech/datausage/tags-explained.jd
index bacd395..b988431 100644
--- a/src/devices/tech/datausage/tags-explained.jd
+++ b/src/devices/tech/datausage/tags-explained.jd
@@ -2,7 +2,7 @@
 @jd:body
 
 <!--
-    Copyright 2010 The Android Open Source Project
+    Copyright 2013 The Android Open Source Project
 
     Licensed under the Apache License, Version 2.0 (the "License");
     you may not use this file except in compliance with the License.
@@ -42,4 +42,4 @@
 level). Such tags can be removed, reapplied, or modified during
 runtime.</p>
 <p>The qtaguid module has been implemented on <a href="https://android-review.googlesource.com/#/q/project:kernel/common+branch:android-3.0,n,z">kernel/common branch of
-android-3.0</a></p>
\ No newline at end of file
+android-3.0</a></p>
diff --git a/src/devices/tech/datausage/tethering-data.jd b/src/devices/tech/datausage/tethering-data.jd
index 62d1733..6c7b1b9 100644
--- a/src/devices/tech/datausage/tethering-data.jd
+++ b/src/devices/tech/datausage/tethering-data.jd
@@ -2,7 +2,7 @@
 @jd:body
 
 <!--
-    Copyright 2010 The Android Open Source Project
+    Copyright 2013 The Android Open Source Project
 
     Licensed under the Apache License, Version 2.0 (the "License");
     you may not use this file except in compliance with the License.
diff --git a/src/devices/tech/datausage/usage-cycle-resets-dates.jd b/src/devices/tech/datausage/usage-cycle-resets-dates.jd
index f9bddbd..f032736 100644
--- a/src/devices/tech/datausage/usage-cycle-resets-dates.jd
+++ b/src/devices/tech/datausage/usage-cycle-resets-dates.jd
@@ -2,7 +2,7 @@
 @jd:body
 
 <!--
-    Copyright 2010 The Android Open Source Project
+    Copyright 2013 The Android Open Source Project
 
     Licensed under the Apache License, Version 2.0 (the "License");
     you may not use this file except in compliance with the License.
@@ -21,4 +21,4 @@
 <code>(00:00) UTC</code> on the requested day. When a month is shorter than the
 requested day, the cycle resets on the first day of the subsequent
 month. For example, a cycle reset day of the 30th would cause a reset
-on January 30 at <code>00:00 UTC</code> and March 1 at <code>00:00 UTC</code>.</p>
\ No newline at end of file
+on January 30 at <code>00:00 UTC</code> and March 1 at <code>00:00 UTC</code>.</p>
diff --git a/src/devices/tech/encryption/android_crypto_implementation.jd b/src/devices/tech/encryption/android_crypto_implementation.jd
index c81c4f5..7ace04d 100644
--- a/src/devices/tech/encryption/android_crypto_implementation.jd
+++ b/src/devices/tech/encryption/android_crypto_implementation.jd
@@ -2,7 +2,7 @@
 @jd:body
 
 <!--
-    Copyright 2010 The Android Open Source Project
+    Copyright 2013 The Android Open Source Project
 
     Licensed under the Apache License, Version 2.0 (the "License");
     you may not use this file except in compliance with the License.
diff --git a/src/devices/tech/encryption/index.jd b/src/devices/tech/encryption/index.jd
index 42fce0d..b33b3ae 100644
--- a/src/devices/tech/encryption/index.jd
+++ b/src/devices/tech/encryption/index.jd
@@ -2,7 +2,7 @@
 @jd:body
 
 <!--
-    Copyright 2010 The Android Open Source Project
+    Copyright 2013 The Android Open Source Project
 
     Licensed under the Apache License, Version 2.0 (the "License");
     you may not use this file except in compliance with the License.
@@ -18,4 +18,4 @@
 -->
 <p>Encryption on Android uses the dm-crypt layer in the Linux kernel.  Read the
 detailed description of how it is tied into the Android system and what must
-be done on a new device to get this feature working.</p>
\ No newline at end of file
+be done on a new device to get this feature working.</p>
diff --git a/src/devices/tech/index.jd b/src/devices/tech/index.jd
index 3ee6a3a..7f45337 100644
--- a/src/devices/tech/index.jd
+++ b/src/devices/tech/index.jd
@@ -2,7 +2,7 @@
 @jd:body
 
 <!--
-    Copyright 2010 The Android Open Source Project
+    Copyright 2013 The Android Open Source Project
 
     Licensed under the Apache License, Version 2.0 (the "License");
     you may not use this file except in compliance with the License.
diff --git a/src/devices/tech/input/dumpsys.jd b/src/devices/tech/input/dumpsys.jd
index a37584f..21ae764 100644
--- a/src/devices/tech/input/dumpsys.jd
+++ b/src/devices/tech/input/dumpsys.jd
@@ -2,7 +2,7 @@
 @jd:body
 
 <!--
-    Copyright 2010 The Android Open Source Project
+    Copyright 2013 The Android Open Source Project
 
     Licensed under the Apache License, Version 2.0 (the "License");
     you may not use this file except in compliance with the License.
diff --git a/src/devices/tech/input/getevent.jd b/src/devices/tech/input/getevent.jd
index 19499d1..8bf4093 100644
--- a/src/devices/tech/input/getevent.jd
+++ b/src/devices/tech/input/getevent.jd
@@ -2,7 +2,7 @@
 @jd:body
 
 <!--
-    Copyright 2010 The Android Open Source Project
+    Copyright 2013 The Android Open Source Project
 
     Licensed under the Apache License, Version 2.0 (the "License");
     you may not use this file except in compliance with the License.
diff --git a/src/devices/tech/input/index.jd b/src/devices/tech/input/index.jd
index 80cc997..80c5e5f 100644
--- a/src/devices/tech/input/index.jd
+++ b/src/devices/tech/input/index.jd
@@ -2,7 +2,7 @@
 @jd:body
 
 <!--
-    Copyright 2010 The Android Open Source Project
+    Copyright 2013 The Android Open Source Project
 
     Licensed under the Apache License, Version 2.0 (the "License");
     you may not use this file except in compliance with the License.
@@ -19,4 +19,4 @@
 <p>The Android input subsystem supports many different device classes,
 including keyboard, joystick, trackball, mouse and touch screen.</p>
 <p>The documentation in this section describes how to configure,
-calibrate, test, and write drivers for input devices.</p>
\ No newline at end of file
+calibrate, test, and write drivers for input devices.</p>
diff --git a/src/devices/tech/input/input-device-configuration-files.jd b/src/devices/tech/input/input-device-configuration-files.jd
index 06c8994..877053c 100644
--- a/src/devices/tech/input/input-device-configuration-files.jd
+++ b/src/devices/tech/input/input-device-configuration-files.jd
@@ -2,7 +2,7 @@
 @jd:body
 
 <!--
-    Copyright 2010 The Android Open Source Project
+    Copyright 2013 The Android Open Source Project
 
     Licensed under the Apache License, Version 2.0 (the "License");
     you may not use this file except in compliance with the License.
diff --git a/src/devices/tech/input/key-character-map-files.jd b/src/devices/tech/input/key-character-map-files.jd
index e60484e..6872cdb 100644
--- a/src/devices/tech/input/key-character-map-files.jd
+++ b/src/devices/tech/input/key-character-map-files.jd
@@ -2,7 +2,7 @@
 @jd:body
 
 <!--
-    Copyright 2010 The Android Open Source Project
+    Copyright 2013 The Android Open Source Project
 
     Licensed under the Apache License, Version 2.0 (the "License");
     you may not use this file except in compliance with the License.
diff --git a/src/devices/tech/input/key-layout-files.jd b/src/devices/tech/input/key-layout-files.jd
index ecab0ef..d353d08 100644
--- a/src/devices/tech/input/key-layout-files.jd
+++ b/src/devices/tech/input/key-layout-files.jd
@@ -2,7 +2,7 @@
 @jd:body
 
 <!--
-    Copyright 2010 The Android Open Source Project
+    Copyright 2013 The Android Open Source Project
 
     Licensed under the Apache License, Version 2.0 (the "License");
     you may not use this file except in compliance with the License.
diff --git a/src/devices/tech/input/keyboard-devices.jd b/src/devices/tech/input/keyboard-devices.jd
index e7740fe..0c6ba08 100644
--- a/src/devices/tech/input/keyboard-devices.jd
+++ b/src/devices/tech/input/keyboard-devices.jd
@@ -2,7 +2,7 @@
 @jd:body
 
 <!--
-    Copyright 2010 The Android Open Source Project
+    Copyright 2013 The Android Open Source Project
 
     Licensed under the Apache License, Version 2.0 (the "License");
     you may not use this file except in compliance with the License.
diff --git a/src/devices/tech/input/migration-guide.jd b/src/devices/tech/input/migration-guide.jd
index ff39dfd..8e30033 100644
--- a/src/devices/tech/input/migration-guide.jd
+++ b/src/devices/tech/input/migration-guide.jd
@@ -2,7 +2,7 @@
 @jd:body
 
 <!--
-    Copyright 2010 The Android Open Source Project
+    Copyright 2013 The Android Open Source Project
 
     Licensed under the Apache License, Version 2.0 (the "License");
     you may not use this file except in compliance with the License.
diff --git a/src/devices/tech/input/overview.jd b/src/devices/tech/input/overview.jd
index cbf86b9..118fabf 100644
--- a/src/devices/tech/input/overview.jd
+++ b/src/devices/tech/input/overview.jd
@@ -2,7 +2,7 @@
 @jd:body
 
 <!--
-    Copyright 2010 The Android Open Source Project
+    Copyright 2013 The Android Open Source Project
 
     Licensed under the Apache License, Version 2.0 (the "License");
     you may not use this file except in compliance with the License.
diff --git a/src/devices/tech/input/touch-devices.jd b/src/devices/tech/input/touch-devices.jd
index e7c909a..298ba15 100644
--- a/src/devices/tech/input/touch-devices.jd
+++ b/src/devices/tech/input/touch-devices.jd
@@ -2,7 +2,7 @@
 @jd:body
 
 <!--
-    Copyright 2010 The Android Open Source Project
+    Copyright 2013 The Android Open Source Project
 
     Licensed under the Apache License, Version 2.0 (the "License");
     you may not use this file except in compliance with the License.
diff --git a/src/devices/tech/input/validate-keymaps.jd b/src/devices/tech/input/validate-keymaps.jd
index 7730f11..c86c49e 100644
--- a/src/devices/tech/input/validate-keymaps.jd
+++ b/src/devices/tech/input/validate-keymaps.jd
@@ -2,7 +2,7 @@
 @jd:body
 
 <!--
-    Copyright 2010 The Android Open Source Project
+    Copyright 2013 The Android Open Source Project
 
     Licensed under the Apache License, Version 2.0 (the "License");
     you may not use this file except in compliance with the License.
diff --git a/src/devices/tech/kernel.jd b/src/devices/tech/kernel.jd
index af4700b..da749c5 100644
--- a/src/devices/tech/kernel.jd
+++ b/src/devices/tech/kernel.jd
@@ -2,7 +2,7 @@
 @jd:body
 
 <!--
-    Copyright 2010 The Android Open Source Project
+    Copyright 2013 The Android Open Source Project
 
     Licensed under the Apache License, Version 2.0 (the "License");
     you may not use this file except in compliance with the License.
@@ -287,4 +287,4 @@
 CONFIG_SCHEDSTATS=y
 CONFIG_TIMER_STATS=y
 CONFIG_SCHED_TRACER=y
-</pre>
\ No newline at end of file
+</pre>
diff --git a/src/devices/tech/power.jd b/src/devices/tech/power.jd
index 3247fbb..0367f26 100644
--- a/src/devices/tech/power.jd
+++ b/src/devices/tech/power.jd
@@ -2,7 +2,7 @@
 @jd:body
 
 <!--
-    Copyright 2010 The Android Open Source Project
+    Copyright 2013 The Android Open Source Project
 
     Licensed under the Apache License, Version 2.0 (the "License");
     you may not use this file except in compliance with the License.
@@ -605,4 +605,4 @@
 3000
 &lt;!-- Battery capacity is 3000 mAH (at 3.6 Volts) --&gt;
 
-</pre>
\ No newline at end of file
+</pre>
diff --git a/src/devices/tech/security/dm-verity.jd b/src/devices/tech/security/dm-verity.jd
index 79e375f..0522b23 100644
--- a/src/devices/tech/security/dm-verity.jd
+++ b/src/devices/tech/security/dm-verity.jd
@@ -2,7 +2,7 @@
 @jd:body
 
 <!--
-    Copyright 2010 The Android Open Source Project
+    Copyright 2013 The Android Open Source Project
 
     Licensed under the Apache License, Version 2.0 (the "License");
     you may not use this file except in compliance with the License.
@@ -305,7 +305,7 @@
 </table>
 
 <p>For additional assistance, contact 
-<a href="mailto:gcondra@google.com">gcondra@google.com</a>.</p>
+<a href="mailto:security@google.com?subject=dm-verity">security@google.com</a>.</p>
 
 <h2 id="supporting-docs">Supporting documentation</h2>
 
diff --git a/src/devices/tech/security/index.jd b/src/devices/tech/security/index.jd
index e53895a..57962c9 100644
--- a/src/devices/tech/security/index.jd
+++ b/src/devices/tech/security/index.jd
@@ -2,7 +2,7 @@
 @jd:body
 
 <!--
-    Copyright 2010 The Android Open Source Project
+    Copyright 2013 The Android Open Source Project
 
     Licensed under the Apache License, Version 2.0 (the "License");
     you may not use this file except in compliance with the License.
diff --git a/src/devices/tech/security/se-linux.jd b/src/devices/tech/security/se-linux.jd
index acf9291..251cafc 100644
--- a/src/devices/tech/security/se-linux.jd
+++ b/src/devices/tech/security/se-linux.jd
@@ -2,7 +2,7 @@
 @jd:body
 
 <!--
-    Copyright 2010 The Android Open Source Project
+    Copyright 2013 The Android Open Source Project
 
     Licensed under the Apache License, Version 2.0 (the "License");
     you may not use this file except in compliance with the License.
@@ -395,5 +395,4 @@
 results and improve policy settings. Over time, Android intends to support
 common manufacturer additions in its default SELinux policy. For more
 information, contact <a
-href="mailto:security@android.com">security@android.com</a> or Geremy Condra (<a
-href="mailto:gcondra@google.com">gcondra@google.com</a>) directly.
+href="mailto:security@google.com?subject=se-linux">security@android.com</a>.
diff --git a/src/devices/tech/storage/index.jd b/src/devices/tech/storage/index.jd
index 5606092..e50abe9 100644
--- a/src/devices/tech/storage/index.jd
+++ b/src/devices/tech/storage/index.jd
@@ -2,7 +2,7 @@
 @jd:body
 
 <!--
-    Copyright 2010 The Android Open Source Project
+    Copyright 2013 The Android Open Source Project
 
     Licensed under the Apache License, Version 2.0 (the "License");
     you may not use this file except in compliance with the License.
diff --git a/src/devices/tech/test_infra/tradefed/full_example.jd b/src/devices/tech/test_infra/tradefed/full_example.jd
index 9733d77..1be769f 100644
--- a/src/devices/tech/test_infra/tradefed/full_example.jd
+++ b/src/devices/tech/test_infra/tradefed/full_example.jd
@@ -2,7 +2,7 @@
 @jd:body
 
 <!--
-    Copyright 2010 The Android Open Source Project
+    Copyright 2013 The Android Open Source Project
 
     Licensed under the Apache License, Version 2.0 (the "License");
     you may not use this file except in compliance with the License.
diff --git a/src/devices/tech/test_infra/tradefed/index.jd b/src/devices/tech/test_infra/tradefed/index.jd
index ad7367a..dabe07f 100644
--- a/src/devices/tech/test_infra/tradefed/index.jd
+++ b/src/devices/tech/test_infra/tradefed/index.jd
@@ -2,7 +2,7 @@
 @jd:body
 
 <!--
-    Copyright 2010 The Android Open Source Project
+    Copyright 2013 The Android Open Source Project
 
     Licensed under the Apache License, Version 2.0 (the "License");
     you may not use this file except in compliance with the License.
diff --git a/src/devices/tuning.jd b/src/devices/tuning.jd
index 61151b6..87bf7aa 100644
--- a/src/devices/tuning.jd
+++ b/src/devices/tuning.jd
@@ -2,7 +2,7 @@
 @jd:body
 
 <!--
-    Copyright 2010 The Android Open Source Project
+    Copyright 2013 The Android Open Source Project
 
     Licensed under the Apache License, Version 2.0 (the "License");
     you may not use this file except in compliance with the License.
@@ -69,6 +69,15 @@
 </tr>
 
 <tr>
+  <td><code>ro.hwui.patch_cache_size</code></td>
+  <td><code>integer</code></td>
+  <td><code>128</code></td>
+  <td>Defines the size, in kilobytes, of the 9-patches cache, per process. This
+cache holds only vertex data and can therefore be kept small. Each vertex is
+made of 4 floats, or 16 bytes.</td>
+</tr>
+
+<tr>
   <td><code>ro.hwui.path_cache_size</code></td>
   <td><code>float</code></td>
   <td><code>4</code></td>
@@ -76,6 +85,7 @@
   cache large enough to hold at least one screen worth of 32-bit textures. For instance,
   on a 1280x800 display, a full screen buffer uses about 4 MB, so the cache should be at least 4 MB.</td>
 </tr>
+
 <tr>
   <td><code>ro.hwui.shape_cache_size</code></td>
   <td><code>float</code></td>
@@ -141,6 +151,16 @@
 </tr>
 
 <tr>
+  <td><code>ro.zygote.disable_gl_preload</code></td>
+  <td><code>boolean</code></td>
+  <td><code>false</code></td>
+  <td>Used to enable/disable preloading of EGL/GL drivers in Zygote at boot time. When this property is 
+set to false, Zygote will preload the GL drivers by invoking eglGetDisplay(EGL_DEFAULT_DISPLAY). 
+The goal is to load the dynamic libraries code in Zygote to share it with all the other processes. If a driver
+does not support being shared, set this property to true.</td>
+</tr>
+
+<tr>
   <td><code>hwui.text_gamma_correction</code></td>
   <td><code>string</code></td>
   <td><code>lookup</code></td>
diff --git a/src/index.jd b/src/index.jd
index 5f2639a..52f2f45 100644
--- a/src/index.jd
+++ b/src/index.jd
@@ -6,7 +6,7 @@
 @jd:body
 
 <!--
-    Copyright 2010 The Android Open Source Project
+    Copyright 2013 The Android Open Source Project
 
     Licensed under the Apache License, Version 2.0 (the "License");
     you may not use this file except in compliance with the License.
@@ -43,46 +43,40 @@
     <div class="col-8">
     <h3>Updates</h3>
       <a href="{@docRoot}source/index.html">
-        <h4>Source Code Available for Android</h4>
+        <h4>Source Code Available for Android</h4></a>
         <p>Android is an open-source software stack for a wide array of mobile devices with different form factors.
 <img border="0" src="images/Android_Robot_100.png" alt="Android Partner icon" style="display:inline;float:right;margin:5px 10px"> 
         We created Android in response to our own experiences launching mobile apps. We wanted to make sure there was 
         no central point of failure so no industry player can restrict or control the innovations of any other. That's 
         why we created Android and made its source code open.</p>
-      </a>
       <a href="{@docRoot}compatibility/index.html">
-        <h4>Compatibility Definition for Android</h4>
+        <h4>Compatibility Definition for Android</h4></a>
         <p>Android's purpose is to establish an open platform for developers to build innovative apps. The Android 
         Compatibility program defines the technical details of the Android platform and provides tools for device manufacturers to 
         ensure developers' apps run on a variety of devices.</p>
-      </a>
     </div>
 
     <div class="col-8">
       <h3>Getting Started</h3>
       <a href="{@docRoot}source/index.html">
-        <h4>Explore the Source</h4>
+        <h4>Explore the Source</h4></a>
         <p>Get the complete Android platform and modify and build it to suit your needs. You can
         also contribute to the Open Source Project to make your changes available to everyone else in
         the Android ecosystem.</p>
-      </a>
       <a href="{@docRoot}devices/index.html">
-        <h4>Port Android to Devices</h4>
+        <h4>Port Android to Devices</h4></a>
         <p>Port the latest Android platform and
         create compelling devices that your customers want.</p>
-      </a>
 
       <a href="{@docRoot}accessories/index.html">
-        <h4>Build Accessories</h4>
+        <h4>Build Accessories</h4></a>
         <p>Sometimes, a device can't do it all. Tap into Android's open accessory standard and build accessories to complement the wide variety of Android-powered devices.</p>
-      </a>
 
       <a href="{@docRoot}compatibility/index.html">
-        <h4>Get Compatible</h4>
+        <h4>Get Compatible</h4></a>
         <p>Being Android-compatible lets you offer custom features but still give users and developers a consistent
           and standard experience across all Android-powered devices. Android provides guidance
           and a test suite to verify your Android compatibility.</p>
-      </a>
     </div>
 
   </div>
diff --git a/src/legal.jd b/src/legal.jd
index 61c8cb4..b15a3b2 100644
--- a/src/legal.jd
+++ b/src/legal.jd
@@ -5,7 +5,7 @@
 @jd:body
 
 <!--
-    Copyright 2010 The Android Open Source Project
+    Copyright 2013 The Android Open Source Project
 
     Licensed under the Apache License, Version 2.0 (the "License");
     you may not use this file except in compliance with the License.
diff --git a/src/source/build-numbers.jd b/src/source/build-numbers.jd
index f7981c2..440ef5d 100644
--- a/src/source/build-numbers.jd
+++ b/src/source/build-numbers.jd
@@ -2,7 +2,7 @@
 @jd:body
 
 <!--
-    Copyright 2010 The Android Open Source Project
+    Copyright 2013 The Android Open Source Project
 
     Licensed under the Apache License, Version 2.0 (the "License");
     you may not use this file except in compliance with the License.
@@ -130,6 +130,11 @@
 <td>4.3.x</td>
 <td>API level 18</td>
 </tr>
+<tr>
+<td>KitKat</td>
+<td>4.4 - 4.4.2</td>
+<td>API level 19</td>
+</tr>
 </tbody>
 </table>
 <p>Starting with Cupcake, individual builds are identified with a short
@@ -537,10 +542,34 @@
 <td>Latest Jelly Bean version, Nexus 7 (deb)</td>
 </tr>
 
+<tr>
+<td>KRT16M</td>
+<td>android-4.4_r1</td>
+<td>Initial KitKat version, Nexus 5 (hammerhead)</td>
+</tr>
+
+<tr>
+<td>KRT16S</td>
+<td>android-4.4_r1.2</td>
+<td>KitKat version, Nexus 7 (flo/deb/grouper/tilapia), Nexus 4, Nexus 10</td>
+</tr>
+
+<tr>
+<td>KOT49E</td>
+<td>android-4.4.1_r1</td>
+<td>KitKat version, Nexus 5, Nexus 7 (flo/deb/grouper/tilapia), Nexus 4, Nexus 10</td>
+</tr>
+
+<tr>
+<td>KOT49H</td>
+<td>android-4.4.2_r1</td>
+<td>Latest KitKat version, Nexus 5, Nexus 7 (flo/deb/grouper/tilapia), Nexus 4, Nexus 10</td>
+</tr>
+
 </tbody>
 </table>
 <p>The branches froyo, gingerbread, ics-mr0, ics-mr1, jb-dev,
-jb-mr1-dev, jb-mr1.1-dev, jb-mr2-dev
+jb-mr1-dev, jb-mr1.1-dev, jb-mr2-dev, kitkat-dev
 represent development
 branches that do not exactly match configurations that were tested
 by Google. They might contain a variety of changes in addition to
diff --git a/src/source/building-devices.jd b/src/source/building-devices.jd
index e1b912e..dc8ed66 100644
--- a/src/source/building-devices.jd
+++ b/src/source/building-devices.jd
@@ -2,7 +2,7 @@
 @jd:body
 
 <!--
-    Copyright 2010 The Android Open Source Project
+    Copyright 2013 The Android Open Source Project
 
     Licensed under the Apache License, Version 2.0 (the "License");
     you may not use this file except in compliance with the License.
@@ -76,6 +76,11 @@
 </thead>
 <tbody>
 <tr>
+<td>hammerhead</td>
+<td>Press and hold both <em>Volume Up</em> and <em>Volume Down</em>, then press
+and hold <em>Power</em></td>
+</tr>
+<tr>
 <td>flo</td>
 <td>Press and hold <em>Volume Down</em>, then press and hold <em>Power</em></td>
 </tr>
@@ -193,6 +198,11 @@
 </thead>
 <tbody>
 <tr>
+<td>hammerhead</td>
+<td>android-4.4_r1</td>
+<td>aosp_hammerhead-userdebug</td>
+</tr>
+<tr>
 <td>flo</td>
 <td>android-4.3_r2.3</td>
 <td>aosp_flo-userdebug</td>
diff --git a/src/source/building-dream.jd b/src/source/building-dream.jd
index b454ba8..1b6d9ac 100644
--- a/src/source/building-dream.jd
+++ b/src/source/building-dream.jd
@@ -2,7 +2,7 @@
 @jd:body
 
 <!--
-    Copyright 2010 The Android Open Source Project
+    Copyright 2013 The Android Open Source Project
 
     Licensed under the Apache License, Version 2.0 (the "License");
     you may not use this file except in compliance with the License.
@@ -59,4 +59,4 @@
 </li>
 </ol>
 <p>Note: these instructions work for the sapphire (ADP2) build target, as
-well. Simply replace "dream" with "sapphire" above.</p>
\ No newline at end of file
+well. Simply replace "dream" with "sapphire" above.</p>
diff --git a/src/source/building-kernels.jd b/src/source/building-kernels.jd
index 092e3d3..8806d80 100644
--- a/src/source/building-kernels.jd
+++ b/src/source/building-kernels.jd
@@ -2,7 +2,7 @@
 @jd:body
 
 <!--
-    Copyright 2010 The Android Open Source Project
+    Copyright 2013 The Android Open Source Project
 
     Licensed under the Apache License, Version 2.0 (the "License");
     you may not use this file except in compliance with the License.
@@ -40,6 +40,12 @@
     <th>Build configuration</th>
   </tr>
   <tr>
+    <td>hammerhead</td>
+    <td>device/lge/hammerhead-kernel</td>
+    <td>kernel/msm</td>
+    <td>hammerhead_defconfig</td>
+  </tr>
+  <tr>
     <td>flo</td>
     <td>device/asus/flo-kernel/kernel</td>
     <td>kernel/msm</td>
@@ -122,9 +128,7 @@
 <p>You will want to look at the git log for the kernel binary in the device
 project that you are interested in.</p>
 
-
-
-Device projects are of the form device/&lt;vendor&gt;/&lt;name&gt;.</p>
+<p>Device projects are of the form device/&lt;vendor&gt;/&lt;name&gt;.</p>
 <pre><code>$ git clone https://android.googlesource.com/device/ti/panda
 $ cd panda
 $ git log --max-count=1 kernel
@@ -135,6 +139,19 @@
 The first entry in the log is the most recent, i.e. the one used to
 build that kernel. You will need it at a later step.</p>
 
+
+<h2 id="id-version">Identifying kernel version</h2>
+<p>To determine the kernel version used in a particular system image, run the
+following command against the kernel file:</p>
+<pre><code>
+$ dd if=kernel bs=1 skip=$(LC_ALL=C grep -a -b -o $'\x1f\x8b\x08\x00\x00\x00\x00\x00' kernel | cut -d ':' -f 1) | zgrep -a 'Linux version'
+</code></pre>
+<p>For Nexus 5 (hammerhead), this can be accomplished with:</p>
+<pre><code>
+$ bzgrep -a 'Linux version' vmlinux.bz2
+</code></pre>
+
+
 <h2 id="downloading-sources">Downloading sources</h2>
 <p>Depending on which kernel you want,</p>
 <pre><code>$ git clone https://android.googlesource.com/kernel/common.git
@@ -183,6 +200,14 @@
 <p>To build the tuna kernel, you may run the previous commands replacing all
 instances of "panda" with "tuna".</p>
 <p>
-The kernel binary is output as `arch/arm/boot/zImage`, and needs to be copied
+The kernel binary is output as: `arch/arm/boot/zImage` It can be copied
 into the Android source tree in order to build the matching boot image.
 </p>
+<p>Or you can include the <code>TARGET_PREBUILT_KERNEL</code> variable while
+using <code>make bootimage</code> or any other make command line that builds a
+boot image.</p>
+<pre><code>
+$ export TARGET_PREBUILT_KERNEL=$your_kernel_path/arch/arm/boot/zImage
+</code></pre>
+<p>That variable is supported by all devices as it is set up via
+device/common/populate-new-device.sh</p>
diff --git a/src/source/building-running.jd b/src/source/building-running.jd
index ae4116c..bff563a 100644
--- a/src/source/building-running.jd
+++ b/src/source/building-running.jd
@@ -2,7 +2,7 @@
 @jd:body
 
 <!--
-    Copyright 2010 The Android Open Source Project
+    Copyright 2013 The Android Open Source Project
 
     Licensed under the Apache License, Version 2.0 (the "License");
     you may not use this file except in compliance with the License.
diff --git a/src/source/building.jd b/src/source/building.jd
index 09d7811..3762eb8 100644
--- a/src/source/building.jd
+++ b/src/source/building.jd
@@ -2,7 +2,7 @@
 @jd:body
 
 <!--
-    Copyright 2010 The Android Open Source Project
+    Copyright 2013 The Android Open Source Project
 
     Licensed under the Apache License, Version 2.0 (the "License");
     you may not use this file except in compliance with the License.
@@ -57,4 +57,4 @@
   Git 1.7 or newer. You can find it at <a href="http://git-scm.com/download">git-scm.com</a>.</p>
   </li>
 
-</ul>
\ No newline at end of file
+</ul>
diff --git a/src/source/cla-corporate.jd b/src/source/cla-corporate.jd
index 9125a0c..efed92e 100644
--- a/src/source/cla-corporate.jd
+++ b/src/source/cla-corporate.jd
@@ -2,7 +2,7 @@
 @jd:body
 
 <!--
-    Copyright 2010 The Android Open Source Project
+    Copyright 2013 The Android Open Source Project
 
     Licensed under the Apache License, Version 2.0 (the "License");
     you may not use this file except in compliance with the License.
diff --git a/src/source/code-lines.jd b/src/source/code-lines.jd
index 95188ee..59da8bd 100644
--- a/src/source/code-lines.jd
+++ b/src/source/code-lines.jd
@@ -2,7 +2,7 @@
 @jd:body
 
 <!--
-    Copyright 2010 The Android Open Source Project
+    Copyright 2013 The Android Open Source Project
 
     Licensed under the Apache License, Version 2.0 (the "License");
     you may not use this file except in compliance with the License.
diff --git a/src/source/code-style.jd b/src/source/code-style.jd
index 99bfbb3..2dad502 100644
--- a/src/source/code-style.jd
+++ b/src/source/code-style.jd
@@ -2,7 +2,7 @@
 @jd:body
 
 <!--
-    Copyright 2010 The Android Open Source Project
+    Copyright 2013 The Android Open Source Project
 
     Licensed under the Apache License, Version 2.0 (the "License");
     you may not use this file except in compliance with the License.
@@ -205,7 +205,7 @@
 line. And then there is the class or interface declaration. In the Javadoc
 comments, describe what the class or interface does.</p>
 <pre><code>/*
- * Copyright (C) 2010 The Android Open Source Project 
+ * Copyright (C) 2013 The Android Open Source Project 
  *
  * Licensed under the Apache License, Version 2.0 (the "License");
  * you may not use this file except in compliance with the License.
diff --git a/src/source/contributing.jd b/src/source/contributing.jd
index dd1ded9..5f1b62f 100644
--- a/src/source/contributing.jd
+++ b/src/source/contributing.jd
@@ -2,7 +2,7 @@
 @jd:body
 
 <!--
-    Copyright 2010 The Android Open Source Project
+    Copyright 2013 The Android Open Source Project
 
     Licensed under the Apache License, Version 2.0 (the "License");
     you may not use this file except in compliance with the License.
diff --git a/src/source/developing.jd b/src/source/developing.jd
index 6fe7b73..46a51a7 100644
--- a/src/source/developing.jd
+++ b/src/source/developing.jd
@@ -2,7 +2,7 @@
 @jd:body
 
 <!--
-    Copyright 2010 The Android Open Source Project
+    Copyright 2013 The Android Open Source Project
 
     Licensed under the Apache License, Version 2.0 (the "License");
     you may not use this file except in compliance with the License.
diff --git a/src/source/downloading.jd b/src/source/downloading.jd
index e4dd24b..f014694 100644
--- a/src/source/downloading.jd
+++ b/src/source/downloading.jd
@@ -2,7 +2,7 @@
 @jd:body
 
 <!--
-    Copyright 2010 The Android Open Source Project
+    Copyright 2013 The Android Open Source Project
 
     Licensed under the Apache License, Version 2.0 (the "License");
     you may not use this file except in compliance with the License.
diff --git a/src/source/faqs.jd b/src/source/faqs.jd
index 60e02c2..e9b018c 100644
--- a/src/source/faqs.jd
+++ b/src/source/faqs.jd
@@ -2,7 +2,7 @@
 @jd:body
 
 <!--
-    Copyright 2010 The Android Open Source Project
+    Copyright 2013 The Android Open Source Project
 
     Licensed under the Apache License, Version 2.0 (the "License");
     you may not use this file except in compliance with the License.
@@ -25,6 +25,10 @@
 </div>
 
 <a name="top"></a>
+<p>Please see the <a
+href="http://developer.android.com/guide/faq/index.html">Android FAQs</a> on
+developer.android.com for answers to other common questions.
+
 <h2 id="open-source">Open Source</h2>
 <h3 id="what-is-the-android-open-source-project">What is the Android Open Source Project?</h3>
 <p>We use the phrase "Android Open Source Project" or "AOSP" to refer to the
@@ -317,131 +321,3 @@
 tests.</p>
 
 <a href="#top">Back to top</a>
-<h2>Security</h2>
-<h3 id="secure">Is Android secure?</h3>
-
-<p>The security and privacy of our users' data is of primary importance to the
-Android Open Source Project. We are dedicated to building and maintaining one
-of the most secure mobile platforms available while still fulfilling our goal
-of opening the mobile device space to innovation and competition.</p>
-
-<p>See the <a href="{@docRoot}devices/tech/security/index.html">Android Security
-Overview</a> for a comprehensive description of the Android security model and processes.</p>
-
-<p>Application developers play an important part in the security of Android.
-The Android Platform provides developers with a rich <a
-href="http://developer.android.com/training/articles/security-tips.html">security model</a>
-that allows them to request capabilities, or access, from users
-and define new capabilities other applications can request.
-The Android user can choose to grant or deny an application's request for
-certain capabilities on the handset.</p>
-
-<p>We have made great efforts to secure the Android platform, but it is
-inevitable that security bugs will be found in any system of this complexity.
-Therefore, the Android team works hard to find new bugs internally and responds
-quickly and professionally to vulnerability reports from external researchers.
-</p>
-
-
-<h3 id="issue">I think I found a security flaw. How do I report it?</h3>
-
-<p>You can reach the Android security team at <a
-href="mailto:security@android.com">security@android.com</a>. If you like, you
-can protect your message using our <a
-href="http://developer.android.com/security_at_android_dot_com.txt">PGP
-key</a>.</p>
-
-<p>We appreciate researchers practicing responsible disclosure by emailing us
-a detailed summary of the issue and keeping the issue confidential while
-users are at risk. In return, we will make sure to keep the researcher informed
-of our progress in issuing a fix. </p>
-
-
-<h3 id="informed">How can I stay informed about Android security?</h3>
-
-<p>For general discussion of Android platform security, or how to use
-security features in your Android application, please subscribe to <a
-href="http://groups.google.com/group/android-security-discuss">android-security-discuss</a>.
-</p>
-
-
-<h3 id="use">How do I securely use my Android phone?</h3>
-
-<p>Android was designed so you can safely use your phone without making
-any changes to the device or installing any special software.  Android applications
-run in an Application Sandbox that limits access to sensitive information or data
-with the users permission.</p>
-
-<p>To fully benefit from the security protections in Android, it is important that
-users download and install software only from known sources.</p>
-
-<p>As an open platform, Android allows users to visit any website and load
-software from any developer onto a device. As with a home PC, users must be
-aware of who is providing the software they are downloading and must decide
-whether they want to grant the application the capabilities it requests.
-This decision can be informed by the user's judgment of the software
-developer's trustworthiness, and where the software came from.</p>
-
-
-<h3 id="malware">I think I found malicious software being
-distributed for Android. How can I help?</h3>
-
-<p>Like any other platform, it will be possible for unethical developers
-to create malicious software, known as <a
-href="http://en.wikipedia.org/wiki/Malware">malware</a>, for Android. If you
-think somebody is trying to spread malware, please let us know at <a
-href="mailto:security@android.com">security@android.com</a>. Please include as
-much detail about the application as possible, with the location it is
-being distributed from and why you suspect it of being malicious software.</p>
-
-<p>The term <i>malicious software</i> is subjective, and we cannot make an
-exhaustive definition.  Some examples of what the Android security team believes
-to be malicious software is any application that:
-<ul>
-    <li>uses a bug or security vulnerability to gain permissions that have not
-    been granted by the user.</li>
-    <li>shows the user unsolicited messages (especially messages urging the
-    user to buy something).</li>
-    <li>resists (or attempts to resist) the user's effort to uninstall it.</li>
-    <li>attempts to automatically spread itself to other devices.</li>
-    <li>hides its files and/or processes.</li>
-    <li>discloses the user's private information to a third party, without the
-    user's knowledge and consent.</li>
-    <li>destroys the user's data (or the device itself) without the user's
-    knowledge and consent.</li>
-    <li>impersonates the user (such as by sending email or buying things from a
-    web store) without the user's knowledge and consent.</li>
-    <li>otherwise degrades the user's experience with the device.</li>
-</ul>
-</p>
-
-<h3 id="fixes">How do Android-powered devices receive security
-fixes?</h3>
-
-<p>The manufacturer of each device is responsible for distributing software
-upgrades for it, including security fixes. Many devices will update themselves
-automatically with software downloaded "over the air" (OTA), while some devices
-require the user to upgrade them manually.</p>
-
-<p>Google provides software updates for a number of Android devices, including
-the <a href="http://www.google.com/nexus">Nexus</a>
-series of devices, using an OTA update. These updates may include
-security fixes as well as new features.</p>
-
-<h3 id="directfix">Can I get a fix directly from the
-Android Platform Project?</h3>
-
-<p>Android is a mobile platform that is released as open source and
-available for free use by anybody. This means that there are many
-Android-based products available to consumers, and most of them are created
-without the knowledge or participation of the Android Open Source Project. Like
-the maintainers of other open source projects, we cannot build and release
-patches for the entire ecosystem of products using Android. Instead, we will
-work diligently to find and fix flaws as quickly as possible and to distribute
-those fixes to the manufacturers of the products through the open source project.</p>
-
-<p>If you are making an Android-powered device and would like to know how you can
-properly support your customers by keeping abreast of software updates, please
-contact us at <a
-href="mailto:info@openhandsetalliance.com">info@openhandsetalliance.com</a>.</p>
-<a href="#top">Back to top</a>
diff --git a/src/source/flashing.jd b/src/source/flashing.jd
index fa99cf5..06c8e2b 100644
--- a/src/source/flashing.jd
+++ b/src/source/flashing.jd
@@ -2,7 +2,7 @@
 @jd:body
 
 <!--
-    Copyright 2010 The Android Open Source Project
+    Copyright 2013 The Android Open Source Project
 
     Licensed under the Apache License, Version 2.0 (the "License");
     you may not use this file except in compliance with the License.
@@ -26,4 +26,4 @@
 <h1 id="emulating">Emulating</h1>
 <p>To run the emulator, type</p>
 <pre><code>$ emulator
-</code></pre>
\ No newline at end of file
+</code></pre>
diff --git a/src/source/getting-started.jd b/src/source/getting-started.jd
index bf87be8..7c93494 100644
--- a/src/source/getting-started.jd
+++ b/src/source/getting-started.jd
@@ -2,7 +2,7 @@
 @jd:body
 
 <!--
-    Copyright 2010 The Android Open Source Project
+    Copyright 2013 The Android Open Source Project
 
     Licensed under the Apache License, Version 2.0 (the "License");
     you may not use this file except in compliance with the License.
@@ -19,4 +19,4 @@
 <p>Thanks for your interest in Android! To begin working with the platform, whether
 you're creating a custom version of Android for existing devices or if you are
 building a hardware device from scratch, you'll need to set up your machine to download and build the
-source.
\ No newline at end of file
+source.
diff --git a/src/source/git-resources.jd b/src/source/git-resources.jd
index bced045..5606b2d 100644
--- a/src/source/git-resources.jd
+++ b/src/source/git-resources.jd
@@ -2,7 +2,7 @@
 @jd:body
 
 <!--
-    Copyright 2010 The Android Open Source Project
+    Copyright 2013 The Android Open Source Project
 
     Licensed under the Apache License, Version 2.0 (the "License");
     you may not use this file except in compliance with the License.
@@ -30,4 +30,4 @@
 <li>
 <p><a href="http://www.gitcasts.com">GitCasts</a> (Git how-to videos)</p>
 </li>
-</ul>
\ No newline at end of file
+</ul>
diff --git a/src/source/index.jd b/src/source/index.jd
index 46d5431..c0a4a3e 100644
--- a/src/source/index.jd
+++ b/src/source/index.jd
@@ -2,7 +2,7 @@
 @jd:body
 
 <!--
-    Copyright 2010 The Android Open Source Project
+    Copyright 2013 The Android Open Source Project
 
     Licensed under the Apache License, Version 2.0 (the "License");
     you may not use this file except in compliance with the License.
diff --git a/src/source/initializing.jd b/src/source/initializing.jd
index e3d77f0..8ff9996 100644
--- a/src/source/initializing.jd
+++ b/src/source/initializing.jd
@@ -2,7 +2,7 @@
 @jd:body
 
 <!--
-    Copyright 2010 The Android Open Source Project
+    Copyright 2013 The Android Open Source Project
 
     Licensed under the Apache License, Version 2.0 (the "License");
     you may not use this file except in compliance with the License.
diff --git a/src/source/known-issues.jd b/src/source/known-issues.jd
index a68dde6..f87a34f 100644
--- a/src/source/known-issues.jd
+++ b/src/source/known-issues.jd
@@ -2,7 +2,7 @@
 @jd:body
 
 <!--
-    Copyright 2010 The Android Open Source Project
+    Copyright 2013 The Android Open Source Project
 
     Licensed under the Apache License, Version 2.0 (the "License");
     you may not use this file except in compliance with the License.
diff --git a/src/source/licenses.jd b/src/source/licenses.jd
index 6287cda..5135bdb 100644
--- a/src/source/licenses.jd
+++ b/src/source/licenses.jd
@@ -2,7 +2,7 @@
 @jd:body
 
 <!--
-    Copyright 2010 The Android Open Source Project
+    Copyright 2013 The Android Open Source Project
 
     Licensed under the Apache License, Version 2.0 (the "License");
     you may not use this file except in compliance with the License.
diff --git a/src/source/life-of-a-bug.jd b/src/source/life-of-a-bug.jd
index 21995d5..9e5a4ca 100644
--- a/src/source/life-of-a-bug.jd
+++ b/src/source/life-of-a-bug.jd
@@ -2,7 +2,7 @@
 @jd:body
 
 <!--
-    Copyright 2010 The Android Open Source Project
+    Copyright 2013 The Android Open Source Project
 
     Licensed under the Apache License, Version 2.0 (the "License");
     you may not use this file except in compliance with the License.
diff --git a/src/source/life-of-a-patch.jd b/src/source/life-of-a-patch.jd
index 5e316a0..b0c821c 100644
--- a/src/source/life-of-a-patch.jd
+++ b/src/source/life-of-a-patch.jd
@@ -2,7 +2,7 @@
 @jd:body
 
 <!--
-    Copyright 2010 The Android Open Source Project
+    Copyright 2013 The Android Open Source Project
 
     Licensed under the Apache License, Version 2.0 (the "License");
     you may not use this file except in compliance with the License.
diff --git a/src/source/report-bugs.jd b/src/source/report-bugs.jd
index 0a0ffbb..027dcd2 100644
--- a/src/source/report-bugs.jd
+++ b/src/source/report-bugs.jd
@@ -2,7 +2,7 @@
 @jd:body
 
 <!--
-    Copyright 2010 The Android Open Source Project
+    Copyright 2013 The Android Open Source Project
 
     Licensed under the Apache License, Version 2.0 (the "License");
     you may not use this file except in compliance with the License.
diff --git a/src/source/roles.jd b/src/source/roles.jd
index 44eeae4..dccd402 100644
--- a/src/source/roles.jd
+++ b/src/source/roles.jd
@@ -2,7 +2,7 @@
 @jd:body
 
 <!--
-    Copyright 2010 The Android Open Source Project
+    Copyright 2013 The Android Open Source Project
 
     Licensed under the Apache License, Version 2.0 (the "License");
     you may not use this file except in compliance with the License.
diff --git a/src/source/source_toc.cs b/src/source/source_toc.cs
index d2b1464..175a96f 100644
--- a/src/source/source_toc.cs
+++ b/src/source/source_toc.cs
@@ -1,5 +1,5 @@
 <!--
-    Copyright 2010 The Android Open Source Project
+    Copyright 2013 The Android Open Source Project
 
     Licensed under the Apache License, Version 2.0 (the "License");
     you may not use this file except in compliance with the License.
@@ -82,4 +82,4 @@
     </div>
   </li>
 
-</ul>
\ No newline at end of file
+</ul>
diff --git a/src/source/submit-patches.jd b/src/source/submit-patches.jd
index 11fecca..994087d 100644
--- a/src/source/submit-patches.jd
+++ b/src/source/submit-patches.jd
@@ -2,7 +2,7 @@
 @jd:body
 
 <!--
-    Copyright 2010 The Android Open Source Project
+    Copyright 2013 The Android Open Source Project
 
     Licensed under the Apache License, Version 2.0 (the "License");
     you may not use this file except in compliance with the License.
@@ -191,7 +191,7 @@
 <code>external/llvm</code>) should be made upstream at
 <a href="http://llvm.org/">llvm.org/</a>.</p>
 
-<h2 id="mksh">
+<h2 id="mksh">mksh</h2>
 <p>All changes to the MirBSD Korn Shell project at <code>external/mksh</code> should be made upstream
 either by sending an email to miros-mksh on the mirbsd.o®g domain (no subscription
 required to submit there) or (optionally) at <a href="https://launchpad.net/mksh">Launchpad</a>.
diff --git a/src/source/using-eclipse.jd b/src/source/using-eclipse.jd
index d34084a..8f7a61b 100644
--- a/src/source/using-eclipse.jd
+++ b/src/source/using-eclipse.jd
@@ -2,7 +2,7 @@
 @jd:body
 
 <!--
-    Copyright 2010 The Android Open Source Project
+    Copyright 2013 The Android Open Source Project
 
     Licensed under the Apache License, Version 2.0 (the "License");
     you may not use this file except in compliance with the License.
diff --git a/src/source/using-repo.jd b/src/source/using-repo.jd
index 7dfd38f..67ca7b7 100644
--- a/src/source/using-repo.jd
+++ b/src/source/using-repo.jd
@@ -2,7 +2,7 @@
 @jd:body
 
 <!--
-    Copyright 2010 The Android Open Source Project
+    Copyright 2013 The Android Open Source Project
 
     Licensed under the Apache License, Version 2.0 (the "License");
     you may not use this file except in compliance with the License.