Replace NULL with nullptr

Also shortened some lines that were too long and cleaned up a few other
minor details.
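
For context, a minimal sketch of why nullptr is preferred over NULL for
overload resolution and type safety; the Dispatch overloads below are
hypothetical and not code from this change:

    void Dispatch(int) {}     // integer overload
    void Dispatch(char*) {}   // pointer overload

    void Caller() {
      // Dispatch(NULL);      // may pick the int overload or be ambiguous,
                              // depending on how NULL is defined
      Dispatch(nullptr);      // unambiguously selects the pointer overload
    }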

Change-Id: I6efba5fb6e03eb5d0a300fddb2a75bf8e2f175cb
diff --git a/runtime/scoped_thread_state_change.h b/runtime/scoped_thread_state_change.h
index 11b7df6..b93fcb4 100644
--- a/runtime/scoped_thread_state_change.h
+++ b/runtime/scoped_thread_state_change.h
@@ -36,11 +36,11 @@
   ScopedThreadStateChange(Thread* self, ThreadState new_thread_state)
       LOCKS_EXCLUDED(Locks::thread_suspend_count_lock_) ALWAYS_INLINE
       : self_(self), thread_state_(new_thread_state), expected_has_no_thread_(false) {
-    if (UNLIKELY(self_ == NULL)) {
-      // Value chosen arbitrarily and won't be used in the destructor since thread_ == NULL.
+    if (UNLIKELY(self_ == nullptr)) {
+      // Value chosen arbitrarily and won't be used in the destructor since self_ == nullptr.
       old_thread_state_ = kTerminated;
       Runtime* runtime = Runtime::Current();
-      CHECK(runtime == NULL || !runtime->IsStarted() || runtime->IsShuttingDown(self_));
+      CHECK(runtime == nullptr || !runtime->IsStarted() || runtime->IsShuttingDown(self_));
     } else {
       DCHECK_EQ(self, Thread::Current());
       // Read state without locks, ok as state is effectively thread local and we're not interested
@@ -60,10 +60,10 @@
   }
 
   ~ScopedThreadStateChange() LOCKS_EXCLUDED(Locks::thread_suspend_count_lock_) ALWAYS_INLINE {
-    if (UNLIKELY(self_ == NULL)) {
+    if (UNLIKELY(self_ == nullptr)) {
       if (!expected_has_no_thread_) {
         Runtime* runtime = Runtime::Current();
-        bool shutting_down = (runtime == NULL) || runtime->IsShuttingDown(nullptr);
+        bool shutting_down = (runtime == nullptr) || runtime->IsShuttingDown(nullptr);
         CHECK(shutting_down);
       }
     } else {
@@ -87,7 +87,7 @@
  protected:
   // Constructor used by ScopedJniThreadState for an unattached thread that has access to the VM*.
   ScopedThreadStateChange()
-      : self_(NULL), thread_state_(kTerminated), old_thread_state_(kTerminated),
+      : self_(nullptr), thread_state_(kTerminated), old_thread_state_(kTerminated),
         expected_has_no_thread_(true) {}
 
   Thread* const self_;
@@ -124,7 +124,7 @@
    * Add a local reference for an object to the indirect reference table associated with the
    * current stack frame.  When the native function returns, the reference will be discarded.
    *
-   * We need to allow the same reference to be added multiple times, and cope with NULL.
+   * We need to allow the same reference to be added multiple times, and cope with nullptr.
    *
    * This will be called on otherwise unreferenced objects. We cannot do GC allocations here, and
    * it's best if we don't grab a mutex.
@@ -133,8 +133,8 @@
   T AddLocalReference(mirror::Object* obj) const SHARED_LOCKS_REQUIRED(Locks::mutator_lock_) {
     Locks::mutator_lock_->AssertSharedHeld(Self());
     DCHECK(IsRunnable());  // Don't work with raw objects in non-runnable states.
-    if (obj == NULL) {
-      return NULL;
+    if (obj == nullptr) {
+      return nullptr;
     }
     DCHECK_NE((reinterpret_cast<uintptr_t>(obj) & 0xffff0000), 0xebad0000);
     return Env()->AddLocalReference<T>(obj);
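
For reference (not part of the diff above), a hedged sketch of how a JNI entry
point typically uses AddLocalReference through ScopedObjectAccess; the native
method itself is hypothetical:

    static jobject MyNativeMethod(JNIEnv* env, jobject /* thiz */) {
      ScopedObjectAccess soa(env);    // transitions the calling thread to Runnable
      mirror::Object* obj = nullptr;  // ... produce a managed object, possibly null ...
      // AddLocalReference copes with nullptr and simply returns nullptr in that case.
      return soa.AddLocalReference<jobject>(obj);
    }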