Merge changes I81466537,I6ad7451e into rvc-dev

* changes:
  Minor fix on CarService.dump()
  Fixed car service dumpsys help documentation.
diff --git a/car-lib/api/test-current.txt b/car-lib/api/test-current.txt
index 09d9fd7..578a80f 100644
--- a/car-lib/api/test-current.txt
+++ b/car-lib/api/test-current.txt
@@ -53,7 +53,7 @@
 
   public final class CarUserManager {
     method @RequiresPermission(anyOf={android.Manifest.permission.INTERACT_ACROSS_USERS, android.Manifest.permission.INTERACT_ACROSS_USERS_FULL}) public void addListener(@NonNull java.util.concurrent.Executor, @NonNull android.car.user.CarUserManager.UserLifecycleListener);
-    method public int createUser(@Nullable String);
+    method public int createUser(@Nullable String, boolean);
     method public static String lifecycleEventTypeToString(int);
     method @RequiresPermission(anyOf={android.Manifest.permission.INTERACT_ACROSS_USERS, android.Manifest.permission.INTERACT_ACROSS_USERS_FULL}) public void removeListener(@NonNull android.car.user.CarUserManager.UserLifecycleListener);
     method public void removeUser(int);
diff --git a/car-lib/src/android/car/user/CarUserManager.java b/car-lib/src/android/car/user/CarUserManager.java
index 810373b..ba22336 100644
--- a/car-lib/src/android/car/user/CarUserManager.java
+++ b/car-lib/src/android/car/user/CarUserManager.java
@@ -380,11 +380,16 @@
     @TestApi
     // TODO(b/144120654): temp method used by CTS; will eventually be refactored to take a listener
     @UserIdInt
-    public int createUser(@Nullable String name) {
+    public int createUser(@Nullable String name, boolean isGuestUser) {
         Log.i(TAG, "createUser()"); // name is PII
         UserManager userManager = getContext().getSystemService(UserManager.class);
-        UserInfo info = userManager.createUser(name, /* flags= */ 0);
-        return info.id;
+
+        if (isGuestUser) {
+            return userManager.createUser(name, UserManager.USER_TYPE_FULL_GUEST, /* flags= */ 0)
+                    .id;
+        }
+
+        return userManager.createUser(name, /* flags= */ 0).id;
     }
 
     /** @hide */
diff --git a/evs/apps/default/Android.bp b/evs/apps/default/Android.bp
index 413e4ed..7fd60ef 100644
--- a/evs/apps/default/Android.bp
+++ b/evs/apps/default/Android.bp
@@ -35,9 +35,9 @@
     ],
 
     shared_libs: [
+        "libbase",
         "libbinder",
         "libcutils",
-        "liblog",
         "libutils",
         "libui",
         "libhidlbase",
diff --git a/evs/apps/default/EvsStateControl.cpp b/evs/apps/default/EvsStateControl.cpp
index 3f98f6e..b963551 100644
--- a/evs/apps/default/EvsStateControl.cpp
+++ b/evs/apps/default/EvsStateControl.cpp
@@ -22,7 +22,7 @@
 #include <stdio.h>
 #include <string.h>
 
-#include <log/log.h>
+#include <android-base/logging.h>
 #include <inttypes.h>
 #include <utils/SystemClock.h>
 #include <binder/IServiceManager.h>
@@ -66,13 +66,12 @@
 
     // This way we only ever deal with cameras which exist in the system
     // Build our set of cameras for the states we support
-    ALOGD("Requesting camera list");
+    LOG(DEBUG) << "Requesting camera list";
     mEvs->getCameraList_1_1(
         [this, &config](hidl_vec<CameraDesc> cameraList) {
-            ALOGI("Camera list callback received %zu cameras",
-                  cameraList.size());
            LOG(INFO) << "Camera list callback received " << cameraList.size() << " cameras.";
             for (auto&& cam: cameraList) {
-                ALOGD("Found camera %s", cam.v1.cameraId.c_str());
+                LOG(DEBUG) << "Found camera " << cam.v1.cameraId;
                 bool cameraConfigFound = false;
 
                 // Check our configuration for information about this camera
@@ -105,14 +104,14 @@
                     }
                 }
                 if (!cameraConfigFound) {
-                    ALOGW("No config information for hardware camera %s",
-                          cam.v1.cameraId.c_str());
+                    LOG(WARNING) << "No config information for hardware camera "
+                                 << cam.v1.cameraId;
                 }
             }
         }
     );
 
-    ALOGD("State controller ready");
+    LOG(DEBUG) << "State controller ready";
 }
 
 
@@ -135,7 +134,7 @@
 
 
 void EvsStateControl::updateLoop() {
-    ALOGD("Starting EvsStateControl update loop");
+    LOG(DEBUG) << "Starting EvsStateControl update loop";
 
     bool run = true;
     while (run) {
@@ -161,7 +160,7 @@
 
         // Review vehicle state and choose an appropriate renderer
         if (!selectStateForCurrentConditions()) {
-            ALOGE("selectStateForCurrentConditions failed so we're going to die");
+            LOG(ERROR) << "selectStateForCurrentConditions failed so we're going to die";
             break;
         }
 
@@ -175,7 +174,7 @@
             );
 
             if (tgtBuffer.memHandle == nullptr) {
-                ALOGE("Didn't get requested output buffer -- skipping this frame.");
+                LOG(ERROR) << "Didn't get requested output buffer -- skipping this frame.";
             } else {
                 // Generate our output image
                 if (!mCurrentRenderer->drawFrame(convertBufferDesc(tgtBuffer))) {
@@ -193,11 +192,11 @@
         }
     }
 
-    ALOGW("EvsStateControl update loop ending");
+    LOG(WARNING) << "EvsStateControl update loop ending";
 
     // TODO:  Fix it so we can exit cleanly from the main thread instead
     printf("Shutting down app due to state control loop ending\n");
-    ALOGE("KILLING THE APP FROM THE EvsStateControl LOOP ON DRAW FAILURE!!!");
+    LOG(ERROR) << "KILLING THE APP FROM THE EvsStateControl LOOP ON DRAW FAILURE!!!";
     exit(1);
 }
 
@@ -209,7 +208,7 @@
     if (mVehicle != nullptr) {
         // Query the car state
         if (invokeGet(&mGearValue) != StatusCode::OK) {
-            ALOGE("GEAR_SELECTION not available from vehicle.  Exiting.");
+            LOG(ERROR) << "GEAR_SELECTION not available from vehicle.  Exiting.";
             return false;
         }
         if ((mTurnSignalValue.prop == 0) || (invokeGet(&mTurnSignalValue) != StatusCode::OK)) {
@@ -278,11 +277,11 @@
         return true;
     }
 
-    ALOGD("Switching to state %d.", desiredState);
-    ALOGD("  Current state %d has %zu cameras", mCurrentState,
-          mCameraList[mCurrentState].size());
-    ALOGD("  Desired state %d has %zu cameras", desiredState,
-          mCameraList[desiredState].size());
+    LOG(DEBUG) << "Switching to state " << desiredState;
+    LOG(DEBUG) << "  Current state " << mCurrentState
+               << " has " << mCameraList[mCurrentState].size() << " cameras";
+    LOG(DEBUG) << "  Desired state " << desiredState
+               << " has " << mCameraList[desiredState].size() << " cameras";
 
     if (!isGlReady && !isSfReady()) {
         // Graphics is not ready yet; using CPU renderer.
@@ -290,12 +289,12 @@
             mDesiredRenderer = std::make_unique<RenderPixelCopy>(mEvs,
                                                                  mCameraList[desiredState][0]);
             if (!mDesiredRenderer) {
-                ALOGE("Failed to construct Pixel Copy renderer.  Skipping state change.");
+                LOG(ERROR) << "Failed to construct Pixel Copy renderer.  Skipping state change.";
                 return false;
             }
         } else {
-            ALOGD("Unsupported, desiredState %d has %u cameras.",
-                  desiredState, static_cast<unsigned int>(mCameraList[desiredState].size()));
+            LOG(DEBUG) << "Unsupported, desiredState " << desiredState
+                       << " has " << mCameraList[desiredState].size() << " cameras.";
         }
     } else {
         // Assumes that SurfaceFlinger is available always after being launched.
@@ -306,7 +305,7 @@
             mDesiredRenderer = std::make_unique<RenderDirectView>(mEvs,
                                                                   mCameraDescList[desiredState][0]);
             if (!mDesiredRenderer) {
-                ALOGE("Failed to construct direct renderer.  Skipping state change.");
+                LOG(ERROR) << "Failed to construct direct renderer.  Skipping state change.";
                 return false;
             }
         } else if (mCameraList[desiredState].size() > 1 || desiredState == PARKING) {
@@ -316,12 +315,12 @@
                                                                mCameraList[desiredState],
                                                                mConfig);
             if (!mDesiredRenderer) {
-                ALOGE("Failed to construct top view renderer.  Skipping state change.");
+                LOG(ERROR) << "Failed to construct top view renderer.  Skipping state change.";
                 return false;
             }
         } else {
-            ALOGD("Unsupported, desiredState %d has %u cameras.",
-                  desiredState, static_cast<unsigned int>(mCameraList[desiredState].size()));
+            LOG(DEBUG) << "Unsupported, desiredState " << desiredState
+                       << " has " << mCameraList[desiredState].size() << " cameras.";
         }
 
         // GL renderer is now ready.
@@ -336,29 +335,32 @@
 
     // Now set the display state based on whether we have a video feed to show
     if (mDesiredRenderer == nullptr) {
-        ALOGD("Turning off the display");
+        LOG(DEBUG) << "Turning off the display";
         mDisplay->setDisplayState(EvsDisplayState::NOT_VISIBLE);
     } else {
         mCurrentRenderer = std::move(mDesiredRenderer);
 
         // Start the camera stream
-        ALOGD("EvsStartCameraStreamTiming start time: %" PRId64 "ms", android::elapsedRealtime());
+        LOG(DEBUG) << "EvsStartCameraStreamTiming start time: "
+                   << android::elapsedRealtime() << " ms.";
         if (!mCurrentRenderer->activate()) {
-            ALOGE("New renderer failed to activate");
+            LOG(ERROR) << "New renderer failed to activate";
             return false;
         }
 
         // Activate the display
-        ALOGD("EvsActivateDisplayTiming start time: %" PRId64 "ms", android::elapsedRealtime());
+        LOG(DEBUG) << "EvsActivateDisplayTiming start time: "
+                   << android::elapsedRealtime() << " ms.";
         Return<EvsResult> result = mDisplay->setDisplayState(EvsDisplayState::VISIBLE_ON_NEXT_FRAME);
         if (result != EvsResult::OK) {
-            ALOGE("setDisplayState returned an error (%d)", (EvsResult)result);
+            LOG(ERROR) << "setDisplayState returned an error "
+                       << result.description();
             return false;
         }
     }
 
     // Record our current state
-    ALOGI("Activated state %d.", desiredState);
+    LOG(INFO) << "Activated state " << desiredState;
     mCurrentState = desiredState;
 
     return true;
diff --git a/evs/apps/default/RenderBase.cpp b/evs/apps/default/RenderBase.cpp
index 319fefe..0db7c9d 100644
--- a/evs/apps/default/RenderBase.cpp
+++ b/evs/apps/default/RenderBase.cpp
@@ -17,7 +17,7 @@
 #include "RenderBase.h"
 #include "glError.h"
 
-#include <log/log.h>
+#include <android-base/logging.h>
 #include <ui/GraphicBuffer.h>
 
 // Eventually we shouldn't need this dependency, but for now the
@@ -62,17 +62,17 @@
     // Set up our OpenGL ES context associated with the default display (though we won't be visible)
     EGLDisplay display = eglGetDisplay(EGL_DEFAULT_DISPLAY);
     if (display == EGL_NO_DISPLAY) {
-        ALOGE("Failed to get egl display");
+        LOG(ERROR) << "Failed to get egl display";
         return false;
     }
 
     EGLint major = 0;
     EGLint minor = 0;
     if (!eglInitialize(display, &major, &minor)) {
-        ALOGE("Failed to initialize EGL: %s", getEGLError());
+        LOG(ERROR) << "Failed to initialize EGL: " << getEGLError();
         return false;
     } else {
-        ALOGI("Intiialized EGL at %d.%d", major, minor);
        LOG(INFO) << "Initialized EGL at " << major << "." << minor;
     }
 
 
@@ -80,7 +80,7 @@
     EGLConfig egl_config;
     EGLint num_configs;
     if (!eglChooseConfig(display, config_attribs, &egl_config, 1, &num_configs)) {
-        ALOGE("eglChooseConfig() failed with error: %s", getEGLError());
+        LOG(ERROR) << "eglChooseConfig() failed with error: " << getEGLError();
         return false;
     }
 
@@ -90,10 +90,10 @@
     EGLint surface_attribs[] = { EGL_WIDTH, 1, EGL_HEIGHT, 1, EGL_NONE };
     sDummySurface = eglCreatePbufferSurface(display, egl_config, surface_attribs);
     if (sDummySurface == EGL_NO_SURFACE) {
-        ALOGE("Failed to create OpenGL ES Dummy surface: %s", getEGLError());
+        LOG(ERROR) << "Failed to create OpenGL ES Dummy surface: " << getEGLError();
         return false;
     } else {
-        ALOGI("Dummy surface looks good!  :)");
+        LOG(INFO) << "Dummy surface looks good!  :)";
     }
 
 
@@ -102,23 +102,23 @@
     //
     EGLContext context = eglCreateContext(display, egl_config, EGL_NO_CONTEXT, context_attribs);
     if (context == EGL_NO_CONTEXT) {
-        ALOGE("Failed to create OpenGL ES Context: %s", getEGLError());
+        LOG(ERROR) << "Failed to create OpenGL ES Context: " << getEGLError();
         return false;
     }
 
 
     // Activate our render target for drawing
     if (!eglMakeCurrent(display, sDummySurface, sDummySurface, context)) {
-        ALOGE("Failed to make the OpenGL ES Context current: %s", getEGLError());
+        LOG(ERROR) << "Failed to make the OpenGL ES Context current: " << getEGLError();
         return false;
     } else {
-        ALOGI("We made our context current!  :)");
+        LOG(INFO) << "We made our context current!  :)";
     }
 
 
     // Report the extensions available on this implementation
     const char* gl_extensions = (const char*) glGetString(GL_EXTENSIONS);
-    ALOGI("GL EXTENSIONS:\n  %s", gl_extensions);
+    LOG(INFO) << "GL EXTENSIONS:\n  " << gl_extensions;
 
 
     // Reserve handles for the color and depth targets we'll be setting up
@@ -143,7 +143,7 @@
         reinterpret_cast<const AHardwareBuffer_Desc *>(&tgtBuffer.buffer.description);
     // Hardcoded to RGBx for now
     if (pDesc->format != HAL_PIXEL_FORMAT_RGBA_8888) {
-        ALOGE("Unsupported target buffer format");
+        LOG(ERROR) << "Unsupported target buffer format";
         return false;
     }
 
@@ -157,7 +157,7 @@
                                                      GRALLOC_USAGE_HW_RENDER,
                                                      pDesc->stride);
     if (pGfxBuffer.get() == nullptr) {
-        ALOGE("Failed to allocate GraphicBuffer to wrap image handle");
+        LOG(ERROR) << "Failed to allocate GraphicBuffer to wrap image handle";
         return false;
     }
 
@@ -168,7 +168,7 @@
                                   EGL_NATIVE_BUFFER_ANDROID, clientBuf,
                                   eglImageAttributes);
     if (sKHRimage == EGL_NO_IMAGE_KHR) {
-        ALOGE("error creating EGLImage for target buffer: %s", getEGLError());
+        LOG(ERROR) << "Error creating EGLImage for target buffer: " << getEGLError();
         return false;
     }
 
@@ -176,20 +176,20 @@
     glBindRenderbuffer(GL_RENDERBUFFER, sColorBuffer);
     glEGLImageTargetRenderbufferStorageOES(GL_RENDERBUFFER, static_cast<GLeglImageOES>(sKHRimage));
     if (eglGetError() != EGL_SUCCESS) {
-        ALOGI("glEGLImageTargetRenderbufferStorageOES => %s", getEGLError());
+        LOG(INFO) << "glEGLImageTargetRenderbufferStorageOES => " << getEGLError();
         return false;
     }
 
     glFramebufferRenderbuffer(GL_FRAMEBUFFER, GL_COLOR_ATTACHMENT0, GL_RENDERBUFFER, sColorBuffer);
     if (eglGetError() != EGL_SUCCESS) {
-        ALOGE("glFramebufferRenderbuffer => %s", getEGLError());
+        LOG(ERROR) << "glFramebufferRenderbuffer => " << getEGLError();
         return false;
     }
 
     GLenum checkResult = glCheckFramebufferStatus(GL_FRAMEBUFFER);
     if (checkResult != GL_FRAMEBUFFER_COMPLETE) {
-        ALOGE("Offscreen framebuffer not configured successfully (%d: %s)",
-              checkResult, getGLFramebufferError());
+        LOG(ERROR) << "Offscreen framebuffer not configured successfully ("
+                   << checkResult << ": " << getGLFramebufferError() << ")";
         return false;
     }
 
diff --git a/evs/apps/default/RenderDirectView.cpp b/evs/apps/default/RenderDirectView.cpp
index fd44851..4a8db70 100644
--- a/evs/apps/default/RenderDirectView.cpp
+++ b/evs/apps/default/RenderDirectView.cpp
@@ -20,10 +20,10 @@
 #include "shader.h"
 #include "shader_simpleTex.h"
 
-#include <log/log.h>
 #include <math/mat4.h>
 #include <system/camera_metadata.h>
 #include <android/hardware/camera/device/3.2/ICameraDevice.h>
+#include <android-base/logging.h>
 
 using ::android::hardware::camera::device::V3_2::Stream;
 using ::android::hardware::graphics::common::V1_0::PixelFormat;
@@ -52,7 +52,7 @@
 bool RenderDirectView::activate() {
     // Ensure GL is ready to go...
     if (!prepareGL()) {
-        ALOGE("Error initializing GL");
+        LOG(ERROR) << "Error initializing GL";
         return false;
     }
 
@@ -62,7 +62,7 @@
                                             pixShader_simpleTexture,
                                             "simpleTexture");
         if (!mShaderProgram) {
-            ALOGE("Error buliding shader program");
+            LOG(ERROR) << "Error building shader program";
             return false;
         }
     }
@@ -100,7 +100,8 @@
                 ++ptr;
             }
         } else {
-            ALOGW("No stream configuration data is found; default parameters will be used.");
+            LOG(WARNING) << "No stream configuration data is found; "
+                         << "default parameters will be used.";
         }
     }
 
@@ -114,7 +115,7 @@
                                       foundCfg ? std::move(targetCfg) : nullptr,
                                       sDisplay));
     if (!mTexture) {
-        ALOGE("Failed to set up video texture for %s", mCameraDesc.v1.cameraId.c_str());
+        LOG(ERROR) << "Failed to set up video texture for " << mCameraDesc.v1.cameraId;
 // TODO:  For production use, we may actually want to fail in this case, but not yet...
 //       return false;
     }
@@ -135,7 +136,7 @@
 bool RenderDirectView::drawFrame(const BufferDesc& tgtBuffer) {
     // Tell GL to render to the given buffer
     if (!attachRenderTarget(tgtBuffer)) {
-        ALOGE("Failed to attached render target");
        LOG(ERROR) << "Failed to attach render target";
         return false;
     }
 
@@ -145,7 +146,7 @@
     // Set up the model to clip space transform (identity matrix if we're modeling in screen space)
     GLint loc = glGetUniformLocation(mShaderProgram, "cameraMat");
     if (loc < 0) {
-        ALOGE("Couldn't set shader parameter 'cameraMat'");
+        LOG(ERROR) << "Couldn't set shader parameter 'cameraMat'";
         return false;
     } else {
         const android::mat4 identityMatrix;
@@ -161,7 +162,7 @@
 
     GLint sampler = glGetUniformLocation(mShaderProgram, "tex");
     if (sampler < 0) {
-        ALOGE("Couldn't set shader parameter 'tex'");
+        LOG(ERROR) << "Couldn't set shader parameter 'tex'";
         return false;
     } else {
         // Tell the sampler we looked up from the shader to use texture slot 0 as its source
diff --git a/evs/apps/default/RenderPixelCopy.cpp b/evs/apps/default/RenderPixelCopy.cpp
index ddfacb0..186269f 100644
--- a/evs/apps/default/RenderPixelCopy.cpp
+++ b/evs/apps/default/RenderPixelCopy.cpp
@@ -17,7 +17,7 @@
 #include "RenderPixelCopy.h"
 #include "FormatConvert.h"
 
-#include <log/log.h>
+#include <android-base/logging.h>
 
 
 RenderPixelCopy::RenderPixelCopy(sp<IEvsEnumerator> enumerator,
@@ -34,20 +34,20 @@
         .withDefault(nullptr);
 
     if (pCamera.get() == nullptr) {
-        ALOGE("Failed to allocate new EVS Camera interface");
+        LOG(ERROR) << "Failed to allocate new EVS Camera interface";
         return false;
     }
 
     // Initialize the stream that will help us update this texture's contents
     sp<StreamHandler> pStreamHandler = new StreamHandler(pCamera);
     if (pStreamHandler.get() == nullptr) {
-        ALOGE("failed to allocate FrameHandler");
+        LOG(ERROR) << "Failed to allocate FrameHandler";
         return false;
     }
 
     // Start the video stream
     if (!pStreamHandler->startStream()) {
-        ALOGE("start stream failed");
+        LOG(ERROR) << "Start stream failed";
         return false;
     }
 
@@ -83,7 +83,7 @@
     if (tgtPixels) {
         if (pTgtDesc->format != HAL_PIXEL_FORMAT_RGBA_8888) {
             // We always expect 32 bit RGB for the display output for now.  Is there a need for 565?
-            ALOGE("Diplay buffer is always expected to be 32bit RGBA");
            LOG(ERROR) << "Display buffer is always expected to be 32bit RGBA";
             success = false;
         } else {
             // Make sure we have the latest frame data
@@ -104,7 +104,7 @@
                 unsigned char* srcPixels = nullptr;
                 src->lock(GRALLOC_USAGE_SW_READ_OFTEN, (void**)&srcPixels);
                 if (!srcPixels) {
-                    ALOGE("Failed to get pointer into src image data");
+                    LOG(ERROR) << "Failed to get pointer into src image data";
                 }
 
                 // Make sure we don't run off the end of either buffer
@@ -136,7 +136,7 @@
             }
         }
     } else {
-        ALOGE("Failed to lock buffer contents for contents transfer");
+        LOG(ERROR) << "Failed to lock buffer contents for contents transfer";
         success = false;
     }
 
diff --git a/evs/apps/default/RenderTopView.cpp b/evs/apps/default/RenderTopView.cpp
index 7c82226..bfec3f2 100644
--- a/evs/apps/default/RenderTopView.cpp
+++ b/evs/apps/default/RenderTopView.cpp
@@ -21,10 +21,10 @@
 #include "shader_simpleTex.h"
 #include "shader_projectedTex.h"
 
-#include <log/log.h>
 #include <math/mat4.h>
 #include <math/vec3.h>
 #include <android/hardware/camera/device/3.2/ICameraDevice.h>
+#include <android-base/logging.h>
 
 using ::android::hardware::camera::device::V3_2::Stream;
 
@@ -112,7 +112,7 @@
 bool RenderTopView::activate() {
     // Ensure GL is ready to go...
     if (!prepareGL()) {
-        ALOGE("Error initializing GL");
+        LOG(ERROR) << "Error initializing GL";
         return false;
     }
 
@@ -121,14 +121,14 @@
                                                  pixShader_simpleTexture,
                                                  "simpleTexture");
     if (!mPgmAssets.simpleTexture) {
-        ALOGE("Failed to build shader program");
+        LOG(ERROR) << "Failed to build shader program";
         return false;
     }
     mPgmAssets.projectedTexture = buildShaderProgram(vtxShader_projectedTexture,
                                                     pixShader_projectedTexture,
                                                     "projectedTexture");
     if (!mPgmAssets.projectedTexture) {
-        ALOGE("Failed to build shader program");
+        LOG(ERROR) << "Failed to build shader program";
         return false;
     }
 
@@ -137,7 +137,7 @@
     mTexAssets.checkerBoard.reset(createTextureFromPng(
                                   "/system/etc/automotive/evs/LabeledChecker.png"));
     if (!mTexAssets.checkerBoard) {
-        ALOGE("Failed to load checkerboard texture");
+        LOG(ERROR) << "Failed to load checkerboard texture";
         return false;
     }
 
@@ -145,7 +145,7 @@
     mTexAssets.carTopView.reset(createTextureFromPng(
                                 "/system/etc/automotive/evs/CarFromTop.png"));
     if (!mTexAssets.carTopView) {
-        ALOGE("Failed to load carTopView texture");
+        LOG(ERROR) << "Failed to load carTopView texture";
         return false;
     }
 
@@ -157,8 +157,8 @@
                                          nullptr,
                                          sDisplay));
         if (!cam.tex) {
-            ALOGE("Failed to set up video texture for %s (%s)",
-                  cam.info.cameraId.c_str(), cam.info.function.c_str());
+            LOG(ERROR) << "Failed to set up video texture for " << cam.info.cameraId
+                       << " (" << cam.info.function << ")";
 // TODO:  For production use, we may actually want to fail in this case, but not yet...
 //            return false;
         }
@@ -182,7 +182,7 @@
 bool RenderTopView::drawFrame(const BufferDesc& tgtBuffer) {
     // Tell GL to render to the given buffer
     if (!attachRenderTarget(tgtBuffer)) {
-        ALOGE("Failed to attached render target");
        LOG(ERROR) << "Failed to attach render target";
         return false;
     }
 
diff --git a/evs/apps/default/StreamHandler.cpp b/evs/apps/default/StreamHandler.cpp
index 125d76a..328f454 100644
--- a/evs/apps/default/StreamHandler.cpp
+++ b/evs/apps/default/StreamHandler.cpp
@@ -19,7 +19,7 @@
 #include <stdio.h>
 #include <string.h>
 
-#include <log/log.h>
+#include <android-base/logging.h>
 #include <cutils/native_handle.h>
 
 using ::android::hardware::automotive::evs::V1_0::EvsResult;
@@ -98,10 +98,11 @@
     std::unique_lock<std::mutex> lock(mLock);
 
     if (mHeldBuffer >= 0) {
-        ALOGE("Ignored call for new frame while still holding the old one.");
+        LOG(ERROR) << "Ignored call for new frame while still holding the old one.";
     } else {
         if (mReadyBuffer < 0) {
-            ALOGE("Returning invalid buffer because we don't have any.  Call newFrameAvailable first?");
+            LOG(ERROR) << "Returning invalid buffer because we don't have any.  "
+                       << "Call newFrameAvailable first?";
             mReadyBuffer = 0;   // This is a lie!
         }
 
@@ -119,7 +120,7 @@
 
     // We better be getting back the buffer we original delivered!
     if ((mHeldBuffer < 0) || (bufDesc_1_1.bufferId != mBuffers[mHeldBuffer].bufferId)) {
-        ALOGE("StreamHandler::doneWithFrame got an unexpected bufDesc_1_1!");
+        LOG(ERROR) << "StreamHandler::doneWithFrame got an unexpected bufDesc_1_1!";
     }
 
     // Send the buffer back to the underlying camera
@@ -134,7 +135,7 @@
 
 
 Return<void> StreamHandler::deliverFrame(const BufferDesc_1_0& bufDesc_1_0) {
-    ALOGI("Ignores a frame delivered from v1.0 EVS service.");
+    LOG(INFO) << "Ignores a frame delivered from v1.0 EVS service.";
     mCamera->doneWithFrame(bufDesc_1_0);
 
     return Void();
@@ -142,14 +143,15 @@
 
 
 Return<void> StreamHandler::deliverFrame_1_1(const hidl_vec<BufferDesc_1_1>& buffers) {
-    ALOGD("Received frames from the camera");
+    LOG(DEBUG) << "Received frames from the camera";
 
     // Take the lock to protect our frame slots and running state variable
     std::unique_lock <std::mutex> lock(mLock);
     BufferDesc_1_1 bufDesc = buffers[0];
     if (bufDesc.buffer.nativeHandle.getNativeHandle() == nullptr) {
         // Signal that the last frame has been received and the stream is stopped
-        ALOGW("Invalid null frame (id: 0x%X) is ignored", bufDesc.bufferId);
+        LOG(WARNING) << "Invalid null frame (id: " << std::hex << bufDesc.bufferId
+                     << ") is ignored";
     } else {
         // Do we already have a "ready" frame?
         if (mReadyBuffer >= 0) {
@@ -190,12 +192,13 @@
                 // Signal that the last frame has been received and the stream is stopped
                 mRunning = false;
             }
-            ALOGI("Received a STREAM_STOPPED event");
+            LOG(INFO) << "Received a STREAM_STOPPED event";
             break;
         }
 
         case EvsEventType::PARAMETER_CHANGED:
-            ALOGI("Camera parameter 0x%X is set to 0x%X", event.payload[0], event.payload[1]);
+            LOG(INFO) << "Camera parameter " << std::hex << event.payload[0]
+                      << " is set to " << event.payload[1];
             break;
 
         // Below events are ignored in reference implementation.
@@ -204,10 +207,11 @@
         case EvsEventType::FRAME_DROPPED:
         [[fallthrough]];
         case EvsEventType::TIMEOUT:
-            ALOGI("Event 0x%X is received but ignored", event.aType);
+            LOG(INFO) << "Event " << std::hex << static_cast<unsigned>(event.aType)
                      << " is received but ignored.";
             break;
         default:
-            ALOGE("Unknown event id 0x%X", event.aType);
+            LOG(ERROR) << "Unknown event id: " << static_cast<unsigned>(event.aType);
             break;
     }
 
diff --git a/evs/apps/default/TexWrapper.cpp b/evs/apps/default/TexWrapper.cpp
index 7ec2191..37cb7a2 100644
--- a/evs/apps/default/TexWrapper.cpp
+++ b/evs/apps/default/TexWrapper.cpp
@@ -16,19 +16,19 @@
 #include "TexWrapper.h"
 #include "glError.h"
 
-#include "log/log.h"
-
 #include <fcntl.h>
 #include <malloc.h>
 #include <png.h>
 
+#include <android-base/logging.h>
+
 
 /* Create an new empty GL texture that will be filled later */
 TexWrapper::TexWrapper() {
     GLuint textureId;
     glGenTextures(1, &textureId);
     if (textureId <= 0) {
-        ALOGE("Didn't get a texture handle allocated: %s", getEGLError());
+        LOG(ERROR) << "Didn't get a texture handle allocated: " << getEGLError();
     } else {
         // Store the basic texture properties
         id = textureId;
diff --git a/evs/apps/default/VideoTex.cpp b/evs/apps/default/VideoTex.cpp
index e918a39..c9fc895 100644
--- a/evs/apps/default/VideoTex.cpp
+++ b/evs/apps/default/VideoTex.cpp
@@ -27,6 +27,7 @@
 
 #include <ui/GraphicBuffer.h>
 #include <android/hardware/camera/device/3.2/ICameraDevice.h>
+#include <android-base/logging.h>
 
 // Eventually we shouldn't need this dependency, but for now the
 // graphics allocator interface isn't fully supported on all platforms
@@ -96,7 +97,7 @@
                                                      GRALLOC_USAGE_HW_TEXTURE,
                                                      pDesc->stride);
     if (pGfxBuffer.get() == nullptr) {
-        ALOGE("Failed to allocate GraphicBuffer to wrap image handle");
+        LOG(ERROR) << "Failed to allocate GraphicBuffer to wrap image handle";
         // Returning "true" in this error condition because we already released the
         // previous image (if any) and so the texture may change in unpredictable ways now!
         return true;
@@ -110,7 +111,7 @@
                                   eglImageAttributes);
     if (mKHRimage == EGL_NO_IMAGE_KHR) {
         const char *msg = getEGLError();
-        ALOGE("error creating EGLImage: %s", msg);
+        LOG(ERROR) << "Error creating EGLImage: " << msg;
     } else {
         // Update the texture handle we already created to refer to this gralloc buffer
         glActiveTexture(GL_TEXTURE0);
@@ -147,21 +148,21 @@
     }
 
     if (pCamera.get() == nullptr) {
-        ALOGE("Failed to allocate new EVS Camera interface for %s", evsCameraId);
+        LOG(ERROR) << "Failed to allocate new EVS Camera interface for " << evsCameraId;
         return nullptr;
     }
 
     // Initialize the stream that will help us update this texture's contents
     sp<StreamHandler> pStreamHandler = new StreamHandler(pCamera);
     if (pStreamHandler.get() == nullptr) {
-        ALOGE("failed to allocate FrameHandler");
+        LOG(ERROR) << "Failed to allocate FrameHandler";
         return nullptr;
     }
 
     // Start the video stream
     if (!pStreamHandler->startStream()) {
         printf("Couldn't start the camera stream (%s)\n", evsCameraId);
-        ALOGE("start stream failed for %s", evsCameraId);
+        LOG(ERROR) << "Start stream failed for " << evsCameraId;
         return nullptr;
     }
 
diff --git a/evs/apps/default/evs_app.cpp b/evs/apps/default/evs_app.cpp
index bc1b307..65519c7 100644
--- a/evs/apps/default/evs_app.cpp
+++ b/evs/apps/default/evs_app.cpp
@@ -22,6 +22,7 @@
 #include <utils/Log.h>
 
 #include "android-base/macros.h"    // arraysize
+#include "android-base/logging.h"
 
 #include <android/hardware/automotive/evs/1.1/IEvsEnumerator.h>
 #include <android/hardware/automotive/evs/1.1/IEvsDisplay.h>
@@ -56,7 +57,8 @@
     options.setToExternal(optionsData, arraysize(optionsData));
     StatusCode status = pVnet->subscribe(listener, options);
     if (status != StatusCode::OK) {
-        ALOGW("VHAL subscription for property 0x%08X failed with code %d.", propertyId, status);
+        LOG(WARNING) << "VHAL subscription for property " << static_cast<int32_t>(propertyId)
+                     << " failed with code " << static_cast<int32_t>(status);
         return false;
     }
 
@@ -67,13 +69,13 @@
 // Main entry point
 int main(int argc, char** argv)
 {
-    ALOGI("EVS app starting\n");
+    LOG(INFO) << "EVS app starting";
 
     // Set up default behavior, then check for command line options
     bool useVehicleHal = true;
     bool printHelp = false;
     const char* evsServiceName = "default";
-    int displayId = 0;
+    int displayId = 1;
     for (int i=1; i< argc; i++) {
         if (strcmp(argv[i], "--test") == 0) {
             useVehicleHal = false;
@@ -92,15 +94,16 @@
     }
     if (printHelp) {
         printf("Options include:\n");
-        printf("  --test   Do not talk to Vehicle Hal, but simulate 'reverse' instead\n");
-        printf("  --hw     Bypass EvsManager by connecting directly to EvsEnumeratorHw\n");
-        printf("  --mock   Connect directly to EvsEnumeratorHw-Mock\n");
+        printf("  --test    Do not talk to Vehicle Hal, but simulate 'reverse' instead\n");
+        printf("  --hw      Bypass EvsManager by connecting directly to EvsEnumeratorHw\n");
+        printf("  --mock    Connect directly to EvsEnumeratorHw-Mock\n");
+        printf("  --display Specify the display to use\n");
     }
 
     // Load our configuration information
     ConfigManager config;
     if (!config.initialize("/system/etc/automotive/evs/config.json")) {
-        ALOGE("Missing or improper configuration for the EVS application.  Exiting.");
+        LOG(ERROR) << "Missing or improper configuration for the EVS application.  Exiting.";
         return 1;
     }
 
@@ -114,20 +117,21 @@
     sp<EvsVehicleListener> pEvsListener = new EvsVehicleListener();
 
     // Get the EVS manager service
-    ALOGI("Acquiring EVS Enumerator");
+    LOG(INFO) << "Acquiring EVS Enumerator";
     android::sp<IEvsEnumerator> pEvs = IEvsEnumerator::getService(evsServiceName);
     if (pEvs.get() == nullptr) {
-        ALOGE("getService(%s) returned NULL.  Exiting.", evsServiceName);
+        LOG(ERROR) << "getService(" << evsServiceName
+                   << ") returned NULL.  Exiting.";
         return 1;
     }
 
     // Request exclusive access to the EVS display
-    ALOGI("Acquiring EVS Display");
+    LOG(INFO) << "Acquiring EVS Display";
 
     // We'll use an available display device.
     android::sp<IEvsDisplay> pDisplay = pEvs->openDisplay_1_1(displayId);
     if (pDisplay.get() == nullptr) {
-        ALOGE("EVS Display unavailable.  Exiting.");
+        LOG(ERROR) << "EVS Display unavailable.  Exiting.";
         return 1;
     }
     config.setActiveDisplayId(displayId);
@@ -135,41 +139,41 @@
     // Connect to the Vehicle HAL so we can monitor state
     sp<IVehicle> pVnet;
     if (useVehicleHal) {
-        ALOGI("Connecting to Vehicle HAL");
+        LOG(INFO) << "Connecting to Vehicle HAL";
         pVnet = IVehicle::getService();
         if (pVnet.get() == nullptr) {
-            ALOGE("Vehicle HAL getService returned NULL.  Exiting.");
+            LOG(ERROR) << "Vehicle HAL getService returned NULL.  Exiting.";
             return 1;
         } else {
             // Register for vehicle state change callbacks we care about
             // Changes in these values are what will trigger a reconfiguration of the EVS pipeline
             if (!subscribeToVHal(pVnet, pEvsListener, VehicleProperty::GEAR_SELECTION)) {
-                ALOGE("Without gear notification, we can't support EVS.  Exiting.");
+                LOG(ERROR) << "Without gear notification, we can't support EVS.  Exiting.";
                 return 1;
             }
             if (!subscribeToVHal(pVnet, pEvsListener, VehicleProperty::TURN_SIGNAL_STATE)) {
-                ALOGW("Didn't get turn signal notificaitons, so we'll ignore those.");
+                LOG(WARNING) << "Didn't get turn signal notifications, so we'll ignore those.";
             }
         }
     } else {
-        ALOGW("Test mode selected, so not talking to Vehicle HAL");
+        LOG(WARNING) << "Test mode selected, so not talking to Vehicle HAL";
     }
 
     // Configure ourselves for the current vehicle state at startup
-    ALOGI("Constructing state controller");
+    LOG(INFO) << "Constructing state controller";
     EvsStateControl *pStateController = new EvsStateControl(pVnet, pEvs, pDisplay, config);
     if (!pStateController->startUpdateLoop()) {
-        ALOGE("Initial configuration failed.  Exiting.");
+        LOG(ERROR) << "Initial configuration failed.  Exiting.";
         return 1;
     }
 
     // Run forever, reacting to events as necessary
-    ALOGI("Entering running state");
+    LOG(INFO) << "Entering running state";
     pEvsListener->run(pStateController);
 
     // In normal operation, we expect to run forever, but in some error conditions we'll quit.
     // One known example is if another process preempts our registration for our service name.
-    ALOGE("EVS Listener stopped.  Exiting.");
+    LOG(ERROR) << "EVS Listener stopped.  Exiting.";
 
     return 0;
 }
diff --git a/evs/manager/1.1/Android.bp b/evs/manager/1.1/Android.bp
index 4a62715..51e6e16 100644
--- a/evs/manager/1.1/Android.bp
+++ b/evs/manager/1.1/Android.bp
@@ -31,8 +31,8 @@
     ],
 
     shared_libs: [
+        "libbase",
         "libcutils",
-        "liblog",
         "libutils",
         "libui",
         "libsync",
@@ -67,7 +67,7 @@
     product_variables: {
         debuggable: {
             cflags: [
-                "-DEVS_ALLOW_AID_ROOT",
+                "-DEVS_DEBUG",
             ]
         }
     }
diff --git a/evs/manager/1.1/Enumerator.cpp b/evs/manager/1.1/Enumerator.cpp
index 90d0570..ede5eb2 100644
--- a/evs/manager/1.1/Enumerator.cpp
+++ b/evs/manager/1.1/Enumerator.cpp
@@ -14,6 +14,7 @@
  * limitations under the License.
  */
 
+#include <android-base/logging.h>
 #include <hwbinder/IPCThreadState.h>
 #include <cutils/android_filesystem_config.h>
 
@@ -30,11 +31,23 @@
 using CameraDesc_1_1 = ::android::hardware::automotive::evs::V1_1::CameraDesc;
 
 bool Enumerator::init(const char* hardwareServiceName) {
-    ALOGD("init");
+    LOG(DEBUG) << "init";
 
     // Connect with the underlying hardware enumerator
     mHwEnumerator = IEvsEnumerator::getService(hardwareServiceName);
     bool result = (mHwEnumerator.get() != nullptr);
+    if (result) {
+        // Get an internal display identifier.
+        mHwEnumerator->getDisplayIdList(
+            [this](const auto& displayPorts) {
+                if (displayPorts.size() > 0) {
+                    mInternalDisplayPort = displayPorts[0];
+                } else {
+                    LOG(WARNING) << "No display is available to EVS service.";
+                }
+            }
+        );
+    }
 
     return result;
 }
@@ -44,13 +57,15 @@
     hardware::IPCThreadState *ipc = hardware::IPCThreadState::self();
     const auto userId = ipc->getCallingUid() / AID_USER_OFFSET;
     const auto appId = ipc->getCallingUid() % AID_USER_OFFSET;
-#ifdef EVS_ALLOW_AID_ROOT
+#ifdef EVS_DEBUG
     if (AID_AUTOMOTIVE_EVS != appId && AID_ROOT != appId && AID_SYSTEM != appId) {
 #else
     if (AID_AUTOMOTIVE_EVS != appId && AID_SYSTEM != appId) {
 #endif
-        ALOGE("EVS access denied?: pid = %d, userId = %d, appId = %d",
-              ipc->getCallingPid(), userId, appId);
+        LOG(ERROR) << "EVS access denied? "
+                   << "pid = " << ipc->getCallingPid()
+                   << ", userId = " << userId
+                   << ", appId = " << appId;
         return false;
     }
 
@@ -62,7 +77,7 @@
     bool found = false;
 
     if (metadata == nullptr) {
-        ALOGE("Metadata is null");
+        LOG(ERROR) << "Metadata is null";
         return found;
     }
 
@@ -72,7 +87,7 @@
                                            &entry);
     if (0 != rc) {
         // No capabilities are found in metadata.
-        ALOGD("%s does not find a target entry", __FUNCTION__);
+        LOG(DEBUG) << __FUNCTION__ << " does not find a target entry";
         return found;
     }
 
@@ -84,7 +99,9 @@
         }
     }
 
-    ALOGE_IF(!found, "%s does not find a logical multi camera cap", __FUNCTION__);
+    if (!found) {
+        LOG(DEBUG) << __FUNCTION__ << " does not find a logical multi camera cap";
+    }
     return found;
 }
 
@@ -92,7 +109,7 @@
 std::unordered_set<std::string> Enumerator::getPhysicalCameraIds(const std::string& id) {
     std::unordered_set<std::string> physicalCameras;
     if (mCameraDevices.find(id) == mCameraDevices.end()) {
-        ALOGE("Queried device %s does not exist!", id.c_str());
+        LOG(ERROR) << "Queried device " << id << " does not exist!";
         return physicalCameras;
     }
 
@@ -101,7 +118,7 @@
     if (!isLogicalCamera(metadata)) {
         // EVS assumes that the device w/o a valid metadata is a physical
         // device.
-        ALOGI("%s is not a logical camera device.", id.c_str());
+        LOG(INFO) << id << " is not a logical camera device.";
         physicalCameras.emplace(id);
         return physicalCameras;
     }
@@ -111,7 +128,7 @@
                                            ANDROID_LOGICAL_MULTI_CAMERA_PHYSICAL_IDS,
                                            &entry);
     if (0 != rc) {
-        ALOGE("No physical camera ID is found for a logical camera device %s!", id.c_str());
+        LOG(ERROR) << "No physical camera ID is found for a logical camera device " << id;
         return physicalCameras;
     }
 
@@ -127,7 +144,8 @@
         }
     }
 
-    ALOGE("%s consists of %d physical camera devices.", id.c_str(), (int)physicalCameras.size());
+    LOG(INFO) << id << " consists of "
+              << physicalCameras.size() << " physical camera devices.";
     return physicalCameras;
 }
 
@@ -150,7 +168,7 @@
 
 
 Return<sp<IEvsCamera_1_0>> Enumerator::openCamera(const hidl_string& cameraId) {
-    ALOGD("openCamera");
+    LOG(DEBUG) << __FUNCTION__;
     if (!checkPermission()) {
         return nullptr;
     }
@@ -165,11 +183,11 @@
             IEvsCamera_1_1::castFrom(mHwEnumerator->openCamera(cameraId))
             .withDefault(nullptr);
         if (device == nullptr) {
-            ALOGE("Failed to open hardware camera %s", cameraId.c_str());
+            LOG(ERROR) << "Failed to open hardware camera " << cameraId;
         } else {
             hwCamera = new HalCamera(device, cameraId);
             if (hwCamera == nullptr) {
-                ALOGE("Failed to allocate camera wrapper object");
+                LOG(ERROR) << "Failed to allocate camera wrapper object";
                 mHwEnumerator->closeCamera(device);
             }
         }
@@ -185,7 +203,8 @@
     if (clientCamera != nullptr) {
         mActiveCameras.try_emplace(cameraId, hwCamera);
     } else {
-        ALOGE("Requested camera %s not found or not available", cameraId.c_str());
+        LOG(ERROR) << "Requested camera " << cameraId
+                   << " not found or not available";
     }
 
     // Send the virtual camera object back to the client by strong pointer which will keep it alive
@@ -194,10 +213,10 @@
 
 
 Return<void> Enumerator::closeCamera(const ::android::sp<IEvsCamera_1_0>& clientCamera) {
-    ALOGD("closeCamera");
+    LOG(DEBUG) << __FUNCTION__;
 
     if (clientCamera.get() == nullptr) {
-        ALOGE("Ignoring call with null camera pointer.");
+        LOG(ERROR) << "Ignoring call with null camera pointer.";
         return Void();
     }
 
@@ -230,7 +249,7 @@
 // Methods from ::android::hardware::automotive::evs::V1_1::IEvsEnumerator follow.
 Return<sp<IEvsCamera_1_1>> Enumerator::openCamera_1_1(const hidl_string& cameraId,
                                                       const Stream& streamCfg) {
-    ALOGD("openCamera_1_1");
+    LOG(DEBUG) << __FUNCTION__;
     if (!checkPermission()) {
         return nullptr;
     }
@@ -251,13 +270,13 @@
                 IEvsCamera_1_1::castFrom(mHwEnumerator->openCamera_1_1(id, streamCfg))
                 .withDefault(nullptr);
             if (device == nullptr) {
-                ALOGE("Failed to open hardware camera %s", cameraId.c_str());
+                LOG(ERROR) << "Failed to open hardware camera " << cameraId.c_str();
                 success = false;
                 break;
             } else {
                 hwCamera = new HalCamera(device, id, streamCfg);
                 if (hwCamera == nullptr) {
-                    ALOGE("Failed to allocate camera wrapper object");
+                    LOG(ERROR) << "Failed to allocate camera wrapper object";
                     mHwEnumerator->closeCamera(device);
                     success = false;
                     break;
@@ -269,7 +288,7 @@
             sourceCameras.push_back(hwCamera);
         } else {
             if (it->second->getStreamConfig().id != streamCfg.id) {
-                ALOGW("Requested camera is already active in different configuration.");
+                LOG(WARNING) << "Requested camera is already active in different configuration.";
             } else {
                 sourceCameras.push_back(it->second);
             }
@@ -277,7 +296,7 @@
     }
 
     if (sourceCameras.size() < 1) {
-        ALOGE("Failed to open any physical camera device");
+        LOG(ERROR) << "Failed to open any physical camera device";
         return nullptr;
     }
 
@@ -286,7 +305,7 @@
     sp<VirtualCamera> clientCamera = new VirtualCamera(sourceCameras);
     if (clientCamera == nullptr) {
         // TODO: Any resource needs to be cleaned up explicitly?
-        ALOGE("Failed to create a client camera object");
+        LOG(ERROR) << "Failed to create a client camera object";
     } else {
         if (physicalCameras.size() > 1) {
             // VirtualCamera, which represents a logical device, caches its
@@ -299,8 +318,8 @@
             if (!hwCamera->ownVirtualCamera(clientCamera)) {
                 // TODO: Remove a referece to this camera from a virtual camera
                 // object.
-                ALOGE("%s failed to own a created proxy camera object.",
-                      hwCamera->getId().c_str());
+                LOG(ERROR) << hwCamera->getId()
+                           << " failed to own a created proxy camera object.";
             }
         }
     }
@@ -311,7 +330,7 @@
 
 
 Return<void> Enumerator::getCameraList_1_1(getCameraList_1_1_cb list_cb)  {
-    ALOGD("getCameraList");
+    LOG(DEBUG) << __FUNCTION__;
     if (!checkPermission()) {
         return Void();
     }
@@ -339,7 +358,7 @@
 
 
 Return<sp<IEvsDisplay_1_0>> Enumerator::openDisplay() {
-    ALOGD("openDisplay");
+    LOG(DEBUG) << __FUNCTION__;
 
     if (!checkPermission()) {
         return nullptr;
@@ -353,7 +372,7 @@
     // Request exclusive access to the EVS display
     sp<IEvsDisplay_1_0> pActiveDisplay = mHwEnumerator->openDisplay();
     if (pActiveDisplay == nullptr) {
-        ALOGE("EVS Display unavailable");
+        LOG(ERROR) << "EVS Display unavailable";
 
         return nullptr;
     }
@@ -371,13 +390,13 @@
 
 
 Return<void> Enumerator::closeDisplay(const ::android::sp<IEvsDisplay_1_0>& display) {
-    ALOGD("closeDisplay");
+    LOG(DEBUG) << __FUNCTION__;
 
     sp<IEvsDisplay_1_0> pActiveDisplay = mActiveDisplay.promote();
 
     // Drop the active display
     if (display.get() != pActiveDisplay.get()) {
-        ALOGW("Ignoring call to closeDisplay with unrecognized display object.");
+        LOG(WARNING) << "Ignoring call to closeDisplay with unrecognized display object.";
     } else {
         // Pass this request through to the hardware layer
         sp<HalDisplay> halDisplay = reinterpret_cast<HalDisplay *>(pActiveDisplay.get());
@@ -390,7 +409,7 @@
 
 
 Return<EvsDisplayState> Enumerator::getDisplayState()  {
-    ALOGD("getDisplayState");
+    LOG(DEBUG) << __FUNCTION__;
     if (!checkPermission()) {
         return EvsDisplayState::DEAD;
     }
@@ -409,7 +428,7 @@
 
 
 Return<sp<IEvsDisplay_1_1>> Enumerator::openDisplay_1_1(uint8_t id) {
-    ALOGD("%s", __FUNCTION__);
+    LOG(DEBUG) << __FUNCTION__;
 
     if (!checkPermission()) {
         return nullptr;
@@ -423,7 +442,7 @@
     // Request exclusive access to the EVS display
     sp<IEvsDisplay_1_1> pActiveDisplay = mHwEnumerator->openDisplay_1_1(id);
     if (pActiveDisplay == nullptr) {
-        ALOGE("EVS Display unavailable");
+        LOG(ERROR) << "EVS Display unavailable";
 
         return nullptr;
     }
diff --git a/evs/manager/1.1/Enumerator.h b/evs/manager/1.1/Enumerator.h
index 72619cd..f479bb7 100644
--- a/evs/manager/1.1/Enumerator.h
+++ b/evs/manager/1.1/Enumerator.h
@@ -89,6 +89,9 @@
     // List of camera descriptors of enumerated hw cameras
     std::unordered_map<std::string,
                        CameraDesc>    mCameraDevices;
+
+    // Display port the internal display is connected to.
+    uint8_t                           mInternalDisplayPort;
 };
 
 } // namespace implementation
diff --git a/evs/manager/1.1/HalCamera.cpp b/evs/manager/1.1/HalCamera.cpp
index c252dfd..805e0ec 100644
--- a/evs/manager/1.1/HalCamera.cpp
+++ b/evs/manager/1.1/HalCamera.cpp
@@ -18,6 +18,7 @@
 #include "VirtualCamera.h"
 #include "Enumerator.h"
 
+#include <android-base/logging.h>
 #include <ui/GraphicBufferAllocator.h>
 #include <ui/GraphicBufferMapper.h>
 
@@ -41,12 +42,12 @@
     sourceCameras.emplace_back(this);
     sp<VirtualCamera> client = new VirtualCamera(sourceCameras);
     if (client == nullptr) {
-        ALOGE("Failed to create client camera object");
+        LOG(ERROR) << "Failed to create client camera object";
         return nullptr;
     }
 
     if (!ownVirtualCamera(client)) {
-        ALOGE("Failed to own a client camera object");
+        LOG(ERROR) << "Failed to own a client camera object";
         client = nullptr;
     }
 
@@ -57,7 +58,7 @@
 bool HalCamera::ownVirtualCamera(sp<VirtualCamera> virtualCamera) {
 
     if (virtualCamera == nullptr) {
-        ALOGE("Failed to create virtualCamera camera object");
+        LOG(ERROR) << "Failed to create virtualCamera camera object";
         return false;
     }
 
@@ -85,7 +86,7 @@
 void HalCamera::disownVirtualCamera(sp<VirtualCamera> virtualCamera) {
     // Ignore calls with null pointers
     if (virtualCamera.get() == nullptr) {
-        ALOGW("Ignoring disownVirtualCamera call with null pointer");
+        LOG(WARNING) << "Ignoring disownVirtualCamera call with null pointer";
         return;
     }
 
@@ -93,12 +94,12 @@
     unsigned clientCount = mClients.size();
     mClients.remove(virtualCamera);
     if (clientCount != mClients.size() + 1) {
-        ALOGE("Couldn't find camera in our client list to remove it");
+        LOG(ERROR) << "Couldn't find camera in our client list to remove it";
     }
 
     // Recompute the number of buffers required with the target camera removed from the list
     if (!changeFramesInFlight(0)) {
-        ALOGE("Error when trying to reduce the in flight buffer count");
+        LOG(ERROR) << "Error when trying to reduce the in flight buffer count";
     }
 }
 
@@ -137,7 +138,7 @@
             }
         }
         if (newRecords.size() > (unsigned)bufferCount) {
-            ALOGW("We found more frames in use than requested.");
+            LOG(WARNING) << "We found more frames in use than requested.";
         }
 
         mFrames.swap(newRecords);
@@ -240,7 +241,7 @@
         }
     }
     if (i == mFrames.size()) {
-        ALOGE("We got a frame back with an ID we don't recognize!");
+        LOG(ERROR) << "We got a frame back with an ID we don't recognize!";
     } else {
         // Are there still clients using this buffer?
         mFrames[i].refCount--;
@@ -263,7 +264,7 @@
         }
     }
     if (i == mFrames.size()) {
-        ALOGE("We got a frame back with an ID we don't recognize!");
+        LOG(ERROR) << "We got a frame back with an ID we don't recognize!";
     } else {
         // Are there still clients using this buffer?
         mFrames[i].refCount--;
@@ -286,7 +287,7 @@
      * IEvsCameraStream v1.1 interfaces and therefore this method must not be
      * used.
      */
-    ALOGI("A delivered frame from EVS v1.0 HW module is rejected.");
+    LOG(INFO) << "A delivered frame from EVS v1.0 HW module is rejected.";
     mHwCamera->doneWithFrame(buffer);
 
     return Void();
@@ -295,7 +296,7 @@
 
 // Methods from ::android::hardware::automotive::evs::V1_1::IEvsCameraStream follow.
 Return<void> HalCamera::deliverFrame_1_1(const hardware::hidl_vec<BufferDesc_1_1>& buffer) {
-    ALOGV("Received a frame");
+    LOG(VERBOSE) << "Received a frame";
     // Frames are being forwarded to v1.1 clients only who requested new frame.
     const auto timestamp = buffer[0].timestamp;
     // TODO(b/145750636): For now, we are using a approximately half of 1 seconds / 30 frames = 33ms
@@ -313,11 +314,11 @@
                 continue;
             } else if (timestamp - req.timestamp < kThreshold) {
                 // Skip current frame because it arrives too soon.
-                ALOGD("Skips a frame from %s", getId().c_str());
+                LOG(DEBUG) << "Skips a frame from " << getId();
                 mNextRequests->push_back(req);
             } else if (vCam != nullptr && vCam->deliverFrame(buffer[0])) {
                 // Forward a frame and move a timeline.
-                ALOGD("%s forwarded the buffer #%d", getId().c_str(), buffer[0].bufferId);
+                LOG(DEBUG) << getId() << " forwarded the buffer #" << buffer[0].bufferId;
                 mTimelines[(uint64_t)vCam.get()]->BumpTimelineEventCounter();
                 ++frameDeliveriesV1;
             }
@@ -341,8 +342,8 @@
     if (frameDeliveries < 1) {
         // If none of our clients could accept the frame, then return it
         // right away.
-        ALOGI("Trivially rejecting frame (%d) from %s with no acceptance",
-              buffer[0].bufferId, getId().c_str());
+        LOG(INFO) << "Trivially rejecting frame (" << buffer[0].bufferId
+                  << ") from " << getId() << " with no acceptance";
         mHwCamera->doneWithFrame_1_1(buffer);
     } else {
         // Add an entry for this frame in our tracking list.
@@ -366,11 +367,11 @@
 
 
 Return<void> HalCamera::notify(const EvsEventDesc& event) {
-    ALOGD("Received an event id: %u", event.aType);
+    LOG(DEBUG) << "Received an event id: " << static_cast<int32_t>(event.aType);
     if(event.aType == EvsEventType::STREAM_STOPPED) {
         // This event happens only when there is no more active client.
         if (mStreamState != STOPPING) {
-            ALOGW("Stream stopped unexpectedly");
+            LOG(WARNING) << "Stream stopped unexpectedly";
         }
 
         mStreamState = STOPPED;
@@ -381,7 +382,7 @@
         sp<VirtualCamera> vCam = client.promote();
         if (vCam != nullptr) {
             if (!vCam->notify(event)) {
-                ALOGI("Failed to forward an event");
+                LOG(INFO) << "Failed to forward an event";
             }
         }
     }
@@ -392,11 +393,12 @@
 
 Return<EvsResult> HalCamera::setMaster(sp<VirtualCamera> virtualCamera) {
     if (mMaster == nullptr) {
-        ALOGD("%s: %p becomes a master", __FUNCTION__, virtualCamera.get());
+        LOG(DEBUG) << __FUNCTION__
+                   << ": " << virtualCamera.get() << " becomes a master.";
         mMaster = virtualCamera;
         return EvsResult::OK;
     } else {
-        ALOGD("This camera already has a master client.");
+        LOG(INFO) << "This camera already has a master client.";
         return EvsResult::OWNERSHIP_LOST;
     }
 }
@@ -405,18 +407,19 @@
 Return<EvsResult> HalCamera::forceMaster(sp<VirtualCamera> virtualCamera) {
     sp<VirtualCamera> prevMaster = mMaster.promote();
     if (prevMaster == virtualCamera) {
-        ALOGD("Client %p is already a master client", virtualCamera.get());
+        LOG(DEBUG) << "Client " << virtualCamera.get()
+                   << " is already a master client";
     } else {
         mMaster = virtualCamera;
         if (prevMaster != nullptr) {
-            ALOGD("High priority client %p steals a master role from %p",
-                virtualCamera.get(), prevMaster.get());
+            LOG(INFO) << "High priority client " << virtualCamera.get()
+                      << " steals a master role from " << prevMaster.get();
 
             /* Notify a previous master client the loss of a master role */
             EvsEventDesc event;
             event.aType = EvsEventType::MASTER_RELEASED;
             if (!prevMaster->notify(event)) {
-                ALOGE("Fail to deliver a master role lost notification");
+                LOG(ERROR) << "Fail to deliver a master role lost notification";
             }
         }
     }
@@ -429,7 +432,7 @@
     if (mMaster.promote() != virtualCamera) {
         return EvsResult::INVALID_ARG;
     } else {
-        ALOGD("Unset a master camera client");
+        LOG(INFO) << "Unset a master camera client";
         mMaster = nullptr;
 
         /* Notify other clients that a master role becomes available. */
@@ -437,7 +440,7 @@
         event.aType = EvsEventType::MASTER_RELEASED;
         auto cbResult = this->notify(event);
         if (!cbResult.isOk()) {
-            ALOGE("Fail to deliver a parameter change notification");
+            LOG(ERROR) << "Fail to deliver a parameter change notification";
         }
 
         return EvsResult::OK;
@@ -463,11 +466,11 @@
             event.payload[1] = static_cast<uint32_t>(value);
             auto cbResult = this->notify(event);
             if (!cbResult.isOk()) {
-                ALOGE("Fail to deliver a parameter change notification");
+                LOG(ERROR) << "Fail to deliver a parameter change notification";
             }
         }
     } else {
-        ALOGD("A parameter change request from a non-master client is declined.");
+        LOG(WARNING) << "A parameter change request from a non-master client is declined.";
 
         /* Read a current value of a requested camera parameter */
         getParameter(id, value);
diff --git a/evs/manager/1.1/HalDisplay.cpp b/evs/manager/1.1/HalDisplay.cpp
index 7b7abab..4f7b4fd 100644
--- a/evs/manager/1.1/HalDisplay.cpp
+++ b/evs/manager/1.1/HalDisplay.cpp
@@ -14,7 +14,7 @@
  * limitations under the License.
  */
 
-#include <log/log.h>
+#include <android-base/logging.h>
 #include "HalDisplay.h"
 
 namespace android {
diff --git a/evs/manager/1.1/VirtualCamera.cpp b/evs/manager/1.1/VirtualCamera.cpp
index 79b15b9..c2081e7 100644
--- a/evs/manager/1.1/VirtualCamera.cpp
+++ b/evs/manager/1.1/VirtualCamera.cpp
@@ -18,6 +18,7 @@
 #include "HalCamera.h"
 #include "Enumerator.h"
 
+#include <android-base/logging.h>
 #include <ui/GraphicBufferAllocator.h>
 #include <ui/GraphicBufferMapper.h>
 
@@ -49,7 +50,7 @@
     if (mStreamState == RUNNING) {
         // Note that if we hit this case, no terminating frame will be sent to the client,
         // but they're probably already dead anyway.
-        ALOGW("Virtual camera being shutdown while stream is running");
+        LOG(WARNING) << "Virtual camera being shutdown while stream is running";
 
         // Tell the frame delivery pipeline we don't want any more frames
         mStreamState = STOPPING;
@@ -57,12 +58,12 @@
         for (auto&& [key, hwCamera] : mHalCamera) {
             auto pHwCamera = hwCamera.promote();
             if (pHwCamera == nullptr) {
-                ALOGW("Camera device %s is not alive.", key.c_str());
+                LOG(WARNING) << "Camera device " << key << " is not alive.";
                 continue;
             }
 
             if (mFramesHeld[key].size() > 0) {
-                ALOGW("VirtualCamera destructing with frames in flight.");
+                LOG(WARNING) << "VirtualCamera destructing with frames in flight.";
 
                 // Return to the underlying hardware camera any buffers the client was holding
                 for (auto&& heldBuffer : mFramesHeld[key]) {
@@ -108,12 +109,12 @@
 bool VirtualCamera::deliverFrame(const BufferDesc_1_1& bufDesc) {
     if (mStreamState == STOPPED) {
         // A stopped stream gets no frames
-        ALOGE("A stopped stream should not get any frames");
+        LOG(ERROR) << "A stopped stream should not get any frames";
         return false;
     } else if (mFramesHeld[bufDesc.deviceId].size() >= mFramesAllowed) {
         // Indicate that we declined to send the frame to the client because they're at quota
-        ALOGI("Skipping new frame as we hold %zu of %u allowed.",
-              mFramesHeld[bufDesc.deviceId].size(), mFramesAllowed);
+        LOG(INFO) << "Skipping new frame as we hold " << mFramesHeld[bufDesc.deviceId].size()
+                  << " of " << mFramesAllowed << " allowed.";
 
         if (mStream_1_1 != nullptr) {
             // Report a frame drop to v1.1 client.
@@ -122,7 +123,7 @@
             event.aType = EvsEventType::FRAME_DROPPED;
             auto result = mStream_1_1->notify(event);
             if (!result.isOk()) {
-                ALOGE("Error delivering end of stream event");
+                LOG(ERROR) << "Error delivering end of stream event";
             }
         }
 
@@ -159,7 +160,8 @@
         case EvsEventType::STREAM_STOPPED:
             if (mStreamState != STOPPING) {
                 // Warn if we got an unexpected stream termination
-                ALOGW("Stream unexpectedly stopped, current status 0x%X", mStreamState);
+                LOG(WARNING) << "Stream unexpectedly stopped, current status "
+                             << mStreamState;
             }
 
             // Mark the stream as stopped.
@@ -169,22 +171,23 @@
                 // Send a null frame instead, for v1.0 client
                 auto result = mStream->deliverFrame({});
                 if (!result.isOk()) {
-                    ALOGE("Error delivering end of stream marker");
+                    LOG(ERROR) << "Error delivering end of stream marker";
                 }
             }
             break;
 
         // v1.0 client will ignore all other events.
         case EvsEventType::PARAMETER_CHANGED:
-            ALOGD("A camera parameter 0x%X is set to 0x%X", event.payload[0], event.payload[1]);
+            LOG(DEBUG) << "A camera parameter " << event.payload[0]
+                       << " is set to " << event.payload[1];
             break;
 
         case EvsEventType::MASTER_RELEASED:
-            ALOGD("The master client has been released");
+            LOG(DEBUG) << "The master client has been released";
             break;
 
         default:
-            ALOGE("Unknown event id 0x%X", event.aType);
+            LOG(WARNING) << "Unknown event id " << static_cast<int32_t>(event.aType);
             break;
     }
 
@@ -192,7 +195,7 @@
         // Forward a received event to the v1.1 client
         auto result = mStream_1_1->notify(event);
         if (!result.isOk()) {
-            ALOGE("Failed to forward an event");
+            LOG(ERROR) << "Failed to forward an event";
             return false;
         }
     }
@@ -205,7 +208,8 @@
 Return<void> VirtualCamera::getCameraInfo(getCameraInfo_cb info_cb) {
     // Straight pass through to hardware layer
     if (mHalCamera.size() > 1) {
-        ALOGE("%s must NOT be called on a logical camera object.", __FUNCTION__);
+        LOG(ERROR) << __FUNCTION__
+                   << " must NOT be called on a logical camera object.";
         info_cb({});
         return Void();
     }
@@ -235,8 +239,9 @@
 
         result = pHwCam->changeFramesInFlight(bufferCountChange);
         if (!result) {
-            ALOGE("%s: Failed to change buffer count by %d to %d",
-                  key.c_str(), bufferCountChange, bufferCount);
+            LOG(ERROR) << key
+                       << ": Failed to change buffer count by " << bufferCountChange
+                       << " to " << bufferCount;
             break;
         }
 
@@ -249,7 +254,7 @@
     if (!result) {
         // Rollback changes because we failed to update all cameras
         for (auto&& hwCamera : changedCameras) {
-            ALOGW("Rollback a change on %s", hwCamera->getId().c_str());
+            LOG(WARNING) << "Rollback a change on " << hwCamera->getId();
             hwCamera->changeFramesInFlight(-bufferCountChange);
         }
 
@@ -265,7 +270,7 @@
 Return<EvsResult> VirtualCamera::startVideoStream(const ::android::sp<IEvsCameraStream_1_0>& stream)  {
     // We only support a single stream at a time
     if (mStreamState != STOPPED) {
-        ALOGE("ignoring startVideoStream call when a stream is already running.");
+        LOG(ERROR) << "Ignoring startVideoStream call when a stream is already running.";
         return EvsResult::STREAM_ALREADY_RUNNING;
     }
 
@@ -276,9 +281,9 @@
     mStream = stream;
     mStream_1_1 = IEvsCameraStream_1_1::castFrom(stream).withDefault(nullptr);
     if (mStream_1_1 == nullptr) {
-        ALOGI("Start video stream for v1.0 client.");
+        LOG(INFO) << "Start video stream for v1.0 client.";
     } else {
-        ALOGI("Start video stream for v1.1 client.");
+        LOG(INFO) << "Start video stream for v1.1 client.";
     }
 
     mStreamState = RUNNING;
@@ -288,11 +293,12 @@
     while (iter != mHalCamera.end()) {
         auto pHwCamera = iter->second.promote();
         if (pHwCamera == nullptr) {
-            ALOGE("Failed to start a video stream on %s", iter->first.c_str());
+            LOG(ERROR) << "Failed to start a video stream on " << iter->first;
             continue;
         }
 
-        ALOGI("%s starts a video stream on %s", __FUNCTION__, iter->first.c_str());
+        LOG(INFO) << __FUNCTION__
+                  << " starts a video stream on " << iter->first;
         Return<EvsResult> result = pHwCamera->clientStreamStarting();
         if ((!result.isOk()) || (result != EvsResult::OK)) {
             // If we failed to start the underlying stream, then we're not actually running
@@ -327,13 +333,13 @@
                 for (auto&& [key, hwCamera] : mHalCamera) {
                     auto pHwCamera = hwCamera.promote();
                     if (pHwCamera == nullptr) {
-                        ALOGW("Invalid camera %s is ignored.", key.c_str());
+                        LOG(WARNING) << "Invalid camera " << key << " is ignored.";
                         continue;
                     }
 
                     UniqueFence another = pHwCamera->requestNewFrame(this, lastFrameTimestamp);
                     if (!another) {
-                        ALOGW("%s returned an invalid fence.", key.c_str());
+                        LOG(WARNING) << key << " returned an invalid fence.";
                         continue;
                     }
 
@@ -346,7 +352,7 @@
                 if (fence.Wait(kFrameTimeoutMs) < 0) {
                     // TODO(b/145466570): Replace this temporarily camera hang
                     // handler.
-                    ALOGE("%p: Camera hangs? %s", this, strerror(errno));
+                    PLOG(ERROR) << this << ": Camera hangs?";
                     break;
                 } else if (mStreamState == RUNNING) {
                     // Fetch frames and forward to the client
@@ -384,9 +390,10 @@
 
 Return<void> VirtualCamera::doneWithFrame(const BufferDesc_1_0& buffer) {
     if (buffer.memHandle == nullptr) {
-        ALOGE("ignoring doneWithFrame called with invalid handle");
+        LOG(ERROR) << "Ignoring doneWithFrame called with invalid handle";
     } else if (mFramesHeld.size() > 1) {
-        ALOGE("%s must NOT be called on a logical camera object.", __FUNCTION__);
+        LOG(ERROR) << __FUNCTION__
+                   << " must NOT be called on a logical camera object.";
     } else {
         // Find this buffer in our "held" list
         auto& frameQueue = mFramesHeld.begin()->second;
@@ -400,7 +407,8 @@
         }
         if (it == frameQueue.end()) {
             // We should always find the frame in our "held" list
-            ALOGE("Ignoring doneWithFrame called with unrecognized frameID %d", buffer.bufferId);
+            LOG(ERROR) << "Ignoring doneWithFrame called with unrecognized frameID "
+                       << buffer.bufferId;
         } else {
             // Take this frame out of our "held" list
             frameQueue.erase(it);
@@ -410,8 +418,9 @@
             if (pHwCamera != nullptr) {
                 pHwCamera->doneWithFrame(buffer);
             } else {
-                ALOGW("Possible memory leak because a device %s is not valid.",
-                      mHalCamera.begin()->first.c_str());
+                LOG(WARNING) << "Possible memory leak because a device "
+                             << mHalCamera.begin()->first
+                             << " is not valid.";
             }
         }
     }
@@ -432,13 +441,13 @@
             event.aType = EvsEventType::STREAM_STOPPED;
             auto result = mStream_1_1->notify(event);
             if (!result.isOk()) {
-                ALOGE("Error delivering end of stream event");
+                LOG(ERROR) << "Error delivering end of stream event";
             }
         } else {
             // v1.0 client expects a null frame at the end of the stream
             auto result = mStream->deliverFrame({});
             if (!result.isOk()) {
-                ALOGE("Error delivering end of stream marker");
+                LOG(ERROR) << "Error delivering end of stream marker";
             }
         }
 
@@ -469,7 +478,7 @@
 
 Return<int32_t> VirtualCamera::getExtendedInfo(uint32_t opaqueIdentifier)  {
     if (mHalCamera.size() > 1) {
-        ALOGW("Logical camera device does not support %s", __FUNCTION__);
+        LOG(WARNING) << "Logical camera device does not support " << __FUNCTION__;
         return 0;
     }
 
@@ -478,7 +487,7 @@
     if (pHwCamera != nullptr) {
         return pHwCamera->getHwCamera()->getExtendedInfo(opaqueIdentifier);
     } else {
-        ALOGW("%s is invalid.", mHalCamera.begin()->first.c_str());
+        LOG(WARNING) << mHalCamera.begin()->first << " is invalid.";
         return 0;
     }
 }
@@ -486,7 +495,7 @@
 
 Return<EvsResult> VirtualCamera::setExtendedInfo(uint32_t opaqueIdentifier, int32_t opaqueValue)  {
     if (mHalCamera.size() > 1) {
-        ALOGW("Logical camera device does not support %s", __FUNCTION__);
+        LOG(WARNING) << "Logical camera device does not support " << __FUNCTION__;
         return EvsResult::INVALID_ARG;
     }
 
@@ -495,7 +504,7 @@
     if (pHwCamera != nullptr) {
         return pHwCamera->getHwCamera()->setExtendedInfo(opaqueIdentifier, opaqueValue);
     } else {
-        ALOGW("%s is invalid.", mHalCamera.begin()->first.c_str());
+        LOG(WARNING) << mHalCamera.begin()->first << " is invalid.";
         return EvsResult::INVALID_ARG;
     }
 }
@@ -541,13 +550,14 @@
             if (hwCamera_1_1 != nullptr) {
                 return hwCamera_1_1->getCameraInfo_1_1(info_cb);
             } else {
-                ALOGW("Failed to promote HW camera to v1.1.");
+                LOG(WARNING) << "Failed to promote HW camera to v1.1.";
             }
         } else {
-            ALOGW("Camera device %s is not alive.", deviceId.c_str());
+            LOG(WARNING) << "Camera device " << deviceId << " is not alive.";
         }
     } else {
-        ALOGW("Requested device %s does not back this device!", deviceId.c_str());
+        LOG(WARNING) << "Requested device " << deviceId
+                     << " does not back this device.";
     }
 
     // Return an empty list
@@ -561,7 +571,7 @@
 
     for (auto&& buffer : buffers) {
         if (buffer.buffer.nativeHandle == nullptr) {
-            ALOGW("ignoring doneWithFrame called with invalid handle");
+            LOG(WARNING) << "Ignoring doneWithFrame called with invalid handle";
         } else {
             // Find this buffer in our "held" list
             auto it = mFramesHeld[buffer.deviceId].begin();
@@ -574,8 +584,8 @@
             }
             if (it == mFramesHeld[buffer.deviceId].end()) {
                 // We should always find the frame in our "held" list
-                ALOGE("Ignoring doneWithFrame called with unrecognized frameID %d",
-                      buffer.bufferId);
+                LOG(ERROR) << "Ignoring doneWithFrame called with unrecognized frameID "
+                           << buffer.bufferId;
             } else {
                 // Take this frame out of our "held" list
                 mFramesHeld[buffer.deviceId].erase(it);
@@ -585,7 +595,8 @@
                 if (pHwCamera != nullptr) {
                     pHwCamera->doneWithFrame(buffer);
                 } else {
-                    ALOGW("Possible memory leak; %s is not valid.", buffer.deviceId.c_str());
+                    LOG(WARNING) << "Possible memory leak; "
+                                 << buffer.deviceId << " is not valid.";
                 }
             }
         }
@@ -597,7 +608,7 @@
 
 Return<EvsResult> VirtualCamera::setMaster() {
     if (mHalCamera.size() > 1) {
-        ALOGW("Logical camera device does not support %s.", __FUNCTION__);
+        LOG(WARNING) << "Logical camera device does not support " << __FUNCTION__;
         return EvsResult::INVALID_ARG;
     }
 
@@ -605,7 +616,7 @@
     if (pHwCamera != nullptr) {
         return pHwCamera->setMaster(this);
     } else {
-        ALOGW("Camera device %s is not alive.", mHalCamera.begin()->first.c_str());
+        LOG(WARNING) << "Camera device " << mHalCamera.begin()->first << " is not alive.";
         return EvsResult::INVALID_ARG;
     }
 }
@@ -613,12 +624,13 @@
 
 Return<EvsResult> VirtualCamera::forceMaster(const sp<IEvsDisplay_1_0>& display) {
     if (mHalCamera.size() > 1) {
-        ALOGW("Logical camera device does not support %s.", __FUNCTION__);
+        LOG(WARNING) << "Logical camera device does not support " << __FUNCTION__;
         return EvsResult::INVALID_ARG;
     }
 
     if (display.get() == nullptr) {
-        ALOGE("%s: Passed display is invalid", __FUNCTION__);
+        LOG(ERROR) << __FUNCTION__
+                   << ": Passed display is invalid";
         return EvsResult::INVALID_ARG;
     }
 
@@ -626,7 +638,8 @@
     if (state == DisplayState::NOT_OPEN ||
         state == DisplayState::DEAD ||
         state >= DisplayState::NUM_STATES) {
-        ALOGE("%s: Passed display is in invalid state", __FUNCTION__);
+        LOG(ERROR) << __FUNCTION__
+                   << ": Passed display is in invalid state";
         return EvsResult::INVALID_ARG;
     }
 
@@ -634,7 +647,7 @@
     if (pHwCamera != nullptr) {
         return pHwCamera->forceMaster(this);
     } else {
-        ALOGW("Camera device %s is not alive.", mHalCamera.begin()->first.c_str());
+        LOG(WARNING) << "Camera device " << mHalCamera.begin()->first << " is not alive.";
         return EvsResult::INVALID_ARG;
     }
 }
@@ -642,7 +655,7 @@
 
 Return<EvsResult> VirtualCamera::unsetMaster() {
     if (mHalCamera.size() > 1) {
-        ALOGW("Logical camera device does not support %s.", __FUNCTION__);
+        LOG(WARNING) << "Logical camera device does not support " << __FUNCTION__;
         return EvsResult::INVALID_ARG;
     }
 
@@ -650,7 +663,7 @@
     if (pHwCamera != nullptr) {
         return pHwCamera->unsetMaster(this);
     } else {
-        ALOGW("Camera device %s is not alive.", mHalCamera.begin()->first.c_str());
+        LOG(WARNING) << "Camera device " << mHalCamera.begin()->first << " is not alive.";
         return EvsResult::INVALID_ARG;
     }
 }
@@ -658,7 +671,7 @@
 
 Return<void> VirtualCamera::getParameterList(getParameterList_cb _hidl_cb) {
     if (mHalCamera.size() > 1) {
-        ALOGW("Logical camera device does not support %s.", __FUNCTION__);
+        LOG(WARNING) << "Logical camera device does not support " << __FUNCTION__;
 
         // Return an empty list
         _hidl_cb({});
@@ -668,7 +681,7 @@
     // Straight pass through to hardware layer
     auto pHwCamera = mHalCamera.begin()->second.promote();
     if (pHwCamera == nullptr) {
-        ALOGW("Camera device %s is not alive.", mHalCamera.begin()->first.c_str());
+        LOG(WARNING) << "Camera device " << mHalCamera.begin()->first << " is not alive.";
 
         // Return an empty list
         _hidl_cb({});
@@ -680,8 +693,8 @@
     if (hwCamera_1_1 != nullptr) {
         return hwCamera_1_1->getParameterList(_hidl_cb);
     } else {
-        ALOGW("Camera device %s does not support a parameter programming.",
-              mHalCamera.begin()->first.c_str());
+        LOG(WARNING) << "Camera device " << mHalCamera.begin()->first
+                     << " does not support a parameter programming.";
 
         // Return an empty list
         _hidl_cb({});
@@ -693,7 +706,7 @@
 Return<void> VirtualCamera::getIntParameterRange(CameraParam id,
                                                  getIntParameterRange_cb _hidl_cb) {
     if (mHalCamera.size() > 1) {
-        ALOGW("Logical camera device does not support %s.", __FUNCTION__);
+        LOG(WARNING) << "Logical camera device does not support " << __FUNCTION__;
 
         // Return [0, 0, 0]
         _hidl_cb(0, 0, 0);
@@ -703,7 +716,7 @@
     // Straight pass through to hardware layer
     auto pHwCamera = mHalCamera.begin()->second.promote();
     if (pHwCamera == nullptr) {
-        ALOGW("Camera device %s is not alive.", mHalCamera.begin()->first.c_str());
+        LOG(WARNING) << "Camera device " << mHalCamera.begin()->first << " is not alive.";
 
         // Return [0, 0, 0]
         _hidl_cb(0, 0, 0);
@@ -715,8 +728,8 @@
     if (hwCamera_1_1 != nullptr) {
         return hwCamera_1_1->getIntParameterRange(id, _hidl_cb);
     } else {
-        ALOGW("Camera device %s does not support a parameter programming.",
-              mHalCamera.begin()->first.c_str());
+        LOG(WARNING) << "Camera device " << mHalCamera.begin()->first
+                     << " does not support a parameter programming.";
 
         // Return [0, 0, 0]
         _hidl_cb(0, 0, 0);
@@ -732,14 +745,14 @@
     hardware::hidl_vec<int32_t> values;
     EvsResult status = EvsResult::INVALID_ARG;
     if (mHalCamera.size() > 1) {
-        ALOGW("Logical camera device does not support %s.", __FUNCTION__);
+        LOG(WARNING) << "Logical camera device does not support " << __FUNCTION__;
         _hidl_cb(status, values);
         return Void();
     }
 
     auto pHwCamera = mHalCamera.begin()->second.promote();
     if (pHwCamera == nullptr) {
-        ALOGW("Camera device %s is not alive.", mHalCamera.begin()->first.c_str());
+        LOG(WARNING) << "Camera device " << mHalCamera.begin()->first << " is not alive.";
         _hidl_cb(status, values);
         return Void();
     }
@@ -759,14 +772,14 @@
     hardware::hidl_vec<int32_t> values;
     EvsResult status = EvsResult::INVALID_ARG;
     if (mHalCamera.size() > 1) {
-        ALOGW("Logical camera device does not support %s.", __FUNCTION__);
+        LOG(WARNING) << "Logical camera device does not support " << __FUNCTION__;
         _hidl_cb(status, values);
         return Void();
     }
 
     auto pHwCamera = mHalCamera.begin()->second.promote();
     if (pHwCamera == nullptr) {
-        ALOGW("Camera device %s is not alive.", mHalCamera.begin()->first.c_str());
+        LOG(WARNING) << "Camera device " << mHalCamera.begin()->first << " is not alive.";
         _hidl_cb(status, values);
         return Void();
     }
@@ -786,20 +799,20 @@
                                                      const hidl_vec<uint8_t>& opaqueValue) {
     hardware::hidl_vec<int32_t> values;
     if (mHalCamera.size() > 1) {
-        ALOGW("Logical camera device does not support %s.", __FUNCTION__);
+        LOG(WARNING) << "Logical camera device does not support " << __FUNCTION__;
         return EvsResult::INVALID_ARG;
     }
 
     auto pHwCamera = mHalCamera.begin()->second.promote();
     if (pHwCamera == nullptr) {
-        ALOGW("Camera device %s is not alive.", mHalCamera.begin()->first.c_str());
+        LOG(WARNING) << "Camera device " << mHalCamera.begin()->first << " is not alive.";
         return EvsResult::INVALID_ARG;
     } else {
         auto hwCamera = IEvsCamera_1_1::castFrom(pHwCamera->getHwCamera()).withDefault(nullptr);
         if (hwCamera != nullptr) {
             return hwCamera->setExtendedInfo_1_1(opaqueIdentifier, opaqueValue);
         } else {
-            ALOGE("Underlying hardware camera does not implement v1.1 interfaces.");
+            LOG(ERROR) << "Underlying hardware camera does not implement v1.1 interfaces.";
             return EvsResult::INVALID_ARG;
         }
     }
@@ -811,21 +824,21 @@
     hardware::hidl_vec<uint8_t> values;
     EvsResult status = EvsResult::INVALID_ARG;
     if (mHalCamera.size() > 1) {
-        ALOGW("Logical camera device does not support %s.", __FUNCTION__);
+        LOG(WARNING) << "Logical camera device does not support " << __FUNCTION__;
         _hidl_cb(status, values);
         return Void();
     }
 
     auto pHwCamera = mHalCamera.begin()->second.promote();
     if (pHwCamera == nullptr) {
-        ALOGW("Camera device %s is not alive.", mHalCamera.begin()->first.c_str());
+        LOG(WARNING) << "Camera device " << mHalCamera.begin()->first << " is not alive.";
         _hidl_cb(status, values);
     } else {
         auto hwCamera = IEvsCamera_1_1::castFrom(pHwCamera->getHwCamera()).withDefault(nullptr);
         if (hwCamera != nullptr) {
             hwCamera->getExtendedInfo_1_1(opaqueIdentifier, _hidl_cb);
         } else {
-            ALOGE("Underlying hardware camera does not implement v1.1 interfaces.");
+            LOG(ERROR) << "Underlying hardware camera does not implement v1.1 interfaces.";
             _hidl_cb(status, values);
         }
     }
diff --git a/evs/manager/1.1/service.cpp b/evs/manager/1.1/service.cpp
index ac471e5..2210ab9 100644
--- a/evs/manager/1.1/service.cpp
+++ b/evs/manager/1.1/service.cpp
@@ -16,6 +16,7 @@
 
 #include <unistd.h>
 
+#include <android-base/logging.h>
 #include <hidl/HidlTransportSupport.h>
 #include <utils/Errors.h>
 #include <utils/StrongPointer.h>
@@ -39,29 +40,33 @@
 
 
 static void startService(const char *hardwareServiceName, const char * managerServiceName) {
-    ALOGI("EVS managed service connecting to hardware service at %s", hardwareServiceName);
+    LOG(INFO) << "EVS managed service connecting to hardware service at " << hardwareServiceName;
     android::sp<Enumerator> service = new Enumerator();
     if (!service->init(hardwareServiceName)) {
-        ALOGE("Failed to connect to hardware service - quitting from registrationThread");
+        LOG(ERROR) << "Failed to connect to hardware service - quitting from registrationThread";
         exit(1);
     }
 
     // Register our service -- if somebody is already registered by our name,
     // they will be killed (their thread pool will throw an exception).
-    ALOGI("EVS managed service is starting as %s", managerServiceName);
+    LOG(INFO) << "EVS managed service is starting as " << managerServiceName;
     status_t status = service->registerAsService(managerServiceName);
     if (status != OK) {
-        ALOGE("Could not register service %s (%d) - quitting from registrationThread",
-              managerServiceName, status);
+        LOG(ERROR) << "Could not register service " << managerServiceName
+                   << " status = " << status << " - quitting from registrationThread";
         exit(2);
     }
 
-    ALOGD("Registration complete");
+    LOG(INFO) << "Registration complete";
 }
 
 
 int main(int argc, char** argv) {
-    ALOGI("EVS manager starting\n");
+    LOG(INFO) << "EVS manager starting";
+
+#ifdef EVS_DEBUG
+    android::base::SetMinimumLogSeverity(android::base::DEBUG);
+#endif
 
     // Set up default behavior, then check for command line options
     bool printHelp = false;
@@ -72,7 +77,7 @@
         } else if (strcmp(argv[i], "--target") == 0) {
             i++;
             if (i >= argc) {
-                ALOGE("--target <service> was not provided with a service name\n");
+                LOG(ERROR) << "--target <service> was not provided with a service name";
             } else {
                 evsHardwareServiceName = argv[i];
             }
@@ -100,10 +105,10 @@
 
     // Send this main thread to become a permanent part of the thread pool.
     // This is not expected to return.
-    ALOGD("Main thread entering thread pool");
+    LOG(INFO) << "Main thread entering thread pool";
     joinRpcThreadpool();
 
     // In normal operation, we don't expect the thread pool to exit
-    ALOGE("EVS Hardware Enumerator is shutting down");
+    LOG(ERROR) << "EVS Hardware Enumerator is shutting down";
     return 1;
 }
diff --git a/evs/manager/1.1/sync/unique_fd.cpp b/evs/manager/1.1/sync/unique_fd.cpp
index 49aa876..eb5bb63 100644
--- a/evs/manager/1.1/sync/unique_fd.cpp
+++ b/evs/manager/1.1/sync/unique_fd.cpp
@@ -19,7 +19,7 @@
 #include <errno.h>
 #include <string.h>
 
-#include <cutils/log.h>
+#include <android-base/logging.h>
 
 namespace android {
 namespace automotive {
@@ -77,15 +77,18 @@
 void UniqueFd::InternalClose() {
     if (fd_ >= 0) {
         int err = close(fd_);
-        LOG_ALWAYS_FATAL_IF(err < 0, "Error closing UniqueFd -- %s", strerror(errno));
+        if (err < 0) {
+            PLOG(FATAL) << "Error closing UniqueFd";
+        }
     }
     fd_ = -1;
 }
 
 int UniqueFd::InternalDup() const {
     int new_fd = fd_ >= 0 ? dup(fd_) : fd_;
-    LOG_ALWAYS_FATAL_IF(new_fd < 0 && fd_ >= 0, "Error duplicating UniqueFd -- %s",
-                        strerror(errno));
+    if (new_fd < 0 && fd_ >= 0) {
+        PLOG(FATAL) << "Error duplicating UniqueFd";
+    }
     return new_fd;
 }
 
diff --git a/evs/manager/1.1/sync/unique_fence.cpp b/evs/manager/1.1/sync/unique_fence.cpp
index 467b368..7aaa9f4 100644
--- a/evs/manager/1.1/sync/unique_fence.cpp
+++ b/evs/manager/1.1/sync/unique_fence.cpp
@@ -22,7 +22,7 @@
 #include <memory>
 #include <string>
 
-#include <cutils/log.h>
+#include <android-base/logging.h>
 #ifdef __clang__
 #pragma clang diagnostic push
 #pragma clang diagnostic ignored "-Wzero-length-array"
@@ -33,6 +33,7 @@
 #endif  // __clang__
 #include <utils/String8.h>
 
+
 constexpr int kWarningTimeout = 2000;
 
 namespace android {
@@ -108,7 +109,8 @@
 
         String8 dump;
         GetDebugStateDump(dump);
-        ALOGW("Waited on fence %d for %d ms. [%s]", fd_.Get(), kWarningTimeout, dump.string());
+        LOG(WARNING) << "Waited on fence " << fd_.Get()
+                     << " for " << kWarningTimeout << " ms. " << dump.string();
     }
     return sync_wait(fd_.Get(), wait_time_ms);
 }
@@ -128,7 +130,10 @@
             // a new name.
             merged_fence.fd_.Reset(sync_merge(name, fence2.fd_.Get(), fence2.fd_.Get()));
         }
-        ALOGE_IF(!merged_fence.fd_, "merging fences: %s", strerror(errno));
+
+        if (!merged_fence.fd_) {
+            PLOG(ERROR) << "Failed to merge fences";
+        }
     }
     return merged_fence;
 }
diff --git a/evs/manager/1.1/sync/unique_timeline.cpp b/evs/manager/1.1/sync/unique_timeline.cpp
index c44ca60..b9556b8 100644
--- a/evs/manager/1.1/sync/unique_timeline.cpp
+++ b/evs/manager/1.1/sync/unique_timeline.cpp
@@ -17,12 +17,12 @@
 #include "unique_timeline.h"
 
 #include <errno.h>
-#include <string.h>
 #include <limits>
-
-#include <cutils/log.h>
+#include <string.h>
 #include <sw_sync.h>
 
+#include <android-base/logging.h>
+
 namespace android {
 namespace automotive {
 namespace evs {
@@ -31,7 +31,9 @@
 
 UniqueTimeline::UniqueTimeline(unsigned offset)
       : fd_(sw_sync_timeline_create()), fence_counter_(offset) {
-    LOG_ALWAYS_FATAL_IF(!fd_, "Failed to create a timeline.");
+    if (!fd_) {
+        LOG(FATAL) << "Failed to create a timeline.";
+    }
 }
 
 UniqueTimeline::~UniqueTimeline() {
@@ -51,7 +53,9 @@
 
 UniqueFence UniqueTimeline::CreateFence(const char* name) {
     UniqueFence fence(sw_sync_fence_create(fd_.Get(), name, fence_counter_));
-    LOG_ALWAYS_FATAL_IF(!fence, "Cannot create fence -- %s", strerror(errno));
+    if (!fence) {
+        PLOG(FATAL) << "Cannot create fence";
+    }
     return fence;
 }
 
@@ -62,7 +66,9 @@
 void UniqueTimeline::BumpTimelineEventCounter(unsigned count) {
     timeline_counter_ += count;
     int err = sw_sync_timeline_inc(fd_.Get(), count);
-    LOG_ALWAYS_FATAL_IF(err < 0, "Cannot bump timeline counter -- %s", strerror(errno));
+    if (err < 0) {
+        PLOG(FATAL) << "Cannot bump timeline counter";
+    }
 }
 
 }  // namespace implementation
diff --git a/evs/sampleDriver/Android.bp b/evs/sampleDriver/Android.bp
index b783822..bafeae2 100644
--- a/evs/sampleDriver/Android.bp
+++ b/evs/sampleDriver/Android.bp
@@ -45,7 +45,6 @@
         "libcutils",
         "libhardware",
         "libhidlbase",
-        "liblog",
         "libutils",
         "libhardware_legacy",
         "libcamera_metadata",
@@ -81,6 +80,14 @@
     include_dirs: [
         "frameworks/native/include/",
     ],
+
+    product_variables: {
+        debuggable: {
+            cflags: [
+                "-DEVS_DEBUG",
+            ]
+        }
+    }
 }
 
 prebuilt_etc {
diff --git a/evs/sampleDriver/ConfigManager.cpp b/evs/sampleDriver/ConfigManager.cpp
index 48019db..b027bbb 100644
--- a/evs/sampleDriver/ConfigManager.cpp
+++ b/evs/sampleDriver/ConfigManager.cpp
@@ -37,11 +37,10 @@
     const XMLElement *curElem = rootElem;
 
     while (curElem != nullptr) {
-        ALOGV("[ELEM] %s%s", prefix.c_str(), curElem->Name());
+        LOG(VERBOSE) << "[ELEM] " << prefix << curElem->Name();
         const XMLAttribute *curAttr = curElem->FirstAttribute();
         while (curAttr) {
-            ALOGV("[ATTR] %s%s: %s",
-                  prefix.c_str(), curAttr->Name(), curAttr->Value());
+            LOG(VERBOSE) << "[ATTR] " << prefix << curAttr->Name() << ": " << curAttr->Value();
             curAttr = curAttr->Next();
         }
 
@@ -55,7 +54,7 @@
 
 void ConfigManager::readCameraInfo(const XMLElement * const aCameraElem) {
     if (aCameraElem == nullptr) {
-        ALOGW("XML file does not have required camera element");
+        LOG(WARNING) << "XML file does not have required camera element";
         return;
     }
 
@@ -70,7 +69,7 @@
 
             /* read camera device information */
             if (!readCameraDeviceInfo(aCamera, curElem)) {
-                ALOGW("Failed to read a camera information of %s", id);
+                LOG(WARNING) << "Failed to read a camera information of " << id;
                 delete aCamera;
                 continue;
             }
@@ -101,7 +100,7 @@
 
             /* read camera device information */
             if (!readCameraDeviceInfo(aCamera, curElem)) {
-                ALOGW("Failed to read a camera information of %s", id);
+                LOG(WARNING) << "Failed to read a camera information of " << id;
                 delete aCamera;
                 continue;
             }
@@ -113,7 +112,7 @@
             mCameraPosition[pos].emplace(id);
         } else {
             /* ignore other device types */
-            ALOGD("Unknown element %s is ignored", curElem->Name());
+            LOG(DEBUG) << "Unknown element " << curElem->Name() << " is ignored";
         }
 
         curElem = curElem->NextSiblingElement();
@@ -147,8 +146,8 @@
 
     /* construct camera_metadata_t */
     if (!constructCameraMetadata(aCamera, totalEntries, totalDataSize)) {
-        ALOGW("Either failed to allocate memory or "
-              "allocated memory was not large enough");
+        LOG(WARNING) << "Either failed to allocate memory or "
+                     << "allocated memory was not large enough";
     }
 
     return true;
@@ -329,12 +328,15 @@
                 /* TODO(b/140416878): add vendor-defined/custom tag support */
 
                 default:
-                    ALOGW("Parameter %s is not supported",
-                          curElem->FindAttribute("name")->Value());
+                    LOG(WARNING) << "Parameter "
+                                 << curElem->FindAttribute("name")->Value()
+                                 << " is not supported";
                     break;
             }
         } else {
-            ALOGW("Unsupported metadata tag %s found", curElem->FindAttribute("name")->Value());
+            LOG(WARNING) << "Unsupported metadata tag "
+                         << curElem->FindAttribute("name")->Value()
+                         << " is found.";
         }
 
         curElem = curElem->NextSiblingElement("parameter");
@@ -349,7 +351,7 @@
                                        const size_t totalEntries,
                                        const size_t totalDataSize) {
     if (aCamera == nullptr || !aCamera->allocate(totalEntries, totalDataSize)) {
-        ALOGE("Failed to allocate memory for camera metadata");
+        LOG(ERROR) << "Failed to allocate memory for camera metadata";
         return false;
     }
 
@@ -367,7 +369,7 @@
                                             numStreamConfigs * kStreamCfgSz);
 
     if (err) {
-        ALOGE("Failed to add stream configurations to metadata, ignored");
+        LOG(ERROR) << "Failed to add stream configurations to metadata, ignored";
         return false;
     }
 
@@ -379,27 +381,35 @@
                                                 entry.first,
                                                 entry.second);
         if (err) {
-            ALOGE("Failed to add an entry with a tag 0x%X", tag);
+            LOG(ERROR) << "Failed to add an entry with a tag, 0x" << std::hex << tag;
 
             /* may exceed preallocated capacity */
-            ALOGE("Camera metadata has %ld / %ld entries and %ld / %ld bytes are filled",
-                  (long)get_camera_metadata_entry_count(aCamera->characteristics),
-                  (long)get_camera_metadata_entry_capacity(aCamera->characteristics),
-                  (long)get_camera_metadata_data_count(aCamera->characteristics),
-                  (long)get_camera_metadata_data_capacity(aCamera->characteristics));
-            ALOGE("\tCurrent metadata entry requires %ld bytes",
-                  (long)calculate_camera_metadata_entry_data_size(tag, entry.second));
+            LOG(ERROR) << "Camera metadata has "
+                       << get_camera_metadata_entry_count(aCamera->characteristics)
+                       << " / "
+                       << get_camera_metadata_entry_capacity(aCamera->characteristics)
+                       << " entries and "
+                       << get_camera_metadata_data_count(aCamera->characteristics)
+                       << " / "
+                       << get_camera_metadata_data_capacity(aCamera->characteristics)
+                       << " bytes are filled.";
+            LOG(ERROR) << "\tCurrent metadata entry requires "
+                       << calculate_camera_metadata_entry_data_size(tag, entry.second)
+                       << " bytes.";
 
             success = false;
         }
     }
 
-    ALOGV("Camera metadata has %ld / %ld entries and %ld / %ld bytes are filled",
-          (long)get_camera_metadata_entry_count(aCamera->characteristics),
-          (long)get_camera_metadata_entry_capacity(aCamera->characteristics),
-          (long)get_camera_metadata_data_count(aCamera->characteristics),
-          (long)get_camera_metadata_data_capacity(aCamera->characteristics));
-
+    LOG(VERBOSE) << "Camera metadata has "
+                 << get_camera_metadata_entry_count(aCamera->characteristics)
+                 << " / "
+                 << get_camera_metadata_entry_capacity(aCamera->characteristics)
+                 << " entries and "
+                 << get_camera_metadata_data_count(aCamera->characteristics)
+                 << " / "
+                 << get_camera_metadata_data_capacity(aCamera->characteristics)
+                 << " bytes are filled.";
     return success;
 }
 
@@ -426,7 +436,7 @@
 
 void ConfigManager::readDisplayInfo(const XMLElement * const aDisplayElem) {
     if (aDisplayElem == nullptr) {
-        ALOGW("XML file does not have required camera element");
+        LOG(WARNING) << "XML file does not have required camera element";
         return;
     }
 
@@ -437,7 +447,7 @@
 
         unique_ptr<DisplayInfo> dpy(new DisplayInfo());
         if (dpy == nullptr) {
-            ALOGE("Failed to allocate memory for DisplayInfo");
+            LOG(ERROR) << "Failed to allocate memory for DisplayInfo";
             return;
         }
 
@@ -486,15 +496,15 @@
     /* load and parse a configuration file */
     xmlDoc.LoadFile(mConfigFilePath);
     if (xmlDoc.ErrorID() != XML_SUCCESS) {
-        ALOGE("Failed to load and/or parse a configuration file, %s", xmlDoc.ErrorStr());
+        LOG(ERROR) << "Failed to load and/or parse a configuration file, " << xmlDoc.ErrorStr();
         return false;
     }
 
     /* retrieve the root element */
     const XMLElement *rootElem = xmlDoc.RootElement();
     if (strcmp(rootElem->Name(), "configuration")) {
-        ALOGE("A configuration file is not in the required format.  "
-              "See /etc/automotive/evs/evs_configuration.dtd");
+        LOG(ERROR) << "A configuration file is not in the required format.  "
+                   << "See /etc/automotive/evs/evs_configuration.dtd";
         return false;
     }
 
@@ -520,8 +530,9 @@
     mConfigCond.notify_all();
 
     const int64_t parsingEnd = android::elapsedRealtimeNano();
-    ALOGI("Parsing configuration file takes %lf (ms)",
-          (double)(parsingEnd - parsingStart) / 1000000.0);
+    LOG(INFO) << "Parsing configuration file takes "
+              << std::scientific << (double)(parsingEnd - parsingStart) / 1000000.0
+              << " ms.";
 
     return true;
 }
@@ -536,7 +547,7 @@
 
     srcFile.open(mBinaryFilePath, fstream::in | fstream::binary);
     if (!srcFile) {
-        ALOGE("Failed to open a source binary file, %s", mBinaryFilePath);
+        LOG(ERROR) << "Failed to open a source binary file, " << mBinaryFilePath;
         return false;
     }
 
@@ -545,7 +556,7 @@
 
     /* read configuration data into the internal buffer */
     srcFile.read(mBuffer, sizeof(mBuffer));
-    ALOGV("%s: %ld bytes are read", __FUNCTION__, (long)srcFile.gcount());
+    LOG(VERBOSE) << __FUNCTION__ << ": " << srcFile.gcount() << " bytes are read.";
     char *p = mBuffer;
     size_t sz = 0;
 
@@ -565,7 +576,7 @@
         unique_ptr<ConfigManager::CameraGroupInfo> aCamera;
         if (aCamera == nullptr ||
             !aCamera->allocate(num_entry, num_data))  {
-            ALOGE("Failed to create new CameraInfo object");
+            LOG(ERROR) << "Failed to create new CameraInfo object";
             mCameraInfo.clear();
             return false;
         }
@@ -657,7 +668,8 @@
                     p += count * sizeof(camera_metadata_rational_t);
                     break;
                 default:
-                    ALOGW("Type %d is unknown; data may be corrupted", type);
+                    LOG(WARNING) << "Type " << type << " is unknown; "
+                                 << "data may be corrupted.";
                     break;
             }
         }
@@ -683,7 +695,7 @@
         unique_ptr<ConfigManager::CameraInfo> aCamera;
         if (aCamera == nullptr ||
             !aCamera->allocate(num_entry, num_data))  {
-            ALOGE("Failed to create new CameraInfo object");
+            LOG(ERROR) << "Failed to create new CameraInfo object";
             mCameraInfo.clear();
             return false;
         }
@@ -771,7 +783,8 @@
                     p += count * sizeof(camera_metadata_rational_t);
                     break;
                 default:
-                    ALOGW("Type %d is unknown; data may be corrupted", type);
+                    LOG(WARNING) << "Type " << type << " is unknown; "
+                                 << "data may be corrupted.";
                     break;
             }
         }
@@ -786,8 +799,9 @@
     mConfigCond.notify_all();
 
     int64_t readEnd = android::elapsedRealtimeNano();
-    ALOGI("%s takes %lf (ms)", __FUNCTION__,
-          (double)(readEnd - readStart) / 1000000.0);
+    LOG(INFO) << __FUNCTION__ << " takes "
+              << std::scientific << (double)(readEnd - readStart) / 1000000.0
+              << " ms.";
 
     return true;
 }
@@ -800,7 +814,7 @@
 
     outFile.open(mBinaryFilePath, fstream::out | fstream::binary);
     if (!outFile) {
-        ALOGE("Failed to open a destination binary file, %s", mBinaryFilePath);
+        LOG(ERROR) << "Failed to open a destination binary file, " << mBinaryFilePath;
         return false;
     }
 
@@ -812,7 +826,7 @@
     outFile.write(reinterpret_cast<const char *>(&sz),
                   sizeof(size_t));
     for (auto&& [camId, camInfo] : mCameraGroups) {
-        ALOGI("Storing camera group %s", camId.c_str());
+        LOG(INFO) << "Storing camera group " << camId;
 
         /* write a camera identifier string */
         outFile.write(reinterpret_cast<const char *>(&camId),
@@ -867,7 +881,7 @@
             camera_metadata_entry_t entry;
             for (auto idx = 0; idx < num_entry; ++idx) {
                 if (get_camera_metadata_entry(camInfo->characteristics, idx, &entry)) {
-                    ALOGE("Failed to retrieve camera metadata entry %d", idx);
+                    LOG(ERROR) << "Failed to retrieve camera metadata entry " << idx;
                     outFile.close();
                     return false;
                 }
@@ -902,7 +916,7 @@
                     case TYPE_RATIONAL:
                         [[fallthrough]];
                     default:
-                        ALOGW("Type %d is not supported", type);
+                        LOG(WARNING) << "Type " << type << " is not supported.";
                         break;
                 }
             }
@@ -914,7 +928,7 @@
     outFile.write(reinterpret_cast<const char *>(&sz),
                   sizeof(size_t));
     for (auto&& [camId, camInfo] : mCameraInfo) {
-        ALOGI("Storing camera %s", camId.c_str());
+        LOG(INFO) << "Storing camera " << camId;
 
         /* write a camera identifier string */
         outFile.write(reinterpret_cast<const char *>(&camId),
@@ -965,7 +979,7 @@
             camera_metadata_entry_t entry;
             for (auto idx = 0; idx < num_entry; ++idx) {
                 if (get_camera_metadata_entry(camInfo->characteristics, idx, &entry)) {
-                    ALOGE("Failed to retrieve camera metadata entry %d", idx);
+                    LOG(ERROR) << "Failed to retrieve camera metadata entry " << idx;
                     outFile.close();
                     return false;
                 }
@@ -1000,7 +1014,7 @@
                     case TYPE_RATIONAL:
                         [[fallthrough]];
                     default:
-                        ALOGW("Type %d is not supported", type);
+                        LOG(WARNING) << "Type " << type << " is not supported.";
                         break;
                 }
             }
@@ -1009,8 +1023,9 @@
 
     outFile.close();
     int64_t writeEnd = android::elapsedRealtimeNano();
-    ALOGI("%s takes %lf (ms)", __FUNCTION__,
-          (double)(writeEnd - writeStart) / 1000000.0);
+    LOG(INFO) << __FUNCTION__ << " takes "
+              << std::scientific << (double)(writeEnd - writeStart) / 1000000.0
+              << " ms.";
 
 
     return true;
@@ -1062,7 +1077,8 @@
             }
 
             default:
-                ALOGW("Tag 0x%X is not supported.  Data may be corrupted?", tag);
+                LOG(WARNING) << "Tag " << std::hex << tag << " is not supported.  "
+                             << "Data may be corrupted?";
                 break;
         }
     }
diff --git a/evs/sampleDriver/ConfigManager.h b/evs/sampleDriver/ConfigManager.h
index 9fa15aa..97e609a 100644
--- a/evs/sampleDriver/ConfigManager.h
+++ b/evs/sampleDriver/ConfigManager.h
@@ -24,8 +24,8 @@
 #include <tinyxml2.h>
 
 #include <system/camera_metadata.h>
-#include <log/log.h>
 #include <android/hardware/automotive/evs/1.1/types.h>
+#include <android-base/logging.h>
 
 #include "ConfigManagerUtil.h"
 
@@ -65,7 +65,7 @@
         /* Allocate memory for camera_metadata_t */
         bool allocate(size_t entry_cap, size_t data_cap) {
             if (characteristics != nullptr) {
-                ALOGE("Camera metadata is already allocated");
+                LOG(ERROR) << "Camera metadata is already allocated";
                 return false;
             }
 
diff --git a/evs/sampleDriver/ConfigManagerUtil.cpp b/evs/sampleDriver/ConfigManagerUtil.cpp
index d10f236..5105f19 100644
--- a/evs/sampleDriver/ConfigManagerUtil.cpp
+++ b/evs/sampleDriver/ConfigManagerUtil.cpp
@@ -19,10 +19,10 @@
 #include <string>
 #include <sstream>
 #include <linux/videodev2.h>
-
-#include <log/log.h>
 #include <system/graphics-base-v1.0.h>
 
+#include <android-base/logging.h>
+
 
 bool ConfigManagerUtil::convertToEvsCameraParam(const string &id,
                                                 CameraParam &camParam) {
diff --git a/evs/sampleDriver/EvsEnumerator.cpp b/evs/sampleDriver/EvsEnumerator.cpp
index 055bc61..e108007 100644
--- a/evs/sampleDriver/EvsEnumerator.cpp
+++ b/evs/sampleDriver/EvsEnumerator.cpp
@@ -47,6 +47,8 @@
 std::unique_ptr<ConfigManager>                               EvsEnumerator::sConfigManager;
 sp<IAutomotiveDisplayProxyService>                           EvsEnumerator::sDisplayProxy;
 std::unordered_map<uint8_t, uint64_t>                        EvsEnumerator::sDisplayPortList;
+uint64_t                                                     EvsEnumerator::sInternalDisplayId;
+
 
 // Constants
 const auto kEnumerationTimeout = 10s;
@@ -56,8 +58,9 @@
     hardware::IPCThreadState *ipc = hardware::IPCThreadState::self();
     if (AID_AUTOMOTIVE_EVS != ipc->getCallingUid() &&
         AID_ROOT != ipc->getCallingUid()) {
-
-        ALOGE("EVS access denied: pid = %d, uid = %d", ipc->getCallingPid(), ipc->getCallingUid());
+        LOG(ERROR) << "EVS access denied: "
+                   << "pid = " << ipc->getCallingPid()
+                   << ", uid = " << ipc->getCallingUid();
         return false;
     }
 
@@ -67,7 +70,7 @@
 void EvsEnumerator::EvsUeventThread(std::atomic<bool>& running) {
     int status = uevent_init();
     if (!status) {
-        ALOGE("Failed to initialize uevent handler.");
+        LOG(ERROR) << "Failed to initialize uevent handler.";
         return;
     }
 
@@ -112,7 +115,7 @@
             std::lock_guard<std::mutex> lock(sLock);
             if (cmd_removal) {
                 sCameraList.erase(devpath);
-                ALOGI("%s is removed", devpath.c_str());
+                LOG(INFO) << devpath << " is removed.";
             } else if (cmd_addition) {
                 // NOTE: we are here adding new device without a validation
                 // because it always fails to open, b/132164956.
@@ -128,7 +131,7 @@
                     }
                 }
                 sCameraList.emplace(devpath, cam);
-                ALOGI("%s is added", devpath.c_str());
+                LOG(INFO) << devpath << " is added.";
             } else {
                 // Ignore all other actions including "change".
             }
@@ -142,7 +145,7 @@
 }
 
 EvsEnumerator::EvsEnumerator(sp<IAutomotiveDisplayProxyService> proxyService) {
-    ALOGD("EvsEnumerator created");
+    LOG(DEBUG) << "EvsEnumerator is created.";
 
     if (sConfigManager == nullptr) {
         /* loads and initializes ConfigManager in a separate thread */
@@ -168,7 +171,8 @@
     //           For example, this code might be replaced with nothing more than:
     //                   sCameraList.emplace("/dev/video0");
     //                   sCameraList.emplace("/dev/video1");
-    ALOGI("%s: Starting dev/video* enumeration", __FUNCTION__);
+    LOG(INFO) << __FUNCTION__
+              << ": Starting dev/video* enumeration";
     unsigned videoCount   = 0;
     unsigned captureCount = 0;
     DIR* dir = opendir("/dev");
@@ -186,7 +190,7 @@
                 deviceName += entry->d_name;
                 videoCount++;
                 if (sCameraList.find(deviceName) != sCameraList.end()) {
-                    ALOGI("%s has been added already.", deviceName.c_str());
+                    LOG(INFO) << deviceName << " has been added already.";
                     captureCount++;
                 } else if(qualifyCaptureDevice(deviceName.c_str())) {
                     sCameraList.emplace(deviceName, deviceName.c_str());
@@ -196,34 +200,42 @@
         }
     }
 
-    ALOGI("Found %d qualified video capture devices of %d checked\n", captureCount, videoCount);
+    LOG(INFO) << "Found " << captureCount << " qualified video capture devices "
+              << "of " << videoCount << " checked.";
 }
 
 
 void EvsEnumerator::enumerateDisplays() {
-    ALOGI("%s: Starting display enumeration", __FUNCTION__);
+    LOG(INFO) << __FUNCTION__
+              << ": Starting display enumeration";
     if (!sDisplayProxy) {
-        ALOGE("AutomotiveDisplayProxyService is not available!");
+        LOG(ERROR) << "AutomotiveDisplayProxyService is not available!";
         return;
     }
 
     sDisplayProxy->getDisplayIdList(
         [](const auto& displayIds) {
-            for (const auto& id : displayIds) {
-                const auto port = id & 0xF;
-                ALOGI("Display 0x%lX is detected on the port %ld", (unsigned long)id, (long)port);
-                sDisplayPortList.insert_or_assign(port, id);
+            // The first entry of the list is the internal display.  See
+            // SurfaceFlinger::getPhysicalDisplayIds() implementation.
+            if (displayIds.size() > 0) {
+                sInternalDisplayId = displayIds[0];
+                for (const auto& id : displayIds) {
+                    const auto port = id & 0xF;
+                    LOG(INFO) << "Display " << std::hex << id
+                              << " is detected on the port, " << port;
+                    sDisplayPortList.insert_or_assign(port, id);
+                }
             }
         }
     );
 
-    ALOGI("Found %d displays", (int)sDisplayPortList.size());
+    LOG(INFO) << "Found " << sDisplayPortList.size() << " displays";
 }
 
 
 // Methods from ::android::hardware::automotive::evs::V1_0::IEvsEnumerator follow.
 Return<void> EvsEnumerator::getCameraList(getCameraList_cb _hidl_cb)  {
-    ALOGD("getCameraList");
+    LOG(DEBUG) << __FUNCTION__;
     if (!checkPermission()) {
         return Void();
     }
@@ -236,7 +248,7 @@
             if (!sCameraSignal.wait_for(lock,
                                         kEnumerationTimeout,
                                         []{ return sCameraList.size() > 0; })) {
-                ALOGD("Timer expired.  No new device has been added.");
+                LOG(DEBUG) << "Timer expired.  No new device has been added.";
             }
         }
     }
@@ -252,7 +264,7 @@
     }
 
     // Send back the results
-    ALOGD("reporting %zu cameras available", hidlCameras.size());
+    LOG(DEBUG) << "Reporting " << hidlCameras.size() << " cameras available";
     _hidl_cb(hidlCameras);
 
     // HIDL convention says we return Void if we sent our result back via callback
@@ -261,7 +273,7 @@
 
 
 Return<sp<IEvsCamera_1_0>> EvsEnumerator::openCamera(const hidl_string& cameraId) {
-    ALOGD("openCamera");
+    LOG(DEBUG) << __FUNCTION__;
     if (!checkPermission()) {
         return nullptr;
     }
@@ -272,7 +284,7 @@
     // Has this camera already been instantiated by another caller?
     sp<EvsV4lCamera> pActiveCamera = pRecord->activeInstance.promote();
     if (pActiveCamera != nullptr) {
-        ALOGW("Killing previous camera because of new caller");
+        LOG(WARNING) << "Killing previous camera because of new caller";
         closeCamera(pActiveCamera);
     }
 
@@ -286,7 +298,7 @@
 
     pRecord->activeInstance = pActiveCamera;
     if (pActiveCamera == nullptr) {
-        ALOGE("Failed to create new EvsV4lCamera object for %s\n", cameraId.c_str());
+        LOG(ERROR) << "Failed to create new EvsV4lCamera object for " << cameraId;
     }
 
     return pActiveCamera;
@@ -294,10 +306,10 @@
 
 
 Return<void> EvsEnumerator::closeCamera(const ::android::sp<IEvsCamera_1_0>& pCamera) {
-    ALOGD("closeCamera");
+    LOG(DEBUG) << __FUNCTION__;
 
     if (pCamera == nullptr) {
-        ALOGE("Ignoring call to closeCamera with null camera ptr");
+        LOG(ERROR) << "Ignoring call to closeCamera with null camera ptr";
         return Void();
     }
 
@@ -315,7 +327,7 @@
 
 
 Return<sp<IEvsDisplay_1_0>> EvsEnumerator::openDisplay() {
-    ALOGD("openDisplay");
+    LOG(DEBUG) << __FUNCTION__;
     if (!checkPermission()) {
         return nullptr;
     }
@@ -324,30 +336,29 @@
     // give exclusive access to the new caller.
     sp<EvsGlDisplay> pActiveDisplay = sActiveDisplay.promote();
     if (pActiveDisplay != nullptr) {
-        ALOGW("Killing previous display because of new caller");
+        LOG(WARNING) << "Killing previous display because of new caller";
         closeDisplay(pActiveDisplay);
     }
 
-    // Create a new display interface and return it.  Please note that this
-    // implementation uses whichever display unordered_map::begin() returns.
-    pActiveDisplay = new EvsGlDisplay(sDisplayProxy,
-                                      sDisplayPortList.begin()->second);
+    // Create a new display interface and return it.
+    pActiveDisplay = new EvsGlDisplay(sDisplayProxy, sInternalDisplayId);
     sActiveDisplay = pActiveDisplay;
 
-    ALOGD("Returning new EvsGlDisplay object %p", pActiveDisplay.get());
+    LOG(DEBUG) << "Returning new EvsGlDisplay object " << pActiveDisplay.get();
     return pActiveDisplay;
 }
 
 
 Return<void> EvsEnumerator::closeDisplay(const ::android::sp<IEvsDisplay_1_0>& pDisplay) {
-    ALOGD("closeDisplay");
+    LOG(DEBUG) << __FUNCTION__;
 
     // Do we still have a display object we think should be active?
     sp<EvsGlDisplay> pActiveDisplay = sActiveDisplay.promote();
     if (pActiveDisplay == nullptr) {
-        ALOGE("Somehow a display is being destroyed when the enumerator didn't know one existed");
+        LOG(ERROR) << "Somehow a display is being destroyed "
+                   << "when the enumerator didn't know one existed";
     } else if (sActiveDisplay != pDisplay) {
-        ALOGW("Ignoring close of previously orphaned display - why did a client steal?");
+        LOG(WARNING) << "Ignoring close of previously orphaned display - why did a client steal?";
     } else {
         // Drop the active display
         pActiveDisplay->forceShutdown();
@@ -359,7 +370,7 @@
 
 
 Return<EvsDisplayState> EvsEnumerator::getDisplayState()  {
-    ALOGD("getDisplayState");
+    LOG(DEBUG) << __FUNCTION__;
     if (!checkPermission()) {
         return EvsDisplayState::DEAD;
     }
@@ -376,7 +387,7 @@
 
 // Methods from ::android::hardware::automotive::evs::V1_1::IEvsEnumerator follow.
 Return<void> EvsEnumerator::getCameraList_1_1(getCameraList_1_1_cb _hidl_cb)  {
-    ALOGD("getCameraList_1_1");
+    LOG(DEBUG) << __FUNCTION__;
     if (!checkPermission()) {
         return Void();
     }
@@ -388,7 +399,7 @@
             if (!sCameraSignal.wait_for(lock,
                                         kEnumerationTimeout,
                                         []{ return sCameraList.size() > 0; })) {
-                ALOGD("Timer expired.  No new device has been added.");
+                LOG(DEBUG) << "Timer expired.  No new device has been added.";
             }
         }
     }
@@ -451,7 +462,7 @@
 
 Return<sp<IEvsCamera_1_1>> EvsEnumerator::openCamera_1_1(const hidl_string& cameraId,
                                                          const Stream& streamCfg) {
-    ALOGD("openCamera_1_1");
+    LOG(DEBUG) << __FUNCTION__;
     if (!checkPermission()) {
         return nullptr;
     }
@@ -459,20 +470,21 @@
     // Is this a recognized camera id?
     CameraRecord *pRecord = findCameraById(cameraId);
     if (pRecord == nullptr) {
-        ALOGE("%s does not exist!", cameraId.c_str());
+        LOG(ERROR) << cameraId << " does not exist!";
         return nullptr;
     }
 
     // Has this camera already been instantiated by another caller?
     sp<EvsV4lCamera> pActiveCamera = pRecord->activeInstance.promote();
     if (pActiveCamera != nullptr) {
-        ALOGW("Killing previous camera because of new caller");
+        LOG(WARNING) << "Killing previous camera because of new caller";
         closeCamera(pActiveCamera);
     }
 
     // Construct a camera instance for the caller
     if (sConfigManager == nullptr) {
-        ALOGW("ConfigManager is not available.  Given stream configuration is ignored.");
+        LOG(WARNING) << "ConfigManager is not available.  "
+                     << "Given stream configuration is ignored.";
         pActiveCamera = EvsV4lCamera::Create(cameraId.c_str());
     } else {
         pActiveCamera = EvsV4lCamera::Create(cameraId.c_str(),
@@ -481,7 +493,7 @@
     }
     pRecord->activeInstance = pActiveCamera;
     if (pActiveCamera == nullptr) {
-        ALOGE("Failed to create new EvsV4lCamera object for %s\n", cameraId.c_str());
+        LOG(ERROR) << "Failed to create new EvsV4lCamera object for " << cameraId;
     }
 
     return pActiveCamera;
@@ -491,10 +503,15 @@
 Return<void> EvsEnumerator::getDisplayIdList(getDisplayIdList_cb _list_cb) {
     hidl_vec<uint8_t> ids;
 
-    ids.resize(sDisplayPortList.size());
-    unsigned i = 0;
-    for (const auto& [port, id] : sDisplayPortList) {
-        ids[i++] = port;
+    if (sDisplayPortList.size() > 0) {
+        ids.resize(sDisplayPortList.size());
+        unsigned i = 0;
+        ids[i++] = sInternalDisplayId & 0xF;
+        for (const auto& [port, id] : sDisplayPortList) {
+            if (sInternalDisplayId != id) {
+                ids[i++] = port;
+            }
+        }
     }
 
     _list_cb(ids);
@@ -503,7 +520,7 @@
 
 
 Return<sp<IEvsDisplay_1_1>> EvsEnumerator::openDisplay_1_1(uint8_t port) {
-    ALOGD("%s", __FUNCTION__);
+    LOG(DEBUG) << __FUNCTION__;
     if (!checkPermission()) {
         return nullptr;
     }
@@ -512,7 +529,7 @@
     // give exclusive access to the new caller.
     sp<EvsGlDisplay> pActiveDisplay = sActiveDisplay.promote();
     if (pActiveDisplay != nullptr) {
-        ALOGW("Killing previous display because of new caller");
+        LOG(WARNING) << "Killing previous display because of new caller";
         closeDisplay(pActiveDisplay);
     }
 
@@ -520,7 +537,7 @@
     pActiveDisplay = new EvsGlDisplay(sDisplayProxy, sDisplayPortList[port]);
     sActiveDisplay = pActiveDisplay;
 
-    ALOGD("Returning new EvsGlDisplay object %p", pActiveDisplay.get());
+    LOG(DEBUG) << "Returning new EvsGlDisplay object " << pActiveDisplay.get();
     return pActiveDisplay;
 }
 
@@ -532,17 +549,18 @@
 
     // Is the display being destroyed actually the one we think is active?
     if (!pRecord) {
-        ALOGE("Asked to close a camera whose name isn't recognized");
+        LOG(ERROR) << "Asked to close a camera whose name isn't recognized";
     } else {
         sp<EvsV4lCamera> pActiveCamera = pRecord->activeInstance.promote();
 
         if (pActiveCamera == nullptr) {
-            ALOGE("Somehow a camera is being destroyed "
-                  "when the enumerator didn't know one existed");
+            LOG(ERROR) << "Somehow a camera is being destroyed "
+                       << "when the enumerator didn't know one existed";
         } else if (pActiveCamera != pCamera) {
             // This can happen if the camera was aggressively reopened,
             // orphaning this previous instance
-            ALOGW("Ignoring close of previously orphaned camera - why did a client steal?");
+            LOG(WARNING) << "Ignoring close of previously orphaned camera "
+                         << "- why did a client steal?";
         } else {
             // Drop the active camera
             pActiveCamera->shutdown();
@@ -587,9 +605,10 @@
     for (int i=0; !found; i++) {
         formatDescription.index = i;
         if (ioctl(fd, VIDIOC_ENUM_FMT, &formatDescription) == 0) {
-            ALOGI("FORMAT 0x%X, type 0x%X, desc %s, flags 0x%X",
-                  formatDescription.pixelformat, formatDescription.type,
-                  formatDescription.description, formatDescription.flags);
+            LOG(INFO) << "Format: 0x" << std::hex << formatDescription.pixelformat
+                      << " Type: 0x" << std::hex << formatDescription.type
+                      << " Desc: " << formatDescription.description
+                      << " Flags: 0x" << std::hex << formatDescription.flags;
             switch (formatDescription.pixelformat)
             {
                 case V4L2_PIX_FMT_YUYV:     found = true; break;
@@ -602,7 +621,8 @@
                 case V4L2_PIX_FMT_XRGB32:   found = true; break;
 #endif // V4L2_PIX_FMT_ARGB32
                 default:
-                    ALOGW("Unsupported, 0x%X", formatDescription.pixelformat);
+                    LOG(WARNING) << "Unsupported, "
+                                 << std::hex << formatDescription.pixelformat;
                     break;
             }
         } else {
diff --git a/evs/sampleDriver/EvsEnumerator.h b/evs/sampleDriver/EvsEnumerator.h
index b767a3b..33b497e 100644
--- a/evs/sampleDriver/EvsEnumerator.h
+++ b/evs/sampleDriver/EvsEnumerator.h
@@ -22,7 +22,6 @@
 #include <android/hardware/camera/device/3.2/ICameraDevice.h>
 #include <android/frameworks/automotive/display/1.0/IAutomotiveDisplayProxyService.h>
 
-
 #include <unordered_map>
 #include <thread>
 #include <atomic>
@@ -114,6 +113,7 @@
     static sp<IAutomotiveDisplayProxyService> sDisplayProxy;
     static std::unordered_map<uint8_t,
                               uint64_t>       sDisplayPortList;
+    static uint64_t                           sInternalDisplayId;
 };
 
 } // namespace implementation
diff --git a/evs/sampleDriver/EvsGlDisplay.cpp b/evs/sampleDriver/EvsGlDisplay.cpp
index 652beff..adf56ac 100644
--- a/evs/sampleDriver/EvsGlDisplay.cpp
+++ b/evs/sampleDriver/EvsGlDisplay.cpp
@@ -36,7 +36,7 @@
 EvsGlDisplay::EvsGlDisplay(sp<IAutomotiveDisplayProxyService> pDisplayProxy, uint64_t displayId)
     : mDisplayProxy(pDisplayProxy),
       mDisplayId(displayId) {
-    ALOGD("EvsGlDisplay instantiated");
+    LOG(DEBUG) << "EvsGlDisplay instantiated";
 
     // Set up our self description
     // NOTE:  These are arbitrary values chosen for testing
@@ -46,7 +46,7 @@
 
 
 EvsGlDisplay::~EvsGlDisplay() {
-    ALOGD("EvsGlDisplay being destroyed");
+    LOG(DEBUG) << "EvsGlDisplay being destroyed";
     forceShutdown();
 }
 
@@ -56,7 +56,7 @@
  */
 void EvsGlDisplay::forceShutdown()
 {
-    ALOGD("EvsGlDisplay forceShutdown");
+    LOG(DEBUG) << "EvsGlDisplay forceShutdown";
     std::lock_guard<std::mutex> lock(mAccessLock);
 
     // If the buffer isn't being held by a remote client, release it now as an
@@ -65,7 +65,7 @@
     if (mBuffer.memHandle) {
         // Report if we're going away while a buffer is outstanding
         if (mFrameBusy) {
-            ALOGE("EvsGlDisplay going down while client is holding a buffer");
+            LOG(ERROR) << "EvsGlDisplay going down while client is holding a buffer";
         }
 
         // Drop the graphics buffer we've been using
@@ -87,7 +87,7 @@
  * See the description of the DisplayDesc structure for details.
  */
 Return<void> EvsGlDisplay::getDisplayInfo(getDisplayInfo_cb _hidl_cb)  {
-    ALOGD("getDisplayInfo");
+    LOG(DEBUG) << __FUNCTION__;
 
     // Send back our self description
     _hidl_cb(mInfo);
@@ -105,7 +105,7 @@
  * is expected to request the NOT_VISIBLE state after passing the last video frame.
  */
 Return<EvsResult> EvsGlDisplay::setDisplayState(EvsDisplayState state) {
-    ALOGD("setDisplayState");
+    LOG(DEBUG) << __FUNCTION__;
     std::lock_guard<std::mutex> lock(mAccessLock);
 
     if (mRequestedState == EvsDisplayState::DEAD) {
@@ -144,7 +144,7 @@
  * spontaneously change display states.
  */
 Return<EvsDisplayState> EvsGlDisplay::getDisplayState()  {
-    ALOGD("getDisplayState");
+    LOG(DEBUG) << __FUNCTION__;
     std::lock_guard<std::mutex> lock(mAccessLock);
 
     return mRequestedState;
@@ -158,11 +158,11 @@
  * display is no longer visible.
  */
 Return<void> EvsGlDisplay::getTargetBuffer(getTargetBuffer_cb _hidl_cb)  {
-    ALOGV("getTargetBuffer");
+    LOG(DEBUG) << __FUNCTION__;
     std::lock_guard<std::mutex> lock(mAccessLock);
 
     if (mRequestedState == EvsDisplayState::DEAD) {
-        ALOGE("Rejecting buffer request from object that lost ownership of the display.");
+        LOG(ERROR) << "Rejecting buffer request from object that lost ownership of the display.";
         _hidl_cb({});
         return Void();
     }
@@ -175,7 +175,7 @@
         // (briefly) shown.
         if (!mGlWrapper.initialize(mDisplayProxy, mDisplayId)) {
             // Report the failure
-            ALOGE("Failed to initialize GL display");
+            LOG(ERROR) << "Failed to initialize GL display";
             _hidl_cb({});
             return Void();
         }
@@ -197,22 +197,23 @@
                                          &mBuffer.stride,
                                          0, "EvsGlDisplay");
         if (result != NO_ERROR) {
-            ALOGE("Error %d allocating %d x %d graphics buffer",
-                  result, mBuffer.width, mBuffer.height);
+            LOG(ERROR) << "Error " << result
+                       << " allocating " << mBuffer.width << " x " << mBuffer.height
+                       << " graphics buffer.";
             _hidl_cb({});
             mGlWrapper.shutdown();
             return Void();
         }
         if (!handle) {
-            ALOGE("We didn't get a buffer handle back from the allocator");
+            LOG(ERROR) << "We didn't get a buffer handle back from the allocator";
             _hidl_cb({});
             mGlWrapper.shutdown();
             return Void();
         }
 
         mBuffer.memHandle = handle;
-        ALOGD("Allocated new buffer %p with stride %u",
-              mBuffer.memHandle.getNativeHandle(), mBuffer.stride);
+        LOG(DEBUG) << "Allocated new buffer " << mBuffer.memHandle.getNativeHandle()
+                   << " with stride " <<  mBuffer.stride;
         mFrameBusy = false;
     }
 
@@ -222,7 +223,7 @@
         // (an unsupported mode of operation) or else the client hasn't returned
         // a previously issued buffer yet (they're behaving badly).
         // NOTE:  We have to make the callback even if we have nothing to provide
-        ALOGE("getTargetBuffer called while no buffers available.");
+        LOG(ERROR) << "getTargetBuffer called while no buffers available.";
         _hidl_cb({});
         return Void();
     } else {
@@ -230,8 +231,8 @@
         mFrameBusy = true;
 
         // Send the buffer to the client
-        ALOGV("Providing display buffer handle %p as id %d",
-              mBuffer.memHandle.getNativeHandle(), mBuffer.bufferId);
+        LOG(VERBOSE) << "Providing display buffer handle " << mBuffer.memHandle.getNativeHandle()
+                     << " as id " << mBuffer.bufferId;
         _hidl_cb(mBuffer);
         return Void();
     }
@@ -243,20 +244,21 @@
  * The buffer is no longer valid for use by the client after this call.
  */
 Return<EvsResult> EvsGlDisplay::returnTargetBufferForDisplay(const BufferDesc_1_0& buffer)  {
-    ALOGV("returnTargetBufferForDisplay %p", buffer.memHandle.getNativeHandle());
+    LOG(VERBOSE) << __FUNCTION__ << " " << buffer.memHandle.getNativeHandle();
     std::lock_guard<std::mutex> lock(mAccessLock);
 
     // Nobody should call us with a null handle
     if (!buffer.memHandle.getNativeHandle()) {
-        ALOGE ("returnTargetBufferForDisplay called without a valid buffer handle.\n");
+        LOG(ERROR) << __FUNCTION__
+                   << " called without a valid buffer handle.";
         return EvsResult::INVALID_ARG;
     }
     if (buffer.bufferId != mBuffer.bufferId) {
-        ALOGE ("Got an unrecognized frame returned.\n");
+        LOG(ERROR) << "Got an unrecognized frame returned.";
         return EvsResult::INVALID_ARG;
     }
     if (!mFrameBusy) {
-        ALOGE ("A frame was returned with no outstanding frames.\n");
+        LOG(ERROR) << "A frame was returned with no outstanding frames.";
         return EvsResult::BUFFER_NOT_AVAILABLE;
     }
 
@@ -276,7 +278,7 @@
     // Validate we're in an expected state
     if (mRequestedState != EvsDisplayState::VISIBLE) {
         // Not sure why a client would send frames back when we're not visible.
-        ALOGW ("Got a frame returned while not visible - ignoring.\n");
+        LOG(WARNING) << "Got a frame returned while not visible - ignoring.";
     } else {
         // Update the texture contents with the provided data
 // TODO:  Why doesn't it work to pass in the buffer handle we got from HIDL?
@@ -287,10 +289,13 @@
 
         // Put the image on the screen
         mGlWrapper.renderImageToScreen();
+#ifdef EVS_DEBUG
         if (!sDebugFirstFrameDisplayed) {
-            ALOGD("EvsFirstFrameDisplayTiming start time: %" PRId64 "ms", elapsedRealtime());
+            LOG(DEBUG) << "EvsFirstFrameDisplayTiming start time: "
+                       << elapsedRealtime() << " ms.";
             sDebugFirstFrameDisplayed = true;
         }
+#endif
 
     }
 
diff --git a/evs/sampleDriver/EvsV4lCamera.cpp b/evs/sampleDriver/EvsV4lCamera.cpp
index 8d6bcd3..eefad2a 100644
--- a/evs/sampleDriver/EvsV4lCamera.cpp
+++ b/evs/sampleDriver/EvsV4lCamera.cpp
@@ -18,9 +18,10 @@
 #include "EvsEnumerator.h"
 #include "bufferCopy.h"
 
+#include <android/hardware_buffer.h>
+#include <android-base/logging.h>
 #include <ui/GraphicBufferAllocator.h>
 #include <ui/GraphicBufferMapper.h>
-#include <android/hardware_buffer.h>
 #include <utils/SystemClock.h>
 
 
@@ -43,7 +44,7 @@
         mFramesAllowed(0),
         mFramesInUse(0),
         mCameraInfo(camInfo) {
-    ALOGD("EvsV4lCamera instantiated");
+    LOG(DEBUG) << "EvsV4lCamera instantiated";
 
     mDescription.v1.cameraId = deviceName;
     if (camInfo != nullptr) {
@@ -62,7 +63,7 @@
 
 
 EvsV4lCamera::~EvsV4lCamera() {
-    ALOGD("EvsV4lCamera being destroyed");
+    LOG(DEBUG) << "EvsV4lCamera being destroyed";
     shutdown();
 }
 
@@ -72,7 +73,7 @@
 //
 void EvsV4lCamera::shutdown()
 {
-    ALOGD("EvsV4lCamera shutdown");
+    LOG(DEBUG) << "EvsV4lCamera shutdown";
 
     // Make sure our output stream is cleaned up
     // (It really should be already)
@@ -88,7 +89,7 @@
         GraphicBufferAllocator& alloc(GraphicBufferAllocator::get());
         for (auto&& rec : mBuffers) {
             if (rec.inUse) {
-                ALOGW("Error - releasing buffer despite remote ownership");
+                LOG(WARNING) << "Releasing buffer despite remote ownership";
             }
             alloc.free(rec.handle);
             rec.handle = nullptr;
@@ -100,7 +101,7 @@
 
 // Methods from ::android::hardware::automotive::evs::V1_0::IEvsCamera follow.
 Return<void> EvsV4lCamera::getCameraInfo(getCameraInfo_cb _hidl_cb) {
-    ALOGD("getCameraInfo");
+    LOG(DEBUG) << __FUNCTION__;
 
     // Send back our self description
     _hidl_cb(mDescription.v1);
@@ -109,18 +110,18 @@
 
 
 Return<EvsResult> EvsV4lCamera::setMaxFramesInFlight(uint32_t bufferCount) {
-    ALOGD("setMaxFramesInFlight");
+    LOG(DEBUG) << __FUNCTION__;
     std::lock_guard<std::mutex> lock(mAccessLock);
 
     // If we've been displaced by another owner of the camera, then we can't do anything else
     if (!mVideo.isOpen()) {
-        ALOGW("ignoring setMaxFramesInFlight call when camera has been lost.");
+        LOG(WARNING) << "Ignoring setMaxFramesInFlight call when camera has been lost.";
         return EvsResult::OWNERSHIP_LOST;
     }
 
     // We cannot function without at least one video buffer to send data
     if (bufferCount < 1) {
-        ALOGE("Ignoring setMaxFramesInFlight with less than one buffer requested");
+        LOG(ERROR) << "Ignoring setMaxFramesInFlight with less than one buffer requested";
         return EvsResult::INVALID_ARG;
     }
 
@@ -134,23 +135,23 @@
 
 
 Return<EvsResult> EvsV4lCamera::startVideoStream(const sp<IEvsCameraStream_1_0>& stream)  {
-    ALOGD("startVideoStream");
+    LOG(DEBUG) << __FUNCTION__;
     std::lock_guard<std::mutex> lock(mAccessLock);
 
     // If we've been displaced by another owner of the camera, then we can't do anything else
     if (!mVideo.isOpen()) {
-        ALOGW("ignoring startVideoStream call when camera has been lost.");
+        LOG(WARNING) << "Ignoring startVideoStream call when camera has been lost.";
         return EvsResult::OWNERSHIP_LOST;
     }
     if (mStream.get() != nullptr) {
-        ALOGE("ignoring startVideoStream call when a stream is already running.");
+        LOG(ERROR) << "Ignoring startVideoStream call when a stream is already running.";
         return EvsResult::STREAM_ALREADY_RUNNING;
     }
 
     // If the client never indicated otherwise, configure ourselves for a single streaming buffer
     if (mFramesAllowed < 1) {
         if (!setAvailableFrames_Locked(1)) {
-            ALOGE("Failed to start stream because we couldn't get a graphics buffer");
+            LOG(ERROR) << "Failed to start stream because we couldn't get a graphics buffer";
             return EvsResult::BUFFER_NOT_AVAILABLE;
         }
     }
@@ -158,8 +159,8 @@
     // Choose which image transfer function we need
     // Map from V4L2 to Android graphic buffer format
     const uint32_t videoSrcFormat = mVideo.getV4LFormat();
-    ALOGI("Configuring to accept %4.4s camera data and convert to 0x%X",
-          (char*)&videoSrcFormat, mFormat);
+    LOG(INFO) << "Configuring to accept " << (char*)&videoSrcFormat
+              << " camera data and convert to " << std::hex << mFormat;
 
     switch (mFormat) {
     case HAL_PIXEL_FORMAT_YCRCB_420_SP:
@@ -167,19 +168,19 @@
         case V4L2_PIX_FMT_NV21:     mFillBufferFromVideo = fillNV21FromNV21;    break;
         case V4L2_PIX_FMT_YUYV:     mFillBufferFromVideo = fillNV21FromYUYV;    break;
         default:
-            ALOGE("Unhandled camera output format %c%c%c%c (0x%8X)\n",
-                  ((char*)&videoSrcFormat)[0],
-                  ((char*)&videoSrcFormat)[1],
-                  ((char*)&videoSrcFormat)[2],
-                  ((char*)&videoSrcFormat)[3],
-                  videoSrcFormat);
+            LOG(ERROR) << "Unhandled camera output format: "
+                       << ((char*)&videoSrcFormat)[0]
+                       << ((char*)&videoSrcFormat)[1]
+                       << ((char*)&videoSrcFormat)[2]
+                       << ((char*)&videoSrcFormat)[3]
+                       << " (0x" << std::hex << videoSrcFormat << ")";
         }
         break;
     case HAL_PIXEL_FORMAT_RGBA_8888:
         switch (videoSrcFormat) {
         case V4L2_PIX_FMT_YUYV:     mFillBufferFromVideo = fillRGBAFromYUYV;    break;
         default:
-            ALOGE("Unhandled camera format %4.4s", (char*)&videoSrcFormat);
+            LOG(ERROR) << "Unhandled camera format " << (char*)&videoSrcFormat;
         }
         break;
     case HAL_PIXEL_FORMAT_YCBCR_422_I:
@@ -187,11 +188,11 @@
         case V4L2_PIX_FMT_YUYV:     mFillBufferFromVideo = fillYUYVFromYUYV;    break;
         case V4L2_PIX_FMT_UYVY:     mFillBufferFromVideo = fillYUYVFromUYVY;    break;
         default:
-            ALOGE("Unhandled camera format %4.4s", (char*)&videoSrcFormat);
+            LOG(ERROR) << "Unhandled camera format " << (char*)&videoSrcFormat;
         }
         break;
     default:
-        ALOGE("Unhandled output format %4.4s", (char*)&mFormat);
+        LOG(ERROR) << "Unhandled output format " << (char*)&mFormat;
     }
 
 
@@ -207,7 +208,7 @@
         // No need to hold onto this if we failed to start
         mStream = nullptr;
         mStream_1_1 = nullptr;
-        ALOGE("underlying camera start stream failed");
+        LOG(ERROR) << "Underlying camera start stream failed";
         return EvsResult::UNDERLYING_SERVICE_ERROR;
     }
 
@@ -216,7 +217,7 @@
 
 
 Return<void> EvsV4lCamera::doneWithFrame(const BufferDesc_1_0& buffer)  {
-    ALOGD("doneWithFrame");
+    LOG(DEBUG) << __FUNCTION__;
     doneWithFrame_impl(buffer.bufferId, buffer.memHandle);
 
     return Void();
@@ -224,7 +225,7 @@
 
 
 Return<void> EvsV4lCamera::stopVideoStream()  {
-    ALOGD("stopVideoStream");
+    LOG(DEBUG) << __FUNCTION__;
 
     // Tell the capture device to stop (and block until it does)
     mVideo.stopStream();
@@ -237,7 +238,7 @@
         event.aType = EvsEventType::STREAM_STOPPED;
         auto result = mStream_1_1->notify(event);
         if (!result.isOk()) {
-            ALOGE("Error delivering end of stream event");
+            LOG(ERROR) << "Error delivering end of stream event";
         }
 
         // Drop our reference to the client's stream receiver
@@ -250,7 +251,7 @@
         BufferDesc_1_0 nullBuff = {};
         auto result = mStream->deliverFrame(nullBuff);
         if (!result.isOk()) {
-            ALOGE("Error delivering end of stream marker");
+            LOG(ERROR) << "Error delivering end of stream marker";
         }
 
         // Drop our reference to the client's stream receiver
@@ -262,7 +263,7 @@
 
 
 Return<int32_t> EvsV4lCamera::getExtendedInfo(uint32_t /*opaqueIdentifier*/)  {
-    ALOGD("getExtendedInfo");
+    LOG(DEBUG) << __FUNCTION__;
     // Return zero by default as required by the spec
     return 0;
 }
@@ -270,12 +271,12 @@
 
 Return<EvsResult> EvsV4lCamera::setExtendedInfo(uint32_t /*opaqueIdentifier*/,
                                                 int32_t  /*opaqueValue*/)  {
-    ALOGD("setExtendedInfo");
+    LOG(DEBUG) << __FUNCTION__;
     std::lock_guard<std::mutex> lock(mAccessLock);
 
     // If we've been displaced by another owner of the camera, then we can't do anything else
     if (!mVideo.isOpen()) {
-        ALOGW("ignoring setExtendedInfo call when camera has been lost.");
+        LOG(WARNING) << "Ignoring setExtendedInfo call when camera has been lost.";
         return EvsResult::OWNERSHIP_LOST;
     }
 
@@ -286,7 +287,7 @@
 
 // Methods from ::android::hardware::automotive::evs::V1_1::IEvsCamera follow.
 Return<void> EvsV4lCamera::getCameraInfo_1_1(getCameraInfo_1_1_cb _hidl_cb) {
-    ALOGD("getCameraInfo_1_1");
+    LOG(DEBUG) << __FUNCTION__;
 
     // Send back our self description
     _hidl_cb(mDescription);
@@ -296,7 +297,7 @@
 
 Return<void> EvsV4lCamera::getPhysicalCameraInfo(const hidl_string& id,
                                                  getPhysicalCameraInfo_cb _hidl_cb) {
-    ALOGD("%s", __FUNCTION__);
+    LOG(DEBUG) << __FUNCTION__;
 
     // This method works exactly same as getCameraInfo_1_1() in EVS HW module.
     (void)id;
@@ -306,7 +307,7 @@
 
 
 Return<EvsResult> EvsV4lCamera::doneWithFrame_1_1(const hidl_vec<BufferDesc_1_1>& buffers)  {
-    ALOGD(__FUNCTION__);
+    LOG(DEBUG) << __FUNCTION__;
 
     for (auto&& buffer : buffers) {
         doneWithFrame_impl(buffer.bufferId, buffer.buffer.nativeHandle);
@@ -454,16 +455,16 @@
 
     // If we've been displaced by another owner of the camera, then we can't do anything else
     if (!mVideo.isOpen()) {
-        ALOGW("ignoring doneWithFrame call when camera has been lost.");
+        LOG(WARNING) << "Ignoring doneWithFrame call when camera has been lost.";
     } else {
         if (memHandle == nullptr) {
-            ALOGE("ignoring doneWithFrame called with null handle");
+            LOG(ERROR) << "Ignoring doneWithFrame called with null handle";
         } else if (bufferId >= mBuffers.size()) {
-            ALOGE("ignoring doneWithFrame called with invalid bufferId %d (max is %zu)",
-                  bufferId, mBuffers.size()-1);
+            LOG(ERROR) << "Ignoring doneWithFrame called with invalid bufferId " << bufferId
+                       << " (max is " << mBuffers.size() - 1 << ")";
         } else if (!mBuffers[bufferId].inUse) {
-            ALOGE("ignoring doneWithFrame called on frame %d which is already free",
-                  bufferId);
+            LOG(ERROR) << "Ignoring doneWithFrame called on frame " << bufferId
+                       << " which is already free";
         } else {
             // Mark the frame as available
             mBuffers[bufferId].inUse = false;
@@ -490,11 +491,11 @@
 
 bool EvsV4lCamera::setAvailableFrames_Locked(unsigned bufferCount) {
     if (bufferCount < 1) {
-        ALOGE("Ignoring request to set buffer count to zero");
+        LOG(ERROR) << "Ignoring request to set buffer count to zero";
         return false;
     }
     if (bufferCount > MAX_BUFFERS_IN_FLIGHT) {
-        ALOGE("Rejecting buffer request in excess of internal limit");
+        LOG(ERROR) << "Rejecting buffer request in excess of internal limit";
         return false;
     }
 
@@ -502,26 +503,26 @@
     if (mFramesAllowed < bufferCount) {
         // An increase is required
         unsigned needed = bufferCount - mFramesAllowed;
-        ALOGI("Allocating %d buffers for camera frames", needed);
+        LOG(INFO) << "Allocating " << needed << " buffers for camera frames";
 
         unsigned added = increaseAvailableFrames_Locked(needed);
         if (added != needed) {
             // If we didn't add all the frames we needed, then roll back to the previous state
-            ALOGE("Rolling back to previous frame queue size");
+            LOG(ERROR) << "Rolling back to previous frame queue size";
             decreaseAvailableFrames_Locked(added);
             return false;
         }
     } else if (mFramesAllowed > bufferCount) {
         // A decrease is required
         unsigned framesToRelease = mFramesAllowed - bufferCount;
-        ALOGI("Returning %d camera frame buffers", framesToRelease);
+        LOG(INFO) << "Returning " << framesToRelease << " camera frame buffers";
 
         unsigned released = decreaseAvailableFrames_Locked(framesToRelease);
         if (released != framesToRelease) {
             // This shouldn't happen with a properly behaving client because the client
             // should only make this call after returning sufficient outstanding buffers
             // to allow a clean resize.
-            ALOGE("Buffer queue shrink failed -- too many buffers currently in use?");
+            LOG(ERROR) << "Buffer queue shrink failed -- too many buffers currently in use?";
         }
     }
 
@@ -544,19 +545,18 @@
                                          mUsage,
                                          &memHandle, &pixelsPerLine, 0, "EvsV4lCamera");
         if (result != NO_ERROR) {
-            ALOGE("Error %d allocating %d x %d graphics buffer",
-                  result,
-                  mVideo.getWidth(),
-                  mVideo.getHeight());
+            LOG(ERROR) << "Error " << result << " allocating "
+                       << mVideo.getWidth() << " x " << mVideo.getHeight()
+                       << " graphics buffer";
             break;
         }
         if (!memHandle) {
-            ALOGE("We didn't get a buffer handle back from the allocator");
+            LOG(ERROR) << "We didn't get a buffer handle back from the allocator";
             break;
         }
         if (mStride) {
             if (mStride != pixelsPerLine) {
-                ALOGE("We did not expect to get buffers with different strides!");
+                LOG(ERROR) << "We did not expect to get buffers with different strides!";
             }
         } else {
             // Gralloc defines stride in terms of pixels per line
@@ -625,7 +625,7 @@
         // Are we allowed to issue another buffer?
         if (mFramesInUse >= mFramesAllowed) {
             // Can't do anything right now -- skip this frame
-            ALOGW("Skipped a frame because too many are in flight\n");
+            LOG(WARNING) << "Skipped a frame because too many are in flight";
         } else {
             // Identify an available buffer to fill
             for (idx = 0; idx < mBuffers.size(); idx++) {
@@ -638,7 +638,7 @@
             }
             if (idx >= mBuffers.size()) {
                 // This shouldn't happen since we already checked mFramesInUse vs mFramesAllowed
-                ALOGE("Failed to find an available buffer slot\n");
+                LOG(ERROR) << "Failed to find an available buffer slot";
             } else {
                 // We're going to make the frame busy
                 mBuffers[idx].inUse = true;
@@ -685,8 +685,9 @@
             // TODO(b/145457727): When EvsHidlTest::CameraToDisplayRoundTrip
             // test case was repeatedly executed, EVS occasionally fails to map
             // a buffer.
-            ALOGE("Camera failed to gain access to image buffer for writing - "
-                  "status: %s, error: %s", statusToString(result).c_str(), strerror(errno));
+            LOG(ERROR) << "Camera failed to gain access to image buffer for writing - "
+                       << "status: " << statusToString(result)
+                       << ", error: " << strerror(errno);
         }
 
         // Transfer the video image into the output buffer, making any needed
@@ -727,13 +728,14 @@
         }
 
         if (flag) {
-            ALOGD("Delivered %p as id %d", bufDesc_1_1.buffer.nativeHandle.getNativeHandle(), bufDesc_1_1.bufferId);
+            LOG(DEBUG) << "Delivered " << bufDesc_1_1.buffer.nativeHandle.getNativeHandle()
+                       << " as id " << bufDesc_1_1.bufferId;
         } else {
             // This can happen if the client dies and is likely unrecoverable.
             // To avoid consuming resources generating failing calls, we stop sending
             // frames.  Note, however, that the stream remains in the "STREAMING" state
             // until cleaned up on the main thread.
-            ALOGE("Frame delivery call failed in the transport layer.");
+            LOG(ERROR) << "Frame delivery call failed in the transport layer.";
 
             // Since we didn't actually deliver it, mark the frame as available
             std::lock_guard<std::mutex> lock(mAccessLock);
@@ -777,7 +779,7 @@
             v4l2cid = V4L2_CID_ZOOM_ABSOLUTE;
             break;
         default:
-            ALOGE("Camera parameter %u is unknown.", id);
+            LOG(ERROR) << "Camera parameter " << static_cast<unsigned>(id) << " is unknown.";
             return false;
     }
 
@@ -799,7 +801,7 @@
 sp<EvsV4lCamera> EvsV4lCamera::Create(const char *deviceName,
                                       unique_ptr<ConfigManager::CameraInfo> &camInfo,
                                       const Stream *requestedStreamCfg) {
-    ALOGI("Create %s", deviceName);
+    LOG(INFO) << "Create " << deviceName;
     sp<EvsV4lCamera> evsCamera = new EvsV4lCamera(deviceName, camInfo);
     if (evsCamera == nullptr) {
         return nullptr;
@@ -833,10 +835,10 @@
         }
 
         if (streamId >= 0) {
-            ALOGI("Try to open a video with width: %d, height: %d, format: %d",
-                   camInfo->streamConfigurations[streamId][1],
-                   camInfo->streamConfigurations[streamId][2],
-                   camInfo->streamConfigurations[streamId][3]);
+            LOG(INFO) << "Try to open a video with "
+                      << "width: " << camInfo->streamConfigurations[streamId][1]
+                      << ", height: " << camInfo->streamConfigurations[streamId][2]
+                      << ", format: " << camInfo->streamConfigurations[streamId][3];
             success =
                 evsCamera->mVideo.open(deviceName,
                                        camInfo->streamConfigurations[streamId][1],
@@ -848,11 +850,11 @@
     if (!success) {
         // Create a camera object with the default resolution and format
         // , HAL_PIXEL_FORMAT_RGBA_8888.
-        ALOGI("Open a video with default parameters");
+        LOG(INFO) << "Open a video with default parameters";
         success =
             evsCamera->mVideo.open(deviceName, kDefaultResolution[0], kDefaultResolution[1]);
         if (!success) {
-            ALOGE("Failed to open a video stream");
+            LOG(ERROR) << "Failed to open a video stream";
             return nullptr;
         }
     }
diff --git a/evs/sampleDriver/GlWrapper.cpp b/evs/sampleDriver/GlWrapper.cpp
index a8f7d2a..59d677d 100644
--- a/evs/sampleDriver/GlWrapper.cpp
+++ b/evs/sampleDriver/GlWrapper.cpp
@@ -112,7 +112,7 @@
     GLint compiled = 0;
     glGetShaderiv(shader, GL_COMPILE_STATUS, &compiled);
     if (!compiled) {
-        ALOGE("Error compiling shader\n");
+        LOG(ERROR) << "Error compiling shader";
 
         GLint size = 0;
         glGetShaderiv(shader, GL_INFO_LOG_LENGTH, &size);
@@ -121,7 +121,7 @@
             // Get and report the error message
             char *infoLog = (char*)malloc(size);
             glGetShaderInfoLog(shader, size, nullptr, infoLog);
-            ALOGE("  msg:\n%s\n", infoLog);
+            LOG(ERROR) << "  msg:" << std::endl << infoLog;
             free(infoLog);
         }
 
@@ -137,20 +137,20 @@
 static GLuint buildShaderProgram(const char* vtxSrc, const char* pxlSrc) {
     GLuint program = glCreateProgram();
     if (program == 0) {
-        ALOGE("Failed to allocate program object\n");
+        LOG(ERROR) << "Failed to allocate program object";
         return 0;
     }
 
     // Compile the shaders and bind them to this program
     GLuint vertexShader = loadShader(GL_VERTEX_SHADER, vtxSrc);
     if (vertexShader == 0) {
-        ALOGE("Failed to load vertex shader\n");
+        LOG(ERROR) << "Failed to load vertex shader";
         glDeleteProgram(program);
         return 0;
     }
     GLuint pixelShader = loadShader(GL_FRAGMENT_SHADER, pxlSrc);
     if (pixelShader == 0) {
-        ALOGE("Failed to load pixel shader\n");
+        LOG(ERROR) << "Failed to load pixel shader";
         glDeleteProgram(program);
         glDeleteShader(vertexShader);
         return 0;
@@ -164,7 +164,7 @@
     glGetProgramiv(program, GL_LINK_STATUS, &linked);
     if (!linked)
     {
-        ALOGE("Error linking program.\n");
+        LOG(ERROR) << "Error linking program";
         GLint size = 0;
         glGetProgramiv(program, GL_INFO_LOG_LENGTH, &size);
         if (size > 0)
@@ -172,7 +172,7 @@
             // Get and report the error message
             char *infoLog = (char*)malloc(size);
             glGetProgramInfoLog(program, size, nullptr, infoLog);
-            ALOGE("  msg:  %s\n", infoLog);
+            LOG(ERROR) << "  msg:  " << infoLog;
             free(infoLog);
         }
 
@@ -189,10 +189,10 @@
 // Main entry point
 bool GlWrapper::initialize(sp<IAutomotiveDisplayProxyService> pWindowProxy,
                            uint64_t displayId) {
-    ALOGD("%s", __FUNCTION__);
+    LOG(DEBUG) << __FUNCTION__;
 
     if (pWindowProxy == nullptr) {
-        ALOGE("Could not get IAutomotiveDisplayProxyService.");
+        LOG(ERROR) << "Could not get IAutomotiveDisplayProxyService.";
         return false;
     }
 
@@ -209,24 +209,24 @@
             std::swap(mWidth, mHeight);
         }
 
-        ALOGD("Display resolution is %d x %d", mWidth, mHeight);
+        LOG(DEBUG) << "Display resolution is " << mWidth << " x " << mHeight;
     });
 
     mGfxBufferProducer = pWindowProxy->getIGraphicBufferProducer(displayId);
     if (mGfxBufferProducer == nullptr) {
-        ALOGE("Failed to get IGraphicBufferProducer from IAutomotiveDisplayProxyService.");
+        LOG(ERROR) << "Failed to get IGraphicBufferProducer from IAutomotiveDisplayProxyService.";
         return false;
     }
 
     mSurfaceHolder = getSurfaceFromHGBP(mGfxBufferProducer);
     if (mSurfaceHolder == nullptr) {
-        ALOGE("Failed to get a Surface from HGBP.");
+        LOG(ERROR) << "Failed to get a Surface from HGBP.";
         return false;
     }
 
     mWindow = getNativeWindow(mSurfaceHolder.get());
     if (mWindow == nullptr) {
-        ALOGE("Failed to get a native window from Surface.");
+        LOG(ERROR) << "Failed to get a native window from Surface.";
         return false;
     }
 
@@ -234,14 +234,14 @@
     // Set up our OpenGL ES context associated with the default display
     mDisplay = eglGetDisplay(EGL_DEFAULT_DISPLAY);
     if (mDisplay == EGL_NO_DISPLAY) {
-        ALOGE("Failed to get egl display");
+        LOG(ERROR) << "Failed to get egl display";
         return false;
     }
 
     EGLint major = 3;
     EGLint minor = 0;
     if (!eglInitialize(mDisplay, &major, &minor)) {
-        ALOGE("Failed to initialize EGL: %s", getEGLError());
+        LOG(ERROR) << "Failed to initialize EGL: " << getEGLError();
         return false;
     }
 
@@ -260,14 +260,14 @@
     EGLint numConfigs = -1;
     eglChooseConfig(mDisplay, config_attribs, &egl_config, 1, &numConfigs);
     if (numConfigs != 1) {
-        ALOGE("Didn't find a suitable format for our display window");
+        LOG(ERROR) << "Didn't find a suitable format for our display window";
         return false;
     }
 
     // Create the EGL render target surface
     mSurface = eglCreateWindowSurface(mDisplay, egl_config, mWindow, nullptr);
     if (mSurface == EGL_NO_SURFACE) {
-        ALOGE("eglCreateWindowSurface failed.");
+        LOG(ERROR) << "eglCreateWindowSurface failed.";
         return false;
     }
 
@@ -277,14 +277,14 @@
     const EGLint context_attribs[] = {EGL_CONTEXT_CLIENT_VERSION, 3, EGL_NONE};
     mContext = eglCreateContext(mDisplay, egl_config, EGL_NO_CONTEXT, context_attribs);
     if (mContext == EGL_NO_CONTEXT) {
-        ALOGE("Failed to create OpenGL ES Context: %s", getEGLError());
+        LOG(ERROR) << "Failed to create OpenGL ES Context: " << getEGLError();
         return false;
     }
 
 
     // Activate our render target for drawing
     if (!eglMakeCurrent(mDisplay, mSurface, mSurface, mContext)) {
-        ALOGE("Failed to make the OpenGL ES Context current: %s", getEGLError());
+        LOG(ERROR) << "Failed to make the OpenGL ES Context current: " << getEGLError();
         return false;
     }
 
@@ -292,14 +292,14 @@
     // Create the shader program for our simple pipeline
     mShaderProgram = buildShaderProgram(vertexShaderSource, pixelShaderSource);
     if (!mShaderProgram) {
-        ALOGE("Failed to build shader program: %s", getEGLError());
+        LOG(ERROR) << "Failed to build shader program: " << getEGLError();
         return false;
     }
 
     // Create a GL texture that will eventually wrap our externally created texture surface(s)
     glGenTextures(1, &mTextureMap);
     if (mTextureMap <= 0) {
-        ALOGE("Didn't get a texture handle allocated: %s", getEGLError());
+        LOG(ERROR) << "Didn't get a texture handle allocated: " << getEGLError();
         return false;
     }
 
@@ -339,7 +339,7 @@
     if (pWindowProxy != nullptr) {
         pWindowProxy->showWindow(id);
     } else {
-        ALOGE("IAutomotiveDisplayProxyService is not available.");
+        LOG(ERROR) << "IAutomotiveDisplayProxyService is not available.";
     }
 }
 
@@ -348,7 +348,7 @@
     if (pWindowProxy != nullptr) {
         pWindowProxy->hideWindow(id);
     } else {
-        ALOGE("IAutomotiveDisplayProxyService is not available.");
+        LOG(ERROR) << "IAutomotiveDisplayProxyService is not available.";
     }
 }
 
@@ -389,7 +389,7 @@
                 false   /* keep ownership */
         );
         if (pGfxBuffer.get() == nullptr) {
-            ALOGE("Failed to allocate GraphicBuffer to wrap our native handle");
+            LOG(ERROR) << "Failed to allocate GraphicBuffer to wrap our native handle";
             return false;
         }
 
@@ -402,7 +402,7 @@
                                       cbuf,
                                       eglImageAttributes);
         if (mKHRimage == EGL_NO_IMAGE_KHR) {
-            ALOGE("error creating EGLImage: %s", getEGLError());
+            LOG(ERROR) << "Error creating EGLImage: " << getEGLError();
             return false;
         }
 
diff --git a/evs/sampleDriver/GlWrapper.h b/evs/sampleDriver/GlWrapper.h
index 2d99540..e883b20 100644
--- a/evs/sampleDriver/GlWrapper.h
+++ b/evs/sampleDriver/GlWrapper.h
@@ -26,6 +26,7 @@
 
 #include <android/frameworks/automotive/display/1.0/IAutomotiveDisplayProxyService.h>
 #include <android/hardware/automotive/evs/1.1/types.h>
+#include <android-base/logging.h>
 #include <bufferqueueconverter/BufferQueueConverter.h>
 
 
diff --git a/evs/sampleDriver/VideoCapture.cpp b/evs/sampleDriver/VideoCapture.cpp
index 9be55cc..070e33f 100644
--- a/evs/sampleDriver/VideoCapture.cpp
+++ b/evs/sampleDriver/VideoCapture.cpp
@@ -17,12 +17,14 @@
 #include <stdlib.h>
 #include <error.h>
 #include <errno.h>
+#include <iomanip>
 #include <memory.h>
 #include <fcntl.h>
 #include <unistd.h>
 #include <sys/ioctl.h>
 #include <sys/mman.h>
-#include <log/log.h>
+
+#include <android-base/logging.h>
 
 #include "assert.h"
 
@@ -38,7 +40,7 @@
 //    int mDeviceFd = open(deviceName, O_RDWR | O_NONBLOCK, 0);
     mDeviceFd = ::open(deviceName, O_RDWR, 0);
     if (mDeviceFd < 0) {
-        ALOGE("failed to open device %s (%d = %s)", deviceName, errno, strerror(errno));
+        PLOG(ERROR) << "failed to open device " << deviceName;
         return false;
     }
 
@@ -46,35 +48,32 @@
     {
         int result = ioctl(mDeviceFd, VIDIOC_QUERYCAP, &caps);
         if (result  < 0) {
-            ALOGE("failed to get device caps for %s (%d = %s)", deviceName, errno, strerror(errno));
+            PLOG(ERROR) << "failed to get device caps for " << deviceName;
             return false;
         }
     }
 
     // Report device properties
-    ALOGI("Open Device: %s (fd=%d)", deviceName, mDeviceFd);
-    ALOGI("  Driver: %s", caps.driver);
-    ALOGI("  Card: %s", caps.card);
-    ALOGI("  Version: %u.%u.%u",
-            (caps.version >> 16) & 0xFF,
-            (caps.version >> 8)  & 0xFF,
-            (caps.version)       & 0xFF);
-    ALOGI("  All Caps: %08X", caps.capabilities);
-    ALOGI("  Dev Caps: %08X", caps.device_caps);
+    LOG(INFO) << "Open Device: " << deviceName << " (fd = " << mDeviceFd << ")";
+    LOG(INFO) << "  Driver: " << caps.driver;
+    LOG(INFO) << "  Card: " << caps.card;
+    LOG(INFO) << "  Version: " << ((caps.version >> 16) & 0xFF)
+                               << "." << ((caps.version >> 8) & 0xFF)
+                               << "." << (caps.version & 0xFF);
+    LOG(INFO) << "  All Caps: " << std::hex << std::setfill('0') << std::setw(8) << caps.capabilities;
+    LOG(INFO) << "  Dev Caps: " << std::hex << caps.device_caps;
 
     // Enumerate the available capture formats (if any)
-    ALOGI("Supported capture formats:");
+    LOG(INFO) << "Supported capture formats:";
     v4l2_fmtdesc formatDescriptions;
     formatDescriptions.type = V4L2_BUF_TYPE_VIDEO_CAPTURE;
     for (int i=0; true; i++) {
         formatDescriptions.index = i;
         if (ioctl(mDeviceFd, VIDIOC_ENUM_FMT, &formatDescriptions) == 0) {
-            ALOGI("  %2d: %s 0x%08X 0x%X",
-                   i,
-                   formatDescriptions.description,
-                   formatDescriptions.pixelformat,
-                   formatDescriptions.flags
-            );
+            LOG(INFO) << "  " << std::setw(2) << i
+                      << ": " << formatDescriptions.description
+                      << " " << std::hex << std::setw(8) << formatDescriptions.pixelformat
+                      << " " << std::hex << formatDescriptions.flags;
         } else {
             // No more formats available
             break;
@@ -85,7 +84,7 @@
     if (!(caps.capabilities & V4L2_CAP_VIDEO_CAPTURE) ||
         !(caps.capabilities & V4L2_CAP_STREAMING)) {
         // Can't do streaming capture.
-        ALOGE("Streaming capture not supported by %s.", deviceName);
+        LOG(ERROR) << "Streaming capture not supported by " << deviceName;
         return false;
     }
 
@@ -95,14 +94,15 @@
     format.fmt.pix.pixelformat = V4L2_PIX_FMT_UYVY;
     format.fmt.pix.width = width;
     format.fmt.pix.height = height;
-    ALOGI("Requesting format %c%c%c%c (0x%08X)",
-          ((char*)&format.fmt.pix.pixelformat)[0],
-          ((char*)&format.fmt.pix.pixelformat)[1],
-          ((char*)&format.fmt.pix.pixelformat)[2],
-          ((char*)&format.fmt.pix.pixelformat)[3],
-          format.fmt.pix.pixelformat);
+    LOG(INFO) << "Requesting format: "
+              << ((char*)&format.fmt.pix.pixelformat)[0]
+              << ((char*)&format.fmt.pix.pixelformat)[1]
+              << ((char*)&format.fmt.pix.pixelformat)[2]
+              << ((char*)&format.fmt.pix.pixelformat)[3]
+              << "(" << std::hex << std::setw(8) << format.fmt.pix.pixelformat << ")";
+
     if (ioctl(mDeviceFd, VIDIOC_S_FMT, &format) < 0) {
-        ALOGE("VIDIOC_S_FMT: %s", strerror(errno));
+        PLOG(ERROR) << "VIDIOC_S_FMT failed";
     }
 
     // Report the current output format
@@ -114,14 +114,12 @@
         mHeight = format.fmt.pix.height;
         mStride = format.fmt.pix.bytesperline;
 
-        ALOGI("Current output format:  fmt=0x%X, %dx%d, pitch=%d",
-               format.fmt.pix.pixelformat,
-               format.fmt.pix.width,
-               format.fmt.pix.height,
-               format.fmt.pix.bytesperline
-        );
+        LOG(INFO) << "Current output format:  "
+                  << "fmt=0x" << std::hex << format.fmt.pix.pixelformat
+                  << ", " << std::dec << format.fmt.pix.width << " x " << format.fmt.pix.height
+                  << ", pitch=" << format.fmt.pix.bytesperline;
     } else {
-        ALOGE("VIDIOC_G_FMT: %s", strerror(errno));
+        PLOG(ERROR) << "VIDIOC_G_FMT failed";
         return false;
     }
 
@@ -135,12 +133,12 @@
 
 
 void VideoCapture::close() {
-    ALOGD("VideoCapture::close");
+    LOG(DEBUG) << __FUNCTION__;
     // Stream should be stopped first!
     assert(mRunMode == STOPPED);
 
     if (isOpen()) {
-        ALOGD("closing video device file handle %d", mDeviceFd);
+        LOG(DEBUG) << "closing video device file handle " << mDeviceFd;
         ::close(mDeviceFd);
         mDeviceFd = -1;
     }
@@ -152,7 +150,7 @@
     int prevRunMode = mRunMode.fetch_or(RUN);
     if (prevRunMode & RUN) {
         // The background thread is already running, so we can't start a new stream
-        ALOGE("Already in RUN state, so we can't start a new streaming thread");
+        LOG(ERROR) << "Already in RUN state, so we can't start a new streaming thread";
         return false;
     }
 
@@ -162,7 +160,7 @@
     bufrequest.memory = V4L2_MEMORY_MMAP;
     bufrequest.count = 1;
     if (ioctl(mDeviceFd, VIDIOC_REQBUFS, &bufrequest) < 0) {
-        ALOGE("VIDIOC_REQBUFS: %s", strerror(errno));
+        PLOG(ERROR) << "VIDIOC_REQBUFS failed";
         return false;
     }
 
@@ -172,14 +170,14 @@
     mBufferInfo.memory   = V4L2_MEMORY_MMAP;
     mBufferInfo.index    = 0;
     if (ioctl(mDeviceFd, VIDIOC_QUERYBUF, &mBufferInfo) < 0) {
-        ALOGE("VIDIOC_QUERYBUF: %s", strerror(errno));
+        PLOG(ERROR) << "VIDIOC_QUERYBUF failed";
         return false;
     }
 
-    ALOGI("Buffer description:");
-    ALOGI("  offset: %d", mBufferInfo.m.offset);
-    ALOGI("  length: %d", mBufferInfo.length);
-    ALOGI("  flags : 0x%X", mBufferInfo.flags);
+    LOG(INFO) << "Buffer description:";
+    LOG(INFO) << "  offset: " << mBufferInfo.m.offset;
+    LOG(INFO) << "  length: " << mBufferInfo.length;
+    LOG(INFO) << "  flags : " << std::hex << mBufferInfo.flags;
 
     // Get a pointer to the buffer contents by mapping into our address space
     mPixelBuffer = mmap(
@@ -191,22 +189,22 @@
             mBufferInfo.m.offset
     );
     if( mPixelBuffer == MAP_FAILED) {
-        ALOGE("mmap: %s", strerror(errno));
+        PLOG(ERROR) << "mmap() failed";
         return false;
     }
     memset(mPixelBuffer, 0, mBufferInfo.length);
-    ALOGI("Buffer mapped at %p", mPixelBuffer);
+    LOG(INFO) << "Buffer mapped at " << mPixelBuffer;
 
     // Queue the first capture buffer
     if (ioctl(mDeviceFd, VIDIOC_QBUF, &mBufferInfo) < 0) {
-        ALOGE("VIDIOC_QBUF: %s", strerror(errno));
+        PLOG(ERROR) << "VIDIOC_QBUF failed";
         return false;
     }
 
     // Start the video stream
     int type = mBufferInfo.type;
     if (ioctl(mDeviceFd, VIDIOC_STREAMON, &type) < 0) {
-        ALOGE("VIDIOC_STREAMON: %s", strerror(errno));
+        PLOG(ERROR) << "VIDIOC_STREAMON failed";
         return false;
     }
 
@@ -216,7 +214,7 @@
     // Fire up a thread to receive and dispatch the video frames
     mCaptureThread = std::thread([this](){ collectFrames(); });
 
-    ALOGD("Stream started.");
+    LOG(DEBUG) << "Stream started.";
     return true;
 }
 
@@ -228,7 +226,8 @@
         // The background thread wasn't running, so set the flag back to STOPPED
         mRunMode = STOPPED;
     } else if (prevRunMode & STOPPING) {
-        ALOGE("stopStream called while stream is already stopping.  Reentrancy is not supported!");
+        LOG(ERROR) << "stopStream called while stream is already stopping.  "
+                   << "Reentrancy is not supported!";
         return;
     } else {
         // Block until the background thread is stopped
@@ -239,10 +238,10 @@
         // Stop the underlying video stream (automatically empties the buffer queue)
         int type = mBufferInfo.type;
         if (ioctl(mDeviceFd, VIDIOC_STREAMOFF, &type) < 0) {
-            ALOGE("VIDIOC_STREAMOFF: %s", strerror(errno));
+            PLOG(ERROR) << "VIDIOC_STREAMOFF failed";
         }
 
-        ALOGD("Capture thread stopped.");
+        LOG(DEBUG) << "Capture thread stopped.";
     }
 
     // Unmap the buffers we allocated
@@ -271,7 +270,7 @@
 
     // Requeue the buffer to capture the next available frame
     if (ioctl(mDeviceFd, VIDIOC_QBUF, &mBufferInfo) < 0) {
-        ALOGE("VIDIOC_QBUF: %s", strerror(errno));
+        PLOG(ERROR) << "VIDIOC_QBUF failed";
         return false;
     }
 
@@ -285,7 +284,7 @@
     while (mRunMode == RUN) {
         // Wait for a buffer to be ready
         if (ioctl(mDeviceFd, VIDIOC_DQBUF, &mBufferInfo) < 0) {
-            ALOGE("VIDIOC_DQBUF: %s", strerror(errno));
+            PLOG(ERROR) << "VIDIOC_DQBUF failed";
             break;
         }
 
@@ -298,7 +297,7 @@
     }
 
     // Mark ourselves stopped
-    ALOGD("VideoCapture thread ending");
+    LOG(DEBUG) << "VideoCapture thread ending";
     mRunMode = STOPPED;
 }
 
@@ -306,8 +305,8 @@
 int VideoCapture::setParameter(v4l2_control& control) {
     int status = ioctl(mDeviceFd, VIDIOC_S_CTRL, &control);
     if (status < 0) {
-        ALOGE("Failed to program a parameter value (id: 0x%X): %s",
-              control.id, strerror(errno));
+        PLOG(ERROR) << "Failed to program a parameter value "
+                    << "id = " << std::hex << control.id;
     }
 
     return status;
@@ -317,8 +316,9 @@
 int VideoCapture::getParameter(v4l2_control& control) {
     int status = ioctl(mDeviceFd, VIDIOC_G_CTRL, &control);
     if (status < 0) {
-        ALOGE("Failed to read a parameter value (fd: 0x%X, id: 0x%X): %s",
-              mDeviceFd, control.id, strerror(errno));
+        PLOG(ERROR) << "Failed to read a parameter value"
+                    << " fd = " << std::hex << mDeviceFd
+                    << " id = " << control.id;
     }
 
     return status;
diff --git a/evs/sampleDriver/service.cpp b/evs/sampleDriver/service.cpp
index fef417e..d4b7617 100644
--- a/evs/sampleDriver/service.cpp
+++ b/evs/sampleDriver/service.cpp
@@ -42,14 +42,19 @@
 
 
 int main() {
-    ALOGI("EVS Hardware Enumerator service is starting");
+    LOG(INFO) << "EVS Hardware Enumerator service is starting";
 
-    android::sp<IAutomotiveDisplayProxyService> carWindowService = IAutomotiveDisplayProxyService::getService("default");
+    android::sp<IAutomotiveDisplayProxyService> carWindowService =
+        IAutomotiveDisplayProxyService::getService("default");
     if (carWindowService == nullptr) {
-        ALOGE("Cannot use AutomotiveDisplayProxyService.  Exiting.");
+        LOG(ERROR) << "Cannot use AutomotiveDisplayProxyService.  Exiting.";
         return 1;
     }
 
+#ifdef EVS_DEBUG
+    SetMinimumLogSeverity(android::base::DEBUG);
+#endif
+
     // Start a thread to listen video device addition events.
     std::atomic<bool> running { true };
     std::thread ueventHandler(EvsEnumerator::EvsUeventThread, std::ref(running));
@@ -62,10 +67,11 @@
     // they will be killed (their thread pool will throw an exception).
     status_t status = service->registerAsService(kEnumeratorServiceName);
     if (status == OK) {
-        ALOGD("%s is ready.", kEnumeratorServiceName);
+        LOG(DEBUG) << kEnumeratorServiceName << " is ready.";
         joinRpcThreadpool();
     } else {
-        ALOGE("Could not register service %s (%d).", kEnumeratorServiceName, status);
+        LOG(ERROR) << "Could not register service " << kEnumeratorServiceName
+                   << " (" << status << ").";
     }
 
     // Exit a uevent handler thread.
@@ -75,6 +81,6 @@
     }
 
     // In normal operation, we don't expect the thread pool to exit
-    ALOGE("EVS Hardware Enumerator is shutting down");
+    LOG(ERROR) << "EVS Hardware Enumerator is shutting down";
     return 1;
 }
diff --git a/evs/sepolicy/evs_driver.te b/evs/sepolicy/evs_driver.te
index b051569..9bb7b97 100644
--- a/evs/sepolicy/evs_driver.te
+++ b/evs/sepolicy/evs_driver.te
@@ -25,4 +25,5 @@
 
 # Allow the driver to use automotive display proxy service
 allow hal_evs_driver automotive_display_service_server:binder call;
+allow hal_evs_driver fwk_automotive_display_hwservice:hwservice_manager find;
 
diff --git a/service/src/com/android/car/CarFeatureController.java b/service/src/com/android/car/CarFeatureController.java
index 0418967..9f0d9e3 100644
--- a/service/src/com/android/car/CarFeatureController.java
+++ b/service/src/com/android/car/CarFeatureController.java
@@ -145,6 +145,11 @@
         mHandlerThread = new HandlerThread(TAG);
         mHandlerThread.start();
         mHandler = new Handler(mHandlerThread.getLooper());
+        if (!checkMandatoryFeaturesLocked()) { // mandatory feature missing, force default config
+            mEnabledFeatures.clear();
+            mEnabledFeatures.addAll(MANDATORY_FEATURES);
+            shouldLoadDefaultConfig = true;
+        }
         // Separate if to use this as backup for failure in loadFromConfigFileLocked()
         if (shouldLoadDefaultConfig) {
             parseDefaultConfig();
@@ -180,6 +185,17 @@
         return mEnabledFeatures.contains(featureName);
     }
 
+    private boolean checkMandatoryFeaturesLocked() {
+        // Ensure that mandatory features are always there
+        for (String feature: MANDATORY_FEATURES) {
+            if (!mEnabledFeatures.contains(feature)) {
+                Log.e(TAG, "Mandatory feature missing in mEnabledFeatures:" + feature);
+                return false;
+            }
+        }
+        return true;
+    }
+
     @FeaturerRequestEnum
     private int checkFeatureExisting(String featureName) {
         if (MANDATORY_FEATURES.contains(featureName)) {
diff --git a/service/src/com/android/car/hal/UserHalService.java b/service/src/com/android/car/hal/UserHalService.java
index 7b26e82..6ec716a 100644
--- a/service/src/com/android/car/hal/UserHalService.java
+++ b/service/src/com/android/car/hal/UserHalService.java
@@ -39,8 +39,8 @@
 import android.util.Pair;
 import android.util.Slog;
 import android.util.SparseArray;
-import android.util.SparseIntArray;
 
+import com.android.car.hal.UserHalService.HalCallback;
 import com.android.internal.annotations.GuardedBy;
 import com.android.internal.util.Preconditions;
 
@@ -52,8 +52,6 @@
 import java.util.List;
 import java.util.Objects;
 import java.util.Optional;
-import java.util.concurrent.CountDownLatch;
-import java.util.concurrent.TimeUnit;
 
 /**
  * Service used to integrate the OEM's custom user management with Android's.
@@ -67,15 +65,6 @@
     // TODO(b/150413515): STOPSHIP - change to false before R is launched
     private static final boolean DBG = true;
 
-    private static final int REQUEST_TYPE_GET_INITIAL_INFO = 1;
-
-    /** @hide */
-    @IntDef(prefix = { "REQUEST_TYPE_" }, value = {
-            REQUEST_TYPE_GET_INITIAL_INFO
-    })
-    @Retention(RetentionPolicy.SOURCE)
-    @interface RequestType{}
-
     private final Object mLock = new Object();
 
     private final VehicleHal mHal;
@@ -98,14 +87,8 @@
     /**
      * Map of callbacks by request id.
      */
-    @GuardedBy("mHandler")
-    private SparseArray<Pair<Class<?>, HalCallback<?>>> mPendingCallbacks = new SparseArray<>();
-
-    /**
-     * Map of request ids by {@link RequestType}.
-     */
     @GuardedBy("mLock")
-    private SparseIntArray mPendingRequests = new SparseIntArray();
+    private SparseArray<Pair<Class<?>, HalCallback<?>>> mPendingCallbacks = new SparseArray<>();
 
     public UserHalService(VehicleHal hal) {
         mHal = hal;
@@ -158,7 +141,7 @@
     public Collection<VehiclePropConfig> takeSupportedProperties(
             Collection<VehiclePropConfig> allProperties) {
         boolean supported = false;
-        // TODO(b/146207078): increase capacity once it supports more
+        // TODO(b/150413515): increase capacity once it supports more
         SparseArray<VehiclePropConfig> properties = new SparseArray<>(1);
         ArrayList<VehiclePropConfig> taken = new ArrayList<>();
         for (VehiclePropConfig config : allProperties) {
@@ -246,7 +229,7 @@
         if (DBG) Log.d(TAG, "getInitialInfo(" + requestType + ")");
         Preconditions.checkArgumentPositive(timeoutMs, "timeout must be positive");
         Objects.requireNonNull(usersInfo);
-        // TODO(b/146207078): use helper method to convert request to prop value and check usersInfo
+        // TODO(b/150413515): use helper method to convert request to prop value and check usersInfo
         // is valid
         Objects.requireNonNull(callback);
 
@@ -255,9 +238,9 @@
         int requestId;
         synchronized (mLock) {
             checkSupportedLocked();
-            if (hasPendingRequestLocked(REQUEST_TYPE_GET_INITIAL_INFO, callback)) return;
-            requestId = addPendingRequestLocked(REQUEST_TYPE_GET_INITIAL_INFO);
-            // TODO(b/146207078): use helper method to convert request to prop value
+            if (hasPendingRequestLocked(InitialUserInfoResponse.class, callback)) return;
+            requestId = mNextRequestId++;
+            // TODO(b/150413515): use helper method to convert request to prop value
             propRequest.value.int32Values.add(requestId);
             propRequest.value.int32Values.add(requestType);
             propRequest.value.int32Values.add(usersInfo.currentUser.userId);
@@ -269,10 +252,8 @@
                 propRequest.value.int32Values.add(userInfo.flags);
             }
             setTimestamp(propRequest);
+            addPendingRequestLocked(requestId, InitialUserInfoResponse.class, callback);
         }
-        mHandler.sendMessage(obtainMessage(
-                UserHalService::handleAddPendingRequest, this, requestId,
-                InitialUserInfoResponse.class, callback));
 
         mHandler.sendMessageDelayed(obtainMessage(
                 UserHalService::handleCheckIfRequestTimedOut, this, requestId).setWhat(requestId),
@@ -286,7 +267,8 @@
         }
     }
 
-    private void handleAddPendingRequest(int requestId, @NonNull Class<?> responseClass,
+    @GuardedBy("mLock")
+    private void addPendingRequestLocked(int requestId, @NonNull Class<?> responseClass,
             @NonNull HalCallback<?> callback) {
         if (DBG) {
             Log.d(TAG, "adding pending callback (of type " + responseClass.getName()
@@ -296,30 +278,21 @@
     }
 
     /**
-     * Checks if there is a pending request of type {@code requestType}, calling {@code callback}
+     * Checks if there is a pending request of type {@code requestClass}, calling {@code callback}
      * with {@link HalCallback#STATUS_CONCURRENT_OPERATION} when there is.
      */
-    private boolean hasPendingRequestLocked(@RequestType int requestType,
-            @NonNull HalCallback<?> callback) {
-        int index = mPendingRequests.indexOfKey(requestType);
-        if (index < 0) return false;
-
-        int requestId = mPendingRequests.valueAt(index);
-        Log.w(TAG, "Already have pending request of type " + requestTypeToString(requestType)
-                + ": id=" + requestId);
-
-        callback.onResponse(HalCallback.STATUS_CONCURRENT_OPERATION, null);
-        return true;
-    }
-
-    /**
-     * Adds a new pending request to the queue, returning its request id.
-     */
     @GuardedBy("mLock")
-    private int addPendingRequestLocked(@RequestType int requestType) {
-        int requestId = mNextRequestId++;
-        mPendingRequests.put(requestType, requestId);
-        return requestId;
+    private boolean hasPendingRequestLocked(@NonNull Class<?> requestClass,
+            @NonNull HalCallback<?> callback) {
+        for (int i = 0; i < mPendingCallbacks.size(); i++) {
+            Pair<Class<?>, HalCallback<?>> pair = mPendingCallbacks.valueAt(i);
+            if (pair.first == requestClass) {
+                Log.w(TAG, "Already have pending request of type " + requestClass);
+                callback.onResponse(HalCallback.STATUS_CONCURRENT_OPERATION, null);
+                return true;
+            }
+        }
+        return false;
     }
 
     /**
@@ -328,11 +301,13 @@
     private void handleRemovePendingRequest(int requestId) {
         if (DBG) Log.d(TAG, "Removing pending request #" + requestId);
         mHandler.removeMessages(requestId);
-        mPendingCallbacks.remove(requestId);
+        synchronized (mLock) {
+            mPendingCallbacks.remove(requestId);
+        }
     }
 
     private void handleCheckIfRequestTimedOut(int requestId) {
-        Pair<Class<?>, HalCallback<?>> pair = mPendingCallbacks.get(requestId);
+        Pair<Class<?>, HalCallback<?>> pair = getPendingCallback(requestId);
         if (pair == null) return;
 
         Log.w(TAG, "Request #" + requestId + " timed out");
@@ -340,9 +315,15 @@
         pair.second.onResponse(HalCallback.STATUS_HAL_RESPONSE_TIMEOUT, null);
     }
 
-    @GuardedBy("mHandle")
+    @Nullable
+    private Pair<Class<?>, HalCallback<?>> getPendingCallback(int requestId) {
+        synchronized (mLock) {
+            return mPendingCallbacks.get(requestId);
+        }
+    }
+
     private void handleOnInitialUserInfoResponse(VehiclePropValue value) {
-        // TODO(b/146207078): record (for dumping()) the last N responses.
+        // TODO(b/150413515): record (for dumping()) the last N responses.
         int requestId = value.value.int32Values.get(0);
         HalCallback<InitialUserInfoResponse> callback = handleGetPendingCallback(requestId,
                 InitialUserInfoResponse.class);
@@ -352,7 +333,7 @@
         }
         handleRemovePendingRequest(requestId);
         InitialUserInfoResponse response = new InitialUserInfoResponse();
-        // TODO(b/146207078): use helper method to convert prop value to proper response
+        // TODO(b/150413515): use helper method to convert prop value to proper response
         response.requestId = requestId;
         response.action = value.value.int32Values.get(1);
         switch (response.action) {
@@ -379,15 +360,14 @@
         callback.onResponse(HalCallback.STATUS_OK, response);
     }
 
-    @GuardedBy("mHandle")
     private <T> HalCallback<T> handleGetPendingCallback(int requestId, Class<T> clazz) {
-        Pair<Class<?>, HalCallback<?>> pair = mPendingCallbacks.get(requestId);
+        Pair<Class<?>, HalCallback<?>> pair = getPendingCallback(requestId);
         if (pair == null) return null;
 
         if (pair.first != clazz) {
             Slog.e(TAG, "Invalid callback class for request " + requestId + ": expected" + clazz
                     + ", but got is " + pair.first);
-            // TODO(b/146207078): add unit test for this scenario once it supports other properties
+            // TODO(b/150413515): add unit test for this scenario once it supports other properties
             return null;
         }
         @SuppressWarnings("unchecked")
@@ -419,28 +399,12 @@
                 writer.printf("%s%s\n", indent, mProperties.valueAt(i));
             }
             writer.printf("next request id: %d\n", mNextRequestId);
-            int size = mPendingRequests.size();
-            if (size == 0) {
-                writer.println("no pending requests");
-            } else {
-                writer.printf("%d pending requests\n", size);
-                for (int i = 0; i < size; i++) {
-                    String type = requestTypeToString(mPendingRequests.keyAt(i));
-                    int requestId = mPendingRequests.valueAt(i);
-                    writer.printf("%s#%d: %s=req_id(%d)\n", indent, i, type, requestId);
-                }
-            }
-        }
 
-        CountDownLatch latch = new CountDownLatch(1);
-        mHandler.sendMessage(obtainMessage(UserHalService::handleDump, this, writer, latch));
-        try {
-            if (!latch.await(500, TimeUnit.SECONDS)) {
-                writer.println("\nTIMED OUT");
+            if (mPendingCallbacks.size() == 0) {
+                writer.println("no pending callbacks");
+            } else {
+                writer.printf("pending callbacks: %s\n", mPendingCallbacks);
             }
-        } catch (InterruptedException e) {
-            Thread.currentThread().interrupt();
-            writer.println("\nINTERRUPTED");
         }
     }
 
@@ -450,25 +414,4 @@
         writer.printf("%s%s=%s\n", indent, name, value);
     }
 
-    /**
-     * Dumps the state that's guarded by {@code mHandler}.
-     */
-    private void handleDump(@NonNull PrintWriter writer, @NonNull CountDownLatch latch) {
-        if (mPendingCallbacks.size() == 0) {
-            writer.println("no pending callbacks");
-        } else {
-            writer.printf("pending callbacks: %s\n", mPendingCallbacks);
-        }
-        latch.countDown();
-    }
-
-    @NonNull
-    private static String requestTypeToString(@RequestType int type) {
-        switch (type) {
-            case REQUEST_TYPE_GET_INITIAL_INFO:
-                return "TYPE_GET_INITIAL_INFO";
-            default:
-                return "UNKNOWN-" + type;
-        }
-    }
 }
diff --git a/tests/CarDeveloperOptions/OWNERS b/tests/CarDeveloperOptions/OWNERS
index aefc8fb..b57ecbb 100644
--- a/tests/CarDeveloperOptions/OWNERS
+++ b/tests/CarDeveloperOptions/OWNERS
@@ -1,19 +1,18 @@
 # Use these reviewers by default.
-davidln@google.com
 hseog@google.com
 johnchoi@google.com
 kwaky@google.com
 priyanksingh@google.com
 stenning@google.com
+ericberglund@google.com
+jianyliu@google.com
 
 # People who can originally approve code for Settings.
 edgarwang@google.com
 emilychuang@google.com
-rafftsai@google.com
 tmfang@google.com
 
 # Emergency approvers in case the above are not available
-zhfan@google.com
 
 # Exempt resource files (because they are in a flat directory and too hard to manage via OWNERS)
 per-file *.xml=*
diff --git a/tests/CarDeveloperOptions/src/com/android/car/developeroptions/development/AbstractBluetoothA2dpPreferenceController.java b/tests/CarDeveloperOptions/src/com/android/car/developeroptions/development/AbstractBluetoothA2dpPreferenceController.java
index 1451f19..441ea56 100644
--- a/tests/CarDeveloperOptions/src/com/android/car/developeroptions/development/AbstractBluetoothA2dpPreferenceController.java
+++ b/tests/CarDeveloperOptions/src/com/android/car/developeroptions/development/AbstractBluetoothA2dpPreferenceController.java
@@ -105,6 +105,9 @@
 
     @Override
     public void updateState(Preference preference) {
+        if (mBluetoothA2dp == null) {
+            return;
+        }
         BluetoothDevice activeDevice = mBluetoothA2dp.getActiveDevice();
         if (activeDevice == null || getCodecConfig(activeDevice) == null || mPreference == null) {
             return;
diff --git a/tests/carservice_test/src/com/android/car/pm/ActivityBlockingActivityTest.java b/tests/carservice_test/src/com/android/car/pm/ActivityBlockingActivityTest.java
index c7781d5..9bef532 100644
--- a/tests/carservice_test/src/com/android/car/pm/ActivityBlockingActivityTest.java
+++ b/tests/carservice_test/src/com/android/car/pm/ActivityBlockingActivityTest.java
@@ -37,6 +37,7 @@
 import androidx.test.InstrumentationRegistry;
 import androidx.test.ext.junit.runners.AndroidJUnit4;
 import androidx.test.filters.MediumTest;
+import androidx.test.filters.Suppress;
 
 import org.junit.After;
 import org.junit.Before;
@@ -84,6 +85,8 @@
         assertBlockingActivityNotFound();
     }
 
+    // Suppress test to avoid blocking team while b/150491747 is evaluated
+    @Suppress
     @Test
     public void testBlockingActivity_nonDoActivity_isBlocked() throws Exception {
         startNonDoActivity(NonDoActivity.EXTRA_DO_NOTHING);
@@ -108,6 +111,8 @@
         assertBlockingActivityNotFound();
     }
 
+    // Suppress test to avoid blocking team while b/150491747 is evaluated
+    @Suppress
     @Test
     public void testBlockingActivity_nonDoFinishesOnResume_noBlockingActivity()
             throws Exception {
@@ -116,6 +121,8 @@
         assertBlockingActivityNotFound();
     }
 
+    // Suppress test to avoid blocking team while b/150491747 is evaluated
+    @Suppress
     @Test
     public void testBlockingActivity_nonDoLaunchesDoOnResume_noBlockingActivity()
             throws Exception {
diff --git a/tests/carservice_unit_test/src/com/android/car/hal/UserHalServiceTest.java b/tests/carservice_unit_test/src/com/android/car/hal/UserHalServiceTest.java
index 1a07456..b0bc2fe 100644
--- a/tests/carservice_unit_test/src/com/android/car/hal/UserHalServiceTest.java
+++ b/tests/carservice_unit_test/src/com/android/car/hal/UserHalServiceTest.java
@@ -361,6 +361,12 @@
         assertThat(newUser.flags).isEqualTo(newUserFlags);
     }
 
+    @Test
+    public void testGetUserInfo_twoSuccessfulCalls() throws Exception {
+        testGetUserInfo_successDefault();
+        testGetUserInfo_successDefault();
+    }
+
     /**
      * Asserts the given {@link UsersInfo} is properly represented in the {@link VehiclePropValue}.
      *