Use pixel format from buffer to determine opacity.

The format passed in at BufferQueueLayer creation is meant to define
the format of the buffers dequeued from the layer's BufferQueue. If the
client submits its own buffers, however, they can have any pixel
format. Therefore, the format of the activeBuffer should be used when
determining opacity, since that describes the buffer that will actually
be rendered.
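
A minimal sketch of the idea, using the standard Android pixel format
constants from ui/PixelFormat.h. The helper name isOpaqueFormat() and the
switch below are illustrative only; the actual change caches the format in
mBufferInfo.mPixelFormat via gatherBufferInfo() and consults it there.

    #include <ui/PixelFormat.h>

    // Illustrative only: decide opacity from the latched buffer's own
    // format rather than the format requested at layer creation time.
    static bool isOpaqueFormat(android::PixelFormat format) {
        switch (format) {
            case android::PIXEL_FORMAT_RGBX_8888:
            case android::PIXEL_FORMAT_RGB_888:
            case android::PIXEL_FORMAT_RGB_565:
                return true;   // formats without an alpha channel are opaque
            default:
                return false;  // RGBA_8888, RGBA_1010102, unknown, etc.
        }
    }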

Bug: 152162496
Test: LayerTypeAndRenderTypeTransactionTest#SetBufferFormat
Change-Id: I4295b837369b8ca8918d8398a29cab133a47cbf7
diff --git a/services/surfaceflinger/BufferLayer.cpp b/services/surfaceflinger/BufferLayer.cpp
index e4d754c..a32bc2b 100644
--- a/services/surfaceflinger/BufferLayer.cpp
+++ b/services/surfaceflinger/BufferLayer.cpp
@@ -272,7 +272,7 @@
     // pixel format is HDR Y410 masquerading as RGBA_1010102
     return (mBufferInfo.mDataspace == ui::Dataspace::BT2020_ITU_PQ &&
             mBufferInfo.mApi == NATIVE_WINDOW_API_MEDIA &&
-            mBufferInfo.mBuffer->getPixelFormat() == HAL_PIXEL_FORMAT_RGBA_1010102);
+            mBufferInfo.mPixelFormat == HAL_PIXEL_FORMAT_RGBA_1010102);
 }
 
 sp<compositionengine::LayerFE> BufferLayer::getCompositionEngineLayerFE() const {
@@ -374,6 +374,12 @@
     return true;
 }
 
+void BufferLayer::gatherBufferInfo() {
+    mBufferInfo.mPixelFormat =
+            !mBufferInfo.mBuffer ? PIXEL_FORMAT_NONE : mBufferInfo.mBuffer->format;
+    mBufferInfo.mFrameLatencyNeeded = true;
+}
+
 bool BufferLayer::latchBuffer(bool& recomputeVisibleRegions, nsecs_t latchTime,
                               nsecs_t expectedPresentTime) {
     ATRACE_CALL();
@@ -434,7 +440,6 @@
     gatherBufferInfo();
 
     mRefreshPending = true;
-    mBufferInfo.mFrameLatencyNeeded = true;
     if (oldBufferInfo.mBuffer == nullptr) {
         // the first time we receive a buffer, we need to trigger a
         // geometry invalidation.