/*
Copyright (c) 2017-2018, The Linux Foundation. All rights reserved.

Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:
    * Redistributions of source code must retain the above copyright
      notice, this list of conditions and the following disclaimer.
    * Redistributions in binary form must reproduce the above
      copyright notice, this list of conditions and the following
      disclaimer in the documentation and/or other materials provided
      with the distribution.
    * Neither the name of The Linux Foundation nor the names of its
      contributors may be used to endorse or promote products derived
      from this software without specific prior written permission.

THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
29
30#include <utils/debug.h>
Sushil Chauhan06521582018-03-19 14:00:23 -070031#include <vector>
Ramkumar Radhakrishnan07438bb2018-07-25 19:30:54 -070032#include <cstring>
Saurabh Shah82b06f42017-09-06 16:43:49 -070033
34#include "hw_peripheral_drm.h"
35
36#define __CLASS__ "HWPeripheralDRM"
37
38using sde_drm::DRMDisplayType;
39using sde_drm::DRMOps;
Saurabh Shah82b06f42017-09-06 16:43:49 -070040using sde_drm::DRMPowerMode;
Ping Li6a74d892018-05-02 15:54:58 -070041using sde_drm::DppsFeaturePayload;
Xu Yang32e58c22017-11-20 09:58:11 +080042using sde_drm::DRMDppsFeatureInfo;
Sushil Chauhan06521582018-03-19 14:00:23 -070043using sde_drm::DRMSecureMode;
44using sde_drm::DRMCWbCaptureMode;
Saurabh Shah82b06f42017-09-06 16:43:49 -070045
46namespace sdm {
47
HWPeripheralDRM::HWPeripheralDRM(int32_t display_id, BufferSyncHandler *buffer_sync_handler,
                                 BufferAllocator *buffer_allocator, HWInfoInterface *hw_info_intf)
  : HWDeviceDRM(buffer_sync_handler, buffer_allocator, hw_info_intf) {
  disp_type_ = DRMDisplayType::PERIPHERAL;
  device_name_ = "Peripheral";
  display_id_ = display_id;
}

DisplayError HWPeripheralDRM::Init() {
  DisplayError ret = HWDeviceDRM::Init();
  if (ret != kErrorNone) {
    DLOGE("Init failed for %s", device_name_);
    return ret;
  }

  scalar_data_.resize(hw_resource_.hw_dest_scalar_info.count);
  dest_scalar_cache_.resize(hw_resource_.hw_dest_scalar_info.count);

  return kErrorNone;
}

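// Stage peripheral-specific state (destination scaler, concurrent writeback, idle power collapse)
// into the atomic request before handing validation over to the base class.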
DisplayError HWPeripheralDRM::Validate(HWLayers *hw_layers) {
  HWLayersInfo &hw_layer_info = hw_layers->info;
  SetDestScalarData(hw_layer_info, true);
  SetupConcurrentWriteback(hw_layer_info, true);
  SetIdlePCState();

  return HWDeviceDRM::Validate(hw_layers);
}

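// Stage the same peripheral state for the real commit, then run concurrent writeback post-commit
// handling and clear the synchronous-commit request once the commit has succeeded.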
DisplayError HWPeripheralDRM::Commit(HWLayers *hw_layers) {
  HWLayersInfo &hw_layer_info = hw_layers->info;
  SetDestScalarData(hw_layer_info, false);
  SetupConcurrentWriteback(hw_layer_info, false);
  SetIdlePCState();

  DisplayError error = HWDeviceDRM::Commit(hw_layers);
  if (error != kErrorNone) {
    return error;
  }

  if (cwb_config_.enabled && (error == kErrorNone)) {
    PostCommitConcurrentWriteback(hw_layer_info.stack->output_buffer);
  }

  // Initialize to default after successful commit
  synchronous_commit_ = false;

  return error;
}

void HWPeripheralDRM::ResetDisplayParams() {
  sde_dest_scalar_data_ = {};
  for (uint32_t j = 0; j < scalar_data_.size(); j++) {
    scalar_data_[j] = {};
    dest_scalar_cache_[j] = {};
  }
}

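// Build sde_drm_dest_scaler_cfg entries from the per-mixer destination scaler data and program
// them on the CRTC; entries whose scaler data and flags match the cache are skipped.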
void HWPeripheralDRM::SetDestScalarData(HWLayersInfo hw_layer_info, bool validate) {
  if (!hw_scale_ || !hw_resource_.hw_dest_scalar_info.count) {
    return;
  }

  uint32_t count = 0;
  for (uint32_t i = 0; i < hw_resource_.hw_dest_scalar_info.count; i++) {
    DestScaleInfoMap::iterator it = hw_layer_info.dest_scale_info_map.find(i);

    if (it == hw_layer_info.dest_scale_info_map.end()) {
      continue;
    }

    HWDestScaleInfo *dest_scale_info = it->second;
    SDEScaler *scale = &scalar_data_[count];
    hw_scale_->SetScaler(dest_scale_info->scale_data, scale);

    sde_drm_dest_scaler_cfg *dest_scalar_data = &sde_dest_scalar_data_.ds_cfg[count];
    dest_scalar_data->flags = 0;
    if (scale->scaler_v2.enable) {
      dest_scalar_data->flags |= SDE_DRM_DESTSCALER_ENABLE;
    }
    if (scale->scaler_v2.de.enable) {
      dest_scalar_data->flags |= SDE_DRM_DESTSCALER_ENHANCER_UPDATE;
    }
    if (dest_scale_info->scale_update) {
      dest_scalar_data->flags |= SDE_DRM_DESTSCALER_SCALE_UPDATE;
    }
    if (hw_panel_info_.partial_update) {
      dest_scalar_data->flags |= SDE_DRM_DESTSCALER_PU_ENABLE;
    }

    if (!std::memcmp(&dest_scalar_cache_[count].scalar_data, scale, sizeof(SDEScaler)) &&
        dest_scalar_cache_[count].flags == dest_scalar_data->flags) {
      continue;
    }

    dest_scalar_data->index = i;
    dest_scalar_data->lm_width = dest_scale_info->mixer_width;
    dest_scalar_data->lm_height = dest_scale_info->mixer_height;
    dest_scalar_data->scaler_cfg = reinterpret_cast<uint64_t>(&scale->scaler_v2);
    if (!validate) {
      dest_scalar_cache_[count].flags = dest_scalar_data->flags;
      dest_scalar_cache_[count].scalar_data = *scale;
    }
    count++;
  }
  if (count) {
    sde_dest_scalar_data_.num_dest_scaler = count;
    drm_atomic_intf_->Perform(DRMOps::CRTC_SET_DEST_SCALER_CONFIG, token_.crtc_id,
                              reinterpret_cast<uint64_t>(&sde_dest_scalar_data_));
  }
}

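// A flush discards the staged frame, so also reset the cached destination scaler state to force
// it to be re-programmed on the next cycle.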
DisplayError HWPeripheralDRM::Flush() {
  DisplayError err = HWDeviceDRM::Flush();
  if (err != kErrorNone) {
    return err;
  }

  ResetDisplayParams();
  return kErrorNone;
}

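// Validate the DPPS payload and cache the requested feature value against the target CRTC or
// connector through the DRM atomic interface.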
DisplayError HWPeripheralDRM::SetDppsFeature(void *payload, size_t size) {
  uint32_t obj_id = 0, object_type = 0, feature_id = 0;
  uint64_t value = 0;

  if (size != sizeof(DppsFeaturePayload)) {
    DLOGE("invalid payload size %zu, expected %zu", size, sizeof(DppsFeaturePayload));
    return kErrorParameters;
  }

  DppsFeaturePayload *feature_payload = reinterpret_cast<DppsFeaturePayload *>(payload);
  object_type = feature_payload->object_type;
  feature_id = feature_payload->feature_id;
  value = feature_payload->value;
  if (object_type == DRM_MODE_OBJECT_CRTC) {
    obj_id = token_.crtc_id;
  } else if (object_type == DRM_MODE_OBJECT_CONNECTOR) {
    obj_id = token_.conn_id;
  } else {
    DLOGE("invalid object type 0x%x", object_type);
    return kErrorUndefined;
  }

  drm_atomic_intf_->Perform(DRMOps::DPPS_CACHE_FEATURE, obj_id, feature_id, value);
  return kErrorNone;
}

DisplayError HWPeripheralDRM::GetDppsFeatureInfo(void *payload, size_t size) {
  if (size != sizeof(DRMDppsFeatureInfo)) {
    DLOGE("invalid payload size %zu, expected %zu", size, sizeof(DRMDppsFeatureInfo));
    return kErrorParameters;
  }
  DRMDppsFeatureInfo *feature_info = reinterpret_cast<DRMDppsFeatureInfo *>(payload);
  drm_mgr_intf_->GetDppsFeatureInfo(feature_info);
  return kErrorNone;
}

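// Track secure display session start/end. Video mode panels are flushed across the transition;
// command mode panels skip the flush.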
DisplayError HWPeripheralDRM::HandleSecureEvent(SecureEvent secure_event) {
  switch (secure_event) {
    case kSecureDisplayStart: {
      secure_display_active_ = true;
      if (hw_panel_info_.mode != kModeCommand) {
        DisplayError err = Flush();
        if (err != kErrorNone) {
          return err;
        }
      }
    }
    break;

    case kSecureDisplayEnd: {
      if (hw_panel_info_.mode != kModeCommand) {
        DisplayError err = Flush();
        if (err != kErrorNone) {
          return err;
        }
      }
      secure_display_active_ = false;
    }
    break;

    default:
      DLOGE("Invalid secure event %d", secure_event);
      return kErrorNotSupported;
  }

  return kErrorNone;
}

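// Enable or tear down concurrent writeback (CWB) depending on whether the frame carries an output
// buffer. CWB modes are set up once during validate; the retire fence is requested only at commit.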
void HWPeripheralDRM::SetupConcurrentWriteback(const HWLayersInfo &hw_layer_info, bool validate) {
  bool enable = hw_resource_.has_concurrent_writeback && hw_layer_info.stack->output_buffer;
  if (!(enable || cwb_config_.enabled)) {
    return;
  }

  bool setup_modes = enable && !cwb_config_.enabled && validate;
  if (setup_modes && (SetupConcurrentWritebackModes() == kErrorNone)) {
    cwb_config_.enabled = true;
  }

  if (cwb_config_.enabled) {
    if (enable) {
      // Set DRM properties for Concurrent Writeback.
      ConfigureConcurrentWriteback(hw_layer_info.stack);

      if (!validate) {
        // Set GET_RETIRE_FENCE property to get Concurrent Writeback fence.
        int *fence = &hw_layer_info.stack->output_buffer->release_fence_fd;
        drm_atomic_intf_->Perform(DRMOps::CONNECTOR_GET_RETIRE_FENCE,
                                  cwb_config_.token.conn_id, fence);
      }
    } else {
      // Tear down the Concurrent Writeback topology.
      drm_atomic_intf_->Perform(DRMOps::CONNECTOR_SET_CRTC, cwb_config_.token.conn_id, 0);
    }
  }
}

DisplayError HWPeripheralDRM::SetupConcurrentWritebackModes() {
  // To set up the Concurrent Writeback topology, get the connector ID of the virtual display.
  if (drm_mgr_intf_->RegisterDisplay(DRMDisplayType::VIRTUAL, &cwb_config_.token)) {
    DLOGE("RegisterDisplay failed for Concurrent Writeback");
    return kErrorResources;
  }

  // Build the mode list from the primary display's modes.
  std::vector<drmModeModeInfo> modes;
  for (auto &item : connector_info_.modes) {
    modes.push_back(item.mode);
  }

  // Pass the mode list to the driver.
  struct sde_drm_wb_cfg cwb_cfg = {};
  cwb_cfg.connector_id = cwb_config_.token.conn_id;
  cwb_cfg.flags = SDE_DRM_WB_CFG_FLAGS_CONNECTED;
  cwb_cfg.count_modes = UINT32(modes.size());
  cwb_cfg.modes = (uint64_t)modes.data();

  int ret = -EINVAL;
#ifdef DRM_IOCTL_SDE_WB_CONFIG
  ret = drmIoctl(dev_fd_, DRM_IOCTL_SDE_WB_CONFIG, &cwb_cfg);
#endif
  if (ret) {
    drm_mgr_intf_->UnregisterDisplay(cwb_config_.token);
    DLOGE("Dump CWBConfig: mode_count %d flags %x", cwb_cfg.count_modes, cwb_cfg.flags);
    DumpConnectorModeInfo();
    return kErrorHardware;
  }

  return kErrorNone;
}

void HWPeripheralDRM::ConfigureConcurrentWriteback(LayerStack *layer_stack) {
  LayerBuffer *output_buffer = layer_stack->output_buffer;
  registry_.MapOutputBufferToFbId(output_buffer);

  // Set the topology for Concurrent Writeback: [CRTC_PRIMARY_DISPLAY - CONNECTOR_VIRTUAL_DISPLAY].
  drm_atomic_intf_->Perform(DRMOps::CONNECTOR_SET_CRTC, cwb_config_.token.conn_id, token_.crtc_id);

  // Set CRTC Capture Mode
  DRMCWbCaptureMode capture_mode = layer_stack->flags.post_processed_output ?
                                   DRMCWbCaptureMode::DSPP_OUT : DRMCWbCaptureMode::MIXER_OUT;
  drm_atomic_intf_->Perform(DRMOps::CRTC_SET_CAPTURE_MODE, token_.crtc_id, capture_mode);

  // Set Connector Output FB
  uint32_t fb_id = registry_.GetOutputFbId(output_buffer->handle_id);
  drm_atomic_intf_->Perform(DRMOps::CONNECTOR_SET_OUTPUT_FB_ID, cwb_config_.token.conn_id, fb_id);

  // Set Connector Secure Mode
  bool secure = output_buffer->flags.secure;
  DRMSecureMode mode = secure ? DRMSecureMode::SECURE : DRMSecureMode::NON_SECURE;
  drm_atomic_intf_->Perform(DRMOps::CONNECTOR_SET_FB_SECURE_MODE, cwb_config_.token.conn_id, mode);

  // Set Connector Output Rect
  sde_drm::DRMRect dst = {};
  dst.left = 0;
  dst.top = 0;
  dst.right = display_attributes_[current_mode_index_].x_pixels;
  dst.bottom = display_attributes_[current_mode_index_].y_pixels;
  drm_atomic_intf_->Perform(DRMOps::CONNECTOR_SET_OUTPUT_RECT, cwb_config_.token.conn_id, dst);
}

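// Once a frame commits without a CWB output buffer, release the virtual connector and mark
// concurrent writeback as disabled.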
void HWPeripheralDRM::PostCommitConcurrentWriteback(LayerBuffer *output_buffer) {
  bool enabled = hw_resource_.has_concurrent_writeback && output_buffer;

  if (!enabled) {
    drm_mgr_intf_->UnregisterDisplay(cwb_config_.token);
    cwb_config_.enabled = false;
  }
}

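// Cache the requested idle power collapse state; it is applied through the atomic request on the
// next validate/commit cycle. Disabling idle PC may additionally request a synchronous commit.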
DisplayError HWPeripheralDRM::ControlIdlePowerCollapse(bool enable, bool synchronous) {
  sde_drm::DRMIdlePCState idle_pc_state =
      enable ? sde_drm::DRMIdlePCState::ENABLE : sde_drm::DRMIdlePCState::DISABLE;
  if (idle_pc_state == idle_pc_state_) {
    return kErrorNone;
  }
  // Idle PC is disabled only after the subsequent commit, so make that commit synchronous to
  // ensure the TA accesses the display_cc registers only after idle PC is actually disabled.
  idle_pc_state_ = idle_pc_state;
  synchronous_commit_ = !enable ? synchronous : false;
  return kErrorNone;
}

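// Power on the display with idle power collapse re-enabled as part of the same request; before
// the first commit cycle this is a no-op.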
DisplayError HWPeripheralDRM::PowerOn(const HWQosData &qos_data, int *release_fence) {
  DTRACE_SCOPED();
  if (!drm_atomic_intf_) {
    DLOGE("DRM Atomic Interface is null!");
    return kErrorUndefined;
  }

  if (first_cycle_) {
    return kErrorNone;
  }
  drm_atomic_intf_->Perform(sde_drm::DRMOps::CRTC_SET_IDLE_PC_STATE, token_.crtc_id,
                            sde_drm::DRMIdlePCState::ENABLE);
  DisplayError err = HWDeviceDRM::PowerOn(qos_data, release_fence);
  if (err != kErrorNone) {
    return err;
  }
  idle_pc_state_ = sde_drm::DRMIdlePCState::ENABLE;

  return kErrorNone;
}

}  // namespace sdm