blob: caa404d3cdf62c6860b942c564308028f1e09a22 [file] [log] [blame]
Saurabh Shah82b06f42017-09-06 16:43:49 -07001/*
Xu Yang32e58c22017-11-20 09:58:11 +08002Copyright (c) 2017-2018, The Linux Foundation. All rights reserved.
Saurabh Shah82b06f42017-09-06 16:43:49 -07003
4Redistribution and use in source and binary forms, with or without
5modification, are permitted provided that the following conditions are
6met:
7 * Redistributions of source code must retain the above copyright
8 notice, this list of conditions and the following disclaimer.
9 * Redistributions in binary form must reproduce the above
10 copyright notice, this list of conditions and the following
11 disclaimer in the documentation and/or other materials provided
12 with the distribution.
13 * Neither the name of The Linux Foundation nor the names of its
14 contributors may be used to endorse or promote products derived
15 from this software without specific prior written permission.
16
17THIS SOFTWARE IS PROVIDED "AS IS" AND ANY EXPRESS OR IMPLIED
18WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
19MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT
20ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS
21BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
22CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
23SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
24BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
25WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
26OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
27IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
28*/
29
30#include <utils/debug.h>
Sushil Chauhan06521582018-03-19 14:00:23 -070031#include <vector>
Ramkumar Radhakrishnan07438bb2018-07-25 19:30:54 -070032#include <cstring>
Saurabh Shah82b06f42017-09-06 16:43:49 -070033
34#include "hw_peripheral_drm.h"
35
36#define __CLASS__ "HWPeripheralDRM"
37
38using sde_drm::DRMDisplayType;
39using sde_drm::DRMOps;
Saurabh Shah82b06f42017-09-06 16:43:49 -070040using sde_drm::DRMPowerMode;
Ping Li6a74d892018-05-02 15:54:58 -070041using sde_drm::DppsFeaturePayload;
Xu Yang32e58c22017-11-20 09:58:11 +080042using sde_drm::DRMDppsFeatureInfo;
Sushil Chauhan06521582018-03-19 14:00:23 -070043using sde_drm::DRMSecureMode;
44using sde_drm::DRMCWbCaptureMode;
Saurabh Shah82b06f42017-09-06 16:43:49 -070045
46namespace sdm {
47
Mathew Joseph Karimpanal731bc932017-11-22 10:04:56 +053048HWPeripheralDRM::HWPeripheralDRM(int32_t display_id, BufferSyncHandler *buffer_sync_handler,
49 BufferAllocator *buffer_allocator, HWInfoInterface *hw_info_intf)
Saurabh Shah82b06f42017-09-06 16:43:49 -070050 : HWDeviceDRM(buffer_sync_handler, buffer_allocator, hw_info_intf) {
51 disp_type_ = DRMDisplayType::PERIPHERAL;
Varun Arora7c8ee542018-05-01 20:58:16 -070052 device_name_ = "Peripheral";
Mathew Joseph Karimpanal731bc932017-11-22 10:04:56 +053053 display_id_ = display_id;
Saurabh Shah82b06f42017-09-06 16:43:49 -070054}
55
56DisplayError HWPeripheralDRM::Init() {
57 DisplayError ret = HWDeviceDRM::Init();
58 if (ret != kErrorNone) {
59 DLOGE("Init failed for %s", device_name_);
60 return ret;
61 }
62
Ramkumar Radhakrishnan7e971e02017-08-17 14:19:15 +053063 scalar_data_.resize(hw_resource_.hw_dest_scalar_info.count);
Ramkumar Radhakrishnan07438bb2018-07-25 19:30:54 -070064 dest_scalar_cache_.resize(hw_resource_.hw_dest_scalar_info.count);
Ramkumar Radhakrishnan7e971e02017-08-17 14:19:15 +053065
Saurabh Shah82b06f42017-09-06 16:43:49 -070066 return kErrorNone;
67}
68
69DisplayError HWPeripheralDRM::Validate(HWLayers *hw_layers) {
Ramkumar Radhakrishnan7e971e02017-08-17 14:19:15 +053070 HWLayersInfo &hw_layer_info = hw_layers->info;
Ramkumar Radhakrishnan07438bb2018-07-25 19:30:54 -070071 SetDestScalarData(hw_layer_info, true);
Sushil Chauhan06521582018-03-19 14:00:23 -070072 SetupConcurrentWriteback(hw_layer_info, true);
Ramkumar Radhakrishnanf985d482018-07-23 18:10:41 -070073 SetIdlePCState();
Saurabh Shah82b06f42017-09-06 16:43:49 -070074
75 return HWDeviceDRM::Validate(hw_layers);
76}
77
Ramkumar Radhakrishnan7e971e02017-08-17 14:19:15 +053078DisplayError HWPeripheralDRM::Commit(HWLayers *hw_layers) {
79 HWLayersInfo &hw_layer_info = hw_layers->info;
Ramkumar Radhakrishnan07438bb2018-07-25 19:30:54 -070080 SetDestScalarData(hw_layer_info, false);
Sushil Chauhan06521582018-03-19 14:00:23 -070081 SetupConcurrentWriteback(hw_layer_info, false);
Ramkumar Radhakrishnanf985d482018-07-23 18:10:41 -070082 SetIdlePCState();
Ramkumar Radhakrishnan7e971e02017-08-17 14:19:15 +053083
Sushil Chauhan06521582018-03-19 14:00:23 -070084 DisplayError error = HWDeviceDRM::Commit(hw_layers);
Ramkumar Radhakrishnanf985d482018-07-23 18:10:41 -070085 if (error != kErrorNone) {
86 return error;
87 }
Sushil Chauhan06521582018-03-19 14:00:23 -070088
89 if (cwb_config_.enabled && (error == kErrorNone)) {
90 PostCommitConcurrentWriteback(hw_layer_info.stack->output_buffer);
91 }
92
Ramkumar Radhakrishnanf985d482018-07-23 18:10:41 -070093 // Initialize to default after successful commit
94 synchronous_commit_ = false;
95
Sushil Chauhan06521582018-03-19 14:00:23 -070096 return error;
Ramkumar Radhakrishnan7e971e02017-08-17 14:19:15 +053097}
98
Ramkumar Radhakrishnan7e971e02017-08-17 14:19:15 +053099void HWPeripheralDRM::ResetDisplayParams() {
100 sde_dest_scalar_data_ = {};
101 for (uint32_t j = 0; j < scalar_data_.size(); j++) {
102 scalar_data_[j] = {};
Ramkumar Radhakrishnan07438bb2018-07-25 19:30:54 -0700103 dest_scalar_cache_[j] = {};
Ramkumar Radhakrishnan7e971e02017-08-17 14:19:15 +0530104 }
105}
106
// Translate per-mixer destination-scaler requests from |hw_layer_info| into
// the sde_drm ds_cfg array and stage them on the CRTC via the atomic
// interface.
//   validate == true  : recompute scaler data and detect changes vs. cache.
//   validate == false : commit path — persist the staged data into the cache
//                       and clear the pending-update flag.
// NOTE(review): |hw_layer_info| is taken by value (copies the struct per
// call); the signature likely mirrors the header declaration — confirm before
// changing to a const reference.
void HWPeripheralDRM::SetDestScalarData(HWLayersInfo hw_layer_info, bool validate) {
  // Nothing to do without the scaler helper or dest-scaler hardware blocks.
  if (!hw_scale_ || !hw_resource_.hw_dest_scalar_info.count) {
    return;
  }

  // Scaler data is recomputed only during validate; the commit pass reuses
  // whatever the preceding validate produced (loop is skipped when !validate).
  for (uint32_t i = 0; i < hw_resource_.hw_dest_scalar_info.count && validate; i++) {
    DestScaleInfoMap::iterator it = hw_layer_info.dest_scale_info_map.find(i);

    // Not every dest-scaler block has a request this frame.
    if (it == hw_layer_info.dest_scale_info_map.end()) {
      continue;
    }

    HWDestScaleInfo *dest_scale_info = it->second;
    SDEScaler *scale = &scalar_data_[i];
    hw_scale_->SetScaler(dest_scale_info->scale_data, scale);

    // Build the driver-facing config entry for this block.
    sde_drm_dest_scaler_cfg *dest_scalar_data = &sde_dest_scalar_data_.ds_cfg[i];
    dest_scalar_data->flags = 0;
    if (scale->scaler_v2.enable) {
      dest_scalar_data->flags |= SDE_DRM_DESTSCALER_ENABLE;
    }
    if (scale->scaler_v2.de.enable) {
      dest_scalar_data->flags |= SDE_DRM_DESTSCALER_ENHANCER_UPDATE;
    }
    if (dest_scale_info->scale_update) {
      dest_scalar_data->flags |= SDE_DRM_DESTSCALER_SCALE_UPDATE;
    }
    if (hw_panel_info_.partial_update) {
      dest_scalar_data->flags |= SDE_DRM_DESTSCALER_PU_ENABLE;
    }
    dest_scalar_data->index = i;
    dest_scalar_data->lm_width = dest_scale_info->mixer_width;
    dest_scalar_data->lm_height = dest_scale_info->mixer_height;
    // Driver consumes scaler_v2 through this raw pointer; scalar_data_ must
    // stay alive and unmoved until the commit completes.
    dest_scalar_data->scaler_cfg = reinterpret_cast<uint64_t>(&scale->scaler_v2);

    // Mark an update pending if either the scaler payload or the flags differ
    // from what was cached at the last commit.
    if (std::memcmp(&dest_scalar_cache_[i].scalar_data, scale, sizeof(SDEScaler)) ||
        dest_scalar_cache_[i].flags != dest_scalar_data->flags) {
      needs_ds_update_ = true;
    }
  }

  if (needs_ds_update_) {
    if (!validate) {
      // Cache the destination scalar data during commit
      for (uint32_t i = 0; i < hw_resource_.hw_dest_scalar_info.count; i++) {
        DestScaleInfoMap::iterator it = hw_layer_info.dest_scale_info_map.find(i);
        if (it == hw_layer_info.dest_scale_info_map.end()) {
          continue;
        }
        dest_scalar_cache_[i].flags = sde_dest_scalar_data_.ds_cfg[i].flags;
        dest_scalar_cache_[i].scalar_data = scalar_data_[i];
      }
      needs_ds_update_ = false;
    }
    // Stage the whole config array on the CRTC; applied at atomic commit.
    sde_dest_scalar_data_.num_dest_scaler = UINT32(hw_layer_info.dest_scale_info_map.size());
    drm_atomic_intf_->Perform(DRMOps::CRTC_SET_DEST_SCALER_CONFIG, token_.crtc_id,
                              reinterpret_cast<uint64_t>(&sde_dest_scalar_data_));
  }
}
166
167DisplayError HWPeripheralDRM::Flush() {
168 DisplayError err = HWDeviceDRM::Flush();
169 if (err != kErrorNone) {
170 return err;
171 }
172
173 ResetDisplayParams();
174 return kErrorNone;
175}
176
Ping Li6a74d892018-05-02 15:54:58 -0700177DisplayError HWPeripheralDRM::SetDppsFeature(void *payload, size_t size) {
178 uint32_t obj_id = 0, object_type = 0, feature_id = 0;
179 uint64_t value = 0;
Xu Yang32e58c22017-11-20 09:58:11 +0800180
Ping Li6a74d892018-05-02 15:54:58 -0700181 if (size != sizeof(DppsFeaturePayload)) {
182 DLOGE("invalid payload size %d, expected %d", size, sizeof(DppsFeaturePayload));
183 return kErrorParameters;
184 }
185
186 DppsFeaturePayload *feature_payload = reinterpret_cast<DppsFeaturePayload *>(payload);
187 object_type = feature_payload->object_type;
188 feature_id = feature_payload->feature_id;
189 value = feature_payload->value;
Xu Yang32e58c22017-11-20 09:58:11 +0800190 if (object_type == DRM_MODE_OBJECT_CRTC) {
191 obj_id = token_.crtc_id;
192 } else if (object_type == DRM_MODE_OBJECT_CONNECTOR) {
193 obj_id = token_.conn_id;
194 } else {
195 DLOGE("invalid object type 0x%x", object_type);
196 return kErrorUndefined;
197 }
198
199 drm_atomic_intf_->Perform(DRMOps::DPPS_CACHE_FEATURE, obj_id, feature_id, value);
200 return kErrorNone;
201}
202
Ping Li6a74d892018-05-02 15:54:58 -0700203DisplayError HWPeripheralDRM::GetDppsFeatureInfo(void *payload, size_t size) {
204 if (size != sizeof(DRMDppsFeatureInfo)) {
205 DLOGE("invalid payload size %d, expected %d", size, sizeof(DRMDppsFeatureInfo));
206 return kErrorParameters;
207 }
208 DRMDppsFeatureInfo *feature_info = reinterpret_cast<DRMDppsFeatureInfo *>(payload);
Xu Yang32e58c22017-11-20 09:58:11 +0800209 drm_mgr_intf_->GetDppsFeatureInfo(feature_info);
210 return kErrorNone;
211}
Ramkumar Radhakrishnan7e971e02017-08-17 14:19:15 +0530212
Ramkumar Radhakrishnana38b7602018-03-15 14:49:52 -0700213DisplayError HWPeripheralDRM::HandleSecureEvent(SecureEvent secure_event) {
214 switch (secure_event) {
215 case kSecureDisplayStart: {
216 secure_display_active_ = true;
217 if (hw_panel_info_.mode != kModeCommand) {
218 DisplayError err = Flush();
219 if (err != kErrorNone) {
220 return err;
221 }
222 }
223 }
224 break;
225
226 case kSecureDisplayEnd: {
227 if (hw_panel_info_.mode != kModeCommand) {
228 DisplayError err = Flush();
229 if (err != kErrorNone) {
230 return err;
231 }
232 }
233 secure_display_active_ = false;
Ramkumar Radhakrishnand25d30e2018-08-17 11:06:27 -0700234 synchronous_commit_ = true;
Ramkumar Radhakrishnana38b7602018-03-15 14:49:52 -0700235 }
236 break;
237
238 default:
239 DLOGE("Invalid secure event %d", secure_event);
240 return kErrorNotSupported;
241 }
242
243 return kErrorNone;
244}
245
// Configure or tear down Concurrent Writeback (CWB) for this frame. CWB
// captures the primary display output into the layer stack's output buffer
// through a virtual-display connector attached to the primary CRTC.
// |validate| distinguishes the validate pass (may perform one-time mode
// setup) from the commit pass (fetches the retire fence).
void HWPeripheralDRM::SetupConcurrentWriteback(const HWLayersInfo &hw_layer_info, bool validate) {
  // The client requests CWB by attaching an output buffer to the stack.
  bool enable = hw_resource_.has_concurrent_writeback && hw_layer_info.stack->output_buffer;
  if (!(enable || cwb_config_.enabled)) {
    // CWB neither requested this frame nor currently active — nothing to do.
    return;
  }

  // First-time enable happens during validate: register the virtual connector
  // and publish the writeback mode list to the driver.
  bool setup_modes = enable && !cwb_config_.enabled && validate;
  if (setup_modes && (SetupConcurrentWritebackModes() == kErrorNone)) {
    cwb_config_.enabled = true;
  }

  if (cwb_config_.enabled) {
    if (enable) {
      // Set DRM properties for Concurrent Writeback.
      ConfigureConcurrentWriteback(hw_layer_info.stack);

      if (!validate) {
        // Set GET_RETIRE_FENCE property to get Concurrent Writeback fence.
        int *fence = &hw_layer_info.stack->output_buffer->release_fence_fd;
        drm_atomic_intf_->Perform(DRMOps::CONNECTOR_GET_RETIRE_FENCE,
                                  cwb_config_.token.conn_id, fence);
      }
    } else {
      // CWB was active but is not requested this frame:
      // tear down the Concurrent Writeback topology.
      drm_atomic_intf_->Perform(DRMOps::CONNECTOR_SET_CRTC, cwb_config_.token.conn_id, 0);
    }
  }
}
274
// One-time CWB setup: reserve the virtual-display connector and inform the
// driver of the supported writeback modes (mirrored from the primary panel's
// mode list). Returns kErrorResources if no virtual connector is available,
// kErrorHardware if the writeback-config ioctl fails (the connector
// reservation is rolled back in that case).
DisplayError HWPeripheralDRM::SetupConcurrentWritebackModes() {
  // To setup Concurrent Writeback topology, get the Connector ID of Virtual display
  if (drm_mgr_intf_->RegisterDisplay(DRMDisplayType::VIRTUAL, &cwb_config_.token)) {
    DLOGE("RegisterDisplay failed for Concurrent Writeback");
    return kErrorResources;
  }

  // Set the modes based on Primary display.
  std::vector<drmModeModeInfo> modes;
  for (auto &item : connector_info_.modes) {
    modes.push_back(item.mode);
  }

  // Inform the mode list to driver.
  struct sde_drm_wb_cfg cwb_cfg = {};
  cwb_cfg.connector_id = cwb_config_.token.conn_id;
  cwb_cfg.flags = SDE_DRM_WB_CFG_FLAGS_CONNECTED;
  cwb_cfg.count_modes = UINT32(modes.size());
  cwb_cfg.modes = (uint64_t)modes.data();

  // Default to failure so a missing ioctl define is reported as an error.
  int ret = -EINVAL;
#ifdef DRM_IOCTL_SDE_WB_CONFIG
  ret = drmIoctl(dev_fd_, DRM_IOCTL_SDE_WB_CONFIG, &cwb_cfg);
#endif
  if (ret) {
    // Roll back the connector reservation and dump state for triage.
    drm_mgr_intf_->UnregisterDisplay(cwb_config_.token);
    DLOGE("Dump CWBConfig: mode_count %d flags %x", cwb_cfg.count_modes, cwb_cfg.flags);
    DumpConnectorModeInfo();
    return kErrorHardware;
  }

  return kErrorNone;
}
308
// Stage all DRM properties for one CWB frame: bind the virtual connector to
// the primary CRTC, select the capture tap point, and describe the output
// buffer (FB id, secure mode, destination rect).
void HWPeripheralDRM::ConfigureConcurrentWriteback(LayerStack *layer_stack) {
  LayerBuffer *output_buffer = layer_stack->output_buffer;
  // Ensure the output buffer has a DRM framebuffer id to hand to the driver.
  registry_.MapOutputBufferToFbId(output_buffer);

  // Set the topology for Concurrent Writeback: [CRTC_PRIMARY_DISPLAY - CONNECTOR_VIRTUAL_DISPLAY].
  drm_atomic_intf_->Perform(DRMOps::CONNECTOR_SET_CRTC, cwb_config_.token.conn_id, token_.crtc_id);

  // Set CRTC Capture Mode: tap after post-processing (DSPP) or at mixer out.
  DRMCWbCaptureMode capture_mode = layer_stack->flags.post_processed_output ?
                                   DRMCWbCaptureMode::DSPP_OUT : DRMCWbCaptureMode::MIXER_OUT;
  drm_atomic_intf_->Perform(DRMOps::CRTC_SET_CAPTURE_MODE, token_.crtc_id, capture_mode);

  // Set Connector Output FB
  uint32_t fb_id = registry_.GetOutputFbId(output_buffer->handle_id);
  drm_atomic_intf_->Perform(DRMOps::CONNECTOR_SET_OUTPUT_FB_ID, cwb_config_.token.conn_id, fb_id);

  // Set Connector Secure Mode to match the output buffer's secure flag.
  bool secure = output_buffer->flags.secure;
  DRMSecureMode mode = secure ? DRMSecureMode::SECURE : DRMSecureMode::NON_SECURE;
  drm_atomic_intf_->Perform(DRMOps::CONNECTOR_SET_FB_SECURE_MODE, cwb_config_.token.conn_id, mode);

  // Set Connector Output Rect to the full resolution of the current mode.
  sde_drm::DRMRect dst = {};
  dst.left = 0;
  dst.top = 0;
  dst.right = display_attributes_[current_mode_index_].x_pixels;
  dst.bottom = display_attributes_[current_mode_index_].y_pixels;
  drm_atomic_intf_->Perform(DRMOps::CONNECTOR_SET_OUTPUT_RECT, cwb_config_.token.conn_id, dst);
}
338
339void HWPeripheralDRM::PostCommitConcurrentWriteback(LayerBuffer *output_buffer) {
340 bool enabled = hw_resource_.has_concurrent_writeback && output_buffer;
341
Prabhanjan Kandula9e9c2322018-06-06 18:07:15 -0700342 if (!enabled) {
Sushil Chauhan06521582018-03-19 14:00:23 -0700343 drm_mgr_intf_->UnregisterDisplay(cwb_config_.token);
344 cwb_config_.enabled = false;
345 }
346}
347
Ramkumar Radhakrishnanf985d482018-07-23 18:10:41 -0700348DisplayError HWPeripheralDRM::ControlIdlePowerCollapse(bool enable, bool synchronous) {
349 sde_drm::DRMIdlePCState idle_pc_state =
350 enable ? sde_drm::DRMIdlePCState::ENABLE : sde_drm::DRMIdlePCState::DISABLE;
351 if (idle_pc_state == idle_pc_state_) {
352 return kErrorNone;
353 }
354 // As idle PC is disabled after subsequent commit, Make sure to have synchrounous commit and
355 // ensure TA accesses the display_cc registers after idle PC is disabled.
356 idle_pc_state_ = idle_pc_state;
357 synchronous_commit_ = !enable ? synchronous : false;
358 return kErrorNone;
359}
360
361DisplayError HWPeripheralDRM::PowerOn(const HWQosData &qos_data, int *release_fence) {
362 DTRACE_SCOPED();
363 if (!drm_atomic_intf_) {
364 DLOGE("DRM Atomic Interface is null!");
365 return kErrorUndefined;
366 }
367
368 if (first_cycle_) {
369 return kErrorNone;
370 }
371 drm_atomic_intf_->Perform(sde_drm::DRMOps::CRTC_SET_IDLE_PC_STATE, token_.crtc_id,
372 sde_drm::DRMIdlePCState::ENABLE);
373 DisplayError err = HWDeviceDRM::PowerOn(qos_data, release_fence);
374 if (err != kErrorNone) {
375 return err;
376 }
377 idle_pc_state_ = sde_drm::DRMIdlePCState::ENABLE;
378
379 return kErrorNone;
380}
381
Saurabh Shah82b06f42017-09-06 16:43:49 -0700382} // namespace sdm