/*
 * Copyright © 2007 David Airlie
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice (including the next
 * paragraph) shall be included in all copies or substantial portions of the
 * Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
 * DEALINGS IN THE SOFTWARE.
 *
 * Authors:
 *     David Airlie
 */
#include <linux/module.h>
#include <linux/slab.h>
#include <linux/fb.h>
#include <linux/pm_runtime.h>

#include <drm/drmP.h>
#include <drm/drm_crtc.h>
#include <drm/drm_crtc_helper.h>
#include <drm/amdgpu_drm.h>
#include "amdgpu.h"
#include "cikd.h"

#include <drm/drm_fb_helper.h>

#include <linux/vga_switcheroo.h>

/*
 * Object hierarchy:
 * this contains a DRM fb helper and an amdgpu framebuffer;
 * the helper holds a pointer to the amdgpu framebuffer base class.
 */
struct amdgpu_fbdev {
	struct drm_fb_helper helper;
	struct amdgpu_framebuffer rfb;
	struct amdgpu_device *adev;
};

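/*
 * Take a runtime PM reference for as long as the fbdev node is open, so the
 * GPU stays powered up while fbcon or user space uses it.  -EACCES means
 * runtime PM is disabled for this device and is not treated as an error.
 */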
static int
amdgpufb_open(struct fb_info *info, int user)
{
	struct amdgpu_fbdev *rfbdev = info->par;
	struct amdgpu_device *adev = rfbdev->adev;
	int ret = pm_runtime_get_sync(adev->ddev->dev);

	if (ret < 0 && ret != -EACCES) {
		pm_runtime_mark_last_busy(adev->ddev->dev);
		pm_runtime_put_autosuspend(adev->ddev->dev);
		return ret;
	}
	return 0;
}

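/* Drop the runtime PM reference taken in amdgpufb_open(). */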
static int
amdgpufb_release(struct fb_info *info, int user)
{
	struct amdgpu_fbdev *rfbdev = info->par;
	struct amdgpu_device *adev = rfbdev->adev;

	pm_runtime_mark_last_busy(adev->ddev->dev);
	pm_runtime_put_autosuspend(adev->ddev->dev);
	return 0;
}

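/*
 * fbdev ops: open/release manage the runtime PM reference; everything else
 * is delegated to the generic DRM fb helpers.
 */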
static struct fb_ops amdgpufb_ops = {
	.owner = THIS_MODULE,
	.fb_open = amdgpufb_open,
	.fb_release = amdgpufb_release,
	.fb_check_var = drm_fb_helper_check_var,
	.fb_set_par = drm_fb_helper_set_par,
	.fb_fillrect = drm_fb_helper_cfb_fillrect,
	.fb_copyarea = drm_fb_helper_cfb_copyarea,
	.fb_imageblit = drm_fb_helper_cfb_imageblit,
	.fb_pan_display = drm_fb_helper_pan_display,
	.fb_blank = drm_fb_helper_blank,
	.fb_setcmap = drm_fb_helper_setcmap,
	.fb_debug_enter = drm_fb_helper_debug_enter,
	.fb_debug_leave = drm_fb_helper_debug_leave,
};

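/*
 * Align a framebuffer pitch (in pixels) to the display hardware's
 * requirements for the given bits per pixel: 256-pixel alignment for
 * 8 bpp, 128 for 16 bpp and 64 for 24/32 bpp.
 */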
int amdgpu_align_pitch(struct amdgpu_device *adev, int width, int bpp, bool tiled)
{
	int aligned = width;
	int pitch_mask = 0;

	switch (bpp / 8) {
	case 1:
		pitch_mask = 255;
		break;
	case 2:
		pitch_mask = 127;
		break;
	case 3:
	case 4:
		pitch_mask = 63;
		break;
	}

	aligned += pitch_mask;
	aligned &= ~pitch_mask;
	return aligned;
}

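/* Unmap, unpin and drop the reference on the buffer object backing fbcon. */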
static void amdgpufb_destroy_pinned_object(struct drm_gem_object *gobj)
{
	struct amdgpu_bo *rbo = gem_to_amdgpu_bo(gobj);
	int ret;

	ret = amdgpu_bo_reserve(rbo, false);
	if (likely(ret == 0)) {
		amdgpu_bo_kunmap(rbo);
		amdgpu_bo_unpin(rbo);
		amdgpu_bo_unreserve(rbo);
	}
	drm_gem_object_unreference_unlocked(gobj);
}

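/*
 * Allocate a CPU-accessible VRAM buffer object large enough for the
 * requested mode, optionally set tiling flags, then pin it in VRAM and
 * kmap it so fbcon can draw into it.
 */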
static int amdgpufb_create_pinned_object(struct amdgpu_fbdev *rfbdev,
					 struct drm_mode_fb_cmd2 *mode_cmd,
					 struct drm_gem_object **gobj_p)
{
	struct amdgpu_device *adev = rfbdev->adev;
	struct drm_gem_object *gobj = NULL;
	struct amdgpu_bo *rbo = NULL;
	bool fb_tiled = false; /* useful for testing */
	u32 tiling_flags = 0;
	int ret;
	int aligned_size, size;
	int height = mode_cmd->height;
	u32 bpp, depth;

	drm_fb_get_bpp_depth(mode_cmd->pixel_format, &depth, &bpp);

	/* need to align pitch with crtc limits */
	mode_cmd->pitches[0] = amdgpu_align_pitch(adev, mode_cmd->width, bpp,
						  fb_tiled) * ((bpp + 1) / 8);

	height = ALIGN(mode_cmd->height, 8);
	size = mode_cmd->pitches[0] * height;
	aligned_size = ALIGN(size, PAGE_SIZE);
	ret = amdgpu_gem_object_create(adev, aligned_size, 0,
				       AMDGPU_GEM_DOMAIN_VRAM,
				       AMDGPU_GEM_CREATE_CPU_ACCESS_REQUIRED,
				       true, &gobj);
	if (ret) {
		printk(KERN_ERR "failed to allocate framebuffer (%d)\n",
		       aligned_size);
		return -ENOMEM;
	}
	rbo = gem_to_amdgpu_bo(gobj);

	if (fb_tiled)
		tiling_flags = AMDGPU_TILING_SET(ARRAY_MODE, GRPH_ARRAY_2D_TILED_THIN1);

	ret = amdgpu_bo_reserve(rbo, false);
	if (unlikely(ret != 0))
		goto out_unref;

	if (tiling_flags) {
		ret = amdgpu_bo_set_tiling_flags(rbo,
						 tiling_flags);
		if (ret)
			dev_err(adev->dev, "FB failed to set tiling flags\n");
	}

	ret = amdgpu_bo_pin_restricted(rbo, AMDGPU_GEM_DOMAIN_VRAM, 0, 0, NULL);
	if (ret) {
		amdgpu_bo_unreserve(rbo);
		goto out_unref;
	}
	ret = amdgpu_bo_kmap(rbo, NULL);
	amdgpu_bo_unreserve(rbo);
	if (ret) {
		goto out_unref;
	}

	*gobj_p = gobj;
	return 0;
out_unref:
	amdgpufb_destroy_pinned_object(gobj);
	*gobj_p = NULL;
	return ret;
}

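/*
 * .fb_probe callback: build the emulated fbdev.  Allocates and pins the
 * backing buffer object, wraps it in an amdgpu_framebuffer and fills in
 * the fb_info fix/var/aperture data that fbcon needs.
 */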
static int amdgpufb_create(struct drm_fb_helper *helper,
			   struct drm_fb_helper_surface_size *sizes)
{
	struct amdgpu_fbdev *rfbdev = (struct amdgpu_fbdev *)helper;
	struct amdgpu_device *adev = rfbdev->adev;
	struct fb_info *info;
	struct drm_framebuffer *fb = NULL;
	struct drm_mode_fb_cmd2 mode_cmd;
	struct drm_gem_object *gobj = NULL;
	struct amdgpu_bo *rbo = NULL;
	int ret;
	unsigned long tmp;

	mode_cmd.width = sizes->surface_width;
	mode_cmd.height = sizes->surface_height;

	if (sizes->surface_bpp == 24)
		sizes->surface_bpp = 32;

	mode_cmd.pixel_format = drm_mode_legacy_fb_format(sizes->surface_bpp,
							  sizes->surface_depth);

	ret = amdgpufb_create_pinned_object(rfbdev, &mode_cmd, &gobj);
	if (ret) {
		DRM_ERROR("failed to create fbcon object %d\n", ret);
		return ret;
	}

	rbo = gem_to_amdgpu_bo(gobj);

	/* okay we have an object now allocate the framebuffer */
	info = drm_fb_helper_alloc_fbi(helper);
	if (IS_ERR(info)) {
		ret = PTR_ERR(info);
		goto out_unref;
	}

	info->par = rfbdev;
	info->skip_vt_switch = true;

	ret = amdgpu_framebuffer_init(adev->ddev, &rfbdev->rfb, &mode_cmd, gobj);
	if (ret) {
		DRM_ERROR("failed to initialize framebuffer %d\n", ret);
		goto out_destroy_fbi;
	}

	fb = &rfbdev->rfb.base;

	/* setup helper */
	rfbdev->helper.fb = fb;

	memset_io(rbo->kptr, 0x0, amdgpu_bo_size(rbo));

	strcpy(info->fix.id, "amdgpudrmfb");

	drm_fb_helper_fill_fix(info, fb->pitches[0], fb->depth);

	info->flags = FBINFO_DEFAULT | FBINFO_CAN_FORCE_OUTPUT;
	info->fbops = &amdgpufb_ops;

	tmp = amdgpu_bo_gpu_offset(rbo) - adev->mc.vram_start;
	info->fix.smem_start = adev->mc.aper_base + tmp;
	info->fix.smem_len = amdgpu_bo_size(rbo);
	info->screen_base = rbo->kptr;
	info->screen_size = amdgpu_bo_size(rbo);

	drm_fb_helper_fill_var(info, &rfbdev->helper, sizes->fb_width, sizes->fb_height);

	/* setup aperture base/size for vesafb takeover */
	info->apertures->ranges[0].base = adev->ddev->mode_config.fb_base;
	info->apertures->ranges[0].size = adev->mc.aper_size;

	/* Use default scratch pixmap (info->pixmap.flags = FB_PIXMAP_SYSTEM) */

	if (info->screen_base == NULL) {
		ret = -ENOSPC;
		goto out_destroy_fbi;
	}

	DRM_INFO("fb mappable at 0x%lX\n", info->fix.smem_start);
	DRM_INFO("vram aperture at 0x%lX\n", (unsigned long)adev->mc.aper_base);
	DRM_INFO("size %lu\n", (unsigned long)amdgpu_bo_size(rbo));
	DRM_INFO("fb depth is %d\n", fb->depth);
	DRM_INFO("   pitch is %d\n", fb->pitches[0]);

	vga_switcheroo_client_fb_set(adev->ddev->pdev, info);
	return 0;

out_destroy_fbi:
	drm_fb_helper_release_fbi(helper);
out_unref:
	if (rbo) {

	}
	if (fb && ret) {
		drm_gem_object_unreference_unlocked(gobj);
		drm_framebuffer_unregister_private(fb);
		drm_framebuffer_cleanup(fb);
		kfree(fb);
	}
	return ret;
}

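/* Called when an output changes; forward the hotplug event to the fb helper. */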
void amdgpu_fb_output_poll_changed(struct amdgpu_device *adev)
{
	if (adev->mode_info.rfbdev)
		drm_fb_helper_hotplug_event(&adev->mode_info.rfbdev->helper);
}

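/*
 * Tear down the emulated fbdev: unregister the fb_info, release the pinned
 * buffer object and clean up the DRM framebuffer and fb helper.
 */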
static int amdgpu_fbdev_destroy(struct drm_device *dev, struct amdgpu_fbdev *rfbdev)
{
	struct amdgpu_framebuffer *rfb = &rfbdev->rfb;

	drm_fb_helper_unregister_fbi(&rfbdev->helper);
	drm_fb_helper_release_fbi(&rfbdev->helper);

	if (rfb->obj) {
		amdgpufb_destroy_pinned_object(rfb->obj);
		rfb->obj = NULL;
	}
	drm_fb_helper_fini(&rfbdev->helper);
	drm_framebuffer_unregister_private(&rfb->base);
	drm_framebuffer_cleanup(&rfb->base);

	return 0;
}

/** Sets the color ramps on behalf of fbcon */
static void amdgpu_crtc_fb_gamma_set(struct drm_crtc *crtc, u16 red, u16 green,
				     u16 blue, int regno)
{
	struct amdgpu_crtc *amdgpu_crtc = to_amdgpu_crtc(crtc);

	amdgpu_crtc->lut_r[regno] = red >> 6;
	amdgpu_crtc->lut_g[regno] = green >> 6;
	amdgpu_crtc->lut_b[regno] = blue >> 6;
}

/** Gets the color ramps on behalf of fbcon */
static void amdgpu_crtc_fb_gamma_get(struct drm_crtc *crtc, u16 *red, u16 *green,
				     u16 *blue, int regno)
{
	struct amdgpu_crtc *amdgpu_crtc = to_amdgpu_crtc(crtc);

	*red = amdgpu_crtc->lut_r[regno] << 6;
	*green = amdgpu_crtc->lut_g[regno] << 6;
	*blue = amdgpu_crtc->lut_b[regno] << 6;
}

static const struct drm_fb_helper_funcs amdgpu_fb_helper_funcs = {
	.gamma_set = amdgpu_crtc_fb_gamma_set,
	.gamma_get = amdgpu_crtc_fb_gamma_get,
	.fb_probe = amdgpufb_create,
};

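/*
 * Set up fbdev emulation at driver load: pick a console depth based on the
 * amount of VRAM, register the fb helper and create the initial config.
 */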
int amdgpu_fbdev_init(struct amdgpu_device *adev)
{
	struct amdgpu_fbdev *rfbdev;
	int bpp_sel = 32;
	int ret;

	/* don't init fbdev on hw without DCE */
	if (!adev->mode_info.mode_config_initialized)
		return 0;

	/* don't init fbdev if there are no connectors */
	if (list_empty(&adev->ddev->mode_config.connector_list))
		return 0;

	/* select 8 bpp console on low vram cards */
	if (adev->mc.real_vram_size <= (32*1024*1024))
		bpp_sel = 8;

	rfbdev = kzalloc(sizeof(struct amdgpu_fbdev), GFP_KERNEL);
	if (!rfbdev)
		return -ENOMEM;

	rfbdev->adev = adev;
	adev->mode_info.rfbdev = rfbdev;

	drm_fb_helper_prepare(adev->ddev, &rfbdev->helper,
			      &amdgpu_fb_helper_funcs);

	ret = drm_fb_helper_init(adev->ddev, &rfbdev->helper,
				 adev->mode_info.num_crtc,
				 AMDGPUFB_CONN_LIMIT);
	if (ret) {
		kfree(rfbdev);
		return ret;
	}

	drm_fb_helper_single_add_all_connectors(&rfbdev->helper);

	/* disable all the possible outputs/crtcs before entering KMS mode */
	drm_helper_disable_unused_functions(adev->ddev);

	drm_fb_helper_initial_config(&rfbdev->helper, bpp_sel);
	return 0;
}

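/* Tear down fbdev emulation at driver unload. */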
void amdgpu_fbdev_fini(struct amdgpu_device *adev)
{
	if (!adev->mode_info.rfbdev)
		return;

	amdgpu_fbdev_destroy(adev->ddev, adev->mode_info.rfbdev);
	kfree(adev->mode_info.rfbdev);
	adev->mode_info.rfbdev = NULL;
}

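/* Propagate device suspend/resume state to the fb helper. */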
void amdgpu_fbdev_set_suspend(struct amdgpu_device *adev, int state)
{
	if (adev->mode_info.rfbdev)
		drm_fb_helper_set_suspend(&adev->mode_info.rfbdev->helper,
					  state);
}

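/* Return the size in bytes of the buffer object backing the fbdev framebuffer. */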
int amdgpu_fbdev_total_size(struct amdgpu_device *adev)
{
	struct amdgpu_bo *robj;
	int size = 0;

	if (!adev->mode_info.rfbdev)
		return 0;

	robj = gem_to_amdgpu_bo(adev->mode_info.rfbdev->rfb.obj);
	size += amdgpu_bo_size(robj);
	return size;
}

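/* Check whether a buffer object is the one backing the fbdev framebuffer. */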
bool amdgpu_fbdev_robj_is_fb(struct amdgpu_device *adev, struct amdgpu_bo *robj)
{
	if (!adev->mode_info.rfbdev)
		return false;
	if (robj == gem_to_amdgpu_bo(adev->mode_info.rfbdev->rfb.obj))
		return true;
	return false;
}

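/* Restore the fbdev framebuffer configuration, e.g. after the last DRM client closes. */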
void amdgpu_fbdev_restore_mode(struct amdgpu_device *adev)
{
	struct amdgpu_fbdev *afbdev = adev->mode_info.rfbdev;
	struct drm_fb_helper *fb_helper;
	int ret;

	if (!afbdev)
		return;

	fb_helper = &afbdev->helper;

	ret = drm_fb_helper_restore_fbdev_mode_unlocked(fb_helper);
	if (ret)
		DRM_DEBUG("failed to restore crtc mode\n");
}