/**************************************************************************
 *
 * Copyright © 2009-2011 VMware, Inc., Palo Alto, CA., USA
 * All Rights Reserved.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the
 * "Software"), to deal in the Software without restriction, including
 * without limitation the rights to use, copy, modify, merge, publish,
 * distribute, sub license, and/or sell copies of the Software, and to
 * permit persons to whom the Software is furnished to do so, subject to
 * the following conditions:
 *
 * The above copyright notice and this permission notice (including the
 * next paragraph) shall be included in all copies or substantial portions
 * of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NON-INFRINGEMENT. IN NO EVENT SHALL
 * THE COPYRIGHT HOLDERS, AUTHORS AND/OR ITS SUPPLIERS BE LIABLE FOR ANY CLAIM,
 * DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
 * OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
 * USE OR OTHER DEALINGS IN THE SOFTWARE.
 *
 **************************************************************************/

#include "vmwgfx_drv.h"
#include <drm/drmP.h>
#include <drm/ttm/ttm_bo_driver.h>

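/*
 * Guest Memory Regions (GMRs) are bound and unbound by emitting
 * SVGA_CMD_DEFINE_GMR2 and SVGA_CMD_REMAP_GMR2 commands to the device
 * command FIFO.
 */
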
#define VMW_PPN_SIZE (sizeof(unsigned long))
/* A future safe maximum remap size. */
#define VMW_PPN_PER_REMAP ((31 * 1024) / VMW_PPN_SIZE)
#define DMA_ADDR_INVALID ((dma_addr_t) 0)
#define DMA_PAGE_INVALID 0UL

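/**
 * vmw_gmr2_bind - Bind a range of guest pages to a GMR2 id
 *
 * @dev_priv:  Pointer to the device private structure.
 * @iter:      Page iterator positioned at the first page to map.
 * @num_pages: Number of pages to map into the region.
 * @gmr_id:    Device id of the GMR to bind to.
 *
 * Emits a single SVGA_CMD_DEFINE_GMR2 command followed by as many
 * SVGA_CMD_REMAP_GMR2 commands as needed, since each remap command
 * carries at most VMW_PPN_PER_REMAP page numbers.
 *
 * Returns 0 on success, negative error code on failure.
 */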
static int vmw_gmr2_bind(struct vmw_private *dev_priv,
                         struct vmw_piter *iter,
                         unsigned long num_pages,
                         int gmr_id)
{
        SVGAFifoCmdDefineGMR2 define_cmd;
        SVGAFifoCmdRemapGMR2 remap_cmd;
        uint32_t *cmd;
        uint32_t *cmd_orig;
        uint32_t define_size = sizeof(define_cmd) + sizeof(*cmd);
        uint32_t remap_num = num_pages / VMW_PPN_PER_REMAP +
                ((num_pages % VMW_PPN_PER_REMAP) > 0);
        uint32_t remap_size = VMW_PPN_SIZE * num_pages +
                (sizeof(remap_cmd) + sizeof(*cmd)) * remap_num;
        uint32_t remap_pos = 0;
        uint32_t cmd_size = define_size + remap_size;
        uint32_t i;

        cmd_orig = cmd = vmw_fifo_reserve(dev_priv, cmd_size);
        if (unlikely(cmd == NULL))
                return -ENOMEM;

        define_cmd.gmrId = gmr_id;
        define_cmd.numPages = num_pages;

        *cmd++ = SVGA_CMD_DEFINE_GMR2;
        memcpy(cmd, &define_cmd, sizeof(define_cmd));
        cmd += sizeof(define_cmd) / sizeof(*cmd);

        /*
         * The command needs to be split if there are too many
         * pages going into the GMR.
         */

        remap_cmd.gmrId = gmr_id;
        remap_cmd.flags = (VMW_PPN_SIZE > sizeof(*cmd)) ?
                SVGA_REMAP_GMR2_PPN64 : SVGA_REMAP_GMR2_PPN32;

        while (num_pages > 0) {
                unsigned long nr = min(num_pages, (unsigned long)VMW_PPN_PER_REMAP);

                remap_cmd.offsetPages = remap_pos;
                remap_cmd.numPages = nr;

                *cmd++ = SVGA_CMD_REMAP_GMR2;
                memcpy(cmd, &remap_cmd, sizeof(remap_cmd));
                cmd += sizeof(remap_cmd) / sizeof(*cmd);

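                /*
                 * Write each page number as a 32-bit or 64-bit PPN,
                 * matching the PPN32/PPN64 flag selected above.
                 */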
                for (i = 0; i < nr; ++i) {
                        if (VMW_PPN_SIZE <= 4)
                                *cmd = vmw_piter_dma_addr(iter) >> PAGE_SHIFT;
                        else
                                *((uint64_t *)cmd) = vmw_piter_dma_addr(iter) >>
                                        PAGE_SHIFT;

                        cmd += VMW_PPN_SIZE / sizeof(*cmd);
                        vmw_piter_next(iter);
                }

                num_pages -= nr;
                remap_pos += nr;
        }

        BUG_ON(cmd != cmd_orig + cmd_size / sizeof(*cmd));

        vmw_fifo_commit(dev_priv, cmd_size);

        return 0;
}

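/**
 * vmw_gmr2_unbind - Release a GMR2 id on the device
 *
 * @dev_priv: Pointer to the device private structure.
 * @gmr_id:   Device id of the GMR to unbind.
 *
 * Redefines the GMR with zero pages, effectively unbinding it from
 * guest memory.
 */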
static void vmw_gmr2_unbind(struct vmw_private *dev_priv,
                            int gmr_id)
{
        SVGAFifoCmdDefineGMR2 define_cmd;
        uint32_t define_size = sizeof(define_cmd) + 4;
        uint32_t *cmd;

        cmd = vmw_fifo_reserve(dev_priv, define_size);
        if (unlikely(cmd == NULL)) {
                DRM_ERROR("GMR2 unbind failed.\n");
                return;
        }
        define_cmd.gmrId = gmr_id;
        define_cmd.numPages = 0;

        *cmd++ = SVGA_CMD_DEFINE_GMR2;
        memcpy(cmd, &define_cmd, sizeof(define_cmd));

        vmw_fifo_commit(dev_priv, define_size);
}

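/**
 * vmw_gmr_bind - Bind the pages described by a scatter/gather table to a GMR
 *
 * @dev_priv:  Pointer to the device private structure.
 * @vsgt:      Pointer to the scatter/gather table describing the pages.
 * @num_pages: Number of pages to bind.
 * @gmr_id:    Device id of the GMR to bind to.
 *
 * Returns 0 on success or if there are no pages to bind, -EINVAL if the
 * device lacks GMR2 support, or a negative error code from the GMR2 bind.
 */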
int vmw_gmr_bind(struct vmw_private *dev_priv,
                 const struct vmw_sg_table *vsgt,
                 unsigned long num_pages,
                 int gmr_id)
{
        struct vmw_piter data_iter;

        vmw_piter_start(&data_iter, vsgt, 0);

        if (unlikely(!vmw_piter_next(&data_iter)))
                return 0;

        if (unlikely(!(dev_priv->capabilities & SVGA_CAP_GMR2)))
                return -EINVAL;

        return vmw_gmr2_bind(dev_priv, &data_iter, num_pages, gmr_id);
}

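/**
 * vmw_gmr_unbind - Unbind a GMR id, if the device supports GMR2
 *
 * @dev_priv: Pointer to the device private structure.
 * @gmr_id:   Device id of the GMR to unbind.
 */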
void vmw_gmr_unbind(struct vmw_private *dev_priv, int gmr_id)
{
        if (likely(dev_priv->capabilities & SVGA_CAP_GMR2))
                vmw_gmr2_unbind(dev_priv, gmr_id);
}