/*
 * Copyright (C) 2015 Broadcom
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License version 2 as
 * published by the Free Software Foundation.
 */

#include "drmP.h"
#include "drm_gem_cma_helper.h"

struct vc4_dev {
	struct drm_device *dev;

	struct vc4_hdmi *hdmi;
	struct vc4_hvs *hvs;
	struct vc4_crtc *crtc[3];
	struct vc4_v3d *v3d;

	struct drm_fbdev_cma *fbdev;

	/* The kernel-space BO cache. Tracks buffers that have been
	 * unreferenced by all other users (refcounts of 0!) but not
	 * yet freed, so we can do cheap allocations.
	 */
	struct vc4_bo_cache {
		/* Array of list heads for entries in the BO cache,
		 * based on number of pages, so we can do O(1) lookups
		 * in the cache when allocating.
		 */
		struct list_head *size_list;
		uint32_t size_list_size;

		/* List of all BOs in the cache, ordered by age, so we
		 * can do O(1) lookups when trying to free old
		 * buffers.
		 */
		struct list_head time_list;
		struct work_struct time_work;
		struct timer_list time_timer;
	} bo_cache;

	struct vc4_bo_stats {
		u32 num_allocated;
		u32 size_allocated;
		u32 num_cached;
		u32 size_cached;
	} bo_stats;

	/* Protects bo_cache and the BO stats. */
	struct mutex bo_lock;
};
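
/*
 * Illustrative sketch only (not part of the driver): probing the
 * size-bucketed BO cache above in O(1). The helper name and the direct
 * pages-to-bucket mapping are assumptions; the real lookup lives in
 * vc4_bo.c and must run with bo_lock held.
 */
static inline bool vc4_bo_cache_bucket_has_entry(struct vc4_dev *vc4,
						 size_t size)
{
	uint32_t page_index = size / PAGE_SIZE;

	/* Sizes beyond the tracked buckets can't be in the cache. */
	if (page_index >= vc4->bo_cache.size_list_size)
		return false;

	/* Each bucket holds cached BOs with exactly that page count. */
	return !list_empty(&vc4->bo_cache.size_list[page_index]);
}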

static inline struct vc4_dev *
to_vc4_dev(struct drm_device *dev)
{
	return (struct vc4_dev *)dev->dev_private;
}

struct vc4_bo {
	struct drm_gem_cma_object base;

	/* List entry for the BO's position in either
	 * vc4_exec_info->unref_list or vc4_dev->bo_cache.time_list
	 */
	struct list_head unref_head;

	/* Time in jiffies when the BO was put in vc4->bo_cache. */
	unsigned long free_time;

	/* List entry for the BO's position in vc4_dev->bo_cache.size_list */
	struct list_head size_head;

	/* Struct for shader validation state, if created by
	 * DRM_IOCTL_VC4_CREATE_SHADER_BO.
	 */
	struct vc4_validated_shader_info *validated_shader;
};
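
/*
 * Illustrative sketch only: parking an idle BO in the cache, recording its
 * age and linking it into both lists described above. The helper name and
 * the direct pages-to-bucket mapping are assumptions; the real logic lives
 * in vc4_bo.c and runs with vc4->bo_lock held.
 */
static inline void vc4_bo_cache_park_sketch(struct vc4_dev *vc4,
					    struct vc4_bo *bo)
{
	uint32_t page_index = bo->base.base.size / PAGE_SIZE;

	/* A real implementation would grow size_list instead of bailing. */
	if (page_index >= vc4->bo_cache.size_list_size)
		return;

	/* Remember when the BO went idle so stale entries can be purged. */
	bo->free_time = jiffies;

	/* Newest entries go to the tail; the oldest age out from the head. */
	list_add_tail(&bo->unref_head, &vc4->bo_cache.time_list);
	list_add(&bo->size_head, &vc4->bo_cache.size_list[page_index]);
}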

static inline struct vc4_bo *
to_vc4_bo(struct drm_gem_object *bo)
{
	return (struct vc4_bo *)bo;
}

struct vc4_v3d {
	struct platform_device *pdev;
	void __iomem *regs;
};

struct vc4_hvs {
	struct platform_device *pdev;
	void __iomem *regs;
	void __iomem *dlist;
};

struct vc4_plane {
	struct drm_plane base;
};

static inline struct vc4_plane *
to_vc4_plane(struct drm_plane *plane)
{
	return (struct vc4_plane *)plane;
}

enum vc4_encoder_type {
	VC4_ENCODER_TYPE_HDMI,
	VC4_ENCODER_TYPE_VEC,
	VC4_ENCODER_TYPE_DSI0,
	VC4_ENCODER_TYPE_DSI1,
	VC4_ENCODER_TYPE_SMI,
	VC4_ENCODER_TYPE_DPI,
};

struct vc4_encoder {
	struct drm_encoder base;
	enum vc4_encoder_type type;
	u32 clock_select;
};

static inline struct vc4_encoder *
to_vc4_encoder(struct drm_encoder *encoder)
{
	return container_of(encoder, struct vc4_encoder, base);
}

#define V3D_READ(offset) readl(vc4->v3d->regs + offset)
#define V3D_WRITE(offset, val) writel(val, vc4->v3d->regs + offset)
#define HVS_READ(offset) readl(vc4->hvs->regs + offset)
#define HVS_WRITE(offset, val) writel(val, vc4->hvs->regs + offset)

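/*
 * Example only (not part of the driver): the accessors above expect a local
 * variable named "vc4" to be in scope at the call site. The function name
 * here is hypothetical.
 */
static inline u32 vc4_read_hvs_reg_example(struct drm_device *dev, u32 offset)
{
	struct vc4_dev *vc4 = to_vc4_dev(dev);

	/* HVS_READ() picks up the "vc4" local implicitly. */
	return HVS_READ(offset);
}
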
/**
 * struct vc4_texture_sample_info - saves the offsets into the UBO for texture
 * setup parameters.
 *
 * This will be used at draw time to relocate the reference to the texture
 * contents in p0, and validate that the offset combined with
 * width/height/stride/etc. from p1 and p2/p3 doesn't sample outside the BO.
 * Note that the hardware treats unprovided config parameters as 0, so not all
 * of them need to be set up for every texture sample, and we'll store ~0 as
 * the offset to mark the unused ones.
 *
 * See the VC4 3D architecture guide page 41 ("Texture and Memory Lookup Unit
 * Setup") for definitions of the texture parameters.
 */
struct vc4_texture_sample_info {
	bool is_direct;
	uint32_t p_offset[4];
};

/**
 * struct vc4_validated_shader_info - information about validated shaders that
 * needs to be used from command list validation.
 *
 * For a given shader, each time a shader state record references it, we need
 * to verify that the shader doesn't read more uniforms than the shader state
 * record's uniform BO pointer can provide, and we need to apply relocations
 * and validate the shader state record's uniforms that define the texture
 * samples.
 */
struct vc4_validated_shader_info {
	uint32_t uniforms_size;
	uint32_t uniforms_src_size;
	uint32_t num_texture_samples;
	struct vc4_texture_sample_info *texture_samples;
};

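/*
 * Illustrative sketch only: walking the texture samples recorded by shader
 * validation and skipping parameter slots stored as ~0 (i.e. not provided),
 * as described above. The helper name is hypothetical; the real relocation
 * code lives in the command list validator.
 */
static inline uint32_t
vc4_count_used_texture_params(const struct vc4_validated_shader_info *info)
{
	uint32_t count = 0;
	uint32_t i, j;

	for (i = 0; i < info->num_texture_samples; i++) {
		const struct vc4_texture_sample_info *sample =
			&info->texture_samples[i];

		for (j = 0; j < ARRAY_SIZE(sample->p_offset); j++) {
			/* ~0 marks a texture parameter that wasn't set up. */
			if (sample->p_offset[j] != ~0u)
				count++;
		}
	}

	return count;
}
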
/**
 * _wait_for - magic (register) wait macro
 *
 * Does the right thing for modeset paths when run under kgdb or similar atomic
 * contexts. Note that it's important that we check the condition again after
 * having timed out, since the timeout could be due to preemption or similar and
 * we've never had a chance to check the condition before the timeout.
 */
#define _wait_for(COND, MS, W) ({ \
	unsigned long timeout__ = jiffies + msecs_to_jiffies(MS) + 1;	\
	int ret__ = 0;							\
	while (!(COND)) {						\
		if (time_after(jiffies, timeout__)) {			\
			if (!(COND))					\
				ret__ = -ETIMEDOUT;			\
			break;						\
		}							\
		if (W && drm_can_sleep()) {				\
			msleep(W);					\
		} else {						\
			cpu_relax();					\
		}							\
	}								\
	ret__;								\
})

#define wait_for(COND, MS) _wait_for(COND, MS, 1)

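/*
 * Example only (not part of the driver): a typical wait_for() call, polling
 * an HVS register until the given bits read back as set or the timeout (in
 * milliseconds) expires. The helper name is hypothetical.
 */
static inline int vc4_wait_for_hvs_bits_example(struct drm_device *dev,
						u32 offset, u32 bits,
						int timeout_ms)
{
	struct vc4_dev *vc4 = to_vc4_dev(dev);

	/* Returns 0 on success, or -ETIMEDOUT if the bits never came up. */
	return wait_for((HVS_READ(offset) & bits) == bits, timeout_ms);
}
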
/* vc4_bo.c */
struct drm_gem_object *vc4_create_object(struct drm_device *dev, size_t size);
void vc4_free_object(struct drm_gem_object *gem_obj);
struct vc4_bo *vc4_bo_create(struct drm_device *dev, size_t size,
			     bool from_cache);
int vc4_dumb_create(struct drm_file *file_priv,
		    struct drm_device *dev,
		    struct drm_mode_create_dumb *args);
struct dma_buf *vc4_prime_export(struct drm_device *dev,
				 struct drm_gem_object *obj, int flags);
int vc4_create_bo_ioctl(struct drm_device *dev, void *data,
			struct drm_file *file_priv);
int vc4_create_shader_bo_ioctl(struct drm_device *dev, void *data,
			       struct drm_file *file_priv);
int vc4_mmap_bo_ioctl(struct drm_device *dev, void *data,
		      struct drm_file *file_priv);
int vc4_mmap(struct file *filp, struct vm_area_struct *vma);
int vc4_prime_mmap(struct drm_gem_object *obj, struct vm_area_struct *vma);
void *vc4_prime_vmap(struct drm_gem_object *obj);
void vc4_bo_cache_init(struct drm_device *dev);
void vc4_bo_cache_destroy(struct drm_device *dev);
int vc4_bo_stats_debugfs(struct seq_file *m, void *arg);

/* vc4_crtc.c */
extern struct platform_driver vc4_crtc_driver;
int vc4_enable_vblank(struct drm_device *dev, unsigned int crtc_id);
void vc4_disable_vblank(struct drm_device *dev, unsigned int crtc_id);
void vc4_cancel_page_flip(struct drm_crtc *crtc, struct drm_file *file);
int vc4_crtc_debugfs_regs(struct seq_file *m, void *arg);

/* vc4_debugfs.c */
int vc4_debugfs_init(struct drm_minor *minor);
void vc4_debugfs_cleanup(struct drm_minor *minor);

/* vc4_drv.c */
void __iomem *vc4_ioremap_regs(struct platform_device *dev, int index);

/* vc4_hdmi.c */
extern struct platform_driver vc4_hdmi_driver;
int vc4_hdmi_debugfs_regs(struct seq_file *m, void *unused);

/* vc4_hvs.c */
extern struct platform_driver vc4_hvs_driver;
void vc4_hvs_dump_state(struct drm_device *dev);
int vc4_hvs_debugfs_regs(struct seq_file *m, void *unused);

/* vc4_kms.c */
int vc4_kms_load(struct drm_device *dev);

/* vc4_plane.c */
struct drm_plane *vc4_plane_init(struct drm_device *dev,
				 enum drm_plane_type type);
u32 vc4_plane_write_dlist(struct drm_plane *plane, u32 __iomem *dlist);
u32 vc4_plane_dlist_size(struct drm_plane_state *state);

/* vc4_v3d.c */
extern struct platform_driver vc4_v3d_driver;
int vc4_v3d_debugfs_ident(struct seq_file *m, void *unused);
int vc4_v3d_debugfs_regs(struct seq_file *m, void *unused);

/* vc4_validate_shader.c */
struct vc4_validated_shader_info *
vc4_validate_shader(struct drm_gem_cma_object *shader_obj);