drivers/gpu/drm/vc4/vc4_plane.c
/*
 * Copyright (C) 2015 Broadcom
 *
 * This program is free software; you can redistribute it and/or modify
 * it under the terms of the GNU General Public License version 2 as
 * published by the Free Software Foundation.
 */

/**
 * DOC: VC4 plane module
 *
 * Each DRM plane is a layer of pixels being scanned out by the HVS.
 *
 * At atomic modeset check time, we compute the HVS display element
 * state that would be necessary for displaying the plane (giving us a
 * chance to figure out if a plane configuration is invalid), then at
 * atomic flush time the CRTC will ask us to write our element state
 * into the region of the HVS that it has allocated for us.
 */

#include "vc4_drv.h"
#include "vc4_regs.h"
#include "drm_atomic_helper.h"
#include "drm_fb_cma_helper.h"
#include "drm_plane_helper.h"

struct vc4_plane_state {
	struct drm_plane_state base;
	u32 *dlist;
	u32 dlist_size; /* Number of dwords allocated for the display list. */
	u32 dlist_count; /* Number of used dwords in the display list. */

	/* Offset in the dlist to pointer word 0. */
	u32 pw0_offset;

	/* Offset where the plane's dlist was last stored in the
	 * hardware at vc4_crtc_atomic_flush() time.
	 */
	u32 __iomem *hw_dlist;
};

static inline struct vc4_plane_state *
to_vc4_plane_state(struct drm_plane_state *state)
{
	return (struct vc4_plane_state *)state;
}

static const struct hvs_format {
	u32 drm; /* DRM_FORMAT_* */
	u32 hvs; /* HVS_PIXEL_FORMAT_* */
	u32 pixel_order;
	bool has_alpha;
} hvs_formats[] = {
	{
		.drm = DRM_FORMAT_XRGB8888, .hvs = HVS_PIXEL_FORMAT_RGBA8888,
		.pixel_order = HVS_PIXEL_ORDER_ABGR, .has_alpha = false,
	},
	{
		.drm = DRM_FORMAT_ARGB8888, .hvs = HVS_PIXEL_FORMAT_RGBA8888,
		.pixel_order = HVS_PIXEL_ORDER_ABGR, .has_alpha = true,
	},
};

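/* Returns the HVS format descriptor for a DRM fourcc, or NULL if the
 * format isn't supported by this driver.
 */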
static const struct hvs_format *vc4_get_hvs_format(u32 drm_format)
{
	unsigned i;

	for (i = 0; i < ARRAY_SIZE(hvs_formats); i++) {
		if (hvs_formats[i].drm == drm_format)
			return &hvs_formats[i];
	}

	return NULL;
}

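/* A plane is considered enabled when it has both a framebuffer and a
 * CRTC assigned.
 */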
static bool plane_enabled(struct drm_plane_state *state)
{
	return state->fb && state->crtc;
}

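/* Deep-copies the plane state, including our private copy of the dlist,
 * so that each atomic state owns its own dlist buffer.
 */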
static struct drm_plane_state *vc4_plane_duplicate_state(struct drm_plane *plane)
{
	struct vc4_plane_state *vc4_state;

	if (WARN_ON(!plane->state))
		return NULL;

	vc4_state = kmemdup(plane->state, sizeof(*vc4_state), GFP_KERNEL);
	if (!vc4_state)
		return NULL;

	__drm_atomic_helper_plane_duplicate_state(plane, &vc4_state->base);

	if (vc4_state->dlist) {
		vc4_state->dlist = kmemdup(vc4_state->dlist,
					   vc4_state->dlist_count * 4,
					   GFP_KERNEL);
		if (!vc4_state->dlist) {
			kfree(vc4_state);
			return NULL;
		}
		vc4_state->dlist_size = vc4_state->dlist_count;
	}

	return &vc4_state->base;
}

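/* Frees the deep-copied dlist along with the state itself. */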
static void vc4_plane_destroy_state(struct drm_plane *plane,
				    struct drm_plane_state *state)
{
	struct vc4_plane_state *vc4_state = to_vc4_plane_state(state);

	kfree(vc4_state->dlist);
	__drm_atomic_helper_plane_destroy_state(plane, &vc4_state->base);
	kfree(state);
}

/* Called during init to allocate the plane's atomic state. */
static void vc4_plane_reset(struct drm_plane *plane)
{
	struct vc4_plane_state *vc4_state;

	WARN_ON(plane->state);

	vc4_state = kzalloc(sizeof(*vc4_state), GFP_KERNEL);
	if (!vc4_state)
		return;

	plane->state = &vc4_state->base;
	vc4_state->base.plane = plane;
}

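/* Appends a 32-bit word to the CPU-side copy of the dlist, doubling the
 * allocation when it runs out of space.
 */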
static void vc4_dlist_write(struct vc4_plane_state *vc4_state, u32 val)
{
	if (vc4_state->dlist_count == vc4_state->dlist_size) {
		u32 new_size = max(4u, vc4_state->dlist_count * 2);
		u32 *new_dlist = kmalloc(new_size * 4, GFP_KERNEL);

		if (!new_dlist)
			return;
		memcpy(new_dlist, vc4_state->dlist, vc4_state->dlist_count * 4);

		kfree(vc4_state->dlist);
		vc4_state->dlist = new_dlist;
		vc4_state->dlist_size = new_size;
	}

	vc4_state->dlist[vc4_state->dlist_count++] = val;
}

/* Writes out a full display list for an active plane to the plane's
 * private dlist state.
 */
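/* The words emitted follow the HVS display element layout: control
 * word 0, position words 0 and 2 (word 1 is skipped in unity scaling
 * mode), a context word filled in by the hardware, then the pointer,
 * pointer context, and pitch words for the single RGB plane.
 */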
static int vc4_plane_mode_set(struct drm_plane *plane,
			      struct drm_plane_state *state)
{
	struct vc4_plane_state *vc4_state = to_vc4_plane_state(state);
	struct drm_framebuffer *fb = state->fb;
	struct drm_gem_cma_object *bo = drm_fb_cma_get_gem_obj(fb, 0);
	u32 ctl0_offset = vc4_state->dlist_count;
	const struct hvs_format *format = vc4_get_hvs_format(fb->pixel_format);
	uint32_t offset = fb->offsets[0];
	int crtc_x = state->crtc_x;
	int crtc_y = state->crtc_y;
	int crtc_w = state->crtc_w;
	int crtc_h = state->crtc_h;

	if (state->crtc_w << 16 != state->src_w ||
	    state->crtc_h << 16 != state->src_h) {
		/* We don't support scaling yet, which involves
		 * allocating the LBM memory for scaling temporary
		 * storage, and putting filter kernels in the HVS
		 * context.
		 */
		return -EINVAL;
	}

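	/* Clip against the left and top edges of the display by advancing
	 * the scanout start address within the framebuffer.
	 */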
	if (crtc_x < 0) {
		offset += drm_format_plane_cpp(fb->pixel_format, 0) * -crtc_x;
		crtc_w += crtc_x;
		crtc_x = 0;
	}

	if (crtc_y < 0) {
		offset += fb->pitches[0] * -crtc_y;
		crtc_h += crtc_y;
		crtc_y = 0;
	}

	vc4_dlist_write(vc4_state,
			SCALER_CTL0_VALID |
			(format->pixel_order << SCALER_CTL0_ORDER_SHIFT) |
			(format->hvs << SCALER_CTL0_PIXEL_FORMAT_SHIFT) |
			SCALER_CTL0_UNITY);

	/* Position Word 0: Image Positions and Alpha Value */
	vc4_dlist_write(vc4_state,
			VC4_SET_FIELD(0xff, SCALER_POS0_FIXED_ALPHA) |
			VC4_SET_FIELD(crtc_x, SCALER_POS0_START_X) |
			VC4_SET_FIELD(crtc_y, SCALER_POS0_START_Y));

	/* Position Word 1: Scaled Image Dimensions.
	 * Skipped due to SCALER_CTL0_UNITY scaling.
	 */

	/* Position Word 2: Source Image Size, Alpha Mode */
	vc4_dlist_write(vc4_state,
			VC4_SET_FIELD(format->has_alpha ?
				      SCALER_POS2_ALPHA_MODE_PIPELINE :
				      SCALER_POS2_ALPHA_MODE_FIXED,
				      SCALER_POS2_ALPHA_MODE) |
			VC4_SET_FIELD(crtc_w, SCALER_POS2_WIDTH) |
			VC4_SET_FIELD(crtc_h, SCALER_POS2_HEIGHT));

	/* Position Word 3: Context. Written by the HVS. */
	vc4_dlist_write(vc4_state, 0xc0c0c0c0);

	vc4_state->pw0_offset = vc4_state->dlist_count;

	/* Pointer Word 0: RGB / Y Pointer */
	vc4_dlist_write(vc4_state, bo->paddr + offset);

	/* Pointer Context Word 0: Written by the HVS */
	vc4_dlist_write(vc4_state, 0xc0c0c0c0);

	/* Pitch word 0: Pointer 0 Pitch */
	vc4_dlist_write(vc4_state,
			VC4_SET_FIELD(fb->pitches[0], SCALER_SRC_PITCH));

	vc4_state->dlist[ctl0_offset] |=
		VC4_SET_FIELD(vc4_state->dlist_count, SCALER_CTL0_SIZE);

	return 0;
}

/* If a modeset involves changing the setup of a plane, the atomic
 * infrastructure will call this to validate a proposed plane setup.
 * However, if a plane isn't getting updated, this (and the
 * corresponding vc4_plane_atomic_update) won't get called. Thus, we
 * compute the dlist here and have all active plane dlists get updated
 * in the CRTC's flush.
 */
static int vc4_plane_atomic_check(struct drm_plane *plane,
				  struct drm_plane_state *state)
{
	struct vc4_plane_state *vc4_state = to_vc4_plane_state(state);

	vc4_state->dlist_count = 0;

	if (plane_enabled(state))
		return vc4_plane_mode_set(plane, state);
	else
		return 0;
}

static void vc4_plane_atomic_update(struct drm_plane *plane,
				    struct drm_plane_state *old_state)
{
	/* No contents here. Since we don't know where in the CRTC's
	 * dlist we should be stored, our dlist is uploaded to the
	 * hardware with vc4_plane_write_dlist() at CRTC atomic_flush
	 * time.
	 */
}

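/* Called by the CRTC at atomic_flush time: copies the plane's dlist into
 * the HVS memory the CRTC allocated for it, remembers the hardware
 * location so later async updates can patch it in place, and returns the
 * number of dwords written.
 */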
u32 vc4_plane_write_dlist(struct drm_plane *plane, u32 __iomem *dlist)
{
	struct vc4_plane_state *vc4_state = to_vc4_plane_state(plane->state);
	int i;

	vc4_state->hw_dlist = dlist;

	/* Can't memcpy_toio() because it needs to be 32-bit writes. */
	for (i = 0; i < vc4_state->dlist_count; i++)
		writel(vc4_state->dlist[i], &dlist[i]);

	return vc4_state->dlist_count;
}

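/* Returns the number of dlist dwords the CRTC needs to reserve for this
 * plane's current state.
 */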
u32 vc4_plane_dlist_size(struct drm_plane_state *state)
{
	struct vc4_plane_state *vc4_state = to_vc4_plane_state(state);

	return vc4_state->dlist_count;
}

/* Updates the plane to immediately (well, once the FIFO needs
 * refilling) scan out from a new framebuffer.
 */
void vc4_plane_async_set_fb(struct drm_plane *plane, struct drm_framebuffer *fb)
{
	struct vc4_plane_state *vc4_state = to_vc4_plane_state(plane->state);
	struct drm_gem_cma_object *bo = drm_fb_cma_get_gem_obj(fb, 0);
	uint32_t addr;

	/* We're skipping the address adjustment for negative origin,
	 * because this is only called on the primary plane.
	 */
	WARN_ON_ONCE(plane->state->crtc_x < 0 || plane->state->crtc_y < 0);
	addr = bo->paddr + fb->offsets[0];

	/* Write the new address into the hardware immediately. The
	 * scanout will start from this address as soon as the FIFO
	 * needs to refill with pixels.
	 */
	writel(addr, &vc4_state->hw_dlist[vc4_state->pw0_offset]);

	/* Also update the CPU-side dlist copy, so that any later
	 * atomic updates that don't do a new modeset on our plane
	 * also use our updated address.
	 */
	vc4_state->dlist[vc4_state->pw0_offset] = addr;
}

static const struct drm_plane_helper_funcs vc4_plane_helper_funcs = {
	.prepare_fb = NULL,
	.cleanup_fb = NULL,
	.atomic_check = vc4_plane_atomic_check,
	.atomic_update = vc4_plane_atomic_update,
};

static void vc4_plane_destroy(struct drm_plane *plane)
{
	drm_plane_helper_disable(plane);
	drm_plane_cleanup(plane);
}

static const struct drm_plane_funcs vc4_plane_funcs = {
	.update_plane = drm_atomic_helper_update_plane,
	.disable_plane = drm_atomic_helper_disable_plane,
	.destroy = vc4_plane_destroy,
	.set_property = NULL,
	.reset = vc4_plane_reset,
	.atomic_duplicate_state = vc4_plane_duplicate_state,
	.atomic_destroy_state = vc4_plane_destroy_state,
};

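/* Creates a plane of the given type, exposing the formats in hvs_formats
 * and hooking up the atomic helpers above.
 */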
struct drm_plane *vc4_plane_init(struct drm_device *dev,
				 enum drm_plane_type type)
{
	struct drm_plane *plane = NULL;
	struct vc4_plane *vc4_plane;
	u32 formats[ARRAY_SIZE(hvs_formats)];
	int ret = 0;
	unsigned i;

	vc4_plane = devm_kzalloc(dev->dev, sizeof(*vc4_plane),
				 GFP_KERNEL);
	if (!vc4_plane) {
		ret = -ENOMEM;
		goto fail;
	}

	for (i = 0; i < ARRAY_SIZE(hvs_formats); i++)
		formats[i] = hvs_formats[i].drm;
	plane = &vc4_plane->base;
	ret = drm_universal_plane_init(dev, plane, 0xff,
				       &vc4_plane_funcs,
				       formats, ARRAY_SIZE(formats),
				       type, NULL);

	drm_plane_helper_add(plane, &vc4_plane_helper_funcs);

	return plane;
fail:
	if (plane)
		vc4_plane_destroy(plane);

	return ERR_PTR(ret);
}