drm/atomic: Remove drm_atomic_connectors_for_crtc.
[cascardo/linux.git] / drivers / gpu / drm / vc4 / vc4_crtc.c
1 /*
2  * Copyright (C) 2015 Broadcom
3  *
4  * This program is free software; you can redistribute it and/or modify
5  * it under the terms of the GNU General Public License version 2 as
6  * published by the Free Software Foundation.
7  */
8
9 /**
10  * DOC: VC4 CRTC module
11  *
12  * In VC4, the Pixel Valve is what most closely corresponds to the
13  * DRM's concept of a CRTC.  The PV generates video timings from the
14  * output's clock plus its configuration.  It pulls scaled pixels from
15  * the HVS at that timing, and feeds it to the encoder.
16  *
17  * However, the DRM CRTC also collects the configuration of all the
18  * DRM planes attached to it.  As a result, this file also manages
19  * setup of the VC4 HVS's display elements on the CRTC.
20  *
21  * The 2835 has 3 different pixel valves.  pv0 in the audio power
22  * domain feeds DSI0 or DPI, while pv1 feeds DSI1 or SMI.  pv2 in the
23  * image domain can feed either HDMI or the SDTV controller.  The
24  * pixel valve chooses from the CPRMAN clocks (HSM for HDMI, VEC for
25  * SDTV, etc.) according to which output type is chosen in the mux.
26  *
27  * For power management, the pixel valve's registers are all clocked
28  * by the AXI clock, while the timings and FIFOs make use of the
29  * output-specific clock.  Since the encoders also directly consume
30  * the CPRMAN clocks, and know what timings they need, they are the
31  * ones that set the clock.
32  */
33
34 #include "drm_atomic.h"
35 #include "drm_atomic_helper.h"
36 #include "drm_crtc_helper.h"
37 #include "linux/clk.h"
38 #include "drm_fb_cma_helper.h"
39 #include "linux/component.h"
40 #include "linux/of_device.h"
41 #include "vc4_drv.h"
42 #include "vc4_regs.h"
43
/* Per-pixelvalve CRTC state.  base is embedded (and must remain the
 * anchor for to_vc4_crtc()) so a drm_crtc pointer can be mapped back
 * to this struct.
 */
struct vc4_crtc {
	struct drm_crtc base;
	/* Static description (HVS channel, encoder mux) matched from
	 * the device tree; see vc4_crtc_dt_match.
	 */
	const struct vc4_crtc_data *data;
	/* Memory-mapped pixelvalve (PV) register block. */
	void __iomem *regs;

	/* Which HVS channel we're using for our CRTC. */
	int channel;

	/* Pointer to the actual hardware display list memory for the
	 * crtc.
	 */
	u32 __iomem *dlist;

	u32 dlist_size; /* in dwords */

	/* Pending page-flip completion event; handed over in
	 * atomic_flush and sent from the VFP-start interrupt.
	 * Protected by dev->event_lock.
	 */
	struct drm_pending_vblank_event *event;
};
61
62 static inline struct vc4_crtc *
63 to_vc4_crtc(struct drm_crtc *crtc)
64 {
65         return (struct vc4_crtc *)crtc;
66 }
67
/* Static, per-compatible description of one pixelvalve instance. */
struct vc4_crtc_data {
	/* Which channel of the HVS this pixelvalve sources from. */
	int hvs_channel;

	/* Encoder types reachable through the PV's output mux:
	 * clock_select 0 drives encoder0_type, 1 drives encoder1_type
	 * (see vc4_set_crtc_possible_masks()).
	 */
	enum vc4_encoder_type encoder0_type;
	enum vc4_encoder_type encoder1_type;
};
75
/* Pixelvalve register accessors.  NOTE: both macros expand against a
 * local variable named "vc4_crtc", which must be in scope at every
 * call site.
 */
#define CRTC_WRITE(offset, val) writel(val, vc4_crtc->regs + (offset))
#define CRTC_READ(offset) readl(vc4_crtc->regs + (offset))

/* Pairs a PV register offset with its stringified name, for the
 * register dumpers below.
 */
#define CRTC_REG(reg) { reg, #reg }
static const struct {
	u32 reg;
	const char *name;
} crtc_regs[] = {
	CRTC_REG(PV_CONTROL),
	CRTC_REG(PV_V_CONTROL),
	CRTC_REG(PV_VSYNCD),
	CRTC_REG(PV_HORZA),
	CRTC_REG(PV_HORZB),
	CRTC_REG(PV_VERTA),
	CRTC_REG(PV_VERTB),
	CRTC_REG(PV_VERTA_EVEN),
	CRTC_REG(PV_VERTB_EVEN),
	CRTC_REG(PV_INTEN),
	CRTC_REG(PV_INTSTAT),
	CRTC_REG(PV_STAT),
	CRTC_REG(PV_HACT_ACT),
};
98
99 static void vc4_crtc_dump_regs(struct vc4_crtc *vc4_crtc)
100 {
101         int i;
102
103         for (i = 0; i < ARRAY_SIZE(crtc_regs); i++) {
104                 DRM_INFO("0x%04x (%s): 0x%08x\n",
105                          crtc_regs[i].reg, crtc_regs[i].name,
106                          CRTC_READ(crtc_regs[i].reg));
107         }
108 }
109
#ifdef CONFIG_DEBUG_FS
/* debugfs hook: prints every named PV register for the CRTC whose
 * index was stashed in the drm_info_node's data pointer.  Silently
 * prints nothing if the index doesn't match any CRTC.
 */
int vc4_crtc_debugfs_regs(struct seq_file *m, void *unused)
{
	struct drm_info_node *node = (struct drm_info_node *)m->private;
	struct drm_device *dev = node->minor->dev;
	int crtc_index = (uintptr_t)node->info_ent->data;
	struct drm_crtc *crtc;
	struct vc4_crtc *vc4_crtc = NULL;
	int i;

	/* list_for_each_entry() never leaves the cursor NULL: when the
	 * index is out of range the loop terminates with "crtc"
	 * pointing at the list head container, so a NULL check on it
	 * is dead code.  Track whether we actually matched instead.
	 */
	i = 0;
	list_for_each_entry(crtc, &dev->mode_config.crtc_list, head) {
		if (i == crtc_index) {
			vc4_crtc = to_vc4_crtc(crtc);
			break;
		}
		i++;
	}
	if (!vc4_crtc)
		return 0;

	for (i = 0; i < ARRAY_SIZE(crtc_regs); i++) {
		seq_printf(m, "%s (0x%04x): 0x%08x\n",
			   crtc_regs[i].name, crtc_regs[i].reg,
			   CRTC_READ(crtc_regs[i].reg));
	}

	return 0;
}
#endif
139
/* drm_crtc_funcs.destroy: releases the DRM core's bookkeeping for the
 * CRTC.  The vc4_crtc container itself is devm-allocated in
 * vc4_crtc_bind(), so nothing else to free here.
 */
static void vc4_crtc_destroy(struct drm_crtc *crtc)
{
	drm_crtc_cleanup(crtc);
}
144
145 static u32 vc4_get_fifo_full_level(u32 format)
146 {
147         static const u32 fifo_len_bytes = 64;
148         static const u32 hvs_latency_pix = 6;
149
150         switch (format) {
151         case PV_CONTROL_FORMAT_DSIV_16:
152         case PV_CONTROL_FORMAT_DSIC_16:
153                 return fifo_len_bytes - 2 * hvs_latency_pix;
154         case PV_CONTROL_FORMAT_DSIV_18:
155                 return fifo_len_bytes - 14;
156         case PV_CONTROL_FORMAT_24:
157         case PV_CONTROL_FORMAT_DSIV_24:
158         default:
159                 return fifo_len_bytes - 3 * hvs_latency_pix;
160         }
161 }
162
/*
 * Returns the clock select bit for the connector attached to the
 * CRTC.
 */
static int vc4_get_clock_select(struct drm_crtc *crtc)
{
	struct drm_connector *connector;

	/* Find the connector whose atomic state points at this CRTC;
	 * its encoder's clock_select (set up in
	 * vc4_set_crtc_possible_masks()) picks the PV's clock mux.
	 */
	drm_for_each_connector(connector, crtc->dev) {
		if (connector->state->crtc == crtc) {
			struct drm_encoder *encoder = connector->encoder;
			struct vc4_encoder *vc4_encoder =
				to_vc4_encoder(encoder);

			return vc4_encoder->clock_select;
		}
	}

	/* No connector attached.  NOTE(review): the caller writes this
	 * -1 into PV_CONTROL_CLK_SELECT without checking — confirm a
	 * mode set can't happen without an attached connector.
	 */
	return -1;
}
183
184 static void vc4_crtc_mode_set_nofb(struct drm_crtc *crtc)
185 {
186         struct vc4_crtc *vc4_crtc = to_vc4_crtc(crtc);
187         struct drm_crtc_state *state = crtc->state;
188         struct drm_display_mode *mode = &state->adjusted_mode;
189         bool interlace = mode->flags & DRM_MODE_FLAG_INTERLACE;
190         u32 vactive = (mode->vdisplay >> (interlace ? 1 : 0));
191         u32 format = PV_CONTROL_FORMAT_24;
192         bool debug_dump_regs = false;
193         int clock_select = vc4_get_clock_select(crtc);
194
195         if (debug_dump_regs) {
196                 DRM_INFO("CRTC %d regs before:\n", drm_crtc_index(crtc));
197                 vc4_crtc_dump_regs(vc4_crtc);
198         }
199
200         /* Reset the PV fifo. */
201         CRTC_WRITE(PV_CONTROL, 0);
202         CRTC_WRITE(PV_CONTROL, PV_CONTROL_FIFO_CLR | PV_CONTROL_EN);
203         CRTC_WRITE(PV_CONTROL, 0);
204
205         CRTC_WRITE(PV_HORZA,
206                    VC4_SET_FIELD(mode->htotal - mode->hsync_end,
207                                  PV_HORZA_HBP) |
208                    VC4_SET_FIELD(mode->hsync_end - mode->hsync_start,
209                                  PV_HORZA_HSYNC));
210         CRTC_WRITE(PV_HORZB,
211                    VC4_SET_FIELD(mode->hsync_start - mode->hdisplay,
212                                  PV_HORZB_HFP) |
213                    VC4_SET_FIELD(mode->hdisplay, PV_HORZB_HACTIVE));
214
215         if (interlace) {
216                 CRTC_WRITE(PV_VERTA_EVEN,
217                            VC4_SET_FIELD(mode->vtotal - mode->vsync_end - 1,
218                                          PV_VERTA_VBP) |
219                            VC4_SET_FIELD(mode->vsync_end - mode->vsync_start,
220                                          PV_VERTA_VSYNC));
221                 CRTC_WRITE(PV_VERTB_EVEN,
222                            VC4_SET_FIELD(mode->vsync_start - mode->vdisplay,
223                                          PV_VERTB_VFP) |
224                            VC4_SET_FIELD(vactive, PV_VERTB_VACTIVE));
225         }
226
227         CRTC_WRITE(PV_HACT_ACT, mode->hdisplay);
228
229         CRTC_WRITE(PV_V_CONTROL,
230                    PV_VCONTROL_CONTINUOUS |
231                    (interlace ? PV_VCONTROL_INTERLACE : 0));
232
233         CRTC_WRITE(PV_CONTROL,
234                    VC4_SET_FIELD(format, PV_CONTROL_FORMAT) |
235                    VC4_SET_FIELD(vc4_get_fifo_full_level(format),
236                                  PV_CONTROL_FIFO_LEVEL) |
237                    PV_CONTROL_CLR_AT_START |
238                    PV_CONTROL_TRIGGER_UNDERFLOW |
239                    PV_CONTROL_WAIT_HSTART |
240                    VC4_SET_FIELD(clock_select, PV_CONTROL_CLK_SELECT) |
241                    PV_CONTROL_FIFO_CLR |
242                    PV_CONTROL_EN);
243
244         if (debug_dump_regs) {
245                 DRM_INFO("CRTC %d regs after:\n", drm_crtc_index(crtc));
246                 vc4_crtc_dump_regs(vc4_crtc);
247         }
248 }
249
/* Warns (once) if the HVS isn't enabled: the CRTC enable/disable
 * sequences below assume the scaler is already up and running.
 */
static void require_hvs_enabled(struct drm_device *dev)
{
	struct vc4_dev *vc4 = to_vc4_dev(dev);

	WARN_ON_ONCE((HVS_READ(SCALER_DISPCTRL) & SCALER_DISPCTRL_ENABLE) !=
		     SCALER_DISPCTRL_ENABLE);
}
257
/* drm_crtc_helper_funcs.disable: stops video output on the pixelvalve
 * and then resets the HVS channel feeding it.  Ordering matters — the
 * PV must stop consuming pixels before the scaler channel is reset.
 */
static void vc4_crtc_disable(struct drm_crtc *crtc)
{
	struct drm_device *dev = crtc->dev;
	struct vc4_dev *vc4 = to_vc4_dev(dev);
	struct vc4_crtc *vc4_crtc = to_vc4_crtc(crtc);
	u32 chan = vc4_crtc->channel;
	int ret;
	require_hvs_enabled(dev);

	/* Clear VIDEN and wait for the PV to report video stopped;
	 * warn rather than fail on timeout.
	 */
	CRTC_WRITE(PV_V_CONTROL,
		   CRTC_READ(PV_V_CONTROL) & ~PV_VCONTROL_VIDEN);
	ret = wait_for(!(CRTC_READ(PV_V_CONTROL) & PV_VCONTROL_VIDEN), 1);
	WARN_ONCE(ret, "Timeout waiting for !PV_VCONTROL_VIDEN\n");

	if (HVS_READ(SCALER_DISPCTRLX(chan)) &
	    SCALER_DISPCTRLX_ENABLE) {
		HVS_WRITE(SCALER_DISPCTRLX(chan),
			  SCALER_DISPCTRLX_RESET);

		/* While the docs say that reset is self-clearing, it
		 * seems it doesn't actually.
		 */
		HVS_WRITE(SCALER_DISPCTRLX(chan), 0);
	}

	/* Once we leave, the scaler should be disabled and its fifo empty. */

	WARN_ON_ONCE(HVS_READ(SCALER_DISPCTRLX(chan)) & SCALER_DISPCTRLX_RESET);

	WARN_ON_ONCE(VC4_GET_FIELD(HVS_READ(SCALER_DISPSTATX(chan)),
				   SCALER_DISPSTATX_MODE) !=
		     SCALER_DISPSTATX_MODE_DISABLED);

	WARN_ON_ONCE((HVS_READ(SCALER_DISPSTATX(chan)) &
		      (SCALER_DISPSTATX_FULL | SCALER_DISPSTATX_EMPTY)) !=
		     SCALER_DISPSTATX_EMPTY);
}
295
/* drm_crtc_helper_funcs.enable: starts the HVS channel, then the
 * pixelvalve.  The scaler waits for the PV's vstart signal before it
 * begins compositing, so this ordering is required.
 */
static void vc4_crtc_enable(struct drm_crtc *crtc)
{
	struct drm_device *dev = crtc->dev;
	struct vc4_dev *vc4 = to_vc4_dev(dev);
	struct vc4_crtc *vc4_crtc = to_vc4_crtc(crtc);
	struct drm_crtc_state *state = crtc->state;
	struct drm_display_mode *mode = &state->adjusted_mode;

	require_hvs_enabled(dev);

	/* Turn on the scaler, which will wait for vstart to start
	 * compositing.
	 */
	HVS_WRITE(SCALER_DISPCTRLX(vc4_crtc->channel),
		  VC4_SET_FIELD(mode->hdisplay, SCALER_DISPCTRLX_WIDTH) |
		  VC4_SET_FIELD(mode->vdisplay, SCALER_DISPCTRLX_HEIGHT) |
		  SCALER_DISPCTRLX_ENABLE);

	/* Turn on the pixel valve, which will emit the vstart signal. */
	CRTC_WRITE(PV_V_CONTROL,
		   CRTC_READ(PV_V_CONTROL) | PV_VCONTROL_VIDEN);
}
318
/* drm_crtc_helper_funcs.atomic_check: validates that the proposed
 * state fits the hardware — at most one connector, and a display list
 * small enough for the space this CRTC has in HVS dlist memory.
 * Returns 0 or -EINVAL.
 */
static int vc4_crtc_atomic_check(struct drm_crtc *crtc,
				 struct drm_crtc_state *state)
{
	struct drm_device *dev = crtc->dev;
	struct vc4_dev *vc4 = to_vc4_dev(dev);
	struct drm_plane *plane;
	struct vc4_crtc *vc4_crtc = to_vc4_crtc(crtc);
	u32 dlist_count = 0;

	/* The pixelvalve can only feed one encoder (and encoders are
	 * 1:1 with connectors.)
	 */
	if (hweight32(state->connector_mask) > 1)
		return -EINVAL;

	/* Sum up the dlist space each attached plane will need. */
	drm_atomic_crtc_state_for_each_plane(plane, state) {
		struct drm_plane_state *plane_state =
			state->state->plane_states[drm_plane_index(plane)];

		/* plane might not have changed, in which case take
		 * current state:
		 */
		if (!plane_state)
			plane_state = plane->state;

		dlist_count += vc4_plane_dlist_size(plane_state);
	}

	dlist_count++; /* Account for SCALER_CTL0_END. */

	/* If we've never allocated, or have run out of room past the
	 * cursor left by atomic_flush's bump allocator, restart from
	 * the top of dlist memory (just past the bootloader's list).
	 */
	if (!vc4_crtc->dlist || dlist_count > vc4_crtc->dlist_size) {
		vc4_crtc->dlist = ((u32 __iomem *)vc4->hvs->dlist +
				   HVS_BOOTLOADER_DLIST_END);
		vc4_crtc->dlist_size = ((SCALER_DLIST_SIZE >> 2) -
					HVS_BOOTLOADER_DLIST_END);

		if (dlist_count > vc4_crtc->dlist_size) {
			DRM_DEBUG_KMS("dlist too large for CRTC (%d > %d).\n",
				      dlist_count, vc4_crtc->dlist_size);
			return -EINVAL;
		}
	}

	return 0;
}
364
/* drm_crtc_helper_funcs.atomic_flush: writes the planes' display list
 * into HVS dlist memory, points the scaler at it, and arms the
 * pending vblank event for delivery from the interrupt handler.
 */
static void vc4_crtc_atomic_flush(struct drm_crtc *crtc,
				  struct drm_crtc_state *old_state)
{
	struct drm_device *dev = crtc->dev;
	struct vc4_dev *vc4 = to_vc4_dev(dev);
	struct vc4_crtc *vc4_crtc = to_vc4_crtc(crtc);
	struct drm_plane *plane;
	bool debug_dump_regs = false;
	u32 __iomem *dlist_next = vc4_crtc->dlist;

	if (debug_dump_regs) {
		DRM_INFO("CRTC %d HVS before:\n", drm_crtc_index(crtc));
		vc4_hvs_dump_state(dev);
	}

	/* Copy all the active planes' dlist contents to the hardware dlist.
	 *
	 * XXX: If the new display list was large enough that it
	 * overlapped a currently-read display list, we need to do
	 * something like disable scanout before putting in the new
	 * list.  For now, we're safe because we only have the two
	 * planes.
	 */
	drm_atomic_crtc_for_each_plane(plane, crtc) {
		dlist_next += vc4_plane_write_dlist(plane, dlist_next);
	}

	if (dlist_next == vc4_crtc->dlist) {
		/* If no planes were enabled, use the SCALER_CTL0_END
		 * at the start of the display list memory (in the
		 * bootloader section).  We'll rewrite that
		 * SCALER_CTL0_END, just in case, though.
		 */
		writel(SCALER_CTL0_END, vc4->hvs->dlist);
		HVS_WRITE(SCALER_DISPLISTX(vc4_crtc->channel), 0);
	} else {
		/* Terminate the list and point the scaler at its
		 * offset (in dwords) within dlist memory.
		 */
		writel(SCALER_CTL0_END, dlist_next);
		dlist_next++;

		HVS_WRITE(SCALER_DISPLISTX(vc4_crtc->channel),
			  (u32 __iomem *)vc4_crtc->dlist -
			  (u32 __iomem *)vc4->hvs->dlist);

		/* Make the next display list start after ours. */
		vc4_crtc->dlist_size -= (dlist_next - vc4_crtc->dlist);
		vc4_crtc->dlist = dlist_next;
	}

	if (debug_dump_regs) {
		DRM_INFO("CRTC %d HVS after:\n", drm_crtc_index(crtc));
		vc4_hvs_dump_state(dev);
	}

	/* Hand the completion event to the vblank IRQ path; the
	 * handoff happens under dev->event_lock, matching
	 * vc4_crtc_handle_page_flip().
	 */
	if (crtc->state->event) {
		unsigned long flags;

		crtc->state->event->pipe = drm_crtc_index(crtc);

		WARN_ON(drm_crtc_vblank_get(crtc) != 0);

		spin_lock_irqsave(&dev->event_lock, flags);
		vc4_crtc->event = crtc->state->event;
		spin_unlock_irqrestore(&dev->event_lock, flags);
		crtc->state->event = NULL;
	}
}
431
/* drm_driver.enable_vblank hook: unmasks the pixelvalve's VFP-start
 * interrupt, which is what drives vblank events for this CRTC.
 */
int vc4_enable_vblank(struct drm_device *dev, unsigned int crtc_id)
{
	struct vc4_dev *vc4 = to_vc4_dev(dev);
	struct vc4_crtc *vc4_crtc = vc4->crtc[crtc_id];

	CRTC_WRITE(PV_INTEN, PV_INT_VFP_START);

	return 0;
}
441
/* drm_driver.disable_vblank hook: masks all PV interrupts. */
void vc4_disable_vblank(struct drm_device *dev, unsigned int crtc_id)
{
	struct vc4_dev *vc4 = to_vc4_dev(dev);
	struct vc4_crtc *vc4_crtc = vc4->crtc[crtc_id];

	CRTC_WRITE(PV_INTEN, 0);
}
449
450 static void vc4_crtc_handle_page_flip(struct vc4_crtc *vc4_crtc)
451 {
452         struct drm_crtc *crtc = &vc4_crtc->base;
453         struct drm_device *dev = crtc->dev;
454         unsigned long flags;
455
456         spin_lock_irqsave(&dev->event_lock, flags);
457         if (vc4_crtc->event) {
458                 drm_crtc_send_vblank_event(crtc, vc4_crtc->event);
459                 vc4_crtc->event = NULL;
460         }
461         spin_unlock_irqrestore(&dev->event_lock, flags);
462 }
463
464 static irqreturn_t vc4_crtc_irq_handler(int irq, void *data)
465 {
466         struct vc4_crtc *vc4_crtc = data;
467         u32 stat = CRTC_READ(PV_INTSTAT);
468         irqreturn_t ret = IRQ_NONE;
469
470         if (stat & PV_INT_VFP_START) {
471                 CRTC_WRITE(PV_INTSTAT, PV_INT_VFP_START);
472                 drm_crtc_handle_vblank(&vc4_crtc->base);
473                 vc4_crtc_handle_page_flip(vc4_crtc);
474                 ret = IRQ_HANDLED;
475         }
476
477         return ret;
478 }
479
/* State carried from vc4_async_page_flip() to the seqno callback that
 * completes the flip once V3D is done rendering to the new BO.
 */
struct vc4_async_flip_state {
	struct drm_crtc *crtc;
	/* Framebuffer being flipped to; we hold a reference on it
	 * until the completion callback drops it.
	 */
	struct drm_framebuffer *fb;
	/* Optional userspace completion event, sent by the callback. */
	struct drm_pending_vblank_event *event;

	struct vc4_seqno_cb cb;
};
487
/* Called when the V3D execution for the BO being flipped to is done, so that
 * we can actually update the plane's address to point to it.
 */
static void
vc4_async_page_flip_complete(struct vc4_seqno_cb *cb)
{
	struct vc4_async_flip_state *flip_state =
		container_of(cb, struct vc4_async_flip_state, cb);
	struct drm_crtc *crtc = flip_state->crtc;
	struct drm_device *dev = crtc->dev;
	struct vc4_dev *vc4 = to_vc4_dev(dev);
	struct drm_plane *plane = crtc->primary;

	vc4_plane_async_set_fb(plane, flip_state->fb);
	/* Deliver the userspace completion event, if one was requested. */
	if (flip_state->event) {
		unsigned long flags;

		spin_lock_irqsave(&dev->event_lock, flags);
		drm_crtc_send_vblank_event(crtc, flip_state->event);
		spin_unlock_irqrestore(&dev->event_lock, flags);
	}

	/* Drop the fb reference taken by vc4_async_page_flip(), free
	 * the flip state, and let the next async modeset proceed.
	 */
	drm_framebuffer_unreference(flip_state->fb);
	kfree(flip_state);

	up(&vc4->async_modeset);
}
515
516 /* Implements async (non-vblank-synced) page flips.
517  *
518  * The page flip ioctl needs to return immediately, so we grab the
519  * modeset semaphore on the pipe, and queue the address update for
520  * when V3D is done with the BO being flipped to.
521  */
522 static int vc4_async_page_flip(struct drm_crtc *crtc,
523                                struct drm_framebuffer *fb,
524                                struct drm_pending_vblank_event *event,
525                                uint32_t flags)
526 {
527         struct drm_device *dev = crtc->dev;
528         struct vc4_dev *vc4 = to_vc4_dev(dev);
529         struct drm_plane *plane = crtc->primary;
530         int ret = 0;
531         struct vc4_async_flip_state *flip_state;
532         struct drm_gem_cma_object *cma_bo = drm_fb_cma_get_gem_obj(fb, 0);
533         struct vc4_bo *bo = to_vc4_bo(&cma_bo->base);
534
535         flip_state = kzalloc(sizeof(*flip_state), GFP_KERNEL);
536         if (!flip_state)
537                 return -ENOMEM;
538
539         drm_framebuffer_reference(fb);
540         flip_state->fb = fb;
541         flip_state->crtc = crtc;
542         flip_state->event = event;
543
544         /* Make sure all other async modesetes have landed. */
545         ret = down_interruptible(&vc4->async_modeset);
546         if (ret) {
547                 kfree(flip_state);
548                 return ret;
549         }
550
551         /* Immediately update the plane's legacy fb pointer, so that later
552          * modeset prep sees the state that will be present when the semaphore
553          * is released.
554          */
555         drm_atomic_set_fb_for_plane(plane->state, fb);
556         plane->fb = fb;
557
558         vc4_queue_seqno_cb(dev, &flip_state->cb, bo->seqno,
559                            vc4_async_page_flip_complete);
560
561         /* Driver takes ownership of state on successful async commit. */
562         return 0;
563 }
564
565 static int vc4_page_flip(struct drm_crtc *crtc,
566                          struct drm_framebuffer *fb,
567                          struct drm_pending_vblank_event *event,
568                          uint32_t flags)
569 {
570         if (flags & DRM_MODE_PAGE_FLIP_ASYNC)
571                 return vc4_async_page_flip(crtc, fb, event, flags);
572         else
573                 return drm_atomic_helper_page_flip(crtc, fb, event, flags);
574 }
575
/* CRTC funcs: standard atomic helpers throughout, except page_flip,
 * which is wrapped so DRM_MODE_PAGE_FLIP_ASYNC takes the driver's
 * async path.
 */
static const struct drm_crtc_funcs vc4_crtc_funcs = {
	.set_config = drm_atomic_helper_set_config,
	.destroy = vc4_crtc_destroy,
	.page_flip = vc4_page_flip,
	.set_property = NULL,
	.cursor_set = NULL, /* handled by drm_mode_cursor_universal */
	.cursor_move = NULL, /* handled by drm_mode_cursor_universal */
	.reset = drm_atomic_helper_crtc_reset,
	.atomic_duplicate_state = drm_atomic_helper_crtc_duplicate_state,
	.atomic_destroy_state = drm_atomic_helper_crtc_destroy_state,
};
587
/* Atomic helper callbacks; see the individual functions above for the
 * PV/HVS programming each one performs.
 */
static const struct drm_crtc_helper_funcs vc4_crtc_helper_funcs = {
	.mode_set_nofb = vc4_crtc_mode_set_nofb,
	.disable = vc4_crtc_disable,
	.enable = vc4_crtc_enable,
	.atomic_check = vc4_crtc_atomic_check,
	.atomic_flush = vc4_crtc_atomic_flush,
};
595
/* Frees the page flip event when the DRM device is closed with the
 * event still outstanding.
 */
void vc4_cancel_page_flip(struct drm_crtc *crtc, struct drm_file *file)
{
	struct vc4_crtc *vc4_crtc = to_vc4_crtc(crtc);
	struct drm_device *dev = crtc->dev;
	unsigned long flags;

	/* event_lock serializes against the vblank IRQ handler, which
	 * also consumes vc4_crtc->event.
	 */
	spin_lock_irqsave(&dev->event_lock, flags);

	/* Only cancel the event if it belongs to the file being closed. */
	if (vc4_crtc->event && vc4_crtc->event->base.file_priv == file) {
		vc4_crtc->event->base.destroy(&vc4_crtc->event->base);
		/* Balance the drm_crtc_vblank_get() from atomic_flush. */
		drm_crtc_vblank_put(crtc);
		vc4_crtc->event = NULL;
	}

	spin_unlock_irqrestore(&dev->event_lock, flags);
}
615
/* Per-pixelvalve instance data.  Note the HVS channel is not 1:1
 * with the PV index: pv1 sources from channel 2 and pv2 from
 * channel 1.
 */
static const struct vc4_crtc_data pv0_data = {
	.hvs_channel = 0,
	.encoder0_type = VC4_ENCODER_TYPE_DSI0,
	.encoder1_type = VC4_ENCODER_TYPE_DPI,
};

static const struct vc4_crtc_data pv1_data = {
	.hvs_channel = 2,
	.encoder0_type = VC4_ENCODER_TYPE_DSI1,
	.encoder1_type = VC4_ENCODER_TYPE_SMI,
};

static const struct vc4_crtc_data pv2_data = {
	.hvs_channel = 1,
	.encoder0_type = VC4_ENCODER_TYPE_VEC,
	.encoder1_type = VC4_ENCODER_TYPE_HDMI,
};

/* Device-tree match table; .data carries the per-PV description. */
static const struct of_device_id vc4_crtc_dt_match[] = {
	{ .compatible = "brcm,bcm2835-pixelvalve0", .data = &pv0_data },
	{ .compatible = "brcm,bcm2835-pixelvalve1", .data = &pv1_data },
	{ .compatible = "brcm,bcm2835-pixelvalve2", .data = &pv2_data },
	{}
};
640
641 static void vc4_set_crtc_possible_masks(struct drm_device *drm,
642                                         struct drm_crtc *crtc)
643 {
644         struct vc4_crtc *vc4_crtc = to_vc4_crtc(crtc);
645         struct drm_encoder *encoder;
646
647         drm_for_each_encoder(encoder, drm) {
648                 struct vc4_encoder *vc4_encoder = to_vc4_encoder(encoder);
649
650                 if (vc4_encoder->type == vc4_crtc->data->encoder0_type) {
651                         vc4_encoder->clock_select = 0;
652                         encoder->possible_crtcs |= drm_crtc_mask(crtc);
653                 } else if (vc4_encoder->type == vc4_crtc->data->encoder1_type) {
654                         vc4_encoder->clock_select = 1;
655                         encoder->possible_crtcs |= drm_crtc_mask(crtc);
656                 }
657         }
658 }
659
/* Component bind: creates the CRTC and its primary/cursor planes for
 * one pixelvalve instance, maps its registers, and hooks up its
 * interrupt.  Returns 0 or a negative errno; error paths tear down
 * the planes created so far (register mapping and vc4_crtc itself are
 * devm-managed).
 */
static int vc4_crtc_bind(struct device *dev, struct device *master, void *data)
{
	struct platform_device *pdev = to_platform_device(dev);
	struct drm_device *drm = dev_get_drvdata(master);
	struct vc4_dev *vc4 = to_vc4_dev(drm);
	struct vc4_crtc *vc4_crtc;
	struct drm_crtc *crtc;
	struct drm_plane *primary_plane, *cursor_plane;
	const struct of_device_id *match;
	int ret;

	vc4_crtc = devm_kzalloc(dev, sizeof(*vc4_crtc), GFP_KERNEL);
	if (!vc4_crtc)
		return -ENOMEM;
	crtc = &vc4_crtc->base;

	match = of_match_device(vc4_crtc_dt_match, dev);
	if (!match)
		return -ENODEV;
	vc4_crtc->data = match->data;

	vc4_crtc->regs = vc4_ioremap_regs(pdev, 0);
	if (IS_ERR(vc4_crtc->regs))
		return PTR_ERR(vc4_crtc->regs);

	/* For now, we create just the primary and the legacy cursor
	 * planes.  We should be able to stack more planes on easily,
	 * but to do that we would need to compute the bandwidth
	 * requirement of the plane configuration, and reject ones
	 * that will take too much.
	 */
	primary_plane = vc4_plane_init(drm, DRM_PLANE_TYPE_PRIMARY);
	if (IS_ERR(primary_plane)) {
		dev_err(dev, "failed to construct primary plane\n");
		ret = PTR_ERR(primary_plane);
		goto err;
	}

	cursor_plane = vc4_plane_init(drm, DRM_PLANE_TYPE_CURSOR);
	if (IS_ERR(cursor_plane)) {
		dev_err(dev, "failed to construct cursor plane\n");
		ret = PTR_ERR(cursor_plane);
		goto err_primary;
	}

	drm_crtc_init_with_planes(drm, crtc, primary_plane, cursor_plane,
				  &vc4_crtc_funcs, NULL);
	drm_crtc_helper_add(crtc, &vc4_crtc_helper_funcs);
	primary_plane->crtc = crtc;
	cursor_plane->crtc = crtc;
	vc4->crtc[drm_crtc_index(crtc)] = vc4_crtc;
	vc4_crtc->channel = vc4_crtc->data->hvs_channel;

	/* Mask and ack any stale PV interrupt before requesting the
	 * IRQ, so the handler can't fire for pre-bind state.
	 */
	CRTC_WRITE(PV_INTEN, 0);
	CRTC_WRITE(PV_INTSTAT, PV_INT_VFP_START);
	ret = devm_request_irq(dev, platform_get_irq(pdev, 0),
			       vc4_crtc_irq_handler, 0, "vc4 crtc", vc4_crtc);
	if (ret)
		goto err_cursor;

	vc4_set_crtc_possible_masks(drm, crtc);

	platform_set_drvdata(pdev, vc4_crtc);

	return 0;

err_cursor:
	cursor_plane->funcs->destroy(cursor_plane);
err_primary:
	primary_plane->funcs->destroy(primary_plane);
err:
	return ret;
}
733
/* Component unbind: tears down the CRTC and masks the pixelvalve
 * interrupt.  The register mapping and vc4_crtc allocation are
 * devm-managed and released automatically after this returns.
 */
static void vc4_crtc_unbind(struct device *dev, struct device *master,
			    void *data)
{
	struct platform_device *pdev = to_platform_device(dev);
	struct vc4_crtc *vc4_crtc = dev_get_drvdata(dev);

	vc4_crtc_destroy(&vc4_crtc->base);

	/* Mask all PV interrupts before the devm IRQ is released. */
	CRTC_WRITE(PV_INTEN, 0);

	platform_set_drvdata(pdev, NULL);
}
746
/* Component framework ops: this device is a sub-component of the
 * master vc4 DRM device.
 */
static const struct component_ops vc4_crtc_ops = {
	.bind   = vc4_crtc_bind,
	.unbind = vc4_crtc_unbind,
};
751
/* Platform probe: just registers with the component framework; real
 * setup happens in vc4_crtc_bind() when the master device binds.
 */
static int vc4_crtc_dev_probe(struct platform_device *pdev)
{
	return component_add(&pdev->dev, &vc4_crtc_ops);
}
756
/* Platform remove: unregisters from the component framework, which
 * triggers vc4_crtc_unbind() if currently bound.
 */
static int vc4_crtc_dev_remove(struct platform_device *pdev)
{
	component_del(&pdev->dev, &vc4_crtc_ops);
	return 0;
}
762
/* Platform driver, registered by the vc4 core (hence non-static). */
struct platform_driver vc4_crtc_driver = {
	.probe = vc4_crtc_dev_probe,
	.remove = vc4_crtc_dev_remove,
	.driver = {
		.name = "vc4_crtc",
		.of_match_table = vc4_crtc_dt_match,
	},
};