/*
 * Copyright (C) 2014 Free Electrons
 * Copyright (C) 2014 Atmel
 *
 * Author: Boris BREZILLON <boris.brezillon@free-electrons.com>
 *
 * This program is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 as published by
 * the Free Software Foundation.
 *
 * This program is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
 * more details.
 *
 * You should have received a copy of the GNU General Public License along with
 * this program. If not, see <http://www.gnu.org/licenses/>.
 */

#include "atmel_hlcdc_dc.h"

/**
 * Atmel HLCDC Plane state structure.
 *
 * @base: DRM plane state
 * @crtc_x: x position of the plane relative to the CRTC
 * @crtc_y: y position of the plane relative to the CRTC
 * @crtc_w: visible width of the plane
 * @crtc_h: visible height of the plane
 * @src_x: x buffer position
 * @src_y: y buffer position
 * @src_w: buffer width
 * @src_h: buffer height
 * @disc_x: x discard position
 * @disc_y: y discard position
 * @disc_w: discard width
 * @disc_h: discard height
 * @bpp: bytes per pixel deduced from pixel_format
 * @offsets: offsets to apply to the GEM buffers
 * @xstride: value to add to the pixel pointer between each line
 * @pstride: value to add to the pixel pointer between each pixel
 * @nplanes: number of planes (deduced from pixel_format)
 * @dscrs: DMA descriptors
 */
struct atmel_hlcdc_plane_state {
        struct drm_plane_state base;
        int crtc_x;
        int crtc_y;
        unsigned int crtc_w;
        unsigned int crtc_h;
        uint32_t src_x;
        uint32_t src_y;
        uint32_t src_w;
        uint32_t src_h;

        u32 ahb_id;

        int disc_x;
        int disc_y;
        int disc_w;
        int disc_h;

        /* These fields are private and should not be touched */
        int bpp[ATMEL_HLCDC_LAYER_MAX_PLANES];
        unsigned int offsets[ATMEL_HLCDC_LAYER_MAX_PLANES];
        int xstride[ATMEL_HLCDC_LAYER_MAX_PLANES];
        int pstride[ATMEL_HLCDC_LAYER_MAX_PLANES];
        int nplanes;

        /* DMA descriptors. */
        struct atmel_hlcdc_dma_channel_dscr *dscrs[ATMEL_HLCDC_LAYER_MAX_PLANES];
};

static inline struct atmel_hlcdc_plane_state *
drm_plane_state_to_atmel_hlcdc_plane_state(struct drm_plane_state *s)
{
        return container_of(s, struct atmel_hlcdc_plane_state, base);
}

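/*
 * Plane source coordinates (src_{x,y,w,h}) are handed over by the DRM core
 * in 16.16 fixed point. This driver only supports integer source
 * coordinates, so SUBPIXEL_MASK is used in atmel_hlcdc_plane_atomic_check()
 * to reject fractional values before the coordinates are shifted down to
 * plain integers.
 */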
#define SUBPIXEL_MASK			0xffff

static uint32_t rgb_formats[] = {
        DRM_FORMAT_C8,
        DRM_FORMAT_XRGB4444,
        DRM_FORMAT_ARGB4444,
        DRM_FORMAT_RGBA4444,
        DRM_FORMAT_RGB565,
        DRM_FORMAT_RGB888,
        DRM_FORMAT_ARGB1555,
        DRM_FORMAT_XRGB8888,
        DRM_FORMAT_ARGB8888,
        DRM_FORMAT_RGBA8888,
};

struct atmel_hlcdc_formats atmel_hlcdc_plane_rgb_formats = {
        .formats = rgb_formats,
        .nformats = ARRAY_SIZE(rgb_formats),
};

static uint32_t rgb_and_yuv_formats[] = {
        DRM_FORMAT_C8,
        DRM_FORMAT_XRGB4444,
        DRM_FORMAT_ARGB4444,
        DRM_FORMAT_RGBA4444,
        DRM_FORMAT_RGB565,
        DRM_FORMAT_RGB888,
        DRM_FORMAT_ARGB1555,
        DRM_FORMAT_XRGB8888,
        DRM_FORMAT_ARGB8888,
        DRM_FORMAT_RGBA8888,
        DRM_FORMAT_AYUV,
        DRM_FORMAT_YUYV,
        DRM_FORMAT_UYVY,
        DRM_FORMAT_YVYU,
        DRM_FORMAT_VYUY,
        DRM_FORMAT_NV21,
        DRM_FORMAT_NV61,
        DRM_FORMAT_YUV422,
        DRM_FORMAT_YUV420,
};

struct atmel_hlcdc_formats atmel_hlcdc_plane_rgb_and_yuv_formats = {
        .formats = rgb_and_yuv_formats,
        .nformats = ARRAY_SIZE(rgb_and_yuv_formats),
};

static int atmel_hlcdc_format_to_plane_mode(u32 format, u32 *mode)
{
        switch (format) {
        case DRM_FORMAT_C8:
                *mode = ATMEL_HLCDC_C8_MODE;
                break;
        case DRM_FORMAT_XRGB4444:
                *mode = ATMEL_HLCDC_XRGB4444_MODE;
                break;
        case DRM_FORMAT_ARGB4444:
                *mode = ATMEL_HLCDC_ARGB4444_MODE;
                break;
        case DRM_FORMAT_RGBA4444:
                *mode = ATMEL_HLCDC_RGBA4444_MODE;
                break;
        case DRM_FORMAT_RGB565:
                *mode = ATMEL_HLCDC_RGB565_MODE;
                break;
        case DRM_FORMAT_RGB888:
                *mode = ATMEL_HLCDC_RGB888_MODE;
                break;
        case DRM_FORMAT_ARGB1555:
                *mode = ATMEL_HLCDC_ARGB1555_MODE;
                break;
        case DRM_FORMAT_XRGB8888:
                *mode = ATMEL_HLCDC_XRGB8888_MODE;
                break;
        case DRM_FORMAT_ARGB8888:
                *mode = ATMEL_HLCDC_ARGB8888_MODE;
                break;
        case DRM_FORMAT_RGBA8888:
                *mode = ATMEL_HLCDC_RGBA8888_MODE;
                break;
        case DRM_FORMAT_AYUV:
                *mode = ATMEL_HLCDC_AYUV_MODE;
                break;
        case DRM_FORMAT_YUYV:
                *mode = ATMEL_HLCDC_YUYV_MODE;
                break;
        case DRM_FORMAT_UYVY:
                *mode = ATMEL_HLCDC_UYVY_MODE;
                break;
        case DRM_FORMAT_YVYU:
                *mode = ATMEL_HLCDC_YVYU_MODE;
                break;
        case DRM_FORMAT_VYUY:
                *mode = ATMEL_HLCDC_VYUY_MODE;
                break;
        case DRM_FORMAT_NV21:
                *mode = ATMEL_HLCDC_NV21_MODE;
                break;
        case DRM_FORMAT_NV61:
                *mode = ATMEL_HLCDC_NV61_MODE;
                break;
        case DRM_FORMAT_YUV420:
                *mode = ATMEL_HLCDC_YUV420_MODE;
                break;
        case DRM_FORMAT_YUV422:
                *mode = ATMEL_HLCDC_YUV422_MODE;
                break;
        default:
                return -ENOTSUPP;
        }

        return 0;
}

static u32 heo_downscaling_xcoef[] = {

static u32 heo_downscaling_ycoef[] = {

static u32 heo_upscaling_xcoef[] = {

static u32 heo_upscaling_ycoef[] = {

#define ATMEL_HLCDC_XPHIDEF	4
#define ATMEL_HLCDC_YPHIDEF	4

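/*
 * The Phi scaler factor computed below is a fixed-point ratio between the
 * source and destination sizes, biased by the default phase (PHIDEF). For
 * example, scaling a 320 pixel wide source up to 640 pixels with
 * XPHIDEF = 4 gives factor = (256 * ((8 * 319) - 4)) / 639 = 1020 (integer
 * division). The max_memsize check then decrements the factor when the
 * resulting memory fetch would exceed the source size. This description is
 * an interpretation of the code below, not a statement from the datasheet.
 */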
static u32 atmel_hlcdc_plane_phiscaler_get_factor(u32 srcsize,
                                                  u32 dstsize,
                                                  u32 phidef)
{
        u32 factor, max_memsize;

        factor = (256 * ((8 * (srcsize - 1)) - phidef)) / (dstsize - 1);
        max_memsize = ((factor * (dstsize - 1)) + (256 * phidef)) / 2048;

        if (max_memsize > srcsize - 1)
                factor--;

        return factor;
}

static void
atmel_hlcdc_plane_scaler_set_phicoeff(struct atmel_hlcdc_plane *plane,
                                      const u32 *coeff_tab, int size,
                                      unsigned int cfg_offs)
{
        int i;

        for (i = 0; i < size; i++)
                atmel_hlcdc_layer_write_cfg(&plane->layer, cfg_offs + i,
                                            coeff_tab[i]);
}

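/*
 * Scaler setup: when the layer provides Phi coefficient registers (HEO),
 * the coefficient table is chosen depending on whether each direction is
 * downscaled or upscaled, and the scaling factors come from the Phi scaler
 * formula above. Layers without Phi coefficients fall back to a simple
 * 1024-based ratio (src * 1024 / dst). The downscaling and upscaling tables
 * are assumed to have the same number of entries, which is why
 * ARRAY_SIZE() of the upscaling table is passed in both cases.
 */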
void atmel_hlcdc_plane_setup_scaler(struct atmel_hlcdc_plane *plane,
                                    struct atmel_hlcdc_plane_state *state)
{
        const struct atmel_hlcdc_layer_desc *desc = plane->layer.desc;
        u32 xfactor, yfactor;

        if (!desc->layout.scaler_config)
                return;

        if (state->crtc_w == state->src_w && state->crtc_h == state->src_h) {
                atmel_hlcdc_layer_write_cfg(&plane->layer,
                                            desc->layout.scaler_config, 0);
                return;
        }

        if (desc->layout.phicoeffs.x) {
                xfactor = atmel_hlcdc_plane_phiscaler_get_factor(state->src_w,
                                                        state->crtc_w,
                                                        ATMEL_HLCDC_XPHIDEF);

                yfactor = atmel_hlcdc_plane_phiscaler_get_factor(state->src_h,
                                                        state->crtc_h,
                                                        ATMEL_HLCDC_YPHIDEF);

                atmel_hlcdc_plane_scaler_set_phicoeff(plane,
                                state->crtc_w < state->src_w ?
                                heo_downscaling_xcoef :
                                heo_upscaling_xcoef,
                                ARRAY_SIZE(heo_upscaling_xcoef),
                                desc->layout.phicoeffs.x);

                atmel_hlcdc_plane_scaler_set_phicoeff(plane,
                                state->crtc_h < state->src_h ?
                                heo_downscaling_ycoef :
                                heo_upscaling_ycoef,
                                ARRAY_SIZE(heo_upscaling_ycoef),
                                desc->layout.phicoeffs.y);
        } else {
                xfactor = (1024 * state->src_w) / state->crtc_w;
                yfactor = (1024 * state->src_h) / state->crtc_h;
        }

        atmel_hlcdc_layer_write_cfg(&plane->layer, desc->layout.scaler_config,
                                    ATMEL_HLCDC_LAYER_SCALER_ENABLE |
                                    ATMEL_HLCDC_LAYER_SCALER_FACTORS(xfactor,
                                                                     yfactor));
}

static void
atmel_hlcdc_plane_update_pos_and_size(struct atmel_hlcdc_plane *plane,
                                      struct atmel_hlcdc_plane_state *state)
{
        const struct atmel_hlcdc_layer_desc *desc = plane->layer.desc;

        if (desc->layout.size)
                atmel_hlcdc_layer_write_cfg(&plane->layer, desc->layout.size,
                                        ATMEL_HLCDC_LAYER_SIZE(state->crtc_w,
                                                               state->crtc_h));

        if (desc->layout.memsize)
                atmel_hlcdc_layer_write_cfg(&plane->layer,
                                        desc->layout.memsize,
                                        ATMEL_HLCDC_LAYER_SIZE(state->src_w,
                                                               state->src_h));

        if (desc->layout.pos)
                atmel_hlcdc_layer_write_cfg(&plane->layer, desc->layout.pos,
                                        ATMEL_HLCDC_LAYER_POS(state->crtc_x,
                                                              state->crtc_y));

        atmel_hlcdc_plane_setup_scaler(plane, state);
}

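/*
 * General settings: non-primary planes are blended into the output. When
 * the framebuffer format has an alpha component, per-pixel (local) alpha is
 * enabled (LAEN); otherwise the plane-wide alpha property is programmed as
 * a global alpha (GAEN + GA), with the 16-bit DRM alpha value reduced to
 * the 8-bit field expected by the hardware (hence the ">> 8").
 */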
static void
atmel_hlcdc_plane_update_general_settings(struct atmel_hlcdc_plane *plane,
                                        struct atmel_hlcdc_plane_state *state)
{
        unsigned int cfg = ATMEL_HLCDC_LAYER_DMA_BLEN_INCR16 | state->ahb_id;
        const struct atmel_hlcdc_layer_desc *desc = plane->layer.desc;
        const struct drm_format_info *format = state->base.fb->format;

        /*
         * Rotation optimization is not working on RGB888 (rotation is still
         * working but without any optimization).
         */
        if (format->format == DRM_FORMAT_RGB888)
                cfg |= ATMEL_HLCDC_LAYER_DMA_ROTDIS;

        atmel_hlcdc_layer_write_cfg(&plane->layer, ATMEL_HLCDC_LAYER_DMA_CFG,
                                    cfg);

        cfg = ATMEL_HLCDC_LAYER_DMA;

        if (plane->base.type != DRM_PLANE_TYPE_PRIMARY) {
                cfg |= ATMEL_HLCDC_LAYER_OVR | ATMEL_HLCDC_LAYER_ITER2BL |
                       ATMEL_HLCDC_LAYER_ITER;

                if (format->has_alpha)
                        cfg |= ATMEL_HLCDC_LAYER_LAEN;
                else
                        cfg |= ATMEL_HLCDC_LAYER_GAEN |
                               ATMEL_HLCDC_LAYER_GA(state->base.alpha >> 8);
        }

        if (state->disc_h && state->disc_w)
                cfg |= ATMEL_HLCDC_LAYER_DISCEN;

        atmel_hlcdc_layer_write_cfg(&plane->layer, desc->layout.general_config,
                                    cfg);
}

static void atmel_hlcdc_plane_update_format(struct atmel_hlcdc_plane *plane,
                                        struct atmel_hlcdc_plane_state *state)
{
        u32 cfg;
        int ret;

        ret = atmel_hlcdc_format_to_plane_mode(state->base.fb->format->format,
                                               &cfg);
        if (ret)
                return;

        if ((state->base.fb->format->format == DRM_FORMAT_YUV422 ||
             state->base.fb->format->format == DRM_FORMAT_NV61) &&
            drm_rotation_90_or_270(state->base.rotation))
                cfg |= ATMEL_HLCDC_YUV422ROT;

        atmel_hlcdc_layer_write_cfg(&plane->layer,
                                    ATMEL_HLCDC_LAYER_FORMAT_CFG, cfg);
}

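/*
 * CLUT update: the DRM gamma LUT stores 16-bit color components, while the
 * hardware CLUT entries are 8 bits per component, so each component is
 * reduced to its 8 most significant bits before being packed into a 24-bit
 * RGB word.
 */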
static void atmel_hlcdc_plane_update_clut(struct atmel_hlcdc_plane *plane)
{
        struct drm_crtc *crtc = plane->base.crtc;
        struct drm_color_lut *lut;
        int idx;

        if (!crtc || !crtc->state)
                return;

        if (!crtc->state->color_mgmt_changed || !crtc->state->gamma_lut)
                return;

        lut = (struct drm_color_lut *)crtc->state->gamma_lut->data;

        for (idx = 0; idx < ATMEL_HLCDC_CLUT_SIZE; idx++, lut++) {
                u32 val = ((lut->red << 8) & 0xff0000) |
                          (lut->green & 0xff00) |
                          (lut->blue >> 8);

                atmel_hlcdc_layer_write_clut(&plane->layer, idx, val);
        }
}

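/*
 * Buffer update: each plane of the framebuffer is fetched through a DMA
 * descriptor that points to itself, so the channel keeps fetching the same
 * buffer on every frame. The descriptor head register is always updated,
 * while the ADDR/CTRL/NEXT registers are only programmed directly when the
 * channel is not yet enabled (CHSR does not report EN); once running, the
 * hardware is expected to pick up the new descriptor from the head pointer.
 */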
static void atmel_hlcdc_plane_update_buffers(struct atmel_hlcdc_plane *plane,
                                        struct atmel_hlcdc_plane_state *state)
{
        const struct atmel_hlcdc_layer_desc *desc = plane->layer.desc;
        struct drm_framebuffer *fb = state->base.fb;
        u32 sr;
        int i;

        sr = atmel_hlcdc_layer_read_reg(&plane->layer, ATMEL_HLCDC_LAYER_CHSR);

        for (i = 0; i < state->nplanes; i++) {
                struct drm_gem_cma_object *gem = drm_fb_cma_get_gem_obj(fb, i);

                state->dscrs[i]->addr = gem->paddr + state->offsets[i];

                atmel_hlcdc_layer_write_reg(&plane->layer,
                                            ATMEL_HLCDC_LAYER_PLANE_HEAD(i),
                                            state->dscrs[i]->self);

                if (!(sr & ATMEL_HLCDC_LAYER_EN)) {
                        atmel_hlcdc_layer_write_reg(&plane->layer,
                                        ATMEL_HLCDC_LAYER_PLANE_ADDR(i),
                                        state->dscrs[i]->addr);
                        atmel_hlcdc_layer_write_reg(&plane->layer,
                                        ATMEL_HLCDC_LAYER_PLANE_CTRL(i),
                                        state->dscrs[i]->ctrl);
                        atmel_hlcdc_layer_write_reg(&plane->layer,
                                        ATMEL_HLCDC_LAYER_PLANE_NEXT(i),
                                        state->dscrs[i]->self);
                }

                if (desc->layout.xstride[i])
                        atmel_hlcdc_layer_write_cfg(&plane->layer,
                                                    desc->layout.xstride[i],
                                                    state->xstride[i]);

                if (desc->layout.pstride[i])
                        atmel_hlcdc_layer_write_cfg(&plane->layer,
                                                    desc->layout.pstride[i],
                                                    state->pstride[i]);
        }
}

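/*
 * AHB routing: the HLCDC can fetch framebuffer data through two AHB master
 * interfaces (hence the two-entry ahb_load[] array below). To balance the
 * memory bandwidth, each plane is assigned to the least loaded interface,
 * where the load is estimated as the number of fetched pixels (minus the
 * discarded area) multiplied by the bytes per pixel of each buffer plane.
 * This is a heuristic, not an exact bandwidth measurement.
 */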
int atmel_hlcdc_plane_prepare_ahb_routing(struct drm_crtc_state *c_state)
{
        unsigned int ahb_load[2] = { };
        struct drm_plane *plane;

        drm_atomic_crtc_state_for_each_plane(plane, c_state) {
                struct atmel_hlcdc_plane_state *plane_state;
                struct drm_plane_state *plane_s;
                unsigned int pixels, load = 0;
                int i;

                plane_s = drm_atomic_get_plane_state(c_state->state, plane);
                if (IS_ERR(plane_s))
                        return PTR_ERR(plane_s);

                plane_state =
                        drm_plane_state_to_atmel_hlcdc_plane_state(plane_s);

                pixels = (plane_state->src_w * plane_state->src_h) -
                         (plane_state->disc_w * plane_state->disc_h);

                for (i = 0; i < plane_state->nplanes; i++)
                        load += pixels * plane_state->bpp[i];

                if (ahb_load[0] <= ahb_load[1])
                        plane_state->ahb_id = 0;
                else
                        plane_state->ahb_id = 1;

                ahb_load[plane_state->ahb_id] += load;
        }

        return 0;
}

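/*
 * Discard area: when an overlay is fully opaque (no alpha component and an
 * opaque plane alpha), the primary plane does not need to fetch the pixels
 * hidden behind it. The largest such overlay is used as the primary plane's
 * "discard" rectangle, which the hardware skips when DISCEN is set. Only a
 * single rectangle is tracked, hence the TODO below about smarter hidden
 * area detection.
 */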
static int
atmel_hlcdc_plane_prepare_disc_area(struct drm_crtc_state *c_state)
{
        int disc_x = 0, disc_y = 0, disc_w = 0, disc_h = 0;
        const struct atmel_hlcdc_layer_cfg_layout *layout;
        struct atmel_hlcdc_plane_state *primary_state;
        struct drm_plane_state *primary_s;
        struct atmel_hlcdc_plane *primary;
        struct drm_plane *ovl;

        primary = drm_plane_to_atmel_hlcdc_plane(c_state->crtc->primary);
        layout = &primary->layer.desc->layout;
        if (!layout->disc_pos || !layout->disc_size)
                return 0;

        primary_s = drm_atomic_get_plane_state(c_state->state,
                                               &primary->base);
        if (IS_ERR(primary_s))
                return PTR_ERR(primary_s);

        primary_state = drm_plane_state_to_atmel_hlcdc_plane_state(primary_s);

        drm_atomic_crtc_state_for_each_plane(ovl, c_state) {
                struct atmel_hlcdc_plane_state *ovl_state;
                struct drm_plane_state *ovl_s;

                if (ovl == c_state->crtc->primary)
                        continue;

                ovl_s = drm_atomic_get_plane_state(c_state->state, ovl);
                if (IS_ERR(ovl_s))
                        return PTR_ERR(ovl_s);

                ovl_state = drm_plane_state_to_atmel_hlcdc_plane_state(ovl_s);

                if (!ovl_s->fb ||
                    ovl_s->fb->format->has_alpha ||
                    ovl_s->alpha != DRM_BLEND_ALPHA_OPAQUE)
                        continue;

                /* TODO: implement a smarter hidden area detection */
                if (ovl_state->crtc_h * ovl_state->crtc_w < disc_h * disc_w)
                        continue;

                disc_x = ovl_state->crtc_x;
                disc_y = ovl_state->crtc_y;
                disc_h = ovl_state->crtc_h;
                disc_w = ovl_state->crtc_w;
        }

        primary_state->disc_x = disc_x;
        primary_state->disc_y = disc_y;
        primary_state->disc_w = disc_w;
        primary_state->disc_h = disc_h;

        return 0;
}

static void
atmel_hlcdc_plane_update_disc_area(struct atmel_hlcdc_plane *plane,
                                   struct atmel_hlcdc_plane_state *state)
{
        const struct atmel_hlcdc_layer_cfg_layout *layout;

        layout = &plane->layer.desc->layout;
        if (!layout->disc_pos || !layout->disc_size)
                return;

        atmel_hlcdc_layer_write_cfg(&plane->layer, layout->disc_pos,
                                  ATMEL_HLCDC_LAYER_DISC_POS(state->disc_x,
                                                             state->disc_y));

        atmel_hlcdc_layer_write_cfg(&plane->layer, layout->disc_size,
                                 ATMEL_HLCDC_LAYER_DISC_SIZE(state->disc_w,
                                                             state->disc_h));
}

static int atmel_hlcdc_plane_atomic_check(struct drm_plane *p,
                                          struct drm_plane_state *s)
{
        struct atmel_hlcdc_plane *plane = drm_plane_to_atmel_hlcdc_plane(p);
        struct atmel_hlcdc_plane_state *state =
                        drm_plane_state_to_atmel_hlcdc_plane_state(s);
        const struct atmel_hlcdc_layer_desc *desc = plane->layer.desc;
        struct drm_framebuffer *fb = state->base.fb;
        const struct drm_display_mode *mode;
        struct drm_crtc_state *crtc_state;
        unsigned int patched_crtc_w;
        unsigned int patched_crtc_h;
        unsigned int patched_src_w;
        unsigned int patched_src_h;
        unsigned int hsub, vsub;
        int x_offset = 0;
        int y_offset = 0;
        int i;

        if (!state->base.crtc || !fb)
                return 0;

        crtc_state = drm_atomic_get_existing_crtc_state(s->state, s->crtc);
        mode = &crtc_state->adjusted_mode;

        state->src_x = s->src_x;
        state->src_y = s->src_y;
        state->src_h = s->src_h;
        state->src_w = s->src_w;
        state->crtc_x = s->crtc_x;
        state->crtc_y = s->crtc_y;
        state->crtc_h = s->crtc_h;
        state->crtc_w = s->crtc_w;
        if ((state->src_x | state->src_y | state->src_w | state->src_h) &
            SUBPIXEL_MASK)
                return -EINVAL;

        state->src_x >>= 16;
        state->src_y >>= 16;
        state->src_w >>= 16;
        state->src_h >>= 16;

        state->nplanes = fb->format->num_planes;
        if (state->nplanes > ATMEL_HLCDC_LAYER_MAX_PLANES)
                return -EINVAL;

        /*
         * Swap width and size in case of 90 or 270 degrees rotation
         */
        if (drm_rotation_90_or_270(state->base.rotation)) {
                unsigned int tmp;

                tmp = state->crtc_w;
                state->crtc_w = state->crtc_h;
                state->crtc_h = tmp;
                tmp = state->src_w;
                state->src_w = state->src_h;
                state->src_h = tmp;
        }

        if (state->crtc_x + state->crtc_w > mode->hdisplay)
                patched_crtc_w = mode->hdisplay - state->crtc_x;
        else
                patched_crtc_w = state->crtc_w;

        if (state->crtc_x < 0) {
                patched_crtc_w += state->crtc_x;
                x_offset = -state->crtc_x;
                state->crtc_x = 0;
        }

        if (state->crtc_y + state->crtc_h > mode->vdisplay)
                patched_crtc_h = mode->vdisplay - state->crtc_y;
        else
                patched_crtc_h = state->crtc_h;

        if (state->crtc_y < 0) {
                patched_crtc_h += state->crtc_y;
                y_offset = -state->crtc_y;
                state->crtc_y = 0;
        }

        patched_src_w = DIV_ROUND_CLOSEST(patched_crtc_w * state->src_w,
                                          state->crtc_w);
        patched_src_h = DIV_ROUND_CLOSEST(patched_crtc_h * state->src_h,
                                          state->crtc_h);

        hsub = drm_format_horz_chroma_subsampling(fb->format->format);
        vsub = drm_format_vert_chroma_subsampling(fb->format->format);

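        /*
         * For each buffer plane, compute the offset of the first fetched
         * pixel and the line (xstride) / pixel (pstride) increments so that
         * the DMA walks the buffer in the order required by the selected
         * rotation. As an example, for DRM_MODE_ROTATE_0 the offset is
         * simply src_y lines plus src_x pixels into the buffer (scaled by
         * the per-plane subsampling factors), xstride is the padding left
         * at the end of each line, and pstride is 0.
         */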
        for (i = 0; i < state->nplanes; i++) {
                unsigned int offset = 0;
                int xdiv = i ? hsub : 1;
                int ydiv = i ? vsub : 1;

                state->bpp[i] = fb->format->cpp[i];
                if (!state->bpp[i])
                        return -EINVAL;

                switch (state->base.rotation & DRM_MODE_ROTATE_MASK) {
                case DRM_MODE_ROTATE_90:
                        offset = ((y_offset + state->src_y + patched_src_w - 1) /
                                  ydiv) * fb->pitches[i];
                        offset += ((x_offset + state->src_x) / xdiv) *
                                  state->bpp[i];
                        state->xstride[i] = ((patched_src_w - 1) / ydiv) *
                                            fb->pitches[i];
                        state->pstride[i] = -fb->pitches[i] - state->bpp[i];
                        break;
                case DRM_MODE_ROTATE_180:
                        offset = ((y_offset + state->src_y + patched_src_h - 1) /
                                  ydiv) * fb->pitches[i];
                        offset += ((x_offset + state->src_x + patched_src_w - 1) /
                                   xdiv) * state->bpp[i];
                        state->xstride[i] = ((((patched_src_w - 1) / xdiv) - 1) *
                                             state->bpp[i]) - fb->pitches[i];
                        state->pstride[i] = -2 * state->bpp[i];
                        break;
                case DRM_MODE_ROTATE_270:
                        offset = ((y_offset + state->src_y) / ydiv) *
                                 fb->pitches[i];
                        offset += ((x_offset + state->src_x + patched_src_h - 1) /
                                   xdiv) * state->bpp[i];
                        state->xstride[i] = -(((patched_src_w - 1) / ydiv) *
                                              fb->pitches[i]) -
                                            (2 * state->bpp[i]);
                        state->pstride[i] = fb->pitches[i] - state->bpp[i];
                        break;
                case DRM_MODE_ROTATE_0:
                default:
                        offset = ((y_offset + state->src_y) / ydiv) *
                                 fb->pitches[i];
                        offset += ((x_offset + state->src_x) / xdiv) *
                                  state->bpp[i];
                        state->xstride[i] = fb->pitches[i] -
                                            ((patched_src_w / xdiv) *
                                             state->bpp[i]);
                        state->pstride[i] = 0;
                        break;
                }

                state->offsets[i] = offset + fb->offsets[i];
        }

        state->src_w = patched_src_w;
        state->src_h = patched_src_h;
        state->crtc_w = patched_crtc_w;
        state->crtc_h = patched_crtc_h;

        if (!desc->layout.size &&
            (mode->hdisplay != state->crtc_w ||
             mode->vdisplay != state->crtc_h))
                return -EINVAL;

        if (desc->max_height && state->crtc_h > desc->max_height)
                return -EINVAL;

        if (desc->max_width && state->crtc_w > desc->max_width)
                return -EINVAL;

        if ((state->crtc_h != state->src_h || state->crtc_w != state->src_w) &&
            (!desc->layout.memsize ||
             state->base.fb->format->has_alpha))
                return -EINVAL;

        if (state->crtc_x < 0 || state->crtc_y < 0)
                return -EINVAL;

        if (state->crtc_w + state->crtc_x > mode->hdisplay ||
            state->crtc_h + state->crtc_y > mode->vdisplay)
                return -EINVAL;

        return 0;
}

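/*
 * The update path below programs the per-layer configuration registers and
 * then kicks the channel: if it is already enabled (CHSR reports EN), the
 * new configuration is queued with A2Q and latched at the next start of
 * frame, otherwise the channel is simply enabled. This reading of the
 * UPDATE/A2Q handshake is an interpretation of the code, not a statement
 * from the datasheet.
 */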
static void atmel_hlcdc_plane_atomic_update(struct drm_plane *p,
                                            struct drm_plane_state *old_s)
{
        struct atmel_hlcdc_plane *plane = drm_plane_to_atmel_hlcdc_plane(p);
        struct atmel_hlcdc_plane_state *state =
                        drm_plane_state_to_atmel_hlcdc_plane_state(p->state);
        u32 sr;

        if (!p->state->crtc || !p->state->fb)
                return;

        atmel_hlcdc_plane_update_pos_and_size(plane, state);
        atmel_hlcdc_plane_update_general_settings(plane, state);
        atmel_hlcdc_plane_update_format(plane, state);
        atmel_hlcdc_plane_update_clut(plane);
        atmel_hlcdc_plane_update_buffers(plane, state);
        atmel_hlcdc_plane_update_disc_area(plane, state);

        /* Enable the overrun interrupts. */
        atmel_hlcdc_layer_write_reg(&plane->layer, ATMEL_HLCDC_LAYER_IER,
                                    ATMEL_HLCDC_LAYER_OVR_IRQ(0) |
                                    ATMEL_HLCDC_LAYER_OVR_IRQ(1) |
                                    ATMEL_HLCDC_LAYER_OVR_IRQ(2));

        /* Apply the new config at the next SOF event. */
        sr = atmel_hlcdc_layer_read_reg(&plane->layer, ATMEL_HLCDC_LAYER_CHSR);
        atmel_hlcdc_layer_write_reg(&plane->layer, ATMEL_HLCDC_LAYER_CHER,
                        ATMEL_HLCDC_LAYER_UPDATE |
                        (sr & ATMEL_HLCDC_LAYER_EN ?
                         ATMEL_HLCDC_LAYER_A2Q : ATMEL_HLCDC_LAYER_EN));
}

static void atmel_hlcdc_plane_atomic_disable(struct drm_plane *p,
                                             struct drm_plane_state *old_state)
{
        struct atmel_hlcdc_plane *plane = drm_plane_to_atmel_hlcdc_plane(p);

        /* Disable interrupts */
        atmel_hlcdc_layer_write_reg(&plane->layer, ATMEL_HLCDC_LAYER_IDR,
                                    0xffffffff);

        /* Disable the layer */
        atmel_hlcdc_layer_write_reg(&plane->layer, ATMEL_HLCDC_LAYER_CHDR,
                                    ATMEL_HLCDC_LAYER_RST |
                                    ATMEL_HLCDC_LAYER_A2Q |
                                    ATMEL_HLCDC_LAYER_UPDATE);

        /* Clear all pending interrupts */
        atmel_hlcdc_layer_read_reg(&plane->layer, ATMEL_HLCDC_LAYER_ISR);
}

static void atmel_hlcdc_plane_destroy(struct drm_plane *p)
{
        struct atmel_hlcdc_plane *plane = drm_plane_to_atmel_hlcdc_plane(p);

        if (plane->base.fb)
                drm_framebuffer_put(plane->base.fb);

        drm_plane_cleanup(p);
}

static int atmel_hlcdc_plane_init_properties(struct atmel_hlcdc_plane *plane)
{
        const struct atmel_hlcdc_layer_desc *desc = plane->layer.desc;

        if (desc->type == ATMEL_HLCDC_OVERLAY_LAYER ||
            desc->type == ATMEL_HLCDC_CURSOR_LAYER) {
                int ret;

                ret = drm_plane_create_alpha_property(&plane->base);
                if (ret)
                        return ret;
        }

        if (desc->layout.xstride[0] && desc->layout.pstride[0]) {
                int ret;

                ret = drm_plane_create_rotation_property(&plane->base,
                                                         DRM_MODE_ROTATE_0,
                                                         DRM_MODE_ROTATE_0 |
                                                         DRM_MODE_ROTATE_90 |
                                                         DRM_MODE_ROTATE_180 |
                                                         DRM_MODE_ROTATE_270);
                if (ret)
                        return ret;
        }

        if (desc->layout.csc) {
                /*
                 * TODO: declare a "yuv-to-rgb-conv-factors" property to let
                 * userspace modify these factors (using a BLOB property ?).
                 */
                atmel_hlcdc_layer_write_cfg(&plane->layer,
                                            desc->layout.csc,
                atmel_hlcdc_layer_write_cfg(&plane->layer,
                                            desc->layout.csc + 1,
                atmel_hlcdc_layer_write_cfg(&plane->layer,
                                            desc->layout.csc + 2,
        }

        return 0;
}

void atmel_hlcdc_plane_irq(struct atmel_hlcdc_plane *plane)
{
        const struct atmel_hlcdc_layer_desc *desc = plane->layer.desc;
        u32 isr;

        isr = atmel_hlcdc_layer_read_reg(&plane->layer, ATMEL_HLCDC_LAYER_ISR);

        /*
         * There's not much we can do in case of overrun except informing
         * the user. However, we are in interrupt context here, hence the
         * use of dev_dbg().
         */
        if (isr &
            (ATMEL_HLCDC_LAYER_OVR_IRQ(0) | ATMEL_HLCDC_LAYER_OVR_IRQ(1) |
             ATMEL_HLCDC_LAYER_OVR_IRQ(2)))
                dev_dbg(plane->base.dev->dev, "overrun on plane %s\n",
                        desc->name);
}

static const struct drm_plane_helper_funcs atmel_hlcdc_layer_plane_helper_funcs = {
        .atomic_check = atmel_hlcdc_plane_atomic_check,
        .atomic_update = atmel_hlcdc_plane_atomic_update,
        .atomic_disable = atmel_hlcdc_plane_atomic_disable,
};

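/*
 * Each plane state owns up to ATMEL_HLCDC_LAYER_MAX_PLANES DMA descriptors
 * allocated from a dma_pool. A descriptor's "next" and "self" fields are
 * both set to its own DMA address, so the channel loops on a single
 * descriptor per buffer plane until a new state is applied.
 */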
static int atmel_hlcdc_plane_alloc_dscrs(struct drm_plane *p,
                                         struct atmel_hlcdc_plane_state *state)
{
        struct atmel_hlcdc_dc *dc = p->dev->dev_private;
        int i;

        for (i = 0; i < ARRAY_SIZE(state->dscrs); i++) {
                struct atmel_hlcdc_dma_channel_dscr *dscr;
                dma_addr_t dscr_dma;

                dscr = dma_pool_alloc(dc->dscrpool, GFP_KERNEL, &dscr_dma);
                if (!dscr)
                        goto err;

                dscr->addr = 0;
                dscr->next = dscr_dma;
                dscr->self = dscr_dma;
                dscr->ctrl = ATMEL_HLCDC_LAYER_DFETCH;

                state->dscrs[i] = dscr;
        }

        return 0;

err:
        for (i--; i >= 0; i--) {
                dma_pool_free(dc->dscrpool, state->dscrs[i],
                              state->dscrs[i]->self);
        }

        return -ENOMEM;
}

static void atmel_hlcdc_plane_reset(struct drm_plane *p)
{
        struct atmel_hlcdc_plane_state *state;

        if (p->state) {
                state = drm_plane_state_to_atmel_hlcdc_plane_state(p->state);

                if (state->base.fb)
                        drm_framebuffer_put(state->base.fb);

                kfree(state);
                p->state = NULL;
        }

        state = kzalloc(sizeof(*state), GFP_KERNEL);
        if (state) {
                if (atmel_hlcdc_plane_alloc_dscrs(p, state)) {
                        kfree(state);
                        dev_err(p->dev->dev,
                                "Failed to allocate initial plane state\n");
                        return;
                }

                p->state = &state->base;
                p->state->alpha = DRM_BLEND_ALPHA_OPAQUE;
                p->state->plane = p;
        }
}

static struct drm_plane_state *
atmel_hlcdc_plane_atomic_duplicate_state(struct drm_plane *p)
{
        struct atmel_hlcdc_plane_state *state =
                        drm_plane_state_to_atmel_hlcdc_plane_state(p->state);
        struct atmel_hlcdc_plane_state *copy;

        copy = kmemdup(state, sizeof(*state), GFP_KERNEL);
        if (!copy)
                return NULL;

        if (atmel_hlcdc_plane_alloc_dscrs(p, copy)) {
                kfree(copy);
                return NULL;
        }

        if (copy->base.fb)
                drm_framebuffer_get(copy->base.fb);

        return &copy->base;
}

static void atmel_hlcdc_plane_atomic_destroy_state(struct drm_plane *p,
                                                   struct drm_plane_state *s)
{
        struct atmel_hlcdc_plane_state *state =
                        drm_plane_state_to_atmel_hlcdc_plane_state(s);
        struct atmel_hlcdc_dc *dc = p->dev->dev_private;
        int i;

        for (i = 0; i < ARRAY_SIZE(state->dscrs); i++) {
                dma_pool_free(dc->dscrpool, state->dscrs[i],
                              state->dscrs[i]->self);
        }

        if (s->fb)
                drm_framebuffer_put(s->fb);

        kfree(state);
}

static const struct drm_plane_funcs layer_plane_funcs = {
        .update_plane = drm_atomic_helper_update_plane,
        .disable_plane = drm_atomic_helper_disable_plane,
        .destroy = atmel_hlcdc_plane_destroy,
        .reset = atmel_hlcdc_plane_reset,
        .atomic_duplicate_state = atmel_hlcdc_plane_atomic_duplicate_state,
        .atomic_destroy_state = atmel_hlcdc_plane_atomic_destroy_state,
};

static int atmel_hlcdc_plane_create(struct drm_device *dev,
                                    const struct atmel_hlcdc_layer_desc *desc)
{
        struct atmel_hlcdc_dc *dc = dev->dev_private;
        struct atmel_hlcdc_plane *plane;
        enum drm_plane_type type;
        int ret;

        plane = devm_kzalloc(dev->dev, sizeof(*plane), GFP_KERNEL);
        if (!plane)
                return -ENOMEM;

        atmel_hlcdc_layer_init(&plane->layer, desc, dc->hlcdc->regmap);

        if (desc->type == ATMEL_HLCDC_BASE_LAYER)
                type = DRM_PLANE_TYPE_PRIMARY;
        else if (desc->type == ATMEL_HLCDC_CURSOR_LAYER)
                type = DRM_PLANE_TYPE_CURSOR;
        else
                type = DRM_PLANE_TYPE_OVERLAY;

        ret = drm_universal_plane_init(dev, &plane->base, 0,
                                       &layer_plane_funcs,
                                       desc->formats->formats,
                                       desc->formats->nformats,
                                       NULL, type, NULL);
        if (ret)
                return ret;

        drm_plane_helper_add(&plane->base,
                             &atmel_hlcdc_layer_plane_helper_funcs);

        /* Set default property values */
        ret = atmel_hlcdc_plane_init_properties(plane);
        if (ret)
                return ret;

        dc->layers[desc->id] = &plane->layer;

        return 0;
}

int atmel_hlcdc_create_planes(struct drm_device *dev)
{
        struct atmel_hlcdc_dc *dc = dev->dev_private;
        const struct atmel_hlcdc_layer_desc *descs = dc->desc->layers;
        int nlayers = dc->desc->nlayers;
        int i, ret;

        dc->dscrpool = dmam_pool_create("atmel-hlcdc-dscr", dev->dev,
                                sizeof(struct atmel_hlcdc_dma_channel_dscr),
                                sizeof(u64), 0);
        if (!dc->dscrpool)
                return -ENOMEM;

        for (i = 0; i < nlayers; i++) {
                if (descs[i].type != ATMEL_HLCDC_BASE_LAYER &&
                    descs[i].type != ATMEL_HLCDC_OVERLAY_LAYER &&
                    descs[i].type != ATMEL_HLCDC_CURSOR_LAYER)
                        continue;

                ret = atmel_hlcdc_plane_create(dev, &descs[i]);