// SPDX-License-Identifier: GPL-2.0-or-later

#include <linux/dma-resv.h>

#include <drm/drm_atomic_state_helper.h>
#include <drm/drm_atomic_uapi.h>
#include <drm/drm_gem.h>
#include <drm/drm_gem_atomic_helper.h>
#include <drm/drm_gem_framebuffer_helper.h>
#include <drm/drm_simple_kms_helper.h>

#include "drm_internal.h"

/**
 * DOC: overview
 *
 * The GEM atomic helpers library implements generic atomic-commit
 * functions for drivers that use GEM objects. Currently, it provides
 * synchronization helpers, and plane state and framebuffer BO mappings
 * for planes with shadow buffers.
 *
 * Before scanout, a plane's framebuffer needs to be synchronized with
 * possible writers that draw into the framebuffer. All drivers should
 * call drm_gem_plane_helper_prepare_fb() from their implementation of
 * struct &drm_plane_helper_funcs.prepare_fb. It sets the plane's fence from
 * the framebuffer so that the DRM core can synchronize access automatically.
 *
 * drm_gem_plane_helper_prepare_fb() can also be used directly as the
 * implementation of prepare_fb. For drivers based on
 * struct drm_simple_display_pipe, drm_gem_simple_display_pipe_prepare_fb()
 * provides equivalent functionality.
 *
 * .. code-block:: c
 *
 *      #include <drm/drm_gem_atomic_helper.h>
 *
 *      struct drm_plane_helper_funcs driver_plane_helper_funcs = {
 *              ...,
 *              .prepare_fb = drm_gem_plane_helper_prepare_fb,
 *      };
 *
 *      struct drm_simple_display_pipe_funcs driver_pipe_funcs = {
 *              ...,
 *              .prepare_fb = drm_gem_simple_display_pipe_prepare_fb,
 *      };
 *
 * A driver using shadow buffers copies their content into the HW's
 * framebuffer memory during an atomic update. This requires a mapping of
 * the shadow buffers into kernel address space. The mappings cannot be
 * established by commit-tail functions, such as atomic_update, as this
 * would violate locking rules around dma_buf_vmap().
 *
 * The helpers for shadow-buffered planes establish and release mappings,
 * and provide struct drm_shadow_plane_state, which stores the plane's mapping
 * for commit-tail functions.
 *
 * Shadow-buffered planes can easily be enabled by using the provided macros
 * %DRM_GEM_SHADOW_PLANE_FUNCS and %DRM_GEM_SHADOW_PLANE_HELPER_FUNCS.
 * These macros set up the plane and plane-helper callbacks to point to the
 * shadow-buffer helpers.
 *
 * .. code-block:: c
 *
 *      #include <drm/drm_gem_atomic_helper.h>
 *
 *      struct drm_plane_funcs driver_plane_funcs = {
 *              ...,
 *              DRM_GEM_SHADOW_PLANE_FUNCS,
 *      };
 *
 *      struct drm_plane_helper_funcs driver_plane_helper_funcs = {
 *              ...,
 *              DRM_GEM_SHADOW_PLANE_HELPER_FUNCS,
 *      };
 *
 * In the driver's atomic-update function, shadow-buffer mappings are available
 * from the plane state. Use to_drm_shadow_plane_state() to upcast from
 * struct drm_plane_state.
 *
 * .. code-block:: c
 *
 *      void driver_plane_atomic_update(struct drm_plane *plane,
 *                                      struct drm_plane_state *old_plane_state)
 *      {
 *              struct drm_plane_state *plane_state = plane->state;
 *              struct drm_shadow_plane_state *shadow_plane_state =
 *                      to_drm_shadow_plane_state(plane_state);
 *
 *              // access shadow buffer via shadow_plane_state->map
 *      }
 *
 * A mapping address for each of the framebuffer's buffer objects is stored in
 * struct &drm_shadow_plane_state.map. The mappings are valid while the state
 * is being used.
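 *
 * For a shadow buffer in system memory, the stored address can be dereferenced
 * directly; for I/O memory, use the vaddr_iomem member and the is_iomem flag
 * of struct dma_buf_map instead. A minimal sketch that reads the CPU address
 * of the first buffer object from the map array, with driver_blit() as a
 * purely hypothetical stand-in for the driver's drawing code:
 *
 * .. code-block:: c
 *
 *      void driver_plane_atomic_update(struct drm_plane *plane,
 *                                      struct drm_plane_state *old_plane_state)
 *      {
 *              struct drm_shadow_plane_state *shadow_plane_state =
 *                      to_drm_shadow_plane_state(plane->state);
 *              struct dma_buf_map map = shadow_plane_state->map[0];
 *
 *              if (dma_buf_map_is_null(&map))
 *                      return;
 *
 *              // map.vaddr is the shadow buffer in kernel address space
 *              driver_blit(plane, map.vaddr);
 *      }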
 *
 * Drivers that use struct drm_simple_display_pipe can use
 * %DRM_GEM_SIMPLE_DISPLAY_PIPE_SHADOW_PLANE_FUNCS to initialize the respective
 * callbacks. Access to shadow-buffer mappings is similar to regular
 * atomic_update.
 *
 * .. code-block:: c
 *
 *      struct drm_simple_display_pipe_funcs driver_pipe_funcs = {
 *              ...,
 *              DRM_GEM_SIMPLE_DISPLAY_PIPE_SHADOW_PLANE_FUNCS,
 *      };
 *
 *      void driver_pipe_enable(struct drm_simple_display_pipe *pipe,
 *                              struct drm_crtc_state *crtc_state,
 *                              struct drm_plane_state *plane_state)
 *      {
 *              struct drm_shadow_plane_state *shadow_plane_state =
 *                      to_drm_shadow_plane_state(plane_state);
 *
 *              // access shadow buffer via shadow_plane_state->map
 *      }
 */

/*
 * Plane Helpers
 */

/**
 * drm_gem_plane_helper_prepare_fb() - Prepare a GEM backed framebuffer
 * @plane: Plane
 * @state: Plane state the fence will be attached to
 *
 * This function extracts the exclusive fence from &drm_gem_object.resv and
 * attaches it to plane state for the atomic helper to wait on. This is
 * necessary to correctly implement implicit synchronization for any buffers
 * shared as a struct &dma_buf. This function can be used as the
 * &drm_plane_helper_funcs.prepare_fb callback.
 *
 * There is no need for a &drm_plane_helper_funcs.cleanup_fb hook for simple
 * GEM-based framebuffer drivers that have their buffers always pinned in
 * memory.
 *
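 * Drivers that need additional work in their prepare_fb, such as pinning the
 * framebuffer's buffer objects, can call this helper from their own
 * implementation. A minimal sketch, where driver_pin_framebuffer() is a
 * hypothetical placeholder for the driver's pinning code:
 *
 * .. code-block:: c
 *
 *      static int driver_plane_prepare_fb(struct drm_plane *plane,
 *                                         struct drm_plane_state *state)
 *      {
 *              int ret;
 *
 *              ret = drm_gem_plane_helper_prepare_fb(plane, state);
 *              if (ret)
 *                      return ret;
 *
 *              // driver-specific work; driver_pin_framebuffer() is hypothetical
 *              return driver_pin_framebuffer(state->fb);
 *      }
 *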
 * See drm_atomic_set_fence_for_plane() for a discussion of implicit and
 * explicit fencing in atomic modeset updates.
 */
int drm_gem_plane_helper_prepare_fb(struct drm_plane *plane, struct drm_plane_state *state)
{
        struct drm_gem_object *obj;
        struct dma_fence *fence;

        if (!state->fb)
                return 0;

        obj = drm_gem_fb_get_obj(state->fb, 0);
        fence = dma_resv_get_excl_unlocked(obj->resv);
        drm_atomic_set_fence_for_plane(state, fence);

        return 0;
}
EXPORT_SYMBOL_GPL(drm_gem_plane_helper_prepare_fb);

/**
 * drm_gem_simple_display_pipe_prepare_fb - prepare_fb helper for &drm_simple_display_pipe
 * @pipe: Simple display pipe
 * @plane_state: Plane state
 *
 * This function uses drm_gem_plane_helper_prepare_fb() to extract the exclusive fence
 * from &drm_gem_object.resv and attaches it to plane state for the atomic
 * helper to wait on. This is necessary to correctly implement implicit
 * synchronization for any buffers shared as a struct &dma_buf. Drivers can use
 * this as their &drm_simple_display_pipe_funcs.prepare_fb callback.
 *
 * See drm_atomic_set_fence_for_plane() for a discussion of implicit and
 * explicit fencing in atomic modeset updates.
 */
int drm_gem_simple_display_pipe_prepare_fb(struct drm_simple_display_pipe *pipe,
                                           struct drm_plane_state *plane_state)
{
        return drm_gem_plane_helper_prepare_fb(&pipe->plane, plane_state);
}
EXPORT_SYMBOL(drm_gem_simple_display_pipe_prepare_fb);

/*
 * Shadow-buffered Planes
 */

/**
 * drm_gem_duplicate_shadow_plane_state - duplicates shadow-buffered plane state
 * @plane: the plane
 *
 * This function implements struct &drm_plane_funcs.atomic_duplicate_state for
 * shadow-buffered planes. It assumes the existing state is of type
 * struct drm_shadow_plane_state and allocates the new state to be of this
 * type.
 *
 * The function does not duplicate existing mappings of the shadow buffers.
 * Mappings are maintained during the atomic commit by the plane's prepare_fb
 * and cleanup_fb helpers; see drm_gem_prepare_shadow_fb() and
 * drm_gem_cleanup_shadow_fb() for the corresponding helpers.
 *
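 * Drivers that do not use the %DRM_GEM_SHADOW_PLANE_FUNCS macro can hook up
 * the shadow-plane state helpers individually. A minimal sketch of what such
 * an initialization might look like:
 *
 * .. code-block:: c
 *
 *      struct drm_plane_funcs driver_plane_funcs = {
 *              ...,
 *              .reset = drm_gem_reset_shadow_plane,
 *              .atomic_duplicate_state = drm_gem_duplicate_shadow_plane_state,
 *              .atomic_destroy_state = drm_gem_destroy_shadow_plane_state,
 *      };
 *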
 * Returns:
 * A pointer to a new plane state on success, or NULL otherwise.
 */
struct drm_plane_state *
drm_gem_duplicate_shadow_plane_state(struct drm_plane *plane)
{
        struct drm_plane_state *plane_state = plane->state;
        struct drm_shadow_plane_state *new_shadow_plane_state;

        if (!plane_state)
                return NULL;

        new_shadow_plane_state = kzalloc(sizeof(*new_shadow_plane_state), GFP_KERNEL);
        if (!new_shadow_plane_state)
                return NULL;
        __drm_atomic_helper_plane_duplicate_state(plane, &new_shadow_plane_state->base);

        return &new_shadow_plane_state->base;
}
EXPORT_SYMBOL(drm_gem_duplicate_shadow_plane_state);

/**
 * drm_gem_destroy_shadow_plane_state - deletes shadow-buffered plane state
 * @plane: the plane
 * @plane_state: the plane state of type struct drm_shadow_plane_state
 *
 * This function implements struct &drm_plane_funcs.atomic_destroy_state
 * for shadow-buffered planes. It expects that mappings of shadow buffers
 * have been released already.
 */
void drm_gem_destroy_shadow_plane_state(struct drm_plane *plane,
                                        struct drm_plane_state *plane_state)
{
        struct drm_shadow_plane_state *shadow_plane_state =
                to_drm_shadow_plane_state(plane_state);

        __drm_atomic_helper_plane_destroy_state(&shadow_plane_state->base);
        kfree(shadow_plane_state);
}
EXPORT_SYMBOL(drm_gem_destroy_shadow_plane_state);

/**
 * drm_gem_reset_shadow_plane - resets a shadow-buffered plane
 * @plane: the plane
 *
 * This function implements struct &drm_plane_funcs.reset for
 * shadow-buffered planes. It assumes the current plane state to be
 * of type struct drm_shadow_plane_state and it allocates the new state
 * of this type.
 */
void drm_gem_reset_shadow_plane(struct drm_plane *plane)
{
        struct drm_shadow_plane_state *shadow_plane_state;

        if (plane->state) {
                drm_gem_destroy_shadow_plane_state(plane, plane->state);
                plane->state = NULL; /* must be set to NULL here */
        }

        shadow_plane_state = kzalloc(sizeof(*shadow_plane_state), GFP_KERNEL);
        if (!shadow_plane_state)
                return;
        __drm_atomic_helper_plane_reset(plane, &shadow_plane_state->base);
}
EXPORT_SYMBOL(drm_gem_reset_shadow_plane);

/**
 * drm_gem_prepare_shadow_fb - prepares shadow framebuffers
 * @plane: the plane
 * @plane_state: the plane state of type struct drm_shadow_plane_state
 *
 * This function implements struct &drm_plane_helper_funcs.prepare_fb. It
 * maps all buffer objects of the plane's framebuffer into kernel address
 * space and stores them in &struct drm_shadow_plane_state.map. The
 * framebuffer will be synchronized as part of the atomic commit.
 *
 * See drm_gem_cleanup_shadow_fb() for cleanup.
 *
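 * Drivers that do not use the %DRM_GEM_SHADOW_PLANE_HELPER_FUNCS macro can set
 * the plane-helper callbacks individually. A minimal sketch of what that might
 * look like:
 *
 * .. code-block:: c
 *
 *      struct drm_plane_helper_funcs driver_plane_helper_funcs = {
 *              ...,
 *              .prepare_fb = drm_gem_prepare_shadow_fb,
 *              .cleanup_fb = drm_gem_cleanup_shadow_fb,
 *      };
 *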
 * Returns:
 * 0 on success, or a negative errno code otherwise.
 */
int drm_gem_prepare_shadow_fb(struct drm_plane *plane, struct drm_plane_state *plane_state)
{
        struct drm_shadow_plane_state *shadow_plane_state = to_drm_shadow_plane_state(plane_state);
        struct drm_framebuffer *fb = plane_state->fb;
        struct drm_gem_object *obj;
        struct dma_buf_map map;
        int ret;
        size_t i;

        if (!fb)
                return 0;

        ret = drm_gem_plane_helper_prepare_fb(plane, plane_state);
        if (ret)
                return ret;

        for (i = 0; i < ARRAY_SIZE(shadow_plane_state->map); ++i) {
                obj = drm_gem_fb_get_obj(fb, i);
                if (!obj)
                        continue;
                ret = drm_gem_vmap(obj, &map);
                if (ret)
                        goto err_drm_gem_vunmap;
                shadow_plane_state->map[i] = map;
        }

        return 0;

err_drm_gem_vunmap:
        while (i) {
                --i;
                obj = drm_gem_fb_get_obj(fb, i);
                if (!obj)
                        continue;
                drm_gem_vunmap(obj, &shadow_plane_state->map[i]);
        }
        return ret;
}
EXPORT_SYMBOL(drm_gem_prepare_shadow_fb);

/**
 * drm_gem_cleanup_shadow_fb - releases shadow framebuffers
 * @plane: the plane
 * @plane_state: the plane state of type struct drm_shadow_plane_state
 *
 * This function implements struct &drm_plane_helper_funcs.cleanup_fb.
 * It unmaps all buffer objects of the plane's framebuffer.
 *
 * See drm_gem_prepare_shadow_fb() for more information.
 */
void drm_gem_cleanup_shadow_fb(struct drm_plane *plane, struct drm_plane_state *plane_state)
{
        struct drm_shadow_plane_state *shadow_plane_state = to_drm_shadow_plane_state(plane_state);
        struct drm_framebuffer *fb = plane_state->fb;
        size_t i = ARRAY_SIZE(shadow_plane_state->map);
        struct drm_gem_object *obj;

        if (!fb)
                return;

        while (i) {
                --i;
                obj = drm_gem_fb_get_obj(fb, i);
                if (!obj)
                        continue;
                drm_gem_vunmap(obj, &shadow_plane_state->map[i]);
        }
}
EXPORT_SYMBOL(drm_gem_cleanup_shadow_fb);

/**
 * drm_gem_simple_kms_prepare_shadow_fb - prepares shadow framebuffers
 * @pipe: the simple display pipe
 * @plane_state: the plane state of type struct drm_shadow_plane_state
 *
 * This function implements struct drm_simple_display_pipe_funcs.prepare_fb. It
 * maps all buffer objects of the plane's framebuffer into kernel address
 * space and stores them in struct drm_shadow_plane_state.map. The
 * framebuffer will be synchronized as part of the atomic commit.
 *
 * See drm_gem_simple_kms_cleanup_shadow_fb() for cleanup.
 *
 * Returns:
 * 0 on success, or a negative errno code otherwise.
 */
int drm_gem_simple_kms_prepare_shadow_fb(struct drm_simple_display_pipe *pipe,
                                         struct drm_plane_state *plane_state)
{
        return drm_gem_prepare_shadow_fb(&pipe->plane, plane_state);
}
EXPORT_SYMBOL(drm_gem_simple_kms_prepare_shadow_fb);

/**
 * drm_gem_simple_kms_cleanup_shadow_fb - releases shadow framebuffers
 * @pipe: the simple display pipe
 * @plane_state: the plane state of type struct drm_shadow_plane_state
 *
 * This function implements struct drm_simple_display_pipe_funcs.cleanup_fb.
 * It unmaps all buffer objects of the plane's framebuffer.
 *
 * See drm_gem_simple_kms_prepare_shadow_fb().
 */
void drm_gem_simple_kms_cleanup_shadow_fb(struct drm_simple_display_pipe *pipe,
                                          struct drm_plane_state *plane_state)
{
        drm_gem_cleanup_shadow_fb(&pipe->plane, plane_state);
}
EXPORT_SYMBOL(drm_gem_simple_kms_cleanup_shadow_fb);

/**
 * drm_gem_simple_kms_reset_shadow_plane - resets a shadow-buffered plane
 * @pipe: the simple display pipe
 *
 * This function implements struct drm_simple_display_pipe_funcs.reset_plane
 * for shadow-buffered planes.
 */
void drm_gem_simple_kms_reset_shadow_plane(struct drm_simple_display_pipe *pipe)
{
        drm_gem_reset_shadow_plane(&pipe->plane);
}
EXPORT_SYMBOL(drm_gem_simple_kms_reset_shadow_plane);

/**
 * drm_gem_simple_kms_duplicate_shadow_plane_state - duplicates shadow-buffered plane state
 * @pipe: the simple display pipe
 *
 * This function implements struct drm_simple_display_pipe_funcs.duplicate_plane_state
 * for shadow-buffered planes. It does not duplicate existing mappings of the shadow
 * buffers. Mappings are maintained during the atomic commit by the plane's prepare_fb
 * and cleanup_fb helpers.
 *
 * Returns:
 * A pointer to a new plane state on success, or NULL otherwise.
 */
struct drm_plane_state *
drm_gem_simple_kms_duplicate_shadow_plane_state(struct drm_simple_display_pipe *pipe)
{
        return drm_gem_duplicate_shadow_plane_state(&pipe->plane);
}
EXPORT_SYMBOL(drm_gem_simple_kms_duplicate_shadow_plane_state);

/**
 * drm_gem_simple_kms_destroy_shadow_plane_state - destroys shadow-buffered plane state
 * @pipe: the simple display pipe
 * @plane_state: the plane state of type struct drm_shadow_plane_state
 *
 * This function implements struct drm_simple_display_pipe_funcs.destroy_plane_state
 * for shadow-buffered planes. It expects that mappings of shadow buffers
 * have been released already.
 */
void drm_gem_simple_kms_destroy_shadow_plane_state(struct drm_simple_display_pipe *pipe,
                                                   struct drm_plane_state *plane_state)
{
        drm_gem_destroy_shadow_plane_state(&pipe->plane, plane_state);
}
EXPORT_SYMBOL(drm_gem_simple_kms_destroy_shadow_plane_state);