drivers/gpu/drm/msm/disp/dpu1/dpu_encoder.c
// SPDX-License-Identifier: GPL-2.0-only
/*
 * Copyright (c) 2014-2018, 2020-2021 The Linux Foundation. All rights reserved.
 * Copyright (C) 2013 Red Hat
 * Author: Rob Clark <robdclark@gmail.com>
 */

#define pr_fmt(fmt)	"[drm:%s:%d] " fmt, __func__, __LINE__
#include <linux/debugfs.h>
#include <linux/kthread.h>
#include <linux/seq_file.h>

#include <drm/drm_crtc.h>
#include <drm/drm_file.h>
#include <drm/drm_probe_helper.h>

#include "msm_drv.h"
#include "dpu_kms.h"
#include "dpu_hwio.h"
#include "dpu_hw_catalog.h"
#include "dpu_hw_intf.h"
#include "dpu_hw_ctl.h"
#include "dpu_hw_dspp.h"
#include "dpu_formats.h"
#include "dpu_encoder_phys.h"
#include "dpu_crtc.h"
#include "dpu_trace.h"
#include "dpu_core_irq.h"
#include "disp/msm_disp_snapshot.h"

#define DPU_DEBUG_ENC(e, fmt, ...) DRM_DEBUG_ATOMIC("enc%d " fmt,\
		(e) ? (e)->base.base.id : -1, ##__VA_ARGS__)

#define DPU_ERROR_ENC(e, fmt, ...) DPU_ERROR("enc%d " fmt,\
		(e) ? (e)->base.base.id : -1, ##__VA_ARGS__)

#define DPU_DEBUG_PHYS(p, fmt, ...) DRM_DEBUG_ATOMIC("enc%d intf%d pp%d " fmt,\
		(p) ? (p)->parent->base.id : -1, \
		(p) ? (p)->intf_idx - INTF_0 : -1, \
		(p) ? ((p)->hw_pp ? (p)->hw_pp->idx - PINGPONG_0 : -1) : -1, \
		##__VA_ARGS__)

#define DPU_ERROR_PHYS(p, fmt, ...) DPU_ERROR("enc%d intf%d pp%d " fmt,\
		(p) ? (p)->parent->base.id : -1, \
		(p) ? (p)->intf_idx - INTF_0 : -1, \
		(p) ? ((p)->hw_pp ? (p)->hw_pp->idx - PINGPONG_0 : -1) : -1, \
		##__VA_ARGS__)

/*
 * Two, to anticipate panels that can do cmd/vid dynamic switching;
 * the plan is to create all possible physical encoder types and switch
 * between them at runtime.
 */
#define NUM_PHYS_ENCODER_TYPES 2

#define MAX_PHYS_ENCODERS_PER_VIRTUAL \
	(MAX_H_TILES_PER_DISPLAY * NUM_PHYS_ENCODER_TYPES)

#define MAX_CHANNELS_PER_ENC 2

#define IDLE_SHORT_TIMEOUT	1

#define MAX_HDISPLAY_SPLIT 1080

/* timeout in frames waiting for frame done */
#define DPU_ENCODER_FRAME_DONE_TIMEOUT_FRAMES 5

/**
 * enum dpu_enc_rc_events - events for resource control state machine
 * @DPU_ENC_RC_EVENT_KICKOFF:
 *	This event happens at NORMAL priority.
 *	Event that signals the start of the transfer. When this event is
 *	received, enable MDP/DSI core clocks. Regardless of the previous
 *	state, the resource should be in ON state at the end of this event.
 * @DPU_ENC_RC_EVENT_FRAME_DONE:
 *	This event happens at INTERRUPT level.
 *	Event signals the end of the data transfer after the PP FRAME_DONE
 *	event. At the end of this event, a delayed work is scheduled to go to
 *	IDLE_PC state after IDLE_TIMEOUT time.
 * @DPU_ENC_RC_EVENT_PRE_STOP:
 *	This event happens at NORMAL priority.
 *	This event, when received during the ON state, leaves the RC STATE
 *	in the PRE_OFF state. It should be followed by the STOP event as
 *	part of encoder disable.
 *	If received during IDLE or OFF states, it will do nothing.
 * @DPU_ENC_RC_EVENT_STOP:
 *	This event happens at NORMAL priority.
 *	When this event is received, disable all the MDP/DSI core clocks, and
 *	disable IRQs. It should be called from the PRE_OFF or IDLE states.
 *	IDLE is expected when IDLE_PC has run, and PRE_OFF did nothing.
 *	PRE_OFF is expected when PRE_STOP was executed during the ON state.
 *	Resource state should be in OFF at the end of the event.
 * @DPU_ENC_RC_EVENT_ENTER_IDLE:
 *	This event happens at NORMAL priority from a work item.
 *	Event signals that there were no frame updates for IDLE_TIMEOUT time.
 *	This would disable MDP/DSI core clocks and change the resource state
 *	to IDLE.
 */
enum dpu_enc_rc_events {
	DPU_ENC_RC_EVENT_KICKOFF = 1,
	DPU_ENC_RC_EVENT_FRAME_DONE,
	DPU_ENC_RC_EVENT_PRE_STOP,
	DPU_ENC_RC_EVENT_STOP,
	DPU_ENC_RC_EVENT_ENTER_IDLE
};

/*
 * enum dpu_enc_rc_states - states that the resource control maintains
 * @DPU_ENC_RC_STATE_OFF: Resource is in OFF state
 * @DPU_ENC_RC_STATE_PRE_OFF: Resource is transitioning to OFF state
 * @DPU_ENC_RC_STATE_ON: Resource is in ON state
 * @DPU_ENC_RC_STATE_IDLE: Resource is in IDLE state
 */
enum dpu_enc_rc_states {
	DPU_ENC_RC_STATE_OFF,
	DPU_ENC_RC_STATE_PRE_OFF,
	DPU_ENC_RC_STATE_ON,
	DPU_ENC_RC_STATE_IDLE
};

/**
 * struct dpu_encoder_virt - virtual encoder. Container of one or more physical
 *	encoders. Virtual encoder manages one "logical" display. Physical
 *	encoders manage one intf block, tied to a specific panel/sub-panel.
 *	Virtual encoder defers as much as possible to the physical encoders.
 *	Virtual encoder registers itself with the DRM Framework as the encoder.
 * @base:		drm_encoder base class for registration with DRM
 * @enc_spinlock:	Virtual-Encoder-Wide Spin Lock for IRQ purposes
 * @bus_scaling_client:	Client handle to the bus scaling interface
 * @enabled:		True if the encoder is active, protected by enc_lock
 * @num_phys_encs:	Actual number of physical encoders contained.
 * @phys_encs:		Container of physical encoders managed.
 * @cur_master:		Pointer to the current master in this mode. An
 *			optimization; only valid after enable, cleared at
 *			disable.
 * @cur_slave:		As above, but for the slave encoder.
 * @hw_pp:		Handles to the pingpong blocks used for the display.
 *			The number of pingpong blocks can differ from
 *			num_phys_encs.
 * @intfs_swapped:	Whether or not the phys_enc interfaces have been swapped
 *			for partial update right-only cases, such as pingpong
 *			split where virtual pingpong does not generate IRQs
 * @crtc:		Pointer to the currently assigned crtc. Normally you
 *			would use crtc->state->encoder_mask to determine the
 *			link between encoder/crtc. However in this case we need
 *			to track crtc in the disable() hook which is called
 *			_after_ encoder_mask is cleared.
 * @crtc_kickoff_cb:		Callback into CRTC that will flush & start
 *				all CTL paths
 * @crtc_kickoff_cb_data:	Opaque user data given to crtc_kickoff_cb
 * @debugfs_root:		Debug file system root file node
 * @enc_lock:			Lock around physical encoder
 *				create/destroy/enable/disable
 * @frame_busy_mask:		Bitmask tracking which phys_encs are still busy
 *				processing the current command.
 *				Bit0 = phys_encs[0] etc.
 * @crtc_frame_event_cb:	callback handler for frame event
 * @crtc_frame_event_cb_data:	callback handler private data
 * @frame_done_timeout_ms:	frame done timeout in ms
 * @frame_done_timer:		watchdog timer for frame done event
 * @vsync_event_timer:		vsync timer
 * @disp_info:			local copy of msm_display_info struct
 * @idle_pc_supported:		indicates if idle power collapse is supported
 * @rc_lock:			resource control mutex lock to protect
 *				virt encoder over various state changes
 * @rc_state:			resource controller state
 * @delayed_off_work:		delayed worker to schedule disabling of
 *				clks and resources after IDLE_TIMEOUT time.
 * @vsync_event_work:		worker to handle vsync event for autorefresh
 * @topology:			topology of the display
 * @idle_timeout:		idle timeout duration in milliseconds
 * @dp:				msm_dp pointer, for DP encoders
 */
struct dpu_encoder_virt {
	struct drm_encoder base;
	spinlock_t enc_spinlock;
	uint32_t bus_scaling_client;

	bool enabled;

	unsigned int num_phys_encs;
	struct dpu_encoder_phys *phys_encs[MAX_PHYS_ENCODERS_PER_VIRTUAL];
	struct dpu_encoder_phys *cur_master;
	struct dpu_encoder_phys *cur_slave;
	struct dpu_hw_pingpong *hw_pp[MAX_CHANNELS_PER_ENC];

	bool intfs_swapped;

	struct drm_crtc *crtc;

	struct dentry *debugfs_root;
	struct mutex enc_lock;
	DECLARE_BITMAP(frame_busy_mask, MAX_PHYS_ENCODERS_PER_VIRTUAL);
	void (*crtc_frame_event_cb)(void *, u32 event);
	void *crtc_frame_event_cb_data;

	atomic_t frame_done_timeout_ms;
	struct timer_list frame_done_timer;
	struct timer_list vsync_event_timer;

	struct msm_display_info disp_info;

	bool idle_pc_supported;
	struct mutex rc_lock;
	enum dpu_enc_rc_states rc_state;
	struct delayed_work delayed_off_work;
	struct kthread_work vsync_event_work;
	struct msm_display_topology topology;

	u32 idle_timeout;

	struct msm_dp *dp;
};

#define to_dpu_encoder_virt(x) container_of(x, struct dpu_encoder_virt, base)

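/* 4x4 ordered-dither threshold matrix (values 0-15), stored row-major */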
static u32 dither_matrix[DITHER_MATRIX_SZ] = {
	15, 7, 13, 5, 3, 11, 1, 9, 12, 4, 14, 6, 0, 8, 2, 10
};

static void _dpu_encoder_setup_dither(struct dpu_hw_pingpong *hw_pp, unsigned bpc)
{
	struct dpu_hw_dither_cfg dither_cfg = { 0 };

	if (!hw_pp->ops.setup_dither)
		return;

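	/*
	 * Only 6bpc panels need the pipeline output dithered down;
	 * for every other depth, dithering is disabled.
	 */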
	switch (bpc) {
	case 6:
		dither_cfg.c0_bitdepth = 6;
		dither_cfg.c1_bitdepth = 6;
		dither_cfg.c2_bitdepth = 6;
		dither_cfg.c3_bitdepth = 6;
		dither_cfg.temporal_en = 0;
		break;
	default:
		hw_pp->ops.setup_dither(hw_pp, NULL);
		return;
	}

	memcpy(&dither_cfg.matrix, dither_matrix,
			sizeof(u32) * DITHER_MATRIX_SZ);

	hw_pp->ops.setup_dither(hw_pp, &dither_cfg);
}

void dpu_encoder_helper_report_irq_timeout(struct dpu_encoder_phys *phys_enc,
		enum dpu_intr_idx intr_idx)
{
	DRM_ERROR("irq timeout id=%u, intf=%d, pp=%d, intr=%d\n",
		  DRMID(phys_enc->parent), phys_enc->intf_idx - INTF_0,
		  phys_enc->hw_pp->idx - PINGPONG_0, intr_idx);

	if (phys_enc->parent_ops->handle_frame_done)
		phys_enc->parent_ops->handle_frame_done(
				phys_enc->parent, phys_enc,
				DPU_ENCODER_FRAME_EVENT_ERROR);
}

static int dpu_encoder_helper_wait_event_timeout(int32_t drm_id,
		u32 irq_idx, struct dpu_encoder_wait_info *info);

int dpu_encoder_helper_wait_for_irq(struct dpu_encoder_phys *phys_enc,
		enum dpu_intr_idx intr_idx,
		struct dpu_encoder_wait_info *wait_info)
{
	struct dpu_encoder_irq *irq;
	u32 irq_status;
	int ret;

	if (!wait_info || intr_idx >= INTR_IDX_MAX) {
		DPU_ERROR("invalid params\n");
		return -EINVAL;
	}
	irq = &phys_enc->irq[intr_idx];

	/* note: do master / slave checking outside */

	/* return EWOULDBLOCK since we know the wait isn't necessary */
	if (phys_enc->enable_state == DPU_ENC_DISABLED) {
		DRM_ERROR("encoder is disabled id=%u, intr=%d, irq=%d\n",
			  DRMID(phys_enc->parent), intr_idx,
			  irq->irq_idx);
		return -EWOULDBLOCK;
	}

	if (irq->irq_idx < 0) {
		DRM_DEBUG_KMS("skip irq wait id=%u, intr=%d, irq=%s\n",
			      DRMID(phys_enc->parent), intr_idx,
			      irq->name);
		return 0;
	}

	DRM_DEBUG_KMS("id=%u, intr=%d, irq=%d, pp=%d, pending_cnt=%d\n",
		      DRMID(phys_enc->parent), intr_idx,
		      irq->irq_idx, phys_enc->hw_pp->idx - PINGPONG_0,
		      atomic_read(wait_info->atomic_cnt));

	ret = dpu_encoder_helper_wait_event_timeout(
			DRMID(phys_enc->parent),
			irq->irq_idx,
			wait_info);

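	/*
	 * A timeout may mean the interrupt fired but delivery was missed;
	 * re-read the raw status and, if it is set, invoke the callback by
	 * hand so the waiter still makes progress.
	 */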
	if (ret <= 0) {
		irq_status = dpu_core_irq_read(phys_enc->dpu_kms,
				irq->irq_idx, true);
		if (irq_status) {
			unsigned long flags;

			DRM_DEBUG_KMS("irq not triggered id=%u, intr=%d, irq=%d, pp=%d, atomic_cnt=%d\n",
				      DRMID(phys_enc->parent), intr_idx,
				      irq->irq_idx,
				      phys_enc->hw_pp->idx - PINGPONG_0,
				      atomic_read(wait_info->atomic_cnt));
			local_irq_save(flags);
			irq->cb.func(phys_enc, irq->irq_idx);
			local_irq_restore(flags);
			ret = 0;
		} else {
			ret = -ETIMEDOUT;
			DRM_DEBUG_KMS("irq timeout id=%u, intr=%d, irq=%d, pp=%d, atomic_cnt=%d\n",
				      DRMID(phys_enc->parent), intr_idx,
				      irq->irq_idx,
				      phys_enc->hw_pp->idx - PINGPONG_0,
				      atomic_read(wait_info->atomic_cnt));
		}
	} else {
		ret = 0;
		trace_dpu_enc_irq_wait_success(DRMID(phys_enc->parent),
			intr_idx, irq->irq_idx,
			phys_enc->hw_pp->idx - PINGPONG_0,
			atomic_read(wait_info->atomic_cnt));
	}

	return ret;
}

int dpu_encoder_helper_register_irq(struct dpu_encoder_phys *phys_enc,
		enum dpu_intr_idx intr_idx)
{
	struct dpu_encoder_irq *irq;
	int ret = 0;

	if (intr_idx >= INTR_IDX_MAX) {
		DPU_ERROR("invalid params\n");
		return -EINVAL;
	}
	irq = &phys_enc->irq[intr_idx];

	if (irq->irq_idx < 0) {
		DPU_ERROR_PHYS(phys_enc,
			"invalid IRQ index:%d\n", irq->irq_idx);
		return -EINVAL;
	}

	ret = dpu_core_irq_register_callback(phys_enc->dpu_kms, irq->irq_idx,
			&irq->cb);
	if (ret) {
		DPU_ERROR_PHYS(phys_enc,
			"failed to register IRQ callback for %s\n",
			irq->name);
		irq->irq_idx = -EINVAL;
		return ret;
	}

	trace_dpu_enc_irq_register_success(DRMID(phys_enc->parent), intr_idx,
				irq->irq_idx);

	return ret;
}

int dpu_encoder_helper_unregister_irq(struct dpu_encoder_phys *phys_enc,
		enum dpu_intr_idx intr_idx)
{
	struct dpu_encoder_irq *irq;
	int ret;

	irq = &phys_enc->irq[intr_idx];

	/* skip (but complain about) irqs that were never registered */
	if (irq->irq_idx < 0) {
		DRM_ERROR("duplicate unregister id=%u, intr=%d, irq=%d",
			  DRMID(phys_enc->parent), intr_idx,
			  irq->irq_idx);
		return 0;
	}

	ret = dpu_core_irq_unregister_callback(phys_enc->dpu_kms, irq->irq_idx,
			&irq->cb);
	if (ret) {
		DRM_ERROR("unreg cb fail id=%u, intr=%d, irq=%d ret=%d",
			  DRMID(phys_enc->parent), intr_idx,
			  irq->irq_idx, ret);
	}

	trace_dpu_enc_irq_unregister_success(DRMID(phys_enc->parent), intr_idx,
					     irq->irq_idx);

	return 0;
}

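/**
 * dpu_encoder_get_vsync_count - running vsync count of the current master
 *	physical encoder, or zero when no master is set
 * @drm_enc: Pointer to drm encoder structure
 */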
int dpu_encoder_get_vsync_count(struct drm_encoder *drm_enc)
{
	struct dpu_encoder_virt *dpu_enc = to_dpu_encoder_virt(drm_enc);
	struct dpu_encoder_phys *phys = dpu_enc ? dpu_enc->cur_master : NULL;
	return phys ? atomic_read(&phys->vsync_cnt) : 0;
}

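/**
 * dpu_encoder_get_linecount - current scanout line of the master physical
 *	encoder, or zero when the master cannot report one
 * @drm_enc: Pointer to drm encoder structure
 */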
int dpu_encoder_get_linecount(struct drm_encoder *drm_enc)
{
	struct dpu_encoder_virt *dpu_enc;
	struct dpu_encoder_phys *phys;
	int linecount = 0;

	dpu_enc = to_dpu_encoder_virt(drm_enc);
	phys = dpu_enc ? dpu_enc->cur_master : NULL;

	if (phys && phys->ops.get_line_count)
		linecount = phys->ops.get_line_count(phys);

	return linecount;
}

void dpu_encoder_get_hw_resources(struct drm_encoder *drm_enc,
				  struct dpu_encoder_hw_resources *hw_res)
{
	struct dpu_encoder_virt *dpu_enc = NULL;
	int i = 0;

	dpu_enc = to_dpu_encoder_virt(drm_enc);
	DPU_DEBUG_ENC(dpu_enc, "\n");

	/* Query resources used by phys encs, expected to be without overlap */
	memset(hw_res, 0, sizeof(*hw_res));

	for (i = 0; i < dpu_enc->num_phys_encs; i++) {
		struct dpu_encoder_phys *phys = dpu_enc->phys_encs[i];

		if (phys->ops.get_hw_resources)
			phys->ops.get_hw_resources(phys, hw_res);
	}
}

static void dpu_encoder_destroy(struct drm_encoder *drm_enc)
{
	struct dpu_encoder_virt *dpu_enc = NULL;
	int i = 0;

	if (!drm_enc) {
		DPU_ERROR("invalid encoder\n");
		return;
	}

	dpu_enc = to_dpu_encoder_virt(drm_enc);
	DPU_DEBUG_ENC(dpu_enc, "\n");

	mutex_lock(&dpu_enc->enc_lock);

	for (i = 0; i < dpu_enc->num_phys_encs; i++) {
		struct dpu_encoder_phys *phys = dpu_enc->phys_encs[i];

		if (phys->ops.destroy) {
			phys->ops.destroy(phys);
			--dpu_enc->num_phys_encs;
			dpu_enc->phys_encs[i] = NULL;
		}
	}

	if (dpu_enc->num_phys_encs)
		DPU_ERROR_ENC(dpu_enc, "expected 0 num_phys_encs not %d\n",
				dpu_enc->num_phys_encs);
	dpu_enc->num_phys_encs = 0;
	mutex_unlock(&dpu_enc->enc_lock);

	drm_encoder_cleanup(drm_enc);
	mutex_destroy(&dpu_enc->enc_lock);
}

void dpu_encoder_helper_split_config(
		struct dpu_encoder_phys *phys_enc,
		enum dpu_intf interface)
{
	struct dpu_encoder_virt *dpu_enc;
	struct split_pipe_cfg cfg = { 0 };
	struct dpu_hw_mdp *hw_mdptop;
	struct msm_display_info *disp_info;

	if (!phys_enc->hw_mdptop || !phys_enc->parent) {
		DPU_ERROR("invalid arg(s), encoder %d\n", phys_enc != NULL);
		return;
	}

	dpu_enc = to_dpu_encoder_virt(phys_enc->parent);
	hw_mdptop = phys_enc->hw_mdptop;
	disp_info = &dpu_enc->disp_info;

	if (disp_info->intf_type != DRM_MODE_ENCODER_DSI)
		return;

	/*
	 * Disable split modes since the encoder will be operating as the only
	 * encoder, either for the entire use case in the case of, for example,
	 * single DSI, or for this frame in the case of left/right only partial
	 * update.
	 */
	if (phys_enc->split_role == ENC_ROLE_SOLO) {
		if (hw_mdptop->ops.setup_split_pipe)
			hw_mdptop->ops.setup_split_pipe(hw_mdptop, &cfg);
		return;
	}

	cfg.en = true;
	cfg.mode = phys_enc->intf_mode;
	cfg.intf = interface;

	if (cfg.en && phys_enc->ops.needs_single_flush &&
			phys_enc->ops.needs_single_flush(phys_enc))
		cfg.split_flush_en = true;

	if (phys_enc->split_role == ENC_ROLE_MASTER) {
		DPU_DEBUG_ENC(dpu_enc, "enable %d\n", cfg.en);

		if (hw_mdptop->ops.setup_split_pipe)
			hw_mdptop->ops.setup_split_pipe(hw_mdptop, &cfg);
	}
}

static struct msm_display_topology dpu_encoder_get_topology(
			struct dpu_encoder_virt *dpu_enc,
			struct dpu_kms *dpu_kms,
			struct drm_display_mode *mode)
{
	struct msm_display_topology topology = {0};
	int i, intf_count = 0;

	for (i = 0; i < MAX_PHYS_ENCODERS_PER_VIRTUAL; i++)
		if (dpu_enc->phys_encs[i])
			intf_count++;

	/* Datapath topology selection
	 *
	 * Dual display
	 * 2 LM, 2 INTF (split display using 2 interfaces)
	 *
	 * Single display
	 * 1 LM, 1 INTF
	 * 2 LM, 1 INTF (stream merge to support high resolution interfaces)
	 *
	 * Add color blocks only to the primary interface if they are
	 * available in sufficient number.
	 */
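	/*
	 * For example, a single-DSI 1440x2560 panel has hdisplay 1440 >
	 * MAX_HDISPLAY_SPLIT (1080), so on hardware with 3D merge it gets
	 * 2 LM ganged onto 1 INTF; a 1080x1920 panel stays at 1 LM, 1 INTF.
	 */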
	if (intf_count == 2)
		topology.num_lm = 2;
	else if (!dpu_kms->catalog->caps->has_3d_merge)
		topology.num_lm = 1;
	else
		topology.num_lm = (mode->hdisplay > MAX_HDISPLAY_SPLIT) ? 2 : 1;

	if (dpu_enc->disp_info.intf_type == DRM_MODE_ENCODER_DSI) {
		if (dpu_kms->catalog->dspp &&
			(dpu_kms->catalog->dspp_count >= topology.num_lm))
			topology.num_dspp = topology.num_lm;
	}

	topology.num_enc = 0;
	topology.num_intf = intf_count;

	return topology;
}

static int dpu_encoder_virt_atomic_check(
		struct drm_encoder *drm_enc,
		struct drm_crtc_state *crtc_state,
		struct drm_connector_state *conn_state)
{
	struct dpu_encoder_virt *dpu_enc;
	struct msm_drm_private *priv;
	struct dpu_kms *dpu_kms;
	const struct drm_display_mode *mode;
	struct drm_display_mode *adj_mode;
	struct msm_display_topology topology;
	struct dpu_global_state *global_state;
	int i = 0;
	int ret = 0;

	if (!drm_enc || !crtc_state || !conn_state) {
		DPU_ERROR("invalid arg(s), drm_enc %d, crtc/conn state %d/%d\n",
				drm_enc != NULL, crtc_state != NULL, conn_state != NULL);
		return -EINVAL;
	}

	dpu_enc = to_dpu_encoder_virt(drm_enc);
	DPU_DEBUG_ENC(dpu_enc, "\n");

	priv = drm_enc->dev->dev_private;
	dpu_kms = to_dpu_kms(priv->kms);
	mode = &crtc_state->mode;
	adj_mode = &crtc_state->adjusted_mode;
	global_state = dpu_kms_get_global_state(crtc_state->state);
	if (IS_ERR(global_state))
		return PTR_ERR(global_state);

	trace_dpu_enc_atomic_check(DRMID(drm_enc));

	/* perform atomic check on the first physical encoder (master) */
	for (i = 0; i < dpu_enc->num_phys_encs; i++) {
		struct dpu_encoder_phys *phys = dpu_enc->phys_encs[i];

		if (phys->ops.atomic_check)
			ret = phys->ops.atomic_check(phys, crtc_state,
					conn_state);
		else if (phys->ops.mode_fixup)
			if (!phys->ops.mode_fixup(phys, mode, adj_mode))
				ret = -EINVAL;

		if (ret) {
			DPU_ERROR_ENC(dpu_enc,
					"mode unsupported, phys idx %d\n", i);
			break;
		}
	}

	topology = dpu_encoder_get_topology(dpu_enc, dpu_kms, adj_mode);

	/* Reserve dynamic resources now. */
	if (!ret) {
		/*
		 * Release and allocate resources on every modeset.
		 * Don't allocate when active is false.
		 */
		if (drm_atomic_crtc_needs_modeset(crtc_state)) {
			dpu_rm_release(global_state, drm_enc);

			if (!crtc_state->active_changed || crtc_state->active)
				ret = dpu_rm_reserve(&dpu_kms->rm, global_state,
						drm_enc, crtc_state, topology);
		}
	}

	trace_dpu_enc_atomic_check_flags(DRMID(drm_enc), adj_mode->flags);

	return ret;
}

static void _dpu_encoder_update_vsync_source(struct dpu_encoder_virt *dpu_enc,
			struct msm_display_info *disp_info)
{
	struct dpu_vsync_source_cfg vsync_cfg = { 0 };
	struct msm_drm_private *priv;
	struct dpu_kms *dpu_kms;
	struct dpu_hw_mdp *hw_mdptop;
	struct drm_encoder *drm_enc;
	int i;

	if (!dpu_enc || !disp_info) {
		DPU_ERROR("invalid param dpu_enc:%d or disp_info:%d\n",
					dpu_enc != NULL, disp_info != NULL);
		return;
	} else if (dpu_enc->num_phys_encs > ARRAY_SIZE(dpu_enc->hw_pp)) {
		DPU_ERROR("invalid num phys enc %d/%d\n",
				dpu_enc->num_phys_encs,
				(int) ARRAY_SIZE(dpu_enc->hw_pp));
		return;
	}

	drm_enc = &dpu_enc->base;
	/* these pointers are checked in virt_enable_helper */
	priv = drm_enc->dev->dev_private;

	dpu_kms = to_dpu_kms(priv->kms);
	hw_mdptop = dpu_kms->hw_mdp;
	if (!hw_mdptop) {
		DPU_ERROR("invalid mdptop\n");
		return;
	}

	if (hw_mdptop->ops.setup_vsync_source &&
			disp_info->capabilities & MSM_DISPLAY_CAP_CMD_MODE) {
		for (i = 0; i < dpu_enc->num_phys_encs; i++)
			vsync_cfg.ppnumber[i] = dpu_enc->hw_pp[i]->idx;

		vsync_cfg.pp_count = dpu_enc->num_phys_encs;
		if (disp_info->is_te_using_watchdog_timer)
			vsync_cfg.vsync_source = DPU_VSYNC_SOURCE_WD_TIMER_0;
		else
			vsync_cfg.vsync_source = DPU_VSYNC0_SOURCE_GPIO;

		hw_mdptop->ops.setup_vsync_source(hw_mdptop, &vsync_cfg);
	}
}

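/* Propagate an IRQ enable/disable request to every physical encoder */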
static void _dpu_encoder_irq_control(struct drm_encoder *drm_enc, bool enable)
{
	struct dpu_encoder_virt *dpu_enc;
	int i;

	if (!drm_enc) {
		DPU_ERROR("invalid encoder\n");
		return;
	}

	dpu_enc = to_dpu_encoder_virt(drm_enc);

	DPU_DEBUG_ENC(dpu_enc, "enable:%d\n", enable);
	for (i = 0; i < dpu_enc->num_phys_encs; i++) {
		struct dpu_encoder_phys *phys = dpu_enc->phys_encs[i];

		if (phys->ops.irq_control)
			phys->ops.irq_control(phys, enable);
	}
}

static void _dpu_encoder_resource_control_helper(struct drm_encoder *drm_enc,
		bool enable)
{
	struct msm_drm_private *priv;
	struct dpu_kms *dpu_kms;
	struct dpu_encoder_virt *dpu_enc;

	dpu_enc = to_dpu_encoder_virt(drm_enc);
	priv = drm_enc->dev->dev_private;
	dpu_kms = to_dpu_kms(priv->kms);

	trace_dpu_enc_rc_helper(DRMID(drm_enc), enable);

	if (!dpu_enc->cur_master) {
		DPU_ERROR("encoder master not set\n");
		return;
	}

	if (enable) {
		/* enable DPU core clks */
		pm_runtime_get_sync(&dpu_kms->pdev->dev);

		/* enable all the irq */
		_dpu_encoder_irq_control(drm_enc, true);
	} else {
		/* disable all the irq */
		_dpu_encoder_irq_control(drm_enc, false);

		/* disable DPU core clks */
		pm_runtime_put_sync(&dpu_kms->pdev->dev);
	}
}

static int dpu_encoder_resource_control(struct drm_encoder *drm_enc,
		u32 sw_event)
{
	struct dpu_encoder_virt *dpu_enc;
	struct msm_drm_private *priv;
	bool is_vid_mode = false;

	if (!drm_enc || !drm_enc->dev || !drm_enc->crtc) {
		DPU_ERROR("invalid parameters\n");
		return -EINVAL;
	}
	dpu_enc = to_dpu_encoder_virt(drm_enc);
	priv = drm_enc->dev->dev_private;
	is_vid_mode = dpu_enc->disp_info.capabilities &
						MSM_DISPLAY_CAP_VID_MODE;

	/*
	 * when idle_pc is not supported, process only KICKOFF, PRE_STOP and
	 * STOP events and return early for other events (i.e. wb display).
	 */
	if (!dpu_enc->idle_pc_supported &&
			(sw_event != DPU_ENC_RC_EVENT_KICKOFF &&
			sw_event != DPU_ENC_RC_EVENT_STOP &&
			sw_event != DPU_ENC_RC_EVENT_PRE_STOP))
		return 0;

	trace_dpu_enc_rc(DRMID(drm_enc), sw_event, dpu_enc->idle_pc_supported,
			 dpu_enc->rc_state, "begin");

	switch (sw_event) {
	case DPU_ENC_RC_EVENT_KICKOFF:
		/* cancel delayed off work, if any */
		if (cancel_delayed_work_sync(&dpu_enc->delayed_off_work))
			DPU_DEBUG_ENC(dpu_enc, "sw_event:%d, work cancelled\n",
					sw_event);

		mutex_lock(&dpu_enc->rc_lock);

		/* return if the resource control is already in ON state */
		if (dpu_enc->rc_state == DPU_ENC_RC_STATE_ON) {
			DRM_DEBUG_ATOMIC("id:%u, sw_event:%d, rc in ON state\n",
				      DRMID(drm_enc), sw_event);
			mutex_unlock(&dpu_enc->rc_lock);
			return 0;
		} else if (dpu_enc->rc_state != DPU_ENC_RC_STATE_OFF &&
				dpu_enc->rc_state != DPU_ENC_RC_STATE_IDLE) {
			DRM_DEBUG_ATOMIC("id:%u, sw_event:%d, rc in state %d\n",
				      DRMID(drm_enc), sw_event,
				      dpu_enc->rc_state);
			mutex_unlock(&dpu_enc->rc_lock);
			return -EINVAL;
		}

		if (is_vid_mode && dpu_enc->rc_state == DPU_ENC_RC_STATE_IDLE)
			_dpu_encoder_irq_control(drm_enc, true);
		else
			_dpu_encoder_resource_control_helper(drm_enc, true);

		dpu_enc->rc_state = DPU_ENC_RC_STATE_ON;

		trace_dpu_enc_rc(DRMID(drm_enc), sw_event,
				 dpu_enc->idle_pc_supported, dpu_enc->rc_state,
				 "kickoff");

		mutex_unlock(&dpu_enc->rc_lock);
		break;

	case DPU_ENC_RC_EVENT_FRAME_DONE:
		/*
		 * mutex lock is not used as this event happens at interrupt
		 * context. And locking is not required as the other events
		 * like KICKOFF and STOP do a wait-for-idle before executing
		 * the resource_control
		 */
		if (dpu_enc->rc_state != DPU_ENC_RC_STATE_ON) {
			DRM_DEBUG_KMS("id:%d, sw_event:%d,rc:%d-unexpected\n",
				      DRMID(drm_enc), sw_event,
				      dpu_enc->rc_state);
			return -EINVAL;
		}

		/*
		 * schedule off work item only when there are no
		 * frames pending
		 */
		if (dpu_crtc_frame_pending(drm_enc->crtc) > 1) {
			DRM_DEBUG_KMS("id:%d skip schedule work\n",
				      DRMID(drm_enc));
			return 0;
		}

		queue_delayed_work(priv->wq, &dpu_enc->delayed_off_work,
				   msecs_to_jiffies(dpu_enc->idle_timeout));

		trace_dpu_enc_rc(DRMID(drm_enc), sw_event,
				 dpu_enc->idle_pc_supported, dpu_enc->rc_state,
				 "frame done");
		break;

	case DPU_ENC_RC_EVENT_PRE_STOP:
		/* cancel delayed off work, if any */
		if (cancel_delayed_work_sync(&dpu_enc->delayed_off_work))
			DPU_DEBUG_ENC(dpu_enc, "sw_event:%d, work cancelled\n",
					sw_event);

		mutex_lock(&dpu_enc->rc_lock);

		if (is_vid_mode &&
			  dpu_enc->rc_state == DPU_ENC_RC_STATE_IDLE) {
			_dpu_encoder_irq_control(drm_enc, true);
		}
		/* skip if already OFF or IDLE; resources are off already */
		else if (dpu_enc->rc_state == DPU_ENC_RC_STATE_OFF ||
				dpu_enc->rc_state == DPU_ENC_RC_STATE_IDLE) {
			DRM_DEBUG_KMS("id:%u, sw_event:%d, rc in %d state\n",
				      DRMID(drm_enc), sw_event,
				      dpu_enc->rc_state);
			mutex_unlock(&dpu_enc->rc_lock);
			return 0;
		}

		dpu_enc->rc_state = DPU_ENC_RC_STATE_PRE_OFF;

		trace_dpu_enc_rc(DRMID(drm_enc), sw_event,
				 dpu_enc->idle_pc_supported, dpu_enc->rc_state,
				 "pre stop");

		mutex_unlock(&dpu_enc->rc_lock);
		break;

	case DPU_ENC_RC_EVENT_STOP:
		mutex_lock(&dpu_enc->rc_lock);

		/* return if the resource control is already in OFF state */
		if (dpu_enc->rc_state == DPU_ENC_RC_STATE_OFF) {
			DRM_DEBUG_KMS("id: %u, sw_event:%d, rc in OFF state\n",
				      DRMID(drm_enc), sw_event);
			mutex_unlock(&dpu_enc->rc_lock);
			return 0;
		} else if (dpu_enc->rc_state == DPU_ENC_RC_STATE_ON) {
			DRM_ERROR("id: %u, sw_event:%d, rc in state %d\n",
				  DRMID(drm_enc), sw_event, dpu_enc->rc_state);
			mutex_unlock(&dpu_enc->rc_lock);
			return -EINVAL;
		}

		/*
		 * expect to arrive here only if in either idle state or
		 * pre-off; in the IDLE state the resources are already
		 * disabled
		 */
		if (dpu_enc->rc_state == DPU_ENC_RC_STATE_PRE_OFF)
			_dpu_encoder_resource_control_helper(drm_enc, false);

		dpu_enc->rc_state = DPU_ENC_RC_STATE_OFF;

		trace_dpu_enc_rc(DRMID(drm_enc), sw_event,
				 dpu_enc->idle_pc_supported, dpu_enc->rc_state,
				 "stop");

		mutex_unlock(&dpu_enc->rc_lock);
		break;

	case DPU_ENC_RC_EVENT_ENTER_IDLE:
		mutex_lock(&dpu_enc->rc_lock);

		if (dpu_enc->rc_state != DPU_ENC_RC_STATE_ON) {
			DRM_ERROR("id: %u, sw_event:%d, rc:%d !ON state\n",
				  DRMID(drm_enc), sw_event, dpu_enc->rc_state);
			mutex_unlock(&dpu_enc->rc_lock);
			return 0;
		}

		/*
		 * if we are in ON but a frame was just kicked off,
		 * ignore the IDLE event, it's probably a stale timer event
		 */
		if (dpu_enc->frame_busy_mask[0]) {
			DRM_ERROR("id:%u, sw_event:%d, rc:%d frame pending\n",
				  DRMID(drm_enc), sw_event, dpu_enc->rc_state);
			mutex_unlock(&dpu_enc->rc_lock);
			return 0;
		}

		if (is_vid_mode)
			_dpu_encoder_irq_control(drm_enc, false);
		else
			_dpu_encoder_resource_control_helper(drm_enc, false);

		dpu_enc->rc_state = DPU_ENC_RC_STATE_IDLE;

		trace_dpu_enc_rc(DRMID(drm_enc), sw_event,
				 dpu_enc->idle_pc_supported, dpu_enc->rc_state,
				 "idle");

		mutex_unlock(&dpu_enc->rc_lock);
		break;

	default:
		DRM_ERROR("id:%u, unexpected sw_event: %d\n", DRMID(drm_enc),
			  sw_event);
		trace_dpu_enc_rc(DRMID(drm_enc), sw_event,
				 dpu_enc->idle_pc_supported, dpu_enc->rc_state,
				 "error");
		break;
	}

	trace_dpu_enc_rc(DRMID(drm_enc), sw_event,
			 dpu_enc->idle_pc_supported, dpu_enc->rc_state,
			 "end");
	return 0;
}

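/*
 * At mode_set time the hardware blocks reserved during atomic_check are
 * looked up again and handed out to the CRTC state and to each physical
 * encoder.
 */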
static void dpu_encoder_virt_mode_set(struct drm_encoder *drm_enc,
				      struct drm_display_mode *mode,
				      struct drm_display_mode *adj_mode)
{
	struct dpu_encoder_virt *dpu_enc;
	struct msm_drm_private *priv;
	struct dpu_kms *dpu_kms;
	struct list_head *connector_list;
	struct drm_connector *conn = NULL, *conn_iter;
	struct drm_crtc *drm_crtc;
	struct dpu_crtc_state *cstate;
	struct dpu_global_state *global_state;
	struct dpu_hw_blk *hw_pp[MAX_CHANNELS_PER_ENC];
	struct dpu_hw_blk *hw_ctl[MAX_CHANNELS_PER_ENC];
	struct dpu_hw_blk *hw_lm[MAX_CHANNELS_PER_ENC];
	struct dpu_hw_blk *hw_dspp[MAX_CHANNELS_PER_ENC] = { NULL };
	int num_lm, num_ctl, num_pp;
	int i, j;

	if (!drm_enc) {
		DPU_ERROR("invalid encoder\n");
		return;
	}

	dpu_enc = to_dpu_encoder_virt(drm_enc);
	DPU_DEBUG_ENC(dpu_enc, "\n");

	priv = drm_enc->dev->dev_private;
	dpu_kms = to_dpu_kms(priv->kms);
	connector_list = &dpu_kms->dev->mode_config.connector_list;

	global_state = dpu_kms_get_existing_global_state(dpu_kms);
	if (IS_ERR_OR_NULL(global_state)) {
		DPU_ERROR("Failed to get global state");
		return;
	}

	trace_dpu_enc_mode_set(DRMID(drm_enc));

	if (drm_enc->encoder_type == DRM_MODE_ENCODER_TMDS)
		msm_dp_display_mode_set(dpu_enc->dp, drm_enc, mode, adj_mode);

	list_for_each_entry(conn_iter, connector_list, head)
		if (conn_iter->encoder == drm_enc)
			conn = conn_iter;

	if (!conn) {
		DPU_ERROR_ENC(dpu_enc, "failed to find attached connector\n");
		return;
	} else if (!conn->state) {
		DPU_ERROR_ENC(dpu_enc, "invalid connector state\n");
		return;
	}

	drm_for_each_crtc(drm_crtc, drm_enc->dev)
		if (drm_crtc->state->encoder_mask & drm_encoder_mask(drm_enc))
			break;

	/* Query resources that have been reserved in the atomic check step. */
	num_pp = dpu_rm_get_assigned_resources(&dpu_kms->rm, global_state,
		drm_enc->base.id, DPU_HW_BLK_PINGPONG, hw_pp,
		ARRAY_SIZE(hw_pp));
	num_ctl = dpu_rm_get_assigned_resources(&dpu_kms->rm, global_state,
		drm_enc->base.id, DPU_HW_BLK_CTL, hw_ctl, ARRAY_SIZE(hw_ctl));
	num_lm = dpu_rm_get_assigned_resources(&dpu_kms->rm, global_state,
		drm_enc->base.id, DPU_HW_BLK_LM, hw_lm, ARRAY_SIZE(hw_lm));
	dpu_rm_get_assigned_resources(&dpu_kms->rm, global_state,
		drm_enc->base.id, DPU_HW_BLK_DSPP, hw_dspp,
		ARRAY_SIZE(hw_dspp));

	for (i = 0; i < MAX_CHANNELS_PER_ENC; i++)
		dpu_enc->hw_pp[i] = i < num_pp ? to_dpu_hw_pingpong(hw_pp[i])
						: NULL;

	cstate = to_dpu_crtc_state(drm_crtc->state);

	for (i = 0; i < num_lm; i++) {
		int ctl_idx = (i < num_ctl) ? i : (num_ctl-1);

		cstate->mixers[i].hw_lm = to_dpu_hw_mixer(hw_lm[i]);
		cstate->mixers[i].lm_ctl = to_dpu_hw_ctl(hw_ctl[ctl_idx]);
		cstate->mixers[i].hw_dspp = to_dpu_hw_dspp(hw_dspp[i]);
	}

	cstate->num_mixers = num_lm;

	for (i = 0; i < dpu_enc->num_phys_encs; i++) {
		int num_blk;
		struct dpu_hw_blk *hw_blk[MAX_CHANNELS_PER_ENC];
		struct dpu_encoder_phys *phys = dpu_enc->phys_encs[i];

		if (!dpu_enc->hw_pp[i]) {
			DPU_ERROR_ENC(dpu_enc,
				"no pp block assigned at idx: %d\n", i);
			return;
		}

		if (!hw_ctl[i]) {
			DPU_ERROR_ENC(dpu_enc,
				"no ctl block assigned at idx: %d\n", i);
			return;
		}

		phys->hw_pp = dpu_enc->hw_pp[i];
		phys->hw_ctl = to_dpu_hw_ctl(hw_ctl[i]);

		num_blk = dpu_rm_get_assigned_resources(&dpu_kms->rm,
			global_state, drm_enc->base.id, DPU_HW_BLK_INTF,
			hw_blk, ARRAY_SIZE(hw_blk));
		for (j = 0; j < num_blk; j++) {
			struct dpu_hw_intf *hw_intf;

			/* index with the inner loop variable j, not i */
			hw_intf = to_dpu_hw_intf(hw_blk[j]);
			if (hw_intf->idx == phys->intf_idx)
				phys->hw_intf = hw_intf;
		}

		if (!phys->hw_intf) {
			DPU_ERROR_ENC(dpu_enc,
				      "no intf block assigned at idx: %d\n", i);
			return;
		}

		phys->connector = conn->state->connector;
		if (phys->ops.mode_set)
			phys->ops.mode_set(phys, mode, adj_mode);
	}
}

static void _dpu_encoder_virt_enable_helper(struct drm_encoder *drm_enc)
{
	struct dpu_encoder_virt *dpu_enc = NULL;
	int i;

	if (!drm_enc || !drm_enc->dev) {
		DPU_ERROR("invalid parameters\n");
		return;
	}

	dpu_enc = to_dpu_encoder_virt(drm_enc);
	if (!dpu_enc || !dpu_enc->cur_master) {
		DPU_ERROR("invalid dpu encoder/master\n");
		return;
	}

	if (dpu_enc->disp_info.intf_type == DRM_MODE_CONNECTOR_DisplayPort &&
		dpu_enc->cur_master->hw_mdptop &&
		dpu_enc->cur_master->hw_mdptop->ops.intf_audio_select)
		dpu_enc->cur_master->hw_mdptop->ops.intf_audio_select(
			dpu_enc->cur_master->hw_mdptop);

	_dpu_encoder_update_vsync_source(dpu_enc, &dpu_enc->disp_info);

	if (dpu_enc->disp_info.intf_type == DRM_MODE_ENCODER_DSI &&
			!WARN_ON(dpu_enc->num_phys_encs == 0)) {
		unsigned bpc = dpu_enc->phys_encs[0]->connector->display_info.bpc;
		for (i = 0; i < MAX_CHANNELS_PER_ENC; i++) {
			if (!dpu_enc->hw_pp[i])
				continue;
			_dpu_encoder_setup_dither(dpu_enc->hw_pp[i], bpc);
		}
	}
}

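/**
 * dpu_encoder_virt_runtime_resume - re-program the physical encoders after a
 *	runtime resume, but only if the encoder was enabled beforehand
 * @drm_enc: Pointer to drm encoder structure
 */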
void dpu_encoder_virt_runtime_resume(struct drm_encoder *drm_enc)
{
	struct dpu_encoder_virt *dpu_enc = to_dpu_encoder_virt(drm_enc);

	mutex_lock(&dpu_enc->enc_lock);

	if (!dpu_enc->enabled)
		goto out;

	if (dpu_enc->cur_slave && dpu_enc->cur_slave->ops.restore)
		dpu_enc->cur_slave->ops.restore(dpu_enc->cur_slave);
	if (dpu_enc->cur_master && dpu_enc->cur_master->ops.restore)
		dpu_enc->cur_master->ops.restore(dpu_enc->cur_master);

	_dpu_encoder_virt_enable_helper(drm_enc);

out:
	mutex_unlock(&dpu_enc->enc_lock);
}

static void dpu_encoder_virt_enable(struct drm_encoder *drm_enc)
{
	struct dpu_encoder_virt *dpu_enc = NULL;
	int ret = 0;
	struct msm_drm_private *priv;
	struct drm_display_mode *cur_mode = NULL;

	if (!drm_enc) {
		DPU_ERROR("invalid encoder\n");
		return;
	}
	dpu_enc = to_dpu_encoder_virt(drm_enc);

	mutex_lock(&dpu_enc->enc_lock);
	cur_mode = &dpu_enc->base.crtc->state->adjusted_mode;
	priv = drm_enc->dev->dev_private;

	trace_dpu_enc_enable(DRMID(drm_enc), cur_mode->hdisplay,
			     cur_mode->vdisplay);

	/* always enable slave encoder before master */
	if (dpu_enc->cur_slave && dpu_enc->cur_slave->ops.enable)
		dpu_enc->cur_slave->ops.enable(dpu_enc->cur_slave);

	if (dpu_enc->cur_master && dpu_enc->cur_master->ops.enable)
		dpu_enc->cur_master->ops.enable(dpu_enc->cur_master);

	ret = dpu_encoder_resource_control(drm_enc, DPU_ENC_RC_EVENT_KICKOFF);
	if (ret) {
		DPU_ERROR_ENC(dpu_enc, "dpu resource control failed: %d\n",
				ret);
		goto out;
	}

	_dpu_encoder_virt_enable_helper(drm_enc);

	if (drm_enc->encoder_type == DRM_MODE_ENCODER_TMDS) {
		ret = msm_dp_display_enable(dpu_enc->dp, drm_enc);
		if (ret) {
			DPU_ERROR_ENC(dpu_enc, "dp display enable failed: %d\n",
				ret);
			goto out;
		}
	}
	dpu_enc->enabled = true;

out:
	mutex_unlock(&dpu_enc->enc_lock);
}

static void dpu_encoder_virt_disable(struct drm_encoder *drm_enc)
{
	struct dpu_encoder_virt *dpu_enc = NULL;
	struct msm_drm_private *priv;
	int i = 0;

	if (!drm_enc) {
		DPU_ERROR("invalid encoder\n");
		return;
	} else if (!drm_enc->dev) {
		DPU_ERROR("invalid dev\n");
		return;
	}

	dpu_enc = to_dpu_encoder_virt(drm_enc);
	DPU_DEBUG_ENC(dpu_enc, "\n");

	mutex_lock(&dpu_enc->enc_lock);
	dpu_enc->enabled = false;

	priv = drm_enc->dev->dev_private;

	trace_dpu_enc_disable(DRMID(drm_enc));

	/* wait for idle */
	dpu_encoder_wait_for_event(drm_enc, MSM_ENC_TX_COMPLETE);

	if (drm_enc->encoder_type == DRM_MODE_ENCODER_TMDS) {
		if (msm_dp_display_pre_disable(dpu_enc->dp, drm_enc))
			DPU_ERROR_ENC(dpu_enc, "dp display push idle failed\n");
	}

	dpu_encoder_resource_control(drm_enc, DPU_ENC_RC_EVENT_PRE_STOP);

	for (i = 0; i < dpu_enc->num_phys_encs; i++) {
		struct dpu_encoder_phys *phys = dpu_enc->phys_encs[i];

		if (phys->ops.disable)
			phys->ops.disable(phys);
	}

	/* after phys waits for frame-done, should be no more frames pending */
	if (atomic_xchg(&dpu_enc->frame_done_timeout_ms, 0)) {
		DPU_ERROR("enc%d timeout pending\n", drm_enc->base.id);
		del_timer_sync(&dpu_enc->frame_done_timer);
	}

	dpu_encoder_resource_control(drm_enc, DPU_ENC_RC_EVENT_STOP);

	for (i = 0; i < dpu_enc->num_phys_encs; i++) {
		dpu_enc->phys_encs[i]->connector = NULL;
	}

	DPU_DEBUG_ENC(dpu_enc, "encoder disabled\n");

	if (drm_enc->encoder_type == DRM_MODE_ENCODER_TMDS) {
		if (msm_dp_display_disable(dpu_enc->dp, drm_enc))
			DPU_ERROR_ENC(dpu_enc, "dp display disable failed\n");
	}

	mutex_unlock(&dpu_enc->enc_lock);
}

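/*
 * Map a (type, controller_id) pair from the hardware catalog to an INTF
 * index; returns INTF_MAX when no matching interface exists.
 */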
static enum dpu_intf dpu_encoder_get_intf(struct dpu_mdss_cfg *catalog,
		enum dpu_intf_type type, u32 controller_id)
{
	int i = 0;

	for (i = 0; i < catalog->intf_count; i++) {
		if (catalog->intf[i].type == type
		    && catalog->intf[i].controller_id == controller_id) {
			return catalog->intf[i].id;
		}
	}

	return INTF_MAX;
}

static void dpu_encoder_vblank_callback(struct drm_encoder *drm_enc,
		struct dpu_encoder_phys *phy_enc)
{
	struct dpu_encoder_virt *dpu_enc = NULL;
	unsigned long lock_flags;

	if (!drm_enc || !phy_enc)
		return;

	DPU_ATRACE_BEGIN("encoder_vblank_callback");
	dpu_enc = to_dpu_encoder_virt(drm_enc);

	spin_lock_irqsave(&dpu_enc->enc_spinlock, lock_flags);
	if (dpu_enc->crtc)
		dpu_crtc_vblank_callback(dpu_enc->crtc);
	spin_unlock_irqrestore(&dpu_enc->enc_spinlock, lock_flags);

	atomic_inc(&phy_enc->vsync_cnt);
	DPU_ATRACE_END("encoder_vblank_callback");
}

static void dpu_encoder_underrun_callback(struct drm_encoder *drm_enc,
		struct dpu_encoder_phys *phy_enc)
{
	if (!phy_enc)
		return;

	DPU_ATRACE_BEGIN("encoder_underrun_callback");
	atomic_inc(&phy_enc->underrun_cnt);

	/* trigger dump only on the first underrun */
	if (atomic_read(&phy_enc->underrun_cnt) == 1)
		msm_disp_snapshot_state(drm_enc->dev);

	trace_dpu_enc_underrun_cb(DRMID(drm_enc),
				  atomic_read(&phy_enc->underrun_cnt));
	DPU_ATRACE_END("encoder_underrun_callback");
}

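/**
 * dpu_encoder_assign_crtc - remember the crtc driving this encoder, so the
 *	disable() hook can still find it after encoder_mask has been cleared
 * @drm_enc: Pointer to drm encoder structure
 * @crtc: crtc to assign, or NULL to clear the assignment
 */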
void dpu_encoder_assign_crtc(struct drm_encoder *drm_enc, struct drm_crtc *crtc)
{
	struct dpu_encoder_virt *dpu_enc = to_dpu_encoder_virt(drm_enc);
	unsigned long lock_flags;

	spin_lock_irqsave(&dpu_enc->enc_spinlock, lock_flags);
	/* crtc should always be cleared before re-assigning */
	WARN_ON(crtc && dpu_enc->crtc);
	dpu_enc->crtc = crtc;
	spin_unlock_irqrestore(&dpu_enc->enc_spinlock, lock_flags);
}

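/**
 * dpu_encoder_toggle_vblank_for_crtc - toggle vblank IRQs on every physical
 *	encoder, but only while the encoder is still assigned to @crtc
 * @drm_enc: Pointer to drm encoder structure
 * @crtc: crtc the request is made on behalf of
 * @enable: true to enable vblank IRQs, false to disable them
 */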
void dpu_encoder_toggle_vblank_for_crtc(struct drm_encoder *drm_enc,
					struct drm_crtc *crtc, bool enable)
{
	struct dpu_encoder_virt *dpu_enc = to_dpu_encoder_virt(drm_enc);
	unsigned long lock_flags;
	int i;

	trace_dpu_enc_vblank_cb(DRMID(drm_enc), enable);

	spin_lock_irqsave(&dpu_enc->enc_spinlock, lock_flags);
	if (dpu_enc->crtc != crtc) {
		spin_unlock_irqrestore(&dpu_enc->enc_spinlock, lock_flags);
		return;
	}
	spin_unlock_irqrestore(&dpu_enc->enc_spinlock, lock_flags);

	for (i = 0; i < dpu_enc->num_phys_encs; i++) {
		struct dpu_encoder_phys *phys = dpu_enc->phys_encs[i];

		if (phys->ops.control_vblank_irq)
			phys->ops.control_vblank_irq(phys, enable);
	}
}

1348 void dpu_encoder_register_frame_event_callback(struct drm_encoder *drm_enc,
1349                 void (*frame_event_cb)(void *, u32 event),
1350                 void *frame_event_cb_data)
1351 {
1352         struct dpu_encoder_virt *dpu_enc = to_dpu_encoder_virt(drm_enc);
1353         unsigned long lock_flags;
1354         bool enable;
1355
1356         enable = frame_event_cb ? true : false;
1357
1358         if (!drm_enc) {
1359                 DPU_ERROR("invalid encoder\n");
1360                 return;
1361         }
1362         trace_dpu_enc_frame_event_cb(DRMID(drm_enc), enable);
1363
1364         spin_lock_irqsave(&dpu_enc->enc_spinlock, lock_flags);
1365         dpu_enc->crtc_frame_event_cb = frame_event_cb;
1366         dpu_enc->crtc_frame_event_cb_data = frame_event_cb_data;
1367         spin_unlock_irqrestore(&dpu_enc->enc_spinlock, lock_flags);
1368 }
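/*
 * Illustrative sketch (not driver code): a CRTC registers its frame-event
 * handler after being attached to the encoder and unregisters by passing
 * NULL. The handler name below is hypothetical.
 *
 *	static void example_frame_event(void *data, u32 event)
 *	{
 *		if (event & DPU_ENCODER_FRAME_EVENT_ERROR)
 *			pr_warn("frame error\n");
 *	}
 *
 *	dpu_encoder_register_frame_event_callback(enc, example_frame_event,
 *						   crtc);
 *	...
 *	dpu_encoder_register_frame_event_callback(enc, NULL, NULL);
 */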
1369
1370 static void dpu_encoder_frame_done_callback(
1371                 struct drm_encoder *drm_enc,
1372                 struct dpu_encoder_phys *ready_phys, u32 event)
1373 {
1374         struct dpu_encoder_virt *dpu_enc = to_dpu_encoder_virt(drm_enc);
1375         unsigned int i;
1376
1377         if (event & (DPU_ENCODER_FRAME_EVENT_DONE
1378                         | DPU_ENCODER_FRAME_EVENT_ERROR
1379                         | DPU_ENCODER_FRAME_EVENT_PANEL_DEAD)) {
1380
1381                 if (!dpu_enc->frame_busy_mask[0]) {
1382                         /*
1383                          * Suppress frame_done when there is no waiter;
1384                          * this is most likely an autorefresh frame.
1385                          */
1386                         trace_dpu_enc_frame_done_cb_not_busy(DRMID(drm_enc),
1387                                         event, ready_phys->intf_idx);
1388                         return;
1389                 }
1390
1391                 /* One of the physical encoders has become idle */
1392                 for (i = 0; i < dpu_enc->num_phys_encs; i++) {
1393                         if (dpu_enc->phys_encs[i] == ready_phys) {
1394                                 trace_dpu_enc_frame_done_cb(DRMID(drm_enc), i,
1395                                                 dpu_enc->frame_busy_mask[0]);
1396                                 clear_bit(i, dpu_enc->frame_busy_mask);
1397                         }
1398                 }
1399
1400                 if (!dpu_enc->frame_busy_mask[0]) {
1401                         atomic_set(&dpu_enc->frame_done_timeout_ms, 0);
1402                         del_timer(&dpu_enc->frame_done_timer);
1403
1404                         dpu_encoder_resource_control(drm_enc,
1405                                         DPU_ENC_RC_EVENT_FRAME_DONE);
1406
1407                         if (dpu_enc->crtc_frame_event_cb)
1408                                 dpu_enc->crtc_frame_event_cb(
1409                                         dpu_enc->crtc_frame_event_cb_data,
1410                                         event);
1411                 }
1412         } else {
1413                 if (dpu_enc->crtc_frame_event_cb)
1414                         dpu_enc->crtc_frame_event_cb(
1415                                 dpu_enc->crtc_frame_event_cb_data, event);
1416         }
1417 }
1418
1419 static void dpu_encoder_off_work(struct work_struct *work)
1420 {
1421         struct dpu_encoder_virt *dpu_enc = container_of(work,
1422                         struct dpu_encoder_virt, delayed_off_work.work);
1423
1424         dpu_encoder_resource_control(&dpu_enc->base,
1425                                                 DPU_ENC_RC_EVENT_ENTER_IDLE);
1426
1427         dpu_encoder_frame_done_callback(&dpu_enc->base, NULL,
1428                                 DPU_ENCODER_FRAME_EVENT_IDLE);
1429 }
1430
1431 /**
1432  * _dpu_encoder_trigger_flush - trigger flush for a physical encoder
1433  * @drm_enc: Pointer to drm encoder structure
1434  * @phys: Pointer to physical encoder structure
1435  * @extra_flush_bits: Additional bit mask to include in flush trigger
1436  */
1437 static void _dpu_encoder_trigger_flush(struct drm_encoder *drm_enc,
1438                 struct dpu_encoder_phys *phys, uint32_t extra_flush_bits)
1439 {
1440         struct dpu_hw_ctl *ctl;
1441         int pending_kickoff_cnt;
1442         u32 ret = UINT_MAX;
1443
1444         if (!phys->hw_pp) {
1445                 DPU_ERROR("invalid pingpong hw\n");
1446                 return;
1447         }
1448
1449         ctl = phys->hw_ctl;
1450         if (!ctl->ops.trigger_flush) {
1451                 DPU_ERROR("missing trigger cb\n");
1452                 return;
1453         }
1454
1455         pending_kickoff_cnt = dpu_encoder_phys_inc_pending(phys);
1456
1457         if (extra_flush_bits && ctl->ops.update_pending_flush)
1458                 ctl->ops.update_pending_flush(ctl, extra_flush_bits);
1459
1460         ctl->ops.trigger_flush(ctl);
1461
1462         if (ctl->ops.get_pending_flush)
1463                 ret = ctl->ops.get_pending_flush(ctl);
1464
1465         trace_dpu_enc_trigger_flush(DRMID(drm_enc), phys->intf_idx,
1466                                     pending_kickoff_cnt, ctl->idx,
1467                                     extra_flush_bits, ret);
1468 }
1469
1470 /**
1471  * _dpu_encoder_trigger_start - trigger start for a physical encoder
1472  * @phys: Pointer to physical encoder structure
1473  */
1474 static void _dpu_encoder_trigger_start(struct dpu_encoder_phys *phys)
1475 {
1476         if (!phys) {
1477                 DPU_ERROR("invalid argument(s)\n");
1478                 return;
1479         }
1480
1481         if (!phys->hw_pp) {
1482                 DPU_ERROR("invalid pingpong hw\n");
1483                 return;
1484         }
1485
1486         if (phys->ops.trigger_start && phys->enable_state != DPU_ENC_DISABLED)
1487                 phys->ops.trigger_start(phys);
1488 }
1489
1490 void dpu_encoder_helper_trigger_start(struct dpu_encoder_phys *phys_enc)
1491 {
1492         struct dpu_hw_ctl *ctl;
1493
1494         ctl = phys_enc->hw_ctl;
1495         if (ctl->ops.trigger_start) {
1496                 ctl->ops.trigger_start(ctl);
1497                 trace_dpu_enc_trigger_start(DRMID(phys_enc->parent), ctl->idx);
1498         }
1499 }
1500
1501 static int dpu_encoder_helper_wait_event_timeout(
1502                 int32_t drm_id,
1503                 u32 irq_idx,
1504                 struct dpu_encoder_wait_info *info)
1505 {
1506         int rc = 0;
1507         s64 expected_time = ktime_to_ms(ktime_get()) + info->timeout_ms;
1508         s64 wait_jiffies = msecs_to_jiffies(info->timeout_ms);
1509         s64 time;
1510
1511         do {
1512                 rc = wait_event_timeout(*(info->wq),
1513                                 atomic_read(info->atomic_cnt) == 0, wait_jiffies);
1514                 time = ktime_to_ms(ktime_get());
1515
1516                 trace_dpu_enc_wait_event_timeout(drm_id, irq_idx, rc, time,
1517                                                  expected_time,
1518                                                  atomic_read(info->atomic_cnt));
1519         /* on timeout, retry while the counter is set and time remains */
1520         } while (atomic_read(info->atomic_cnt) && (rc == 0) &&
1521                         (time < expected_time));
1522
1523         return rc;
1524 }
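/*
 * Illustrative sketch (not driver code): callers fill in a
 * dpu_encoder_wait_info with a wait queue, a pending counter and a
 * timeout; the retry-until-deadline logic above is handled internally.
 * The field values below are assumptions for illustration (84 ms is
 * roughly five frame periods at 60 Hz).
 *
 *	struct dpu_encoder_wait_info info = {
 *		.wq = &phys_enc->pending_kickoff_wq,
 *		.atomic_cnt = &phys_enc->pending_kickoff_cnt,
 *		.timeout_ms = 84,
 *	};
 *
 *	if (!dpu_encoder_helper_wait_event_timeout(DRMID(drm_enc),
 *						   irq_idx, &info))
 *		DPU_ERROR("kickoff wait timed out\n");
 */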
1525
1526 static void dpu_encoder_helper_hw_reset(struct dpu_encoder_phys *phys_enc)
1527 {
1528         struct dpu_encoder_virt *dpu_enc;
1529         struct dpu_hw_ctl *ctl;
1530         int rc;
1531         struct drm_encoder *drm_enc;
1532
1533         dpu_enc = to_dpu_encoder_virt(phys_enc->parent);
1534         ctl = phys_enc->hw_ctl;
1535         drm_enc = phys_enc->parent;
1536
1537         if (!ctl->ops.reset)
1538                 return;
1539
1540         DRM_DEBUG_KMS("id:%u ctl %d reset\n", DRMID(drm_enc),
1541                       ctl->idx);
1542
1543         rc = ctl->ops.reset(ctl);
1544         if (rc) {
1545                 DPU_ERROR_ENC(dpu_enc, "ctl %d reset failure\n",  ctl->idx);
1546                 msm_disp_snapshot_state(drm_enc->dev);
1547         }
1548
1549         phys_enc->enable_state = DPU_ENC_ENABLED;
1550 }
1551
1552 /**
1553  * _dpu_encoder_kickoff_phys - handle physical encoder kickoff
1554  *      Iterate through the physical encoders and perform consolidated flush
1555  *      and/or control start triggering as needed. This is done in the virtual
1556  *      encoder rather than the individual physical ones in order to handle
1557  *      use cases that require visibility into multiple physical encoders at
1558  *      a time.
1559  * @dpu_enc: Pointer to virtual encoder structure
1560  */
1561 static void _dpu_encoder_kickoff_phys(struct dpu_encoder_virt *dpu_enc)
1562 {
1563         struct dpu_hw_ctl *ctl;
1564         uint32_t i, pending_flush;
1565         unsigned long lock_flags;
1566
1567         pending_flush = 0x0;
1568
1569         /* update pending counts and trigger kickoff ctl flush atomically */
1570         spin_lock_irqsave(&dpu_enc->enc_spinlock, lock_flags);
1571
1572         /* don't perform flush/start operations for slave encoders */
1573         for (i = 0; i < dpu_enc->num_phys_encs; i++) {
1574                 struct dpu_encoder_phys *phys = dpu_enc->phys_encs[i];
1575
1576                 if (phys->enable_state == DPU_ENC_DISABLED)
1577                         continue;
1578
1579                 ctl = phys->hw_ctl;
1580
1581                 /*
1582                  * This is cleared in frame_done worker, which isn't invoked
1583                  * for async commits. So don't set this for async, since it'll
1584                  * roll over to the next commit.
1585                  */
1586                 if (phys->split_role != ENC_ROLE_SLAVE)
1587                         set_bit(i, dpu_enc->frame_busy_mask);
1588
1589                 if (!phys->ops.needs_single_flush ||
1590                                 !phys->ops.needs_single_flush(phys))
1591                         _dpu_encoder_trigger_flush(&dpu_enc->base, phys, 0x0);
1592                 else if (ctl->ops.get_pending_flush)
1593                         pending_flush |= ctl->ops.get_pending_flush(ctl);
1594         }
1595
1596         /* for split flush, combine pending flush masks and send to master */
1597         if (pending_flush && dpu_enc->cur_master) {
1598                 _dpu_encoder_trigger_flush(
1599                                 &dpu_enc->base,
1600                                 dpu_enc->cur_master,
1601                                 pending_flush);
1602         }
1603
1604         _dpu_encoder_trigger_start(dpu_enc->cur_master);
1605
1606         spin_unlock_irqrestore(&dpu_enc->enc_spinlock, lock_flags);
1607 }
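/*
 * Example of the split-flush path above (illustrative): with a dual-DSI
 * panel, the slave INTF reports needs_single_flush(), so its flush bits
 * are only accumulated; the master then receives one combined flush so
 * both halves of the frame latch together:
 *
 *	slave:  pending_flush |= get_pending_flush(ctl_slave)
 *	master: trigger_flush(ctl_master), with pending_flush merged in
 *	master: trigger_start()
 */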
1608
1609 void dpu_encoder_trigger_kickoff_pending(struct drm_encoder *drm_enc)
1610 {
1611         struct dpu_encoder_virt *dpu_enc;
1612         struct dpu_encoder_phys *phys;
1613         unsigned int i;
1614         struct dpu_hw_ctl *ctl;
1615         struct msm_display_info *disp_info;
1616
1617         if (!drm_enc) {
1618                 DPU_ERROR("invalid encoder\n");
1619                 return;
1620         }
1621         dpu_enc = to_dpu_encoder_virt(drm_enc);
1622         disp_info = &dpu_enc->disp_info;
1623
1624         for (i = 0; i < dpu_enc->num_phys_encs; i++) {
1625                 phys = dpu_enc->phys_encs[i];
1626
1627                 ctl = phys->hw_ctl;
1628                 if (ctl->ops.clear_pending_flush)
1629                         ctl->ops.clear_pending_flush(ctl);
1630
1631                 /* update only for command mode primary ctl */
1632                 if ((phys == dpu_enc->cur_master) &&
1633                    (disp_info->capabilities & MSM_DISPLAY_CAP_CMD_MODE)
1634                     && ctl->ops.trigger_pending)
1635                         ctl->ops.trigger_pending(ctl);
1636         }
1637 }
1638
1639 static u32 _dpu_encoder_calculate_linetime(struct dpu_encoder_virt *dpu_enc,
1640                 struct drm_display_mode *mode)
1641 {
1642         u64 pclk_rate;
1643         u32 pclk_period;
1644         u32 line_time;
1645
1646         /*
1647          * For linetime calculation, only operate on master encoder.
1648          */
1649         if (!dpu_enc->cur_master)
1650                 return 0;
1651
1652         if (!dpu_enc->cur_master->ops.get_line_count) {
1653                 DPU_ERROR("get_line_count function not defined\n");
1654                 return 0;
1655         }
1656
1657         pclk_rate = mode->clock; /* pixel clock in kHz */
1658         if (pclk_rate == 0) {
1659                 DPU_ERROR("pclk is 0, cannot calculate line time\n");
1660                 return 0;
1661         }
1662
1663         pclk_period = DIV_ROUND_UP_ULL(1000000000ull, pclk_rate);
1664         if (pclk_period == 0) {
1665                 DPU_ERROR("pclk period is 0\n");
1666                 return 0;
1667         }
1668
1669         /*
1670          * Line time calculation based on Pixel clock and HTOTAL.
1671          * Final unit is in ns.
1672          */
1673         line_time = (pclk_period * mode->htotal) / 1000;
1674         if (line_time == 0) {
1675                 DPU_ERROR("line time calculation is 0\n");
1676                 return 0;
1677         }
1678
1679         DPU_DEBUG_ENC(dpu_enc,
1680                         "clk_rate=%lldkHz, clk_period=%d, linetime=%dns\n",
1681                         pclk_rate, pclk_period, line_time);
1682
1683         return line_time;
1684 }
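/*
 * Worked example (illustrative numbers): for a 1080p60 mode with
 * mode->clock = 148500 kHz and htotal = 2200:
 *
 *	pclk_period = DIV_ROUND_UP(1000000000, 148500) = 6735 ps
 *	line_time   = (6735 * 2200) / 1000 = 14817 ns
 *
 * which matches 2200 pixels / 148.5 MHz, roughly 14.8 us per line.
 */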
1685
1686 int dpu_encoder_vsync_time(struct drm_encoder *drm_enc, ktime_t *wakeup_time)
1687 {
1688         struct drm_display_mode *mode;
1689         struct dpu_encoder_virt *dpu_enc;
1690         u32 cur_line;
1691         u32 line_time;
1692         u32 vtotal, time_to_vsync;
1693         ktime_t cur_time;
1694
1695         dpu_enc = to_dpu_encoder_virt(drm_enc);
1696
1697         if (!drm_enc->crtc || !drm_enc->crtc->state) {
1698                 DPU_ERROR("crtc/crtc state object is NULL\n");
1699                 return -EINVAL;
1700         }
1701         mode = &drm_enc->crtc->state->adjusted_mode;
1702
1703         line_time = _dpu_encoder_calculate_linetime(dpu_enc, mode);
1704         if (!line_time)
1705                 return -EINVAL;
1706
1707         cur_line = dpu_enc->cur_master->ops.get_line_count(dpu_enc->cur_master);
1708
1709         vtotal = mode->vtotal;
1710         if (cur_line >= vtotal)
1711                 time_to_vsync = line_time * vtotal;
1712         else
1713                 time_to_vsync = line_time * (vtotal - cur_line);
1714
1715         if (time_to_vsync == 0) {
1716                 DPU_ERROR("time to vsync should not be zero, vtotal=%d\n",
1717                                 vtotal);
1718                 return -EINVAL;
1719         }
1720
1721         cur_time = ktime_get();
1722         *wakeup_time = ktime_add_ns(cur_time, time_to_vsync);
1723
1724         DPU_DEBUG_ENC(dpu_enc,
1725                         "cur_line=%u vtotal=%u time_to_vsync=%u, cur_time=%lld, wakeup_time=%lld\n",
1726                         cur_line, vtotal, time_to_vsync,
1727                         ktime_to_ms(cur_time),
1728                         ktime_to_ms(*wakeup_time));
1729         return 0;
1730 }
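/*
 * Worked example (illustrative numbers): continuing the 1080p60 case
 * (line_time = 14817 ns, vtotal = 1125) with scanout at cur_line = 500:
 *
 *	time_to_vsync = 14817 * (1125 - 500) = 9260625 ns
 *
 * so the wakeup lands about 9.3 ms ahead, right at the next vsync.
 */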
1731
1732 static void dpu_encoder_vsync_event_handler(struct timer_list *t)
1733 {
1734         struct dpu_encoder_virt *dpu_enc = from_timer(dpu_enc, t,
1735                         vsync_event_timer);
1736         struct drm_encoder *drm_enc = &dpu_enc->base;
1737         struct msm_drm_private *priv;
1738         struct msm_drm_thread *event_thread;
1739
1740         if (!drm_enc->dev || !drm_enc->crtc) {
1741                 DPU_ERROR("invalid parameters\n");
1742                 return;
1743         }
1744
1745         priv = drm_enc->dev->dev_private;
1746
1747         if (drm_enc->crtc->index >= ARRAY_SIZE(priv->event_thread)) {
1748                 DPU_ERROR("invalid crtc index\n");
1749                 return;
1750         }
1751         event_thread = &priv->event_thread[drm_enc->crtc->index];
1752         if (!event_thread) {
1753                 DPU_ERROR("event_thread not found for crtc:%d\n",
1754                                 drm_enc->crtc->index);
1755                 return;
1756         }
1757
1758         del_timer(&dpu_enc->vsync_event_timer);
1759         kthread_queue_work(event_thread->worker, &dpu_enc->vsync_event_work);
1760
1761 static void dpu_encoder_vsync_event_work_handler(struct kthread_work *work)
1762 {
1763         struct dpu_encoder_virt *dpu_enc = container_of(work,
1764                         struct dpu_encoder_virt, vsync_event_work);
1765         ktime_t wakeup_time;
1766
1767         if (dpu_encoder_vsync_time(&dpu_enc->base, &wakeup_time))
1768                 return;
1769
1770         trace_dpu_enc_vsync_event_work(DRMID(&dpu_enc->base), wakeup_time);
1771         mod_timer(&dpu_enc->vsync_event_timer,
1772                         nsecs_to_jiffies(ktime_to_ns(wakeup_time)));
1773 }
1774
1775 void dpu_encoder_prepare_for_kickoff(struct drm_encoder *drm_enc)
1776 {
1777         struct dpu_encoder_virt *dpu_enc;
1778         struct dpu_encoder_phys *phys;
1779         bool needs_hw_reset = false;
1780         unsigned int i;
1781
1782         dpu_enc = to_dpu_encoder_virt(drm_enc);
1783
1784         trace_dpu_enc_prepare_kickoff(DRMID(drm_enc));
1785
1786         /* prepare for next kickoff, may include waiting on previous kickoff */
1787         DPU_ATRACE_BEGIN("enc_prepare_for_kickoff");
1788         for (i = 0; i < dpu_enc->num_phys_encs; i++) {
1789                 phys = dpu_enc->phys_encs[i];
1790                 if (phys->ops.prepare_for_kickoff)
1791                         phys->ops.prepare_for_kickoff(phys);
1792                 if (phys->enable_state == DPU_ENC_ERR_NEEDS_HW_RESET)
1793                         needs_hw_reset = true;
1794         }
1795         DPU_ATRACE_END("enc_prepare_for_kickoff");
1796
1797         dpu_encoder_resource_control(drm_enc, DPU_ENC_RC_EVENT_KICKOFF);
1798
1799         /* if any phys needs reset, reset all phys, in-order */
1800         if (needs_hw_reset) {
1801                 trace_dpu_enc_prepare_kickoff_reset(DRMID(drm_enc));
1802                 for (i = 0; i < dpu_enc->num_phys_encs; i++) {
1803                         dpu_encoder_helper_hw_reset(dpu_enc->phys_encs[i]);
1804                 }
1805         }
1806 }
1807
1808 void dpu_encoder_kickoff(struct drm_encoder *drm_enc)
1809 {
1810         struct dpu_encoder_virt *dpu_enc;
1811         struct dpu_encoder_phys *phys;
1812         ktime_t wakeup_time;
1813         unsigned long timeout_ms;
1814         unsigned int i;
1815
1816         DPU_ATRACE_BEGIN("encoder_kickoff");
1817         dpu_enc = to_dpu_encoder_virt(drm_enc);
1818
1819         trace_dpu_enc_kickoff(DRMID(drm_enc));
1820
1821         timeout_ms = DPU_ENCODER_FRAME_DONE_TIMEOUT_FRAMES * 1000 /
1822                         drm_mode_vrefresh(&drm_enc->crtc->state->adjusted_mode);
1823
1824         atomic_set(&dpu_enc->frame_done_timeout_ms, timeout_ms);
1825         mod_timer(&dpu_enc->frame_done_timer,
1826                         jiffies + msecs_to_jiffies(timeout_ms));
1827
1828         /* All phys encs are ready to go, trigger the kickoff */
1829         _dpu_encoder_kickoff_phys(dpu_enc);
1830
1831         /* allow phys encs to handle any post-kickoff business */
1832         for (i = 0; i < dpu_enc->num_phys_encs; i++) {
1833                 phys = dpu_enc->phys_encs[i];
1834                 if (phys->ops.handle_post_kickoff)
1835                         phys->ops.handle_post_kickoff(phys);
1836         }
1837
1838         if (dpu_enc->disp_info.intf_type == DRM_MODE_ENCODER_DSI &&
1839                         !dpu_encoder_vsync_time(drm_enc, &wakeup_time)) {
1840                 trace_dpu_enc_early_kickoff(DRMID(drm_enc),
1841                                             ktime_to_ms(wakeup_time));
1842                 mod_timer(&dpu_enc->vsync_event_timer,
1843                                 nsecs_to_jiffies(ktime_to_ns(wakeup_time)));
1844         }
1845
1846         DPU_ATRACE_END("encoder_kickoff");
1847 }
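/*
 * Worked example (illustrative): at 60 Hz the frame-done watchdog above is
 * armed with
 *
 *	timeout_ms = DPU_ENCODER_FRAME_DONE_TIMEOUT_FRAMES * 1000 / 60 = 83
 *
 * so it only fires if no FRAME_DONE interrupt arrives within roughly five
 * frame periods.
 */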
1848
1849 void dpu_encoder_prepare_commit(struct drm_encoder *drm_enc)
1850 {
1851         struct dpu_encoder_virt *dpu_enc;
1852         struct dpu_encoder_phys *phys;
1853         int i;
1854
1855         if (!drm_enc) {
1856                 DPU_ERROR("invalid encoder\n");
1857                 return;
1858         }
1859         dpu_enc = to_dpu_encoder_virt(drm_enc);
1860
1861         for (i = 0; i < dpu_enc->num_phys_encs; i++) {
1862                 phys = dpu_enc->phys_encs[i];
1863                 if (phys->ops.prepare_commit)
1864                         phys->ops.prepare_commit(phys);
1865         }
1866 }
1867
1868 #ifdef CONFIG_DEBUG_FS
1869 static int _dpu_encoder_status_show(struct seq_file *s, void *data)
1870 {
1871         struct dpu_encoder_virt *dpu_enc = s->private;
1872         int i;
1873
1874         mutex_lock(&dpu_enc->enc_lock);
1875         for (i = 0; i < dpu_enc->num_phys_encs; i++) {
1876                 struct dpu_encoder_phys *phys = dpu_enc->phys_encs[i];
1877
1878                 seq_printf(s, "intf:%d    vsync:%8d     underrun:%8d    ",
1879                                 phys->intf_idx - INTF_0,
1880                                 atomic_read(&phys->vsync_cnt),
1881                                 atomic_read(&phys->underrun_cnt));
1882
1883                 switch (phys->intf_mode) {
1884                 case INTF_MODE_VIDEO:
1885                         seq_puts(s, "mode: video\n");
1886                         break;
1887                 case INTF_MODE_CMD:
1888                         seq_puts(s, "mode: command\n");
1889                         break;
1890                 default:
1891                         seq_puts(s, "mode: ???\n");
1892                         break;
1893                 }
1894         }
1895         mutex_unlock(&dpu_enc->enc_lock);
1896
1897         return 0;
1898 }
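/*
 * Sample "status" debugfs output produced by the loop above (values
 * illustrative):
 *
 *	intf:1    vsync:    1234     underrun:       0    mode: video
 */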
1899
1900 DEFINE_SHOW_ATTRIBUTE(_dpu_encoder_status);
1901
1902 static int _dpu_encoder_init_debugfs(struct drm_encoder *drm_enc)
1903 {
1904         struct dpu_encoder_virt *dpu_enc = to_dpu_encoder_virt(drm_enc);
1905         int i;
1906
1907         char name[DPU_NAME_SIZE];
1908
1909         if (!drm_enc->dev) {
1910                 DPU_ERROR("invalid encoder: no drm device\n");
1911                 return -EINVAL;
1912         }
1913
1914         snprintf(name, DPU_NAME_SIZE, "encoder%u", drm_enc->base.id);
1915
1916         /* create overall sub-directory for the encoder */
1917         dpu_enc->debugfs_root = debugfs_create_dir(name,
1918                         drm_enc->dev->primary->debugfs_root);
1919
1920         /* don't error check these */
1921         debugfs_create_file("status", 0600,
1922                 dpu_enc->debugfs_root, dpu_enc, &_dpu_encoder_status_fops);
1923
1924         for (i = 0; i < dpu_enc->num_phys_encs; i++)
1925                 if (dpu_enc->phys_encs[i]->ops.late_register)
1926                         dpu_enc->phys_encs[i]->ops.late_register(
1927                                         dpu_enc->phys_encs[i],
1928                                         dpu_enc->debugfs_root);
1929
1930         return 0;
1931 }
1932 #else
1933 static int _dpu_encoder_init_debugfs(struct drm_encoder *drm_enc)
1934 {
1935         return 0;
1936 }
1937 #endif
1938
1939 static int dpu_encoder_late_register(struct drm_encoder *encoder)
1940 {
1941         return _dpu_encoder_init_debugfs(encoder);
1942 }
1943
1944 static void dpu_encoder_early_unregister(struct drm_encoder *encoder)
1945 {
1946         struct dpu_encoder_virt *dpu_enc = to_dpu_encoder_virt(encoder);
1947
1948         debugfs_remove_recursive(dpu_enc->debugfs_root);
1949 }
1950
1951 static int dpu_encoder_virt_add_phys_encs(
1952                 u32 display_caps,
1953                 struct dpu_encoder_virt *dpu_enc,
1954                 struct dpu_enc_phys_init_params *params)
1955 {
1956         struct dpu_encoder_phys *enc = NULL;
1957
1958         DPU_DEBUG_ENC(dpu_enc, "\n");
1959
1960         /*
1961          * We may create up to NUM_PHYS_ENCODER_TYPES physical encoder types
1962          * in this function, check up-front.
1963          */
1964         if (dpu_enc->num_phys_encs + NUM_PHYS_ENCODER_TYPES >=
1965                         ARRAY_SIZE(dpu_enc->phys_encs)) {
1966                 DPU_ERROR_ENC(dpu_enc, "too many physical encoders %d\n",
1967                           dpu_enc->num_phys_encs);
1968                 return -EINVAL;
1969         }
1970
1971         if (display_caps & MSM_DISPLAY_CAP_VID_MODE) {
1972                 enc = dpu_encoder_phys_vid_init(params);
1973
1974                 if (IS_ERR_OR_NULL(enc)) {
1975                         DPU_ERROR_ENC(dpu_enc, "failed to init vid enc: %ld\n",
1976                                 PTR_ERR(enc));
1977                         return enc == NULL ? -EINVAL : PTR_ERR(enc);
1978                 }
1979
1980                 dpu_enc->phys_encs[dpu_enc->num_phys_encs] = enc;
1981                 ++dpu_enc->num_phys_encs;
1982         }
1983
1984         if (display_caps & MSM_DISPLAY_CAP_CMD_MODE) {
1985                 enc = dpu_encoder_phys_cmd_init(params);
1986
1987                 if (IS_ERR_OR_NULL(enc)) {
1988                         DPU_ERROR_ENC(dpu_enc, "failed to init cmd enc: %ld\n",
1989                                 PTR_ERR(enc));
1990                         return enc == NULL ? -EINVAL : PTR_ERR(enc);
1991                 }
1992
1993                 dpu_enc->phys_encs[dpu_enc->num_phys_encs] = enc;
1994                 ++dpu_enc->num_phys_encs;
1995         }
1996
1997         if (params->split_role == ENC_ROLE_SLAVE)
1998                 dpu_enc->cur_slave = enc;
1999         else
2000                 dpu_enc->cur_master = enc;
2001
2002         return 0;
2003 }
2004
2005 static const struct dpu_encoder_virt_ops dpu_encoder_parent_ops = {
2006         .handle_vblank_virt = dpu_encoder_vblank_callback,
2007         .handle_underrun_virt = dpu_encoder_underrun_callback,
2008         .handle_frame_done = dpu_encoder_frame_done_callback,
2009 };
2010
2011 static int dpu_encoder_setup_display(struct dpu_encoder_virt *dpu_enc,
2012                                  struct dpu_kms *dpu_kms,
2013                                  struct msm_display_info *disp_info)
2014 {
2015         int ret = 0;
2016         int i = 0;
2017         enum dpu_intf_type intf_type = INTF_NONE;
2018         struct dpu_enc_phys_init_params phys_params;
2019
2020         if (!dpu_enc) {
2021                 DPU_ERROR("invalid encoder\n");
2022                 return -EINVAL;
2023         }
2024
2025         dpu_enc->cur_master = NULL;
2026
2027         memset(&phys_params, 0, sizeof(phys_params));
2028         phys_params.dpu_kms = dpu_kms;
2029         phys_params.parent = &dpu_enc->base;
2030         phys_params.parent_ops = &dpu_encoder_parent_ops;
2031         phys_params.enc_spinlock = &dpu_enc->enc_spinlock;
2032
2033         switch (disp_info->intf_type) {
2034         case DRM_MODE_ENCODER_DSI:
2035                 intf_type = INTF_DSI;
2036                 break;
2037         case DRM_MODE_ENCODER_TMDS:
2038                 intf_type = INTF_DP;
2039                 break;
2040         }
2041
2042         WARN_ON(disp_info->num_of_h_tiles < 1);
2043
2044         DPU_DEBUG("disp_info->num_of_h_tiles %d\n", disp_info->num_of_h_tiles);
2045
2046         if ((disp_info->capabilities & MSM_DISPLAY_CAP_CMD_MODE) ||
2047             (disp_info->capabilities & MSM_DISPLAY_CAP_VID_MODE))
2048                 dpu_enc->idle_pc_supported =
2049                                 dpu_kms->catalog->caps->has_idle_pc;
2050
2051         mutex_lock(&dpu_enc->enc_lock);
2052         for (i = 0; i < disp_info->num_of_h_tiles && !ret; i++) {
2053                 /*
2054                  * Left-most tile is at index 0, content is controller id
2055                  * h_tile_instance_ids[2] = {0, 1}; DSI0 = left, DSI1 = right
2056                  * h_tile_instance_ids[2] = {1, 0}; DSI1 = left, DSI0 = right
2057                  */
2058                 u32 controller_id = disp_info->h_tile_instance[i];
2059
2060                 if (disp_info->num_of_h_tiles > 1) {
2061                         if (i == 0)
2062                                 phys_params.split_role = ENC_ROLE_MASTER;
2063                         else
2064                                 phys_params.split_role = ENC_ROLE_SLAVE;
2065                 } else {
2066                         phys_params.split_role = ENC_ROLE_SOLO;
2067                 }
2068
2069                 DPU_DEBUG("h_tile_instance %d = %d, split_role %d\n",
2070                                 i, controller_id, phys_params.split_role);
2071
2072                 phys_params.intf_idx = dpu_encoder_get_intf(dpu_kms->catalog,
2073                                 intf_type,
2074                                 controller_id);
2075                 if (phys_params.intf_idx == INTF_MAX) {
2076                         DPU_ERROR_ENC(dpu_enc, "could not get intf: type %d, id %d\n",
2077                                         intf_type, controller_id);
2078                         ret = -EINVAL;
2079                 }
2080
2081                 if (!ret) {
2082                         ret = dpu_encoder_virt_add_phys_encs(disp_info->capabilities,
2083                                         dpu_enc,
2084                                         &phys_params);
2085                         if (ret)
2086                                 DPU_ERROR_ENC(dpu_enc, "failed to add phys encs\n");
2087                 }
2088         }
2089
2090         for (i = 0; i < dpu_enc->num_phys_encs; i++) {
2091                 struct dpu_encoder_phys *phys = dpu_enc->phys_encs[i];
2092                 atomic_set(&phys->vsync_cnt, 0);
2093                 atomic_set(&phys->underrun_cnt, 0);
2094         }
2095         mutex_unlock(&dpu_enc->enc_lock);
2096
2097         return ret;
2098 }
2099
2100 static void dpu_encoder_frame_done_timeout(struct timer_list *t)
2101 {
2102         struct dpu_encoder_virt *dpu_enc = from_timer(dpu_enc, t,
2103                         frame_done_timer);
2104         struct drm_encoder *drm_enc = &dpu_enc->base;
2105         u32 event;
2106
2107         if (!drm_enc->dev) {
2108                 DPU_ERROR("invalid parameters\n");
2109                 return;
2110         }
2111
2112         if (!dpu_enc->frame_busy_mask[0] || !dpu_enc->crtc_frame_event_cb) {
2113                 DRM_DEBUG_KMS("id:%u invalid timeout frame_busy_mask=%lu\n",
2114                               DRMID(drm_enc), dpu_enc->frame_busy_mask[0]);
2115                 return;
2116         } else if (!atomic_xchg(&dpu_enc->frame_done_timeout_ms, 0)) {
2117                 DRM_DEBUG_KMS("id:%u invalid timeout\n", DRMID(drm_enc));
2118                 return;
2119         }
2120
2121         DPU_ERROR_ENC(dpu_enc, "frame done timeout\n");
2122
2123         event = DPU_ENCODER_FRAME_EVENT_ERROR;
2124         trace_dpu_enc_frame_done_timeout(DRMID(drm_enc), event);
2125         dpu_enc->crtc_frame_event_cb(dpu_enc->crtc_frame_event_cb_data, event);
2126 }
2127
2128 static const struct drm_encoder_helper_funcs dpu_encoder_helper_funcs = {
2129         .mode_set = dpu_encoder_virt_mode_set,
2130         .disable = dpu_encoder_virt_disable,
2131         .enable = dpu_kms_encoder_enable,
2132         .atomic_check = dpu_encoder_virt_atomic_check,
2133
2134         /* This is called by dpu_kms_encoder_enable */
2135         .commit = dpu_encoder_virt_enable,
2136 };
2137
2138 static const struct drm_encoder_funcs dpu_encoder_funcs = {
2139                 .destroy = dpu_encoder_destroy,
2140                 .late_register = dpu_encoder_late_register,
2141                 .early_unregister = dpu_encoder_early_unregister,
2142 };
2143
2144 int dpu_encoder_setup(struct drm_device *dev, struct drm_encoder *enc,
2145                 struct msm_display_info *disp_info)
2146 {
2147         struct msm_drm_private *priv = dev->dev_private;
2148         struct dpu_kms *dpu_kms = to_dpu_kms(priv->kms);
2149         struct dpu_encoder_virt *dpu_enc = NULL;
2150         int ret = 0;
2152
2153         dpu_enc = to_dpu_encoder_virt(enc);
2154
2155         ret = dpu_encoder_setup_display(dpu_enc, dpu_kms, disp_info);
2156         if (ret)
2157                 goto fail;
2158
2159         atomic_set(&dpu_enc->frame_done_timeout_ms, 0);
2160         timer_setup(&dpu_enc->frame_done_timer,
2161                         dpu_encoder_frame_done_timeout, 0);
2162
2163         if (disp_info->intf_type == DRM_MODE_ENCODER_DSI)
2164                 timer_setup(&dpu_enc->vsync_event_timer,
2165                                 dpu_encoder_vsync_event_handler,
2166                                 0);
2167         else if (disp_info->intf_type == DRM_MODE_ENCODER_TMDS)
2168                 dpu_enc->dp = priv->dp[disp_info->h_tile_instance[0]];
2169
2170         INIT_DELAYED_WORK(&dpu_enc->delayed_off_work,
2171                         dpu_encoder_off_work);
2172         dpu_enc->idle_timeout = IDLE_TIMEOUT;
2173
2174         kthread_init_work(&dpu_enc->vsync_event_work,
2175                         dpu_encoder_vsync_event_work_handler);
2176
2177         memcpy(&dpu_enc->disp_info, disp_info, sizeof(*disp_info));
2178
2179         DPU_DEBUG_ENC(dpu_enc, "created\n");
2180
2181         return ret;
2182
2183 fail:
2184         DPU_ERROR("failed to create encoder\n");
2185
2186         return ret;
2191 }
2192
2193 struct drm_encoder *dpu_encoder_init(struct drm_device *dev,
2194                 int drm_enc_mode)
2195 {
2196         struct dpu_encoder_virt *dpu_enc = NULL;
2197         int rc = 0;
2198
2199         dpu_enc = devm_kzalloc(dev->dev, sizeof(*dpu_enc), GFP_KERNEL);
2200         if (!dpu_enc)
2201                 return ERR_PTR(-ENOMEM);
2202
2203         rc = drm_encoder_init(dev, &dpu_enc->base, &dpu_encoder_funcs,
2204                         drm_enc_mode, NULL);
2205         if (rc) {
2206                 devm_kfree(dev->dev, dpu_enc);
2207                 return ERR_PTR(rc);
2208         }
2209
2210         drm_encoder_helper_add(&dpu_enc->base, &dpu_encoder_helper_funcs);
2211
2212         spin_lock_init(&dpu_enc->enc_spinlock);
2213         dpu_enc->enabled = false;
2214         mutex_init(&dpu_enc->enc_lock);
2215         mutex_init(&dpu_enc->rc_lock);
2216
2217         return &dpu_enc->base;
2218 }
2219
2220 int dpu_encoder_wait_for_event(struct drm_encoder *drm_enc,
2221         enum msm_event_wait event)
2222 {
2223         int (*fn_wait)(struct dpu_encoder_phys *phys_enc) = NULL;
2224         struct dpu_encoder_virt *dpu_enc = NULL;
2225         int i, ret = 0;
2226
2227         if (!drm_enc) {
2228                 DPU_ERROR("invalid encoder\n");
2229                 return -EINVAL;
2230         }
2231         dpu_enc = to_dpu_encoder_virt(drm_enc);
2232         DPU_DEBUG_ENC(dpu_enc, "\n");
2233
2234         for (i = 0; i < dpu_enc->num_phys_encs; i++) {
2235                 struct dpu_encoder_phys *phys = dpu_enc->phys_encs[i];
2236
2237                 switch (event) {
2238                 case MSM_ENC_COMMIT_DONE:
2239                         fn_wait = phys->ops.wait_for_commit_done;
2240                         break;
2241                 case MSM_ENC_TX_COMPLETE:
2242                         fn_wait = phys->ops.wait_for_tx_complete;
2243                         break;
2244                 case MSM_ENC_VBLANK:
2245                         fn_wait = phys->ops.wait_for_vblank;
2246                         break;
2247                 default:
2248                         DPU_ERROR_ENC(dpu_enc, "unknown wait event %d\n",
2249                                         event);
2250                         return -EINVAL;
2251                 }
2252
2253                 if (fn_wait) {
2254                         DPU_ATRACE_BEGIN("wait_for_completion_event");
2255                         ret = fn_wait(phys);
2256                         DPU_ATRACE_END("wait_for_completion_event");
2257                         if (ret)
2258                                 return ret;
2259                 }
2260         }
2261
2262         return ret;
2263 }
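/*
 * Illustrative usage (hypothetical call site): after a commit, block until
 * the hardware has latched the new frame:
 *
 *	ret = dpu_encoder_wait_for_event(drm_enc, MSM_ENC_COMMIT_DONE);
 *	if (ret)
 *		DRM_ERROR("wait for commit done failed: %d\n", ret);
 */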
2264
2265 enum dpu_intf_mode dpu_encoder_get_intf_mode(struct drm_encoder *encoder)
2266 {
2267         struct dpu_encoder_virt *dpu_enc = NULL;
2268
2269         if (!encoder) {
2270                 DPU_ERROR("invalid encoder\n");
2271                 return INTF_MODE_NONE;
2272         }
2273         dpu_enc = to_dpu_encoder_virt(encoder);
2274
2275         if (dpu_enc->cur_master)
2276                 return dpu_enc->cur_master->intf_mode;
2277
2278         if (dpu_enc->num_phys_encs)
2279                 return dpu_enc->phys_encs[0]->intf_mode;
2280
2281         return INTF_MODE_NONE;
2282 }