GNU Linux-libre 4.19.286-gnu1
drivers/gpu/drm/msm/disp/dpu1/dpu_crtc.c
1 /*
2  * Copyright (c) 2014-2018 The Linux Foundation. All rights reserved.
3  * Copyright (C) 2013 Red Hat
4  * Author: Rob Clark <robdclark@gmail.com>
5  *
6  * This program is free software; you can redistribute it and/or modify it
7  * under the terms of the GNU General Public License version 2 as published by
8  * the Free Software Foundation.
9  *
10  * This program is distributed in the hope that it will be useful, but WITHOUT
11  * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
12  * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License for
13  * more details.
14  *
15  * You should have received a copy of the GNU General Public License along with
16  * this program.  If not, see <http://www.gnu.org/licenses/>.
17  */
18
19 #define pr_fmt(fmt)     "[drm:%s:%d] " fmt, __func__, __LINE__
20 #include <linux/sort.h>
21 #include <linux/debugfs.h>
22 #include <linux/ktime.h>
23 #include <drm/drm_mode.h>
24 #include <drm/drm_crtc.h>
25 #include <drm/drm_crtc_helper.h>
26 #include <drm/drm_flip_work.h>
27 #include <drm/drm_rect.h>
28
29 #include "dpu_kms.h"
30 #include "dpu_hw_lm.h"
31 #include "dpu_hw_ctl.h"
32 #include "dpu_crtc.h"
33 #include "dpu_plane.h"
34 #include "dpu_encoder.h"
35 #include "dpu_vbif.h"
36 #include "dpu_power_handle.h"
37 #include "dpu_core_perf.h"
38 #include "dpu_trace.h"
39
40 #define DPU_DRM_BLEND_OP_NOT_DEFINED    0
41 #define DPU_DRM_BLEND_OP_OPAQUE         1
42 #define DPU_DRM_BLEND_OP_PREMULTIPLIED  2
43 #define DPU_DRM_BLEND_OP_COVERAGE       3
44 #define DPU_DRM_BLEND_OP_MAX            4
45
46 /* layer mixer index on dpu_crtc */
47 #define LEFT_MIXER 0
48 #define RIGHT_MIXER 1
49
50 #define MISR_BUFF_SIZE                  256
51
52 static inline struct dpu_kms *_dpu_crtc_get_kms(struct drm_crtc *crtc)
53 {
54         struct msm_drm_private *priv;
55
56         if (!crtc || !crtc->dev || !crtc->dev->dev_private) {
57                 DPU_ERROR("invalid crtc\n");
58                 return NULL;
59         }
60         priv = crtc->dev->dev_private;
61         if (!priv || !priv->kms) {
62                 DPU_ERROR("invalid kms\n");
63                 return NULL;
64         }
65
66         return to_dpu_kms(priv->kms);
67 }
68
69 static inline int _dpu_crtc_power_enable(struct dpu_crtc *dpu_crtc, bool enable)
70 {
71         struct drm_crtc *crtc;
72         struct msm_drm_private *priv;
73         struct dpu_kms *dpu_kms;
74
75         if (!dpu_crtc) {
76                 DPU_ERROR("invalid dpu crtc\n");
77                 return -EINVAL;
78         }
79
80         crtc = &dpu_crtc->base;
81         if (!crtc->dev || !crtc->dev->dev_private) {
82                 DPU_ERROR("invalid drm device\n");
83                 return -EINVAL;
84         }
85
86         priv = crtc->dev->dev_private;
87         if (!priv->kms) {
88                 DPU_ERROR("invalid kms\n");
89                 return -EINVAL;
90         }
91
92         dpu_kms = to_dpu_kms(priv->kms);
93
94         if (enable)
95                 pm_runtime_get_sync(&dpu_kms->pdev->dev);
96         else
97                 pm_runtime_put_sync(&dpu_kms->pdev->dev);
98
99         return 0;
100 }
101
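/*
 * Resource pool (respool) overview (summary of the helpers below):
 *
 * Each dpu_crtc_state embeds a dpu_crtc_respool (cstate->rp) that tracks the
 * hardware blocks acquired for that state.  _dpu_crtc_rp_reset() initializes
 * the pool and hooks up the dpu_hw_blk get/put callbacks,
 * dpu_crtc_duplicate_state() copies it via _dpu_crtc_rp_duplicate() with all
 * entries marked DPU_CRTC_RES_FLAG_FREE, and unused or stale entries release
 * their dpu_hw_blk references through _dpu_crtc_rp_reclaim(), called from
 * _dpu_crtc_rp_free_unused() and _dpu_crtc_rp_destroy().
 */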
102 /**
103  * _dpu_crtc_rp_to_crtc - get crtc from resource pool object
104  * @rp: Pointer to resource pool
105  * return: Pointer to drm crtc if success; null otherwise
106  */
107 static struct drm_crtc *_dpu_crtc_rp_to_crtc(struct dpu_crtc_respool *rp)
108 {
109         if (!rp)
110                 return NULL;
111
112         return container_of(rp, struct dpu_crtc_state, rp)->base.crtc;
113 }
114
115 /**
116  * _dpu_crtc_rp_reclaim - reclaim unused, or all if forced, resources in pool
117  * @rp: Pointer to resource pool
118  * @force: True to reclaim all resources; otherwise, reclaim only unused ones
119  * return: None
120  */
121 static void _dpu_crtc_rp_reclaim(struct dpu_crtc_respool *rp, bool force)
122 {
123         struct dpu_crtc_res *res, *next;
124         struct drm_crtc *crtc;
125
126         crtc = _dpu_crtc_rp_to_crtc(rp);
127         if (!crtc) {
128                 DPU_ERROR("invalid crtc\n");
129                 return;
130         }
131
132         DPU_DEBUG("crtc%d.%u %s\n", crtc->base.id, rp->sequence_id,
133                         force ? "destroy" : "free_unused");
134
135         list_for_each_entry_safe(res, next, &rp->res_list, list) {
136                 if (!force && !(res->flags & DPU_CRTC_RES_FLAG_FREE))
137                         continue;
138                 DPU_DEBUG("crtc%d.%u reclaim res:0x%x/0x%llx/%pK/%d\n",
139                                 crtc->base.id, rp->sequence_id,
140                                 res->type, res->tag, res->val,
141                                 atomic_read(&res->refcount));
142                 list_del(&res->list);
143                 if (res->ops.put)
144                         res->ops.put(res->val);
145                 kfree(res);
146         }
147 }
148
149 /**
150  * _dpu_crtc_rp_free_unused - free unused resource in pool
151  * @rp: Pointer to resource pool
152  * return: none
153  */
154 static void _dpu_crtc_rp_free_unused(struct dpu_crtc_respool *rp)
155 {
156         mutex_lock(rp->rp_lock);
157         _dpu_crtc_rp_reclaim(rp, false);
158         mutex_unlock(rp->rp_lock);
159 }
160
161 /**
162  * _dpu_crtc_rp_destroy - destroy resource pool
163  * @rp: Pointer to resource pool
164  * return: None
165  */
166 static void _dpu_crtc_rp_destroy(struct dpu_crtc_respool *rp)
167 {
168         mutex_lock(rp->rp_lock);
169         list_del_init(&rp->rp_list);
170         _dpu_crtc_rp_reclaim(rp, true);
171         mutex_unlock(rp->rp_lock);
172 }
173
174 /**
175  * _dpu_crtc_hw_blk_get - get callback for hardware block
176  * @val: Resource handle
177  * @type: Resource type
178  * @tag: Search tag for given resource
179  * return: Resource handle
180  */
181 static void *_dpu_crtc_hw_blk_get(void *val, u32 type, u64 tag)
182 {
183         DPU_DEBUG("res:%d/0x%llx/%pK\n", type, tag, val);
184         return dpu_hw_blk_get(val, type, tag);
185 }
186
187 /**
188  * _dpu_crtc_hw_blk_put - put callback for hardware block
189  * @val: Resource handle
190  * return: None
191  */
192 static void _dpu_crtc_hw_blk_put(void *val)
193 {
194         DPU_DEBUG("res://%pK\n", val);
195         dpu_hw_blk_put(val);
196 }
197
198 /**
199  * _dpu_crtc_rp_duplicate - duplicate resource pool and reset reference count
200  * @rp: Pointer to original resource pool
201  * @dup_rp: Pointer to duplicated resource pool
202  * return: None
203  */
204 static void _dpu_crtc_rp_duplicate(struct dpu_crtc_respool *rp,
205                 struct dpu_crtc_respool *dup_rp)
206 {
207         struct dpu_crtc_res *res, *dup_res;
208         struct drm_crtc *crtc;
209
210         if (!rp || !dup_rp || !rp->rp_head) {
211                 DPU_ERROR("invalid resource pool\n");
212                 return;
213         }
214
215         crtc = _dpu_crtc_rp_to_crtc(rp);
216         if (!crtc) {
217                 DPU_ERROR("invalid crtc\n");
218                 return;
219         }
220
221         DPU_DEBUG("crtc%d.%u duplicate\n", crtc->base.id, rp->sequence_id);
222
223         mutex_lock(rp->rp_lock);
224         dup_rp->sequence_id = rp->sequence_id + 1;
225         INIT_LIST_HEAD(&dup_rp->res_list);
226         dup_rp->ops = rp->ops;
227         list_for_each_entry(res, &rp->res_list, list) {
228                 dup_res = kzalloc(sizeof(struct dpu_crtc_res), GFP_KERNEL);
229                 if (!dup_res) {
230                         mutex_unlock(rp->rp_lock);
231                         return;
232                 }
233                 INIT_LIST_HEAD(&dup_res->list);
234                 atomic_set(&dup_res->refcount, 0);
235                 dup_res->type = res->type;
236                 dup_res->tag = res->tag;
237                 dup_res->val = res->val;
238                 dup_res->ops = res->ops;
239                 dup_res->flags = DPU_CRTC_RES_FLAG_FREE;
240                 DPU_DEBUG("crtc%d.%u dup res:0x%x/0x%llx/%pK/%d\n",
241                                 crtc->base.id, dup_rp->sequence_id,
242                                 dup_res->type, dup_res->tag, dup_res->val,
243                                 atomic_read(&dup_res->refcount));
244                 list_add_tail(&dup_res->list, &dup_rp->res_list);
245                 if (dup_res->ops.get)
246                         dup_res->ops.get(dup_res->val, 0, -1);
247         }
248
249         dup_rp->rp_lock = rp->rp_lock;
250         dup_rp->rp_head = rp->rp_head;
251         INIT_LIST_HEAD(&dup_rp->rp_list);
252         list_add_tail(&dup_rp->rp_list, rp->rp_head);
253         mutex_unlock(rp->rp_lock);
254 }
255
256 /**
257  * _dpu_crtc_rp_reset - reset resource pool after allocation
258  * @rp: Pointer to original resource pool
259  * @rp_lock: Pointer to serialization resource pool lock
260  * @rp_head: Pointer to crtc resource pool head
261  * return: None
262  */
263 static void _dpu_crtc_rp_reset(struct dpu_crtc_respool *rp,
264                 struct mutex *rp_lock, struct list_head *rp_head)
265 {
266         if (!rp || !rp_lock || !rp_head) {
267                 DPU_ERROR("invalid resource pool\n");
268                 return;
269         }
270
271         mutex_lock(rp_lock);
272         rp->rp_lock = rp_lock;
273         rp->rp_head = rp_head;
274         INIT_LIST_HEAD(&rp->rp_list);
275         rp->sequence_id = 0;
276         INIT_LIST_HEAD(&rp->res_list);
277         rp->ops.get = _dpu_crtc_hw_blk_get;
278         rp->ops.put = _dpu_crtc_hw_blk_put;
279         list_add_tail(&rp->rp_list, rp->rp_head);
280         mutex_unlock(rp_lock);
281 }
282
283 static void dpu_crtc_destroy(struct drm_crtc *crtc)
284 {
285         struct dpu_crtc *dpu_crtc = to_dpu_crtc(crtc);
286
287         DPU_DEBUG("\n");
288
289         if (!crtc)
290                 return;
291
292         dpu_crtc->phandle = NULL;
293
294         drm_crtc_cleanup(crtc);
295         mutex_destroy(&dpu_crtc->crtc_lock);
296         kfree(dpu_crtc);
297 }
298
299 static void _dpu_crtc_setup_blend_cfg(struct dpu_crtc_mixer *mixer,
300                 struct dpu_plane_state *pstate)
301 {
302         struct dpu_hw_mixer *lm = mixer->hw_lm;
303
304         /* default to opaque blending */
305         lm->ops.setup_blend_config(lm, pstate->stage, 0XFF, 0,
306                                 DPU_BLEND_FG_ALPHA_FG_CONST |
307                                 DPU_BLEND_BG_ALPHA_BG_CONST);
308 }
309
310 static void _dpu_crtc_program_lm_output_roi(struct drm_crtc *crtc)
311 {
312         struct dpu_crtc *dpu_crtc;
313         struct dpu_crtc_state *crtc_state;
314         int lm_idx, lm_horiz_position;
315
316         dpu_crtc = to_dpu_crtc(crtc);
317         crtc_state = to_dpu_crtc_state(crtc->state);
318
319         lm_horiz_position = 0;
320         for (lm_idx = 0; lm_idx < dpu_crtc->num_mixers; lm_idx++) {
321                 const struct drm_rect *lm_roi = &crtc_state->lm_bounds[lm_idx];
322                 struct dpu_hw_mixer *hw_lm = dpu_crtc->mixers[lm_idx].hw_lm;
323                 struct dpu_hw_mixer_cfg cfg;
324
325                 if (!lm_roi || !drm_rect_visible(lm_roi))
326                         continue;
327
328                 cfg.out_width = drm_rect_width(lm_roi);
329                 cfg.out_height = drm_rect_height(lm_roi);
330                 cfg.right_mixer = lm_horiz_position++;
331                 cfg.flags = 0;
332                 hw_lm->ops.setup_mixer_out(hw_lm, &cfg);
333         }
334 }
335
336 static void _dpu_crtc_blend_setup_mixer(struct drm_crtc *crtc,
337         struct dpu_crtc *dpu_crtc, struct dpu_crtc_mixer *mixer)
338 {
339         struct drm_plane *plane;
340         struct drm_framebuffer *fb;
341         struct drm_plane_state *state;
342         struct dpu_crtc_state *cstate;
343         struct dpu_plane_state *pstate = NULL;
344         struct dpu_format *format;
345         struct dpu_hw_ctl *ctl;
346         struct dpu_hw_mixer *lm;
347         struct dpu_hw_stage_cfg *stage_cfg;
348
349         u32 flush_mask;
350         uint32_t stage_idx, lm_idx;
351         int zpos_cnt[DPU_STAGE_MAX + 1] = { 0 };
352         bool bg_alpha_enable = false;
353
354         if (!dpu_crtc || !mixer) {
355                 DPU_ERROR("invalid dpu_crtc or mixer\n");
356                 return;
357         }
358
359         ctl = mixer->hw_ctl;
360         lm = mixer->hw_lm;
361         stage_cfg = &dpu_crtc->stage_cfg;
362         cstate = to_dpu_crtc_state(crtc->state);
363
364         drm_atomic_crtc_for_each_plane(plane, crtc) {
365                 state = plane->state;
366                 if (!state)
367                         continue;
368
369                 pstate = to_dpu_plane_state(state);
370                 fb = state->fb;
371
372                 dpu_plane_get_ctl_flush(plane, ctl, &flush_mask);
373
374                 DPU_DEBUG("crtc %d stage:%d - plane %d sspp %d fb %d\n",
375                                 crtc->base.id,
376                                 pstate->stage,
377                                 plane->base.id,
378                                 dpu_plane_pipe(plane) - SSPP_VIG0,
379                                 state->fb ? state->fb->base.id : -1);
380
381                 format = to_dpu_format(msm_framebuffer_format(pstate->base.fb));
382                 if (!format) {
383                         DPU_ERROR("invalid format\n");
384                         return;
385                 }
386
387                 if (pstate->stage == DPU_STAGE_BASE && format->alpha_enable)
388                         bg_alpha_enable = true;
389
390                 stage_idx = zpos_cnt[pstate->stage]++;
391                 stage_cfg->stage[pstate->stage][stage_idx] =
392                                         dpu_plane_pipe(plane);
393                 stage_cfg->multirect_index[pstate->stage][stage_idx] =
394                                         pstate->multirect_index;
395
396                 trace_dpu_crtc_setup_mixer(DRMID(crtc), DRMID(plane),
397                                            state, pstate, stage_idx,
398                                            dpu_plane_pipe(plane) - SSPP_VIG0,
399                                            format->base.pixel_format,
400                                            fb ? fb->modifier : 0);
401
402                 /* blend config update */
403                 for (lm_idx = 0; lm_idx < dpu_crtc->num_mixers; lm_idx++) {
404                         _dpu_crtc_setup_blend_cfg(mixer + lm_idx, pstate);
405
406                         mixer[lm_idx].flush_mask |= flush_mask;
407
408                         if (bg_alpha_enable && !format->alpha_enable)
409                                 mixer[lm_idx].mixer_op_mode = 0;
410                         else
411                                 mixer[lm_idx].mixer_op_mode |=
412                                                 1 << pstate->stage;
413                 }
414         }
415
416          _dpu_crtc_program_lm_output_roi(crtc);
417 }
418
419 /**
420  * _dpu_crtc_blend_setup - configure crtc mixers
421  * @crtc: Pointer to drm crtc structure
422  */
423 static void _dpu_crtc_blend_setup(struct drm_crtc *crtc)
424 {
425         struct dpu_crtc *dpu_crtc;
426         struct dpu_crtc_state *dpu_crtc_state;
427         struct dpu_crtc_mixer *mixer;
428         struct dpu_hw_ctl *ctl;
429         struct dpu_hw_mixer *lm;
430
431         int i;
432
433         if (!crtc)
434                 return;
435
436         dpu_crtc = to_dpu_crtc(crtc);
437         dpu_crtc_state = to_dpu_crtc_state(crtc->state);
438         mixer = dpu_crtc->mixers;
439
440         DPU_DEBUG("%s\n", dpu_crtc->name);
441
442         if (dpu_crtc->num_mixers > CRTC_DUAL_MIXERS) {
443                 DPU_ERROR("invalid number of mixers: %d\n", dpu_crtc->num_mixers);
444                 return;
445         }
446
447         for (i = 0; i < dpu_crtc->num_mixers; i++) {
448                 if (!mixer[i].hw_lm || !mixer[i].hw_ctl) {
449                         DPU_ERROR("invalid lm or ctl assigned to mixer\n");
450                         return;
451                 }
452                 mixer[i].mixer_op_mode = 0;
453                 mixer[i].flush_mask = 0;
454                 if (mixer[i].hw_ctl->ops.clear_all_blendstages)
455                         mixer[i].hw_ctl->ops.clear_all_blendstages(
456                                         mixer[i].hw_ctl);
457         }
458
459         /* initialize stage cfg */
460         memset(&dpu_crtc->stage_cfg, 0, sizeof(struct dpu_hw_stage_cfg));
461
462         _dpu_crtc_blend_setup_mixer(crtc, dpu_crtc, mixer);
463
464         for (i = 0; i < dpu_crtc->num_mixers; i++) {
465                 ctl = mixer[i].hw_ctl;
466                 lm = mixer[i].hw_lm;
467
468                 lm->ops.setup_alpha_out(lm, mixer[i].mixer_op_mode);
469
470                 mixer[i].flush_mask |= ctl->ops.get_bitmask_mixer(ctl,
471                         mixer[i].hw_lm->idx);
472
473                 /* stage config flush mask */
474                 ctl->ops.update_pending_flush(ctl, mixer[i].flush_mask);
475
476                 DPU_DEBUG("lm %d, op_mode 0x%X, ctl %d, flush mask 0x%x\n",
477                         mixer[i].hw_lm->idx - LM_0,
478                         mixer[i].mixer_op_mode,
479                         ctl->idx - CTL_0,
480                         mixer[i].flush_mask);
481
482                 ctl->ops.setup_blendstage(ctl, mixer[i].hw_lm->idx,
483                         &dpu_crtc->stage_cfg);
484         }
485 }
486
487 /**
488  *  _dpu_crtc_complete_flip - signal pending page_flip events
489  * Any pending vblank events are added to the vblank_event_list
490  * so that the next vblank interrupt shall signal them.
491  * However, PAGE_FLIP events are not handled through the vblank_event_list.
492  * This API signals any pending PAGE_FLIP events requested through
493  * DRM_IOCTL_MODE_PAGE_FLIP that are cached in dpu_crtc->event.
494  * @crtc: Pointer to drm crtc structure
495  */
496 static void _dpu_crtc_complete_flip(struct drm_crtc *crtc)
497 {
498         struct dpu_crtc *dpu_crtc = to_dpu_crtc(crtc);
499         struct drm_device *dev = crtc->dev;
500         unsigned long flags;
501
502         spin_lock_irqsave(&dev->event_lock, flags);
503         if (dpu_crtc->event) {
504                 DRM_DEBUG_VBL("%s: send event: %pK\n", dpu_crtc->name,
505                               dpu_crtc->event);
506                 trace_dpu_crtc_complete_flip(DRMID(crtc));
507                 drm_crtc_send_vblank_event(crtc, dpu_crtc->event);
508                 dpu_crtc->event = NULL;
509         }
510         spin_unlock_irqrestore(&dev->event_lock, flags);
511 }
512
513 enum dpu_intf_mode dpu_crtc_get_intf_mode(struct drm_crtc *crtc)
514 {
515         struct drm_encoder *encoder;
516
517         if (!crtc || !crtc->dev) {
518                 DPU_ERROR("invalid crtc\n");
519                 return INTF_MODE_NONE;
520         }
521
522         drm_for_each_encoder(encoder, crtc->dev)
523                 if (encoder->crtc == crtc)
524                         return dpu_encoder_get_intf_mode(encoder);
525
526         return INTF_MODE_NONE;
527 }
528
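/*
 * dpu_crtc_vblank_cb - vblank callback registered with the encoder via
 * dpu_encoder_register_vblank_callback(); updates the vblank statistics,
 * signals any pending page flip and forwards the vblank to the DRM core.
 */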
529 static void dpu_crtc_vblank_cb(void *data)
530 {
531         struct drm_crtc *crtc = (struct drm_crtc *)data;
532         struct dpu_crtc *dpu_crtc = to_dpu_crtc(crtc);
533
534         /* keep statistics on vblank callback - with auto reset via debugfs */
535         if (ktime_compare(dpu_crtc->vblank_cb_time, ktime_set(0, 0)) == 0)
536                 dpu_crtc->vblank_cb_time = ktime_get();
537         else
538                 dpu_crtc->vblank_cb_count++;
539         _dpu_crtc_complete_flip(crtc);
540         drm_crtc_handle_vblank(crtc);
541         trace_dpu_crtc_vblank_cb(DRMID(crtc));
542 }
543
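/*
 * dpu_crtc_frame_event_work - deferred handler for frame events queued by
 * dpu_crtc_frame_event_cb(); runs in the per-crtc event thread, drops the
 * frame_pending count, releases bandwidth when the last pending frame
 * retires, completes frame_done_comp and returns the event to the free list.
 */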
544 static void dpu_crtc_frame_event_work(struct kthread_work *work)
545 {
546         struct msm_drm_private *priv;
547         struct dpu_crtc_frame_event *fevent;
548         struct drm_crtc *crtc;
549         struct dpu_crtc *dpu_crtc;
550         struct dpu_kms *dpu_kms;
551         unsigned long flags;
552         bool frame_done = false;
553
554         if (!work) {
555                 DPU_ERROR("invalid work handle\n");
556                 return;
557         }
558
559         fevent = container_of(work, struct dpu_crtc_frame_event, work);
560         if (!fevent->crtc || !fevent->crtc->state) {
561                 DPU_ERROR("invalid crtc\n");
562                 return;
563         }
564
565         crtc = fevent->crtc;
566         dpu_crtc = to_dpu_crtc(crtc);
567
568         dpu_kms = _dpu_crtc_get_kms(crtc);
569         if (!dpu_kms) {
570                 DPU_ERROR("invalid kms handle\n");
571                 return;
572         }
573         priv = dpu_kms->dev->dev_private;
574         DPU_ATRACE_BEGIN("crtc_frame_event");
575
576         DRM_DEBUG_KMS("crtc%d event:%u ts:%lld\n", crtc->base.id, fevent->event,
577                         ktime_to_ns(fevent->ts));
578
579         if (fevent->event & (DPU_ENCODER_FRAME_EVENT_DONE
580                                 | DPU_ENCODER_FRAME_EVENT_ERROR
581                                 | DPU_ENCODER_FRAME_EVENT_PANEL_DEAD)) {
582
583                 if (atomic_read(&dpu_crtc->frame_pending) < 1) {
584                         /* this should not happen */
585                         DRM_ERROR("crtc%d ev:%u ts:%lld frame_pending:%d\n",
586                                         crtc->base.id,
587                                         fevent->event,
588                                         ktime_to_ns(fevent->ts),
589                                         atomic_read(&dpu_crtc->frame_pending));
590                 } else if (atomic_dec_return(&dpu_crtc->frame_pending) == 0) {
591                         /* release bandwidth and other resources */
592                         trace_dpu_crtc_frame_event_done(DRMID(crtc),
593                                                         fevent->event);
594                         dpu_core_perf_crtc_release_bw(crtc);
595                 } else {
596                         trace_dpu_crtc_frame_event_more_pending(DRMID(crtc),
597                                                                 fevent->event);
598                 }
599
600                 if (fevent->event & DPU_ENCODER_FRAME_EVENT_DONE)
601                         dpu_core_perf_crtc_update(crtc, 0, false);
602
603                 if (fevent->event & (DPU_ENCODER_FRAME_EVENT_DONE
604                                         | DPU_ENCODER_FRAME_EVENT_ERROR))
605                         frame_done = true;
606         }
607
608         if (fevent->event & DPU_ENCODER_FRAME_EVENT_PANEL_DEAD)
609                 DPU_ERROR("crtc%d ts:%lld received panel dead event\n",
610                                 crtc->base.id, ktime_to_ns(fevent->ts));
611
612         if (frame_done)
613                 complete_all(&dpu_crtc->frame_done_comp);
614
615         spin_lock_irqsave(&dpu_crtc->spin_lock, flags);
616         list_add_tail(&fevent->list, &dpu_crtc->frame_event_list);
617         spin_unlock_irqrestore(&dpu_crtc->spin_lock, flags);
618         DPU_ATRACE_END("crtc_frame_event");
619 }
620
621 /*
622  * dpu_crtc_frame_event_cb - crtc frame event callback API. CRTC module
623  * registers this API to encoder for all frame event callbacks like
624  * frame_error, frame_done, idle_timeout, etc. Encoder may call different events
625  * from different context - IRQ, user thread, commit_thread, etc. Each event
626  * should be carefully reviewed and should be processed in proper task context
627  * to avoid schedulin delay or properly manage the irq context's bottom half
628  * processing.
629  */
630 static void dpu_crtc_frame_event_cb(void *data, u32 event)
631 {
632         struct drm_crtc *crtc = (struct drm_crtc *)data;
633         struct dpu_crtc *dpu_crtc;
634         struct msm_drm_private *priv;
635         struct dpu_crtc_frame_event *fevent;
636         unsigned long flags;
637         u32 crtc_id;
638
639         if (!crtc || !crtc->dev || !crtc->dev->dev_private) {
640                 DPU_ERROR("invalid parameters\n");
641                 return;
642         }
643
644         /* Nothing to do on idle event */
645         if (event & DPU_ENCODER_FRAME_EVENT_IDLE)
646                 return;
647
648         dpu_crtc = to_dpu_crtc(crtc);
649         priv = crtc->dev->dev_private;
650         crtc_id = drm_crtc_index(crtc);
651
652         trace_dpu_crtc_frame_event_cb(DRMID(crtc), event);
653
654         spin_lock_irqsave(&dpu_crtc->spin_lock, flags);
655         fevent = list_first_entry_or_null(&dpu_crtc->frame_event_list,
656                         struct dpu_crtc_frame_event, list);
657         if (fevent)
658                 list_del_init(&fevent->list);
659         spin_unlock_irqrestore(&dpu_crtc->spin_lock, flags);
660
661         if (!fevent) {
662                 DRM_ERROR_RATELIMITED("crtc%d event %d overflow\n", crtc->base.id, event);
663                 return;
664         }
665
666         fevent->event = event;
667         fevent->crtc = crtc;
668         fevent->ts = ktime_get();
669         kthread_queue_work(&priv->event_thread[crtc_id].worker, &fevent->work);
670 }
671
672 void dpu_crtc_complete_commit(struct drm_crtc *crtc,
673                 struct drm_crtc_state *old_state)
674 {
675         if (!crtc || !crtc->state) {
676                 DPU_ERROR("invalid crtc\n");
677                 return;
678         }
679         trace_dpu_crtc_complete_commit(DRMID(crtc));
680 }
681
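/*
 * _dpu_crtc_setup_mixer_for_encoder - walk the LM and CTL blocks that the
 * resource manager reserved for @enc and append them to dpu_crtc->mixers.
 * When fewer CTLs than LMs were reserved, the previous CTL is reused.
 */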
682 static void _dpu_crtc_setup_mixer_for_encoder(
683                 struct drm_crtc *crtc,
684                 struct drm_encoder *enc)
685 {
686         struct dpu_crtc *dpu_crtc = to_dpu_crtc(crtc);
687         struct dpu_kms *dpu_kms = _dpu_crtc_get_kms(crtc);
688         struct dpu_rm *rm = &dpu_kms->rm;
689         struct dpu_crtc_mixer *mixer;
690         struct dpu_hw_ctl *last_valid_ctl = NULL;
691         int i;
692         struct dpu_rm_hw_iter lm_iter, ctl_iter;
693
694         dpu_rm_init_hw_iter(&lm_iter, enc->base.id, DPU_HW_BLK_LM);
695         dpu_rm_init_hw_iter(&ctl_iter, enc->base.id, DPU_HW_BLK_CTL);
696
697         /* Set up all the mixers and ctls reserved by this encoder */
698         for (i = dpu_crtc->num_mixers; i < ARRAY_SIZE(dpu_crtc->mixers); i++) {
699                 mixer = &dpu_crtc->mixers[i];
700
701                 if (!dpu_rm_get_hw(rm, &lm_iter))
702                         break;
703                 mixer->hw_lm = (struct dpu_hw_mixer *)lm_iter.hw;
704
705                 /* CTLs may be fewer than LMs; if so, one CTL drives multiple LMs */
706                 if (!dpu_rm_get_hw(rm, &ctl_iter)) {
707                         DPU_DEBUG("no ctl assigned to lm %d, using previous\n",
708                                         mixer->hw_lm->idx - LM_0);
709                         mixer->hw_ctl = last_valid_ctl;
710                 } else {
711                         mixer->hw_ctl = (struct dpu_hw_ctl *)ctl_iter.hw;
712                         last_valid_ctl = mixer->hw_ctl;
713                 }
714
715                 /* Shouldn't happen, mixers are always >= ctls */
716                 if (!mixer->hw_ctl) {
717                         DPU_ERROR("no valid ctls found for lm %d\n",
718                                         mixer->hw_lm->idx - LM_0);
719                         return;
720                 }
721
722                 mixer->encoder = enc;
723
724                 dpu_crtc->num_mixers++;
725                 DPU_DEBUG("setup mixer %d: lm %d\n",
726                                 i, mixer->hw_lm->idx - LM_0);
727                 DPU_DEBUG("setup mixer %d: ctl %d\n",
728                                 i, mixer->hw_ctl->idx - CTL_0);
729         }
730 }
731
732 static void _dpu_crtc_setup_mixers(struct drm_crtc *crtc)
733 {
734         struct dpu_crtc *dpu_crtc = to_dpu_crtc(crtc);
735         struct drm_encoder *enc;
736
737         dpu_crtc->num_mixers = 0;
738         dpu_crtc->mixers_swapped = false;
739         memset(dpu_crtc->mixers, 0, sizeof(dpu_crtc->mixers));
740
741         mutex_lock(&dpu_crtc->crtc_lock);
742         /* Check for mixers on all encoders attached to this crtc */
743         list_for_each_entry(enc, &crtc->dev->mode_config.encoder_list, head) {
744                 if (enc->crtc != crtc)
745                         continue;
746
747                 _dpu_crtc_setup_mixer_for_encoder(crtc, enc);
748         }
749
750         mutex_unlock(&dpu_crtc->crtc_lock);
751 }
752
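/*
 * _dpu_crtc_setup_lm_bounds - split the adjusted mode horizontally into one
 * ROI rectangle per layer mixer and store them in cstate->lm_bounds[].
 */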
753 static void _dpu_crtc_setup_lm_bounds(struct drm_crtc *crtc,
754                 struct drm_crtc_state *state)
755 {
756         struct dpu_crtc *dpu_crtc;
757         struct dpu_crtc_state *cstate;
758         struct drm_display_mode *adj_mode;
759         u32 crtc_split_width;
760         int i;
761
762         if (!crtc || !state) {
763                 DPU_ERROR("invalid args\n");
764                 return;
765         }
766
767         dpu_crtc = to_dpu_crtc(crtc);
768         cstate = to_dpu_crtc_state(state);
769
770         adj_mode = &state->adjusted_mode;
771         crtc_split_width = dpu_crtc_get_mixer_width(dpu_crtc, cstate, adj_mode);
772
773         for (i = 0; i < dpu_crtc->num_mixers; i++) {
774                 struct drm_rect *r = &cstate->lm_bounds[i];
775                 r->x1 = crtc_split_width * i;
776                 r->y1 = 0;
777                 r->x2 = r->x1 + crtc_split_width;
778                 r->y2 = dpu_crtc_get_mixer_height(dpu_crtc, cstate, adj_mode);
779
780                 trace_dpu_crtc_setup_lm_bounds(DRMID(crtc), i, r);
781         }
782
783         drm_mode_debug_printmodeline(adj_mode);
784 }
785
786 static void dpu_crtc_atomic_begin(struct drm_crtc *crtc,
787                 struct drm_crtc_state *old_state)
788 {
789         struct dpu_crtc *dpu_crtc;
790         struct drm_encoder *encoder;
791         struct drm_device *dev;
792         unsigned long flags;
793         struct dpu_crtc_smmu_state_data *smmu_state;
794
795         if (!crtc) {
796                 DPU_ERROR("invalid crtc\n");
797                 return;
798         }
799
800         if (!crtc->state->enable) {
801                 DPU_DEBUG("crtc%d -> enable %d, skip atomic_begin\n",
802                                 crtc->base.id, crtc->state->enable);
803                 return;
804         }
805
806         DPU_DEBUG("crtc%d\n", crtc->base.id);
807
808         dpu_crtc = to_dpu_crtc(crtc);
809         dev = crtc->dev;
810         smmu_state = &dpu_crtc->smmu_state;
811
812         if (!dpu_crtc->num_mixers) {
813                 _dpu_crtc_setup_mixers(crtc);
814                 _dpu_crtc_setup_lm_bounds(crtc, crtc->state);
815         }
816
817         if (dpu_crtc->event) {
818                 WARN_ON(dpu_crtc->event);
819         } else {
820                 spin_lock_irqsave(&dev->event_lock, flags);
821                 dpu_crtc->event = crtc->state->event;
822                 crtc->state->event = NULL;
823                 spin_unlock_irqrestore(&dev->event_lock, flags);
824         }
825
826         list_for_each_entry(encoder, &dev->mode_config.encoder_list, head) {
827                 if (encoder->crtc != crtc)
828                         continue;
829
830                 /* encoder will trigger pending mask now */
831                 dpu_encoder_trigger_kickoff_pending(encoder);
832         }
833
834         /*
835          * If no mixers have been allocated in dpu_crtc_atomic_check(),
836          * it means we are trying to flush a CRTC whose state is disabled:
837          * nothing else needs to be done.
838          */
839         if (unlikely(!dpu_crtc->num_mixers))
840                 return;
841
842         _dpu_crtc_blend_setup(crtc);
843
844         /*
845          * The PP_DONE irq is only used by command mode for now.
846          * It is better to request it as pending before the FLUSH and START
847          * triggers, to make sure no pp_done irq is missed.
848          * This is safe because no pp_done can happen before the SW trigger
849          * in command mode.
850          */
851 }
852
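/*
 * dpu_crtc_atomic_flush - push the remaining plane updates for this commit.
 * Restores planes that were power collapsed, updates core performance
 * settings and flushes every plane; the actual kickoff is scheduled by the
 * outer layer (see dpu_crtc_commit_kickoff()).
 */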
853 static void dpu_crtc_atomic_flush(struct drm_crtc *crtc,
854                 struct drm_crtc_state *old_crtc_state)
855 {
856         struct dpu_crtc *dpu_crtc;
857         struct drm_device *dev;
858         struct drm_plane *plane;
859         struct msm_drm_private *priv;
860         struct msm_drm_thread *event_thread;
861         unsigned long flags;
862         struct dpu_crtc_state *cstate;
863
864         if (!crtc || !crtc->dev || !crtc->dev->dev_private) {
865                 DPU_ERROR("invalid crtc\n");
866                 return;
867         }
868
869         if (!crtc->state->enable) {
870                 DPU_DEBUG("crtc%d -> enable %d, skip atomic_flush\n",
871                                 crtc->base.id, crtc->state->enable);
872                 return;
873         }
874
875         DPU_DEBUG("crtc%d\n", crtc->base.id);
876
877         dpu_crtc = to_dpu_crtc(crtc);
878         cstate = to_dpu_crtc_state(crtc->state);
879         dev = crtc->dev;
880         priv = dev->dev_private;
881
882         if (crtc->index >= ARRAY_SIZE(priv->event_thread)) {
883                 DPU_ERROR("invalid crtc index[%d]\n", crtc->index);
884                 return;
885         }
886
887         event_thread = &priv->event_thread[crtc->index];
888
889         if (dpu_crtc->event) {
890                 DPU_DEBUG("already received dpu_crtc->event\n");
891         } else {
892                 spin_lock_irqsave(&dev->event_lock, flags);
893                 dpu_crtc->event = crtc->state->event;
894                 crtc->state->event = NULL;
895                 spin_unlock_irqrestore(&dev->event_lock, flags);
896         }
897
898         /*
899          * If no mixers have been allocated in dpu_crtc_atomic_check(),
900          * it means we are trying to flush a CRTC whose state is disabled:
901          * nothing else needs to be done.
902          */
903         if (unlikely(!dpu_crtc->num_mixers))
904                 return;
905
906         /*
907          * For planes without a commit update, the drm framework will not add
908          * those planes to the current state since no hardware update is
909          * required. However, if those planes were power collapsed since the
910          * last commit cycle, the driver has to restore their hardware state
911          * explicitly here prior to the plane flush.
912          */
913         drm_atomic_crtc_for_each_plane(plane, crtc)
914                 dpu_plane_restore(plane);
915
916         /* update performance setting before crtc kickoff */
917         dpu_core_perf_crtc_update(crtc, 1, false);
918
919         /*
920          * Final plane updates: Give each plane a chance to complete all
921          *                      required writes/flushing before crtc's "flush
922          *                      everything" call below.
923          */
924         drm_atomic_crtc_for_each_plane(plane, crtc) {
925                 if (dpu_crtc->smmu_state.transition_error)
926                         dpu_plane_set_error(plane, true);
927                 dpu_plane_flush(plane);
928         }
929
930         /* Kickoff will be scheduled by outer layer */
931 }
932
933 /**
934  * dpu_crtc_destroy_state - state destroy hook
935  * @crtc: drm CRTC
936  * @state: CRTC state object to release
937  */
938 static void dpu_crtc_destroy_state(struct drm_crtc *crtc,
939                 struct drm_crtc_state *state)
940 {
941         struct dpu_crtc *dpu_crtc;
942         struct dpu_crtc_state *cstate;
943
944         if (!crtc || !state) {
945                 DPU_ERROR("invalid argument(s)\n");
946                 return;
947         }
948
949         dpu_crtc = to_dpu_crtc(crtc);
950         cstate = to_dpu_crtc_state(state);
951
952         DPU_DEBUG("crtc%d\n", crtc->base.id);
953
954         _dpu_crtc_rp_destroy(&cstate->rp);
955
956         __drm_atomic_helper_crtc_destroy_state(state);
957
958         kfree(cstate);
959 }
960
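/*
 * _dpu_crtc_wait_for_frame_done - if frames are still pending, wait up to
 * DPU_FRAME_DONE_TIMEOUT ms for the frame_done_comp completion signalled by
 * dpu_crtc_frame_event_work(); returns -ETIMEDOUT on timeout.
 */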
961 static int _dpu_crtc_wait_for_frame_done(struct drm_crtc *crtc)
962 {
963         struct dpu_crtc *dpu_crtc;
964         int ret, rc = 0;
965
966         if (!crtc) {
967                 DPU_ERROR("invalid argument\n");
968                 return -EINVAL;
969         }
970         dpu_crtc = to_dpu_crtc(crtc);
971
972         if (!atomic_read(&dpu_crtc->frame_pending)) {
973                 DPU_DEBUG("no frames pending\n");
974                 return 0;
975         }
976
977         DPU_ATRACE_BEGIN("frame done completion wait");
978         ret = wait_for_completion_timeout(&dpu_crtc->frame_done_comp,
979                         msecs_to_jiffies(DPU_FRAME_DONE_TIMEOUT));
980         if (!ret) {
981                 DRM_ERROR("frame done wait timed out, ret:%d\n", ret);
982                 rc = -ETIMEDOUT;
983         }
984         DPU_ATRACE_END("frame done completion wait");
985
986         return rc;
987 }
988
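/*
 * dpu_crtc_commit_kickoff - prepare every encoder on this crtc for kickoff,
 * wait for the previous frame_done event, clear stale VBIF errors and then
 * kick off the encoders to start the new frame.
 */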
989 void dpu_crtc_commit_kickoff(struct drm_crtc *crtc)
990 {
991         struct drm_encoder *encoder;
992         struct drm_device *dev;
993         struct dpu_crtc *dpu_crtc;
994         struct msm_drm_private *priv;
995         struct dpu_kms *dpu_kms;
996         struct dpu_crtc_state *cstate;
997         int ret;
998
999         if (!crtc) {
1000                 DPU_ERROR("invalid argument\n");
1001                 return;
1002         }
1003         dev = crtc->dev;
1004         dpu_crtc = to_dpu_crtc(crtc);
1005         dpu_kms = _dpu_crtc_get_kms(crtc);
1006
1007         if (!dpu_kms || !dpu_kms->dev || !dpu_kms->dev->dev_private) {
1008                 DPU_ERROR("invalid argument\n");
1009                 return;
1010         }
1011
1012         priv = dpu_kms->dev->dev_private;
1013         cstate = to_dpu_crtc_state(crtc->state);
1014
1015         /*
1016          * If no mixers have been allocated in dpu_crtc_atomic_check(),
1017          * it means we are trying to start a CRTC whose state is disabled:
1018          * nothing else needs to be done.
1019          */
1020         if (unlikely(!dpu_crtc->num_mixers))
1021                 return;
1022
1023         DPU_ATRACE_BEGIN("crtc_commit");
1024
1025         list_for_each_entry(encoder, &dev->mode_config.encoder_list, head) {
1026                 struct dpu_encoder_kickoff_params params = { 0 };
1027
1028                 if (encoder->crtc != crtc)
1029                         continue;
1030
1031                 /*
1032                  * Encoder will flush/start now, unless it has a tx pending.
1033                  * If so, it may delay and flush at an irq event (e.g. ppdone)
1034                  */
1035                 dpu_encoder_prepare_for_kickoff(encoder, &params);
1036         }
1037
1038         /* wait for frame_event_done completion */
1039         DPU_ATRACE_BEGIN("wait_for_frame_done_event");
1040         ret = _dpu_crtc_wait_for_frame_done(crtc);
1041         DPU_ATRACE_END("wait_for_frame_done_event");
1042         if (ret) {
1043                 DPU_ERROR("crtc%d wait for frame done failed; frame_pending:%d\n",
1044                                 crtc->base.id,
1045                                 atomic_read(&dpu_crtc->frame_pending));
1046                 goto end;
1047         }
1048
1049         if (atomic_inc_return(&dpu_crtc->frame_pending) == 1) {
1050                 /* acquire bandwidth and other resources */
1051                 DPU_DEBUG("crtc%d first commit\n", crtc->base.id);
1052         } else
1053                 DPU_DEBUG("crtc%d commit\n", crtc->base.id);
1054
1055         dpu_crtc->play_count++;
1056
1057         dpu_vbif_clear_errors(dpu_kms);
1058
1059         list_for_each_entry(encoder, &dev->mode_config.encoder_list, head) {
1060                 if (encoder->crtc != crtc)
1061                         continue;
1062
1063                 dpu_encoder_kickoff(encoder);
1064         }
1065
1066 end:
1067         reinit_completion(&dpu_crtc->frame_done_comp);
1068         DPU_ATRACE_END("crtc_commit");
1069 }
1070
1071 /**
1072  * _dpu_crtc_vblank_enable_no_lock - update power resource and vblank request
1073  * @dpu_crtc: Pointer to dpu crtc structure
1074  * @enable: Whether to enable/disable vblanks
1075  *
1076  * Return: error code
1077  */
1078 static int _dpu_crtc_vblank_enable_no_lock(
1079                 struct dpu_crtc *dpu_crtc, bool enable)
1080 {
1081         struct drm_device *dev;
1082         struct drm_crtc *crtc;
1083         struct drm_encoder *enc;
1084
1085         if (!dpu_crtc) {
1086                 DPU_ERROR("invalid crtc\n");
1087                 return -EINVAL;
1088         }
1089
1090         crtc = &dpu_crtc->base;
1091         dev = crtc->dev;
1092
1093         if (enable) {
1094                 int ret;
1095
1096                 /* drop lock since power crtc cb may try to re-acquire lock */
1097                 mutex_unlock(&dpu_crtc->crtc_lock);
1098                 ret = _dpu_crtc_power_enable(dpu_crtc, true);
1099                 mutex_lock(&dpu_crtc->crtc_lock);
1100                 if (ret)
1101                         return ret;
1102
1103                 list_for_each_entry(enc, &dev->mode_config.encoder_list, head) {
1104                         if (enc->crtc != crtc)
1105                                 continue;
1106
1107                         trace_dpu_crtc_vblank_enable(DRMID(&dpu_crtc->base),
1108                                                      DRMID(enc), enable,
1109                                                      dpu_crtc);
1110
1111                         dpu_encoder_register_vblank_callback(enc,
1112                                         dpu_crtc_vblank_cb, (void *)crtc);
1113                 }
1114         } else {
1115                 list_for_each_entry(enc, &dev->mode_config.encoder_list, head) {
1116                         if (enc->crtc != crtc)
1117                                 continue;
1118
1119                         trace_dpu_crtc_vblank_enable(DRMID(&dpu_crtc->base),
1120                                                      DRMID(enc), enable,
1121                                                      dpu_crtc);
1122
1123                         dpu_encoder_register_vblank_callback(enc, NULL, NULL);
1124                 }
1125
1126                 /* drop lock since power crtc cb may try to re-acquire lock */
1127                 mutex_unlock(&dpu_crtc->crtc_lock);
1128                 _dpu_crtc_power_enable(dpu_crtc, false);
1129                 mutex_lock(&dpu_crtc->crtc_lock);
1130         }
1131
1132         return 0;
1133 }
1134
1135 /**
1136  * _dpu_crtc_set_suspend - notify crtc of suspend enable/disable
1137  * @crtc: Pointer to drm crtc object
1138  * @enable: true to enable suspend, false to indicate resume
1139  */
1140 static void _dpu_crtc_set_suspend(struct drm_crtc *crtc, bool enable)
1141 {
1142         struct dpu_crtc *dpu_crtc;
1143         struct msm_drm_private *priv;
1144         struct dpu_kms *dpu_kms;
1145         int ret = 0;
1146
1147         if (!crtc || !crtc->dev || !crtc->dev->dev_private) {
1148                 DPU_ERROR("invalid crtc\n");
1149                 return;
1150         }
1151         dpu_crtc = to_dpu_crtc(crtc);
1152         priv = crtc->dev->dev_private;
1153
1154         if (!priv->kms) {
1155                 DPU_ERROR("invalid crtc kms\n");
1156                 return;
1157         }
1158         dpu_kms = to_dpu_kms(priv->kms);
1159
1160         DRM_DEBUG_KMS("crtc%d suspend = %d\n", crtc->base.id, enable);
1161
1162         mutex_lock(&dpu_crtc->crtc_lock);
1163
1164         /*
1165          * If the vblank is enabled, release a power reference on suspend
1166          * and take it back during resume (if it is still enabled).
1167          */
1168         trace_dpu_crtc_set_suspend(DRMID(&dpu_crtc->base), enable, dpu_crtc);
1169         if (dpu_crtc->suspend == enable)
1170                 DPU_DEBUG("crtc%d suspend already set to %d, ignoring update\n",
1171                                 crtc->base.id, enable);
1172         else if (dpu_crtc->enabled && dpu_crtc->vblank_requested) {
1173                 ret = _dpu_crtc_vblank_enable_no_lock(dpu_crtc, !enable);
1174                 if (ret)
1175                         DPU_ERROR("%s vblank enable failed: %d\n",
1176                                         dpu_crtc->name, ret);
1177         }
1178
1179         dpu_crtc->suspend = enable;
1180         mutex_unlock(&dpu_crtc->crtc_lock);
1181 }
1182
1183 /**
1184  * dpu_crtc_duplicate_state - state duplicate hook
1185  * @crtc: Pointer to drm crtc structure
1186  * Return: Pointer to the new drm_crtc_state structure
1187  */
1188 static struct drm_crtc_state *dpu_crtc_duplicate_state(struct drm_crtc *crtc)
1189 {
1190         struct dpu_crtc *dpu_crtc;
1191         struct dpu_crtc_state *cstate, *old_cstate;
1192
1193         if (!crtc || !crtc->state) {
1194                 DPU_ERROR("invalid argument(s)\n");
1195                 return NULL;
1196         }
1197
1198         dpu_crtc = to_dpu_crtc(crtc);
1199         old_cstate = to_dpu_crtc_state(crtc->state);
1200         cstate = kmemdup(old_cstate, sizeof(*old_cstate), GFP_KERNEL);
1201         if (!cstate) {
1202                 DPU_ERROR("failed to allocate state\n");
1203                 return NULL;
1204         }
1205
1206         /* duplicate base helper */
1207         __drm_atomic_helper_crtc_duplicate_state(crtc, &cstate->base);
1208
1209         _dpu_crtc_rp_duplicate(&old_cstate->rp, &cstate->rp);
1210
1211         return &cstate->base;
1212 }
1213
1214 /**
1215  * dpu_crtc_reset - reset hook for CRTCs
1216  * Resets the atomic state for @crtc by freeing the state pointer (which might
1217  * be NULL, e.g. at driver load time) and allocating a new empty state object.
1218  * @crtc: Pointer to drm crtc structure
1219  */
1220 static void dpu_crtc_reset(struct drm_crtc *crtc)
1221 {
1222         struct dpu_crtc *dpu_crtc;
1223         struct dpu_crtc_state *cstate;
1224
1225         if (!crtc) {
1226                 DPU_ERROR("invalid crtc\n");
1227                 return;
1228         }
1229
1230         /* revert suspend actions, if necessary */
1231         if (dpu_kms_is_suspend_state(crtc->dev))
1232                 _dpu_crtc_set_suspend(crtc, false);
1233
1234         /* remove previous state, if present */
1235         if (crtc->state) {
1236                 dpu_crtc_destroy_state(crtc, crtc->state);
1237                 crtc->state = NULL;
1238         }
1239
1240         dpu_crtc = to_dpu_crtc(crtc);
1241         cstate = kzalloc(sizeof(*cstate), GFP_KERNEL);
1242         if (!cstate) {
1243                 DPU_ERROR("failed to allocate state\n");
1244                 return;
1245         }
1246
1247         _dpu_crtc_rp_reset(&cstate->rp, &dpu_crtc->rp_lock,
1248                         &dpu_crtc->rp_head);
1249
1250         cstate->base.crtc = crtc;
1251         crtc->state = &cstate->base;
1252 }
1253
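/*
 * dpu_crtc_handle_power_event - power-handle callback: on POST_ENABLE the
 * encoders (and any enabled MISR configuration) are restored, on PRE_DISABLE
 * the current MISR signatures are captured before the hardware loses power.
 */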
1254 static void dpu_crtc_handle_power_event(u32 event_type, void *arg)
1255 {
1256         struct drm_crtc *crtc = arg;
1257         struct dpu_crtc *dpu_crtc;
1258         struct drm_encoder *encoder;
1259         struct dpu_crtc_mixer *m;
1260         u32 i, misr_status;
1261
1262         if (!crtc) {
1263                 DPU_ERROR("invalid crtc\n");
1264                 return;
1265         }
1266         dpu_crtc = to_dpu_crtc(crtc);
1267
1268         mutex_lock(&dpu_crtc->crtc_lock);
1269
1270         trace_dpu_crtc_handle_power_event(DRMID(crtc), event_type);
1271
1272         switch (event_type) {
1273         case DPU_POWER_EVENT_POST_ENABLE:
1274                 /* restore encoder; crtc will be programmed during commit */
1275                 drm_for_each_encoder(encoder, crtc->dev) {
1276                         if (encoder->crtc != crtc)
1277                                 continue;
1278
1279                         dpu_encoder_virt_restore(encoder);
1280                 }
1281
1282                 for (i = 0; i < dpu_crtc->num_mixers; ++i) {
1283                         m = &dpu_crtc->mixers[i];
1284                         if (!m->hw_lm || !m->hw_lm->ops.setup_misr ||
1285                                         !dpu_crtc->misr_enable)
1286                                 continue;
1287
1288                         m->hw_lm->ops.setup_misr(m->hw_lm, true,
1289                                         dpu_crtc->misr_frame_count);
1290                 }
1291                 break;
1292         case DPU_POWER_EVENT_PRE_DISABLE:
1293                 for (i = 0; i < dpu_crtc->num_mixers; ++i) {
1294                         m = &dpu_crtc->mixers[i];
1295                         if (!m->hw_lm || !m->hw_lm->ops.collect_misr ||
1296                                         !dpu_crtc->misr_enable)
1297                                 continue;
1298
1299                         misr_status = m->hw_lm->ops.collect_misr(m->hw_lm);
1300                         dpu_crtc->misr_data[i] = misr_status ? misr_status :
1301                                                         dpu_crtc->misr_data[i];
1302                 }
1303                 break;
1304         case DPU_POWER_EVENT_POST_DISABLE:
1305                 /*
1306                  * Nothing to do. All the planes on the CRTC will be
1307                  * programmed for every frame
1308                  */
1309                 break;
1310         default:
1311                 DPU_DEBUG("event:%d not handled\n", event_type);
1312                 break;
1313         }
1314
1315         mutex_unlock(&dpu_crtc->crtc_lock);
1316 }
1317
1318 static void dpu_crtc_disable(struct drm_crtc *crtc)
1319 {
1320         struct dpu_crtc *dpu_crtc;
1321         struct dpu_crtc_state *cstate;
1322         struct drm_display_mode *mode;
1323         struct drm_encoder *encoder;
1324         struct msm_drm_private *priv;
1325         int ret;
1326         unsigned long flags;
1327
1328         if (!crtc || !crtc->dev || !crtc->dev->dev_private || !crtc->state) {
1329                 DPU_ERROR("invalid crtc\n");
1330                 return;
1331         }
1332         dpu_crtc = to_dpu_crtc(crtc);
1333         cstate = to_dpu_crtc_state(crtc->state);
1334         mode = &cstate->base.adjusted_mode;
1335         priv = crtc->dev->dev_private;
1336
1337         DRM_DEBUG_KMS("crtc%d\n", crtc->base.id);
1338
1339         if (dpu_kms_is_suspend_state(crtc->dev))
1340                 _dpu_crtc_set_suspend(crtc, true);
1341
1342         /* Disable/save vblank irq handling */
1343         drm_crtc_vblank_off(crtc);
1344
1345         mutex_lock(&dpu_crtc->crtc_lock);
1346
1347         /* wait for frame_event_done completion */
1348         if (_dpu_crtc_wait_for_frame_done(crtc))
1349                 DPU_ERROR("crtc%d wait for frame done failed; frame_pending:%d\n",
1350                                 crtc->base.id,
1351                                 atomic_read(&dpu_crtc->frame_pending));
1352
1353         trace_dpu_crtc_disable(DRMID(crtc), false, dpu_crtc);
1354         if (dpu_crtc->enabled && !dpu_crtc->suspend &&
1355                         dpu_crtc->vblank_requested) {
1356                 ret = _dpu_crtc_vblank_enable_no_lock(dpu_crtc, false);
1357                 if (ret)
1358                         DPU_ERROR("%s vblank enable failed: %d\n",
1359                                         dpu_crtc->name, ret);
1360         }
1361         dpu_crtc->enabled = false;
1362
1363         if (atomic_read(&dpu_crtc->frame_pending)) {
1364                 trace_dpu_crtc_disable_frame_pending(DRMID(crtc),
1365                                      atomic_read(&dpu_crtc->frame_pending));
1366                 dpu_core_perf_crtc_release_bw(crtc);
1367                 atomic_set(&dpu_crtc->frame_pending, 0);
1368         }
1369
1370         dpu_core_perf_crtc_update(crtc, 0, true);
1371
1372         drm_for_each_encoder(encoder, crtc->dev) {
1373                 if (encoder->crtc != crtc)
1374                         continue;
1375                 dpu_encoder_register_frame_event_callback(encoder, NULL, NULL);
1376         }
1377
1378         if (dpu_crtc->power_event)
1379                 dpu_power_handle_unregister_event(dpu_crtc->phandle,
1380                                 dpu_crtc->power_event);
1381
1382         memset(dpu_crtc->mixers, 0, sizeof(dpu_crtc->mixers));
1383         dpu_crtc->num_mixers = 0;
1384         dpu_crtc->mixers_swapped = false;
1385
1386         /* disable clk & bw control until clk & bw properties are set */
1387         cstate->bw_control = false;
1388         cstate->bw_split_vote = false;
1389
1390         mutex_unlock(&dpu_crtc->crtc_lock);
1391
1392         if (crtc->state->event && !crtc->state->active) {
1393                 spin_lock_irqsave(&crtc->dev->event_lock, flags);
1394                 drm_crtc_send_vblank_event(crtc, crtc->state->event);
1395                 crtc->state->event = NULL;
1396                 spin_unlock_irqrestore(&crtc->dev->event_lock, flags);
1397         }
1398 }
1399
1400 static void dpu_crtc_enable(struct drm_crtc *crtc,
1401                 struct drm_crtc_state *old_crtc_state)
1402 {
1403         struct dpu_crtc *dpu_crtc;
1404         struct drm_encoder *encoder;
1405         struct msm_drm_private *priv;
1406         int ret;
1407
1408         if (!crtc || !crtc->dev || !crtc->dev->dev_private) {
1409                 DPU_ERROR("invalid crtc\n");
1410                 return;
1411         }
1412         priv = crtc->dev->dev_private;
1413
1414         DRM_DEBUG_KMS("crtc%d\n", crtc->base.id);
1415         dpu_crtc = to_dpu_crtc(crtc);
1416
1417         drm_for_each_encoder(encoder, crtc->dev) {
1418                 if (encoder->crtc != crtc)
1419                         continue;
1420                 dpu_encoder_register_frame_event_callback(encoder,
1421                                 dpu_crtc_frame_event_cb, (void *)crtc);
1422         }
1423
1424         mutex_lock(&dpu_crtc->crtc_lock);
1425         trace_dpu_crtc_enable(DRMID(crtc), true, dpu_crtc);
1426         if (!dpu_crtc->enabled && !dpu_crtc->suspend &&
1427                         dpu_crtc->vblank_requested) {
1428                 ret = _dpu_crtc_vblank_enable_no_lock(dpu_crtc, true);
1429                 if (ret)
1430                         DPU_ERROR("%s vblank enable failed: %d\n",
1431                                         dpu_crtc->name, ret);
1432         }
1433         dpu_crtc->enabled = true;
1434
1435         mutex_unlock(&dpu_crtc->crtc_lock);
1436
1437         /* Enable/restore vblank irq handling */
1438         drm_crtc_vblank_on(crtc);
1439
1440         dpu_crtc->power_event = dpu_power_handle_register_event(
1441                 dpu_crtc->phandle,
1442                 DPU_POWER_EVENT_POST_ENABLE | DPU_POWER_EVENT_POST_DISABLE |
1443                 DPU_POWER_EVENT_PRE_DISABLE,
1444                 dpu_crtc_handle_power_event, crtc, dpu_crtc->name);
1445
1446 }
1447
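/*
 * struct plane_state - scratch bookkeeping used by dpu_crtc_atomic_check()
 * to collect and validate the planes staged on this crtc (blend stage,
 * backing SSPP pipe and the associated plane states).
 */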
1448 struct plane_state {
1449         struct dpu_plane_state *dpu_pstate;
1450         const struct drm_plane_state *drm_pstate;
1451         int stage;
1452         u32 pipe_id;
1453 };
1454
1455 static int dpu_crtc_atomic_check(struct drm_crtc *crtc,
1456                 struct drm_crtc_state *state)
1457 {
1458         struct dpu_crtc *dpu_crtc;
1459         struct plane_state *pstates;
1460         struct dpu_crtc_state *cstate;
1461
1462         const struct drm_plane_state *pstate;
1463         struct drm_plane *plane;
1464         struct drm_display_mode *mode;
1465
1466         int cnt = 0, rc = 0, mixer_width, i, z_pos;
1467
1468         struct dpu_multirect_plane_states multirect_plane[DPU_STAGE_MAX * 2];
1469         int multirect_count = 0;
1470         const struct drm_plane_state *pipe_staged[SSPP_MAX];
1471         int left_zpos_cnt = 0, right_zpos_cnt = 0;
1472         struct drm_rect crtc_rect = { 0 };
1473
1474         if (!crtc) {
1475                 DPU_ERROR("invalid crtc\n");
1476                 return -EINVAL;
1477         }
1478
1479         pstates = kcalloc(DPU_STAGE_MAX * 4, sizeof(*pstates), GFP_KERNEL);
1480         if (!pstates)
1481                 return -ENOMEM;
1482
1483         dpu_crtc = to_dpu_crtc(crtc);
1484         cstate = to_dpu_crtc_state(state);
1485
1486         if (!state->enable || !state->active) {
1487                 DPU_DEBUG("crtc%d -> enable %d, active %d, skip atomic_check\n",
1488                                 crtc->base.id, state->enable, state->active);
1489                 goto end;
1490         }
1491
1492         mode = &state->adjusted_mode;
1493         DPU_DEBUG("%s: check\n", dpu_crtc->name);
1494
1495         /* force a full mode set if active state changed */
1496         if (state->active_changed)
1497                 state->mode_changed = true;
1498
1499         memset(pipe_staged, 0, sizeof(pipe_staged));
1500
1501         mixer_width = dpu_crtc_get_mixer_width(dpu_crtc, cstate, mode);
1502
1503         _dpu_crtc_setup_lm_bounds(crtc, state);
1504
1505         crtc_rect.x2 = mode->hdisplay;
1506         crtc_rect.y2 = mode->vdisplay;
1507
1508         /* get plane state for all drm planes associated with crtc state */
1509         drm_atomic_crtc_state_for_each_plane_state(plane, pstate, state) {
1510                 struct drm_rect dst, clip = crtc_rect;
1511
1512                 if (IS_ERR_OR_NULL(pstate)) {
1513                         rc = PTR_ERR(pstate) ?: -EINVAL;
1514                         DPU_ERROR("%s: failed to get plane%d state, %d\n",
1515                                         dpu_crtc->name, plane->base.id, rc);
1516                         goto end;
1517                 }
1518                 if (cnt >= DPU_STAGE_MAX * 4)
1519                         continue;
1520
1521                 pstates[cnt].dpu_pstate = to_dpu_plane_state(pstate);
1522                 pstates[cnt].drm_pstate = pstate;
1523                 pstates[cnt].stage = pstate->normalized_zpos;
1524                 pstates[cnt].pipe_id = dpu_plane_pipe(plane);
1525
1526                 if (pipe_staged[pstates[cnt].pipe_id]) {
1527                         multirect_plane[multirect_count].r0 =
1528                                 pipe_staged[pstates[cnt].pipe_id];
1529                         multirect_plane[multirect_count].r1 = pstate;
1530                         multirect_count++;
1531
1532                         pipe_staged[pstates[cnt].pipe_id] = NULL;
1533                 } else {
1534                         pipe_staged[pstates[cnt].pipe_id] = pstate;
1535                 }
1536
1537                 cnt++;
1538
1539                 dst = drm_plane_state_dest(pstate);
1540                 if (!drm_rect_intersect(&clip, &dst)) {
1541                         DPU_ERROR("invalid vertical/horizontal destination\n");
1542                         DPU_ERROR("display: " DRM_RECT_FMT " plane: "
1543                                   DRM_RECT_FMT "\n", DRM_RECT_ARG(&crtc_rect),
1544                                   DRM_RECT_ARG(&dst));
1545                         rc = -E2BIG;
1546                         goto end;
1547                 }
1548         }
1549
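        /*
         * Pipes that ended up with only one plane staged get their multirect
         * mode cleared; an R1-only configuration on a virtual plane is not
         * supported and is rejected here.
         */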
1550         for (i = 1; i < SSPP_MAX; i++) {
1551                 if (pipe_staged[i]) {
1552                         dpu_plane_clear_multirect(pipe_staged[i]);
1553
1554                         if (is_dpu_plane_virtual(pipe_staged[i]->plane)) {
1555                                 DPU_ERROR(
1556                                         "r1 only virt plane:%d not supported\n",
1557                                         pipe_staged[i]->plane->base.id);
1558                                 rc = -EINVAL;
1559                                 goto end;
1560                         }
1561                 }
1562         }
1563
1564         z_pos = -1;
1565         for (i = 0; i < cnt; i++) {
1566                 /* reset counts at every new blend stage */
1567                 if (pstates[i].stage != z_pos) {
1568                         left_zpos_cnt = 0;
1569                         right_zpos_cnt = 0;
1570                         z_pos = pstates[i].stage;
1571                 }
1572
1573                 /* verify z_pos setting before using it */
1574                 if (z_pos >= DPU_STAGE_MAX - DPU_STAGE_0) {
1575                         DPU_ERROR("> %d plane stages assigned\n",
1576                                         DPU_STAGE_MAX - DPU_STAGE_0);
1577                         rc = -EINVAL;
1578                         goto end;
1579                 } else if (pstates[i].drm_pstate->crtc_x < mixer_width) {
1580                         if (left_zpos_cnt == 2) {
1581                                 DPU_ERROR("> 2 planes @ stage %d on left\n",
1582                                         z_pos);
1583                                 rc = -EINVAL;
1584                                 goto end;
1585                         }
1586                         left_zpos_cnt++;
1587
1588                 } else {
1589                         if (right_zpos_cnt == 2) {
1590                                 DPU_ERROR("> 2 planes @ stage %d on right\n",
1591                                         z_pos);
1592                                 rc = -EINVAL;
1593                                 goto end;
1594                         }
1595                         right_zpos_cnt++;
1596                 }
1597
1598                 pstates[i].dpu_pstate->stage = z_pos + DPU_STAGE_0;
1599                 DPU_DEBUG("%s: zpos %d\n", dpu_crtc->name, z_pos);
1600         }
1601
1602         for (i = 0; i < multirect_count; i++) {
1603                 if (dpu_plane_validate_multirect_v2(&multirect_plane[i])) {
1604                         DPU_ERROR(
1605                         "multirect validation failed for planes (%d - %d)\n",
1606                                         multirect_plane[i].r0->plane->base.id,
1607                                         multirect_plane[i].r1->plane->base.id);
1608                         rc = -EINVAL;
1609                         goto end;
1610                 }
1611         }
1612
1613         rc = dpu_core_perf_crtc_check(crtc, state);
1614         if (rc) {
1615                 DPU_ERROR("crtc%d failed performance check %d\n",
1616                                 crtc->base.id, rc);
1617                 goto end;
1618         }
1619
1620         /*
1621          * validate source split: use pstates sorted by stage to check planes
1622          * on the same stage; we assume all pipes are in source split, so it's
1623          * valid to compare without taking left/right mixer placement into account
1624          */
1625         for (i = 1; i < cnt; i++) {
1626                 struct plane_state *prv_pstate, *cur_pstate;
1627                 struct drm_rect left_rect, right_rect;
1628                 int32_t left_pid, right_pid;
1629                 int32_t stage;
1630
1631                 prv_pstate = &pstates[i - 1];
1632                 cur_pstate = &pstates[i];
1633                 if (prv_pstate->stage != cur_pstate->stage)
1634                         continue;
1635
1636                 stage = cur_pstate->stage;
1637
1638                 left_pid = prv_pstate->dpu_pstate->base.plane->base.id;
1639                 left_rect = drm_plane_state_dest(prv_pstate->drm_pstate);
1640
1641                 right_pid = cur_pstate->dpu_pstate->base.plane->base.id;
1642                 right_rect = drm_plane_state_dest(cur_pstate->drm_pstate);
1643
1644                 if (right_rect.x1 < left_rect.x1) {
1645                         swap(left_pid, right_pid);
1646                         swap(left_rect, right_rect);
1647                 }
1648
1649                 /*
1650                  * - planes are enumerated in pipe-priority order, so the plane
1651                  *   with the lower drm id must be left-most within a shared
1652                  *   blend stage when using source split
1653                  * - planes in source split must be contiguous in width
1654                  * - planes in source split must have the same dest y offset and height
1655                  */
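                /*
                 * Illustrative example (not taken from the code): two
                 * 960x1080 planes sharing a stage on a 1920x1080 crtc pass
                 * these checks only if the lower-id plane covers x = [0, 960)
                 * and the other covers x = [960, 1920), both with the same
                 * y offset and height.
                 */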
1656                 if (right_pid < left_pid) {
1657                         DPU_ERROR(
1658                                 "invalid src split cfg. priority mismatch. stage: %d left: %d right: %d\n",
1659                                 stage, left_pid, right_pid);
1660                         rc = -EINVAL;
1661                         goto end;
1662                 } else if (right_rect.x1 != drm_rect_width(&left_rect)) {
1663                         DPU_ERROR("non-contiguous coordinates for src split. "
1664                                   "stage: %d left: " DRM_RECT_FMT " right: "
1665                                   DRM_RECT_FMT "\n", stage,
1666                                   DRM_RECT_ARG(&left_rect),
1667                                   DRM_RECT_ARG(&right_rect));
1668                         rc = -EINVAL;
1669                         goto end;
1670                 } else if (left_rect.y1 != right_rect.y1 ||
1671                            drm_rect_height(&left_rect) != drm_rect_height(&right_rect)) {
1672                         DPU_ERROR("source split at stage: %d. invalid "
1673                                   "yoff/height: left: " DRM_RECT_FMT " right: "
1674                                   DRM_RECT_FMT "\n", stage,
1675                                   DRM_RECT_ARG(&left_rect),
1676                                   DRM_RECT_ARG(&right_rect));
1677                         rc = -EINVAL;
1678                         goto end;
1679                 }
1680         }
1681
1682 end:
1683         _dpu_crtc_rp_free_unused(&cstate->rp);
1684         kfree(pstates);
1685         return rc;
1686 }
1687
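/*
 * dpu_crtc_vblank - record a vblank on/off request and, while the crtc is
 * enabled and not suspended, apply it immediately; dpu_crtc_enable()
 * re-applies the remembered request when the crtc comes back up.
 */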
1688 int dpu_crtc_vblank(struct drm_crtc *crtc, bool en)
1689 {
1690         struct dpu_crtc *dpu_crtc;
1691         int ret;
1692
1693         if (!crtc) {
1694                 DPU_ERROR("invalid crtc\n");
1695                 return -EINVAL;
1696         }
1697         dpu_crtc = to_dpu_crtc(crtc);
1698
1699         mutex_lock(&dpu_crtc->crtc_lock);
1700         trace_dpu_crtc_vblank(DRMID(&dpu_crtc->base), en, dpu_crtc);
1701         if (dpu_crtc->enabled && !dpu_crtc->suspend) {
1702                 ret = _dpu_crtc_vblank_enable_no_lock(dpu_crtc, en);
1703                 if (ret)
1704                         DPU_ERROR("%s vblank enable failed: %d\n",
1705                                         dpu_crtc->name, ret);
1706         }
1707         dpu_crtc->vblank_requested = en;
1708         mutex_unlock(&dpu_crtc->crtc_lock);
1709
1710         return 0;
1711 }
1712
1713 #ifdef CONFIG_DEBUG_FS
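/*
 * debugfs "status" node: dumps the active mode, the mixer/ctl assignment,
 * per-plane framebuffer and src/dst rectangle programming, and a rough
 * vblank fps estimate; the vblank counters are reset once reported.
 */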
1714 static int _dpu_debugfs_status_show(struct seq_file *s, void *data)
1715 {
1716         struct dpu_crtc *dpu_crtc;
1717         struct dpu_plane_state *pstate = NULL;
1718         struct dpu_crtc_mixer *m;
1719
1720         struct drm_crtc *crtc;
1721         struct drm_plane *plane;
1722         struct drm_display_mode *mode;
1723         struct drm_framebuffer *fb;
1724         struct drm_plane_state *state;
1725         struct dpu_crtc_state *cstate;
1726
1727         int i, out_width;
1728
1729         if (!s || !s->private)
1730                 return -EINVAL;
1731
1732         dpu_crtc = s->private;
1733         crtc = &dpu_crtc->base;
1734         cstate = to_dpu_crtc_state(crtc->state);
1735
1736         mutex_lock(&dpu_crtc->crtc_lock);
1737         mode = &crtc->state->adjusted_mode;
1738         out_width = dpu_crtc_get_mixer_width(dpu_crtc, cstate, mode);
1739
1740         seq_printf(s, "crtc:%d width:%d height:%d\n", crtc->base.id,
1741                                 mode->hdisplay, mode->vdisplay);
1742
1743         seq_puts(s, "\n");
1744
1745         for (i = 0; i < dpu_crtc->num_mixers; ++i) {
1746                 m = &dpu_crtc->mixers[i];
1747                 if (!m->hw_lm)
1748                         seq_printf(s, "\tmixer[%d] has no lm\n", i);
1749                 else if (!m->hw_ctl)
1750                         seq_printf(s, "\tmixer[%d] has no ctl\n", i);
1751                 else
1752                         seq_printf(s, "\tmixer:%d ctl:%d width:%d height:%d\n",
1753                                 m->hw_lm->idx - LM_0, m->hw_ctl->idx - CTL_0,
1754                                 out_width, mode->vdisplay);
1755         }
1756
1757         seq_puts(s, "\n");
1758
1759         drm_atomic_crtc_for_each_plane(plane, crtc) {
1760                 pstate = to_dpu_plane_state(plane->state);
1761                 state = plane->state;
1762
1763                 if (!pstate || !state)
1764                         continue;
1765
1766                 seq_printf(s, "\tplane:%u stage:%d\n", plane->base.id,
1767                         pstate->stage);
1768
1769                 if (plane->state->fb) {
1770                         fb = plane->state->fb;
1771
1772                         seq_printf(s, "\tfb:%d image format:%4.4s wxh:%ux%u ",
1773                                 fb->base.id, (char *) &fb->format->format,
1774                                 fb->width, fb->height);
1775                         for (i = 0; i < ARRAY_SIZE(fb->format->cpp); ++i)
1776                                 seq_printf(s, "cpp[%d]:%u ",
1777                                                 i, fb->format->cpp[i]);
1778                         seq_puts(s, "\n\t");
1779
1780                         seq_printf(s, "modifier:%8llu ", fb->modifier);
1781                         seq_puts(s, "\n");
1782
1783                         seq_puts(s, "\t");
1784                         for (i = 0; i < ARRAY_SIZE(fb->pitches); i++)
1785                                 seq_printf(s, "pitches[%d]:%8u ", i,
1786                                                         fb->pitches[i]);
1787                         seq_puts(s, "\n");
1788
1789                         seq_puts(s, "\t");
1790                         for (i = 0; i < ARRAY_SIZE(fb->offsets); i++)
1791                                 seq_printf(s, "offsets[%d]:%8u ", i,
1792                                                         fb->offsets[i]);
1793                         seq_puts(s, "\n");
1794                 }
1795
1796                 seq_printf(s, "\tsrc_x:%4d src_y:%4d src_w:%4d src_h:%4d\n",
1797                         state->src_x, state->src_y, state->src_w, state->src_h);
1798
1799                 seq_printf(s, "\tdst_x:%4d dst_y:%4d dst_w:%4d dst_h:%4d\n",
1800                         state->crtc_x, state->crtc_y, state->crtc_w,
1801                         state->crtc_h);
1802                 seq_printf(s, "\tmultirect: mode: %d index: %d\n",
1803                         pstate->multirect_mode, pstate->multirect_index);
1804
1805                 seq_puts(s, "\n");
1806         }
1807         if (dpu_crtc->vblank_cb_count) {
1808                 ktime_t diff = ktime_sub(ktime_get(), dpu_crtc->vblank_cb_time);
1809                 s64 diff_ms = ktime_to_ms(diff);
1810                 s64 fps = diff_ms ? div_s64(
1811                                 dpu_crtc->vblank_cb_count * 1000, diff_ms) : 0;
1812
1813                 seq_printf(s,
1814                         "vblank fps:%lld count:%u total:%llums total_framecount:%llu\n",
1815                                 fps, dpu_crtc->vblank_cb_count,
1816                                 ktime_to_ms(diff), dpu_crtc->play_count);
1817
1818                 /* reset time & count for next measurement */
1819                 dpu_crtc->vblank_cb_count = 0;
1820                 dpu_crtc->vblank_cb_time = ktime_set(0, 0);
1821         }
1822
1823         seq_printf(s, "vblank_enable:%d\n", dpu_crtc->vblank_requested);
1824
1825         mutex_unlock(&dpu_crtc->crtc_lock);
1826
1827         return 0;
1828 }
1829
1830 static int _dpu_debugfs_status_open(struct inode *inode, struct file *file)
1831 {
1832         return single_open(file, _dpu_debugfs_status_show, inode->i_private);
1833 }
1834
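/*
 * debugfs "misr_data" write handler.  The input is "<enable> <frame_count>",
 * e.g. (the exact path depends on the drm minor and the crtc name):
 *
 *   echo "1 8" > /sys/kernel/debug/dri/0/<crtc name>/misr_data
 *
 * which asks every mixer of this crtc that implements setup_misr() to start
 * (or stop, for enable=0) MISR collection with the given frame count.
 */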
1835 static ssize_t _dpu_crtc_misr_setup(struct file *file,
1836                 const char __user *user_buf, size_t count, loff_t *ppos)
1837 {
1838         struct dpu_crtc *dpu_crtc;
1839         struct dpu_crtc_mixer *m;
1840         int i = 0, rc;
1841         char buf[MISR_BUFF_SIZE + 1];
1842         u32 frame_count, enable;
1843         size_t buff_copy;
1844
1845         if (!file || !file->private_data)
1846                 return -EINVAL;
1847
1848         dpu_crtc = file->private_data;
1849         buff_copy = min_t(size_t, count, MISR_BUFF_SIZE);
1850         if (copy_from_user(buf, user_buf, buff_copy)) {
1851                 DPU_ERROR("buffer copy failed\n");
1852                 return -EINVAL;
1853         }
1854
1855         buf[buff_copy] = 0; /* end of string */
1856
1857         if (sscanf(buf, "%u %u", &enable, &frame_count) != 2)
1858                 return -EINVAL;
1859
1860         rc = _dpu_crtc_power_enable(dpu_crtc, true);
1861         if (rc)
1862                 return rc;
1863
1864         mutex_lock(&dpu_crtc->crtc_lock);
1865         dpu_crtc->misr_enable = enable;
1866         dpu_crtc->misr_frame_count = frame_count;
1867         for (i = 0; i < dpu_crtc->num_mixers; ++i) {
1868                 dpu_crtc->misr_data[i] = 0;
1869                 m = &dpu_crtc->mixers[i];
1870                 if (!m->hw_lm || !m->hw_lm->ops.setup_misr)
1871                         continue;
1872
1873                 m->hw_lm->ops.setup_misr(m->hw_lm, enable, frame_count);
1874         }
1875         mutex_unlock(&dpu_crtc->crtc_lock);
1876         _dpu_crtc_power_enable(dpu_crtc, false);
1877
1878         return count;
1879 }
1880
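/*
 * debugfs "misr_data" read handler: reports "disabled" when MISR is off,
 * otherwise prints an "lm idx:<n>" / "0x<signature>" pair per mixer, keeping
 * the last non-zero signature collected for each mixer.
 */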
1881 static ssize_t _dpu_crtc_misr_read(struct file *file,
1882                 char __user *user_buff, size_t count, loff_t *ppos)
1883 {
1884         struct dpu_crtc *dpu_crtc;
1885         struct dpu_crtc_mixer *m;
1886         int i = 0, rc;
1887         u32 misr_status;
1888         ssize_t len = 0;
1889         char buf[MISR_BUFF_SIZE + 1] = {'\0'};
1890
1891         if (*ppos)
1892                 return 0;
1893
1894         if (!file || !file->private_data)
1895                 return -EINVAL;
1896
1897         dpu_crtc = file->private_data;
1898         rc = _dpu_crtc_power_enable(dpu_crtc, true);
1899         if (rc)
1900                 return rc;
1901
1902         mutex_lock(&dpu_crtc->crtc_lock);
1903         if (!dpu_crtc->misr_enable) {
1904                 len += scnprintf(buf + len, MISR_BUFF_SIZE - len,
1905                         "disabled\n");
1906                 goto buff_check;
1907         }
1908
1909         for (i = 0; i < dpu_crtc->num_mixers; ++i) {
1910                 m = &dpu_crtc->mixers[i];
1911                 if (!m->hw_lm || !m->hw_lm->ops.collect_misr)
1912                         continue;
1913
1914                 misr_status = m->hw_lm->ops.collect_misr(m->hw_lm);
1915                 dpu_crtc->misr_data[i] = misr_status ? misr_status :
1916                                                         dpu_crtc->misr_data[i];
1917                 len += scnprintf(buf + len, MISR_BUFF_SIZE - len, "lm idx:%d\n",
1918                                         m->hw_lm->idx - LM_0);
1919                 len += scnprintf(buf + len, MISR_BUFF_SIZE - len, "0x%x\n",
1920                                                         dpu_crtc->misr_data[i]);
1921         }
1922
1923 buff_check:
1924         if (count <= len) {
1925                 len = 0;
1926                 goto end;
1927         }
1928
1929         if (copy_to_user(user_buff, buf, len)) {
1930                 len = -EFAULT;
1931                 goto end;
1932         }
1933
1934         *ppos += len;   /* increase offset */
1935
1936 end:
1937         mutex_unlock(&dpu_crtc->crtc_lock);
1938         _dpu_crtc_power_enable(dpu_crtc, false);
1939         return len;
1940 }
1941
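/*
 * DEFINE_DPU_DEBUGFS_SEQ_FOPS(foo) expects a foo_show() seq_file callback to
 * exist and generates foo_open() plus a read-only foo_fops suitable for
 * debugfs_create_file(); see dpu_crtc_debugfs_state below.
 */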
1942 #define DEFINE_DPU_DEBUGFS_SEQ_FOPS(__prefix)                          \
1943 static int __prefix ## _open(struct inode *inode, struct file *file)    \
1944 {                                                                       \
1945         return single_open(file, __prefix ## _show, inode->i_private);  \
1946 }                                                                       \
1947 static const struct file_operations __prefix ## _fops = {               \
1948         .owner = THIS_MODULE,                                           \
1949         .open = __prefix ## _open,                                      \
1950         .release = single_release,                                      \
1951         .read = seq_read,                                               \
1952         .llseek = seq_lseek,                                            \
1953 }
1954
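/*
 * debugfs "state" node: prints the client type, interface mode, the current
 * perf votes (core clock rate and per-data-bus bandwidth/ib values) and the
 * contents of every resource pool attached to this crtc.
 */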
1955 static int dpu_crtc_debugfs_state_show(struct seq_file *s, void *v)
1956 {
1957         struct drm_crtc *crtc = (struct drm_crtc *) s->private;
1958         struct dpu_crtc *dpu_crtc = to_dpu_crtc(crtc);
1959         struct dpu_crtc_res *res;
1960         struct dpu_crtc_respool *rp;
1961         int i;
1962
1963         seq_printf(s, "client type: %d\n", dpu_crtc_get_client_type(crtc));
1964         seq_printf(s, "intf_mode: %d\n", dpu_crtc_get_intf_mode(crtc));
1965         seq_printf(s, "core_clk_rate: %llu\n",
1966                         dpu_crtc->cur_perf.core_clk_rate);
1967         for (i = DPU_POWER_HANDLE_DBUS_ID_MNOC;
1968                         i < DPU_POWER_HANDLE_DBUS_ID_MAX; i++) {
1969                 seq_printf(s, "bw_ctl[%s]: %llu\n",
1970                                 dpu_power_handle_get_dbus_name(i),
1971                                 dpu_crtc->cur_perf.bw_ctl[i]);
1972                 seq_printf(s, "max_per_pipe_ib[%s]: %llu\n",
1973                                 dpu_power_handle_get_dbus_name(i),
1974                                 dpu_crtc->cur_perf.max_per_pipe_ib[i]);
1975         }
1976
1977         mutex_lock(&dpu_crtc->rp_lock);
1978         list_for_each_entry(rp, &dpu_crtc->rp_head, rp_list) {
1979                 seq_printf(s, "rp.%d: ", rp->sequence_id);
1980                 list_for_each_entry(res, &rp->res_list, list)
1981                         seq_printf(s, "0x%x/0x%llx/%pK/%d ",
1982                                         res->type, res->tag, res->val,
1983                                         atomic_read(&res->refcount));
1984                 seq_puts(s, "\n");
1985         }
1986         mutex_unlock(&dpu_crtc->rp_lock);
1987
1988         return 0;
1989 }
1990 DEFINE_DPU_DEBUGFS_SEQ_FOPS(dpu_crtc_debugfs_state);
1991
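/*
 * Create the per-crtc debugfs directory (named after the crtc) holding the
 * "status", "state" and "misr_data" nodes.  Only the directory creation is
 * error checked; the individual files are best effort, as noted below.
 */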
1992 static int _dpu_crtc_init_debugfs(struct drm_crtc *crtc)
1993 {
1994         struct dpu_crtc *dpu_crtc;
1995         struct dpu_kms *dpu_kms;
1996
1997         static const struct file_operations debugfs_status_fops = {
1998                 .open =         _dpu_debugfs_status_open,
1999                 .read =         seq_read,
2000                 .llseek =       seq_lseek,
2001                 .release =      single_release,
2002         };
2003         static const struct file_operations debugfs_misr_fops = {
2004                 .open =         simple_open,
2005                 .read =         _dpu_crtc_misr_read,
2006                 .write =        _dpu_crtc_misr_setup,
2007         };
2008
2009         if (!crtc)
2010                 return -EINVAL;
2011         dpu_crtc = to_dpu_crtc(crtc);
2012
2013         dpu_kms = _dpu_crtc_get_kms(crtc);
2014         if (!dpu_kms)
2015                 return -EINVAL;
2016
2017         dpu_crtc->debugfs_root = debugfs_create_dir(dpu_crtc->name,
2018                         crtc->dev->primary->debugfs_root);
2019         if (!dpu_crtc->debugfs_root)
2020                 return -ENOMEM;
2021
2022         /* don't error check these */
2023         debugfs_create_file("status", 0400,
2024                         dpu_crtc->debugfs_root,
2025                         dpu_crtc, &debugfs_status_fops);
2026         debugfs_create_file("state", 0600,
2027                         dpu_crtc->debugfs_root,
2028                         &dpu_crtc->base,
2029                         &dpu_crtc_debugfs_state_fops);
2030         debugfs_create_file("misr_data", 0600, dpu_crtc->debugfs_root,
2031                                         dpu_crtc, &debugfs_misr_fops);
2032
2033         return 0;
2034 }
2035
2036 static void _dpu_crtc_destroy_debugfs(struct drm_crtc *crtc)
2037 {
2038         struct dpu_crtc *dpu_crtc;
2039
2040         if (!crtc)
2041                 return;
2042         dpu_crtc = to_dpu_crtc(crtc);
2043         debugfs_remove_recursive(dpu_crtc->debugfs_root);
2044 }
2045 #else
2046 static int _dpu_crtc_init_debugfs(struct drm_crtc *crtc)
2047 {
2048         return 0;
2049 }
2050
2051 static void _dpu_crtc_destroy_debugfs(struct drm_crtc *crtc)
2052 {
2053 }
2054 #endif /* CONFIG_DEBUG_FS */
2055
2056 static int dpu_crtc_late_register(struct drm_crtc *crtc)
2057 {
2058         return _dpu_crtc_init_debugfs(crtc);
2059 }
2060
2061 static void dpu_crtc_early_unregister(struct drm_crtc *crtc)
2062 {
2063         _dpu_crtc_destroy_debugfs(crtc);
2064 }
2065
2066 static const struct drm_crtc_funcs dpu_crtc_funcs = {
2067         .set_config = drm_atomic_helper_set_config,
2068         .destroy = dpu_crtc_destroy,
2069         .page_flip = drm_atomic_helper_page_flip,
2070         .reset = dpu_crtc_reset,
2071         .atomic_duplicate_state = dpu_crtc_duplicate_state,
2072         .atomic_destroy_state = dpu_crtc_destroy_state,
2073         .late_register = dpu_crtc_late_register,
2074         .early_unregister = dpu_crtc_early_unregister,
2075 };
2076
2077 static const struct drm_crtc_helper_funcs dpu_crtc_helper_funcs = {
2078         .disable = dpu_crtc_disable,
2079         .atomic_enable = dpu_crtc_enable,
2080         .atomic_check = dpu_crtc_atomic_check,
2081         .atomic_begin = dpu_crtc_atomic_begin,
2082         .atomic_flush = dpu_crtc_atomic_flush,
2083 };
2084
2085 /* allocate and initialize a dpu crtc, binding the given primary plane */
2086 struct drm_crtc *dpu_crtc_init(struct drm_device *dev, struct drm_plane *plane)
2087 {
2088         struct drm_crtc *crtc = NULL;
2089         struct dpu_crtc *dpu_crtc = NULL;
2090         struct msm_drm_private *priv = NULL;
2091         struct dpu_kms *kms = NULL;
2092         int i;
2093
2094         priv = dev->dev_private;
2095         kms = to_dpu_kms(priv->kms);
2096
2097         dpu_crtc = kzalloc(sizeof(*dpu_crtc), GFP_KERNEL);
2098         if (!dpu_crtc)
2099                 return ERR_PTR(-ENOMEM);
2100
2101         crtc = &dpu_crtc->base;
2102         crtc->dev = dev;
2103
2104         mutex_init(&dpu_crtc->crtc_lock);
2105         spin_lock_init(&dpu_crtc->spin_lock);
2106         atomic_set(&dpu_crtc->frame_pending, 0);
2107
2108         mutex_init(&dpu_crtc->rp_lock);
2109         INIT_LIST_HEAD(&dpu_crtc->rp_head);
2110
2111         init_completion(&dpu_crtc->frame_done_comp);
2112
2113         INIT_LIST_HEAD(&dpu_crtc->frame_event_list);
2114
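        /* seed the free list with the statically allocated frame events */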
2115         for (i = 0; i < ARRAY_SIZE(dpu_crtc->frame_events); i++) {
2116                 INIT_LIST_HEAD(&dpu_crtc->frame_events[i].list);
2117                 list_add(&dpu_crtc->frame_events[i].list,
2118                                 &dpu_crtc->frame_event_list);
2119                 kthread_init_work(&dpu_crtc->frame_events[i].work,
2120                                 dpu_crtc_frame_event_work);
2121         }
2122
2123         drm_crtc_init_with_planes(dev, crtc, plane, NULL, &dpu_crtc_funcs,
2124                                 NULL);
2125
2126         drm_crtc_helper_add(crtc, &dpu_crtc_helper_funcs);
2127
2128         /* save user friendly CRTC name for later */
2129         snprintf(dpu_crtc->name, DPU_CRTC_NAME_SIZE, "crtc%u", crtc->base.id);
2130
2131         /* initialize event handling */
2132         spin_lock_init(&dpu_crtc->event_lock);
2133
2134         dpu_crtc->phandle = &kms->phandle;
2135
2136         DPU_DEBUG("%s: successfully initialized crtc\n", dpu_crtc->name);
2137         return crtc;
2138 }