drivers/gpu/drm/i915/intel_dram.c
// SPDX-License-Identifier: MIT
/*
 * Copyright © 2020 Intel Corporation
 */

/*
 * Detection of the system DRAM configuration (type, number of channels,
 * DIMM sizes and ranks) and of the eDRAM size, consumed elsewhere in the
 * driver, e.g. for watermark and memory bandwidth calculations.
 */

#include "i915_drv.h"
#include "intel_dram.h"
#include "intel_sideband.h"

struct dram_dimm_info {
        u16 size; /* total DIMM size in Gb */
        u8 width, ranks; /* DRAM device width in bits, number of ranks */
};

struct dram_channel_info {
        struct dram_dimm_info dimm_l, dimm_s; /* DIMM slots L and S */
        u8 ranks;
        bool is_16gb_dimm;
};

#define DRAM_TYPE_STR(type) [INTEL_DRAM_ ## type] = #type
static const char *intel_dram_type_str(enum intel_dram_type type)
{
        static const char * const str[] = {
                DRAM_TYPE_STR(UNKNOWN),
                DRAM_TYPE_STR(DDR3),
                DRAM_TYPE_STR(DDR4),
                DRAM_TYPE_STR(LPDDR3),
                DRAM_TYPE_STR(LPDDR4),
        };

        if (type >= ARRAY_SIZE(str))
                type = INTEL_DRAM_UNKNOWN;

        return str[type];
}

#undef DRAM_TYPE_STR

/*
 * A rank is 64 bits wide, so dividing by the per-device width gives the
 * number of DRAM devices on the DIMM.
 */
static int intel_dimm_num_devices(const struct dram_dimm_info *dimm)
{
        return dimm->ranks * 64 / (dimm->width ?: 1);
}

/* Returns total Gb for the whole DIMM */
static int skl_get_dimm_size(u16 val)
{
        return (val & SKL_DRAM_SIZE_MASK) * 8;
}

static int skl_get_dimm_width(u16 val)
{
        if (skl_get_dimm_size(val) == 0)
                return 0;

        switch (val & SKL_DRAM_WIDTH_MASK) {
        case SKL_DRAM_WIDTH_X8:
        case SKL_DRAM_WIDTH_X16:
        case SKL_DRAM_WIDTH_X32:
                val = (val & SKL_DRAM_WIDTH_MASK) >> SKL_DRAM_WIDTH_SHIFT;
                return 8 << val;
        default:
                MISSING_CASE(val);
                return 0;
        }
}

static int skl_get_dimm_ranks(u16 val)
{
        if (skl_get_dimm_size(val) == 0)
                return 0;

        val = (val & SKL_DRAM_RANK_MASK) >> SKL_DRAM_RANK_SHIFT;

        return val + 1;
}

/* Returns total Gb for the whole DIMM */
static int cnl_get_dimm_size(u16 val)
{
        return (val & CNL_DRAM_SIZE_MASK) * 8 / 2;
}

static int cnl_get_dimm_width(u16 val)
{
        if (cnl_get_dimm_size(val) == 0)
                return 0;

        switch (val & CNL_DRAM_WIDTH_MASK) {
        case CNL_DRAM_WIDTH_X8:
        case CNL_DRAM_WIDTH_X16:
        case CNL_DRAM_WIDTH_X32:
                val = (val & CNL_DRAM_WIDTH_MASK) >> CNL_DRAM_WIDTH_SHIFT;
                return 8 << val;
        default:
                MISSING_CASE(val);
                return 0;
        }
}

static int cnl_get_dimm_ranks(u16 val)
{
        if (cnl_get_dimm_size(val) == 0)
                return 0;

        val = (val & CNL_DRAM_RANK_MASK) >> CNL_DRAM_RANK_SHIFT;

        return val + 1;
}

static bool
skl_is_16gb_dimm(const struct dram_dimm_info *dimm)
{
        /* Convert total Gb to Gb per DRAM device */
        return dimm->size / (intel_dimm_num_devices(dimm) ?: 1) == 16;
}

static void
skl_dram_get_dimm_info(struct drm_i915_private *i915,
                       struct dram_dimm_info *dimm,
                       int channel, char dimm_name, u16 val)
{
        if (INTEL_GEN(i915) >= 10) {
                dimm->size = cnl_get_dimm_size(val);
                dimm->width = cnl_get_dimm_width(val);
                dimm->ranks = cnl_get_dimm_ranks(val);
        } else {
                dimm->size = skl_get_dimm_size(val);
                dimm->width = skl_get_dimm_width(val);
                dimm->ranks = skl_get_dimm_ranks(val);
        }

        drm_dbg_kms(&i915->drm,
                    "CH%u DIMM %c size: %u Gb, width: X%u, ranks: %u, 16Gb DIMMs: %s\n",
                    channel, dimm_name, dimm->size, dimm->width, dimm->ranks,
                    yesno(skl_is_16gb_dimm(dimm)));
}

static int
skl_dram_get_channel_info(struct drm_i915_private *i915,
                          struct dram_channel_info *ch,
                          int channel, u32 val)
{
        skl_dram_get_dimm_info(i915, &ch->dimm_l,
                               channel, 'L', val & 0xffff);
        skl_dram_get_dimm_info(i915, &ch->dimm_s,
                               channel, 'S', val >> 16);

        if (ch->dimm_l.size == 0 && ch->dimm_s.size == 0) {
                drm_dbg_kms(&i915->drm, "CH%u not populated\n", channel);
                return -EINVAL;
        }

        /* A pair of single rank DIMMs also counts as dual rank for the channel. */
        if (ch->dimm_l.ranks == 2 || ch->dimm_s.ranks == 2)
                ch->ranks = 2;
        else if (ch->dimm_l.ranks == 1 && ch->dimm_s.ranks == 1)
                ch->ranks = 2;
        else
                ch->ranks = 1;

        ch->is_16gb_dimm = skl_is_16gb_dimm(&ch->dimm_l) ||
                skl_is_16gb_dimm(&ch->dimm_s);

        drm_dbg_kms(&i915->drm, "CH%u ranks: %u, 16Gb DIMMs: %s\n",
                    channel, ch->ranks, yesno(ch->is_16gb_dimm));

        return 0;
}

/*
 * The configuration is symmetric when both channels are identical and,
 * within a channel, DIMM S is either unpopulated or identical to DIMM L.
 */
static bool
intel_is_dram_symmetric(const struct dram_channel_info *ch0,
                        const struct dram_channel_info *ch1)
{
        return !memcmp(ch0, ch1, sizeof(*ch0)) &&
                (ch0->dimm_s.size == 0 ||
                 !memcmp(&ch0->dimm_l, &ch0->dimm_s, sizeof(ch0->dimm_l)));
}

static int
skl_dram_get_channels_info(struct drm_i915_private *i915)
{
        struct dram_info *dram_info = &i915->dram_info;
        struct dram_channel_info ch0 = {}, ch1 = {};
        u32 val;
        int ret;

        val = intel_uncore_read(&i915->uncore,
                                SKL_MAD_DIMM_CH0_0_0_0_MCHBAR_MCMAIN);
        ret = skl_dram_get_channel_info(i915, &ch0, 0, val);
        if (ret == 0)
                dram_info->num_channels++;

        val = intel_uncore_read(&i915->uncore,
                                SKL_MAD_DIMM_CH1_0_0_0_MCHBAR_MCMAIN);
        ret = skl_dram_get_channel_info(i915, &ch1, 1, val);
        if (ret == 0)
                dram_info->num_channels++;

        if (dram_info->num_channels == 0) {
                drm_info(&i915->drm, "Number of memory channels is zero\n");
                return -EINVAL;
        }

        if (ch0.ranks == 0 && ch1.ranks == 0) {
                drm_info(&i915->drm, "couldn't get memory rank information\n");
                return -EINVAL;
        }

        dram_info->wm_lv_0_adjust_needed = ch0.is_16gb_dimm || ch1.is_16gb_dimm;

        dram_info->symmetric_memory = intel_is_dram_symmetric(&ch0, &ch1);

        drm_dbg_kms(&i915->drm, "Memory configuration is symmetric? %s\n",
                    yesno(dram_info->symmetric_memory));

        return 0;
}

static enum intel_dram_type
skl_get_dram_type(struct drm_i915_private *i915)
{
        u32 val;

        val = intel_uncore_read(&i915->uncore,
                                SKL_MAD_INTER_CHANNEL_0_0_0_MCHBAR_MCMAIN);

        switch (val & SKL_DRAM_DDR_TYPE_MASK) {
        case SKL_DRAM_DDR_TYPE_DDR3:
                return INTEL_DRAM_DDR3;
        case SKL_DRAM_DDR_TYPE_DDR4:
                return INTEL_DRAM_DDR4;
        case SKL_DRAM_DDR_TYPE_LPDDR3:
                return INTEL_DRAM_LPDDR3;
        case SKL_DRAM_DDR_TYPE_LPDDR4:
                return INTEL_DRAM_LPDDR4;
        default:
                MISSING_CASE(val);
                return INTEL_DRAM_UNKNOWN;
        }
}

static int
skl_get_dram_info(struct drm_i915_private *i915)
{
        struct dram_info *dram_info = &i915->dram_info;
        u32 mem_freq_khz, val;
        int ret;

        dram_info->type = skl_get_dram_type(i915);
        drm_dbg_kms(&i915->drm, "DRAM type: %s\n",
                    intel_dram_type_str(dram_info->type));

        ret = skl_dram_get_channels_info(i915);
        if (ret)
                return ret;

        val = intel_uncore_read(&i915->uncore,
                                SKL_MC_BIOS_DATA_0_0_0_MCHBAR_PCU);
        mem_freq_khz = DIV_ROUND_UP((val & SKL_REQ_DATA_MASK) *
                                    SKL_MEMORY_FREQ_MULTIPLIER_HZ, 1000);

        if (dram_info->num_channels * mem_freq_khz == 0) {
                drm_info(&i915->drm,
                         "Couldn't get system memory bandwidth\n");
                return -EINVAL;
        }

        return 0;
}

/* Returns Gb per DRAM device */
static int bxt_get_dimm_size(u32 val)
{
        switch (val & BXT_DRAM_SIZE_MASK) {
        case BXT_DRAM_SIZE_4GBIT:
                return 4;
        case BXT_DRAM_SIZE_6GBIT:
                return 6;
        case BXT_DRAM_SIZE_8GBIT:
                return 8;
        case BXT_DRAM_SIZE_12GBIT:
                return 12;
        case BXT_DRAM_SIZE_16GBIT:
                return 16;
        default:
                MISSING_CASE(val);
                return 0;
        }
}

static int bxt_get_dimm_width(u32 val)
{
        if (!bxt_get_dimm_size(val))
                return 0;

        val = (val & BXT_DRAM_WIDTH_MASK) >> BXT_DRAM_WIDTH_SHIFT;

        return 8 << val;
}

static int bxt_get_dimm_ranks(u32 val)
{
        if (!bxt_get_dimm_size(val))
                return 0;

        switch (val & BXT_DRAM_RANK_MASK) {
        case BXT_DRAM_RANK_SINGLE:
                return 1;
        case BXT_DRAM_RANK_DUAL:
                return 2;
        default:
                MISSING_CASE(val);
                return 0;
        }
}

static enum intel_dram_type bxt_get_dimm_type(u32 val)
{
        if (!bxt_get_dimm_size(val))
                return INTEL_DRAM_UNKNOWN;

        switch (val & BXT_DRAM_TYPE_MASK) {
        case BXT_DRAM_TYPE_DDR3:
                return INTEL_DRAM_DDR3;
        case BXT_DRAM_TYPE_LPDDR3:
                return INTEL_DRAM_LPDDR3;
        case BXT_DRAM_TYPE_DDR4:
                return INTEL_DRAM_DDR4;
        case BXT_DRAM_TYPE_LPDDR4:
                return INTEL_DRAM_LPDDR4;
        default:
                MISSING_CASE(val);
                return INTEL_DRAM_UNKNOWN;
        }
}

static void bxt_get_dimm_info(struct dram_dimm_info *dimm, u32 val)
{
        dimm->width = bxt_get_dimm_width(val);
        dimm->ranks = bxt_get_dimm_ranks(val);

        /*
         * Size in register is Gb per DRAM device. Convert to total
         * Gb to match the way we report this for non-LP platforms.
         */
        dimm->size = bxt_get_dimm_size(val) * intel_dimm_num_devices(dimm);
}

static int bxt_get_dram_info(struct drm_i915_private *i915)
{
        struct dram_info *dram_info = &i915->dram_info;
        u32 dram_channels;
        u32 mem_freq_khz, val;
        u8 num_active_channels, valid_ranks = 0;
        int i;

        val = intel_uncore_read(&i915->uncore, BXT_P_CR_MC_BIOS_REQ_0_0_0);
        mem_freq_khz = DIV_ROUND_UP((val & BXT_REQ_DATA_MASK) *
                                    BXT_MEMORY_FREQ_MULTIPLIER_HZ, 1000);

        dram_channels = val & BXT_DRAM_CHANNEL_ACTIVE_MASK;
        num_active_channels = hweight32(dram_channels);

        if (mem_freq_khz * num_active_channels == 0) {
                drm_info(&i915->drm,
                         "Couldn't get system memory bandwidth\n");
                return -EINVAL;
        }

        /*
         * Now read each DUNIT8/9/10/11 to check the ranks of each DIMM.
         */
        for (i = BXT_D_CR_DRP0_DUNIT_START; i <= BXT_D_CR_DRP0_DUNIT_END; i++) {
                struct dram_dimm_info dimm;
                enum intel_dram_type type;

                val = intel_uncore_read(&i915->uncore, BXT_D_CR_DRP0_DUNIT(i));
                if (val == 0xFFFFFFFF)
                        continue;

                dram_info->num_channels++;

                bxt_get_dimm_info(&dimm, val);
                type = bxt_get_dimm_type(val);

                drm_WARN_ON(&i915->drm, type != INTEL_DRAM_UNKNOWN &&
                            dram_info->type != INTEL_DRAM_UNKNOWN &&
                            dram_info->type != type);

                drm_dbg_kms(&i915->drm,
                            "CH%u DIMM size: %u Gb, width: X%u, ranks: %u, type: %s\n",
                            i - BXT_D_CR_DRP0_DUNIT_START,
                            dimm.size, dimm.width, dimm.ranks,
                            intel_dram_type_str(type));

                if (valid_ranks == 0)
                        valid_ranks = dimm.ranks;

                if (type != INTEL_DRAM_UNKNOWN)
                        dram_info->type = type;
        }

        if (dram_info->type == INTEL_DRAM_UNKNOWN || valid_ranks == 0) {
                drm_info(&i915->drm, "couldn't get memory information\n");
                return -EINVAL;
        }

        return 0;
}

static int icl_pcode_read_mem_global_info(struct drm_i915_private *dev_priv)
{
        struct dram_info *dram_info = &dev_priv->dram_info;
        u32 val = 0;
        int ret;

        ret = sandybridge_pcode_read(dev_priv,
                                     ICL_PCODE_MEM_SUBSYSYSTEM_INFO |
                                     ICL_PCODE_MEM_SS_READ_GLOBAL_INFO,
                                     &val, NULL);
        if (ret)
                return ret;

        if (IS_GEN(dev_priv, 12)) {
                switch (val & 0xf) {
                case 0:
                        dram_info->type = INTEL_DRAM_DDR4;
                        break;
                case 1:
                        dram_info->type = INTEL_DRAM_DDR5;
                        break;
                case 2:
                        dram_info->type = INTEL_DRAM_LPDDR5;
                        break;
                case 3:
                        dram_info->type = INTEL_DRAM_LPDDR4;
                        break;
                case 4:
                        dram_info->type = INTEL_DRAM_DDR3;
                        break;
                case 5:
                        dram_info->type = INTEL_DRAM_LPDDR3;
                        break;
                default:
                        MISSING_CASE(val & 0xf);
                        return -1;
                }
        } else {
                switch (val & 0xf) {
                case 0:
                        dram_info->type = INTEL_DRAM_DDR4;
                        break;
                case 1:
                        dram_info->type = INTEL_DRAM_DDR3;
                        break;
                case 2:
                        dram_info->type = INTEL_DRAM_LPDDR3;
                        break;
                case 3:
                        dram_info->type = INTEL_DRAM_LPDDR4;
                        break;
                default:
                        MISSING_CASE(val & 0xf);
                        return -1;
                }
        }

        dram_info->num_channels = (val & 0xf0) >> 4;
        dram_info->num_qgv_points = (val & 0xf00) >> 8;

        return 0;
}

static int gen11_get_dram_info(struct drm_i915_private *i915)
{
        int ret = skl_get_dram_info(i915);

        if (ret)
                return ret;

        return icl_pcode_read_mem_global_info(i915);
}

static int gen12_get_dram_info(struct drm_i915_private *i915)
{
        /* Always needed for GEN12+ */
        i915->dram_info.wm_lv_0_adjust_needed = true;

        return icl_pcode_read_mem_global_info(i915);
}

void intel_dram_detect(struct drm_i915_private *i915)
{
        struct dram_info *dram_info = &i915->dram_info;
        int ret;

        /*
         * Assume level 0 watermark latency adjustment is needed until proven
         * otherwise; this w/a is not needed by bxt/glk.
         */
        dram_info->wm_lv_0_adjust_needed = !IS_GEN9_LP(i915);

        if (INTEL_GEN(i915) < 9 || !HAS_DISPLAY(i915))
                return;

        if (INTEL_GEN(i915) >= 12)
                ret = gen12_get_dram_info(i915);
        else if (INTEL_GEN(i915) >= 11)
                ret = gen11_get_dram_info(i915);
        else if (IS_GEN9_LP(i915))
                ret = bxt_get_dram_info(i915);
        else
                ret = skl_get_dram_info(i915);
        if (ret)
                return;

        drm_dbg_kms(&i915->drm, "DRAM channels: %u\n", dram_info->num_channels);

        drm_dbg_kms(&i915->drm, "Watermark level 0 adjustment needed: %s\n",
                    yesno(dram_info->wm_lv_0_adjust_needed));
}

static u32 gen9_edram_size_mb(struct drm_i915_private *i915, u32 cap)
{
        static const u8 ways[8] = { 4, 8, 12, 16, 16, 16, 16, 16 };
        static const u8 sets[4] = { 1, 1, 2, 2 };

        return EDRAM_NUM_BANKS(cap) *
                ways[EDRAM_WAYS_IDX(cap)] *
                sets[EDRAM_SETS_IDX(cap)];
}

void intel_dram_edram_detect(struct drm_i915_private *i915)
{
        u32 edram_cap = 0;

        if (!(IS_HASWELL(i915) || IS_BROADWELL(i915) || INTEL_GEN(i915) >= 9))
                return;

        edram_cap = __raw_uncore_read32(&i915->uncore, HSW_EDRAM_CAP);

        /* NB: We can't write IDICR yet because we don't have gt funcs set up */

        if (!(edram_cap & EDRAM_ENABLED))
                return;

        /*
         * The needed capability bits for size calculation are not there with
         * pre gen9 so return 128MB always.
         */
        if (INTEL_GEN(i915) < 9)
                i915->edram_size_mb = 128;
        else
                i915->edram_size_mb = gen9_edram_size_mb(i915, edram_cap);

        drm_info(&i915->drm, "Found %uMB of eDRAM\n", i915->edram_size_mb);
}