amdgpu/nv.c - Optimize code for video codec support structure
[linux-2.6-microblaze.git] drivers/gpu/drm/amd/amdgpu/nv.c
1 /*
2  * Copyright 2019 Advanced Micro Devices, Inc.
3  *
4  * Permission is hereby granted, free of charge, to any person obtaining a
5  * copy of this software and associated documentation files (the "Software"),
6  * to deal in the Software without restriction, including without limitation
7  * the rights to use, copy, modify, merge, publish, distribute, sublicense,
8  * and/or sell copies of the Software, and to permit persons to whom the
9  * Software is furnished to do so, subject to the following conditions:
10  *
11  * The above copyright notice and this permission notice shall be included in
12  * all copies or substantial portions of the Software.
13  *
14  * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
15  * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
16  * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
17  * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
18  * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
19  * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
20  * OTHER DEALINGS IN THE SOFTWARE.
21  *
22  */
23 #include <linux/firmware.h>
24 #include <linux/slab.h>
25 #include <linux/module.h>
26 #include <linux/pci.h>
27
28 #include <drm/amdgpu_drm.h>
29
30 #include "amdgpu.h"
31 #include "amdgpu_atombios.h"
32 #include "amdgpu_ih.h"
33 #include "amdgpu_uvd.h"
34 #include "amdgpu_vce.h"
35 #include "amdgpu_ucode.h"
36 #include "amdgpu_psp.h"
37 #include "atom.h"
38 #include "amd_pcie.h"
39
40 #include "gc/gc_10_1_0_offset.h"
41 #include "gc/gc_10_1_0_sh_mask.h"
42 #include "mp/mp_11_0_offset.h"
43
44 #include "soc15.h"
45 #include "soc15_common.h"
46 #include "gmc_v10_0.h"
47 #include "gfxhub_v2_0.h"
48 #include "mmhub_v2_0.h"
49 #include "nbio_v2_3.h"
50 #include "nbio_v7_2.h"
51 #include "hdp_v5_0.h"
52 #include "nv.h"
53 #include "navi10_ih.h"
54 #include "gfx_v10_0.h"
55 #include "sdma_v5_0.h"
56 #include "sdma_v5_2.h"
57 #include "vcn_v2_0.h"
58 #include "jpeg_v2_0.h"
59 #include "vcn_v3_0.h"
60 #include "jpeg_v3_0.h"
61 #include "dce_virtual.h"
62 #include "mes_v10_1.h"
63 #include "mxgpu_nv.h"
64 #include "smuio_v11_0.h"
65 #include "smuio_v11_0_6.h"
66
67 static const struct amd_ip_funcs nv_common_ip_funcs;
68
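/*
 * Each codec_info_build() entry records the codec type, the maximum
 * supported width and height in pixels and the maximum codec level.
 * These tables are handed back to userspace through the
 * AMDGPU_INFO_VIDEO_CAPS query.
 */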
69 /* Navi */
70 static const struct amdgpu_video_codec_info nv_video_codecs_encode_array[] =
71 {
72         {codec_info_build(AMDGPU_INFO_VIDEO_CAPS_CODEC_IDX_MPEG4_AVC, 4096, 2304, 0)},
73         {codec_info_build(AMDGPU_INFO_VIDEO_CAPS_CODEC_IDX_HEVC, 4096, 2304, 0)},
74 };
75
76 static const struct amdgpu_video_codecs nv_video_codecs_encode =
77 {
78         .codec_count = ARRAY_SIZE(nv_video_codecs_encode_array),
79         .codec_array = nv_video_codecs_encode_array,
80 };
81
82 /* Navi1x */
83 static const struct amdgpu_video_codec_info nv_video_codecs_decode_array[] =
84 {
85         {codec_info_build(AMDGPU_INFO_VIDEO_CAPS_CODEC_IDX_MPEG2, 4096, 4096, 3)},
86         {codec_info_build(AMDGPU_INFO_VIDEO_CAPS_CODEC_IDX_MPEG4, 4096, 4096, 5)},
87         {codec_info_build(AMDGPU_INFO_VIDEO_CAPS_CODEC_IDX_MPEG4_AVC, 4096, 4096, 52)},
88         {codec_info_build(AMDGPU_INFO_VIDEO_CAPS_CODEC_IDX_VC1, 4096, 4096, 4)},
89         {codec_info_build(AMDGPU_INFO_VIDEO_CAPS_CODEC_IDX_HEVC, 8192, 4352, 186)},
90         {codec_info_build(AMDGPU_INFO_VIDEO_CAPS_CODEC_IDX_JPEG, 4096, 4096, 0)},
91         {codec_info_build(AMDGPU_INFO_VIDEO_CAPS_CODEC_IDX_VP9, 8192, 4352, 0)},
92 };
93
94 static const struct amdgpu_video_codecs nv_video_codecs_decode =
95 {
96         .codec_count = ARRAY_SIZE(nv_video_codecs_decode_array),
97         .codec_array = nv_video_codecs_decode_array,
98 };
99
100 /* Sienna Cichlid */
101 static const struct amdgpu_video_codec_info sc_video_codecs_decode_array[] =
102 {
103         {codec_info_build(AMDGPU_INFO_VIDEO_CAPS_CODEC_IDX_MPEG2, 4096, 4096, 3)},
104         {codec_info_build(AMDGPU_INFO_VIDEO_CAPS_CODEC_IDX_MPEG4, 4096, 4096, 5)},
105         {codec_info_build(AMDGPU_INFO_VIDEO_CAPS_CODEC_IDX_MPEG4_AVC, 4096, 4096, 52)},
106         {codec_info_build(AMDGPU_INFO_VIDEO_CAPS_CODEC_IDX_VC1, 4096, 4096, 4)},
107         {codec_info_build(AMDGPU_INFO_VIDEO_CAPS_CODEC_IDX_HEVC, 8192, 4352, 186)},
108         {codec_info_build(AMDGPU_INFO_VIDEO_CAPS_CODEC_IDX_JPEG, 4096, 4096, 0)},
109         {codec_info_build(AMDGPU_INFO_VIDEO_CAPS_CODEC_IDX_VP9, 8192, 4352, 0)},
110         {codec_info_build(AMDGPU_INFO_VIDEO_CAPS_CODEC_IDX_AV1, 8192, 4352, 0)},
111 };
112
113 static const struct amdgpu_video_codecs sc_video_codecs_decode =
114 {
115         .codec_count = ARRAY_SIZE(sc_video_codecs_decode_array),
116         .codec_array = sc_video_codecs_decode_array,
117 };
118
119 /* SRIOV Sienna Cichlid, not const since data is controlled by host */
120 static struct amdgpu_video_codec_info sriov_sc_video_codecs_encode_array[] =
121 {
122         {codec_info_build(AMDGPU_INFO_VIDEO_CAPS_CODEC_IDX_MPEG4_AVC, 4096, 2304, 0)},
123         {codec_info_build(AMDGPU_INFO_VIDEO_CAPS_CODEC_IDX_HEVC, 4096, 2304, 0)},
124 };
125
126 static struct amdgpu_video_codec_info sriov_sc_video_codecs_decode_array[] =
127 {
128         {codec_info_build(AMDGPU_INFO_VIDEO_CAPS_CODEC_IDX_MPEG2, 4096, 4096, 3)},
129         {codec_info_build(AMDGPU_INFO_VIDEO_CAPS_CODEC_IDX_MPEG4, 4096, 4096, 5)},
130         {codec_info_build(AMDGPU_INFO_VIDEO_CAPS_CODEC_IDX_MPEG4_AVC, 4096, 4096, 52)},
131         {codec_info_build(AMDGPU_INFO_VIDEO_CAPS_CODEC_IDX_VC1, 4096, 4096, 4)},
132         {codec_info_build(AMDGPU_INFO_VIDEO_CAPS_CODEC_IDX_HEVC, 8192, 4352, 186)},
133         {codec_info_build(AMDGPU_INFO_VIDEO_CAPS_CODEC_IDX_JPEG, 4096, 4096, 0)},
134         {codec_info_build(AMDGPU_INFO_VIDEO_CAPS_CODEC_IDX_VP9, 8192, 4352, 0)},
135         {codec_info_build(AMDGPU_INFO_VIDEO_CAPS_CODEC_IDX_AV1, 8192, 4352, 0)},
136 };
137
138 static struct amdgpu_video_codecs sriov_sc_video_codecs_encode =
139 {
140         .codec_count = ARRAY_SIZE(sriov_sc_video_codecs_encode_array),
141         .codec_array = sriov_sc_video_codecs_encode_array,
142 };
143
144 static struct amdgpu_video_codecs sriov_sc_video_codecs_decode =
145 {
146         .codec_count = ARRAY_SIZE(sriov_sc_video_codecs_decode_array),
147         .codec_array = sriov_sc_video_codecs_decode_array,
148 };
149
150 /* Beige Goby */
151 static const struct amdgpu_video_codec_info bg_video_codecs_decode_array[] = {
152         {codec_info_build(AMDGPU_INFO_VIDEO_CAPS_CODEC_IDX_MPEG4_AVC, 4096, 4096, 52)},
153         {codec_info_build(AMDGPU_INFO_VIDEO_CAPS_CODEC_IDX_HEVC, 8192, 4352, 186)},
154         {codec_info_build(AMDGPU_INFO_VIDEO_CAPS_CODEC_IDX_VP9, 8192, 4352, 0)},
155 };
156
157 static const struct amdgpu_video_codecs bg_video_codecs_decode = {
158         .codec_count = ARRAY_SIZE(bg_video_codecs_decode_array),
159         .codec_array = bg_video_codecs_decode_array,
160 };
161
162 static const struct amdgpu_video_codecs bg_video_codecs_encode = {
163         .codec_count = 0,
164         .codec_array = NULL,
165 };
166
167 /* Yellow Carp */
168 static const struct amdgpu_video_codec_info yc_video_codecs_decode_array[] = {
169         {codec_info_build(AMDGPU_INFO_VIDEO_CAPS_CODEC_IDX_MPEG4_AVC, 4096, 4096, 52)},
170         {codec_info_build(AMDGPU_INFO_VIDEO_CAPS_CODEC_IDX_HEVC, 8192, 4352, 186)},
171         {codec_info_build(AMDGPU_INFO_VIDEO_CAPS_CODEC_IDX_VP9, 8192, 4352, 0)},
172         {codec_info_build(AMDGPU_INFO_VIDEO_CAPS_CODEC_IDX_JPEG, 4096, 4096, 0)},
173 };
174
175 static const struct amdgpu_video_codecs yc_video_codecs_decode = {
176         .codec_count = ARRAY_SIZE(yc_video_codecs_decode_array),
177         .codec_array = yc_video_codecs_decode_array,
178 };
179
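/*
 * Return the decode or encode capability table for the current ASIC.
 * SR-IOV Sienna Cichlid uses the host-controlled tables above; ASICs
 * that are not handled here get -EINVAL, i.e. no video caps reported.
 */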
180 static int nv_query_video_codecs(struct amdgpu_device *adev, bool encode,
181                                  const struct amdgpu_video_codecs **codecs)
182 {
183         switch (adev->asic_type) {
184         case CHIP_SIENNA_CICHLID:
185                 if (amdgpu_sriov_vf(adev)) {
186                         if (encode)
187                                 *codecs = &sriov_sc_video_codecs_encode;
188                         else
189                                 *codecs = &sriov_sc_video_codecs_decode;
190                 } else {
191                         if (encode)
192                                 *codecs = &nv_video_codecs_encode;
193                         else
194                                 *codecs = &sc_video_codecs_decode;
195                 }
196                 return 0;
197         case CHIP_NAVY_FLOUNDER:
198         case CHIP_DIMGREY_CAVEFISH:
199         case CHIP_VANGOGH:
200                 if (encode)
201                         *codecs = &nv_video_codecs_encode;
202                 else
203                         *codecs = &sc_video_codecs_decode;
204                 return 0;
205         case CHIP_YELLOW_CARP:
206                 if (encode)
207                         *codecs = &nv_video_codecs_encode;
208                 else
209                         *codecs = &yc_video_codecs_decode;
210                 return 0;
211         case CHIP_BEIGE_GOBY:
212                 if (encode)
213                         *codecs = &bg_video_codecs_encode;
214                 else
215                         *codecs = &bg_video_codecs_decode;
216                 return 0;
217         case CHIP_NAVI10:
218         case CHIP_NAVI14:
219         case CHIP_NAVI12:
220                 if (encode)
221                         *codecs = &nv_video_codecs_encode;
222                 else
223                         *codecs = &nv_video_codecs_decode;
224                 return 0;
225         default:
226                 return -EINVAL;
227         }
228 }
229
230 /*
231  * Indirect register accessors (index/data register pairs)
232  */
233 static u32 nv_pcie_rreg(struct amdgpu_device *adev, u32 reg)
234 {
235         unsigned long address, data;
236         address = adev->nbio.funcs->get_pcie_index_offset(adev);
237         data = adev->nbio.funcs->get_pcie_data_offset(adev);
238
239         return amdgpu_device_indirect_rreg(adev, address, data, reg);
240 }
241
242 static void nv_pcie_wreg(struct amdgpu_device *adev, u32 reg, u32 v)
243 {
244         unsigned long address, data;
245
246         address = adev->nbio.funcs->get_pcie_index_offset(adev);
247         data = adev->nbio.funcs->get_pcie_data_offset(adev);
248
249         amdgpu_device_indirect_wreg(adev, address, data, reg, v);
250 }
251
252 static u64 nv_pcie_rreg64(struct amdgpu_device *adev, u32 reg)
253 {
254         unsigned long address, data;
255         address = adev->nbio.funcs->get_pcie_index_offset(adev);
256         data = adev->nbio.funcs->get_pcie_data_offset(adev);
257
258         return amdgpu_device_indirect_rreg64(adev, address, data, reg);
259 }
260
261 static u32 nv_pcie_port_rreg(struct amdgpu_device *adev, u32 reg)
262 {
263         unsigned long flags, address, data;
264         u32 r;
265         address = adev->nbio.funcs->get_pcie_port_index_offset(adev);
266         data = adev->nbio.funcs->get_pcie_port_data_offset(adev);
267
268         spin_lock_irqsave(&adev->pcie_idx_lock, flags);
269         WREG32(address, reg * 4);
270         (void)RREG32(address);
271         r = RREG32(data);
272         spin_unlock_irqrestore(&adev->pcie_idx_lock, flags);
273         return r;
274 }
275
276 static void nv_pcie_wreg64(struct amdgpu_device *adev, u32 reg, u64 v)
277 {
278         unsigned long address, data;
279
280         address = adev->nbio.funcs->get_pcie_index_offset(adev);
281         data = adev->nbio.funcs->get_pcie_data_offset(adev);
282
283         amdgpu_device_indirect_wreg64(adev, address, data, reg, v);
284 }
285
286 static void nv_pcie_port_wreg(struct amdgpu_device *adev, u32 reg, u32 v)
287 {
288         unsigned long flags, address, data;
289
290         address = adev->nbio.funcs->get_pcie_port_index_offset(adev);
291         data = adev->nbio.funcs->get_pcie_port_data_offset(adev);
292
293         spin_lock_irqsave(&adev->pcie_idx_lock, flags);
294         WREG32(address, reg * 4);
295         (void)RREG32(address);
296         WREG32(data, v);
297         (void)RREG32(data);
298         spin_unlock_irqrestore(&adev->pcie_idx_lock, flags);
299 }
300
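/*
 * DIDT registers are reached through the GC DIDT_IND_INDEX/DIDT_IND_DATA
 * pair; didt_idx_lock serializes the index write and the data access.
 */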
301 static u32 nv_didt_rreg(struct amdgpu_device *adev, u32 reg)
302 {
303         unsigned long flags, address, data;
304         u32 r;
305
306         address = SOC15_REG_OFFSET(GC, 0, mmDIDT_IND_INDEX);
307         data = SOC15_REG_OFFSET(GC, 0, mmDIDT_IND_DATA);
308
309         spin_lock_irqsave(&adev->didt_idx_lock, flags);
310         WREG32(address, (reg));
311         r = RREG32(data);
312         spin_unlock_irqrestore(&adev->didt_idx_lock, flags);
313         return r;
314 }
315
316 static void nv_didt_wreg(struct amdgpu_device *adev, u32 reg, u32 v)
317 {
318         unsigned long flags, address, data;
319
320         address = SOC15_REG_OFFSET(GC, 0, mmDIDT_IND_INDEX);
321         data = SOC15_REG_OFFSET(GC, 0, mmDIDT_IND_DATA);
322
323         spin_lock_irqsave(&adev->didt_idx_lock, flags);
324         WREG32(address, (reg));
325         WREG32(data, (v));
326         spin_unlock_irqrestore(&adev->didt_idx_lock, flags);
327 }
328
329 static u32 nv_get_config_memsize(struct amdgpu_device *adev)
330 {
331         return adev->nbio.funcs->get_memsize(adev);
332 }
333
334 static u32 nv_get_xclk(struct amdgpu_device *adev)
335 {
336         return adev->clock.spll.reference_freq;
337 }
338
339
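/*
 * Program GRBM_GFX_CNTL so that subsequent GRBM register accesses are
 * routed to the selected micro engine (me), pipe, queue and VMID.
 */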
340 void nv_grbm_select(struct amdgpu_device *adev,
341                      u32 me, u32 pipe, u32 queue, u32 vmid)
342 {
343         u32 grbm_gfx_cntl = 0;
344         grbm_gfx_cntl = REG_SET_FIELD(grbm_gfx_cntl, GRBM_GFX_CNTL, PIPEID, pipe);
345         grbm_gfx_cntl = REG_SET_FIELD(grbm_gfx_cntl, GRBM_GFX_CNTL, MEID, me);
346         grbm_gfx_cntl = REG_SET_FIELD(grbm_gfx_cntl, GRBM_GFX_CNTL, VMID, vmid);
347         grbm_gfx_cntl = REG_SET_FIELD(grbm_gfx_cntl, GRBM_GFX_CNTL, QUEUEID, queue);
348
349         WREG32_SOC15(GC, 0, mmGRBM_GFX_CNTL, grbm_gfx_cntl);
350 }
351
352 static void nv_vga_set_state(struct amdgpu_device *adev, bool state)
353 {
354         /* todo */
355 }
356
357 static bool nv_read_disabled_bios(struct amdgpu_device *adev)
358 {
359         /* todo */
360         return false;
361 }
362
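/*
 * Read the discrete GPU VBIOS image dword by dword through the SMUIO
 * ROM index/data pair; APUs bail out early since their VBIOS lives in
 * the system BIOS image.
 */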
363 static bool nv_read_bios_from_rom(struct amdgpu_device *adev,
364                                   u8 *bios, u32 length_bytes)
365 {
366         u32 *dw_ptr;
367         u32 i, length_dw;
368         u32 rom_index_offset, rom_data_offset;
369
370         if (bios == NULL)
371                 return false;
372         if (length_bytes == 0)
373                 return false;
374         /* APU vbios image is part of sbios image */
375         if (adev->flags & AMD_IS_APU)
376                 return false;
377
378         dw_ptr = (u32 *)bios;
379         length_dw = ALIGN(length_bytes, 4) / 4;
380
381         rom_index_offset =
382                 adev->smuio.funcs->get_rom_index_offset(adev);
383         rom_data_offset =
384                 adev->smuio.funcs->get_rom_data_offset(adev);
385
386         /* set rom index to 0 */
387         WREG32(rom_index_offset, 0);
388         /* read out the rom data */
389         for (i = 0; i < length_dw; i++)
390                 dw_ptr[i] = RREG32(rom_data_offset);
391
392         return true;
393 }
394
395 static struct soc15_allowed_register_entry nv_allowed_read_registers[] = {
396         { SOC15_REG_ENTRY(GC, 0, mmGRBM_STATUS)},
397         { SOC15_REG_ENTRY(GC, 0, mmGRBM_STATUS2)},
398         { SOC15_REG_ENTRY(GC, 0, mmGRBM_STATUS_SE0)},
399         { SOC15_REG_ENTRY(GC, 0, mmGRBM_STATUS_SE1)},
400         { SOC15_REG_ENTRY(GC, 0, mmGRBM_STATUS_SE2)},
401         { SOC15_REG_ENTRY(GC, 0, mmGRBM_STATUS_SE3)},
402         { SOC15_REG_ENTRY(SDMA0, 0, mmSDMA0_STATUS_REG)},
403         { SOC15_REG_ENTRY(SDMA1, 0, mmSDMA1_STATUS_REG)},
404         { SOC15_REG_ENTRY(GC, 0, mmCP_STAT)},
405         { SOC15_REG_ENTRY(GC, 0, mmCP_STALLED_STAT1)},
406         { SOC15_REG_ENTRY(GC, 0, mmCP_STALLED_STAT2)},
407         { SOC15_REG_ENTRY(GC, 0, mmCP_STALLED_STAT3)},
408         { SOC15_REG_ENTRY(GC, 0, mmCP_CPF_BUSY_STAT)},
409         { SOC15_REG_ENTRY(GC, 0, mmCP_CPF_STALLED_STAT1)},
410         { SOC15_REG_ENTRY(GC, 0, mmCP_CPF_STATUS)},
411         { SOC15_REG_ENTRY(GC, 0, mmCP_CPC_BUSY_STAT)},
412         { SOC15_REG_ENTRY(GC, 0, mmCP_CPC_STALLED_STAT1)},
413         { SOC15_REG_ENTRY(GC, 0, mmCP_CPC_STATUS)},
414         { SOC15_REG_ENTRY(GC, 0, mmGB_ADDR_CONFIG)},
415 };
416
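/*
 * For GRBM-indexed registers the target shader engine/array has to be
 * selected first; grbm_idx_mutex keeps the select/read/restore sequence
 * atomic with respect to other users of the index.
 */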
417 static uint32_t nv_read_indexed_register(struct amdgpu_device *adev, u32 se_num,
418                                          u32 sh_num, u32 reg_offset)
419 {
420         uint32_t val;
421
422         mutex_lock(&adev->grbm_idx_mutex);
423         if (se_num != 0xffffffff || sh_num != 0xffffffff)
424                 amdgpu_gfx_select_se_sh(adev, se_num, sh_num, 0xffffffff);
425
426         val = RREG32(reg_offset);
427
428         if (se_num != 0xffffffff || sh_num != 0xffffffff)
429                 amdgpu_gfx_select_se_sh(adev, 0xffffffff, 0xffffffff, 0xffffffff);
430         mutex_unlock(&adev->grbm_idx_mutex);
431         return val;
432 }
433
434 static uint32_t nv_get_register_value(struct amdgpu_device *adev,
435                                       bool indexed, u32 se_num,
436                                       u32 sh_num, u32 reg_offset)
437 {
438         if (indexed) {
439                 return nv_read_indexed_register(adev, se_num, sh_num, reg_offset);
440         } else {
441                 if (reg_offset == SOC15_REG_OFFSET(GC, 0, mmGB_ADDR_CONFIG))
442                         return adev->gfx.config.gb_addr_config;
443                 return RREG32(reg_offset);
444         }
445 }
446
447 static int nv_read_register(struct amdgpu_device *adev, u32 se_num,
448                             u32 sh_num, u32 reg_offset, u32 *value)
449 {
450         uint32_t i;
451         struct soc15_allowed_register_entry  *en;
452
453         *value = 0;
454         for (i = 0; i < ARRAY_SIZE(nv_allowed_read_registers); i++) {
455                 en = &nv_allowed_read_registers[i];
456                 if ((i == 7 && (adev->sdma.num_instances == 1)) || /* some asics don't have SDMA1 */
457                     reg_offset !=
458                     (adev->reg_offset[en->hwip][en->inst][en->seg] + en->reg_offset))
459                         continue;
460
461                 *value = nv_get_register_value(adev,
462                                                nv_allowed_read_registers[i].grbm_indexed,
463                                                se_num, sh_num, reg_offset);
464                 return 0;
465         }
466         return -EINVAL;
467 }
468
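/*
 * Mode2 reset: stop bus mastering, cache the PCI config space, request
 * the reset from the power management firmware via amdgpu_dpm_mode2_reset(),
 * restore the config space and poll the memory size register until the
 * ASIC responds again.
 */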
469 static int nv_asic_mode2_reset(struct amdgpu_device *adev)
470 {
471         u32 i;
472         int ret = 0;
473
474         amdgpu_atombios_scratch_regs_engine_hung(adev, true);
475
476         /* disable BM */
477         pci_clear_master(adev->pdev);
478
479         amdgpu_device_cache_pci_state(adev->pdev);
480
481         ret = amdgpu_dpm_mode2_reset(adev);
482         if (ret)
483                 dev_err(adev->dev, "GPU mode2 reset failed\n");
484
485         amdgpu_device_load_pci_state(adev->pdev);
486
487         /* wait for asic to come out of reset */
488         for (i = 0; i < adev->usec_timeout; i++) {
489                 u32 memsize = adev->nbio.funcs->get_memsize(adev);
490
491                 if (memsize != 0xffffffff)
492                         break;
493                 udelay(1);
494         }
495
496         amdgpu_atombios_scratch_regs_engine_hung(adev, false);
497
498         return ret;
499 }
500
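/*
 * Pick the reset method: honour a valid amdgpu_reset_method module
 * parameter, otherwise fall back to the per-ASIC default (mode2 for the
 * APUs, mode1 for Sienna Cichlid and friends, BACO where supported).
 */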
501 static enum amd_reset_method
502 nv_asic_reset_method(struct amdgpu_device *adev)
503 {
504         if (amdgpu_reset_method == AMD_RESET_METHOD_MODE1 ||
505             amdgpu_reset_method == AMD_RESET_METHOD_MODE2 ||
506             amdgpu_reset_method == AMD_RESET_METHOD_BACO ||
507             amdgpu_reset_method == AMD_RESET_METHOD_PCI)
508                 return amdgpu_reset_method;
509
510         if (amdgpu_reset_method != -1)
511                 dev_warn(adev->dev, "Specified reset method:%d isn't supported, using AUTO instead.\n",
512                                   amdgpu_reset_method);
513
514         switch (adev->asic_type) {
515         case CHIP_VANGOGH:
516         case CHIP_YELLOW_CARP:
517                 return AMD_RESET_METHOD_MODE2;
518         case CHIP_SIENNA_CICHLID:
519         case CHIP_NAVY_FLOUNDER:
520         case CHIP_DIMGREY_CAVEFISH:
521         case CHIP_BEIGE_GOBY:
522                 return AMD_RESET_METHOD_MODE1;
523         default:
524                 if (amdgpu_dpm_is_baco_supported(adev))
525                         return AMD_RESET_METHOD_BACO;
526                 else
527                         return AMD_RESET_METHOD_MODE1;
528         }
529 }
530
531 static int nv_asic_reset(struct amdgpu_device *adev)
532 {
533         int ret = 0;
534
535         switch (nv_asic_reset_method(adev)) {
536         case AMD_RESET_METHOD_PCI:
537                 dev_info(adev->dev, "PCI reset\n");
538                 ret = amdgpu_device_pci_reset(adev);
539                 break;
540         case AMD_RESET_METHOD_BACO:
541                 dev_info(adev->dev, "BACO reset\n");
542                 ret = amdgpu_dpm_baco_reset(adev);
543                 break;
544         case AMD_RESET_METHOD_MODE2:
545                 dev_info(adev->dev, "MODE2 reset\n");
546                 ret = nv_asic_mode2_reset(adev);
547                 break;
548         default:
549                 dev_info(adev->dev, "MODE1 reset\n");
550                 ret = amdgpu_device_mode1_reset(adev);
551                 break;
552         }
553
554         return ret;
555 }
556
557 static int nv_set_uvd_clocks(struct amdgpu_device *adev, u32 vclk, u32 dclk)
558 {
559         /* todo */
560         return 0;
561 }
562
563 static int nv_set_vce_clocks(struct amdgpu_device *adev, u32 evclk, u32 ecclk)
564 {
565         /* todo */
566         return 0;
567 }
568
569 static void nv_pcie_gen3_enable(struct amdgpu_device *adev)
570 {
571         if (pci_is_root_bus(adev->pdev->bus))
572                 return;
573
574         if (amdgpu_pcie_gen2 == 0)
575                 return;
576
577         if (!(adev->pm.pcie_gen_mask & (CAIL_PCIE_LINK_SPEED_SUPPORT_GEN2 |
578                                         CAIL_PCIE_LINK_SPEED_SUPPORT_GEN3)))
579                 return;
580
581         /* todo */
582 }
583
584 static void nv_program_aspm(struct amdgpu_device *adev)
585 {
586         if (!amdgpu_aspm)
587                 return;
588
589         if (!(adev->flags & AMD_IS_APU) &&
590             (adev->nbio.funcs->program_aspm))
591                 adev->nbio.funcs->program_aspm(adev);
592
593 }
594
595 static void nv_enable_doorbell_aperture(struct amdgpu_device *adev,
596                                         bool enable)
597 {
598         adev->nbio.funcs->enable_doorbell_aperture(adev, enable);
599         adev->nbio.funcs->enable_doorbell_selfring_aperture(adev, enable);
600 }
601
602 static const struct amdgpu_ip_block_version nv_common_ip_block =
603 {
604         .type = AMD_IP_BLOCK_TYPE_COMMON,
605         .major = 1,
606         .minor = 0,
607         .rev = 0,
608         .funcs = &nv_common_ip_funcs,
609 };
610
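/*
 * SKUs, identified by PCI device and revision ID, for which the VCN and
 * JPEG IP blocks are treated as harvested (see nv_reg_base_init()).
 */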
611 static bool nv_is_headless_sku(struct pci_dev *pdev)
612 {
613         if ((pdev->device == 0x731E &&
614             (pdev->revision == 0xC6 || pdev->revision == 0xC7)) ||
615             (pdev->device == 0x7340 && pdev->revision == 0xC9)  ||
616             (pdev->device == 0x7360 && pdev->revision == 0xC7))
617                 return true;
618         return false;
619 }
620
621 static int nv_reg_base_init(struct amdgpu_device *adev)
622 {
623         int r;
624
625         if (amdgpu_discovery) {
626                 r = amdgpu_discovery_reg_base_init(adev);
627                 if (r) {
628                         DRM_WARN("failed to init reg base from ip discovery table, "
629                                         "fall back to legacy init method\n");
630                         goto legacy_init;
631                 }
632
633                 amdgpu_discovery_harvest_ip(adev);
634                 if (nv_is_headless_sku(adev->pdev)) {
635                         adev->harvest_ip_mask |= AMD_HARVEST_IP_VCN_MASK;
636                         adev->harvest_ip_mask |= AMD_HARVEST_IP_JPEG_MASK;
637                 }
638
639                 return 0;
640         }
641
642 legacy_init:
643         switch (adev->asic_type) {
644         case CHIP_NAVI10:
645                 navi10_reg_base_init(adev);
646                 break;
647         case CHIP_NAVI14:
648                 navi14_reg_base_init(adev);
649                 break;
650         case CHIP_NAVI12:
651                 navi12_reg_base_init(adev);
652                 break;
653         case CHIP_SIENNA_CICHLID:
654         case CHIP_NAVY_FLOUNDER:
655                 sienna_cichlid_reg_base_init(adev);
656                 break;
657         case CHIP_VANGOGH:
658                 vangogh_reg_base_init(adev);
659                 break;
660         case CHIP_DIMGREY_CAVEFISH:
661                 dimgrey_cavefish_reg_base_init(adev);
662                 break;
663         case CHIP_BEIGE_GOBY:
664                 beige_goby_reg_base_init(adev);
665                 break;
666         case CHIP_YELLOW_CARP:
667                 yellow_carp_reg_base_init(adev);
668                 break;
669         default:
670                 return -EINVAL;
671         }
672
673         return 0;
674 }
675
676 void nv_set_virt_ops(struct amdgpu_device *adev)
677 {
678         adev->virt.ops = &xgpu_nv_virt_ops;
679 }
680
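/*
 * Wire up the NBIO/HDP/SMUIO helpers, initialize the per-IP register
 * bases and then register the IP blocks for this ASIC.  The order of
 * the amdgpu_device_ip_block_add() calls determines the order in which
 * the blocks are brought up.
 */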
681 int nv_set_ip_blocks(struct amdgpu_device *adev)
682 {
683         int r;
684
685         if (adev->flags & AMD_IS_APU) {
686                 adev->nbio.funcs = &nbio_v7_2_funcs;
687                 adev->nbio.hdp_flush_reg = &nbio_v7_2_hdp_flush_reg;
688         } else {
689                 adev->nbio.funcs = &nbio_v2_3_funcs;
690                 adev->nbio.hdp_flush_reg = &nbio_v2_3_hdp_flush_reg;
691         }
692         adev->hdp.funcs = &hdp_v5_0_funcs;
693
694         if (adev->asic_type >= CHIP_SIENNA_CICHLID)
695                 adev->smuio.funcs = &smuio_v11_0_6_funcs;
696         else
697                 adev->smuio.funcs = &smuio_v11_0_funcs;
698
699         if (adev->asic_type == CHIP_SIENNA_CICHLID)
700                 adev->gmc.xgmi.supported = true;
701
702         /* Set IP register base before any HW register access */
703         r = nv_reg_base_init(adev);
704         if (r)
705                 return r;
706
707         switch (adev->asic_type) {
708         case CHIP_NAVI10:
709         case CHIP_NAVI14:
710                 amdgpu_device_ip_block_add(adev, &nv_common_ip_block);
711                 amdgpu_device_ip_block_add(adev, &gmc_v10_0_ip_block);
712                 amdgpu_device_ip_block_add(adev, &navi10_ih_ip_block);
713                 amdgpu_device_ip_block_add(adev, &psp_v11_0_ip_block);
714                 if (adev->firmware.load_type == AMDGPU_FW_LOAD_PSP &&
715                     !amdgpu_sriov_vf(adev))
716                         amdgpu_device_ip_block_add(adev, &smu_v11_0_ip_block);
717                 if (adev->enable_virtual_display || amdgpu_sriov_vf(adev))
718                         amdgpu_device_ip_block_add(adev, &dce_virtual_ip_block);
719 #if defined(CONFIG_DRM_AMD_DC)
720                 else if (amdgpu_device_has_dc_support(adev))
721                         amdgpu_device_ip_block_add(adev, &dm_ip_block);
722 #endif
723                 amdgpu_device_ip_block_add(adev, &gfx_v10_0_ip_block);
724                 amdgpu_device_ip_block_add(adev, &sdma_v5_0_ip_block);
725                 if (adev->firmware.load_type == AMDGPU_FW_LOAD_DIRECT &&
726                     !amdgpu_sriov_vf(adev))
727                         amdgpu_device_ip_block_add(adev, &smu_v11_0_ip_block);
728                 amdgpu_device_ip_block_add(adev, &vcn_v2_0_ip_block);
729                 amdgpu_device_ip_block_add(adev, &jpeg_v2_0_ip_block);
730                 if (adev->enable_mes)
731                         amdgpu_device_ip_block_add(adev, &mes_v10_1_ip_block);
732                 break;
733         case CHIP_NAVI12:
734                 amdgpu_device_ip_block_add(adev, &nv_common_ip_block);
735                 amdgpu_device_ip_block_add(adev, &gmc_v10_0_ip_block);
736                 if (!amdgpu_sriov_vf(adev)) {
737                         amdgpu_device_ip_block_add(adev, &navi10_ih_ip_block);
738                         amdgpu_device_ip_block_add(adev, &psp_v11_0_ip_block);
739                 } else {
740                         amdgpu_device_ip_block_add(adev, &psp_v11_0_ip_block);
741                         amdgpu_device_ip_block_add(adev, &navi10_ih_ip_block);
742                 }
743                 if (adev->firmware.load_type == AMDGPU_FW_LOAD_PSP)
744                         amdgpu_device_ip_block_add(adev, &smu_v11_0_ip_block);
745                 if (adev->enable_virtual_display || amdgpu_sriov_vf(adev))
746                         amdgpu_device_ip_block_add(adev, &dce_virtual_ip_block);
747 #if defined(CONFIG_DRM_AMD_DC)
748                 else if (amdgpu_device_has_dc_support(adev))
749                         amdgpu_device_ip_block_add(adev, &dm_ip_block);
750 #endif
751                 amdgpu_device_ip_block_add(adev, &gfx_v10_0_ip_block);
752                 amdgpu_device_ip_block_add(adev, &sdma_v5_0_ip_block);
753                 if (adev->firmware.load_type == AMDGPU_FW_LOAD_DIRECT &&
754                     !amdgpu_sriov_vf(adev))
755                         amdgpu_device_ip_block_add(adev, &smu_v11_0_ip_block);
756                 amdgpu_device_ip_block_add(adev, &vcn_v2_0_ip_block);
757                 if (!amdgpu_sriov_vf(adev))
758                         amdgpu_device_ip_block_add(adev, &jpeg_v2_0_ip_block);
759                 break;
760         case CHIP_SIENNA_CICHLID:
761                 amdgpu_device_ip_block_add(adev, &nv_common_ip_block);
762                 amdgpu_device_ip_block_add(adev, &gmc_v10_0_ip_block);
763                 if (!amdgpu_sriov_vf(adev)) {
764                         amdgpu_device_ip_block_add(adev, &navi10_ih_ip_block);
765                         if (likely(adev->firmware.load_type == AMDGPU_FW_LOAD_PSP))
766                                 amdgpu_device_ip_block_add(adev, &psp_v11_0_ip_block);
767                 } else {
768                         if (likely(adev->firmware.load_type == AMDGPU_FW_LOAD_PSP))
769                                 amdgpu_device_ip_block_add(adev, &psp_v11_0_ip_block);
770                         amdgpu_device_ip_block_add(adev, &navi10_ih_ip_block);
771                 }
772                 if (adev->firmware.load_type == AMDGPU_FW_LOAD_PSP &&
773                     is_support_sw_smu(adev))
774                         amdgpu_device_ip_block_add(adev, &smu_v11_0_ip_block);
775                 if (adev->enable_virtual_display || amdgpu_sriov_vf(adev))
776                         amdgpu_device_ip_block_add(adev, &dce_virtual_ip_block);
777 #if defined(CONFIG_DRM_AMD_DC)
778                 else if (amdgpu_device_has_dc_support(adev))
779                         amdgpu_device_ip_block_add(adev, &dm_ip_block);
780 #endif
781                 amdgpu_device_ip_block_add(adev, &gfx_v10_0_ip_block);
782                 amdgpu_device_ip_block_add(adev, &sdma_v5_2_ip_block);
783                 amdgpu_device_ip_block_add(adev, &vcn_v3_0_ip_block);
784                 if (!amdgpu_sriov_vf(adev))
785                         amdgpu_device_ip_block_add(adev, &jpeg_v3_0_ip_block);
786                 if (adev->enable_mes)
787                         amdgpu_device_ip_block_add(adev, &mes_v10_1_ip_block);
788                 break;
789         case CHIP_NAVY_FLOUNDER:
790                 amdgpu_device_ip_block_add(adev, &nv_common_ip_block);
791                 amdgpu_device_ip_block_add(adev, &gmc_v10_0_ip_block);
792                 amdgpu_device_ip_block_add(adev, &navi10_ih_ip_block);
793                 if (likely(adev->firmware.load_type == AMDGPU_FW_LOAD_PSP))
794                         amdgpu_device_ip_block_add(adev, &psp_v11_0_ip_block);
795                 if (adev->firmware.load_type == AMDGPU_FW_LOAD_PSP &&
796                     is_support_sw_smu(adev))
797                         amdgpu_device_ip_block_add(adev, &smu_v11_0_ip_block);
798                 if (adev->enable_virtual_display || amdgpu_sriov_vf(adev))
799                         amdgpu_device_ip_block_add(adev, &dce_virtual_ip_block);
800 #if defined(CONFIG_DRM_AMD_DC)
801                 else if (amdgpu_device_has_dc_support(adev))
802                         amdgpu_device_ip_block_add(adev, &dm_ip_block);
803 #endif
804                 amdgpu_device_ip_block_add(adev, &gfx_v10_0_ip_block);
805                 amdgpu_device_ip_block_add(adev, &sdma_v5_2_ip_block);
806                 amdgpu_device_ip_block_add(adev, &vcn_v3_0_ip_block);
807                 amdgpu_device_ip_block_add(adev, &jpeg_v3_0_ip_block);
808                 if (adev->firmware.load_type == AMDGPU_FW_LOAD_DIRECT &&
809                     is_support_sw_smu(adev))
810                         amdgpu_device_ip_block_add(adev, &smu_v11_0_ip_block);
811                 break;
812         case CHIP_VANGOGH:
813                 amdgpu_device_ip_block_add(adev, &nv_common_ip_block);
814                 amdgpu_device_ip_block_add(adev, &gmc_v10_0_ip_block);
815                 amdgpu_device_ip_block_add(adev, &navi10_ih_ip_block);
816                 if (likely(adev->firmware.load_type == AMDGPU_FW_LOAD_PSP))
817                         amdgpu_device_ip_block_add(adev, &psp_v11_0_ip_block);
818                 amdgpu_device_ip_block_add(adev, &smu_v11_0_ip_block);
819                 if (adev->enable_virtual_display || amdgpu_sriov_vf(adev))
820                         amdgpu_device_ip_block_add(adev, &dce_virtual_ip_block);
821 #if defined(CONFIG_DRM_AMD_DC)
822                 else if (amdgpu_device_has_dc_support(adev))
823                         amdgpu_device_ip_block_add(adev, &dm_ip_block);
824 #endif
825                 amdgpu_device_ip_block_add(adev, &gfx_v10_0_ip_block);
826                 amdgpu_device_ip_block_add(adev, &sdma_v5_2_ip_block);
827                 amdgpu_device_ip_block_add(adev, &vcn_v3_0_ip_block);
828                 amdgpu_device_ip_block_add(adev, &jpeg_v3_0_ip_block);
829                 break;
830         case CHIP_DIMGREY_CAVEFISH:
831                 amdgpu_device_ip_block_add(adev, &nv_common_ip_block);
832                 amdgpu_device_ip_block_add(adev, &gmc_v10_0_ip_block);
833                 amdgpu_device_ip_block_add(adev, &navi10_ih_ip_block);
834                 if (likely(adev->firmware.load_type == AMDGPU_FW_LOAD_PSP))
835                         amdgpu_device_ip_block_add(adev, &psp_v11_0_ip_block);
836                 if (adev->firmware.load_type == AMDGPU_FW_LOAD_PSP &&
837                     is_support_sw_smu(adev))
838                         amdgpu_device_ip_block_add(adev, &smu_v11_0_ip_block);
839                 if (adev->enable_virtual_display || amdgpu_sriov_vf(adev))
840                         amdgpu_device_ip_block_add(adev, &dce_virtual_ip_block);
841 #if defined(CONFIG_DRM_AMD_DC)
842                 else if (amdgpu_device_has_dc_support(adev))
843                         amdgpu_device_ip_block_add(adev, &dm_ip_block);
844 #endif
845                 amdgpu_device_ip_block_add(adev, &gfx_v10_0_ip_block);
846                 amdgpu_device_ip_block_add(adev, &sdma_v5_2_ip_block);
847                 amdgpu_device_ip_block_add(adev, &vcn_v3_0_ip_block);
848                 amdgpu_device_ip_block_add(adev, &jpeg_v3_0_ip_block);
849                 break;
850         case CHIP_BEIGE_GOBY:
851                 amdgpu_device_ip_block_add(adev, &nv_common_ip_block);
852                 amdgpu_device_ip_block_add(adev, &gmc_v10_0_ip_block);
853                 amdgpu_device_ip_block_add(adev, &navi10_ih_ip_block);
854                 if (likely(adev->firmware.load_type == AMDGPU_FW_LOAD_PSP))
855                         amdgpu_device_ip_block_add(adev, &psp_v11_0_ip_block);
856                 if (adev->firmware.load_type == AMDGPU_FW_LOAD_PSP &&
857                     is_support_sw_smu(adev))
858                         amdgpu_device_ip_block_add(adev, &smu_v11_0_ip_block);
859                 amdgpu_device_ip_block_add(adev, &gfx_v10_0_ip_block);
860                 amdgpu_device_ip_block_add(adev, &sdma_v5_2_ip_block);
861                 if (adev->enable_virtual_display || amdgpu_sriov_vf(adev))
862                         amdgpu_device_ip_block_add(adev, &dce_virtual_ip_block);
863 #if defined(CONFIG_DRM_AMD_DC)
864                 else if (amdgpu_device_has_dc_support(adev))
865                         amdgpu_device_ip_block_add(adev, &dm_ip_block);
866 #endif
867                 if (adev->firmware.load_type == AMDGPU_FW_LOAD_DIRECT &&
868                     is_support_sw_smu(adev))
869                         amdgpu_device_ip_block_add(adev, &smu_v11_0_ip_block);
870                 amdgpu_device_ip_block_add(adev, &vcn_v3_0_ip_block);
871                 break;
872         case CHIP_YELLOW_CARP:
873                 amdgpu_device_ip_block_add(adev, &nv_common_ip_block);
874                 amdgpu_device_ip_block_add(adev, &gmc_v10_0_ip_block);
875                 amdgpu_device_ip_block_add(adev, &navi10_ih_ip_block);
876                 if (likely(adev->firmware.load_type == AMDGPU_FW_LOAD_PSP))
877                         amdgpu_device_ip_block_add(adev, &psp_v13_0_ip_block);
878                 amdgpu_device_ip_block_add(adev, &smu_v13_0_ip_block);
881                 amdgpu_device_ip_block_add(adev, &gfx_v10_0_ip_block);
882                 amdgpu_device_ip_block_add(adev, &sdma_v5_2_ip_block);
883                 if (adev->enable_virtual_display || amdgpu_sriov_vf(adev))
884                         amdgpu_device_ip_block_add(adev, &dce_virtual_ip_block);
885 #if defined(CONFIG_DRM_AMD_DC)
886                 else if (amdgpu_device_has_dc_support(adev))
887                         amdgpu_device_ip_block_add(adev, &dm_ip_block);
888 #endif
889                 amdgpu_device_ip_block_add(adev, &vcn_v3_0_ip_block);
890                 amdgpu_device_ip_block_add(adev, &jpeg_v3_0_ip_block);
891                 break;
892         default:
893                 return -EINVAL;
894         }
895
896         return 0;
897 }
898
899 static uint32_t nv_get_rev_id(struct amdgpu_device *adev)
900 {
901         return adev->nbio.funcs->get_rev_id(adev);
902 }
903
904 static bool nv_need_full_reset(struct amdgpu_device *adev)
905 {
906         return true;
907 }
908
909 static bool nv_need_reset_on_init(struct amdgpu_device *adev)
910 {
911         u32 sol_reg;
912
913         if (adev->flags & AMD_IS_APU)
914                 return false;
915
916         /* Check sOS sign of life register to confirm sys driver and sOS
917          * have already been loaded.
918          */
919         sol_reg = RREG32_SOC15(MP0, 0, mmMP0_SMN_C2PMSG_81);
920         if (sol_reg)
921                 return true;
922
923         return false;
924 }
925
926 static uint64_t nv_get_pcie_replay_count(struct amdgpu_device *adev)
927 {
928
929         /* TODO
930          * dummy implementation for the pcie_replay_count sysfs interface
931          */
932
933         return 0;
934 }
935
936 static void nv_init_doorbell_index(struct amdgpu_device *adev)
937 {
938         adev->doorbell_index.kiq = AMDGPU_NAVI10_DOORBELL_KIQ;
939         adev->doorbell_index.mec_ring0 = AMDGPU_NAVI10_DOORBELL_MEC_RING0;
940         adev->doorbell_index.mec_ring1 = AMDGPU_NAVI10_DOORBELL_MEC_RING1;
941         adev->doorbell_index.mec_ring2 = AMDGPU_NAVI10_DOORBELL_MEC_RING2;
942         adev->doorbell_index.mec_ring3 = AMDGPU_NAVI10_DOORBELL_MEC_RING3;
943         adev->doorbell_index.mec_ring4 = AMDGPU_NAVI10_DOORBELL_MEC_RING4;
944         adev->doorbell_index.mec_ring5 = AMDGPU_NAVI10_DOORBELL_MEC_RING5;
945         adev->doorbell_index.mec_ring6 = AMDGPU_NAVI10_DOORBELL_MEC_RING6;
946         adev->doorbell_index.mec_ring7 = AMDGPU_NAVI10_DOORBELL_MEC_RING7;
947         adev->doorbell_index.userqueue_start = AMDGPU_NAVI10_DOORBELL_USERQUEUE_START;
948         adev->doorbell_index.userqueue_end = AMDGPU_NAVI10_DOORBELL_USERQUEUE_END;
949         adev->doorbell_index.gfx_ring0 = AMDGPU_NAVI10_DOORBELL_GFX_RING0;
950         adev->doorbell_index.gfx_ring1 = AMDGPU_NAVI10_DOORBELL_GFX_RING1;
951         adev->doorbell_index.mes_ring = AMDGPU_NAVI10_DOORBELL_MES_RING;
952         adev->doorbell_index.sdma_engine[0] = AMDGPU_NAVI10_DOORBELL_sDMA_ENGINE0;
953         adev->doorbell_index.sdma_engine[1] = AMDGPU_NAVI10_DOORBELL_sDMA_ENGINE1;
954         adev->doorbell_index.sdma_engine[2] = AMDGPU_NAVI10_DOORBELL_sDMA_ENGINE2;
955         adev->doorbell_index.sdma_engine[3] = AMDGPU_NAVI10_DOORBELL_sDMA_ENGINE3;
956         adev->doorbell_index.ih = AMDGPU_NAVI10_DOORBELL_IH;
957         adev->doorbell_index.vcn.vcn_ring0_1 = AMDGPU_NAVI10_DOORBELL64_VCN0_1;
958         adev->doorbell_index.vcn.vcn_ring2_3 = AMDGPU_NAVI10_DOORBELL64_VCN2_3;
959         adev->doorbell_index.vcn.vcn_ring4_5 = AMDGPU_NAVI10_DOORBELL64_VCN4_5;
960         adev->doorbell_index.vcn.vcn_ring6_7 = AMDGPU_NAVI10_DOORBELL64_VCN6_7;
961         adev->doorbell_index.first_non_cp = AMDGPU_NAVI10_DOORBELL64_FIRST_NON_CP;
962         adev->doorbell_index.last_non_cp = AMDGPU_NAVI10_DOORBELL64_LAST_NON_CP;
963
964         adev->doorbell_index.max_assignment = AMDGPU_NAVI10_DOORBELL_MAX_ASSIGNMENT << 1;
965         adev->doorbell_index.sdma_doorbell_range = 20;
966 }
967
968 static void nv_pre_asic_init(struct amdgpu_device *adev)
969 {
970 }
971
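/*
 * While a UMD holds the stable pstate, keep the RLC in safe mode and
 * disable perfmon MGCG and ASPM, presumably so that clock and power
 * state changes do not perturb profiling; both are re-enabled on exit.
 */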
972 static int nv_update_umd_stable_pstate(struct amdgpu_device *adev,
973                                        bool enter)
974 {
975         if (enter)
976                 amdgpu_gfx_rlc_enter_safe_mode(adev);
977         else
978                 amdgpu_gfx_rlc_exit_safe_mode(adev);
979
980         if (adev->gfx.funcs->update_perfmon_mgcg)
981                 adev->gfx.funcs->update_perfmon_mgcg(adev, !enter);
982
983         if (!(adev->flags & AMD_IS_APU) &&
984             (adev->nbio.funcs->enable_aspm))
985                 adev->nbio.funcs->enable_aspm(adev, !enter);
986
987         return 0;
988 }
989
990 static const struct amdgpu_asic_funcs nv_asic_funcs =
991 {
992         .read_disabled_bios = &nv_read_disabled_bios,
993         .read_bios_from_rom = &nv_read_bios_from_rom,
994         .read_register = &nv_read_register,
995         .reset = &nv_asic_reset,
996         .reset_method = &nv_asic_reset_method,
997         .set_vga_state = &nv_vga_set_state,
998         .get_xclk = &nv_get_xclk,
999         .set_uvd_clocks = &nv_set_uvd_clocks,
1000         .set_vce_clocks = &nv_set_vce_clocks,
1001         .get_config_memsize = &nv_get_config_memsize,
1002         .init_doorbell_index = &nv_init_doorbell_index,
1003         .need_full_reset = &nv_need_full_reset,
1004         .need_reset_on_init = &nv_need_reset_on_init,
1005         .get_pcie_replay_count = &nv_get_pcie_replay_count,
1006         .supports_baco = &amdgpu_dpm_is_baco_supported,
1007         .pre_asic_init = &nv_pre_asic_init,
1008         .update_umd_stable_pstate = &nv_update_umd_stable_pstate,
1009         .query_video_codecs = &nv_query_video_codecs,
1010 };
1011
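/*
 * Early init: hook up the indirect register accessors and asic_funcs,
 * then derive the per-ASIC clockgating/powergating flags and the
 * external revision id that is reported to userspace.
 */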
1012 static int nv_common_early_init(void *handle)
1013 {
1014 #define MMIO_REG_HOLE_OFFSET (0x80000 - PAGE_SIZE)
1015         struct amdgpu_device *adev = (struct amdgpu_device *)handle;
1016
1017         adev->rmmio_remap.reg_offset = MMIO_REG_HOLE_OFFSET;
1018         adev->rmmio_remap.bus_addr = adev->rmmio_base + MMIO_REG_HOLE_OFFSET;
1019         adev->smc_rreg = NULL;
1020         adev->smc_wreg = NULL;
1021         adev->pcie_rreg = &nv_pcie_rreg;
1022         adev->pcie_wreg = &nv_pcie_wreg;
1023         adev->pcie_rreg64 = &nv_pcie_rreg64;
1024         adev->pcie_wreg64 = &nv_pcie_wreg64;
1025         adev->pciep_rreg = &nv_pcie_port_rreg;
1026         adev->pciep_wreg = &nv_pcie_port_wreg;
1027
1028         /* TODO: will add them during VCN v2 implementation */
1029         adev->uvd_ctx_rreg = NULL;
1030         adev->uvd_ctx_wreg = NULL;
1031
1032         adev->didt_rreg = &nv_didt_rreg;
1033         adev->didt_wreg = &nv_didt_wreg;
1034
1035         adev->asic_funcs = &nv_asic_funcs;
1036
1037         adev->rev_id = nv_get_rev_id(adev);
1038         adev->external_rev_id = 0xff;
1039         switch (adev->asic_type) {
1040         case CHIP_NAVI10:
1041                 adev->cg_flags = AMD_CG_SUPPORT_GFX_MGCG |
1042                         AMD_CG_SUPPORT_GFX_CGCG |
1043                         AMD_CG_SUPPORT_IH_CG |
1044                         AMD_CG_SUPPORT_HDP_MGCG |
1045                         AMD_CG_SUPPORT_HDP_LS |
1046                         AMD_CG_SUPPORT_SDMA_MGCG |
1047                         AMD_CG_SUPPORT_SDMA_LS |
1048                         AMD_CG_SUPPORT_MC_MGCG |
1049                         AMD_CG_SUPPORT_MC_LS |
1050                         AMD_CG_SUPPORT_ATHUB_MGCG |
1051                         AMD_CG_SUPPORT_ATHUB_LS |
1052                         AMD_CG_SUPPORT_VCN_MGCG |
1053                         AMD_CG_SUPPORT_JPEG_MGCG |
1054                         AMD_CG_SUPPORT_BIF_MGCG |
1055                         AMD_CG_SUPPORT_BIF_LS;
1056                 adev->pg_flags = AMD_PG_SUPPORT_VCN |
1057                         AMD_PG_SUPPORT_VCN_DPG |
1058                         AMD_PG_SUPPORT_JPEG |
1059                         AMD_PG_SUPPORT_ATHUB;
1060                 adev->external_rev_id = adev->rev_id + 0x1;
1061                 break;
1062         case CHIP_NAVI14:
1063                 adev->cg_flags = AMD_CG_SUPPORT_GFX_MGCG |
1064                         AMD_CG_SUPPORT_GFX_CGCG |
1065                         AMD_CG_SUPPORT_IH_CG |
1066                         AMD_CG_SUPPORT_HDP_MGCG |
1067                         AMD_CG_SUPPORT_HDP_LS |
1068                         AMD_CG_SUPPORT_SDMA_MGCG |
1069                         AMD_CG_SUPPORT_SDMA_LS |
1070                         AMD_CG_SUPPORT_MC_MGCG |
1071                         AMD_CG_SUPPORT_MC_LS |
1072                         AMD_CG_SUPPORT_ATHUB_MGCG |
1073                         AMD_CG_SUPPORT_ATHUB_LS |
1074                         AMD_CG_SUPPORT_VCN_MGCG |
1075                         AMD_CG_SUPPORT_JPEG_MGCG |
1076                         AMD_CG_SUPPORT_BIF_MGCG |
1077                         AMD_CG_SUPPORT_BIF_LS;
1078                 adev->pg_flags = AMD_PG_SUPPORT_VCN |
1079                         AMD_PG_SUPPORT_JPEG |
1080                         AMD_PG_SUPPORT_VCN_DPG;
1081                 adev->external_rev_id = adev->rev_id + 20;
1082                 break;
1083         case CHIP_NAVI12:
1084                 adev->cg_flags = AMD_CG_SUPPORT_GFX_MGCG |
1085                         AMD_CG_SUPPORT_GFX_MGLS |
1086                         AMD_CG_SUPPORT_GFX_CGCG |
1087                         AMD_CG_SUPPORT_GFX_CP_LS |
1088                         AMD_CG_SUPPORT_GFX_RLC_LS |
1089                         AMD_CG_SUPPORT_IH_CG |
1090                         AMD_CG_SUPPORT_HDP_MGCG |
1091                         AMD_CG_SUPPORT_HDP_LS |
1092                         AMD_CG_SUPPORT_SDMA_MGCG |
1093                         AMD_CG_SUPPORT_SDMA_LS |
1094                         AMD_CG_SUPPORT_MC_MGCG |
1095                         AMD_CG_SUPPORT_MC_LS |
1096                         AMD_CG_SUPPORT_ATHUB_MGCG |
1097                         AMD_CG_SUPPORT_ATHUB_LS |
1098                         AMD_CG_SUPPORT_VCN_MGCG |
1099                         AMD_CG_SUPPORT_JPEG_MGCG;
1100                 adev->pg_flags = AMD_PG_SUPPORT_VCN |
1101                         AMD_PG_SUPPORT_VCN_DPG |
1102                         AMD_PG_SUPPORT_JPEG |
1103                         AMD_PG_SUPPORT_ATHUB;
1104                 /* the guest VM gets 0xffffffff when reading RCC_DEV0_EPF0_STRAP0;
1105                  * as a consequence, the rev_id and external_rev_id are wrong.
1106                  * Work around this by hardcoding rev_id to 0 (default value).
1107                  */
1108                 if (amdgpu_sriov_vf(adev))
1109                         adev->rev_id = 0;
1110                 adev->external_rev_id = adev->rev_id + 0xa;
1111                 break;
1112         case CHIP_SIENNA_CICHLID:
1113                 adev->cg_flags = AMD_CG_SUPPORT_GFX_MGCG |
1114                         AMD_CG_SUPPORT_GFX_CGCG |
1115                         AMD_CG_SUPPORT_GFX_CGLS |
1116                         AMD_CG_SUPPORT_GFX_3D_CGCG |
1117                         AMD_CG_SUPPORT_MC_MGCG |
1118                         AMD_CG_SUPPORT_VCN_MGCG |
1119                         AMD_CG_SUPPORT_JPEG_MGCG |
1120                         AMD_CG_SUPPORT_HDP_MGCG |
1121                         AMD_CG_SUPPORT_HDP_LS |
1122                         AMD_CG_SUPPORT_IH_CG |
1123                         AMD_CG_SUPPORT_MC_LS;
1124                 adev->pg_flags = AMD_PG_SUPPORT_VCN |
1125                         AMD_PG_SUPPORT_VCN_DPG |
1126                         AMD_PG_SUPPORT_JPEG |
1127                         AMD_PG_SUPPORT_ATHUB |
1128                         AMD_PG_SUPPORT_MMHUB;
1129                 if (amdgpu_sriov_vf(adev)) {
1130                         /* the hypervisor controls CG and PG enablement */
1131                         adev->cg_flags = 0;
1132                         adev->pg_flags = 0;
1133                 }
1134                 adev->external_rev_id = adev->rev_id + 0x28;
1135                 break;
1136         case CHIP_NAVY_FLOUNDER:
1137                 adev->cg_flags = AMD_CG_SUPPORT_GFX_MGCG |
1138                         AMD_CG_SUPPORT_GFX_CGCG |
1139                         AMD_CG_SUPPORT_GFX_CGLS |
1140                         AMD_CG_SUPPORT_GFX_3D_CGCG |
1141                         AMD_CG_SUPPORT_VCN_MGCG |
1142                         AMD_CG_SUPPORT_JPEG_MGCG |
1143                         AMD_CG_SUPPORT_MC_MGCG |
1144                         AMD_CG_SUPPORT_MC_LS |
1145                         AMD_CG_SUPPORT_HDP_MGCG |
1146                         AMD_CG_SUPPORT_HDP_LS |
1147                         AMD_CG_SUPPORT_IH_CG;
1148                 adev->pg_flags = AMD_PG_SUPPORT_VCN |
1149                         AMD_PG_SUPPORT_VCN_DPG |
1150                         AMD_PG_SUPPORT_JPEG |
1151                         AMD_PG_SUPPORT_ATHUB |
1152                         AMD_PG_SUPPORT_MMHUB;
1153                 adev->external_rev_id = adev->rev_id + 0x32;
1154                 break;
1155
1156         case CHIP_VANGOGH:
1157                 adev->cg_flags = AMD_CG_SUPPORT_GFX_MGCG |
1158                         AMD_CG_SUPPORT_GFX_MGLS |
1159                         AMD_CG_SUPPORT_GFX_CP_LS |
1160                         AMD_CG_SUPPORT_GFX_RLC_LS |
1161                         AMD_CG_SUPPORT_GFX_CGCG |
1162                         AMD_CG_SUPPORT_GFX_CGLS |
1163                         AMD_CG_SUPPORT_GFX_3D_CGCG |
1164                         AMD_CG_SUPPORT_GFX_3D_CGLS |
1165                         AMD_CG_SUPPORT_MC_MGCG |
1166                         AMD_CG_SUPPORT_MC_LS |
1167                         AMD_CG_SUPPORT_GFX_FGCG |
1168                         AMD_CG_SUPPORT_VCN_MGCG |
1169                         AMD_CG_SUPPORT_SDMA_MGCG |
1170                         AMD_CG_SUPPORT_SDMA_LS |
1171                         AMD_CG_SUPPORT_JPEG_MGCG;
1172                 adev->pg_flags = AMD_PG_SUPPORT_GFX_PG |
1173                         AMD_PG_SUPPORT_VCN |
1174                         AMD_PG_SUPPORT_VCN_DPG |
1175                         AMD_PG_SUPPORT_JPEG;
1176                 if (adev->apu_flags & AMD_APU_IS_VANGOGH)
1177                         adev->external_rev_id = adev->rev_id + 0x01;
1178                 break;
1179         case CHIP_DIMGREY_CAVEFISH:
1180                 adev->cg_flags = AMD_CG_SUPPORT_GFX_MGCG |
1181                         AMD_CG_SUPPORT_GFX_CGCG |
1182                         AMD_CG_SUPPORT_GFX_CGLS |
1183                         AMD_CG_SUPPORT_GFX_3D_CGCG |
1184                         AMD_CG_SUPPORT_VCN_MGCG |
1185                         AMD_CG_SUPPORT_JPEG_MGCG |
1186                         AMD_CG_SUPPORT_MC_MGCG |
1187                         AMD_CG_SUPPORT_MC_LS |
1188                         AMD_CG_SUPPORT_HDP_MGCG |
1189                         AMD_CG_SUPPORT_HDP_LS |
1190                         AMD_CG_SUPPORT_IH_CG;
1191                 adev->pg_flags = AMD_PG_SUPPORT_VCN |
1192                         AMD_PG_SUPPORT_VCN_DPG |
1193                         AMD_PG_SUPPORT_JPEG |
1194                         AMD_PG_SUPPORT_ATHUB |
1195                         AMD_PG_SUPPORT_MMHUB;
1196                 adev->external_rev_id = adev->rev_id + 0x3c;
1197                 break;
1198         case CHIP_BEIGE_GOBY:
1199                 adev->cg_flags = AMD_CG_SUPPORT_GFX_MGCG |
1200                         AMD_CG_SUPPORT_GFX_CGCG |
1201                         AMD_CG_SUPPORT_GFX_CGLS |
1202                         AMD_CG_SUPPORT_GFX_3D_CGCG |
1203                         AMD_CG_SUPPORT_MC_MGCG |
1204                         AMD_CG_SUPPORT_MC_LS |
1205                         AMD_CG_SUPPORT_HDP_MGCG |
1206                         AMD_CG_SUPPORT_HDP_LS |
1207                         AMD_CG_SUPPORT_IH_CG |
1208                         AMD_CG_SUPPORT_VCN_MGCG;
1209                 adev->pg_flags = AMD_PG_SUPPORT_VCN |
1210                         AMD_PG_SUPPORT_VCN_DPG |
1211                         AMD_PG_SUPPORT_ATHUB |
1212                         AMD_PG_SUPPORT_MMHUB;
1213                 adev->external_rev_id = adev->rev_id + 0x46;
1214                 break;
1215         case CHIP_YELLOW_CARP:
1216                 adev->cg_flags = AMD_CG_SUPPORT_GFX_MGCG |
1217                         AMD_CG_SUPPORT_GFX_MGLS |
1218                         AMD_CG_SUPPORT_GFX_CGCG |
1219                         AMD_CG_SUPPORT_GFX_CGLS |
1220                         AMD_CG_SUPPORT_GFX_3D_CGCG |
1221                         AMD_CG_SUPPORT_GFX_3D_CGLS |
1222                         AMD_CG_SUPPORT_GFX_RLC_LS |
1223                         AMD_CG_SUPPORT_GFX_CP_LS |
1224                         AMD_CG_SUPPORT_GFX_FGCG |
1225                         AMD_CG_SUPPORT_MC_MGCG |
1226                         AMD_CG_SUPPORT_MC_LS |
1227                         AMD_CG_SUPPORT_SDMA_LS |
1228                         AMD_CG_SUPPORT_HDP_MGCG |
1229                         AMD_CG_SUPPORT_HDP_LS |
1230                         AMD_CG_SUPPORT_ATHUB_MGCG |
1231                         AMD_CG_SUPPORT_ATHUB_LS |
1232                         AMD_CG_SUPPORT_IH_CG |
1233                         AMD_CG_SUPPORT_VCN_MGCG |
1234                         AMD_CG_SUPPORT_JPEG_MGCG;
1235                 adev->pg_flags = AMD_PG_SUPPORT_GFX_PG |
1236                         AMD_PG_SUPPORT_VCN |
1237                         AMD_PG_SUPPORT_VCN_DPG |
1238                         AMD_PG_SUPPORT_JPEG;
1239                 adev->external_rev_id = adev->rev_id + 0x01;
1240                 break;
1241         default:
1242                 /* FIXME: not supported yet */
1243                 return -EINVAL;
1244         }
1245
1246         if (adev->harvest_ip_mask & AMD_HARVEST_IP_VCN_MASK)
1247                 adev->pg_flags &= ~(AMD_PG_SUPPORT_VCN |
1248                                     AMD_PG_SUPPORT_VCN_DPG |
1249                                     AMD_PG_SUPPORT_JPEG);
1250
1251         if (amdgpu_sriov_vf(adev)) {
1252                 amdgpu_virt_init_setting(adev);
1253                 xgpu_nv_mailbox_set_irq_funcs(adev);
1254         }
1255
1256         return 0;
1257 }
1258
1259 static int nv_common_late_init(void *handle)
1260 {
1261         struct amdgpu_device *adev = (struct amdgpu_device *)handle;
1262
1263         if (amdgpu_sriov_vf(adev)) {
1264                 xgpu_nv_mailbox_get_irq(adev);
1265                 amdgpu_virt_update_sriov_video_codec(adev,
1266                                 sriov_sc_video_codecs_encode_array, ARRAY_SIZE(sriov_sc_video_codecs_encode_array),
1267                                 sriov_sc_video_codecs_decode_array, ARRAY_SIZE(sriov_sc_video_codecs_decode_array));
1268         }
1269
1270         return 0;
1271 }
1272
1273 static int nv_common_sw_init(void *handle)
1274 {
1275         struct amdgpu_device *adev = (struct amdgpu_device *)handle;
1276
1277         if (amdgpu_sriov_vf(adev))
1278                 xgpu_nv_mailbox_add_irq_id(adev);
1279
1280         return 0;
1281 }
1282
1283 static int nv_common_sw_fini(void *handle)
1284 {
1285         return 0;
1286 }
1287
1288 static int nv_common_hw_init(void *handle)
1289 {
1290         struct amdgpu_device *adev = (struct amdgpu_device *)handle;
1291
1292         if (adev->nbio.funcs->apply_lc_spc_mode_wa)
1293                 adev->nbio.funcs->apply_lc_spc_mode_wa(adev);
1294
1295         if (adev->nbio.funcs->apply_l1_link_width_reconfig_wa)
1296                 adev->nbio.funcs->apply_l1_link_width_reconfig_wa(adev);
1297
1298         /* enable pcie gen2/3 link */
1299         nv_pcie_gen3_enable(adev);
1300         /* enable aspm */
1301         nv_program_aspm(adev);
1302         /* setup nbio registers */
1303         adev->nbio.funcs->init_registers(adev);
1304         /* remap HDP registers to a hole in mmio space,
1305          * so that those registers can be exposed
1306          * to process space
1307          */
1308         if (adev->nbio.funcs->remap_hdp_registers)
1309                 adev->nbio.funcs->remap_hdp_registers(adev);
1310         /* enable the doorbell aperture */
1311         nv_enable_doorbell_aperture(adev, true);
1312
1313         return 0;
1314 }
1315
1316 static int nv_common_hw_fini(void *handle)
1317 {
1318         struct amdgpu_device *adev = (struct amdgpu_device *)handle;
1319
1320         /* disable the doorbell aperture */
1321         nv_enable_doorbell_aperture(adev, false);
1322
1323         return 0;
1324 }
1325
1326 static int nv_common_suspend(void *handle)
1327 {
1328         struct amdgpu_device *adev = (struct amdgpu_device *)handle;
1329
1330         return nv_common_hw_fini(adev);
1331 }
1332
1333 static int nv_common_resume(void *handle)
1334 {
1335         struct amdgpu_device *adev = (struct amdgpu_device *)handle;
1336
1337         return nv_common_hw_init(adev);
1338 }
1339
1340 static bool nv_common_is_idle(void *handle)
1341 {
1342         return true;
1343 }
1344
1345 static int nv_common_wait_for_idle(void *handle)
1346 {
1347         return 0;
1348 }
1349
1350 static int nv_common_soft_reset(void *handle)
1351 {
1352         return 0;
1353 }
1354
1355 static int nv_common_set_clockgating_state(void *handle,
1356                                            enum amd_clockgating_state state)
1357 {
1358         struct amdgpu_device *adev = (struct amdgpu_device *)handle;
1359
1360         if (amdgpu_sriov_vf(adev))
1361                 return 0;
1362
1363         switch (adev->asic_type) {
1364         case CHIP_NAVI10:
1365         case CHIP_NAVI14:
1366         case CHIP_NAVI12:
1367         case CHIP_SIENNA_CICHLID:
1368         case CHIP_NAVY_FLOUNDER:
1369         case CHIP_DIMGREY_CAVEFISH:
1370         case CHIP_BEIGE_GOBY:
1371                 adev->nbio.funcs->update_medium_grain_clock_gating(adev,
1372                                 state == AMD_CG_STATE_GATE);
1373                 adev->nbio.funcs->update_medium_grain_light_sleep(adev,
1374                                 state == AMD_CG_STATE_GATE);
1375                 adev->hdp.funcs->update_clock_gating(adev,
1376                                 state == AMD_CG_STATE_GATE);
1377                 adev->smuio.funcs->update_rom_clock_gating(adev,
1378                                 state == AMD_CG_STATE_GATE);
1379                 break;
1380         default:
1381                 break;
1382         }
1383         return 0;
1384 }
1385
1386 static int nv_common_set_powergating_state(void *handle,
1387                                            enum amd_powergating_state state)
1388 {
1389         /* TODO */
1390         return 0;
1391 }
1392
1393 static void nv_common_get_clockgating_state(void *handle, u32 *flags)
1394 {
1395         struct amdgpu_device *adev = (struct amdgpu_device *)handle;
1396
1397         if (amdgpu_sriov_vf(adev))
1398                 *flags = 0;
1399
1400         adev->nbio.funcs->get_clockgating_state(adev, flags);
1401
1402         adev->hdp.funcs->get_clock_gating_state(adev, flags);
1403
1404         adev->smuio.funcs->get_clock_gating_state(adev, flags);
1405
1406         return;
1407 }
1408
1409 static const struct amd_ip_funcs nv_common_ip_funcs = {
1410         .name = "nv_common",
1411         .early_init = nv_common_early_init,
1412         .late_init = nv_common_late_init,
1413         .sw_init = nv_common_sw_init,
1414         .sw_fini = nv_common_sw_fini,
1415         .hw_init = nv_common_hw_init,
1416         .hw_fini = nv_common_hw_fini,
1417         .suspend = nv_common_suspend,
1418         .resume = nv_common_resume,
1419         .is_idle = nv_common_is_idle,
1420         .wait_for_idle = nv_common_wait_for_idle,
1421         .soft_reset = nv_common_soft_reset,
1422         .set_clockgating_state = nv_common_set_clockgating_state,
1423         .set_powergating_state = nv_common_set_powergating_state,
1424         .get_clockgating_state = nv_common_get_clockgating_state,
1425 };