drivers/crypto/qat/qat_dh895xcc/adf_dh895xcc_hw_data.c
// SPDX-License-Identifier: (BSD-3-Clause OR GPL-2.0-only)
/* Copyright(c) 2014 - 2020 Intel Corporation */
#include <adf_accel_devices.h>
#include <adf_pf2vf_msg.h>
#include <adf_common_drv.h>
#include <adf_gen2_hw_data.h>
#include "adf_dh895xcc_hw_data.h"
#include "icp_qat_hw.h"

/* Worker thread to service arbiter mappings */
static const u32 thrd_to_arb_map[ADF_DH895XCC_MAX_ACCELENGINES] = {
	0x12222AAA, 0x11666666, 0x12222AAA, 0x11666666,
	0x12222AAA, 0x11222222, 0x12222AAA, 0x11222222,
	0x12222AAA, 0x11222222, 0x12222AAA, 0x11222222
};

static struct adf_hw_device_class dh895xcc_class = {
	.name = ADF_DH895XCC_DEVICE_NAME,
	.type = DEV_DH895XCC,
	.instances = 0
};

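/* Derive the enabled accelerator mask from the fuse register */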
static u32 get_accel_mask(struct adf_hw_device_data *self)
{
	u32 fuses = self->fuses;

	return ~fuses >> ADF_DH895XCC_ACCELERATORS_REG_OFFSET &
			 ADF_DH895XCC_ACCELERATORS_MASK;
}

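/* Derive the enabled accel engine mask from the fuse register */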
static u32 get_ae_mask(struct adf_hw_device_data *self)
{
	u32 fuses = self->fuses;

	return ~fuses & ADF_DH895XCC_ACCELENGINES_MASK;
}

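/* Count the accelerators enabled in the accelerator mask */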
static u32 get_num_accels(struct adf_hw_device_data *self)
{
	u32 i, ctr = 0;

	if (!self || !self->accel_mask)
		return 0;

	for (i = 0; i < ADF_DH895XCC_MAX_ACCELERATORS; i++) {
		if (self->accel_mask & (1 << i))
			ctr++;
	}
	return ctr;
}

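/* Count the accel engines enabled in the engine mask */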
static u32 get_num_aes(struct adf_hw_device_data *self)
{
	u32 i, ctr = 0;

	if (!self || !self->ae_mask)
		return 0;

	for (i = 0; i < ADF_DH895XCC_MAX_ACCELENGINES; i++) {
		if (self->ae_mask & (1 << i))
			ctr++;
	}
	return ctr;
}

static u32 get_misc_bar_id(struct adf_hw_device_data *self)
{
	return ADF_DH895XCC_PMISC_BAR;
}

static u32 get_etr_bar_id(struct adf_hw_device_data *self)
{
	return ADF_DH895XCC_ETR_BAR;
}

static u32 get_sram_bar_id(struct adf_hw_device_data *self)
{
	return ADF_DH895XCC_SRAM_BAR;
}

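/* Report crypto capabilities, minus any service disabled by the legacy fuses */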
static u32 get_accel_cap(struct adf_accel_dev *accel_dev)
{
	struct pci_dev *pdev = accel_dev->accel_pci_dev.pci_dev;
	u32 capabilities;
	u32 legfuses;

	capabilities = ICP_ACCEL_CAPABILITIES_CRYPTO_SYMMETRIC |
		       ICP_ACCEL_CAPABILITIES_CRYPTO_ASYMMETRIC |
		       ICP_ACCEL_CAPABILITIES_AUTHENTICATION;

	/* Read accelerator capabilities mask */
	pci_read_config_dword(pdev, ADF_DEVICE_LEGFUSE_OFFSET, &legfuses);

	if (legfuses & ICP_ACCEL_MASK_CIPHER_SLICE)
		capabilities &= ~ICP_ACCEL_CAPABILITIES_CRYPTO_SYMMETRIC;
	if (legfuses & ICP_ACCEL_MASK_PKE_SLICE)
		capabilities &= ~ICP_ACCEL_CAPABILITIES_CRYPTO_ASYMMETRIC;
	if (legfuses & ICP_ACCEL_MASK_AUTH_SLICE)
		capabilities &= ~ICP_ACCEL_CAPABILITIES_AUTHENTICATION;

	return capabilities;
}

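/* Translate the SKU field of the fuse register into a device SKU */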
static enum dev_sku_info get_sku(struct adf_hw_device_data *self)
{
	int sku = (self->fuses & ADF_DH895XCC_FUSECTL_SKU_MASK)
	    >> ADF_DH895XCC_FUSECTL_SKU_SHIFT;

	switch (sku) {
	case ADF_DH895XCC_FUSECTL_SKU_1:
		return DEV_SKU_1;
	case ADF_DH895XCC_FUSECTL_SKU_2:
		return DEV_SKU_2;
	case ADF_DH895XCC_FUSECTL_SKU_3:
		return DEV_SKU_3;
	case ADF_DH895XCC_FUSECTL_SKU_4:
		return DEV_SKU_4;
	default:
		return DEV_SKU_UNKNOWN;
	}
}

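/* Return the worker thread to arbiter mapping table */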
static const u32 *adf_get_arbiter_mapping(void)
{
	return thrd_to_arb_map;
}

static u32 get_pf2vf_offset(u32 i)
{
	return ADF_DH895XCC_PF2VF_OFFSET(i);
}

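/* Enable error detection and correction for AEs and shared memory */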
static void adf_enable_error_correction(struct adf_accel_dev *accel_dev)
{
	struct adf_hw_device_data *hw_device = accel_dev->hw_device;
	struct adf_bar *misc_bar = &GET_BARS(accel_dev)[ADF_DH895XCC_PMISC_BAR];
	unsigned long accel_mask = hw_device->accel_mask;
	unsigned long ae_mask = hw_device->ae_mask;
	void __iomem *csr = misc_bar->virt_addr;
	unsigned int val, i;

	/* Enable Accel Engine error detection & correction */
	for_each_set_bit(i, &ae_mask, GET_MAX_ACCELENGINES(accel_dev)) {
		val = ADF_CSR_RD(csr, ADF_DH895XCC_AE_CTX_ENABLES(i));
		val |= ADF_DH895XCC_ENABLE_AE_ECC_ERR;
		ADF_CSR_WR(csr, ADF_DH895XCC_AE_CTX_ENABLES(i), val);
		val = ADF_CSR_RD(csr, ADF_DH895XCC_AE_MISC_CONTROL(i));
		val |= ADF_DH895XCC_ENABLE_AE_ECC_PARITY_CORR;
		ADF_CSR_WR(csr, ADF_DH895XCC_AE_MISC_CONTROL(i), val);
	}

	/* Enable shared memory error detection & correction */
	for_each_set_bit(i, &accel_mask, ADF_DH895XCC_MAX_ACCELERATORS) {
		val = ADF_CSR_RD(csr, ADF_DH895XCC_UERRSSMSH(i));
		val |= ADF_DH895XCC_ERRSSMSH_EN;
		ADF_CSR_WR(csr, ADF_DH895XCC_UERRSSMSH(i), val);
		val = ADF_CSR_RD(csr, ADF_DH895XCC_CERRSSMSH(i));
		val |= ADF_DH895XCC_ERRSSMSH_EN;
		ADF_CSR_WR(csr, ADF_DH895XCC_CERRSSMSH(i), val);
	}
}

static void adf_enable_ints(struct adf_accel_dev *accel_dev)
{
	void __iomem *addr;

	addr = (&GET_BARS(accel_dev)[ADF_DH895XCC_PMISC_BAR])->virt_addr;

	/* Enable bundle and misc interrupts */
	ADF_CSR_WR(addr, ADF_DH895XCC_SMIAPF0_MASK_OFFSET,
		   accel_dev->pf.vf_info ? 0 :
			BIT_ULL(GET_MAX_BANKS(accel_dev)) - 1);
	ADF_CSR_WR(addr, ADF_DH895XCC_SMIAPF1_MASK_OFFSET,
		   ADF_DH895XCC_SMIA1_MASK);
}

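/* Initialize the lock that serializes VF2PF interrupt handling */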
static int adf_enable_pf2vf_comms(struct adf_accel_dev *accel_dev)
{
	spin_lock_init(&accel_dev->pf.vf2pf_ints_lock);

	return 0;
}

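/* Configure the AE to function mapping used when SR-IOV is toggled */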
static void configure_iov_threads(struct adf_accel_dev *accel_dev, bool enable)
{
	adf_gen2_cfg_iov_thds(accel_dev, enable,
			      ADF_DH895XCC_AE2FUNC_MAP_GRP_A_NUM_REGS,
			      ADF_DH895XCC_AE2FUNC_MAP_GRP_B_NUM_REGS);
}

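/* Fill in the DH895xCC specific constants and operations for the device */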
void adf_init_hw_data_dh895xcc(struct adf_hw_device_data *hw_data)
{
	hw_data->dev_class = &dh895xcc_class;
	hw_data->instance_id = dh895xcc_class.instances++;
	hw_data->num_banks = ADF_DH895XCC_ETR_MAX_BANKS;
	hw_data->num_rings_per_bank = ADF_ETR_MAX_RINGS_PER_BANK;
	hw_data->num_accel = ADF_DH895XCC_MAX_ACCELERATORS;
	hw_data->num_logical_accel = 1;
	hw_data->num_engines = ADF_DH895XCC_MAX_ACCELENGINES;
	hw_data->tx_rx_gap = ADF_DH895XCC_RX_RINGS_OFFSET;
	hw_data->tx_rings_mask = ADF_DH895XCC_TX_RINGS_MASK;
	hw_data->alloc_irq = adf_isr_resource_alloc;
	hw_data->free_irq = adf_isr_resource_free;
	hw_data->enable_error_correction = adf_enable_error_correction;
	hw_data->get_accel_mask = get_accel_mask;
	hw_data->get_ae_mask = get_ae_mask;
	hw_data->get_accel_cap = get_accel_cap;
	hw_data->get_num_accels = get_num_accels;
	hw_data->get_num_aes = get_num_aes;
	hw_data->get_etr_bar_id = get_etr_bar_id;
	hw_data->get_misc_bar_id = get_misc_bar_id;
	hw_data->get_admin_info = adf_gen2_get_admin_info;
	hw_data->get_arb_info = adf_gen2_get_arb_info;
	hw_data->get_sram_bar_id = get_sram_bar_id;
	hw_data->get_sku = get_sku;
	hw_data->fw_name = ADF_DH895XCC_FW;
	hw_data->fw_mmp_name = ADF_DH895XCC_MMP;
	hw_data->init_admin_comms = adf_init_admin_comms;
	hw_data->exit_admin_comms = adf_exit_admin_comms;
	hw_data->configure_iov_threads = configure_iov_threads;
	hw_data->send_admin_init = adf_send_admin_init;
	hw_data->init_arb = adf_init_arb;
	hw_data->exit_arb = adf_exit_arb;
	hw_data->get_arb_mapping = adf_get_arbiter_mapping;
	hw_data->enable_ints = adf_enable_ints;
	hw_data->reset_device = adf_reset_sbr;
	hw_data->get_pf2vf_offset = get_pf2vf_offset;
	hw_data->enable_pfvf_comms = adf_enable_pf2vf_comms;
	hw_data->disable_iov = adf_disable_sriov;
	hw_data->min_iov_compat_ver = ADF_PFVF_COMPAT_THIS_VERSION;

	adf_gen2_init_hw_csr_ops(&hw_data->csr_ops);
}

void adf_clean_hw_data_dh895xcc(struct adf_hw_device_data *hw_data)
{
	hw_data->dev_class->instances--;
}