	dma_addr_t			l2ptr_dma;
};
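+/* Stage-1 page table configuration (ASID, TTBR, TCR, MAIR) held in one CD */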
+struct arm_smmu_ctx_desc {
+	u16				asid;
+	u64				ttbr;
+	u64				tcr;
+	u64				mair;
+};
+
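+/* CPU and DMA addresses of the context descriptor table */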
+struct arm_smmu_ctx_desc_cfg {
+	__le64				*cdtab;
+	dma_addr_t			cdtab_dma;
+};
+
struct arm_smmu_s1_cfg {
-	__le64				*cdptr;
-	dma_addr_t			cdptr_dma;
-
-	struct arm_smmu_ctx_desc {
-		u16	asid;
-		u64	ttbr;
-		u64	tcr;
-		u64	mair;
-	} cd;
+	struct arm_smmu_ctx_desc_cfg	cdcfg;
+	struct arm_smmu_ctx_desc	cd;
};
struct arm_smmu_s2_cfg {
				    struct arm_smmu_s1_cfg *cfg)
{
	u64 val;
+	__le64 *cdptr = cfg->cdcfg.cdtab;
	/*
	 * We don't need to issue any invalidation here, as we'll invalidate
	if (smmu->features & ARM_SMMU_FEAT_STALL_FORCE)
		val |= CTXDESC_CD_0_S;
-	cfg->cdptr[0] = cpu_to_le64(val);
+	cdptr[0] = cpu_to_le64(val);
	val = cfg->cd.ttbr & CTXDESC_CD_1_TTB0_MASK;
-	cfg->cdptr[1] = cpu_to_le64(val);
+	cdptr[1] = cpu_to_le64(val);
-	cfg->cdptr[3] = cpu_to_le64(cfg->cd.mair);
+	cdptr[3] = cpu_to_le64(cfg->cd.mair);
}
/* Stream table manipulation functions */
		   !(smmu->features & ARM_SMMU_FEAT_STALL_FORCE))
			dst[1] |= cpu_to_le64(STRTAB_STE_1_S1STALLD);
-		val |= (s1_cfg->cdptr_dma & STRTAB_STE_0_S1CTXPTR_MASK) |
+		val |= (s1_cfg->cdcfg.cdtab_dma & STRTAB_STE_0_S1CTXPTR_MASK) |
			FIELD_PREP(STRTAB_STE_0_CFG, STRTAB_STE_0_CFG_S1_TRANS);
	}
	if (smmu_domain->stage == ARM_SMMU_DOMAIN_S1) {
		struct arm_smmu_s1_cfg *cfg = &smmu_domain->s1_cfg;
-		if (cfg->cdptr) {
+		if (cfg->cdcfg.cdtab) {
			dmam_free_coherent(smmu_domain->smmu->dev,
					   CTXDESC_CD_DWORDS << 3,
-					   cfg->cdptr,
-					   cfg->cdptr_dma);
+					   cfg->cdcfg.cdtab,
+					   cfg->cdcfg.cdtab_dma);
			arm_smmu_bitmap_free(smmu->asid_map, cfg->cd.asid);
		}
	if (asid < 0)
		return asid;
-	cfg->cdptr = dmam_alloc_coherent(smmu->dev, CTXDESC_CD_DWORDS << 3,
-					 &cfg->cdptr_dma, GFP_KERNEL);
-	if (!cfg->cdptr) {
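+	/* The CD table currently holds a single entry */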
+	cfg->cdcfg.cdtab = dmam_alloc_coherent(smmu->dev,
+					       CTXDESC_CD_DWORDS << 3,
+					       &cfg->cdcfg.cdtab_dma,
+					       GFP_KERNEL);
+	if (!cfg->cdcfg.cdtab) {
		dev_warn(smmu->dev, "failed to allocate context descriptor\n");
		ret = -ENOMEM;
		goto out_free_asid;