1 // SPDX-License-Identifier: GPL-2.0-only
3 * skl-topology.c - Implements Platform component ALSA controls/widget
6 * Copyright (C) 2014-2015 Intel Corp
7 * Author: Jeeja KP <jeeja.kp@intel.com>
8 * ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
11 #include <linux/slab.h>
12 #include <linux/types.h>
13 #include <linux/firmware.h>
14 #include <linux/uuid.h>
15 #include <sound/soc.h>
16 #include <sound/soc-topology.h>
17 #include <uapi/sound/snd_sst_tokens.h>
18 #include <uapi/sound/skl-tplg-interface.h>
19 #include "skl-sst-dsp.h"
20 #include "skl-sst-ipc.h"
21 #include "skl-topology.h"
23 #include "../common/sst-dsp.h"
24 #include "../common/sst-dsp-priv.h"
/* Bit masks selecting which pcm parameter fixups a module requests. */
26 #define SKL_CH_FIXUP_MASK (1 << 0)
27 #define SKL_RATE_FIXUP_MASK (1 << 1)
28 #define SKL_FMT_FIXUP_MASK (1 << 2)
29 #define SKL_IN_DIR_BIT_MASK BIT(0)
30 #define SKL_PIN_COUNT_MASK GENMASK(7, 4)
/*
 * Candidate DMIC channel selections per channel count (mono, stereo,
 * trio, quatro). NOTE(review): initializer bodies are only partially
 * visible in this view.
 */
32 static const int mic_mono_list[] = {
35 static const int mic_stereo_list[][SKL_CH_STEREO] = {
36 {0, 1}, {0, 2}, {0, 3}, {1, 2}, {1, 3}, {2, 3},
38 static const int mic_trio_list[][SKL_CH_TRIO] = {
39 {0, 1, 2}, {0, 1, 3}, {0, 2, 3}, {1, 2, 3},
41 static const int mic_quatro_list[][SKL_CH_QUATRO] = {
/* True when channel count, rate and bit width all match the pipe config. */
45 #define CHECK_HW_PARAMS(ch, freq, bps, prm_ch, prm_freq, prm_bps) \
46 ((ch == prm_ch) && (bps == prm_bps) && (freq == prm_freq))
/*
 * Increment the D0i3 use counter matching the module's declared capability.
 * NOTE(review): the switch body is only partially visible here.
 */
48 void skl_tplg_d0i3_get(struct skl *skl, enum d0i3_capability caps)
50 struct skl_d0i3_data *d0i3 = &skl->skl_sst->d0i3;
57 case SKL_D0I3_STREAMING:
61 case SKL_D0I3_NON_STREAMING:
62 d0i3->non_streaming++;
/*
 * Decrement the D0i3 use counter matching the module's declared capability;
 * counterpart of skl_tplg_d0i3_get().
 */
67 void skl_tplg_d0i3_put(struct skl *skl, enum d0i3_capability caps)
69 struct skl_d0i3_data *d0i3 = &skl->skl_sst->d0i3;
76 case SKL_D0I3_STREAMING:
80 case SKL_D0I3_NON_STREAMING:
81 d0i3->non_streaming--;
/*
 * SKL DSP driver modelling uses only a few DAPM widget types; the rest are
 * ignored. This helper checks whether a widget belongs to the given device
 * and is of a type the SKL driver handles (listed widget types are the
 * ones rejected — NOTE(review): return statements are not visible here).
 */
90 static int is_skl_dsp_widget_type(struct snd_soc_dapm_widget *w,
93 if (w->dapm->dev != dev)
97 case snd_soc_dapm_dai_link:
98 case snd_soc_dapm_dai_in:
99 case snd_soc_dapm_aif_in:
100 case snd_soc_dapm_aif_out:
101 case snd_soc_dapm_dai_out:
102 case snd_soc_dapm_switch:
103 case snd_soc_dapm_output:
104 case snd_soc_dapm_mux:
/*
 * Each pipeline needs memory to be allocated. Check whether the free pool
 * (resource.max_mem - resource.mem) can hold this pipe's memory_pages;
 * log the offending module id/instance when it cannot.
 */
116 static bool skl_is_pipe_mem_avail(struct skl *skl,
117 struct skl_module_cfg *mconfig)
119 struct skl_sst *ctx = skl->skl_sst;
121 if (skl->resource.mem + mconfig->pipe->memory_pages >
122 skl->resource.max_mem) {
124 "%s: module_id %d instance %d\n", __func__,
125 mconfig->id.module_id,
126 mconfig->id.instance_id);
128 "exceeds ppl memory available %d mem %d\n",
129 skl->resource.max_mem, skl->resource.mem);
/*
 * Add the pipe's pages to the mem pool usage counter; freed when the pipe
 * is deleted. Note: the DSP does the actual memory management, the driver
 * only keeps a running total for accounting.
 */
141 static void skl_tplg_alloc_pipe_mem(struct skl *skl,
142 struct skl_module_cfg *mconfig)
144 skl->resource.mem += mconfig->pipe->memory_pages;
/*
 * Pipelines need DSP CPU resources for computation, quantified in MCPS
 * (Million Clocks Per Second) required per module/pipe. Check that the
 * module's resource descriptor (selected by res_idx) fits in the
 * remaining mcps budget; log module id/instance on failure.
 */
155 static bool skl_is_pipe_mcps_avail(struct skl *skl,
156 struct skl_module_cfg *mconfig)
158 struct skl_sst *ctx = skl->skl_sst;
159 u8 res_idx = mconfig->res_idx;
160 struct skl_module_res *res = &mconfig->module->resources[res_idx];
162 if (skl->resource.mcps + res->cps > skl->resource.max_mcps) {
164 "%s: module_id %d instance %d\n", __func__,
165 mconfig->id.module_id, mconfig->id.instance_id);
167 "exceeds ppl mcps available %d > mem %d\n",
168 skl->resource.max_mcps, skl->resource.mcps);
/* Charge this module's cps against the global mcps usage counter. */
175 static void skl_tplg_alloc_pipe_mcps(struct skl *skl,
176 struct skl_module_cfg *mconfig)
178 u8 res_idx = mconfig->res_idx;
179 struct skl_module_res *res = &mconfig->module->resources[res_idx];
181 skl->resource.mcps += res->cps;
/* Return this module's cps to the mcps pool when tearing down. */
188 skl_tplg_free_pipe_mcps(struct skl *skl, struct skl_module_cfg *mconfig)
190 u8 res_idx = mconfig->res_idx;
191 struct skl_module_res *res = &mconfig->module->resources[res_idx];
193 skl->resource.mcps -= res->cps;
/* Return the pipe's pages to the memory pool when tearing down. */
200 skl_tplg_free_pipe_mem(struct skl *skl, struct skl_module_cfg *mconfig)
202 skl->resource.mem -= mconfig->pipe->memory_pages;
/*
 * Debug helper: dump input/output pin-0 audio formats (channels, rate,
 * channel config, valid bit depth) of a module config via dev_dbg.
 */
206 static void skl_dump_mconfig(struct skl_sst *ctx,
207 struct skl_module_cfg *mcfg)
209 struct skl_module_iface *iface = &mcfg->module->formats[0];
211 dev_dbg(ctx->dev, "Dumping config\n");
212 dev_dbg(ctx->dev, "Input Format:\n");
213 dev_dbg(ctx->dev, "channels = %d\n", iface->inputs[0].fmt.channels);
214 dev_dbg(ctx->dev, "s_freq = %d\n", iface->inputs[0].fmt.s_freq);
215 dev_dbg(ctx->dev, "ch_cfg = %d\n", iface->inputs[0].fmt.ch_cfg);
216 dev_dbg(ctx->dev, "valid bit depth = %d\n",
217 iface->inputs[0].fmt.valid_bit_depth);
218 dev_dbg(ctx->dev, "Output Format:\n");
219 dev_dbg(ctx->dev, "channels = %d\n", iface->outputs[0].fmt.channels);
220 dev_dbg(ctx->dev, "s_freq = %d\n", iface->outputs[0].fmt.s_freq);
221 dev_dbg(ctx->dev, "valid bit depth = %d\n",
222 iface->outputs[0].fmt.valid_bit_depth);
223 dev_dbg(ctx->dev, "ch_cfg = %d\n", iface->outputs[0].fmt.ch_cfg);
/*
 * Rebuild fmt->ch_map: assign one 4-bit slot per channel starting from
 * slot 0, leaving unused nibbles as 0xF (invalid/unused).
 */
226 static void skl_tplg_update_chmap(struct skl_module_fmt *fmt, int chs)
228 int slot_map = 0xFFFFFFFF;
232 for (i = 0; i < chs; i++) {
/*
 * For 2 channels with starting slot 0, the resulting slot map
 * looks like 0xFFFFFF10.
 */
237 slot_map &= (~(0xF << (4 * i)) | (start_slot << (4 * i)));
240 fmt->ch_map = slot_map;
/*
 * Apply the requested fixups (rate, channel count, sample format) from the
 * pipe params onto a module format. Channel fixup also refreshes ch_map.
 */
243 static void skl_tplg_update_params(struct skl_module_fmt *fmt,
244 struct skl_pipe_params *params, int fixup)
246 if (fixup & SKL_RATE_FIXUP_MASK)
247 fmt->s_freq = params->s_freq;
248 if (fixup & SKL_CH_FIXUP_MASK) {
249 fmt->channels = params->ch;
250 skl_tplg_update_chmap(fmt, fmt->channels);
252 if (fixup & SKL_FMT_FIXUP_MASK) {
253 fmt->valid_bit_depth = skl_get_bit_depth(params->s_fmt);
/*
 * 16 bit lives in a 16 bit container whereas 24 bit uses a 32 bit
 * container, so update the container bit depth accordingly.
 */
259 switch (fmt->valid_bit_depth) {
260 case SKL_DEPTH_16BIT:
261 fmt->bit_depth = fmt->valid_bit_depth;
265 fmt->bit_depth = SKL_DEPTH_32BIT;
/*
 * A pipeline may contain modules that change pcm parameters (SRC, channel
 * converter, format converter). The topology supplies a fixup mask telling
 * the driver which fixups to apply. The FE pcm hw_params is the
 * source/target format; likewise for BE with its hw_params. Based on
 * FE/BE pipeline and stream direction we compute separate input and
 * output fixup masks and apply them to pin-0 formats.
 */
284 static void skl_tplg_update_params_fixup(struct skl_module_cfg *m_cfg,
285 struct skl_pipe_params *params, bool is_fe)
287 int in_fixup, out_fixup;
288 struct skl_module_fmt *in_fmt, *out_fmt;
/* Fixups are applied to pin 0 only */
291 in_fmt = &m_cfg->module->formats[0].inputs[0].fmt;
292 out_fmt = &m_cfg->module->formats[0].outputs[0].fmt;
294 if (params->stream == SNDRV_PCM_STREAM_PLAYBACK) {
/* NOTE(review): FE/BE branch structure only partially visible */
296 in_fixup = m_cfg->params_fixup;
297 out_fixup = (~m_cfg->converter) &
300 out_fixup = m_cfg->params_fixup;
301 in_fixup = (~m_cfg->converter) &
306 out_fixup = m_cfg->params_fixup;
307 in_fixup = (~m_cfg->converter) &
310 in_fixup = m_cfg->params_fixup;
311 out_fixup = (~m_cfg->converter) &
316 skl_tplg_update_params(in_fmt, params, in_fixup);
317 skl_tplg_update_params(out_fmt, params, out_fixup);
/*
 * A module needs input and output buffers sized from the pcm params, so
 * after params are computed recalculate ibs/obs as
 * frames-per-ms * channels * bytes-per-sample.
 */
325 static void skl_tplg_update_buffer_size(struct skl_sst *ctx,
326 struct skl_module_cfg *mcfg)
329 struct skl_module_fmt *in_fmt, *out_fmt;
330 struct skl_module_res *res;
/*
 * Since fixups are applied to pin 0 only, ibs/obs need
 * updating for pin 0 only.
 */
335 res = &mcfg->module->resources[0];
336 in_fmt = &mcfg->module->formats[0].inputs[0].fmt;
337 out_fmt = &mcfg->module->formats[0].outputs[0].fmt;
/* SRC modules are special-cased — body not visible in this view */
339 if (mcfg->m_type == SKL_MODULE_TYPE_SRCINT)
342 res->ibs = DIV_ROUND_UP(in_fmt->s_freq, 1000) *
343 in_fmt->channels * (in_fmt->bit_depth >> 3) *
346 res->obs = DIV_ROUND_UP(out_fmt->s_freq, 1000) *
347 out_fmt->channels * (out_fmt->bit_depth >> 3) *
/* Map an SKL device type to its NHLT device type for endpoint lookup. */
351 static u8 skl_tplg_be_dev_type(int dev_type)
357 ret = NHLT_DEVICE_BT;
360 case SKL_DEVICE_DMIC:
361 ret = NHLT_DEVICE_DMIC;
365 ret = NHLT_DEVICE_I2S;
369 ret = NHLT_DEVICE_INVALID;
/*
 * If a BE copier has no capability blob from topology, look one up in the
 * NHLT table using the virtual bus id, link type and the module's default
 * pin-0 format (rate, container depth, channels) for the relevant
 * direction, then attach it as the module's formats_config.
 */
376 static int skl_tplg_update_be_blob(struct snd_soc_dapm_widget *w,
379 struct skl_module_cfg *m_cfg = w->priv;
381 u32 ch, s_freq, s_fmt;
382 struct nhlt_specific_cfg *cfg;
383 struct skl *skl = get_skl_ctx(ctx->dev);
384 u8 dev_type = skl_tplg_be_dev_type(m_cfg->dev_type);
385 int fmt_idx = m_cfg->fmt_idx;
386 struct skl_module_iface *m_iface = &m_cfg->module->formats[fmt_idx];
/* check if we already have a blob */
389 if (m_cfg->formats_config.caps_size > 0)
392 dev_dbg(ctx->dev, "Applying default cfg blob\n");
393 switch (m_cfg->dev_type) {
394 case SKL_DEVICE_DMIC:
395 link_type = NHLT_LINK_DMIC;
396 dir = SNDRV_PCM_STREAM_CAPTURE;
397 s_freq = m_iface->inputs[0].fmt.s_freq;
398 s_fmt = m_iface->inputs[0].fmt.bit_depth;
399 ch = m_iface->inputs[0].fmt.channels;
/* NOTE(review): presumably the SSP/I2S case — label not visible */
403 link_type = NHLT_LINK_SSP;
404 if (m_cfg->hw_conn_type == SKL_CONN_SOURCE) {
405 dir = SNDRV_PCM_STREAM_PLAYBACK;
406 s_freq = m_iface->outputs[0].fmt.s_freq;
407 s_fmt = m_iface->outputs[0].fmt.bit_depth;
408 ch = m_iface->outputs[0].fmt.channels;
410 dir = SNDRV_PCM_STREAM_CAPTURE;
411 s_freq = m_iface->inputs[0].fmt.s_freq;
412 s_fmt = m_iface->inputs[0].fmt.bit_depth;
413 ch = m_iface->inputs[0].fmt.channels;
/* update the blob based on virtual bus_id and default params */
422 cfg = skl_get_ep_blob(skl, m_cfg->vbus_id, link_type,
423 s_fmt, ch, s_freq, dir, dev_type);
425 m_cfg->formats_config.caps_size = cfg->size;
426 m_cfg->formats_config.caps = (u32 *) &cfg->caps;
428 dev_err(ctx->dev, "Blob NULL for id %x type %d dirn %d\n",
429 m_cfg->vbus_id, link_type, dir);
430 dev_err(ctx->dev, "PCM: ch %d, freq %d, fmt %d\n",
/*
 * Apply params fixups (if any requested via params_fixup) to a widget's
 * module config based on its pipe params and FE/BE connection type, then
 * recompute buffer sizes; dumps the config before and after for debug.
 */
438 static void skl_tplg_update_module_params(struct snd_soc_dapm_widget *w,
441 struct skl_module_cfg *m_cfg = w->priv;
442 struct skl_pipe_params *params = m_cfg->pipe->p_params;
443 int p_conn_type = m_cfg->pipe->conn_type;
/* nothing to do when the topology requested no fixups */
446 if (!m_cfg->params_fixup)
449 dev_dbg(ctx->dev, "Mconfig for widget=%s BEFORE updation\n",
452 skl_dump_mconfig(ctx, m_cfg);
454 if (p_conn_type == SKL_PIPE_CONN_TYPE_FE)
459 skl_tplg_update_params_fixup(m_cfg, params, is_fe);
460 skl_tplg_update_buffer_size(ctx, m_cfg);
462 dev_dbg(ctx->dev, "Mconfig for widget=%s AFTER updation\n",
465 skl_dump_mconfig(ctx, m_cfg);
/*
 * Some modules take multiple params set from user controls that must be
 * applied after the module is initialized. If the SKL_PARAM_SET flag is
 * set, send the formats_config blob and any TLV kcontrol algo params to
 * the DSP via skl_set_module_params().
 */
473 static int skl_tplg_set_module_params(struct snd_soc_dapm_widget *w,
477 struct skl_module_cfg *mconfig = w->priv;
478 const struct snd_kcontrol_new *k;
479 struct soc_bytes_ext *sb;
480 struct skl_algo_data *bc;
481 struct skl_specific_cfg *sp_cfg;
483 if (mconfig->formats_config.caps_size > 0 &&
484 mconfig->formats_config.set_params == SKL_PARAM_SET) {
485 sp_cfg = &mconfig->formats_config;
486 ret = skl_set_module_params(ctx, sp_cfg->caps,
488 sp_cfg->param_id, mconfig);
/* walk the widget's TLV byte controls and push SKL_PARAM_SET params */
493 for (i = 0; i < w->num_kcontrols; i++) {
494 k = &w->kcontrol_news[i];
495 if (k->access & SNDRV_CTL_ELEM_ACCESS_TLV_CALLBACK) {
496 sb = (void *) k->private_value;
497 bc = (struct skl_algo_data *)sb->dobj.private;
499 if (bc->set_params == SKL_PARAM_SET) {
500 ret = skl_set_module_params(ctx,
501 (u32 *)bc->params, bc->size,
502 bc->param_id, mconfig);
/*
 * Some module params come from user controls and are required at module
 * init time, identified by the SKL_PARAM_INIT flag. For such controls,
 * point the module's formats_config at the control's params so they are
 * sent as part of module init rather than afterwards.
 */
518 static int skl_tplg_set_module_init_data(struct snd_soc_dapm_widget *w)
520 const struct snd_kcontrol_new *k;
521 struct soc_bytes_ext *sb;
522 struct skl_algo_data *bc;
523 struct skl_module_cfg *mconfig = w->priv;
526 for (i = 0; i < w->num_kcontrols; i++) {
527 k = &w->kcontrol_news[i];
528 if (k->access & SNDRV_CTL_ELEM_ACCESS_TLV_CALLBACK) {
529 sb = (struct soc_bytes_ext *)k->private_value;
530 bc = (struct skl_algo_data *)sb->dobj.private;
532 if (bc->set_params != SKL_PARAM_INIT)
535 mconfig->formats_config.caps = (u32 *)bc->params;
536 mconfig->formats_config.caps_size = bc->size;
/*
 * Prepare the DMA for gateway copier modules: host DMA for HDA-host
 * devices, link DMA for HDA-link devices; other device types need no prep.
 */
545 static int skl_tplg_module_prepare(struct skl_sst *ctx, struct skl_pipe *pipe,
546 struct snd_soc_dapm_widget *w, struct skl_module_cfg *mcfg)
548 switch (mcfg->dev_type) {
549 case SKL_DEVICE_HDAHOST:
550 return skl_pcm_host_dma_prepare(ctx->dev, pipe->p_params);
552 case SKL_DEVICE_HDALINK:
553 return skl_pcm_link_dma_prepare(ctx->dev, pipe->p_params);
/*
 * Instantiate every module of a pipe in the DSP: for each widget in the
 * pipe's w_list, verify the module id is populated, pick the fmt/res
 * indices for the current pipe config, check mcps, load the module image
 * if loadable, prepare DMA, update BE blob and params, allocate a private
 * instance id, wake the target core, then issue INIT_MODULE via
 * skl_init_module() and finally push SKL_PARAM_SET params.
 */
565 skl_tplg_init_pipe_modules(struct skl *skl, struct skl_pipe *pipe)
567 struct skl_pipe_module *w_module;
568 struct snd_soc_dapm_widget *w;
569 struct skl_module_cfg *mconfig;
570 struct skl_sst *ctx = skl->skl_sst;
574 list_for_each_entry(w_module, &pipe->w_list, node) {
/* check if module ids are populated */
580 if (mconfig->id.module_id < 0) {
581 dev_err(skl->skl_sst->dev,
582 "module %pUL id not populated\n",
583 (uuid_le *)mconfig->guid);
587 cfg_idx = mconfig->pipe->cur_config_idx;
588 mconfig->fmt_idx = mconfig->mod_cfg[cfg_idx].fmt_idx;
589 mconfig->res_idx = mconfig->mod_cfg[cfg_idx].res_idx;
/* check resource available */
592 if (!skl_is_pipe_mcps_avail(skl, mconfig))
595 if (mconfig->module->loadable && ctx->dsp->fw_ops.load_mod) {
596 ret = ctx->dsp->fw_ops.load_mod(ctx->dsp,
597 mconfig->id.module_id, mconfig->guid);
601 mconfig->m_state = SKL_MODULE_LOADED;
/* prepare the DMA if the module is gateway cpr */
605 ret = skl_tplg_module_prepare(ctx, pipe, w, mconfig);
/* update blob if blob is null for be with default value */
610 skl_tplg_update_be_blob(w, ctx);
/*
 * apply fix/conversion to module params based on
 * the pipe's FE/BE connection and direction
 */
616 skl_tplg_update_module_params(w, ctx);
617 uuid_mod = (uuid_le *)mconfig->guid;
618 mconfig->id.pvt_id = skl_get_pvt_id(ctx, uuid_mod,
619 mconfig->id.instance_id);
620 if (mconfig->id.pvt_id < 0)
622 skl_tplg_set_module_init_data(w);
624 ret = skl_dsp_get_core(ctx->dsp, mconfig->core_id);
626 dev_err(ctx->dev, "Failed to wake up core %d ret=%d\n",
627 mconfig->core_id, ret);
631 ret = skl_init_module(ctx, mconfig);
/* on init failure, release the private id before bailing */
633 skl_put_pvt_id(ctx, uuid_mod, &mconfig->id.pvt_id);
636 skl_tplg_alloc_pipe_mcps(skl, mconfig);
637 ret = skl_tplg_set_module_params(w, ctx);
/* error path: put the core back to sleep */
644 skl_dsp_put_core(ctx->dsp, mconfig->core_id);
/*
 * Unload every loadable module of a pipe that got past UNINIT state,
 * release its private instance id, and put its core to sleep. Core sleep
 * failures are logged but do not abort the loop.
 */
648 static int skl_tplg_unload_pipe_modules(struct skl_sst *ctx,
649 struct skl_pipe *pipe)
652 struct skl_pipe_module *w_module = NULL;
653 struct skl_module_cfg *mconfig = NULL;
655 list_for_each_entry(w_module, &pipe->w_list, node) {
657 mconfig = w_module->w->priv;
658 uuid_mod = (uuid_le *)mconfig->guid;
660 if (mconfig->module->loadable && ctx->dsp->fw_ops.unload_mod &&
661 mconfig->m_state > SKL_MODULE_UNINIT) {
662 ret = ctx->dsp->fw_ops.unload_mod(ctx->dsp,
663 mconfig->id.module_id);
667 skl_put_pvt_id(ctx, uuid_mod, &mconfig->id.pvt_id);
669 ret = skl_dsp_put_core(ctx->dsp, mconfig->core_id);
/* don't return; continue with other modules */
672 dev_err(ctx->dev, "Failed to sleep core %d ret=%d\n",
673 mconfig->core_id, ret);
/* no modules to unload in this path, so return */
/*
 * Select the pipe format based on pipe type and direction to determine
 * the current config index for the pipeline; the index then selects the
 * proper module resources. Intermediate pipes have a fixed format, so the
 * 0th configuration is used for them. For FE-playback / BE-capture we
 * match against the config's input format, otherwise the output format.
 */
689 skl_tplg_get_pipe_config(struct skl *skl, struct skl_module_cfg *mconfig)
691 struct skl_sst *ctx = skl->skl_sst;
692 struct skl_pipe *pipe = mconfig->pipe;
693 struct skl_pipe_params *params = pipe->p_params;
694 struct skl_path_config *pconfig = &pipe->configs[0];
695 struct skl_pipe_fmt *fmt = NULL;
/* no configs in topology: fall back to index 0 */
699 if (pipe->nr_cfgs == 0) {
700 pipe->cur_config_idx = 0;
704 if (pipe->conn_type == SKL_PIPE_CONN_TYPE_NONE) {
705 dev_dbg(ctx->dev, "No conn_type detected, take 0th config\n");
706 pipe->cur_config_idx = 0;
707 pipe->memory_pages = pconfig->mem_pages;
712 if ((pipe->conn_type == SKL_PIPE_CONN_TYPE_FE &&
713 pipe->direction == SNDRV_PCM_STREAM_PLAYBACK) ||
714 (pipe->conn_type == SKL_PIPE_CONN_TYPE_BE &&
715 pipe->direction == SNDRV_PCM_STREAM_CAPTURE))
718 for (i = 0; i < pipe->nr_cfgs; i++) {
719 pconfig = &pipe->configs[i];
721 fmt = &pconfig->in_fmt;
723 fmt = &pconfig->out_fmt;
725 if (CHECK_HW_PARAMS(params->ch, params->s_freq, params->s_fmt,
726 fmt->channels, fmt->freq, fmt->bps)) {
727 pipe->cur_config_idx = i;
728 pipe->memory_pages = pconfig->mem_pages;
729 dev_dbg(ctx->dev, "Using pipe config: %d\n", i);
/* no config matched the runtime hw params */
735 dev_err(ctx->dev, "Invalid pipe config: %d %d %d for pipe: %d\n",
736 params->ch, params->s_freq, params->s_fmt, pipe->ppl_id);
/*
 * A mixer module represents a pipeline, so in its Pre-PMU event we:
 * - select the pipe config and check mcps/mem resources
 * - create the pipeline in the DSP and account its resources
 * - initialize all modules in the pipe (source to sink)
 * - bind consecutive modules together, then service the deferred-bind
 *   list for any module that was waiting on one of ours.
 */
748 static int skl_tplg_mixer_dapm_pre_pmu_event(struct snd_soc_dapm_widget *w,
752 struct skl_module_cfg *mconfig = w->priv;
753 struct skl_pipe_module *w_module;
754 struct skl_pipe *s_pipe = mconfig->pipe;
755 struct skl_module_cfg *src_module = NULL, *dst_module, *module;
756 struct skl_sst *ctx = skl->skl_sst;
757 struct skl_module_deferred_bind *modules;
759 ret = skl_tplg_get_pipe_config(skl, mconfig);
/* check resource available */
764 if (!skl_is_pipe_mcps_avail(skl, mconfig))
767 if (!skl_is_pipe_mem_avail(skl, mconfig))
/*
 * Create the pipe; its w_list holds modules ordered source to sink.
 */
774 ret = skl_create_pipeline(ctx, mconfig->pipe);
778 skl_tplg_alloc_pipe_mem(skl, mconfig);
779 skl_tplg_alloc_pipe_mcps(skl, mconfig);
/* Init all pipe modules from source to sink */
782 ret = skl_tplg_init_pipe_modules(skl, s_pipe);
/* Bind modules from source to sink */
787 list_for_each_entry(w_module, &s_pipe->w_list, node) {
788 dst_module = w_module->w->priv;
790 if (src_module == NULL) {
791 src_module = dst_module;
795 ret = skl_bind_modules(ctx, src_module, dst_module);
799 src_module = dst_module;
/*
 * When a destination module is initialized, check for it in the
 * deferred bind list; if found, bind the pending pair now.
 */
806 list_for_each_entry(w_module, &s_pipe->w_list, node) {
807 if (list_empty(&skl->bind_list))
810 list_for_each_entry(modules, &skl->bind_list, node) {
811 module = w_module->w->priv;
812 if (modules->dst == module)
813 skl_bind_modules(ctx, modules->src,
/*
 * For KPB modules, rewrite each module-id/instance-id map entry in the
 * bind params with the DSP's private instance id obtained from
 * skl_get_pvt_instance_id_map().
 */
821 static int skl_fill_sink_instance_id(struct skl_sst *ctx, u32 *params,
822 int size, struct skl_module_cfg *mcfg)
826 if (mcfg->m_type == SKL_MODULE_TYPE_KPB) {
827 struct skl_kpb_params *kpb_params =
828 (struct skl_kpb_params *)params;
829 struct skl_mod_inst_map *inst = kpb_params->u.map;
831 for (i = 0; i < kpb_params->num_modules; i++) {
832 pvt_id = skl_get_pvt_instance_id_map(ctx, inst->mod_id,
837 inst->inst_id = pvt_id;
/*
 * Some modules require params to be sent only after the module is bound
 * on all connected pins. The module provider marks such params with
 * SKL_PARAM_BIND; once every in/out pin is in SKL_PIN_BIND_DONE state,
 * send the formats_config blob and any TLV control params (with KPB sink
 * instance ids filled in) to the DSP.
 */
851 static int skl_tplg_set_module_bind_params(struct snd_soc_dapm_widget *w,
852 struct skl_module_cfg *mcfg, struct skl_sst *ctx)
855 struct skl_module_cfg *mconfig = w->priv;
856 const struct snd_kcontrol_new *k;
857 struct soc_bytes_ext *sb;
858 struct skl_algo_data *bc;
859 struct skl_specific_cfg *sp_cfg;
/*
 * check all out/in pins are in bind state;
 * only then set the module params
 */
866 for (i = 0; i < mcfg->module->max_output_pins; i++) {
867 if (mcfg->m_out_pin[i].pin_state != SKL_PIN_BIND_DONE)
871 for (i = 0; i < mcfg->module->max_input_pins; i++) {
872 if (mcfg->m_in_pin[i].pin_state != SKL_PIN_BIND_DONE)
876 if (mconfig->formats_config.caps_size > 0 &&
877 mconfig->formats_config.set_params == SKL_PARAM_BIND) {
878 sp_cfg = &mconfig->formats_config;
879 ret = skl_set_module_params(ctx, sp_cfg->caps,
881 sp_cfg->param_id, mconfig);
886 for (i = 0; i < w->num_kcontrols; i++) {
887 k = &w->kcontrol_news[i];
888 if (k->access & SNDRV_CTL_ELEM_ACCESS_TLV_CALLBACK) {
889 sb = (void *) k->private_value;
890 bc = (struct skl_algo_data *)sb->dobj.private;
892 if (bc->set_params == SKL_PARAM_BIND) {
/* copy so the instance-id rewrite doesn't mutate the control data */
893 params = kmemdup(bc->params, bc->max, GFP_KERNEL);
897 skl_fill_sink_instance_id(ctx, params, bc->max,
900 ret = skl_set_module_params(ctx, params,
901 bc->max, bc->param_id, mconfig);
/* Look up a module id by UUID in the firmware's uuid_list. */
913 static int skl_get_module_id(struct skl_sst *ctx, uuid_le *uuid)
915 struct uuid_module *module;
917 list_for_each_entry(module, &ctx->uuid_list, list) {
918 if (uuid_le_cmp(*uuid, module->uuid) == 0)
/*
 * Convert UUID-keyed KPB bind params from topology into module-id-keyed
 * params: allocate a new table, resolve each UUID to a module id via
 * skl_get_module_id(), then replace the control's params with the
 * converted table (freeing the old devm allocation).
 */
925 static int skl_tplg_find_moduleid_from_uuid(struct skl *skl,
926 const struct snd_kcontrol_new *k)
928 struct soc_bytes_ext *sb = (void *) k->private_value;
929 struct skl_algo_data *bc = (struct skl_algo_data *)sb->dobj.private;
930 struct skl_kpb_params *uuid_params, *params;
931 struct hdac_bus *bus = skl_to_bus(skl);
932 int i, size, module_id;
934 if (bc->set_params == SKL_PARAM_BIND && bc->max) {
935 uuid_params = (struct skl_kpb_params *)bc->params;
936 size = uuid_params->num_modules *
937 sizeof(struct skl_mod_inst_map) +
938 sizeof(uuid_params->num_modules);
940 params = devm_kzalloc(bus->dev, size, GFP_KERNEL);
944 params->num_modules = uuid_params->num_modules;
946 for (i = 0; i < uuid_params->num_modules; i++) {
947 module_id = skl_get_module_id(skl->skl_sst,
948 &uuid_params->u.map_uuid[i].mod_uuid);
/* unknown UUID: drop the partially-built table and bail */
950 devm_kfree(bus->dev, params);
954 params->u.map[i].mod_id = module_id;
955 params->u.map[i].inst_id =
956 uuid_params->u.map_uuid[i].inst_id;
959 devm_kfree(bus->dev, bc->params);
960 bc->params = (char *)params;
/*
 * Retrieve module ids from the UUIDs mentioned in a KPB widget's post-bind
 * params. Post-bind params are used only by KPB to set copier instances
 * for draining data; other module types are skipped.
 */
971 void skl_tplg_add_moduleid_in_bind_params(struct skl *skl,
972 struct snd_soc_dapm_widget *w)
974 struct skl_module_cfg *mconfig = w->priv;
982 if (mconfig->m_type != SKL_MODULE_TYPE_KPB)
985 for (i = 0; i < w->num_kcontrols; i++)
986 if ((w->kcontrol_news[i].access &
987 SNDRV_CTL_ELEM_ACCESS_TLV_CALLBACK) &&
988 (skl_tplg_find_moduleid_from_uuid(skl,
989 &w->kcontrol_news[i]) < 0))
990 dev_err(skl->skl_sst->dev,
991 "%s: invalid kpb post bind params\n",
/*
 * Queue a src->dst module pair on skl->bind_list so the bind can happen
 * later, once dst is initialized. Only supported for modules with static
 * (non-dynamic) pin connections; duplicates already on the list are
 * silently accepted.
 */
995 static int skl_tplg_module_add_deferred_bind(struct skl *skl,
996 struct skl_module_cfg *src, struct skl_module_cfg *dst)
998 struct skl_module_deferred_bind *m_list, *modules;
/* only supported for module with static pin connection */
1002 for (i = 0; i < dst->module->max_input_pins; i++) {
1003 struct skl_module_pin *pin = &dst->m_in_pin[i];
1005 if (pin->is_dynamic)
1008 if ((pin->id.module_id == src->id.module_id) &&
1009 (pin->id.instance_id == src->id.instance_id)) {
/* already queued? nothing to add */
1011 if (!list_empty(&skl->bind_list)) {
1012 list_for_each_entry(modules, &skl->bind_list, node) {
1013 if (modules->src == src && modules->dst == dst)
1018 m_list = kzalloc(sizeof(*m_list), GFP_KERNEL);
1025 list_add(&m_list->node, &skl->bind_list);
/*
 * Walk DAPM sink paths from widget w and bind src_mconfig to each SKL
 * sink module found, recursing through non-SKL widgets. If the sink
 * module is not yet initialized, defer the bind; otherwise bind, push
 * bind-time params, and start the sink pipe (BE pipes only) before the
 * caller starts the source pipe.
 */
1032 static int skl_tplg_bind_sinks(struct snd_soc_dapm_widget *w,
1034 struct snd_soc_dapm_widget *src_w,
1035 struct skl_module_cfg *src_mconfig)
1037 struct snd_soc_dapm_path *p;
1038 struct snd_soc_dapm_widget *sink = NULL, *next_sink = NULL;
1039 struct skl_module_cfg *sink_mconfig;
1040 struct skl_sst *ctx = skl->skl_sst;
1043 snd_soc_dapm_widget_for_each_sink_path(w, p) {
1047 dev_dbg(ctx->dev, "%s: src widget=%s\n", __func__, w->name);
1048 dev_dbg(ctx->dev, "%s: sink widget=%s\n", __func__, p->sink->name);
1050 next_sink = p->sink;
1052 if (!is_skl_dsp_widget_type(p->sink, ctx->dev))
1053 return skl_tplg_bind_sinks(p->sink, skl, src_w, src_mconfig);
/*
 * here we will check widgets in sink pipelines, so that
 * can be any widget type and we are only interested if
 * they are ones used for SKL so check that first
 */
1060 if ((p->sink->priv != NULL) &&
1061 is_skl_dsp_widget_type(p->sink, ctx->dev)) {
1064 sink_mconfig = sink->priv;
/*
 * Modules other than PGA leaf can be connected
 * directly or via switch to a module in another
 * pipeline, e.g. the reference path. When the path is
 * enabled, the dst module that needs to be bound may
 * not be initialized yet; in that case queue the pair
 * on the deferred bind list and bind when the dst
 * module is initialised.
 */
1076 if (((src_mconfig->m_state == SKL_MODULE_INIT_DONE)
1077 && (sink_mconfig->m_state == SKL_MODULE_UNINIT))) {
1079 ret = skl_tplg_module_add_deferred_bind(skl,
1080 src_mconfig, sink_mconfig);
1088 if (src_mconfig->m_state == SKL_MODULE_UNINIT ||
1089 sink_mconfig->m_state == SKL_MODULE_UNINIT)
/* Bind source to sink, mixin is always source */
1093 ret = skl_bind_modules(ctx, src_mconfig, sink_mconfig);
/* set module params after bind */
1098 skl_tplg_set_module_bind_params(src_w, src_mconfig, ctx);
1099 skl_tplg_set_module_bind_params(sink, sink_mconfig, ctx);
/* Start sinks pipe first */
1102 if (sink_mconfig->pipe->state != SKL_PIPE_STARTED) {
1103 if (sink_mconfig->pipe->conn_type !=
1104 SKL_PIPE_CONN_TYPE_FE)
1105 ret = skl_run_pipe(ctx,
1106 sink_mconfig->pipe);
/* no SKL sink found on this level: recurse into the next widget */
1113 if (!sink && next_sink)
1114 return skl_tplg_bind_sinks(next_sink, skl, src_w, src_mconfig);
/*
 * A PGA represents a module in a pipeline. In its Pre-PMU event we:
 * - bind to the sink pipeline(s): sink pipes may already be running and
 *   we get no mixer event on connect for a running mixer, so find the
 *   sinks here and bind to them (this makes dynamic connect work)
 * - start the sink pipeline if not running
 * - then run the current (source) pipe, BE pipes only.
 */
1129 static int skl_tplg_pga_dapm_pre_pmu_event(struct snd_soc_dapm_widget *w,
1132 struct skl_module_cfg *src_mconfig;
1133 struct skl_sst *ctx = skl->skl_sst;
1136 src_mconfig = w->priv;
/*
 * find which sink it is connected to, bind with the sink;
 * if the sink is not started, start the sink pipe first
 */
1143 ret = skl_tplg_bind_sinks(w, skl, w, src_mconfig);
/* Start source pipe last after starting all sinks */
1148 if (src_mconfig->pipe->conn_type != SKL_PIPE_CONN_TYPE_FE)
1149 return skl_run_pipe(ctx, src_mconfig->pipe);
/*
 * Walk DAPM source paths from widget w and return the first connected
 * SKL DSP widget found, recursing through non-SKL widgets; NULL-priv
 * widgets are skipped.
 */
1154 static struct snd_soc_dapm_widget *skl_get_src_dsp_widget(
1155 struct snd_soc_dapm_widget *w, struct skl *skl)
1157 struct snd_soc_dapm_path *p;
1158 struct snd_soc_dapm_widget *src_w = NULL;
1159 struct skl_sst *ctx = skl->skl_sst;
1161 snd_soc_dapm_widget_for_each_source_path(w, p) {
1166 dev_dbg(ctx->dev, "sink widget=%s\n", w->name);
1167 dev_dbg(ctx->dev, "src widget=%s\n", p->source->name);
/*
 * widgets in source pipelines can be of any type; we only care
 * about the ones used for SKL, so check that first
 */
1174 if ((p->source->priv != NULL) &&
1175 is_skl_dsp_widget_type(p->source, ctx->dev)) {
/* not an SKL widget: keep walking upstream */
1181 return skl_get_src_dsp_widget(src_w, skl);
/*
 * In the Post-PMU event of a mixer we:
 * - find the source DSP widget feeding this mixer
 * - if the source pipe is already running (a dynamic connection), bind
 *   this sink to that source and push bind params
 * - start this (sink) pipeline, BE pipes only.
 */
1195 static int skl_tplg_mixer_dapm_post_pmu_event(struct snd_soc_dapm_widget *w,
1199 struct snd_soc_dapm_widget *source, *sink;
1200 struct skl_module_cfg *src_mconfig, *sink_mconfig;
1201 struct skl_sst *ctx = skl->skl_sst;
1202 int src_pipe_started = 0;
1205 sink_mconfig = sink->priv;
/*
 * If the source pipe is already started, the source is driving one
 * more sink before this one got connected; since the source is
 * started, bind this sink to it and start this pipe.
 */
1212 source = skl_get_src_dsp_widget(w, skl);
1213 if (source != NULL) {
1214 src_mconfig = source->priv;
1215 sink_mconfig = sink->priv;
1216 src_pipe_started = 1;
/*
 * check pipe state; if not started there is no need to bind
 * or start the pipe here
 */
1222 if (src_mconfig->pipe->state != SKL_PIPE_STARTED)
1223 src_pipe_started = 0;
1226 if (src_pipe_started) {
1227 ret = skl_bind_modules(ctx, src_mconfig, sink_mconfig);
/* set module params after bind */
1232 skl_tplg_set_module_bind_params(source, src_mconfig, ctx);
1233 skl_tplg_set_module_bind_params(sink, sink_mconfig, ctx);
1235 if (sink_mconfig->pipe->conn_type != SKL_PIPE_CONN_TYPE_FE)
1236 ret = skl_run_pipe(ctx, sink_mconfig->pipe);
/*
 * In the Pre-PMD event of a mixer we:
 * - stop the sink pipeline
 * - find source connections still bound (pin state SKL_PIN_BIND_DONE)
 *   and unbind them from this sink.
 */
1248 static int skl_tplg_mixer_dapm_pre_pmd_event(struct snd_soc_dapm_widget *w,
1251 struct skl_module_cfg *src_mconfig, *sink_mconfig;
1253 struct skl_sst *ctx = skl->skl_sst;
1255 sink_mconfig = w->priv;
/* Stop the pipe */
1258 ret = skl_stop_pipe(ctx, sink_mconfig->pipe);
1262 for (i = 0; i < sink_mconfig->module->max_input_pins; i++) {
1263 if (sink_mconfig->m_in_pin[i].pin_state == SKL_PIN_BIND_DONE) {
1264 src_mconfig = sink_mconfig->m_in_pin[i].tgt_mcfg;
1268 ret = skl_unbind_modules(ctx,
1269 src_mconfig, sink_mconfig);
/*
 * In the Post-PMD event of a mixer we:
 * - free the mcps and memory used by the pipe
 * - service the deferred-bind list (unbind pairs whose dst was in this
 *   pipe, drop entries whose src was in this pipe)
 * - unbind consecutive modules within the pipeline
 * - delete the pipeline (modules need no explicit delete; pipe delete is
 *   enough), mark all modules UNINIT and unload them.
 */
1284 static int skl_tplg_mixer_dapm_post_pmd_event(struct snd_soc_dapm_widget *w,
1287 struct skl_module_cfg *mconfig = w->priv;
1288 struct skl_pipe_module *w_module;
1289 struct skl_module_cfg *src_module = NULL, *dst_module;
1290 struct skl_sst *ctx = skl->skl_sst;
1291 struct skl_pipe *s_pipe = mconfig->pipe;
1292 struct skl_module_deferred_bind *modules, *tmp;
1294 if (s_pipe->state == SKL_PIPE_INVALID)
1297 skl_tplg_free_pipe_mcps(skl, mconfig);
1298 skl_tplg_free_pipe_mem(skl, mconfig);
1300 list_for_each_entry(w_module, &s_pipe->w_list, node) {
1301 if (list_empty(&skl->bind_list))
1304 src_module = w_module->w->priv;
1306 list_for_each_entry_safe(modules, tmp, &skl->bind_list, node) {
/*
 * When the destination module is deleted, unbind the
 * modules from the deferred bind list.
 */
1311 if (modules->dst == src_module) {
1312 skl_unbind_modules(ctx, modules->src,
/*
 * When the source module is deleted, remove its entry
 * from the deferred bind list.
 */
1320 if (modules->src == src_module) {
1321 list_del(&modules->node);
1322 modules->src = NULL;
1323 modules->dst = NULL;
/* unbind consecutive modules, source to sink */
1329 list_for_each_entry(w_module, &s_pipe->w_list, node) {
1330 dst_module = w_module->w->priv;
1332 if (mconfig->m_state >= SKL_MODULE_INIT_DONE)
1333 skl_tplg_free_pipe_mcps(skl, dst_module);
1334 if (src_module == NULL) {
1335 src_module = dst_module;
1339 skl_unbind_modules(ctx, src_module, dst_module);
1340 src_module = dst_module;
1343 skl_delete_pipe(ctx, mconfig->pipe);
1345 list_for_each_entry(w_module, &s_pipe->w_list, node) {
1346 src_module = w_module->w->priv;
1347 src_module->m_state = SKL_MODULE_UNINIT;
1350 return skl_tplg_unload_pipe_modules(ctx, s_pipe);
1354 * in the Post-PMD event of PGA we need to do the following:
1355 * - Free the mcps used
1356 * - Stop the pipeline
1357 * - If a source pipe is connected, unbind from the source pipelines
1359 static int skl_tplg_pga_dapm_post_pmd_event(struct snd_soc_dapm_widget *w,
1362 struct skl_module_cfg *src_mconfig, *sink_mconfig;
1364 struct skl_sst *ctx = skl->skl_sst;
1366 src_mconfig = w->priv;
1368 /* Stop the pipe since this is a mixin module */
1369 ret = skl_stop_pipe(ctx, src_mconfig->pipe);
/* Walk every output pin still marked bound and unbind it from its sink */
1373 for (i = 0; i < src_mconfig->module->max_output_pins; i++) {
1374 if (src_mconfig->m_out_pin[i].pin_state == SKL_PIN_BIND_DONE) {
1375 sink_mconfig = src_mconfig->m_out_pin[i].tgt_mcfg;
1379 * This is a connector and if path is found that means
1380 * unbind between source and sink has not happened yet
1382 ret = skl_unbind_modules(ctx, src_mconfig,
1391 * In modelling, we assume there will be ONLY one mixer in a pipeline. If a
1392 * second one is required that is created as another pipe entity.
1393 * The mixer is responsible for pipe management and represent a pipeline
/* DAPM event dispatcher for mixer widgets: route each power event to the
 * matching pre/post PMU/PMD handler.
 */
1396 static int skl_tplg_mixer_event(struct snd_soc_dapm_widget *w,
1397 struct snd_kcontrol *k, int event)
1399 struct snd_soc_dapm_context *dapm = w->dapm;
1400 struct skl *skl = get_skl_ctx(dapm->dev);
1403 case SND_SOC_DAPM_PRE_PMU:
1404 return skl_tplg_mixer_dapm_pre_pmu_event(w, skl);
1406 case SND_SOC_DAPM_POST_PMU:
1407 return skl_tplg_mixer_dapm_post_pmu_event(w, skl);
1409 case SND_SOC_DAPM_PRE_PMD:
1410 return skl_tplg_mixer_dapm_pre_pmd_event(w, skl);
1412 case SND_SOC_DAPM_POST_PMD:
1413 return skl_tplg_mixer_dapm_post_pmd_event(w, skl);
1420 * In modelling, we assumed rest of the modules in pipeline are PGA. But we
1421 * are interested in last PGA (leaf PGA) in a pipeline to disconnect with
1422 * the sink when it is running (two FE to one BE or one FE to two BE)
/* DAPM event dispatcher for PGA widgets: only PRE_PMU and POST_PMD are
 * handled here.
 */
1425 static int skl_tplg_pga_event(struct snd_soc_dapm_widget *w,
1426 struct snd_kcontrol *k, int event)
1429 struct snd_soc_dapm_context *dapm = w->dapm;
1430 struct skl *skl = get_skl_ctx(dapm->dev);
1433 case SND_SOC_DAPM_PRE_PMU:
1434 return skl_tplg_pga_dapm_pre_pmu_event(w, skl);
1436 case SND_SOC_DAPM_POST_PMD:
1437 return skl_tplg_pga_dapm_post_pmd_event(w, skl);
/*
 * TLV byte-control "get": read the algo params for the widget's module
 * from the DSP and copy them to user space as <param_id, size, payload>.
 */
1443 static int skl_tplg_tlv_control_get(struct snd_kcontrol *kcontrol,
1444 unsigned int __user *data, unsigned int size)
1446 struct soc_bytes_ext *sb =
1447 (struct soc_bytes_ext *)kcontrol->private_value;
1448 struct skl_algo_data *bc = (struct skl_algo_data *)sb->dobj.private;
1449 struct snd_soc_dapm_widget *w = snd_soc_dapm_kcontrol_widget(kcontrol);
1450 struct skl_module_cfg *mconfig = w->priv;
1451 struct skl *skl = get_skl_ctx(w->dapm->dev);
/* Refresh bc->params from the firmware before copying out */
1454 skl_get_module_params(skl->skl_sst, (u32 *)bc->params,
1455 bc->size, bc->param_id, mconfig);
1457 /* decrement size for TLV header (two u32 words: id + size) */
1458 size -= 2 * sizeof(u32);
1460 /* check size as we don't want to send kernel data */
/* Layout in user buffer: [0]=param_id, [1]=payload size, [2..]=payload */
1465 if (copy_to_user(data, &bc->param_id, sizeof(u32)))
1467 if (copy_to_user(data + 1, &size, sizeof(u32)))
1469 if (copy_to_user(data + 2, bc->params, size))
1476 #define SKL_PARAM_VENDOR_ID 0xff
/*
 * TLV byte-control "put": copy the user-supplied parameter blob into the
 * control's algo data and push it to the module in the DSP.
 */
1478 static int skl_tplg_tlv_control_set(struct snd_kcontrol *kcontrol,
1479 const unsigned int __user *data, unsigned int size)
1481 struct snd_soc_dapm_widget *w = snd_soc_dapm_kcontrol_widget(kcontrol);
1482 struct skl_module_cfg *mconfig = w->priv;
1483 struct soc_bytes_ext *sb =
1484 (struct soc_bytes_ext *)kcontrol->private_value;
1485 struct skl_algo_data *ac = (struct skl_algo_data *)sb->dobj.private;
1486 struct skl *skl = get_skl_ctx(w->dapm->dev);
1494 * if the param_id is of type Vendor, firmware expects actual
1495 * parameter id and size from the control.
1497 if (ac->param_id == SKL_PARAM_VENDOR_ID) {
1498 if (copy_from_user(ac->params, data, size))
1501 if (copy_from_user(ac->params,
/* Apply the new params immediately (widget power state handling is
 * done in the elided lines above — verify against full source). */
1507 return skl_set_module_params(skl->skl_sst,
1508 (u32 *)ac->params, ac->size,
1509 ac->param_id, mconfig);
/*
 * Enum control "get" for DMIC mic-select: report the stored channel-combo
 * index, but only if the control's channel type matches the one last set
 * on the module; otherwise report 0 (the INVALID entry).
 */
1515 static int skl_tplg_mic_control_get(struct snd_kcontrol *kcontrol,
1516 struct snd_ctl_elem_value *ucontrol)
1518 struct snd_soc_dapm_widget *w = snd_soc_dapm_kcontrol_widget(kcontrol);
1519 struct skl_module_cfg *mconfig = w->priv;
1520 struct soc_enum *ec = (struct soc_enum *)kcontrol->private_value;
/* dobj.private holds the channel type (mono/stereo/trio/quatro) */
1521 u32 ch_type = *((u32 *)ec->dobj.private);
1523 if (mconfig->dmic_ch_type == ch_type)
1524 ucontrol->value.enumerated.item[0] =
1525 mconfig->dmic_ch_combo_index;
1527 ucontrol->value.enumerated.item[0] = 0;
/*
 * Build the mic-select module's private format blob from @mic_cfg and
 * attach it to the module config so it is sent as a set_params payload.
 * The caps buffer is devm-allocated once and reused on later updates.
 */
1532 static int skl_fill_mic_sel_params(struct skl_module_cfg *mconfig,
1533 struct skl_mic_sel_config *mic_cfg, struct device *dev)
1535 struct skl_specific_cfg *sp_cfg = &mconfig->formats_config;
1537 sp_cfg->caps_size = sizeof(struct skl_mic_sel_config);
1538 sp_cfg->set_params = SKL_PARAM_SET;
1539 sp_cfg->param_id = 0x00;
1540 if (!sp_cfg->caps) {
1541 sp_cfg->caps = devm_kzalloc(dev, sp_cfg->caps_size, GFP_KERNEL);
1546 mic_cfg->mic_switch = SKL_MIC_SEL_SWITCH;
1548 memcpy(sp_cfg->caps, mic_cfg, sp_cfg->caps_size);
/*
 * Enum control "put" for DMIC mic-select: translate the selected combo
 * index into an input->output channel map and program it into the
 * mic-select module's gain blob.
 */
1553 static int skl_tplg_mic_control_set(struct snd_kcontrol *kcontrol,
1554 struct snd_ctl_elem_value *ucontrol)
1556 struct snd_soc_dapm_widget *w = snd_soc_dapm_kcontrol_widget(kcontrol);
1557 struct skl_module_cfg *mconfig = w->priv;
1558 struct skl_mic_sel_config mic_cfg = {0};
1559 struct soc_enum *ec = (struct soc_enum *)kcontrol->private_value;
1560 u32 ch_type = *((u32 *)ec->dobj.private);
1562 u8 in_ch, out_ch, index;
1564 mconfig->dmic_ch_type = ch_type;
1565 mconfig->dmic_ch_combo_index = ucontrol->value.enumerated.item[0];
1567 /* enum control index 0 is INVALID, so no channels to be set */
1568 if (mconfig->dmic_ch_combo_index == 0)
1571 /* No valid channel selection map for index 0, so offset by 1 */
1572 index = mconfig->dmic_ch_combo_index - 1;
/* Range-check against the combo table for each channel type, then pick
 * the channel list for the chosen combination. */
1576 if (mconfig->dmic_ch_combo_index > ARRAY_SIZE(mic_mono_list))
1579 list = &mic_mono_list[index];
1583 if (mconfig->dmic_ch_combo_index > ARRAY_SIZE(mic_stereo_list))
1586 list = mic_stereo_list[index];
1590 if (mconfig->dmic_ch_combo_index > ARRAY_SIZE(mic_trio_list))
1593 list = mic_trio_list[index];
1597 if (mconfig->dmic_ch_combo_index > ARRAY_SIZE(mic_quatro_list))
1600 list = mic_quatro_list[index];
1604 dev_err(w->dapm->dev,
1605 "Invalid channel %d for mic_select module\n",
1611 /* channel type enum map to number of channels for that type */
1612 for (out_ch = 0; out_ch < ch_type; out_ch++) {
1613 in_ch = list[out_ch];
1614 mic_cfg.blob[out_ch][in_ch] = SKL_DEFAULT_MIC_SEL_GAIN;
1617 return skl_fill_mic_sel_params(mconfig, &mic_cfg, w->dapm->dev);
1621 * Fill the dma id for host and link. In case of a passthrough
1622 * pipeline, we will have both host and link in the same
1623 * pipeline, so need to copy the link and host based on dev_type
1625 static void skl_tplg_fill_dma_id(struct skl_module_cfg *mcfg,
1626 struct skl_pipe_params *params)
1628 struct skl_pipe *pipe = mcfg->pipe;
/* Passthrough pipe: copy only the fields belonging to this end of the
 * pipe (link side vs host side), then the common stream params. */
1630 if (pipe->passthru) {
1631 switch (mcfg->dev_type) {
1632 case SKL_DEVICE_HDALINK:
1633 pipe->p_params->link_dma_id = params->link_dma_id;
1634 pipe->p_params->link_index = params->link_index;
1635 pipe->p_params->link_bps = params->link_bps;
1638 case SKL_DEVICE_HDAHOST:
1639 pipe->p_params->host_dma_id = params->host_dma_id;
1640 pipe->p_params->host_bps = params->host_bps;
1646 pipe->p_params->s_fmt = params->s_fmt;
1647 pipe->p_params->ch = params->ch;
1648 pipe->p_params->s_freq = params->s_freq;
1649 pipe->p_params->stream = params->stream;
1650 pipe->p_params->format = params->format;
/* Non-passthrough: take the whole parameter set verbatim */
1653 memcpy(pipe->p_params, params, sizeof(*params));
1658 * The FE params are passed by hw_params of the DAI.
1659 * On hw_params, the params are stored in Gateway module of the FE and we
1660 * need to calculate the format in DSP module configuration, that
1661 * conversion is done here
1663 int skl_tplg_update_pipe_params(struct device *dev,
1664 struct skl_module_cfg *mconfig,
1665 struct skl_pipe_params *params)
1667 struct skl_module_res *res = &mconfig->module->resources[0];
1668 struct skl *skl = get_skl_ctx(dev);
1669 struct skl_module_fmt *format = NULL;
1670 u8 cfg_idx = mconfig->pipe->cur_config_idx;
1672 skl_tplg_fill_dma_id(mconfig, params);
/* Select the resource/format indices for the currently active pipe
 * configuration */
1673 mconfig->fmt_idx = mconfig->mod_cfg[cfg_idx].fmt_idx;
1674 mconfig->res_idx = mconfig->mod_cfg[cfg_idx].res_idx;
/* With a module manifest present, formats come from the manifest */
1676 if (skl->nr_modules)
/* Playback fixes up the module's input format, capture its output */
1679 if (params->stream == SNDRV_PCM_STREAM_PLAYBACK)
1680 format = &mconfig->module->formats[0].inputs[0].fmt;
1682 format = &mconfig->module->formats[0].outputs[0].fmt;
1684 /* set the hw_params */
1685 format->s_freq = params->s_freq;
1686 format->channels = params->ch;
1687 format->valid_bit_depth = skl_get_bit_depth(params->s_fmt);
1690 * 16 bit is 16 bit container whereas 24 bit is in 32 bit
1691 * container so update bit depth accordingly
1693 switch (format->valid_bit_depth) {
1694 case SKL_DEPTH_16BIT:
1695 format->bit_depth = format->valid_bit_depth;
1698 case SKL_DEPTH_24BIT:
1699 case SKL_DEPTH_32BIT:
1700 format->bit_depth = SKL_DEPTH_32BIT;
1704 dev_err(dev, "Invalid bit depth %x for pipe\n",
1705 format->valid_bit_depth);
/* Buffer size per 1ms of audio: freq/1000 * channels * bytes/sample.
 * ibs for playback (input to DSP), obs for capture (output from DSP). */
1709 if (params->stream == SNDRV_PCM_STREAM_PLAYBACK) {
1710 res->ibs = (format->s_freq / 1000) *
1711 (format->channels) *
1712 (format->bit_depth >> 3);
1714 res->obs = (format->s_freq / 1000) *
1715 (format->channels) *
1716 (format->bit_depth >> 3);
1723 * Query the module config for the FE DAI
1724 * This is used to find the hw_params set for that DAI and apply to FE
/* Walk the DAPM graph from the FE DAI widget and return the first
 * connected, powered SKL widget's module config (the copier), or NULL
 * if none is found (NULL return path is in elided lines — verify).
 */
struct skl_module_cfg *
1728 skl_tplg_fe_get_cpr_module(struct snd_soc_dai *dai, int stream)
1730 struct snd_soc_dapm_widget *w;
1731 struct snd_soc_dapm_path *p = NULL;
1733 if (stream == SNDRV_PCM_STREAM_PLAYBACK) {
1734 w = dai->playback_widget;
/* Playback: follow sink paths out of the playback widget */
1735 snd_soc_dapm_widget_for_each_sink_path(w, p) {
1736 if (p->connect && p->sink->power &&
1737 !is_skl_dsp_widget_type(p->sink, dai->dev))
1740 if (p->sink->priv) {
1741 dev_dbg(dai->dev, "set params for %s\n",
1743 return p->sink->priv;
1747 w = dai->capture_widget;
/* Capture: follow source paths into the capture widget */
1748 snd_soc_dapm_widget_for_each_source_path(w, p) {
1749 if (p->connect && p->source->power &&
1750 !is_skl_dsp_widget_type(p->source, dai->dev))
1753 if (p->source->priv) {
1754 dev_dbg(dai->dev, "set params for %s\n",
1756 return p->source->priv;
/* Recursively search upstream (source paths) from a BE playback widget
 * for the copier feeding an AIF-out endpoint; returns its module config
 * or NULL.
 */
1764 static struct skl_module_cfg *skl_get_mconfig_pb_cpr(
1765 struct snd_soc_dai *dai, struct snd_soc_dapm_widget *w)
1767 struct snd_soc_dapm_path *p;
1768 struct skl_module_cfg *mconfig = NULL;
1770 snd_soc_dapm_widget_for_each_source_path(w, p) {
1771 if (w->endpoints[SND_SOC_DAPM_DIR_OUT] > 0) {
1773 (p->sink->id == snd_soc_dapm_aif_out) &&
1775 mconfig = p->source->priv;
/* Not found at this level: recurse one hop further upstream */
1778 mconfig = skl_get_mconfig_pb_cpr(dai, p->source);
/* Capture-direction counterpart of skl_get_mconfig_pb_cpr: recursively
 * search downstream (sink paths) for the copier fed by an AIF-in
 * endpoint; returns its module config or NULL.
 */
1786 static struct skl_module_cfg *skl_get_mconfig_cap_cpr(
1787 struct snd_soc_dai *dai, struct snd_soc_dapm_widget *w)
1789 struct snd_soc_dapm_path *p;
1790 struct skl_module_cfg *mconfig = NULL;
1792 snd_soc_dapm_widget_for_each_sink_path(w, p) {
1793 if (w->endpoints[SND_SOC_DAPM_DIR_IN] > 0) {
1795 (p->source->id == snd_soc_dapm_aif_in) &&
1797 mconfig = p->sink->priv;
/* Not found at this level: recurse one hop further downstream */
1800 mconfig = skl_get_mconfig_cap_cpr(dai, p->sink);
/* Return the copier module config for a BE DAI, picking the playback or
 * capture search helper based on the stream direction.
 */
struct skl_module_cfg *
1809 skl_tplg_be_get_cpr_module(struct snd_soc_dai *dai, int stream)
1811 struct snd_soc_dapm_widget *w;
1812 struct skl_module_cfg *mconfig;
1814 if (stream == SNDRV_PCM_STREAM_PLAYBACK) {
1815 w = dai->playback_widget;
1816 mconfig = skl_get_mconfig_pb_cpr(dai, w);
1818 w = dai->capture_widget;
1819 mconfig = skl_get_mconfig_cap_cpr(dai, w);
/* Map a topology device type (SKL_DEVICE_*) to its NHLT link type;
 * unknown types map to NHLT_LINK_INVALID.
 */
1824 static u8 skl_tplg_be_link_type(int dev_type)
1830 ret = NHLT_LINK_SSP;
1833 case SKL_DEVICE_DMIC:
1834 ret = NHLT_LINK_DMIC;
1837 case SKL_DEVICE_I2S:
1838 ret = NHLT_LINK_SSP;
1841 case SKL_DEVICE_HDALINK:
1842 ret = NHLT_LINK_HDA;
1846 ret = NHLT_LINK_INVALID;
1854 * Fill the BE gateway parameters
1855 * The BE gateway expects a blob of parameters which are kept in the ACPI
1856 * NHLT blob, so query the blob for interface type (i2s/pdm) and instance.
1857 * The port can have multiple settings so pick based on the PCM
1860 static int skl_tplg_be_fill_pipe_params(struct snd_soc_dai *dai,
1861 struct skl_module_cfg *mconfig,
1862 struct skl_pipe_params *params)
1864 struct nhlt_specific_cfg *cfg;
1865 struct skl *skl = get_skl_ctx(dai->dev);
1866 int link_type = skl_tplg_be_link_type(mconfig->dev_type);
1867 u8 dev_type = skl_tplg_be_dev_type(mconfig->dev_type);
1869 skl_tplg_fill_dma_id(mconfig, params);
/* HDA links need no NHLT blob; the gateway is configured elsewhere */
1871 if (link_type == NHLT_LINK_HDA)
1874 /* update the blob based on virtual bus_id */
1875 cfg = skl_get_ep_blob(skl, mconfig->vbus_id, link_type,
1876 params->s_fmt, params->ch,
1877 params->s_freq, params->stream,
/* Note: caps points directly into the cached NHLT table, no copy made */
1880 mconfig->formats_config.caps_size = cfg->size;
1881 mconfig->formats_config.caps = (u32 *) &cfg->caps;
1883 dev_err(dai->dev, "Blob NULL for id %x type %d dirn %d\n",
1884 mconfig->vbus_id, link_type,
1886 dev_err(dai->dev, "PCM: ch %d, freq %d, fmt %d\n",
1887 params->ch, params->s_freq, params->s_fmt);
/* Walk upstream from @w: fill BE pipe params on the first connected SKL
 * source widget, otherwise recurse further towards the source.
 */
1894 static int skl_tplg_be_set_src_pipe_params(struct snd_soc_dai *dai,
1895 struct snd_soc_dapm_widget *w,
1896 struct skl_pipe_params *params)
1898 struct snd_soc_dapm_path *p;
1901 snd_soc_dapm_widget_for_each_source_path(w, p) {
1902 if (p->connect && is_skl_dsp_widget_type(p->source, dai->dev) &&
1905 ret = skl_tplg_be_fill_pipe_params(dai,
1906 p->source->priv, params);
1910 ret = skl_tplg_be_set_src_pipe_params(dai,
/* Sink-direction counterpart of skl_tplg_be_set_src_pipe_params: walk
 * downstream from @w and fill BE pipe params on the first connected SKL
 * sink widget, recursing otherwise.
 */
1920 static int skl_tplg_be_set_sink_pipe_params(struct snd_soc_dai *dai,
1921 struct snd_soc_dapm_widget *w, struct skl_pipe_params *params)
1923 struct snd_soc_dapm_path *p = NULL;
1926 snd_soc_dapm_widget_for_each_sink_path(w, p) {
1927 if (p->connect && is_skl_dsp_widget_type(p->sink, dai->dev) &&
1930 ret = skl_tplg_be_fill_pipe_params(dai,
1931 p->sink->priv, params);
1935 ret = skl_tplg_be_set_sink_pipe_params(
1936 dai, p->sink, params);
1946 * BE hw_params can be a source parameters (capture) or sink parameters
1947 * (playback). Based on sink and source we need to either find the source
1948 * list or the sink list and set the pipeline parameters
1950 int skl_tplg_be_update_params(struct snd_soc_dai *dai,
1951 struct skl_pipe_params *params)
1953 struct snd_soc_dapm_widget *w;
/* BE playback feeds the link, so parameters flow from the source side;
 * BE capture receives from the link, so they flow to the sink side. */
1955 if (params->stream == SNDRV_PCM_STREAM_PLAYBACK) {
1956 w = dai->playback_widget;
1958 return skl_tplg_be_set_src_pipe_params(dai, w, params);
1961 w = dai->capture_widget;
1963 return skl_tplg_be_set_sink_pipe_params(dai, w, params);
/* Topology event-handler table: maps event IDs from the topology binary
 * to the mixer/PGA DAPM event dispatchers above. */
1969 static const struct snd_soc_tplg_widget_events skl_tplg_widget_ops[] = {
1970 {SKL_MIXER_EVENT, skl_tplg_mixer_event},
1971 {SKL_VMIXER_EVENT, skl_tplg_mixer_event},
1972 {SKL_PGA_EVENT, skl_tplg_pga_event},
/* Bytes-ext (TLV) control handlers */
1975 static const struct snd_soc_tplg_bytes_ext_ops skl_tlv_ops[] = {
1976 {SKL_CONTROL_TYPE_BYTE_TLV, skl_tplg_tlv_control_get,
1977 skl_tplg_tlv_control_set},
/* Enum kcontrol handlers (DMIC mic select) */
1980 static const struct snd_soc_tplg_kcontrol_ops skl_tplg_kcontrol_ops[] = {
.id = SKL_CONTROL_TYPE_MIC_SELECT,
1983 .get = skl_tplg_mic_control_get,
1984 .put = skl_tplg_mic_control_set,
/* Store one per-configuration pipe token (format or memory pages) into
 * pipe->configs[conf_idx], using @dir to pick input vs output format.
 */
1988 static int skl_tplg_fill_pipe_cfg(struct device *dev,
1989 struct skl_pipe *pipe, u32 tkn,
1990 u32 tkn_val, int conf_idx, int dir)
1992 struct skl_pipe_fmt *fmt;
1993 struct skl_path_config *config;
1997 fmt = &pipe->configs[conf_idx].in_fmt;
2001 fmt = &pipe->configs[conf_idx].out_fmt;
2005 dev_err(dev, "Invalid direction: %d\n", dir);
2009 config = &pipe->configs[conf_idx];
2012 case SKL_TKN_U32_CFG_FREQ:
2013 fmt->freq = tkn_val;
2016 case SKL_TKN_U8_CFG_CHAN:
2017 fmt->channels = tkn_val;
2020 case SKL_TKN_U8_CFG_BPS:
2024 case SKL_TKN_U32_PATH_MEM_PGS:
2025 config->mem_pages = tkn_val;
2029 dev_err(dev, "Invalid token config: %d\n", tkn);
/* Store one pipe-level token value into the corresponding field of
 * @pipe; unknown tokens are reported via dev_err.
 */
2036 static int skl_tplg_fill_pipe_tkn(struct device *dev,
2037 struct skl_pipe *pipe, u32 tkn,
2042 case SKL_TKN_U32_PIPE_CONN_TYPE:
2043 pipe->conn_type = tkn_val;
2046 case SKL_TKN_U32_PIPE_PRIORITY:
2047 pipe->pipe_priority = tkn_val;
2050 case SKL_TKN_U32_PIPE_MEM_PGS:
2051 pipe->memory_pages = tkn_val;
2054 case SKL_TKN_U32_PMODE:
2055 pipe->lp_mode = tkn_val;
2058 case SKL_TKN_U32_PIPE_DIRECTION:
2059 pipe->direction = tkn_val;
2062 case SKL_TKN_U32_NUM_CONFIGS:
2063 pipe->nr_cfgs = tkn_val;
2067 dev_err(dev, "Token not handled %d\n", tkn);
2075 * Add pipeline by parsing the relevant tokens
2076 * Return an existing pipe if the pipe already exists.
2078 static int skl_tplg_add_pipe(struct device *dev,
2079 struct skl_module_cfg *mconfig, struct skl *skl,
2080 struct snd_soc_tplg_vendor_value_elem *tkn_elem)
2082 struct skl_pipeline *ppl;
2083 struct skl_pipe *pipe;
2084 struct skl_pipe_params *params;
/* Reuse an already-registered pipe with the same ppl_id if present */
2086 list_for_each_entry(ppl, &skl->ppl_list, node) {
2087 if (ppl->pipe->ppl_id == tkn_elem->value) {
2088 mconfig->pipe = ppl->pipe;
/* Otherwise allocate pipeline, pipe and params (all device-managed) */
2093 ppl = devm_kzalloc(dev, sizeof(*ppl), GFP_KERNEL);
2097 pipe = devm_kzalloc(dev, sizeof(*pipe), GFP_KERNEL);
2101 params = devm_kzalloc(dev, sizeof(*params), GFP_KERNEL);
2105 pipe->p_params = params;
2106 pipe->ppl_id = tkn_elem->value;
2107 INIT_LIST_HEAD(&pipe->w_list);
2110 list_add(&ppl->node, &skl->ppl_list);
2112 mconfig->pipe = pipe;
2113 mconfig->pipe->state = SKL_PIPE_INVALID;
/* Copy a 16-byte UUID out of a topology UUID token element into @guid;
 * rejects elements whose token is not SKL_TKN_UUID.
 */
2118 static int skl_tplg_get_uuid(struct device *dev, u8 *guid,
2119 struct snd_soc_tplg_vendor_uuid_elem *uuid_tkn)
2121 if (uuid_tkn->token == SKL_TKN_UUID) {
2122 memcpy(guid, &uuid_tkn->uuid, 16);
2126 dev_err(dev, "Not an UUID token %d\n", uuid_tkn->token);
/* Fill one pin's module/instance id (or UUID) in the pin array from a
 * single topology token element.
 */
2131 static int skl_tplg_fill_pin(struct device *dev,
2132 struct snd_soc_tplg_vendor_value_elem *tkn_elem,
2133 struct skl_module_pin *m_pin,
2138 switch (tkn_elem->token) {
2139 case SKL_TKN_U32_PIN_MOD_ID:
2140 m_pin[pin_index].id.module_id = tkn_elem->value;
2143 case SKL_TKN_U32_PIN_INST_ID:
2144 m_pin[pin_index].id.instance_id = tkn_elem->value;
/* UUID token arrives as a differently-typed element; cast accordingly */
2148 ret = skl_tplg_get_uuid(dev, m_pin[pin_index].id.mod_uuid.b,
2149 (struct snd_soc_tplg_vendor_uuid_elem *)tkn_elem);
2156 dev_err(dev, "%d Not a pin token\n", tkn_elem->token);
2164 * Parse for pin config specific tokens to fill up the
2165 * module private data
2167 static int skl_tplg_fill_pins_info(struct device *dev,
2168 struct skl_module_cfg *mconfig,
2169 struct snd_soc_tplg_vendor_value_elem *tkn_elem,
2170 int dir, int pin_count)
2173 struct skl_module_pin *m_pin;
/* Pick the in/out pin array from the direction parsed earlier */
2177 m_pin = mconfig->m_in_pin;
2181 m_pin = mconfig->m_out_pin;
2185 dev_err(dev, "Invalid direction value\n");
2189 ret = skl_tplg_fill_pin(dev, tkn_elem, m_pin, pin_count);
/* Freshly-described pins start unbound and unused */
2193 m_pin[pin_count].in_use = false;
2194 m_pin[pin_count].pin_state = SKL_PIN_UNBIND;
2200 * Fill up input/output module config format based
/* Store one audio-format token value into the given module format
 * structure; unknown tokens are reported via dev_err.
 */
2203 static int skl_tplg_fill_fmt(struct device *dev,
2204 struct skl_module_fmt *dst_fmt,
2208 case SKL_TKN_U32_FMT_CH:
2209 dst_fmt->channels = value;
2212 case SKL_TKN_U32_FMT_FREQ:
2213 dst_fmt->s_freq = value;
2216 case SKL_TKN_U32_FMT_BIT_DEPTH:
2217 dst_fmt->bit_depth = value;
2220 case SKL_TKN_U32_FMT_SAMPLE_SIZE:
2221 dst_fmt->valid_bit_depth = value;
2224 case SKL_TKN_U32_FMT_CH_CONFIG:
2225 dst_fmt->ch_cfg = value;
2228 case SKL_TKN_U32_FMT_INTERLEAVE:
2229 dst_fmt->interleaving_style = value;
2232 case SKL_TKN_U32_FMT_SAMPLE_TYPE:
2233 dst_fmt->sample_type = value;
2236 case SKL_TKN_U32_FMT_CH_MAP:
2237 dst_fmt->ch_map = value;
2241 dev_err(dev, "Invalid token %d\n", tkn);
/* Resolve the input/output format slot of a module interface by
 * direction and index, then delegate the token to skl_tplg_fill_fmt().
 */
2248 static int skl_tplg_widget_fill_fmt(struct device *dev,
2249 struct skl_module_iface *fmt,
2250 u32 tkn, u32 val, u32 dir, int fmt_idx)
2252 struct skl_module_fmt *dst_fmt;
2259 dst_fmt = &fmt->inputs[fmt_idx].fmt;
2263 dst_fmt = &fmt->outputs[fmt_idx].fmt;
2267 dev_err(dev, "Invalid direction: %d\n", dir);
2271 return skl_tplg_fill_fmt(dev, dst_fmt, tkn, val);
/* Set the is_dynamic flag on every pin in the array to @value. */
2274 static void skl_tplg_fill_pin_dynamic_val(
2275 struct skl_module_pin *mpin, u32 pin_count, u32 value)
2279 for (i = 0; i < pin_count; i++)
2280 mpin[i].is_dynamic = value;
2284 * Resource table in the manifest has pin specific resources
2285 * like pin and pin buffer size
2287 static int skl_tplg_manifest_pin_res_tkn(struct device *dev,
2288 struct snd_soc_tplg_vendor_value_elem *tkn_elem,
2289 struct skl_module_res *res, int pin_idx, int dir)
2291 struct skl_module_pin_resources *m_pin;
/* Pick the input or output pin-resource slot based on direction */
2295 m_pin = &res->input[pin_idx];
2299 m_pin = &res->output[pin_idx];
2303 dev_err(dev, "Invalid pin direction: %d\n", dir);
2307 switch (tkn_elem->token) {
2308 case SKL_TKN_MM_U32_RES_PIN_ID:
2309 m_pin->pin_index = tkn_elem->value;
2312 case SKL_TKN_MM_U32_PIN_BUF:
2313 m_pin->buf_size = tkn_elem->value;
2317 dev_err(dev, "Invalid token: %d\n", tkn_elem->token);
2325 * Fill module specific resources from the manifest's resource
2326 * table like CPS, DMA size, mem_pages.
2328 static int skl_tplg_fill_res_tkn(struct device *dev,
2329 struct snd_soc_tplg_vendor_value_elem *tkn_elem,
2330 struct skl_module_res *res,
2331 int pin_idx, int dir)
2333 int ret, tkn_count = 0;
2338 switch (tkn_elem->token) {
2339 case SKL_TKN_MM_U32_CPS:
2340 res->cps = tkn_elem->value;
2343 case SKL_TKN_MM_U32_DMA_SIZE:
2344 res->dma_buffer_size = tkn_elem->value;
2347 case SKL_TKN_MM_U32_CPC:
2348 res->cpc = tkn_elem->value;
2351 case SKL_TKN_U32_MEM_PAGES:
2352 res->is_pages = tkn_elem->value;
2355 case SKL_TKN_U32_OBS:
2356 res->obs = tkn_elem->value;
2359 case SKL_TKN_U32_IBS:
2360 res->ibs = tkn_elem->value;
/* Legacy token: same destination field as SKL_TKN_MM_U32_CPS */
2363 case SKL_TKN_U32_MAX_MCPS:
2364 res->cps = tkn_elem->value;
/* Pin-scoped resource tokens are delegated to the pin helper */
2367 case SKL_TKN_MM_U32_RES_PIN_ID:
2368 case SKL_TKN_MM_U32_PIN_BUF:
2369 ret = skl_tplg_manifest_pin_res_tkn(dev, tkn_elem, res,
2376 dev_err(dev, "Not a res type token: %d", tkn_elem->token);
2386 * Parse tokens to fill up the module private data
2388 static int skl_tplg_get_token(struct device *dev,
2389 struct snd_soc_tplg_vendor_value_elem *tkn_elem,
2390 struct skl *skl, struct skl_module_cfg *mconfig)
/*
 * NOTE(review): these statics carry parse state (direction, pin index,
 * config index) across successive calls for the same widget. This makes
 * the parser order-dependent and non-reentrant — safe only because
 * topology loading is serialized; confirm before calling concurrently.
 */
2394 static int is_pipe_exists;
2395 static int pin_index, dir, conf_idx;
2396 struct skl_module_iface *iface = NULL;
2397 struct skl_module_res *res = NULL;
2398 int res_idx = mconfig->res_idx;
2399 int fmt_idx = mconfig->fmt_idx;
2402 * If the manifest structure contains no modules, fill all
2403 * the module data to 0th index.
2404 * res_idx and fmt_idx are default set to 0.
2406 if (skl->nr_modules == 0) {
2407 res = &mconfig->module->resources[res_idx];
2408 iface = &mconfig->module->formats[fmt_idx];
2411 if (tkn_elem->token > SKL_TKN_MAX)
2414 switch (tkn_elem->token) {
2415 case SKL_TKN_U8_IN_QUEUE_COUNT:
2416 mconfig->module->max_input_pins = tkn_elem->value;
2419 case SKL_TKN_U8_OUT_QUEUE_COUNT:
2420 mconfig->module->max_output_pins = tkn_elem->value;
2423 case SKL_TKN_U8_DYN_IN_PIN:
2424 if (!mconfig->m_in_pin)
2426 devm_kcalloc(dev, MAX_IN_QUEUE,
2427 sizeof(*mconfig->m_in_pin),
2429 if (!mconfig->m_in_pin)
2432 skl_tplg_fill_pin_dynamic_val(mconfig->m_in_pin, MAX_IN_QUEUE,
2436 case SKL_TKN_U8_DYN_OUT_PIN:
2437 if (!mconfig->m_out_pin)
2438 mconfig->m_out_pin =
/*
 * FIXME(review): the OUT pin array is sized with MAX_IN_QUEUE and
 * sizeof(*mconfig->m_in_pin) but filled below with MAX_OUT_QUEUE.
 * This is only correct if the two queue limits and pin types are
 * identical — should use MAX_OUT_QUEUE / sizeof(*mconfig->m_out_pin).
 */
2439 devm_kcalloc(dev, MAX_IN_QUEUE,
2440 sizeof(*mconfig->m_in_pin),
2442 if (!mconfig->m_out_pin)
2445 skl_tplg_fill_pin_dynamic_val(mconfig->m_out_pin, MAX_OUT_QUEUE,
2449 case SKL_TKN_U8_TIME_SLOT:
2450 mconfig->time_slot = tkn_elem->value;
2453 case SKL_TKN_U8_CORE_ID:
2454 mconfig->core_id = tkn_elem->value;
2457 case SKL_TKN_U8_MOD_TYPE:
2458 mconfig->m_type = tkn_elem->value;
2461 case SKL_TKN_U8_DEV_TYPE:
2462 mconfig->dev_type = tkn_elem->value;
2465 case SKL_TKN_U8_HW_CONN_TYPE:
2466 mconfig->hw_conn_type = tkn_elem->value;
2469 case SKL_TKN_U16_MOD_INST_ID:
2470 mconfig->id.instance_id =
/* Resource tokens share a single handler */
2474 case SKL_TKN_U32_MEM_PAGES:
2475 case SKL_TKN_U32_MAX_MCPS:
2476 case SKL_TKN_U32_OBS:
2477 case SKL_TKN_U32_IBS:
2478 ret = skl_tplg_fill_res_tkn(dev, tkn_elem, res, pin_index, dir);
2484 case SKL_TKN_U32_VBUS_ID:
2485 mconfig->vbus_id = tkn_elem->value;
2488 case SKL_TKN_U32_PARAMS_FIXUP:
2489 mconfig->params_fixup = tkn_elem->value;
2492 case SKL_TKN_U32_CONVERTER:
2493 mconfig->converter = tkn_elem->value;
2496 case SKL_TKN_U32_D0I3_CAPS:
2497 mconfig->d0i3_caps = tkn_elem->value;
2500 case SKL_TKN_U32_PIPE_ID:
2501 ret = skl_tplg_add_pipe(dev,
2502 mconfig, skl, tkn_elem);
/* -EEXIST means the pipe was already registered; not an error here */
2505 if (ret == -EEXIST) {
2509 return is_pipe_exists;
2514 case SKL_TKN_U32_PIPE_CONFIG_ID:
2515 conf_idx = tkn_elem->value;
/* Pipe-level tokens apply only once the pipe object exists */
2518 case SKL_TKN_U32_PIPE_CONN_TYPE:
2519 case SKL_TKN_U32_PIPE_PRIORITY:
2520 case SKL_TKN_U32_PIPE_MEM_PGS:
2521 case SKL_TKN_U32_PMODE:
2522 case SKL_TKN_U32_PIPE_DIRECTION:
2523 case SKL_TKN_U32_NUM_CONFIGS:
2524 if (is_pipe_exists) {
2525 ret = skl_tplg_fill_pipe_tkn(dev, mconfig->pipe,
2526 tkn_elem->token, tkn_elem->value);
/* Per-configuration tokens need nr_cfgs to have been parsed first */
2533 case SKL_TKN_U32_PATH_MEM_PGS:
2534 case SKL_TKN_U32_CFG_FREQ:
2535 case SKL_TKN_U8_CFG_CHAN:
2536 case SKL_TKN_U8_CFG_BPS:
2537 if (mconfig->pipe->nr_cfgs) {
2538 ret = skl_tplg_fill_pipe_cfg(dev, mconfig->pipe,
2539 tkn_elem->token, tkn_elem->value,
2546 case SKL_TKN_CFG_MOD_RES_ID:
2547 mconfig->mod_cfg[conf_idx].res_idx = tkn_elem->value;
2550 case SKL_TKN_CFG_MOD_FMT_ID:
2551 mconfig->mod_cfg[conf_idx].fmt_idx = tkn_elem->value;
2555 * SKL_TKN_U32_DIR_PIN_COUNT token has the value for both
2556 * direction and the pin count. The first four bits represent
2557 * direction and next four the pin count.
2559 case SKL_TKN_U32_DIR_PIN_COUNT:
2560 dir = tkn_elem->value & SKL_IN_DIR_BIT_MASK;
2561 pin_index = (tkn_elem->value &
2562 SKL_PIN_COUNT_MASK) >> 4;
/* Format tokens use the dir/pin_index parsed from DIR_PIN_COUNT */
2566 case SKL_TKN_U32_FMT_CH:
2567 case SKL_TKN_U32_FMT_FREQ:
2568 case SKL_TKN_U32_FMT_BIT_DEPTH:
2569 case SKL_TKN_U32_FMT_SAMPLE_SIZE:
2570 case SKL_TKN_U32_FMT_CH_CONFIG:
2571 case SKL_TKN_U32_FMT_INTERLEAVE:
2572 case SKL_TKN_U32_FMT_SAMPLE_TYPE:
2573 case SKL_TKN_U32_FMT_CH_MAP:
2574 ret = skl_tplg_widget_fill_fmt(dev, iface, tkn_elem->token,
2575 tkn_elem->value, dir, pin_index);
2582 case SKL_TKN_U32_PIN_MOD_ID:
2583 case SKL_TKN_U32_PIN_INST_ID:
2585 ret = skl_tplg_fill_pins_info(dev,
2586 mconfig, tkn_elem, dir,
2593 case SKL_TKN_U32_CAPS_SIZE:
2594 mconfig->formats_config.caps_size =
2599 case SKL_TKN_U32_CAPS_SET_PARAMS:
2600 mconfig->formats_config.set_params =
2604 case SKL_TKN_U32_CAPS_PARAMS_ID:
2605 mconfig->formats_config.param_id =
2609 case SKL_TKN_U32_PROC_DOMAIN:
2615 case SKL_TKN_U32_DMA_BUF_SIZE:
2616 mconfig->dma_buffer_size = tkn_elem->value;
/* Recognized but intentionally ignored tokens */
2619 case SKL_TKN_U8_IN_PIN_TYPE:
2620 case SKL_TKN_U8_OUT_PIN_TYPE:
2621 case SKL_TKN_U8_CONN_TYPE:
2625 dev_err(dev, "Token %d not handled\n",
2636 * Parse the vendor array for specific tokens to construct
2637 * module private data
2639 static int skl_tplg_get_tokens(struct device *dev,
2640 char *pvt_data, struct skl *skl,
2641 struct skl_module_cfg *mconfig, int block_size)
2643 struct snd_soc_tplg_vendor_array *array;
2644 struct snd_soc_tplg_vendor_value_elem *tkn_elem;
2645 int tkn_count = 0, ret;
2646 int off = 0, tuple_size = 0;
/* The first UUID tuple is the module GUID; later ones are pin UUIDs */
2647 bool is_module_guid = true;
2649 if (block_size <= 0)
/* Walk the vendor arrays until the whole tuple block is consumed */
2652 while (tuple_size < block_size) {
2653 array = (struct snd_soc_tplg_vendor_array *)(pvt_data + off);
2657 switch (array->type) {
2658 case SND_SOC_TPLG_TUPLE_TYPE_STRING:
2659 dev_warn(dev, "no string tokens expected for skl tplg\n");
2662 case SND_SOC_TPLG_TUPLE_TYPE_UUID:
2663 if (is_module_guid) {
2664 ret = skl_tplg_get_uuid(dev, mconfig->guid,
2666 is_module_guid = false;
2668 ret = skl_tplg_get_token(dev, array->value, skl,
2675 tuple_size += sizeof(*array->uuid);
2680 tkn_elem = array->value;
/* Value tuples: each get_token call may consume several elements */
2685 while (tkn_count <= (array->num_elems - 1)) {
2686 ret = skl_tplg_get_token(dev, tkn_elem,
2692 tkn_count = tkn_count + ret;
2696 tuple_size += tkn_count * sizeof(*tkn_elem);
2703 * Every data block is preceded by a descriptor to read the number
2704 * of data blocks, the type of the block and its size
2706 static int skl_tplg_get_desc_blocks(struct device *dev,
2707 struct snd_soc_tplg_vendor_array *array)
2709 struct snd_soc_tplg_vendor_value_elem *tkn_elem;
/* A descriptor array carries exactly one value element */
2711 tkn_elem = array->value;
2713 switch (tkn_elem->token) {
2714 case SKL_TKN_U8_NUM_BLOCKS:
2715 case SKL_TKN_U8_BLOCK_TYPE:
2716 case SKL_TKN_U16_BLOCK_SIZE:
2717 return tkn_elem->value;
2720 dev_err(dev, "Invalid descriptor token %d\n", tkn_elem->token);
2727 /* Functions to parse private data from configuration file format v4 */
2730 * Add pipeline from topology binary into driver pipeline list
2732 * If already added we return that instance
2733 * Otherwise we create a new instance and add into driver list
2735 static int skl_tplg_add_pipe_v4(struct device *dev,
2736 struct skl_module_cfg *mconfig, struct skl *skl,
2737 struct skl_dfw_v4_pipe *dfw_pipe)
2739 struct skl_pipeline *ppl;
2740 struct skl_pipe *pipe;
2741 struct skl_pipe_params *params;
/* Reuse an existing pipe with the same id, as in skl_tplg_add_pipe() */
2743 list_for_each_entry(ppl, &skl->ppl_list, node) {
2744 if (ppl->pipe->ppl_id == dfw_pipe->pipe_id) {
2745 mconfig->pipe = ppl->pipe;
2750 ppl = devm_kzalloc(dev, sizeof(*ppl), GFP_KERNEL);
2754 pipe = devm_kzalloc(dev, sizeof(*pipe), GFP_KERNEL);
2758 params = devm_kzalloc(dev, sizeof(*params), GFP_KERNEL);
/* v4 binaries store the pipe attributes directly in the dfw struct */
2762 pipe->ppl_id = dfw_pipe->pipe_id;
2763 pipe->memory_pages = dfw_pipe->memory_pages;
2764 pipe->pipe_priority = dfw_pipe->pipe_priority;
2765 pipe->conn_type = dfw_pipe->conn_type;
2766 pipe->state = SKL_PIPE_INVALID;
2767 pipe->p_params = params;
2768 INIT_LIST_HEAD(&pipe->w_list);
2771 list_add(&ppl->node, &skl->ppl_list);
2773 mconfig->pipe = pipe;
/* Copy pin ids from the v4 firmware description into the runtime pin
 * array, marking each pin unused and unbound.
 */
2778 static void skl_fill_module_pin_info_v4(struct skl_dfw_v4_module_pin *dfw_pin,
2779 struct skl_module_pin *m_pin,
2780 bool is_dynamic, int max_pin)
2784 for (i = 0; i < max_pin; i++) {
2785 m_pin[i].id.module_id = dfw_pin[i].module_id;
2786 m_pin[i].id.instance_id = dfw_pin[i].instance_id;
2787 m_pin[i].in_use = false;
2788 m_pin[i].is_dynamic = is_dynamic;
2789 m_pin[i].pin_state = SKL_PIN_UNBIND;
/* Copy per-pin audio formats from the v4 firmware description into the
 * runtime pin-format array, field by field.
 */
2793 static void skl_tplg_fill_fmt_v4(struct skl_module_pin_fmt *dst_fmt,
2794 struct skl_dfw_v4_module_fmt *src_fmt,
2799 for (i = 0; i < pins; i++) {
2800 dst_fmt[i].fmt.channels = src_fmt[i].channels;
2801 dst_fmt[i].fmt.s_freq = src_fmt[i].freq;
2802 dst_fmt[i].fmt.bit_depth = src_fmt[i].bit_depth;
2803 dst_fmt[i].fmt.valid_bit_depth = src_fmt[i].valid_bit_depth;
2804 dst_fmt[i].fmt.ch_cfg = src_fmt[i].ch_cfg;
2805 dst_fmt[i].fmt.ch_map = src_fmt[i].ch_map;
2806 dst_fmt[i].fmt.interleaving_style =
2807 src_fmt[i].interleaving_style;
2808 dst_fmt[i].fmt.sample_type = src_fmt[i].sample_type;
/* Parse a widget's private data in the legacy v4 binary layout and
 * populate the module config (ids, resources, formats, pins, pipe and
 * format-caps blob).
 */
2812 static int skl_tplg_get_pvt_data_v4(struct snd_soc_tplg_dapm_widget *tplg_w,
2813 struct skl *skl, struct device *dev,
2814 struct skl_module_cfg *mconfig)
2816 struct skl_dfw_v4_module *dfw =
2817 (struct skl_dfw_v4_module *)tplg_w->priv.data;
2820 dev_dbg(dev, "Parsing Skylake v4 widget topology data\n");
2822 ret = guid_parse(dfw->uuid, (guid_t *)mconfig->guid);
/* module_id is resolved later against the loaded firmware */
2825 mconfig->id.module_id = -1;
2826 mconfig->id.instance_id = dfw->instance_id;
2827 mconfig->module->resources[0].cps = dfw->max_mcps;
2828 mconfig->module->resources[0].ibs = dfw->ibs;
2829 mconfig->module->resources[0].obs = dfw->obs;
2830 mconfig->core_id = dfw->core_id;
2831 mconfig->module->max_input_pins = dfw->max_in_queue;
2832 mconfig->module->max_output_pins = dfw->max_out_queue;
2833 mconfig->module->loadable = dfw->is_loadable;
2834 skl_tplg_fill_fmt_v4(mconfig->module->formats[0].inputs, dfw->in_fmt,
2836 skl_tplg_fill_fmt_v4(mconfig->module->formats[0].outputs, dfw->out_fmt,
2839 mconfig->params_fixup = dfw->params_fixup;
2840 mconfig->converter = dfw->converter;
2841 mconfig->m_type = dfw->module_type;
2842 mconfig->vbus_id = dfw->vbus_id;
2843 mconfig->module->resources[0].is_pages = dfw->mem_pages;
2845 ret = skl_tplg_add_pipe_v4(dev, mconfig, skl, &dfw->pipe);
2849 mconfig->dev_type = dfw->dev_type;
2850 mconfig->hw_conn_type = dfw->hw_conn_type;
2851 mconfig->time_slot = dfw->time_slot;
2852 mconfig->formats_config.caps_size = dfw->caps.caps_size;
2854 mconfig->m_in_pin = devm_kcalloc(dev,
MAX_IN_QUEUE, sizeof(*mconfig->m_in_pin),
2857 if (!mconfig->m_in_pin)
2860 mconfig->m_out_pin = devm_kcalloc(dev,
MAX_OUT_QUEUE, sizeof(*mconfig->m_out_pin),
2863 if (!mconfig->m_out_pin)
2866 skl_fill_module_pin_info_v4(dfw->in_pin, mconfig->m_in_pin,
2867 dfw->is_dynamic_in_pin,
2868 mconfig->module->max_input_pins);
2869 skl_fill_module_pin_info_v4(dfw->out_pin, mconfig->m_out_pin,
2870 dfw->is_dynamic_out_pin,
2871 mconfig->module->max_output_pins);
/* Copy the optional caps blob out of the topology image */
2873 if (mconfig->formats_config.caps_size) {
2874 mconfig->formats_config.set_params = dfw->caps.set_params;
2875 mconfig->formats_config.param_id = dfw->caps.param_id;
2876 mconfig->formats_config.caps =
2877 devm_kzalloc(dev, mconfig->formats_config.caps_size,
2879 if (!mconfig->formats_config.caps)
2881 memcpy(mconfig->formats_config.caps, dfw->caps.caps,
2882 dfw->caps.caps_size);
2889 * Parse the private data for the token and corresponding value.
2890 * The private data can have multiple data blocks. So, a data block
2891 * is preceded by a descriptor for number of blocks and a descriptor
2892 * for the type and size of the succeeding data block.
/*
 * Parse a widget's private data. Dispatches to the v4 parser when the
 * data begins with a textual UUID; otherwise walks the descriptor-prefixed
 * data blocks and parses tuples or copies a raw caps blob.
 */
2894 static int skl_tplg_get_pvt_data(struct snd_soc_tplg_dapm_widget *tplg_w,
2895 struct skl *skl, struct device *dev,
2896 struct skl_module_cfg *mconfig)
2898 struct snd_soc_tplg_vendor_array *array;
2899 int num_blocks, block_size = 0, block_type, off = 0;
2904 * v4 configuration files have a valid UUID at the start of
2905 * the widget's private data.
2907 if (uuid_is_valid((char *)tplg_w->priv.data))
2908 return skl_tplg_get_pvt_data_v4(tplg_w, skl, dev, mconfig);
2910 /* Read the NUM_DATA_BLOCKS descriptor */
2911 array = (struct snd_soc_tplg_vendor_array *)tplg_w->priv.data;
2912 ret = skl_tplg_get_desc_blocks(dev, array);
2918 /* Read the BLOCK_TYPE and BLOCK_SIZE descriptor */
2919 while (num_blocks > 0) {
2920 array = (struct snd_soc_tplg_vendor_array *)
2921 (tplg_w->priv.data + off);
2923 ret = skl_tplg_get_desc_blocks(dev, array);
2930 array = (struct snd_soc_tplg_vendor_array *)
2931 (tplg_w->priv.data + off);
2933 ret = skl_tplg_get_desc_blocks(dev, array);
/* Position at the payload of the current data block */
2940 array = (struct snd_soc_tplg_vendor_array *)
2941 (tplg_w->priv.data + off);
2943 data = (tplg_w->priv.data + off);
2945 if (block_type == SKL_TYPE_TUPLE) {
2946 ret = skl_tplg_get_tokens(dev, data,
2947 skl, mconfig, block_size);
/* Non-tuple block: raw caps blob copied as-is into formats_config */
2954 if (mconfig->formats_config.caps_size > 0)
2955 memcpy(mconfig->formats_config.caps, data,
2956 mconfig->formats_config.caps_size);
2958 ret = mconfig->formats_config.caps_size;
/*
 * Reset a widget's module pin bookkeeping: mark all input/output pins
 * unused and unbound, invalidate the owning pipe, and mark the module
 * uninitialized. Only acts on widgets that belong to @component (matched
 * by component-name prefix).
 */
2966 static void skl_clear_pin_config(struct snd_soc_component *component,
2967 struct snd_soc_dapm_widget *w)
2970 struct skl_module_cfg *mconfig;
2971 struct skl_pipe *pipe;
/* Prefix compare: widget's dapm component name vs. this component */
2973 if (!strncmp(w->dapm->component->name, component->name,
2974 strlen(component->name))) {
2976 pipe = mconfig->pipe;
2977 for (i = 0; i < mconfig->module->max_input_pins; i++) {
2978 mconfig->m_in_pin[i].in_use = false;
2979 mconfig->m_in_pin[i].pin_state = SKL_PIN_UNBIND;
2981 for (i = 0; i < mconfig->module->max_output_pins; i++) {
2982 mconfig->m_out_pin[i].in_use = false;
2983 mconfig->m_out_pin[i].pin_state = SKL_PIN_UNBIND;
2985 pipe->state = SKL_PIPE_INVALID;
2986 mconfig->m_state = SKL_MODULE_UNINIT;
/*
 * Release all DSP resource accounting: zero the memory/MCPS counters,
 * clear pin configuration for every SKL DSP widget on the card, and
 * reset the DSP module use counts. Safe no-op before the component or
 * card is fully instantiated.
 */
2990 void skl_cleanup_resources(struct skl *skl)
2992 struct skl_sst *ctx = skl->skl_sst;
2993 struct snd_soc_component *soc_component = skl->component;
2994 struct snd_soc_dapm_widget *w;
2995 struct snd_soc_card *card;
2997 if (soc_component == NULL)
3000 card = soc_component->card;
3001 if (!card || !card->instantiated)
3004 skl->resource.mem = 0;
3005 skl->resource.mcps = 0;
/* Only widgets with private data (module configs) need clearing */
3007 list_for_each_entry(w, &card->widgets, list) {
3008 if (is_skl_dsp_widget_type(w, ctx->dev) && w->priv != NULL)
3009 skl_clear_pin_config(soc_component, w);
3012 skl_clear_module_cnt(ctx->dsp);
3016 * Topology core widget load callback
3018 * This is used to save the private data for each widget which gives
3019 * information to the driver about the module and pipeline parameters that the
3020 * DSP FW expects, such as ids, resource values, formats, etc.
/*
 * Topology widget-load callback: allocate a module config for the widget,
 * parse its private data (tuples or v4 blob), initialize debugfs for the
 * module, and bind any event handler the topology requests.
 */
3022 static int skl_tplg_widget_load(struct snd_soc_component *cmpnt, int index,
3023 struct snd_soc_dapm_widget *w,
3024 struct snd_soc_tplg_dapm_widget *tplg_w)
3027 struct hdac_bus *bus = snd_soc_component_get_drvdata(cmpnt);
3028 struct skl *skl = bus_to_skl(bus);
3029 struct skl_module_cfg *mconfig;
/* Widgets without private data are not DSP modules; nothing to do */
3031 if (!tplg_w->priv.size)
3034 mconfig = devm_kzalloc(bus->dev, sizeof(*mconfig), GFP_KERNEL);
/* Per-widget module struct only when the manifest declared no modules */
3039 if (skl->nr_modules == 0) {
3040 mconfig->module = devm_kzalloc(bus->dev,
3041 sizeof(*mconfig->module), GFP_KERNEL);
3042 if (!mconfig->module)
3049 * module binary can be loaded later, so set it to query when
3050 * module is load for a use case
3052 mconfig->id.module_id = -1;
3054 /* Parse private data for tuples */
3055 ret = skl_tplg_get_pvt_data(tplg_w, skl, bus->dev, mconfig);
3059 skl_debug_init_module(skl->debugfs, w, mconfig);
3062 if (tplg_w->event_type == 0) {
3063 dev_dbg(bus->dev, "ASoC: No event handler required\n");
3067 ret = snd_soc_tplg_widget_bind_event(w, skl_tplg_widget_ops,
3068 ARRAY_SIZE(skl_tplg_widget_ops),
3069 tplg_w->event_type);
3072 dev_err(bus->dev, "%s: No matching event handlers found for %d\n",
3073 __func__, tplg_w->event_type);
/*
 * Build the driver-side algo data for a bytes-ext control from the
 * topology private data and attach it to the control's dobj.
 * Returns 0 on success or a negative errno on allocation failure.
 */
3080 static int skl_init_algo_data(struct device *dev, struct soc_bytes_ext *be,
3081 struct snd_soc_tplg_bytes_control *bc)
3083 struct skl_algo_data *ac;
3084 struct skl_dfw_algo_data *dfw_ac =
3085 (struct skl_dfw_algo_data *)bc->priv.data;
3087 ac = devm_kzalloc(dev, sizeof(*ac), GFP_KERNEL);
3091 /* Fill private data */
3092 ac->max = dfw_ac->max;
3093 ac->param_id = dfw_ac->param_id;
3094 ac->set_params = dfw_ac->set_params;
/* Initial size equals the maximum parameter size */
3095 ac->size = dfw_ac->max;
3098 ac->params = devm_kzalloc(dev, ac->max, GFP_KERNEL);
3102 memcpy(ac->params, dfw_ac->params, ac->max);
3105 be->dobj.private = ac;
3109 static int skl_init_enum_data(struct device *dev, struct soc_enum *se,
3110 struct snd_soc_tplg_enum_control *ec)
3115 if (ec->priv.size) {
3116 data = devm_kzalloc(dev, sizeof(ec->priv.size), GFP_KERNEL);
3119 memcpy(data, ec->priv.data, ec->priv.size);
3120 se->dobj.private = data;
/*
 * Topology control-load callback: attach driver private data to bytes
 * (TLV) and enum controls based on the control header's ops.info type.
 * Controls of other types are logged and left untouched.
 */
3127 static int skl_tplg_control_load(struct snd_soc_component *cmpnt,
3129 struct snd_kcontrol_new *kctl,
3130 struct snd_soc_tplg_ctl_hdr *hdr)
3132 struct soc_bytes_ext *sb;
3133 struct snd_soc_tplg_bytes_control *tplg_bc;
3134 struct snd_soc_tplg_enum_control *tplg_ec;
3135 struct hdac_bus *bus = snd_soc_component_get_drvdata(cmpnt);
3136 struct soc_enum *se;
3138 switch (hdr->ops.info) {
3139 case SND_SOC_TPLG_CTL_BYTES:
3140 tplg_bc = container_of(hdr,
3141 struct snd_soc_tplg_bytes_control, hdr);
/* Only TLV-capable bytes controls carry algo private data */
3142 if (kctl->access & SNDRV_CTL_ELEM_ACCESS_TLV_CALLBACK) {
3143 sb = (struct soc_bytes_ext *)kctl->private_value;
3144 if (tplg_bc->priv.size)
3145 return skl_init_algo_data(
3146 bus->dev, sb, tplg_bc);
3150 case SND_SOC_TPLG_CTL_ENUM:
3151 tplg_ec = container_of(hdr,
3152 struct snd_soc_tplg_enum_control, hdr);
/* Only read/write enum controls get private data attached */
3153 if (kctl->access & SNDRV_CTL_ELEM_ACCESS_READWRITE) {
3154 se = (struct soc_enum *)kctl->private_value;
3155 if (tplg_ec->priv.size)
3156 return skl_init_enum_data(bus->dev, se,
3162 dev_dbg(bus->dev, "Control load not supported %d:%d:%d\n",
3163 hdr->ops.get, hdr->ops.put, hdr->ops.info);
/*
 * Handle one string token from the manifest. Currently only
 * SKL_TKN_STR_LIB_NAME is recognized; each occurrence fills the next
 * lib_info[] name slot (tracked by the static ref_count, so this parser
 * is single-shot/non-reentrant by design).
 * NOTE(review): strncpy with ARRAY_SIZE(dest) may leave the destination
 * unterminated if the source fills it exactly — confirm lib name lengths.
 */
3170 static int skl_tplg_fill_str_mfest_tkn(struct device *dev,
3171 struct snd_soc_tplg_vendor_string_elem *str_elem,
3175 static int ref_count;
3177 switch (str_elem->token) {
3178 case SKL_TKN_STR_LIB_NAME:
/* Reject more library names than the manifest's declared lib_count */
3179 if (ref_count > skl->skl_sst->lib_count - 1) {
3184 strncpy(skl->skl_sst->lib_info[ref_count].name,
3186 ARRAY_SIZE(skl->skl_sst->lib_info[ref_count].name));
3191 dev_err(dev, "Not a string token %d\n", str_elem->token);
/*
 * Iterate over all string elements in a vendor array, feeding each to
 * skl_tplg_fill_str_mfest_tkn(). Returns the token count consumed (via
 * tkn_count accumulation) or an error from the per-token handler.
 */
3199 static int skl_tplg_get_str_tkn(struct device *dev,
3200 struct snd_soc_tplg_vendor_array *array,
3203 int tkn_count = 0, ret;
3204 struct snd_soc_tplg_vendor_string_elem *str_elem;
3206 str_elem = (struct snd_soc_tplg_vendor_string_elem *)array->value;
3207 while (tkn_count < array->num_elems) {
3208 ret = skl_tplg_fill_str_mfest_tkn(dev, str_elem, skl);
3214 tkn_count = tkn_count + ret;
/*
 * Fill one pin-format entry of a module interface from a manifest token.
 * @dir selects input vs. output pin array; @fmt_idx indexes into it.
 * SKL_TKN_MM_U32_INTF_PIN_ID sets the pin id directly; all other format
 * tokens are delegated to skl_tplg_fill_fmt().
 */
3220 static int skl_tplg_manifest_fill_fmt(struct device *dev,
3221 struct skl_module_iface *fmt,
3222 struct snd_soc_tplg_vendor_value_elem *tkn_elem,
3223 u32 dir, int fmt_idx)
3225 struct skl_module_pin_fmt *dst_fmt;
3226 struct skl_module_fmt *mod_fmt;
3234 dst_fmt = &fmt->inputs[fmt_idx];
3238 dst_fmt = &fmt->outputs[fmt_idx];
3242 dev_err(dev, "Invalid direction: %d\n", dir);
3246 mod_fmt = &dst_fmt->fmt;
3248 switch (tkn_elem->token) {
3249 case SKL_TKN_MM_U32_INTF_PIN_ID:
3250 dst_fmt->id = tkn_elem->value;
3254 ret = skl_tplg_fill_fmt(dev, mod_fmt, tkn_elem->token,
/*
 * Copy a module-level manifest token (pin types, queue counts, number of
 * resources/interfaces) into the given skl_module. Unknown tokens are
 * logged as an error.
 */
3264 static int skl_tplg_fill_mod_info(struct device *dev,
3265 struct snd_soc_tplg_vendor_value_elem *tkn_elem,
3266 struct skl_module *mod)
3272 switch (tkn_elem->token) {
3273 case SKL_TKN_U8_IN_PIN_TYPE:
3274 mod->input_pin_type = tkn_elem->value;
3277 case SKL_TKN_U8_OUT_PIN_TYPE:
3278 mod->output_pin_type = tkn_elem->value;
3281 case SKL_TKN_U8_IN_QUEUE_COUNT:
3282 mod->max_input_pins = tkn_elem->value;
3285 case SKL_TKN_U8_OUT_QUEUE_COUNT:
3286 mod->max_output_pins = tkn_elem->value;
3289 case SKL_TKN_MM_U8_NUM_RES:
3290 mod->nr_resources = tkn_elem->value;
3293 case SKL_TKN_MM_U8_NUM_INTF:
3294 mod->nr_interfaces = tkn_elem->value;
3298 dev_err(dev, "Invalid mod info token %d", tkn_elem->token);
/*
 * Handle one integer-valued manifest token. Index tokens (MOD_IDX,
 * RES_ID, FMT_ID, DIR_PIN_COUNT) are stored in static locals so that
 * subsequent value tokens know which module/resource/interface/pin they
 * apply to — the parser is therefore stateful and non-reentrant, and
 * assumes tokens arrive in manifest order. Returns the number of token
 * elements consumed, or a negative errno.
 */
3306 static int skl_tplg_get_int_tkn(struct device *dev,
3307 struct snd_soc_tplg_vendor_value_elem *tkn_elem,
3310 int tkn_count = 0, ret, size;
/* Cross-call parse state: current module/resource/interface/pin */
3311 static int mod_idx, res_val_idx, intf_val_idx, dir, pin_idx;
3312 struct skl_module_res *res = NULL;
3313 struct skl_module_iface *fmt = NULL;
3314 struct skl_module *mod = NULL;
/* A-state table also persists across calls while being filled in */
3315 static struct skl_astate_param *astate_table;
3316 static int astate_cfg_idx, count;
3320 mod = skl->modules[mod_idx];
3321 res = &mod->resources[res_val_idx];
3322 fmt = &mod->formats[intf_val_idx];
3325 switch (tkn_elem->token) {
3326 case SKL_TKN_U32_LIB_COUNT:
3327 skl->skl_sst->lib_count = tkn_elem->value;
3330 case SKL_TKN_U8_NUM_MOD:
/* Allocate the per-module array the moment the count is known */
3331 skl->nr_modules = tkn_elem->value;
3332 skl->modules = devm_kcalloc(dev, skl->nr_modules,
3333 sizeof(*skl->modules), GFP_KERNEL);
3337 for (i = 0; i < skl->nr_modules; i++) {
3338 skl->modules[i] = devm_kzalloc(dev,
3339 sizeof(struct skl_module), GFP_KERNEL);
3340 if (!skl->modules[i])
3345 case SKL_TKN_MM_U8_MOD_IDX:
3346 mod_idx = tkn_elem->value;
3349 case SKL_TKN_U32_ASTATE_COUNT:
/* Only one A-state table may be declared per manifest */
3350 if (astate_table != NULL) {
3351 dev_err(dev, "More than one entry for A-State count");
3355 if (tkn_elem->value > SKL_MAX_ASTATE_CFG) {
3356 dev_err(dev, "Invalid A-State count %d\n",
3361 size = tkn_elem->value * sizeof(struct skl_astate_param) +
3363 skl->cfg.astate_cfg = devm_kzalloc(dev, size, GFP_KERNEL);
3364 if (!skl->cfg.astate_cfg)
3367 astate_table = skl->cfg.astate_cfg->astate_table;
3368 count = skl->cfg.astate_cfg->count = tkn_elem->value;
3371 case SKL_TKN_U32_ASTATE_IDX:
3372 if (tkn_elem->value >= count) {
3373 dev_err(dev, "Invalid A-State index %d\n",
3378 astate_cfg_idx = tkn_elem->value;
3381 case SKL_TKN_U32_ASTATE_KCPS:
3382 astate_table[astate_cfg_idx].kcps = tkn_elem->value;
3385 case SKL_TKN_U32_ASTATE_CLK_SRC:
3386 astate_table[astate_cfg_idx].clk_src = tkn_elem->value;
/* Module-level tokens are delegated to skl_tplg_fill_mod_info() */
3389 case SKL_TKN_U8_IN_PIN_TYPE:
3390 case SKL_TKN_U8_OUT_PIN_TYPE:
3391 case SKL_TKN_U8_IN_QUEUE_COUNT:
3392 case SKL_TKN_U8_OUT_QUEUE_COUNT:
3393 case SKL_TKN_MM_U8_NUM_RES:
3394 case SKL_TKN_MM_U8_NUM_INTF:
3395 ret = skl_tplg_fill_mod_info(dev, tkn_elem, mod);
/* Direction bit and pin index are packed in one token value */
3400 case SKL_TKN_U32_DIR_PIN_COUNT:
3401 dir = tkn_elem->value & SKL_IN_DIR_BIT_MASK;
3402 pin_idx = (tkn_elem->value & SKL_PIN_COUNT_MASK) >> 4;
3405 case SKL_TKN_MM_U32_RES_ID:
3409 res->id = tkn_elem->value;
3410 res_val_idx = tkn_elem->value;
3413 case SKL_TKN_MM_U32_FMT_ID:
3417 fmt->fmt_idx = tkn_elem->value;
3418 intf_val_idx = tkn_elem->value;
/* Resource tokens are delegated to skl_tplg_fill_res_tkn() */
3421 case SKL_TKN_MM_U32_CPS:
3422 case SKL_TKN_MM_U32_DMA_SIZE:
3423 case SKL_TKN_MM_U32_CPC:
3424 case SKL_TKN_U32_MEM_PAGES:
3425 case SKL_TKN_U32_OBS:
3426 case SKL_TKN_U32_IBS:
3427 case SKL_TKN_MM_U32_RES_PIN_ID:
3428 case SKL_TKN_MM_U32_PIN_BUF:
3429 ret = skl_tplg_fill_res_tkn(dev, tkn_elem, res, pin_idx, dir);
3435 case SKL_TKN_MM_U32_NUM_IN_FMT:
3439 res->nr_input_pins = tkn_elem->value;
3442 case SKL_TKN_MM_U32_NUM_OUT_FMT:
3446 res->nr_output_pins = tkn_elem->value;
/* Format tokens are delegated to skl_tplg_manifest_fill_fmt() */
3449 case SKL_TKN_U32_FMT_CH:
3450 case SKL_TKN_U32_FMT_FREQ:
3451 case SKL_TKN_U32_FMT_BIT_DEPTH:
3452 case SKL_TKN_U32_FMT_SAMPLE_SIZE:
3453 case SKL_TKN_U32_FMT_CH_CONFIG:
3454 case SKL_TKN_U32_FMT_INTERLEAVE:
3455 case SKL_TKN_U32_FMT_SAMPLE_TYPE:
3456 case SKL_TKN_U32_FMT_CH_MAP:
3457 case SKL_TKN_MM_U32_INTF_PIN_ID:
3458 ret = skl_tplg_manifest_fill_fmt(dev, fmt, tkn_elem,
3465 dev_err(dev, "Not a manifest token %d\n", tkn_elem->token);
/*
 * Store one UUID token from the manifest into the next module's uuid
 * field. The static ref_count advances across calls, so UUID tokens are
 * assigned to modules in manifest order (non-reentrant parser state).
 */
3473 static int skl_tplg_get_manifest_uuid(struct device *dev,
3475 struct snd_soc_tplg_vendor_uuid_elem *uuid_tkn)
3477 static int ref_count;
3478 struct skl_module *mod;
3480 if (uuid_tkn->token == SKL_TKN_UUID) {
3481 mod = skl->modules[ref_count];
3482 memcpy(&mod->uuid, &uuid_tkn->uuid, sizeof(uuid_tkn->uuid));
3485 dev_err(dev, "Not an UUID token tkn %d\n", uuid_tkn->token);
3493 * Fill the manifest structure by parsing the tokens based on the
/*
 * Walk one manifest data block of @block_size bytes, dispatching each
 * vendor array by its type: string tokens, UUID tokens, or integer
 * tokens. Advances off/tuple_size by the number of elements consumed.
 */
3496 static int skl_tplg_get_manifest_tkn(struct device *dev,
3497 char *pvt_data, struct skl *skl,
3500 int tkn_count = 0, ret;
3501 int off = 0, tuple_size = 0;
3502 struct snd_soc_tplg_vendor_array *array;
3503 struct snd_soc_tplg_vendor_value_elem *tkn_elem;
3505 if (block_size <= 0)
3508 while (tuple_size < block_size) {
3509 array = (struct snd_soc_tplg_vendor_array *)(pvt_data + off);
3511 switch (array->type) {
3512 case SND_SOC_TPLG_TUPLE_TYPE_STRING:
3513 ret = skl_tplg_get_str_tkn(dev, array, skl);
3519 tuple_size += tkn_count *
3520 sizeof(struct snd_soc_tplg_vendor_string_elem);
3523 case SND_SOC_TPLG_TUPLE_TYPE_UUID:
3524 ret = skl_tplg_get_manifest_uuid(dev, skl, array->uuid);
3528 tuple_size += sizeof(*array->uuid);
/* Default: integer-valued tokens, consumed one element at a time */
3532 tkn_elem = array->value;
3537 while (tkn_count <= array->num_elems - 1) {
3538 ret = skl_tplg_get_int_tkn(dev,
3543 tkn_count = tkn_count + ret;
3546 tuple_size += (tkn_count * sizeof(*tkn_elem));
3554 * Parse manifest private data for tokens. The private data block is
3555 * preceded by descriptors for type and size of data block.
/*
 * Parse the manifest private data: read the NUM_DATA_BLOCKS descriptor,
 * then for each block read its type and size descriptors and hand tuple
 * blocks to skl_tplg_get_manifest_tkn().
 */
3557 static int skl_tplg_get_manifest_data(struct snd_soc_tplg_manifest *manifest,
3558 struct device *dev, struct skl *skl)
3560 struct snd_soc_tplg_vendor_array *array;
3561 int num_blocks, block_size = 0, block_type, off = 0;
3565 /* Read the NUM_DATA_BLOCKS descriptor */
3566 array = (struct snd_soc_tplg_vendor_array *)manifest->priv.data;
3567 ret = skl_tplg_get_desc_blocks(dev, array);
3573 /* Read the BLOCK_TYPE and BLOCK_SIZE descriptor */
3574 while (num_blocks > 0) {
3575 array = (struct snd_soc_tplg_vendor_array *)
3576 (manifest->priv.data + off);
3577 ret = skl_tplg_get_desc_blocks(dev, array);
3584 array = (struct snd_soc_tplg_vendor_array *)
3585 (manifest->priv.data + off);
3587 ret = skl_tplg_get_desc_blocks(dev, array);
/* Position at the payload of the current data block */
3594 array = (struct snd_soc_tplg_vendor_array *)
3595 (manifest->priv.data + off);
3597 data = (manifest->priv.data + off);
3599 if (block_type == SKL_TYPE_TUPLE) {
3600 ret = skl_tplg_get_manifest_tkn(dev, data, skl,
/*
 * Topology manifest-load callback: parse manifest private data (if any)
 * and validate that the declared library count does not exceed
 * SKL_MAX_LIB.
 */
3616 static int skl_manifest_load(struct snd_soc_component *cmpnt, int index,
3617 struct snd_soc_tplg_manifest *manifest)
3619 struct hdac_bus *bus = snd_soc_component_get_drvdata(cmpnt);
3620 struct skl *skl = bus_to_skl(bus);
3622 /* proceed only if we have private data defined */
3623 if (manifest->priv.size == 0)
3626 skl_tplg_get_manifest_data(manifest, bus->dev, skl);
3628 if (skl->skl_sst->lib_count > SKL_MAX_LIB) {
3629 dev_err(bus->dev, "Exceeding max Library count. Got:%d\n",
3630 skl->skl_sst->lib_count);
/* Topology callbacks registered with the ASoC topology core */
3637 static struct snd_soc_tplg_ops skl_tplg_ops = {
3638 .widget_load = skl_tplg_widget_load,
3639 .control_load = skl_tplg_control_load,
3640 .bytes_ext_ops = skl_tlv_ops,
3641 .bytes_ext_ops_count = ARRAY_SIZE(skl_tlv_ops),
3642 .io_ops = skl_tplg_kcontrol_ops,
3643 .io_ops_count = ARRAY_SIZE(skl_tplg_kcontrol_ops),
3644 .manifest = skl_manifest_load,
3645 .dai_load = skl_dai_load,
3649 * A pipe can have multiple modules, each of them will be a DAPM widget as
3650 * well. While managing a pipeline we need to get the list of all the
3651 * widgets in a pipeline, so this helper - skl_tplg_create_pipe_widget_list()
3652 * helps to get the SKL type widgets in that pipeline
/*
 * Build each pipe's widget list: for every SKL DSP widget on the card
 * with private data, allocate a pipe-module node and append it to the
 * owning pipe's w_list.
 */
3654 static int skl_tplg_create_pipe_widget_list(struct snd_soc_component *component)
3656 struct snd_soc_dapm_widget *w;
3657 struct skl_module_cfg *mcfg = NULL;
3658 struct skl_pipe_module *p_module = NULL;
3659 struct skl_pipe *pipe;
3661 list_for_each_entry(w, &component->card->widgets, list) {
3662 if (is_skl_dsp_widget_type(w, component->dev) && w->priv) {
3666 p_module = devm_kzalloc(component->dev,
3667 sizeof(*p_module), GFP_KERNEL);
3672 list_add_tail(&p_module->node, &pipe->w_list);
/*
 * Classify a pipe as passthru when it contains both a host-side module
 * (SKL_DEVICE_HDAHOST) and a link-side module (any non-NONE, non-host
 * device type); otherwise it is a normal pipe.
 */
3679 static void skl_tplg_set_pipe_type(struct skl *skl, struct skl_pipe *pipe)
3681 struct skl_pipe_module *w_module;
3682 struct snd_soc_dapm_widget *w;
3683 struct skl_module_cfg *mconfig;
3684 bool host_found = false, link_found = false;
3686 list_for_each_entry(w_module, &pipe->w_list, node) {
3690 if (mconfig->dev_type == SKL_DEVICE_HDAHOST)
3692 else if (mconfig->dev_type != SKL_DEVICE_NONE)
3696 if (host_found && link_found)
3697 pipe->passthru = true;
3699 pipe->passthru = false;
3702 /* This will be read from topology manifest, currently defined here */
3703 #define SKL_MAX_MCPS 30000000
3704 #define SKL_FW_MAX_MEM 1000000
3707 * SKL topology init routine
3709 int skl_tplg_init(struct snd_soc_component *component, struct hdac_bus *bus)
3712 const struct firmware *fw;
3713 struct skl *skl = bus_to_skl(bus);
3714 struct skl_pipeline *ppl;
3716 ret = request_firmware(&fw, skl->tplg_name, bus->dev);
3718 dev_info(bus->dev, "tplg fw %s load failed with %d, falling back to dfw_sst.bin",
3719 skl->tplg_name, ret);
3720 ret = request_firmware(&fw, "dfw_sst.bin", bus->dev);
3722 dev_err(bus->dev, "Fallback tplg fw %s load failed with %d\n",
3723 "dfw_sst.bin", ret);
3729 * The complete tplg for SKL is loaded as index 0, we don't use
3732 ret = snd_soc_tplg_component_load(component,
3733 &skl_tplg_ops, fw, 0);
3735 dev_err(bus->dev, "tplg component load failed%d\n", ret);
3736 release_firmware(fw);
3740 skl->resource.max_mcps = SKL_MAX_MCPS;
3741 skl->resource.max_mem = SKL_FW_MAX_MEM;
3744 ret = skl_tplg_create_pipe_widget_list(component);
3748 list_for_each_entry(ppl, &skl->ppl_list, node)
3749 skl_tplg_set_pipe_type(skl, ppl->pipe);