/* intel_ldma_probe() — excerpt from drivers/dma/lgm/lgm-dma.c [1564:1718] */

static int intel_ldma_probe(struct platform_device *pdev)
{
	struct device *dev = &pdev->dev;
	struct dma_device *dma_dev;
	unsigned long ch_mask;
	struct ldma_chan *c;
	struct ldma_port *p;
	struct ldma_dev *d;
	u32 id, bitn = 32, j;
	int i, ret;

	d = devm_kzalloc(dev, sizeof(*d), GFP_KERNEL);
	if (!d)
		return -ENOMEM;

	/* Link controller to platform device */
	d->dev = &pdev->dev;

	d->inst = device_get_match_data(dev);
	if (!d->inst) {
		dev_err(dev, "No device match found\n");
		return -ENODEV;
	}

	d->base = devm_platform_ioremap_resource(pdev, 0);
	if (IS_ERR(d->base))
		return PTR_ERR(d->base);

	/* Power up and reset the dma engine, some DMAs always on?? */
	d->core_clk = devm_clk_get_optional(dev, NULL);
	if (IS_ERR(d->core_clk))
		return PTR_ERR(d->core_clk);
	clk_prepare_enable(d->core_clk);

	d->rst = devm_reset_control_get_optional(dev, NULL);
	if (IS_ERR(d->rst))
		return PTR_ERR(d->rst);
	reset_control_deassert(d->rst);

	ret = devm_add_action_or_reset(dev, ldma_clk_disable, d);
	if (ret) {
		dev_err(dev, "Failed to devm_add_action_or_reset, %d\n", ret);
		return ret;
	}

	id = readl(d->base + DMA_ID);
	d->chan_nrs = FIELD_GET(DMA_ID_CHNR, id);
	d->port_nrs = FIELD_GET(DMA_ID_PNR, id);
	d->ver = FIELD_GET(DMA_ID_REV, id);

	if (id & DMA_ID_AW_36B)
		d->flags |= DMA_ADDR_36BIT;

	if (IS_ENABLED(CONFIG_64BIT) && (id & DMA_ID_AW_36B))
		bitn = 36;

	if (id & DMA_ID_DW_128B)
		d->flags |= DMA_DATA_128BIT;

	ret = dma_set_mask_and_coherent(dev, DMA_BIT_MASK(bitn));
	if (ret) {
		dev_err(dev, "No usable DMA configuration\n");
		return ret;
	}

	if (d->ver == DMA_VER22) {
		ret = ldma_init_v22(d, pdev);
		if (ret)
			return ret;
	}

	ret = device_property_read_u32(dev, "dma-channel-mask", &d->channels_mask);
	if (ret < 0)
		d->channels_mask = GENMASK(d->chan_nrs - 1, 0);

	dma_dev = &d->dma_dev;

	dma_cap_zero(dma_dev->cap_mask);
	dma_cap_set(DMA_SLAVE, dma_dev->cap_mask);

	/* Channel initializations */
	INIT_LIST_HEAD(&dma_dev->channels);

	/* Port Initializations */
	d->ports = devm_kcalloc(dev, d->port_nrs, sizeof(*p), GFP_KERNEL);
	if (!d->ports)
		return -ENOMEM;

	/* Channels Initializations */
	d->chans = devm_kcalloc(d->dev, d->chan_nrs, sizeof(*c), GFP_KERNEL);
	if (!d->chans)
		return -ENOMEM;

	for (i = 0; i < d->port_nrs; i++) {
		p = &d->ports[i];
		p->portid = i;
		p->ldev = d;
	}

	ret = ldma_cfg_init(d);
	if (ret)
		return ret;

	dma_dev->dev = &pdev->dev;

	ch_mask = (unsigned long)d->channels_mask;
	for_each_set_bit(j, &ch_mask, d->chan_nrs) {
		if (d->ver == DMA_VER22)
			ldma_dma_init_v22(j, d);
		else
			ldma_dma_init_v3X(j, d);
	}

	dma_dev->device_alloc_chan_resources = ldma_alloc_chan_resources;
	dma_dev->device_free_chan_resources = ldma_free_chan_resources;
	dma_dev->device_terminate_all = ldma_terminate_all;
	dma_dev->device_issue_pending = ldma_issue_pending;
	dma_dev->device_tx_status = ldma_tx_status;
	dma_dev->device_resume = ldma_resume_chan;
	dma_dev->device_pause = ldma_pause_chan;
	dma_dev->device_prep_slave_sg = ldma_prep_slave_sg;

	if (d->ver == DMA_VER22) {
		dma_dev->device_config = ldma_slave_config;
		dma_dev->device_synchronize = ldma_synchronize;
		dma_dev->src_addr_widths = BIT(DMA_SLAVE_BUSWIDTH_4_BYTES);
		dma_dev->dst_addr_widths = BIT(DMA_SLAVE_BUSWIDTH_4_BYTES);
		dma_dev->directions = BIT(DMA_MEM_TO_DEV) |
				      BIT(DMA_DEV_TO_MEM);
		dma_dev->residue_granularity =
					DMA_RESIDUE_GRANULARITY_DESCRIPTOR;
	}

	platform_set_drvdata(pdev, d);

	ldma_dev_init(d);

	ret = dma_async_device_register(dma_dev);
	if (ret) {
		dev_err(dev, "Failed to register slave DMA engine device\n");
		return ret;
	}

	ret = of_dma_controller_register(pdev->dev.of_node, ldma_xlate, d);
	if (ret) {
		dev_err(dev, "Failed to register of DMA controller\n");
		dma_async_device_unregister(dma_dev);
		return ret;
	}

	dev_info(dev, "Init done - rev: %x, ports: %d channels: %d\n", d->ver,
		 d->port_nrs, d->chan_nrs);

	return 0;
}