static int hci_dma_init()

in master/mipi-i3c-hci/dma.c [208:338]

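Queries how many DMA ring headers the controller implements and, for each one: maps its register block, allocates the command/response descriptor rings plus CPU-side bookkeeping, programs ring bases and sizes, enables interrupts, sets up IBI status/data buffers on IBI-capable rings, and finally enables the ring. Any failure unwinds through hci_dma_cleanup().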

static int hci_dma_init(struct i3c_hci *hci)
{
	struct hci_rings_data *rings;
	struct hci_rh_data *rh;
	u32 regval;
	unsigned int i, nr_rings, xfers_sz, resps_sz;
	unsigned int ibi_status_ring_sz, ibi_data_ring_sz;
	int ret;

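	/* How many ring headers does this controller implement? */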
	regval = rhs_reg_read(CONTROL);
	nr_rings = FIELD_GET(MAX_HEADER_COUNT_CAP, regval);
	dev_info(&hci->master.dev, "%d DMA rings available\n", nr_rings);
	if (unlikely(nr_rings > 8)) {
		dev_err(&hci->master.dev, "number of rings should be <= 8\n");
		nr_rings = 8;
	}
	if (nr_rings > XFER_RINGS)
		nr_rings = XFER_RINGS;
	rings = kzalloc(struct_size(rings, headers, nr_rings), GFP_KERNEL);
	if (!rings)
		return -ENOMEM;
	hci->io_data = rings;
	rings->total = nr_rings;

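	/* Set up each ring header in turn. */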
	for (i = 0; i < rings->total; i++) {
		u32 offset = rhs_reg_read(RHn_OFFSET(i));

		dev_info(&hci->master.dev, "Ring %d at offset %#x\n", i, offset);
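		/* A zero offset means this ring header is not implemented. */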
		ret = -EINVAL;
		if (!offset)
			goto err_out;
		rh = &rings->headers[i];
		rh->regs = hci->base_regs + offset;
		spin_lock_init(&rh->lock);
		init_completion(&rh->op_done);

		rh->xfer_entries = XFER_RING_ENTRIES;

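		/*
		 * Command and response descriptor sizes are hardware
		 * properties reported in CR_SETUP; size both rings
		 * accordingly.
		 */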
		regval = rh_reg_read(CR_SETUP);
		rh->xfer_struct_sz = FIELD_GET(CR_XFER_STRUCT_SIZE, regval);
		rh->resp_struct_sz = FIELD_GET(CR_RESP_STRUCT_SIZE, regval);
		DBG("xfer_struct_sz = %d, resp_struct_sz = %d",
		    rh->xfer_struct_sz, rh->resp_struct_sz);
		xfers_sz = rh->xfer_struct_sz * rh->xfer_entries;
		resps_sz = rh->resp_struct_sz * rh->xfer_entries;

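		/*
		 * Both rings are shared with the controller, hence the
		 * coherent allocations; src_xfers is CPU-only bookkeeping
		 * (one originating transfer pointer per ring slot).
		 */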
		rh->xfer = dma_alloc_coherent(&hci->master.dev, xfers_sz,
					      &rh->xfer_dma, GFP_KERNEL);
		rh->resp = dma_alloc_coherent(&hci->master.dev, resps_sz,
					      &rh->resp_dma, GFP_KERNEL);
		rh->src_xfers =
			kmalloc_array(rh->xfer_entries, sizeof(*rh->src_xfers),
				      GFP_KERNEL);
		ret = -ENOMEM;
		if (!rh->xfer || !rh->resp || !rh->src_xfers)
			goto err_out;

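		/*
		 * Tell the controller where the rings live and how many
		 * entries they hold.
		 */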
		rh_reg_write(CMD_RING_BASE_LO, lo32(rh->xfer_dma));
		rh_reg_write(CMD_RING_BASE_HI, hi32(rh->xfer_dma));
		rh_reg_write(RESP_RING_BASE_LO, lo32(rh->resp_dma));
		rh_reg_write(RESP_RING_BASE_HI, hi32(rh->resp_dma));

		regval = FIELD_PREP(CR_RING_SIZE, rh->xfer_entries);
		rh_reg_write(CR_SETUP, regval);

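		/*
		 * Record all events in INTR_STATUS, but only signal the
		 * ones listed below on the interrupt line.
		 */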
		rh_reg_write(INTR_STATUS_ENABLE, 0xffffffff);
		rh_reg_write(INTR_SIGNAL_ENABLE, INTR_IBI_READY |
						 INTR_TRANSFER_COMPLETION |
						 INTR_RING_OP |
						 INTR_TRANSFER_ERR |
						 INTR_WARN_INS_STOP_MODE |
						 INTR_IBI_RING_FULL |
						 INTR_TRANSFER_ABORT);

		/* IBIs: only the first IBI_RINGS rings take in-band interrupts */

		if (i >= IBI_RINGS)
			goto ring_ready;

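		/*
		 * The status descriptor size comes from hardware; ring and
		 * chunk pool sizes are driver constants.
		 */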
		regval = rh_reg_read(IBI_SETUP);
		rh->ibi_status_sz = FIELD_GET(IBI_STATUS_STRUCT_SIZE, regval);
		rh->ibi_status_entries = IBI_STATUS_RING_ENTRIES;
		rh->ibi_chunks_total = IBI_CHUNK_POOL_SIZE;

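		/*
		 * IBI data chunks span whole cache lines so each chunk can
		 * be DMA-synced on its own; chunks larger than 256 bytes
		 * are not supported.
		 */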
		rh->ibi_chunk_sz = dma_get_cache_alignment();
		rh->ibi_chunk_sz *= IBI_CHUNK_CACHELINES;
		BUG_ON(rh->ibi_chunk_sz > 256);

		ibi_status_ring_sz = rh->ibi_status_sz * rh->ibi_status_entries;
		ibi_data_ring_sz = rh->ibi_chunk_sz * rh->ibi_chunks_total;

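		/*
		 * The status ring is coherent; IBI data is one kmalloc'ed
		 * buffer streaming-mapped for device writes.
		 */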
		rh->ibi_status =
			dma_alloc_coherent(&hci->master.dev, ibi_status_ring_sz,
					   &rh->ibi_status_dma, GFP_KERNEL);
		rh->ibi_data = kmalloc(ibi_data_ring_sz, GFP_KERNEL);
		ret = -ENOMEM;
		if (!rh->ibi_status || !rh->ibi_data)
			goto err_out;
		rh->ibi_data_dma =
			dma_map_single(&hci->master.dev, rh->ibi_data,
				       ibi_data_ring_sz, DMA_FROM_DEVICE);
		if (dma_mapping_error(&hci->master.dev, rh->ibi_data_dma)) {
			rh->ibi_data_dma = 0;
			ret = -ENOMEM;
			goto err_out;
		}

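		/*
		 * Program the IBI ring geometry. IBI_DATA_CHUNK_SIZE takes
		 * ilog2(chunk size) - 2: a 64-byte chunk is encoded as 4.
		 */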
		regval = FIELD_PREP(IBI_STATUS_RING_SIZE,
				    rh->ibi_status_entries) |
			 FIELD_PREP(IBI_DATA_CHUNK_SIZE,
				    ilog2(rh->ibi_chunk_sz) - 2) |
			 FIELD_PREP(IBI_DATA_CHUNK_COUNT,
				    rh->ibi_chunks_total);
		rh_reg_write(IBI_SETUP, regval);

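		/* IBI_READY must be signaled on rings that deliver IBIs. */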
		regval = rh_reg_read(INTR_SIGNAL_ENABLE);
		regval |= INTR_IBI_READY;
		rh_reg_write(INTR_SIGNAL_ENABLE, regval);

ring_ready:
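		/* All set: let the controller run this ring. */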
		rh_reg_write(RING_CONTROL, RING_CTRL_ENABLE);
	}

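	/* Advertise how many ring headers are now in use. */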
	regval = FIELD_PREP(MAX_HEADER_COUNT, rings->total);
	rhs_reg_write(CONTROL, regval);
	return 0;

err_out:
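	/* May be reached with a ring only partially set up; cleanup handles that. */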
	hci_dma_cleanup(hci);
	return ret;
}
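
For context on the single kzalloc() above: struct hci_rings_data ends in a flexible array member, so struct_size() computes the size of the header plus nr_rings per-ring slots in one overflow-checked expression. A minimal sketch of the pattern, assuming only what the struct_size() call and the field accesses above imply (the real structures in the driver carry many more fields):

#include <linux/overflow.h>	/* struct_size() */
#include <linux/slab.h>		/* kzalloc() */

struct hci_rh_data {
	void __iomem *regs;		/* ring header register block */
	/* ... remaining per-ring state, abbreviated ... */
};

struct hci_rings_data {
	unsigned int total;
	struct hci_rh_data headers[];	/* one slot per DMA ring */
};

/*
 * struct_size(rings, headers, nr_rings) evaluates to
 * sizeof(struct hci_rings_data) + nr_rings * sizeof(struct hci_rh_data),
 * so a single kzalloc() covers the container and every ring slot.
 */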