
Searched full:lli (Results 1 – 25 of 61) sorted by relevance


/linux-6.14.4/drivers/scsi/cxlflash/
lunmgt.c
35 struct llun_info *lli = NULL; in create_local() local
37 lli = kzalloc(sizeof(*lli), GFP_KERNEL); in create_local()
38 if (unlikely(!lli)) { in create_local()
39 dev_err(dev, "%s: could not allocate lli\n", __func__); in create_local()
43 lli->sdev = sdev; in create_local()
44 lli->host_no = sdev->host->host_no; in create_local()
45 lli->in_table = false; in create_local()
47 memcpy(lli->wwid, wwid, DK_CXLFLASH_MANAGE_LUN_WWID_LEN); in create_local()
49 return lli; in create_local()
86 struct llun_info *lli, *temp; in lookup_local() local
[all …]
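
The lunmgt.c excerpt shows the create_local() pattern: zero-allocate the per-LUN bookkeeping, fail loudly if the allocation does not come through, then fill in the identifying fields. A minimal sketch of the same pattern, using hypothetical my_lun_info/my_create_local names rather than the cxlflash types:

#include <linux/device.h>
#include <linux/list.h>
#include <linux/slab.h>
#include <linux/string.h>
#include <linux/types.h>

#define MY_WWID_LEN 16        /* assumption: fixed-width WWID buffer */

struct my_lun_info {
        struct list_head list;        /* hangs off a per-host lookup table */
        bool in_table;
        u8 wwid[MY_WWID_LEN];
};

static struct my_lun_info *my_create_local(struct device *dev, const u8 *wwid)
{
        struct my_lun_info *lli;

        lli = kzalloc(sizeof(*lli), GFP_KERNEL);        /* zeroed, may sleep */
        if (unlikely(!lli)) {
                dev_err(dev, "%s: could not allocate lli\n", __func__);
                return NULL;
        }

        lli->in_table = false;        /* redundant after kzalloc(), kept for clarity */
        memcpy(lli->wwid, wwid, MY_WWID_LEN);
        return lli;
}
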
vlun.c
364 * @lli: LUN information structure that owns the block allocator.
368 static int init_vlun(struct llun_info *lli) in init_vlun() argument
371 struct glun_info *gli = lli->parent; in init_vlun()
378 blka->ba_lun.lun_id = lli->lun_index; in init_vlun()
389 pr_debug("%s: returning rc=%d lli=%p\n", __func__, rc, lli); in init_vlun()
510 struct llun_info *lli = sdev->hostdata; in grow_lxt() local
511 struct glun_info *gli = lli->parent; in grow_lxt()
575 (lli->lun_index << LXT_LUNIDX_SHIFT) | in grow_lxt()
577 lli->port_sel)); in grow_lxt()
628 struct llun_info *lli = sdev->hostdata; in shrink_lxt() local
[all …]
superpipe.c
162 struct llun_info *lli = arg; in get_context() local
168 lli = NULL; in get_context()
218 if (lli) { in get_context()
220 if (lun_access->lli == lli) in get_context()
308 * @lli: LUN destined for capacity request.
330 static int read_cap16(struct scsi_device *sdev, struct llun_info *lli) in read_cap16() argument
334 struct glun_info *gli = lli->parent; in read_cap16()
432 * @lli: LUN associated with request.
437 struct llun_info *lli) in get_rhte() argument
455 if (unlikely(ctxi->rht_lun[rhndl] != lli)) { in get_rhte()
[all …]
superpipe.h
69 struct llun_info *lli; member
138 struct llun_info *lli);
141 struct llun_info *lli);
/linux-6.14.4/drivers/dma/
ste_dma40_ll.c
133 static int d40_phy_fill_lli(struct d40_phy_lli *lli, in d40_phy_fill_lli() argument
161 lli->reg_elt = (data_size / data_width) << D40_SREG_ELEM_PHY_ECNT_POS; in d40_phy_fill_lli()
168 lli->reg_elt |= data_width << D40_SREG_ELEM_PHY_EIDX_POS; in d40_phy_fill_lli()
171 lli->reg_ptr = data; in d40_phy_fill_lli()
172 lli->reg_cfg = reg_cfg; in d40_phy_fill_lli()
176 lli->reg_lnk = BIT(D40_SREG_LNK_PHY_TCP_POS); in d40_phy_fill_lli()
178 lli->reg_lnk = next_lli; in d40_phy_fill_lli()
182 lli->reg_cfg |= BIT(D40_SREG_CFG_TIM_POS); in d40_phy_fill_lli()
184 lli->reg_cfg &= ~BIT(D40_SREG_CFG_TIM_POS); in d40_phy_fill_lli()
213 d40_phy_buf_to_lli(struct d40_phy_lli *lli, dma_addr_t addr, u32 size, in d40_phy_buf_to_lli() argument
[all …]
loongson1-apb-dma.c
176 /* allocate memory for querying the current lli */ in ls1x_dma_alloc_chan_resources()
193 struct ls1x_dma_lli *lli, *_lli; in ls1x_dma_free_desc() local
195 list_for_each_entry_safe(lli, _lli, &desc->lli_list, node) { in ls1x_dma_free_desc()
196 list_del(&lli->node); in ls1x_dma_free_desc()
197 dma_pool_free(chan->lli_pool, lli, lli->phys); in ls1x_dma_free_desc()
221 struct ls1x_dma_lli *lli, *prev = NULL, *first = NULL; in ls1x_dma_prep_lli() local
255 lli = dma_pool_zalloc(chan->lli_pool, GFP_NOWAIT, &phys); in ls1x_dma_prep_lli()
256 if (!lli) { in ls1x_dma_prep_lli()
257 dev_err(dev, "failed to alloc lli %u\n", i); in ls1x_dma_prep_lli()
262 lli->phys = phys; in ls1x_dma_prep_lli()
[all …]
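
ls1x_dma_free_desc() above tears the LLI chain down with list_for_each_entry_safe(), which caches the next node so each entry can be unlinked and handed back to the DMA pool mid-walk. A hedged sketch of that teardown idiom, with hypothetical my_lli/my_desc types:

#include <linux/dmapool.h>
#include <linux/list.h>
#include <linux/types.h>

struct my_lli {
        struct list_head node;
        dma_addr_t phys;        /* bus address to hand back to dma_pool_free() */
};

struct my_desc {
        struct list_head lli_list;
};

static void my_free_desc(struct dma_pool *lli_pool, struct my_desc *desc)
{
        struct my_lli *lli, *tmp;

        /* the _safe variant keeps iterating correctly after list_del(&lli->node) */
        list_for_each_entry_safe(lli, tmp, &desc->lli_list, node) {
                list_del(&lli->node);
                dma_pool_free(lli_pool, lli, lli->phys);
        }
}
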
owl-dma.c
174 * @lli_list: link list of lli nodes
217 * @lli_pool: a pool for the LLI descriptors
337 static u32 llc_hw_flen(struct owl_dma_lli *lli) in llc_hw_flen() argument
339 return lli->hw[OWL_DMADESC_FLEN] & GENMASK(19, 0); in llc_hw_flen()
343 struct owl_dma_lli *lli) in owl_dma_free_lli() argument
345 list_del(&lli->node); in owl_dma_free_lli()
346 dma_pool_free(od->lli_pool, lli, lli->phys); in owl_dma_free_lli()
351 struct owl_dma_lli *lli; in owl_dma_alloc_lli() local
354 lli = dma_pool_alloc(od->lli_pool, GFP_NOWAIT, &phys); in owl_dma_alloc_lli()
355 if (!lli) in owl_dma_alloc_lli()
[all …]
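
owl-dma allocates every LLI from a dma_pool, so each descriptor comes back with both a CPU pointer and the bus address the controller will follow. A hedged sketch of the full pool lifecycle; the pool name, element layout, and 16-byte alignment are assumptions, not owl-dma's actual values:

#include <linux/device.h>
#include <linux/dmapool.h>
#include <linux/errno.h>
#include <linux/gfp.h>
#include <linux/types.h>

struct my_hw_lli {                      /* hypothetical hardware descriptor */
        __le32 src, dst, len, next;
};

static int my_lli_pool_demo(struct device *dev)
{
        struct dma_pool *pool;
        struct my_hw_lli *lli;
        dma_addr_t phys;

        pool = dma_pool_create("my-lli", dev, sizeof(*lli), 16, 0);
        if (!pool)
                return -ENOMEM;

        lli = dma_pool_zalloc(pool, GFP_NOWAIT, &phys); /* phys links the chain */
        if (!lli) {
                dma_pool_destroy(pool);
                return -ENOMEM;
        }

        /* ... fill *lli and store phys in the previous descriptor's link field ... */

        dma_pool_free(pool, lli, phys);
        dma_pool_destroy(pool);
        return 0;
}
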
at_hdmac.c
196 /* LLI == Linked List Item; aka DMA buffer descriptor */
205 u32 dscr; /* chain to next lli */
211 * @lli: linked list item that is passed to the DMA controller
212 * @lli_phys: physical address of the LLI.
216 struct at_lli *lli; member
343 * @lli_pool: hw lli table
413 static void atc_dump_lli(struct at_dma_chan *atchan, struct at_lli *lli) in atc_dump_lli() argument
417 &lli->saddr, &lli->daddr, in atc_dump_lli()
418 lli->ctrla, lli->ctrlb, &lli->dscr); in atc_dump_lli()
482 u32 ctrlb = desc->sg[i].lli->ctrlb; in set_lli_eol()
[all …]
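
at_hdmac.c states the key idea directly: "LLI == Linked List Item; aka DMA buffer descriptor". Each item carries source, destination, control words, and the bus address of the next item, so the controller walks the whole chain without CPU involvement. A generic, hedged sketch of such a descriptor; the field names are illustrative, not the at_hdmac layout:

#include <asm/byteorder.h>
#include <linux/types.h>

/* Illustrative in-memory LLI; a real controller fixes the exact word layout. */
struct my_lli {
        __le32 saddr;   /* source bus address */
        __le32 daddr;   /* destination bus address */
        __le32 ctrl;    /* width, burst size, transfer length */
        __le32 dscr;    /* bus address of the next LLI */
};

static void my_link_lli(struct my_lli *prev, u32 next_phys)
{
        /* chains are linked by bus addresses; CPU pointers never reach the HW */
        prev->dscr = cpu_to_le32(next_phys);
}

static void my_terminate_chain(struct my_lli *last)
{
        /* a zero link is a common end-of-chain marker (k3dma and at_hdmac use it) */
        last->dscr = 0;
}
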
amba-pl08x.c
30 * - 8-word aligned LLI, instead of 4-word, due to extra CCTL2 word,
52 * which occur for the current LLI entry, and the DMAC raises TC at the
53 * end of every LLI entry. Observed behaviour shows the DMAC listening
59 * zero). The data is transferred from the current LLI entry, until
61 * will then move to the next LLI entry. Unsupported by PL080S.
106 * register and LLI word for transfer size.
145 * @reg_lli: transfer LLI address register
272 * @pool: a pool for the LLI descriptors
273 * @lli_buses: bitmask to or in to LLI pointer selecting AHB port for LLI
276 * @lli_words: how many words are used in each LLI item for this variant
[all …]
idma64.c
222 dma_pool_free(idma64c->pool, hw->lli, hw->llp); in idma64_desc_free()
241 struct idma64_lli *lli = hw->lli; in idma64_hw_desc_fill() local
263 lli->sar = sar; in idma64_hw_desc_fill()
264 lli->dar = dar; in idma64_hw_desc_fill()
266 lli->ctlhi = ctlhi; in idma64_hw_desc_fill()
267 lli->ctllo = ctllo | in idma64_hw_desc_fill()
273 lli->llp = llp; in idma64_hw_desc_fill()
282 struct idma64_lli *lli = hw->lli; in idma64_desc_fill() local
294 lli->ctllo |= IDMA64C_CTLL_INT_EN; in idma64_desc_fill()
297 lli->ctllo &= ~(IDMA64C_CTLL_LLP_S_EN | IDMA64C_CTLL_LLP_D_EN); in idma64_desc_fill()
[all …]
sun6i-dma.c
95 * LLI address mangling
97 * The LLI link physical address is also mangled, but we avoid dealing
149 * Hardware representation of the LLI
352 struct sun6i_dma_lli *lli; in sun6i_get_chan_size() local
362 for (lli = txd->v_lli; lli; lli = lli->v_lli_next) { in sun6i_get_chan_size()
363 if (lli->p_lli_next == pos) { in sun6i_get_chan_size()
364 for (lli = lli->v_lli_next; lli; lli = lli->v_lli_next) in sun6i_get_chan_size()
365 bytes += lli->len; in sun6i_get_chan_size()
672 dev_err(sdev->slave.dev, "Failed to alloc lli memory\n"); in sun6i_dma_prep_dma_memcpy()
814 dev_err(sdev->slave.dev, "Failed to alloc lli memory\n"); in sun6i_dma_prep_dma_cyclic()
[all …]
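
sun6i_get_chan_size() above computes the transfer residue by walking the virtual LLI chain: it locates the descriptor the hardware is currently on (the excerpt compares each node's p_lli_next against the latched link address) and sums the lengths of everything after it. A hedged sketch of that walk over a hypothetical chain:

#include <linux/types.h>

struct my_lli {
        dma_addr_t p_next;      /* physical link address, what the hardware follows */
        struct my_lli *v_next;  /* CPU-side mirror of the same link */
        u32 len;                /* bytes covered by this descriptor */
};

/* Bytes still pending, given the link address currently latched by the channel. */
static u32 my_residue(struct my_lli *head, dma_addr_t hw_link)
{
        struct my_lli *lli;
        u32 bytes = 0;

        for (lli = head; lli; lli = lli->v_next) {
                if (lli->p_next == hw_link) {
                        /* descriptors after the in-flight one have not run yet */
                        for (lli = lli->v_next; lli; lli = lli->v_next)
                                bytes += lli->len;
                        break;
                }
        }
        return bytes;
}
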
ste_dma40_ll.h
12 #define D40_LLI_ALIGN 16 /* LLI alignment must be 16 bytes. */
326 /* LLI related structures */
367 * struct d40_log_lli - logical lli configuration
384 * @src: pointer to src lli configuration.
385 * @dst: pointer to dst lli configuration.
444 struct d40_phy_lli *lli,
k3dma.c
61 u32 lli; member
163 writel_relaxed(hw->lli, phy->base + CX_LLI); in k3_dma_set_desc()
411 /* end of lli */ in k3_dma_tx_status()
412 if (!ds->desc_hw[index].lli) in k3_dma_tx_status()
450 ds->desc_hw[num].lli = ds->desc_hw_lli + (num + 1) * in k3_dma_fill_desc()
453 ds->desc_hw[num].lli |= CX_LLI_CHAIN_EN; in k3_dma_fill_desc()
526 ds->desc_hw[num-1].lli = 0; /* end of link */ in k3_dma_prep_memcpy()
581 ds->desc_hw[num-1].lli = 0; /* end of link */ in k3_dma_prep_slave_sg()
647 ds->desc_hw[num - 1].lli |= ds->desc_hw_lli; in k3_dma_prep_dma_cyclic()
/linux-6.14.4/drivers/dma/dw-axi-dmac/
dw-axi-dmac-platform.c
313 struct axi_dma_lli *lli; in axi_desc_get() local
316 lli = dma_pool_zalloc(chan->desc_pool, GFP_NOWAIT, &phys); in axi_desc_get()
317 if (unlikely(!lli)) { in axi_desc_get()
326 return lli; in axi_desc_get()
338 dma_pool_free(chan->desc_pool, hw_desc->lli, hw_desc->llp); in axi_desc_put()
391 desc->lli->llp = cpu_to_le64(adr); in write_desc_llp()
429 u8 lms = 0; /* Select AXI0 master for LLI fetching */ in axi_chan_block_xfer_start()
527 /* LLI address must be aligned to a 64-byte boundary */ in dma_chan_alloc_chan_resources()
600 * If DW_axi_dmac sees CHx_CTL.ShadowReg_Or_LLI_Last bit of the fetched LLI
609 val = le32_to_cpu(desc->lli->ctl_hi); in set_desc_last()
[all …]
dw-axi-dmac.h
76 /* LLI == Linked List Item */
94 struct axi_dma_lli *lli; member
343 * @DWAXIDMAC_IRQ_LLI_RD_DEC_ERR: LLI read decode error
344 * @DWAXIDMAC_IRQ_LLI_WR_DEC_ERR: LLI write decode error
345 * @DWAXIDMAC_IRQ_LLI_RD_SLV_ERR: LLI read slave error
346 * @DWAXIDMAC_IRQ_LLI_WR_SLV_ERR: LLI write slave error
347 * @DWAXIDMAC_IRQ_INVALID_ERR: LLI invalid error or Shadow register error
/linux-6.14.4/drivers/dma/dw-edma/
dw-hdma-v0-core.c
161 struct dw_hdma_v0_lli *lli = chunk->ll_region.vaddr.mem + ofs; in dw_hdma_v0_write_ll_data() local
163 lli->control = control; in dw_hdma_v0_write_ll_data()
164 lli->transfer_size = size; in dw_hdma_v0_write_ll_data()
165 lli->sar.reg = sar; in dw_hdma_v0_write_ll_data()
166 lli->dar.reg = dar; in dw_hdma_v0_write_ll_data()
168 struct dw_hdma_v0_lli __iomem *lli = chunk->ll_region.vaddr.io + ofs; in dw_hdma_v0_write_ll_data() local
170 writel(control, &lli->control); in dw_hdma_v0_write_ll_data()
171 writel(size, &lli->transfer_size); in dw_hdma_v0_write_ll_data()
172 writeq(sar, &lli->sar.reg); in dw_hdma_v0_write_ll_data()
173 writeq(dar, &lli->dar.reg); in dw_hdma_v0_write_ll_data()
dw-edma-v0-core.c
285 struct dw_edma_v0_lli *lli = chunk->ll_region.vaddr.mem + ofs; in dw_edma_v0_write_ll_data() local
287 lli->control = control; in dw_edma_v0_write_ll_data()
288 lli->transfer_size = size; in dw_edma_v0_write_ll_data()
289 lli->sar.reg = sar; in dw_edma_v0_write_ll_data()
290 lli->dar.reg = dar; in dw_edma_v0_write_ll_data()
292 struct dw_edma_v0_lli __iomem *lli = chunk->ll_region.vaddr.io + ofs; in dw_edma_v0_write_ll_data() local
294 writel(control, &lli->control); in dw_edma_v0_write_ll_data()
295 writel(size, &lli->transfer_size); in dw_edma_v0_write_ll_data()
296 writeq(sar, &lli->sar.reg); in dw_edma_v0_write_ll_data()
297 writeq(dar, &lli->dar.reg); in dw_edma_v0_write_ll_data()
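
Both dw-hdma-v0-core.c and dw-edma-v0-core.c fill a link-list entry through two paths: plain structure stores when the LL region sits in system memory, and writel()/writeq() when it is device memory mapped as __iomem. A hedged sketch of that dual-path idiom; the entry layout and the is_iomem flag are assumptions:

#include <linux/io.h>
#include <linux/io-64-nonatomic-lo-hi.h>        /* provides writeq() on 32-bit builds */
#include <linux/types.h>

struct my_ll_entry {
        u32 control;
        u32 transfer_size;
        u64 sar;        /* source address register image */
        u64 dar;        /* destination address register image */
};

static void my_write_ll_entry(void *mem, void __iomem *io, bool is_iomem,
                              u32 control, u32 size, u64 sar, u64 dar)
{
        if (!is_iomem) {
                struct my_ll_entry *e = mem;    /* ordinary cacheable RAM */

                e->control = control;
                e->transfer_size = size;
                e->sar = sar;
                e->dar = dar;
        } else {
                struct my_ll_entry __iomem *e = io;     /* BAR space: MMIO accessors */

                writel(control, &e->control);
                writel(size, &e->transfer_size);
                writeq(sar, &e->sar);
                writeq(dar, &e->dar);
        }
}
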
/linux-6.14.4/Documentation/devicetree/bindings/dma/
arm-pl08x.yaml
55 lli-bus-interface-ahb1:
59 lli-bus-interface-ahb2:
114 lli-bus-interface-ahb1;
115 lli-bus-interface-ahb2;
135 lli-bus-interface-ahb2;
lpc1850-dmamux.txt
29 lli-bus-interface-ahb1;
30 lli-bus-interface-ahb2;
/linux-6.14.4/Documentation/devicetree/bindings/dma/stm32/
st,stm32-dma3.yaml
94 0x2: at LLI level, the transfer complete event is generated at the end
95 of the LLI transfer
96 including the update of the LLI if any
98 end of the last LLI
/linux-6.14.4/drivers/dma/dw/
regs.h
145 #define DWC_LLP_LOC(x) ((x) & ~3) /* next lli */
368 /* LLI == Linked List Item; a.k.a. DMA block descriptor */
373 __le32 llp; /* chain to next lli */
386 struct dw_lli lli; member
388 #define lli_set(d, reg, v) ((d)->lli.reg |= cpu_to_le32(v))
389 #define lli_clear(d, reg, v) ((d)->lli.reg &= ~cpu_to_le32(v))
390 #define lli_read(d, reg) le32_to_cpu((d)->lli.reg)
391 #define lli_write(d, reg, v) ((d)->lli.reg = cpu_to_le32(v))
/linux-6.14.4/lib/
test_scanf.c
221 simple_numbers_loop(long long, "%lld", "lli", check_ll); in numbers_simple()
224 simple_numbers_loop(long long, "0x%llx", "lli", check_ll); in numbers_simple()
405 numbers_list_8(long long, "%lld", delim, "lli", check_ll); in numbers_list_ll()
408 numbers_list_8(long long, "0x%llx", delim, "lli", check_ll); in numbers_list_ll()
464 numbers_list_fix_width(long long, "%lld", delim, 20, "lli", check_ll); in numbers_list_field_width_ll()
467 numbers_list_fix_width(long long, "0x%llx", delim, 18, "lli", check_ll); in numbers_list_field_width_ll()
536 numbers_list_val_width(long long, "%lld", delim, "lli", check_ll); in numbers_list_field_width_val_ll()
539 numbers_list_val_width(long long, "0x%llx", delim, "lli", check_ll); in numbers_list_field_width_val_ll()
630 test_number_prefix(long long, "-1 1", "%1lli %lli", 0, 0, 0, check_ll); in numbers_prefix_overflow()
646 test_number_prefix(long long, "0xA7", "%1lli%llx", 0, 0, 1, check_ll); in numbers_prefix_overflow()
[all …]
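
lib/test_scanf.c exercises the kernel's vsscanf() "%lli" conversion against values printed both as "%lld" and as "0x%llx": like userspace scanf(), the %i family auto-detects the base from a leading 0x or 0. A small standalone userspace illustration of that behaviour:

#include <stdio.h>

int main(void)
{
        long long a = 0, b = 0;

        /* %lli auto-detects the base: decimal and 0x-prefixed hex both parse */
        sscanf("-42", "%lli", &a);
        sscanf("0x7fffffffffffffff", "%lli", &b);

        printf("a=%lld b=%lld\n", a, b);        /* prints: a=-42 b=9223372036854775807 */
        return 0;
}
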
/linux-6.14.4/drivers/net/ethernet/chelsio/cxgb4/
cxgb4_uld.c
511 struct cxgb4_lld_info *lli) in uld_queue_init() argument
517 lli->rxq_ids = rxq_info->rspq_id; in uld_queue_init()
518 lli->nrxq = rxq_info->nrxq; in uld_queue_init()
519 lli->ciq_ids = rxq_info->rspq_id + rxq_info->nrxq; in uld_queue_init()
520 lli->nciq = rxq_info->nciq; in uld_queue_init()
521 lli->ntxq = txq_info->ntxq; in uld_queue_init()
646 struct cxgb4_lld_info lli; in uld_attach() local
649 uld_init(adap, &lli); in uld_attach()
650 uld_queue_init(adap, uld, &lli); in uld_attach()
652 handle = adap->uld[uld].add(&lli); in uld_attach()
/linux-6.14.4/drivers/dma/stm32/
stm32-dma3.c
274 struct stm32_dma3_lli lli[] __counted_by(lli_size);
376 hwdesc = swdesc->lli[i].hwdesc; in stm32_dma3_chan_dump_hwdesc()
379 dev_dbg(chan2dev(chan), "[%d]@%pad\n", i, &swdesc->lli[i].hwdesc_addr); in stm32_dma3_chan_dump_hwdesc()
390 dev_dbg(chan2dev(chan), "-->[0]@%pad\n", &swdesc->lli[0].hwdesc_addr); in stm32_dma3_chan_dump_hwdesc()
412 swdesc = kzalloc(struct_size(swdesc, lli, count), GFP_NOWAIT); in stm32_dma3_chan_desc_alloc()
418 swdesc->lli[i].hwdesc = dma_pool_zalloc(chan->lli_pool, GFP_NOWAIT, in stm32_dma3_chan_desc_alloc()
419 &swdesc->lli[i].hwdesc_addr); in stm32_dma3_chan_desc_alloc()
420 if (!swdesc->lli[i].hwdesc) in stm32_dma3_chan_desc_alloc()
426 writel_relaxed(swdesc->lli[0].hwdesc_addr & CLBAR_LBA, in stm32_dma3_chan_desc_alloc()
437 dma_pool_free(chan->lli_pool, swdesc->lli[i].hwdesc, swdesc->lli[i].hwdesc_addr); in stm32_dma3_chan_desc_alloc()
[all …]
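
stm32-dma3.c declares its software descriptor with a __counted_by() flexible array of LLIs, sizes the single allocation with struct_size(), and then draws each hardware descriptor from a dma_pool. A hedged sketch of the flexible-array part; the names and fields are illustrative:

#include <linux/overflow.h>
#include <linux/slab.h>
#include <linux/types.h>

struct my_lli {
        void *hwdesc;           /* CPU view of the hardware descriptor */
        dma_addr_t hwdesc_addr; /* bus address programmed into the chain */
};

struct my_swdesc {
        unsigned int lli_size;
        struct my_lli lli[] __counted_by(lli_size);
};

static struct my_swdesc *my_swdesc_alloc(unsigned int count)
{
        struct my_swdesc *swdesc;

        /* struct_size() = header + count * element, with overflow checking */
        swdesc = kzalloc(struct_size(swdesc, lli, count), GFP_NOWAIT);
        if (!swdesc)
                return NULL;

        swdesc->lli_size = count;       /* must match the __counted_by() bound */
        return swdesc;
}
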
/linux-6.14.4/mm/
hwpoison-inject.c
64 DEFINE_DEBUGFS_ATTRIBUTE(hwpoison_fops, NULL, hwpoison_inject, "%lli\n");
65 DEFINE_DEBUGFS_ATTRIBUTE(unpoison_fops, NULL, hwpoison_unpoison, "%lli\n");
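
mm/hwpoison-inject.c shows one more use of "%lli": as the parse/print format of a debugfs attribute created with DEFINE_DEBUGFS_ATTRIBUTE(), so text written to the file is converted to a signed 64-bit value before the callback runs. A hedged sketch of that pattern with a hypothetical write-only attribute:

#include <linux/debugfs.h>
#include <linux/module.h>

static int my_inject(void *data, u64 val)
{
        pr_info("my-lli-demo: injected value %llu\n", val);     /* stand-in action */
        return 0;
}

/* NULL getter makes the file effectively write-only; "%lli\n" parses the writes */
DEFINE_DEBUGFS_ATTRIBUTE(my_inject_fops, NULL, my_inject, "%lli\n");

static struct dentry *my_dir;

static int __init my_demo_init(void)
{
        my_dir = debugfs_create_dir("my-lli-demo", NULL);
        debugfs_create_file_unsafe("inject", 0200, my_dir, NULL, &my_inject_fops);
        return 0;
}

static void __exit my_demo_exit(void)
{
        debugfs_remove_recursive(my_dir);
}

module_init(my_demo_init);
module_exit(my_demo_exit);
MODULE_LICENSE("GPL");
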
