Lines Matching full:wq
166 struct idxd_wq *wq; member
189 struct idxd_wq *wq; member
202 struct workqueue_struct *wq; member
307 struct idxd_wq *wq; member
364 struct workqueue_struct *wq; member
429 struct idxd_wq *wq; member
441 #define wq_confdev(wq) &wq->idxd_dev.conf_dev argument
451 static inline struct idxd_device_driver *wq_to_idxd_drv(struct idxd_wq *wq) in wq_to_idxd_drv() argument
453 struct device *dev = wq_confdev(wq); in wq_to_idxd_drv()
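A minimal sketch of how these two accessors compose, assuming the wq is already bound to a driver; example_log_wq_driver() is a hypothetical helper for illustration, not part of idxd.h:

/* Hypothetical helper: resolve the wq's conf_dev and the bound idxd driver. */
static void example_log_wq_driver(struct idxd_wq *wq)
{
	struct device *conf_dev = wq_confdev(wq);
	struct idxd_device_driver *idxd_drv = wq_to_idxd_drv(wq);

	dev_dbg(conf_dev, "wq bound to %s\n", idxd_drv->name);
}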
559 static inline bool is_idxd_wq_dmaengine(struct idxd_wq *wq) in is_idxd_wq_dmaengine() argument
561 if (wq->type == IDXD_WQT_KERNEL && strcmp(wq->name, "dmaengine") == 0) in is_idxd_wq_dmaengine()
566 static inline bool is_idxd_wq_user(struct idxd_wq *wq) in is_idxd_wq_user() argument
568 return wq->type == IDXD_WQT_USER; in is_idxd_wq_user()
571 static inline bool is_idxd_wq_kernel(struct idxd_wq *wq) in is_idxd_wq_kernel() argument
573 return wq->type == IDXD_WQT_KERNEL; in is_idxd_wq_kernel()
576 static inline bool wq_dedicated(struct idxd_wq *wq) in wq_dedicated() argument
578 return test_bit(WQ_FLAG_DEDICATED, &wq->flags); in wq_dedicated()
581 static inline bool wq_shared(struct idxd_wq *wq) in wq_shared() argument
583 return !test_bit(WQ_FLAG_DEDICATED, &wq->flags); in wq_shared()
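A simplified sketch of how the dedicated/shared split is typically consumed on the submission path; it assumes the x86 iosubmit_cmds512() (MOVDIR64B) helper and omits the retry, PASID, and bookkeeping work that the real idxd_submit_desc() performs:

/* Sketch only: a dedicated WQ accepts MOVDIR64B posts unconditionally, while a
 * shared WQ needs ENQCMDS, which can be rejected when the WQ is full. */
static int example_post(struct idxd_wq *wq, struct idxd_desc *desc)
{
	void __iomem *portal = idxd_wq_portal_addr(wq);

	if (wq_dedicated(wq)) {
		iosubmit_cmds512(portal, desc->hw, 1);
		return 0;
	}
	return idxd_enqcmds(wq, portal, desc->hw);
}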
596 static inline bool wq_pasid_enabled(struct idxd_wq *wq) in wq_pasid_enabled() argument
598 return (is_idxd_wq_kernel(wq) && device_pasid_enabled(wq->idxd)) || in wq_pasid_enabled()
599 (is_idxd_wq_user(wq) && device_user_pasid_enabled(wq->idxd)); in wq_pasid_enabled()
602 static inline bool wq_shared_supported(struct idxd_wq *wq) in wq_shared_supported() argument
604 return (support_enqcmd && wq_pasid_enabled(wq)); in wq_shared_supported()
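A hedged sketch of the kind of guard an enable path can build from these predicates; the helper name and error code are illustrative:

/* Illustrative guard: a shared WQ is only usable when ENQCMD and PASID support
 * line up, which is what wq_shared_supported() encodes. */
static int example_validate_wq(struct idxd_wq *wq)
{
	if (wq_shared(wq) && !wq_shared_supported(wq))
		return -EOPNOTSUPP;
	return 0;
}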
638 static inline void __iomem *idxd_wq_portal_addr(struct idxd_wq *wq) in idxd_wq_portal_addr() argument
640 int ofs = wq->portal_offset; in idxd_wq_portal_addr()
642 wq->portal_offset = (ofs + sizeof(struct dsa_raw_desc)) & IDXD_PORTAL_MASK; in idxd_wq_portal_addr()
643 return wq->portal + ofs; in idxd_wq_portal_addr()
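The helper hands out consecutive descriptor-sized slots within the WQ portal and wraps the offset with IDXD_PORTAL_MASK. Below is a standalone userspace illustration of just the offset rotation, with the 64-byte slot size and 4 KiB wrap hardcoded as assumptions in place of sizeof(struct dsa_raw_desc) and IDXD_PORTAL_MASK:

#include <stdio.h>

#define DEMO_DESC_SIZE		64		/* stands in for sizeof(struct dsa_raw_desc) */
#define DEMO_PORTAL_MASK	(4096 - 1)	/* stands in for IDXD_PORTAL_MASK */

static unsigned int demo_portal_offset;

static unsigned int demo_next_ofs(void)
{
	unsigned int ofs = demo_portal_offset;

	demo_portal_offset = (ofs + DEMO_DESC_SIZE) & DEMO_PORTAL_MASK;
	return ofs;
}

int main(void)
{
	for (int i = 0; i < 4; i++)
		printf("submission %d -> portal offset %u\n", i, demo_next_ofs());
	return 0;
}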
646 static inline void idxd_wq_get(struct idxd_wq *wq) in idxd_wq_get() argument
648 wq->client_count++; in idxd_wq_get()
651 static inline void idxd_wq_put(struct idxd_wq *wq) in idxd_wq_put() argument
653 wq->client_count--; in idxd_wq_put()
656 static inline int idxd_wq_refcount(struct idxd_wq *wq) in idxd_wq_refcount() argument
658 return wq->client_count; in idxd_wq_refcount()
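These are plain increments and decrements of an int, so callers are assumed to serialize externally; the attach/detach helpers below are hypothetical and use the wq's own wq_lock mutex for that serialization:

/* Hypothetical client attach/detach, serialized by the wq's mutex. */
static void example_client_attach(struct idxd_wq *wq)
{
	mutex_lock(&wq->wq_lock);
	idxd_wq_get(wq);
	mutex_unlock(&wq->wq_lock);
}

static void example_client_detach(struct idxd_wq *wq)
{
	mutex_lock(&wq->wq_lock);
	idxd_wq_put(wq);
	mutex_unlock(&wq->wq_lock);
}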
661 static inline void idxd_wq_set_private(struct idxd_wq *wq, void *private) in idxd_wq_set_private() argument
663 dev_set_drvdata(wq_confdev(wq), private); in idxd_wq_set_private()
666 static inline void *idxd_wq_get_private(struct idxd_wq *wq) in idxd_wq_get_private() argument
668 return dev_get_drvdata(wq_confdev(wq)); in idxd_wq_get_private()
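The private pointer is simply drvdata on the wq's conf_dev, so a sub-driver can hang its own per-wq context there. A sketch with a made-up context structure (example_wq_ctx is illustrative, not an idxd type):

/* Illustrative per-wq context stored via the wq's conf_dev drvdata. */
struct example_wq_ctx {
	struct idxd_wq *wq;
	void *ring;
};

static int example_ctx_setup(struct idxd_wq *wq)
{
	struct example_wq_ctx *ctx = kzalloc(sizeof(*ctx), GFP_KERNEL);

	if (!ctx)
		return -ENOMEM;
	ctx->wq = wq;
	idxd_wq_set_private(wq, ctx);
	return 0;
}

static void example_ctx_teardown(struct idxd_wq *wq)
{
	struct example_wq_ctx *ctx = idxd_wq_get_private(wq);

	idxd_wq_set_private(wq, NULL);
	kfree(ctx);
}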
673 * The max batch size of device, max batch size of wq and
685 static inline void idxd_wq_set_max_batch_size(int idxd_type, struct idxd_wq *wq, in idxd_wq_set_max_batch_size() argument
689 wq->max_batch_size = 0; in idxd_wq_set_max_batch_size()
691 wq->max_batch_size = max_batch_size; in idxd_wq_set_max_batch_size()
703 static inline int idxd_wq_driver_name_match(struct idxd_wq *wq, struct device *dev) in idxd_wq_driver_name_match() argument
705 return (strncmp(wq->driver_name, dev->driver->name, strlen(dev->driver->name)) == 0); in idxd_wq_driver_name_match()
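A small sketch of the probe-time check this helper enables; the function name and return code are chosen for illustration only:

/* Illustrative bind guard: refuse to attach when the wq's configured
 * driver_name does not match the driver being probed. */
static int example_check_bind(struct idxd_wq *wq, struct device *conf_dev)
{
	if (!idxd_wq_driver_name_match(wq, conf_dev))
		return -ENODEV;
	return 0;
}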
721 void idxd_free_desc(struct idxd_wq *wq, struct idxd_desc *desc);
733 drv = wq_to_idxd_drv(desc->wq); in idxd_desc_complete()
757 int idxd_drv_enable_wq(struct idxd_wq *wq);
758 void idxd_drv_disable_wq(struct idxd_wq *wq);
774 int idxd_wq_alloc_resources(struct idxd_wq *wq);
775 void idxd_wq_free_resources(struct idxd_wq *wq);
776 int idxd_wq_enable(struct idxd_wq *wq);
777 int idxd_wq_disable(struct idxd_wq *wq, bool reset_config);
778 void idxd_wq_drain(struct idxd_wq *wq);
779 void idxd_wq_reset(struct idxd_wq *wq);
780 int idxd_wq_map_portal(struct idxd_wq *wq);
781 void idxd_wq_unmap_portal(struct idxd_wq *wq);
782 int idxd_wq_set_pasid(struct idxd_wq *wq, int pasid);
783 int idxd_wq_disable_pasid(struct idxd_wq *wq);
784 void __idxd_wq_quiesce(struct idxd_wq *wq);
785 void idxd_wq_quiesce(struct idxd_wq *wq);
786 int idxd_wq_init_percpu_ref(struct idxd_wq *wq);
787 void idxd_wq_free_irq(struct idxd_wq *wq);
788 int idxd_wq_request_irq(struct idxd_wq *wq);
791 int idxd_submit_desc(struct idxd_wq *wq, struct idxd_desc *desc);
792 struct idxd_desc *idxd_alloc_desc(struct idxd_wq *wq, enum idxd_op_type optype);
793 int idxd_enqcmds(struct idxd_wq *wq, void __iomem *portal, const void *desc);
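Taken together, the descriptor prototypes suggest the lifecycle sketched below; this is a hedged illustration that leaves out programming desc->hw and handling the completion record:

/* Sketch only: allocate, submit, and release on failure. IDXD_OP_NONBLOCK
 * asks idxd_alloc_desc() not to block when no descriptor slot is free. */
static int example_issue(struct idxd_wq *wq)
{
	struct idxd_desc *desc;
	int rc;

	desc = idxd_alloc_desc(wq, IDXD_OP_NONBLOCK);
	if (IS_ERR(desc))
		return PTR_ERR(desc);

	/* ... fill desc->hw with the desired operation ... */

	rc = idxd_submit_desc(wq, desc);
	if (rc < 0)
		idxd_free_desc(wq, desc);
	return rc;
}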
803 int idxd_wq_add_cdev(struct idxd_wq *wq);
804 void idxd_wq_del_cdev(struct idxd_wq *wq);
805 int idxd_copy_cr(struct idxd_wq *wq, ioasid_t pasid, unsigned long addr,
807 void idxd_user_counter_increment(struct idxd_wq *wq, u32 pasid, int index);