author    Dan Williams <dan.j.williams@intel.com>  2009-09-08 17:53:04 -0700
committer Dan Williams <dan.j.williams@intel.com>  2009-09-08 17:53:04 -0700
commit    162b96e63e518aa6ff029ce23de12d7f027483bf (patch)
tree      532191d0cef7cf975b70a07b1c69a293d6f552f7 /drivers/dma/ioat/dma_v2.h
parent    0803172778901e24a75ab074798d98c2b7411559 (diff)
ioat2,3: cacheline align software descriptor allocations
All the necessary fields for handling an ioat2,3 ring entry can fit into
one cacheline.  Move ->len prior to ->txd in struct ioat_ring_ent, and
move allocation of these entries to a hw-cache-aligned kmem cache to
reduce the number of cachelines dirtied for descriptor management.

Signed-off-by: Dan Williams <dan.j.williams@intel.com>
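For readers following the kmem-cache part of this change, the sketch below is illustrative only: the real setup lives in the ioat2/3 source files rather than this header, and the helper names (ioat2_cache_setup, ioat2_alloc_ring_ent, ioat2_free_ring_ent) are hypothetical. It shows how a hw-cache-aligned cache for struct ioat_ring_ent could be created and used with the standard slab API, where SLAB_HWCACHE_ALIGN asks the allocator to start each object on a hardware cacheline boundary.

#include <linux/slab.h>

struct kmem_cache *ioat2_cache;

/* Hypothetical setup helper: create the cacheline-aligned cache. */
static int ioat2_cache_setup(void)
{
	ioat2_cache = kmem_cache_create("ioat2", sizeof(struct ioat_ring_ent),
					0, SLAB_HWCACHE_ALIGN, NULL);
	return ioat2_cache ? 0 : -ENOMEM;
}

/* Hypothetical alloc/free wrappers around the dedicated cache. */
static struct ioat_ring_ent *ioat2_alloc_ring_ent(gfp_t flags)
{
	/* zeroed, cacheline-aligned object from ioat2_cache */
	return kmem_cache_zalloc(ioat2_cache, flags);
}

static void ioat2_free_ring_ent(struct ioat_ring_ent *desc)
{
	kmem_cache_free(ioat2_cache, desc);
}

With ->len moved ahead of ->txd (see the hunk below), the fields touched during descriptor management sit together at the start of that aligned object.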
Diffstat (limited to 'drivers/dma/ioat/dma_v2.h')
-rw-r--r--  drivers/dma/ioat/dma_v2.h | 3
1 file changed, 2 insertions(+), 1 deletion(-)
diff --git a/drivers/dma/ioat/dma_v2.h b/drivers/dma/ioat/dma_v2.h
index 9baa3d6065f..ac00adc8197 100644
--- a/drivers/dma/ioat/dma_v2.h
+++ b/drivers/dma/ioat/dma_v2.h
@@ -116,8 +116,8 @@ static inline u16 ioat2_xferlen_to_descs(struct ioat2_dma_chan *ioat, size_t len
 struct ioat_ring_ent {
 	struct ioat_dma_descriptor *hw;
-	struct dma_async_tx_descriptor txd;
 	size_t len;
+	struct dma_async_tx_descriptor txd;
 #ifdef DEBUG
 	int id;
 #endif
@@ -143,4 +143,5 @@ int __devinit ioat2_dma_probe(struct ioatdma_device *dev, int dca);
 int __devinit ioat3_dma_probe(struct ioatdma_device *dev, int dca);
 struct dca_provider * __devinit ioat2_dca_init(struct pci_dev *pdev, void __iomem *iobase);
 struct dca_provider * __devinit ioat3_dca_init(struct pci_dev *pdev, void __iomem *iobase);
+extern struct kmem_cache *ioat2_cache;
 #endif /* IOATDMA_V2_H */