forcedeth: replace pci_alloc_consistent with dma_alloc_coherent
The function pci_alloc_consistent() is obsolete, so it is replaced with dma_alloc_coherent().

Signed-off-by: Zhu Yanjun <yanjun.zhu@oracle.com>
Signed-off-by: David S. Miller <davem@davemloft.net>
commit e8992e4082
parent db32919005
committed by David S. Miller
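The mechanical pattern behind every hunk below is the same: the deprecated PCI wrappers take a struct pci_dev * and hard-code GFP_ATOMIC internally, while the generic DMA API operates on the underlying struct device and takes an explicit GFP flag, which is why the converted call sites pass GFP_ATOMIC. A minimal sketch of the mapping, using illustrative helper names (ring_alloc, ring_free and the ring_size parameter are not forcedeth code):

#include <linux/dma-mapping.h>
#include <linux/pci.h>

/* Before: cpu_addr = pci_alloc_consistent(pdev, ring_size, &dma_handle);
 * After:  allocate against &pdev->dev and pass the GFP flag explicitly
 *         (GFP_ATOMIC here, matching the behavior of the old wrapper).
 */
static void *ring_alloc(struct pci_dev *pdev, size_t ring_size,
                        dma_addr_t *dma_handle)
{
        return dma_alloc_coherent(&pdev->dev, ring_size, dma_handle,
                                  GFP_ATOMIC);
}

/* Before: pci_free_consistent(pdev, ring_size, cpu_addr, dma_handle); */
static void ring_free(struct pci_dev *pdev, size_t ring_size,
                      void *cpu_addr, dma_addr_t dma_handle)
{
        dma_free_coherent(&pdev->dev, ring_size, cpu_addr, dma_handle);
}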
@@ -1024,12 +1024,18 @@ static void free_rings(struct net_device *dev)
 
         if (!nv_optimized(np)) {
                 if (np->rx_ring.orig)
-                        pci_free_consistent(np->pci_dev, sizeof(struct ring_desc) * (np->rx_ring_size + np->tx_ring_size),
-                                            np->rx_ring.orig, np->ring_addr);
+                        dma_free_coherent(&np->pci_dev->dev,
+                                          sizeof(struct ring_desc) *
+                                          (np->rx_ring_size +
+                                           np->tx_ring_size),
+                                          np->rx_ring.orig, np->ring_addr);
         } else {
                 if (np->rx_ring.ex)
-                        pci_free_consistent(np->pci_dev, sizeof(struct ring_desc_ex) * (np->rx_ring_size + np->tx_ring_size),
-                                            np->rx_ring.ex, np->ring_addr);
+                        dma_free_coherent(&np->pci_dev->dev,
+                                          sizeof(struct ring_desc_ex) *
+                                          (np->rx_ring_size +
+                                           np->tx_ring_size),
+                                          np->rx_ring.ex, np->ring_addr);
         }
         kfree(np->rx_skb);
         kfree(np->tx_skb);
@@ -4596,13 +4602,17 @@ static int nv_set_ringparam(struct net_device *dev, struct ethtool_ringparam* ri
 
         /* allocate new rings */
         if (!nv_optimized(np)) {
-                rxtx_ring = pci_alloc_consistent(np->pci_dev,
-                                            sizeof(struct ring_desc) * (ring->rx_pending + ring->tx_pending),
-                                            &ring_addr);
+                rxtx_ring = dma_alloc_coherent(&np->pci_dev->dev,
+                                               sizeof(struct ring_desc) *
+                                               (ring->rx_pending +
+                                                ring->tx_pending),
+                                               &ring_addr, GFP_ATOMIC);
         } else {
-                rxtx_ring = pci_alloc_consistent(np->pci_dev,
-                                            sizeof(struct ring_desc_ex) * (ring->rx_pending + ring->tx_pending),
-                                            &ring_addr);
+                rxtx_ring = dma_alloc_coherent(&np->pci_dev->dev,
+                                               sizeof(struct ring_desc_ex) *
+                                               (ring->rx_pending +
+                                                ring->tx_pending),
+                                               &ring_addr, GFP_ATOMIC);
         }
         rx_skbuff = kmalloc(sizeof(struct nv_skb_map) * ring->rx_pending, GFP_KERNEL);
         tx_skbuff = kmalloc(sizeof(struct nv_skb_map) * ring->tx_pending, GFP_KERNEL);
@@ -4610,12 +4620,18 @@ static int nv_set_ringparam(struct net_device *dev, struct ethtool_ringparam* ri
                 /* fall back to old rings */
                 if (!nv_optimized(np)) {
                         if (rxtx_ring)
-                                pci_free_consistent(np->pci_dev, sizeof(struct ring_desc) * (ring->rx_pending + ring->tx_pending),
-                                                    rxtx_ring, ring_addr);
+                                dma_free_coherent(&np->pci_dev->dev,
+                                                  sizeof(struct ring_desc) *
+                                                  (ring->rx_pending +
+                                                   ring->tx_pending),
+                                                  rxtx_ring, ring_addr);
                 } else {
                         if (rxtx_ring)
-                                pci_free_consistent(np->pci_dev, sizeof(struct ring_desc_ex) * (ring->rx_pending + ring->tx_pending),
-                                                    rxtx_ring, ring_addr);
+                                dma_free_coherent(&np->pci_dev->dev,
+                                                  sizeof(struct ring_desc_ex) *
+                                                  (ring->rx_pending +
+                                                   ring->tx_pending),
+                                                  rxtx_ring, ring_addr);
                 }
 
                 kfree(rx_skbuff);
@@ -5740,16 +5756,21 @@ static int nv_probe(struct pci_dev *pci_dev, const struct pci_device_id *id)
         np->tx_ring_size = TX_RING_DEFAULT;
 
         if (!nv_optimized(np)) {
-                np->rx_ring.orig = pci_alloc_consistent(pci_dev,
-                                        sizeof(struct ring_desc) * (np->rx_ring_size + np->tx_ring_size),
-                                        &np->ring_addr);
+                np->rx_ring.orig = dma_alloc_coherent(&pci_dev->dev,
+                                                      sizeof(struct ring_desc) *
+                                                      (np->rx_ring_size +
+                                                       np->tx_ring_size),
+                                                      &np->ring_addr,
+                                                      GFP_ATOMIC);
                 if (!np->rx_ring.orig)
                         goto out_unmap;
                 np->tx_ring.orig = &np->rx_ring.orig[np->rx_ring_size];
         } else {
-                np->rx_ring.ex = pci_alloc_consistent(pci_dev,
-                                        sizeof(struct ring_desc_ex) * (np->rx_ring_size + np->tx_ring_size),
-                                        &np->ring_addr);
+                np->rx_ring.ex = dma_alloc_coherent(&pci_dev->dev,
+                                                    sizeof(struct ring_desc_ex) *
+                                                    (np->rx_ring_size +
+                                                     np->tx_ring_size),
+                                                    &np->ring_addr, GFP_ATOMIC);
                 if (!np->rx_ring.ex)
                         goto out_unmap;
                 np->tx_ring.ex = &np->rx_ring.ex[np->rx_ring_size];