/* pble.h - irdma PBLE (physical buffer list entry) resource manager declarations */
/* SPDX-License-Identifier: GPL-2.0 or Linux-OpenIB */
/* Copyright (c) 2015 - 2019 Intel Corporation */
#ifndef IRDMA_PBLE_H
#define IRDMA_PBLE_H

/* Granularity shift for carving PBLEs out of a chunk (1 << PBLE_SHIFT
 * bytes per allocation unit) -- NOTE(review): confirm against pble.c usage.
 */
#define PBLE_SHIFT 6
/* PBLEs per 4KB page -- presumably 8-byte entries: 4096 / 8 = 512. */
#define PBLE_PER_PAGE 512
/* Page shift (4KB) for paged HMC backing pages. */
#define HMC_PAGED_BP_SHIFT 12
/* log2(512): converts a count of 512-entry groups to/from PBLE counts. */
#define PBLE_512_SHIFT 9
/* Sentinel value marking an unassigned/invalid PBLE index. */
#define PBLE_INVALID_IDX 0xffffffff
/* Indirection depth of a PBLE allocation; selects which member of the
 * union in struct irdma_pble_alloc is valid.
 */
enum irdma_pble_level {
	PBLE_LEVEL_0 = 0,
	PBLE_LEVEL_1 = 1,	/* single flat run of PBLEs */
	PBLE_LEVEL_2 = 2,	/* root table pointing at leaf tables */
};
/* How a chunk's backing memory was obtained.
 * PBLE_SD_CONTIGOUS keeps the driver's historical spelling of
 * "contiguous"; renaming would break users of this header.
 */
enum irdma_alloc_type {
	PBLE_NO_ALLOC = 0,
	PBLE_SD_CONTIGOUS = 1,	/* physically contiguous SD backing */
	PBLE_SD_PAGED = 2,	/* page-by-page SD backing */
};
struct irdma_chunk;

/* Records where within a chunk's bitmap an allocation lives, so the
 * bits can be returned on free.
 */
struct irdma_pble_chunkinfo {
	struct irdma_chunk *pchunk;	/* chunk the PBLEs were carved from */
	u64 bit_idx;			/* first bitmap bit of the allocation */
	u64 bits_used;			/* number of bitmap bits consumed */
};
/* One contiguous run of PBLEs handed out to a consumer. */
struct irdma_pble_info {
	u64 *addr;	/* kernel virtual address of the first PBLE */
	u32 idx;	/* index of the first PBLE -- NOTE(review): presumably
			 * an FPM-relative index; confirm in pble.c
			 */
	u32 cnt;	/* number of PBLEs in the run */
	struct irdma_pble_chunkinfo chunkinfo;	/* bitmap bookkeeping */
};
/* Two-level PBLE layout: a root table whose entries reference an array
 * of leaf tables.
 */
struct irdma_pble_level2 {
	struct irdma_pble_info root;	/* root PBLE table */
	struct irdma_pble_info *leaf;	/* array of leaf-table descriptors */
	struct irdma_virt_mem leafmem;	/* backing allocation for @leaf */
	u32 leaf_cnt;			/* number of entries in @leaf */
};
/* Result of a PBLE allocation: either a single level-1 run or a
 * level-2 root/leaf tree; @level selects the valid union member.
 */
struct irdma_pble_alloc {
	u32 total_cnt;			/* total PBLEs allocated */
	enum irdma_pble_level level;	/* which union member is valid */
	union {
		struct irdma_pble_info level1;
		struct irdma_pble_level2 level2;
	};
};
/* HMC descriptor coordinates -- sd/pd presumably abbreviate the HMC
 * segment descriptor / page descriptor; confirm against irdma HMC code.
 */
struct sd_pd_idx {
	u32 sd_idx;	/* segment descriptor index */
	u32 pd_idx;	/* page descriptor index (absolute) */
	u32 rel_pd_idx;	/* page descriptor index relative to its SD */
};
/* Parameter bundle used while adding backing pages to a chunk. */
struct irdma_add_page_info {
	struct irdma_chunk *chunk;		/* chunk being grown */
	struct irdma_hmc_sd_entry *sd_entry;	/* SD entry backing the pages */
	struct irdma_hmc_info *hmc_info;	/* device HMC context */
	struct sd_pd_idx idx;			/* SD/PD coordinates */
	u32 pages;				/* number of pages to add */
};
/* One region of PBLE backing memory. Chunks are linked on the PRM's
 * chunk list and carve out PBLEs via a per-chunk allocation bitmap.
 */
struct irdma_chunk {
	struct list_head list;		/* link on irdma_pble_prm.clist */
	struct irdma_dma_info dmainfo;	/* DMA mapping info for the chunk */
	unsigned long *bitmapbuf;	/* allocation bitmap */
	u32 sizeofbitmap;		/* number of bits in @bitmapbuf */
	u64 size;			/* chunk size in bytes */
	void *vaddr;			/* kernel virtual address of memory */
	u64 fpm_addr;			/* FPM address the chunk maps to */
	u32 pg_cnt;			/* backing page count (paged chunks) */
	enum irdma_alloc_type type;	/* contiguous vs paged backing */
	struct irdma_sc_dev *dev;	/* owning device */
	struct irdma_virt_mem chunkmem;	/* allocation holding this struct */
};
/* PBLE resource manager (PRM): owns the chunk list and the free/used
 * accounting for PBLE allocations.
 */
struct irdma_pble_prm {
	struct list_head clist;	/* list of struct irdma_chunk */
	spinlock_t prm_lock;	/* protect prm bitmap */
	u64 total_pble_alloc;	/* total PBLEs added to the manager */
	u64 free_pble_cnt;	/* PBLEs currently free */
	u8 pble_shift;		/* allocation granularity shift */
};
/* Top-level per-device PBLE resource: the PRM plus FPM addressing state
 * and allocation statistics counters.
 */
struct irdma_hmc_pble_rsrc {
	u32 unallocated_pble;		/* PBLEs not yet added to the PRM */
	struct mutex pble_mutex_lock;	/* protect PBLE resource */
	struct irdma_sc_dev *dev;	/* owning device */
	u64 fpm_base_addr;		/* base FPM address of the PBLE region */
	u64 next_fpm_addr;		/* next unused FPM address */
	struct irdma_pble_prm pinfo;	/* chunk/bitmap manager */
	u64 allocdpbles;		/* lifetime count of PBLEs allocated */
	u64 freedpbles;			/* lifetime count of PBLEs freed */
	u32 stats_direct_sds;		/* SDs backed contiguously */
	u32 stats_paged_sds;		/* SDs backed page-by-page */
	u64 stats_alloc_ok;		/* successful allocations */
	u64 stats_alloc_fail;		/* failed allocations */
	u64 stats_alloc_freed;		/* allocations later freed */
	u64 stats_lvl1;			/* level-1 allocations granted */
	u64 stats_lvl2;			/* level-2 allocations granted */
};
/* The summaries below are derived from the prototypes; exact semantics
 * live in pble.c. int-returning functions follow the kernel convention
 * of 0 on success / negative errno on failure -- confirm in pble.c.
 */

/* Tear down the PBLE resource manager, releasing all chunks. */
void irdma_destroy_pble_prm(struct irdma_hmc_pble_rsrc *pble_rsrc);
/* Initialize @pble_rsrc for @dev. */
int irdma_hmc_init_pble(struct irdma_sc_dev *dev,
			struct irdma_hmc_pble_rsrc *pble_rsrc);
/* Return a previous irdma_get_pble() allocation to the pool. */
void irdma_free_pble(struct irdma_hmc_pble_rsrc *pble_rsrc,
		     struct irdma_pble_alloc *palloc);
/* Allocate @pble_cnt PBLEs into @palloc; @level1_only forbids falling
 * back to a two-level allocation.
 */
int irdma_get_pble(struct irdma_hmc_pble_rsrc *pble_rsrc,
		   struct irdma_pble_alloc *palloc, u32 pble_cnt,
		   bool level1_only);
/* Add @pchunk's memory to the PRM free pool. */
int irdma_prm_add_pble_mem(struct irdma_pble_prm *pprm,
			   struct irdma_chunk *pchunk);
/* Carve @mem_size bytes of PBLEs from the PRM; on success fills
 * @chunkinfo and returns addresses via @vaddr and @fpm_addr.
 */
int irdma_prm_get_pbles(struct irdma_pble_prm *pprm,
			struct irdma_pble_chunkinfo *chunkinfo, u64 mem_size,
			u64 **vaddr, u64 *fpm_addr);
/* Give back the PBLEs described by @chunkinfo. */
void irdma_prm_return_pbles(struct irdma_pble_prm *pprm,
			    struct irdma_pble_chunkinfo *chunkinfo);
/* Acquire/release the PBLE lock; @flags presumably holds saved IRQ
 * state (spin_lock_irqsave style) -- confirm in pble.c.
 */
void irdma_pble_acquire_lock(struct irdma_hmc_pble_rsrc *pble_rsrc,
			     unsigned long *flags);
void irdma_pble_release_lock(struct irdma_hmc_pble_rsrc *pble_rsrc,
			     unsigned long *flags);
/* Free / allocate paged backing memory for @chunk. */
void irdma_pble_free_paged_mem(struct irdma_chunk *chunk);
int irdma_pble_get_paged_mem(struct irdma_chunk *chunk, u32 pg_cnt);
/* Release the bitmap memory attached to @chunk. */
void irdma_prm_rem_bitmapmem(struct irdma_hw *hw, struct irdma_chunk *chunk);
#endif /* IRDMA_PBLE_H */