librte_sched uses rte_bitmap to manage large arrays of bits efficiently. Moving it to eal/common allows other libraries and applications to use it.
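For illustration only, a rough sketch of how an application could consume the relocated header once it is exported by EAL (assumes EAL is already initialized; the bit count, allocation tag and error handling are arbitrary, not part of this patch):

#include <stdio.h>
#include <stdint.h>

#include <rte_bitmap.h>   /* now provided by EAL instead of librte_sched */
#include <rte_malloc.h>

static int
bitmap_demo(void)
{
	/* n_bits must be non-zero and a multiple of 512, see rte_bitmap_init() */
	const uint32_t n_bits = 4096;
	uint32_t size = rte_bitmap_get_memory_footprint(n_bits);
	uint8_t *mem = rte_zmalloc("bitmap_demo", size, RTE_CACHE_LINE_SIZE);
	struct rte_bitmap *bmp;
	uint64_t slab;
	uint32_t pos;

	if (mem == NULL)
		return -1;

	bmp = rte_bitmap_init(n_bits, mem, size);
	if (bmp == NULL) {
		rte_free(mem);
		return -1;
	}

	rte_bitmap_set(bmp, 1000);

	/* rte_bitmap_scan() returns 1 and the 64-bit slab holding the next set bit */
	if (rte_bitmap_scan(bmp, &pos, &slab))
		printf("first set bit is in the slab starting at bit %u\n", pos);

	rte_bitmap_free(bmp);
	rte_free(mem);
	return 0;
}
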
Signed-off-by: Pavan Nikhilesh <pbhagavat...@caviumnetworks.com>
---
 lib/librte_eal/common/Makefile                 |   1 +
 .../common/include}/rte_bitmap.h               | 100 +++++++++++++--------
 lib/librte_sched/Makefile                      |   5 +-
 lib/librte_sched/rte_sched.c                   |   2 +-
 4 files changed, 70 insertions(+), 38 deletions(-)
 rename lib/{librte_sched => librte_eal/common/include}/rte_bitmap.h (85%)

diff --git a/lib/librte_eal/common/Makefile b/lib/librte_eal/common/Makefile
index e8fd67a..c2c6a7f 100644
--- a/lib/librte_eal/common/Makefile
+++ b/lib/librte_eal/common/Makefile
@@ -42,6 +42,7 @@ INC += rte_hexdump.h rte_devargs.h rte_bus.h rte_dev.h rte_vdev.h
 INC += rte_pci_dev_feature_defs.h rte_pci_dev_features.h
 INC += rte_malloc.h rte_keepalive.h rte_time.h
 INC += rte_service.h rte_service_component.h
+INC += rte_bitmap.h
 
 GENERIC_INC := rte_atomic.h rte_byteorder.h rte_cycles.h rte_prefetch.h
 GENERIC_INC += rte_spinlock.h rte_memcpy.h rte_cpuflags.h rte_rwlock.h
diff --git a/lib/librte_sched/rte_bitmap.h b/lib/librte_eal/common/include/rte_bitmap.h
similarity index 85%
rename from lib/librte_sched/rte_bitmap.h
rename to lib/librte_eal/common/include/rte_bitmap.h
index 010d752..0938c63 100644
--- a/lib/librte_sched/rte_bitmap.h
+++ b/lib/librte_eal/common/include/rte_bitmap.h
@@ -65,6 +65,7 @@ extern "C" {
 ***/
 
 #include <string.h>
+
 #include <rte_common.h>
 #include <rte_debug.h>
 #include <rte_memory.h>
@@ -72,36 +73,46 @@ extern "C" {
 #include <rte_prefetch.h>
 
 #ifndef RTE_BITMAP_OPTIMIZATIONS
-#define RTE_BITMAP_OPTIMIZATIONS		          1
+#define RTE_BITMAP_OPTIMIZATIONS	1
 #endif
 
 /* Slab */
-#define RTE_BITMAP_SLAB_BIT_SIZE		          64
-#define RTE_BITMAP_SLAB_BIT_SIZE_LOG2	          6
-#define RTE_BITMAP_SLAB_BIT_MASK		          (RTE_BITMAP_SLAB_BIT_SIZE - 1)
+#define RTE_BITMAP_SLAB_BIT_SIZE	64
+#define RTE_BITMAP_SLAB_BIT_SIZE_LOG2	6
+#define RTE_BITMAP_SLAB_BIT_MASK	(RTE_BITMAP_SLAB_BIT_SIZE - 1)
 
 /* Cache line (CL) */
-#define RTE_BITMAP_CL_BIT_SIZE		          (RTE_CACHE_LINE_SIZE * 8)
-#define RTE_BITMAP_CL_BIT_SIZE_LOG2	          (RTE_CACHE_LINE_SIZE_LOG2 + 3)
-#define RTE_BITMAP_CL_BIT_MASK		          (RTE_BITMAP_CL_BIT_SIZE - 1)
+#define RTE_BITMAP_CL_BIT_SIZE		(RTE_CACHE_LINE_SIZE * 8)
+#define RTE_BITMAP_CL_BIT_SIZE_LOG2	(RTE_CACHE_LINE_SIZE_LOG2 + 3)
+#define RTE_BITMAP_CL_BIT_MASK		(RTE_BITMAP_CL_BIT_SIZE - 1)
 
-#define RTE_BITMAP_CL_SLAB_SIZE		          (RTE_BITMAP_CL_BIT_SIZE / RTE_BITMAP_SLAB_BIT_SIZE)
-#define RTE_BITMAP_CL_SLAB_SIZE_LOG2	          (RTE_BITMAP_CL_BIT_SIZE_LOG2 - RTE_BITMAP_SLAB_BIT_SIZE_LOG2)
-#define RTE_BITMAP_CL_SLAB_MASK		          (RTE_BITMAP_CL_SLAB_SIZE - 1)
+#define RTE_BITMAP_CL_SLAB_SIZE \
+	(RTE_BITMAP_CL_BIT_SIZE / RTE_BITMAP_SLAB_BIT_SIZE)
+#define RTE_BITMAP_CL_SLAB_SIZE_LOG2 \
+	(RTE_BITMAP_CL_BIT_SIZE_LOG2 - RTE_BITMAP_SLAB_BIT_SIZE_LOG2)
+#define RTE_BITMAP_CL_SLAB_MASK		(RTE_BITMAP_CL_SLAB_SIZE - 1)
 
 /** Bitmap data structure */
 struct rte_bitmap {
 	/* Context for array1 and array2 */
-	uint64_t *array1;	/**< Bitmap array1 */
-	uint64_t *array2;	/**< Bitmap array2 */
-	uint32_t array1_size;	/**< Number of 64-bit slabs in array1 that are actually used */
-	uint32_t array2_size;	/**< Number of 64-bit slabs in array2 */
+	uint64_t *array1;
+	/**< Bitmap array1 */
+	uint64_t *array2;
+	/**< Bitmap array2 */
+	uint32_t array1_size;
+	/**< Number of 64-bit slabs in array1 that are actually used */
+	uint32_t array2_size;
+	/**< Number of 64-bit slabs in array2 */
 
 	/* Context for the "scan next" operation */
-	uint32_t index1;	/**< Bitmap scan: Index of current array1 slab */
-	uint32_t offset1;	/**< Bitmap scan: Offset of current bit within current array1 slab */
-	uint32_t index2;	/**< Bitmap scan: Index of current array2 slab */
-	uint32_t go2;	/**< Bitmap scan: Go/stop condition for current array2 cache line */
+	uint32_t index1;
+	/**< Bitmap scan: Index of current array1 slab */
+	uint32_t offset1;
+	/**< Bitmap scan: Offset of current bit within current array1 slab */
+	uint32_t index2;
+	/**< Bitmap scan: Index of current array2 slab */
+	uint32_t go2;
+	/**< Bitmap scan: Go/stop condition for current array2 cache line */
 
 	/* Storage space for array1 and array2 */
 	uint8_t memory[];
@@ -122,7 +133,8 @@ __rte_bitmap_mask1_get(struct rte_bitmap *bmp)
 static inline void
 __rte_bitmap_index2_set(struct rte_bitmap *bmp)
 {
-	bmp->index2 = (((bmp->index1 << RTE_BITMAP_SLAB_BIT_SIZE_LOG2) + bmp->offset1) << RTE_BITMAP_CL_SLAB_SIZE_LOG2);
+	bmp->index2 = (((bmp->index1 << RTE_BITMAP_SLAB_BIT_SIZE_LOG2) +
+			bmp->offset1) << RTE_BITMAP_CL_SLAB_SIZE_LOG2);
 }
 
 #if RTE_BITMAP_OPTIMIZATIONS
@@ -171,12 +183,18 @@ __rte_bitmap_get_memory_footprint(uint32_t n_bits,
 	uint32_t n_cache_lines_array2;
 	uint32_t n_bytes_total;
 
-	n_cache_lines_array2 = (n_bits + RTE_BITMAP_CL_BIT_SIZE - 1) / RTE_BITMAP_CL_BIT_SIZE;
-	n_slabs_array1 = (n_cache_lines_array2 + RTE_BITMAP_SLAB_BIT_SIZE - 1) / RTE_BITMAP_SLAB_BIT_SIZE;
+	n_cache_lines_array2 = (n_bits + RTE_BITMAP_CL_BIT_SIZE - 1) /
+		RTE_BITMAP_CL_BIT_SIZE;
+	n_slabs_array1 = (n_cache_lines_array2 + RTE_BITMAP_SLAB_BIT_SIZE - 1) /
+		RTE_BITMAP_SLAB_BIT_SIZE;
 	n_slabs_array1 = rte_align32pow2(n_slabs_array1);
-	n_slabs_context = (sizeof(struct rte_bitmap) + (RTE_BITMAP_SLAB_BIT_SIZE / 8) - 1) / (RTE_BITMAP_SLAB_BIT_SIZE / 8);
-	n_cache_lines_context_and_array1 = (n_slabs_context + n_slabs_array1 + RTE_BITMAP_CL_SLAB_SIZE - 1) / RTE_BITMAP_CL_SLAB_SIZE;
-	n_bytes_total = (n_cache_lines_context_and_array1 + n_cache_lines_array2) * RTE_CACHE_LINE_SIZE;
+	n_slabs_context = (sizeof(struct rte_bitmap) +
+		(RTE_BITMAP_SLAB_BIT_SIZE / 8) - 1) /
+		(RTE_BITMAP_SLAB_BIT_SIZE / 8);
+	n_cache_lines_context_and_array1 = (n_slabs_context + n_slabs_array1 +
+		RTE_BITMAP_CL_SLAB_SIZE - 1) / RTE_BITMAP_CL_SLAB_SIZE;
+	n_bytes_total = (n_cache_lines_context_and_array1 +
+		n_cache_lines_array2) * RTE_CACHE_LINE_SIZE;
 
 	if (array1_byte_offset) {
 		*array1_byte_offset = n_slabs_context * (RTE_BITMAP_SLAB_BIT_SIZE / 8);
@@ -185,7 +203,8 @@ __rte_bitmap_get_memory_footprint(uint32_t n_bits,
 		*array1_slabs = n_slabs_array1;
 	}
 	if (array2_byte_offset) {
-		*array2_byte_offset = n_cache_lines_context_and_array1 * RTE_CACHE_LINE_SIZE;
+		*array2_byte_offset = n_cache_lines_context_and_array1 *
+			RTE_CACHE_LINE_SIZE;
 	}
 	if (array2_slabs) {
 		*array2_slabs = n_cache_lines_array2 * RTE_BITMAP_CL_SLAB_SIZE;
@@ -210,6 +229,7 @@ __rte_bitmap_scan_init(struct rte_bitmap *bmp)
  *
  * @param n_bits
  *   Number of bits in the bitmap
+ *
  * @return
  *   Bitmap memory footprint measured in bytes on success, 0 on error
  */
@@ -226,12 +246,14 @@ rte_bitmap_get_memory_footprint(uint32_t n_bits) {
 /**
  * Bitmap initialization
  *
- * @param mem_size
- *   Minimum expected size of bitmap.
+ * @param n_bits
+ *   Number of pre-allocated bits in array2. Must be non-zero and multiple of
+ *   512.
  * @param mem
  *   Base address of array1 and array2.
- * @param n_bits
- *   Number of pre-allocated bits in array2. Must be non-zero and multiple of 512.
+ * @param mem_size
+ *   Minimum expected size of bitmap.
+ *
  * @return
  *   Handle to bitmap instance.
  */
@@ -277,6 +299,7 @@ rte_bitmap_init(uint32_t n_bits, uint8_t *mem, uint32_t mem_size)
  *
  * @param bmp
  *   Handle to bitmap instance
+ *
  * @return
  *   0 upon success, error code otherwise
  */
@@ -312,6 +335,7 @@ rte_bitmap_reset(struct rte_bitmap *bmp)
  *   Handle to bitmap instance
  * @param pos
  *   Bit position
+ *
 * @return
 *   0 upon success, error code otherwise
 */
@@ -333,6 +357,7 @@ rte_bitmap_prefetch0(struct rte_bitmap *bmp, uint32_t pos)
  *   Handle to bitmap instance
  * @param pos
  *   Bit position
+ *
 * @return
 *   0 when bit is cleared, non-zero when bit is set
 */
@@ -365,7 +390,8 @@ rte_bitmap_set(struct rte_bitmap *bmp, uint32_t pos)
 	/* Set bit in array2 slab and set bit in array1 slab */
 	index2 = pos >> RTE_BITMAP_SLAB_BIT_SIZE_LOG2;
 	offset2 = pos & RTE_BITMAP_SLAB_BIT_MASK;
-	index1 = pos >> (RTE_BITMAP_SLAB_BIT_SIZE_LOG2 + RTE_BITMAP_CL_BIT_SIZE_LOG2);
+	index1 = pos >> (RTE_BITMAP_SLAB_BIT_SIZE_LOG2 +
+			RTE_BITMAP_CL_BIT_SIZE_LOG2);
 	offset1 = (pos >> RTE_BITMAP_CL_BIT_SIZE_LOG2) & RTE_BITMAP_SLAB_BIT_MASK;
 	slab2 = bmp->array2 + index2;
 	slab1 = bmp->array1 + index1;
@@ -392,7 +418,8 @@ rte_bitmap_set_slab(struct rte_bitmap *bmp, uint32_t pos, uint64_t slab)
 
 	/* Set bits in array2 slab and set bit in array1 slab */
 	index2 = pos >> RTE_BITMAP_SLAB_BIT_SIZE_LOG2;
-	index1 = pos >> (RTE_BITMAP_SLAB_BIT_SIZE_LOG2 + RTE_BITMAP_CL_BIT_SIZE_LOG2);
+	index1 = pos >> (RTE_BITMAP_SLAB_BIT_SIZE_LOG2 +
+			RTE_BITMAP_CL_BIT_SIZE_LOG2);
 	offset1 = (pos >> RTE_BITMAP_CL_BIT_SIZE_LOG2) & RTE_BITMAP_SLAB_BIT_MASK;
 	slab2 = bmp->array2 + index2;
 	slab1 = bmp->array1 + index1;
@@ -449,7 +476,8 @@ rte_bitmap_clear(struct rte_bitmap *bmp, uint32_t pos)
 	}
 
 	/* The array2 cache line is all-zeros, so clear bit in array1 slab */
-	index1 = pos >> (RTE_BITMAP_SLAB_BIT_SIZE_LOG2 + RTE_BITMAP_CL_BIT_SIZE_LOG2);
+	index1 = pos >> (RTE_BITMAP_SLAB_BIT_SIZE_LOG2 +
+			RTE_BITMAP_CL_BIT_SIZE_LOG2);
 	offset1 = (pos >> RTE_BITMAP_CL_BIT_SIZE_LOG2) & RTE_BITMAP_SLAB_BIT_MASK;
 	slab1 = bmp->array1 + index1;
 	*slab1 &= ~(1lu << offset1);
@@ -500,7 +528,8 @@ __rte_bitmap_scan_read(struct rte_bitmap *bmp, uint32_t *pos, uint64_t *slab)
 	uint64_t *slab2;
 
 	slab2 = bmp->array2 + bmp->index2;
-	for ( ; bmp->go2 ; bmp->index2 ++, slab2 ++, bmp->go2 = bmp->index2 & RTE_BITMAP_CL_SLAB_MASK) {
+	for ( ; bmp->go2 ; bmp->index2 ++, slab2 ++,
+			bmp->go2 = bmp->index2 & RTE_BITMAP_CL_SLAB_MASK) {
 		if (*slab2) {
 			*pos = bmp->index2 << RTE_BITMAP_SLAB_BIT_SIZE_LOG2;
 			*slab = *slab2;
@@ -520,10 +549,10 @@ __rte_bitmap_scan_read(struct rte_bitmap *bmp, uint32_t *pos, uint64_t *slab)
  *
  * @param bmp
  *   Handle to bitmap instance
- * @param pos
+ * @param[out] pos
  *   When function call returns 1, pos contains the position of the next set
  *   bit, otherwise not modified
- * @param slab
+ * @param[out] slab
  *   When function call returns 1, slab contains the value of the entire 64-bit
  *   slab where the bit indicated by pos is located. Slabs are always 64-bit
  *   aligned, so the position of the first bit of the slab (this bit is not
@@ -532,6 +561,7 @@ __rte_bitmap_scan_read(struct rte_bitmap *bmp, uint32_t *pos, uint64_t *slab)
  *   after this slab, so the same slab will not be returned again if it
  *   contains more than one bit which is set. When function call returns 0,
  *   slab is not modified.
+ *
 * @return
 *   0 if there is no bit set in the bitmap, 1 otherwise
 */
diff --git a/lib/librte_sched/Makefile b/lib/librte_sched/Makefile
index 18274e7..072cd63 100644
--- a/lib/librte_sched/Makefile
+++ b/lib/librte_sched/Makefile
@@ -55,7 +55,8 @@ SRCS-$(CONFIG_RTE_LIBRTE_SCHED) += rte_sched.c rte_red.c rte_approx.c
 SRCS-$(CONFIG_RTE_LIBRTE_SCHED) += rte_reciprocal.c
 
 # install includes
-SYMLINK-$(CONFIG_RTE_LIBRTE_SCHED)-include := rte_sched.h rte_bitmap.h rte_sched_common.h rte_red.h rte_approx.h
-SYMLINK-$(CONFIG_RTE_LIBRTE_SCHED)-include += rte_reciprocal.h
+SYMLINK-$(CONFIG_RTE_LIBRTE_SCHED)-include := rte_sched.h
+SYMLINK-$(CONFIG_RTE_LIBRTE_SCHED)-include += rte_sched_common.h rte_red.h
+SYMLINK-$(CONFIG_RTE_LIBRTE_SCHED)-include += rte_approx.h
 
 include $(RTE_SDK)/mk/rte.lib.mk
diff --git a/lib/librte_sched/rte_sched.c b/lib/librte_sched/rte_sched.c
index b7cba11..b3e0d4f 100644
--- a/lib/librte_sched/rte_sched.c
+++ b/lib/librte_sched/rte_sched.c
@@ -34,6 +34,7 @@
 #include <stdio.h>
 #include <string.h>
 
+#include <rte_bitmap.h>
 #include <rte_common.h>
 #include <rte_log.h>
 #include <rte_memory.h>
@@ -44,7 +45,6 @@
 #include <rte_mbuf.h>
 
 #include "rte_sched.h"
-#include "rte_bitmap.h"
 #include "rte_sched_common.h"
 #include "rte_approx.h"
 #include "rte_reciprocal.h"
--
2.7.4
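
Reviewer note, not part of the patch: a hand-worked sketch of the two-level indexing performed by the macros above, assuming 64-byte cache lines (RTE_BITMAP_CL_BIT_SIZE == 512); pos = 1000 is an arbitrary example and the local constants simply mirror the header's macros.

#include <assert.h>
#include <stdint.h>

int main(void)
{
	const uint32_t slab_log2 = 6;  /* RTE_BITMAP_SLAB_BIT_SIZE_LOG2 */
	const uint32_t cl_log2 = 9;    /* RTE_BITMAP_CL_BIT_SIZE_LOG2 = 6 + 3 */
	uint32_t pos = 1000;

	uint32_t index2 = pos >> slab_log2;              /* slab 15 of array2 */
	uint32_t offset2 = pos & 63;                     /* bit 40 within that slab */
	uint32_t index1 = pos >> (slab_log2 + cl_log2);  /* slab 0 of array1 */
	uint32_t offset1 = (pos >> cl_log2) & 63;        /* bit 1 within that slab */

	assert(index2 == 15 && offset2 == 40);
	assert(index1 == 0 && offset1 == 1);
	/* array1 bit 1 marks the second 512-bit cache line of array2
	 * (bits 512-1023) as non-empty, which is what makes scans fast. */
	return 0;
}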