From 6402e2b0d4b406ee3f73e5f4e3233d4af23d603b Mon Sep 17 00:00:00 2001
From: Daniel Micay
Date: Sat, 12 Oct 2024 03:17:44 -0400
Subject: [PATCH] reduce probability hint for is_memtag_enabled

---
 h_malloc.c | 18 +++++++++---------
 util.h     |  2 ++
 2 files changed, 11 insertions(+), 9 deletions(-)

diff --git a/h_malloc.c b/h_malloc.c
index 15be0a2..89ef91d 100644
--- a/h_malloc.c
+++ b/h_malloc.c
@@ -470,7 +470,7 @@ static void write_after_free_check(const char *p, size_t size) {
     }
 
 #ifdef HAS_ARM_MTE
-    if (likely(is_memtag_enabled())) {
+    if (likely51(is_memtag_enabled())) {
         return;
     }
 #endif
@@ -505,7 +505,7 @@ static void set_slab_canary_value(UNUSED struct slab_metadata *metadata, UNUSED
 static void set_canary(UNUSED const struct slab_metadata *metadata, UNUSED void *p, UNUSED size_t size) {
 #if SLAB_CANARY
 #ifdef HAS_ARM_MTE
-    if (likely(is_memtag_enabled())) {
+    if (likely51(is_memtag_enabled())) {
         return;
     }
 #endif
@@ -517,7 +517,7 @@ static void set_canary(UNUSED const struct slab_metadata *metadata, UNUSED void
 static void check_canary(UNUSED const struct slab_metadata *metadata, UNUSED const void *p, UNUSED size_t size) {
 #if SLAB_CANARY
 #ifdef HAS_ARM_MTE
-    if (likely(is_memtag_enabled())) {
+    if (likely51(is_memtag_enabled())) {
         return;
     }
 #endif
@@ -624,7 +624,7 @@ static inline void *allocate_small(unsigned arena, size_t requested_size) {
         write_after_free_check(p, size - canary_size);
         set_canary(metadata, p, size);
 #ifdef HAS_ARM_MTE
-        if (likely(is_memtag_enabled())) {
+        if (likely51(is_memtag_enabled())) {
             p = tag_and_clear_slab_slot(metadata, p, slot, size);
         }
 #endif
@@ -661,7 +661,7 @@ static inline void *allocate_small(unsigned arena, size_t requested_size) {
         if (requested_size) {
             set_canary(metadata, p, size);
 #ifdef HAS_ARM_MTE
-            if (likely(is_memtag_enabled())) {
+            if (likely51(is_memtag_enabled())) {
                 p = tag_and_clear_slab_slot(metadata, p, slot, size);
             }
 #endif
@@ -688,7 +688,7 @@ static inline void *allocate_small(unsigned arena, size_t requested_size) {
         if (requested_size) {
             set_canary(metadata, p, size);
 #ifdef HAS_ARM_MTE
-            if (likely(is_memtag_enabled())) {
+            if (likely51(is_memtag_enabled())) {
                 p = tag_and_clear_slab_slot(metadata, p, slot, size);
             }
 #endif
@@ -717,7 +717,7 @@ static inline void *allocate_small(unsigned arena, size_t requested_size) {
     write_after_free_check(p, size - canary_size);
     set_canary(metadata, p, size);
 #ifdef HAS_ARM_MTE
-    if (likely(is_memtag_enabled())) {
+    if (likely51(is_memtag_enabled())) {
         p = tag_and_clear_slab_slot(metadata, p, slot, size);
     }
 #endif
@@ -805,7 +805,7 @@ static inline void deallocate_small(void *p, const size_t *expected_size) {
 
         bool skip_zero = false;
 #ifdef HAS_ARM_MTE
-        if (likely(is_memtag_enabled())) {
+        if (likely51(is_memtag_enabled())) {
             arm_mte_tag_and_clear_mem(set_pointer_tag(p, RESERVED_TAG), size);
             // metadata->arm_mte_tags is intentionally not updated, see tag_and_clear_slab_slot()
             skip_zero = true;
@@ -1243,7 +1243,7 @@ COLD static void init_slow_path(void) {
         fatal_error("failed to unprotect memory for regions table");
     }
 #ifdef HAS_ARM_MTE
-    if (likely(is_memtag_enabled())) {
+    if (likely51(is_memtag_enabled())) {
         ro.slab_region_start = memory_map_mte(slab_region_size);
     } else {
         ro.slab_region_start = memory_map(slab_region_size);
diff --git a/util.h b/util.h
index fc22c23..6b1a390 100644
--- a/util.h
+++ b/util.h
@@ -9,7 +9,9 @@
 #define noreturn __attribute__((noreturn))
 
 #define likely(x) __builtin_expect(!!(x), 1)
+#define likely51(x) __builtin_expect_with_probability(!!(x), 1, 0.51)
 #define unlikely(x) __builtin_expect(!!(x), 0)
+#define unlikely51(x) __builtin_expect_with_probability(!!(x), 0, 0.51)
 
 #define min(x, y) ({ \
     __typeof__(x) _x = (x); \
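
The likely51()/unlikely51() macros added in util.h are built on
__builtin_expect_with_probability, available since GCC 9 and also supported
by Clang. Plain __builtin_expect implies a strong hint (around 90% by default
in GCC), while 0.51 tells the optimizer the branch is only barely more likely
than not: the expected path is still laid out as the fall-through, but the
other path is no longer treated as nearly dead. Below is a minimal standalone
sketch of the technique, assuming a GCC 9+ or Clang toolchain; the
is_memtag_enabled() stub is a hypothetical stand-in for the allocator's real
runtime check:

    /* weak_hint.c - branch hints with explicit probabilities (GCC 9+/Clang) */
    #include <stdbool.h>
    #include <stdio.h>

    /* strong hints: __builtin_expect implies a high probability */
    #define likely(x) __builtin_expect(!!(x), 1)
    #define unlikely(x) __builtin_expect(!!(x), 0)
    /* weak hints: 0.51 keeps the expected path first while telling the
       optimizer the branch is close to a coin flip */
    #define likely51(x) __builtin_expect_with_probability(!!(x), 1, 0.51)
    #define unlikely51(x) __builtin_expect_with_probability(!!(x), 0, 0.51)

    /* hypothetical stub; the real check reads per-process runtime state */
    static bool is_memtag_enabled(void) {
        return false;
    }

    int main(void) {
        if (likely51(is_memtag_enabled())) {
            puts("MTE path: tag and clear the slab slot");
        } else {
            puts("fallback path: plain allocation");
        }
        return 0;
    }

As far as can be read from the patch itself, the motivation for the weaker
hint is that whether MTE is enabled is a fixed property of the process rather
than a rare event, so a near-even probability appears to be a better fit than
a strong one that would deprioritize the MTE-disabled path.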