@@ -886,13 +886,15 @@ imalloc_prof(size_t usize)
 	void *p;
 	prof_tctx_t *tctx;
 
-	tctx = prof_alloc_prep(usize);
+	tctx = prof_alloc_prep(usize, true);
 	if ((uintptr_t)tctx != (uintptr_t)1U)
 		p = imalloc_prof_sample(usize, tctx);
 	else
 		p = imalloc(usize);
-	if (p == NULL)
+	if (p == NULL) {
+		prof_alloc_rollback(tctx, true);
 		return (NULL);
+	}
 	prof_malloc(p, usize, tctx);
 
 	return (p);
@@ -962,16 +964,20 @@ imemalign_prof_sample(size_t alignment, size_t usize, prof_tctx_t *tctx)
 }
 
 JEMALLOC_ALWAYS_INLINE_C void *
-imemalign_prof(size_t alignment, size_t usize, prof_tctx_t *tctx)
+imemalign_prof(size_t alignment, size_t usize)
 {
 	void *p;
+	prof_tctx_t *tctx;
 
+	tctx = prof_alloc_prep(usize, true);
 	if ((uintptr_t)tctx != (uintptr_t)1U)
 		p = imemalign_prof_sample(alignment, usize, tctx);
 	else
 		p = ipalloc(usize, alignment, false);
-	if (p == NULL)
+	if (p == NULL) {
+		prof_alloc_rollback(tctx, true);
 		return (NULL);
+	}
 	prof_malloc(p, usize, tctx);
 
 	return (p);
@@ -1013,12 +1019,9 @@ imemalign(void **memptr, size_t alignment, size_t size, size_t min_alignment)
 		goto label_oom;
 	}
 
-	if (config_prof && opt_prof) {
-		prof_tctx_t *tctx;
-
-		tctx = prof_alloc_prep(usize);
-		result = imemalign_prof(alignment, usize, tctx);
-	} else
+	if (config_prof && opt_prof)
+		result = imemalign_prof(alignment, usize);
+	else
 		result = ipalloc(usize, alignment, false);
 	if (result == NULL)
 		goto label_oom;
@@ -1087,16 +1090,20 @@ icalloc_prof_sample(size_t usize, prof_tctx_t *tctx)
 }
 
 JEMALLOC_ALWAYS_INLINE_C void *
-icalloc_prof(size_t usize, prof_tctx_t *tctx)
+icalloc_prof(size_t usize)
 {
 	void *p;
+	prof_tctx_t *tctx;
 
+	tctx = prof_alloc_prep(usize, true);
 	if ((uintptr_t)tctx != (uintptr_t)1U)
 		p = icalloc_prof_sample(usize, tctx);
 	else
 		p = icalloc(usize);
-	if (p == NULL)
+	if (p == NULL) {
+		prof_alloc_rollback(tctx, true);
 		return (NULL);
+	}
 	prof_malloc(p, usize, tctx);
 
 	return (p);
@@ -1136,11 +1143,8 @@ je_calloc(size_t num, size_t size)
 	}
 
 	if (config_prof && opt_prof) {
-		prof_tctx_t *tctx;
-
 		usize = s2u(num_size);
-		tctx = prof_alloc_prep(usize);
-		ret = icalloc_prof(usize, tctx);
+		ret = icalloc_prof(usize);
 	} else {
 		if (config_stats || (config_valgrind && in_valgrind))
 			usize = s2u(num_size);
@@ -1184,19 +1188,20 @@ irealloc_prof_sample(void *oldptr, size_t usize, prof_tctx_t *tctx)
 }
 
 JEMALLOC_ALWAYS_INLINE_C void *
-irealloc_prof(void *oldptr, size_t old_usize, size_t usize, prof_tctx_t *tctx)
+irealloc_prof(void *oldptr, size_t old_usize, size_t usize)
 {
 	void *p;
-	prof_tctx_t *old_tctx;
+	prof_tctx_t *old_tctx, *tctx;
 
 	old_tctx = prof_tctx_get(oldptr);
+	tctx = prof_alloc_prep(usize, true);
 	if ((uintptr_t)tctx != (uintptr_t)1U)
 		p = irealloc_prof_sample(oldptr, usize, tctx);
 	else
 		p = iralloc(oldptr, usize, 0, false);
 	if (p == NULL)
 		return (NULL);
-	prof_realloc(p, usize, tctx, old_usize, old_tctx);
+	prof_realloc(p, usize, tctx, true, old_usize, old_tctx);
 
 	return (p);
 }
@@ -1270,11 +1275,8 @@ je_realloc(void *ptr, size_t size)
 			old_rzsize = config_prof ? p2rz(ptr) : u2rz(old_usize);
 
 		if (config_prof && opt_prof) {
-			prof_tctx_t *tctx;
-
 			usize = s2u(size);
-			tctx = prof_alloc_prep(usize);
-			ret = irealloc_prof(ptr, old_usize, usize, tctx);
+			ret = irealloc_prof(ptr, old_usize, usize);
 		} else {
			if (config_stats || (config_valgrind && in_valgrind))
				usize = s2u(size);
@@ -1477,7 +1479,7 @@ imallocx_prof(size_t size, int flags, size_t *usize)
 
 	imallocx_flags_decode(size, flags, usize, &alignment, &zero,
 	    &try_tcache, &arena);
-	tctx = prof_alloc_prep(*usize);
+	tctx = prof_alloc_prep(*usize, true);
 	if ((uintptr_t)tctx == (uintptr_t)1U) {
 		p = imallocx_maybe_flags(size, flags, *usize, alignment, zero,
 		    try_tcache, arena);
@@ -1486,8 +1488,10 @@ imallocx_prof(size_t size, int flags, size_t *usize)
 		    try_tcache, arena);
 	} else
 		p = NULL;
-	if (p == NULL)
+	if (p == NULL) {
+		prof_alloc_rollback(tctx, true);
 		return (NULL);
+	}
 	prof_malloc(p, *usize, tctx);
 
 	return (p);
@@ -1572,21 +1576,24 @@ irallocx_prof_sample(void *oldptr, size_t size, size_t alignment, size_t usize,
 JEMALLOC_ALWAYS_INLINE_C void *
 irallocx_prof(void *oldptr, size_t old_usize, size_t size, size_t alignment,
     size_t *usize, bool zero, bool try_tcache_alloc, bool try_tcache_dalloc,
-    arena_t *arena, prof_tctx_t *tctx)
+    arena_t *arena)
 {
 	void *p;
-	prof_tctx_t *old_tctx;
+	prof_tctx_t *old_tctx, *tctx;
 
 	old_tctx = prof_tctx_get(oldptr);
-	if ((uintptr_t)tctx != (uintptr_t)1U)
+	tctx = prof_alloc_prep(*usize, true);
+	if ((uintptr_t)tctx != (uintptr_t)1U) {
 		p = irallocx_prof_sample(oldptr, size, alignment, *usize, zero,
 		    try_tcache_alloc, try_tcache_dalloc, arena, tctx);
-	else {
+	} else {
 		p = iralloct(oldptr, size, alignment, zero, try_tcache_alloc,
 		    try_tcache_dalloc, arena);
 	}
-	if (p == NULL)
+	if (p == NULL) {
+		prof_alloc_rollback(tctx, true);
 		return (NULL);
+	}
 
 	if (p == oldptr && alignment != 0) {
 		/*
@@ -1599,7 +1606,7 @@ irallocx_prof(void *oldptr, size_t old_usize, size_t size, size_t alignment,
 		 */
 		*usize = isalloc(p, config_prof);
 	}
-	prof_realloc(p, *usize, tctx, old_usize, old_tctx);
+	prof_realloc(p, *usize, tctx, true, old_usize, old_tctx);
 
 	return (p);
 }
@@ -1641,13 +1648,10 @@ je_rallocx(void *ptr, size_t size, int flags)
 		old_rzsize = u2rz(old_usize);
 
 	if (config_prof && opt_prof) {
-		prof_tctx_t *tctx;
-
 		usize = (alignment == 0) ? s2u(size) : sa2u(size, alignment);
 		assert(usize != 0);
-		tctx = prof_alloc_prep(usize);
 		p = irallocx_prof(ptr, old_usize, size, alignment, &usize, zero,
-		    try_tcache_alloc, try_tcache_dalloc, arena, tctx);
+		    try_tcache_alloc, try_tcache_dalloc, arena);
 		if (p == NULL)
 			goto label_oom;
 	} else {
@@ -1720,23 +1724,33 @@ ixallocx_prof_sample(void *ptr, size_t old_usize, size_t size, size_t extra,
 
 JEMALLOC_ALWAYS_INLINE_C size_t
 ixallocx_prof(void *ptr, size_t old_usize, size_t size, size_t extra,
-    size_t alignment, size_t max_usize, bool zero, arena_t *arena,
-    prof_tctx_t *tctx)
+    size_t alignment, bool zero, arena_t *arena)
 {
-	size_t usize;
-	prof_tctx_t *old_tctx;
+	size_t max_usize, usize;
+	prof_tctx_t *old_tctx, *tctx;
 
 	old_tctx = prof_tctx_get(ptr);
+	/*
+	 * usize isn't knowable before ixalloc() returns when extra is non-zero.
+	 * Therefore, compute its maximum possible value and use that in
+	 * prof_alloc_prep() to decide whether to capture a backtrace.
+	 * prof_realloc() will use the actual usize to decide whether to sample.
+	 */
+	max_usize = (alignment == 0) ? s2u(size+extra) : sa2u(size+extra,
+	    alignment);
+	tctx = prof_alloc_prep(max_usize, false);
 	if ((uintptr_t)tctx != (uintptr_t)1U) {
 		usize = ixallocx_prof_sample(ptr, old_usize, size, extra,
 		    alignment, zero, max_usize, arena, tctx);
 	} else {
 		usize = ixallocx_helper(ptr, old_usize, size, extra, alignment,
 		    zero, arena);
 	}
-	if (usize == old_usize)
+	if (usize == old_usize) {
+		prof_alloc_rollback(tctx, false);
 		return (usize);
-	prof_realloc(ptr, usize, tctx, old_usize, old_tctx);
+	}
+	prof_realloc(ptr, usize, tctx, false, old_usize, old_tctx);
 
 	return (usize);
 }
@@ -1767,19 +1781,8 @@ je_xallocx(void *ptr, size_t size, size_t extra, int flags)
 		old_rzsize = u2rz(old_usize);
 
 	if (config_prof && opt_prof) {
-		prof_tctx_t *tctx;
-		/*
-		 * usize isn't knowable before ixalloc() returns when extra is
-		 * non-zero. Therefore, compute its maximum possible value and
-		 * use that in prof_alloc_prep() to decide whether to capture a
-		 * backtrace. prof_realloc() will use the actual usize to
-		 * decide whether to sample.
-		 */
-		size_t max_usize = (alignment == 0) ? s2u(size+extra) :
-		    sa2u(size+extra, alignment);
-		tctx = prof_alloc_prep(max_usize);
 		usize = ixallocx_prof(ptr, old_usize, size, extra, alignment,
-		    max_usize, zero, arena, tctx);
+		    zero, arena);
 	} else {
 		usize = ixallocx_helper(ptr, old_usize, size, extra, alignment,
 		    zero, arena);
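
Note: every hunk above applies the same shape of change; the sketch below condenses it for reference. It reuses the internal names visible in this diff (prof_tctx_t, prof_alloc_prep(), prof_alloc_rollback(), prof_malloc(), imalloc(), imalloc_prof_sample()) and would only compile inside src/jemalloc.c; the function name ialloc_prof_sketch and the exact prototypes of the prof_* helpers are assumptions inferred from the call sites, not quoted from prof.h.

/*
 * Sketch of the pattern this change applies to the prof-enabled
 * allocation paths (mirrors imalloc_prof() above).
 */
JEMALLOC_ALWAYS_INLINE_C void *
ialloc_prof_sketch(size_t usize)
{
	void *p;
	prof_tctx_t *tctx;

	/* Decide up front whether this allocation is sampled. */
	tctx = prof_alloc_prep(usize, true);
	if ((uintptr_t)tctx != (uintptr_t)1U)
		p = imalloc_prof_sample(usize, tctx);	/* sampled */
	else
		p = imalloc(usize);			/* not sampled */
	if (p == NULL) {
		/* New in this change: undo prof_alloc_prep() before bailing. */
		prof_alloc_rollback(tctx, true);
		return (NULL);
	}
	prof_malloc(p, usize, tctx);
	return (p);
}

Reallocating paths differ only in calling prof_realloc(p, usize, tctx, update, old_usize, old_tctx) instead of prof_malloc(). ixallocx_prof() is the one caller that passes false for the new update argument, since it can only hand prof_alloc_prep() a max_usize bound before ixalloc() runs, as its in-code comment explains.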