mirror of
https://github.com/jemalloc/jemalloc.git
synced 2026-05-14 08:56:20 +03:00
Fix irallocx_prof() sample logic.
Fix irallocx_prof() sample logic to only update the threshold counter
after it knows what size the allocation ended up being. This regression
was caused by 6e73dc194e (Fix a profile
sampling race.), which did not make it into any releases prior to this
fix.
This commit is contained in:
parent
9c640bfdd4
commit
c3e9e7b041
1 changed file with 3 additions and 3 deletions
|
|
@ -1582,7 +1582,7 @@ irallocx_prof(void *oldptr, size_t old_usize, size_t size, size_t alignment,
|
|||
prof_tctx_t *old_tctx, *tctx;
|
||||
|
||||
old_tctx = prof_tctx_get(oldptr);
|
||||
tctx = prof_alloc_prep(*usize, true);
|
||||
tctx = prof_alloc_prep(*usize, false);
|
||||
if (unlikely((uintptr_t)tctx != (uintptr_t)1U)) {
|
||||
p = irallocx_prof_sample(oldptr, size, alignment, *usize, zero,
|
||||
try_tcache_alloc, try_tcache_dalloc, arena, tctx);
|
||||
|
|
@ -1591,7 +1591,7 @@ irallocx_prof(void *oldptr, size_t old_usize, size_t size, size_t alignment,
|
|||
try_tcache_dalloc, arena);
|
||||
}
|
||||
if (unlikely(p == NULL)) {
|
||||
prof_alloc_rollback(tctx, true);
|
||||
prof_alloc_rollback(tctx, false);
|
||||
return (NULL);
|
||||
}
|
||||
|
||||
|
|
@ -1606,7 +1606,7 @@ irallocx_prof(void *oldptr, size_t old_usize, size_t size, size_t alignment,
|
|||
*/
|
||||
*usize = isalloc(p, config_prof);
|
||||
}
|
||||
prof_realloc(p, *usize, tctx, true, old_usize, old_tctx);
|
||||
prof_realloc(p, *usize, tctx, false, old_usize, old_tctx);
|
||||
|
||||
return (p);
|
||||
}
|
||||
|
|
|
|||
Loading…
Add table
Add a link
Reference in a new issue