On 23.09.19 16:31, Jan Beulich wrote:
Hi, Jan
+
+    if ( ptr == NULL || ptr == ZERO_BLOCK_PTR )
+        return _xmalloc(size, align);
+
+    ASSERT((align & (align - 1)) == 0);
+    if ( align < MEM_ALIGN )
+        align = MEM_ALIGN;
+
+    tmp_size = size + align - MEM_ALIGN;
+
+    if ( [...]
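For readability, the fragments above (together with the pieces quoted later in this thread) can be assembled into a complete function along the following lines. This is only a sketch, not the committed patch: the !size handling follows conventional realloc() semantics rather than anything quoted here, the curr_size computation assumes the TLSF block header layout (struct bhdr, BHDR_OVERHEAD, BLOCK_SIZE_MASK) already present in xmalloc_tlsf.c, and the separate whole-page allocation path is ignored for simplicity.

void *_xrealloc(void *ptr, unsigned long size, unsigned long align)
{
    unsigned long curr_size, tmp_size;
    void *p;

    /* Conventional realloc() semantics: size 0 acts as free(). */
    if ( !size )
    {
        xfree(ptr);
        return ZERO_BLOCK_PTR;
    }

    /* No previous allocation: degenerate to a plain _xmalloc(). */
    if ( ptr == NULL || ptr == ZERO_BLOCK_PTR )
        return _xmalloc(size, align);

    /* Alignment must be a power of two and at least MEM_ALIGN. */
    ASSERT((align & (align - 1)) == 0);
    if ( align < MEM_ALIGN )
        align = MEM_ALIGN;

    /* Worst-case size needed to honour the requested alignment. */
    tmp_size = size + align - MEM_ALIGN;
    tmp_size = (tmp_size < MIN_BLOCK_SIZE) ? MIN_BLOCK_SIZE
                                           : ROUNDUP_SIZE(tmp_size);

    /* Usable size of the existing block, read from the TLSF header
     * (assumed layout; ignores the whole-page allocation path). */
    curr_size = ((const struct bhdr *)((char *)ptr - BHDR_OVERHEAD))->size
                & BLOCK_SIZE_MASK;

    /* Size and alignment already fit: reuse the block in place. */
    if ( tmp_size <= curr_size && ((unsigned long)ptr & (align - 1)) == 0 )
        return ptr;

    /* Otherwise move: allocate, copy the smaller of the two sizes, free. */
    p = _xmalloc(size, align);
    if ( p )
    {
        memcpy(p, ptr, min(curr_size, size));
        xfree(ptr);
    }

    return p;
}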
On 23.09.2019 14:50, Oleksandr wrote:
> Is the diff below close to what you meant?
Almost.
> @@ -598,10 +621,70 @@ void *_xzalloc(unsigned long size, unsigned long align)
> return p ? memset(p, 0, size) : p;
> }
>
> -void xfree(void *p)
> +void *_xrealloc(void *ptr, unsigned long size, unsigned long align)
[...]
On 16.09.19 18:24, Jan Beulich wrote:
Hi, Jan.
>> +            ROUNDUP_SIZE(tmp_size);
>> +
>> +    if ( tmp_size <= curr_size && ((unsigned long)ptr & (align - 1)) == 0 )
>> +        return ptr; /* size and alignment fit in the already allocated space */
> You also don't seem to ever update ptr in case you [...]
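To make Jan's point concrete: whenever the in-place check above does not hold, a new block has to be allocated and the data migrated, so the function (and ultimately every caller) must adopt the pointer returned by _xmalloc() rather than keep using the old one. A hypothetical caller-side pattern (buf, new_size and align are illustrative names, not from the patch):

/* Keep the old pointer until the reallocation succeeds; on failure
 * the original block is expected to remain valid and owned by the caller. */
void *tmp = _xrealloc(buf, new_size, align);

if ( tmp == NULL )
    return -ENOMEM;   /* 'buf' still points at the old, intact block */

buf = tmp;            /* adopt the (possibly moved) allocation */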
On 16.09.2019 17:03, Oleksandr wrote:
> On 16.09.19 13:13, Jan Beulich wrote:
>> On 13.09.2019 17:35, Oleksandr Tyshchenko wrote:
>>> --- a/xen/common/xmalloc_tlsf.c
>>> +++ b/xen/common/xmalloc_tlsf.c
>>> @@ -598,6 +598,58 @@ void *_xzalloc(unsigned long size, unsigned long align)
>>> return p ? memset(p, 0, size) : p;
[...]
On 16.09.19 13:13, Jan Beulich wrote:
Hi, Jan
> On 13.09.2019 17:35, Oleksandr Tyshchenko wrote:
>> --- a/xen/common/xmalloc_tlsf.c
>> +++ b/xen/common/xmalloc_tlsf.c
>> @@ -598,6 +598,58 @@ void *_xzalloc(unsigned long size, unsigned long align)
>>      return p ? memset(p, 0, size) : p;
>>  }
>> +void *_xrealloc(void *ptr, unsigned long size, unsigned long align)
[...]
On 13.09.2019 17:35, Oleksandr Tyshchenko wrote:
> --- a/xen/common/xmalloc_tlsf.c
> +++ b/xen/common/xmalloc_tlsf.c
> @@ -598,6 +598,58 @@ void *_xzalloc(unsigned long size, unsigned long align)
> return p ? memset(p, 0, size) : p;
> }
>
> +void *_xrealloc(void *ptr, unsigned long size, unsigned long align)
[...]
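For completeness, the caller-visible semantics the thread converges on can be illustrated by a hypothetical usage sketch (the element counts and the use of __alignof__ are illustrative, not taken from the patch):

unsigned int *arr;

/* NULL (or ZERO_BLOCK_PTR) input degenerates to _xmalloc(). */
arr = _xrealloc(NULL, 4 * sizeof(*arr), __alignof__(*arr));

/* Growing may reuse the block in place or move it; either way the
 * existing contents are preserved and the returned pointer is adopted. */
arr = _xrealloc(arr, 8 * sizeof(*arr), __alignof__(*arr));

/* Size 0 frees the block and yields ZERO_BLOCK_PTR, which can safely
 * be handed back to _xrealloc() or xfree() later. */
arr = _xrealloc(arr, 0, __alignof__(*arr));

A production caller would of course keep the old pointer until the call succeeds, as in the earlier caller-side sketch.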