This patch to libgo fixes memory allocation on 32-bit systems when enough memory has already been allocated that allocations cross address 0x80000000. The problem is described in this patch to the master repository: https://codereview.appspot.com/49460043 .
runtime: fix 32-bit malloc for pointers >= 0x80000000

The spans array is allocated in runtime·mallocinit. On a 32-bit system the number of entries in the spans array is MaxArena32 / PageSize, which is (2U << 30) / (1 << 12) == (1 << 19). So we are allocating an array sized for a 19-bit index, while the index itself can take 20-bit values. According to the comment in the function, this is intentional: we only allocate enough spans (and bitmaps) for a 2G arena, because allocating more would probably be wasteful.

But since the span index is simply the upper 20 bits of the memory address, this scheme only works if memory addresses are limited to the low 2G of memory. That would be OK if we were careful to enforce it, but we're not. What we are careful to enforce, in functions like runtime·MHeap_SysAlloc, is that we always return addresses between the heap's arena_start and arena_start + MaxArena32.

We generally get away with it because we start allocating just after the program end, so we only run into trouble with programs that allocate a lot of memory, enough to get past address 0x80000000.

This changes the code that computes a span index to subtract arena_start on 32-bit systems, just as we currently do on 64-bit systems.

This is the same patch, applied to libgo. Bootstrapped and ran the Go testsuite on x86_64-unknown-linux-gnu, in both 64-bit and 32-bit modes. Committed to mainline.

Ian
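To make the arithmetic above concrete, here is a minimal standalone sketch of the old and new index computations. This is not runtime code; PageShift, the 2G arena limit, and the sample addresses are assumptions chosen to match the description above.

#include <stdint.h>
#include <stdio.h>

enum { PageShift = 12 };                 /* 4K pages, as in the runtime */
#define NSPANS  ((uintptr_t)1 << 19)     /* MaxArena32 / PageSize entries */

/* Old scheme: the index is the address shifted right by PageShift, so
   a pointer at or above 0x80000000 yields an index >= 1 << 19 and runs
   off the end of the spans array. */
static uintptr_t span_index_old(uintptr_t v)
{
	return v >> PageShift;
}

/* New scheme: subtract arena_start first, so the index is relative to
   the arena and stays below 1 << 19 for any address in
   [arena_start, arena_start + MaxArena32). */
static uintptr_t span_index_new(uintptr_t v, uintptr_t arena_start)
{
	return (v >> PageShift) - (arena_start >> PageShift);
}

int main(void)
{
	uintptr_t arena_start = 0x60000000;  /* hypothetical arena base */
	uintptr_t v = 0x90001000;            /* allocation above 0x80000000 */

	printf("old index %#lx (limit %#lx): %s\n",
	       (unsigned long)span_index_old(v), (unsigned long)NSPANS,
	       span_index_old(v) < NSPANS ? "in bounds" : "OUT OF BOUNDS");
	printf("new index %#lx (limit %#lx): %s\n",
	       (unsigned long)span_index_new(v, arena_start), (unsigned long)NSPANS,
	       span_index_new(v, arena_start) < NSPANS ? "in bounds" : "OUT OF BOUNDS");
	return 0;
}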
diff -r f3e5e6e92709 libgo/runtime/malloc.goc
--- a/libgo/runtime/malloc.goc	Wed Jan 08 13:58:47 2014 -0800
+++ b/libgo/runtime/malloc.goc	Thu Jan 09 15:12:36 2014 -0800
@@ -637,8 +637,7 @@
 	// (Manually inlined copy of runtime_MHeap_Lookup)
 	p = (uintptr)v>>PageShift;
-	if(sizeof(void*) == 8)
-		p -= (uintptr)runtime_mheap.arena_start >> PageShift;
+	p -= (uintptr)runtime_mheap.arena_start >> PageShift;
 	s = runtime_mheap.spans[p];
 	if(s->sizeclass == 0) {
diff -r f3e5e6e92709 libgo/runtime/mgc0.c
--- a/libgo/runtime/mgc0.c	Wed Jan 08 13:58:47 2014 -0800
+++ b/libgo/runtime/mgc0.c	Thu Jan 09 15:12:36 2014 -0800
@@ -269,8 +269,7 @@
 	// (Manually inlined copy of MHeap_LookupMaybe.)
 	k = (uintptr)obj>>PageShift;
 	x = k;
-	if(sizeof(void*) == 8)
-		x -= (uintptr)runtime_mheap.arena_start>>PageShift;
+	x -= (uintptr)runtime_mheap.arena_start>>PageShift;
 	s = runtime_mheap.spans[x];
 	if(s == nil || k < s->start || (byte*)obj >= s->limit || s->state != MSpanInUse)
 		return false;
@@ -453,8 +452,7 @@
 		// (Manually inlined copy of MHeap_LookupMaybe.)
 		k = (uintptr)obj>>PageShift;
 		x = k;
-		if(sizeof(void*) == 8)
-			x -= (uintptr)arena_start>>PageShift;
+		x -= (uintptr)arena_start>>PageShift;
 		s = runtime_mheap.spans[x];
 		if(s == nil || k < s->start || obj >= s->limit || s->state != MSpanInUse)
 			continue;
@@ -501,8 +499,7 @@
 		// Ask span about size class.
 		// (Manually inlined copy of MHeap_Lookup.)
 		x = (uintptr)obj >> PageShift;
-		if(sizeof(void*) == 8)
-			x -= (uintptr)arena_start>>PageShift;
+		x -= (uintptr)arena_start>>PageShift;
 		s = runtime_mheap.spans[x];
 		PREFETCH(obj);
@@ -617,8 +614,7 @@
 	if(t == nil)
 		return;
 	x = (uintptr)obj >> PageShift;
-	if(sizeof(void*) == 8)
-		x -= (uintptr)(runtime_mheap.arena_start)>>PageShift;
+	x -= (uintptr)(runtime_mheap.arena_start)>>PageShift;
 	s = runtime_mheap.spans[x];
 	objstart = (byte*)((uintptr)s->start<<PageShift);
 	if(s->sizeclass != 0) {
diff -r f3e5e6e92709 libgo/runtime/mheap.c
--- a/libgo/runtime/mheap.c	Wed Jan 08 13:58:47 2014 -0800
+++ b/libgo/runtime/mheap.c	Thu Jan 09 15:12:36 2014 -0800
@@ -73,8 +73,7 @@
 	// Map spans array, PageSize at a time.
 	n = (uintptr)h->arena_used;
-	if(sizeof(void*) == 8)
-		n -= (uintptr)h->arena_start;
+	n -= (uintptr)h->arena_start;
 	n = n / PageSize * sizeof(h->spans[0]);
 	n = ROUND(n, PageSize);
 	pagesize = getpagesize();
@@ -170,8 +169,7 @@
 		runtime_MSpan_Init(t, s->start + npage, s->npages - npage);
 		s->npages = npage;
 		p = t->start;
-		if(sizeof(void*) == 8)
-			p -= ((uintptr)h->arena_start>>PageShift);
+		p -= ((uintptr)h->arena_start>>PageShift);
 		if(p > 0)
 			h->spans[p-1] = s;
 		h->spans[p] = t;
@@ -189,8 +187,7 @@
 	s->elemsize = (sizeclass==0 ? s->npages<<PageShift : (uintptr)runtime_class_to_size[sizeclass]);
 	s->types.compression = MTypes_Empty;
 	p = s->start;
-	if(sizeof(void*) == 8)
-		p -= ((uintptr)h->arena_start>>PageShift);
+	p -= ((uintptr)h->arena_start>>PageShift);
 	for(n=0; n<npage; n++)
 		h->spans[p+n] = s;
 	return s;
@@ -258,8 +255,7 @@
 	s = runtime_FixAlloc_Alloc(&h->spanalloc);
 	runtime_MSpan_Init(s, (uintptr)v>>PageShift, ask>>PageShift);
 	p = s->start;
-	if(sizeof(void*) == 8)
-		p -= ((uintptr)h->arena_start>>PageShift);
+	p -= ((uintptr)h->arena_start>>PageShift);
 	h->spans[p] = s;
 	h->spans[p + s->npages - 1] = s;
 	s->state = MSpanInUse;
@@ -276,8 +272,7 @@
 	uintptr p;
 	p = (uintptr)v;
-	if(sizeof(void*) == 8)
-		p -= (uintptr)h->arena_start;
+	p -= (uintptr)h->arena_start;
 	return h->spans[p >> PageShift];
 }
@@ -298,8 +293,7 @@
 		return nil;
 	p = (uintptr)v>>PageShift;
 	q = p;
-	if(sizeof(void*) == 8)
-		q -= (uintptr)h->arena_start >> PageShift;
+	q -= (uintptr)h->arena_start >> PageShift;
 	s = h->spans[q];
 	if(s == nil || p < s->start || (byte*)v >= s->limit || s->state != MSpanInUse)
 		return nil;
@@ -346,8 +340,7 @@
 	// Coalesce with earlier, later spans.
 	p = s->start;
-	if(sizeof(void*) == 8)
-		p -= (uintptr)h->arena_start >> PageShift;
+	p -= (uintptr)h->arena_start >> PageShift;
 	if(p > 0 && (t = h->spans[p-1]) != nil && t->state != MSpanInUse) {
 		if(t->npreleased == 0) { // cant't touch this otherwise
 			tp = (uintptr*)(t->start<<PageShift);
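For reference, the lookup pattern that all of these hunks converge on can be modeled in isolation. This is a simplified sketch only; the MHeap and MSpan structs below are stripped down to the fields used here and are not the real runtime definitions, and the arena base address is an arbitrary example.

#include <stdint.h>
#include <stdlib.h>

enum { PageShift = 12 };

typedef struct MSpan { uintptr_t start; uintptr_t npages; } MSpan;

typedef struct MHeap {
	uintptr_t arena_start;   /* base of the 2G arena */
	MSpan **spans;           /* one entry per arena page, 1 << 19 entries */
} MHeap;

/* Mirrors the patched lookup: the page index is always taken relative
   to arena_start, on 32-bit and 64-bit alike, so any v in
   [arena_start, arena_start + MaxArena32) maps into the array even
   when the raw address is >= 0x80000000. */
static MSpan*
heap_lookup(MHeap *h, void *v)
{
	uintptr_t p;

	p = (uintptr_t)v;
	p -= h->arena_start;
	return h->spans[p >> PageShift];
}

int main(void)
{
	MHeap h;
	MSpan dummy = { 0x90000 /* page number of 0x90000000 */, 1 };
	int ok;

	h.arena_start = 0x60000000;
	h.spans = calloc((size_t)1 << 19, sizeof *h.spans);
	h.spans[(0x90000000u - h.arena_start) >> PageShift] = &dummy;

	/* A pointer above 0x80000000 now resolves to its span. */
	ok = heap_lookup(&h, (void *)(uintptr_t)0x90000000u) == &dummy;
	free(h.spans);
	return ok ? 0 : 1;
}

Before the patch the subtraction was guarded by sizeof(void*) == 8, so on 32-bit targets the equivalent of heap_lookup indexed the spans array with the absolute page number rather than the arena-relative one, which is exactly what overflowed once allocations crossed 0x80000000.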