Since I did not find any support for that idea, I think it is better to
keep things simple. So I am introducing an implementation of the
"ReScan" idea.
The diff is almost the same as in the first message in the thread. I
added some logic to uvm_map_findspace(). If it cannot find a
reasonable gap using the hint, it then "resets" the hint to the "start
of area" (using the logic from uvm_map_hint) and searches for any free
space. This is the last chance to allocate memory.
Tested with postgres on i386, 1G RAM.
Index: uvm/uvm_map.c
===================================================================
RCS file: /cvs/src/sys/uvm/uvm_map.c,v
retrieving revision 1.123
diff -u uvm/uvm_map.c
--- uvm/uvm_map.c 28 Aug 2009 00:40:03 -0000 1.123
+++ uvm/uvm_map.c 4 Mar 2010 16:11:30 -0000
@@ -86,8 +86,6 @@
#endif
#include <uvm/uvm.h>
-#undef RB_AUGMENT
-#define RB_AUGMENT(x) uvm_rb_augment(x)
#ifdef DDB
#include <uvm/uvm_ddb.h>
@@ -216,111 +214,33 @@
void uvm_rb_insert(struct vm_map *, struct vm_map_entry *);
void uvm_rb_remove(struct vm_map *, struct vm_map_entry *);
vsize_t uvm_rb_space(struct vm_map *, struct vm_map_entry *);
+void uvm_rb_fixup(struct vm_map *, struct vm_map_entry *);
+void uvmtree_space_remove_entry(struct vm_map *, struct vm_map_entry *);
+void uvmtree_space_insert_entry(struct vm_map *, struct vm_map_entry *);
#ifdef DEBUG
int _uvm_tree_sanity(struct vm_map *map, const char *name);
+void _uvm_tree_dump(struct vm_map *);
#endif
-vsize_t uvm_rb_subtree_space(struct vm_map_entry *);
-void uvm_rb_fixup(struct vm_map *, struct vm_map_entry *);
-static __inline int
-uvm_compare(struct vm_map_entry *a, struct vm_map_entry *b)
+static int
+uvm_compare_start(struct vm_map_entry *a, struct vm_map_entry *b)
{
- if (a->start < b->start)
- return (-1);
- else if (a->start > b->start)
- return (1);
-
- return (0);
+ return (a->start < b->start ? -1 : a->start > b->start);
}
-
-
-static __inline void
-uvm_rb_augment(struct vm_map_entry *entry)
+static int
+uvm_compare_space(struct vm_map_entry *a, struct vm_map_entry *b)
{
- entry->space = uvm_rb_subtree_space(entry);
+ return (a->space < b->space ? -1 : a->space > b->space);
}
-RB_PROTOTYPE(uvm_tree, vm_map_entry, rb_entry, uvm_compare);
+RB_PROTOTYPE(uvm_tree_start, vm_map_entry, rb_entry_start, uvm_compare_start);
+RB_PROTOTYPE(uvm_tree_space, vm_map_entry, rb_entry_space, uvm_compare_space);
-RB_GENERATE(uvm_tree, vm_map_entry, rb_entry, uvm_compare);
+RB_GENERATE(uvm_tree_start, vm_map_entry, rb_entry_start, uvm_compare_start);
+RB_GENERATE(uvm_tree_space, vm_map_entry, rb_entry_space, uvm_compare_space);
-vsize_t
-uvm_rb_space(struct vm_map *map, struct vm_map_entry *entry)
-{
- struct vm_map_entry *next;
- vaddr_t space;
- if ((next = entry->next) == &map->header)
- space = map->max_offset - entry->end;
- else {
- KASSERT(next);
- space = next->start - entry->end;
- }
- return (space);
-}
-
-vsize_t
-uvm_rb_subtree_space(struct vm_map_entry *entry)
-{
- vaddr_t space, tmp;
-
- space = entry->ownspace;
- if (RB_LEFT(entry, rb_entry)) {
- tmp = RB_LEFT(entry, rb_entry)->space;
- if (tmp > space)
- space = tmp;
- }
-
- if (RB_RIGHT(entry, rb_entry)) {
- tmp = RB_RIGHT(entry, rb_entry)->space;
- if (tmp > space)
- space = tmp;
- }
-
- return (space);
-}
-
-void
-uvm_rb_fixup(struct vm_map *map, struct vm_map_entry *entry)
-{
- /* We need to traverse to the very top */
- do {
- entry->ownspace = uvm_rb_space(map, entry);
- entry->space = uvm_rb_subtree_space(entry);
- } while ((entry = RB_PARENT(entry, rb_entry)) != NULL);
-}
-
-void
-uvm_rb_insert(struct vm_map *map, struct vm_map_entry *entry)
-{
- vaddr_t space = uvm_rb_space(map, entry);
- struct vm_map_entry *tmp;
-
- entry->ownspace = entry->space = space;
- tmp = RB_INSERT(uvm_tree, &(map)->rbhead, entry);
-#ifdef DIAGNOSTIC
- if (tmp != NULL)
- panic("uvm_rb_insert: duplicate entry?");
-#endif
- uvm_rb_fixup(map, entry);
- if (entry->prev != &map->header)
- uvm_rb_fixup(map, entry->prev);
-}
-
-void
-uvm_rb_remove(struct vm_map *map, struct vm_map_entry *entry)
-{
- struct vm_map_entry *parent;
-
- parent = RB_PARENT(entry, rb_entry);
- RB_REMOVE(uvm_tree, &(map)->rbhead, entry);
- if (entry->prev != &map->header)
- uvm_rb_fixup(map, entry->prev);
- if (parent)
- uvm_rb_fixup(map, parent);
-}
-
#ifdef DEBUG
#define uvm_tree_sanity(x,y) _uvm_tree_sanity(x,y)
#else
@@ -331,6 +251,29 @@
int
_uvm_tree_sanity(struct vm_map *map, const char *name)
{
+ struct vm_map_entry *tmp, *subtmp;
+ vsize_t space, prespace;
+
+ space = prespace = 0;
+ RB_FOREACH(tmp, uvm_tree_space, &map->rb_space) {
+ space = tmp->space;
+ subtmp = tmp;
+ for ( ; subtmp; subtmp = subtmp->next_with_equal_space)
+ if (subtmp->space != tmp->space) {
+ (void)_uvm_tree_dump(map);
+ panic("_uvm_tree_sanity: %s", name);
+ }
+ if (space <= prespace) {
+ /* error detected. printout tree and panic */
+ (void)_uvm_tree_dump(map);
+ panic("_uvm_tree_sanity: %s: space[0x%lx] <=
prespace[0x%lx]",
name, space, prespace);
+ }
+ prespace = space;
+ }
+ return 0;
+ /* We can do more - compare "->space" with uvm_rb_space() */
+/* I just don't know what to do with this...
+
struct vm_map_entry *tmp, *trtmp;
int n = 0, i = 1;
@@ -379,14 +322,269 @@
return (0);
error:
+*/
#ifdef DDB
/* handy breakpoint location for error case */
__asm(".globl treesanity_label\ntreesanity_label:");
#endif
- return (-1);
+/* return (-1);*/
}
-#endif
+void
+_uvm_tree_dump(struct vm_map *map)
+{
+ struct vm_map_entry *tmp, *subtmp;
+ vsize_t space, prespace;
+ int i;
+
+ printf("DUMP TREE:\nstart:\n");
+ RB_FOREACH(tmp, uvm_tree_start, &map->rb_start) {
+ printf("entry[0x%lx] start[0x%lx] end[0x%lx] space[0x%lx]\n",
tmp,
tmp->start, tmp->end, tmp->space);
+ }
+ printf("space:\n");
+ space = prespace = 0;
+ RB_FOREACH(tmp, uvm_tree_space, &map->rb_space) {
+ space = tmp->space;
+ i = 0;
+ for (subtmp = tmp; subtmp; subtmp =
subtmp->next_with_equal_space) {
+ printf("%sentry[0x%lx] start[0x%lx] end[0x%lx]
space[0x%lx]\n", i
? " " : "-", subtmp, subtmp->start, subtmp->end, subtmp->space);
+ i = 1;
+ }
+ if (space <= prespace) {
+ printf("space not grow!\n");
+ }
+ prespace = space;
+ }
+ printf("=========\n");
+}
+#endif /* DEBUG */
+
+vsize_t
+uvm_rb_space(struct vm_map *map, struct vm_map_entry *entry)
+{
+ struct vm_map_entry *next;
+ vaddr_t space;
+ /* Is there are any entry after our start? */
+ if ((next = RB_NEXT(uvm_tree_start, &map->rb_start, entry)))
+ space = next->start - entry->end;
+ else
+ space = map->max_offset - entry->end;
+
+ return (space);
+}
+
+void
+uvm_rb_fixup(struct vm_map *map, struct vm_map_entry *entry)
+{
+ /*
+ * Here we need to check "entry"'s space and, if it changed, reposition
+ * it in the "space" tree.
+ *
+ * Be careful about order of operations:
+ * - first, remove UNTOUCHED entry
+ * (because RB_FIND/NFIND search it by it's "->space" variable),
+ * - THEN change it's ->space variable, and then insert back.
+ */
+ if (entry->space != uvm_rb_space(map, entry)) {
+ /*
+ * First, remove entry from "space" tree
+ */
+ uvmtree_space_remove_entry(map, entry);
+ /*
+ * Ok. Now we can recalculate and set new space of that "entry"
+ * and set it into appropriate place in "space" tree
+ */
+ entry->space = uvm_rb_space(map, entry);
+ /*
+ * Insert the "entry" into it's correct place in "space" tree
+ */
+ uvmtree_space_insert_entry(map, entry);
+ }
+}
+
+void
+uvm_rb_insert(struct vm_map *map, struct vm_map_entry *entry)
+{
+ struct vm_map_entry *tmp, *prev;
+
+ uvm_tree_sanity(map, "uvm_rb_insert entry");
+
+ /* First, insert new entry into "start" tree */
+ tmp = RB_INSERT(uvm_tree_start, &map->rb_start, entry);
+ if (tmp != NULL)
+ panic("uvm_rb_insert: duplicate entry?");
+
+ /* If there are no entries with bigger address - we are last */
+ if (RB_NEXT(uvm_tree_start, &map->rb_start, entry) == NULL)
+ map->last_entry_in_tree = entry;
+
+ /* Second, insert new entry into "space" tree */
+ entry->space = uvm_rb_space(map, entry);
+ uvmtree_space_insert_entry(map, entry);
+ /*
+ * Here, when "entry" was already inserted into "start" tree, we need
+ * to check previous entry (if exists) in "start" tree - recalculate
+ * it's new space and then re-set it's position in "space" tree.
+ */
+ prev = RB_PREV(uvm_tree_start, &map->rb_start, entry);
+ if (prev)
+ /*
+ * Here it is. Recalculate it's space and reposition it,
+ * if needed.
+ */
+ uvm_rb_fixup(map, prev);
+}
+
+void
+uvm_rb_remove(struct vm_map *map, struct vm_map_entry *entry)
+{
+ struct vm_map_entry *prev, *next;
+
+ uvm_tree_sanity(map, "uvm_rb_remove entry");
+
+ /* Is that entry exist in tree? */
+ if (!RB_FIND(uvm_tree_start, &map->rb_start, entry))
+ panic("uvm_rb_remove: entry not exist in rb_start tree!");
+
+ /*
+ * Remember previous entry in "start" tree (if any).
+ * We will need to recalculate it's free space after removing complete.
+ * Also we need to remember next entry to check map->last_entry_in_tree
+ */
+ prev = RB_PREV(uvm_tree_start, &map->rb_start, entry);
+ next = RB_NEXT(uvm_tree_start, &map->rb_start, entry);
+
+ /* Now remove entry from "start" tree */
+ RB_REMOVE(uvm_tree_start, &map->rb_start, entry);
+ /* And from "space" tree. */
+ uvmtree_space_remove_entry(map, entry);
+
+ /*
+ * Now, when "entry" was already removed from "start" tree, we need to
+ * check if previous entry existed. If so, then it's free space need
+ * to be recalculated and then it need to re-set into it's new place
+ * in "space" tree.
+ * While here, check if there are no next entry in the tree. If so then
+ * the previous becomes last_entry_in_tree. If there are no previous
+ * entry then there are no entries at all.
+ */
+ if (prev) {
+ /*
+ * Here it is. Recalculate it's space and reposition it,
+ * if needed.
+ */
+ uvm_rb_fixup(map, prev);
+
+ if (next == NULL)
+ map->last_entry_in_tree = prev;
+ } else if (next == NULL)
+ map->last_entry_in_tree = NULL;
+}
+
+void
+uvmtree_space_insert_entry(struct vm_map *map, struct vm_map_entry *entry)
+{
+ struct vm_map_entry *tmp;
+
+ /* Needless to keep zero space entries in uvm_tree_space */
+ if (entry->space == 0)
+ return;
+ /*
+ * Check if there are already exists old entry with same space
+ * as "entry"? If so, link "entry" into existing entry's chain.
+ */
+ if ((tmp = RB_FIND(uvm_tree_space, &map->rb_space, entry))) {
+ /*
+ * Exists. Append "entry" to chain.
+ */
+ while(tmp->next_with_equal_space)
+ tmp = tmp->next_with_equal_space;
+ tmp->next_with_equal_space = entry;
+ /*
+ * Don't forget to close it's "next_with_equal_space" pointer!
+ * (Or things become very, very bad and will be much,
+ * much worse... :-))
+ */
+ entry->next_with_equal_space = NULL;
+ } else {
+ /*
+ * There are no entries with such space in the tree.
+ * Just insert our entry with brand new space.
+ */
+ RB_INSERT(uvm_tree_space, &map->rb_space, entry);
+ /*
+ * Don't forget to close it's "next_with_equal_space" pointer!
+ * (Or things become very, very bad and will be much,
+ * much worse... :-))
+ */
+ entry->next_with_equal_space = NULL;
+ }
+}
+
+void
+uvmtree_space_remove_entry(struct vm_map *map, struct vm_map_entry *entry)
+{
+ struct vm_map_entry *tmp;
+
+ /* There are no entries with zero space in uvm_tree_space */
+ if (entry->space == 0)
+ return;
+ /*
+ * There MUST be entry with that space in tree.
+ * At least the "entry" itself.
+ */
+ if ((tmp = RB_FIND(uvm_tree_space, &map->rb_space, entry)) == NULL)
+ panic("uvmtree_space_remove_entry: entry not exist in rb_space "
+ "tree!");
+ /*
+ * Is the "entry" the header of chain, or just some underneath element?
+ */
+ if (entry == tmp) {
+ /*
+ * Yes, "entry" is the head of chain.
+ * In this case we need to remove it from tree, get the next
+ * element from chain (it becomes new "header")
+ * and insert this new "header" into tree in place of removed
+ * "entry".
+ */
+ RB_REMOVE(uvm_tree_space, &map->rb_space, entry);
+ /*
+ * Get the next element in chain, if any.
+ * If it exists - it is the new header now, so insert it into
+ * tree in place of "entry"
+ */
+ if (entry->next_with_equal_space)
+ RB_INSERT(uvm_tree_space, &map->rb_space,
+ entry->next_with_equal_space);
+ /*
+ * Close our removed "entry". Just to be clear.
+ */
+ entry->next_with_equal_space = NULL;
+ } else {
+ /*
+ * No, "entry" is just some underneath element in chain. We can
+ * just go down by chain, extract entry, and remove it.
+ */
+ while(tmp->next_with_equal_space != entry)
+ tmp = tmp->next_with_equal_space;
+ /*
+ * Now, "tmp" is set to previous element before "entry" in the
+ * chain. Don't forget about underneath elements (if any) after
+ * "entry" -- link them back into chain.
+ * If there are no such elements, don't forget to close the
+ * chain.
+ * (Or things become very, very bad and will be much,
+ * much worse... :-))
+ */
+ if (entry->next_with_equal_space)
+ tmp->next_with_equal_space =
+ entry->next_with_equal_space;
+ else
+ tmp->next_with_equal_space = NULL;
+ }
+}
+
+
/*
* uvm_mapent_alloc: allocate a map entry
*/
@@ -606,18 +804,29 @@
new_entry = uvm_mapent_alloc(map);
uvm_mapent_copy(entry, new_entry); /* entry -> new_entry */
+ /*
+ * The "entry"'s space is not touched so nothing to do with it's space.
+ * But it's start changed, so remove it from "start" tree and then
+ * insert it back into it's new position.
+ * Be careful about order - first remove, THEN change start.
+ */
+ RB_REMOVE(uvm_tree_start, &map->rb_start, entry);
+
new_entry->end = start;
new_adj = start - new_entry->start;
if (entry->object.uvm_obj)
entry->offset += new_adj; /* shift start over */
- /* Does not change order for the RB tree */
entry->start = start;
+ /* start changed, return "entry" back into the "start" tree */
+ RB_INSERT(uvm_tree_start, &map->rb_start, entry);
+
if (new_entry->aref.ar_amap) {
amap_splitref(&new_entry->aref, &entry->aref, new_adj);
}
+ /* new_entry inserted as usually */
uvm_map_entry_link(map, entry->prev, new_entry);
if (UVM_ET_ISSUBMAP(entry)) {
@@ -665,9 +874,13 @@
if (entry->aref.ar_amap)
amap_splitref(&entry->aref, &new_entry->aref, new_adj);
-
- uvm_rb_fixup(map, entry);
-
+ /*
+ * The "entry's start is not touched and we not have to do anything
+ * with it's position in "start" tree.
+ * When new_entry will be inserted into the "start" tree it will find
+ * the "entry" behind and take care of it's space and so it's
+ * position in "space" tree.
+ */
uvm_map_entry_link(map, entry, new_entry);
if (UVM_ET_ISSUBMAP(entry)) {
@@ -906,6 +1119,7 @@
uobj->pgops->pgo_detach(uobj);
prev_entry->end += size;
+ /* don't forget to repair entry's space */
uvm_rb_fixup(map, prev_entry);
map->size += size;
if (p && uobj == NULL)
@@ -985,15 +1199,6 @@
if (p && uobj == NULL)
p->p_vmspace->vm_dused += atop(size);
-
- /*
- * Update the free space hint
- */
-
- if ((map->first_free == prev_entry) &&
- (prev_entry->end >= new_entry->start))
- map->first_free = new_entry;
-
#ifdef KVA_GUARDPAGES
/*
* Create the guard entry.
@@ -1037,9 +1242,7 @@
uvm_map_lookup_entry(struct vm_map *map, vaddr_t address,
struct vm_map_entry **entry)
{
- struct vm_map_entry *cur;
- struct vm_map_entry *last;
- int use_tree = 0;
+ struct vm_map_entry *cur, *last, search;
UVMHIST_FUNC("uvm_map_lookup_entry");
UVMHIST_CALLED(maphist);
@@ -1079,49 +1282,93 @@
cur, 0, 0, 0);
return (TRUE);
}
-
- if (map->nentries > 30)
- use_tree = 1;
- } else {
+ /*} else {*/
/*
* go from start to hint, *inclusively*
*/
- last = cur->next;
- cur = map->header.next;
- use_tree = 1;
+ /*last = cur->next;
+ cur = map->header.next;*/
}
+ /*
+ * Let's add another quick check if we looking for address after
+ * last entry.
+ */
+ if (map->last_entry_in_tree && address >=
map->last_entry_in_tree->start) {
+ *entry = map->last_entry_in_tree;
+ SAVE_HINT(map, map->hint, map->last_entry_in_tree);
+ if (address < map->last_entry_in_tree->end)
+ return (TRUE);
+ else
+ return (FALSE);
+ }
uvm_tree_sanity(map, __func__);
- if (use_tree) {
- struct vm_map_entry *prev = &map->header;
- cur = RB_ROOT(&map->rbhead);
-
- /*
- * Simple lookup in the tree. Happens when the hint is
- * invalid, or nentries reach a threshold.
- */
- while (cur) {
- if (address >= cur->start) {
- if (address < cur->end) {
- *entry = cur;
- SAVE_HINT(map, map->hint, cur);
+ /* use the tree unconditionally */
+ /* Try to find any entry start at or after the address */
+ search.start = address;
+ if ((cur = RB_NFIND(uvm_tree_start, &map->rb_start, &search))) {
+ /* Ok, here it is. Is it start exactly at address? */
+ if (cur->start == address) {
+ *entry = cur;
+ SAVE_HINT(map, map->hint, cur);
+ return (TRUE);
+ }
+ /*
+ * Let's check previous entry. It must cover that
+ * address. If not, or entry not exists, then there are
+ * no such entry at all.
+ */
+ cur = RB_PREV(uvm_tree_start, &map->rb_start, cur);
+ /*
+ * Here "cur" set to entry which prior to address
+ * It may contain address, or address somewhere in
+ * it's free space.
+ * ...if there were entry prior to address...,
+ * if not, then "cur" is NULL.
+ */
+ if (cur) {
+ *entry = cur;
+ SAVE_HINT(map, map->hint, cur);
+ if (address < cur->end)
return (TRUE);
- }
- prev = cur;
- cur = RB_RIGHT(cur, rb_entry);
- } else
- cur = RB_LEFT(cur, rb_entry);
+ else
+ return (FALSE);
+ }
+ /* cur == NULL here */
+ } else {
+ /*
+ * This must be the same as the:
+ * cur = RB_MAX(uvm_tree_start, &map->rb_start);
+ */
+ cur = map->last_entry_in_tree;
+ /*
+ * If there are any entry in the tree - the last entry
+ * lay before the address. Just check is the address
+ * is covered by last entry or not.
+ * If there are no entries at all, then only
+ * map->header exists...
+ */
+ if (cur) {
+ *entry = cur;
+ SAVE_HINT(map, map->hint, cur);
+ if (address < cur->end)
+ return (TRUE);
+ else
+ return (FALSE);
+ }
+ /* cur == NULL here */
}
- *entry = prev;
+ /* no entry found, return map->header as linearsearch do */
+ *entry = &map->header;
+ SAVE_HINT(map, map->hint, *entry);
UVMHIST_LOG(maphist,"<- failed!",0,0,0,0);
return (FALSE);
- }
/*
* search linearly
*/
-
+ /*listsearch:*/
while (cur != last) {
if (cur->end > address) {
if (address >= cur->start) {
@@ -1270,10 +1517,11 @@
vaddr_t *result, struct uvm_object *uobj, voff_t uoffset, vsize_t align,
int flags)
{
- struct vm_map_entry *entry, *next, *tmp;
- struct vm_map_entry *child, *prev = NULL;
+ struct vm_map_entry *entry, *next, *tmp, tmpsearch, *found_entry;
+ vaddr_t end, orig_hint, saved_hint;
+ vm_prot_t prot = UVM_PROTECTION(flags);
+ int retry;
- vaddr_t end, orig_hint;
UVMHIST_FUNC("uvm_map_findspace");
UVMHIST_CALLED(maphist);
@@ -1304,28 +1552,14 @@
return(NULL);
}
- /*
- * Look for the first possible address; if there's already
- * something at this address, we have to start after it.
- */
-
- if ((flags & UVM_FLAG_FIXED) == 0 && hint == map->min_offset) {
- if ((entry = map->first_free) != &map->header)
- hint = entry->end;
- } else {
- if (uvm_map_lookup_entry(map, hint, &tmp)) {
+ /* process UVM_FLAG_FIXED case separately - it has no alternatives */
+ if (flags & UVM_FLAG_FIXED) {
+ if (uvm_map_lookup_entry(map, hint, &entry)) {
/* "hint" address already in use ... */
- if (flags & UVM_FLAG_FIXED) {
- UVMHIST_LOG(maphist,"<- fixed & VA in use",
- 0, 0, 0, 0);
- return(NULL);
- }
- hint = tmp->end;
+ UVMHIST_LOG(maphist,"<- fixed & VA in use",
+ 0, 0, 0, 0);
+ return(NULL);
}
- entry = tmp;
- }
-
- if (flags & UVM_FLAG_FIXED) {
end = hint + length;
if (end > map->max_offset || end < hint) {
UVMHIST_LOG(maphist,"<- failed (off end)", 0,0,0,0);
@@ -1338,90 +1572,78 @@
return(NULL); /* only one shot at it ... */
}
- /* Try to find the space in the red-black tree */
+ retry = 1;
+ ReScan:
+ saved_hint = hint;
- /* Check slot before any entry */
+ if (uvm_map_lookup_entry(map, hint, &entry))
+ if (entry != &map->header)
+ hint = entry->end;
+ found_entry = entry;
+ /* Check slot found by hint */
if (uvm_map_spacefits(map, &hint, length, entry->next, uoffset, align))
goto found;
-
- /* If there is not enough space in the whole tree, we fail */
- tmp = RB_ROOT(&map->rbhead);
- if (tmp == NULL || tmp->space < length)
+
+ /* Try to find the space in the red-black tree
+ *
+ * If there are no elements in tree or not enough space
+ * in the whole tree, we fail.
+ */
+ if (RB_EMPTY(&map->rb_space))
goto error;
- /* Find an entry close to hint that has enough space */
- for (; tmp;) {
- if (tmp->end >= hint &&
- (prev == NULL || tmp->end < prev->end)) {
- if (tmp->ownspace >= length)
- prev = tmp;
- else if ((child = RB_RIGHT(tmp, rb_entry)) != NULL &&
- child->space >= length)
- prev = tmp;
+ tmpsearch.space = length;
+ tmp = RB_NFIND(uvm_tree_space, &map->rb_space, &tmpsearch);
+ if (tmp == NULL)
+ goto error;
+ /*
+ * Find an entry close to hint that has enough space.
+ *
+ * We search for entry with smallest free space we needed.
+ * It is not exactly the same what listsearch do (it find the first
+ * "useful" entry, whatever it's space, it just must be more than...).
+ * The tree search will find the entry with smallest space (or one of
+ * such entries). So it might better compact entire map.
+ */
+ /* loop from entry found above to the end of tree */
+ for (/* tmp */; tmp != NULL;
+ tmp = RB_NEXT(uvm_tree_space, &map->rb_space, tmp)) {
+ for (entry = tmp; entry; entry = entry->next_with_equal_space) {
+ /* restore hint before each test */
+ hint = saved_hint;
+ /* quick skip entries before the hint */
+ if (hint > (entry->end + entry->space))
+ continue;
+ if (hint < entry->end)
+ hint = entry->end;
+ if (uvm_map_spacefits(map, &hint, length, entry->next,
+ uoffset, align))
+ goto found;
}
- if (tmp->end < hint)
- child = RB_RIGHT(tmp, rb_entry);
- else if (tmp->end > hint)
- child = RB_LEFT(tmp, rb_entry);
- else {
- if (tmp->ownspace >= length)
- break;
- child = RB_RIGHT(tmp, rb_entry);
- }
- if (child == NULL || child->space < length)
- break;
- tmp = child;
}
-
- if (tmp != NULL && hint < tmp->end + tmp->ownspace) {
- /*
- * Check if the entry that we found satifies the
- * space requirement
- */
- if (hint < tmp->end)
- hint = tmp->end;
- if (uvm_map_spacefits(map, &hint, length, tmp->next, uoffset,
- align)) {
- entry = tmp;
- goto found;
- } else if (tmp->ownspace >= length)
- goto listsearch;
- }
- if (prev == NULL)
- goto error;
-
- hint = prev->end;
- if (uvm_map_spacefits(map, &hint, length, prev->next, uoffset,
- align)) {
- entry = prev;
- goto found;
- } else if (prev->ownspace >= length)
- goto listsearch;
-
- tmp = RB_RIGHT(prev, rb_entry);
- for (;;) {
- KASSERT(tmp && tmp->space >= length);
- child = RB_LEFT(tmp, rb_entry);
- if (child && child->space >= length) {
- tmp = child;
- continue;
+ /*
+ * If unaligned search still not found anything, let's try from start
+ * of virtual space. It's our last chance...
+ */
+ if (retry && !align && vm_map_pmap(map) != pmap_kernel()) {
+ retry = 0;
+#ifdef __i386__
+ if (prot & VM_PROT_EXECUTE) {
+ hint = PAGE_SIZE * 2;
+ goto ReScan;
}
- if (tmp->ownspace >= length)
- break;
- tmp = RB_RIGHT(tmp, rb_entry);
+#endif
+#if !defined(__vax__)
+ hint = MAXDSIZ;
+#else
+ hint = BRKSIZ;
+#endif
+ goto ReScan;
}
-
- hint = tmp->end;
- if (uvm_map_spacefits(map, &hint, length, tmp->next, uoffset, align)) {
- entry = tmp;
- goto found;
- }
+ /* Nothing found */
+ goto error;
- /*
- * The tree fails to find an entry because of offset or alignment
- * restrictions. Search the list instead.
- */
- listsearch:
+ entry = found_entry;
/*
* Look through the rest of the map, trying to fit a new region in
* the gap between existing regions, or after the very last region.
@@ -1561,13 +1783,6 @@
}
/*
- * Save the free space hint
- */
-
- if (map->first_free->start >= start)
- map->first_free = entry->prev;
-
- /*
* note: we now re-use first_entry for a different task. we remove
* a number of map entries from the map and save them in a linked
* list headed by "first_entry". once we remove them from the map
@@ -1903,8 +2118,6 @@
/* critical: flush stale hints out of map */
SAVE_HINT(map, map->hint, newents);
- if (map->first_free == oldent)
- map->first_free = last;
last->next = oldent->next;
last->next->prev = last;
@@ -1931,8 +2144,6 @@
/* critical: flush stale hints out of map */
SAVE_HINT(map, map->hint, oldent->prev);
- if (map->first_free == oldent)
- map->first_free = oldent->prev;
/* NULL list of new entries: just remove the old one */
uvm_map_entry_unlink(map, oldent);
@@ -2193,8 +2404,6 @@
/* purge possible stale hints from srcmap */
if (flags & UVM_EXTRACT_REMOVE) {
SAVE_HINT(srcmap, srcmap->hint, orig_entry->prev);
- if (srcmap->first_free->start >= start)
- srcmap->first_free = orig_entry->prev;
}
entry = orig_entry;
@@ -2473,7 +2682,7 @@
uvm_map_inherit(struct vm_map *map, vaddr_t start, vaddr_t end,
vm_inherit_t new_inheritance)
{
- struct vm_map_entry *entry, *temp_entry;
+ struct vm_map_entry *entry;
UVMHIST_FUNC("uvm_map_inherit"); UVMHIST_CALLED(maphist);
UVMHIST_LOG(maphist,"(map=%p,start=0x%lx,end=0x%lx,new_inh=0x%lx)",
map, start, end, new_inheritance);
@@ -2492,11 +2701,10 @@
VM_MAP_RANGE_CHECK(map, start, end);
- if (uvm_map_lookup_entry(map, start, &temp_entry)) {
- entry = temp_entry;
+ if (uvm_map_lookup_entry(map, start, &entry)) {
UVM_MAP_CLIP_START(map, entry, start);
} else {
- entry = temp_entry->next;
+ entry = entry->next;
}
while ((entry != &map->header) && (entry->start < end)) {
@@ -2519,18 +2727,29 @@
int
uvm_map_advice(struct vm_map *map, vaddr_t start, vaddr_t end, int new_advice)
{
- struct vm_map_entry *entry, *temp_entry;
+ struct vm_map_entry *entry;
UVMHIST_FUNC("uvm_map_advice"); UVMHIST_CALLED(maphist);
UVMHIST_LOG(maphist,"(map=%p,start=0x%lx,end=0x%lx,new_adv=0x%lx)",
map, start, end, new_advice);
+ switch (new_advice) {
+ case MADV_NORMAL:
+ case MADV_RANDOM:
+ case MADV_SEQUENTIAL:
+ /* nothing special here */
+ break;
+
+ default:
+ UVMHIST_LOG(maphist,"<- done (INVALID ARG)",0,0,0,0);
+ return (EINVAL);
+ }
+
vm_map_lock(map);
VM_MAP_RANGE_CHECK(map, start, end);
- if (uvm_map_lookup_entry(map, start, &temp_entry)) {
- entry = temp_entry;
+ if (uvm_map_lookup_entry(map, start, &entry)) {
UVM_MAP_CLIP_START(map, entry, start);
} else {
- entry = temp_entry->next;
+ entry = entry->next;
}
/*
@@ -2539,19 +2758,6 @@
while ((entry != &map->header) && (entry->start < end)) {
UVM_MAP_CLIP_END(map, entry, end);
-
- switch (new_advice) {
- case MADV_NORMAL:
- case MADV_RANDOM:
- case MADV_SEQUENTIAL:
- /* nothing special here */
- break;
-
- default:
- vm_map_unlock(map);
- UVMHIST_LOG(maphist,"<- done (INVALID ARG)",0,0,0,0);
- return (EINVAL);
- }
entry->advice = new_advice;
entry = entry->next;
}
@@ -3400,8 +3606,10 @@
map->min_offset = start;
uvm_tree_sanity(map, "resize enter");
map->max_offset = end;
- if (map->header.prev != &map->header)
+ if (map->header.prev != &map->header) {
+ /* XXX don't completely checked is that needed at all */
uvm_rb_fixup(map, map->header.prev);
+ }
uvm_tree_sanity(map, "resize leave");
vm_map_unlock(map);
@@ -3490,15 +3698,16 @@
uvm_map_setup(vm_map_t map, vaddr_t min, vaddr_t max, int flags)
{
- RB_INIT(&map->rbhead);
+ RB_INIT(&map->rb_start);
+ RB_INIT(&map->rb_space);
map->header.next = map->header.prev = &map->header;
+ map->last_entry_in_tree = NULL; /* there are no entries in tree yet */
map->nentries = 0;
map->size = 0;
map->ref_count = 1;
map->min_offset = min;
map->max_offset = max;
map->flags = flags;
- map->first_free = &map->header;
map->hint = &map->header;
map->timestamp = 0;
rw_init(&map->lock, "vmmaplk");
Index: uvm/uvm_map.h
===================================================================
RCS file: /cvs/src/sys/uvm/uvm_map.h,v
retrieving revision 1.42
diff -u uvm/uvm_map.h
--- uvm/uvm_map.h 28 Aug 2009 00:40:03 -0000 1.42
+++ uvm/uvm_map.h 4 Mar 2010 16:11:32 -0000
@@ -133,9 +133,10 @@
* Also included is control information for virtual copy operations.
*/
struct vm_map_entry {
- RB_ENTRY(vm_map_entry) rb_entry; /* tree information */
- vaddr_t ownspace; /* free space after */
- vaddr_t space; /* space in subtree */
+ RB_ENTRY(vm_map_entry) rb_entry_start; /* uvm_tree_start information */
+ RB_ENTRY(vm_map_entry) rb_entry_space; /* uvm_tree_space information */
+ vaddr_t space; /* free space after */
+ struct vm_map_entry *next_with_equal_space; /* used only for linked
list entries in rb_space tree */
struct vm_map_entry *prev; /* previous entry */
struct vm_map_entry *next; /* next entry */
vaddr_t start; /* start address */
@@ -218,7 +219,8 @@
struct vm_map {
struct pmap * pmap; /* Physical map */
struct rwlock lock; /* Lock for map data */
- RB_HEAD(uvm_tree, vm_map_entry) rbhead; /* Tree for entries */
+ RB_HEAD(uvm_tree_start, vm_map_entry) rb_start; /* Tree by start */
+ RB_HEAD(uvm_tree_space, vm_map_entry) rb_space; /* Tree by space */
struct vm_map_entry header; /* List of entries */
int nentries; /* Number of entries */
vsize_t size; /* virtual size */
@@ -229,9 +231,11 @@
vm_map_entry_t first_free; /* First free space hint */
int flags; /* flags */
unsigned int timestamp; /* Version number */
+#define last_entry_in_tree header.next_with_equal_space
#define min_offset header.start
#define max_offset header.end
};
+/* last_entry_in_tree used as pointer to last entry in uvm_tree_start */
/* vm_map flags */
#define VM_MAP_PAGEABLE 0x01 /* ro: entries are
pageable */
--
antonvm