Diffstat (limited to 'sys')
-rw-r--r--  sys/uvm/uvm_extern.h |   3
-rw-r--r--  sys/uvm/uvm_map.c    | 435
-rw-r--r--  sys/uvm/uvm_map.h    |   6
-rw-r--r--  sys/uvm/uvm_map_i.h  |   3
4 files changed, 427 insertions, 20 deletions
diff --git a/sys/uvm/uvm_extern.h b/sys/uvm/uvm_extern.h
index 39d6fcb6767..aafd01207ec 100644
--- a/sys/uvm/uvm_extern.h
+++ b/sys/uvm/uvm_extern.h
@@ -1,4 +1,4 @@
-/* $OpenBSD: uvm_extern.h,v 1.40 2001/12/19 08:58:07 art Exp $ */
+/* $OpenBSD: uvm_extern.h,v 1.41 2002/02/25 00:20:45 provos Exp $ */
 /* $NetBSD: uvm_extern.h,v 1.57 2001/03/09 01:02:12 chs Exp $ */

 /*
@@ -373,6 +373,7 @@ extern struct uvmexp uvmexp;
  */
 #include <sys/vmmeter.h>
 #include <sys/queue.h>
+#include <sys/tree.h>
 #include <uvm/uvm_param.h>
 #include <sys/lock.h>
 #include <uvm/uvm_page.h>
diff --git a/sys/uvm/uvm_map.c b/sys/uvm/uvm_map.c
index 4c02265a654..0bf712b09ed 100644
--- a/sys/uvm/uvm_map.c
+++ b/sys/uvm/uvm_map.c
@@ -1,4 +1,4 @@
-/* $OpenBSD: uvm_map.c,v 1.38 2002/02/18 10:02:20 art Exp $ */
+/* $OpenBSD: uvm_map.c,v 1.39 2002/02/25 00:20:45 provos Exp $ */
 /* $NetBSD: uvm_map.c,v 1.86 2000/11/27 08:40:03 chs Exp $ */

 /*
@@ -83,6 +83,7 @@
 #include <sys/shm.h>
 #endif

+#define RB_AUGMENT(x) uvm_rb_augment(x)
 #define UVM_MAP
 #include <uvm/uvm.h>

@@ -90,7 +91,6 @@
 #include <uvm/uvm_ddb.h>
 #endif

-
 struct uvm_cnt uvm_map_call, map_backmerge, map_forwmerge;
 struct uvm_cnt uvm_mlk_call, uvm_mlk_hint;
 const char vmmapbsy[] = "vmmapbsy";
@@ -134,6 +134,7 @@ vaddr_t uvm_maxkaddr;
         (entry)->next = (after_where)->next; \
         (entry)->prev->next = (entry); \
         (entry)->next->prev = (entry); \
+        uvm_rb_insert(map, entry); \
 } while (0)

 /*
@@ -145,6 +146,7 @@ vaddr_t uvm_maxkaddr;
         (map)->nentries--; \
         (entry)->next->prev = (entry)->prev; \
         (entry)->prev->next = (entry)->next; \
+        uvm_rb_remove(map, entry); \
 } while (0)

 /*
@@ -185,6 +187,164 @@ static void uvm_map_entry_unwire __P((vm_map_t, vm_map_entry_t));
 static void uvm_map_reference_amap __P((vm_map_entry_t, int));
 static void uvm_map_unreference_amap __P((vm_map_entry_t, int));

+int uvm_map_spacefits __P((vm_map_t, vaddr_t *, vsize_t, vm_map_entry_t, voff_t, vsize_t));
+
+int _uvm_tree_sanity(vm_map_t map, char *name);
+static int uvm_rb_subtree_space __P((vm_map_entry_t));
+
+static __inline int
+uvm_compare(vm_map_entry_t a, vm_map_entry_t b)
+{
+        if (a->start < b->start)
+                return (-1);
+        else if (a->start > b->start)
+                return (1);
+
+        return (0);
+}
+
+
+static __inline void
+uvm_rb_augment(vm_map_entry_t entry)
+{
+        entry->space = uvm_rb_subtree_space(entry);
+}
+
+RB_PROTOTYPE(uvm_tree, vm_map_entry, rb_entry, uvm_compare);
+
+RB_GENERATE(uvm_tree, vm_map_entry, rb_entry, uvm_compare);
+
+static __inline int
+uvm_rb_space(vm_map_t map, vm_map_entry_t entry)
+{
+        vm_map_entry_t next;
+        vaddr_t space;
+
+        if ((next = entry->next) == &map->header)
+                space = map->max_offset - entry->end;
+        else {
+                KASSERT(next);
+                space = next->start - entry->end;
+        }
+        return (space);
+}
+
+static int
+uvm_rb_subtree_space(vm_map_entry_t entry)
+{
+        vaddr_t space, tmp;
+
+        space = entry->ownspace;
+        if (RB_LEFT(entry, rb_entry)) {
+                tmp = RB_LEFT(entry, rb_entry)->space;
+                if (tmp > space)
+                        space = tmp;
+        }
+
+        if (RB_RIGHT(entry, rb_entry)) {
+                tmp = RB_RIGHT(entry, rb_entry)->space;
+                if (tmp > space)
+                        space = tmp;
+        }
+
+        return (space);
+}
+
+static __inline void
+uvm_rb_fixup(vm_map_t map, vm_map_entry_t entry)
+{
+        /* We need to traverse to the very top */
+        do {
+                entry->ownspace = uvm_rb_space(map, entry);
+                entry->space = uvm_rb_subtree_space(entry);
+        } while ((entry = RB_PARENT(entry, rb_entry)) != NULL);
+}
+
+static __inline void
+uvm_rb_insert(vm_map_t map, vm_map_entry_t entry)
+{
+        vaddr_t space = uvm_rb_space(map, entry);
+
+        entry->ownspace = entry->space = space;
+        RB_INSERT(uvm_tree, &(map)->rbhead, entry);
+        uvm_rb_fixup(map, entry);
+        if (entry->prev != &map->header)
+                uvm_rb_fixup(map, entry->prev);
+}
+
+static __inline void
+uvm_rb_remove(vm_map_t map, vm_map_entry_t entry)
+{
+        vm_map_entry_t parent;
+
+        parent = RB_PARENT(entry, rb_entry);
+        RB_REMOVE(uvm_tree, &(map)->rbhead, entry);
+        if (entry->prev != &map->header)
+                uvm_rb_fixup(map, entry->prev);
+        if (parent)
+                uvm_rb_fixup(map, parent);
+}
+
+#define uvm_tree_sanity(x,y)
+
+int
+_uvm_tree_sanity(vm_map_t map, char *name)
+{
+        vm_map_entry_t tmp, trtmp;
+        int n = 0, i = 1;
+
+        RB_FOREACH(tmp, uvm_tree, &map->rbhead) {
+                if (tmp->ownspace != uvm_rb_space(map, tmp)) {
+                        printf("%s: %d/%d ownspace %x != %x %s\n",
+                            name, n + 1, map->nentries,
+                            tmp->ownspace, uvm_rb_space(map, tmp),
+                            tmp->next == &map->header ? "(last)" : "");
+                        goto error;
+                }
+        }
+        trtmp = NULL;
+        RB_FOREACH(tmp, uvm_tree, &map->rbhead) {
+                if (tmp->space != uvm_rb_subtree_space(tmp)) {
+                        printf("%s: space %d != %d\n",
+                            name, tmp->space, uvm_rb_subtree_space(tmp));
+                        goto error;
+                }
+                if (trtmp != NULL && trtmp->start >= tmp->start) {
+                        printf("%s: corrupt: %p >= %p\n",
+                            name, trtmp->start, tmp->start);
+                        goto error;
+                }
+                n++;
+
+                trtmp = tmp;
+        }
+
+        if (n != map->nentries) {
+                printf("%s: nentries: %d vs %d\n",
+                    name, n, map->nentries);
+                goto error;
+        }
+
+        for (tmp = map->header.next; tmp && tmp != &map->header;
+            tmp = tmp->next, i++) {
+                trtmp = RB_FIND(uvm_tree, &map->rbhead, tmp);
+                if (trtmp != tmp) {
+                        printf("%s: lookup: %d: %p - %p: %p\n",
+                            name, i, tmp, trtmp,
+                            RB_PARENT(tmp, rb_entry));
+                        goto error;
+                }
+        }
+
+        return (0);
+ error:
+#ifdef DDB
+        /* handy breakpoint location for error case */
+        __asm(".globl treesanity_label ; treesanity_label:");
+#endif
+        return (-1);
+}
+
 /*
  * local inlines
  */
@@ -389,6 +549,8 @@ void uvm_map_clip_start(map, entry, start)

         /* uvm_map_simplify_entry(map, entry); */ /* XXX */

+        uvm_tree_sanity(map, "clip_start entry");
+
         /*
          * Split off the front portion. note that we must insert the new
          * entry BEFORE this one, so that this entry has the specified
@@ -402,6 +564,8 @@ void uvm_map_clip_start(map, entry, start)
         new_adj = start - new_entry->start;
         if (entry->object.uvm_obj)
                 entry->offset += new_adj;       /* shift start over */
+
+        /* Does not change order for the RB tree */
         entry->start = start;

         if (new_entry->aref.ar_amap) {
@@ -420,6 +584,8 @@ void uvm_map_clip_start(map, entry, start)
                 entry->object.uvm_obj->pgops->pgo_reference(
                     entry->object.uvm_obj);
         }
+
+        uvm_tree_sanity(map, "clip_start leave");
 }

 /*
@@ -440,6 +606,7 @@ uvm_map_clip_end(map, entry, end)
         vm_map_entry_t new_entry;
         vaddr_t new_adj;        /* #bytes we move start forward */

+        uvm_tree_sanity(map, "clip_end entry");
         /*
          * Create a new entry and insert it
          * AFTER the specified entry
@@ -455,6 +622,8 @@

         if (entry->aref.ar_amap)
                 amap_splitref(&entry->aref, &new_entry->aref, new_adj);
+
+        uvm_rb_fixup(map, entry);

         uvm_map_entry_link(map, entry, new_entry);

@@ -468,6 +637,7 @@
                 entry->object.uvm_obj->pgops->pgo_reference(
                     entry->object.uvm_obj);
         }
+        uvm_tree_sanity(map, "clip_end leave");
 }


@@ -522,6 +692,8 @@ uvm_map(map, startp, size, uobj, uoffset, align, flags)
             map, *startp, size, flags);
         UVMHIST_LOG(maphist, " uobj/offset 0x%x/%d", uobj, uoffset,0,0);

+        uvm_tree_sanity(map, "map entry");
+
         /*
          * step 0: sanity check of protection code
          */
@@ -644,8 +816,11 @@ uvm_map(map, startp, size, uobj, uoffset, align, flags)
                 }

                 prev_entry->end += size;
+                uvm_rb_fixup(map, prev_entry);
                 map->size += size;

+                uvm_tree_sanity(map, "map leave 2");
+
                 UVMHIST_LOG(maphist,"<- done (via backmerge)!", 0, 0, 0, 0);
                 vm_map_unlock(map);
                 return (KERN_SUCCESS);
@@ -717,6 +892,8 @@ step3:
             (prev_entry->end >= new_entry->start))
                 map->first_free = new_entry;

+        uvm_tree_sanity(map, "map leave");
+
         UVMHIST_LOG(maphist,"<- done!", 0, 0, 0, 0);
         vm_map_unlock(map);
         return(KERN_SUCCESS);
@@ -738,6 +915,7 @@ uvm_map_lookup_entry(map, address, entry)
 {
         vm_map_entry_t cur;
         vm_map_entry_t last;
+        int use_tree = 0;
         UVMHIST_FUNC("uvm_map_lookup_entry");
         UVMHIST_CALLED(maphist);

@@ -777,12 +955,43 @@ uvm_map_lookup_entry(map, address, entry)
                             cur, 0, 0, 0);
                         return (TRUE);
                 }
+
+                if (map->nentries > 30)
+                        use_tree = 1;
         } else {
                 /*
                  * go from start to hint, *inclusively*
                  */
                 last = cur->next;
                 cur = map->header.next;
+                use_tree = 1;
+        }
+
+        uvm_tree_sanity(map, __FUNCTION__);
+
+        if (use_tree) {
+                vm_map_entry_t prev = &map->header;
+                cur = RB_ROOT(&map->rbhead);
+
+                /*
+                 * Simple lookup in the tree. Happens when the hint is
+                 * invalid, or nentries reach a threshold.
+                 */
+                while (cur) {
+                        if (address >= cur->start) {
+                                if (address < cur->end) {
+                                        *entry = cur;
+                                        SAVE_HINT(map, map->hint, cur);
+                                        return (TRUE);
+                                }
+                                prev = cur;
+                                cur = RB_RIGHT(cur, rb_entry);
+                        } else
+                                cur = RB_LEFT(cur, rb_entry);
+                }
+                *entry = prev;
+                UVMHIST_LOG(maphist,"<- failed!",0,0,0,0);
+                return (FALSE);
         }

         /*
@@ -807,6 +1016,7 @@
                 }
                 cur = cur->next;
         }
+
         *entry = cur->prev;
         SAVE_HINT(map, map->hint, *entry);
         UVMHIST_LOG(maphist,"<- failed!",0,0,0,0);
@@ -814,6 +1024,40 @@
 }

 /*
+ * Checks if address pointed to be phint fits into the empty
+ * space before the vm_map_entry after. Takes aligment and
+ * offset into consideration.
+ */
+
+int
+uvm_map_spacefits(vm_map_t map, vaddr_t *phint, vsize_t length,
+    vm_map_entry_t after, voff_t uoffset, vsize_t align)
+{
+        vaddr_t hint = *phint;
+        vaddr_t end;
+
+#ifdef PMAP_PREFER
+        /*
+         * push hint forward as needed to avoid VAC alias problems.
+         * we only do this if a valid offset is specified.
+         */
+        if (uoffset != UVM_UNKNOWN_OFFSET)
+                PMAP_PREFER(uoffset, &hint);
+#endif
+        if (align != 0)
+                if ((hint & (align - 1)) != 0)
+                        hint = roundup(hint, align);
+        end = hint + length;
+        if (end > map->max_offset || end < hint)
+                return (FALSE);
+        if (after != NULL && after != &map->header && after->start < end)
+                return (FALSE);
+
+        *phint = hint;
+        return (TRUE);
+}
+
+/*
  * uvm_map_findspace: find "length" sized space in "map".
  *
  * => "hint" is a hint about where we want it, unless FINDSPACE_FIXED is
@@ -838,6 +1082,8 @@ uvm_map_findspace(map, hint, length, result, uobj, uoffset, align, flags)
         int flags;
 {
         vm_map_entry_t entry, next, tmp;
+        vm_map_entry_t child, prev = NULL;
+        vaddr_t end, orig_hint;
         UVMHIST_FUNC("uvm_map_findspace");
         UVMHIST_CALLED(maphist);

@@ -847,6 +1093,8 @@ uvm_map_findspace(map, hint, length, result, uobj, uoffset, align, flags)
         KASSERT((align & (align - 1)) == 0);
         KASSERT((flags & UVM_FLAG_FIXED) == 0 || align == 0);

+        uvm_tree_sanity(map, "map_findspace entry");
+
         /*
          * remember the original hint. if we are aligning, then we
          * may have to try again with no alignment constraint if
@@ -888,6 +1136,123 @@ uvm_map_findspace(map, hint, length, result, uobj, uoffset, align, flags)
                 entry = tmp;
         }

+        if (flags & UVM_FLAG_FIXED) {
+#ifdef PMAP_PREFER
+                /*
+                 * push hint forward as needed to avoid VAC alias problems.
+                 * we only do this if a valid offset is specified.
+                 */
+                if (uoffset != UVM_UNKNOWN_OFFSET)
+                        PMAP_PREFER(uoffset, &hint);
+#endif
+                if (align != 0) {
+                        if ((hint & (align - 1)) != 0)
+                                hint = roundup(hint, align);
+                        /*
+                         * XXX Should we PMAP_PREFER() here again?
+                         */
+                }
+                end = hint + length;
+                if (end > map->max_offset || end < hint) {
+                        UVMHIST_LOG(maphist,"<- failed (off end)", 0,0,0,0);
+                        goto error;
+                }
+                next = entry->next;
+                if (next == &map->header || next->start >= end)
+                        goto found;
+                UVMHIST_LOG(maphist,"<- fixed mapping failed", 0,0,0,0);
+                return(NULL); /* only one shot at it ... */
+        }
+
+        /* Try to find the space in the red-black tree */
+
+        /* Check slot before any entry */
+        tmp = map->header.next;
+        if (uvm_map_spacefits(map, &hint, length, tmp, uoffset, align)) {
+                entry = map->first_free;
+                goto found;
+        }
+
+        /* If there is not enough space in the whole tree, we fail */
+        tmp = RB_ROOT(&map->rbhead);
+        if (tmp == NULL || tmp->space < length)
+                goto error;
+
+        /* Find an entry close to hint that has enough space */
+        for (; tmp;) {
+                if (tmp->start > hint &&
+                    (prev == NULL || tmp->start < prev->start)) {
+                        if (tmp->ownspace >= length)
+                                prev = tmp;
+                        else if ((child = RB_RIGHT(tmp, rb_entry)) != NULL &&
+                            child->space >= length)
+                                prev = tmp;
+                }
+                if (tmp->start < hint)
+                        child = RB_RIGHT(tmp, rb_entry);
+                else if (tmp->start > hint)
+                        child = RB_LEFT(tmp, rb_entry);
+                else {
+                        if (tmp->ownspace >= length)
+                                break;
+                        child = RB_RIGHT(tmp, rb_entry);
+                }
+                if (child == NULL || child->space < length)
+                        break;
+                tmp = child;
+        }
+
+        if (tmp != NULL) {
+                /*
+                 * Check if the entry that we found satifies the
+                 * space requirement
+                 */
+                if (hint < tmp->end)
+                        hint = tmp->end;
+                if (uvm_map_spacefits(map, &hint, length, tmp->next, uoffset,
+                    align)) {
+                        entry = tmp;
+                        hint = hint;
+                        goto found;
+                }
+        }
+        if (prev == NULL)
+                goto error;
+
+        hint = prev->end;
+        if (uvm_map_spacefits(map, &hint, length, prev->next, uoffset,
+            align)) {
+                entry = prev;
+                goto found;
+        }
+
+        if ((tmp = RB_RIGHT(prev, rb_entry)) == NULL ||
+            tmp->space < length)
+                goto error;
+
+        for (;;) {
+                KASSERT(tmp && tmp->space >= length);
+                child = RB_LEFT(tmp, rb_entry);
+                if (child && child->space >= length) {
+                        tmp = child;
+                        continue;
+                }
+                if (tmp->ownspace >= length)
+                        break;
+                tmp = RB_RIGHT(tmp, rb_entry);
+        }
+
+        hint = tmp->end;
+        if (uvm_map_spacefits(map, &hint, length, tmp->next, uoffset, align)) {
+                entry = tmp;
+                goto found;
+        }
+
+        /*
+         * The tree fails to find an entry because of offset or alignment
+         * restrictions. Search the list instead.
+         */
+
         /*
          * Look through the rest of the map, trying to fit a new region in
          * the gap between existing regions, or after the very last region.
@@ -908,8 +1273,7 @@
                  * push hint forward as needed to avoid VAC alias problems.
                  * we only do this if a valid offset is specified.
                  */
-                if ((flags & UVM_FLAG_FIXED) == 0 &&
-                    uoffset != UVM_UNKNOWN_OFFSET)
+                if (uoffset != UVM_UNKNOWN_OFFSET)
                         PMAP_PREFER(uoffset, &hint);
 #endif
                 if (align != 0) {
@@ -922,27 +1286,27 @@
                 end = hint + length;
                 if (end > map->max_offset || end < hint) {
                         UVMHIST_LOG(maphist,"<- failed (off end)", 0,0,0,0);
-                        if (align != 0) {
-                                UVMHIST_LOG(maphist,
-                                    "calling recursively, no align",
-                                    0,0,0,0);
-                                return (uvm_map_findspace(map, orig_hint,
-                                    length, result, uobj, uoffset, 0, flags));
-                        }
-                        return (NULL);
+                        goto error;
                 }
                 next = entry->next;
                 if (next == &map->header || next->start >= end)
                         break;
-                if (flags & UVM_FLAG_FIXED) {
-                        UVMHIST_LOG(maphist,"<- fixed mapping failed", 0,0,0,0);
-                        return(NULL); /* only one shot at it ... */
-                }
         }
+ found:
         SAVE_HINT(map, map->hint, entry);
         *result = hint;
         UVMHIST_LOG(maphist,"<- got it! (result=0x%x)", hint, 0,0,0);
         return (entry);
+
+ error:
+        if (align != 0) {
+                UVMHIST_LOG(maphist,
+                    "calling recursively, no align",
+                    0,0,0,0);
+                return (uvm_map_findspace(map, orig_hint,
+                    length, result, uobj, uoffset, 0, flags));
+        }
+        return (NULL);
 }

 /*
@@ -974,6 +1338,8 @@

         VM_MAP_RANGE_CHECK(map, start, end);

+        uvm_tree_sanity(map, "unmap_remove entry");
+
         /*
          * find first entry
          */
@@ -1119,6 +1485,8 @@
                 entry = next;   /* next entry, please */
         }

+        uvm_tree_sanity(map, "unmap_remove leave");
+
         /*
          * now we've cleaned up the map and are ready for the caller to drop
          * references to the mapped objects.
@@ -1245,6 +1613,8 @@
 {
         vm_map_entry_t oldent, last;

+        uvm_tree_sanity(map, "map_replace entry");
+
         /*
          * first find the blank map entry at the specified address
         */
@@ -1301,7 +1671,6 @@
          */

         if (newents) {
-
                 last = newents->prev;   /* we expect this */

                 /* critical: flush stale hints out of map */
@@ -1311,10 +1680,25 @@

                 last->next = oldent->next;
                 last->next->prev = last;
+
+                /* Fix RB tree */
+                uvm_rb_remove(map, oldent);
+
                 newents->prev = oldent->prev;
                 newents->prev->next = newents;
                 map->nentries = map->nentries + (nnewents - 1);

+                /* Fixup the RB tree */
+                {
+                        int i;
+                        vm_map_entry_t tmp;
+
+                        tmp = newents;
+                        for (i = 0; i < nnewents && tmp; i++) {
+                                uvm_rb_insert(map, tmp);
+                                tmp = tmp->next;
+                        }
+                }
         } else {

                 /* critical: flush stale hints out of map */
@@ -1327,6 +1711,8 @@

         }

+        uvm_tree_sanity(map, "map_replace leave");
+
         /*
          * now we can free the old blank entry, unlock the map and return.
          */
@@ -1372,6 +1758,9 @@
             len,0);
         UVMHIST_LOG(maphist," ...,dstmap=0x%x, flags=0x%x)", dstmap,flags,0,0);

+        uvm_tree_sanity(srcmap, "map_extract src enter");
+        uvm_tree_sanity(dstmap, "map_extract dst enter");
+
         /*
          * step 0: sanity check: start must be on a page boundary, length
          * must be page sized. can't ask for CONTIG/QREF if you asked for
@@ -1649,6 +2038,10 @@
                         goto bad2;
                 }
         }
+
+        uvm_tree_sanity(srcmap, "map_extract src leave");
+        uvm_tree_sanity(dstmap, "map_extract dst leave");
+
         return(0);

         /*
@@ -1660,6 +2053,10 @@ bad2:   /* src already unlocked */
         if (chain)
                 uvm_unmap_detach(chain,
                     (flags & UVM_EXTRACT_QREF) ? AMAP_REFALL : 0);
+
+        uvm_tree_sanity(srcmap, "map_extract src err leave");
+        uvm_tree_sanity(dstmap, "map_extract dst err leave");
+
         uvm_unmap(dstmap, dstaddr, dstaddr+len);   /* ??? */
         return(error);
 }
@@ -2825,7 +3222,11 @@ uvmspace_exec(p, start, end)
                  */
                 vm_map_lock(map);
                 map->min_offset = start;
+                uvm_tree_sanity(map, "resize enter");
                 map->max_offset = end;
+                if (map->header.prev != &map->header)
+                        uvm_rb_fixup(map, map->header.prev);
+                uvm_tree_sanity(map, "resize leave");
                 vm_map_unlock(map);

diff --git a/sys/uvm/uvm_map.h b/sys/uvm/uvm_map.h
index ad865ece22b..36e5707f1c9 100644
--- a/sys/uvm/uvm_map.h
+++ b/sys/uvm/uvm_map.h
@@ -1,4 +1,4 @@
-/* $OpenBSD: uvm_map.h,v 1.21 2002/02/18 10:02:20 art Exp $ */
+/* $OpenBSD: uvm_map.h,v 1.22 2002/02/25 00:20:45 provos Exp $ */
 /* $NetBSD: uvm_map.h,v 1.24 2001/02/18 21:19:08 chs Exp $ */

 /*
@@ -139,6 +139,9 @@ union vm_map_object {
  * Also included is control information for virtual copy operations.
  */
 struct vm_map_entry {
+        RB_ENTRY(vm_map_entry) rb_entry;        /* tree information */
+        vaddr_t ownspace;                       /* free space after */
+        vaddr_t space;                          /* space in subtree */
         struct vm_map_entry *prev;              /* previous entry */
         struct vm_map_entry *next;              /* next entry */
         vaddr_t start;                          /* start address */
@@ -217,6 +220,7 @@ struct vm_map_entry {
 struct vm_map {
         struct pmap * pmap;                     /* Physical map */
         lock_data_t lock;                       /* Lock for map data */
+        RB_HEAD(uvm_tree, vm_map_entry) rbhead; /* Tree for entries */
         struct vm_map_entry header;             /* List of entries */
         int nentries;                           /* Number of entries */
         vsize_t size;                           /* virtual size */
diff --git a/sys/uvm/uvm_map_i.h b/sys/uvm/uvm_map_i.h
index 54625e7fb4e..4556f48e6a8 100644
--- a/sys/uvm/uvm_map_i.h
+++ b/sys/uvm/uvm_map_i.h
@@ -1,4 +1,4 @@
-/* $OpenBSD: uvm_map_i.h,v 1.12 2001/12/19 08:58:07 art Exp $ */
+/* $OpenBSD: uvm_map_i.h,v 1.13 2002/02/25 00:20:45 provos Exp $ */
 /* $NetBSD: uvm_map_i.h,v 1.18 2000/11/27 08:40:04 chs Exp $ */

 /*
@@ -114,6 +114,7 @@ uvm_map_setup(map, min, max, flags)
         int flags;
 {

+        RB_INIT(&map->rbhead);
         map->header.next = map->header.prev = &map->header;
         map->nentries = 0;
         map->size = 0;
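
Appended note (not part of the commit): the heart of this change is that every vm_map_entry now caches the free gap that follows it (ownspace) and the largest such gap anywhere below it in the tree (space), so uvm_map_findspace() can skip whole subtrees whose cached maximum is smaller than the request. The standalone C sketch below illustrates that bookkeeping with a plain, unbalanced binary tree; all names in it are made up for illustration, and it deliberately omits the red-black balancing, the RB_AUGMENT()/uvm_rb_fixup() incremental updates, and the hint handling that the real uvm_map.c code above performs.

/*
 * Illustrative sketch only -- not kernel code.  A binary tree keyed by
 * "start" where each node caches the gap after itself ("ownspace") and
 * the largest gap in its subtree ("space"), mirroring the fields this
 * commit adds to struct vm_map_entry.
 */
#include <stdio.h>
#include <stddef.h>

struct node {
        unsigned long start, end;       /* mapped range [start, end) */
        unsigned long ownspace;         /* free space after this entry */
        unsigned long space;            /* max free space in this subtree */
        struct node *left, *right;
};

/* Recompute the cached subtree maximum, like uvm_rb_subtree_space(). */
static unsigned long
subtree_space(struct node *n)
{
        unsigned long best = n->ownspace;

        if (n->left && n->left->space > best)
                best = n->left->space;
        if (n->right && n->right->space > best)
                best = n->right->space;
        return best;
}

/*
 * Naive insert by start address; the kernel instead rebalances and then
 * fixes the cached values up toward the root via uvm_rb_fixup().
 */
static struct node *
insert(struct node *root, struct node *n)
{
        if (root == NULL)
                return n;
        if (n->start < root->start)
                root->left = insert(root->left, n);
        else
                root->right = insert(root->right, n);
        root->space = subtree_space(root);      /* augmentation step */
        return root;
}

/*
 * Return the lowest entry whose following gap can hold "length",
 * pruning any subtree whose cached maximum is already too small --
 * the same trick uvm_map_findspace() uses on the red-black tree.
 */
static struct node *
find_gap(struct node *n, unsigned long length)
{
        if (n == NULL || n->space < length)
                return NULL;
        if (n->left && n->left->space >= length)
                return find_gap(n->left, length);
        if (n->ownspace >= length)
                return n;
        return find_gap(n->right, length);
}

int
main(void)
{
        /* Three mappings with gaps of 0x1000, 0x4000 and 0x2000 after them. */
        struct node a = { 0x1000, 0x2000, 0x1000, 0x1000, NULL, NULL };
        struct node b = { 0x3000, 0x4000, 0x4000, 0x4000, NULL, NULL };
        struct node c = { 0x8000, 0x9000, 0x2000, 0x2000, NULL, NULL };
        struct node *root = NULL, *hit;

        root = insert(insert(insert(root, &b), &a), &c);

        hit = find_gap(root, 0x3000);
        if (hit != NULL)
                printf("a gap of 0x3000 fits after the entry at 0x%lx\n",
                    hit->start);
        return 0;
}

In the kernel the same invariant is maintained incrementally rather than recomputed from scratch: RB_AUGMENT() is hooked into the <sys/tree.h> rotations, uvm_rb_fixup() walks from a modified entry up to the root, and uvm_map_findspace() additionally biases its descent toward the supplied hint before falling back to the linear list walk when alignment or PMAP_PREFER constraints get in the way.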