author    Chris Wilson <chris@chris-wilson.co.uk>   2012-12-12 21:56:22 +0000
committer Chris Wilson <chris@chris-wilson.co.uk>   2012-12-12 21:56:22 +0000
commit    74bbf20e6e652cba55d6d0bc17066f4112f8548c (patch)
tree      b86c42dba5cd4670c1b6a42cdb812dcb843141b8
parent    52c8c9218c8f28fb049b02214d833912a803f911 (diff)
sna: Improve the initialisation failure path for pinned batches
Simplify the later checks by always populating the lists with a single, albeit unpinned, bo in the case we fail to create pinned batches.

Signed-off-by: Chris Wilson <chris@chris-wilson.co.uk>
-rw-r--r--  src/sna/kgem.c  20
1 file changed, 19 insertions(+), 1 deletion(-)
diff --git a/src/sna/kgem.c b/src/sna/kgem.c
index f1682da5..90594cfa 100644
--- a/src/sna/kgem.c
+++ b/src/sna/kgem.c
@@ -869,7 +869,6 @@ static bool kgem_init_pinned_batches(struct kgem *kgem)
 			bo->presumed_offset = pin.offset;
 			debug_alloc__bo(kgem, bo);
 			list_add(&bo->list, &kgem->pinned_batches[n]);
-			bo->refcnt = 1;
 		}
 	}
@@ -883,6 +882,25 @@ err:
 							 struct kgem_bo, list));
 		}
 	}
+
+	/* For simplicity populate the lists with a single unpinned bo */
+	for (n = 0; n < ARRAY_SIZE(count); n++) {
+		struct kgem_bo *bo;
+		uint32_t handle;
+
+		handle = gem_create(kgem->fd, size[n]);
+		if (handle == 0)
+			break;
+
+		bo = __kgem_bo_alloc(handle, size[n]);
+		if (bo == NULL) {
+			gem_close(kgem->fd, handle);
+			break;
+		}
+
+		debug_alloc__bo(kgem, bo);
+		list_add(&bo->list, &kgem->pinned_batches[n]);
+	}
 	return false;
 }
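
For context on how this simplifies the later checks: once the error path above has run, each pinned_batches[n] list normally holds at least one bo (merely unpinned), so code that hands out a batch buffer can take the head of the list without first testing for an empty list. The sketch below is illustrative only and is not part of the patch: the consumer functions are hypothetical, and it assumes the driver's list helpers (list_is_empty, list_first_entry) and the struct kgem / struct kgem_bo definitions from kgem.h.

	/* Hypothetical consumer before the patch: must guard against an
	 * empty list when pinned-batch creation failed at init time. */
	static struct kgem_bo *get_pinned_batch_guarded(struct kgem *kgem, int n)
	{
		if (list_is_empty(&kgem->pinned_batches[n]))
			return NULL; /* caller must take a fallback path */

		return list_first_entry(&kgem->pinned_batches[n],
					struct kgem_bo, list);
	}

	/* Hypothetical consumer after the patch: the fallback loop above
	 * normally leaves at least one (unpinned) bo on each list, so the
	 * head can be taken unconditionally. */
	static struct kgem_bo *get_pinned_batch(struct kgem *kgem, int n)
	{
		return list_first_entry(&kgem->pinned_batches[n],
					struct kgem_bo, list);
	}

Note that kgem_init_pinned_batches() still returns false on this path, and if even the unpinned gem_create() fallback fails the loop breaks early and a list can remain empty; the fallback only makes the common failure case (pinning unavailable) cheap for later callers to handle.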