author | Chris Wilson <chris@chris-wilson.co.uk> | 2012-11-13 09:46:19 +0000
---|---|---
committer | Chris Wilson <chris@chris-wilson.co.uk> | 2012-11-13 09:46:19 +0000
commit | 2954f15e2bcb590a90c2cb6077c0843ee25a4413 (patch) |
tree | 0d8426fabb193957f61d2929778195c99d52383a /src/sna/sna_io.c |
parent | 66eb0adffa63ef8ece7621ba90dc96af91549612 (diff) |
sna: Specialise the decision for inplace xor uploads
Fixes a regression from
commit 0be1d964713ca407f029278a8256d02d925dc9da
Author: Chris Wilson <chris@chris-wilson.co.uk>
Date: Tue Sep 11 21:48:24 2012 +0100
sna: Use inplace X tiling for LLC uploads
which introduced the ability to swizzle into CPU maps, but also
convinced the xor upload path to attempt the same - which blows up for
large images.
Reported-by: Michael Laß <bevan@bi-co.net>
Bugzilla: https://bugs.freedesktop.org/show_bug.cgi?id=57031
Signed-off-by: Chris Wilson <chris@chris-wilson.co.uk>
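At its core, the patch splits the shared size heuristic out into __upload_inplace() and gives the xor path its own gatekeeper, upload_inplace__xor(), which never accepts the tiled-swizzle route because write_boxes_inplace__xor() can only write through a direct CPU mapping. The sketch below is a minimal, self-contained model of that split, not the actual SNA code: struct kgem / struct kgem_bo are reduced to stand-in flags, and the mappable/can_swizzle fields and the 4096-byte threshold are illustrative placeholders for kgem_bo_can_map(), upload_inplace__tiled() and the real heuristic shown in the diff further down.

```c
#include <stdbool.h>
#include <stdio.h>

/* Simplified stand-ins for the driver's struct kgem / struct kgem_bo. */
struct kgem { bool wedged; };
struct kgem_bo { bool mappable; bool can_swizzle; };

/* Shared size heuristic, analogous to __upload_inplace() in the patch:
 * prefer an inplace write once the upload is "large enough". The
 * 4096-byte cutoff is an illustrative placeholder, not SNA's real one. */
static bool __upload_inplace(const struct kgem_bo *bo, unsigned bytes)
{
	(void)bo;
	return bytes >= 4096;
}

/* Ordinary uploads may go inplace either through a CPU map or by
 * swizzling into the tiled buffer (upload_inplace__tiled in SNA). */
static bool upload_inplace(const struct kgem *kgem,
			   const struct kgem_bo *bo, unsigned bytes)
{
	if (kgem->wedged)
		return true;
	if (!bo->mappable && !bo->can_swizzle)
		return false;
	return __upload_inplace(bo, bytes);
}

/* The xor path has no tiled-swizzle fallback, so it must insist on a
 * direct mapping - this is the specialisation the commit introduces. */
static bool upload_inplace__xor(const struct kgem *kgem,
				const struct kgem_bo *bo, unsigned bytes)
{
	if (kgem->wedged)
		return true;
	if (!bo->mappable)
		return false;
	return __upload_inplace(bo, bytes);
}

int main(void)
{
	struct kgem kgem = { .wedged = false };
	struct kgem_bo bo = { .mappable = false, .can_swizzle = true };

	/* Before the fix both paths shared upload_inplace(), so a large
	 * xor upload into an unmappable-but-swizzlable bo was attempted
	 * inplace; the specialised check now refuses it. */
	printf("plain upload inplace: %d\n", upload_inplace(&kgem, &bo, 1 << 20));
	printf("xor upload inplace:   %d\n", upload_inplace__xor(&kgem, &bo, 1 << 20));
	return 0;
}
```

Built with a plain cc, the example prints 1 for the plain path and 0 for the xor path, mirroring how the fix stops large xor uploads from being attempted inplace on a buffer that can only be reached by swizzling.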
Diffstat (limited to 'src/sna/sna_io.c')
-rw-r--r-- | src/sna/sna_io.c | 44
1 file changed, 33 insertions, 11 deletions
diff --git a/src/sna/sna_io.c b/src/sna/sna_io.c
index 69d920c7..2038e5df 100644
--- a/src/sna/sna_io.c
+++ b/src/sna/sna_io.c
@@ -579,19 +579,13 @@ static bool write_boxes_inplace(struct kgem *kgem,
 	return true;
 }
 
-static bool upload_inplace(struct kgem *kgem,
-			   struct kgem_bo *bo,
-			   const BoxRec *box,
-			   int n, int bpp)
+static bool __upload_inplace(struct kgem *kgem,
+			     struct kgem_bo *bo,
+			     const BoxRec *box,
+			     int n, int bpp)
 {
 	unsigned int bytes;
 
-	if (kgem->wedged)
-		return true;
-
-	if (!kgem_bo_can_map(kgem, bo) && !upload_inplace__tiled(kgem, bo))
-		return false;
-
 	if (FORCE_INPLACE)
 		return FORCE_INPLACE > 0;
 
@@ -610,6 +604,20 @@ static bool upload_inplace(struct kgem *kgem,
 	return bytes * bpp >> 12;
 }
 
+static bool upload_inplace(struct kgem *kgem,
+			   struct kgem_bo *bo,
+			   const BoxRec *box,
+			   int n, int bpp)
+{
+	if (kgem->wedged)
+		return true;
+
+	if (!kgem_bo_can_map(kgem, bo) && !upload_inplace__tiled(kgem, bo))
+		return false;
+
+	return __upload_inplace(kgem, bo, box, n,bpp);
+}
+
 bool sna_write_boxes(struct sna *sna, PixmapPtr dst,
 		     struct kgem_bo * const dst_bo, int16_t const dst_dx, int16_t const dst_dy,
 		     const void * const src, int const stride, int16_t const src_dx, int16_t const src_dy,
@@ -960,6 +968,20 @@ write_boxes_inplace__xor(struct kgem *kgem,
 	} while (--n);
 }
 
+static bool upload_inplace__xor(struct kgem *kgem,
+				struct kgem_bo *bo,
+				const BoxRec *box,
+				int n, int bpp)
+{
+	if (kgem->wedged)
+		return true;
+
+	if (!kgem_bo_can_map(kgem, bo))
+		return false;
+
+	return __upload_inplace(kgem, bo, box, n, bpp);
+}
+
 void sna_write_boxes__xor(struct sna *sna, PixmapPtr dst,
 			  struct kgem_bo *dst_bo, int16_t dst_dx, int16_t dst_dy,
 			  const void *src, int stride, int16_t src_dx, int16_t src_dy,
@@ -976,7 +998,7 @@ void sna_write_boxes__xor(struct sna *sna, PixmapPtr dst,
 
 	DBG(("%s x %d\n", __FUNCTION__, nbox));
 
-	if (upload_inplace(kgem, dst_bo, box, nbox, dst->drawable.bitsPerPixel)) {
+	if (upload_inplace__xor(kgem, dst_bo, box, nbox, dst->drawable.bitsPerPixel)) {
 fallback:
 		write_boxes_inplace__xor(kgem,
 					 src, stride, dst->drawable.bitsPerPixel, src_dx, src_dy,