path: root/src/sna/sna_tiling.c
author	Chris Wilson <chris@chris-wilson.co.uk>	2013-11-06 14:51:42 +0000
committer	Chris Wilson <chris@chris-wilson.co.uk>	2013-11-06 14:51:42 +0000
commit	ae380a960df6b3a9714d78eb6cb42249764488ba (patch)
tree	10a9f101b4f26e12d2004accc6609582eeebb002 /src/sna/sna_tiling.c
parent	7578809ddcb244ad78ebf86359b7ee2a61e27ff6 (diff)
sna: Use tiling BLT fallback for BLT composite operations
This avoids a circuitous route through the render pathways and multiple levels of tiling fallbacks to accomplish the same copy.

Signed-off-by: Chris Wilson <chris@chris-wilson.co.uk>
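As a rough illustration of the tiling approach referred to above, the following standalone sketch walks a copy's extents in fixed-size tiles, clamping the final row and column, which mirrors the loop structure used by the fallback added below. The box type, the step value and copy_tile() are illustrative stand-ins, not the driver's types or its tile-size heuristics.

#include <stdio.h>

struct box { int x1, y1, x2, y2; };

/* Stand-in for copying one clamped tile through a temporary buffer. */
static void copy_tile(const struct box *tile)
{
	printf("tile (%d, %d), (%d, %d)\n",
	       tile->x1, tile->y1, tile->x2, tile->y2);
}

/* Walk the copy extents in step-sized tiles, clamping the last row/column. */
static void walk_tiles(const struct box *extents, int step)
{
	struct box tile;

	for (tile.y1 = extents->y1; tile.y1 < extents->y2; tile.y1 = tile.y2) {
		tile.y2 = tile.y1 + step;
		if (tile.y2 > extents->y2)
			tile.y2 = extents->y2;

		for (tile.x1 = extents->x1; tile.x1 < extents->x2; tile.x1 = tile.x2) {
			tile.x2 = tile.x1 + step;
			if (tile.x2 > extents->x2)
				tile.x2 = extents->x2;

			copy_tile(&tile);
		}
	}
}

int main(void)
{
	struct box extents = { 0, 0, 5000, 3000 }; /* too large for one pass */
	walk_tiles(&extents, 2048);                /* illustrative step size */
	return 0;
}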
Diffstat (limited to 'src/sna/sna_tiling.c')
-rw-r--r--  src/sna/sna_tiling.c | 311
1 file changed, 311 insertions(+), 0 deletions(-)
diff --git a/src/sna/sna_tiling.c b/src/sna/sna_tiling.c
index d23fb00e..ae9c84b3 100644
--- a/src/sna/sna_tiling.c
+++ b/src/sna/sna_tiling.c
@@ -689,6 +689,317 @@ done:
 	return ret;
 }
+fastcall static void
+tiling_blt(struct sna *sna,
+	   const struct sna_composite_op *op,
+	   const struct sna_composite_rectangles *r)
+{
+	int x1, x2, y1, y2;
+	int src_x, src_y;
+	BoxRec box;
+
+	DBG(("%s: src=(%d, %d), dst=(%d, %d), size=(%d, %d)\n",
+	     __FUNCTION__,
+	     r->src.x, r->src.y,
+	     r->dst.x, r->dst.y,
+	     r->width, r->height));
+
+	/* XXX higher layer should have clipped? */
+
+	x1 = r->dst.x + op->dst.x;
+	y1 = r->dst.y + op->dst.y;
+	x2 = x1 + r->width;
+	y2 = y1 + r->height;
+
+	src_x = r->src.x - x1 + op->u.blt.sx;
+	src_y = r->src.y - y1 + op->u.blt.sy;
+
+	/* clip against dst */
+	if (x1 < 0)
+		x1 = 0;
+	if (y1 < 0)
+		y1 = 0;
+
+	if (x2 > op->dst.width)
+		x2 = op->dst.width;
+
+	if (y2 > op->dst.height)
+		y2 = op->dst.height;
+
+	DBG(("%s: box=(%d, %d), (%d, %d)\n", __FUNCTION__, x1, y1, x2, y2));
+
+	if (x2 <= x1 || y2 <= y1)
+		return;
+
+	box.x1 = x1; box.y1 = y1;
+	box.x2 = x2; box.y2 = y2;
+	sna_tiling_blt_copy_boxes(sna, GXcopy,
+				  op->src.bo, src_x, src_y,
+				  op->dst.bo, 0, 0,
+				  op->u.blt.bpp,
+				  &box, 1);
+}
+
+fastcall static void
+tiling_blt_box(struct sna *sna,
+	       const struct sna_composite_op *op,
+	       const BoxRec *box)
+{
+	DBG(("%s: box (%d, %d), (%d, %d)\n",
+	     __FUNCTION__, box->x1, box->y1, box->x2, box->y2));
+	sna_tiling_blt_copy_boxes(sna, GXcopy,
+				  op->src.bo, op->u.blt.sx, op->u.blt.sy,
+				  op->dst.bo, op->dst.x, op->dst.y,
+				  op->u.blt.bpp,
+				  box, 1);
+}
+
+static void
+tiling_blt_boxes(struct sna *sna,
+		 const struct sna_composite_op *op,
+		 const BoxRec *box, int nbox)
+{
+	DBG(("%s: nbox=%d\n", __FUNCTION__, nbox));
+	sna_tiling_blt_copy_boxes(sna, GXcopy,
+				  op->src.bo, op->u.blt.sx, op->u.blt.sy,
+				  op->dst.bo, op->dst.x, op->dst.y,
+				  op->u.blt.bpp,
+				  box, nbox);
+}
+
+static bool
+sna_tiling_blt_copy_boxes__with_alpha(struct sna *sna, uint8_t alu,
+				      struct kgem_bo *src_bo, int16_t src_dx, int16_t src_dy,
+				      struct kgem_bo *dst_bo, int16_t dst_dx, int16_t dst_dy,
+				      int bpp, int alpha_fixup,
+				      const BoxRec *box, int nbox)
+{
+	RegionRec region, tile, this;
+	struct kgem_bo *bo;
+	int max_size, step;
+	bool ret = false;
+
+	if (wedged(sna) ||
+	    !kgem_bo_can_blt(&sna->kgem, src_bo) ||
+	    !kgem_bo_can_blt(&sna->kgem, dst_bo)) {
+		/* XXX */
+		DBG(("%s: tiling blt fail: src?=%d, dst?=%d\n",
+		     __FUNCTION__,
+		     kgem_bo_can_blt(&sna->kgem, src_bo),
+		     kgem_bo_can_blt(&sna->kgem, dst_bo)));
+		return false;
+	}
+
+	max_size = sna->kgem.aperture_high * PAGE_SIZE;
+	max_size -= MAX(kgem_bo_size(src_bo), kgem_bo_size(dst_bo));
+	if (max_size <= 0) {
+		DBG(("%s: tiles cannot fit into aperture\n", __FUNCTION__));
+		return false;
+	}
+	if (max_size > sna->kgem.max_copy_tile_size)
+		max_size = sna->kgem.max_copy_tile_size;
+
+	pixman_region_init_rects(&region, box, nbox);
+
+	/* Use a small step to accommodate enlargement through tile alignment */
+	step = sna->render.max_3d_size;
+	if (region.extents.x1 & (8*512 / bpp - 1) || region.extents.y1 & 63)
+		step /= 2;
+	while (step * step * 4 > max_size)
+		step /= 2;
+	if (sna->kgem.gen < 033)
+		step /= 2; /* accommodate severe fence restrictions */
+	if (step == 0) {
+		DBG(("%s: tiles cannot fit into aperture\n", __FUNCTION__));
+		return false;
+	}
+
+	DBG(("%s (alu=%d), tile.size=%d, box=%dx[(%d, %d), (%d, %d)])\n",
+	     __FUNCTION__, alu, step, nbox,
+	     region.extents.x1, region.extents.y1,
+	     region.extents.x2, region.extents.y2));
+
+	for (tile.extents.y1 = tile.extents.y2 = region.extents.y1;
+	     tile.extents.y2 < region.extents.y2;
+	     tile.extents.y1 = tile.extents.y2) {
+		int y2 = tile.extents.y1 + step;
+		if (y2 > region.extents.y2)
+			y2 = region.extents.y2;
+		tile.extents.y2 = y2;
+
+		for (tile.extents.x1 = tile.extents.x2 = region.extents.x1;
+		     tile.extents.x2 < region.extents.x2;
+		     tile.extents.x1 = tile.extents.x2) {
+			int w, h;
+			int x2 = tile.extents.x1 + step;
+			if (x2 > region.extents.x2)
+				x2 = region.extents.x2;
+			tile.extents.x2 = x2;
+
+			tile.data = NULL;
+
+			RegionNull(&this);
+			RegionIntersect(&this, &region, &tile);
+			if (RegionNil(&this))
+				continue;
+
+			w = this.extents.x2 - this.extents.x1;
+			h = this.extents.y2 - this.extents.y1;
+			bo = kgem_create_2d(&sna->kgem, w, h, bpp,
+					    kgem_choose_tiling(&sna->kgem,
+							       I915_TILING_X,
+							       w, h, bpp),
+					    CREATE_TEMPORARY);
+			if (bo) {
+				int16_t dx = this.extents.x1;
+				int16_t dy = this.extents.y1;
+
+				assert(bo->pitch <= 8192);
+				assert(bo->tiling != I915_TILING_Y);
+
+				if (!sna_blt_copy_boxes(sna, alu,
+							src_bo, src_dx, src_dy,
+							bo, -dx, -dy,
+							bpp, REGION_RECTS(&this), REGION_NUM_RECTS(&this)))
+					goto err;
+
+				if (!sna_blt_copy_boxes__with_alpha(sna, alu,
+								    bo, -dx, -dy,
+								    dst_bo, dst_dx, dst_dy,
+								    bpp, alpha_fixup,
+								    REGION_RECTS(&this), REGION_NUM_RECTS(&this)))
+					goto err;
+
+				kgem_bo_destroy(&sna->kgem, bo);
+			}
+			RegionUninit(&this);
+		}
+	}
+
+	ret = true;
+	goto done;
+err:
+	kgem_bo_destroy(&sna->kgem, bo);
+	RegionUninit(&this);
+done:
+	pixman_region_fini(&region);
+	return ret;
+}
+
+fastcall static void
+tiling_blt__with_alpha(struct sna *sna,
+		       const struct sna_composite_op *op,
+		       const struct sna_composite_rectangles *r)
+{
+	int x1, x2, y1, y2;
+	int src_x, src_y;
+	BoxRec box;
+
+	DBG(("%s: src=(%d, %d), dst=(%d, %d), size=(%d, %d)\n",
+	     __FUNCTION__,
+	     r->src.x, r->src.y,
+	     r->dst.x, r->dst.y,
+	     r->width, r->height));
+
+	/* XXX higher layer should have clipped? */
+
+	x1 = r->dst.x + op->dst.x;
+	y1 = r->dst.y + op->dst.y;
+	x2 = x1 + r->width;
+	y2 = y1 + r->height;
+
+	src_x = r->src.x - x1 + op->u.blt.sx;
+	src_y = r->src.y - y1 + op->u.blt.sy;
+
+	/* clip against dst */
+	if (x1 < 0)
+		x1 = 0;
+	if (y1 < 0)
+		y1 = 0;
+
+	if (x2 > op->dst.width)
+		x2 = op->dst.width;
+
+	if (y2 > op->dst.height)
+		y2 = op->dst.height;
+
+	DBG(("%s: box=(%d, %d), (%d, %d)\n", __FUNCTION__, x1, y1, x2, y2));
+
+	if (x2 <= x1 || y2 <= y1)
+		return;
+
+	box.x1 = x1; box.y1 = y1;
+	box.x2 = x2; box.y2 = y2;
+	sna_tiling_blt_copy_boxes__with_alpha(sna, GXcopy,
+					      op->src.bo, src_x, src_y,
+					      op->dst.bo, 0, 0,
+					      op->u.blt.bpp, op->u.blt.pixel,
+					      &box, 1);
+}
+
+fastcall static void
+tiling_blt_box__with_alpha(struct sna *sna,
+			   const struct sna_composite_op *op,
+			   const BoxRec *box)
+{
+	DBG(("%s: box (%d, %d), (%d, %d)\n",
+	     __FUNCTION__, box->x1, box->y1, box->x2, box->y2));
+	sna_tiling_blt_copy_boxes__with_alpha(sna, GXcopy,
+					      op->src.bo, op->u.blt.sx, op->u.blt.sy,
+					      op->dst.bo, op->dst.x, op->dst.y,
+					      op->u.blt.bpp, op->u.blt.pixel,
+					      box, 1);
+}
+
+static void
+tiling_blt_boxes__with_alpha(struct sna *sna,
+			     const struct sna_composite_op *op,
+			     const BoxRec *box, int nbox)
+{
+	DBG(("%s: nbox=%d\n", __FUNCTION__, nbox));
+	sna_tiling_blt_copy_boxes__with_alpha(sna, GXcopy,
+					      op->src.bo, op->u.blt.sx, op->u.blt.sy,
+					      op->dst.bo, op->dst.x, op->dst.y,
+					      op->u.blt.bpp, op->u.blt.pixel,
+					      box, nbox);
+}
+
+static void nop_done(struct sna *sna, const struct sna_composite_op *op)
+{
+	assert(sna->kgem.nbatch <= KGEM_BATCH_SIZE(&sna->kgem));
+	(void)op;
+}
+
+bool
+sna_tiling_blt_composite(struct sna *sna,
+			 struct sna_composite_op *op,
+			 struct kgem_bo *bo,
+			 int bpp,
+			 uint32_t alpha_fixup)
+{
+	assert(op->op == PictOpSrc);
+	assert(op->dst.bo);
+	assert(kgem_bo_can_blt(&sna->kgem, op->dst.bo));
+	assert(kgem_bo_can_blt(&sna->kgem, bo));
+
+	op->src.bo = bo;
+	op->u.blt.bpp = bpp;
+	op->u.blt.pixel = alpha_fixup;
+
+	if (alpha_fixup) {
+		op->blt = tiling_blt__with_alpha;
+		op->box = tiling_blt_box__with_alpha;
+		op->boxes = tiling_blt_boxes__with_alpha;
+	} else {
+		op->blt = tiling_blt;
+		op->box = tiling_blt_box;
+		op->boxes = tiling_blt_boxes;
+	}
+	op->done = nop_done;
+
+	return true;
+}
+
 bool sna_tiling_blt_copy_boxes(struct sna *sna, uint8_t alu,
			       struct kgem_bo *src_bo, int16_t src_dx, int16_t src_dy,
			       struct kgem_bo *dst_bo, int16_t dst_dx, int16_t dst_dy,
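For context on how the new entry point is meant to be used: sna_tiling_blt_composite() only installs the per-operation callbacks, and callers then invoke op->blt / op->box / op->boxes as with any other composite op. The standalone sketch below shows just that selection pattern; composite_op, blt_plain, blt_with_alpha and setup_tiled_blt are hypothetical names for illustration, not the driver's structures or functions.

#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

/* Hypothetical, stripped-down composite op: only one callback slot is kept
 * here; the driver additionally installs box/boxes/done callbacks. */
struct composite_op {
	uint32_t alpha_fixup;
	void (*blt)(const struct composite_op *op);
};

static void blt_plain(const struct composite_op *op)
{
	(void)op;
	printf("plain tiled BLT\n");
}

static void blt_with_alpha(const struct composite_op *op)
{
	printf("tiled BLT with alpha fixup 0x%08x\n", op->alpha_fixup);
}

/* Mirror of the selection done by sna_tiling_blt_composite(): pick the
 * callback set according to whether an alpha fixup is required. */
static bool setup_tiled_blt(struct composite_op *op, uint32_t alpha_fixup)
{
	op->alpha_fixup = alpha_fixup;
	op->blt = alpha_fixup ? blt_with_alpha : blt_plain;
	return true;
}

int main(void)
{
	struct composite_op op;

	setup_tiled_blt(&op, 0);
	op.blt(&op);

	setup_tiled_blt(&op, 0xff000000);
	op.blt(&op);

	return 0;
}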