[cairo-commit] 7 commits - src/cairo-boxes.c src/cairo-boxes-private.h src/cairo-gl-spans-compositor.c src/cairo-image-compositor.c src/cairo-spans-compositor.c src/cairo-spans-compositor-private.h src/cairo-traps.c src/cairo-traps-compositor.c src/cairo-traps-private.h test/reference
Chris Wilson
ickle at kemper.freedesktop.org
Sun Mar 4 14:55:31 PST 2012
src/cairo-boxes-private.h | 5
src/cairo-boxes.c | 59 ++++++
src/cairo-gl-spans-compositor.c | 65 +++++++
src/cairo-image-compositor.c | 301 ++++++++++++++++++++++++++---------
src/cairo-spans-compositor-private.h | 11 +
src/cairo-spans-compositor.c | 62 ++++++-
src/cairo-traps-compositor.c | 45 ++++-
src/cairo-traps-private.h | 6
src/cairo-traps.c | 62 +++++++
test/reference/hatchings.ref.png |binary
10 files changed, 534 insertions(+), 82 deletions(-)
New commits:
commit ac80097a31098a9ac746c3d14c1eeaa2198dbef8
Author: Chris Wilson <chris at chris-wilson.co.uk>
Date: Sun Mar 4 19:01:45 2012 +0000
traps: First attempt to convert curvy unantialiased polygon to scanlines
If we have a curvy polygon, we can expect to generate lots of short
trapezoids. However, we may be able to reduce the transport size by
converting them into a set of boxes instead.
Signed-off-by: Chris Wilson <chris at chris-wilson.co.uk>
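For context, the new span_to_boxes callback below leans on cairo's half-open span convention: each cairo_half_open_span_t records the x at which a coverage value begins, and the next entry in the array supplies the right edge, which is why the loops stop once only one entry remains. A minimal sketch of that convention, using stand-in types rather than the cairo internals:

    /* sketch only: stand-in types, not the cairo structs */
    #include <stdio.h>

    typedef struct { int x; unsigned char coverage; } half_open_span_t;

    /* Each run covers [spans[i].x, spans[i+1].x) at the given coverage,
     * so the last entry only terminates the previous run. */
    static void
    emit_boxes (int y, int h, const half_open_span_t *spans, unsigned num_spans)
    {
        while (num_spans-- > 1) {
            if (spans[0].coverage) /* mono rasterisation: coverage is 0 or full */
                printf ("box x=[%d,%d) y=[%d,%d)\n",
                        spans[0].x, spans[1].x, y, y + h);
            spans++;
        }
    }

    int
    main (void)
    {
        /* two filled runs on one row: [2,5) and [8,9) */
        half_open_span_t spans[] = { {2, 255}, {5, 0}, {8, 255}, {9, 0} };
        emit_boxes (10, 1, spans, 4);
        return 0;
    }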
diff --git a/src/cairo-boxes-private.h b/src/cairo-boxes-private.h
index 57d1228..910b2c5 100644
--- a/src/cairo-boxes-private.h
+++ b/src/cairo-boxes-private.h
@@ -116,6 +116,11 @@ _cairo_boxes_for_each_box (cairo_boxes_t *boxes,
cairo_bool_t (*func) (cairo_box_t *box, void *data),
void *data);
+cairo_private cairo_status_t
+_cairo_rasterise_polygon_to_boxes (cairo_polygon_t *polygon,
+ cairo_fill_rule_t fill_rule,
+ cairo_boxes_t *boxes);
+
cairo_private void
_cairo_boxes_fini (cairo_boxes_t *boxes);
diff --git a/src/cairo-boxes.c b/src/cairo-boxes.c
index 182601a..a872f0d 100644
--- a/src/cairo-boxes.c
+++ b/src/cairo-boxes.c
@@ -373,6 +373,65 @@ _cairo_boxes_for_each_box (cairo_boxes_t *boxes,
return TRUE;
}
+struct cairo_box_renderer {
+ cairo_span_renderer_t base;
+ cairo_boxes_t *boxes;
+};
+
+static cairo_status_t
+span_to_boxes (void *abstract_renderer, int y, int h,
+ const cairo_half_open_span_t *spans, unsigned num_spans)
+{
+ struct cairo_box_renderer *r = abstract_renderer;
+ cairo_status_t status = CAIRO_STATUS_SUCCESS;
+ cairo_box_t box;
+
+ if (num_spans == 0)
+ return CAIRO_STATUS_SUCCESS;
+
+ box.p1.y = _cairo_fixed_from_int (y);
+ box.p2.y = _cairo_fixed_from_int (y + h);
+ do {
+ if (spans[0].coverage) {
+ box.p1.x = _cairo_fixed_from_int(spans[0].x);
+ box.p2.x = _cairo_fixed_from_int(spans[1].x);
+ status = _cairo_boxes_add (r->boxes, CAIRO_ANTIALIAS_DEFAULT, &box);
+ }
+ spans++;
+ } while (--num_spans > 1 && status == CAIRO_STATUS_SUCCESS);
+
+ return status;
+}
+
+cairo_status_t
+_cairo_rasterise_polygon_to_boxes (cairo_polygon_t *polygon,
+ cairo_fill_rule_t fill_rule,
+ cairo_boxes_t *boxes)
+{
+ struct cairo_box_renderer renderer;
+ cairo_scan_converter_t *converter;
+ cairo_int_status_t status;
+ cairo_rectangle_int_t r;
+
+ TRACE ((stderr, "%s: fill_rule=%d\n", __FUNCTION__, fill_rule));
+
+ _cairo_box_round_to_rectangle (&polygon->extents, &r);
+ converter = _cairo_mono_scan_converter_create (r.x, r.y,
+ r.x + r.width,
+ r.y + r.height,
+ fill_rule);
+ status = _cairo_mono_scan_converter_add_polygon (converter, polygon);
+ if (unlikely (status))
+ goto cleanup_converter;
+
+ renderer.boxes = boxes;
+ renderer.base.render_rows = span_to_boxes;
+
+ status = converter->generate (converter, &renderer.base);
+cleanup_converter:
+ converter->destroy (converter);
+ return status;
+}
void
_cairo_debug_print_boxes (FILE *stream, const cairo_boxes_t *boxes)
diff --git a/src/cairo-traps-compositor.c b/src/cairo-traps-compositor.c
index 964d85e..b1ade5c 100644
--- a/src/cairo-traps-compositor.c
+++ b/src/cairo-traps-compositor.c
@@ -1434,6 +1434,20 @@ clip_and_composite_polygon (const cairo_traps_compositor_t *compositor,
}
}
+ if (antialias == CAIRO_ANTIALIAS_NONE && curvy) {
+ cairo_boxes_t boxes;
+
+ _cairo_boxes_init (&boxes);
+ status = _cairo_rasterise_polygon_to_boxes (polygon, fill_rule, &boxes);
+ if (likely (status == CAIRO_INT_STATUS_SUCCESS)) {
+ assert (boxes.is_pixel_aligned);
+ status = clip_and_composite_boxes (compositor, extents, &boxes);
+ }
+ _cairo_boxes_fini (&boxes);
+ if ((status != CAIRO_INT_STATUS_UNSUPPORTED))
+ return status;
+ }
+
_cairo_traps_init (&traps.traps);
if (antialias == CAIRO_ANTIALIAS_NONE && curvy) {
commit c45822886aae53def2e76ef582aac167adf7fd56
Author: Chris Wilson <chris at chris-wilson.co.uk>
Date: Sun Mar 4 18:31:20 2012 +0000
traps: Use the mono-scan-converter to reduce the number of traps
This trick only seems effective with mono-rasterisation, with a win of
about 10% for tiger-demo --antialias=none. At other antialias settings,
performance is reduced.
Signed-off-by: Chris Wilson <chris at chris-wilson.co.uk>
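One detail worth spelling out: a mono-rasterised span is pixel aligned, so the trapezoid it yields is degenerate, with vertical left and right edges, which is exactly what span_to_traps below constructs. A small sketch with stand-in types (not the cairo structs):

    #include <stdio.h>

    typedef struct { long x, y; } point_t;
    typedef struct { point_t p1, p2; } line_t;
    typedef struct { long top, bottom; line_t left, right; } trap_t;

    /* A span covering [x0, x1) on rows [y0, y1) becomes a "trapezoid"
     * whose left and right edges are the vertical lines x = x0 and x = x1. */
    static trap_t
    rect_trap (long x0, long x1, long y0, long y1)
    {
        trap_t t = { y0, y1,
                     { { x0, y0 }, { x0, y1 } },
                     { { x1, y0 }, { x1, y1 } } };
        return t;
    }

    int
    main (void)
    {
        trap_t t = rect_trap (2, 5, 10, 11);
        printf ("top=%ld bottom=%ld left=%ld right=%ld\n",
                t.top, t.bottom, t.left.p1.x, t.right.p1.x);
        return 0;
    }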
diff --git a/src/cairo-traps-compositor.c b/src/cairo-traps-compositor.c
index d93b057..964d85e 100644
--- a/src/cairo-traps-compositor.c
+++ b/src/cairo-traps-compositor.c
@@ -1370,7 +1370,8 @@ clip_and_composite_polygon (const cairo_traps_compositor_t *compositor,
cairo_composite_rectangles_t *extents,
cairo_polygon_t *polygon,
cairo_antialias_t antialias,
- cairo_fill_rule_t fill_rule)
+ cairo_fill_rule_t fill_rule,
+ cairo_bool_t curvy)
{
composite_traps_info_t traps;
cairo_surface_t *dst = extents->surface;
@@ -1435,7 +1436,11 @@ clip_and_composite_polygon (const cairo_traps_compositor_t *compositor,
_cairo_traps_init (&traps.traps);
- status = _cairo_bentley_ottmann_tessellate_polygon (&traps.traps, polygon, fill_rule);
+ if (antialias == CAIRO_ANTIALIAS_NONE && curvy) {
+ status = _cairo_rasterise_polygon_to_traps (polygon, fill_rule, antialias, &traps.traps);
+ } else {
+ status = _cairo_bentley_ottmann_tessellate_polygon (&traps.traps, polygon, fill_rule);
+ }
if (unlikely (status))
goto CLEANUP_TRAPS;
@@ -1647,7 +1652,7 @@ clip_and_composite_boxes (const cairo_traps_compositor_t *compositor,
extents->clip = clip;
status = clip_and_composite_polygon (compositor, extents, &polygon,
- antialias, fill_rule);
+ antialias, fill_rule, FALSE);
clip = extents->clip;
extents->clip = saved_clip;
@@ -2032,6 +2037,24 @@ _cairo_traps_compositor_stroke (const cairo_compositor_t *_compositor,
_cairo_tristrip_fini (&info.strip);
}
+ if (status == CAIRO_INT_STATUS_UNSUPPORTED &&
+ path->has_curve_to && antialias == CAIRO_ANTIALIAS_NONE) {
+ cairo_polygon_t polygon;
+
+ _cairo_polygon_init_with_clip (&polygon, extents->clip);
+ status = _cairo_path_fixed_stroke_to_polygon (path, style,
+ ctm, ctm_inverse,
+ tolerance,
+ &polygon);
+ if (likely (status == CAIRO_INT_STATUS_SUCCESS))
+ status = clip_and_composite_polygon (compositor,
+ extents, &polygon,
+ CAIRO_ANTIALIAS_NONE,
+ CAIRO_FILL_RULE_WINDING,
+ TRUE);
+ _cairo_polygon_fini (&polygon);
+ }
+
if (status == CAIRO_INT_STATUS_UNSUPPORTED) {
composite_traps_info_t info;
@@ -2108,7 +2131,7 @@ _cairo_traps_compositor_fill (const cairo_compositor_t *_compositor,
#endif
if (likely (status == CAIRO_INT_STATUS_SUCCESS)) {
status = clip_and_composite_polygon (compositor, extents, &polygon,
- antialias, fill_rule);
+ antialias, fill_rule, path->has_curve_to);
}
_cairo_polygon_fini (&polygon);
}
diff --git a/src/cairo-traps-private.h b/src/cairo-traps-private.h
index 5b17719..62c0fe7 100644
--- a/src/cairo-traps-private.h
+++ b/src/cairo-traps-private.h
@@ -121,6 +121,12 @@ cairo_private cairo_status_t
_cairo_traps_path (const cairo_traps_t *traps,
cairo_path_fixed_t *path);
+cairo_private cairo_int_status_t
+_cairo_rasterise_polygon_to_traps (cairo_polygon_t *polygon,
+ cairo_fill_rule_t fill_rule,
+ cairo_antialias_t antialias,
+ cairo_traps_t *traps);
+
CAIRO_END_DECLS
#endif /* CAIRO_TRAPS_PRIVATE_H */
diff --git a/src/cairo-traps.c b/src/cairo-traps.c
index 30e626f..c776d33 100644
--- a/src/cairo-traps.c
+++ b/src/cairo-traps.c
@@ -44,6 +44,7 @@
#include "cairo-region-private.h"
#include "cairo-slope-private.h"
#include "cairo-traps-private.h"
+#include "cairo-spans-private.h"
/* private functions */
@@ -750,3 +751,64 @@ _cairo_debug_print_traps (FILE *file, const cairo_traps_t *traps)
traps->traps[n].right.p2.y);
}
}
+
+struct cairo_trap_renderer {
+ cairo_span_renderer_t base;
+ cairo_traps_t *traps;
+};
+
+static cairo_status_t
+span_to_traps (void *abstract_renderer, int y, int h,
+ const cairo_half_open_span_t *spans, unsigned num_spans)
+{
+ struct cairo_trap_renderer *r = abstract_renderer;
+ cairo_fixed_t top, bot;
+
+ if (num_spans == 0)
+ return CAIRO_STATUS_SUCCESS;
+
+ top = _cairo_fixed_from_int (y);
+ bot = _cairo_fixed_from_int (y + h);
+ do {
+ if (spans[0].coverage) {
+ cairo_fixed_t x0 = _cairo_fixed_from_int(spans[0].x);
+ cairo_fixed_t x1 = _cairo_fixed_from_int(spans[1].x);
+ cairo_line_t left = { { x0, top }, { x0, bot } },
+ right = { { x1, top }, { x1, bot } };
+ _cairo_traps_add_trap (r->traps, top, bot, &left, &right);
+ }
+ spans++;
+ } while (--num_spans > 1);
+
+ return CAIRO_STATUS_SUCCESS;
+}
+
+cairo_int_status_t
+_cairo_rasterise_polygon_to_traps (cairo_polygon_t *polygon,
+ cairo_fill_rule_t fill_rule,
+ cairo_antialias_t antialias,
+ cairo_traps_t *traps)
+{
+ struct cairo_trap_renderer renderer;
+ cairo_scan_converter_t *converter;
+ cairo_int_status_t status;
+ cairo_rectangle_int_t r;
+
+ TRACE ((stderr, "%s: fill_rule=%d, antialias=%d\n",
+ __FUNCTION__, fill_rule, antialias));
+ assert(antialias == CAIRO_ANTIALIAS_NONE);
+
+ renderer.traps = traps;
+ renderer.base.render_rows = span_to_traps;
+
+ _cairo_box_round_to_rectangle (&polygon->extents, &r);
+ converter = _cairo_mono_scan_converter_create (r.x, r.y,
+ r.x + r.width,
+ r.y + r.height,
+ fill_rule);
+ status = _cairo_mono_scan_converter_add_polygon (converter, polygon);
+ if (likely (status == CAIRO_INT_STATUS_SUCCESS))
+ status = converter->generate (converter, &renderer.base);
+ converter->destroy (converter);
+ return status;
+}
commit 38a242a380d24c669f10dd542c3bab606434b8ad
Author: Chris Wilson <chris at chris-wilson.co.uk>
Date: Sun Mar 4 17:59:28 2012 +0000
spans,image,gl: Add fast-path for simple copies
Signed-off-by: Chris Wilson <chris at chris-wilson.co.uk>
diff --git a/src/cairo-gl-spans-compositor.c b/src/cairo-gl-spans-compositor.c
index 81de3c4..8c83a43 100644
--- a/src/cairo-gl-spans-compositor.c
+++ b/src/cairo-gl-spans-compositor.c
@@ -282,6 +282,67 @@ FAIL:
}
static cairo_int_status_t
+draw_image_boxes (void *_dst,
+ cairo_image_surface_t *image,
+ cairo_boxes_t *boxes,
+ int dx, int dy)
+{
+ cairo_gl_surface_t *dst = _dst;
+ struct _cairo_boxes_chunk *chunk;
+ int i;
+
+ for (chunk = &boxes->chunks; chunk; chunk = chunk->next) {
+ for (i = 0; i < chunk->count; i++) {
+ cairo_box_t *b = &chunk->base[i];
+ int x = _cairo_fixed_integer_part (b->p1.x);
+ int y = _cairo_fixed_integer_part (b->p1.y);
+ int w = _cairo_fixed_integer_part (b->p2.x) - x;
+ int h = _cairo_fixed_integer_part (b->p2.y) - y;
+ cairo_status_t status;
+
+ status = _cairo_gl_surface_draw_image (dst, image,
+ x + dx, y + dy,
+ w, h,
+ x, y);
+ if (unlikely (status))
+ return status;
+ }
+ }
+
+ return CAIRO_STATUS_SUCCESS;
+}
+
+static cairo_int_status_t copy_boxes (void *_dst,
+ cairo_surface_t *src,
+ cairo_boxes_t *boxes,
+ const cairo_rectangle_int_t *extents,
+ int dx, int dy)
+{
+ cairo_gl_composite_t setup;
+ cairo_gl_context_t *ctx;
+ cairo_int_status_t status;
+
+ TRACE ((stderr, "%s\n", __FUNCTION__));
+ status = _cairo_gl_composite_init (&setup, CAIRO_OPERATOR_SOURCE, _dst, FALSE);
+ if (unlikely (status))
+ goto FAIL;
+
+ _cairo_gl_composite_set_source_operand (&setup, source_to_operand (src));
+ _cairo_gl_operand_translate (&setup.src, dx, dy);
+
+ status = _cairo_gl_composite_begin (&setup, &ctx);
+ if (unlikely (status))
+ goto FAIL;
+
+ emit_aligned_boxes (ctx, boxes);
+ status = _cairo_gl_context_release (ctx, CAIRO_STATUS_SUCCESS);
+
+FAIL:
+ _cairo_gl_composite_fini (&setup);
+ return status;
+}
+
+static cairo_int_status_t
composite_boxes (void *_dst,
cairo_operator_t op,
cairo_surface_t *abstract_src,
@@ -307,7 +368,7 @@ composite_boxes (void *_dst,
_cairo_gl_composite_set_source_operand (&setup,
source_to_operand (abstract_src));
- _cairo_gl_operand_translate (&setup.mask, -src_x, -src_y);
+ _cairo_gl_operand_translate (&setup.src, -src_x, -src_y);
_cairo_gl_composite_set_mask_operand (&setup,
source_to_operand (abstract_mask));
@@ -434,6 +495,8 @@ _cairo_gl_span_compositor_get (void)
_cairo_spans_compositor_init (&spans, &shape);
spans.fill_boxes = fill_boxes;
+ spans.draw_image_boxes = draw_image_boxes;
+ spans.copy_boxes = copy_boxes;
//spans.check_composite_boxes = check_composite_boxes;
spans.pattern_to_surface = _cairo_gl_pattern_to_source;
spans.composite_boxes = composite_boxes;
diff --git a/src/cairo-image-compositor.c b/src/cairo-image-compositor.c
index 0a1557f..0f8142b 100644
--- a/src/cairo-image-compositor.c
+++ b/src/cairo-image-compositor.c
@@ -1976,6 +1976,8 @@ _cairo_image_spans_compositor_get (void)
//spans.acquire = acquire;
//spans.release = release;
spans.fill_boxes = fill_boxes;
+ spans.draw_image_boxes = draw_image_boxes;
+ //spans.copy_boxes = copy_boxes;
spans.pattern_to_surface = _cairo_image_source_create_for_pattern;
//spans.check_composite_boxes = check_composite_boxes;
spans.composite_boxes = composite_boxes;
diff --git a/src/cairo-spans-compositor-private.h b/src/cairo-spans-compositor-private.h
index 74a412d..d8b94fb 100644
--- a/src/cairo-spans-compositor-private.h
+++ b/src/cairo-spans-compositor-private.h
@@ -61,6 +61,17 @@ struct cairo_spans_compositor {
const cairo_color_t *color,
cairo_boxes_t *boxes);
+ cairo_int_status_t (*draw_image_boxes) (void *surface,
+ cairo_image_surface_t *image,
+ cairo_boxes_t *boxes,
+ int dx, int dy);
+
+ cairo_int_status_t (*copy_boxes) (void *surface,
+ cairo_surface_t *src,
+ cairo_boxes_t *boxes,
+ const cairo_rectangle_int_t *extents,
+ int dx, int dy);
+
cairo_surface_t * (*pattern_to_surface) (cairo_surface_t *dst,
const cairo_pattern_t *pattern,
cairo_bool_t is_mask,
diff --git a/src/cairo-spans-compositor.c b/src/cairo-spans-compositor.c
index 2398ace..86d9896 100644
--- a/src/cairo-spans-compositor.c
+++ b/src/cairo-spans-compositor.c
@@ -436,6 +436,49 @@ op_reduces_to_source (const cairo_composite_rectangles_t *extents,
return FALSE;
}
+static cairo_status_t
+upload_boxes (const cairo_spans_compositor_t *compositor,
+ const cairo_composite_rectangles_t *extents,
+ cairo_boxes_t *boxes)
+{
+ cairo_surface_t *dst = extents->surface;
+ const cairo_surface_pattern_t *source = &extents->source_pattern.surface;
+ cairo_surface_t *src;
+ cairo_rectangle_int_t limit;
+ cairo_int_status_t status;
+ int tx, ty;
+
+ TRACE ((stderr, "%s\n", __FUNCTION__));
+
+ src = _cairo_pattern_get_source(source, &limit);
+ if (!(src->type == CAIRO_SURFACE_TYPE_IMAGE || src->type == dst->type))
+ return CAIRO_INT_STATUS_UNSUPPORTED;
+
+ if (! _cairo_matrix_is_integer_translation (&source->base.matrix, &tx, &ty))
+ return CAIRO_INT_STATUS_UNSUPPORTED;
+
+ /* Check that the data is entirely within the image */
+ if (extents->bounded.x + tx < limit.x || extents->bounded.y + ty < limit.y)
+ return CAIRO_INT_STATUS_UNSUPPORTED;
+
+ if (extents->bounded.x + extents->bounded.width + tx > limit.x + limit.width ||
+ extents->bounded.y + extents->bounded.height + ty > limit.y + limit.height)
+ return CAIRO_INT_STATUS_UNSUPPORTED;
+
+ tx += limit.x;
+ ty += limit.y;
+
+ if (src->type == CAIRO_SURFACE_TYPE_IMAGE)
+ status = compositor->draw_image_boxes (dst,
+ (cairo_image_surface_t *)src,
+ boxes, tx, ty);
+ else
+ status = compositor->copy_boxes (dst, src, boxes, &extents->bounded,
+ tx, ty);
+
+ return status;
+}
+
static cairo_int_status_t
composite_aligned_boxes (const cairo_spans_compositor_t *compositor,
const cairo_composite_rectangles_t *extents,
@@ -502,10 +545,8 @@ composite_aligned_boxes (const cairo_spans_compositor_t *compositor,
if (op_is_source)
op = CAIRO_OPERATOR_SOURCE;
status = compositor->fill_boxes (dst, op, color, boxes);
-#if 0
} else if (inplace && source->type == CAIRO_PATTERN_TYPE_SURFACE) {
- status = upload_inplace (compositor, extents, boxes);
-#endif
+ status = upload_boxes (compositor, extents, boxes);
} else {
cairo_surface_t *src;
cairo_surface_t *mask = NULL;
commit aed5a1cf1e38ae451d2aeaf0a56aa1248b42c0fa
Author: Chris Wilson <chris at chris-wilson.co.uk>
Date: Sun Mar 4 17:22:38 2012 +0000
spans: Reduce composite_aligned_boxes with over to source for opaque patterns
Signed-off-by: Chris Wilson <chris at chris-wilson.co.uk>
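Why the reduction is safe, in Porter-Duff terms (premultiplied alpha):

    dst' = src + (1 - alpha_src) * dst        (OVER)

If the source is opaque across the sampled area (alpha_src = 1) and the mask is an opaque solid, the second term vanishes:

    dst' = src                                (SOURCE)

so an opaque OVER is indistinguishable from SOURCE and can take the cheaper SOURCE paths, including the new copy/upload fast paths.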
diff --git a/src/cairo-spans-compositor.c b/src/cairo-spans-compositor.c
index 9a1c905..2398ace 100644
--- a/src/cairo-spans-compositor.c
+++ b/src/cairo-spans-compositor.c
@@ -420,7 +420,8 @@ recording_pattern_contains_sample (const cairo_pattern_t *pattern,
}
static cairo_bool_t
-op_reduces_to_source (const cairo_composite_rectangles_t *extents)
+op_reduces_to_source (const cairo_composite_rectangles_t *extents,
+ cairo_bool_t no_mask)
{
if (extents->op == CAIRO_OPERATOR_SOURCE)
return TRUE;
@@ -428,6 +429,10 @@ op_reduces_to_source (const cairo_composite_rectangles_t *extents)
if (extents->surface->is_clear)
return extents->op == CAIRO_OPERATOR_OVER || extents->op == CAIRO_OPERATOR_ADD;
+ if (no_mask && extents->op == CAIRO_OPERATOR_OVER)
+ return _cairo_pattern_is_opaque (&extents->source_pattern.base,
+ &extents->source_sample_area);
+
return FALSE;
}
@@ -450,9 +455,9 @@ composite_aligned_boxes (const cairo_spans_compositor_t *compositor,
if (need_clip_mask && ! extents->is_bounded)
return CAIRO_INT_STATUS_UNSUPPORTED;
- op_is_source = op_reduces_to_source (extents);
no_mask = extents->mask_pattern.base.type == CAIRO_PATTERN_TYPE_SOLID &&
- CAIRO_ALPHA_IS_OPAQUE (extents->mask_pattern.solid.color.alpha);
+ CAIRO_COLOR_IS_OPAQUE (&extents->mask_pattern.solid.color);
+ op_is_source = op_reduces_to_source (extents, no_mask);
inplace = ! need_clip_mask && op_is_source && no_mask;
TRACE ((stderr, "%s: op-is-source=%d [op=%d], no-mask=%d, inplace=%d\n",
commit cc13379ee37cc077cd728588dfbb15a38980c23c
Author: Chris Wilson <chris at chris-wilson.co.uk>
Date: Sun Mar 4 16:51:02 2012 +0000
image: Add a fast-path for mono-rasterised blits
Signed-off-by: Chris Wilson <chris at chris-wilson.co.uk>
diff --git a/src/cairo-image-compositor.c b/src/cairo-image-compositor.c
index 843d9c7..0a1557f 100644
--- a/src/cairo-image-compositor.c
+++ b/src/cairo-image-compositor.c
@@ -1331,21 +1331,37 @@ span_renderer_fini (cairo_abstract_span_renderer_t *_r,
typedef struct _cairo_image_span_renderer {
cairo_span_renderer_t base;
+ const cairo_composite_rectangles_t *composite;
cairo_rectangle_int_t extents;
float opacity;
- int stride;
- int bpp;
- uint8_t *data;
-
- const cairo_composite_rectangles_t *composite;
- pixman_image_t *src, *mask, *dst;
- int src_x, src_y;
- int mask_x, mask_y;
- uint32_t pixel;
-
uint8_t op;
+ int bpp;
+ pixman_image_t *src, *mask;
+ union {
+ struct fill {
+ int stride;
+ uint8_t *data;
+ uint32_t pixel;
+ } fill;
+ struct blit {
+ int stride;
+ uint8_t *data;
+ int src_stride;
+ uint8_t *src_data;
+ } blit;
+ struct composite {
+ pixman_image_t *dst;
+ int src_x, src_y;
+ int mask_x, mask_y;
+ } composite;
+ struct finish {
+ int src_x, src_y;
+ int stride;
+ uint8_t *data;
+ } mask;
+ } u;
uint8_t buf[sizeof(cairo_abstract_span_renderer_t)-128];
} cairo_image_span_renderer_t;
COMPILE_TIME_ASSERT (sizeof (cairo_image_span_renderer_t) <= sizeof (cairo_abstract_span_renderer_t));
@@ -1363,7 +1379,7 @@ _cairo_image_spans (void *abstract_renderer,
if (num_spans == 0)
return CAIRO_STATUS_SUCCESS;
- mask = r->data + (y - r->extents.y) * r->stride;
+ mask = r->u.mask.data + (y - r->extents.y) * r->u.mask.stride;
mask += spans[0].x - r->extents.x;
row = mask;
@@ -1381,7 +1397,7 @@ _cairo_image_spans (void *abstract_renderer,
len = row - mask;
row = mask;
while (--height) {
- mask += r->stride;
+ mask += r->u.mask.stride;
memcpy (mask, row, len);
}
@@ -1398,17 +1414,17 @@ _cairo_image_spans_and_zero (void *abstract_renderer,
uint8_t *mask;
int len;
- mask = r->data;
+ mask = r->u.mask.data;
if (y > r->extents.y) {
- len = (y - r->extents.y) * r->stride;
+ len = (y - r->extents.y) * r->u.mask.stride;
memset (mask, 0, len);
mask += len;
}
r->extents.y = y + height;
- r->data = mask + height * r->stride;
+ r->u.mask.data = mask + height * r->u.mask.stride;
if (num_spans == 0) {
- memset (mask, 0, height * r->stride);
+ memset (mask, 0, height * r->u.mask.stride);
} else {
uint8_t *row = mask;
@@ -1435,7 +1451,7 @@ _cairo_image_spans_and_zero (void *abstract_renderer,
row = mask;
while (--height) {
- mask += r->stride;
+ mask += r->u.mask.stride;
memcpy (mask, row, r->extents.width);
}
}
@@ -1449,7 +1465,7 @@ _cairo_image_finish_spans_and_zero (void *abstract_renderer)
cairo_image_span_renderer_t *r = abstract_renderer;
if (r->extents.y < r->extents.height)
- memset (r->data, 0, (r->extents.height - r->extents.y) * r->stride);
+ memset (r->u.mask.data, 0, (r->extents.height - r->extents.y) * r->u.mask.stride);
return CAIRO_STATUS_SUCCESS;
}
@@ -1467,9 +1483,9 @@ _fill8_spans (void *abstract_renderer, int y, int h,
do {
if (spans[0].coverage) {
int len = spans[1].x - spans[0].x;
- uint8_t *d = r->data + r->stride*y + spans[0].x;
+ uint8_t *d = r->u.fill.data + r->u.fill.stride*y + spans[0].x;
while (len--)
- *d++ = r->pixel;
+ *d++ = r->u.fill.pixel;
}
spans++;
} while (--num_spans > 1);
@@ -1479,9 +1495,9 @@ _fill8_spans (void *abstract_renderer, int y, int h,
int yy = y, hh = h;
do {
int len = spans[1].x - spans[0].x;
- uint8_t *d = r->data + r->stride*yy + spans[0].x;
+ uint8_t *d = r->u.fill.data + r->u.fill.stride*yy + spans[0].x;
while (len--)
- *d++ = r->pixel;
+ *d++ = r->u.fill.pixel;
yy++;
} while (--hh);
}
@@ -1505,9 +1521,9 @@ _fill16_spans (void *abstract_renderer, int y, int h,
do {
if (spans[0].coverage) {
int len = spans[1].x - spans[0].x;
- uint16_t *d = (uint16_t*)(r->data + r->stride*y + spans[0].x*2);
+ uint16_t *d = (uint16_t*)(r->u.fill.data + r->u.fill.stride*y + spans[0].x*2);
while (len--)
- *d++ = r->pixel;
+ *d++ = r->u.fill.pixel;
}
spans++;
} while (--num_spans > 1);
@@ -1517,9 +1533,9 @@ _fill16_spans (void *abstract_renderer, int y, int h,
int yy = y, hh = h;
do {
int len = spans[1].x - spans[0].x;
- uint16_t *d = (uint16_t*)(r->data + r->stride*yy + spans[0].x*2);
+ uint16_t *d = (uint16_t*)(r->u.fill.data + r->u.fill.stride*yy + spans[0].x*2);
while (len--)
- *d++ = r->pixel;
+ *d++ = r->u.fill.pixel;
yy++;
} while (--hh);
}
@@ -1544,12 +1560,12 @@ _fill32_spans (void *abstract_renderer, int y, int h,
if (spans[0].coverage) {
int len = spans[1].x - spans[0].x;
if (len > 32) {
- pixman_fill ((uint32_t *) r->data, r->stride / sizeof(uint32_t), r->bpp,
- spans[0].x, y, len, 1, r->pixel);
+ pixman_fill ((uint32_t *)r->u.fill.data, r->u.fill.stride / sizeof(uint32_t), r->bpp,
+ spans[0].x, y, len, 1, r->u.fill.pixel);
} else {
- uint32_t *d = (uint32_t*)(r->data + r->stride*y + spans[0].x*4);
+ uint32_t *d = (uint32_t*)(r->u.fill.data + r->u.fill.stride*y + spans[0].x*4);
while (len--)
- *d++ = r->pixel;
+ *d++ = r->u.fill.pixel;
}
}
spans++;
@@ -1558,16 +1574,16 @@ _fill32_spans (void *abstract_renderer, int y, int h,
do {
if (spans[0].coverage) {
if (spans[1].x - spans[0].x > 16) {
- pixman_fill ((uint32_t *) r->data, r->stride / sizeof(uint32_t), r->bpp,
+ pixman_fill ((uint32_t *)r->u.fill.data, r->u.fill.stride / sizeof(uint32_t), r->bpp,
spans[0].x, y, spans[1].x - spans[0].x, h,
- r->pixel);
+ r->u.fill.pixel);
} else {
int yy = y, hh = h;
do {
int len = spans[1].x - spans[0].x;
- uint32_t *d = (uint32_t*)(r->data + r->stride*yy + spans[0].x*4);
+ uint32_t *d = (uint32_t*)(r->u.fill.data + r->u.fill.stride*yy + spans[0].x*4);
while (len--)
- *d++ = r->pixel;
+ *d++ = r->u.fill.pixel;
yy++;
} while (--hh);
}
@@ -1604,6 +1620,84 @@ _fill_spans (void *abstract_renderer, int y, int h,
#endif
static cairo_status_t
+_blit_spans (void *abstract_renderer, int y, int h,
+ const cairo_half_open_span_t *spans, unsigned num_spans)
+{
+ cairo_image_span_renderer_t *r = abstract_renderer;
+ int cpp;
+
+ if (num_spans == 0)
+ return CAIRO_STATUS_SUCCESS;
+
+ cpp = r->bpp/8;
+ if (likely (h == 1)) {
+ uint8_t *src = r->u.blit.src_data + y*r->u.blit.src_stride;
+ uint8_t *dst = r->u.blit.data + y*r->u.blit.stride;
+ do {
+ if (spans[0].coverage) {
+ void *s = src + spans[0].x*cpp;
+ void *d = dst + spans[0].x*cpp;
+ int len = (spans[1].x - spans[0].x) * cpp;
+ switch (len) {
+ case 1:
+ *(uint8_t *)d = *(uint8_t *)s;
+ break;
+ case 2:
+ *(uint16_t *)d = *(uint16_t *)s;
+ break;
+ case 4:
+ *(uint32_t *)d = *(uint32_t *)s;
+ break;
+#if HAVE_UINT64_T
+ case 8:
+ *(uint64_t *)d = *(uint64_t *)s;
+ break;
+#endif
+ default:
+ memcpy(d, s, len);
+ break;
+ }
+ }
+ spans++;
+ } while (--num_spans > 1);
+ } else {
+ do {
+ if (spans[0].coverage) {
+ int yy = y, hh = h;
+ do {
+ void *src = r->u.blit.src_data + yy*r->u.blit.src_stride + spans[0].x*cpp;
+ void *dst = r->u.blit.data + yy*r->u.blit.stride + spans[0].x*cpp;
+ int len = (spans[1].x - spans[0].x) * cpp;
+ switch (len) {
+ case 1:
+ *(uint8_t *)dst = *(uint8_t *)src;
+ break;
+ case 2:
+ *(uint16_t *)dst = *(uint16_t *)src;
+ break;
+ case 4:
+ *(uint32_t *)dst = *(uint32_t *)src;
+ break;
+#if HAVE_UINT64_T
+ case 8:
+ *(uint64_t *)dst = *(uint64_t *)src;
+ break;
+#endif
+ default:
+ memcpy(dst, src, len);
+ break;
+ }
+ yy++;
+ } while (--hh);
+ }
+ spans++;
+ } while (--num_spans > 1);
+ }
+
+ return CAIRO_STATUS_SUCCESS;
+}
+
+static cairo_status_t
_mono_spans (void *abstract_renderer, int y, int h,
const cairo_half_open_span_t *spans, unsigned num_spans)
{
@@ -1614,8 +1708,9 @@ _mono_spans (void *abstract_renderer, int y, int h,
do {
if (spans[0].coverage) {
- pixman_image_composite32 (r->op, r->src, NULL, (pixman_image_t*)r->dst,
- spans[0].x + r->src_x, y + r->src_y,
+ pixman_image_composite32 (r->op,
+ r->src, NULL, r->u.composite.dst,
+ spans[0].x + r->u.composite.src_x, y + r->u.composite.src_y,
0, 0,
spans[0].x, y,
spans[1].x - spans[0].x, h);
@@ -1646,7 +1741,7 @@ mono_renderer_init (cairo_image_span_renderer_t *r,
color = &composite->source_pattern.solid.color;
if (fill_reduces_to_source (composite->op, color, dst) &&
- color_to_pixel (color, dst->pixman_format, &r->pixel)) {
+ color_to_pixel (color, dst->pixman_format, &r->u.fill.pixel)) {
/* Use plain C for the fill operations as the span length is
* typically small, too small to payback the startup overheads of
* using SSE2 etc.
@@ -1691,11 +1786,11 @@ mono_renderer_init (cairo_image_span_renderer_t *r,
r->src = _pixman_image_for_pattern (dst, &composite->source_pattern.base, FALSE,
&composite->unbounded,
&composite->source_sample_area,
- &r->src_x, &r->src_y);
+ &r->u.composite.src_x, &r->u.composite.src_y);
if (unlikely (r->src == NULL))
return _cairo_error (CAIRO_STATUS_NO_MEMORY);
- r->dst = to_pixman_image (composite->surface);
+ r->u.composite.dst = to_pixman_image (composite->surface);
r->op = _pixman_operator (composite->op);
r->base.render_rows = _mono_spans;
} else{
@@ -1704,8 +1799,6 @@ mono_renderer_init (cairo_image_span_renderer_t *r,
}
r->base.finish = NULL;
- r->data = dst->data;
- r->stride = dst->stride;
r->bpp = PIXMAN_FORMAT_BPP(dst->pixman_format);
return CAIRO_INT_STATUS_SUCCESS;
@@ -1764,7 +1857,7 @@ span_renderer_init (cairo_abstract_span_renderer_t *_r,
r->src = _pixman_image_for_pattern (dst, source, FALSE,
&composite->unbounded,
&composite->source_sample_area,
- &r->src_x, &r->src_y);
+ &r->u.mask.src_x, &r->u.mask.src_y);
if (unlikely (r->src == NULL))
return _cairo_error (CAIRO_STATUS_NO_MEMORY);
@@ -1790,8 +1883,8 @@ span_renderer_init (cairo_abstract_span_renderer_t *_r,
{
pixman_image_unref (r->src);
r->src = mask;
- r->src_x = mask_x;
- r->src_y = mask_y;
+ r->u.mask.src_x = mask_x;
+ r->u.mask.src_y = mask_y;
mask = NULL;
}
@@ -1802,8 +1895,8 @@ span_renderer_init (cairo_abstract_span_renderer_t *_r,
}
r->extents = composite->unbounded;
- r->stride = (r->extents.width + 3) & ~3;
- if (r->extents.height * r->stride > (int)sizeof (r->buf)) {
+ r->u.mask.stride = (r->extents.width + 3) & ~3;
+ if (r->extents.height * r->u.mask.stride > (int)sizeof (r->buf)) {
r->mask = pixman_image_create_bits (PIXMAN_a8,
r->extents.width,
r->extents.height,
@@ -1815,7 +1908,7 @@ span_renderer_init (cairo_abstract_span_renderer_t *_r,
r->mask = pixman_image_create_bits (PIXMAN_a8,
r->extents.width,
r->extents.height,
- (uint32_t *)r->buf, r->stride);
+ (uint32_t *)r->buf, r->u.mask.stride);
r->base.render_rows = _cairo_image_spans_and_zero;
r->base.finish = _cairo_image_finish_spans_and_zero;
@@ -1823,8 +1916,8 @@ span_renderer_init (cairo_abstract_span_renderer_t *_r,
if (unlikely (r->mask == NULL))
return _cairo_error (CAIRO_STATUS_NO_MEMORY);
- r->data = (uint8_t *) pixman_image_get_data (r->mask);
- r->stride = pixman_image_get_stride (r->mask);
+ r->u.mask.data = (uint8_t *) pixman_image_get_data (r->mask);
+ r->u.mask.stride = pixman_image_get_stride (r->mask);
r->extents.height += r->extents.y;
return CAIRO_STATUS_SUCCESS;
@@ -1846,8 +1939,8 @@ span_renderer_fini (cairo_abstract_span_renderer_t *_r,
pixman_image_composite32 (r->op, r->src, r->mask,
to_pixman_image (composite->surface),
- composite->unbounded.x + r->src_x,
- composite->unbounded.y + r->src_y,
+ composite->unbounded.x + r->u.mask.src_x,
+ composite->unbounded.y + r->u.mask.src_y,
0, 0,
composite->unbounded.x,
composite->unbounded.y,
commit 937325d6640aed9c1367627260f80a47b9ed7042
Author: Chris Wilson <chris at chris-wilson.co.uk>
Date: Sun Mar 4 16:16:38 2012 +0000
image: Perform the general composite operation inplace for mono rasterisation
We suffer from the large overhead of calling pixman_image_composite32
per span, but even with that overhead it is a net win, with the usual
caveats about cache efficiency and function-call overhead.
Signed-off-by: Chris Wilson <chris at chris-wilson.co.uk>
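As a rough illustration of the per-span cost being weighed here, a standalone miniature (assuming pixman-1 is installed; this is a sketch of the call pattern, not code from the commit): one pixman_image_composite32 call per covered span, each touching only that span's width and row height:

    /* cc sketch.c $(pkg-config --cflags --libs pixman-1) */
    #include <pixman.h>

    int
    main (void)
    {
        pixman_color_t red = { 0xffff, 0x0000, 0x0000, 0xffff };
        pixman_image_t *src = pixman_image_create_solid_fill (&red);
        pixman_image_t *dst = pixman_image_create_bits (PIXMAN_a8r8g8b8,
                                                        64, 64, NULL, 64 * 4);

        /* one composite per span: a single 10-pixel run at (20, 5), height 1,
         * mirroring what _mono_spans does for each covered half-open span */
        pixman_image_composite32 (PIXMAN_OP_OVER, src, NULL, dst,
                                  20, 5,   /* src_x, src_y */
                                  0, 0,    /* mask_x, mask_y */
                                  20, 5,   /* dst_x, dst_y */
                                  10, 1);  /* width, height */

        pixman_image_unref (src);
        pixman_image_unref (dst);
        return 0;
    }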
diff --git a/src/cairo-image-compositor.c b/src/cairo-image-compositor.c
index 4507597..843d9c7 100644
--- a/src/cairo-image-compositor.c
+++ b/src/cairo-image-compositor.c
@@ -1339,8 +1339,9 @@ typedef struct _cairo_image_span_renderer {
uint8_t *data;
const cairo_composite_rectangles_t *composite;
- pixman_image_t *src, *mask;
+ pixman_image_t *src, *mask, *dst;
int src_x, src_y;
+ int mask_x, mask_y;
uint32_t pixel;
uint8_t op;
@@ -1602,6 +1603,29 @@ _fill_spans (void *abstract_renderer, int y, int h,
}
#endif
+static cairo_status_t
+_mono_spans (void *abstract_renderer, int y, int h,
+ const cairo_half_open_span_t *spans, unsigned num_spans)
+{
+ cairo_image_span_renderer_t *r = abstract_renderer;
+
+ if (num_spans == 0)
+ return CAIRO_STATUS_SUCCESS;
+
+ do {
+ if (spans[0].coverage) {
+ pixman_image_composite32 (r->op, r->src, NULL, (pixman_image_t*)r->dst,
+ spans[0].x + r->src_x, y + r->src_y,
+ 0, 0,
+ spans[0].x, y,
+ spans[1].x - spans[0].x, h);
+ }
+ spans++;
+ } while (--num_spans > 1);
+
+ return CAIRO_STATUS_SUCCESS;
+}
+
static cairo_int_status_t
mono_renderer_init (cairo_image_span_renderer_t *r,
const cairo_composite_rectangles_t *composite,
@@ -1609,39 +1633,81 @@ mono_renderer_init (cairo_image_span_renderer_t *r,
cairo_bool_t needs_clip)
{
cairo_image_surface_t *dst = (cairo_image_surface_t *)composite->surface;
- const cairo_color_t *color;
if (antialias != CAIRO_ANTIALIAS_NONE)
return CAIRO_INT_STATUS_UNSUPPORTED;
- if (composite->source_pattern.base.type != CAIRO_PATTERN_TYPE_SOLID)
- return CAIRO_INT_STATUS_UNSUPPORTED;
-
if (!_cairo_pattern_is_opaque_solid (&composite->mask_pattern.base))
return CAIRO_INT_STATUS_UNSUPPORTED;
- color = &composite->source_pattern.solid.color;
- if (!fill_reduces_to_source (composite->op, color, dst))
- return CAIRO_INT_STATUS_UNSUPPORTED;
+ r->base.render_rows = NULL;
+ if (composite->source_pattern.base.type == CAIRO_PATTERN_TYPE_SOLID) {
+ const cairo_color_t *color;
+
+ color = &composite->source_pattern.solid.color;
+ if (fill_reduces_to_source (composite->op, color, dst) &&
+ color_to_pixel (color, dst->pixman_format, &r->pixel)) {
+ /* Use plain C for the fill operations as the span length is
+ * typically small, too small to payback the startup overheads of
+ * using SSE2 etc.
+ */
+ switch (r->bpp) {
+ case 8: r->base.render_rows = _fill8_spans; break;
+ case 16: r->base.render_rows = _fill16_spans; break;
+ case 32: r->base.render_rows = _fill32_spans; break;
+ default: break;
+ }
+ r->u.fill.data = dst->data;
+ r->u.fill.stride = dst->stride;
+ }
+ } else if ((composite->op == CAIRO_OPERATOR_SOURCE ||
+ (composite->op == CAIRO_OPERATOR_OVER &&
+ (dst->base.is_clear || (dst->base.content & CAIRO_CONTENT_ALPHA) == 0))) &&
+ composite->source_pattern.base.type == CAIRO_PATTERN_TYPE_SURFACE &&
+ composite->source_pattern.surface.surface->backend->type == CAIRO_SURFACE_TYPE_IMAGE &&
+ to_image_surface(composite->source_pattern.surface.surface)->format == dst->format)
+ {
+ cairo_image_surface_t *src =
+ to_image_surface(composite->source_pattern.surface.surface);
+ int tx, ty;
+
+ if (_cairo_matrix_is_integer_translation(&composite->source_pattern.base.matrix,
+ &tx, &ty) &&
+ composite->bounded.x + tx >= 0 &&
+ composite->bounded.y + ty >= 0 &&
+ composite->bounded.x + composite->bounded.width + tx <= src->width &&
+ composite->bounded.y + composite->bounded.height + ty <= src->height) {
+
+ r->u.blit.stride = dst->stride;
+ r->u.blit.data = dst->data;
+ r->u.blit.src_stride = src->stride;
+ r->u.blit.src_data = src->data + src->stride * ty + tx * PIXMAN_FORMAT_BPP(src->format)/8;
+ r->base.render_rows = _blit_spans;
+ }
+ }
- if (!color_to_pixel (color, dst->pixman_format, &r->pixel))
- return CAIRO_INT_STATUS_UNSUPPORTED;
+ if (r->base.render_rows == NULL) {
+ if (1) { /* XXX calling pixman_image_composite per span is too slow */
+ r->src = _pixman_image_for_pattern (dst, &composite->source_pattern.base, FALSE,
+ &composite->unbounded,
+ &composite->source_sample_area,
+ &r->src_x, &r->src_y);
+ if (unlikely (r->src == NULL))
+ return _cairo_error (CAIRO_STATUS_NO_MEMORY);
+
+ r->dst = to_pixman_image (composite->surface);
+ r->op = _pixman_operator (composite->op);
+ r->base.render_rows = _mono_spans;
+ } else{
+ return CAIRO_INT_STATUS_UNSUPPORTED;
+ }
+ }
+ r->base.finish = NULL;
r->data = dst->data;
r->stride = dst->stride;
r->bpp = PIXMAN_FORMAT_BPP(dst->pixman_format);
- /* Use plain C for the fill operations as the span length is typically
- * small, too small to payback the startup overheads of using SSE2 etc.
- */
- switch (r->bpp) {
- case 8: r->base.render_rows = _fill8_spans; break;
- case 16: r->base.render_rows = _fill16_spans; break;
- case 32: r->base.render_rows = _fill32_spans; break;
- default: return CAIRO_INT_STATUS_UNSUPPORTED;
- }
- r->base.finish = NULL;
-
return CAIRO_INT_STATUS_SUCCESS;
}
@@ -1662,13 +1728,14 @@ span_renderer_init (cairo_abstract_span_renderer_t *_r,
if (needs_clip)
return CAIRO_INT_STATUS_UNSUPPORTED;
+ r->composite = composite;
+ r->mask = NULL;
+ r->src = NULL;
+
status = mono_renderer_init (r, composite, antialias, needs_clip);
if (status != CAIRO_INT_STATUS_UNSUPPORTED)
return status;
- r->composite = composite;
- r->mask = NULL;
- r->src = NULL;
r->bpp = 0;
if (op == CAIRO_OPERATOR_CLEAR) {
@@ -1771,10 +1838,7 @@ span_renderer_fini (cairo_abstract_span_renderer_t *_r,
TRACE ((stderr, "%s\n", __FUNCTION__));
- if (r->bpp)
- return;
-
- if (likely (status == CAIRO_INT_STATUS_SUCCESS)) {
+ if (likely (status == CAIRO_INT_STATUS_SUCCESS && r->bpp == 0)) {
const cairo_composite_rectangles_t *composite = r->composite;
if (r->base.finish)
diff --git a/test/reference/hatchings.ref.png b/test/reference/hatchings.ref.png
index 5aabc2c..7f367a1 100644
Binary files a/test/reference/hatchings.ref.png and b/test/reference/hatchings.ref.png differ
commit bda545dee47e6ddc4a82406262a8f2c75413fc75
Author: Chris Wilson <chris at chris-wilson.co.uk>
Date: Sun Mar 4 16:15:43 2012 +0000
spans: Retrim extents to clipped polygon
After combining the mask polygon with the clip polygon, recompute the
extents of the operation.
Signed-off-by: Chris Wilson <chris at chris-wilson.co.uk>
diff --git a/src/cairo-spans-compositor.c b/src/cairo-spans-compositor.c
index d55827b..9a1c905 100644
--- a/src/cairo-spans-compositor.c
+++ b/src/cairo-spans-compositor.c
@@ -818,6 +818,10 @@ clip_and_composite_polygon (const cairo_spans_compositor_t *compositor,
old_clip = extents->clip;
extents->clip = _cairo_clip_copy_region (extents->clip);
_cairo_clip_destroy (old_clip);
+
+ status = trim_extents_to_polygon (extents, polygon);
+ if (unlikely (status))
+ return status;
} else {
_cairo_polygon_fini (&clipper);
}