🔧 Improve extrect caching and tile computation

This commit is contained in:
Elena Torro 2026-03-09 17:17:25 +01:00
parent 804d94d0cc
commit 04207aebe3
6 changed files with 156 additions and 14 deletions

View File

@ -1169,6 +1169,8 @@
(process-pending shapes thumbnails-acc full-acc noop-fn
(fn []
(end-shapes-loading!)
;; Pre-compute extrect cache bottom-up before first tile rebuild
(h/call wasm/internal-module "_warm_extrect_cache")
(if render-callback
(render-callback)
(render-finish))
@ -1192,6 +1194,8 @@
(perf/end-measure "set-objects")
(process-pending shapes thumbnails full noop-fn
(fn []
;; Pre-compute extrect cache bottom-up before first tile rebuild
(h/call wasm/internal-module "_warm_extrect_cache")
(if render-callback
(render-callback)
(render-finish))

View File

@ -346,6 +346,20 @@ pub extern "C" fn set_view_end() -> Result<()> {
Ok(())
}
/// Pre-compute and cache the extrect (extended rectangle) of every shape
/// by walking the shape tree bottom-up, so each parent finds its
/// children's values already cached. This avoids expensive recursive
/// extrect calculations during the first tile rebuild after loading.
///
/// Exported to JS as `_warm_extrect_cache`; the frontend calls it right
/// after all shapes finish loading, before the first tile rebuild.
#[no_mangle]
#[wasm_error]
pub extern "C" fn warm_extrect_cache() -> Result<()> {
with_state!(state, {
// Bracket the warm-up with measurements so its cost shows up in
// performance traces.
performance::begin_measure!("warm_extrect_cache");
state.warm_extrect_cache();
performance::end_measure!("warm_extrect_cache");
});
Ok(())
}
/// Like set_view_end but uses chunked tile rebuild to avoid blocking
/// the main thread. Prepares the view state and starts the async
/// tile rebuild process. Call `tile_rebuild_step` in a rAF loop after this.

View File

@ -40,6 +40,10 @@ const VIEWPORT_INTEREST_AREA_THRESHOLD: i32 = 3;
const MAX_BLOCKING_TIME_MS: i32 = 32;
const NODE_BATCH_THRESHOLD: i32 = 3;
const BLUR_DOWNSCALE_THRESHOLD: f32 = 8.0;
// Tile margin for non-clipping shapes in the selrect early-out check.
// Non-clipping frames/groups can have children extending beyond their selrect.
// 10 tiles = 5120px at 1x zoom — generous enough for most overflow scenarios.
const NON_CLIP_TILE_MARGIN: i32 = 10;
type ClipStack = Vec<(Rect, Option<Corners>, Matrix)>;
@ -2378,10 +2382,31 @@ impl RenderState {
*/
pub fn get_tiles_for_shape(&mut self, shape: &Shape, tree: ShapesPoolRef) -> TileRect {
let scale = self.get_scale();
let extrect = self.get_cached_extrect(shape, tree, scale);
let tile_size = tiles::get_tile_size(scale);
let shape_tiles = tiles::get_tiles_for_rect(extrect, tile_size);
let interest_rect = &self.tile_viewbox.interest_rect;
// Fast path: check selrect against interest area before computing full extrect.
// For clipping shapes, selrect IS the bounds (children can't extend beyond),
// so we can use exact selrect check with no margin.
// For non-clipping shapes, children may extend beyond selrect, so we add a
// generous margin. If the selrect is far outside the interest area (beyond the
// margin), the shape and its children are almost certainly off-screen.
let selrect_tiles = tiles::get_tiles_for_rect(shape.selrect, tile_size);
let margin = if shape.clip_content {
0
} else {
NON_CLIP_TILE_MARGIN
};
if selrect_tiles.x1() > interest_rect.x2() + margin
|| selrect_tiles.x2() < interest_rect.x1() - margin
|| selrect_tiles.y1() > interest_rect.y2() + margin
|| selrect_tiles.y2() < interest_rect.y1() - margin
{
return TileRect(0, 0, -1, -1);
}
let extrect = self.get_cached_extrect(shape, tree, scale);
let shape_tiles = tiles::get_tiles_for_rect(extrect, tile_size);
// Calculate the intersection of shape_tiles with interest_rect
// This returns only the tiles that are both in the shape and in the interest area
let intersection_x1 = shape_tiles.x1().max(interest_rect.x1());
@ -2598,6 +2623,7 @@ impl RenderState {
false
}
/// Whether a chunked (non-blocking) tile rebuild is currently running.
// NOTE(review): no caller in view — kept behind #[allow(dead_code)],
// presumably for upcoming callers or debugging; confirm before removing.
#[allow(dead_code)]
pub fn is_tile_rebuild_in_progress(&self) -> bool {
self.tile_rebuild_in_progress
}

View File

@ -193,7 +193,7 @@ pub struct Shape {
pub shadows: Vec<Shadow>,
pub layout_item: Option<LayoutItem>,
pub bounds: OnceCell<math::Bounds>,
pub extrect_cache: RefCell<Option<(math::Rect, u32)>>,
pub extrect_cache: RefCell<Option<math::Rect>>,
pub svg_transform: Option<Matrix>,
pub ignore_constraints: bool,
deleted: bool,
@ -762,6 +762,12 @@ impl Shape {
}
/// Returns true when the shape is too small to be visible at the given
/// zoom level (both extrect dimensions below the visibility threshold).
pub fn visually_insignificant(&self, scale: f32, shapes_pool: ShapesPoolRef) -> bool {
    // Cheap early-out: the extrect always contains the selrect, so a
    // selrect that is already large enough on screen proves the shape is
    // visible without computing the (potentially expensive) extrect.
    let selrect_big_enough = self.selrect.width() * scale >= MIN_VISIBLE_SIZE
        || self.selrect.height() * scale >= MIN_VISIBLE_SIZE;
    if selrect_big_enough {
        return false;
    }
    // Slow path: insignificant only when BOTH extrect dimensions fall
    // below the threshold at this zoom level.
    let extrect = self.extrect(shapes_pool, scale);
    extrect.width() * scale < MIN_VISIBLE_SIZE && extrect.height() * scale < MIN_VISIBLE_SIZE
}
@ -1013,17 +1019,13 @@ impl Shape {
}
pub fn calculate_extrect(&self, shapes_pool: ShapesPoolRef, scale: f32) -> math::Rect {
let scale_key = (scale * 1000.0).round() as u32;
if let Some((cached_extrect, cached_scale)) = *self.extrect_cache.borrow() {
if cached_scale == scale_key {
return cached_extrect;
}
if let Some(cached_extrect) = *self.extrect_cache.borrow() {
return cached_extrect;
}
let extrect = self.calculate_extrect_uncached(shapes_pool, scale);
*self.extrect_cache.borrow_mut() = Some((extrect, scale_key));
*self.extrect_cache.borrow_mut() = Some(extrect);
extrect
}

View File

@ -201,10 +201,9 @@ impl State {
// assuming the parent is updating its children list via set_children() calls.
// Calling add_child here would create duplicates.
// Invalidate parent's extrect so it gets recalculated to include the new child
if let Some(parent) = self.shapes.get_mut(&id) {
parent.invalidate_extrect();
}
// Invalidate parent's extrect (and ancestors) so it gets recalculated
// to include the new child
self.shapes.invalidate_extrect_with_ancestors(&id);
}
pub fn rebuild_tiles_shallow(&mut self) {
@ -275,13 +274,19 @@ impl State {
self.shapes.set_modifiers(modifiers);
}
/// Pre-populate the extrect cache for all shapes. Delegates to the
/// shapes pool, which walks the tree bottom-up so parents find their
/// children's extrects already cached.
pub fn warm_extrect_cache(&self) {
self.shapes.warm_extrect_cache();
}
/// Marks the current shape's tiles as touched and invalidates its
/// extrect cache (expanding ancestor caches). No-op when no shape is
/// current.
pub fn touch_current(&mut self) {
    // Guard clause: nothing to mark or invalidate without a current id.
    let Some(current_id) = self.current_id else {
        return;
    };
    self.render_state.mark_touched(current_id);
    self.shapes.invalidate_extrect_with_ancestors(&current_id);
}
/// Marks the given shape's tiles as touched and invalidates its extrect
/// cache (expanding ancestor caches) so it is recomputed after a change.
pub fn touch_shape(&mut self, id: Uuid) {
self.render_state.mark_touched(id);
self.shapes.invalidate_extrect_with_ancestors(&id);
}
}

View File

@ -211,6 +211,97 @@ impl ShapesPoolImpl {
self.modified_shape_cache.clear()
}
/// Warm the extrect cache for every shape with a bottom-up (post-order)
/// traversal of the tree: children are cached before their parents, so
/// a parent's extrect computation never has to recurse deeply.
/// Call after loading all shapes, before the first tile rebuild.
pub fn warm_extrect_cache(&self) {
    // Iterative post-order DFS rooted at the nil uuid. The bool flag
    // records whether a node's children have already been scheduled.
    let mut pending: Vec<(Uuid, bool)> = vec![(Uuid::nil(), false)];
    while let Some((id, post_visit)) = pending.pop() {
        // Skip ids missing from the pool and deleted shapes.
        let shape = match self.get(&id) {
            Some(s) if !s.deleted() => s,
            _ => continue,
        };
        if !post_visit {
            // Pre-visit: re-schedule this node as a post-visit, then push
            // its children so they are processed (and cached) first.
            pending.push((id, true));
            pending.extend(shape.children.iter().map(|child_id| (*child_id, false)));
        } else if !id.is_nil() {
            // Post-visit: all children cached — computing (and caching)
            // this shape's extrect is now cheap and non-recursive.
            let _ = shape.calculate_extrect(self, 1.0);
        }
    }
}
/// Update the extrect cache for a shape and its ancestors after a shape changes.
///
/// The changed shape's cache is invalidated and recomputed. Then, instead of
/// invalidating all ancestor caches (forcing expensive O(subtree) recomputation),
/// we **expand** each ancestor's cached extrect to include the child's new extrect.
///
/// This is conservative — ancestor extrects may be slightly larger than the true
/// minimum, but this is always correct for tile assignment (shapes won't be missed).
/// The cache is "tightened" on the next full recomputation (e.g., via warm_extrect_cache).
pub fn invalidate_extrect_with_ancestors(&mut self, id: &Uuid) {
// First, invalidate the changed shape's own cache so it gets recomputed
if let Some(idx) = self.uuid_to_idx.get(id).copied() {
self.shapes[idx].invalidate_extrect();
} else {
// Unknown id — no cache entries exist for it, nothing to do.
return;
}
// Compute the changed shape's new extrect.
// Reborrow &mut self as &self for the immutable calculate_extrect call.
// Scale 1.0 matches warm_extrect_cache; the cache is not keyed on scale.
// NOTE(review): assumes the extrect bounds are scale-independent — confirm.
let child_extrect = {
let pool: &ShapesPoolImpl = self;
let idx = pool.uuid_to_idx[id];
let shape = &pool.shapes[idx];
shape.calculate_extrect(pool, 1.0)
};
// Walk up ancestors and expand their cached extrects
let parent_id = {
let idx = self.uuid_to_idx[id];
self.shapes[idx].parent_id
};
let mut current_id = parent_id;
while let Some(cid) = current_id {
// The nil uuid acts as the tree root sentinel (see warm_extrect_cache,
// which starts its traversal there) — stop once we reach it.
if cid.is_nil() {
break;
}
if let Some(idx) = self.uuid_to_idx.get(&cid).copied() {
let shape = &self.shapes[idx];
let mut cache = shape.extrect_cache.borrow_mut();
match cache.as_mut() {
Some(cached_rect) => {
// Expand the cached extrect to include the child's new extrect
cached_rect.join(child_extrect);
}
None => {
// No cache yet — nothing to expand, stop walking
// (ancestors above also won't have useful caches)
break;
}
}
// Release the RefCell borrow explicitly before reading parent_id
// and moving on to the next ancestor.
drop(cache);
current_id = self.shapes[idx].parent_id;
} else {
// Parent id not present in the pool — cannot walk any higher.
break;
}
}
}
pub fn set_modifiers(&mut self, modifiers: HashMap<Uuid, skia::Matrix>) {
// Convert HashMap<Uuid, V> to HashMap<usize, V> using indices
// Initialize the cache cells for affected shapes