terminal: switch search to use flattened highlights

pull/9687/head
Mitchell Hashimoto 2025-11-24 10:19:10 -08:00
parent 05d6315e82
commit 6623c20c2d
No known key found for this signature in database
GPG Key ID: 523D5DC389D273BC
9 changed files with 412 additions and 1622 deletions


@ -132,6 +132,18 @@ pub const Flattened = struct {
};
}
pub fn deinit(self: *Flattened, alloc: Allocator) void {
self.chunks.deinit(alloc);
}
pub fn clone(self: *const Flattened, alloc: Allocator) Allocator.Error!Flattened {
return .{
.chunks = try self.chunks.clone(alloc),
.top_x = self.top_x,
.bot_x = self.bot_x,
};
}
/// Convert to an Untracked highlight.
pub fn untracked(self: Flattened) Untracked {
const slice = self.chunks.slice();
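
A minimal sketch (not part of this commit) of the Flattened lifecycle introduced above, assuming a producer such as a sliding window plus hypothetical `window`, `alloc`, and `pages` values: a result is cloned to take ownership, converted with `untracked()` when pin-level start/end access is needed, and freed when done.

// Hedged usage sketch; `window`, `alloc`, and `pages` are assumed.
if (window.next()) |hl| {
    var owned = try hl.clone(alloc);
    defer owned.deinit(alloc);
    const sel = owned.untracked();
    _ = pages.pointFromPin(.screen, sel.start);
}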


@ -19,5 +19,4 @@ test {
// Non-public APIs
_ = @import("search/sliding_window.zig");
_ = @import("search/sliding_window2.zig");
}


@ -12,11 +12,13 @@ const std = @import("std");
const builtin = @import("builtin");
const testing = std.testing;
const Allocator = std.mem.Allocator;
const ArenaAllocator = std.heap.ArenaAllocator;
const Mutex = std.Thread.Mutex;
const xev = @import("../../global.zig").xev;
const internal_os = @import("../../os/main.zig");
const BlockingQueue = @import("../../datastruct/main.zig").BlockingQueue;
const point = @import("../point.zig");
const FlattenedHighlight = @import("../highlight.zig").Flattened;
const PageList = @import("../PageList.zig");
const Screen = @import("../Screen.zig");
const ScreenSet = @import("../ScreenSet.zig");
@ -387,7 +389,7 @@ pub const Event = union(enum) {
/// Matches in the viewport have changed. The memory is owned by the
/// search thread and is only valid during the callback.
viewport_matches: []const Selection,
viewport_matches: []const FlattenedHighlight,
};
/// Search state.
@ -603,10 +605,13 @@ const Search = struct {
// process will make it stale again.
self.stale_viewport_matches = false;
var results: std.ArrayList(Selection) = .empty;
defer results.deinit(alloc);
while (self.viewport.next()) |sel| {
results.append(alloc, sel) catch |err| switch (err) {
var arena: ArenaAllocator = .init(alloc);
defer arena.deinit();
const arena_alloc = arena.allocator();
var results: std.ArrayList(FlattenedHighlight) = .empty;
while (self.viewport.next()) |hl| {
const hl_cloned = hl.clone(arena_alloc) catch continue;
results.append(arena_alloc, hl_cloned) catch |err| switch (err) {
error.OutOfMemory => {
log.warn(
"error collecting viewport matches err={}",
@ -637,7 +642,12 @@ test {
const Self = @This();
reset: std.Thread.ResetEvent = .{},
total: usize = 0,
viewport: []const Selection = &.{},
viewport: []FlattenedHighlight = &.{},
fn deinit(self: *Self) void {
for (self.viewport) |*hl| hl.deinit(testing.allocator);
testing.allocator.free(self.viewport);
}
fn callback(event: Event, userdata: ?*anyopaque) void {
const ud: *Self = @ptrCast(@alignCast(userdata.?));
@ -645,11 +655,16 @@ test {
.complete => ud.reset.set(),
.total_matches => |v| ud.total = v,
.viewport_matches => |v| {
for (ud.viewport) |*hl| hl.deinit(testing.allocator);
testing.allocator.free(ud.viewport);
ud.viewport = testing.allocator.dupe(
Selection,
v,
ud.viewport = testing.allocator.alloc(
FlattenedHighlight,
v.len,
) catch unreachable;
for (ud.viewport, v) |*dst, src| {
dst.* = src.clone(testing.allocator) catch unreachable;
}
},
}
}
@ -665,7 +680,7 @@ test {
try stream.nextSlice("Hello, world");
var ud: UserData = .{};
defer alloc.free(ud.viewport);
defer ud.deinit();
var thread: Thread = try .init(alloc, .{
.mutex = &mutex,
.terminal = &t,
@ -698,14 +713,14 @@ test {
try testing.expectEqual(1, ud.total);
try testing.expectEqual(1, ud.viewport.len);
{
const sel = ud.viewport[0];
const sel = ud.viewport[0].untracked();
try testing.expectEqual(point.Point{ .screen = .{
.x = 7,
.y = 0,
} }, t.screens.active.pages.pointFromPin(.screen, sel.start()).?);
} }, t.screens.active.pages.pointFromPin(.screen, sel.start).?);
try testing.expectEqual(point.Point{ .screen = .{
.x = 11,
.y = 0,
} }, t.screens.active.pages.pointFromPin(.screen, sel.end()).?);
} }, t.screens.active.pages.pointFromPin(.screen, sel.end).?);
}
}


@ -3,6 +3,7 @@ const testing = std.testing;
const Allocator = std.mem.Allocator;
const point = @import("../point.zig");
const size = @import("../size.zig");
const FlattenedHighlight = @import("../highlight.zig").Flattened;
const PageList = @import("../PageList.zig");
const Selection = @import("../Selection.zig");
const SlidingWindow = @import("sliding_window.zig").SlidingWindow;
@ -96,7 +97,7 @@ pub const ActiveSearch = struct {
/// Find the next match for the needle in the active area. This returns
/// null when there are no more matches.
pub fn next(self: *ActiveSearch) ?Selection {
pub fn next(self: *ActiveSearch) ?FlattenedHighlight {
return self.window.next();
}
};
@ -115,26 +116,28 @@ test "simple search" {
_ = try search.update(&t.screens.active.pages);
{
const sel = search.next().?;
const h = search.next().?;
const sel = h.untracked();
try testing.expectEqual(point.Point{ .active = .{
.x = 0,
.y = 0,
} }, t.screens.active.pages.pointFromPin(.active, sel.start()).?);
} }, t.screens.active.pages.pointFromPin(.active, sel.start).?);
try testing.expectEqual(point.Point{ .active = .{
.x = 3,
.y = 0,
} }, t.screens.active.pages.pointFromPin(.active, sel.end()).?);
} }, t.screens.active.pages.pointFromPin(.active, sel.end).?);
}
{
const sel = search.next().?;
const h = search.next().?;
const sel = h.untracked();
try testing.expectEqual(point.Point{ .active = .{
.x = 0,
.y = 2,
} }, t.screens.active.pages.pointFromPin(.active, sel.start()).?);
} }, t.screens.active.pages.pointFromPin(.active, sel.start).?);
try testing.expectEqual(point.Point{ .active = .{
.x = 3,
.y = 2,
} }, t.screens.active.pages.pointFromPin(.active, sel.end()).?);
} }, t.screens.active.pages.pointFromPin(.active, sel.end).?);
}
try testing.expect(search.next() == null);
}
@ -158,15 +161,16 @@ test "clear screen and search" {
_ = try search.update(&t.screens.active.pages);
{
const sel = search.next().?;
const h = search.next().?;
const sel = h.untracked();
try testing.expectEqual(point.Point{ .active = .{
.x = 0,
.y = 1,
} }, t.screens.active.pages.pointFromPin(.active, sel.start()).?);
} }, t.screens.active.pages.pointFromPin(.active, sel.start).?);
try testing.expectEqual(point.Point{ .active = .{
.x = 3,
.y = 1,
} }, t.screens.active.pages.pointFromPin(.active, sel.end()).?);
} }, t.screens.active.pages.pointFromPin(.active, sel.end).?);
}
try testing.expect(search.next() == null);
}


@ -5,6 +5,7 @@ const testing = std.testing;
const CircBuf = @import("../../datastruct/main.zig").CircBuf;
const terminal = @import("../main.zig");
const point = terminal.point;
const FlattenedHighlight = @import("../highlight.zig").Flattened;
const Page = terminal.Page;
const PageList = terminal.PageList;
const Pin = PageList.Pin;
@ -97,7 +98,7 @@ pub const PageListSearch = struct {
///
/// This does NOT access the PageList, so it can be called without
/// a lock held.
pub fn next(self: *PageListSearch) ?Selection {
pub fn next(self: *PageListSearch) ?FlattenedHighlight {
return self.window.next();
}
@ -149,26 +150,28 @@ test "simple search" {
defer search.deinit();
{
const sel = search.next().?;
const h = search.next().?;
const sel = h.untracked();
try testing.expectEqual(point.Point{ .active = .{
.x = 0,
.y = 2,
} }, t.screens.active.pages.pointFromPin(.active, sel.start()).?);
} }, t.screens.active.pages.pointFromPin(.active, sel.start).?);
try testing.expectEqual(point.Point{ .active = .{
.x = 3,
.y = 2,
} }, t.screens.active.pages.pointFromPin(.active, sel.end()).?);
} }, t.screens.active.pages.pointFromPin(.active, sel.end).?);
}
{
const sel = search.next().?;
const h = search.next().?;
const sel = h.untracked();
try testing.expectEqual(point.Point{ .active = .{
.x = 0,
.y = 0,
} }, t.screens.active.pages.pointFromPin(.active, sel.start()).?);
} }, t.screens.active.pages.pointFromPin(.active, sel.start).?);
try testing.expectEqual(point.Point{ .active = .{
.x = 3,
.y = 0,
} }, t.screens.active.pages.pointFromPin(.active, sel.end()).?);
} }, t.screens.active.pages.pointFromPin(.active, sel.end).?);
}
try testing.expect(search.next() == null);
@ -335,12 +338,13 @@ test "feed with match spanning page boundary" {
try testing.expect(try search.feed());
// Should find the spanning match
const sel = search.next().?;
try testing.expect(sel.start().node != sel.end().node);
const h = search.next().?;
const sel = h.untracked();
try testing.expect(sel.start.node != sel.end.node);
{
const str = try t.screens.active.selectionString(
alloc,
.{ .sel = sel },
.{ .sel = .init(sel.start, sel.end, false) },
);
defer alloc.free(str);
try testing.expectEqualStrings(str, "Test");


@ -3,6 +3,7 @@ const assert = @import("../../quirks.zig").inlineAssert;
const testing = std.testing;
const Allocator = std.mem.Allocator;
const point = @import("../point.zig");
const FlattenedHighlight = @import("../highlight.zig").Flattened;
const PageList = @import("../PageList.zig");
const Pin = PageList.Pin;
const Screen = @import("../Screen.zig");
@ -44,8 +45,8 @@ pub const ScreenSearch = struct {
/// is mostly immutable once found, while active area results may
/// change. This lets us easily reset the active area results for a
/// re-search scenario.
history_results: std.ArrayList(Selection),
active_results: std.ArrayList(Selection),
history_results: std.ArrayList(FlattenedHighlight),
active_results: std.ArrayList(FlattenedHighlight),
/// History search state.
const HistorySearch = struct {
@ -120,7 +121,9 @@ pub const ScreenSearch = struct {
const alloc = self.allocator();
self.active.deinit();
if (self.history) |*h| h.deinit(self.screen);
for (self.active_results.items) |*hl| hl.deinit(alloc);
self.active_results.deinit(alloc);
for (self.history_results.items) |*hl| hl.deinit(alloc);
self.history_results.deinit(alloc);
}
@ -145,11 +148,11 @@ pub const ScreenSearch = struct {
pub fn matches(
self: *ScreenSearch,
alloc: Allocator,
) Allocator.Error![]Selection {
) Allocator.Error![]FlattenedHighlight {
const active_results = self.active_results.items;
const history_results = self.history_results.items;
const results = try alloc.alloc(
Selection,
FlattenedHighlight,
active_results.len + history_results.len,
);
errdefer alloc.free(results);
@ -162,7 +165,7 @@ pub const ScreenSearch = struct {
results[0..active_results.len],
active_results,
);
std.mem.reverse(Selection, results[0..active_results.len]);
std.mem.reverse(FlattenedHighlight, results[0..active_results.len]);
// History does a backward search, so we can just append them
// after.
@ -247,13 +250,15 @@ pub const ScreenSearch = struct {
// For the active area, we consume the entire search in one go
// because the active area is generally small.
const alloc = self.allocator();
while (self.active.next()) |sel| {
while (self.active.next()) |hl| {
// If this fails, then we miss a result since `active.next()`
// moves forward and prunes data. In the future, we may want
// to have some more robust error handling but the only
// scenario this would fail is OOM and we're probably in
// deeper trouble at that point anyways.
try self.active_results.append(alloc, sel);
var hl_cloned = try hl.clone(alloc);
errdefer hl_cloned.deinit(alloc);
try self.active_results.append(alloc, hl_cloned);
}
// We've consumed the entire active area, move to history.
@ -270,13 +275,15 @@ pub const ScreenSearch = struct {
// Try to consume all the loaded matches in one go, because
// the search is generally fast for loaded data.
const alloc = self.allocator();
while (history.searcher.next()) |sel| {
while (history.searcher.next()) |hl| {
// Ignore selections that are found within the starting
// node since those are covered by the active area search.
if (sel.start().node == history.start_pin.node) continue;
if (hl.chunks.items(.node)[0] == history.start_pin.node) continue;
// Same note as tickActive for error handling.
try self.history_results.append(alloc, sel);
var hl_cloned = try hl.clone(alloc);
errdefer hl_cloned.deinit(alloc);
try self.history_results.append(alloc, hl_cloned);
}
// We need to be fed more data.
@ -291,6 +298,7 @@ pub const ScreenSearch = struct {
///
/// The caller must hold the necessary locks to access the screen state.
pub fn reloadActive(self: *ScreenSearch) Allocator.Error!void {
const alloc = self.allocator();
const list: *PageList = &self.screen.pages;
if (try self.active.update(list)) |history_node| history: {
// We need to account for any active area growth that would
@ -305,6 +313,7 @@ pub const ScreenSearch = struct {
if (h.start_pin.garbage) {
h.deinit(self.screen);
self.history = null;
for (self.history_results.items) |*hl| hl.deinit(alloc);
self.history_results.clearRetainingCapacity();
break :state null;
}
@ -317,7 +326,7 @@ pub const ScreenSearch = struct {
// initialize.
var search: PageListSearch = try .init(
self.allocator(),
alloc,
self.needle(),
list,
history_node,
@ -346,7 +355,6 @@ pub const ScreenSearch = struct {
// collect all the results into a new list. We ASSUME that
// reloadActive is being called frequently enough that there isn't
// a massive amount of history to search here.
const alloc = self.allocator();
var window: SlidingWindow = try .init(
alloc,
.forward,
@ -361,17 +369,17 @@ pub const ScreenSearch = struct {
}
assert(history.start_pin.node == history_node);
var results: std.ArrayList(Selection) = try .initCapacity(
var results: std.ArrayList(FlattenedHighlight) = try .initCapacity(
alloc,
self.history_results.items.len,
);
errdefer results.deinit(alloc);
while (window.next()) |sel| {
if (sel.start().node == history_node) continue;
try results.append(
alloc,
sel,
);
while (window.next()) |hl| {
if (hl.chunks.items(.node)[0] == history_node) continue;
var hl_cloned = try hl.clone(alloc);
errdefer hl_cloned.deinit(alloc);
try results.append(alloc, hl_cloned);
}
// If we have no matches then there is nothing to change
@ -380,13 +388,14 @@ pub const ScreenSearch = struct {
// Matches! Reverse our list then append all the remaining
// history items that didn't start on our original node.
std.mem.reverse(Selection, results.items);
std.mem.reverse(FlattenedHighlight, results.items);
try results.appendSlice(alloc, self.history_results.items);
self.history_results.deinit(alloc);
self.history_results = results;
}
// Reset our active search results and search again.
for (self.active_results.items) |*hl| hl.deinit(alloc);
self.active_results.clearRetainingCapacity();
switch (self.state) {
// If we're in the active state we run a normal tick so
@ -425,26 +434,26 @@ test "simple search" {
try testing.expectEqual(2, matches.len);
{
const sel = matches[0];
const sel = matches[0].untracked();
try testing.expectEqual(point.Point{ .screen = .{
.x = 0,
.y = 2,
} }, t.screens.active.pages.pointFromPin(.screen, sel.start()).?);
} }, t.screens.active.pages.pointFromPin(.screen, sel.start).?);
try testing.expectEqual(point.Point{ .screen = .{
.x = 3,
.y = 2,
} }, t.screens.active.pages.pointFromPin(.screen, sel.end()).?);
} }, t.screens.active.pages.pointFromPin(.screen, sel.end).?);
}
{
const sel = matches[1];
const sel = matches[1].untracked();
try testing.expectEqual(point.Point{ .screen = .{
.x = 0,
.y = 0,
} }, t.screens.active.pages.pointFromPin(.screen, sel.start()).?);
} }, t.screens.active.pages.pointFromPin(.screen, sel.start).?);
try testing.expectEqual(point.Point{ .screen = .{
.x = 3,
.y = 0,
} }, t.screens.active.pages.pointFromPin(.screen, sel.end()).?);
} }, t.screens.active.pages.pointFromPin(.screen, sel.end).?);
}
}
@ -477,15 +486,15 @@ test "simple search with history" {
try testing.expectEqual(1, matches.len);
{
const sel = matches[0];
const sel = matches[0].untracked();
try testing.expectEqual(point.Point{ .screen = .{
.x = 0,
.y = 0,
} }, t.screens.active.pages.pointFromPin(.screen, sel.start()).?);
} }, t.screens.active.pages.pointFromPin(.screen, sel.start).?);
try testing.expectEqual(point.Point{ .screen = .{
.x = 3,
.y = 0,
} }, t.screens.active.pages.pointFromPin(.screen, sel.end()).?);
} }, t.screens.active.pages.pointFromPin(.screen, sel.end).?);
}
}
@ -528,26 +537,26 @@ test "reload active with history change" {
defer alloc.free(matches);
try testing.expectEqual(2, matches.len);
{
const sel = matches[1];
const sel = matches[1].untracked();
try testing.expectEqual(point.Point{ .screen = .{
.x = 0,
.y = 0,
} }, t.screens.active.pages.pointFromPin(.screen, sel.start()).?);
} }, t.screens.active.pages.pointFromPin(.screen, sel.start).?);
try testing.expectEqual(point.Point{ .screen = .{
.x = 3,
.y = 0,
} }, t.screens.active.pages.pointFromPin(.screen, sel.end()).?);
} }, t.screens.active.pages.pointFromPin(.screen, sel.end).?);
}
{
const sel = matches[0];
const sel = matches[0].untracked();
try testing.expectEqual(point.Point{ .active = .{
.x = 1,
.y = 1,
} }, t.screens.active.pages.pointFromPin(.active, sel.start()).?);
} }, t.screens.active.pages.pointFromPin(.active, sel.start).?);
try testing.expectEqual(point.Point{ .active = .{
.x = 4,
.y = 1,
} }, t.screens.active.pages.pointFromPin(.active, sel.end()).?);
} }, t.screens.active.pages.pointFromPin(.active, sel.end).?);
}
}
@ -562,15 +571,15 @@ test "reload active with history change" {
defer alloc.free(matches);
try testing.expectEqual(1, matches.len);
{
const sel = matches[0];
const sel = matches[0].untracked();
try testing.expectEqual(point.Point{ .active = .{
.x = 2,
.y = 0,
} }, t.screens.active.pages.pointFromPin(.active, sel.start()).?);
} }, t.screens.active.pages.pointFromPin(.active, sel.start).?);
try testing.expectEqual(point.Point{ .active = .{
.x = 5,
.y = 0,
} }, t.screens.active.pages.pointFromPin(.active, sel.end()).?);
} }, t.screens.active.pages.pointFromPin(.active, sel.end).?);
}
}
}
@ -603,14 +612,14 @@ test "active change contents" {
try testing.expectEqual(1, matches.len);
{
const sel = matches[0];
const sel = matches[0].untracked();
try testing.expectEqual(point.Point{ .screen = .{
.x = 0,
.y = 1,
} }, t.screens.active.pages.pointFromPin(.screen, sel.start()).?);
} }, t.screens.active.pages.pointFromPin(.screen, sel.start).?);
try testing.expectEqual(point.Point{ .screen = .{
.x = 3,
.y = 1,
} }, t.screens.active.pages.pointFromPin(.screen, sel.end()).?);
} }, t.screens.active.pages.pointFromPin(.screen, sel.end).?);
}
}


@ -4,11 +4,13 @@ const Allocator = std.mem.Allocator;
const CircBuf = @import("../../datastruct/main.zig").CircBuf;
const terminal = @import("../main.zig");
const point = terminal.point;
const size = terminal.size;
const PageList = terminal.PageList;
const Pin = PageList.Pin;
const Selection = terminal.Selection;
const Screen = terminal.Screen;
const PageFormatter = @import("../formatter.zig").PageFormatter;
const FlattenedHighlight = terminal.highlight.Flattened;
/// Searches page nodes via a sliding window. The sliding window maintains
/// the invariant that data isn't pruned until (1) we've searched it and
@ -51,6 +53,10 @@ pub const SlidingWindow = struct {
/// data to meta.
meta: MetaBuf,
/// Buffer that can fit any number of chunks necessary for `next`
/// to never fail allocation.
chunk_buf: std.MultiArrayList(FlattenedHighlight.Chunk),
/// Offset into data for our current state. This handles the
/// situation where our search moved through meta[0] but didn't
/// do enough to prune it.
@ -113,6 +119,7 @@ pub const SlidingWindow = struct {
.alloc = alloc,
.data = data,
.meta = meta,
.chunk_buf = .empty,
.needle = needle,
.direction = direction,
.overlap_buf = overlap_buf,
@ -122,6 +129,7 @@ pub const SlidingWindow = struct {
pub fn deinit(self: *SlidingWindow) void {
self.alloc.free(self.overlap_buf);
self.alloc.free(self.needle);
self.chunk_buf.deinit(self.alloc);
self.data.deinit(self.alloc);
var meta_it = self.meta.iterator(.forward);
@ -143,14 +151,17 @@ pub const SlidingWindow = struct {
/// the invariant that the window is always big enough to contain
/// the needle.
///
/// It may seem wasteful to return a full selection, since the needle
/// length is known it seems like we can get away with just returning
/// the start index. However, returning a full selection will give us
/// more flexibility in the future (e.g. if we want to support regex
/// searches or other more complex searches). It does cost us some memory,
/// but searches are expected to be relatively rare compared to normal
/// operations and can eat up some extra memory temporarily.
pub fn next(self: *SlidingWindow) ?Selection {
/// This returns a flattened highlight on a match. The
/// flattened highlight requires allocation and is therefore more expensive
/// than a normal selection, but it is more efficient to render since it
/// has all the information without having to dereference pointers into
/// the terminal state.
///
/// The flattened highlight chunks reference internal memory for this
/// sliding window and are only valid until the next call to `next()`
/// or `append()`. If the caller wants to retain the flattened highlight
/// then they should clone it.
pub fn next(self: *SlidingWindow) ?FlattenedHighlight {
const slices = slices: {
// If we have less data than the needle then we can't possibly match
const data_len = self.data.len();
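
A hedged sketch (not part of this commit) of a consumer of `next()`: each returned highlight aliases the window's internal chunk buffer and is invalidated by the next call to `next()` or `append()`, so anything retained is cloned first. `window` and `alloc` are placeholder names.

var results: std.ArrayList(FlattenedHighlight) = .empty;
defer {
    for (results.items) |*hl| hl.deinit(alloc);
    results.deinit(alloc);
}
while (window.next()) |hl| {
    var owned = try hl.clone(alloc);
    errdefer owned.deinit(alloc);
    try results.append(alloc, owned);
}
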
@ -164,7 +175,7 @@ pub const SlidingWindow = struct {
// Search the first slice for the needle.
if (std.mem.indexOf(u8, slices[0], self.needle)) |idx| {
return self.selection(
return self.highlight(
idx,
self.needle.len,
);
@ -197,7 +208,7 @@ pub const SlidingWindow = struct {
// We found a match in the overlap buffer. We need to map the
// index back to the data buffer in order to get our selection.
return self.selection(
return self.highlight(
slices[0].len - prefix.len + idx,
self.needle.len,
);
@ -205,7 +216,7 @@ pub const SlidingWindow = struct {
// Search the last slice for the needle.
if (std.mem.indexOf(u8, slices[1], self.needle)) |idx| {
return self.selection(
return self.highlight(
slices[0].len + idx,
self.needle.len,
);
@ -263,114 +274,230 @@ pub const SlidingWindow = struct {
return null;
}
/// Return a selection for the given start and length into the data
/// buffer and also prune the data/meta buffers if possible up to
/// this start index.
/// Return a flattened highlight for the given start and length.
///
/// The flattened highlight can be used to render the highlight in the
/// most efficient way: checking whether some part of the viewport
/// matches the highlight requires neither a terminal lock to access
/// terminal data nor a traversal of the page nodes.
///
/// The start index is assumed to be relative to the offset, i.e.
/// index zero is actually at `self.data[self.data_offset]`. The
/// returned highlight will account for the offset.
fn selection(
fn highlight(
self: *SlidingWindow,
start_offset: usize,
len: usize,
) Selection {
) terminal.highlight.Flattened {
const start = start_offset + self.data_offset;
assert(start < self.data.len());
assert(start + len <= self.data.len());
const end = start + len - 1;
if (comptime std.debug.runtime_safety) {
assert(start < self.data.len());
assert(start + len <= self.data.len());
}
// meta_consumed is the number of bytes we've consumed in the
// data buffer up to and NOT including the meta where we've
// found our pin. This is important because it tells us the
// amount of data we can safely deleted from self.data since
// we can't partially delete a meta block's data. (The partial
// amount is represented by self.data_offset).
var meta_it = self.meta.iterator(.forward);
var meta_consumed: usize = 0;
const tl: Pin = pin(&meta_it, &meta_consumed, start);
// Clear our previous chunk buffer to store this result
self.chunk_buf.clearRetainingCapacity();
var result: terminal.highlight.Flattened = .empty;
// Store the information required to prune later. We store this
// now because we only want to prune up to our START so we can
// find overlapping matches.
const tl_meta_idx = meta_it.idx - 1;
const tl_meta_consumed = meta_consumed;
// Go through the meta nodes to find our start.
const tl: struct {
/// If non-null, we need to continue searching for the bottom-right.
br: ?struct {
it: MetaBuf.Iterator,
consumed: usize,
},
// We have to seek back so that we reinspect our current
// iterator value again in case the start and end are in the
// same segment.
meta_it.seekBy(-1);
const br: Pin = pin(&meta_it, &meta_consumed, start + len - 1);
assert(meta_it.idx >= 1);
/// Data to prune, both are lengths.
prune: struct {
meta: usize,
data: usize,
},
} = tl: {
var meta_it = self.meta.iterator(.forward);
var meta_consumed: usize = 0;
while (meta_it.next()) |meta| {
// Always increment our consumed count so that our index
// is right for the end search if we do it.
const prior_meta_consumed = meta_consumed;
meta_consumed += meta.cell_map.items.len;
// meta_i is the index we expect to find the match in the
// cell map within this meta if it contains it.
const meta_i = start - prior_meta_consumed;
// This meta doesn't contain the match. This means we
// can also prune this set of data because we only look
// forward.
if (meta_i >= meta.cell_map.items.len) continue;
// Now we look for the end. In MOST cases it is the same as
// our starting chunk because highlights are usually small and
// not on a boundary, so let's optimize for that.
const end_i = end - prior_meta_consumed;
if (end_i < meta.cell_map.items.len) {
@branchHint(.likely);
// The entire highlight is within this meta.
const start_map = meta.cell_map.items[meta_i];
const end_map = meta.cell_map.items[end_i];
result.top_x = start_map.x;
result.bot_x = end_map.x;
self.chunk_buf.appendAssumeCapacity(.{
.node = meta.node,
.start = @intCast(start_map.y),
.end = @intCast(end_map.y + 1),
});
break :tl .{
.br = null,
.prune = .{
.meta = meta_it.idx - 1,
.data = prior_meta_consumed,
},
};
} else {
// We found the meta that contains the start of the match
// only. Consume this entire node from our start offset.
const map = meta.cell_map.items[meta_i];
result.top_x = map.x;
self.chunk_buf.appendAssumeCapacity(.{
.node = meta.node,
.start = @intCast(map.y),
.end = meta.node.data.size.rows,
});
break :tl .{
.br = .{
.it = meta_it,
.consumed = meta_consumed,
},
.prune = .{
.meta = meta_it.idx - 1,
.data = prior_meta_consumed,
},
};
}
} else {
// Precondition that the start index is within the data buffer.
unreachable;
}
};
// Search for our end.
if (tl.br) |br| {
var meta_it = br.it;
var meta_consumed: usize = br.consumed;
while (meta_it.next()) |meta| {
// meta_i is the index we expect to find the match in the
// cell map within this meta if it contains it.
const meta_i = end - meta_consumed;
if (meta_i >= meta.cell_map.items.len) {
// This meta doesn't contain the match. We still add it
// to our results because we want the full flattened list.
self.chunk_buf.appendAssumeCapacity(.{
.node = meta.node,
.start = 0,
.end = meta.node.data.size.rows,
});
meta_consumed += meta.cell_map.items.len;
continue;
}
// We found it
const map = meta.cell_map.items[meta_i];
result.bot_x = map.x;
self.chunk_buf.appendAssumeCapacity(.{
.node = meta.node,
.start = 0,
.end = @intCast(map.y + 1),
});
break;
} else {
// Precondition that the end index is within the data buffer.
unreachable;
}
}
// Our offset into the current meta block is the start index
// minus the amount of data fully consumed. We then add one
// to move one past the match so we don't repeat it.
self.data_offset = start - tl_meta_consumed + 1;
self.data_offset = start - tl.prune.data + 1;
// meta_it.idx is br's meta index plus one (because the iterator
// moves one past the end; we call next() one last time). So
// we compare against one to check that the meta that we matched
// in has prior meta blocks we can prune.
if (tl_meta_idx > 0) {
// If we went beyond our initial meta node we can prune.
if (tl.prune.meta > 0) {
// Deinit all our memory in the meta blocks prior to our
// match.
const meta_count = tl_meta_idx;
meta_it.reset();
for (0..meta_count) |_| meta_it.next().?.deinit(self.alloc);
if (comptime std.debug.runtime_safety) {
assert(meta_it.idx == meta_count);
assert(meta_it.next().?.node == tl.node);
var meta_it = self.meta.iterator(.forward);
var meta_consumed: usize = 0;
for (0..tl.prune.meta) |_| {
const meta: *Meta = meta_it.next().?;
meta_consumed += meta.cell_map.items.len;
meta.deinit(self.alloc);
}
self.meta.deleteOldest(meta_count);
if (comptime std.debug.runtime_safety) {
assert(meta_it.idx == tl.prune.meta);
assert(meta_it.next().?.node == self.chunk_buf.items(.node)[0]);
}
self.meta.deleteOldest(tl.prune.meta);
// Delete all the data up to our current index.
assert(tl_meta_consumed > 0);
self.data.deleteOldest(tl_meta_consumed);
assert(tl.prune.data > 0);
self.data.deleteOldest(tl.prune.data);
}
self.assertIntegrity();
return switch (self.direction) {
.forward => .init(tl, br, false),
.reverse => .init(br, tl, false),
};
}
switch (self.direction) {
.forward => {},
.reverse => {
if (self.chunk_buf.len > 1) {
// Reverse all our chunks so that they're in forward order.
const slice = self.chunk_buf.slice();
const nodes = slice.items(.node);
const starts = slice.items(.start);
const ends = slice.items(.end);
std.mem.reverse(*PageList.List.Node, nodes);
std.mem.reverse(size.CellCountInt, starts);
std.mem.reverse(size.CellCountInt, ends);
/// Convert a data index into a pin.
///
/// The iterator and offset are both expected to be passed by
/// pointer so that the pin can be efficiently called for multiple
/// indexes (in order). See selection() for an example.
///
/// Precondition: the index must be within the data buffer.
fn pin(
it: *MetaBuf.Iterator,
offset: *usize,
idx: usize,
) Pin {
while (it.next()) |meta| {
// meta_i is the index we expect to find the match in the
// cell map within this meta if it contains it.
const meta_i = idx - offset.*;
if (meta_i >= meta.cell_map.items.len) {
// This meta doesn't contain the match. This means we
// can also prune this set of data because we only look
// forward.
offset.* += meta.cell_map.items.len;
continue;
}
// Now normally with forward traversal with multiple pages,
// the suffix of the first page and the prefix of the last
// page are used.
//
// For a reverse traversal, this is inverted (since the
// pages are in reverse order we get the suffix of the last
// page and the prefix of the first page). So we need to
// invert this.
//
// We DON'T need to do this for any middle pages because
// they always use the full page.
//
// We DON'T need to do this for chunks.len == 1 because
// the pages themselves aren't reversed and we don't have
// any prefix/suffix problems.
//
// This is a fixup that makes our start/end match the
// same logic as the loops above if they were in forward
// order.
assert(nodes.len >= 2);
starts[0] = ends[0] - 1;
ends[0] = nodes[0].data.size.rows;
ends[nodes.len - 1] = starts[nodes.len - 1] + 1;
starts[nodes.len - 1] = 0;
}
// We found the meta that contains the start of the match.
const map = meta.cell_map.items[meta_i];
return .{
.node = meta.node,
.y = @intCast(map.y),
.x = map.x,
};
// X values also need to be reversed since the top/bottom
// are swapped for the nodes.
const top_x = result.top_x;
result.top_x = result.bot_x;
result.bot_x = top_x;
},
}
// Unreachable because it is a precondition that the index is
// within the data buffer.
unreachable;
// Copy over our MultiArrayList so it points to the proper memory.
result.chunks = self.chunk_buf;
return result;
}
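
An illustrative sketch (not part of this commit; field semantics inferred from `highlight()` above) of why the flattened form can be consumed without touching the PageList: each chunk names a node and a [start, end) row range, with `top_x` bounding the first row and `bot_x` bounding the last. `hl` is an assumed FlattenedHighlight value.

// Hedged sketch; semantics inferred from highlight() above.
var highlighted_rows: usize = 0;
const chunks = hl.chunks.slice();
for (chunks.items(.node), chunks.items(.start), chunks.items(.end)) |node, start_row, end_row| {
    _ = node; // page node that owns rows [start_row, end_row)
    highlighted_rows += end_row - start_row;
    // When drawing, clamp the first row of the first chunk to hl.top_x
    // and the last row of the last chunk to hl.bot_x.
}
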
/// Add a new node to the sliding window. This will always grow
@ -442,6 +569,7 @@ pub const SlidingWindow = struct {
// Ensure our buffers are big enough to store what we need.
try self.data.ensureUnusedCapacity(self.alloc, written.len);
try self.meta.ensureUnusedCapacity(self.alloc, 1);
try self.chunk_buf.ensureTotalCapacity(self.alloc, self.meta.capacity());
// Append our new node to the circular buffer.
self.data.appendSliceAssumeCapacity(written);
@ -505,26 +633,28 @@ test "SlidingWindow single append" {
// We should be able to find two matches.
{
const sel = w.next().?;
const h = w.next().?;
const sel = h.untracked();
try testing.expectEqual(point.Point{ .active = .{
.x = 7,
.y = 0,
} }, s.pages.pointFromPin(.active, sel.start()).?);
} }, s.pages.pointFromPin(.active, sel.start).?);
try testing.expectEqual(point.Point{ .active = .{
.x = 10,
.y = 0,
} }, s.pages.pointFromPin(.active, sel.end()).?);
} }, s.pages.pointFromPin(.active, sel.end).?);
}
{
const sel = w.next().?;
const h = w.next().?;
const sel = h.untracked();
try testing.expectEqual(point.Point{ .active = .{
.x = 19,
.y = 0,
} }, s.pages.pointFromPin(.active, sel.start()).?);
} }, s.pages.pointFromPin(.active, sel.start).?);
try testing.expectEqual(point.Point{ .active = .{
.x = 22,
.y = 0,
} }, s.pages.pointFromPin(.active, sel.end()).?);
} }, s.pages.pointFromPin(.active, sel.end).?);
}
try testing.expect(w.next() == null);
try testing.expect(w.next() == null);
@ -582,26 +712,28 @@ test "SlidingWindow two pages" {
// Search should find two matches
{
const sel = w.next().?;
const h = w.next().?;
const sel = h.untracked();
try testing.expectEqual(point.Point{ .active = .{
.x = 76,
.y = 22,
} }, s.pages.pointFromPin(.active, sel.start()).?);
} }, s.pages.pointFromPin(.active, sel.start).?);
try testing.expectEqual(point.Point{ .active = .{
.x = 79,
.y = 22,
} }, s.pages.pointFromPin(.active, sel.end()).?);
} }, s.pages.pointFromPin(.active, sel.end).?);
}
{
const sel = w.next().?;
const h = w.next().?;
const sel = h.untracked();
try testing.expectEqual(point.Point{ .active = .{
.x = 7,
.y = 23,
} }, s.pages.pointFromPin(.active, sel.start()).?);
} }, s.pages.pointFromPin(.active, sel.start).?);
try testing.expectEqual(point.Point{ .active = .{
.x = 10,
.y = 23,
} }, s.pages.pointFromPin(.active, sel.end()).?);
} }, s.pages.pointFromPin(.active, sel.end).?);
}
try testing.expect(w.next() == null);
try testing.expect(w.next() == null);
@ -634,15 +766,16 @@ test "SlidingWindow two pages match across boundary" {
// Search should find a match
{
const sel = w.next().?;
const h = w.next().?;
const sel = h.untracked();
try testing.expectEqual(point.Point{ .active = .{
.x = 76,
.y = 22,
} }, s.pages.pointFromPin(.active, sel.start()).?);
} }, s.pages.pointFromPin(.active, sel.start).?);
try testing.expectEqual(point.Point{ .active = .{
.x = 7,
.y = 23,
} }, s.pages.pointFromPin(.active, sel.end()).?);
} }, s.pages.pointFromPin(.active, sel.end).?);
}
try testing.expect(w.next() == null);
try testing.expect(w.next() == null);
@ -831,15 +964,16 @@ test "SlidingWindow single append across circular buffer boundary" {
try testing.expect(slices[1].len > 0);
}
{
const sel = w.next().?;
const h = w.next().?;
const sel = h.untracked();
try testing.expectEqual(point.Point{ .active = .{
.x = 19,
.y = 0,
} }, s.pages.pointFromPin(.active, sel.start()).?);
} }, s.pages.pointFromPin(.active, sel.start).?);
try testing.expectEqual(point.Point{ .active = .{
.x = 21,
.y = 0,
} }, s.pages.pointFromPin(.active, sel.end()).?);
} }, s.pages.pointFromPin(.active, sel.end).?);
}
try testing.expect(w.next() == null);
}
@ -889,15 +1023,16 @@ test "SlidingWindow single append match on boundary" {
try testing.expect(slices[1].len > 0);
}
{
const sel = w.next().?;
const h = w.next().?;
const sel = h.untracked();
try testing.expectEqual(point.Point{ .active = .{
.x = 21,
.y = 0,
} }, s.pages.pointFromPin(.active, sel.start()).?);
} }, s.pages.pointFromPin(.active, sel.start).?);
try testing.expectEqual(point.Point{ .active = .{
.x = 1,
.y = 0,
} }, s.pages.pointFromPin(.active, sel.end()).?);
} }, s.pages.pointFromPin(.active, sel.end).?);
}
try testing.expect(w.next() == null);
}
@ -920,26 +1055,28 @@ test "SlidingWindow single append reversed" {
// We should be able to find two matches.
{
const sel = w.next().?;
const h = w.next().?;
const sel = h.untracked();
try testing.expectEqual(point.Point{ .active = .{
.x = 19,
.y = 0,
} }, s.pages.pointFromPin(.active, sel.start()).?);
} }, s.pages.pointFromPin(.active, sel.start).?);
try testing.expectEqual(point.Point{ .active = .{
.x = 22,
.y = 0,
} }, s.pages.pointFromPin(.active, sel.end()).?);
} }, s.pages.pointFromPin(.active, sel.end).?);
}
{
const sel = w.next().?;
const h = w.next().?;
const sel = h.untracked();
try testing.expectEqual(point.Point{ .active = .{
.x = 7,
.y = 0,
} }, s.pages.pointFromPin(.active, sel.start()).?);
} }, s.pages.pointFromPin(.active, sel.start).?);
try testing.expectEqual(point.Point{ .active = .{
.x = 10,
.y = 0,
} }, s.pages.pointFromPin(.active, sel.end()).?);
} }, s.pages.pointFromPin(.active, sel.end).?);
}
try testing.expect(w.next() == null);
try testing.expect(w.next() == null);
@ -997,26 +1134,28 @@ test "SlidingWindow two pages reversed" {
// Search should find two matches (in reverse order)
{
const sel = w.next().?;
const h = w.next().?;
const sel = h.untracked();
try testing.expectEqual(point.Point{ .active = .{
.x = 7,
.y = 23,
} }, s.pages.pointFromPin(.active, sel.start()).?);
} }, s.pages.pointFromPin(.active, sel.start).?);
try testing.expectEqual(point.Point{ .active = .{
.x = 10,
.y = 23,
} }, s.pages.pointFromPin(.active, sel.end()).?);
} }, s.pages.pointFromPin(.active, sel.end).?);
}
{
const sel = w.next().?;
const h = w.next().?;
const sel = h.untracked();
try testing.expectEqual(point.Point{ .active = .{
.x = 76,
.y = 22,
} }, s.pages.pointFromPin(.active, sel.start()).?);
} }, s.pages.pointFromPin(.active, sel.start).?);
try testing.expectEqual(point.Point{ .active = .{
.x = 79,
.y = 22,
} }, s.pages.pointFromPin(.active, sel.end()).?);
} }, s.pages.pointFromPin(.active, sel.end).?);
}
try testing.expect(w.next() == null);
try testing.expect(w.next() == null);
@ -1049,15 +1188,16 @@ test "SlidingWindow two pages match across boundary reversed" {
// Search should find a match
{
const sel = w.next().?;
const h = w.next().?;
const sel = h.untracked();
try testing.expectEqual(point.Point{ .active = .{
.x = 76,
.y = 22,
} }, s.pages.pointFromPin(.active, sel.start()).?);
} }, s.pages.pointFromPin(.active, sel.start).?);
try testing.expectEqual(point.Point{ .active = .{
.x = 7,
.y = 23,
} }, s.pages.pointFromPin(.active, sel.end()).?);
} }, s.pages.pointFromPin(.active, sel.end).?);
}
try testing.expect(w.next() == null);
try testing.expect(w.next() == null);
@ -1185,15 +1325,16 @@ test "SlidingWindow single append across circular buffer boundary reversed" {
try testing.expect(slices[1].len > 0);
}
{
const sel = w.next().?;
const h = w.next().?;
const sel = h.untracked();
try testing.expectEqual(point.Point{ .active = .{
.x = 19,
.y = 0,
} }, s.pages.pointFromPin(.active, sel.start()).?);
} }, s.pages.pointFromPin(.active, sel.start).?);
try testing.expectEqual(point.Point{ .active = .{
.x = 21,
.y = 0,
} }, s.pages.pointFromPin(.active, sel.end()).?);
} }, s.pages.pointFromPin(.active, sel.end).?);
}
try testing.expect(w.next() == null);
}
@ -1244,15 +1385,16 @@ test "SlidingWindow single append match on boundary reversed" {
try testing.expect(slices[1].len > 0);
}
{
const sel = w.next().?;
const h = w.next().?;
const sel = h.untracked();
try testing.expectEqual(point.Point{ .active = .{
.x = 21,
.y = 0,
} }, s.pages.pointFromPin(.active, sel.start()).?);
} }, s.pages.pointFromPin(.active, sel.start).?);
try testing.expectEqual(point.Point{ .active = .{
.x = 1,
.y = 0,
} }, s.pages.pointFromPin(.active, sel.end()).?);
} }, s.pages.pointFromPin(.active, sel.end).?);
}
try testing.expect(w.next() == null);
}

File diff suppressed because it is too large.


@ -4,6 +4,7 @@ const testing = std.testing;
const Allocator = std.mem.Allocator;
const point = @import("../point.zig");
const size = @import("../size.zig");
const FlattenedHighlight = @import("../highlight.zig").Flattened;
const PageList = @import("../PageList.zig");
const Selection = @import("../Selection.zig");
const SlidingWindow = @import("sliding_window.zig").SlidingWindow;
@ -150,7 +151,7 @@ pub const ViewportSearch = struct {
/// Find the next match for the needle in the active area. This returns
/// null when there are no more matches.
pub fn next(self: *ViewportSearch) ?Selection {
pub fn next(self: *ViewportSearch) ?FlattenedHighlight {
return self.window.next();
}
@ -207,26 +208,28 @@ test "simple search" {
try testing.expect(try search.update(&t.screens.active.pages));
{
const sel = search.next().?;
const h = search.next().?;
const sel = h.untracked();
try testing.expectEqual(point.Point{ .active = .{
.x = 0,
.y = 0,
} }, t.screens.active.pages.pointFromPin(.active, sel.start()).?);
} }, t.screens.active.pages.pointFromPin(.active, sel.start).?);
try testing.expectEqual(point.Point{ .active = .{
.x = 3,
.y = 0,
} }, t.screens.active.pages.pointFromPin(.active, sel.end()).?);
} }, t.screens.active.pages.pointFromPin(.active, sel.end).?);
}
{
const sel = search.next().?;
const h = search.next().?;
const sel = h.untracked();
try testing.expectEqual(point.Point{ .active = .{
.x = 0,
.y = 2,
} }, t.screens.active.pages.pointFromPin(.active, sel.start()).?);
} }, t.screens.active.pages.pointFromPin(.active, sel.start).?);
try testing.expectEqual(point.Point{ .active = .{
.x = 3,
.y = 2,
} }, t.screens.active.pages.pointFromPin(.active, sel.end()).?);
} }, t.screens.active.pages.pointFromPin(.active, sel.end).?);
}
try testing.expect(search.next() == null);
}
@ -250,15 +253,16 @@ test "clear screen and search" {
try testing.expect(try search.update(&t.screens.active.pages));
{
const sel = search.next().?;
const h = search.next().?;
const sel = h.untracked();
try testing.expectEqual(point.Point{ .active = .{
.x = 0,
.y = 1,
} }, t.screens.active.pages.pointFromPin(.active, sel.start()).?);
} }, t.screens.active.pages.pointFromPin(.active, sel.start).?);
try testing.expectEqual(point.Point{ .active = .{
.x = 3,
.y = 1,
} }, t.screens.active.pages.pointFromPin(.active, sel.end()).?);
} }, t.screens.active.pages.pointFromPin(.active, sel.end).?);
}
try testing.expect(search.next() == null);
}
@ -289,15 +293,16 @@ test "history search, no active area" {
try testing.expect(try search.update(&t.screens.active.pages));
{
const sel = search.next().?;
const h = search.next().?;
const sel = h.untracked();
try testing.expectEqual(point.Point{ .screen = .{
.x = 0,
.y = 0,
} }, t.screens.active.pages.pointFromPin(.screen, sel.start()).?);
} }, t.screens.active.pages.pointFromPin(.screen, sel.start).?);
try testing.expectEqual(point.Point{ .screen = .{
.x = 3,
.y = 0,
} }, t.screens.active.pages.pointFromPin(.screen, sel.end()).?);
} }, t.screens.active.pages.pointFromPin(.screen, sel.end).?);
}
try testing.expect(search.next() == null);