search buffer contents during global search
pascalkuthe committed Jul 11, 2023
1 parent 541d2b7 commit 0ad51c8
Showing 3 changed files with 77 additions and 13 deletions.
2 changes: 2 additions & 0 deletions helix-core/src/lib.rs
@@ -41,7 +41,9 @@ pub use helix_loader::find_workspace;
 pub fn find_first_non_whitespace_char(line: RopeSlice) -> Option<usize> {
     line.chars().position(|ch| !ch.is_whitespace())
 }
+mod rope_reader;
 
+pub use rope_reader::RopeReader;
 pub use ropey::{self, str_utils, Rope, RopeBuilder, RopeSlice};
 
 // pub use tendril::StrTendril as Tendril;
37 changes: 37 additions & 0 deletions helix-core/src/rope_reader.rs
@@ -0,0 +1,37 @@
+use std::io::{self, Read}; // `Read` must be in scope to call `.read()` on `&[u8]` below
+
+use ropey::iter::Chunks;
+use ropey::RopeSlice;
+
+pub struct RopeReader<'a> {
+    current_chunk: &'a [u8],
+    chunks: Chunks<'a>,
+}
+
+impl<'a> RopeReader<'a> {
+    pub fn new(rope: RopeSlice<'a>) -> RopeReader<'a> {
+        RopeReader {
+            current_chunk: &[],
+            chunks: rope.chunks(),
+        }
+    }
+}
+
+impl io::Read for RopeReader<'_> {
+    fn read(&mut self, mut buf: &mut [u8]) -> io::Result<usize> {
+        let buf_len = buf.len();
+        loop {
+            let read_bytes = self.current_chunk.read(buf)?; // drain the current chunk
+            buf = &mut buf[read_bytes..];
+            if buf.is_empty() {
+                return Ok(buf_len); // the caller's buffer is full
+            }
+
+            if let Some(next_chunk) = self.chunks.next() {
+                self.current_chunk = next_chunk.as_bytes();
+            } else {
+                return Ok(buf_len - buf.len()); // rope exhausted: partial read
+            }
+        }
+    }
+}
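
A minimal usage sketch (illustrative only, not part of the commit; it assumes the re-exports added to helix-core/src/lib.rs above): anything that consumes std::io::Read can now stream a rope chunk by chunk instead of flattening it into one contiguous String first.

use std::io::Read;

use helix_core::{Rope, RopeReader};

fn main() -> std::io::Result<()> {
    let rope = Rope::from_str("hello\nworld\n");
    let mut out = String::new();
    // read_to_string pulls the rope through the adapter chunk by chunk
    RopeReader::new(rope.slice(..)).read_to_string(&mut out)?;
    assert_eq!(out, "hello\nworld\n");
    Ok(())
}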
51 changes: 38 additions & 13 deletions helix-term/src/commands.rs
@@ -29,7 +29,7 @@ use helix_core::{
     tree_sitter::Node,
     unicode::width::UnicodeWidthChar,
     visual_offset_from_block, Deletion, LineEnding, Position, Range, Rope, RopeGraphemes,
-    RopeSlice, Selection, SmallVec, Tendril, Transaction,
+    RopeReader, RopeSlice, Selection, SmallVec, Tendril, Transaction,
 };
 use helix_view::{
     clipboard::ClipboardType,
@@ -2062,11 +2062,16 @@ fn global_search(cx: &mut Context) {
                 .map(|comp| (0.., std::borrow::Cow::Owned(comp.clone())))
                 .collect()
             },
-            move |_editor, regex, event| {
+            move |editor, regex, event| {
                 if event != PromptEvent::Validate {
                     return;
                 }
+
+                let documents: Vec<_> = editor
+                    .documents()
+                    .map(|doc| (doc.path(), doc.text()))
+                    .collect();
 
                 if let Ok(matcher) = RegexMatcherBuilder::new()
                     .case_smart(smart_case)
                     .build(regex.as_str())
@@ -2099,6 +2104,7 @@ fn global_search(cx: &mut Context) {
                 let mut searcher = searcher.clone();
                 let matcher = matcher.clone();
                 let all_matches_sx = all_matches_sx.clone();
+                let documents = &documents;
                 Box::new(move |entry: Result<DirEntry, ignore::Error>| -> WalkState {
                     let entry = match entry {
                         Ok(entry) => entry,
@@ -2111,17 +2117,36 @@
                         _ => return WalkState::Continue,
                     };
 
-                    let result = searcher.search_path(
-                        &matcher,
-                        entry.path(),
-                        sinks::UTF8(|line_num, _| {
-                            all_matches_sx
-                                .send(FileResult::new(entry.path(), line_num as usize - 1))
-                                .unwrap();
-
-                            Ok(true)
-                        }),
-                    );
+                    let sink = sinks::UTF8(|line_num, _| {
+                        all_matches_sx
+                            .send(FileResult::new(entry.path(), line_num as usize - 1))
+                            .unwrap();
+
+                        Ok(true)
+                    });
+                    let doc = documents.iter().find(|&(doc_path, _)| {
+                        doc_path.map_or(false, |doc_path| doc_path == entry.path())
+                    });
+
+                    let result = if let Some((_, doc)) = doc {
+                        // there is already a buffer for this file;
+                        // search the buffer instead of the file because it's faster
+                        // and captures new edits without requiring a save
+                        if searcher.multi_line_with_matcher(&matcher) {
+                            // in this case a contiguous buffer is required:
+                            // convert the rope to a string
+                            let text = doc.to_string();
+                            searcher.search_slice(&matcher, text.as_bytes(), sink)
+                        } else {
+                            searcher.search_reader(
+                                &matcher,
+                                RopeReader::new(doc.slice(..)),
+                                sink,
+                            )
+                        }
+                    } else {
+                        searcher.search_path(&matcher, entry.path(), sink)
+                    };
 
                     if let Err(err) = result {
                         log::error!(
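
The dispatch above, condensed into a standalone sketch (the helper name search_open_buffer and the println! sink are hypothetical, not from the commit; the grep-searcher calls are its public API): only multi-line matchers need the text as one contiguous byte slice, so only that case pays for flattening the rope, while everything else streams through RopeReader with no extra copy.

use grep_regex::RegexMatcher;
use grep_searcher::{sinks, Searcher};
use helix_core::{Rope, RopeReader};

// Hypothetical helper: search an already-open buffer's rope.
fn search_open_buffer(
    searcher: &mut Searcher,
    matcher: &RegexMatcher,
    text: &Rope,
) -> std::io::Result<()> {
    let sink = sinks::UTF8(|line_num, line| {
        println!("{line_num}: {line}");
        Ok(true)
    });
    if searcher.multi_line_with_matcher(matcher) {
        // multi-line matches can cross line boundaries, so the searcher
        // needs one contiguous buffer: flatten the rope into a String
        let text = text.to_string();
        searcher.search_slice(matcher, text.as_bytes(), sink)
    } else {
        // single-line searches stream the rope chunk by chunk
        searcher.search_reader(matcher, RopeReader::new(text.slice(..)), sink)
    }
}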
