Merge remote-tracking branch 'main/master' into resize-windows

sander777 committed Oct 23, 2023
2 parents 7322211 + 88bc52a commit 277910a
Showing 17 changed files with 345 additions and 130 deletions.
54 changes: 30 additions & 24 deletions Cargo.lock

Some generated files are not rendered by default.

1 change: 1 addition & 0 deletions book/src/generated/lang-support.md
@@ -155,6 +155,7 @@
| t32 | ✓ | | | |
| tablegen | ✓ | ✓ | ✓ | |
| task | ✓ | | | |
| templ | ✓ | | | `templ` |
| tfvars | ✓ | | ✓ | `terraform-ls` |
| todotxt | ✓ | | | |
| toml | ✓ | | | `taplo` |
4 changes: 3 additions & 1 deletion helix-core/src/wrap.rs
@@ -1,7 +1,9 @@
use smartstring::{LazyCompact, SmartString};
use textwrap::{Options, WordSplitter::NoHyphenation};

/// Given a slice of text, return the text re-wrapped to fit it
/// within the given width.
pub fn reflow_hard_wrap(text: &str, text_width: usize) -> SmartString<LazyCompact> {
textwrap::refill(text, text_width).into()
let options = Options::new(text_width).word_splitter(NoHyphenation);
textwrap::refill(text, options).into()
}
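For context on the `helix-core/src/wrap.rs` change: a small, self-contained sketch (not part of this commit; the sample text and width are made up) of what switching to the `NoHyphenation` word splitter does. With default `Options`, `refill` may break a word such as `hyphenated-words` at its hyphen; with `NoHyphenation` it only breaks at whitespace.

```rust
// Standalone illustration of the word-splitter change in reflow_hard_wrap.
// Sample text and width are arbitrary; only the textwrap calls mirror the diff.
use textwrap::{refill, Options, WordSplitter::NoHyphenation};

fn main() {
    let text = "helix-editor soft-wraps and re-wraps hyphenated-words in comments";

    // Default options: the splitter is allowed to break words at existing hyphens.
    let with_hyphen_splits = refill(text, 20);

    // The behaviour picked in this commit: never split inside a word,
    // so hyphenated words stay whole (possibly overflowing the width).
    let options = Options::new(20).word_splitter(NoHyphenation);
    let without_hyphen_splits = refill(text, options);

    println!("default splitter:\n{with_hyphen_splits}\n");
    println!("NoHyphenation:\n{without_hyphen_splits}");
}
```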
3 changes: 1 addition & 2 deletions helix-term/src/application.rs
@@ -162,8 +162,7 @@ impl Application {
// Unset path to prevent accidentally saving to the original tutor file.
doc_mut!(editor).set_path(None);
} else if !args.files.is_empty() {
let first = &args.files[0].0; // we know it's not empty
if first.is_dir() {
if args.open_cwd {
// NOTE: The working directory is already set to args.files[0] in main()
editor.new_file(Action::VerticalSplit);
let picker = ui::file_picker(".".into(), &config.load().editor);
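To make the behavioural change in `application.rs` easier to follow, here is a hedged, self-contained restatement of the branch above. The helper functions and the `Args` stub are illustrative stand-ins, not Helix APIs; only the decision logic mirrors the diff: startup no longer re-checks whether the first path is a directory, it trusts the `open_cwd` flag (and the working directory) established earlier during argument handling, per the NOTE comment.

```rust
use std::path::PathBuf;

// Illustrative stand-ins; not the real Helix types or functions.
struct Args {
    files: Vec<(PathBuf, usize)>,
    open_cwd: bool,
}
fn open_picker_at(_root: PathBuf) { /* open a file picker rooted here */ }
fn open_files(_files: &[(PathBuf, usize)]) { /* open each file in a buffer */ }

// Before: startup re-derived the intent from the first path.
fn startup_old(args: &Args) {
    let first = &args.files[0].0; // callers guarantee files is non-empty
    if first.is_dir() {
        open_picker_at(first.clone());
    } else {
        open_files(&args.files);
    }
}

// After: the decision was already made while handling arguments (which also
// set the process working directory), so startup only consults the flag and
// opens the picker at ".".
fn startup_new(args: &Args) {
    if args.open_cwd {
        open_picker_at(PathBuf::from("."));
    } else {
        open_files(&args.files);
    }
}

fn main() {
    let args = Args { files: vec![(PathBuf::from("."), 0)], open_cwd: true };
    startup_old(&args);
    startup_new(&args);
}
```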
1 change: 1 addition & 0 deletions helix-term/src/args.rs
@@ -17,6 +17,7 @@ pub struct Args {
pub log_file: Option<PathBuf>,
pub config_file: Option<PathBuf>,
pub files: Vec<(PathBuf, Position)>,
pub open_cwd: bool,
pub working_directory: Option<PathBuf>,
}
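A hypothetical sketch of how the new `open_cwd` field might be populated, inferred from the NOTE comment in `application.rs` above; the parsing function, the single-directory rule, and the use of `std::env::set_current_dir` are assumptions for illustration, not code from this commit.

```rust
use std::env;
use std::path::PathBuf;

// Illustrative subset of the Args struct from the diff above.
#[derive(Default)]
struct Args {
    files: Vec<(PathBuf, usize)>, // (path, line) pairs; line 0 by default
    open_cwd: bool,
}

// Hypothetical parsing step: if the only positional argument is a directory,
// switch the process working directory to it and record that choice in
// `open_cwd`, so later startup code does not re-inspect the filesystem.
fn parse(positional: Vec<PathBuf>) -> std::io::Result<Args> {
    let mut args = Args::default();
    if let [dir] = positional.as_slice() {
        if dir.is_dir() {
            env::set_current_dir(dir)?;
            args.open_cwd = true;
        }
    }
    args.files = positional.into_iter().map(|p| (p, 0)).collect();
    Ok(args)
}

fn main() -> std::io::Result<()> {
    let args = parse(vec![PathBuf::from(".")])?;
    println!("open_cwd = {}", args.open_cwd); // prints: open_cwd = true
    Ok(())
}
```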

137 changes: 72 additions & 65 deletions helix-term/src/commands.rs
@@ -2208,7 +2208,10 @@ fn global_search(cx: &mut Context) {
let searcher = SearcherBuilder::new()
.binary_detection(BinaryDetection::quit(b'\x00'))
.build();
WalkBuilder::new(search_root)

let mut walk_builder = WalkBuilder::new(search_root);

walk_builder
.hidden(file_picker_config.hidden)
.parents(file_picker_config.parents)
.ignore(file_picker_config.ignore)
@@ -2219,73 +2222,77 @@
.max_depth(file_picker_config.max_depth)
.filter_entry(move |entry| {
filter_picker_entry(entry, &absolute_root, dedup_symlinks)
})
.build_parallel()
.run(|| {
let mut searcher = searcher.clone();
let matcher = matcher.clone();
let injector = injector_.clone();
let documents = &documents;
Box::new(move |entry: Result<DirEntry, ignore::Error>| -> WalkState {
let entry = match entry {
Ok(entry) => entry,
Err(_) => return WalkState::Continue,
};

match entry.file_type() {
Some(entry) if entry.is_file() => {}
// skip everything else
_ => return WalkState::Continue,
};

let mut stop = false;
let sink = sinks::UTF8(|line_num, _| {
stop = injector
.push(FileResult::new(entry.path(), line_num as usize - 1))
.is_err();

Ok(!stop)
});
let doc = documents.iter().find(|&(doc_path, _)| {
doc_path
.as_ref()
.map_or(false, |doc_path| doc_path == entry.path())
});

let result = if let Some((_, doc)) = doc {
// there is already a buffer for this file
// search the buffer instead of the file because it's faster
// and captures new edits without requiring a save
if searcher.multi_line_with_matcher(&matcher) {
// in this case a contiguous buffer is required
// convert the rope to a string
let text = doc.to_string();
searcher.search_slice(&matcher, text.as_bytes(), sink)
} else {
searcher.search_reader(
&matcher,
RopeReader::new(doc.slice(..)),
sink,
)
}
} else {
searcher.search_path(&matcher, entry.path(), sink)
};
});

if let Err(err) = result {
log::error!(
"Global search error: {}, {}",
entry.path().display(),
err
);
}
if stop {
WalkState::Quit
walk_builder
.add_custom_ignore_filename(helix_loader::config_dir().join("ignore"));
walk_builder.add_custom_ignore_filename(".helix/ignore");

walk_builder.build_parallel().run(|| {
let mut searcher = searcher.clone();
let matcher = matcher.clone();
let injector = injector_.clone();
let documents = &documents;
Box::new(move |entry: Result<DirEntry, ignore::Error>| -> WalkState {
let entry = match entry {
Ok(entry) => entry,
Err(_) => return WalkState::Continue,
};

match entry.file_type() {
Some(entry) if entry.is_file() => {}
// skip everything else
_ => return WalkState::Continue,
};

let mut stop = false;
let sink = sinks::UTF8(|line_num, _| {
stop = injector
.push(FileResult::new(entry.path(), line_num as usize - 1))
.is_err();

Ok(!stop)
});
let doc = documents.iter().find(|&(doc_path, _)| {
doc_path
.as_ref()
.map_or(false, |doc_path| doc_path == entry.path())
});

let result = if let Some((_, doc)) = doc {
// there is already a buffer for this file
// search the buffer instead of the file because it's faster
// and captures new edits without requiring a save
if searcher.multi_line_with_matcher(&matcher) {
// in this case a contiguous buffer is required
// convert the rope to a string
let text = doc.to_string();
searcher.search_slice(&matcher, text.as_bytes(), sink)
} else {
WalkState::Continue
searcher.search_reader(
&matcher,
RopeReader::new(doc.slice(..)),
sink,
)
}
})
});
} else {
searcher.search_path(&matcher, entry.path(), sink)
};

if let Err(err) = result {
log::error!(
"Global search error: {}, {}",
entry.path().display(),
err
);
}
if stop {
WalkState::Quit
} else {
WalkState::Continue
}
})
});
});

cx.jobs.callback(async move {
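The `global_search` change splits the `WalkBuilder` chain so that two extra ignore files can be registered via `add_custom_ignore_filename` before the parallel walk starts. A minimal standalone sketch of that mechanism in the `ignore` crate (the walked root and the first ignore-file name are placeholders, not Helix's actual lookups):

```rust
// Minimal sketch of the ignore-file mechanism used above: any file with one of
// the registered names, found while walking, contributes gitignore-style
// patterns that filter the rest of the walk.
use ignore::WalkBuilder;

fn main() {
    let mut walk_builder = WalkBuilder::new("."); // placeholder root

    // Like the diff: a global ignore file plus a per-project one.
    walk_builder.add_custom_ignore_filename("global-ignore");
    walk_builder.add_custom_ignore_filename(".helix/ignore");

    for entry in walk_builder.build() {
        match entry {
            Ok(entry) => println!("{}", entry.path().display()),
            Err(err) => eprintln!("walk error: {err}"),
        }
    }
}
```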
(The remaining changed files are not shown.)
