fix(lints): clippy 1.84 #12462

Merged · 2 commits · Jan 23, 2025
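The whole PR is one mechanical rewrite applied across the tree: `Option::map_or(false, f)` becomes `Option::is_some_and(f)` (or a direct comparison against `Some(..)` when the closure is a plain equality test), the two `map_or(true, ..)` call sites become `!=` comparisons, and the single `Result` call site becomes `Result::is_ok_and`. This is presumably the pattern flagged by clippy 1.84's new `unnecessary_map_or` lint. A minimal sketch of the before/after shapes, on made-up values rather than helix code:

```rust
// Illustrative sketch only: assumed values, not helix code.
// `Option::is_some_and` and `Result::is_ok_and` have been stable since Rust 1.70.
fn main() {
    let c: Option<char> = Some(' ');

    // Before: the closure-returning-bool form that the lint flags.
    let old = c.map_or(false, |ch| ch == ' ');

    // After: a predicate via `is_some_and` ...
    let via_is_some_and = c.is_some_and(|ch| ch == ' ');
    // ... or, when the closure is a plain equality check, a direct comparison.
    let via_eq = c == Some(' ');
    assert!(old && via_is_some_and && via_eq);

    // `Result` gets the analogous `is_ok_and`.
    let size: Result<u64, &str> = Ok(42);
    assert!(size.is_ok_and(|n| n > 0));
}
```

The per-file hunks below all follow one of these shapes.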
8 changes: 2 additions & 6 deletions helix-core/src/comment.rs
@@ -204,13 +204,9 @@ pub fn find_block_comments(
             range: *range,
             start_pos,
             end_pos,
-            start_margin: selection_slice
-                .get_char(after_start)
-                .map_or(false, |c| c == ' '),
+            start_margin: selection_slice.get_char(after_start) == Some(' '),
             end_margin: after_start != before_end
-                && selection_slice
-                    .get_char(before_end)
-                    .map_or(false, |c| c == ' '),
+                && (selection_slice.get_char(before_end) == Some(' ')),
             start_token: start_token.to_string(),
             end_token: end_token.to_string(),
         });
11 changes: 6 additions & 5 deletions helix-core/src/doc_formatter.rs
@@ -370,8 +370,8 @@ impl<'t> DocumentFormatter<'t> {
         match col.cmp(&(self.text_fmt.viewport_width as usize)) {
             // The EOF char and newline chars are always selectable in helix. That means
             // that wrapping happens "too-early" if a word fits a line perfectly. This
-            // is intentional so that all selectable graphemes are always visisble (and
-            // therefore the cursor never dissapears). However if the user manually set a
+            // is intentional so that all selectable graphemes are always visible (and
+            // therefore the cursor never disappears). However if the user manually set a
             // lower softwrap width then this is undesirable. Just increasing the viewport-
             // width by one doesn't work because if a line is wrapped multiple times then
             // some words may extend past the specified width.
@@ -380,9 +380,10 @@ impl<'t> DocumentFormatter<'t> {
             // by a newline/eof character here.
             Ordering::Equal
                 if self.text_fmt.soft_wrap_at_text_width
-                    && self.peek_grapheme(col, char_pos).map_or(false, |grapheme| {
-                        grapheme.is_newline() || grapheme.is_eof()
-                    }) => {}
+                    && self
+                        .peek_grapheme(col, char_pos)
+                        .is_some_and(|grapheme| grapheme.is_newline() || grapheme.is_eof()) => {
+            }
             Ordering::Equal if word_width > self.text_fmt.max_wrap as usize => return,
             Ordering::Greater if word_width > self.text_fmt.max_wrap as usize => {
                 self.peeked_grapheme = self.word_buf.pop();
4 changes: 2 additions & 2 deletions helix-core/src/indent.rs
@@ -456,15 +456,15 @@ struct IndentQueryResult<'a> {
 fn get_node_start_line(node: Node, new_line_byte_pos: Option<usize>) -> usize {
     let mut node_line = node.start_position().row;
     // Adjust for the new line that will be inserted
-    if new_line_byte_pos.map_or(false, |pos| node.start_byte() >= pos) {
+    if new_line_byte_pos.is_some_and(|pos| node.start_byte() >= pos) {
         node_line += 1;
     }
     node_line
 }
 fn get_node_end_line(node: Node, new_line_byte_pos: Option<usize>) -> usize {
     let mut node_line = node.end_position().row;
     // Adjust for the new line that will be inserted (with a strict inequality since end_byte is exclusive)
-    if new_line_byte_pos.map_or(false, |pos| node.end_byte() > pos) {
+    if new_line_byte_pos.is_some_and(|pos| node.end_byte() > pos) {
         node_line += 1;
     }
     node_line
4 changes: 2 additions & 2 deletions helix-loader/src/grammar.rs
@@ -273,12 +273,12 @@ fn fetch_grammar(grammar: GrammarConfiguration) -> Result<FetchStatus> {
     }

     // ensure the remote matches the configured remote
-    if get_remote_url(&grammar_dir).map_or(true, |s| s != remote) {
+    if get_remote_url(&grammar_dir).as_ref() != Some(&remote) {
         set_remote(&grammar_dir, &remote)?;
     }

     // ensure the revision matches the configured revision
-    if get_revision(&grammar_dir).map_or(true, |s| s != revision) {
+    if get_revision(&grammar_dir).as_ref() != Some(&revision) {
         // Fetch the exact revision from the remote.
         // Supported by server-side git since v2.5.0 (July 2015),
         // enabled by default on major git hosts.
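These are the only two `map_or(true, ..)` sites in the PR; rewriting them as direct `Option` comparisons is equivalent because `None != Some(&remote)` is also `true`, so a missing URL or revision still triggers the update path. A small equivalence check on assumed values (not the real helix helpers):

```rust
// Equivalence check for the `map_or(true, ..)` rewrite above, on assumed values.
fn main() {
    let configured = String::from("https://example.com/tree-sitter-foo");
    let cases: [Option<String>; 3] = [
        None,
        Some(configured.clone()),
        Some(String::from("https://example.com/other")),
    ];
    for current in cases {
        let old = current.as_ref().map_or(true, |s| s != &configured);
        let new = current.as_ref() != Some(&configured);
        // Both are true exactly when the value is missing or differs from the
        // configured one, i.e. when `set_remote` / the revision fetch should run.
        assert_eq!(old, new);
    }
}
```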
2 changes: 1 addition & 1 deletion helix-lsp/src/client.rs
@@ -85,7 +85,7 @@ impl Client {
             .and_then(|root| lsp::Url::from_file_path(root).ok());

         if self.root_path == root.unwrap_or(workspace)
-            || root_uri.as_ref().map_or(false, |root_uri| {
+            || root_uri.as_ref().is_some_and(|root_uri| {
                 self.workspace_folders
                     .lock()
                     .iter()
4 changes: 2 additions & 2 deletions helix-stdx/src/rope.rs
@@ -43,7 +43,7 @@ impl<'a> RopeSliceExt<'a> for RopeSlice<'a> {
             return false;
         }
         self.get_byte_slice(len - text.len()..)
-            .map_or(false, |end| end == text)
+            .is_some_and(|end| end == text)
     }

     fn starts_with(self, text: &str) -> bool {
@@ -52,7 +52,7 @@ impl<'a> RopeSliceExt<'a> for RopeSlice<'a> {
             return false;
         }
         self.get_byte_slice(..text.len())
-            .map_or(false, |start| start == text)
+            .is_some_and(|start| start == text)
     }

     fn regex_input(self) -> RegexInput<RopeyCursor<'a>> {
10 changes: 5 additions & 5 deletions helix-term/src/commands.rs
@@ -1301,7 +1301,7 @@ fn goto_file_impl(cx: &mut Context, action: Action) {
         .line_to_byte(text.byte_to_line(pos) + 1)
         .min(pos + 1000);
     let search_range = text.slice(search_start..search_end);
-    // we also allow paths that are next to the cursor (can be ambigous but
+    // we also allow paths that are next to the cursor (can be ambiguous but
     // rarely so in practice) so that gf on quoted/braced path works (not sure about this
     // but apparently that is how gf has worked historically in helix)
     let path = find_paths(search_range, true)
@@ -2487,15 +2487,15 @@ fn global_search(cx: &mut Context) {
             let doc = documents.iter().find(|&(doc_path, _)| {
                 doc_path
                     .as_ref()
-                    .map_or(false, |doc_path| doc_path == entry.path())
+                    .is_some_and(|doc_path| doc_path == entry.path())
             });

             let result = if let Some((_, doc)) = doc {
                 // there is already a buffer for this file
                 // search the buffer instead of the file because it's faster
                 // and captures new edits without requiring a save
                 if searcher.multi_line_with_matcher(&matcher) {
-                    // in this case a continous buffer is required
+                    // in this case a continuous buffer is required
                     // convert the rope to a string
                     let text = doc.to_string();
                     searcher.search_slice(&matcher, text.as_bytes(), sink)
@@ -4036,7 +4036,7 @@ pub mod insert {
         let on_auto_pair = doc
             .auto_pairs(cx.editor)
             .and_then(|pairs| pairs.get(prev))
-            .map_or(false, |pair| pair.open == prev && pair.close == curr);
+            .is_some_and(|pair| pair.open == prev && pair.close == curr);

         let local_offs = if let Some(token) = continue_comment_token {
             new_text.push_str(doc.line_ending.as_str());
@@ -6456,7 +6456,7 @@ fn jump_to_word(cx: &mut Context, behaviour: Movement) {
     let mut cursor_rev = Range::point(cursor);
     if text.get_char(cursor).is_some_and(|c| !c.is_whitespace()) {
         let cursor_word_end = movement::move_next_word_end(text, cursor_fwd, 1);
-        // single grapheme words need a specical case
+        // single grapheme words need a special case
         if cursor_word_end.anchor == cursor {
             cursor_fwd = cursor_word_end;
         }
2 changes: 1 addition & 1 deletion helix-term/src/commands/lsp.rs
@@ -1288,7 +1288,7 @@ fn compute_inlay_hints_for_view(
     if !doc.inlay_hints_oudated
         && doc
             .inlay_hints(view_id)
-            .map_or(false, |dih| dih.id == new_doc_inlay_hints_id)
+            .is_some_and(|dih| dih.id == new_doc_inlay_hints_id)
     {
         return None;
     }
2 changes: 1 addition & 1 deletion helix-term/src/lib.rs
@@ -63,7 +63,7 @@ fn filter_picker_entry(entry: &DirEntry, root: &Path, dedup_symlinks: bool) -> b
             .path()
             .canonicalize()
             .ok()
-            .map_or(false, |path| !path.starts_with(root));
+            .is_some_and(|path| !path.starts_with(root));
     }

     true
7 changes: 4 additions & 3 deletions helix-term/src/ui/completion.rs
@@ -53,9 +53,10 @@ impl menu::Item for CompletionItem {
         let deprecated = match self {
             CompletionItem::Lsp(LspCompletionItem { item, .. }) => {
                 item.deprecated.unwrap_or_default()
-                    || item.tags.as_ref().map_or(false, |tags| {
-                        tags.contains(&lsp::CompletionItemTag::DEPRECATED)
-                    })
+                    || item
+                        .tags
+                        .as_ref()
+                        .is_some_and(|tags| tags.contains(&lsp::CompletionItemTag::DEPRECATED))
             }
             CompletionItem::Other(_) => false,
         };
4 changes: 2 additions & 2 deletions helix-term/src/ui/mod.rs
@@ -363,7 +363,7 @@ pub mod completers {
         git_ignore: bool,
     ) -> Vec<Completion> {
         filename_impl(editor, input, git_ignore, |entry| {
-            let is_dir = entry.file_type().map_or(false, |entry| entry.is_dir());
+            let is_dir = entry.file_type().is_some_and(|entry| entry.is_dir());

             if is_dir {
                 FileMatch::AcceptIncomplete
@@ -414,7 +414,7 @@ pub mod completers {
         git_ignore: bool,
     ) -> Vec<Completion> {
         filename_impl(editor, input, git_ignore, |entry| {
-            let is_dir = entry.file_type().map_or(false, |entry| entry.is_dir());
+            let is_dir = entry.file_type().is_some_and(|entry| entry.is_dir());

             if is_dir {
                 FileMatch::Accept
8 changes: 4 additions & 4 deletions helix-view/src/annotations/diagnostics.rs
@@ -178,7 +178,7 @@ impl<'a> InlineDiagnosticAccumulator<'a> {
         horizontal_off: usize,
     ) -> bool {
         // TODO: doing the cursor tracking here works well but is somewhat
-        // duplicate effort/tedious maybe centralize this somehwere?
+        // duplicate effort/tedious maybe centralize this somewhere?
         // In the DocFormatter?
         if grapheme.char_idx == self.cursor {
             self.cursor_line = true;
@@ -248,9 +248,9 @@ impl<'a> InlineDiagnosticAccumulator<'a> {
     }

     pub fn has_multi(&self, width: u16) -> bool {
-        self.stack.last().map_or(false, |&(_, anchor)| {
-            anchor > self.config.max_diagnostic_start(width)
-        })
+        self.stack
+            .last()
+            .is_some_and(|&(_, anchor)| anchor > self.config.max_diagnostic_start(width))
     }
 }

11 changes: 4 additions & 7 deletions helix-view/src/document.rs
@@ -717,10 +717,7 @@ impl Document {
         config: Arc<dyn DynAccess<Config>>,
     ) -> Result<Self, DocumentOpenError> {
         // If the path is not a regular file (e.g.: /dev/random) it should not be opened.
-        if path
-            .metadata()
-            .map_or(false, |metadata| !metadata.is_file())
-        {
+        if path.metadata().is_ok_and(|metadata| !metadata.is_file()) {
             return Err(DocumentOpenError::IrregularFile);
         }

@@ -1999,8 +1996,8 @@ impl Document {
         };

         let ends_at_word =
-            start != end && end != 0 && text.get_char(end - 1).map_or(false, char_is_word);
-        let starts_at_word = start != end && text.get_char(start).map_or(false, char_is_word);
+            start != end && end != 0 && text.get_char(end - 1).is_some_and(char_is_word);
+        let starts_at_word = start != end && text.get_char(start).is_some_and(char_is_word);

         Some(Diagnostic {
             range: Range { start, end },
@@ -2033,7 +2030,7 @@ impl Document {
             self.clear_diagnostics(language_server_id);
         } else {
             self.diagnostics.retain(|d| {
-                if language_server_id.map_or(false, |id| id != d.provider) {
+                if language_server_id.is_some_and(|id| id != d.provider) {
                     return true;
                 }

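The first hunk in this file is the only call site in the PR that operates on a `Result` (`path.metadata()`) rather than an `Option`, so it uses `Result::is_ok_and`. A minimal sketch of that variant, with a path chosen purely for illustration:

```rust
// Sketch of the `Result::is_ok_and` variant used for the metadata check above.
// The path below is only an example; any non-regular file (or a missing one) works.
use std::path::Path;

fn is_irregular(path: &Path) -> bool {
    // Before: path.metadata().map_or(false, |m| !m.is_file())
    // An Err (e.g. the file does not exist) still maps to `false`, as before.
    path.metadata().is_ok_and(|m| !m.is_file())
}

fn main() {
    println!("{}", is_irregular(Path::new("/dev/null")));
}
```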
4 changes: 2 additions & 2 deletions helix-view/src/handlers/lsp.rs
@@ -244,7 +244,7 @@ impl Editor {
             ResourceOp::Create(op) => {
                 let uri = Uri::try_from(&op.uri)?;
                 let path = uri.as_path().expect("URIs are valid paths");
-                let ignore_if_exists = op.options.as_ref().map_or(false, |options| {
+                let ignore_if_exists = op.options.as_ref().is_some_and(|options| {
                     !options.overwrite.unwrap_or(false) && options.ignore_if_exists.unwrap_or(false)
                 });
                 if !ignore_if_exists || !path.exists() {
@@ -288,7 +288,7 @@ impl Editor {
                 let from = from_uri.as_path().expect("URIs are valid paths");
                 let to_uri = Uri::try_from(&op.new_uri)?;
                 let to = to_uri.as_path().expect("URIs are valid paths");
-                let ignore_if_exists = op.options.as_ref().map_or(false, |options| {
+                let ignore_if_exists = op.options.as_ref().is_some_and(|options| {
                     !options.overwrite.unwrap_or(false) && options.ignore_if_exists.unwrap_or(false)
                 });
                 if !ignore_if_exists || !to.exists() {
5 changes: 1 addition & 4 deletions helix-view/src/theme.rs
@@ -500,10 +500,7 @@ impl ThemePalette {
         let modifiers = value.as_array().ok_or("Modifiers should be an array")?;

         for modifier in modifiers {
-            if modifier
-                .as_str()
-                .map_or(false, |modifier| modifier == "underlined")
-            {
+            if modifier.as_str() == Some("underlined") {
                 *style = style.underline_style(UnderlineStyle::Line);
             } else {
                 *style = style.add_modifier(Self::parse_modifier(modifier)?);