diff --git a/crates/server/src/completion.rs b/crates/server/src/completion.rs
new file mode 100644
index 0000000..bd708b1
--- /dev/null
+++ b/crates/server/src/completion.rs
@@ -0,0 +1,258 @@
+fn normalize_document_and_cursor_position(
+    doc: &str,
+    cursor_line: usize,
+    cursor_char: usize,
+) -> (String, usize, usize) {
+    let lines: Vec<&str> = doc.lines().collect();
+    if lines.len() <= 1 {
+        return (doc.to_string(), cursor_line, cursor_char);
+    }
+
+    // Join every line with a single space; the +1 accounts for the space that
+    // replaces the newline immediately before the cursor's line.
+    let content_before_cursor =
+        lines.iter().take(cursor_line).copied().collect::<Vec<&str>>().join(" ");
+    let new_cursor_char = content_before_cursor.len() + cursor_char + 1;
+    let normalized_doc = format!("{}\n", lines.join(" "));
+
+    (normalized_doc, 0, new_cursor_char)
+}
+
+fn cursor_matches(
+    cursor_line: usize,
+    cursor_char: usize,
+    query_start: tree_sitter::Point,
+    query_end: tree_sitter::Point,
+) -> bool {
+    // Completely envelop the cursor line-wise
+    if query_start.row < cursor_line && query_end.row > cursor_line {
+        return true;
+    }
+
+    // Single line match, check columns on both sides
+    if cursor_line == query_start.row
+        && cursor_line == query_end.row
+        && query_start.column <= cursor_char
+        && query_end.column >= cursor_char
+    {
+        return true;
+    }
+
+    // Start lines overlap, but the start column is before the cursor column
+    if cursor_line == query_start.row && query_start.column <= cursor_char {
+        return true;
+    }
+
+    // End lines overlap, but the end column is after the cursor column
+    if cursor_line == query_end.row && query_end.column >= cursor_char {
+        return true;
+    }
+
+    false
+}
+
+fn cursor_before(cursor_line: usize, cursor_char: usize, query_start: tree_sitter::Point) -> bool {
+    cursor_line < query_start.row
+        || (cursor_line == query_start.row && cursor_char < query_start.column)
+}
+
+fn cursor_after(cursor_line: usize, cursor_char: usize, query_end: tree_sitter::Point) -> bool {
+    cursor_line > query_end.row || (cursor_line == query_end.row && cursor_char > query_end.column)
+}
+
+fn get_completion_for_context(
+    cursor: &mut tree_sitter::QueryCursor,
+    root_node: tree_sitter::Node,
+    doc_bytes: &[u8],
+    cursor_line: usize,
+    cursor_char: usize,
+) -> Option<Vec<String>> {
+    static QUERY_CONTEXT: once_cell::sync::Lazy<tree_sitter::Query> =
+        once_cell::sync::Lazy::new(|| {
+            tree_sitter::Query::new(
+                tree_sitter_surrealql::language(),
+                r#"(from_clause (target) @target_options .
+                ) (keyword_select) @select_options"#,
+            )
+            .expect("Could not initialize query")
+        });
+
+    static COMPLETION_CONTEXT_MAP: once_cell::sync::Lazy<
+        std::collections::HashMap<String, Vec<String>>,
+    > = once_cell::sync::Lazy::new(|| {
+        let mut map: std::collections::HashMap<String, Vec<String>> =
+            std::collections::HashMap::new();
+        map.insert(
+            "target_options".to_string(),
+            vec![
+                "WHERE".to_string(),
+                "SPLIT".to_string(),
+                "WITH".to_string(),
+                "GROUP BY".to_string(),
+                "LIMIT".to_string(),
+                "ORDER BY".to_string(),
+                "TIMEOUT".to_string(),
+                "EXPLAIN".to_string(),
+                "PARALLEL".to_string(),
+            ],
+        );
+        map.insert("select_options".to_string(), vec!["VALUE".to_string()]);
+        map
+    });
+
+    let mut last_match = None;
+    for m in cursor.matches(&QUERY_CONTEXT, root_node, doc_bytes) {
+        for capture in m.captures.iter() {
+            let capture_name = &QUERY_CONTEXT.capture_names()[capture.index as usize];
+            if cursor_matches(
+                cursor_line,
+                cursor_char,
+                capture.node.range().start_point,
+                capture.node.range().end_point,
+            ) {
+                last_match = Some((capture_name.clone(), capture.node.range()));
+            }
+        }
+    }
+
+    last_match.and_then(|(capture_name, _range)| {
+        COMPLETION_CONTEXT_MAP.get(capture_name.as_str()).cloned()
+    })
+}
+
+fn get_completion_for_errors(
+    cursor: &mut tree_sitter::QueryCursor,
+    root_node: tree_sitter::Node,
+    doc_bytes: &[u8],
+    cursor_line: usize,
+    cursor_char: usize,
+) -> Option<Vec<String>> {
+    static QUERY_ERROR_START: once_cell::sync::Lazy<tree_sitter::Query> =
+        once_cell::sync::Lazy::new(|| {
+            tree_sitter::Query::new(tree_sitter_surrealql::language(), "(ERROR) @start")
+                .expect("Could not initialize query")
+        });
+
+    for m in cursor.matches(&QUERY_ERROR_START, root_node, doc_bytes) {
+        for capture in m.captures.iter() {
+            if cursor_matches(
+                cursor_line,
+                cursor_char,
+                capture.node.range().start_point,
+                capture.node.range().end_point,
+            ) {
+                return Some(vec!["VALUE".to_string()]);
+            }
+        }
+    }
+    None
+}
+
+fn get_completion_for_select_neighbors(
+    cursor: &mut tree_sitter::QueryCursor,
+    root_node: tree_sitter::Node,
+    doc_bytes: &[u8],
+    cursor_line: usize,
+    cursor_char: usize,
+) -> Option<Vec<String>> {
+    static QUERY_SELECT_NEIGHBOR: once_cell::sync::Lazy<tree_sitter::Query> =
+        once_cell::sync::Lazy::new(|| {
+            tree_sitter::Query::new(
+                tree_sitter_surrealql::language(),
+                r#"(
+                    (keyword_select) @select
+                    .
+                    (_) @neighbor
+                )"#,
+            )
+            .expect("Could not initialize query")
+        });
+
+    for m in cursor.matches(&QUERY_SELECT_NEIGHBOR, root_node, doc_bytes) {
+        if m.captures.len() < 2 {
+            continue;
+        }
+        let select_range = m.captures[0].node.range();
+        let neighbor_range = m.captures[1].node.range();
+        if cursor_after(cursor_line, cursor_char, select_range.end_point)
+            && cursor_before(cursor_line, cursor_char, neighbor_range.start_point)
+        {
+            return Some(vec!["VALUE".to_string()]);
+        }
+    }
+    None
+}
+
+fn get_completion_for_select(
+    cursor: &mut tree_sitter::QueryCursor,
+    root_node: tree_sitter::Node,
+    doc_bytes: &[u8],
+    cursor_line: usize,
+    cursor_char: usize,
+) -> Option<Vec<String>> {
+    static QUERY_SELECT: once_cell::sync::Lazy<tree_sitter::Query> =
+        once_cell::sync::Lazy::new(|| {
+            tree_sitter::Query::new(tree_sitter_surrealql::language(), "(keyword_select) @select")
+                .expect("Could not initialize query")
+        });
+
+    // Restrict the query to the cursor's line and suggest options for any
+    // SELECT keyword that ends before the cursor.
+    cursor.set_point_range(std::ops::Range {
+        start: tree_sitter::Point { row: cursor_line, column: 0 },
+        end: tree_sitter::Point { row: cursor_line, column: usize::MAX },
+    });
+    for m in cursor.matches(&QUERY_SELECT, root_node, doc_bytes) {
+        for capture in m.captures {
+            if cursor_after(cursor_line, cursor_char, capture.node.range().end_point) {
+                return Some(vec!["VALUE".to_string()]);
+            }
+        }
+    }
+    None
+}
+
+pub(crate) fn get_completion_list(
+    curr_doc: &str,
+    parser: &mut tree_sitter::Parser,
+    curr_tree: &mut Option<tree_sitter::Tree>,
+    params: &tower_lsp::lsp_types::CompletionParams,
+) -> Option<Vec<String>> {
+    let cursor_line = params.text_document_position.position.line as usize;
+    let cursor_char = params.text_document_position.position.character as usize;
+
+    let (normalized_doc, cursor_line, cursor_char) =
+        normalize_document_and_cursor_position(curr_doc, cursor_line, cursor_char);
+
+    *curr_tree = parser.parse(&normalized_doc, curr_tree.as_ref());
+    if let Some(tree) = curr_tree {
+        let mut cursor = tree_sitter::QueryCursor::new();
+        let doc_bytes = normalized_doc.as_bytes();
+        let root_node = tree.root_node();
+
+        get_completion_for_context(&mut cursor, root_node, doc_bytes, cursor_line, cursor_char)
+            .or_else(|| {
+                get_completion_for_errors(
+                    &mut cursor,
+                    root_node,
+                    doc_bytes,
+                    cursor_line,
+                    cursor_char,
+                )
+            })
+            .or_else(|| {
+                get_completion_for_select_neighbors(
+                    &mut cursor,
+                    root_node,
+                    doc_bytes,
+                    cursor_line,
+                    cursor_char,
+                )
+            })
+            .or_else(|| {
+                get_completion_for_select(
+                    &mut cursor,
+                    root_node,
+                    doc_bytes,
+                    cursor_line,
+                    cursor_char,
+                )
+            })
+    } else {
+        None
+    }
+}
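Note: the inline correctly_normalizes_document test removed from main.rs further down does not reappear in the new modules shown here. A minimal port might look like the sketch below, placed in a #[cfg(test)] block at the bottom of completion.rs; the expected tuples are taken verbatim from the removed test, everything else is an assumption.

    #[cfg(test)]
    mod tests {
        use super::normalize_document_and_cursor_position;

        #[test]
        fn correctly_normalizes_document() {
            // Lines are joined with single spaces and the cursor is re-based
            // onto the single resulting line.
            let out = normalize_document_and_cursor_position("\nabc\ndef\nhij", 3, 2);
            assert_eq!(out, (" abc def hij\n".to_string(), 0, 11));

            // A SurrealQL fragment split across two lines.
            let out = normalize_document_and_cursor_position("SELECT * FROM table\nWH\n", 1, 2);
            assert_eq!(out, ("SELECT * FROM table WH\n".to_string(), 0, 22));
        }
    }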
include_str!("./md/split.md").to_string()); + map.insert("TIMEOUT".to_string(), include_str!("./md/timeout.md").to_string()); + map.insert("VALUE".to_string(), include_str!("./md/value.md").to_string()); + map.insert("WHERE".to_string(), include_str!("./md/where.md").to_string()); + map.insert("WITH".to_string(), include_str!("./md/with.md").to_string()); + map +} diff --git a/crates/server/src/lsp.rs b/crates/server/src/lsp.rs new file mode 100644 index 0000000..3f13886 --- /dev/null +++ b/crates/server/src/lsp.rs @@ -0,0 +1,225 @@ +fn text_doc_change_to_tree_sitter_edit( + change: &tower_lsp::lsp_types::TextDocumentContentChangeEvent, + doc: &lsp_textdocument::FullTextDocument, +) -> Result { + let range = change.range.as_ref().ok_or("Invalid edit range")?; + let start = range.start; + let end = range.end; + + let start_byte = doc.offset_at(start) as usize; + let old_end_byte = doc.offset_at(end) as usize; + let new_end_byte = start_byte + change.text.len(); + + let new_end_pos = doc.position_at(new_end_byte as u32); + + Ok(tree_sitter::InputEdit { + start_byte, + old_end_byte, + new_end_byte, + start_position: tree_sitter::Point { + row: start.line as usize, + column: start.character as usize, + }, + old_end_position: tree_sitter::Point { + row: end.line as usize, + column: end.character as usize, + }, + new_end_position: tree_sitter::Point { + row: new_end_pos.line as usize, + column: new_end_pos.character as usize, + }, + }) +} + +pub(crate) struct Backend { + client: tower_lsp::Client, + parser: std::sync::Arc>, + curr_doc: std::sync::Arc>>, + tree: std::sync::Arc>>, + kw_docs: crate::keywords::KeywordDocsMap, +} + +impl Backend { + pub fn new(client: tower_lsp::Client) -> Self { + Self { + client, + parser: std::sync::Arc::new( + tokio::sync::Mutex::new(crate::parser::initialise_parser()), + ), + curr_doc: std::sync::Arc::new(tokio::sync::Mutex::new(None)), + tree: std::sync::Arc::new(tokio::sync::Mutex::new(None)), + kw_docs: crate::keywords::load_kw_docs(), + } + } +} + +#[tower_lsp::async_trait] +impl tower_lsp::LanguageServer for Backend { + async fn initialize( + &self, + _: tower_lsp::lsp_types::InitializeParams, + ) -> tower_lsp::jsonrpc::Result { + Ok(tower_lsp::lsp_types::InitializeResult { + server_info: Some(tower_lsp::lsp_types::ServerInfo { + name: String::from("surrealql-lsp"), + version: Some(String::from("0.0.1")), + }), + capabilities: tower_lsp::lsp_types::ServerCapabilities { + text_document_sync: Some(tower_lsp::lsp_types::TextDocumentSyncCapability::Kind( + tower_lsp::lsp_types::TextDocumentSyncKind::INCREMENTAL, + )), + hover_provider: Some(tower_lsp::lsp_types::HoverProviderCapability::Simple(true)), + completion_provider: Some(tower_lsp::lsp_types::CompletionOptions { + resolve_provider: Some(false), + work_done_progress_options: Default::default(), + all_commit_characters: None, + ..Default::default() + }), + ..tower_lsp::lsp_types::ServerCapabilities::default() + }, + }) + } + + async fn initialized(&self, _: tower_lsp::lsp_types::InitializedParams) { + self.client.log_message(tower_lsp::lsp_types::MessageType::INFO, "initialized!").await; + } + + async fn shutdown(&self) -> tower_lsp::jsonrpc::Result<()> { + Ok(()) + } + + async fn did_open(&self, params: tower_lsp::lsp_types::DidOpenTextDocumentParams) { + let mut curr_doc = self.curr_doc.lock().await; + let mut tree = self.tree.lock().await; + let mut parser = self.parser.lock().await; + + *curr_doc = Some(lsp_textdocument::FullTextDocument::new( + params.text_document.language_id.clone(), + 
diff --git a/crates/server/src/lsp.rs b/crates/server/src/lsp.rs
new file mode 100644
index 0000000..3f13886
--- /dev/null
+++ b/crates/server/src/lsp.rs
@@ -0,0 +1,225 @@
+fn text_doc_change_to_tree_sitter_edit(
+    change: &tower_lsp::lsp_types::TextDocumentContentChangeEvent,
+    doc: &lsp_textdocument::FullTextDocument,
+) -> Result<tree_sitter::InputEdit, &'static str> {
+    let range = change.range.as_ref().ok_or("Invalid edit range")?;
+    let start = range.start;
+    let end = range.end;
+
+    let start_byte = doc.offset_at(start) as usize;
+    let old_end_byte = doc.offset_at(end) as usize;
+    let new_end_byte = start_byte + change.text.len();
+
+    let new_end_pos = doc.position_at(new_end_byte as u32);
+
+    Ok(tree_sitter::InputEdit {
+        start_byte,
+        old_end_byte,
+        new_end_byte,
+        start_position: tree_sitter::Point {
+            row: start.line as usize,
+            column: start.character as usize,
+        },
+        old_end_position: tree_sitter::Point {
+            row: end.line as usize,
+            column: end.character as usize,
+        },
+        new_end_position: tree_sitter::Point {
+            row: new_end_pos.line as usize,
+            column: new_end_pos.character as usize,
+        },
+    })
+}
+
+pub(crate) struct Backend {
+    client: tower_lsp::Client,
+    parser: std::sync::Arc<tokio::sync::Mutex<tree_sitter::Parser>>,
+    curr_doc: std::sync::Arc<tokio::sync::Mutex<Option<lsp_textdocument::FullTextDocument>>>,
+    tree: std::sync::Arc<tokio::sync::Mutex<Option<tree_sitter::Tree>>>,
+    kw_docs: crate::keywords::KeywordDocsMap,
+}
+
+impl Backend {
+    pub fn new(client: tower_lsp::Client) -> Self {
+        Self {
+            client,
+            parser: std::sync::Arc::new(
+                tokio::sync::Mutex::new(crate::parser::initialise_parser()),
+            ),
+            curr_doc: std::sync::Arc::new(tokio::sync::Mutex::new(None)),
+            tree: std::sync::Arc::new(tokio::sync::Mutex::new(None)),
+            kw_docs: crate::keywords::load_kw_docs(),
+        }
+    }
+}
+
+#[tower_lsp::async_trait]
+impl tower_lsp::LanguageServer for Backend {
+    async fn initialize(
+        &self,
+        _: tower_lsp::lsp_types::InitializeParams,
+    ) -> tower_lsp::jsonrpc::Result<tower_lsp::lsp_types::InitializeResult> {
+        Ok(tower_lsp::lsp_types::InitializeResult {
+            server_info: Some(tower_lsp::lsp_types::ServerInfo {
+                name: String::from("surrealql-lsp"),
+                version: Some(String::from("0.0.1")),
+            }),
+            capabilities: tower_lsp::lsp_types::ServerCapabilities {
+                text_document_sync: Some(tower_lsp::lsp_types::TextDocumentSyncCapability::Kind(
+                    tower_lsp::lsp_types::TextDocumentSyncKind::INCREMENTAL,
+                )),
+                hover_provider: Some(tower_lsp::lsp_types::HoverProviderCapability::Simple(true)),
+                completion_provider: Some(tower_lsp::lsp_types::CompletionOptions {
+                    resolve_provider: Some(false),
+                    work_done_progress_options: Default::default(),
+                    all_commit_characters: None,
+                    ..Default::default()
+                }),
+                ..tower_lsp::lsp_types::ServerCapabilities::default()
+            },
+        })
+    }
+
+    async fn initialized(&self, _: tower_lsp::lsp_types::InitializedParams) {
+        self.client.log_message(tower_lsp::lsp_types::MessageType::INFO, "initialized!").await;
+    }
+
+    async fn shutdown(&self) -> tower_lsp::jsonrpc::Result<()> {
+        Ok(())
+    }
+
+    async fn did_open(&self, params: tower_lsp::lsp_types::DidOpenTextDocumentParams) {
+        let mut curr_doc = self.curr_doc.lock().await;
+        let mut tree = self.tree.lock().await;
+        let mut parser = self.parser.lock().await;
+
+        *curr_doc = Some(lsp_textdocument::FullTextDocument::new(
+            params.text_document.language_id.clone(),
+            params.text_document.version,
+            params.text_document.text.clone(),
+        ));
+        *tree = parser.parse(params.text_document.text, None);
+    }
+
+    async fn did_change(&self, params: tower_lsp::lsp_types::DidChangeTextDocumentParams) {
+        let mut curr_doc = self.curr_doc.lock().await;
+        let mut tree = self.tree.lock().await;
+
+        if let Some(ref mut doc) = *curr_doc {
+            doc.update(&params.content_changes, params.text_document.version);
+            for change in params.content_changes.iter() {
+                if let Some(ref mut curr_tree) = *tree {
+                    match text_doc_change_to_tree_sitter_edit(change, doc) {
+                        Ok(edit) => {
+                            curr_tree.edit(&edit);
+                        }
+                        Err(err) => {
+                            self.client
+                                .log_message(
+                                    tower_lsp::lsp_types::MessageType::ERROR,
+                                    format!("Bad edit info, failed to edit tree: {}", err),
+                                )
+                                .await;
+                        }
+                    }
+                }
+            }
+        }
+    }
+
+    async fn hover(
+        &self,
+        params: tower_lsp::lsp_types::HoverParams,
+    ) -> tower_lsp::jsonrpc::Result<Option<tower_lsp::lsp_types::Hover>> {
+        let curr_doc = self.curr_doc.lock().await;
+        let mut tree = self.tree.lock().await;
+        let mut parser = self.parser.lock().await;
+
+        let doc = match &*curr_doc {
+            Some(doc) => doc,
+            _ => return Ok(None),
+        };
+
+        let keyword = crate::position::retrieve_keyword_at_position(
+            doc.get_content(None),
+            &mut parser,
+            &mut tree,
+            params.text_document_position_params.position.line as usize,
+            params.text_document_position_params.position.character as usize,
+        );
+
+        match keyword {
+            Some(keyword) => {
+                if let Some(doc_content) = self.kw_docs.get(&keyword) {
+                    let hover_contents = tower_lsp::lsp_types::HoverContents::Markup(
+                        tower_lsp::lsp_types::MarkupContent {
+                            kind: tower_lsp::lsp_types::MarkupKind::Markdown,
+                            value: doc_content.clone(),
+                        },
+                    );
+                    let hover =
+                        tower_lsp::lsp_types::Hover { contents: hover_contents, range: None };
+                    Ok(Some(hover))
+                } else {
+                    self.client
+                        .log_message(
+                            tower_lsp::lsp_types::MessageType::WARNING,
+                            format!("Documentation for keyword '{}' not found.", keyword),
+                        )
+                        .await;
+                    Ok(None)
+                }
+            }
+            _ => Ok(None),
+        }
+    }
+
+    async fn completion(
+        &self,
+        params: tower_lsp::lsp_types::CompletionParams,
+    ) -> tower_lsp::jsonrpc::Result<Option<tower_lsp::lsp_types::CompletionResponse>> {
+        let curr_doc = self.curr_doc.lock().await;
+        let mut tree = self.tree.lock().await;
+        let mut parser = self.parser.lock().await;
+
+        let doc = match &*curr_doc {
+            Some(doc) => doc,
+            _ => return Ok(None),
+        };
+
+        let completion_list = crate::completion::get_completion_list(
+            doc.get_content(None),
+            &mut parser,
+            &mut tree,
+            &params,
+        );
+
+        match completion_list {
+            Some(list) => {
+                let items: Vec<tower_lsp::lsp_types::CompletionItem> = list
+                    .iter()
+                    .map(|item| {
+                        let documentation =
+                            self.kw_docs.get(item).map(|doc| tower_lsp::lsp_types::MarkupContent {
+                                kind: tower_lsp::lsp_types::MarkupKind::Markdown,
+                                value: doc.to_string(),
+                            });
+
+                        tower_lsp::lsp_types::CompletionItem {
+                            label: item.to_string(),
+                            kind: Some(tower_lsp::lsp_types::CompletionItemKind::KEYWORD),
+                            documentation: documentation
+                                .map(tower_lsp::lsp_types::Documentation::MarkupContent),
+                            ..tower_lsp::lsp_types::CompletionItem::default()
+                        }
+                    })
+                    .collect();
+
+                Ok(Some(tower_lsp::lsp_types::CompletionResponse::List(
+                    tower_lsp::lsp_types::CompletionList { is_incomplete: true, items },
+                )))
+            }
+            _ => Ok(None),
+        }
+    }
+}
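The did_change path above hinges on text_doc_change_to_tree_sitter_edit translating an LSP range edit into byte offsets for tree-sitter. A rough, self-contained illustration of that translation, written as if it were a test inside lsp.rs with hypothetical positions and content, could look like this (the FullTextDocument constructor call simply mirrors the one used in did_open):

    #[cfg(test)]
    mod edit_tests {
        use super::text_doc_change_to_tree_sitter_edit;

        #[test]
        fn converts_a_single_line_replacement() {
            // Replace "table" (columns 14..19) with "person" on a one-line document.
            let doc = lsp_textdocument::FullTextDocument::new(
                "surrealql".to_string(),
                1,
                "SELECT * FROM table".to_string(),
            );
            let change = tower_lsp::lsp_types::TextDocumentContentChangeEvent {
                range: Some(tower_lsp::lsp_types::Range {
                    start: tower_lsp::lsp_types::Position { line: 0, character: 14 },
                    end: tower_lsp::lsp_types::Position { line: 0, character: 19 },
                }),
                range_length: None,
                text: "person".to_string(),
            };

            let edit = text_doc_change_to_tree_sitter_edit(&change, &doc).unwrap();
            assert_eq!(edit.start_byte, 14);
            assert_eq!(edit.old_end_byte, 19);
            assert_eq!(edit.new_end_byte, 20); // 14 + "person".len()
        }
    }

Note that in the server itself the document is updated via doc.update before this helper runs, so the offsets are resolved against the already-updated text.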
diff --git a/crates/server/src/main.rs b/crates/server/src/main.rs
index c1d83f1..6a3a73b 100644
--- a/crates/server/src/main.rs
+++ b/crates/server/src/main.rs
@@ -1,552 +1,8 @@
-use std::sync::Arc;
-use tokio::sync::Mutex;
-use tree_sitter::Point;
-
-struct KeywordDocumentation<'a> {
-    keyword: &'a str,
-    documentation: &'a str,
-}
-
-pub fn get_all_keywords_documentation() -> std::collections::HashMap<&'static str, &'static str> {
-    let mut map = std::collections::HashMap::new();
-    map.insert("EXPLAIN", include_str!("./md/explain.md"));
-    map.insert("FROM", include_str!("./md/from.md"));
-    map.insert("GROUP BY", include_str!("./md/group_by.md"));
-    map.insert("LIMIT", include_str!("./md/limit.md"));
-    map.insert("ONLY", include_str!("./md/only.md"));
-    map.insert("ORDER BY", include_str!("./md/order_by.md"));
-    map.insert("PARALLEL", include_str!("./md/parallel.md"));
-    map.insert("SELECT", include_str!("./md/select.md"));
-    map.insert("SPLIT", include_str!("./md/split.md"));
-    map.insert("TIMEOUT", include_str!("./md/timeout.md"));
-    map.insert("VALUE", include_str!("./md/value.md"));
-    map.insert("WHERE", include_str!("./md/where.md"));
-    map.insert("WITH", include_str!("./md/with.md"));
-    map
-}
-
-fn get_keyword_documentations(keywords: &[&'static str]) -> Vec<KeywordDocumentation<'static>> {
-    let documentation_map = get_all_keywords_documentation();
-    keywords
-        .iter()
-        .map(|&keyword| KeywordDocumentation {
-            keyword,
-            documentation: documentation_map.get(keyword).expect("Could not find keyword"),
-        })
-        .collect()
-}
-
-fn get_target_options_completion_details() -> Vec<KeywordDocumentation<'static>> {
-    let keywords = [
-        "WHERE", "SPLIT", "WITH", "GROUP BY", "LIMIT", "ORDER BY", "TIMEOUT", "EXPLAIN", "PARALLEL",
-    ];
-    get_keyword_documentations(&keywords)
-}
-
-fn get_select_options_completion_details() -> Vec<KeywordDocumentation<'static>> {
-    let keywords = ["VALUE"];
-    get_keyword_documentations(&keywords)
-}
-
-fn get_select_completion_details() -> Vec<KeywordDocumentation<'static>> {
-    let keywords = ["SELECT"];
-    get_keyword_documentations(&keywords)
-}
-
-pub fn get_keyword_documentation_at_pos<'a>(
-    curr_doc: &str,
-    parser: &mut tree_sitter::Parser,
-    curr_tree: &mut Option<tree_sitter::Tree>,
-    params: &tower_lsp::lsp_types::HoverParams,
-) -> Option<&'a str> {
-    let cursor_line = params.text_document_position_params.position.line as usize;
-    let cursor_char = params.text_document_position_params.position.character as usize;
-    let keywords_doc_map = get_all_keywords_documentation();
-
-    *curr_tree = parser.parse(curr_doc, curr_tree.as_ref());
-    if let Some(tree) = curr_tree {
-        let mut cursor = tree_sitter::QueryCursor::new();
-        let curr_doc = curr_doc.as_bytes();
-
-        static QUERY_KEYWORDS: once_cell::sync::Lazy<tree_sitter::Query> =
-            once_cell::sync::Lazy::new(|| {
-                tree_sitter::Query::new(
-                    tree_sitter_surrealql::language(),
-                    r#"
-                    [
-                        (keyword_explain)
-                        (keyword_from)
-                        (keyword_group_by)
-                        (keyword_limit)
-                        (keyword_only)
-                        (keyword_order_by)
-                        (keyword_parallel)
-                        (keyword_select)
-                        (keyword_split)
-                        (keyword_timeout)
-                        (keyword_value)
-                        (keyword_where)
-                        (keyword_with)
-                    ] @keywords
-                    "#,
-                )
-                .expect("Could not initialize query")
-            });
-
-        let matches_iter = cursor.matches(&QUERY_KEYWORDS, tree.root_node(), curr_doc);
-
-        for match_ in matches_iter {
-            for capture in match_.captures.iter() {
-                let node = capture.node;
-                let arg_start = capture.node.range().start_point;
-                let arg_end = capture.node.range().end_point;
-                if arg_start.row == cursor_line
-                    && arg_end.row == cursor_line
-                    && arg_start.column <= cursor_char
-                    && arg_end.column >= cursor_char
-                {
-                    if let Ok(keyword) = node.utf8_text(curr_doc) {
-                        return keywords_doc_map.get(keyword).cloned();
-                    }
-                }
-            }
-        }
-    }
-    None
-}
-
-fn create_completion_item(
-    keyword: &str,
-    documentation: &str,
-) -> tower_lsp::lsp_types::CompletionItem {
-    tower_lsp::lsp_types::CompletionItem {
-        label: keyword.to_string(),
-        kind: Some(tower_lsp::lsp_types::CompletionItemKind::KEYWORD),
-        documentation: Some(tower_lsp::lsp_types::Documentation::MarkupContent(
-            tower_lsp::lsp_types::MarkupContent {
-                kind: tower_lsp::lsp_types::MarkupKind::Markdown,
-                value: documentation.to_string(),
-            },
-        )),
-        ..Default::default()
-    }
-}
-
-fn get_completion_items(
-    completion_details: Vec<KeywordDocumentation>,
-) -> Vec<tower_lsp::lsp_types::CompletionItem> {
-    completion_details
-        .iter()
-        .map(|detail| create_completion_item(detail.keyword, detail.documentation))
-        .collect()
-}
-
-fn get_options_to_completion_items_map(
-) -> std::collections::HashMap<&'static str, Vec<tower_lsp::lsp_types::CompletionItem>> {
-    let mut map = std::collections::HashMap::new();
-    map.insert("select", get_completion_items(get_select_completion_details()));
-    map.insert("select_options", get_completion_items(get_select_options_completion_details()));
-    map.insert("target_options", get_completion_items(get_target_options_completion_details()));
-    map
-}
-
-fn cursor_matches(
-    cursor_line: usize,
-    cursor_char: usize,
-    query_start: Point,
-    query_end: Point,
-) -> bool {
-    // completely envelop the cursor line-wise
-    if query_start.row < cursor_line && query_end.row > cursor_line {
-        return true;
-    }
-
-    // single line match, check columns on both sides
-    if cursor_line == query_start.row
-        && cursor_line == query_end.row
-        && query_start.column <= cursor_char
-        && query_end.column >= cursor_char
-    {
-        return true;
-    }
-
-    // start lines overlap, but the start column is before the cursor column
-    if cursor_line == query_start.row && query_start.column <= cursor_char {
-        return true;
-    }
-
-    // end lines overlap, but the end column is after the cursor column
-    if cursor_line == query_end.row && query_end.column >= cursor_char {
-        return true;
-    }
-
-    false
-}
-
-fn cursor_before(cursor_line: usize, cursor_char: usize, query_start: Point) -> bool {
-    if cursor_line < query_start.row
-        || (cursor_line == query_start.row && cursor_char < query_start.column)
-    {
-        return true;
-    }
-
-    false
-}
-
-fn cursor_after(cursor_line: usize, cursor_char: usize, query_end: Point) -> bool {
-    if cursor_line > query_end.row
-        || (cursor_line == query_end.row && cursor_char > query_end.column)
-    {
-        return true;
-    }
-
-    false
-}
-
-fn text_doc_change_to_tree_sitter_edit(
-    change: &tower_lsp::lsp_types::TextDocumentContentChangeEvent,
-    doc: &lsp_textdocument::FullTextDocument,
-) -> Result<tree_sitter::InputEdit, &'static str> {
-    let range = change.range.ok_or("Invalid edit range")?;
-    let start = range.start;
-    let end = range.end;
-
-    let start_byte = doc.offset_at(start) as usize;
-    let new_end_byte = start_byte + change.text.len();
-    let new_end_pos = doc.position_at(new_end_byte as u32);
-
-    Ok(tree_sitter::InputEdit {
-        start_byte,
-        old_end_byte: doc.offset_at(end) as usize,
-        new_end_byte,
-        start_position: tree_sitter::Point {
-            row: start.line as usize,
-            column: start.character as usize,
-        },
-        old_end_position: tree_sitter::Point {
-            row: end.line as usize,
-            column: end.character as usize,
-        },
-        new_end_position: tree_sitter::Point {
-            row: new_end_pos.line as usize,
-            column: new_end_pos.character as usize,
-        },
-    })
-}
-
-fn initialise_parser() -> tree_sitter::Parser {
-    let mut parser = tree_sitter::Parser::new();
-    if parser.set_language(tree_sitter_surrealql::language()).is_err() {
-        panic!("Failed to set parser language");
-    }
-    parser
-}
-
-fn normalize_document_and_cursor_position(
-    doc: &str,
-    cursor_line: usize,
-    cursor_char: usize,
-) -> (String, usize, usize) {
-    let lines: Vec<&str> = doc.lines().collect();
-    if lines.len() <= 1 {
-        return (doc.to_string(), cursor_line, cursor_char);
-    }
-    let join_char = " ";
-
-    let content_before_cursor =
-        lines.iter().take(cursor_line).copied().collect::<Vec<&str>>().join(join_char);
-
-    let cursor_char = content_before_cursor.len() + cursor_char + 1;
-    let curr_doc = format!("{}\n", &lines.join(join_char));
-
-    (curr_doc, 0, cursor_char)
-}
-
-fn get_completion_list(
-    curr_doc: &str,
-    parser: &mut tree_sitter::Parser,
-    curr_tree: &mut Option<tree_sitter::Tree>,
-    params: &tower_lsp::lsp_types::CompletionParams,
-    options_to_completions_map: &std::collections::HashMap<
-        &'static str,
-        Vec<tower_lsp::lsp_types::CompletionItem>,
-    >,
-) -> Option<Vec<tower_lsp::lsp_types::CompletionItem>> {
-    let cursor_line = params.text_document_position.position.line as usize;
-    let cursor_char = params.text_document_position.position.character as usize;
-    let (curr_doc, cursor_line, cursor_char) =
-        normalize_document_and_cursor_position(curr_doc, cursor_line, cursor_char);
-    let curr_doc = &curr_doc;
-
-    *curr_tree = parser.parse(curr_doc, curr_tree.as_ref());
-    if let Some(tree) = curr_tree {
-        let mut cursor = tree_sitter::QueryCursor::new();
-        let curr_doc = curr_doc.as_bytes();
-
-        static QUERY_INSTR_ANY: once_cell::sync::Lazy<tree_sitter::Query> =
-            once_cell::sync::Lazy::new(|| {
-                tree_sitter::Query::new(
-                    tree_sitter_surrealql::language(),
-                    r#"
-                    (from_clause (target) @target_options . )
-
-                    (keyword_select) @select_options
-                    "#,
-                )
-                .expect("Could not initialise query")
-            });
-
-        let matches_iter = cursor.matches(&QUERY_INSTR_ANY, tree.root_node(), curr_doc);
-        let mut last_match: Option<(String, tree_sitter::Range)> = None;
-
-        for match_ in matches_iter {
-            for capture in match_.captures.iter() {
-                let capture_name = &QUERY_INSTR_ANY.capture_names()[capture.index as usize];
-                let arg_start = capture.node.range().start_point;
-                let arg_end = capture.node.range().end_point;
-
-                if cursor_matches(cursor_line, cursor_char, arg_start, arg_end) {
-                    last_match = Some((capture_name.clone(), capture.node.range()));
-                }
-            }
-        }
-
-        if let Some((capture_name, _range)) = last_match {
-            if let Some(completion_items) = options_to_completions_map.get(capture_name.as_str()) {
-                return Some(completion_items.clone());
-            }
-        }
-
-        static QUERY_STATEMEMENT_START: once_cell::sync::Lazy<tree_sitter::Query> =
-            once_cell::sync::Lazy::new(|| {
-                tree_sitter::Query::new(tree_sitter_surrealql::language(), "(ERROR) @start")
-                    .expect("Could not initialise query")
-            });
-
-        for match_ in cursor.matches(&QUERY_STATEMEMENT_START, tree.root_node(), curr_doc) {
-            for capture in match_.captures.iter() {
-                let arg_start = capture.node.range().start_point;
-                let arg_end = capture.node.range().end_point;
-
-                if cursor_matches(cursor_line, cursor_char, arg_start, arg_end) {
-                    return options_to_completions_map.get("select").cloned();
-                }
-            }
-        }
-
-        // match SELECT and ensure the next neighbor is beyond the cursor
-        static QUERY_SELECT_1: once_cell::sync::Lazy<tree_sitter::Query> =
-            once_cell::sync::Lazy::new(|| {
-                tree_sitter::Query::new(
-                    tree_sitter_surrealql::language(),
-                    r#"(
-                    (keyword_select) @select
-                    .
-                    (_) @neighbor
-
-                    )"#,
-                )
-                .expect("Could not initialise query")
-            });
-
-        for match_ in cursor.matches(&QUERY_SELECT_1, tree.root_node(), curr_doc) {
-            let caps = match_.captures;
-            if caps.len() < 2 {
-                continue;
-            }
-            let select_range = caps[0].node.range();
-            let neighbor_range = caps[1].node.range();
-            if cursor_after(cursor_line, cursor_char, select_range.end_point)
-                && cursor_before(cursor_line, cursor_char, neighbor_range.start_point)
-            {
-                return options_to_completions_map.get("select_options").cloned();
-            }
-        }
-
-        // finally, we'll restrict our query to the cursor's current line and check
-        // for a select without worrying about neighbors
-        static QUERY_SELECT_2: once_cell::sync::Lazy<tree_sitter::Query> =
-            once_cell::sync::Lazy::new(|| {
-                tree_sitter::Query::new(
-                    tree_sitter_surrealql::language(),
-                    "(keyword_select) @select",
-                )
-                .expect("Could not initialise query")
-            });
-
-        // suggest * if cursor is past but on the same line
-        cursor.set_point_range(std::ops::Range {
-            start: tree_sitter::Point { row: cursor_line, column: 0 },
-            end: tree_sitter::Point { row: cursor_line, column: usize::MAX },
-        });
-        for match_ in cursor.matches(&QUERY_SELECT_2, tree.root_node(), curr_doc) {
-            for capture in match_.captures.iter() {
-                let arg_end = capture.node.range().end_point;
-                if cursor_after(cursor_line, cursor_char, arg_end) {
-                    return options_to_completions_map.get("select_options").cloned();
-                }
-            }
-        }
-    }
-    None
-}
-
-struct Backend {
-    client: tower_lsp::Client,
-    parser: Arc<Mutex<tree_sitter::Parser>>,
-    curr_doc: Arc<Mutex<Option<lsp_textdocument::FullTextDocument>>>,
-    tree: Arc<Mutex<Option<tree_sitter::Tree>>>,
-    completions_map:
-        std::collections::HashMap<&'static str, Vec<tower_lsp::lsp_types::CompletionItem>>,
-}
-
-impl Backend {
-    fn new(client: tower_lsp::Client) -> Self {
-        Self {
-            client,
-            parser: Arc::new(Mutex::new(initialise_parser())),
-            curr_doc: Arc::new(Mutex::new(None)),
-            tree: Arc::new(Mutex::new(None)),
-            completions_map: get_options_to_completion_items_map(),
-        }
-    }
-}
-
-#[tower_lsp::async_trait]
-impl tower_lsp::LanguageServer for Backend {
-    async fn initialize(
-        &self,
-        _: tower_lsp::lsp_types::InitializeParams,
-    ) -> tower_lsp::jsonrpc::Result<tower_lsp::lsp_types::InitializeResult> {
-        Ok(tower_lsp::lsp_types::InitializeResult {
-            server_info: Some(tower_lsp::lsp_types::ServerInfo {
-                name: String::from("surrealql-lsp"),
-                version: Some(String::from("0.0.1")),
-            }),
-            capabilities: tower_lsp::lsp_types::ServerCapabilities {
-                text_document_sync: Some(tower_lsp::lsp_types::TextDocumentSyncCapability::Kind(
-                    tower_lsp::lsp_types::TextDocumentSyncKind::INCREMENTAL,
-                )),
-                hover_provider: Some(tower_lsp::lsp_types::HoverProviderCapability::Simple(true)),
-                completion_provider: Some(tower_lsp::lsp_types::CompletionOptions {
-                    resolve_provider: Some(false),
-                    work_done_progress_options: Default::default(),
-                    all_commit_characters: None,
-                    ..Default::default()
-                }),
-                ..tower_lsp::lsp_types::ServerCapabilities::default()
-            },
-        })
-    }
-
-    async fn initialized(&self, _: tower_lsp::lsp_types::InitializedParams) {
-        self.client.log_message(tower_lsp::lsp_types::MessageType::INFO, "initialized!").await;
-    }
-
-    async fn shutdown(&self) -> tower_lsp::jsonrpc::Result<()> {
-        Ok(())
-    }
-
-    async fn did_open(&self, params: tower_lsp::lsp_types::DidOpenTextDocumentParams) {
-        let mut curr_doc = self.curr_doc.lock().await;
-        let mut tree = self.tree.lock().await;
-        let mut parser = self.parser.lock().await;
-
-        *curr_doc = Some(lsp_textdocument::FullTextDocument::new(
-            params.text_document.language_id.clone(),
-            params.text_document.version,
-            params.text_document.text.clone(),
-        ));
-        *tree = parser.parse(params.text_document.text, None);
-    }
-
-    async fn did_change(&self, params: tower_lsp::lsp_types::DidChangeTextDocumentParams) {
-        let mut curr_doc = self.curr_doc.lock().await;
-        let mut tree = self.tree.lock().await;
-
-        if let Some(ref mut doc) = *curr_doc {
-            doc.update(&params.content_changes, params.text_document.version);
-            for change in params.content_changes.iter() {
-                if let Some(ref mut curr_tree) = *tree {
-                    match text_doc_change_to_tree_sitter_edit(change, doc) {
-                        Ok(edit) => {
-                            curr_tree.edit(&edit);
-                        }
-                        Err(err) => {
-                            self.client
-                                .log_message(
-                                    tower_lsp::lsp_types::MessageType::ERROR,
-                                    format!("Bad edit info, failed to edit tree: {}", err),
-                                )
-                                .await;
-                        }
-                    }
-                }
-            }
-        }
-    }
-
-    async fn completion(
-        &self,
-        params: tower_lsp::lsp_types::CompletionParams,
-    ) -> tower_lsp::jsonrpc::Result<Option<tower_lsp::lsp_types::CompletionResponse>> {
-        let curr_doc = self.curr_doc.lock().await;
-        let mut tree = self.tree.lock().await;
-        let mut parser = self.parser.lock().await;
-
-        if let Some(ref doc) = *curr_doc {
-            let completion_list = get_completion_list(
-                doc.get_content(None),
-                &mut parser,
-                &mut tree,
-                &params,
-                &self.completions_map,
-            );
-            match completion_list {
-                Some(list) => {
-                    return Ok(Some(tower_lsp::lsp_types::CompletionResponse::List(
-                        tower_lsp::lsp_types::CompletionList { is_incomplete: true, items: list },
-                    )))
-                }
-                _ => return Ok(None),
-            }
-        }
-
-        Ok(None)
-    }
-
-    async fn hover(
-        &self,
-        params: tower_lsp::lsp_types::HoverParams,
-    ) -> tower_lsp::jsonrpc::Result<Option<tower_lsp::lsp_types::Hover>> {
-        let curr_doc = self.curr_doc.lock().await;
-        let mut tree = self.tree.lock().await;
-        let mut parser = self.parser.lock().await;
-
-        if let Some(ref doc) = *curr_doc {
-            let documentation = get_keyword_documentation_at_pos(
-                doc.get_content(None),
-                &mut parser,
-                &mut tree,
-                &params,
-            );
-            match documentation {
-                Some(documentation_) => {
-                    return Ok(Some(tower_lsp::lsp_types::Hover {
-                        contents: tower_lsp::lsp_types::HoverContents::Markup(
-                            tower_lsp::lsp_types::MarkupContent {
-                                kind: tower_lsp::lsp_types::MarkupKind::Markdown,
-                                value: documentation_.to_string(),
-                            },
-                        ),
-                        range: None,
-                    }));
-                }
-                _ => return Ok(None),
-            }
-        }
-        Ok(None)
-    }
-}
+mod completion;
+mod keywords;
+mod lsp;
+mod parser;
+mod position;
 
 #[tokio::main]
 async fn main() {
@@ -554,44 +10,6 @@ async fn main() {
-
     let (stdin, stdout) = (tokio::io::stdin(), tokio::io::stdout());
-    let (service, socket) = tower_lsp::LspService::build(Backend::new).finish();
+    let (service, socket) = tower_lsp::LspService::build(lsp::Backend::new).finish();
     tower_lsp::Server::new(stdin, stdout, socket).serve(service).await;
 }
-
-#[cfg(test)]
-mod tests {
-    use crate::normalize_document_and_cursor_position;
-
-    #[test]
-    fn correctly_normalizes_document() {
-        let doc = r#"
-abc
-def
-hij"#;
-        let cursor_line = 3;
-        let cursor_char = 2;
-
-        let expected = (" abc def hij\n".to_string(), 0, 11);
-        let out = normalize_document_and_cursor_position(doc, cursor_line, cursor_char);
-        assert_eq!(out, expected);
-
-        let doc = r#"abcdef
-hij
-klm"#;
-        let cursor_line = 1;
-        let cursor_char = 2;
-
-        let expected = ("abcdef hij klm\n".to_string(), 0, 9);
-        let out = normalize_document_and_cursor_position(doc, cursor_line, cursor_char);
-        assert_eq!(out, expected);
-
-        let doc = r#"SELECT * FROM table
-WH
-"#;
-        let cursor_line = 1;
-        let cursor_char = 2;
-
-        let expected = ("SELECT * FROM table WH\n".to_string(), 0, 22);
-        let out = normalize_document_and_cursor_position(doc, cursor_line, cursor_char);
-        assert_eq!(out, expected);
-    }
-}
diff --git a/crates/server/src/parser.rs b/crates/server/src/parser.rs
new file mode 100644
index 0000000..b481f2b
--- /dev/null
+++ b/crates/server/src/parser.rs
@@ -0,0 +1,7 @@
+pub(crate) fn initialise_parser() -> tree_sitter::Parser {
+    let mut parser = tree_sitter::Parser::new();
+    if parser.set_language(tree_sitter_surrealql::language()).is_err() {
+        panic!("Failed to set parser language");
+    }
+    parser
+}
diff --git a/crates/server/src/position.rs b/crates/server/src/position.rs
new file mode 100644
index 0000000..6422a4b
--- /dev/null
+++ b/crates/server/src/position.rs
@@ -0,0 +1,81 @@
+pub(crate) fn retrieve_keyword_at_position(
+    document_content: &str,
+    parser: &mut tree_sitter::Parser,
+    syntax_tree: &mut Option<tree_sitter::Tree>,
+    cursor_line: usize,
+    cursor_character: usize,
+) -> Option<String> {
+    *syntax_tree = parser.parse(document_content, syntax_tree.as_ref());
+    let tree = syntax_tree.as_ref()?;
+
+    let mut query_cursor = tree_sitter::QueryCursor::new();
+    let document_bytes = document_content.as_bytes();
+
+    static KEYWORD_QUERY: once_cell::sync::Lazy<tree_sitter::Query> =
+        once_cell::sync::Lazy::new(|| {
+            tree_sitter::Query::new(
+                tree_sitter_surrealql::language(),
+                r#"
+                [
+                    (keyword_explain)
+                    (keyword_from)
+                    (keyword_group_by)
+                    (keyword_limit)
+                    (keyword_only)
+                    (keyword_order_by)
+                    (keyword_parallel)
+                    (keyword_select)
+                    (keyword_split)
+                    (keyword_timeout)
+                    (keyword_value)
+                    (keyword_where)
+                    (keyword_with)
+                ] @keywords
+                "#,
+            )
+            .expect("Failed to create keyword query")
+        });
+
+    find_keyword_at_position(
+        &mut query_cursor,
+        &KEYWORD_QUERY,
+        tree.root_node(),
+        document_bytes,
+        cursor_line,
+        cursor_character,
+    )
+}
+
+fn find_keyword_at_position(
+    query_cursor: &mut tree_sitter::QueryCursor,
+    query: &tree_sitter::Query,
+    root_node: tree_sitter::Node,
+    document_bytes: &[u8],
+    cursor_line: usize,
+    cursor_character: usize,
+) -> Option<String> {
+    for match_ in query_cursor.matches(query, root_node, document_bytes) {
+        for capture in match_.captures {
+            let node = capture.node;
+            let start_position = node.start_position();
+            let end_position = node.end_position();
+
+            if is_within_cursor_range(start_position, end_position, cursor_line, cursor_character) {
+                return node.utf8_text(document_bytes).ok().map(String::from);
+            }
+        }
+    }
+    None
+}
+
+fn is_within_cursor_range(
+    start_position: tree_sitter::Point,
+    end_position: tree_sitter::Point,
+    cursor_line: usize,
+    cursor_character: usize,
+) -> bool {
+    start_position.row == cursor_line
+        && end_position.row == cursor_line
+        && start_position.column <= cursor_character
+        && end_position.column >= cursor_character
+}
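The hover path ultimately reduces to is_within_cursor_range deciding whether the cursor sits on a keyword node. A small sketch of how that check could be pinned down, assuming a #[cfg(test)] block inside position.rs and hypothetical column numbers:

    #[cfg(test)]
    mod tests {
        use super::is_within_cursor_range;

        #[test]
        fn detects_cursor_inside_a_single_line_keyword() {
            // A node spanning columns 21..26 on row 0, e.g. "WHERE" in
            // "SELECT * FROM person WHERE age > 18".
            let start = tree_sitter::Point { row: 0, column: 21 };
            let end = tree_sitter::Point { row: 0, column: 26 };

            assert!(is_within_cursor_range(start, end, 0, 23));
            assert!(!is_within_cursor_range(start, end, 0, 27));
            assert!(!is_within_cursor_range(start, end, 1, 23));
        }
    }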