
Hash completion items to properly match them during /resolve #18653

Merged
merged 8 commits on Dec 11, 2024
15 changes: 15 additions & 0 deletions Cargo.lock

Some generated files are not rendered by default.

3 changes: 1 addition & 2 deletions crates/ide-completion/src/item.rs
@@ -346,8 +346,7 @@ pub enum CompletionItemKind {
impl_from!(SymbolKind for CompletionItemKind);

impl CompletionItemKind {
#[cfg(test)]
Contributor Author

This may seem like an odd change, but it was tempting to reuse the existing &str producer for this large enum.

As an alternative, I could copy this into the module with the hashing function; are there better ideas?

Member

Can't we just make CompletionItemKind and SymbolKind Hash?

Member

Oh I see, tenthash doesn't accept Hash types, only byte slices...
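
A minimal illustration of that constraint (not part of the diff; the helper below is hypothetical, but the hasher calls mirror the ones added to lib.rs further down): TentHasher::update only takes byte-slice-like input such as &str or [u8; N], so the string tags produced by tag() can be fed to it directly, while an arbitrary impl Hash type cannot.

```rust
use tenthash::TentHasher;

// Hypothetical helper: hash a label plus its kind tag the way
// completion_item_hash does, by feeding raw bytes and strings to TentHash.
fn digest(label: &str, kind_tag: &'static str) -> [u8; 20] {
    let mut hasher = TentHasher::new();
    hasher.update(label); // &str is fine: it is just a sequence of bytes
    hasher.update(kind_tag); // an arbitrary `impl Hash` value has no such byte view
    hasher.finalize()
}
```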

pub(crate) fn tag(self) -> &'static str {
pub fn tag(self) -> &'static str {
match self {
CompletionItemKind::SymbolKind(kind) => match kind {
SymbolKind::Attribute => "at",
1 change: 1 addition & 0 deletions crates/ide-completion/src/lib.rs
@@ -34,6 +34,7 @@ pub use crate::{
config::{CallableSnippets, CompletionConfig},
item::{
CompletionItem, CompletionItemKind, CompletionRelevance, CompletionRelevancePostfixMatch,
CompletionRelevanceReturnType, CompletionRelevanceTypeMatch,
},
snippet::{Snippet, SnippetScope},
};
3 changes: 3 additions & 0 deletions crates/rust-analyzer/Cargo.toml
@@ -21,9 +21,11 @@ path = "src/bin/main.rs"

[dependencies]
anyhow.workspace = true
base64 = "0.22"
crossbeam-channel.workspace = true
dirs = "5.0.1"
dissimilar.workspace = true
ide-completion.workspace = true
itertools.workspace = true
scip = "0.5.1"
lsp-types = { version = "=0.95.0", features = ["proposed"] }
@@ -34,6 +36,7 @@ rayon.workspace = true
rustc-hash.workspace = true
serde_json = { workspace = true, features = ["preserve_order"] }
serde.workspace = true
tenthash = "0.4.0"
num_cpus = "1.15.0"
mimalloc = { version = "0.1.30", default-features = false, optional = true }
lsp-server.workspace = true
2 changes: 1 addition & 1 deletion crates/rust-analyzer/src/config.rs
@@ -1440,7 +1440,7 @@ impl Config {
limit: self.completion_limit(source_root).to_owned(),
enable_term_search: self.completion_termSearch_enable(source_root).to_owned(),
term_search_fuel: self.completion_termSearch_fuel(source_root).to_owned() as u64,
fields_to_resolve: if self.client_is_helix() || self.client_is_neovim() {
fields_to_resolve: if self.client_is_neovim() {
CompletionFieldsToResolve::empty()
} else {
CompletionFieldsToResolve::from_client_capabilities(&client_capability_fields)
29 changes: 20 additions & 9 deletions crates/rust-analyzer/src/handlers/request.rs
@@ -10,6 +10,7 @@ use std::{

use anyhow::Context;

use base64::{prelude::BASE64_STANDARD, Engine};
use ide::{
AnnotationConfig, AssistKind, AssistResolveStrategy, Cancellable, CompletionFieldsToResolve,
FilePosition, FileRange, HoverAction, HoverGotoTypeData, InlayFieldsToResolve, Query,
@@ -36,6 +37,7 @@ use triomphe::Arc;
use vfs::{AbsPath, AbsPathBuf, FileId, VfsPath};

use crate::{
completion_item_hash,
config::{Config, RustfmtConfig, WorkspaceSymbolConfig},
diagnostics::convert_diagnostic,
global_state::{FetchWorkspaceRequest, GlobalState, GlobalStateSnapshot},
@@ -1127,30 +1129,39 @@ pub(crate) fn handle_completion_resolve(
forced_resolve_completions_config.fields_to_resolve = CompletionFieldsToResolve::empty();

let position = FilePosition { file_id, offset };
let Some(resolved_completions) = snap.analysis.completions(
let Some(completions) = snap.analysis.completions(
&forced_resolve_completions_config,
position,
resolve_data.trigger_character,
)?
else {
return Ok(original_completion);
};
let Ok(resolve_data_hash) = BASE64_STANDARD.decode(resolve_data.hash) else {
return Ok(original_completion);
};

let Some(corresponding_completion) = completions.into_iter().find(|completion_item| {
// Avoid computing hashes for items that obviously do not match
// r-a might append a detail-based suffix to the label, so we cannot check for equality
original_completion.label.starts_with(completion_item.label.as_str())
&& resolve_data_hash == completion_item_hash(completion_item, resolve_data.for_ref)
}) else {
return Ok(original_completion);
};

let mut resolved_completions = to_proto::completion_items(
&snap.config,
&forced_resolve_completions_config.fields_to_resolve,
&line_index,
snap.file_version(position.file_id),
resolve_data.position,
resolve_data.trigger_character,
resolved_completions,
vec![corresponding_completion],
);

let mut resolved_completion =
if resolved_completions.get(resolve_data.completion_item_index).is_some() {
resolved_completions.swap_remove(resolve_data.completion_item_index)
} else {
return Ok(original_completion);
};
let Some(mut resolved_completion) = resolved_completions.pop() else {
return Ok(original_completion);
};

if !resolve_data.imports.is_empty() {
let additional_edits = snap
78 changes: 78 additions & 0 deletions crates/rust-analyzer/src/lib.rs
@@ -47,7 +47,9 @@ use self::lsp::ext as lsp_ext;
#[cfg(test)]
mod integrated_benchmarks;

use ide::{CompletionItem, CompletionRelevance};
use serde::de::DeserializeOwned;
use tenthash::TentHasher;

pub use crate::{
lsp::capabilities::server_capabilities, main_loop::main_loop, reload::ws_to_crate_graph,
@@ -61,3 +63,79 @@ pub fn from_json<T: DeserializeOwned>(
serde_json::from_value(json.clone())
.map_err(|e| anyhow::format_err!("Failed to deserialize {what}: {e}; {json}"))
}

fn completion_item_hash(item: &CompletionItem, is_ref_completion: bool) -> [u8; 20] {
fn hash_completion_relevance(hasher: &mut TentHasher, relevance: &CompletionRelevance) {
use ide_completion::{
CompletionRelevancePostfixMatch, CompletionRelevanceReturnType,
CompletionRelevanceTypeMatch,
};

hasher.update([
u8::from(relevance.exact_name_match),
u8::from(relevance.is_local),
u8::from(relevance.is_name_already_imported),
u8::from(relevance.requires_import),
u8::from(relevance.is_private_editable),
]);
if let Some(type_match) = &relevance.type_match {
let label = match type_match {
CompletionRelevanceTypeMatch::CouldUnify => "could_unify",
CompletionRelevanceTypeMatch::Exact => "exact",
};
hasher.update(label);
}
if let Some(trait_) = &relevance.trait_ {
hasher.update([u8::from(trait_.is_op_method), u8::from(trait_.notable_trait)]);
}
if let Some(postfix_match) = &relevance.postfix_match {
let label = match postfix_match {
CompletionRelevancePostfixMatch::NonExact => "non_exact",
CompletionRelevancePostfixMatch::Exact => "exact",
};
hasher.update(label);
}
if let Some(function) = &relevance.function {
hasher.update([u8::from(function.has_params), u8::from(function.has_self_param)]);
let label = match function.return_type {
CompletionRelevanceReturnType::Other => "other",
CompletionRelevanceReturnType::DirectConstructor => "direct_constructor",
CompletionRelevanceReturnType::Constructor => "constructor",
CompletionRelevanceReturnType::Builder => "builder",
};
hasher.update(label);
}
}

let mut hasher = TentHasher::new();
hasher.update([
u8::from(is_ref_completion),
u8::from(item.is_snippet),
u8::from(item.deprecated),
u8::from(item.trigger_call_info),
]);
hasher.update(&item.label);
if let Some(label_detail) = &item.label_detail {
hasher.update(label_detail);
}
// NB: do not hash edits or source range, as those may change between the time the client sends the resolve request
// and the time it receives it: some editors do allow changing the buffer between that, leading to ranges being different.
//
// Documentation hashing is skipped too, as it's a large blob to process,
// while not really making completion properties more unique as they are already.
hasher.update(item.kind.tag());
hasher.update(&item.lookup);
if let Some(detail) = &item.detail {
hasher.update(detail);
}
hash_completion_relevance(&mut hasher, &item.relevance);
if let Some((mutability, text_size)) = &item.ref_match {
hasher.update(mutability.as_keyword_for_ref());
hasher.update(u32::from(*text_size).to_le_bytes());
}
for (import_path, import_name) in &item.import_to_add {
hasher.update(import_path);
hasher.update(import_name);
}
hasher.finalize()
}
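
For orientation (not part of the diff; the helper names below are made up, but the base64 and comparison calls mirror the handlers/request.rs changes above and the to_proto.rs changes below): the 20-byte digest is base64-encoded into CompletionResolveData.hash when the item is first sent, then decoded and compared against a freshly computed digest when the client asks to resolve it.

```rust
use base64::{prelude::BASE64_STANDARD, Engine};

// Hypothetical sketch of the encode/decode sides of the round trip.
fn encode_hash(digest: [u8; 20]) -> String {
    BASE64_STANDARD.encode(digest)
}

fn hash_matches(encoded: &str, digest: [u8; 20]) -> bool {
    // A client may mangle the data payload; treat a decode failure as "no match".
    BASE64_STANDARD.decode(encoded).is_ok_and(|bytes| bytes == digest)
}
```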
2 changes: 1 addition & 1 deletion crates/rust-analyzer/src/lsp/capabilities.rs
@@ -41,7 +41,7 @@ pub fn server_capabilities(config: &Config) -> ServerCapabilities {
})),
hover_provider: Some(HoverProviderCapability::Simple(true)),
completion_provider: Some(CompletionOptions {
resolve_provider: if config.client_is_helix() || config.client_is_neovim() {
resolve_provider: if config.client_is_neovim() {
config.completion_item_edit_resolve().then_some(true)
} else {
Some(config.caps().completions_resolve_provider())
3 changes: 2 additions & 1 deletion crates/rust-analyzer/src/lsp/ext.rs
@@ -826,7 +826,8 @@ pub struct CompletionResolveData {
pub imports: Vec<CompletionImport>,
pub version: Option<i32>,
pub trigger_character: Option<char>,
pub completion_item_index: usize,
pub for_ref: bool,
pub hash: String,
}

#[derive(Debug, Serialize, Deserialize)]
19 changes: 11 additions & 8 deletions crates/rust-analyzer/src/lsp/to_proto.rs
@@ -5,6 +5,7 @@ use std::{
sync::atomic::{AtomicU32, Ordering},
};

use base64::{prelude::BASE64_STANDARD, Engine};
use ide::{
Annotation, AnnotationKind, Assist, AssistKind, Cancellable, CompletionFieldsToResolve,
CompletionItem, CompletionItemKind, CompletionRelevance, Documentation, FileId, FileRange,
@@ -21,6 +22,7 @@ use serde_json::to_value;
use vfs::AbsPath;

use crate::{
completion_item_hash,
config::{CallInfoConfig, Config},
global_state::GlobalStateSnapshot,
line_index::{LineEndings, LineIndex, PositionEncoding},
@@ -295,7 +297,7 @@ fn completion_item(
// non-trivial mapping here.
let mut text_edit = None;
let source_range = item.source_range;
for indel in item.text_edit {
for indel in &item.text_edit {
if indel.delete.contains_range(source_range) {
// Extract this indel as the main edit
text_edit = Some(if indel.delete == source_range {
@@ -347,7 +349,7 @@
something_to_resolve |= item.documentation.is_some();
None
} else {
item.documentation.map(documentation)
item.documentation.clone().map(documentation)
};

let mut lsp_item = lsp_types::CompletionItem {
@@ -371,10 +373,10 @@
} else {
lsp_item.label_details = Some(lsp_types::CompletionItemLabelDetails {
detail: item.label_detail.as_ref().map(ToString::to_string),
description: item.detail,
description: item.detail.clone(),
});
}
} else if let Some(label_detail) = item.label_detail {
} else if let Some(label_detail) = &item.label_detail {
lsp_item.label.push_str(label_detail.as_str());
}

@@ -383,6 +385,7 @@
let imports =
if config.completion(None).enable_imports_on_the_fly && !item.import_to_add.is_empty() {
item.import_to_add
.clone()
.into_iter()
.map(|(import_path, import_name)| lsp_ext::CompletionImport {
full_import_path: import_path,
@@ -393,16 +396,15 @@
Vec::new()
};
let (ref_resolve_data, resolve_data) = if something_to_resolve || !imports.is_empty() {
let mut item_index = acc.len();
let ref_resolve_data = if ref_match.is_some() {
let ref_resolve_data = lsp_ext::CompletionResolveData {
position: tdpp.clone(),
imports: Vec::new(),
version,
trigger_character: completion_trigger_character,
completion_item_index: item_index,
for_ref: true,
hash: BASE64_STANDARD.encode(completion_item_hash(&item, true)),
};
item_index += 1;
Some(to_value(ref_resolve_data).unwrap())
} else {
None
@@ -412,7 +414,8 @@
imports,
version,
trigger_character: completion_trigger_character,
completion_item_index: item_index,
for_ref: false,
hash: BASE64_STANDARD.encode(completion_item_hash(&item, false)),
};
(ref_resolve_data, Some(to_value(resolve_data).unwrap()))
} else {
Expand Down
2 changes: 1 addition & 1 deletion docs/dev/lsp-extensions.md
@@ -1,5 +1,5 @@
<!---
lsp/ext.rs hash: 96f88b7a5d0080c6
lsp/ext.rs hash: 14b7fb1309f5bb00

If you need to change the above hash to make the test pass, please check if you
need to adjust this doc as well and ping this issue: