Server-side Semantic Tokens #3159

Merged (2 commits) on Feb 25, 2020

2 changes: 1 addition & 1 deletion crates/ra_ide/src/lib.rs
@@ -75,7 +75,7 @@ pub use crate::{
runnables::{Runnable, RunnableKind, TestId},
source_change::{FileSystemEdit, SourceChange, SourceFileEdit},
ssr::SsrError,
syntax_highlighting::HighlightedRange,
syntax_highlighting::{tags, HighlightedRange},
};

pub use hir::Documentation;
52 changes: 26 additions & 26 deletions crates/ra_ide/src/syntax_highlighting.rs
@@ -17,32 +17,32 @@ use crate::{
};

pub mod tags {
pub(crate) const FIELD: &str = "field";
pub(crate) const FUNCTION: &str = "function";
pub(crate) const MODULE: &str = "module";
pub(crate) const CONSTANT: &str = "constant";
pub(crate) const MACRO: &str = "macro";

pub(crate) const VARIABLE: &str = "variable";
pub(crate) const VARIABLE_MUT: &str = "variable.mut";

pub(crate) const TYPE: &str = "type";
pub(crate) const TYPE_BUILTIN: &str = "type.builtin";
pub(crate) const TYPE_SELF: &str = "type.self";
pub(crate) const TYPE_PARAM: &str = "type.param";
pub(crate) const TYPE_LIFETIME: &str = "type.lifetime";

pub(crate) const LITERAL_BYTE: &str = "literal.byte";
pub(crate) const LITERAL_NUMERIC: &str = "literal.numeric";
pub(crate) const LITERAL_CHAR: &str = "literal.char";

pub(crate) const LITERAL_COMMENT: &str = "comment";
pub(crate) const LITERAL_STRING: &str = "string";
pub(crate) const LITERAL_ATTRIBUTE: &str = "attribute";

pub(crate) const KEYWORD: &str = "keyword";
pub(crate) const KEYWORD_UNSAFE: &str = "keyword.unsafe";
pub(crate) const KEYWORD_CONTROL: &str = "keyword.control";
pub const FIELD: &str = "field";
pub const FUNCTION: &str = "function";
pub const MODULE: &str = "module";
pub const CONSTANT: &str = "constant";
pub const MACRO: &str = "macro";

pub const VARIABLE: &str = "variable";
pub const VARIABLE_MUT: &str = "variable.mut";

pub const TYPE: &str = "type";
pub const TYPE_BUILTIN: &str = "type.builtin";
pub const TYPE_SELF: &str = "type.self";
pub const TYPE_PARAM: &str = "type.param";
pub const TYPE_LIFETIME: &str = "type.lifetime";

pub const LITERAL_BYTE: &str = "literal.byte";
pub const LITERAL_NUMERIC: &str = "literal.numeric";
pub const LITERAL_CHAR: &str = "literal.char";

pub const LITERAL_COMMENT: &str = "comment";
pub const LITERAL_STRING: &str = "string";
pub const LITERAL_ATTRIBUTE: &str = "attribute";

pub const KEYWORD: &str = "keyword";
pub const KEYWORD_UNSAFE: &str = "keyword.unsafe";
pub const KEYWORD_CONTROL: &str = "keyword.control";
}

#[derive(Debug)]
24 changes: 20 additions & 4 deletions crates/rust-analyzer/src/caps.rs
@@ -1,12 +1,15 @@
//! Advertizes the capabilities of the LSP Server.

use crate::semantic_tokens;

use lsp_types::{
CallHierarchyServerCapability, CodeActionProviderCapability, CodeLensOptions,
CompletionOptions, DocumentOnTypeFormattingOptions, FoldingRangeProviderCapability,
ImplementationProviderCapability, RenameOptions, RenameProviderCapability, SaveOptions,
SelectionRangeProviderCapability, ServerCapabilities, SignatureHelpOptions,
TextDocumentSyncCapability, TextDocumentSyncKind, TextDocumentSyncOptions,
TypeDefinitionProviderCapability, WorkDoneProgressOptions,
SelectionRangeProviderCapability, SemanticTokensDocumentProvider, SemanticTokensLegend,
SemanticTokensOptions, SemanticTokensServerCapabilities, ServerCapabilities,
SignatureHelpOptions, TextDocumentSyncCapability, TextDocumentSyncKind,
TextDocumentSyncOptions, TypeDefinitionProviderCapability, WorkDoneProgressOptions,
};

pub fn server_capabilities() -> ServerCapabilities {
@@ -57,7 +60,20 @@ pub fn server_capabilities() -> ServerCapabilities {
execute_command_provider: None,
workspace: None,
call_hierarchy_provider: Some(CallHierarchyServerCapability::Simple(true)),
semantic_tokens_provider: None,
semantic_tokens_provider: Some(SemanticTokensServerCapabilities::SemanticTokensOptions(
SemanticTokensOptions {
legend: SemanticTokensLegend {
token_types: semantic_tokens::supported_token_types().iter().cloned().collect(),
token_modifiers: semantic_tokens::supported_token_modifiers()
.iter()
.cloned()
.collect(),
},

document_provider: Some(SemanticTokensDocumentProvider::Bool(true)),
..SemanticTokensOptions::default()
},
)),
experimental: Default::default(),
}
}
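
For context (not part of the diff): the legend registered above is what gives meaning to the integers the server later sends. A client resolves each token's type index and modifier bitset by indexing into these arrays, with each modifier occupying the bit that matches its position in the legend. A minimal client-side decoding sketch under that assumption (the decode helper is a hypothetical name):

use lsp_types::{SemanticTokenModifier, SemanticTokenType, SemanticTokensLegend};

// Illustrative only: resolve an encoded (type index, modifier bitset) pair
// against the legend advertised by server_capabilities() above.
fn decode(
    legend: &SemanticTokensLegend,
    token_type: u32,
    modifier_bitset: u32,
) -> (SemanticTokenType, Vec<SemanticTokenModifier>) {
    let ty = legend.token_types[token_type as usize].clone();
    let mut modifiers = Vec::new();
    for (i, m) in legend.token_modifiers.iter().enumerate() {
        // Modifier i is active if bit i of the bitset is set.
        if modifier_bitset & (1u32 << i as u32) != 0 {
            modifiers.push(m.clone());
        }
    }
    (ty, modifiers)
}
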
79 changes: 75 additions & 4 deletions crates/rust-analyzer/src/conv.rs
@@ -4,19 +4,20 @@
use lsp_types::{
self, CreateFile, DiagnosticSeverity, DocumentChangeOperation, DocumentChanges, Documentation,
Location, LocationLink, MarkupContent, MarkupKind, Position, Range, RenameFile, ResourceOp,
SymbolKind, TextDocumentEdit, TextDocumentIdentifier, TextDocumentItem,
TextDocumentPositionParams, Url, VersionedTextDocumentIdentifier, WorkspaceEdit,
SemanticTokenModifier, SemanticTokenType, SymbolKind, TextDocumentEdit, TextDocumentIdentifier,
TextDocumentItem, TextDocumentPositionParams, Url, VersionedTextDocumentIdentifier,
WorkspaceEdit,
};
use ra_ide::{
translate_offset_with_edit, CompletionItem, CompletionItemKind, FileId, FilePosition,
tags, translate_offset_with_edit, CompletionItem, CompletionItemKind, FileId, FilePosition,
FileRange, FileSystemEdit, Fold, FoldKind, InsertTextFormat, LineCol, LineIndex,
NavigationTarget, RangeInfo, ReferenceAccess, Severity, SourceChange, SourceFileEdit,
};
use ra_syntax::{SyntaxKind, TextRange, TextUnit};
use ra_text_edit::{AtomTextEdit, TextEdit};
use ra_vfs::LineEndings;

use crate::{req, world::WorldSnapshot, Result};
use crate::{req, semantic_tokens, world::WorldSnapshot, Result};

pub trait Conv {
type Output;
@@ -302,6 +303,76 @@ impl ConvWith<&FoldConvCtx<'_>> for Fold {
}
}

impl Conv for &'static str {
type Output = (SemanticTokenType, Vec<SemanticTokenModifier>);

fn conv(self) -> (SemanticTokenType, Vec<SemanticTokenModifier>) {
Review comment (Member): We could do a more direct conversion here, avoiding the Vec allocation, right?

Review comment (Contributor): I think smallvec could be more appropriate here?

Review comment (Member): The problem is using the &'static str type in the first place. It doesn't make sense to optimize implementation details while the API itself is wrong. I think we should move to a model closer to the tag + modifier model of LSP.

let token_type: SemanticTokenType = match self {
tags::FIELD => SemanticTokenType::MEMBER,
tags::FUNCTION => SemanticTokenType::FUNCTION,
tags::MODULE => SemanticTokenType::NAMESPACE,
tags::CONSTANT => {
return (
SemanticTokenType::VARIABLE,
vec![SemanticTokenModifier::STATIC, SemanticTokenModifier::READONLY],
)
}
tags::MACRO => SemanticTokenType::MACRO,

tags::VARIABLE => {
return (SemanticTokenType::VARIABLE, vec![SemanticTokenModifier::READONLY])
}
tags::VARIABLE_MUT => SemanticTokenType::VARIABLE,

tags::TYPE => SemanticTokenType::TYPE,
tags::TYPE_BUILTIN => SemanticTokenType::TYPE,
tags::TYPE_SELF => {
return (SemanticTokenType::TYPE, vec![SemanticTokenModifier::REFERENCE])
}
tags::TYPE_PARAM => SemanticTokenType::TYPE_PARAMETER,
tags::TYPE_LIFETIME => {
return (SemanticTokenType::LABEL, vec![SemanticTokenModifier::REFERENCE])
}

tags::LITERAL_BYTE => SemanticTokenType::NUMBER,
tags::LITERAL_NUMERIC => SemanticTokenType::NUMBER,
tags::LITERAL_CHAR => SemanticTokenType::NUMBER,

tags::LITERAL_COMMENT => {
return (SemanticTokenType::COMMENT, vec![SemanticTokenModifier::DOCUMENTATION])
}

tags::LITERAL_STRING => SemanticTokenType::STRING,
tags::LITERAL_ATTRIBUTE => SemanticTokenType::KEYWORD,

tags::KEYWORD => SemanticTokenType::KEYWORD,
tags::KEYWORD_UNSAFE => SemanticTokenType::KEYWORD,
tags::KEYWORD_CONTROL => SemanticTokenType::KEYWORD,
unknown => panic!("Unknown semantic token: {}", unknown),
};

(token_type, vec![])
}
}
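
Sketch only, prompted by the allocation discussion above and not part of this PR: since every modifier combination is known at compile time, one way to avoid the Vec would be to hand out static slices. The conv_tag function and the modifier constants below are hypothetical names:

use lsp_types::{SemanticTokenModifier, SemanticTokenType};
use ra_ide::tags;

const NO_MODIFIERS: &[SemanticTokenModifier] = &[];
const STATIC_READONLY: &[SemanticTokenModifier] =
    &[SemanticTokenModifier::STATIC, SemanticTokenModifier::READONLY];
const READONLY: &[SemanticTokenModifier] = &[SemanticTokenModifier::READONLY];

// Hypothetical allocation-free variant of the conversion above.
fn conv_tag(tag: &'static str) -> (SemanticTokenType, &'static [SemanticTokenModifier]) {
    match tag {
        tags::FIELD => (SemanticTokenType::MEMBER, NO_MODIFIERS),
        tags::CONSTANT => (SemanticTokenType::VARIABLE, STATIC_READONLY),
        tags::VARIABLE => (SemanticTokenType::VARIABLE, READONLY),
        // ...the remaining tags map exactly as in the impl above...
        unknown => panic!("Unknown semantic token: {}", unknown),
    }
}
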

impl Conv for (SemanticTokenType, Vec<SemanticTokenModifier>) {
type Output = (u32, u32);

fn conv(self) -> Self::Output {
let token_index =
semantic_tokens::supported_token_types().iter().position(|it| *it == self.0).unwrap();
let mut token_modifier_bitset = 0;
for modifier in self.1.iter() {
token_modifier_bitset |= semantic_tokens::supported_token_modifiers()
.iter()
.position(|it| it == modifier)
.unwrap();
}

(token_index as u32, token_modifier_bitset as u32)
}
}

impl<T: ConvWith<CTX>, CTX> ConvWith<CTX> for Option<T> {
type Output = Option<T::Output>;

1 change: 1 addition & 0 deletions crates/rust-analyzer/src/lib.rs
@@ -36,6 +36,7 @@ pub mod req;
mod config;
mod world;
mod diagnostics;
mod semantic_tokens;

use serde::de::DeserializeOwned;

1 change: 1 addition & 0 deletions crates/rust-analyzer/src/main_loop.rs
@@ -528,6 +528,7 @@ fn on_request(
.on::<req::CallHierarchyIncomingCalls>(handlers::handle_call_hierarchy_incoming)?
.on::<req::CallHierarchyOutgoingCalls>(handlers::handle_call_hierarchy_outgoing)?
.on::<req::Ssr>(handlers::handle_ssr)?
.on::<req::SemanticTokensRequest>(handlers::handle_semantic_tokens)?
.finish();
Ok(())
}
28 changes: 26 additions & 2 deletions crates/rust-analyzer/src/main_loop/handlers.rs
@@ -16,8 +16,9 @@ use lsp_types::{
CodeAction, CodeActionOrCommand, CodeActionResponse, CodeLens, Command, CompletionItem,
Diagnostic, DocumentFormattingParams, DocumentHighlight, DocumentSymbol, FoldingRange,
FoldingRangeParams, Hover, HoverContents, Location, MarkupContent, MarkupKind, Position,
PrepareRenameResponse, Range, RenameParams, SymbolInformation, TextDocumentIdentifier,
TextEdit, WorkspaceEdit,
PrepareRenameResponse, Range, RenameParams, SemanticTokenModifier, SemanticTokenType,
SemanticTokens, SemanticTokensParams, SemanticTokensResult, SymbolInformation,
TextDocumentIdentifier, TextEdit, WorkspaceEdit,
};
use ra_ide::{
AssistId, FileId, FilePosition, FileRange, Query, RangeInfo, Runnable, RunnableKind,
@@ -38,6 +39,7 @@ use crate::{
diagnostics::DiagnosticTask,
from_json,
req::{self, Decoration, InlayHint, InlayHintsParams, InlayKind},
semantic_tokens::SemanticTokensBuilder,
world::WorldSnapshot,
LspError, Result,
};
@@ -1068,3 +1070,25 @@ pub fn handle_call_hierarchy_outgoing(

Ok(Some(res))
}

pub fn handle_semantic_tokens(
world: WorldSnapshot,
params: SemanticTokensParams,
) -> Result<Option<SemanticTokensResult>> {
let _p = profile("handle_semantic_tokens");

let file_id = params.text_document.try_conv_with(&world)?;
let line_index = world.analysis().file_line_index(file_id)?;

let mut builder = SemanticTokensBuilder::default();

for h in world.analysis().highlight(file_id)?.into_iter() {
let type_and_modifiers: (SemanticTokenType, Vec<SemanticTokenModifier>) = h.tag.conv();
let (token_type, token_modifiers) = type_and_modifiers.conv();
builder.push(h.range.conv_with(&line_index), token_type, token_modifiers);
}

let tokens = SemanticTokens { data: builder.build(), ..Default::default() };

Ok(Some(tokens.into()))
}
6 changes: 3 additions & 3 deletions crates/rust-analyzer/src/req.rs
@@ -12,9 +12,9 @@ pub use lsp_types::{
DocumentSymbolResponse, FileSystemWatcher, Hover, InitializeResult, MessageType,
PartialResultParams, ProgressParams, ProgressParamsValue, ProgressToken,
PublishDiagnosticsParams, ReferenceParams, Registration, RegistrationParams, SelectionRange,
SelectionRangeParams, ServerCapabilities, ShowMessageParams, SignatureHelp, SymbolKind,
TextDocumentEdit, TextDocumentPositionParams, TextEdit, WorkDoneProgressParams, WorkspaceEdit,
WorkspaceSymbolParams,
SelectionRangeParams, SemanticTokensParams, SemanticTokensResult, ServerCapabilities,
ShowMessageParams, SignatureHelp, SymbolKind, TextDocumentEdit, TextDocumentPositionParams,
TextEdit, WorkDoneProgressParams, WorkspaceEdit, WorkspaceSymbolParams,
};

pub enum AnalyzerStatus {}
94 changes: 94 additions & 0 deletions crates/rust-analyzer/src/semantic_tokens.rs
@@ -0,0 +1,94 @@
//! Semantic Tokens helpers

use lsp_types::{Range, SemanticToken, SemanticTokenModifier, SemanticTokenType};

const SUPPORTED_TYPES: &[SemanticTokenType] = &[
SemanticTokenType::COMMENT,
SemanticTokenType::KEYWORD,
SemanticTokenType::STRING,
SemanticTokenType::NUMBER,
SemanticTokenType::REGEXP,
SemanticTokenType::OPERATOR,
SemanticTokenType::NAMESPACE,
SemanticTokenType::TYPE,
SemanticTokenType::STRUCT,
SemanticTokenType::CLASS,
SemanticTokenType::INTERFACE,
SemanticTokenType::ENUM,
SemanticTokenType::TYPE_PARAMETER,
SemanticTokenType::FUNCTION,
SemanticTokenType::MEMBER,
SemanticTokenType::PROPERTY,
SemanticTokenType::MACRO,
SemanticTokenType::VARIABLE,
SemanticTokenType::PARAMETER,
SemanticTokenType::LABEL,
];

const SUPPORTED_MODIFIERS: &[SemanticTokenModifier] = &[
SemanticTokenModifier::DOCUMENTATION,
SemanticTokenModifier::DECLARATION,
SemanticTokenModifier::DEFINITION,
SemanticTokenModifier::REFERENCE,
SemanticTokenModifier::STATIC,
SemanticTokenModifier::ABSTRACT,
SemanticTokenModifier::DEPRECATED,
SemanticTokenModifier::ASYNC,
SemanticTokenModifier::VOLATILE,
SemanticTokenModifier::READONLY,
];

/// Token types that the server supports
pub(crate) fn supported_token_types() -> &'static [SemanticTokenType] {
SUPPORTED_TYPES
}

/// Token modifiers that the server supports
pub(crate) fn supported_token_modifiers() -> &'static [SemanticTokenModifier] {
SUPPORTED_MODIFIERS
}

/// Tokens are encoded relative to each other.
///
/// This is a direct port of https://github.com/microsoft/vscode-languageserver-node/blob/f425af9de46a0187adb78ec8a46b9b2ce80c5412/server/src/sematicTokens.proposed.ts#L45
#[derive(Default)]
pub(crate) struct SemanticTokensBuilder {
prev_line: u32,
prev_char: u32,
data: Vec<SemanticToken>,
}

impl SemanticTokensBuilder {
/// Push a new token onto the builder
pub fn push(&mut self, range: Range, token_index: u32, modifier_bitset: u32) {
let mut push_line = range.start.line as u32;
let mut push_char = range.start.character as u32;

if !self.data.is_empty() {
push_line -= self.prev_line;
if push_line == 0 {
push_char -= self.prev_char;
}
}

// A token cannot be multiline
let token_len = range.end.character - range.start.character;

let token = SemanticToken {
delta_line: push_line,
delta_start: push_char,
length: token_len as u32,
token_type: token_index,
token_modifiers_bitset: modifier_bitset,
};

self.data.push(token);

self.prev_line = range.start.line as u32;
self.prev_char = range.start.character as u32;
}

pub fn build(self) -> Vec<SemanticToken> {
self.data
}
}
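
To make the relative encoding above concrete, here is a usage sketch written as if it were a unit test in this module; it is illustrative, not part of the PR. The token type indices 1 and 13 refer to KEYWORD and FUNCTION in the SUPPORTED_TYPES list above:

#[cfg(test)]
mod tests {
    use super::SemanticTokensBuilder;
    use lsp_types::{Position, Range};

    #[test]
    fn tokens_are_encoded_relative_to_the_previous_token() {
        let mut builder = SemanticTokensBuilder::default();

        // "fn" keyword on line 2, columns 0..2.
        builder.push(Range::new(Position::new(2, 0), Position::new(2, 2)), 1, 0);
        // Function name on the same line, columns 3..7.
        builder.push(Range::new(Position::new(2, 3), Position::new(2, 7)), 13, 0);

        let data = builder.build();

        // The first token is encoded with absolute line and column.
        assert_eq!((data[0].delta_line, data[0].delta_start, data[0].length), (2, 0, 2));
        // The second token is relative: same line, start shifted by 3 columns.
        assert_eq!((data[1].delta_line, data[1].delta_start, data[1].length), (0, 3, 4));
    }
}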