Switch to upstream positionEncoding #13484

Merged 2 commits on Oct 26, 2022

4 changes: 2 additions & 2 deletions Cargo.lock

Some generated files are not rendered by default.

2 changes: 1 addition & 1 deletion crates/rust-analyzer/Cargo.toml
@@ -23,7 +23,7 @@ crossbeam-channel = "0.5.5"
dissimilar = "1.0.4"
itertools = "0.10.5"
scip = "0.1.1"
lsp-types = { version = "0.93.1", features = ["proposed"] }
lsp-types = { version = "=0.93.2", features = ["proposed"] }
parking_lot = "0.12.1"
xflags = "0.3.0"
oorandom = "11.1.3"
8 changes: 2 additions & 6 deletions crates/rust-analyzer/src/bin/main.rs
@@ -11,7 +11,7 @@ use std::{env, fs, path::Path, process};

use lsp_server::Connection;
use project_model::ProjectManifest;
-use rust_analyzer::{cli::flags, config::Config, from_json, lsp_ext::supports_utf8, Result};
+use rust_analyzer::{cli::flags, config::Config, from_json, Result};
use vfs::AbsPathBuf;

#[cfg(all(feature = "mimalloc"))]
@@ -191,11 +191,7 @@ fn run_server() -> Result<()> {
name: String::from("rust-analyzer"),
version: Some(rust_analyzer::version().to_string()),
}),
-offset_encoding: if supports_utf8(config.caps()) {
-Some("utf-8".to_string())
-} else {
-None
-},
+offset_encoding: None,
};

let initialize_result = serde_json::to_value(initialize_result).unwrap();
14 changes: 10 additions & 4 deletions crates/rust-analyzer/src/caps.rs
@@ -6,19 +6,25 @@ use lsp_types::{
FileOperationFilter, FileOperationPattern, FileOperationPatternKind,
FileOperationRegistrationOptions, FoldingRangeProviderCapability, HoverProviderCapability,
ImplementationProviderCapability, InlayHintOptions, InlayHintServerCapabilities, OneOf,
-RenameOptions, SaveOptions, SelectionRangeProviderCapability, SemanticTokensFullOptions,
-SemanticTokensLegend, SemanticTokensOptions, ServerCapabilities, SignatureHelpOptions,
-TextDocumentSyncCapability, TextDocumentSyncKind, TextDocumentSyncOptions,
-TypeDefinitionProviderCapability, WorkDoneProgressOptions,
+PositionEncodingKind, RenameOptions, SaveOptions, SelectionRangeProviderCapability,
+SemanticTokensFullOptions, SemanticTokensLegend, SemanticTokensOptions, ServerCapabilities,
+SignatureHelpOptions, TextDocumentSyncCapability, TextDocumentSyncKind,
+TextDocumentSyncOptions, TypeDefinitionProviderCapability, WorkDoneProgressOptions,
WorkspaceFileOperationsServerCapabilities, WorkspaceServerCapabilities,
};
use serde_json::json;

use crate::config::{Config, RustfmtConfig};
+use crate::lsp_ext::supports_utf8;
use crate::semantic_tokens;

pub fn server_capabilities(config: &Config) -> ServerCapabilities {
ServerCapabilities {
+position_encoding: if supports_utf8(config.caps()) {
+Some(PositionEncodingKind::UTF8)
+} else {
+None
+},
text_document_sync: Some(TextDocumentSyncCapability::Options(TextDocumentSyncOptions {
open_close: Some(true),
change: Some(TextDocumentSyncKind::INCREMENTAL),
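
Taken together with the `main.rs` change above, encoding negotiation now goes through the standard LSP 3.17 fields: the client opts in via `general.positionEncodings`, and the server answers in `capabilities.positionEncoding` instead of the clangd-style `offsetEncoding` field on the `InitializeResult`. A rough sketch of the resulting handshake follows; the concrete JSON values are illustrative assumptions, not taken from this PR.

use serde_json::json;

fn main() {
    // What a UTF-8-capable client might advertise in its `initialize` request.
    let client_capabilities = json!({
        "general": { "positionEncodings": ["utf-8", "utf-16"] }
    });

    // What the server would now answer: the negotiated encoding lives in
    // `capabilities.positionEncoding`, and the old non-standard top-level
    // `offsetEncoding` field is no longer populated.
    let initialize_result = json!({
        "capabilities": { "positionEncoding": "utf-8" },
        "serverInfo": { "name": "rust-analyzer" }
    });

    println!("{client_capabilities}\n{initialize_result}");
}
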
6 changes: 3 additions & 3 deletions crates/rust-analyzer/src/cli/lsif.rs
@@ -20,7 +20,7 @@ use crate::cli::{
load_cargo::{load_workspace, LoadCargoConfig},
Result,
};
-use crate::line_index::{LineEndings, LineIndex, OffsetEncoding};
+use crate::line_index::{LineEndings, LineIndex, PositionEncoding};
use crate::to_proto;
use crate::version::version;

@@ -126,7 +126,7 @@ impl LsifManager<'_> {
let line_index = self.db.line_index(file_id);
let line_index = LineIndex {
index: line_index,
-encoding: OffsetEncoding::Utf16,
+encoding: PositionEncoding::Utf16,
endings: LineEndings::Unix,
};
let range_id = self.add_vertex(lsif::Vertex::Range {
@@ -248,7 +248,7 @@ impl LsifManager<'_> {
let line_index = self.db.line_index(file_id);
let line_index = LineIndex {
index: line_index,
-encoding: OffsetEncoding::Utf16,
+encoding: PositionEncoding::Utf16,
endings: LineEndings::Unix,
};
let result = folds
4 changes: 2 additions & 2 deletions crates/rust-analyzer/src/cli/scip.rs
@@ -5,7 +5,7 @@ use std::{
time::Instant,
};

-use crate::line_index::{LineEndings, LineIndex, OffsetEncoding};
+use crate::line_index::{LineEndings, LineIndex, PositionEncoding};
use hir::Name;
use ide::{
LineCol, MonikerDescriptorKind, StaticIndex, StaticIndexedFile, TextRange, TokenId,
@@ -91,7 +91,7 @@ impl flags::Scip {

let line_index = LineIndex {
index: db.line_index(file_id),
-encoding: OffsetEncoding::Utf8,
+encoding: PositionEncoding::Utf8,
endings: LineEndings::Unix,
};

8 changes: 4 additions & 4 deletions crates/rust-analyzer/src/config.rs
@@ -32,7 +32,7 @@ use vfs::AbsPathBuf;
use crate::{
caps::completion_item_edit_resolve,
diagnostics::DiagnosticsMapConfig,
-line_index::OffsetEncoding,
+line_index::PositionEncoding,
lsp_ext::{self, supports_utf8, WorkspaceSymbolSearchKind, WorkspaceSymbolSearchScope},
};

@@ -948,11 +948,11 @@ impl Config {
.is_some()
}

-pub fn offset_encoding(&self) -> OffsetEncoding {
+pub fn position_encoding(&self) -> PositionEncoding {
if supports_utf8(&self.caps) {
-OffsetEncoding::Utf8
+PositionEncoding::Utf8
} else {
-OffsetEncoding::Utf16
+PositionEncoding::Utf16
}
}

16 changes: 8 additions & 8 deletions crates/rust-analyzer/src/diagnostics/to_proto.rs
@@ -8,7 +8,7 @@ use stdx::format_to;
use vfs::{AbsPath, AbsPathBuf};

use crate::{
-global_state::GlobalStateSnapshot, line_index::OffsetEncoding, lsp_ext,
+global_state::GlobalStateSnapshot, line_index::PositionEncoding, lsp_ext,
to_proto::url_from_abs_path,
};

@@ -66,17 +66,17 @@ fn location(
let uri = url_from_abs_path(&file_name);

let range = {
-let offset_encoding = snap.config.offset_encoding();
+let position_encoding = snap.config.position_encoding();
lsp_types::Range::new(
-position(&offset_encoding, span, span.line_start, span.column_start),
-position(&offset_encoding, span, span.line_end, span.column_end),
+position(&position_encoding, span, span.line_start, span.column_start),
+position(&position_encoding, span, span.line_end, span.column_end),
)
};
lsp_types::Location::new(uri, range)
}

fn position(
-offset_encoding: &OffsetEncoding,
+position_encoding: &PositionEncoding,
span: &DiagnosticSpan,
line_offset: usize,
column_offset: usize,
@@ -93,9 +93,9 @@
};
}
let mut char_offset = 0;
-let len_func = match offset_encoding {
-OffsetEncoding::Utf8 => char::len_utf8,
-OffsetEncoding::Utf16 => char::len_utf16,
+let len_func = match position_encoding {
+PositionEncoding::Utf8 => char::len_utf8,
+PositionEncoding::Utf16 => char::len_utf16,
};
for c in line.text.chars() {
char_offset += 1;
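
The `char::len_utf8`/`char::len_utf16` branch above is the heart of the matter: for lines containing non-ASCII characters, byte columns and UTF-16 code-unit columns disagree. A minimal, self-contained illustration (not part of the PR, standard library only):

fn main() {
    // 'α' is two bytes in UTF-8 but a single UTF-16 code unit, so the
    // column of '=' differs depending on the negotiated position encoding.
    let line = "let α = 1;";
    let eq_byte_offset = line.find('=').unwrap();
    let utf8_col = eq_byte_offset; // columns counted in bytes
    let utf16_col: usize = line[..eq_byte_offset].chars().map(char::len_utf16).sum();
    assert_eq!(utf8_col, 7);
    assert_eq!(utf16_col, 6);
    println!("utf-8 column = {utf8_col}, utf-16 column = {utf16_col}");
}
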
6 changes: 3 additions & 3 deletions crates/rust-analyzer/src/from_proto.rs
@@ -8,7 +8,7 @@ use vfs::AbsPathBuf;
use crate::{
from_json,
global_state::GlobalStateSnapshot,
-line_index::{LineIndex, OffsetEncoding},
+line_index::{LineIndex, PositionEncoding},
lsp_ext,
lsp_utils::invalid_params_error,
Result,
@@ -25,10 +25,10 @@ pub(crate) fn vfs_path(url: &lsp_types::Url) -> Result<vfs::VfsPath> {

pub(crate) fn offset(line_index: &LineIndex, position: lsp_types::Position) -> Result<TextSize> {
let line_col = match line_index.encoding {
-OffsetEncoding::Utf8 => {
+PositionEncoding::Utf8 => {
LineCol { line: position.line as u32, col: position.character as u32 }
}
-OffsetEncoding::Utf16 => {
+PositionEncoding::Utf16 => {
let line_col =
LineColUtf16 { line: position.line as u32, col: position.character as u32 };
line_index.index.to_utf8(line_col)
2 changes: 1 addition & 1 deletion crates/rust-analyzer/src/global_state.rs
@@ -383,7 +383,7 @@ impl GlobalStateSnapshot {
pub(crate) fn file_line_index(&self, file_id: FileId) -> Cancellable<LineIndex> {
let endings = self.vfs.read().1[&file_id];
let index = self.analysis.file_line_index(file_id)?;
-let res = LineIndex { index, endings, encoding: self.config.offset_encoding() };
+let res = LineIndex { index, endings, encoding: self.config.position_encoding() };
Ok(res)
}

4 changes: 2 additions & 2 deletions crates/rust-analyzer/src/line_index.rs
@@ -7,15 +7,15 @@

use std::sync::Arc;

-pub enum OffsetEncoding {
+pub enum PositionEncoding {
Utf8,
Utf16,
}

pub(crate) struct LineIndex {
pub(crate) index: Arc<ide::LineIndex>,
pub(crate) endings: LineEndings,
-pub(crate) encoding: OffsetEncoding,
+pub(crate) encoding: PositionEncoding,
}

#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
11 changes: 10 additions & 1 deletion crates/rust-analyzer/src/lsp_ext.rs
@@ -3,6 +3,7 @@
use std::{collections::HashMap, path::PathBuf};

use lsp_types::request::Request;
+use lsp_types::PositionEncodingKind;
use lsp_types::{
notification::Notification, CodeActionKind, DocumentOnTypeFormattingParams,
PartialResultParams, Position, Range, TextDocumentIdentifier, WorkDoneProgressParams,
@@ -455,7 +456,15 @@ pub(crate) enum CodeLensResolveData {
}

pub fn supports_utf8(caps: &lsp_types::ClientCapabilities) -> bool {
-caps.offset_encoding.as_deref().unwrap_or_default().iter().any(|it| it == "utf-8")
+match &caps.general {
+Some(general) => general
+.position_encodings
+.as_deref()
+.unwrap_or_default()
+.iter()
+.any(|it| it == &PositionEncodingKind::UTF8),
+_ => false,
+}
}

pub enum MoveItem {}
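
A small sketch of how the new check behaves, written against the `lsp-types` 0.93.2 API surface visible in this diff (`general.position_encodings`, `PositionEncodingKind::UTF8`). The standalone helper, the sample JSON, and the use of `Default`/`serde_json` are illustrative assumptions, not code from the PR:

use lsp_types::{ClientCapabilities, PositionEncodingKind};

// Standalone equivalent of the updated `supports_utf8` helper.
fn supports_utf8(caps: &ClientCapabilities) -> bool {
    caps.general.as_ref().map_or(false, |general| {
        general
            .position_encodings
            .as_deref()
            .unwrap_or_default()
            .iter()
            .any(|it| it == &PositionEncodingKind::UTF8)
    })
}

fn main() {
    // Partial capabilities, as a client might send them in `initialize`.
    let caps: ClientCapabilities = serde_json::from_value(serde_json::json!({
        "general": { "positionEncodings": ["utf-8"] }
    }))
    .unwrap();
    assert!(supports_utf8(&caps));

    // A client that announces nothing falls back to the LSP default, UTF-16.
    let default_caps = ClientCapabilities::default();
    assert!(!supports_utf8(&default_caps));
}
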
4 changes: 2 additions & 2 deletions crates/rust-analyzer/src/lsp_utils.rs
@@ -6,7 +6,7 @@ use lsp_server::Notification;
use crate::{
from_proto,
global_state::GlobalState,
-line_index::{LineEndings, LineIndex, OffsetEncoding},
+line_index::{LineEndings, LineIndex, PositionEncoding},
LspError,
};

@@ -140,7 +140,7 @@ pub(crate) fn apply_document_changes(
index: Arc::new(ide::LineIndex::new(old_text)),
// We don't care about line endings or offset encoding here.
endings: LineEndings::Unix,
-encoding: OffsetEncoding::Utf16,
+encoding: PositionEncoding::Utf16,
};

// The changes we got must be applied sequentially, but can cross lines so we
8 changes: 4 additions & 4 deletions crates/rust-analyzer/src/to_proto.rs
@@ -21,7 +21,7 @@ use crate::{
cargo_target_spec::CargoTargetSpec,
config::{CallInfoConfig, Config},
global_state::GlobalStateSnapshot,
-line_index::{LineEndings, LineIndex, OffsetEncoding},
+line_index::{LineEndings, LineIndex, PositionEncoding},
lsp_ext,
lsp_utils::invalid_params_error,
semantic_tokens, Result,
@@ -30,8 +30,8 @@ use crate::{
pub(crate) fn position(line_index: &LineIndex, offset: TextSize) -> lsp_types::Position {
let line_col = line_index.index.line_col(offset);
match line_index.encoding {
-OffsetEncoding::Utf8 => lsp_types::Position::new(line_col.line, line_col.col),
-OffsetEncoding::Utf16 => {
+PositionEncoding::Utf8 => lsp_types::Position::new(line_col.line, line_col.col),
+PositionEncoding::Utf16 => {
let line_col = line_index.index.to_utf16(line_col);
lsp_types::Position::new(line_col.line, line_col.col)
}
@@ -1394,7 +1394,7 @@ fn main() {
let line_index = LineIndex {
index: Arc::new(ide::LineIndex::new(text)),
endings: LineEndings::Unix,
-encoding: OffsetEncoding::Utf16,
+encoding: PositionEncoding::Utf16,
};
let converted: Vec<lsp_types::FoldingRange> =
folds.into_iter().map(|it| folding_range(text, &line_index, true, it)).collect();
8 changes: 1 addition & 7 deletions docs/dev/lsp-extensions.md
@@ -1,5 +1,5 @@
<!---
-lsp_ext.rs hash: 7b710095d773b978
+lsp_ext.rs hash: 62068e53ac202dc8

If you need to change the above hash to make the test pass, please check if you
need to adjust this doc as well and ping this issue:
@@ -19,12 +19,6 @@ Requests, which are likely to always remain specific to `rust-analyzer` are unde

If you want to be notified about the changes to this document, subscribe to [#4604](https://github.com/rust-lang/rust-analyzer/issues/4604).

-## UTF-8 offsets
-
-rust-analyzer supports clangd's extension for opting into UTF-8 as the coordinate space for offsets (by default, LSP uses UTF-16 offsets).
-
-https://clangd.llvm.org/extensions.html#utf-8-offsets
-
## Configuration in `initializationOptions`

**Upstream Issue:** https://github.com/microsoft/language-server-protocol/issues/567
2 changes: 1 addition & 1 deletion lib/lsp-server/Cargo.toml
@@ -13,4 +13,4 @@ serde = { version = "1.0.144", features = ["derive"] }
crossbeam-channel = "0.5.6"

[dev-dependencies]
lsp-types = "0.93.1"
lsp-types = "=0.93.2"