
Commit 8938112

Address PR suggestions regarding semantic tokens
- Remove token bounding logic in DocumentManager
  Also address some stylistic suggestions.
- Make hasSemanticTokensRegistration take only a single language
- Make lspName internal and add _lspName wrappers for testing
- Assert that tokens are sorted while encoding
- Compute function token length in terms of UTF-16 code units
  ...these weren't handled correctly before for emoji.
- Let splitToSingleLineTokens return a non-optional
1 parent 0505ece commit 8938112

7 files changed (+48 −41 lines)


Sources/SourceKitLSP/CapabilityRegistry.swift

Lines changed: 2 additions & 2 deletions
@@ -143,8 +143,8 @@ public final class CapabilityRegistry {
   }

   /// Checks whether a registration for semantic tokens for the given languages exists.
-  public func hasSemanticTokensRegistration(for languages: [Language]) -> Bool {
-    registration(for: languages, in: semanticTokens) != nil
+  public func hasSemanticTokensRegistration(for language: Language) -> Bool {
+    registration(for: [language], in: semanticTokens) != nil
   }

   private func documentSelector(for langauges: [Language]) -> DocumentSelector {

Sources/SourceKitLSP/DocumentManager.swift

Lines changed: 12 additions & 24 deletions
@@ -23,7 +23,7 @@ public struct DocumentTokens {
   /// Semantic tokens, e.g. variable references, type references, ...
   public var semantic: [SyntaxHighlightingToken] = []

-  public var merged: [SyntaxHighlightingToken] {
+  private var merged: [SyntaxHighlightingToken] {
     [lexical, syntactic, semantic].reduce([]) { $0.mergingTokens(with: $1) }
   }
   public var mergedAndSorted: [SyntaxHighlightingToken] {
@@ -174,19 +174,11 @@ public final class DocumentManager {
       let lastLineLengthDelta = newLines.last!.count - lastLineReplaceLength
       let lineDelta = newLines.count - previousLineCount

-      func isTokenBounding(character: Character) -> Bool {
-        character.isWhitespace || character.isPunctuation || character.isSymbol
-      }
-
-      func update(tokens: inout [SyntaxHighlightingToken]) {
+      document.latestTokens.withMutableTokensOfEachKind { tokens in
         tokens = Array(tokens.lazy
           .filter {
-            // Only keep tokens that don't overlap with or are directly
-            // adjacent to the edit range and also are adjacent to a
-            // token-bounding character.
-            $0.start > range.upperBound || range.lowerBound > $0.sameLineEnd
-              || ($0.start == range.upperBound && (edit.text.first.map(isTokenBounding(character:)) ?? true))
-              || ($0.sameLineEnd == range.lowerBound && (edit.text.last.map(isTokenBounding(character:)) ?? true))
+            // Only keep tokens that don't overlap with the edit range
+            $0.start >= range.upperBound || range.lowerBound >= $0.sameLineEnd
           }
           .map {
             // Shift tokens after the edit range
@@ -201,8 +193,6 @@ public final class DocumentManager {
             return token
           })
       }
-
-      document.latestTokens.withMutableTokensOfEachKind(update(tokens:))
     } else {
       // Full text replacement.
       document.latestLineTable = LineTable(edit.text)
@@ -259,7 +249,7 @@ public final class DocumentManager {
   ///
   /// - parameter uri: The URI of the document to be updated
   /// - parameter range: The range to replace tokens in (nil means the entire document)
-  /// - parameter tokens: The tokens to be added
+  /// - parameter newTokens: The tokens to be added
   @discardableResult
   public func replaceLexicalTokens(
     _ uri: DocumentURI,
@@ -271,17 +261,15 @@ public final class DocumentManager {
       throw Error.missingDocument(uri)
     }

-    if !newTokens.isEmpty {
-      if let range = range {
-        document.latestTokens.lexical.removeAll {
-          // Remove overlapping or bounding tokens
-          $0.start <= range.upperBound && range.lowerBound <= $0.sameLineEnd
-        }
-      }
-
-      document.latestTokens.lexical += newTokens
+    // Remove all tokens in `range` (or the entire document if `range` is `nil`)
+    document.latestTokens.lexical.removeAll { token in
+      range.map {
+        token.start <= $0.upperBound && $0.lowerBound <= token.sameLineEnd
+      } ?? true
     }

+    document.latestTokens.lexical += newTokens
+
     return document.latestSnapshot
   }
 }
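For context, the simplified filter above is a plain interval-overlap test: a token is kept only if it lies entirely before or entirely after the edited range. The following is a minimal sketch of that predicate using hypothetical Position/Token stand-ins, not the real SourceKit-LSP types:

// Minimal, self-contained sketch of the simplified filtering above.
// `Position` and `Token` are stand-ins for the real LSP types; only the
// overlap predicate mirrors the change in this commit.
struct Position: Comparable {
  var line: Int
  var utf16index: Int

  static func < (lhs: Position, rhs: Position) -> Bool {
    (lhs.line, lhs.utf16index) < (rhs.line, rhs.utf16index)
  }
}

struct Token {
  var start: Position
  var length: Int

  // End of the token, assuming it does not span lines.
  var sameLineEnd: Position {
    Position(line: start.line, utf16index: start.utf16index + length)
  }
}

/// Keeps only tokens strictly before or strictly after the edited range.
/// Tokens that merely touch the range's endpoints survive, since the
/// token-bounding heuristic was dropped.
func tokensSurviving(_ tokens: [Token], editedRange: Range<Position>) -> [Token] {
  tokens.filter {
    $0.start >= editedRange.upperBound || editedRange.lowerBound >= $0.sameLineEnd
  }
}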

Sources/SourceKitLSP/SourceKitServer.swift

Lines changed: 1 addition & 1 deletion
@@ -861,7 +861,7 @@ extension SourceKitServer {
       return false
     }
     let language = snapshot.document.language
-    return workspace.capabilityRegistry.hasSemanticTokensRegistration(for: [language])
+    return workspace.capabilityRegistry.hasSemanticTokensRegistration(for: language)
   }

   func documentSemanticTokens(

Sources/SourceKitLSP/Swift/SwiftLanguageServer.swift

Lines changed: 0 additions & 1 deletion
@@ -221,7 +221,6 @@ public final class SwiftLanguageServer: ToolchainLanguageServer {

     do {
       try documentManager.replaceSemanticTokens(uri, tokens: tokens)
-
     } catch {
       log("updating semantic tokens for \(uri) failed: \(error)", level: .warning)
     }

Sources/SourceKitLSP/Swift/SyntaxHighlightingToken.swift

Lines changed: 21 additions & 11 deletions
@@ -32,7 +32,6 @@ public struct SyntaxHighlightingToken: Hashable {
   }

   public init(
-    name: String? = nil,
     start: Position,
     length: Int,
     kind: Kind,
@@ -45,9 +44,9 @@ public struct SyntaxHighlightingToken: Hashable {
   }

   /// Splits a potentially multi-line token to multiple single-line tokens.
-  public func splitToSingleLineTokens(in snapshot: DocumentSnapshot) -> [Self]? {
+  public func splitToSingleLineTokens(in snapshot: DocumentSnapshot) -> [Self] {
     guard let startIndex = snapshot.index(of: start) else {
-      return nil
+      fatalError("Token \(self) begins outside of the document")
     }

     let endIndex = snapshot.text.index(startIndex, offsetBy: length)
@@ -104,8 +103,7 @@ public struct SyntaxHighlightingToken: Hashable {
     case `operator`

     /// The name of the token type used by LSP.
-    /// **Public for testing.**
-    public var lspName: String {
+    var lspName: String {
       switch self {
       case .namespace: return "namespace"
       case .type: return "type"
@@ -131,6 +129,11 @@ public struct SyntaxHighlightingToken: Hashable {
       case .operator: return "operator"
       }
     }
+
+    /// **Public for testing**
+    public var _lspName: String {
+      lspName
+    }
   }

   /// Additional metadata about a token. Similar to `Kind`, the raw
@@ -169,8 +172,7 @@ public struct SyntaxHighlightingToken: Hashable {
     /// The name of the modifier used by LSP, if this
     /// is a single modifier. Note that every modifier
     /// in `allCases` must have an associated `lspName`.
-    /// **Public for testing.**
-    public var lspName: String? {
+    var lspName: String? {
       switch self {
       case .declaration: return "declaration"
       case .definition: return "definition"
@@ -186,6 +188,11 @@ public struct SyntaxHighlightingToken: Hashable {
       }
     }

+    /// **Public for testing**
+    public var _lspName: String? {
+      lspName
+    }
+
     public init(rawValue: UInt32) {
       self.rawValue = rawValue
     }
@@ -207,6 +214,11 @@ extension Array where Element == SyntaxHighlightingToken {
         // only if the token is on the previous token's line.
         previous.line == token.start.line ? previous.utf16index : 0
       )
+
+      // We assert that the tokens are actually sorted
+      assert(lineDelta >= 0)
+      assert(charDelta >= 0)
+
       previous = token.start
       rawTokens += [
         UInt32(lineDelta),
@@ -259,7 +271,7 @@ struct SyntaxHighlightingTokenParser {
     if useName && [.function, .method, .enumMember].contains(kind) && modifiers.contains(.declaration),
       let name: String = response[keys.name],
       name.contains("("),
-      let funcNameLength: Int = name.split(separator: "(").first?.count {
+      let funcNameLength: Int = name.split(separator: "(").first?.utf16.count {
       length = funcNameLength
     }

@@ -277,9 +289,7 @@ struct SyntaxHighlightingTokenParser {
       modifiers: modifiers
     )

-    if let newTokens = multiLineToken.splitToSingleLineTokens(in: snapshot) {
-      tokens += newTokens
-    }
+    tokens += multiLineToken.splitToSingleLineTokens(in: snapshot)
   }

   if let substructure: SKDResponseArray = response[keys.substructure] {
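The new assertions lean on how LSP encodes semantic tokens: each token is emitted as five integers (line delta, start-character delta, length, kind, modifiers) relative to the previous token, so both deltas are guaranteed to be non-negative only when the tokens arrive sorted by position. A rough, self-contained sketch of that encoding, using a simplified token type rather than the real SyntaxHighlightingToken:

// Sketch of the LSP "relative" semantic-token encoding, assuming a
// simplified token type. Mirrors the delta logic that the new asserts guard.
struct SimpleToken {
  var line: Int        // 0-based line
  var utf16index: Int  // 0-based UTF-16 start column
  var length: Int      // length in UTF-16 code units
  var kindIndex: UInt32
  var modifiersRaw: UInt32
}

func encodeRelative(_ tokens: [SimpleToken]) -> [UInt32] {
  var raw: [UInt32] = []
  var previousLine = 0
  var previousChar = 0
  for token in tokens {
    let lineDelta = token.line - previousLine
    // The start character is relative to the previous token only when both
    // tokens are on the same line; otherwise it is an absolute column.
    let charDelta = token.utf16index - (token.line == previousLine ? previousChar : 0)

    // If the input is sorted by (line, column), both deltas are >= 0.
    assert(lineDelta >= 0)
    assert(charDelta >= 0)

    previousLine = token.line
    previousChar = token.utf16index
    raw += [
      UInt32(lineDelta),
      UInt32(charDelta),
      UInt32(token.length),
      token.kindIndex,
      token.modifiersRaw,
    ]
  }
  return raw
}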

Tests/SourceKitLSPTests/SemanticTokensTests.swift

Lines changed: 11 additions & 2 deletions
@@ -51,8 +51,8 @@ final class SemanticTokensTests: XCTestCase {
         range: .bool(true),
         full: .bool(true)
       ),
-      tokenTypes: Token.Kind.allCases.map(\.lspName),
-      tokenModifiers: Token.Modifiers.allCases.map { $0.lspName! },
+      tokenTypes: Token.Kind.allCases.map(\._lspName),
+      tokenModifiers: Token.Modifiers.allCases.map { $0._lspName! },
       formats: [.relative]
     )
   )
@@ -320,6 +320,15 @@ final class SemanticTokensTests: XCTestCase {
     ])
   }

+  func testSemanticTokensForFunctionSignaturesWithEmoji() {
+    let text = "func 👍abc() {}"
+    let tokens = performSemanticTokensRequest(text: text)
+    XCTAssertEqual(tokens, [
+      Token(start: Position(line: 0, utf16index: 0), length: 4, kind: .keyword),
+      Token(start: Position(line: 0, utf16index: 5), length: 5, kind: .function, modifiers: .declaration),
+    ])
+  }
+
   func testSemanticTokensForStaticMethods() {
     let text = """
     class X {
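As a sanity check on the expected tokens above (an illustration only, not part of the commit): "👍" is a single Character but two UTF-16 code units, so in "func 👍abc() {}" the function name starts at UTF-16 index 5 (after "func ") and spans 5 UTF-16 code units, while a Character-based count would report 4.

// Quick check of the UTF-16 arithmetic behind the expected tokens above.
let name = "👍abc"
print(name.count)        // 4 Characters (grapheme clusters)
print(name.utf16.count)  // 5 UTF-16 code units, which is what LSP positions count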

Tests/SourceKitLSPTests/XCTestManifests.swift

Lines changed: 1 addition & 0 deletions
@@ -165,6 +165,7 @@ extension SemanticTokensTests {
     ("testSemanticTokens", testSemanticTokens),
     ("testSemanticTokensForEnumMembers", testSemanticTokensForEnumMembers),
     ("testSemanticTokensForFunctionSignatures", testSemanticTokensForFunctionSignatures),
+    ("testSemanticTokensForFunctionSignaturesWithEmoji", testSemanticTokensForFunctionSignaturesWithEmoji),
     ("testSemanticTokensForProtocols", testSemanticTokensForProtocols),
     ("testSemanticTokensForStaticMethods", testSemanticTokensForStaticMethods),
     ("testSyntacticTokens", testSyntacticTokens),
