diff --git a/internal/lsp/token_encoder.go b/internal/lsp/token_encoder.go
index b2ef9085..7915c1f6 100644
--- a/internal/lsp/token_encoder.go
+++ b/internal/lsp/token_encoder.go
@@ -12,6 +12,10 @@ type TokenEncoder struct {
 	Lines      source.Lines
 	Tokens     []lang.SemanticToken
 	ClientCaps lsp.SemanticTokensClientCapabilities
+
+	// lastEncodedTokenIdx tracks index of the last encoded token
+	// so we can account for any skipped tokens in calculating diff
+	lastEncodedTokenIdx int
 }
 
 func (te *TokenEncoder) Encode() []uint32 {
@@ -92,10 +96,10 @@ func (te *TokenEncoder) encodeTokenOfIndex(i int) []uint32 {
 	previousLine := 0
 	previousStartChar := 0
 	if i > 0 {
-		previousLine = te.Tokens[i-1].Range.End.Line - 1
+		previousLine = te.Tokens[te.lastEncodedTokenIdx].Range.End.Line - 1
 		currentLine := te.Tokens[i].Range.End.Line - 1
 		if currentLine == previousLine {
-			previousStartChar = te.Tokens[i-1].Range.Start.Column - 1
+			previousStartChar = te.Tokens[te.lastEncodedTokenIdx].Range.Start.Column - 1
 		}
 	}
 
@@ -140,6 +144,8 @@ func (te *TokenEncoder) encodeTokenOfIndex(i int) []uint32 {
 		}
 	}
 
+	te.lastEncodedTokenIdx = i
+
 	return data
 }
 
diff --git a/internal/lsp/token_encoder_test.go b/internal/lsp/token_encoder_test.go
index 6070e6bf..77fcc83a 100644
--- a/internal/lsp/token_encoder_test.go
+++ b/internal/lsp/token_encoder_test.go
@@ -80,6 +80,59 @@ func TestTokenEncoder_singleLineTokens(t *testing.T) {
 	}
 }
 
+func TestTokenEncoder_unknownTokenType(t *testing.T) {
+	bytes := []byte(`variable "test" {
+  type = string
+  default = "foo"
+}
+`)
+	te := &TokenEncoder{
+		Lines: source.MakeSourceLines("test.tf", bytes),
+		Tokens: []lang.SemanticToken{
+			{
+				Type:      lang.SemanticTokenType(999),
+				Modifiers: []lang.SemanticTokenModifier{},
+				Range: hcl.Range{
+					Filename: "main.tf",
+					Start:    hcl.Pos{Line: 1, Column: 1, Byte: 0},
+					End:      hcl.Pos{Line: 1, Column: 9, Byte: 8},
+				},
+			},
+			{
+				Type:      lang.SemanticTokenType(1000),
+				Modifiers: []lang.SemanticTokenModifier{},
+				Range: hcl.Range{
+					Filename: "main.tf",
+					Start:    hcl.Pos{Line: 2, Column: 3, Byte: 20},
+					End:      hcl.Pos{Line: 2, Column: 7, Byte: 24},
+				},
+			},
+			{
+				Type:      lang.TokenAttrName,
+				Modifiers: []lang.SemanticTokenModifier{},
+				Range: hcl.Range{
+					Filename: "main.tf",
+					Start:    hcl.Pos{Line: 3, Column: 3, Byte: 36},
+					End:      hcl.Pos{Line: 3, Column: 10, Byte: 43},
+				},
+			},
+		},
+		ClientCaps: protocol.SemanticTokensClientCapabilities{
+			TokenTypes:     serverTokenTypes.AsStrings(),
+			TokenModifiers: serverTokenModifiers.AsStrings(),
+		},
+	}
+	data := te.Encode()
+	expectedData := []uint32{
+		2, 2, 7, 2, 0,
+	}
+
+	if diff := cmp.Diff(expectedData, data); diff != "" {
+		t.Fatalf("unexpected encoded data.\nexpected: %#v\ngiven: %#v",
+			expectedData, data)
+	}
+}
+
 func TestTokenEncoder_multiLineTokens(t *testing.T) {
 	bytes := []byte(`myblock "mytype" {
   str_attr = "something"
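Reviewer note (not part of the patch): for anyone unfamiliar with the LSP semantic tokens wire format, the fix rests on the rule that each emitted token is encoded as five integers (deltaLine, deltaStartChar, length, tokenType, tokenModifiers), with the deltas computed against the previously emitted token; a token that is dropped because its type or modifier is unknown to the client must therefore not become the delta reference. The standalone sketch below is only an illustration of that rule under simplified assumptions: the tok struct and encode function are hypothetical and not terraform-ls APIs, it uses 0-based positions (hcl.Pos is 1-based, hence the "- 1" adjustments in the real encoder), and it tracks the last emitted position directly rather than an index as the patch does. It reproduces the expected output of the new test.

package main

import "fmt"

// tok is a hypothetical, simplified semantic token with 0-based absolute
// coordinates; supported simulates whether the client knows the token type.
type tok struct {
	line, startChar, length, typeIdx, modifiers uint32
	supported                                   bool
}

// encode emits the LSP wire format (deltaLine, deltaStartChar, length,
// tokenType, tokenModifiers) per token, computing each delta against the
// last token actually emitted so that skipped tokens never shift the
// reference point.
func encode(tokens []tok) []uint32 {
	data := []uint32{}
	prevLine, prevChar := uint32(0), uint32(0)
	for _, t := range tokens {
		if !t.supported {
			continue // skipped: must not become the delta reference
		}
		deltaLine := t.line - prevLine
		deltaChar := t.startChar
		if deltaLine == 0 {
			// same line as the previous emitted token: char delta is relative
			deltaChar = t.startChar - prevChar
		}
		data = append(data, deltaLine, deltaChar, t.length, t.typeIdx, t.modifiers)
		prevLine, prevChar = t.line, t.startChar
	}
	return data
}

func main() {
	// Mirrors the new test: two unknown-type tokens followed by "default".
	tokens := []tok{
		{line: 0, startChar: 0, length: 8, typeIdx: 0, supported: false},
		{line: 1, startChar: 2, length: 4, typeIdx: 0, supported: false},
		{line: 2, startChar: 2, length: 7, typeIdx: 2, supported: true},
	}
	fmt.Println(encode(tokens)) // [2 2 7 2 0]
}

With the unfixed encoder, the delta for the "default" token would be taken against Tokens[i-1], a token that was never sent to the client, so the client would place the token one line too early; the expected data in the new test (2, 2, 7, 2, 0) only holds once deltas are anchored to the last encoded token.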