@@ -1,18 +1,32 @@
 use std::{
     error::Error,
+    fs,
     io::{self, ErrorKind},
     process::exit,
 };

+use bstr::ByteSlice;
 use lsp_server::{Connection, Message, Request, Response};
 use lsp_types::{
     request::{Request as _, SemanticTokensFullRequest},
     InitializeParams, SemanticToken, SemanticTokenModifier, SemanticTokenType, SemanticTokens,
     SemanticTokensFullOptions, SemanticTokensLegend, SemanticTokensOptions, SemanticTokensParams,
     SemanticTokensResult, SemanticTokensServerCapabilities, ServerCapabilities,
 };
+use omniwsa::{
+    dialects::{Dialect, Palaiologos},
+    tokens::{
+        comment::BlockCommentError,
+        string::{CharError, QuotedError, StringError},
+        Token,
+    },
+};
 use serde_json::{from_value as from_json, to_value as to_json};

+// TODO:
+// - Implement text document API, instead of reading from disk.
+// - Record spans in tokens.
+
 fn main() {
     if let Err(err) = do_main() {
         eprintln!("Error: {err}");
@@ -39,11 +53,13 @@ fn server_capabilities() -> ServerCapabilities {
                 work_done_progress_options: Default::default(),
                 legend: SemanticTokensLegend {
                     token_types: vec![
+                        SemanticTokenType::VARIABLE,
                         SemanticTokenType::FUNCTION,
                         SemanticTokenType::KEYWORD,
                         SemanticTokenType::COMMENT,
                         SemanticTokenType::STRING,
                         SemanticTokenType::NUMBER,
+                        SemanticTokenType::OPERATOR,
                     ],
                     token_modifiers: vec![
                         SemanticTokenModifier::DECLARATION,
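
The legend is positional: a client resolves each emitted token_type index against token_types, so the order above must stay in sync with the TokenType enum introduced in the next hunk. A minimal guard test, as a sketch outside the commit (it assumes the names from this diff, in the same file):

#[cfg(test)]
mod legend_order {
    use super::*;

    #[test]
    fn token_types_match_legend() {
        // Mirror of the legend above; each TokenType discriminant must
        // equal its index in this list.
        let legend = [
            SemanticTokenType::VARIABLE,
            SemanticTokenType::FUNCTION,
            SemanticTokenType::KEYWORD,
            SemanticTokenType::COMMENT,
            SemanticTokenType::STRING,
            SemanticTokenType::NUMBER,
            SemanticTokenType::OPERATOR,
        ];
        assert_eq!(legend[TokenType::Variable as usize], SemanticTokenType::VARIABLE);
        assert_eq!(legend[TokenType::Operator as usize], SemanticTokenType::OPERATOR);
    }
}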
@@ -58,40 +74,119 @@ fn server_capabilities() -> ServerCapabilities {
     }
 }

+// The order corresponds to the index in SemanticTokensLegend::token_types.
+#[derive(Clone, Copy, Debug)]
+enum TokenType {
+    Variable,
+    Function,
+    Keyword,
+    Comment,
+    String,
+    Number,
+    Operator,
+}
+
+// The order corresponds to the index in SemanticTokensLegend::token_modifiers.
+#[derive(Clone, Copy, Debug)]
+enum TokenModifier {
+    Declaration,
+    Definition,
+}
+
 fn main_loop(
     connection: Connection,
     _initialize_params: InitializeParams,
 ) -> Result<(), Box<dyn Error + Send + Sync>> {
     for msg in &connection.receiver {
         match msg {
             Message::Request(req) => {
-                eprintln!(
-                    "Received request {} #{}: {:?}",
-                    req.method, req.id, req.params,
-                );
+                eprintln!("Receive {req:?}");
                 if connection.handle_shutdown(&req)? {
                     return Ok(());
                 }
                 let Request { id, method, params } = req;
                 match &*method {
                     SemanticTokensFullRequest::METHOD => {
-                        let _params: SemanticTokensParams = from_json(params)?;
-                        let tokens = vec![SemanticToken {
-                            delta_line: 1,
-                            delta_start: 2,
-                            length: 3,
-                            token_type: 0,
-                            token_modifiers_bitset: 0,
-                        }];
+                        // https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification/#textDocument_semanticTokens
+                        // https://code.visualstudio.com/api/language-extensions/semantic-highlight-guide
+                        // https://github.com/rust-lang/rust-analyzer/blob/master/crates/rust-analyzer/src/lsp/semantic_tokens.rs
+                        // https://github.com/rust-lang/rust-analyzer/blob/c4e040ea8dc4651569514bd8a8d8b293b49390a6/crates/rust-analyzer/src/lsp/capabilities.rs#L123
+
+                        let params: SemanticTokensParams = from_json(params)?;
+                        // TODO: Implement text document API, instead of reading
+                        // from disk.
+                        let path = params.text_document.uri.as_str();
+                        let path = path.strip_prefix("file://").unwrap_or(path);
+                        let src = fs::read(path)?;
+                        let tokens = Palaiologos::new().lex(&src);
+
+                        let mut tokens_out = Vec::with_capacity(tokens.len());
+                        let (mut curr_line, mut curr_col) = (0, 0);
+                        let (mut prev_line, mut prev_col) = (0, 0);
+                        for tok in &tokens {
+                            eprintln!("{tok:?}");
+                            let ungrouped = tok.ungroup();
+                            let ty = match ungrouped {
+                                Token::Mnemonic(_) => Some(TokenType::Keyword),
+                                Token::Integer(_) => Some(TokenType::Number),
+                                Token::String(_) | Token::Char(_) => Some(TokenType::String),
+                                Token::Variable(_) => Some(TokenType::Variable),
+                                Token::Label(_) => Some(TokenType::Function),
+                                Token::LabelColon(_) => Some(TokenType::Operator),
+                                Token::Space(_) | Token::LineTerm(_) | Token::Eof(_) => None,
+                                Token::InstSep(_) | Token::ArgSep(_) => Some(TokenType::Operator),
+                                Token::LineComment(_) | Token::BlockComment(_) => {
+                                    Some(TokenType::Comment)
+                                }
+                                Token::Word(_) => Some(TokenType::Variable),
+                                Token::Quoted(_) | Token::Spliced(_) => panic!("not ungrouped"),
+                                Token::Error(_) => None,
+                                Token::Placeholder => panic!("placeholder"),
+                            };
+                            let modifiers = match ungrouped {
+                                Token::Label(_) => 1 << TokenModifier::Declaration as u32,
+                                Token::Variable(_) => 1 << TokenModifier::Definition as u32,
+                                _ => 0,
+                            };
+                            let (len, hlen, vlen) = token_len(tok);
+                            let (mut next_line, mut next_col) = (curr_line, curr_col);
+                            if vlen != 0 {
+                                next_col = 0;
+                                next_line += vlen;
+                            }
+                            next_col += hlen;
+                            if let Some(ty) = ty {
+                                let token_out = SemanticToken {
+                                    delta_line: (curr_line - prev_line) as _,
+                                    delta_start: if curr_line == prev_line {
+                                        (curr_col - prev_col) as _
+                                    } else {
+                                        curr_col as _
+                                    },
+                                    length: len as _,
+                                    token_type: ty as _,
+                                    token_modifiers_bitset: modifiers,
+                                };
+                                eprintln!("=> {token_out:?}");
+                                tokens_out.push(token_out);
+                                prev_line = curr_line;
+                                prev_col = curr_col;
+                            }
+                            curr_line = next_line;
+                            curr_col = next_col;
+                        }
+
                         let result = Some(SemanticTokensResult::Tokens(SemanticTokens {
                             result_id: None,
-                            data: tokens,
+                            data: tokens_out,
                         }));
                         let resp = Response {
                             id,
                             result: Some(to_json(&result)?),
                             error: None,
                         };
+                        eprintln!("Send {resp:?}");
+                        eprintln!();
                         connection.sender.send(Message::Response(resp))?;
                     }
                     _ => {
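
Two details of the hunk above are easy to miss. token_modifiers_bitset is a bitset, so a modifier's index in the legend selects a bit (hence the 1 << shifts), and positions are delta-encoded: delta_line is relative to the previously emitted token, and delta_start is relative to that token's start column only when both fall on the same line. A standalone sketch of the same encoding (the encode helper is hypothetical, not part of the commit):

use lsp_types::SemanticToken;

/// Delta-encodes absolute (line, col, length, type, modifiers) tuples the
/// way the loop above does: each position is relative to the previous token.
fn encode(tokens: &[(u32, u32, u32, u32, u32)]) -> Vec<SemanticToken> {
    let (mut prev_line, mut prev_col) = (0, 0);
    tokens
        .iter()
        .map(|&(line, col, length, token_type, token_modifiers_bitset)| {
            let tok = SemanticToken {
                delta_line: line - prev_line,
                delta_start: if line == prev_line { col - prev_col } else { col },
                length,
                token_type,
                token_modifiers_bitset,
            };
            (prev_line, prev_col) = (line, col);
            tok
        })
        .collect()
}

// For example, tokens starting at (1, 4), (1, 10), and (3, 2) encode as the
// deltas (1, 4), (0, 6), and (2, 2): the third token starts a new line, so
// its delta_start is its absolute column.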
@@ -103,12 +198,95 @@ fn main_loop(
                     }
                 }
             }
             Message::Response(resp) => {
-                eprintln!("Received response: {resp:?}");
+                eprintln!("Receive {resp:?}");
             }
             Message::Notification(notif) => {
-                eprintln!("Received notification: {notif:?}");
+                eprintln!("Receive {notif:?}");
             }
         }
     }
     Ok(())
 }
+
+/// Computes the length of the token. Returns the linear, horizontal, vertical
+/// lengths in chars.
+// TODO: Record spans in tokens instead of this hack.
+fn token_len(tok: &Token<'_>) -> (usize, usize, usize) {
+    let (text, len_before, len_after): (&[u8], usize, usize) = match tok {
+        Token::Mnemonic(tok) => (&tok.mnemonic, 0, 0),
+        Token::Integer(tok) => (&tok.literal, 0, 0),
+        Token::String(tok) => (
+            &tok.literal,
+            tok.quotes.quote().len(),
+            if tok.errors.contains(StringError::Unterminated) {
+                0
+            } else {
+                tok.quotes.quote().len()
+            },
+        ),
+        Token::Char(tok) => (
+            &tok.literal,
+            tok.quotes.quote().len(),
+            if tok.errors.contains(CharError::Unterminated) {
+                0
+            } else {
+                tok.quotes.quote().len()
+            },
+        ),
+        Token::Variable(tok) => (&tok.ident, tok.style.sigil().len(), 0),
+        Token::Label(tok) => (&tok.label, tok.style.sigil().len(), 0),
+        Token::LabelColon(_) => (b":", 0, 0),
+        Token::Space(tok) => (&tok.space, 0, 0),
+        Token::LineTerm(tok) => (tok.style.as_str().as_bytes(), 0, 0),
+        Token::Eof(_) => (b"", 0, 0),
+        Token::InstSep(tok) => (tok.style.as_str().as_bytes(), 0, 0),
+        Token::ArgSep(tok) => (tok.style.as_str().as_bytes(), 0, 0),
+        Token::LineComment(tok) => (tok.text, tok.style.prefix().len(), 0),
+        Token::BlockComment(tok) => (
+            tok.text,
+            tok.style.open().len(),
+            if tok.errors.contains(BlockCommentError::Unterminated) {
+                0
+            } else {
+                tok.style.close().len()
+            },
+        ),
+        Token::Word(tok) => (&tok.word, 0, 0),
+        Token::Quoted(tok) => {
+            let (len, hlen, vlen) = token_len(&tok.inner);
+            let mut quotes = 0;
+            if vlen != 0 {
+                quotes += tok.quotes.quote().len();
+            }
+            if !tok.errors.contains(QuotedError::Unterminated) {
+                quotes += tok.quotes.quote().len();
+            }
+            return (len + quotes, hlen + quotes, vlen);
+        }
+        Token::Spliced(tok) => {
+            let (mut spliced_len, mut spliced_hlen, mut spliced_vlen) = (0, 0, 0);
+            for tok in &tok.tokens {
+                let (len, hlen, vlen) = token_len(tok);
+                if vlen != 0 {
+                    spliced_hlen = 0;
+                    spliced_vlen += vlen;
+                }
+                spliced_len += len;
+                spliced_hlen += hlen;
+            }
+            return (spliced_len, spliced_hlen, spliced_vlen);
+        }
+        Token::Error(tok) => (&tok.text, 0, 0),
+        Token::Placeholder => panic!("placeholder"),
+    };
+    let (len, hlen, vlen) =
+        text.chars()
+            .fold((len_before, len_before, 0), |(len, hlen, vlen), ch| {
+                if ch == '\n' {
+                    (len + 1, 0, vlen + 1)
+                } else {
+                    (len + 1, hlen + 1, vlen)
+                }
+            });
+    (len + len_after, hlen + len_after, vlen)
+}
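
token_len walks the raw source bytes with bstr::ByteSlice::chars, so it tolerates non-UTF-8 input; the fold zeroes the horizontal length at each newline while the linear and vertical lengths keep accumulating. The same counting isolated into a runnable sketch (text_len is a made-up name for illustration):

use bstr::ByteSlice;

/// Returns the (linear, horizontal, vertical) lengths in chars, with the
/// same fold as token_len: '\n' resets the column count and bumps the
/// line count.
fn text_len(text: &[u8]) -> (usize, usize, usize) {
    text.chars().fold((0, 0, 0), |(len, hlen, vlen), ch| {
        if ch == '\n' {
            (len + 1, 0, vlen + 1)
        } else {
            (len + 1, hlen + 1, vlen)
        }
    })
}

fn main() {
    // 8 chars in total, 2 on the line after the last newline, 2 newlines.
    assert_eq!(text_len(b"ab\ncd\nef"), (8, 2, 2));
}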