mod semantic_token;
+mod utils;
+
use dashmap::DashMap;
-use log::{debug, error, info, log_enabled, Level};
-use parser::{Parse, Parser, SyntaxKind, SyntaxNode, SyntaxToken};
+use log::debug;
+use parser::{Parse, Parser};
use ropey::Rope;
-use semantic_token::LEGEND_TYPE;
+use semantic_token::{ImCompleteSemanticToken, LEGEND_TYPE};
use serde_json::Value;
use tower_lsp::jsonrpc::Result;
use tower_lsp::lsp_types::*;
use tower_lsp::{Client, LanguageServer, LspService, Server};

use crate::semantic_token::semantic_token_from_syntax_kind;
+use crate::utils::offset_to_position;

#[derive(Debug)]
struct Backend {
    client: Client,
    parse_map: DashMap<String, Parse>,
-    // ast_map: DashMap<String, HashMap<String, Func>>,
    document_map: DashMap<String, Rope>,
-    // semantic_token_map: DashMap<String, Vec<ImCompleteSemanticToken>>,
+    semantic_token_map: DashMap<String, Vec<ImCompleteSemanticToken>>,
}
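
The semantic_token module itself is not shown in this diff; the sketch below is only a guess at the imported items, with their shape inferred from how they are used further down. The concrete legend entries and field names are assumptions, not the commit's code.

use tower_lsp::lsp_types::SemanticTokenType;

// Hypothetical legend: the actual token types chosen by the project are not visible here.
pub const LEGEND_TYPE: &[SemanticTokenType] = &[
    SemanticTokenType::FUNCTION,
    SemanticTokenType::VARIABLE,
    SemanticTokenType::STRING,
    SemanticTokenType::NUMBER,
    SemanticTokenType::KEYWORD,
    SemanticTokenType::COMMENT,
    SemanticTokenType::OPERATOR,
];

// Absolute token position in the source text; converted to the LSP delta
// encoding in semantic_tokens_full below.
#[derive(Debug, Clone, Copy)]
pub struct ImCompleteSemanticToken {
    pub start: usize,      // offset of the token in the source text
    pub length: usize,     // token length
    pub token_type: usize, // index into LEGEND_TYPE
}

semantic_token_from_syntax_kind is presumably a sibling function that maps a parser SyntaxKind to Some(index into LEGEND_TYPE) for kinds that should be highlighted, and None otherwise.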

#[tower_lsp::async_trait]
@@ -31,9 +33,9 @@ impl LanguageServer for Backend {
            offset_encoding: None,
            capabilities: ServerCapabilities {
                // inlay_hint_provider: Some(OneOf::Left(true)),
-                // text_document_sync: Some(TextDocumentSyncCapability::Kind(
-                //     TextDocumentSyncKind::FULL,
-                // )),
+                text_document_sync: Some(TextDocumentSyncCapability::Kind(
+                    TextDocumentSyncKind::FULL,
+                )),
                // completion_provider: Some(CompletionOptions {
                //     resolve_provider: Some(false),
                //     trigger_characters: Some(vec![".".to_string()]),
@@ -45,13 +47,13 @@ impl LanguageServer for Backend {
                //     commands: vec!["dummy.do_something".to_string()],
                //     work_done_progress_options: Default::default(),
                // }),
-                // workspace: Some(WorkspaceServerCapabilities {
-                //     workspace_folders: Some(WorkspaceFoldersServerCapabilities {
-                //         supported: Some(true),
-                //         change_notifications: Some(OneOf::Left(true)),
-                //     }),
-                //     file_operations: None,
-                // }),
+                workspace: Some(WorkspaceServerCapabilities {
+                    workspace_folders: Some(WorkspaceFoldersServerCapabilities {
+                        supported: Some(true),
+                        change_notifications: Some(OneOf::Left(true)),
+                    }),
+                    file_operations: None,
+                }),
                semantic_tokens_provider: Some(
                    SemanticTokensServerCapabilities::SemanticTokensRegistrationOptions(
                        SemanticTokensRegistrationOptions {
@@ -110,6 +112,9 @@ impl LanguageServer for Backend {
    }

    async fn did_change(&self, mut params: DidChangeTextDocumentParams) {
+        self.client
+            .log_message(MessageType::INFO, "file changed!")
+            .await;
        self.on_change(TextDocumentItem {
            uri: params.text_document.uri,
            text: std::mem::take(&mut params.content_changes[0].text),
@@ -133,8 +138,55 @@ impl LanguageServer for Backend {
        &self,
        params: SemanticTokensParams,
    ) -> Result<Option<SemanticTokensResult>> {
-        println!("semantic_tokens_full");
-        return Ok(None);
+        let uri = params.text_document.uri.to_string();
+        self.client
+            .log_message(MessageType::LOG, "semantic_token_full")
+            .await;
+        let semantic_tokens = || -> Option<Vec<SemanticToken>> {
+            let mut im_complete_tokens = self.semantic_token_map.get_mut(&uri)?;
+            let rope = self.document_map.get(&uri)?;
+            im_complete_tokens.sort_by(|a, b| a.start.cmp(&b.start));
+            let mut pre_line = 0;
+            let mut pre_start = 0;
+            let semantic_tokens = im_complete_tokens
+                .iter()
+                .filter_map(|token| {
+                    let line = rope.try_byte_to_line(token.start).ok()? as u32;
+                    let first = rope.try_line_to_char(line as usize).ok()? as u32;
+                    let start = rope.try_byte_to_char(token.start).ok()? as u32 - first;
+                    let delta_line = line - pre_line;
+                    let delta_start = if delta_line == 0 {
+                        start - pre_start
+                    } else {
+                        start
+                    };
+                    let ret = Some(SemanticToken {
+                        delta_line,
+                        delta_start,
+                        length: token.length as u32,
+                        token_type: token.token_type as u32,
+                        token_modifiers_bitset: 0,
+                    });
+                    pre_line = line;
+                    pre_start = start;
+                    ret
+                })
+                .collect::<Vec<_>>();
+            Some(semantic_tokens)
+        }();
+        self.client
+            .log_message(
+                MessageType::LOG,
+                format!("semantic_tokens: {:?}", semantic_tokens),
+            )
+            .await;
+        if let Some(semantic_token) = semantic_tokens {
+            return Ok(Some(SemanticTokensResult::Tokens(SemanticTokens {
+                result_id: None,
+                data: semantic_token,
+            })));
+        }
+        Ok(None)
    }
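
The closure above turns absolute token positions into the delta encoding required by the LSP semantic-tokens spec: each token's line and start column are reported relative to the previous token. A standalone sketch of just that encoding step, using hypothetical (line, start_col, length) triples (already sorted by position) rather than anything taken from the commit:

fn to_deltas(absolute: &[(u32, u32, u32)]) -> Vec<(u32, u32, u32)> {
    let (mut pre_line, mut pre_start) = (0, 0);
    absolute
        .iter()
        .map(|&(line, start, len)| {
            // Same rule as above: delta_start is only relative within the same line.
            let delta_line = line - pre_line;
            let delta_start = if delta_line == 0 { start - pre_start } else { start };
            pre_line = line;
            pre_start = start;
            (delta_line, delta_start, len)
        })
        .collect()
}

// e.g. [(2, 4, 3), (2, 10, 5), (4, 1, 2)] encodes to [(2, 4, 3), (0, 6, 5), (2, 1, 2)].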

    async fn semantic_tokens_range(
@@ -185,83 +237,60 @@ struct TextDocumentItem {
}
impl Backend {
    async fn on_change(&self, params: TextDocumentItem) {
-        debug!("on_change {:?}", params.uri);
+        self.client
+            .log_message(MessageType::INFO, format!("on_change {:?}", params.uri))
+            .await;
        let rope = ropey::Rope::from_str(&params.text);
        self.document_map
            .insert(params.uri.to_string(), rope.clone());
+
+        let rope = ropey::Rope::from_str(&params.text);
        let mut parser = Parser::default();
+
        parser.parse_source_file(&params.text);
+
        let result = parser.finish();

-        let semantic_tokens = result.cst.descendants_with_tokens().filter_map(|item| {
-            match SyntaxKind::try_from(item.syntax_kind()) {
-                _ => panic!("unexpected syntax kind"),
-            }
-        });
-
-        // let (ast, errors, semantic_tokens) = parse(&params.text);
-
-        // let diagnostics = errors
-        //     .into_iter()
-        //     .filter_map(|item| {
-        //         let (message, span) = match item.reason() {
-        //             chumsky::error::SimpleReason::Unclosed { span, delimiter } => {
-        //                 (format!("Unclosed delimiter {}", delimiter), span.clone())
-        //             }
-        //             chumsky::error::SimpleReason::Unexpected => (
-        //                 format!(
-        //                     "{}, expected {}",
-        //                     if item.found().is_some() {
-        //                         "Unexpected token in input"
-        //                     } else {
-        //                         "Unexpected end of input"
-        //                     },
-        //                     if item.expected().len() == 0 {
-        //                         "something else".to_string()
-        //                     } else {
-        //                         item.expected()
-        //                             .map(|expected| match expected {
-        //                                 Some(expected) => expected.to_string(),
-        //                                 None => "end of input".to_string(),
-        //                             })
-        //                             .collect::<Vec<_>>()
-        //                             .join(", ")
-        //                     }
-        //                 ),
-        //                 item.span(),
-        //             ),
-        //             chumsky::error::SimpleReason::Custom(msg) => (msg.to_string(), item.span()),
-        //         };
-        //
-        //         || -> Option<Diagnostic> {
-        //             // let start_line = rope.try_char_to_line(span.start)?;
-        //             // let first_char = rope.try_line_to_char(start_line)?;
-        //             // let start_column = span.start - first_char;
-        //             let start_position = offset_to_position(span.start, &rope)?;
-        //             let end_position = offset_to_position(span.end, &rope)?;
-        //             // let end_line = rope.try_char_to_line(span.end)?;
-        //             // let first_char = rope.try_line_to_char(end_line)?;
-        //             // let end_column = span.end - first_char;
-        //             Some(Diagnostic::new_simple(
-        //                 Range::new(start_position, end_position),
-        //                 message,
-        //             ))
-        //         }()
-        //     })
-        //     .collect::<Vec<_>>();
+        dbg!(&result.cst);
+
+        // update semantic tokens
+        let semantic_tokens = result
+            .cst
+            .descendants_with_tokens()
+            .filter_map(|item| match semantic_token_from_syntax_kind(item.kind()) {
+                Some(token_type) => Some(ImCompleteSemanticToken {
+                    start: item.text_range().start().into(),
+                    token_type,
+                    length: item.text_range().len().into(),
+                }),
+                None => None,
+            })
+            .collect::<Vec<_>>();
+
+        // publish diagnostics
        //
-        // self.client
-        //     .publish_diagnostics(params.uri.clone(), diagnostics, Some(params.version))
-        //     .await;
-
-        // if let Some(ast) = ast {
-        //     self.ast_map.insert(params.uri.to_string(), ast);
-        // }
-        // self.client
-        //     .log_message(MessageType::INFO, &format!("{:?}", semantic_tokens))
-        //     .await;
-        // self.semantic_token_map
-        //     .insert(params.uri.to_string(), semantic_tokens);
+        let diagnostics = result
+            .errors
+            .iter()
+            .map(|error| {
+                Diagnostic::new_simple(
+                    Range {
+                        start: offset_to_position(error.range().start().into(), &rope).unwrap(),
+                        end: offset_to_position(error.range().end().into(), &rope).unwrap(),
+                    },
+                    error.to_string(),
+                )
+            })
+            .collect::<Vec<_>>();
+
+        self.client
+            .publish_diagnostics(params.uri.clone(), diagnostics, Some(params.version))
+            .await;
+
+        self.semantic_token_map
+            .insert(params.uri.to_string(), semantic_tokens);
+
+        self.parse_map.insert(params.uri.to_string(), result);
    }
}

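The utils module added by this commit is not included in the diff hunks shown here. A minimal sketch of an offset_to_position helper that would satisfy the calls above, assuming the parser reports byte offsets and mirroring the byte-to-line/column conversion used in semantic_tokens_full (the real utils.rs may differ):

use ropey::Rope;
use tower_lsp::lsp_types::Position;

// Map a byte offset in the source text to an LSP Position (0-based line and
// character). Returns None if the offset is out of bounds for this rope.
pub fn offset_to_position(offset: usize, rope: &Rope) -> Option<Position> {
    let line = rope.try_byte_to_line(offset).ok()?;
    let first_char_of_line = rope.try_line_to_char(line).ok()?;
    let column = rope.try_byte_to_char(offset).ok()? - first_char_of_line;
    Some(Position::new(line as u32, column as u32))
}
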
@@ -278,7 +307,8 @@ async fn main() {
        client,
        // ast_map: DashMap::new(),
        document_map: DashMap::new(),
-        // semantic_token_map: DashMap::new(),
+        parse_map: DashMap::new(),
+        semantic_token_map: DashMap::new(),
    })
    .finish();
