|
| 1 | +use std::collections::HashSet; |
| 2 | + |
| 3 | +use pg_query_proto_parser::{Node, ProtoParser, Token}; |
| 4 | +use proc_macro2::{Ident, Literal}; |
| 5 | +use quote::{format_ident, quote}; |
| 6 | + |
| 7 | +pub fn syntax_kind_mod(_item: proc_macro2::TokenStream) -> proc_macro2::TokenStream { |
| 8 | + // let parser = ProtoParser::new( |
| 9 | + // "/Users/raminder.singh/src/rust/postgres_lsp/crates/parser/proto/source.proto", |
| 10 | + // ); |
| 11 | + let parser = ProtoParser::new("./crates/parser/proto/source.proto"); |
| 12 | + let proto_file = parser.parse(); |
| 13 | + |
| 14 | + let mut current_enum_names: HashSet<&str> = HashSet::new(); |
| 15 | + |
| 16 | + let custom_node_names = custom_node_names(); |
| 17 | + let custom_node_identifiers = custom_node_identifiers(&custom_node_names); |
| 18 | + current_enum_names.extend(&custom_node_names); |
| 19 | + |
| 20 | + let node_identifiers = node_identifiers(&proto_file.nodes, ¤t_enum_names); |
| 21 | + current_enum_names.extend(node_names(&proto_file.nodes)); |
| 22 | + |
| 23 | + let token_identifiers = token_identifiers(&proto_file.tokens, ¤t_enum_names); |
| 24 | + let token_value_literals = token_value_literals(&proto_file.tokens, ¤t_enum_names); |
| 25 | + |
| 26 | + let syntax_kind_impl = |
| 27 | + syntax_kind_impl(&node_identifiers, &token_identifiers, &token_value_literals); |
| 28 | + |
| 29 | + quote! { |
| 30 | + use cstree::Syntax; |
| 31 | + use pg_query::{protobuf::ScanToken, NodeEnum, NodeRef}; |
| 32 | + |
| 33 | + /// An u32 enum of all valid syntax elements (nodes and tokens) of the postgres |
| 34 | + /// sql dialect, and a few custom ones that are not parsed by pg_query.rs, such |
| 35 | + /// as `Whitespace`. |
| 36 | + #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Debug, Syntax)] |
| 37 | + #[repr(u32)] |
| 38 | + pub enum SyntaxKind { |
| 39 | + // custom nodes, which are not parsed by pg_query.rs |
| 40 | + #(#custom_node_identifiers),*, |
| 41 | + #(#node_identifiers),*, |
| 42 | + #(#token_identifiers),*, |
| 43 | + } |
| 44 | + |
| 45 | + #syntax_kind_impl |
| 46 | + } |
| 47 | +} |
| 48 | + |
/// Names of syntax elements that pg_query.rs does not emit and that we
/// therefore declare by hand: trivia (whitespace, comments, newlines, tabs)
/// plus the source-file and statement wrapper nodes.
fn custom_node_names() -> Vec<&'static str> {
    ["SourceFile", "Comment", "Whitespace", "Newline", "Tab", "Stmt"].to_vec()
}
| 59 | + |
| 60 | +fn node_names(nodes: &[Node]) -> impl Iterator<Item = &str> { |
| 61 | + nodes.iter().map(|node| node.name.as_str()) |
| 62 | +} |
| 63 | + |
| 64 | +fn custom_node_identifiers(custom_node_names: &[&str]) -> Vec<Ident> { |
| 65 | + custom_node_names |
| 66 | + .iter() |
| 67 | + .map(|&node_name| format_ident!("{}", node_name)) |
| 68 | + .collect() |
| 69 | +} |
| 70 | + |
| 71 | +fn node_identifiers(nodes: &[Node], existing_enum_names: &HashSet<&str>) -> Vec<Ident> { |
| 72 | + nodes |
| 73 | + .iter() |
| 74 | + .filter(|&token| !existing_enum_names.contains(token.name.as_str())) |
| 75 | + .map(|node| format_ident!("{}", &node.name)) |
| 76 | + .collect() |
| 77 | +} |
| 78 | + |
| 79 | +fn token_identifiers(tokens: &[Token], existing_enum_names: &HashSet<&str>) -> Vec<Ident> { |
| 80 | + tokens |
| 81 | + .iter() |
| 82 | + .filter(|&token| !existing_enum_names.contains(token.name.as_str())) |
| 83 | + .map(|token| format_ident!("{}", &token.name)) |
| 84 | + .collect() |
| 85 | +} |
| 86 | + |
| 87 | +fn token_value_literals(tokens: &[Token], existing_enum_names: &HashSet<&str>) -> Vec<Literal> { |
| 88 | + tokens |
| 89 | + .iter() |
| 90 | + .filter(|&token| !existing_enum_names.contains(token.name.as_str())) |
| 91 | + .map(|token| Literal::i32_unsuffixed(token.value)) |
| 92 | + .collect() |
| 93 | +} |
| 94 | + |
/// Assembles the generated `impl SyntaxKind` block containing the two
/// conversion constructors from pg_query values.
///
/// `token_identifiers` and `token_value_literals` are expected to be parallel
/// slices (same length, same order) — the generated token-match pairs them up
/// positionally.
fn syntax_kind_impl(
    node_identifiers: &[Ident],
    token_identifiers: &[Ident],
    token_value_literals: &[Literal],
) -> proc_macro2::TokenStream {
    let new_from_pg_query_node_fn = new_from_pg_query_node_fn(node_identifiers);
    let new_from_pg_query_token_fn =
        new_from_pg_query_token_fn(token_identifiers, token_value_literals);
    quote! {
        impl SyntaxKind {
            #new_from_pg_query_node_fn

            #new_from_pg_query_token_fn
        }
    }
}
| 111 | + |
/// Generates `SyntaxKind::new_from_pg_query_node`, which maps each `NodeEnum`
/// variant to the `SyntaxKind` variant of the same name.
///
/// The generated `match` has no catch-all arm, so it is exhaustive over
/// `node_identifiers`; a pg_query node missing from that list becomes a
/// compile error in the generated code rather than a silent fallback.
fn new_from_pg_query_node_fn(node_identifiers: &[Ident]) -> proc_macro2::TokenStream {
    quote! {
        /// Converts a `pg_query` node to a `SyntaxKind`
        pub fn new_from_pg_query_node(node: &NodeEnum) -> Self {
            match node {
                #(NodeEnum::#node_identifiers(_) => SyntaxKind::#node_identifiers),*
            }
        }
    }
}
| 122 | + |
/// Generates `SyntaxKind::new_from_pg_query_token`, which maps a scanner
/// token's numeric `token` field to the matching `SyntaxKind` variant.
///
/// The two input slices must be parallel: literal i is matched against
/// identifier i in the generated `match`. Token values with no match panic
/// at runtime in the generated code ("Unknown token").
fn new_from_pg_query_token_fn(
    token_identifiers: &[Ident],
    token_value_literals: &[Literal],
) -> proc_macro2::TokenStream {
    quote! {
        /// Converts a `pg_query` token to a `SyntaxKind`
        pub fn new_from_pg_query_token(token: &ScanToken) -> Self {
            match token.token {
                #(#token_value_literals => SyntaxKind::#token_identifiers),*,
                _ => panic!("Unknown token"),
            }
        }
    }
}
0 commit comments