From 11a75ceac6e23989675c050aa94b40f4d955e2b2 Mon Sep 17 00:00:00 2001 From: psteinroe Date: Mon, 26 May 2025 09:04:19 +0200 Subject: [PATCH 01/30] feat: workspace support --- Cargo.lock | 14 ++ Cargo.toml | 2 + crates/pgt_cli/src/commands/mod.rs | 2 +- crates/pgt_configuration/Cargo.toml | 1 + crates/pgt_configuration/src/diagnostics.rs | 53 ++++++ crates/pgt_configuration/src/lib.rs | 6 + crates/pgt_diagnostics/Cargo.toml | 1 + crates/pgt_diagnostics/src/adapters.rs | 25 +++ crates/pgt_fs/Cargo.toml | 1 + crates/pgt_fs/src/fs.rs | 15 ++ crates/pgt_fs/src/fs/memory.rs | 9 + crates/pgt_fs/src/fs/os.rs | 21 +++ crates/pgt_lsp/src/session.rs | 2 +- crates/pgt_workspace/Cargo.toml | 1 + crates/pgt_workspace/src/configuration.rs | 175 ++++++++++++++++++- crates/pgt_workspace/src/diagnostics.rs | 7 + crates/pgt_workspace/src/lib.rs | 1 + crates/pgt_workspace/src/settings.rs | 181 ++++++++++++++------ crates/pgt_workspace/src/workspace.rs | 63 +++++++ justfile | 8 +- 20 files changed, 529 insertions(+), 59 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index 4771c8a1..875d4d9f 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -2599,6 +2599,7 @@ dependencies = [ "biome_deserialize_macros 0.6.0", "bpaf", "indexmap 2.7.0", + "oxc_resolver", "pgt_analyse", "pgt_analyser", "pgt_console", @@ -2631,6 +2632,7 @@ dependencies = [ "backtrace", "bpaf", "enumflags2", + "oxc_resolver", "pgt_console", "pgt_diagnostics_categories", "pgt_diagnostics_macros", @@ -2676,6 +2678,7 @@ dependencies = [ "crossbeam", "directories", "enumflags2", + "oxc_resolver", "parking_lot", "pgt_diagnostics", "rayon", @@ -2912,6 +2915,7 @@ dependencies = [ "schemars", "serde", "serde_json", + "slotmap", "sqlx", "strum", "tempfile", @@ -3701,6 +3705,16 @@ dependencies = [ "autocfg", ] +[[package]] +name = "slotmap" +version = "1.0.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dbff4acf519f630b3a3ddcfaea6c06b42174d9a44bc70c620e9ed1649d58b82a" +dependencies = [ + "serde", + 
"version_check", +] + [[package]] name = "smallvec" version = "1.13.2" diff --git a/Cargo.toml b/Cargo.toml index aaaa9035..fe00d7ca 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -28,6 +28,7 @@ enumflags2 = "0.7.11" ignore = "0.4.23" indexmap = { version = "2.6.0", features = ["serde"] } insta = "1.31.0" +oxc_resolver = "1.12.0" pg_query = "6.1.0" proc-macro2 = "1.0.66" quote = "1.0.33" @@ -38,6 +39,7 @@ schemars = { version = "0.8.22", features = ["indexmap2", "small serde = "1.0.195" serde_json = "1.0.114" similar = "2.6.0" +slotmap = "1.0.7" smallvec = { version = "1.13.2", features = ["union", "const_new", "serde"] } strum = { version = "0.27.1", features = ["derive"] } # this will use tokio if available, otherwise async-std diff --git a/crates/pgt_cli/src/commands/mod.rs b/crates/pgt_cli/src/commands/mod.rs index ebd16e3d..90ac4153 100644 --- a/crates/pgt_cli/src/commands/mod.rs +++ b/crates/pgt_cli/src/commands/mod.rs @@ -9,8 +9,8 @@ use bpaf::Bpaf; use pgt_configuration::{PartialConfiguration, partial_configuration}; use pgt_console::Console; use pgt_fs::FileSystem; +use pgt_workspace::PartialConfigurationExt; use pgt_workspace::configuration::{LoadedConfiguration, load_configuration}; -use pgt_workspace::settings::PartialConfigurationExt; use pgt_workspace::workspace::UpdateSettingsParams; use pgt_workspace::{DynRef, Workspace, WorkspaceError}; use std::ffi::OsString; diff --git a/crates/pgt_configuration/Cargo.toml b/crates/pgt_configuration/Cargo.toml index 61da458b..3bd685fa 100644 --- a/crates/pgt_configuration/Cargo.toml +++ b/crates/pgt_configuration/Cargo.toml @@ -16,6 +16,7 @@ biome_deserialize = { workspace = true, features = ["schema"] } biome_deserialize_macros = { workspace = true } bpaf = { workspace = true } indexmap = { workspace = true } +oxc_resolver = { workspace = true } pgt_analyse = { workspace = true } pgt_analyser = { workspace = true } pgt_console = { workspace = true } diff --git a/crates/pgt_configuration/src/diagnostics.rs 
b/crates/pgt_configuration/src/diagnostics.rs index dc835ed7..79fd7714 100644 --- a/crates/pgt_configuration/src/diagnostics.rs +++ b/crates/pgt_configuration/src/diagnostics.rs @@ -1,5 +1,7 @@ use pgt_console::fmt::Display; use pgt_console::{MarkupBuf, markup}; +use pgt_diagnostics::adapters::ResolveError; + use pgt_diagnostics::{Advices, Diagnostic, Error, LogCategory, MessageAndDescription, Visit}; use serde::{Deserialize, Serialize}; use std::fmt::{Debug, Formatter}; @@ -21,6 +23,12 @@ pub enum ConfigurationDiagnostic { /// Thrown when the pattern inside the `ignore` field errors InvalidIgnorePattern(InvalidIgnorePattern), + + /// Thrown when there's something wrong with the files specified inside `"extends"` + CantLoadExtendFile(CantLoadExtendFile), + + /// Thrown when a configuration file can't be resolved from `node_modules` + CantResolve(CantResolve), } impl ConfigurationDiagnostic { @@ -72,6 +80,18 @@ impl ConfigurationDiagnostic { message: MessageAndDescription::from(markup! {{message}}.to_owned()), }) } + + pub fn cant_resolve(path: impl Display, source: oxc_resolver::ResolveError) -> Self { + Self::CantResolve(CantResolve { + message: MessageAndDescription::from( + markup! { + "Failed to resolve the configuration from "{{path}} + } + .to_owned(), + ), + source: Some(Error::from(ResolveError::from(source))), + }) + } } impl Debug for ConfigurationDiagnostic { @@ -168,3 +188,36 @@ pub struct CantResolve { #[source] source: Option, } + +#[derive(Debug, Serialize, Deserialize, Diagnostic)] +#[diagnostic( + category = "configuration", + severity = Error, +)] +pub struct CantLoadExtendFile { + #[location(resource)] + file_path: String, + #[message] + #[description] + message: MessageAndDescription, + + #[verbose_advice] + verbose_advice: ConfigurationAdvices, +} + +impl CantLoadExtendFile { + pub fn new(file_path: impl Into, message: impl Display) -> Self { + Self { + file_path: file_path.into(), + message: MessageAndDescription::from(markup! 
{{message}}.to_owned()), + verbose_advice: ConfigurationAdvices::default(), + } + } + + pub fn with_verbose_advice(mut self, messsage: impl Display) -> Self { + self.verbose_advice + .messages + .push(markup! {{messsage}}.to_owned()); + self + } +} diff --git a/crates/pgt_configuration/src/lib.rs b/crates/pgt_configuration/src/lib.rs index fcf0b5c6..b862dce4 100644 --- a/crates/pgt_configuration/src/lib.rs +++ b/crates/pgt_configuration/src/lib.rs @@ -22,6 +22,7 @@ pub use analyser::{ RulePlainConfiguration, RuleSelector, RuleWithFixOptions, RuleWithOptions, Rules, partial_linter_configuration, }; +use biome_deserialize::StringSet; use biome_deserialize_macros::{Merge, Partial}; use bpaf::Bpaf; use database::{ @@ -50,6 +51,10 @@ pub struct Configuration { #[partial(bpaf(hide))] pub schema: String, + /// A list of paths to other JSON files, used to extends the current configuration. + #[partial(bpaf(hide))] + pub extends: StringSet, + /// The configuration of the VCS integration #[partial(type, bpaf(external(partial_vcs_configuration), optional, hide_usage))] pub vcs: VcsConfiguration, @@ -85,6 +90,7 @@ impl PartialConfiguration { pub fn init() -> Self { Self { schema: Some(format!("https://pgtools.dev/schemas/{VERSION}/schema.json")), + extends: Some(StringSet::default()), files: Some(PartialFilesConfiguration { ignore: Some(Default::default()), ..Default::default() diff --git a/crates/pgt_diagnostics/Cargo.toml b/crates/pgt_diagnostics/Cargo.toml index 190b25f0..06c6f8dc 100644 --- a/crates/pgt_diagnostics/Cargo.toml +++ b/crates/pgt_diagnostics/Cargo.toml @@ -15,6 +15,7 @@ version = "0.0.0" backtrace = "0.3.74" bpaf = { workspace = true } enumflags2 = { workspace = true } +oxc_resolver = { workspace = true } pgt_console = { workspace = true, features = ["serde"] } pgt_diagnostics_categories = { workspace = true, features = ["serde"] } pgt_diagnostics_macros = { workspace = true } diff --git a/crates/pgt_diagnostics/src/adapters.rs 
b/crates/pgt_diagnostics/src/adapters.rs index ca627d3b..5c3dcdd5 100644 --- a/crates/pgt_diagnostics/src/adapters.rs +++ b/crates/pgt_diagnostics/src/adapters.rs @@ -134,3 +134,28 @@ impl Diagnostic for SerdeJsonError { fmt.write_markup(markup!({ AsConsoleDisplay(&self.error) })) } } + +#[derive(Debug)] +pub struct ResolveError { + error: oxc_resolver::ResolveError, +} + +impl From for ResolveError { + fn from(error: oxc_resolver::ResolveError) -> Self { + Self { error } + } +} + +impl Diagnostic for ResolveError { + fn category(&self) -> Option<&'static Category> { + Some(category!("internalError/io")) + } + + fn description(&self, fmt: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(fmt, "{}", self.error) + } + + fn message(&self, fmt: &mut fmt::Formatter<'_>) -> io::Result<()> { + fmt.write_markup(markup!({ AsConsoleDisplay(&self.error) })) + } +} diff --git a/crates/pgt_fs/Cargo.toml b/crates/pgt_fs/Cargo.toml index 1e4a7b4f..40478934 100644 --- a/crates/pgt_fs/Cargo.toml +++ b/crates/pgt_fs/Cargo.toml @@ -15,6 +15,7 @@ version = "0.0.0" crossbeam = { workspace = true } directories = "5.0.1" enumflags2 = { workspace = true } +oxc_resolver = { workspace = true } parking_lot = { version = "0.12.3", features = ["arc_lock"] } pgt_diagnostics = { workspace = true } rayon = { workspace = true } diff --git a/crates/pgt_fs/src/fs.rs b/crates/pgt_fs/src/fs.rs index b73aef6e..2bfd2e51 100644 --- a/crates/pgt_fs/src/fs.rs +++ b/crates/pgt_fs/src/fs.rs @@ -1,6 +1,7 @@ use crate::{PathInterner, PgTPath}; pub use memory::{ErrorEntry, MemoryFileSystem}; pub use os::OsFileSystem; +use oxc_resolver::{Resolution, ResolveError}; use pgt_diagnostics::{Advices, Diagnostic, LogCategory, Visit, console}; use pgt_diagnostics::{Error, Severity}; use serde::{Deserialize, Serialize}; @@ -164,6 +165,12 @@ pub trait FileSystem: Send + Sync + RefUnwindSafe { fn get_changed_files(&self, base: &str) -> io::Result>; fn get_staged_files(&self) -> io::Result>; + + fn 
resolve_configuration( + &self, + specifier: &str, + path: &Path, + ) -> Result; } /// Result of the auto search @@ -355,6 +362,14 @@ where fn get_staged_files(&self) -> io::Result> { T::get_staged_files(self) } + + fn resolve_configuration( + &self, + specifier: &str, + path: &Path, + ) -> Result { + T::resolve_configuration(self, specifier, path) + } } #[derive(Debug, Diagnostic, Deserialize, Serialize)] diff --git a/crates/pgt_fs/src/fs/memory.rs b/crates/pgt_fs/src/fs/memory.rs index baffe0ab..59e9cbcb 100644 --- a/crates/pgt_fs/src/fs/memory.rs +++ b/crates/pgt_fs/src/fs/memory.rs @@ -1,3 +1,4 @@ +use oxc_resolver::{Resolution, ResolveError}; use rustc_hash::FxHashMap; use std::collections::hash_map::{Entry, IntoIter}; use std::io; @@ -227,6 +228,14 @@ impl FileSystem for MemoryFileSystem { Ok(cb()) } + + fn resolve_configuration( + &self, + _specifier: &str, + _path: &Path, + ) -> Result { + todo!() + } } struct MemoryFile { diff --git a/crates/pgt_fs/src/fs/os.rs b/crates/pgt_fs/src/fs/os.rs index a2e40695..4b85d5c5 100644 --- a/crates/pgt_fs/src/fs/os.rs +++ b/crates/pgt_fs/src/fs/os.rs @@ -5,9 +5,11 @@ use crate::{ FileSystem, PgTPath, fs::{TraversalContext, TraversalScope}, }; +use oxc_resolver::{Resolution, ResolveError, ResolveOptions, Resolver}; use pgt_diagnostics::{DiagnosticExt, Error, Severity, adapters::IoError}; use rayon::{Scope, scope}; use std::fs::{DirEntry, FileType}; +use std::panic::AssertUnwindSafe; use std::process::Command; use std::{ env, fs, @@ -21,12 +23,18 @@ const MAX_SYMLINK_DEPTH: u8 = 3; /// Implementation of [FileSystem] that directly calls through to the underlying OS pub struct OsFileSystem { pub working_directory: Option, + pub configuration_resolver: AssertUnwindSafe, } impl OsFileSystem { pub fn new(working_directory: PathBuf) -> Self { Self { working_directory: Some(working_directory), + configuration_resolver: AssertUnwindSafe(Resolver::new(ResolveOptions { + condition_names: vec!["node".to_string(), 
"import".to_string()], + extensions: vec![".json".to_string(), ".jsonc".to_string()], + ..ResolveOptions::default() + })), } } } @@ -35,6 +43,11 @@ impl Default for OsFileSystem { fn default() -> Self { Self { working_directory: env::current_dir().ok(), + configuration_resolver: AssertUnwindSafe(Resolver::new(ResolveOptions { + condition_names: vec!["node".to_string(), "import".to_string()], + extensions: vec![".json".to_string(), ".jsonc".to_string()], + ..ResolveOptions::default() + })), } } } @@ -116,6 +129,14 @@ impl FileSystem for OsFileSystem { .map(|l| l.to_string()) .collect()) } + + fn resolve_configuration( + &self, + specifier: &str, + path: &Path, + ) -> Result { + self.configuration_resolver.resolve(path, specifier) + } } struct OsFile { diff --git a/crates/pgt_lsp/src/session.rs b/crates/pgt_lsp/src/session.rs index 7ccf2bab..4ab0abcf 100644 --- a/crates/pgt_lsp/src/session.rs +++ b/crates/pgt_lsp/src/session.rs @@ -10,10 +10,10 @@ use pgt_analyse::RuleCategoriesBuilder; use pgt_configuration::{ConfigurationPathHint, PartialConfiguration}; use pgt_diagnostics::{DiagnosticExt, Error}; use pgt_fs::{FileSystem, PgTPath}; +use pgt_workspace::PartialConfigurationExt; use pgt_workspace::Workspace; use pgt_workspace::configuration::{LoadedConfiguration, load_configuration}; use pgt_workspace::features; -use pgt_workspace::settings::PartialConfigurationExt; use pgt_workspace::workspace::UpdateSettingsParams; use pgt_workspace::{DynRef, WorkspaceError}; use rustc_hash::FxHashMap; diff --git a/crates/pgt_workspace/Cargo.toml b/crates/pgt_workspace/Cargo.toml index 5f598b2d..bfa413e3 100644 --- a/crates/pgt_workspace/Cargo.toml +++ b/crates/pgt_workspace/Cargo.toml @@ -35,6 +35,7 @@ rustc-hash = { workspace = true } schemars = { workspace = true, optional = true } serde = { workspace = true, features = ["derive"] } serde_json = { workspace = true, features = ["raw_value"] } +slotmap = { workspace = true, features = ["serde"] } sqlx.workspace = true strum = { 
workspace = true } tokio = { workspace = true, features = ["rt", "rt-multi-thread"] } diff --git a/crates/pgt_workspace/src/configuration.rs b/crates/pgt_workspace/src/configuration.rs index 88c04eec..55f2b5f8 100644 --- a/crates/pgt_workspace/src/configuration.rs +++ b/crates/pgt_workspace/src/configuration.rs @@ -1,14 +1,17 @@ use std::{ + ffi::OsStr, io::ErrorKind, ops::Deref, path::{Path, PathBuf}, }; +use biome_deserialize::Merge; use pgt_analyse::AnalyserRules; use pgt_configuration::{ ConfigurationDiagnostic, ConfigurationPathHint, ConfigurationPayload, PartialConfiguration, - VERSION, push_to_analyser_rules, + VERSION, diagnostics::CantLoadExtendFile, push_to_analyser_rules, }; +use pgt_console::markup; use pgt_fs::{AutoSearchResult, ConfigName, FileSystem, OpenOptions}; use crate::{DynRef, WorkspaceError, settings::Settings}; @@ -265,6 +268,176 @@ pub fn strip_jsonc_comments(jsonc_input: &str) -> String { json_output } +pub trait PartialConfigurationExt { + fn apply_extends( + &mut self, + fs: &DynRef<'_, dyn FileSystem>, + file_path: &Path, + external_resolution_base_path: &Path, + ) -> Result<(), WorkspaceError>; + + fn deserialize_extends( + &mut self, + fs: &DynRef<'_, dyn FileSystem>, + relative_resolution_base_path: &Path, + external_resolution_base_path: &Path, + ) -> Result, WorkspaceError>; + + fn retrieve_gitignore_matches( + &self, + file_system: &DynRef<'_, dyn FileSystem>, + vcs_base_path: Option<&Path>, + ) -> Result<(Option, Vec), WorkspaceError>; +} + +impl PartialConfigurationExt for PartialConfiguration { + /// Mutates the configuration so that any fields that have not been configured explicitly are + /// filled in with their values from configs listed in the `extends` field. + /// + /// The `extends` configs are applied from left to right. + /// + /// If a configuration can't be resolved from the file system, the operation will fail. 
+ fn apply_extends( + &mut self, + fs: &DynRef<'_, dyn FileSystem>, + file_path: &Path, + external_resolution_base_path: &Path, + ) -> Result<(), WorkspaceError> { + let configurations = self.deserialize_extends( + fs, + file_path.parent().expect("file path should have a parent"), + external_resolution_base_path, + )?; + + let extended_configuration = configurations.into_iter().reduce( + |mut previous_configuration, current_configuration| { + previous_configuration.merge_with(current_configuration); + previous_configuration + }, + ); + if let Some(mut extended_configuration) = extended_configuration { + // We swap them to avoid having to clone `self.configuration` to merge it. + std::mem::swap(self, &mut extended_configuration); + self.merge_with(extended_configuration) + } + + Ok(()) + } + + /// It attempts to deserialize all the configuration files that were specified in the `extends` property + fn deserialize_extends( + &mut self, + fs: &DynRef<'_, dyn FileSystem>, + relative_resolution_base_path: &Path, + external_resolution_base_path: &Path, + ) -> Result, WorkspaceError> { + let Some(extends) = &self.extends else { + return Ok(Vec::new()); + }; + + let mut deserialized_configurations = vec![]; + for extend_entry in extends.iter() { + let extend_entry_as_path = Path::new(extend_entry); + + let extend_configuration_file_path = if extend_entry_as_path.starts_with(".") + || matches!( + extend_entry_as_path + .extension() + .map(OsStr::as_encoded_bytes), + Some(b"json" | b"jsonc") + ) { + relative_resolution_base_path.join(extend_entry) + } else { + fs.resolve_configuration(extend_entry.as_str(), external_resolution_base_path) + .map_err(|error| { + ConfigurationDiagnostic::cant_resolve( + external_resolution_base_path.display().to_string(), + error, + ) + })? 
+ .into_path_buf() + }; + + let mut file = fs + .open_with_options( + extend_configuration_file_path.as_path(), + OpenOptions::default().read(true), + ) + .map_err(|err| { + CantLoadExtendFile::new( + extend_configuration_file_path.display().to_string(), + err.to_string(), + ) + .with_verbose_advice(markup! { + "Biome tried to load the configuration file \""{ + extend_configuration_file_path.display().to_string() + }"\" in \"extends\" using \""{ + external_resolution_base_path.display().to_string() + }"\" as the base path." + }) + })?; + + let mut content = String::new(); + file.read_to_string(&mut content).map_err(|err| { + CantLoadExtendFile::new(extend_configuration_file_path.display().to_string(), err.to_string()).with_verbose_advice( + markup!{ + "It's possible that the file was created with a different user/group. Make sure you have the rights to read the file." + } + ) + + })?; + + let deserialized = serde_json::from_str::(&content) + .map_err(ConfigurationDiagnostic::new_deserialization_error)?; + deserialized_configurations.push(deserialized) + } + Ok(deserialized_configurations) + } + + /// This function checks if the VCS integration is enabled, and if so, it will attempts to resolve the + /// VCS root directory and the `.gitignore` file. 
+ /// + /// ## Returns + /// + /// A tuple with VCS root folder and the contents of the `.gitignore` file + fn retrieve_gitignore_matches( + &self, + file_system: &DynRef<'_, dyn FileSystem>, + vcs_base_path: Option<&Path>, + ) -> Result<(Option, Vec), WorkspaceError> { + let Some(vcs) = &self.vcs else { + return Ok((None, vec![])); + }; + if vcs.is_enabled() { + let vcs_base_path = match (vcs_base_path, &vcs.root) { + (Some(vcs_base_path), Some(root)) => vcs_base_path.join(root), + (None, Some(root)) => PathBuf::from(root), + (Some(vcs_base_path), None) => PathBuf::from(vcs_base_path), + (None, None) => return Err(WorkspaceError::vcs_disabled()), + }; + if let Some(client_kind) = &vcs.client_kind { + if !vcs.ignore_file_disabled() { + let result = file_system + .auto_search(&vcs_base_path, &[client_kind.ignore_file()], false) + .map_err(WorkspaceError::from)?; + + if let Some(result) = result { + return Ok(( + result.file_path.parent().map(PathBuf::from), + result + .content + .lines() + .map(String::from) + .collect::>(), + )); + } + } + } + } + Ok((None, vec![])) + } +} + #[cfg(test)] mod tests { use super::*; diff --git a/crates/pgt_workspace/src/diagnostics.rs b/crates/pgt_workspace/src/diagnostics.rs index 9ba02a1a..492fb0de 100644 --- a/crates/pgt_workspace/src/diagnostics.rs +++ b/crates/pgt_workspace/src/diagnostics.rs @@ -1,4 +1,5 @@ use pgt_configuration::ConfigurationDiagnostic; +use pgt_configuration::diagnostics::CantLoadExtendFile; use pgt_console::fmt::Bytes; use pgt_console::markup; use pgt_diagnostics::{ @@ -354,3 +355,9 @@ impl Diagnostic for FileTooLarge { ) } } + +impl From for WorkspaceError { + fn from(value: CantLoadExtendFile) -> Self { + WorkspaceError::Configuration(ConfigurationDiagnostic::CantLoadExtendFile(value).into()) + } +} diff --git a/crates/pgt_workspace/src/lib.rs b/crates/pgt_workspace/src/lib.rs index 99fe063f..df8b0ba7 100644 --- a/crates/pgt_workspace/src/lib.rs +++ b/crates/pgt_workspace/src/lib.rs @@ -14,6 +14,7 @@ pub 
mod workspace; #[cfg(feature = "schema")] pub mod workspace_types; +pub use crate::configuration::PartialConfigurationExt; pub use crate::diagnostics::{TransportError, WorkspaceError}; pub use crate::workspace::Workspace; diff --git a/crates/pgt_workspace/src/settings.rs b/crates/pgt_workspace/src/settings.rs index f9275aa9..9a679b1c 100644 --- a/crates/pgt_workspace/src/settings.rs +++ b/crates/pgt_workspace/src/settings.rs @@ -8,6 +8,7 @@ use std::{ sync::{RwLock, RwLockReadGuard, RwLockWriteGuard}, time::Duration, }; +use tracing::trace; use ignore::gitignore::{Gitignore, GitignoreBuilder}; use pgt_configuration::{ @@ -17,9 +18,132 @@ use pgt_configuration::{ files::FilesConfiguration, migrations::{MigrationsConfiguration, PartialMigrationsConfiguration}, }; -use pgt_fs::FileSystem; +use pgt_fs::{FileSystem, PgTPath}; -use crate::{DynRef, WorkspaceError, matcher::Matcher}; +use crate::{ + DynRef, WorkspaceError, + matcher::Matcher, + workspace::{ProjectKey, WorkspaceData}, +}; + +#[derive(Debug, Default)] +/// The information tracked for each project +pub struct ProjectData { + /// The root path of the project. This path should be **absolute**. + path: PgTPath, + /// The settings of the project, usually inferred from the configuration file e.g. `biome.json`. + settings: Settings, +} + +#[derive(Debug, Default)] +/// Type that manages different projects inside the workspace. +pub struct WorkspaceSettings { + /// The data of the projects + data: WorkspaceData, + /// The ID of the current project. 
+ current_project: ProjectKey, +} + +impl WorkspaceSettings { + pub fn get_current_project_key(&self) -> ProjectKey { + self.current_project + } + + pub fn get_current_project_data_mut(&mut self) -> &mut ProjectData { + self.data + .get_mut(self.current_project) + .expect("You must have at least one workspace.") + } + + /// Retrieves the settings of the current workspace folder + pub fn get_current_settings(&self) -> Option<&Settings> { + trace!("Current key {:?}", self.current_project); + let data = self.data.get(self.current_project); + if let Some(data) = data { + Some(&data.settings) + } else { + None + } + } + + /// Retrieves a mutable reference of the settings of the current project + pub fn get_current_settings_mut(&mut self) -> &mut Settings { + &mut self + .data + .get_mut(self.current_project) + .expect("You must have at least one workspace.") + .settings + } + + /// Register the current project using its unique key + pub fn register_current_project(&mut self, key: ProjectKey) { + self.current_project = key; + } + + /// Insert a new project using its folder. Use [WorkspaceSettings::get_current_settings_mut] to retrieve + /// a mutable reference to its [Settings] and manipulate them. + pub fn insert_project(&mut self, workspace_path: impl Into) -> ProjectKey { + let path = PgTPath::new(workspace_path.into()); + trace!("Insert workspace folder: {:?}", path); + self.data.insert(ProjectData { + path, + settings: Settings::default(), + }) + } + + /// Remove a project using its folder. + pub fn remove_project(&mut self, workspace_path: &Path) { + let keys_to_remove = { + let mut data = vec![]; + let iter = self.data.iter(); + + for (key, path_to_settings) in iter { + if path_to_settings.path.as_path() == workspace_path { + data.push(key) + } + } + + data + }; + + for key in keys_to_remove { + self.data.remove(key) + } + } + + /// Checks if the current path belongs to a registered project. 
+ /// + /// If there's a match, and the match **isn't** the current project, it returns the new key. + pub fn path_belongs_to_current_workspace(&self, path: &PgTPath) -> Option { + if self.data.is_empty() { + return None; + } + trace!("Current key: {:?}", self.current_project); + let iter = self.data.iter(); + for (key, path_to_settings) in iter { + trace!( + "Workspace path {:?}, file path {:?}", + path_to_settings.path, path + ); + trace!("Iter key: {:?}", key); + if key == self.current_project { + continue; + } + if path.strip_prefix(path_to_settings.path.as_path()).is_ok() { + trace!("Update workspace to {:?}", key); + return Some(key); + } + } + None + } + + /// Checks if the current path belongs to a registered project. + /// + /// If there's a match, and the match **isn't** the current project, the function will mark the match as the current project. + pub fn set_current_project(&mut self, new_key: ProjectKey) { + self.current_project = new_key; + } +} /// Global settings for the entire workspace #[derive(Debug, Default)] @@ -397,59 +521,6 @@ impl Default for FilesSettings { } } -pub trait PartialConfigurationExt { - fn retrieve_gitignore_matches( - &self, - file_system: &DynRef<'_, dyn FileSystem>, - vcs_base_path: Option<&Path>, - ) -> Result<(Option, Vec), WorkspaceError>; -} - -impl PartialConfigurationExt for PartialConfiguration { - /// This function checks if the VCS integration is enabled, and if so, it will attempts to resolve the - /// VCS root directory and the `.gitignore` file. 
- /// - /// ## Returns - /// - /// A tuple with VCS root folder and the contents of the `.gitignore` file - fn retrieve_gitignore_matches( - &self, - file_system: &DynRef<'_, dyn FileSystem>, - vcs_base_path: Option<&Path>, - ) -> Result<(Option, Vec), WorkspaceError> { - let Some(vcs) = &self.vcs else { - return Ok((None, vec![])); - }; - if vcs.is_enabled() { - let vcs_base_path = match (vcs_base_path, &vcs.root) { - (Some(vcs_base_path), Some(root)) => vcs_base_path.join(root), - (None, Some(root)) => PathBuf::from(root), - (Some(vcs_base_path), None) => PathBuf::from(vcs_base_path), - (None, None) => return Err(WorkspaceError::vcs_disabled()), - }; - if let Some(client_kind) = &vcs.client_kind { - if !vcs.ignore_file_disabled() { - let result = file_system - .auto_search(&vcs_base_path, &[client_kind.ignore_file()], false) - .map_err(WorkspaceError::from)?; - - if let Some(result) = result { - return Ok(( - result.file_path.parent().map(PathBuf::from), - result - .content - .lines() - .map(String::from) - .collect::>(), - )); - } - } - } - } - Ok((None, vec![])) - } -} - #[cfg(test)] mod tests { use biome_deserialize::StringSet; diff --git a/crates/pgt_workspace/src/workspace.rs b/crates/pgt_workspace/src/workspace.rs index 873dd83e..4b5187e8 100644 --- a/crates/pgt_workspace/src/workspace.rs +++ b/crates/pgt_workspace/src/workspace.rs @@ -6,6 +6,7 @@ use pgt_configuration::{PartialConfiguration, RuleSelector}; use pgt_fs::PgTPath; use pgt_text_size::TextRange; use serde::{Deserialize, Serialize}; +use slotmap::{DenseSlotMap, new_key_type}; use crate::{ WorkspaceError, @@ -222,3 +223,65 @@ impl Drop for FileGuard<'_, W> { .ok(); } } + +new_key_type! 
{ + pub struct ProjectKey; +} + +#[derive(Debug, Default)] +pub struct WorkspaceData { + /// [DenseSlotMap] is the slowest type in insertion/removal, but the fastest in iteration + /// + /// Users wouldn't change workspace folders very often, + paths: DenseSlotMap, +} + +impl WorkspaceData { + /// Inserts an item + pub fn insert(&mut self, item: V) -> ProjectKey { + self.paths.insert(item) + } + + /// Removes an item + pub fn remove(&mut self, key: ProjectKey) { + self.paths.remove(key); + } + + /// Get a reference of the value + pub fn get(&self, key: ProjectKey) -> Option<&V> { + self.paths.get(key) + } + + /// Get a mutable reference of the value + pub fn get_mut(&mut self, key: ProjectKey) -> Option<&mut V> { + self.paths.get_mut(key) + } + + pub fn is_empty(&self) -> bool { + self.paths.is_empty() + } + + pub fn iter(&self) -> WorkspaceDataIterator<'_, V> { + WorkspaceDataIterator::new(self) + } +} + +pub struct WorkspaceDataIterator<'a, V> { + iterator: slotmap::dense::Iter<'a, ProjectKey, V>, +} + +impl<'a, V> WorkspaceDataIterator<'a, V> { + fn new(data: &'a WorkspaceData) -> Self { + Self { + iterator: data.paths.iter(), + } + } +} + +impl<'a, V> Iterator for WorkspaceDataIterator<'a, V> { + type Item = (ProjectKey, &'a V); + + fn next(&mut self) -> Option { + self.iterator.next() + } +} diff --git a/justfile b/justfile index a55207ae..bf982739 100644 --- a/justfile +++ b/justfile @@ -132,10 +132,16 @@ merge-main: git fetch origin main:main git merge main +quick-create branch commit: + git checkout -b {{branch}} + git add -A + git commit -m "{{commit}}" + git push + gh pr create --fill # Make sure to set your PGT_LOG_PATH in your shell profile. # You can use the PGT_LOG_LEVEL to set your log level. 
# We recommend to install `bunyan` (npm i -g bunyan) and pipe the output through there for color-coding: # just show-logs | bunyan show-logs: - tail -f $(ls $PGT_LOG_PATH/server.log.* | sort -t- -k2,2 -k3,3 -k4,4 | tail -n 1) \ No newline at end of file + tail -f $(ls $PGT_LOG_PATH/server.log.* | sort -t- -k2,2 -k3,3 -k4,4 | tail -n 1) From 3c34a95c25ae80081ae8de4c7c56b51c7421cc2f Mon Sep 17 00:00:00 2001 From: psteinroe Date: Tue, 27 May 2025 09:49:01 +0200 Subject: [PATCH 02/30] progress --- crates/pgt_workspace/src/settings.rs | 64 +++++- crates/pgt_workspace/src/workspace.rs | 33 ++- crates/pgt_workspace/src/workspace/client.rs | 16 +- crates/pgt_workspace/src/workspace/server.rs | 216 ++++++++++++------ .../workspace/server/connection_manager.rs | 114 +++++++++ .../src/workspace/server/db_connection.rs | 40 ---- 6 files changed, 362 insertions(+), 121 deletions(-) create mode 100644 crates/pgt_workspace/src/workspace/server/connection_manager.rs delete mode 100644 crates/pgt_workspace/src/workspace/server/db_connection.rs diff --git a/crates/pgt_workspace/src/settings.rs b/crates/pgt_workspace/src/settings.rs index 9a679b1c..7f361560 100644 --- a/crates/pgt_workspace/src/settings.rs +++ b/crates/pgt_workspace/src/settings.rs @@ -12,18 +12,18 @@ use tracing::trace; use ignore::gitignore::{Gitignore, GitignoreBuilder}; use pgt_configuration::{ - ConfigurationDiagnostic, LinterConfiguration, PartialConfiguration, database::PartialDatabaseConfiguration, diagnostics::InvalidIgnorePattern, files::FilesConfiguration, migrations::{MigrationsConfiguration, PartialMigrationsConfiguration}, + ConfigurationDiagnostic, LinterConfiguration, PartialConfiguration, }; -use pgt_fs::{FileSystem, PgTPath}; +use pgt_fs::PgTPath; use crate::{ - DynRef, WorkspaceError, matcher::Matcher, workspace::{ProjectKey, WorkspaceData}, + WorkspaceError, }; #[derive(Debug, Default)] @@ -49,6 +49,16 @@ impl WorkspaceSettings { self.current_project } + pub fn get_current_project_path(&self) -> 
Option<&PgTPath> { + trace!("Current key {:?}", self.current_project); + let data = self.data.get(self.current_project); + if let Some(data) = data { + Some(&data.path) + } else { + None + } + } + pub fn get_current_project_data_mut(&mut self) -> &mut ProjectData { self.data .get_mut(self.current_project) @@ -123,7 +133,8 @@ impl WorkspaceSettings { for (key, path_to_settings) in iter { trace!( "Workspace path {:?}, file path {:?}", - path_to_settings.path, path + path_to_settings.path, + path ); trace!("Iter key: {:?}", key); if key == self.current_project { @@ -145,6 +156,51 @@ impl WorkspaceSettings { } } +#[derive(Debug)] +pub struct WorkspaceSettingsHandle<'a> { + inner: RwLockReadGuard<'a, WorkspaceSettings>, +} + +impl<'a> WorkspaceSettingsHandle<'a> { + pub(crate) fn new(settings: &'a RwLock) -> Self { + Self { + inner: settings.read().unwrap(), + } + } + + pub(crate) fn settings(&self) -> Option<&Settings> { + self.inner.get_current_settings() + } + + pub(crate) fn path(&self) -> Option<&PgTPath> { + self.inner.get_current_project_path() + } +} + +impl<'a> AsRef for WorkspaceSettingsHandle<'a> { + fn as_ref(&self) -> &WorkspaceSettings { + &self.inner + } +} + +pub struct WorkspaceSettingsHandleMut<'a> { + inner: RwLockWriteGuard<'a, WorkspaceSettings>, +} + +impl<'a> WorkspaceSettingsHandleMut<'a> { + pub(crate) fn new(settings: &'a RwLock) -> Self { + Self { + inner: settings.write().unwrap(), + } + } +} + +impl<'a> AsMut for WorkspaceSettingsHandleMut<'a> { + fn as_mut(&mut self) -> &mut WorkspaceSettings { + &mut self.inner + } +} + /// Global settings for the entire workspace #[derive(Debug, Default)] pub struct Settings { diff --git a/crates/pgt_workspace/src/workspace.rs b/crates/pgt_workspace/src/workspace.rs index 4b5187e8..1b5cdb25 100644 --- a/crates/pgt_workspace/src/workspace.rs +++ b/crates/pgt_workspace/src/workspace.rs @@ -6,10 +6,9 @@ use pgt_configuration::{PartialConfiguration, RuleSelector}; use pgt_fs::PgTPath; use 
pgt_text_size::TextRange; use serde::{Deserialize, Serialize}; -use slotmap::{DenseSlotMap, new_key_type}; +use slotmap::{new_key_type, DenseSlotMap}; use crate::{ - WorkspaceError, features::{ code_actions::{ CodeActionsParams, CodeActionsResult, ExecuteStatementParams, ExecuteStatementResult, @@ -17,13 +16,14 @@ use crate::{ completions::{CompletionsResult, GetCompletionsParams}, diagnostics::{PullDiagnosticsParams, PullDiagnosticsResult}, }, + WorkspaceError, }; mod client; mod server; -pub use server::StatementId; pub(crate) use server::parsed_document::*; +pub use server::StatementId; #[derive(Debug, serde::Serialize, serde::Deserialize)] #[cfg_attr(feature = "schema", derive(schemars::JsonSchema))] @@ -93,6 +93,21 @@ pub struct ServerInfo { pub version: Option, } +#[derive(Debug, serde::Serialize, serde::Deserialize)] +#[cfg_attr(feature = "schema", derive(schemars::JsonSchema))] +#[serde(rename_all = "camelCase")] +pub struct RegisterProjectFolderParams { + pub path: Option, + pub set_as_current_workspace: bool, +} + +#[derive(Debug, serde::Serialize, serde::Deserialize)] +#[cfg_attr(feature = "schema", derive(schemars::JsonSchema))] +#[serde(rename_all = "camelCase")] +pub struct UnregisterProjectFolderParams { + pub path: PgTPath, +} + pub trait Workspace: Send + Sync + RefUnwindSafe { /// Retrieves the list of diagnostics associated to a file fn pull_diagnostics( @@ -111,6 +126,18 @@ pub trait Workspace: Send + Sync + RefUnwindSafe { params: GetCompletionsParams, ) -> Result; + /// Register a possible workspace project folder. Returns the key of said project. Use this key when you want to switch to different projects. + fn register_project_folder( + &self, + params: RegisterProjectFolderParams, + ) -> Result; + + /// Unregister a workspace project folder. The settings that belong to that project are deleted. 
+ fn unregister_project_folder( + &self, + params: UnregisterProjectFolderParams, + ) -> Result<(), WorkspaceError>; + /// Update the global settings for this workspace fn update_settings(&self, params: UpdateSettingsParams) -> Result<(), WorkspaceError>; diff --git a/crates/pgt_workspace/src/workspace/client.rs b/crates/pgt_workspace/src/workspace/client.rs index d727fff6..880da08d 100644 --- a/crates/pgt_workspace/src/workspace/client.rs +++ b/crates/pgt_workspace/src/workspace/client.rs @@ -1,6 +1,6 @@ use crate::workspace::ServerInfo; use crate::{TransportError, Workspace, WorkspaceError}; -use serde::{Deserialize, Serialize, de::DeserializeOwned}; +use serde::{de::DeserializeOwned, Deserialize, Serialize}; use serde_json::json; use std::{ panic::RefUnwindSafe, @@ -103,6 +103,20 @@ where self.request("pgt/execute_statement", params) } + fn register_project_folder( + &self, + params: RegisterProjectFolderParams, + ) -> Result { + self.request("pgt/register_project_folder", params) + } + + fn unregister_project_folder( + &self, + params: UnregisterProjectFolderParams, + ) -> Result<(), WorkspaceError> { + self.request("pgt/unregister_project_folder", params) + } + fn open_file(&self, params: OpenFileParams) -> Result<(), WorkspaceError> { self.request("pgt/open_file", params) } diff --git a/crates/pgt_workspace/src/workspace/server.rs b/crates/pgt_workspace/src/workspace/server.rs index 82e79e10..f29960ef 100644 --- a/crates/pgt_workspace/src/workspace/server.rs +++ b/crates/pgt_workspace/src/workspace/server.rs @@ -1,16 +1,16 @@ use std::{ fs, panic::RefUnwindSafe, - path::Path, + path::{Path, PathBuf}, sync::{Arc, RwLock}, }; use analyser::AnalyserVisitorBuilder; use async_helper::run_async; +use connection_manager::ConnectionManager; use dashmap::DashMap; -use db_connection::DbConnection; use document::Document; -use futures::{StreamExt, stream}; +use futures::{stream, StreamExt}; use parsed_document::{ AsyncDiagnosticsMapper, CursorPositionFilter, 
DefaultMapper, ExecuteStatementMapper, ParsedDocument, SyncDiagnosticsMapper, @@ -18,30 +18,34 @@ use parsed_document::{ use pgt_analyse::{AnalyserOptions, AnalysisFilter}; use pgt_analyser::{Analyser, AnalyserConfig, AnalyserContext}; use pgt_diagnostics::{ - Diagnostic, DiagnosticExt, Error, Severity, serde::Diagnostic as SDiagnostic, + serde::Diagnostic as SDiagnostic, Diagnostic, DiagnosticExt, Error, Severity, }; use pgt_fs::{ConfigName, PgTPath}; use pgt_typecheck::{IdentifierType, TypecheckParams, TypedIdentifier}; use schema_cache_manager::SchemaCacheManager; -use sqlx::Executor; -use tracing::info; +use sqlx::{Executor, PgPool}; +use tracing::{debug, info}; use crate::{ - WorkspaceError, configuration::to_analyser_rules, features::{ code_actions::{ self, CodeAction, CodeActionKind, CodeActionsResult, CommandAction, CommandActionCategory, ExecuteStatementParams, ExecuteStatementResult, }, - completions::{CompletionsResult, GetCompletionsParams, get_statement_for_completions}, + completions::{get_statement_for_completions, CompletionsResult, GetCompletionsParams}, diagnostics::{PullDiagnosticsParams, PullDiagnosticsResult}, }, - settings::{Settings, SettingsHandle, SettingsHandleMut}, + settings::{ + Settings, SettingsHandle, SettingsHandleMut, WorkspaceSettings, WorkspaceSettingsHandle, + WorkspaceSettingsHandleMut, + }, + WorkspaceError, }; use super::{ - GetFileContentParams, IsPathIgnoredParams, OpenFileParams, ServerInfo, UpdateSettingsParams, + GetFileContentParams, IsPathIgnoredParams, OpenFileParams, ProjectKey, + RegisterProjectFolderParams, ServerInfo, UnregisterProjectFolderParams, UpdateSettingsParams, Workspace, }; @@ -51,7 +55,7 @@ mod analyser; mod annotation; mod async_helper; mod change; -mod db_connection; +mod connection_manager; pub(crate) mod document; mod migration; pub(crate) mod parsed_document; @@ -63,14 +67,15 @@ mod tree_sitter; pub(super) struct WorkspaceServer { /// global settings object for this workspace - settings: RwLock, + 
settings: RwLock, /// Stores the schema cache for this workspace + /// TODO: store this per connection schema_cache: SchemaCacheManager, parsed_documents: DashMap, - connection: RwLock, + connection: ConnectionManager, } /// The `Workspace` object is long-lived, so we want it to be able to cross @@ -92,22 +97,59 @@ impl WorkspaceServer { settings: RwLock::default(), parsed_documents: DashMap::default(), schema_cache: SchemaCacheManager::default(), - connection: RwLock::default(), + connection: ConnectionManager::default(), } } /// Provides a reference to the current settings - fn settings(&self) -> SettingsHandle { - SettingsHandle::new(&self.settings) + fn workspaces(&self) -> WorkspaceSettingsHandle { + WorkspaceSettingsHandle::new(&self.settings) + } + + fn workspaces_mut(&self) -> WorkspaceSettingsHandleMut { + WorkspaceSettingsHandleMut::new(&self.settings) + } + + fn get_current_connection(&self) -> Option { + let settings = self.workspaces(); + let settings = settings.settings()?; + self.connection.get_pool(&settings.db) + } + + /// Register a new project in the current workspace + fn register_project(&self, path: PathBuf) -> ProjectKey { + let mut workspace = self.workspaces_mut(); + let workspace_mut = workspace.as_mut(); + workspace_mut.insert_project(path.clone()) } - fn settings_mut(&self) -> SettingsHandleMut { - SettingsHandleMut::new(&self.settings) + /// Retrieves the current project path + fn get_current_project_path(&self) -> Option { + self.workspaces().path().cloned() + } + + /// Sets the current project of the current workspace + fn set_current_project(&self, project_key: ProjectKey) { + let mut workspace = self.workspaces_mut(); + let workspace_mut = workspace.as_mut(); + workspace_mut.set_current_project(project_key); + } + + /// Checks whether, if the current path belongs to the current project. + /// + /// If there's a match, and the match **isn't** the current project, it returns the new key. 
+ fn path_belongs_to_current_workspace(&self, path: &PgTPath) -> Option { + let workspaces = self.workspaces(); + workspaces.as_ref().path_belongs_to_current_workspace(path) } fn is_ignored_by_migration_config(&self, path: &Path) -> bool { - let set = self.settings(); - set.as_ref() + let settings = self.workspaces(); + let settings = settings.settings(); + let Some(settings) = settings else { + return false; + }; + settings .migrations .as_ref() .and_then(|migration_settings| { @@ -131,8 +173,12 @@ impl WorkspaceServer { /// Check whether a file is ignored in the top-level config `files.ignore`/`files.include` fn is_ignored_by_top_level_config(&self, path: &Path) -> bool { - let set = self.settings(); - let settings = set.as_ref(); + let settings = self.workspaces(); + let settings = settings.settings(); + let Some(settings) = settings else { + return false; + }; + let is_included = settings.files.included_files.is_empty() || is_dir(path) || settings.files.included_files.matches_path(path); @@ -155,6 +201,44 @@ impl WorkspaceServer { } impl Workspace for WorkspaceServer { + fn register_project_folder( + &self, + params: RegisterProjectFolderParams, + ) -> Result { + let current_project_path = self.get_current_project_path(); + debug!( + "Compare the current project with the new one {:?} {:?} {:?}", + current_project_path, + params.path.as_ref(), + current_project_path.as_deref() != params.path.as_ref() + ); + + let is_new_path = match (current_project_path.as_deref(), params.path.as_ref()) { + (Some(current_project_path), Some(params_path)) => current_project_path != params_path, + _ => true, + }; + + if is_new_path { + let path = params.path.unwrap_or_default(); + let key = self.register_project(path.clone()); + if params.set_as_current_workspace { + self.set_current_project(key); + } + Ok(key) + } else { + Ok(self.workspaces().as_ref().get_current_project_key()) + } + } + + fn unregister_project_folder( + &self, + params: UnregisterProjectFolderParams, + ) -> 
Result<(), WorkspaceError> { + let mut workspace = self.workspaces_mut(); + workspace.as_mut().remove_project(params.path.as_path()); + Ok(()) + } + /// Update the global settings for this workspace /// /// ## Panics @@ -163,23 +247,17 @@ impl Workspace for WorkspaceServer { #[tracing::instrument(level = "trace", skip(self), err)] fn update_settings(&self, params: UpdateSettingsParams) -> Result<(), WorkspaceError> { tracing::info!("Updating settings in workspace"); - - self.settings_mut().as_mut().merge_with_configuration( - params.configuration, - params.workspace_directory, - params.vcs_base_path, - params.gitignore_matches.as_slice(), - )?; - - tracing::info!("Updated settings in workspace"); - tracing::debug!("Updated settings are {:#?}", self.settings()); - - self.connection - .write() - .unwrap() - .set_conn_settings(&self.settings().as_ref().db); - - tracing::info!("Updated Db connection settings"); + let mut workspace = self.workspaces_mut(); + + workspace + .as_mut() + .get_current_settings_mut() + .merge_with_configuration( + params.configuration, + params.workspace_directory, + params.vcs_base_path, + params.gitignore_matches.as_slice(), + )?; Ok(()) } @@ -250,15 +328,13 @@ impl Workspace for WorkspaceServer { .get(¶ms.path) .ok_or(WorkspaceError::not_found())?; - let settings = self - .settings - .read() - .expect("Unable to read settings for Code Actions"); + let settings = self.workspaces(); + let settings = settings.settings(); - let disabled_reason: Option = if settings.db.allow_statement_executions { - None - } else { - Some("Statement execution not allowed against database.".into()) + let disabled_reason = match settings { + Some(settings) if settings.db.allow_statement_executions => None, + Some(_) => Some("Statement execution is disabled in the settings.".into()), + None => Some("Statement execution not allowed against database.".into()), }; let actions = parser @@ -310,15 +386,13 @@ impl Workspace for WorkspaceServer { }); }; - let conn = 
self.connection.read().unwrap(); - let pool = match conn.get_pool() { - Some(p) => p, - None => { - return Ok(ExecuteStatementResult { - message: "Not connected to database.".into(), - }); - } - }; + let pool = self.get_current_connection(); + if pool.is_none() { + return Ok(ExecuteStatementResult { + message: "No database connection available.".into(), + }); + } + let pool = pool.unwrap(); let result = run_async(async move { pool.execute(sqlx::query(&content)).await })??; @@ -334,16 +408,19 @@ impl Workspace for WorkspaceServer { &self, params: PullDiagnosticsParams, ) -> Result { - let settings = self.settings(); + let settings = self.workspaces(); + // TODO: not sure how we should handle this + // maybe fallback to default settings? or return an error? + let settings = settings.settings().expect("Settings should be initialized"); // create analyser for this run // first, collect enabled and disabled rules from the workspace settings - let (enabled_rules, disabled_rules) = AnalyserVisitorBuilder::new(settings.as_ref()) + let (enabled_rules, disabled_rules) = AnalyserVisitorBuilder::new(settings) .with_linter_rules(¶ms.only, ¶ms.skip) .finish(); // then, build a map that contains all options let options = AnalyserOptions { - rules: to_analyser_rules(settings.as_ref()), + rules: to_analyser_rules(settings), }; // next, build the analysis filter which will be used to match rules let filter = AnalysisFilter { @@ -364,12 +441,7 @@ impl Workspace for WorkspaceServer { let mut diagnostics: Vec = parser.document_diagnostics().to_vec(); - if let Some(pool) = self - .connection - .read() - .expect("DbConnection RwLock panicked") - .get_pool() - { + if let Some(pool) = self.get_current_connection() { let path_clone = params.path.clone(); let schema_cache = self.schema_cache.load(pool.clone())?; let schema_cache_arc = schema_cache.get_arc(); @@ -461,7 +533,6 @@ impl Workspace for WorkspaceServer { || d.severity(), |category| { settings - .as_ref() 
.get_severity_from_rule_code(category) .unwrap_or(Severity::Warning) }, @@ -503,13 +574,12 @@ impl Workspace for WorkspaceServer { .get(¶ms.path) .ok_or(WorkspaceError::not_found())?; - let pool = match self.connection.read().unwrap().get_pool() { - Some(pool) => pool, - None => { - tracing::debug!("No connection to database. Skipping completions."); - return Ok(CompletionsResult::default()); - } - }; + let pool = self.get_current_connection(); + if pool.is_none() { + tracing::debug!("No database connection available. Skipping completions."); + return Ok(CompletionsResult::default()); + } + let pool = pool.unwrap(); let schema_cache = self.schema_cache.load(pool)?; diff --git a/crates/pgt_workspace/src/workspace/server/connection_manager.rs b/crates/pgt_workspace/src/workspace/server/connection_manager.rs new file mode 100644 index 00000000..4a04cc5b --- /dev/null +++ b/crates/pgt_workspace/src/workspace/server/connection_manager.rs @@ -0,0 +1,114 @@ +use std::time::{Duration, Instant}; + +use dashmap::DashMap; +use sqlx::{pool::PoolOptions, postgres::PgConnectOptions, PgPool, Postgres}; + +use crate::settings::DatabaseSettings; + +/// A unique identifier for database connection settings +#[derive(Clone, PartialEq, Eq, Hash)] +struct ConnectionKey { + host: String, + port: u16, + username: String, + password: String, + database: String, +} + +impl From<&DatabaseSettings> for ConnectionKey { + fn from(settings: &DatabaseSettings) -> Self { + Self { + host: settings.host.clone(), + port: settings.port, + username: settings.username.clone(), + password: settings.password.clone(), + database: settings.database.clone(), + } + } +} + +/// Cached connection pool with last access time +struct CachedPool { + pool: PgPool, + last_accessed: Instant, + idle_timeout: Duration, +} + +#[derive(Default)] +pub struct ConnectionManager { + pools: DashMap, +} + +impl ConnectionManager { + pub fn new() -> Self { + Self { + pools: DashMap::new(), + } + } + + /// Get a connection pool 
for the given database settings. + /// If a pool already exists for these settings, it will be returned. + /// If not, a new pool will be created if connections are enabled. + pub(crate) fn get_pool(&self, settings: &DatabaseSettings) -> Option { + let key = ConnectionKey::from(settings); + + // Cleanup idle connections first + self.cleanup_idle_pools(&key); + + if !settings.enable_connection { + tracing::info!("Database connection disabled."); + return None; + } + + // If we have a cached pool, update its last_accessed time and return it + if let Some(mut cached_pool) = self.pools.get_mut(&key) { + cached_pool.last_accessed = Instant::now(); + return Some(cached_pool.pool.clone()); + } + + // Create a new pool + let config = PgConnectOptions::new() + .host(&settings.host) + .port(settings.port) + .username(&settings.username) + .password(&settings.password) + .database(&settings.database); + + let timeout = settings.conn_timeout_secs; + + let pool = PoolOptions::::new() + .acquire_timeout(timeout) + .acquire_slow_threshold(Duration::from_secs(2)) + .connect_lazy_with(config); + + let cached_pool = CachedPool { + pool: pool.clone(), + last_accessed: Instant::now(), + // TODO: add this to the db settings, for now default to one minute + idle_timeout: Duration::from_secs(60), + }; + + self.pools.insert(key, cached_pool); + + Some(pool) + } + + /// Remove pools that haven't been accessed for longer than the idle timeout + fn cleanup_idle_pools(&self, ignore_key: &ConnectionKey) { + let now = Instant::now(); + + // Use retain to keep only non-idle connections + self.pools.retain(|key, cached_pool| { + let idle_duration = now.duration_since(cached_pool.last_accessed); + if idle_duration > cached_pool.idle_timeout && key != ignore_key { + tracing::debug!( + "Removing idle database connection (idle for {:?})", + idle_duration + ); + false + } else { + true + } + }); + } +} diff --git a/crates/pgt_workspace/src/workspace/server/db_connection.rs 
b/crates/pgt_workspace/src/workspace/server/db_connection.rs deleted file mode 100644 index d002c0a2..00000000 --- a/crates/pgt_workspace/src/workspace/server/db_connection.rs +++ /dev/null @@ -1,40 +0,0 @@ -use std::time::Duration; - -use sqlx::{PgPool, Postgres, pool::PoolOptions, postgres::PgConnectOptions}; - -use crate::settings::DatabaseSettings; - -#[derive(Default)] -pub struct DbConnection { - pool: Option, -} - -impl DbConnection { - /// There might be no pool available if the user decides to skip db checks. - pub(crate) fn get_pool(&self) -> Option { - self.pool.clone() - } - - pub(crate) fn set_conn_settings(&mut self, settings: &DatabaseSettings) { - if !settings.enable_connection { - tracing::info!("Database connection disabled."); - return; - } - - let config = PgConnectOptions::new() - .host(&settings.host) - .port(settings.port) - .username(&settings.username) - .password(&settings.password) - .database(&settings.database); - - let timeout = settings.conn_timeout_secs; - - let pool = PoolOptions::::new() - .acquire_timeout(timeout) - .acquire_slow_threshold(Duration::from_secs(2)) - .connect_lazy_with(config); - - self.pool = Some(pool); - } -} From d161f189c1270fc513dd60fc3dc495e200ba3821 Mon Sep 17 00:00:00 2001 From: psteinroe Date: Wed, 28 May 2025 08:06:06 +0200 Subject: [PATCH 03/30] progress --- crates/pgt_workspace/src/settings.rs | 7 +- crates/pgt_workspace/src/workspace.rs | 6 +- crates/pgt_workspace/src/workspace/client.rs | 7 +- crates/pgt_workspace/src/workspace/server.rs | 17 ++-- .../src/workspace/server/connection_key.rs | 44 +++++++++ .../workspace/server/connection_manager.rs | 29 ++---- .../workspace/server/schema_cache_manager.rs | 89 +++---------------- 7 files changed, 78 insertions(+), 121 deletions(-) create mode 100644 crates/pgt_workspace/src/workspace/server/connection_key.rs diff --git a/crates/pgt_workspace/src/settings.rs b/crates/pgt_workspace/src/settings.rs index 7f361560..8fc0c07a 100644 --- 
a/crates/pgt_workspace/src/settings.rs +++ b/crates/pgt_workspace/src/settings.rs @@ -12,18 +12,18 @@ use tracing::trace; use ignore::gitignore::{Gitignore, GitignoreBuilder}; use pgt_configuration::{ + ConfigurationDiagnostic, LinterConfiguration, PartialConfiguration, database::PartialDatabaseConfiguration, diagnostics::InvalidIgnorePattern, files::FilesConfiguration, migrations::{MigrationsConfiguration, PartialMigrationsConfiguration}, - ConfigurationDiagnostic, LinterConfiguration, PartialConfiguration, }; use pgt_fs::PgTPath; use crate::{ + WorkspaceError, matcher::Matcher, workspace::{ProjectKey, WorkspaceData}, - WorkspaceError, }; #[derive(Debug, Default)] @@ -133,8 +133,7 @@ impl WorkspaceSettings { for (key, path_to_settings) in iter { trace!( "Workspace path {:?}, file path {:?}", - path_to_settings.path, - path + path_to_settings.path, path ); trace!("Iter key: {:?}", key); if key == self.current_project { diff --git a/crates/pgt_workspace/src/workspace.rs b/crates/pgt_workspace/src/workspace.rs index 1b5cdb25..c08c9d27 100644 --- a/crates/pgt_workspace/src/workspace.rs +++ b/crates/pgt_workspace/src/workspace.rs @@ -6,9 +6,10 @@ use pgt_configuration::{PartialConfiguration, RuleSelector}; use pgt_fs::PgTPath; use pgt_text_size::TextRange; use serde::{Deserialize, Serialize}; -use slotmap::{new_key_type, DenseSlotMap}; +use slotmap::{DenseSlotMap, new_key_type}; use crate::{ + WorkspaceError, features::{ code_actions::{ CodeActionsParams, CodeActionsResult, ExecuteStatementParams, ExecuteStatementResult, @@ -16,14 +17,13 @@ use crate::{ completions::{CompletionsResult, GetCompletionsParams}, diagnostics::{PullDiagnosticsParams, PullDiagnosticsResult}, }, - WorkspaceError, }; mod client; mod server; -pub(crate) use server::parsed_document::*; pub use server::StatementId; +pub(crate) use server::parsed_document::*; #[derive(Debug, serde::Serialize, serde::Deserialize)] #[cfg_attr(feature = "schema", derive(schemars::JsonSchema))] diff --git 
a/crates/pgt_workspace/src/workspace/client.rs b/crates/pgt_workspace/src/workspace/client.rs index 880da08d..2bd21513 100644 --- a/crates/pgt_workspace/src/workspace/client.rs +++ b/crates/pgt_workspace/src/workspace/client.rs @@ -1,13 +1,16 @@ use crate::workspace::ServerInfo; use crate::{TransportError, Workspace, WorkspaceError}; -use serde::{de::DeserializeOwned, Deserialize, Serialize}; +use serde::{Deserialize, Serialize, de::DeserializeOwned}; use serde_json::json; use std::{ panic::RefUnwindSafe, sync::atomic::{AtomicU64, Ordering}, }; -use super::{CloseFileParams, GetFileContentParams, IsPathIgnoredParams, OpenFileParams}; +use super::{ + CloseFileParams, GetFileContentParams, IsPathIgnoredParams, OpenFileParams, ProjectKey, + RegisterProjectFolderParams, UnregisterProjectFolderParams, +}; pub struct WorkspaceClient { transport: T, diff --git a/crates/pgt_workspace/src/workspace/server.rs b/crates/pgt_workspace/src/workspace/server.rs index f29960ef..63e86057 100644 --- a/crates/pgt_workspace/src/workspace/server.rs +++ b/crates/pgt_workspace/src/workspace/server.rs @@ -10,7 +10,7 @@ use async_helper::run_async; use connection_manager::ConnectionManager; use dashmap::DashMap; use document::Document; -use futures::{stream, StreamExt}; +use futures::{StreamExt, stream}; use parsed_document::{ AsyncDiagnosticsMapper, CursorPositionFilter, DefaultMapper, ExecuteStatementMapper, ParsedDocument, SyncDiagnosticsMapper, @@ -18,7 +18,7 @@ use parsed_document::{ use pgt_analyse::{AnalyserOptions, AnalysisFilter}; use pgt_analyser::{Analyser, AnalyserConfig, AnalyserContext}; use pgt_diagnostics::{ - serde::Diagnostic as SDiagnostic, Diagnostic, DiagnosticExt, Error, Severity, + Diagnostic, DiagnosticExt, Error, Severity, serde::Diagnostic as SDiagnostic, }; use pgt_fs::{ConfigName, PgTPath}; use pgt_typecheck::{IdentifierType, TypecheckParams, TypedIdentifier}; @@ -27,20 +27,17 @@ use sqlx::{Executor, PgPool}; use tracing::{debug, info}; use crate::{ + 
WorkspaceError, configuration::to_analyser_rules, features::{ code_actions::{ self, CodeAction, CodeActionKind, CodeActionsResult, CommandAction, CommandActionCategory, ExecuteStatementParams, ExecuteStatementResult, }, - completions::{get_statement_for_completions, CompletionsResult, GetCompletionsParams}, + completions::{CompletionsResult, GetCompletionsParams, get_statement_for_completions}, diagnostics::{PullDiagnosticsParams, PullDiagnosticsResult}, }, - settings::{ - Settings, SettingsHandle, SettingsHandleMut, WorkspaceSettings, WorkspaceSettingsHandle, - WorkspaceSettingsHandleMut, - }, - WorkspaceError, + settings::{WorkspaceSettings, WorkspaceSettingsHandle, WorkspaceSettingsHandleMut}, }; use super::{ @@ -55,6 +52,7 @@ mod analyser; mod annotation; mod async_helper; mod change; +mod connection_key; mod connection_manager; pub(crate) mod document; mod migration; @@ -444,7 +442,6 @@ impl Workspace for WorkspaceServer { if let Some(pool) = self.get_current_connection() { let path_clone = params.path.clone(); let schema_cache = self.schema_cache.load(pool.clone())?; - let schema_cache_arc = schema_cache.get_arc(); let input = parser.iter(AsyncDiagnosticsMapper).collect::>(); // sorry for the ugly code :( let async_results = run_async(async move { @@ -452,7 +449,7 @@ impl Workspace for WorkspaceServer { .map(|(_id, range, content, ast, cst, sign)| { let pool = pool.clone(); let path = path_clone.clone(); - let schema_cache = Arc::clone(&schema_cache_arc); + let schema_cache = Arc::clone(&schema_cache); async move { if let Some(ast) = ast { pgt_typecheck::check_sql(TypecheckParams { diff --git a/crates/pgt_workspace/src/workspace/server/connection_key.rs b/crates/pgt_workspace/src/workspace/server/connection_key.rs new file mode 100644 index 00000000..abdd8025 --- /dev/null +++ b/crates/pgt_workspace/src/workspace/server/connection_key.rs @@ -0,0 +1,44 @@ +use sqlx::PgPool; + +use crate::settings::DatabaseSettings; + +/// A unique identifier for database 
connection settings +#[derive(Clone, PartialEq, Eq, Hash)] +pub(crate) struct ConnectionKey { + pub host: String, + pub port: u16, + pub username: String, + pub database: String, +} + +impl From<&DatabaseSettings> for ConnectionKey { + fn from(settings: &DatabaseSettings) -> Self { + Self { + host: settings.host.clone(), + port: settings.port, + username: settings.username.clone(), + database: settings.database.clone(), + } + } +} + +impl From<&PgPool> for ConnectionKey { + fn from(pool: &PgPool) -> Self { + let conn = pool.connect_options(); + + match conn.get_database() { + None => Self { + host: conn.get_host().to_string(), + port: conn.get_port(), + username: conn.get_username().to_string(), + database: String::new(), + }, + Some(db) => Self { + host: conn.get_host().to_string(), + port: conn.get_port(), + username: conn.get_username().to_string(), + database: db.to_string(), + }, + } + } +} diff --git a/crates/pgt_workspace/src/workspace/server/connection_manager.rs b/crates/pgt_workspace/src/workspace/server/connection_manager.rs index 4a04cc5b..d21988f0 100644 --- a/crates/pgt_workspace/src/workspace/server/connection_manager.rs +++ b/crates/pgt_workspace/src/workspace/server/connection_manager.rs @@ -1,31 +1,11 @@ use std::time::{Duration, Instant}; use dashmap::DashMap; -use sqlx::{pool::PoolOptions, postgres::PgConnectOptions, PgPool, Postgres}; +use sqlx::{PgPool, Postgres, pool::PoolOptions, postgres::PgConnectOptions}; use crate::settings::DatabaseSettings; -/// A unique identifier for database connection settings -#[derive(Clone, PartialEq, Eq, Hash)] -struct ConnectionKey { - host: String, - port: u16, - username: String, - password: String, - database: String, -} - -impl From<&DatabaseSettings> for ConnectionKey { - fn from(settings: &DatabaseSettings) -> Self { - Self { - host: settings.host.clone(), - port: settings.port, - username: settings.username.clone(), - password: settings.password.clone(), - database: settings.database.clone(), - } - } -} 
+use super::connection_key::ConnectionKey; /// Cached connection pool with last access time struct CachedPool { @@ -49,6 +29,7 @@ impl ConnectionManager { /// Get a connection pool for the given database settings. /// If a pool already exists for these settings, it will be returned. /// If not, a new pool will be created if connections are enabled. + /// Will also clean up idle connections that haven't been accessed for a while. pub(crate) fn get_pool(&self, settings: &DatabaseSettings) -> Option { let key = ConnectionKey::from(settings); @@ -84,8 +65,8 @@ impl ConnectionManager { let cached_pool = CachedPool { pool: pool.clone(), last_accessed: Instant::now(), - // TODO: add this to the db settings, for now default to one minute - idle_timeout: Duration::from_secs(60), + // TODO: add this to the db settings, for now default to five minutes + idle_timeout: Duration::from_secs(60 * 5), }; self.pools.insert(key, cached_pool); diff --git a/crates/pgt_workspace/src/workspace/server/schema_cache_manager.rs b/crates/pgt_workspace/src/workspace/server/schema_cache_manager.rs index 03cd6ded..10dab3dd 100644 --- a/crates/pgt_workspace/src/workspace/server/schema_cache_manager.rs +++ b/crates/pgt_workspace/src/workspace/server/schema_cache_manager.rs @@ -1,97 +1,30 @@ -use std::sync::{Arc, RwLock, RwLockReadGuard}; +use std::sync::Arc; +use dashmap::DashMap; use pgt_schema_cache::SchemaCache; use sqlx::PgPool; use crate::WorkspaceError; -use super::async_helper::run_async; - -pub(crate) struct SchemaCacheHandle<'a> { - inner: RwLockReadGuard<'a, SchemaCacheManagerInner>, -} - -impl<'a> SchemaCacheHandle<'a> { - pub(crate) fn new(cache: &'a RwLock) -> Self { - Self { - inner: cache.read().unwrap(), - } - } - - pub(crate) fn wrap(inner: RwLockReadGuard<'a, SchemaCacheManagerInner>) -> Self { - Self { inner } - } - - pub fn get_arc(&self) -> Arc { - Arc::clone(&self.inner.cache) - } -} - -impl AsRef for SchemaCacheHandle<'_> { - fn as_ref(&self) -> &SchemaCache { - 
&self.inner.cache - } -} - -#[derive(Default)] -pub(crate) struct SchemaCacheManagerInner { - cache: Arc, - conn_str: String, -} +use super::{async_helper::run_async, connection_key::ConnectionKey}; #[derive(Default)] pub struct SchemaCacheManager { - inner: RwLock, + schemas: DashMap>, } impl SchemaCacheManager { - pub fn load(&self, pool: PgPool) -> Result { - let new_conn_str = pool_to_conn_str(&pool); + pub fn load(&self, pool: PgPool) -> Result, WorkspaceError> { + let key: ConnectionKey = (&pool).into(); - { - // return early if the connection string is the same - let inner = self.inner.read().unwrap(); - if new_conn_str == inner.conn_str { - tracing::info!("Same connection string, no updates."); - return Ok(SchemaCacheHandle::wrap(inner)); - } + if let Some(cache) = self.schemas.get(&key) { + return Ok(Arc::clone(&*cache)); } - let maybe_refreshed = run_async(async move { SchemaCache::load(&pool).await })?; - let refreshed = maybe_refreshed?; - - { - // write lock must be dropped before we return the reference below, hence the block - let mut inner = self.inner.write().unwrap(); - - // Double-check that we still need to refresh (another thread might have done it) - if new_conn_str != inner.conn_str { - inner.cache = Arc::new(refreshed); - inner.conn_str = new_conn_str; - tracing::info!("Refreshed connection."); - } - } - - Ok(SchemaCacheHandle::new(&self.inner)) - } -} + let schema_cache = Arc::new(run_async(async move { SchemaCache::load(&pool).await })??); -fn pool_to_conn_str(pool: &PgPool) -> String { - let conn = pool.connect_options(); + self.schemas.insert(key, Arc::clone(&schema_cache)); - match conn.get_database() { - None => format!( - "postgres://{}:@{}:{}", - conn.get_username(), - conn.get_host(), - conn.get_port() - ), - Some(db) => format!( - "postgres://{}:@{}:{}/{}", - conn.get_username(), - conn.get_host(), - conn.get_port(), - db - ), + Ok(schema_cache) } } From 84a76a2ca385a16ba8d97b4f5e59981a689ca102 Mon Sep 17 00:00:00 2001 From: 
psteinroe Date: Wed, 28 May 2025 09:16:59 +0200 Subject: [PATCH 04/30] progress --- crates/pgt_cli/src/commands/mod.rs | 6 ++- crates/pgt_lsp/src/server.rs | 46 ++++++++++++++++++++ crates/pgt_lsp/src/session.rs | 40 ++++++++++++++++- crates/pgt_workspace/src/workspace/server.rs | 4 ++ 4 files changed, 94 insertions(+), 2 deletions(-) diff --git a/crates/pgt_cli/src/commands/mod.rs b/crates/pgt_cli/src/commands/mod.rs index 90ac4153..19dc56ad 100644 --- a/crates/pgt_cli/src/commands/mod.rs +++ b/crates/pgt_cli/src/commands/mod.rs @@ -11,7 +11,7 @@ use pgt_console::Console; use pgt_fs::FileSystem; use pgt_workspace::PartialConfigurationExt; use pgt_workspace::configuration::{LoadedConfiguration, load_configuration}; -use pgt_workspace::workspace::UpdateSettingsParams; +use pgt_workspace::workspace::{RegisterProjectFolderParams, UpdateSettingsParams}; use pgt_workspace::{DynRef, Workspace, WorkspaceError}; use std::ffi::OsString; use std::path::PathBuf; @@ -301,6 +301,10 @@ pub(crate) trait CommandRunner: Sized { let (vcs_base_path, gitignore_matches) = configuration.retrieve_gitignore_matches(fs, vcs_base_path.as_deref())?; let paths = self.get_files_to_process(fs, &configuration)?; + workspace.register_project_folder(RegisterProjectFolderParams { + path: fs.working_directory(), + set_as_current_workspace: true, + })?; workspace.update_settings(UpdateSettingsParams { workspace_directory: fs.working_directory(), diff --git a/crates/pgt_lsp/src/server.rs b/crates/pgt_lsp/src/server.rs index 4c05c0e4..079801fd 100644 --- a/crates/pgt_lsp/src/server.rs +++ b/crates/pgt_lsp/src/server.rs @@ -5,6 +5,7 @@ use crate::utils::{into_lsp_error, panic_to_lsp_error}; use futures::FutureExt; use futures::future::ready; use pgt_fs::{ConfigName, FileSystem, OsFileSystem}; +use pgt_workspace::workspace::{RegisterProjectFolderParams, UnregisterProjectFolderParams}; use pgt_workspace::{DynRef, Workspace, workspace}; use rustc_hash::FxHashMap; use serde_json::json; @@ -107,6 +108,10 @@ 
impl LanguageServer for LSPServer { self.session.initialize( params.capabilities, + params.client_info.map(|client_info| ClientInformation { + name: client_info.name, + version: client_info.version, + }), params.root_uri, params.workspace_folders, ); @@ -217,6 +222,47 @@ impl LanguageServer for LSPServer { .ok(); } + async fn did_change_workspace_folders(&self, params: DidChangeWorkspaceFoldersParams) { + for removed in &params.event.removed { + if let Ok(project_path) = self.session.file_path(&removed.uri) { + let result = self + .session + .workspace + .unregister_project_folder(UnregisterProjectFolderParams { path: project_path }) + .map_err(into_lsp_error); + + if let Err(err) = result { + error!("Failed to remove project from the workspace: {}", err); + self.session + .client + .log_message(MessageType::ERROR, err) + .await; + } + } + } + + for added in &params.event.added { + if let Ok(project_path) = self.session.file_path(&added.uri) { + let result = self + .session + .workspace + .register_project_folder(RegisterProjectFolderParams { + path: Some(project_path.to_path_buf()), + set_as_current_workspace: true, + }) + .map_err(into_lsp_error); + + if let Err(err) = result { + error!("Failed to add project to the workspace: {}", err); + self.session + .client + .log_message(MessageType::ERROR, err) + .await; + } + } + } + } + + #[tracing::instrument(level = "trace", skip_all)] + async fn completion(&self, params: CompletionParams) -> LspResult<Option<CompletionResponse>> { + match handlers::completions::get_completions(&self.session, params) { diff --git a/crates/pgt_lsp/src/session.rs b/crates/pgt_lsp/src/session.rs index 4ab0abcf..fd5af2da 100644 --- a/crates/pgt_lsp/src/session.rs +++ b/crates/pgt_lsp/src/session.rs @@ -14,7 +14,7 @@ use pgt_workspace::PartialConfigurationExt; use pgt_workspace::Workspace; use pgt_workspace::configuration::{LoadedConfiguration, load_configuration}; use pgt_workspace::features; -use pgt_workspace::workspace::UpdateSettingsParams; +use
pgt_workspace::workspace::{RegisterProjectFolderParams, UpdateSettingsParams}; use pgt_workspace::{DynRef, WorkspaceError}; use rustc_hash::FxHashMap; use serde_json::Value; @@ -31,6 +31,14 @@ use tower_lsp::lsp_types::{MessageType, Registration}; use tower_lsp::lsp_types::{Unregistration, WorkspaceFolder}; use tracing::{error, info}; +pub(crate) struct ClientInformation { + /// The name of the client + pub(crate) name: String, + + /// The version of the client + pub(crate) version: Option<String>, +} + /// Key, uniquely identifying a LSP session. #[derive(Clone, Copy, Eq, PartialEq, Hash, Debug)] pub(crate) struct SessionKey(pub u64); @@ -68,6 +76,7 @@ pub(crate) struct Session { struct InitializeParams { /// The capabilities provided by the client as part of [`lsp_types::InitializeParams`] client_capabilities: lsp_types::ClientCapabilities, + client_information: Option<ClientInformation>, root_uri: Option<Url>, #[allow(unused)] workspace_folders: Option<Vec<WorkspaceFolder>>, @@ -164,11 +173,13 @@ impl Session { pub(crate) fn initialize( &self, client_capabilities: lsp_types::ClientCapabilities, + client_information: Option<ClientInformation>, root_uri: Option<Url>, workspace_folders: Option<Vec<WorkspaceFolder>>, ) { let result = self.initialize_params.set(InitializeParams { client_capabilities, + client_information, root_uri, workspace_folders, }); @@ -446,6 +457,8 @@ impl Session { info!("Configuration loaded successfully from disk."); info!("Update workspace settings."); + let fs = &self.fs; + if let Some(ws_configuration) = extra_config { fs_configuration.merge_with(ws_configuration); } @@ -455,6 +468,31 @@ impl Session { match result { Ok((vcs_base_path, gitignore_matches)) => { + let register_result = + if let ConfigurationPathHint::FromWorkspace(path) = &base_path { + // We don't need the key + self.workspace + .register_project_folder(RegisterProjectFolderParams { + path: Some(path.clone()), + // This is naive, but we don't know if the user has a file already open or not, so we register every project as the current one.
+ // The correct one is actually set when the LSP calls `textDocument/didOpen` + set_as_current_workspace: true, + }) + .err() + } else { + self.workspace + .register_project_folder(RegisterProjectFolderParams { + path: fs.working_directory(), + set_as_current_workspace: true, + }) + .err() + }; + if let Some(error) = register_result { + error!("Failed to register the project folder: {}", error); + self.client.log_message(MessageType::ERROR, &error).await; + return ConfigurationStatus::Error; + } + let result = self.workspace.update_settings(UpdateSettingsParams { workspace_directory: self.fs.working_directory(), configuration: fs_configuration, diff --git a/crates/pgt_workspace/src/workspace/server.rs b/crates/pgt_workspace/src/workspace/server.rs index 63e86057..ecbd8eb5 100644 --- a/crates/pgt_workspace/src/workspace/server.rs +++ b/crates/pgt_workspace/src/workspace/server.rs @@ -269,6 +269,10 @@ impl Workspace for WorkspaceServer { ParsedDocument::new(params.path.clone(), params.content, params.version) }); + if let Some(project_key) = self.path_belongs_to_current_workspace(¶ms.path) { + self.set_current_project(project_key); + } + Ok(()) } From 7ec9c94530adcdef4733e1b693416963231b4258 Mon Sep 17 00:00:00 2001 From: psteinroe Date: Wed, 28 May 2025 09:40:23 +0200 Subject: [PATCH 05/30] progress --- crates/pgt_lsp/src/server.rs | 4 +- crates/pgt_lsp/tests/server.rs | 237 +++++++++++++++++++++++++++++++-- 2 files changed, 232 insertions(+), 9 deletions(-) diff --git a/crates/pgt_lsp/src/server.rs b/crates/pgt_lsp/src/server.rs index 079801fd..a05c2a9b 100644 --- a/crates/pgt_lsp/src/server.rs +++ b/crates/pgt_lsp/src/server.rs @@ -1,6 +1,8 @@ use crate::capabilities::server_capabilities; use crate::handlers; -use crate::session::{CapabilitySet, CapabilityStatus, Session, SessionHandle, SessionKey}; +use crate::session::{ + CapabilitySet, CapabilityStatus, ClientInformation, Session, SessionHandle, SessionKey, +}; use crate::utils::{into_lsp_error, 
panic_to_lsp_error}; use futures::FutureExt; use futures::future::ready; diff --git a/crates/pgt_lsp/tests/server.rs b/crates/pgt_lsp/tests/server.rs index 581ea1fe..460c7a37 100644 --- a/crates/pgt_lsp/tests/server.rs +++ b/crates/pgt_lsp/tests/server.rs @@ -40,6 +40,7 @@ use tower_lsp::lsp_types::Position; use tower_lsp::lsp_types::Range; use tower_lsp::lsp_types::TextDocumentPositionParams; use tower_lsp::lsp_types::WorkDoneProgressParams; +use tower_lsp::lsp_types::WorkspaceFolder; use tower_lsp::lsp_types::{ ClientCapabilities, DidChangeConfigurationParams, DidChangeTextDocumentParams, DidCloseTextDocumentParams, DidOpenTextDocumentParams, InitializeResult, InitializedParams, @@ -164,6 +165,42 @@ impl Server { Ok(()) } + /// It creates two workspaces, one at folder `test_one` and the other in `test_two`. + /// + /// Hence, the two roots will be `/workspace/test_one` and `/workspace/test_two` + #[allow(deprecated)] + async fn initialize_workspaces(&mut self) -> Result<()> { + let _res: InitializeResult = self + .request( + "initialize", + "_init", + InitializeParams { + process_id: None, + root_path: None, + root_uri: Some(url!("/")), + initialization_options: None, + capabilities: ClientCapabilities::default(), + trace: None, + workspace_folders: Some(vec![ + WorkspaceFolder { + name: "test_one".to_string(), + uri: url!("test_one"), + }, + WorkspaceFolder { + name: "test_two".to_string(), + uri: url!("test_two"), + }, + ]), + client_info: None, + locale: None, + }, + ) + .await? + .context("initialize returned None")?; + + Ok(()) + } + /// Basic implementation of the `initialized` notification for tests async fn initialized(&mut self) -> Result<()> { self.notify("initialized", InitializedParams {}).await @@ -204,13 +241,18 @@ impl Server { } /// Opens a document with given contents and given name. 
The name must contain the extension too - async fn open_named_document(&mut self, text: impl Display, document_name: Url) -> Result<()> { + async fn open_named_document( + &mut self, + text: impl Display, + document_name: Url, + language: impl Display, + ) -> Result<()> { self.notify( "textDocument/didOpen", DidOpenTextDocumentParams { text_document: TextDocumentItem { uri: document_name, - language_id: String::from("sql"), + language_id: language.to_string(), version: 0, text: text.to_string(), }, @@ -230,24 +272,31 @@ impl Server { .await } - async fn change_document( + async fn change_named_document( &mut self, + uri: Url, version: i32, content_changes: Vec<TextDocumentContentChangeEvent>, ) -> Result<()> { self.notify( "textDocument/didChange", DidChangeTextDocumentParams { - text_document: VersionedTextDocumentIdentifier { - uri: url!("document.sql"), - version, - }, + text_document: VersionedTextDocumentIdentifier { uri, version }, content_changes, }, ) .await } + async fn change_document( + &mut self, + version: i32, + content_changes: Vec<TextDocumentContentChangeEvent>, + ) -> Result<()> { + self.change_named_document(url!("document.sql"), version, content_changes) + .await + } + #[allow(unused)] async fn close_document(&mut self) -> Result<()> { self.notify( @@ -835,7 +884,7 @@ async fn test_execute_statement() -> Result<()> { let doc_url = url!("test.sql"); server - .open_named_document(doc_content.to_string(), doc_url.clone()) + .open_named_document(doc_content.to_string(), doc_url.clone(), "sql") .await?; let code_actions_response = server @@ -1113,3 +1162,175 @@ async fn test_issue_303() -> Result<()> { Ok(()) } + +#[tokio::test] +async fn multiple_projects() -> Result<()> { + let factory = ServerFactory::default(); + let mut fs = MemoryFileSystem::default(); + let test_db = get_new_test_db().await; + + let setup = r#" + create table public.users ( + id serial primary key, + name varchar(255) not null + ); + "#; + + test_db + .execute(setup) + .await + .expect("Failed to setup test database"); + + // Setup
configurations + // - test_one with db connection + let mut conf_with_db = PartialConfiguration::init(); + conf_with_db.merge_with(PartialConfiguration { + db: Some(PartialDatabaseConfiguration { + database: Some( + test_db + .connect_options() + .get_database() + .unwrap() + .to_string(), + ), + ..Default::default() + }), + ..Default::default() + }); + fs.insert( + url!("test_one/postgrestools.jsonc").to_file_path().unwrap(), + serde_json::to_string_pretty(&conf_with_db).unwrap(), + ); + + // -- test_two without db connection + let mut conf_without_db = PartialConfiguration::init(); + conf_without_db.merge_with(PartialConfiguration { + db: Some(PartialDatabaseConfiguration { + disable_connection: Some(true), + ..Default::default() + }), + ..Default::default() + }); + fs.insert( + url!("test_two/postgrestools.jsonc").to_file_path().unwrap(), + serde_json::to_string_pretty(&conf_with_db).unwrap(), + ); + + let (service, client) = factory + .create_with_fs(None, DynRef::Owned(Box::new(fs))) + .into_inner(); + + let (stream, sink) = client.split(); + let mut server = Server::new(service); + + let (sender, _) = channel(CHANNEL_BUFFER_SIZE); + let reader = tokio::spawn(client_handler(stream, sink, sender)); + + server.initialize_workspaces().await?; + server.initialized().await?; + + server.load_configuration().await?; + + // do the same change in both workspaces and request completions in both workspaces + + server + .open_named_document( + "alter table appointment alter column end_time drop not null;\n", + url!("test_one/document.sql"), + "sql", + ) + .await?; + + server + .change_named_document( + url!("test_one/document.sql"), + 3, + vec![TextDocumentContentChangeEvent { + range: Some(Range { + start: Position { + line: 0, + character: 24, + }, + end: Position { + line: 0, + character: 24, + }, + }), + range_length: Some(0), + text: " ".to_string(), + }], + ) + .await?; + + let res_ws_one = server + .get_completion(CompletionParams { + work_done_progress_params: 
WorkDoneProgressParams::default(), + partial_result_params: PartialResultParams::default(), + context: None, + text_document_position: TextDocumentPositionParams { + text_document: TextDocumentIdentifier { + uri: url!("test_one/document.sql"), + }, + position: Position { + line: 0, + character: 25, + }, + }, + }) + .await?; + + server + .open_named_document( + "alter table appointment alter column end_time drop not null;\n", + url!("test_two/document.sql"), + "sql", + ) + .await?; + + server + .change_named_document( + url!("test_two/document.sql"), + 3, + vec![TextDocumentContentChangeEvent { + range: Some(Range { + start: Position { + line: 0, + character: 24, + }, + end: Position { + line: 0, + character: 24, + }, + }), + range_length: Some(0), + text: " ".to_string(), + }], + ) + .await?; + + let res_ws_two = server + .get_completion(CompletionParams { + work_done_progress_params: WorkDoneProgressParams::default(), + partial_result_params: PartialResultParams::default(), + context: None, + text_document_position: TextDocumentPositionParams { + text_document: TextDocumentIdentifier { + uri: url!("test_two/document.sql"), + }, + position: Position { + line: 0, + character: 25, + }, + }, + }) + .await?; + + // only the first one has a db connection and should return completion items + assert!(res_ws_one.is_some()); + assert!(res_ws_two.is_none()); + + server.shutdown().await?; + reader.abort(); + + Ok(()) +} From b01a2e8b1d4a123be0b96aa2fad9d105e2d15f22 Mon Sep 17 00:00:00 2001 From: psteinroe Date: Wed, 28 May 2025 21:32:19 +0200 Subject: [PATCH 06/30] fix test --- crates/pgt_lsp/src/server.rs | 2 ++ crates/pgt_lsp/tests/server.rs | 35 +++++++++++++++++++++------------- 2 files changed, 24 insertions(+), 13 deletions(-) diff --git a/crates/pgt_lsp/src/server.rs b/crates/pgt_lsp/src/server.rs index a05c2a9b..6420c511 100644 --- a/crates/pgt_lsp/src/server.rs +++ b/crates/pgt_lsp/src/server.rs @@ -446,6 +446,8 @@ impl ServerFactory { workspace_method!(builder, 
close_file); workspace_method!(builder, pull_diagnostics); workspace_method!(builder, get_completions); + workspace_method!(builder, register_project_folder); + workspace_method!(builder, unregister_project_folder); let (service, socket) = builder.finish(); ServerConnection { socket, service } diff --git a/crates/pgt_lsp/tests/server.rs b/crates/pgt_lsp/tests/server.rs index 460c7a37..834eadd9 100644 --- a/crates/pgt_lsp/tests/server.rs +++ b/crates/pgt_lsp/tests/server.rs @@ -1213,7 +1213,7 @@ async fn multiple_projects() -> Result<()> { }); fs.insert( url!("test_two/postgrestools.jsonc").to_file_path().unwrap(), - serde_json::to_string_pretty(&conf_with_db).unwrap(), + serde_json::to_string_pretty(&conf_without_db).unwrap(), ); let (service, client) = factory @@ -1235,7 +1235,7 @@ async fn multiple_projects() -> Result<()> { server .open_named_document( - "alter table appointment alter column end_time drop not null;\n", + "select from public.users;\n", url!("test_one/document.sql"), "sql", ) @@ -1249,11 +1249,11 @@ async fn multiple_projects() -> Result<()> { range: Some(Range { start: Position { line: 0, - character: 24, + character: 7, }, end: Position { line: 0, - character: 24, + character: 7, }, }), range_length: Some(0), @@ -1273,15 +1273,16 @@ async fn multiple_projects() -> Result<()> { }, position: Position { line: 0, - character: 25, + character: 8, }, }, }) - .await?; + .await? 
+ .unwrap(); server .open_named_document( - "alter table appointment alter column end_time drop not null;\n", + "select from public.users;\n", url!("test_two/document.sql"), "sql", ) @@ -1295,11 +1296,11 @@ async fn multiple_projects() -> Result<()> { range: Some(Range { start: Position { line: 0, - character: 24, + character: 7, }, end: Position { line: 0, - character: 24, + character: 7, }, }), range_length: Some(0), @@ -1319,15 +1320,23 @@ async fn multiple_projects() -> Result<()> { }, position: Position { line: 0, - character: 25, + character: 8, }, }, }) - .await?; + .await? + .unwrap(); + println!("{:?}", res_ws_two); // only the first one has a db connection and should return completion items - assert!(res_ws_one.is_some()); - assert!(res_ws_two.is_none()); + assert!(!match res_ws_one { + CompletionResponse::Array(a) => a.is_empty(), + CompletionResponse::List(l) => l.items.is_empty(), + }); + assert!(match res_ws_two { + CompletionResponse::Array(a) => a.is_empty(), + CompletionResponse::List(l) => l.items.is_empty(), + }); server.shutdown().await?; reader.abort(); From 891c58eb58f16f8b7deb7f950f01f5728af40774 Mon Sep 17 00:00:00 2001 From: psteinroe Date: Wed, 28 May 2025 21:33:14 +0200 Subject: [PATCH 07/30] fix: lint --- crates/pgt_workspace/src/diagnostics.rs | 2 +- crates/pgt_workspace/src/settings.rs | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/crates/pgt_workspace/src/diagnostics.rs b/crates/pgt_workspace/src/diagnostics.rs index 492fb0de..5020cc62 100644 --- a/crates/pgt_workspace/src/diagnostics.rs +++ b/crates/pgt_workspace/src/diagnostics.rs @@ -358,6 +358,6 @@ impl Diagnostic for FileTooLarge { impl From for WorkspaceError { fn from(value: CantLoadExtendFile) -> Self { - WorkspaceError::Configuration(ConfigurationDiagnostic::CantLoadExtendFile(value).into()) + WorkspaceError::Configuration(ConfigurationDiagnostic::CantLoadExtendFile(value)) } } diff --git a/crates/pgt_workspace/src/settings.rs 
b/crates/pgt_workspace/src/settings.rs index 8fc0c07a..f7adb8db 100644 --- a/crates/pgt_workspace/src/settings.rs +++ b/crates/pgt_workspace/src/settings.rs @@ -176,7 +176,7 @@ impl<'a> WorkspaceSettingsHandle<'a> { } } -impl<'a> AsRef<WorkspaceSettings> for WorkspaceSettingsHandle<'a> { +impl AsRef<WorkspaceSettings> for WorkspaceSettingsHandle<'_> { fn as_ref(&self) -> &WorkspaceSettings { &self.inner } @@ -194,7 +194,7 @@ impl<'a> WorkspaceSettingsHandleMut<'a> { } } -impl<'a> AsMut<WorkspaceSettings> for WorkspaceSettingsHandleMut<'a> { +impl AsMut<WorkspaceSettings> for WorkspaceSettingsHandleMut<'_> { fn as_mut(&mut self) -> &mut WorkspaceSettings { &mut self.inner } From 12571e2ff61a692c10fc25f38be02824af241f38 Mon Sep 17 00:00:00 2001 From: psteinroe Date: Wed, 28 May 2025 22:07:07 +0200 Subject: [PATCH 08/30] fix: codegen --- docs/schemas/0.0.0/schema.json | 11 +++++++++++ docs/schemas/latest/schema.json | 11 +++++++++++ .../backend-jsonrpc/src/workspace.ts | 15 +++++++++++++-- 3 files changed, 35 insertions(+), 2 deletions(-) diff --git a/docs/schemas/0.0.0/schema.json b/docs/schemas/0.0.0/schema.json index faba3b5c..8c478d0a 100644 --- a/docs/schemas/0.0.0/schema.json +++ b/docs/schemas/0.0.0/schema.json @@ -22,6 +22,17 @@ } ] }, + "extends": { + "description": "A list of paths to other JSON files, used to extends the current configuration.", + "anyOf": [ + { + "$ref": "#/definitions/StringSet" + }, + { + "type": "null" + } + ] + }, "files": { "description": "The configuration of the filesystem", "anyOf": [ diff --git a/docs/schemas/latest/schema.json b/docs/schemas/latest/schema.json index faba3b5c..8c478d0a 100644 --- a/docs/schemas/latest/schema.json +++ b/docs/schemas/latest/schema.json @@ -22,6 +22,17 @@ } ] }, + "extends": { + "description": "A list of paths to other JSON files, used to extends the current configuration.", + "anyOf": [ + { + "$ref": "#/definitions/StringSet" + }, + { + "type": "null" + } + ] + }, "files": { "description": "The configuration of the filesystem", "anyOf": [ diff --git
a/packages/@postgrestools/backend-jsonrpc/src/workspace.ts b/packages/@postgrestools/backend-jsonrpc/src/workspace.ts index a35dad81..679146f1 100644 --- a/packages/@postgrestools/backend-jsonrpc/src/workspace.ts +++ b/packages/@postgrestools/backend-jsonrpc/src/workspace.ts @@ -185,6 +185,7 @@ export interface CompletionsResult { export interface CompletionItem { completion_text?: CompletionText; description: string; + detail?: string; kind: CompletionItemKind; label: string; preselected: boolean; @@ -199,13 +200,19 @@ export interface CompletionItem { label: "users", description: "Schema: auth", completion_text: "auth.users". */ export interface CompletionText { + is_snippet: boolean; /** * A `range` is required because some editors replace the current token, others naively insert the text. Having a range where start == end makes it an insertion. */ range: TextRange; text: string; } -export type CompletionItemKind = "table" | "function" | "column" | "schema"; +export type CompletionItemKind = + | "table" + | "function" + | "column" + | "schema" + | "policy"; export interface UpdateSettingsParams { configuration: PartialConfiguration; gitignore_matches: string[]; @@ -224,6 +231,10 @@ export interface PartialConfiguration { * The configuration of the database connection */ db?: PartialDatabaseConfiguration; + /** + * A list of paths to other JSON files, used to extends the current configuration. + */ + extends?: StringSet; /** * The configuration of the filesystem */ @@ -271,6 +282,7 @@ export interface PartialDatabaseConfiguration { */ username?: string; } +export type StringSet = string[]; /** * The configuration of the filesystem */ @@ -346,7 +358,6 @@ If we can't find the configuration, it will attempt to use the current working d */ useIgnoreFile?: boolean; } -export type StringSet = string[]; export interface Rules { /** * It enables ALL rules. The rules that belong to `nursery` won't be enabled. 
From 37193ee699ec2af65ef1a5c885d685354a0c2a8a Mon Sep 17 00:00:00 2001 From: psteinroe Date: Thu, 29 May 2025 17:17:37 +0200 Subject: [PATCH 09/30] implement extends --- crates/pgt_lsp/tests/server.rs | 202 +++++++++++++++++++++- crates/pgt_workspace/src/configuration.rs | 84 ++++++--- 2 files changed, 262 insertions(+), 24 deletions(-) diff --git a/crates/pgt_lsp/tests/server.rs b/crates/pgt_lsp/tests/server.rs index 834eadd9..80f35642 100644 --- a/crates/pgt_lsp/tests/server.rs +++ b/crates/pgt_lsp/tests/server.rs @@ -3,6 +3,7 @@ use anyhow::Error; use anyhow::Result; use anyhow::bail; use biome_deserialize::Merge; +use biome_deserialize::StringSet; use futures::Sink; use futures::SinkExt; use futures::Stream; @@ -1326,7 +1327,6 @@ async fn multiple_projects() -> Result<()> { }) .await? .unwrap(); - println!("{:?}", res_ws_two); // only the first one has a db connection and should return completion items assert!(!match res_ws_one { @@ -1343,3 +1343,203 @@ async fn multiple_projects() -> Result<()> { Ok(()) } + +#[tokio::test] +async fn extends_config() -> Result<()> { + let factory = ServerFactory::default(); + let mut fs = MemoryFileSystem::default(); + let test_db = get_new_test_db().await; + + let setup = r#" + create table public.extends_config_test ( + id serial primary key, + name varchar(255) not null + ); + "#; + + test_db + .execute(setup) + .await + .expect("Failed to setup test database"); + + // shared config with default db connection + let conf_with_db = PartialConfiguration::init(); + fs.insert( + url!("postgrestools.jsonc").to_file_path().unwrap(), + serde_json::to_string_pretty(&conf_with_db).unwrap(), + ); + + // test_one extends the shared config but sets our test db + let mut conf_with_db = PartialConfiguration::init(); + conf_with_db.merge_with(PartialConfiguration { + extends: Some(StringSet::from_iter(["../postgrestools.jsonc".to_string()])), + db: Some(PartialDatabaseConfiguration { + database: Some( + test_db + .connect_options() + 
.get_database() + .unwrap() + .to_string(), + ), + ..Default::default() + }), + ..Default::default() + }); + fs.insert( + url!("test_one/postgrestools.jsonc").to_file_path().unwrap(), + serde_json::to_string_pretty(&conf_with_db).unwrap(), + ); + + // test_two extends it but keeps the default one + let mut conf_without_db = PartialConfiguration::init(); + conf_without_db.merge_with(PartialConfiguration { + extends: Some(StringSet::from_iter(["../postgrestools.jsonc".to_string()])), + ..Default::default() + }); + fs.insert( + url!("test_two/postgrestools.jsonc").to_file_path().unwrap(), + serde_json::to_string_pretty(&conf_without_db).unwrap(), + ); + + let (service, client) = factory + .create_with_fs(None, DynRef::Owned(Box::new(fs))) + .into_inner(); + + let (stream, sink) = client.split(); + let mut server = Server::new(service); + + let (sender, _) = channel(CHANNEL_BUFFER_SIZE); + let reader = tokio::spawn(client_handler(stream, sink, sender)); + + server.initialize_workspaces().await?; + server.initialized().await?; + + server.load_configuration().await?; + + server + .open_named_document( + "select from public.extends_config_test;\n", + url!("test_one/document.sql"), + "sql", + ) + .await?; + + server + .change_named_document( + url!("test_one/document.sql"), + 3, + vec![TextDocumentContentChangeEvent { + range: Some(Range { + start: Position { + line: 0, + character: 7, + }, + end: Position { + line: 0, + character: 7, + }, + }), + range_length: Some(0), + text: " ".to_string(), + }], + ) + .await?; + + let res_ws_one = server + .get_completion(CompletionParams { + work_done_progress_params: WorkDoneProgressParams::default(), + partial_result_params: PartialResultParams::default(), + context: None, + text_document_position: TextDocumentPositionParams { + text_document: TextDocumentIdentifier { + uri: url!("test_one/document.sql"), + }, + position: Position { + line: 0, + character: 8, + }, + }, + }) + .await? 
+ .unwrap(); + + server + .open_named_document( + "select from public.users;\n", + url!("test_two/document.sql"), + "sql", + ) + .await?; + + server + .change_named_document( + url!("test_two/document.sql"), + 3, + vec![TextDocumentContentChangeEvent { + range: Some(Range { + start: Position { + line: 0, + character: 7, + }, + end: Position { + line: 0, + character: 7, + }, + }), + range_length: Some(0), + text: " ".to_string(), + }], + ) + .await?; + + let res_ws_two = server + .get_completion(CompletionParams { + work_done_progress_params: WorkDoneProgressParams::default(), + partial_result_params: PartialResultParams::default(), + context: None, + text_document_position: TextDocumentPositionParams { + text_document: TextDocumentIdentifier { + uri: url!("test_two/document.sql"), + }, + position: Position { + line: 0, + character: 8, + }, + }, + }) + .await? + .unwrap(); + + let items_one = match res_ws_one { + CompletionResponse::Array(ref a) => a, + CompletionResponse::List(ref l) => &l.items, + }; + + // test one should have our test db connection and should return the completion items for the `extends_config_test` table + assert!(items_one.iter().any(|item| { + item.label_details.clone().is_some_and(|details| { + details + .description + .is_some_and(|desc| desc.contains("public.extends_config_test")) + }) + })); + + let items_two = match res_ws_two { + CompletionResponse::Array(ref a) => a, + CompletionResponse::List(ref l) => &l.items, + }; + + // test two should not have a db connection and should not return the completion items for the `extends_config_test` table + assert!(!items_two.iter().any(|item| { + item.label_details.clone().is_some_and(|details| { + details + .description + .is_some_and(|desc| desc.contains("public.extends_config_test")) + }) + })); + + server.shutdown().await?; + reader.abort(); + + Ok(()) +} diff --git a/crates/pgt_workspace/src/configuration.rs b/crates/pgt_workspace/src/configuration.rs index 55f2b5f8..dcf0e8e9 100644 --- 
a/crates/pgt_workspace/src/configuration.rs +++ b/crates/pgt_workspace/src/configuration.rs @@ -31,34 +31,41 @@ pub struct LoadedConfiguration { } impl LoadedConfiguration { - /// Return the path of the **directory** where the configuration is - pub fn directory_path(&self) -> Option<&Path> { - self.directory_path.as_deref() - } - - /// Return the path of the **file** where the configuration is - pub fn file_path(&self) -> Option<&Path> { - self.file_path.as_deref() - } -} - -impl From> for LoadedConfiguration { - fn from(value: Option) -> Self { + fn try_from_payload( + value: Option, + fs: &DynRef<'_, dyn FileSystem>, + ) -> Result { let Some(value) = value else { - return LoadedConfiguration::default(); + return Ok(LoadedConfiguration::default()); }; let ConfigurationPayload { + external_resolution_base_path, configuration_file_path, - deserialized: partial_configuration, - .. + deserialized: mut partial_configuration, } = value; - LoadedConfiguration { + partial_configuration.apply_extends( + fs, + &configuration_file_path, + &external_resolution_base_path, + )?; + + Ok(Self { configuration: partial_configuration, directory_path: configuration_file_path.parent().map(PathBuf::from), file_path: Some(configuration_file_path), - } + }) + } + + /// Return the path of the **directory** where the configuration is + pub fn directory_path(&self) -> Option<&Path> { + self.directory_path.as_deref() + } + + /// Return the path of the **file** where the configuration is + pub fn file_path(&self) -> Option<&Path> { + self.file_path.as_deref() } } @@ -68,7 +75,7 @@ pub fn load_configuration( config_path: ConfigurationPathHint, ) -> Result { let config = load_config(fs, config_path)?; - Ok(LoadedConfiguration::from(config)) + LoadedConfiguration::try_from_payload(config, fs) } /// - [Result]: if an error occurred while loading the configuration file. 
@@ -123,7 +130,7 @@ fn load_config( ConfigurationPathHint::None => file_system.working_directory().unwrap_or_default(), }; - // We first search for `postgrestools.jsonc` + // We first search for `postgrestools.jsonc` files if let Some(auto_search_result) = file_system.auto_search( &configuration_directory, ConfigName::file_names().as_slice(), @@ -344,9 +351,10 @@ impl PartialConfigurationExt for PartialConfiguration { extend_entry_as_path .extension() .map(OsStr::as_encoded_bytes), - Some(b"json" | b"jsonc") + Some(b"jsonc") ) { - relative_resolution_base_path.join(extend_entry) + // Normalize the path to handle relative segments like "../" + normalize_path(&relative_resolution_base_path.join(extend_entry)) } else { fs.resolve_configuration(extend_entry.as_str(), external_resolution_base_path) .map_err(|error| { @@ -369,7 +377,7 @@ impl PartialConfigurationExt for PartialConfiguration { err.to_string(), ) .with_verbose_advice(markup! { - "Biome tried to load the configuration file \""{ + "Postgres Tools tried to load the configuration file \""{ extend_configuration_file_path.display().to_string() }"\" in \"extends\" using \""{ external_resolution_base_path.display().to_string() @@ -438,6 +446,36 @@ impl PartialConfigurationExt for PartialConfiguration { } } +/// Normalizes a path, resolving '..' and '.' 
segments without requiring the path to exist +fn normalize_path(path: &Path) -> PathBuf { + let mut components = Vec::new(); + for component in path.components() { + match component { + std::path::Component::ParentDir => { + if !components.is_empty() { + components.pop(); + } + } + std::path::Component::Normal(c) => components.push(c), + std::path::Component::CurDir => {} + c @ std::path::Component::RootDir | c @ std::path::Component::Prefix(_) => { + components.clear(); + components.push(c.as_os_str()); + } + } + } + + if components.is_empty() { + PathBuf::from("/") + } else { + let mut result = PathBuf::new(); + for component in components { + result.push(component); + } + result + } +} + #[cfg(test)] mod tests { use super::*; From 688e5d2e43f8f9e1ae388991e1eb224091ade849 Mon Sep 17 00:00:00 2001 From: psteinroe Date: Thu, 29 May 2025 17:26:20 +0200 Subject: [PATCH 10/30] fix: test --- crates/pgt_cli/src/diagnostics.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/crates/pgt_cli/src/diagnostics.rs b/crates/pgt_cli/src/diagnostics.rs index d24d02e9..20d32113 100644 --- a/crates/pgt_cli/src/diagnostics.rs +++ b/crates/pgt_cli/src/diagnostics.rs @@ -455,7 +455,7 @@ mod test { fn termination_diagnostic_size() { assert_eq!( std::mem::size_of::(), - 80, + 96, "you successfully decreased the size of the diagnostic!" 
) } From 8107e5af54b680fd979112540e166fda7d93d4be Mon Sep 17 00:00:00 2001 From: psteinroe Date: Thu, 29 May 2025 17:52:12 +0200 Subject: [PATCH 11/30] add new method to js bindings --- crates/pgt_workspace/src/workspace.rs | 13 +++++++++++++ crates/pgt_workspace/src/workspace_types.rs | 3 ++- .../backend-jsonrpc/src/workspace.ts | 17 ++++++++++++++--- .../backend-jsonrpc/tests/workspace.test.mjs | 5 ++++- 4 files changed, 33 insertions(+), 5 deletions(-) diff --git a/crates/pgt_workspace/src/workspace.rs b/crates/pgt_workspace/src/workspace.rs index c08c9d27..61d60a49 100644 --- a/crates/pgt_workspace/src/workspace.rs +++ b/crates/pgt_workspace/src/workspace.rs @@ -5,6 +5,8 @@ use pgt_analyse::RuleCategories; use pgt_configuration::{PartialConfiguration, RuleSelector}; use pgt_fs::PgTPath; use pgt_text_size::TextRange; +#[cfg(feature = "schema")] +use schemars::{JsonSchema, SchemaGenerator, schema::Schema}; use serde::{Deserialize, Serialize}; use slotmap::{DenseSlotMap, new_key_type}; @@ -255,6 +257,17 @@ new_key_type! { pub struct ProjectKey; } +#[cfg(feature = "schema")] +impl JsonSchema for ProjectKey { + fn schema_name() -> String { + "ProjectKey".to_string() + } + + fn json_schema(generator: &mut SchemaGenerator) -> Schema { + ::json_schema(generator) + } +} + #[derive(Debug, Default)] pub struct WorkspaceData { /// [DenseSlotMap] is the slowest type in insertion/removal, but the fastest in iteration diff --git a/crates/pgt_workspace/src/workspace_types.rs b/crates/pgt_workspace/src/workspace_types.rs index 02215e79..b902fad6 100644 --- a/crates/pgt_workspace/src/workspace_types.rs +++ b/crates/pgt_workspace/src/workspace_types.rs @@ -457,9 +457,10 @@ macro_rules! 
workspace_method { } /// Returns a list of signature for all the methods in the [Workspace] trait -pub fn methods() -> [WorkspaceMethod; 8] { +pub fn methods() -> [WorkspaceMethod; 9] { [ workspace_method!(is_path_ignored), + workspace_method!(register_project_folder), workspace_method!(get_file_content), workspace_method!(pull_diagnostics), workspace_method!(get_completions), diff --git a/packages/@postgrestools/backend-jsonrpc/src/workspace.ts b/packages/@postgrestools/backend-jsonrpc/src/workspace.ts index 679146f1..aaa5a42a 100644 --- a/packages/@postgrestools/backend-jsonrpc/src/workspace.ts +++ b/packages/@postgrestools/backend-jsonrpc/src/workspace.ts @@ -19,6 +19,11 @@ export type FileKind = FileKind2[]; * The priority of the file */ export type FileKind2 = "Config" | "Ignore" | "Inspectable" | "Handleable"; +export interface RegisterProjectFolderParams { + path?: string; + setAsCurrentWorkspace: boolean; +} +export type ProjectKey = string; export interface GetFileContentParams { path: PgTPath; } @@ -92,7 +97,7 @@ export type DiagnosticTags = DiagnosticTag[]; /** * Serializable representation of a [Diagnostic](super::Diagnostic) advice -See the [Visitor] trait for additional documentation on all the supported advice types. +See the [Visitor] trait for additional documentation on all the supported advice types. */ export type Advice = | { log: [LogCategory, MarkupBuf] } @@ -197,7 +202,7 @@ export interface CompletionItem { /** * The text that the editor should fill in. If `None`, the `label` should be used. Tables, for example, might have different completion_texts: -label: "users", description: "Schema: auth", completion_text: "auth.users". +label: "users", description: "Schema: auth", completion_text: "auth.users". */ export interface CompletionText { is_snippet: boolean; @@ -350,7 +355,7 @@ export interface PartialVcsConfiguration { /** * The folder where we should check for VCS files. 
By default, we will use the same folder where `postgrestools.jsonc` was found. -If we can't find the configuration, it will attempt to use the current working directory. If no current working directory can't be found, we won't use the VCS integration, and a diagnostic will be emitted +If we can't find the configuration, it will attempt to use the current working directory. If no current working directory can't be found, we won't use the VCS integration, and a diagnostic will be emitted */ root?: string; /** @@ -436,6 +441,9 @@ export interface CloseFileParams { export type Configuration = PartialConfiguration; export interface Workspace { isPathIgnored(params: IsPathIgnoredParams): Promise; + registerProjectFolder( + params: RegisterProjectFolderParams, + ): Promise; getFileContent(params: GetFileContentParams): Promise; pullDiagnostics( params: PullDiagnosticsParams, @@ -452,6 +460,9 @@ export function createWorkspace(transport: Transport): Workspace { isPathIgnored(params) { return transport.request("pgt/is_path_ignored", params); }, + registerProjectFolder(params) { + return transport.request("pgt/register_project_folder", params); + }, getFileContent(params) { return transport.request("pgt/get_file_content", params); }, diff --git a/packages/@postgrestools/backend-jsonrpc/tests/workspace.test.mjs b/packages/@postgrestools/backend-jsonrpc/tests/workspace.test.mjs index c83d5e44..c35904c4 100644 --- a/packages/@postgrestools/backend-jsonrpc/tests/workspace.test.mjs +++ b/packages/@postgrestools/backend-jsonrpc/tests/workspace.test.mjs @@ -2,7 +2,7 @@ import { resolve } from "node:path"; import { fileURLToPath } from "node:url"; import { describe, expect, it } from "vitest"; -import { createWorkspaceWithBinary } from "../dist"; +import { createWorkspaceWithBinary } from "../src"; describe("Workspace API", () => { it("should process remote requests", async () => { @@ -14,6 +14,9 @@ describe("Workspace API", () => { ); const workspace = await 
createWorkspaceWithBinary(command); + workspace.registerProjectFolder({ + setAsCurrentWorkspace: true, + }); await workspace.openFile({ path: { path: "test.sql", From 1b884f2c1539b783a3f9594dff5fd89be2d7274b Mon Sep 17 00:00:00 2001 From: psteinroe Date: Thu, 29 May 2025 21:01:32 +0200 Subject: [PATCH 12/30] progress --- crates/pgt_lsp/tests/server.rs | 14 ++++++++------ crates/pgt_workspace/src/workspace/server.rs | 8 ++++---- justfile | 6 ++++++ 3 files changed, 18 insertions(+), 10 deletions(-) diff --git a/crates/pgt_lsp/tests/server.rs b/crates/pgt_lsp/tests/server.rs index 80f35642..b4106cda 100644 --- a/crates/pgt_lsp/tests/server.rs +++ b/crates/pgt_lsp/tests/server.rs @@ -1,23 +1,23 @@ +use anyhow::bail; use anyhow::Context; use anyhow::Error; use anyhow::Result; -use anyhow::bail; use biome_deserialize::Merge; use biome_deserialize::StringSet; +use futures::channel::mpsc::{channel, Sender}; use futures::Sink; use futures::SinkExt; use futures::Stream; use futures::StreamExt; -use futures::channel::mpsc::{Sender, channel}; -use pgt_configuration::PartialConfiguration; use pgt_configuration::database::PartialDatabaseConfiguration; +use pgt_configuration::PartialConfiguration; use pgt_fs::MemoryFileSystem; use pgt_lsp::LSPServer; use pgt_lsp::ServerFactory; use pgt_test_utils::test_database::get_new_test_db; use pgt_workspace::DynRef; -use serde::Serialize; use serde::de::DeserializeOwned; +use serde::Serialize; use serde_json::Value; use serde_json::{from_value, to_value}; use sqlx::Executor; @@ -26,7 +26,6 @@ use std::fmt::Display; use std::time::Duration; use tower::timeout::Timeout; use tower::{Service, ServiceExt}; -use tower_lsp::LspService; use tower_lsp::jsonrpc; use tower_lsp::jsonrpc::Response; use tower_lsp::lsp_types as lsp; @@ -48,6 +47,7 @@ use tower_lsp::lsp_types::{ PublishDiagnosticsParams, TextDocumentContentChangeEvent, TextDocumentIdentifier, TextDocumentItem, Url, VersionedTextDocumentIdentifier, }; +use tower_lsp::LspService; use 
tower_lsp::{jsonrpc::Request, lsp_types::InitializeParams}; /// Statically build an [Url] instance that points to the file at `$path` @@ -1393,7 +1393,9 @@ async fn extends_config() -> Result<()> { // test_two extends it but keeps the default one let mut conf_without_db = PartialConfiguration::init(); conf_without_db.merge_with(PartialConfiguration { - extends: Some(StringSet::from_iter(["../postgrestools.jsonc".to_string()])), + extends: Some(StringSet::from_iter([Path::new("..") + .join("postgrestools.jsonc") + .to_string()])), ..Default::default() }); fs.insert( diff --git a/crates/pgt_workspace/src/workspace/server.rs b/crates/pgt_workspace/src/workspace/server.rs index ecbd8eb5..2cb0ac5c 100644 --- a/crates/pgt_workspace/src/workspace/server.rs +++ b/crates/pgt_workspace/src/workspace/server.rs @@ -10,7 +10,7 @@ use async_helper::run_async; use connection_manager::ConnectionManager; use dashmap::DashMap; use document::Document; -use futures::{StreamExt, stream}; +use futures::{stream, StreamExt}; use parsed_document::{ AsyncDiagnosticsMapper, CursorPositionFilter, DefaultMapper, ExecuteStatementMapper, ParsedDocument, SyncDiagnosticsMapper, @@ -18,7 +18,7 @@ use parsed_document::{ use pgt_analyse::{AnalyserOptions, AnalysisFilter}; use pgt_analyser::{Analyser, AnalyserConfig, AnalyserContext}; use pgt_diagnostics::{ - Diagnostic, DiagnosticExt, Error, Severity, serde::Diagnostic as SDiagnostic, + serde::Diagnostic as SDiagnostic, Diagnostic, DiagnosticExt, Error, Severity, }; use pgt_fs::{ConfigName, PgTPath}; use pgt_typecheck::{IdentifierType, TypecheckParams, TypedIdentifier}; @@ -27,17 +27,17 @@ use sqlx::{Executor, PgPool}; use tracing::{debug, info}; use crate::{ - WorkspaceError, configuration::to_analyser_rules, features::{ code_actions::{ self, CodeAction, CodeActionKind, CodeActionsResult, CommandAction, CommandActionCategory, ExecuteStatementParams, ExecuteStatementResult, }, - completions::{CompletionsResult, GetCompletionsParams, 
get_statement_for_completions}, + completions::{get_statement_for_completions, CompletionsResult, GetCompletionsParams}, diagnostics::{PullDiagnosticsParams, PullDiagnosticsResult}, }, settings::{WorkspaceSettings, WorkspaceSettingsHandle, WorkspaceSettingsHandleMut}, + WorkspaceError, }; use super::{ diff --git a/justfile b/justfile index bf982739..493caa7f 100644 --- a/justfile +++ b/justfile @@ -6,6 +6,7 @@ alias r := ready alias l := lint alias t := test alias rg := reset-git +alias qm := quick-modify # Installs the tools needed to develop install-tools: @@ -139,6 +140,11 @@ quick-create branch commit: git push gh pr create --fill +quick-modify: + git add -A + git commit -m "progress" + git push + # Make sure to set your PGT_LOG_PATH in your shell profile. # You can use the PGT_LOG_LEVEL to set your log level. # We recommend to install `bunyan` (npm i -g bunyan) and pipe the output through there for color-coding: From 856dc29ab299e6c93df80e1c2b4e6a29801c1cdc Mon Sep 17 00:00:00 2001 From: psteinroe Date: Thu, 29 May 2025 21:13:49 +0200 Subject: [PATCH 13/30] progress --- crates/pgt_lsp/tests/server.rs | 12 +++++++----- crates/pgt_workspace/src/workspace/server.rs | 8 ++++---- justfile | 1 + 3 files changed, 12 insertions(+), 9 deletions(-) diff --git a/crates/pgt_lsp/tests/server.rs b/crates/pgt_lsp/tests/server.rs index b4106cda..2922e5e4 100644 --- a/crates/pgt_lsp/tests/server.rs +++ b/crates/pgt_lsp/tests/server.rs @@ -1,31 +1,33 @@ -use anyhow::bail; use anyhow::Context; use anyhow::Error; use anyhow::Result; +use anyhow::bail; use biome_deserialize::Merge; use biome_deserialize::StringSet; -use futures::channel::mpsc::{channel, Sender}; use futures::Sink; use futures::SinkExt; use futures::Stream; use futures::StreamExt; -use pgt_configuration::database::PartialDatabaseConfiguration; +use futures::channel::mpsc::{Sender, channel}; use pgt_configuration::PartialConfiguration; +use pgt_configuration::database::PartialDatabaseConfiguration; use 
pgt_fs::MemoryFileSystem; use pgt_lsp::LSPServer; use pgt_lsp::ServerFactory; use pgt_test_utils::test_database::get_new_test_db; use pgt_workspace::DynRef; -use serde::de::DeserializeOwned; use serde::Serialize; +use serde::de::DeserializeOwned; use serde_json::Value; use serde_json::{from_value, to_value}; use sqlx::Executor; use std::any::type_name; use std::fmt::Display; +use std::path::Path; use std::time::Duration; use tower::timeout::Timeout; use tower::{Service, ServiceExt}; +use tower_lsp::LspService; use tower_lsp::jsonrpc; use tower_lsp::jsonrpc::Response; use tower_lsp::lsp_types as lsp; @@ -47,7 +49,6 @@ use tower_lsp::lsp_types::{ PublishDiagnosticsParams, TextDocumentContentChangeEvent, TextDocumentIdentifier, TextDocumentItem, Url, VersionedTextDocumentIdentifier, }; -use tower_lsp::LspService; use tower_lsp::{jsonrpc::Request, lsp_types::InitializeParams}; /// Statically build an [Url] instance that points to the file at `$path` @@ -1395,6 +1396,7 @@ async fn extends_config() -> Result<()> { conf_without_db.merge_with(PartialConfiguration { extends: Some(StringSet::from_iter([Path::new("..") .join("postgrestools.jsonc") + .to_string_lossy() .to_string()])), ..Default::default() }); diff --git a/crates/pgt_workspace/src/workspace/server.rs b/crates/pgt_workspace/src/workspace/server.rs index 2cb0ac5c..ecbd8eb5 100644 --- a/crates/pgt_workspace/src/workspace/server.rs +++ b/crates/pgt_workspace/src/workspace/server.rs @@ -10,7 +10,7 @@ use async_helper::run_async; use connection_manager::ConnectionManager; use dashmap::DashMap; use document::Document; -use futures::{stream, StreamExt}; +use futures::{StreamExt, stream}; use parsed_document::{ AsyncDiagnosticsMapper, CursorPositionFilter, DefaultMapper, ExecuteStatementMapper, ParsedDocument, SyncDiagnosticsMapper, @@ -18,7 +18,7 @@ use parsed_document::{ use pgt_analyse::{AnalyserOptions, AnalysisFilter}; use pgt_analyser::{Analyser, AnalyserConfig, AnalyserContext}; use pgt_diagnostics::{ - 
serde::Diagnostic as SDiagnostic, Diagnostic, DiagnosticExt, Error, Severity, + Diagnostic, DiagnosticExt, Error, Severity, serde::Diagnostic as SDiagnostic, }; use pgt_fs::{ConfigName, PgTPath}; use pgt_typecheck::{IdentifierType, TypecheckParams, TypedIdentifier}; @@ -27,17 +27,17 @@ use sqlx::{Executor, PgPool}; use tracing::{debug, info}; use crate::{ + WorkspaceError, configuration::to_analyser_rules, features::{ code_actions::{ self, CodeAction, CodeActionKind, CodeActionsResult, CommandAction, CommandActionCategory, ExecuteStatementParams, ExecuteStatementResult, }, - completions::{get_statement_for_completions, CompletionsResult, GetCompletionsParams}, + completions::{CompletionsResult, GetCompletionsParams, get_statement_for_completions}, diagnostics::{PullDiagnosticsParams, PullDiagnosticsResult}, }, settings::{WorkspaceSettings, WorkspaceSettingsHandle, WorkspaceSettingsHandleMut}, - WorkspaceError, }; use super::{ diff --git a/justfile b/justfile index 493caa7f..c868a122 100644 --- a/justfile +++ b/justfile @@ -141,6 +141,7 @@ quick-create branch commit: gh pr create --fill quick-modify: + just format git add -A git commit -m "progress" git push From 60feb9614bb7a229c5875b616aa82739fa2b2c77 Mon Sep 17 00:00:00 2001 From: psteinroe Date: Thu, 29 May 2025 22:49:09 +0200 Subject: [PATCH 14/30] progress --- crates/pgt_workspace/src/configuration.rs | 26 ++++++++++++++++++++--- 1 file changed, 23 insertions(+), 3 deletions(-) diff --git a/crates/pgt_workspace/src/configuration.rs b/crates/pgt_workspace/src/configuration.rs index dcf0e8e9..ec01b2b4 100644 --- a/crates/pgt_workspace/src/configuration.rs +++ b/crates/pgt_workspace/src/configuration.rs @@ -449,16 +449,27 @@ impl PartialConfigurationExt for PartialConfiguration { /// Normalizes a path, resolving '..' and '.' 
segments without requiring the path to exist fn normalize_path(path: &Path) -> PathBuf { let mut components = Vec::new(); + let mut has_root_or_prefix = false; + for component in path.components() { match component { std::path::Component::ParentDir => { - if !components.is_empty() { + if !components.is_empty() + && !matches!(components.last(), Some(c) if matches!(Path::new(c).components().next(), + Some(std::path::Component::Prefix(_)))) + { components.pop(); } } std::path::Component::Normal(c) => components.push(c), std::path::Component::CurDir => {} - c @ std::path::Component::RootDir | c @ std::path::Component::Prefix(_) => { + c @ std::path::Component::RootDir => { + has_root_or_prefix = true; + components.clear(); + components.push(c.as_os_str()); + } + c @ std::path::Component::Prefix(_) => { + has_root_or_prefix = true; components.clear(); components.push(c.as_os_str()); } @@ -466,7 +477,16 @@ fn normalize_path(path: &Path) -> PathBuf { } if components.is_empty() { - PathBuf::from("/") + if has_root_or_prefix { + // On Windows, this would be something like "C:\" or "\" + path.ancestors() + .last() + .unwrap_or(Path::new("")) + .to_path_buf() + } else { + // Return current directory as a relative path + PathBuf::from(".") + } } else { let mut result = PathBuf::new(); for component in components { From 24b073ae1155e55976ed65ace63ec165513df99b Mon Sep 17 00:00:00 2001 From: psteinroe Date: Fri, 30 May 2025 07:45:28 +0200 Subject: [PATCH 15/30] progress --- crates/pgt_lsp/tests/server.rs | 5 ++++- 1 file changed, 4 insertions(+), 1 deletion(-) diff --git a/crates/pgt_lsp/tests/server.rs b/crates/pgt_lsp/tests/server.rs index 2922e5e4..a8fecf5c 100644 --- a/crates/pgt_lsp/tests/server.rs +++ b/crates/pgt_lsp/tests/server.rs @@ -1373,7 +1373,10 @@ async fn extends_config() -> Result<()> { // test_one extends the shared config but sets our test db let mut conf_with_db = PartialConfiguration::init(); conf_with_db.merge_with(PartialConfiguration { - extends: 
Some(StringSet::from_iter(["../postgrestools.jsonc".to_string()])), + extends: Some(StringSet::from_iter([Path::new("..") + .join("postgrestools.jsonc") + .to_string_lossy() + .to_string()])), db: Some(PartialDatabaseConfiguration { database: Some( test_db From 94cad0113136d1817b29d40121cd9925897318f6 Mon Sep 17 00:00:00 2001 From: psteinroe Date: Fri, 30 May 2025 08:21:31 +0200 Subject: [PATCH 16/30] progress --- crates/pgt_lsp/tests/server.rs | 4 +++- 1 file changed, 3 insertions(+), 1 deletion(-) diff --git a/crates/pgt_lsp/tests/server.rs b/crates/pgt_lsp/tests/server.rs index a8fecf5c..650a53f1 100644 --- a/crates/pgt_lsp/tests/server.rs +++ b/crates/pgt_lsp/tests/server.rs @@ -25,6 +25,7 @@ use std::any::type_name; use std::fmt::Display; use std::path::Path; use std::time::Duration; +use test_log::test; use tower::timeout::Timeout; use tower::{Service, ServiceExt}; use tower_lsp::LspService; @@ -1345,7 +1346,8 @@ async fn multiple_projects() -> Result<()> { Ok(()) } -#[tokio::test] +// #[tokio::test] +#[test(tokio::test)] async fn extends_config() -> Result<()> { let factory = ServerFactory::default(); let mut fs = MemoryFileSystem::default(); From 9fe33c304c748724fa5585b91ddcd67b06dad495 Mon Sep 17 00:00:00 2001 From: psteinroe Date: Fri, 30 May 2025 08:42:08 +0200 Subject: [PATCH 17/30] progress --- crates/pgt_lsp/tests/server.rs | 22 ++++++++++++++-------- crates/pgt_workspace/src/configuration.rs | 6 ++++++ 2 files changed, 20 insertions(+), 8 deletions(-) diff --git a/crates/pgt_lsp/tests/server.rs b/crates/pgt_lsp/tests/server.rs index 650a53f1..981aec21 100644 --- a/crates/pgt_lsp/tests/server.rs +++ b/crates/pgt_lsp/tests/server.rs @@ -1365,6 +1365,11 @@ async fn extends_config() -> Result<()> { .await .expect("Failed to setup test database"); + tracing::info!( + "writing to {:?}", + url!("postgrestools.jsonc").to_file_path().unwrap(), + ); + // shared config with default db connection let conf_with_db = PartialConfiguration::init(); fs.insert( @@ 
-1372,13 +1377,16 @@ async fn extends_config() -> Result<()> { serde_json::to_string_pretty(&conf_with_db).unwrap(), ); + let relative_path = if cfg!(windows) { + "..\\postgrestools.jsonc" + } else { + "../postgrestools.jsonc" + }; + // test_one extends the shared config but sets our test db let mut conf_with_db = PartialConfiguration::init(); conf_with_db.merge_with(PartialConfiguration { - extends: Some(StringSet::from_iter([Path::new("..") - .join("postgrestools.jsonc") - .to_string_lossy() - .to_string()])), + extends: Some(StringSet::from_iter([relative_path.to_string()])), db: Some(PartialDatabaseConfiguration { database: Some( test_db @@ -1391,6 +1399,7 @@ async fn extends_config() -> Result<()> { }), ..Default::default() }); + fs.insert( url!("test_one/postgrestools.jsonc").to_file_path().unwrap(), serde_json::to_string_pretty(&conf_with_db).unwrap(), @@ -1399,10 +1408,7 @@ async fn extends_config() -> Result<()> { // test_two extends it but keeps the default one let mut conf_without_db = PartialConfiguration::init(); conf_without_db.merge_with(PartialConfiguration { - extends: Some(StringSet::from_iter([Path::new("..") - .join("postgrestools.jsonc") - .to_string_lossy() - .to_string()])), + extends: Some(StringSet::from_iter([relative_path.to_string()])), ..Default::default() }); fs.insert( diff --git a/crates/pgt_workspace/src/configuration.rs b/crates/pgt_workspace/src/configuration.rs index ec01b2b4..985d9fff 100644 --- a/crates/pgt_workspace/src/configuration.rs +++ b/crates/pgt_workspace/src/configuration.rs @@ -130,6 +130,12 @@ fn load_config( ConfigurationPathHint::None => file_system.working_directory().unwrap_or_default(), }; + // REMOVE + tracing::info!( + "Searching for configuration files in {}", + configuration_directory.display() + ); + // We first search for `postgrestools.jsonc` files if let Some(auto_search_result) = file_system.auto_search( &configuration_directory, From efe479e6121fe38860f7fca4ef8c189a9b235415 Mon Sep 17 00:00:00 2001 
From: psteinroe Date: Fri, 30 May 2025 09:05:14 +0200 Subject: [PATCH 18/30] progress --- crates/pgt_fs/src/fs/memory.rs | 1 + crates/pgt_workspace/src/configuration.rs | 3 +++ 2 files changed, 4 insertions(+) diff --git a/crates/pgt_fs/src/fs/memory.rs b/crates/pgt_fs/src/fs/memory.rs index 59e9cbcb..296f9600 100644 --- a/crates/pgt_fs/src/fs/memory.rs +++ b/crates/pgt_fs/src/fs/memory.rs @@ -162,6 +162,7 @@ impl FileSystem for MemoryFileSystem { } } else { let files = self.files.0.read(); + tracing::info!("files: {:?}", files); let entry = files.get(path).ok_or_else(|| { io::Error::new( io::ErrorKind::NotFound, diff --git a/crates/pgt_workspace/src/configuration.rs b/crates/pgt_workspace/src/configuration.rs index 985d9fff..d36e6e0a 100644 --- a/crates/pgt_workspace/src/configuration.rs +++ b/crates/pgt_workspace/src/configuration.rs @@ -103,6 +103,9 @@ fn load_config( .map_or(PathBuf::new(), |working_directory| working_directory), }; + // REMOVE + tracing::info!("Searching for configuration files in {:?}", base_path); + // If the configuration path hint is from user and is a file path, // we'll load it directly if let ConfigurationPathHint::FromUser(ref config_file_path) = base_path { From a512da6830f82638ce75eb6c83c90c8d3d9feee7 Mon Sep 17 00:00:00 2001 From: psteinroe Date: Fri, 30 May 2025 09:20:50 +0200 Subject: [PATCH 19/30] progress --- crates/pgt_fs/src/fs/memory.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/crates/pgt_fs/src/fs/memory.rs b/crates/pgt_fs/src/fs/memory.rs index 296f9600..856f0171 100644 --- a/crates/pgt_fs/src/fs/memory.rs +++ b/crates/pgt_fs/src/fs/memory.rs @@ -162,7 +162,7 @@ impl FileSystem for MemoryFileSystem { } } else { let files = self.files.0.read(); - tracing::info!("files: {:?}", files); + tracing::info!("files: {:?}", files.keys()); let entry = files.get(path).ok_or_else(|| { io::Error::new( io::ErrorKind::NotFound, From a27979c5300102d629865deca263ac0386a03d96 Mon Sep 17 00:00:00 2001 From: psteinroe 
Date: Fri, 30 May 2025 10:39:26 +0200 Subject: [PATCH 20/30] progress --- crates/pgt_fs/src/fs.rs | 11 +++++++++++ 1 file changed, 11 insertions(+) diff --git a/crates/pgt_fs/src/fs.rs b/crates/pgt_fs/src/fs.rs index 2bfd2e51..556c8ad2 100644 --- a/crates/pgt_fs/src/fs.rs +++ b/crates/pgt_fs/src/fs.rs @@ -86,9 +86,20 @@ pub trait FileSystem: Send + Sync + RefUnwindSafe { loop { let mut errors: Vec = vec![]; + tracing::info!( + "Searching for files {:?} in directory {:?}", + file_names, + curret_search_dir.display() + ); + // Iterate all possible file names for file_name in file_names { let file_path = curret_search_dir.join(file_name); + println!( + "Checking for file: {:?} in directory: {:?}", + file_path, + curret_search_dir.display() + ); match self.read_file_from_path(&file_path) { Ok(content) => { if is_searching_in_parent_dir { From 927e97b98977af69aa8bb3d7c2411394fc21d847 Mon Sep 17 00:00:00 2001 From: psteinroe Date: Fri, 30 May 2025 11:00:55 +0200 Subject: [PATCH 21/30] progress --- crates/pgt_fs/src/fs/memory.rs | 2 +- crates/pgt_workspace/src/configuration.rs | 1 + 2 files changed, 2 insertions(+), 1 deletion(-) diff --git a/crates/pgt_fs/src/fs/memory.rs b/crates/pgt_fs/src/fs/memory.rs index 856f0171..1caea7fc 100644 --- a/crates/pgt_fs/src/fs/memory.rs +++ b/crates/pgt_fs/src/fs/memory.rs @@ -162,7 +162,7 @@ impl FileSystem for MemoryFileSystem { } } else { let files = self.files.0.read(); - tracing::info!("files: {:?}", files.keys()); + tracing::info!("files: {:?} path {:?}", files.keys(), path); let entry = files.get(path).ok_or_else(|| { io::Error::new( io::ErrorKind::NotFound, diff --git a/crates/pgt_workspace/src/configuration.rs b/crates/pgt_workspace/src/configuration.rs index d36e6e0a..cc8bd518 100644 --- a/crates/pgt_workspace/src/configuration.rs +++ b/crates/pgt_workspace/src/configuration.rs @@ -157,6 +157,7 @@ fn load_config( external_resolution_base_path, })) } else { + tracing::info!("auto search returned none"); Ok(None) } } From 
7ca134a23faceea89d1b28e7ca6fbecbf5732d85 Mon Sep 17 00:00:00 2001 From: psteinroe Date: Fri, 30 May 2025 11:49:28 +0200 Subject: [PATCH 22/30] progress --- crates/pgt_fs/src/fs.rs | 1 + crates/pgt_fs/src/fs/memory.rs | 2 ++ crates/pgt_workspace/src/configuration.rs | 4 ++++ 3 files changed, 7 insertions(+) diff --git a/crates/pgt_fs/src/fs.rs b/crates/pgt_fs/src/fs.rs index 556c8ad2..d016b2fa 100644 --- a/crates/pgt_fs/src/fs.rs +++ b/crates/pgt_fs/src/fs.rs @@ -147,6 +147,7 @@ pub trait FileSystem: Send + Sync + RefUnwindSafe { /// - If the file cannot be opened, possibly due to incorrect path or permission issues. /// - If the file is opened but its content cannot be read, potentially due to the file being damaged. fn read_file_from_path(&self, file_path: &PathBuf) -> Result { + tracing::info!("Reading file from path: {:?}", file_path); match self.open_with_options(file_path, OpenOptions::default().read(true)) { Ok(mut file) => { let mut content = String::new(); diff --git a/crates/pgt_fs/src/fs/memory.rs b/crates/pgt_fs/src/fs/memory.rs index 1caea7fc..c322beed 100644 --- a/crates/pgt_fs/src/fs/memory.rs +++ b/crates/pgt_fs/src/fs/memory.rs @@ -134,6 +134,8 @@ impl FileSystem for MemoryFileSystem { )); } + tracing::info!("open_with_options: path {:?}, options: {:?}", path, options); + let mut inner = if options.create || options.create_new { // Acquire write access to the files map if the file may need to be created let mut files = self.files.0.write(); diff --git a/crates/pgt_workspace/src/configuration.rs b/crates/pgt_workspace/src/configuration.rs index cc8bd518..5af3033a 100644 --- a/crates/pgt_workspace/src/configuration.rs +++ b/crates/pgt_workspace/src/configuration.rs @@ -110,6 +110,10 @@ fn load_config( // we'll load it directly if let ConfigurationPathHint::FromUser(ref config_file_path) = base_path { if file_system.path_is_file(config_file_path) { + tracing::info!( + "Loading configuration file from user path: {}", + config_file_path.display() + ); 
let content = strip_jsonc_comments(&file_system.read_file_from_path(config_file_path)?); let deserialized = serde_json::from_str::(&content) From d9e317b1cb36f27b1fce0b57eac5e9cafe4d431e Mon Sep 17 00:00:00 2001 From: psteinroe Date: Fri, 30 May 2025 23:27:26 +0200 Subject: [PATCH 23/30] progress --- crates/pgt_fs/src/fs/memory.rs | 3 +++ crates/pgt_workspace/src/configuration.rs | 10 ++++++++++ 2 files changed, 13 insertions(+) diff --git a/crates/pgt_fs/src/fs/memory.rs b/crates/pgt_fs/src/fs/memory.rs index c322beed..93fbcc8a 100644 --- a/crates/pgt_fs/src/fs/memory.rs +++ b/crates/pgt_fs/src/fs/memory.rs @@ -175,6 +175,9 @@ impl FileSystem for MemoryFileSystem { entry.lock_arc() }; + // REMOVE + tracing::info!("inner"); + if options.truncate { // Clear the buffer if the file was open with `truncate` inner.clear(); diff --git a/crates/pgt_workspace/src/configuration.rs b/crates/pgt_workspace/src/configuration.rs index 5af3033a..91ec9894 100644 --- a/crates/pgt_workspace/src/configuration.rs +++ b/crates/pgt_workspace/src/configuration.rs @@ -35,6 +35,8 @@ impl LoadedConfiguration { value: Option, fs: &DynRef<'_, dyn FileSystem>, ) -> Result { + // REMOVE + tracing::info!("try from payload"); let Some(value) = value else { return Ok(LoadedConfiguration::default()); }; @@ -150,6 +152,8 @@ fn load_config( should_error, )? 
{ let AutoSearchResult { content, file_path } = auto_search_result; + // REMOVE + tracing::info!("Found configuration file: {}", file_path.display()); let deserialized = serde_json::from_str::(&strip_jsonc_comments(&content)) @@ -380,6 +384,12 @@ impl PartialConfigurationExt for PartialConfiguration { .into_path_buf() }; + // REMOVE + tracing::info!( + "Resolving extend configuration file: {}", + extend_configuration_file_path.display() + ); + let mut file = fs .open_with_options( extend_configuration_file_path.as_path(), From c642cc0b0cc9f35b53af6dea7052702e0e02eaaa Mon Sep 17 00:00:00 2001 From: psteinroe Date: Fri, 30 May 2025 23:42:30 +0200 Subject: [PATCH 24/30] progress --- crates/pgt_workspace/src/configuration.rs | 101 ++++++++++++++++------ 1 file changed, 73 insertions(+), 28 deletions(-) diff --git a/crates/pgt_workspace/src/configuration.rs b/crates/pgt_workspace/src/configuration.rs index 91ec9894..076fd971 100644 --- a/crates/pgt_workspace/src/configuration.rs +++ b/crates/pgt_workspace/src/configuration.rs @@ -473,57 +473,102 @@ impl PartialConfigurationExt for PartialConfiguration { /// Normalizes a path, resolving '..' and '.' 
segments without requiring the path to exist fn normalize_path(path: &Path) -> PathBuf { let mut components = Vec::new(); - let mut has_root_or_prefix = false; + let mut prefix_component = None; + let mut is_absolute = false; for component in path.components() { match component { + std::path::Component::Prefix(prefix) => { + prefix_component = Some(component); + components.clear(); + } + std::path::Component::RootDir => { + is_absolute = true; + components.clear(); + } std::path::Component::ParentDir => { - if !components.is_empty() - && !matches!(components.last(), Some(c) if matches!(Path::new(c).components().next(), - Some(std::path::Component::Prefix(_)))) - { + if !components.is_empty() { components.pop(); + } else if !is_absolute && prefix_component.is_none() { + // Only keep parent dir if we're not absolute and have no prefix + components.push(component.as_os_str()); } } - std::path::Component::Normal(c) => components.push(c), - std::path::Component::CurDir => {} - c @ std::path::Component::RootDir => { - has_root_or_prefix = true; - components.clear(); - components.push(c.as_os_str()); + std::path::Component::Normal(c) => { + components.push(c); } - c @ std::path::Component::Prefix(_) => { - has_root_or_prefix = true; - components.clear(); - components.push(c.as_os_str()); + std::path::Component::CurDir => { + // Skip current directory components } } } - if components.is_empty() { - if has_root_or_prefix { - // On Windows, this would be something like "C:\" or "\" - path.ancestors() + let mut result = PathBuf::new(); + + // Add prefix component (like C: on Windows) + if let Some(prefix) = prefix_component { + result.push(prefix.as_os_str()); + } + + // Add root directory if path is absolute + if is_absolute { + result.push(std::path::Component::RootDir.as_os_str()); + } + + // Add normalized components + for component in components { + result.push(component); + } + + // Handle edge cases + if result.as_os_str().is_empty() { + if prefix_component.is_some() 
|| is_absolute { + // This shouldn't happen with proper input, but fallback to original path's root + return path + .ancestors() .last() .unwrap_or(Path::new("")) - .to_path_buf() + .to_path_buf(); } else { - // Return current directory as a relative path - PathBuf::from(".") - } - } else { - let mut result = PathBuf::new(); - for component in components { - result.push(component); + return PathBuf::from("."); } - result } + + result } #[cfg(test)] mod tests { use super::*; + #[test] + fn test_normalize_path_windows_drive() { + if cfg!(windows) { + let path = Path::new(r"z:\workspace\test_one\..\postgrestools.jsonc"); + let normalized = normalize_path(path); + assert_eq!( + normalized, + PathBuf::from(r"z:\workspace\postgrestools.jsonc") + ); + } + } + + #[test] + fn test_normalize_path_relative() { + let path = Path::new("workspace/test_one/../postgrestools.jsonc"); + let normalized = normalize_path(path); + assert_eq!(normalized, PathBuf::from("workspace/postgrestools.jsonc")); + } + + #[test] + fn test_normalize_path_multiple_parent_dirs() { + if cfg!(windows) { + let path = Path::new(r"c:\a\b\c\..\..\d"); + let normalized = normalize_path(path); + assert_eq!(normalized, PathBuf::from(r"c:\a\d")); + } + } + #[test] fn test_strip_jsonc_comments_line_comments() { let input = r#"{ From 73bcd057a0238a4e5fe5e558255508c948f45223 Mon Sep 17 00:00:00 2001 From: psteinroe Date: Fri, 30 May 2025 23:42:38 +0200 Subject: [PATCH 25/30] progress --- crates/pgt_workspace/src/configuration.rs | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/crates/pgt_workspace/src/configuration.rs b/crates/pgt_workspace/src/configuration.rs index 076fd971..95a3aebb 100644 --- a/crates/pgt_workspace/src/configuration.rs +++ b/crates/pgt_workspace/src/configuration.rs @@ -478,7 +478,7 @@ fn normalize_path(path: &Path) -> PathBuf { for component in path.components() { match component { - std::path::Component::Prefix(prefix) => { + std::path::Component::Prefix(_prefix) => { 
prefix_component = Some(component); components.clear(); } From 82956f2ea5b7c68379732f6630fb4338e337eae9 Mon Sep 17 00:00:00 2001 From: psteinroe Date: Sat, 31 May 2025 08:53:58 +0200 Subject: [PATCH 26/30] progress --- crates/pgt_fs/src/fs.rs | 7 ------- crates/pgt_fs/src/fs/memory.rs | 6 ------ crates/pgt_fs/src/fs/os.rs | 2 -- crates/pgt_lsp/tests/server.rs | 8 +------- crates/pgt_workspace/src/configuration.rs | 2 -- crates/pgt_workspace/src/workspace/server.rs | 5 ++--- .../src/workspace/server/schema_cache_manager.rs | 6 ++++++ 7 files changed, 9 insertions(+), 27 deletions(-) diff --git a/crates/pgt_fs/src/fs.rs b/crates/pgt_fs/src/fs.rs index d016b2fa..c8faa0e9 100644 --- a/crates/pgt_fs/src/fs.rs +++ b/crates/pgt_fs/src/fs.rs @@ -86,12 +86,6 @@ pub trait FileSystem: Send + Sync + RefUnwindSafe { loop { let mut errors: Vec = vec![]; - tracing::info!( - "Searching for files {:?} in directory {:?}", - file_names, - curret_search_dir.display() - ); - // Iterate all possible file names for file_name in file_names { let file_path = curret_search_dir.join(file_name); @@ -147,7 +141,6 @@ pub trait FileSystem: Send + Sync + RefUnwindSafe { /// - If the file cannot be opened, possibly due to incorrect path or permission issues. /// - If the file is opened but its content cannot be read, potentially due to the file being damaged. 
fn read_file_from_path(&self, file_path: &PathBuf) -> Result { - tracing::info!("Reading file from path: {:?}", file_path); match self.open_with_options(file_path, OpenOptions::default().read(true)) { Ok(mut file) => { let mut content = String::new(); diff --git a/crates/pgt_fs/src/fs/memory.rs b/crates/pgt_fs/src/fs/memory.rs index 93fbcc8a..59e9cbcb 100644 --- a/crates/pgt_fs/src/fs/memory.rs +++ b/crates/pgt_fs/src/fs/memory.rs @@ -134,8 +134,6 @@ impl FileSystem for MemoryFileSystem { )); } - tracing::info!("open_with_options: path {:?}, options: {:?}", path, options); - let mut inner = if options.create || options.create_new { // Acquire write access to the files map if the file may need to be created let mut files = self.files.0.write(); @@ -164,7 +162,6 @@ impl FileSystem for MemoryFileSystem { } } else { let files = self.files.0.read(); - tracing::info!("files: {:?} path {:?}", files.keys(), path); let entry = files.get(path).ok_or_else(|| { io::Error::new( io::ErrorKind::NotFound, @@ -175,9 +172,6 @@ impl FileSystem for MemoryFileSystem { entry.lock_arc() }; - // REMOVE - tracing::info!("inner"); - if options.truncate { // Clear the buffer if the file was open with `truncate` inner.clear(); diff --git a/crates/pgt_fs/src/fs/os.rs b/crates/pgt_fs/src/fs/os.rs index 4b85d5c5..5033f296 100644 --- a/crates/pgt_fs/src/fs/os.rs +++ b/crates/pgt_fs/src/fs/os.rs @@ -408,8 +408,6 @@ fn follow_symlink( path: &Path, ctx: &dyn TraversalContext, ) -> Result<(PathBuf, FileType), SymlinkExpansionError> { - tracing::info!("Translating symlink: {path:?}"); - let target_path = fs::read_link(path).map_err(|err| { ctx.push_diagnostic(IoError::from(err).with_file_path(path.to_string_lossy().to_string())); SymlinkExpansionError diff --git a/crates/pgt_lsp/tests/server.rs b/crates/pgt_lsp/tests/server.rs index 981aec21..cf8f18f4 100644 --- a/crates/pgt_lsp/tests/server.rs +++ b/crates/pgt_lsp/tests/server.rs @@ -1346,8 +1346,7 @@ async fn multiple_projects() -> Result<()> { 
Ok(()) } -// #[tokio::test] -#[test(tokio::test)] +#[tokio::test] async fn extends_config() -> Result<()> { let factory = ServerFactory::default(); let mut fs = MemoryFileSystem::default(); @@ -1365,11 +1364,6 @@ async fn extends_config() -> Result<()> { .await .expect("Failed to setup test database"); - tracing::info!( - "writing to {:?}", - url!("postgrestools.jsonc").to_file_path().unwrap(), - ); - // shared config with default db connection let conf_with_db = PartialConfiguration::init(); fs.insert( diff --git a/crates/pgt_workspace/src/configuration.rs b/crates/pgt_workspace/src/configuration.rs index 95a3aebb..fff11758 100644 --- a/crates/pgt_workspace/src/configuration.rs +++ b/crates/pgt_workspace/src/configuration.rs @@ -35,8 +35,6 @@ impl LoadedConfiguration { value: Option, fs: &DynRef<'_, dyn FileSystem>, ) -> Result { - // REMOVE - tracing::info!("try from payload"); let Some(value) = value else { return Ok(LoadedConfiguration::default()); }; diff --git a/crates/pgt_workspace/src/workspace/server.rs b/crates/pgt_workspace/src/workspace/server.rs index ecbd8eb5..34a832b0 100644 --- a/crates/pgt_workspace/src/workspace/server.rs +++ b/crates/pgt_workspace/src/workspace/server.rs @@ -94,8 +94,8 @@ impl WorkspaceServer { Self { settings: RwLock::default(), parsed_documents: DashMap::default(), - schema_cache: SchemaCacheManager::default(), - connection: ConnectionManager::default(), + schema_cache: SchemaCacheManager::new(), + connection: ConnectionManager::new(), } } @@ -244,7 +244,6 @@ impl Workspace for WorkspaceServer { /// by another thread having previously panicked while holding the lock #[tracing::instrument(level = "trace", skip(self), err)] fn update_settings(&self, params: UpdateSettingsParams) -> Result<(), WorkspaceError> { - tracing::info!("Updating settings in workspace"); let mut workspace = self.workspaces_mut(); workspace diff --git a/crates/pgt_workspace/src/workspace/server/schema_cache_manager.rs 
b/crates/pgt_workspace/src/workspace/server/schema_cache_manager.rs index 10dab3dd..2cd39772 100644 --- a/crates/pgt_workspace/src/workspace/server/schema_cache_manager.rs +++ b/crates/pgt_workspace/src/workspace/server/schema_cache_manager.rs @@ -14,6 +14,12 @@ pub struct SchemaCacheManager { } impl SchemaCacheManager { + pub fn new() -> Self { + Self { + schemas: DashMap::new(), + } + } + pub fn load(&self, pool: PgPool) -> Result, WorkspaceError> { let key: ConnectionKey = (&pool).into(); From c6e7045861826de10cbc7dfcdca33586b14db283 Mon Sep 17 00:00:00 2001 From: psteinroe Date: Sat, 31 May 2025 08:54:46 +0200 Subject: [PATCH 27/30] progress --- crates/pgt_lsp/tests/server.rs | 2 -- 1 file changed, 2 deletions(-) diff --git a/crates/pgt_lsp/tests/server.rs b/crates/pgt_lsp/tests/server.rs index cf8f18f4..a12aa79a 100644 --- a/crates/pgt_lsp/tests/server.rs +++ b/crates/pgt_lsp/tests/server.rs @@ -23,9 +23,7 @@ use serde_json::{from_value, to_value}; use sqlx::Executor; use std::any::type_name; use std::fmt::Display; -use std::path::Path; use std::time::Duration; -use test_log::test; use tower::timeout::Timeout; use tower::{Service, ServiceExt}; use tower_lsp::LspService; From 8616270e74991fa5b4cd0513ed7ee7b13973a678 Mon Sep 17 00:00:00 2001 From: psteinroe Date: Sat, 31 May 2025 08:55:34 +0200 Subject: [PATCH 28/30] progress --- crates/pgt_workspace/src/configuration.rs | 22 ---------------------- 1 file changed, 22 deletions(-) diff --git a/crates/pgt_workspace/src/configuration.rs b/crates/pgt_workspace/src/configuration.rs index fff11758..1baebcc7 100644 --- a/crates/pgt_workspace/src/configuration.rs +++ b/crates/pgt_workspace/src/configuration.rs @@ -103,17 +103,10 @@ fn load_config( .map_or(PathBuf::new(), |working_directory| working_directory), }; - // REMOVE - tracing::info!("Searching for configuration files in {:?}", base_path); - // If the configuration path hint is from user and is a file path, // we'll load it directly if let 
ConfigurationPathHint::FromUser(ref config_file_path) = base_path { if file_system.path_is_file(config_file_path) { - tracing::info!( - "Loading configuration file from user path: {}", - config_file_path.display() - ); let content = strip_jsonc_comments(&file_system.read_file_from_path(config_file_path)?); let deserialized = serde_json::from_str::(&content) @@ -137,12 +130,6 @@ fn load_config( ConfigurationPathHint::None => file_system.working_directory().unwrap_or_default(), }; - // REMOVE - tracing::info!( - "Searching for configuration files in {}", - configuration_directory.display() - ); - // We first search for `postgrestools.jsonc` files if let Some(auto_search_result) = file_system.auto_search( &configuration_directory, @@ -150,8 +137,6 @@ fn load_config( should_error, )? { let AutoSearchResult { content, file_path } = auto_search_result; - // REMOVE - tracing::info!("Found configuration file: {}", file_path.display()); let deserialized = serde_json::from_str::(&strip_jsonc_comments(&content)) @@ -163,7 +148,6 @@ fn load_config( external_resolution_base_path, })) } else { - tracing::info!("auto search returned none"); Ok(None) } } @@ -382,12 +366,6 @@ impl PartialConfigurationExt for PartialConfiguration { .into_path_buf() }; - // REMOVE - tracing::info!( - "Resolving extend configuration file: {}", - extend_configuration_file_path.display() - ); - let mut file = fs .open_with_options( extend_configuration_file_path.as_path(), From 91ff8b2f6f0c3de3041e53324ab8b6378b9597d2 Mon Sep 17 00:00:00 2001 From: psteinroe Date: Sun, 1 Jun 2025 15:46:24 +0200 Subject: [PATCH 29/30] progress --- crates/pgt_fs/src/fs.rs | 5 ---- crates/pgt_fs/src/fs/memory.rs | 1 + crates/pgt_workspace/src/settings.rs | 12 ++++------ crates/pgt_workspace/src/workspace/server.rs | 23 +++++++++++++++---- .../workspace/server/schema_cache_manager.rs | 21 +++++++++++++---- 5 files changed, 40 insertions(+), 22 deletions(-) diff --git a/crates/pgt_fs/src/fs.rs b/crates/pgt_fs/src/fs.rs index 
c8faa0e9..2bfd2e51 100644 --- a/crates/pgt_fs/src/fs.rs +++ b/crates/pgt_fs/src/fs.rs @@ -89,11 +89,6 @@ pub trait FileSystem: Send + Sync + RefUnwindSafe { // Iterate all possible file names for file_name in file_names { let file_path = curret_search_dir.join(file_name); - println!( - "Checking for file: {:?} in directory: {:?}", - file_path, - curret_search_dir.display() - ); match self.read_file_from_path(&file_path) { Ok(content) => { if is_searching_in_parent_dir { diff --git a/crates/pgt_fs/src/fs/memory.rs b/crates/pgt_fs/src/fs/memory.rs index 59e9cbcb..a744e575 100644 --- a/crates/pgt_fs/src/fs/memory.rs +++ b/crates/pgt_fs/src/fs/memory.rs @@ -234,6 +234,7 @@ impl FileSystem for MemoryFileSystem { _specifier: &str, _path: &Path, ) -> Result { + // not needed for the memory file system todo!() } } diff --git a/crates/pgt_workspace/src/settings.rs b/crates/pgt_workspace/src/settings.rs index f7adb8db..08854493 100644 --- a/crates/pgt_workspace/src/settings.rs +++ b/crates/pgt_workspace/src/settings.rs @@ -51,18 +51,16 @@ impl WorkspaceSettings { pub fn get_current_project_path(&self) -> Option<&PgTPath> { trace!("Current key {:?}", self.current_project); - let data = self.data.get(self.current_project); - if let Some(data) = data { - Some(&data.path) - } else { - None - } + self.data + .get(self.current_project) + .as_ref() + .map(|d| &d.path) } pub fn get_current_project_data_mut(&mut self) -> &mut ProjectData { self.data .get_mut(self.current_project) - .expect("You must have at least one workspace.") + .expect("Current project not configured") } /// Retrieves the settings of the current workspace folder diff --git a/crates/pgt_workspace/src/workspace/server.rs b/crates/pgt_workspace/src/workspace/server.rs index 34a832b0..67a3713c 100644 --- a/crates/pgt_workspace/src/workspace/server.rs +++ b/crates/pgt_workspace/src/workspace/server.rs @@ -68,7 +68,6 @@ pub(super) struct WorkspaceServer { settings: RwLock, /// Stores the schema cache for this workspace 
- /// TODO: store this per connection schema_cache: SchemaCacheManager, parsed_documents: DashMap, @@ -133,7 +132,7 @@ impl WorkspaceServer { workspace_mut.set_current_project(project_key); } - /// Checks whether, if the current path belongs to the current project. + /// Checks whether the current path belongs to the current project. /// /// If there's a match, and the match **isn't** the current project, it returns the new key. fn path_belongs_to_current_workspace(&self, path: &PgTPath) -> Option { @@ -213,6 +212,10 @@ impl Workspace for WorkspaceServer { let is_new_path = match (current_project_path.as_deref(), params.path.as_ref()) { (Some(current_project_path), Some(params_path)) => current_project_path != params_path, + (Some(_), None) => { + // If the current project is set, but no path is provided, we assume it's a new project + true + } _ => true, }; @@ -410,9 +413,19 @@ impl Workspace for WorkspaceServer { params: PullDiagnosticsParams, ) -> Result { let settings = self.workspaces(); - // TODO: not sure how we should handle this - // maybe fallback to default settings? or return an error? 
- let settings = settings.settings().expect("Settings should be initialized"); + + let settings = match settings.settings() { + Some(settings) => settings, + None => { + // return an empty result if no settings are available + // we might want to return an error here in the future + return Ok(PullDiagnosticsResult { + diagnostics: Vec::new(), + errors: 0, + skipped_diagnostics: 0, + }); + } + }; // create analyser for this run // first, collect enabled and disabled rules from the workspace settings diff --git a/crates/pgt_workspace/src/workspace/server/schema_cache_manager.rs b/crates/pgt_workspace/src/workspace/server/schema_cache_manager.rs index 2cd39772..b42dfc34 100644 --- a/crates/pgt_workspace/src/workspace/server/schema_cache_manager.rs +++ b/crates/pgt_workspace/src/workspace/server/schema_cache_manager.rs @@ -27,10 +27,21 @@ impl SchemaCacheManager { return Ok(Arc::clone(&*cache)); } - let schema_cache = Arc::new(run_async(async move { SchemaCache::load(&pool).await })??); - - self.schemas.insert(key, Arc::clone(&schema_cache)); - - Ok(schema_cache) + let schema_cache = self + .schemas + .entry(key) + .or_try_insert_with::(|| { + // This closure will only be called once per key if multiple threads + // try to access the same key simultaneously + let pool_clone = pool.clone(); + let schema_cache = + Arc::new(run_async( + async move { SchemaCache::load(&pool_clone).await }, + )??); + + Ok(schema_cache) + })?; + + Ok(Arc::clone(&schema_cache)) } } From f0d740cac99af91aa6e23ddfd46f6b2db22f1e61 Mon Sep 17 00:00:00 2001 From: psteinroe Date: Sun, 1 Jun 2025 18:49:01 +0200 Subject: [PATCH 30/30] progress --- crates/pgt_lsp/tests/server.rs | 10 ++++------ 1 file changed, 4 insertions(+), 6 deletions(-) diff --git a/crates/pgt_lsp/tests/server.rs b/crates/pgt_lsp/tests/server.rs index b6501e85..438c7298 100644 --- a/crates/pgt_lsp/tests/server.rs +++ b/crates/pgt_lsp/tests/server.rs @@ -1158,11 +1158,10 @@ async fn test_issue_303(test_db: PgPool) -> Result<()> { 
Ok(()) } -#[tokio::test] -async fn multiple_projects() -> Result<()> { +#[sqlx::test(migrator = "pgt_test_utils::MIGRATIONS")] +async fn multiple_projects(test_db: PgPool) -> Result<()> { let factory = ServerFactory::default(); let mut fs = MemoryFileSystem::default(); - let test_db = get_new_test_db().await; let setup = r#" create table public.users ( @@ -1338,11 +1337,10 @@ async fn multiple_projects() -> Result<()> { Ok(()) } -#[tokio::test] -async fn extends_config() -> Result<()> { +#[sqlx::test(migrator = "pgt_test_utils::MIGRATIONS")] +async fn extends_config(test_db: PgPool) -> Result<()> { let factory = ServerFactory::default(); let mut fs = MemoryFileSystem::default(); - let test_db = get_new_test_db().await; let setup = r#" create table public.extends_config_test (