Commit 3698af5

Authored Jun 1, 2025
feat: workspace support (supabase-community#408)
1 parent f6b752c commit 3698af5

34 files changed (+1688, −292 lines)
 

‎Cargo.lock

Lines changed: 14 additions & 0 deletions
Diff not rendered (generated file).

‎Cargo.toml

Lines changed: 2 additions & 0 deletions
@@ -28,6 +28,7 @@ enumflags2 = "0.7.11"
 ignore = "0.4.23"
 indexmap = { version = "2.6.0", features = ["serde"] }
 insta = "1.31.0"
+oxc_resolver = "1.12.0"
 pg_query = "6.1.0"
 proc-macro2 = "1.0.66"
 quote = "1.0.33"
@@ -38,6 +39,7 @@ schemars = { version = "0.8.22", features = ["indexmap2", "small
 serde = "1.0.195"
 serde_json = "1.0.114"
 similar = "2.6.0"
+slotmap = "1.0.7"
 smallvec = { version = "1.13.2", features = ["union", "const_new", "serde"] }
 strum = { version = "0.27.1", features = ["derive"] }
 # this will use tokio if available, otherwise async-std

‎crates/pgt_cli/src/commands/mod.rs

Lines changed: 6 additions & 2 deletions
@@ -9,9 +9,9 @@ use bpaf::Bpaf;
 use pgt_configuration::{PartialConfiguration, partial_configuration};
 use pgt_console::Console;
 use pgt_fs::FileSystem;
+use pgt_workspace::PartialConfigurationExt;
 use pgt_workspace::configuration::{LoadedConfiguration, load_configuration};
-use pgt_workspace::settings::PartialConfigurationExt;
-use pgt_workspace::workspace::UpdateSettingsParams;
+use pgt_workspace::workspace::{RegisterProjectFolderParams, UpdateSettingsParams};
 use pgt_workspace::{DynRef, Workspace, WorkspaceError};
 use std::ffi::OsString;
 use std::path::PathBuf;
@@ -301,6 +301,10 @@ pub(crate) trait CommandRunner: Sized {
         let (vcs_base_path, gitignore_matches) =
             configuration.retrieve_gitignore_matches(fs, vcs_base_path.as_deref())?;
         let paths = self.get_files_to_process(fs, &configuration)?;
+        workspace.register_project_folder(RegisterProjectFolderParams {
+            path: fs.working_directory(),
+            set_as_current_workspace: true,
+        })?;
 
         workspace.update_settings(UpdateSettingsParams {
             workspace_directory: fs.working_directory(),

‎crates/pgt_cli/src/diagnostics.rs

Lines changed: 1 addition & 1 deletion
@@ -455,7 +455,7 @@ mod test {
     fn termination_diagnostic_size() {
         assert_eq!(
             std::mem::size_of::<CliDiagnostic>(),
-            80,
+            96,
             "you successfully decreased the size of the diagnostic!"
         )
     }

‎crates/pgt_configuration/Cargo.toml

Lines changed: 1 addition & 0 deletions
@@ -16,6 +16,7 @@ biome_deserialize = { workspace = true, features = ["schema"] }
 biome_deserialize_macros = { workspace = true }
 bpaf = { workspace = true }
 indexmap = { workspace = true }
+oxc_resolver = { workspace = true }
 pgt_analyse = { workspace = true }
 pgt_analyser = { workspace = true }
 pgt_console = { workspace = true }

‎crates/pgt_configuration/src/diagnostics.rs

Lines changed: 53 additions & 0 deletions
@@ -1,5 +1,7 @@
 use pgt_console::fmt::Display;
 use pgt_console::{MarkupBuf, markup};
+use pgt_diagnostics::adapters::ResolveError;
+
 use pgt_diagnostics::{Advices, Diagnostic, Error, LogCategory, MessageAndDescription, Visit};
 use serde::{Deserialize, Serialize};
 use std::fmt::{Debug, Formatter};
@@ -21,6 +23,12 @@ pub enum ConfigurationDiagnostic {
 
     /// Thrown when the pattern inside the `ignore` field errors
     InvalidIgnorePattern(InvalidIgnorePattern),
+
+    /// Thrown when there's something wrong with the files specified inside `"extends"`
+    CantLoadExtendFile(CantLoadExtendFile),
+
+    /// Thrown when a configuration file can't be resolved from `node_modules`
+    CantResolve(CantResolve),
 }
 
 impl ConfigurationDiagnostic {
@@ -72,6 +80,18 @@ impl ConfigurationDiagnostic {
             message: MessageAndDescription::from(markup! {{message}}.to_owned()),
         })
     }
+
+    pub fn cant_resolve(path: impl Display, source: oxc_resolver::ResolveError) -> Self {
+        Self::CantResolve(CantResolve {
+            message: MessageAndDescription::from(
+                markup! {
+                    "Failed to resolve the configuration from "{{path}}
+                }
+                .to_owned(),
+            ),
+            source: Some(Error::from(ResolveError::from(source))),
+        })
+    }
 }
 
 impl Debug for ConfigurationDiagnostic {
@@ -168,3 +188,36 @@ pub struct CantResolve {
     #[source]
     source: Option<Error>,
 }
+
+#[derive(Debug, Serialize, Deserialize, Diagnostic)]
+#[diagnostic(
+    category = "configuration",
+    severity = Error,
+)]
+pub struct CantLoadExtendFile {
+    #[location(resource)]
+    file_path: String,
+    #[message]
+    #[description]
+    message: MessageAndDescription,
+
+    #[verbose_advice]
+    verbose_advice: ConfigurationAdvices,
+}
+
+impl CantLoadExtendFile {
+    pub fn new(file_path: impl Into<String>, message: impl Display) -> Self {
+        Self {
+            file_path: file_path.into(),
+            message: MessageAndDescription::from(markup! {{message}}.to_owned()),
+            verbose_advice: ConfigurationAdvices::default(),
+        }
+    }
+
+    pub fn with_verbose_advice(mut self, messsage: impl Display) -> Self {
+        self.verbose_advice
+            .messages
+            .push(markup! {{messsage}}.to_owned());
+        self
+    }
+}

‎crates/pgt_configuration/src/lib.rs

Lines changed: 6 additions & 0 deletions
@@ -22,6 +22,7 @@ pub use analyser::{
     RulePlainConfiguration, RuleSelector, RuleWithFixOptions, RuleWithOptions, Rules,
     partial_linter_configuration,
 };
+use biome_deserialize::StringSet;
 use biome_deserialize_macros::{Merge, Partial};
 use bpaf::Bpaf;
 use database::{
@@ -50,6 +51,10 @@ pub struct Configuration {
     #[partial(bpaf(hide))]
     pub schema: String,
 
+    /// A list of paths to other JSON files, used to extends the current configuration.
+    #[partial(bpaf(hide))]
+    pub extends: StringSet,
+
     /// The configuration of the VCS integration
     #[partial(type, bpaf(external(partial_vcs_configuration), optional, hide_usage))]
     pub vcs: VcsConfiguration,
@@ -85,6 +90,7 @@ impl PartialConfiguration {
     pub fn init() -> Self {
         Self {
             schema: Some(format!("https://pgtools.dev/schemas/{VERSION}/schema.json")),
+            extends: Some(StringSet::default()),
             files: Some(PartialFilesConfiguration {
                 ignore: Some(Default::default()),
                 ..Default::default()

‎crates/pgt_diagnostics/Cargo.toml

Lines changed: 1 addition & 0 deletions
@@ -15,6 +15,7 @@ version = "0.0.0"
 backtrace = "0.3.74"
 bpaf = { workspace = true }
 enumflags2 = { workspace = true }
+oxc_resolver = { workspace = true }
 pgt_console = { workspace = true, features = ["serde"] }
 pgt_diagnostics_categories = { workspace = true, features = ["serde"] }
 pgt_diagnostics_macros = { workspace = true }

‎crates/pgt_diagnostics/src/adapters.rs

Lines changed: 25 additions & 0 deletions
@@ -134,3 +134,28 @@ impl Diagnostic for SerdeJsonError {
         fmt.write_markup(markup!({ AsConsoleDisplay(&self.error) }))
     }
 }
+
+#[derive(Debug)]
+pub struct ResolveError {
+    error: oxc_resolver::ResolveError,
+}
+
+impl From<oxc_resolver::ResolveError> for ResolveError {
+    fn from(error: oxc_resolver::ResolveError) -> Self {
+        Self { error }
+    }
+}
+
+impl Diagnostic for ResolveError {
+    fn category(&self) -> Option<&'static Category> {
+        Some(category!("internalError/io"))
+    }
+
+    fn description(&self, fmt: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        write!(fmt, "{}", self.error)
+    }
+
+    fn message(&self, fmt: &mut fmt::Formatter<'_>) -> io::Result<()> {
+        fmt.write_markup(markup!({ AsConsoleDisplay(&self.error) }))
+    }
+}

‎crates/pgt_fs/Cargo.toml

Lines changed: 1 addition & 0 deletions
@@ -15,6 +15,7 @@ version = "0.0.0"
 crossbeam = { workspace = true }
 directories = "5.0.1"
 enumflags2 = { workspace = true }
+oxc_resolver = { workspace = true }
 parking_lot = { version = "0.12.3", features = ["arc_lock"] }
 pgt_diagnostics = { workspace = true }
 rayon = { workspace = true }

‎crates/pgt_fs/src/fs.rs

Lines changed: 15 additions & 0 deletions
@@ -1,6 +1,7 @@
 use crate::{PathInterner, PgTPath};
 pub use memory::{ErrorEntry, MemoryFileSystem};
 pub use os::OsFileSystem;
+use oxc_resolver::{Resolution, ResolveError};
 use pgt_diagnostics::{Advices, Diagnostic, LogCategory, Visit, console};
 use pgt_diagnostics::{Error, Severity};
 use serde::{Deserialize, Serialize};
@@ -164,6 +165,12 @@ pub trait FileSystem: Send + Sync + RefUnwindSafe {
     fn get_changed_files(&self, base: &str) -> io::Result<Vec<String>>;
 
     fn get_staged_files(&self) -> io::Result<Vec<String>>;
+
+    fn resolve_configuration(
+        &self,
+        specifier: &str,
+        path: &Path,
+    ) -> Result<Resolution, ResolveError>;
 }
 
 /// Result of the auto search
@@ -355,6 +362,14 @@ where
     fn get_staged_files(&self) -> io::Result<Vec<String>> {
         T::get_staged_files(self)
     }
+
+    fn resolve_configuration(
+        &self,
+        specifier: &str,
+        path: &Path,
+    ) -> Result<Resolution, ResolveError> {
+        T::resolve_configuration(self, specifier, path)
+    }
 }
 
 #[derive(Debug, Diagnostic, Deserialize, Serialize)]

‎crates/pgt_fs/src/fs/memory.rs

Lines changed: 10 additions & 0 deletions
@@ -1,3 +1,4 @@
+use oxc_resolver::{Resolution, ResolveError};
 use rustc_hash::FxHashMap;
 use std::collections::hash_map::{Entry, IntoIter};
 use std::io;
@@ -227,6 +228,15 @@ impl FileSystem for MemoryFileSystem {
 
         Ok(cb())
     }
+
+    fn resolve_configuration(
+        &self,
+        _specifier: &str,
+        _path: &Path,
+    ) -> Result<Resolution, ResolveError> {
+        // not needed for the memory file system
+        todo!()
+    }
 }
 
 struct MemoryFile {

‎crates/pgt_fs/src/fs/os.rs

Lines changed: 21 additions & 2 deletions
@@ -5,9 +5,11 @@ use crate::{
     FileSystem, PgTPath,
     fs::{TraversalContext, TraversalScope},
 };
+use oxc_resolver::{Resolution, ResolveError, ResolveOptions, Resolver};
 use pgt_diagnostics::{DiagnosticExt, Error, Severity, adapters::IoError};
 use rayon::{Scope, scope};
 use std::fs::{DirEntry, FileType};
+use std::panic::AssertUnwindSafe;
 use std::process::Command;
 use std::{
     env, fs,
@@ -21,12 +23,18 @@ const MAX_SYMLINK_DEPTH: u8 = 3;
 /// Implementation of [FileSystem] that directly calls through to the underlying OS
 pub struct OsFileSystem {
     pub working_directory: Option<PathBuf>,
+    pub configuration_resolver: AssertUnwindSafe<Resolver>,
 }
 
 impl OsFileSystem {
     pub fn new(working_directory: PathBuf) -> Self {
         Self {
             working_directory: Some(working_directory),
+            configuration_resolver: AssertUnwindSafe(Resolver::new(ResolveOptions {
+                condition_names: vec!["node".to_string(), "import".to_string()],
+                extensions: vec![".json".to_string(), ".jsonc".to_string()],
+                ..ResolveOptions::default()
+            })),
         }
     }
 }
@@ -35,6 +43,11 @@ impl Default for OsFileSystem {
     fn default() -> Self {
         Self {
             working_directory: env::current_dir().ok(),
+            configuration_resolver: AssertUnwindSafe(Resolver::new(ResolveOptions {
+                condition_names: vec!["node".to_string(), "import".to_string()],
+                extensions: vec![".json".to_string(), ".jsonc".to_string()],
+                ..ResolveOptions::default()
+            })),
         }
     }
 }
@@ -116,6 +129,14 @@ impl FileSystem for OsFileSystem {
             .map(|l| l.to_string())
             .collect())
     }
+
+    fn resolve_configuration(
+        &self,
+        specifier: &str,
+        path: &Path,
+    ) -> Result<Resolution, ResolveError> {
+        self.configuration_resolver.resolve(path, specifier)
+    }
 }
 
 struct OsFile {
@@ -387,8 +408,6 @@ fn follow_symlink(
     path: &Path,
     ctx: &dyn TraversalContext,
 ) -> Result<(PathBuf, FileType), SymlinkExpansionError> {
-    tracing::info!("Translating symlink: {path:?}");
-
     let target_path = fs::read_link(path).map_err(|err| {
         ctx.push_diagnostic(IoError::from(err).with_file_path(path.to_string_lossy().to_string()));
         SymlinkExpansionError

‎crates/pgt_lsp/src/server.rs

Lines changed: 51 additions & 1 deletion
@@ -1,10 +1,13 @@
 use crate::capabilities::server_capabilities;
 use crate::handlers;
-use crate::session::{CapabilitySet, CapabilityStatus, Session, SessionHandle, SessionKey};
+use crate::session::{
+    CapabilitySet, CapabilityStatus, ClientInformation, Session, SessionHandle, SessionKey,
+};
 use crate::utils::{into_lsp_error, panic_to_lsp_error};
 use futures::FutureExt;
 use futures::future::ready;
 use pgt_fs::{ConfigName, FileSystem, OsFileSystem};
+use pgt_workspace::workspace::{RegisterProjectFolderParams, UnregisterProjectFolderParams};
 use pgt_workspace::{DynRef, Workspace, workspace};
 use rustc_hash::FxHashMap;
 use serde_json::json;
@@ -107,6 +110,10 @@ impl LanguageServer for LSPServer {
 
         self.session.initialize(
            params.capabilities,
+            params.client_info.map(|client_info| ClientInformation {
+                name: client_info.name,
+                version: client_info.version,
+            }),
            params.root_uri,
            params.workspace_folders,
        );
@@ -217,6 +224,47 @@ impl LanguageServer for LSPServer {
             .ok();
     }
 
+    async fn did_change_workspace_folders(&self, params: DidChangeWorkspaceFoldersParams) {
+        for removed in &params.event.removed {
+            if let Ok(project_path) = self.session.file_path(&removed.uri) {
+                let result = self
+                    .session
+                    .workspace
+                    .unregister_project_folder(UnregisterProjectFolderParams { path: project_path })
+                    .map_err(into_lsp_error);
+
+                if let Err(err) = result {
+                    error!("Failed to remove project from the workspace: {}", err);
+                    self.session
+                        .client
+                        .log_message(MessageType::ERROR, err)
+                        .await;
+                }
+            }
+        }
+
+        for added in &params.event.added {
+            if let Ok(project_path) = self.session.file_path(&added.uri) {
+                let result = self
+                    .session
+                    .workspace
+                    .register_project_folder(RegisterProjectFolderParams {
+                        path: Some(project_path.to_path_buf()),
+                        set_as_current_workspace: true,
+                    })
+                    .map_err(into_lsp_error);
+
+                if let Err(err) = result {
+                    error!("Failed to add project to the workspace: {}", err);
+                    self.session
+                        .client
+                        .log_message(MessageType::ERROR, err)
+                        .await;
+                }
+            }
+        }
+    }
+
     #[tracing::instrument(level = "trace", skip_all)]
     async fn completion(&self, params: CompletionParams) -> LspResult<Option<CompletionResponse>> {
         match handlers::completions::get_completions(&self.session, params) {
@@ -398,6 +446,8 @@ impl ServerFactory {
         workspace_method!(builder, close_file);
         workspace_method!(builder, pull_diagnostics);
         workspace_method!(builder, get_completions);
+        workspace_method!(builder, register_project_folder);
+        workspace_method!(builder, unregister_project_folder);
 
         let (service, socket) = builder.finish();
         ServerConnection { socket, service }

‎crates/pgt_lsp/src/session.rs

Lines changed: 40 additions & 2 deletions
@@ -10,11 +10,11 @@ use pgt_analyse::RuleCategoriesBuilder;
 use pgt_configuration::{ConfigurationPathHint, PartialConfiguration};
 use pgt_diagnostics::{DiagnosticExt, Error};
 use pgt_fs::{FileSystem, PgTPath};
+use pgt_workspace::PartialConfigurationExt;
 use pgt_workspace::Workspace;
 use pgt_workspace::configuration::{LoadedConfiguration, load_configuration};
 use pgt_workspace::features;
-use pgt_workspace::settings::PartialConfigurationExt;
-use pgt_workspace::workspace::UpdateSettingsParams;
+use pgt_workspace::workspace::{RegisterProjectFolderParams, UpdateSettingsParams};
 use pgt_workspace::{DynRef, WorkspaceError};
 use rustc_hash::FxHashMap;
 use serde_json::Value;
@@ -31,6 +31,14 @@ use tower_lsp::lsp_types::{MessageType, Registration};
 use tower_lsp::lsp_types::{Unregistration, WorkspaceFolder};
 use tracing::{error, info};
 
+pub(crate) struct ClientInformation {
+    /// The name of the client
+    pub(crate) name: String,
+
+    /// The version of the client
+    pub(crate) version: Option<String>,
+}
+
 /// Key, uniquely identifying a LSP session.
 #[derive(Clone, Copy, Eq, PartialEq, Hash, Debug)]
 pub(crate) struct SessionKey(pub u64);
@@ -68,6 +76,7 @@ pub(crate) struct Session {
 struct InitializeParams {
     /// The capabilities provided by the client as part of [`lsp_types::InitializeParams`]
     client_capabilities: lsp_types::ClientCapabilities,
+    client_information: Option<ClientInformation>,
     root_uri: Option<Url>,
     #[allow(unused)]
     workspace_folders: Option<Vec<WorkspaceFolder>>,
@@ -164,11 +173,13 @@ impl Session {
     pub(crate) fn initialize(
         &self,
         client_capabilities: lsp_types::ClientCapabilities,
+        client_information: Option<ClientInformation>,
         root_uri: Option<Url>,
         workspace_folders: Option<Vec<WorkspaceFolder>>,
     ) {
         let result = self.initialize_params.set(InitializeParams {
             client_capabilities,
+            client_information,
             root_uri,
             workspace_folders,
         });
@@ -446,6 +457,8 @@ impl Session {
         info!("Configuration loaded successfully from disk.");
         info!("Update workspace settings.");
 
+        let fs = &self.fs;
+
         if let Some(ws_configuration) = extra_config {
             fs_configuration.merge_with(ws_configuration);
         }
@@ -455,6 +468,31 @@ impl Session {
 
         match result {
             Ok((vcs_base_path, gitignore_matches)) => {
+                let register_result =
+                    if let ConfigurationPathHint::FromWorkspace(path) = &base_path {
+                        // We don't need the key
+                        self.workspace
+                            .register_project_folder(RegisterProjectFolderParams {
+                                path: Some(path.clone()),
+                                // This is naive, but we don't know if the user has a file already open or not, so we register every project as the current one.
+                                // The correct one is actually set when the LSP calls `textDocument/didOpen`
+                                set_as_current_workspace: true,
+                            })
+                            .err()
+                    } else {
+                        self.workspace
+                            .register_project_folder(RegisterProjectFolderParams {
+                                path: fs.working_directory(),
+                                set_as_current_workspace: true,
+                            })
+                            .err()
+                    };
+                if let Some(error) = register_result {
+                    error!("Failed to register the project folder: {}", error);
+                    self.client.log_message(MessageType::ERROR, &error).await;
+                    return ConfigurationStatus::Error;
+                }
+
                 let result = self.workspace.update_settings(UpdateSettingsParams {
                     workspace_directory: self.fs.working_directory(),

‎crates/pgt_lsp/tests/server.rs

Lines changed: 443 additions & 8 deletions
Large diffs are not rendered by default.

‎crates/pgt_workspace/Cargo.toml

Lines changed: 1 addition & 0 deletions
@@ -35,6 +35,7 @@ rustc-hash = { workspace = true }
 schemars = { workspace = true, optional = true }
 serde = { workspace = true, features = ["derive"] }
 serde_json = { workspace = true, features = ["raw_value"] }
+slotmap = { workspace = true, features = ["serde"] }
 sqlx.workspace = true
 strum = { workspace = true }
 tokio = { workspace = true, features = ["rt", "rt-multi-thread"] }

‎crates/pgt_workspace/src/configuration.rs

Lines changed: 297 additions & 21 deletions
@@ -1,14 +1,17 @@
 use std::{
+    ffi::OsStr,
     io::ErrorKind,
     ops::Deref,
     path::{Path, PathBuf},
 };
 
+use biome_deserialize::Merge;
 use pgt_analyse::AnalyserRules;
 use pgt_configuration::{
     ConfigurationDiagnostic, ConfigurationPathHint, ConfigurationPayload, PartialConfiguration,
-    VERSION, push_to_analyser_rules,
+    VERSION, diagnostics::CantLoadExtendFile, push_to_analyser_rules,
 };
+use pgt_console::markup;
 use pgt_fs::{AutoSearchResult, ConfigName, FileSystem, OpenOptions};
 
 use crate::{DynRef, WorkspaceError, settings::Settings};
@@ -28,34 +31,41 @@ pub struct LoadedConfiguration {
 }
 
 impl LoadedConfiguration {
-    /// Return the path of the **directory** where the configuration is
-    pub fn directory_path(&self) -> Option<&Path> {
-        self.directory_path.as_deref()
-    }
-
-    /// Return the path of the **file** where the configuration is
-    pub fn file_path(&self) -> Option<&Path> {
-        self.file_path.as_deref()
-    }
-}
-
-impl From<Option<ConfigurationPayload>> for LoadedConfiguration {
-    fn from(value: Option<ConfigurationPayload>) -> Self {
+    fn try_from_payload(
+        value: Option<ConfigurationPayload>,
+        fs: &DynRef<'_, dyn FileSystem>,
+    ) -> Result<Self, WorkspaceError> {
         let Some(value) = value else {
-            return LoadedConfiguration::default();
+            return Ok(LoadedConfiguration::default());
         };
 
         let ConfigurationPayload {
+            external_resolution_base_path,
             configuration_file_path,
-            deserialized: partial_configuration,
-            ..
+            deserialized: mut partial_configuration,
         } = value;
 
-        LoadedConfiguration {
+        partial_configuration.apply_extends(
+            fs,
+            &configuration_file_path,
+            &external_resolution_base_path,
+        )?;
+
+        Ok(Self {
             configuration: partial_configuration,
             directory_path: configuration_file_path.parent().map(PathBuf::from),
            file_path: Some(configuration_file_path),
-        }
+        })
+    }
+
+    /// Return the path of the **directory** where the configuration is
+    pub fn directory_path(&self) -> Option<&Path> {
+        self.directory_path.as_deref()
+    }
+
+    /// Return the path of the **file** where the configuration is
+    pub fn file_path(&self) -> Option<&Path> {
+        self.file_path.as_deref()
     }
 }
 
@@ -65,7 +75,7 @@ pub fn load_configuration(
     config_path: ConfigurationPathHint,
 ) -> Result<LoadedConfiguration, WorkspaceError> {
     let config = load_config(fs, config_path)?;
-    Ok(LoadedConfiguration::from(config))
+    LoadedConfiguration::try_from_payload(config, fs)
 }
 
 /// - [Result]: if an error occurred while loading the configuration file.
@@ -120,7 +130,7 @@ fn load_config(
         ConfigurationPathHint::None => file_system.working_directory().unwrap_or_default(),
     };
 
-    // We first search for `postgrestools.jsonc`
+    // We first search for `postgrestools.jsonc` files
    if let Some(auto_search_result) = file_system.auto_search(
        &configuration_directory,
        ConfigName::file_names().as_slice(),
@@ -265,10 +275,276 @@ pub fn strip_jsonc_comments(jsonc_input: &str) -> String {
     json_output
 }
 
+pub trait PartialConfigurationExt {
+    fn apply_extends(
+        &mut self,
+        fs: &DynRef<'_, dyn FileSystem>,
+        file_path: &Path,
+        external_resolution_base_path: &Path,
+    ) -> Result<(), WorkspaceError>;
+
+    fn deserialize_extends(
+        &mut self,
+        fs: &DynRef<'_, dyn FileSystem>,
+        relative_resolution_base_path: &Path,
+        external_resolution_base_path: &Path,
+    ) -> Result<Vec<PartialConfiguration>, WorkspaceError>;
+
+    fn retrieve_gitignore_matches(
+        &self,
+        file_system: &DynRef<'_, dyn FileSystem>,
+        vcs_base_path: Option<&Path>,
+    ) -> Result<(Option<PathBuf>, Vec<String>), WorkspaceError>;
+}
+
+impl PartialConfigurationExt for PartialConfiguration {
+    /// Mutates the configuration so that any fields that have not been configured explicitly are
+    /// filled in with their values from configs listed in the `extends` field.
+    ///
+    /// The `extends` configs are applied from left to right.
+    ///
+    /// If a configuration can't be resolved from the file system, the operation will fail.
+    fn apply_extends(
+        &mut self,
+        fs: &DynRef<'_, dyn FileSystem>,
+        file_path: &Path,
+        external_resolution_base_path: &Path,
+    ) -> Result<(), WorkspaceError> {
+        let configurations = self.deserialize_extends(
+            fs,
+            file_path.parent().expect("file path should have a parent"),
+            external_resolution_base_path,
+        )?;
+
+        let extended_configuration = configurations.into_iter().reduce(
+            |mut previous_configuration, current_configuration| {
+                previous_configuration.merge_with(current_configuration);
+                previous_configuration
+            },
+        );
+        if let Some(mut extended_configuration) = extended_configuration {
+            // We swap them to avoid having to clone `self.configuration` to merge it.
+            std::mem::swap(self, &mut extended_configuration);
+            self.merge_with(extended_configuration)
+        }
+
+        Ok(())
+    }
+
+    /// It attempts to deserialize all the configuration files that were specified in the `extends` property
+    fn deserialize_extends(
+        &mut self,
+        fs: &DynRef<'_, dyn FileSystem>,
+        relative_resolution_base_path: &Path,
+        external_resolution_base_path: &Path,
+    ) -> Result<Vec<PartialConfiguration>, WorkspaceError> {
+        let Some(extends) = &self.extends else {
+            return Ok(Vec::new());
+        };
+
+        let mut deserialized_configurations = vec![];
+        for extend_entry in extends.iter() {
+            let extend_entry_as_path = Path::new(extend_entry);
+
+            let extend_configuration_file_path = if extend_entry_as_path.starts_with(".")
+                || matches!(
+                    extend_entry_as_path
+                        .extension()
+                        .map(OsStr::as_encoded_bytes),
+                    Some(b"jsonc")
+                ) {
+                // Normalize the path to handle relative segments like "../"
+                normalize_path(&relative_resolution_base_path.join(extend_entry))
+            } else {
+                fs.resolve_configuration(extend_entry.as_str(), external_resolution_base_path)
+                    .map_err(|error| {
+                        ConfigurationDiagnostic::cant_resolve(
+                            external_resolution_base_path.display().to_string(),
+                            error,
+                        )
+                    })?
+                    .into_path_buf()
+            };
+
+            let mut file = fs
+                .open_with_options(
+                    extend_configuration_file_path.as_path(),
+                    OpenOptions::default().read(true),
+                )
+                .map_err(|err| {
+                    CantLoadExtendFile::new(
+                        extend_configuration_file_path.display().to_string(),
+                        err.to_string(),
+                    )
+                    .with_verbose_advice(markup! {
+                        "Postgres Tools tried to load the configuration file \""<Emphasis>{
+                            extend_configuration_file_path.display().to_string()
+                        }</Emphasis>"\" in \"extends\" using \""<Emphasis>{
+                            external_resolution_base_path.display().to_string()
+                        }</Emphasis>"\" as the base path."
+                    })
+                })?;
+
+            let mut content = String::new();
+            file.read_to_string(&mut content).map_err(|err| {
+                CantLoadExtendFile::new(extend_configuration_file_path.display().to_string(), err.to_string()).with_verbose_advice(
+                    markup!{
+                        "It's possible that the file was created with a different user/group. Make sure you have the rights to read the file."
+                    }
+                )
+
+            })?;
+
+            let deserialized = serde_json::from_str::<PartialConfiguration>(&content)
+                .map_err(ConfigurationDiagnostic::new_deserialization_error)?;
+            deserialized_configurations.push(deserialized)
+        }
+        Ok(deserialized_configurations)
+    }
+
+    /// This function checks if the VCS integration is enabled, and if so, it will attempts to resolve the
+    /// VCS root directory and the `.gitignore` file.
+    ///
+    /// ## Returns
+    ///
+    /// A tuple with VCS root folder and the contents of the `.gitignore` file
+    fn retrieve_gitignore_matches(
+        &self,
+        file_system: &DynRef<'_, dyn FileSystem>,
+        vcs_base_path: Option<&Path>,
+    ) -> Result<(Option<PathBuf>, Vec<String>), WorkspaceError> {
+        let Some(vcs) = &self.vcs else {
+            return Ok((None, vec![]));
+        };
+        if vcs.is_enabled() {
+            let vcs_base_path = match (vcs_base_path, &vcs.root) {
+                (Some(vcs_base_path), Some(root)) => vcs_base_path.join(root),
+                (None, Some(root)) => PathBuf::from(root),
+                (Some(vcs_base_path), None) => PathBuf::from(vcs_base_path),
+                (None, None) => return Err(WorkspaceError::vcs_disabled()),
+            };
+            if let Some(client_kind) = &vcs.client_kind {
+                if !vcs.ignore_file_disabled() {
+                    let result = file_system
+                        .auto_search(&vcs_base_path, &[client_kind.ignore_file()], false)
+                        .map_err(WorkspaceError::from)?;
+
+                    if let Some(result) = result {
+                        return Ok((
+                            result.file_path.parent().map(PathBuf::from),
+                            result
+                                .content
+                                .lines()
+                                .map(String::from)
+                                .collect::<Vec<String>>(),
+                        ));
+                    }
+                }
+            }
+        }
+        Ok((None, vec![]))
+    }
+}
+
+/// Normalizes a path, resolving '..' and '.' segments without requiring the path to exist
+fn normalize_path(path: &Path) -> PathBuf {
+    let mut components = Vec::new();
+    let mut prefix_component = None;
+    let mut is_absolute = false;
+
+    for component in path.components() {
+        match component {
+            std::path::Component::Prefix(_prefix) => {
+                prefix_component = Some(component);
+                components.clear();
+            }
+            std::path::Component::RootDir => {
+                is_absolute = true;
+                components.clear();
+            }
+            std::path::Component::ParentDir => {
+                if !components.is_empty() {
+                    components.pop();
+                } else if !is_absolute && prefix_component.is_none() {
+                    // Only keep parent dir if we're not absolute and have no prefix
+                    components.push(component.as_os_str());
+                }
+            }
+            std::path::Component::Normal(c) => {
+                components.push(c);
+            }
+            std::path::Component::CurDir => {
+                // Skip current directory components
+            }
+        }
+    }
+
+    let mut result = PathBuf::new();
+
+    // Add prefix component (like C: on Windows)
+    if let Some(prefix) = prefix_component {
+        result.push(prefix.as_os_str());
+    }
+
+    // Add root directory if path is absolute
+    if is_absolute {
+        result.push(std::path::Component::RootDir.as_os_str());
+    }
+
+    // Add normalized components
+    for component in components {
+        result.push(component);
+    }
+
+    // Handle edge cases
+    if result.as_os_str().is_empty() {
+        if prefix_component.is_some() || is_absolute {
+            // This shouldn't happen with proper input, but fallback to original path's root
+            return path
+                .ancestors()
+                .last()
+                .unwrap_or(Path::new(""))
+                .to_path_buf();
+        } else {
+            return PathBuf::from(".");
+        }
+    }
+
+    result
+}
+
 #[cfg(test)]
 mod tests {
     use super::*;
 
+    #[test]
+    fn test_normalize_path_windows_drive() {
+        if cfg!(windows) {
+            let path = Path::new(r"z:\workspace\test_one\..\postgrestools.jsonc");
+            let normalized = normalize_path(path);
+            assert_eq!(
+                normalized,
+                PathBuf::from(r"z:\workspace\postgrestools.jsonc")
+            );
+        }
+    }
+
+    #[test]
+    fn test_normalize_path_relative() {
+        let path = Path::new("workspace/test_one/../postgrestools.jsonc");
+        let normalized = normalize_path(path);
+        assert_eq!(normalized, PathBuf::from("workspace/postgrestools.jsonc"));
+    }
+
+    #[test]
+    fn test_normalize_path_multiple_parent_dirs() {
+        if cfg!(windows) {
+            let path = Path::new(r"c:\a\b\c\..\..\d");
+            let normalized = normalize_path(path);
+            assert_eq!(normalized, PathBuf::from(r"c:\a\d"));
+        }
+    }
+
     #[test]
     fn test_strip_jsonc_comments_line_comments() {
         let input = r#"{

‎crates/pgt_workspace/src/diagnostics.rs

Lines changed: 7 additions & 0 deletions
@@ -1,4 +1,5 @@
 use pgt_configuration::ConfigurationDiagnostic;
+use pgt_configuration::diagnostics::CantLoadExtendFile;
 use pgt_console::fmt::Bytes;
 use pgt_console::markup;
 use pgt_diagnostics::{
@@ -354,3 +355,9 @@ impl Diagnostic for FileTooLarge {
         )
     }
 }
+
+impl From<CantLoadExtendFile> for WorkspaceError {
+    fn from(value: CantLoadExtendFile) -> Self {
+        WorkspaceError::Configuration(ConfigurationDiagnostic::CantLoadExtendFile(value))
+    }
+}

‎crates/pgt_workspace/src/lib.rs

Lines changed: 1 addition & 0 deletions
@@ -14,6 +14,7 @@ pub mod workspace;
 #[cfg(feature = "schema")]
 pub mod workspace_types;
 
+pub use crate::configuration::PartialConfigurationExt;
 pub use crate::diagnostics::{TransportError, WorkspaceError};
 pub use crate::workspace::Workspace;

‎crates/pgt_workspace/src/settings.rs

Lines changed: 179 additions & 55 deletions
@@ -8,6 +8,7 @@ use std::{
     sync::{RwLock, RwLockReadGuard, RwLockWriteGuard},
     time::Duration,
 };
+use tracing::trace;
 
 use ignore::gitignore::{Gitignore, GitignoreBuilder};
 use pgt_configuration::{
@@ -17,9 +18,185 @@ use pgt_configuration::{
     files::FilesConfiguration,
     migrations::{MigrationsConfiguration, PartialMigrationsConfiguration},
 };
-use pgt_fs::FileSystem;
+use pgt_fs::PgTPath;
 
-use crate::{DynRef, WorkspaceError, matcher::Matcher};
+use crate::{
+    WorkspaceError,
+    matcher::Matcher,
+    workspace::{ProjectKey, WorkspaceData},
+};
+
+#[derive(Debug, Default)]
+/// The information tracked for each project
+pub struct ProjectData {
+    /// The root path of the project. This path should be **absolute**.
+    path: PgTPath,
+    /// The settings of the project, usually inferred from the configuration file e.g. `biome.json`.
+    settings: Settings,
+}
+
+#[derive(Debug, Default)]
+/// Type that manages different projects inside the workspace.
+pub struct WorkspaceSettings {
+    /// The data of the projects
+    data: WorkspaceData<ProjectData>,
+    /// The ID of the current project.
+    current_project: ProjectKey,
+}
+
+impl WorkspaceSettings {
+    pub fn get_current_project_key(&self) -> ProjectKey {
+        self.current_project
+    }
+
+    pub fn get_current_project_path(&self) -> Option<&PgTPath> {
+        trace!("Current key {:?}", self.current_project);
+        self.data
+            .get(self.current_project)
+            .as_ref()
+            .map(|d| &d.path)
+    }
+
+    pub fn get_current_project_data_mut(&mut self) -> &mut ProjectData {
+        self.data
+            .get_mut(self.current_project)
+            .expect("Current project not configured")
+    }
+
+    /// Retrieves the settings of the current workspace folder
+    pub fn get_current_settings(&self) -> Option<&Settings> {
+        trace!("Current key {:?}", self.current_project);
+        let data = self.data.get(self.current_project);
+        if let Some(data) = data {
+            Some(&data.settings)
+        } else {
+            None
+        }
+    }
+
+    /// Retrieves a mutable reference of the settings of the current project
+    pub fn get_current_settings_mut(&mut self) -> &mut Settings {
+        &mut self
+            .data
+            .get_mut(self.current_project)
+            .expect("You must have at least one workspace.")
+            .settings
+    }
+
+    /// Register the current project using its unique key
+    pub fn register_current_project(&mut self, key: ProjectKey) {
+        self.current_project = key;
+    }
+
+    /// Insert a new project using its folder. Use [WorkspaceSettings::get_current_settings_mut] to retrieve
+    /// a mutable reference to its [Settings] and manipulate them.
+    pub fn insert_project(&mut self, workspace_path: impl Into<PathBuf>) -> ProjectKey {
+        let path = PgTPath::new(workspace_path.into());
+        trace!("Insert workspace folder: {:?}", path);
+        self.data.insert(ProjectData {
+            path,
+            settings: Settings::default(),
+        })
+    }
+
+    /// Remove a project using its folder.
+    pub fn remove_project(&mut self, workspace_path: &Path) {
+        let keys_to_remove = {
+            let mut data = vec![];
+            let iter = self.data.iter();
+
+            for (key, path_to_settings) in iter {
+                if path_to_settings.path.as_path() == workspace_path {
+                    data.push(key)
+                }
+            }
+
+            data
+        };
+
+        for key in keys_to_remove {
+            self.data.remove(key)
+        }
+    }
+
+    /// Checks if the current path belongs to a registered project.
+    ///
+    /// If there's a match, and the match **isn't** the current project, it returns the new key.
+    pub fn path_belongs_to_current_workspace(&self, path: &PgTPath) -> Option<ProjectKey> {
+        if self.data.is_empty() {
+            return None;
+        }
+        trace!("Current key: {:?}", self.current_project);
+        let iter = self.data.iter();
+        for (key, path_to_settings) in iter {
+            trace!(
+                "Workspace path {:?}, file path {:?}",
+                path_to_settings.path, path
+            );
+            trace!("Iter key: {:?}", key);
+            if key == self.current_project {
+                continue;
+            }
+            if path.strip_prefix(path_to_settings.path.as_path()).is_ok() {
+                trace!("Update workspace to {:?}", key);
+                return Some(key);
+            }
+        }
+        None
+    }
+
+    /// Checks if the current path belongs to a registered project.
+    ///
+    /// If there's a match, and the match **isn't** the current project, the function will mark the match as the current project.
+    pub fn set_current_project(&mut self, new_key: ProjectKey) {
+        self.current_project = new_key;
+    }
+}
+
+#[derive(Debug)]
+pub struct WorkspaceSettingsHandle<'a> {
+    inner: RwLockReadGuard<'a, WorkspaceSettings>,
+}
+
+impl<'a> WorkspaceSettingsHandle<'a> {
+    pub(crate) fn new(settings: &'a RwLock<WorkspaceSettings>) -> Self {
+        Self {
+            inner: settings.read().unwrap(),
+        }
+    }
+
+    pub(crate) fn settings(&self) -> Option<&Settings> {
+        self.inner.get_current_settings()
+    }
+
+    pub(crate) fn path(&self) -> Option<&PgTPath> {
+        self.inner.get_current_project_path()
+    }
+}
+
+impl AsRef<WorkspaceSettings> for WorkspaceSettingsHandle<'_> {
+    fn as_ref(&self) -> &WorkspaceSettings {
+        &self.inner
+    }
+}
+
+pub struct WorkspaceSettingsHandleMut<'a> {
+    inner: RwLockWriteGuard<'a, WorkspaceSettings>,
+}
+
+impl<'a> WorkspaceSettingsHandleMut<'a> {
+    pub(crate) fn new(settings: &'a RwLock<WorkspaceSettings>) -> Self {
+        Self {
+            inner: settings.write().unwrap(),
+        }
+    }
+}
+
+impl AsMut<WorkspaceSettings> for WorkspaceSettingsHandleMut<'_> {
+    fn as_mut(&mut self) -> &mut WorkspaceSettings {
+        &mut self.inner
+    }
+}
 
 /// Global settings for the entire workspace
 #[derive(Debug, Default)]
@@ -397,59 +574,6 @@ impl Default for FilesSettings {
     }
 }
 
-pub trait PartialConfigurationExt {
-    fn retrieve_gitignore_matches(
-        &self,
-        file_system: &DynRef<'_, dyn FileSystem>,
-        vcs_base_path: Option<&Path>,
-    ) -> Result<(Option<PathBuf>, Vec<String>), WorkspaceError>;
-}
-
-impl PartialConfigurationExt for PartialConfiguration {
-    /// This function checks if the VCS integration is enabled, and if so, it will attempts to resolve the
-    /// VCS root directory and the `.gitignore` file.
-    ///
-    /// ## Returns
-    ///
-    /// A tuple with VCS root folder and the contents of the `.gitignore` file
-    fn retrieve_gitignore_matches(
-        &self,
-        file_system: &DynRef<'_, dyn FileSystem>,
-        vcs_base_path: Option<&Path>,
-    ) -> Result<(Option<PathBuf>, Vec<String>), WorkspaceError> {
-        let Some(vcs) = &self.vcs else {
-            return Ok((None, vec![]));
-        };
-        if vcs.is_enabled() {
-            let vcs_base_path = match (vcs_base_path, &vcs.root) {
-                (Some(vcs_base_path), Some(root)) => vcs_base_path.join(root),
-                (None, Some(root)) => PathBuf::from(root),
-                (Some(vcs_base_path), None) => PathBuf::from(vcs_base_path),
-                (None, None) => return Err(WorkspaceError::vcs_disabled()),
-            };
-            if let Some(client_kind) = &vcs.client_kind {
-                if !vcs.ignore_file_disabled() {
-                    let result = file_system
-                        .auto_search(&vcs_base_path, &[client_kind.ignore_file()], false)
-                        .map_err(WorkspaceError::from)?;
-
-                    if let Some(result) = result {
-                        return Ok((
-                            result.file_path.parent().map(PathBuf::from),
-                            result
-                                .content
-                                .lines()
-                                .map(String::from)
-                                .collect::<Vec<String>>(),
-                        ));
-                    }
-                }
-            }
-        }
-        Ok((None, vec![]))
-    }
-}
-
 #[cfg(test)]
 mod tests {
     use biome_deserialize::StringSet;
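
For orientation, the sketch below shows how the multi-project registry introduced in this file is meant to be driven. It is illustrative only: it assumes the crate-internal API exactly as added above (`WorkspaceSettings`, `ProjectKey`, `PgTPath::new`), and the import paths, example folder names, and the exact `PgTPath::new` signature are assumptions not shown in this commit.

use std::path::PathBuf;

use pgt_fs::PgTPath;
use pgt_workspace::settings::WorkspaceSettings;

fn demo_project_switching() {
    let mut settings = WorkspaceSettings::default();

    // Every workspace folder becomes a project entry keyed by a slotmap key.
    let app = settings.insert_project(PathBuf::from("/repo/app"));
    let lib = settings.insert_project(PathBuf::from("/repo/lib"));
    settings.register_current_project(app);

    // When a file under another registered folder shows up, the server can
    // look up the owning project and make it the current one.
    let file = PgTPath::new(PathBuf::from("/repo/lib/queries/users.sql"));
    if let Some(key) = settings.path_belongs_to_current_workspace(&file) {
        settings.set_current_project(key);
    }
    assert_eq!(settings.get_current_project_key(), lib);
}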

‎crates/pgt_workspace/src/workspace.rs

Lines changed: 103 additions & 0 deletions
@@ -5,7 +5,10 @@ use pgt_analyse::RuleCategories;
 use pgt_configuration::{PartialConfiguration, RuleSelector};
 use pgt_fs::PgTPath;
 use pgt_text_size::TextRange;
+#[cfg(feature = "schema")]
+use schemars::{JsonSchema, SchemaGenerator, schema::Schema};
 use serde::{Deserialize, Serialize};
+use slotmap::{DenseSlotMap, new_key_type};
 
 use crate::{
     WorkspaceError,
@@ -92,6 +95,21 @@ pub struct ServerInfo {
     pub version: Option<String>,
 }
 
+#[derive(Debug, serde::Serialize, serde::Deserialize)]
+#[cfg_attr(feature = "schema", derive(schemars::JsonSchema))]
+#[serde(rename_all = "camelCase")]
+pub struct RegisterProjectFolderParams {
+    pub path: Option<PathBuf>,
+    pub set_as_current_workspace: bool,
+}
+
+#[derive(Debug, serde::Serialize, serde::Deserialize)]
+#[cfg_attr(feature = "schema", derive(schemars::JsonSchema))]
+#[serde(rename_all = "camelCase")]
+pub struct UnregisterProjectFolderParams {
+    pub path: PgTPath,
+}
+
 pub trait Workspace: Send + Sync + RefUnwindSafe {
     /// Retrieves the list of diagnostics associated to a file
     fn pull_diagnostics(
@@ -110,6 +128,18 @@ pub trait Workspace: Send + Sync + RefUnwindSafe {
         params: GetCompletionsParams,
     ) -> Result<CompletionsResult, WorkspaceError>;
 
+    /// Register a possible workspace project folder. Returns the key of said project. Use this key when you want to switch to different projects.
+    fn register_project_folder(
+        &self,
+        params: RegisterProjectFolderParams,
+    ) -> Result<ProjectKey, WorkspaceError>;
+
+    /// Unregister a workspace project folder. The settings that belong to that project are deleted.
+    fn unregister_project_folder(
+        &self,
+        params: UnregisterProjectFolderParams,
+    ) -> Result<(), WorkspaceError>;
+
     /// Update the global settings for this workspace
     fn update_settings(&self, params: UpdateSettingsParams) -> Result<(), WorkspaceError>;
 
@@ -222,3 +252,76 @@ impl<W: Workspace + ?Sized> Drop for FileGuard<'_, W> {
             .ok();
     }
 }
+
+new_key_type! {
+    pub struct ProjectKey;
+}
+
+#[cfg(feature = "schema")]
+impl JsonSchema for ProjectKey {
+    fn schema_name() -> String {
+        "ProjectKey".to_string()
+    }
+
+    fn json_schema(generator: &mut SchemaGenerator) -> Schema {
+        <String>::json_schema(generator)
+    }
+}
+
+#[derive(Debug, Default)]
+pub struct WorkspaceData<V> {
+    /// [DenseSlotMap] is the slowest type in insertion/removal, but the fastest in iteration
+    ///
+    /// Users wouldn't change workspace folders very often,
+    paths: DenseSlotMap<ProjectKey, V>,
+}
+
+impl<V> WorkspaceData<V> {
+    /// Inserts an item
+    pub fn insert(&mut self, item: V) -> ProjectKey {
+        self.paths.insert(item)
+    }
+
+    /// Removes an item
+    pub fn remove(&mut self, key: ProjectKey) {
+        self.paths.remove(key);
+    }
+
+    /// Get a reference of the value
+    pub fn get(&self, key: ProjectKey) -> Option<&V> {
+        self.paths.get(key)
+    }
+
+    /// Get a mutable reference of the value
+    pub fn get_mut(&mut self, key: ProjectKey) -> Option<&mut V> {
+        self.paths.get_mut(key)
+    }
+
+    pub fn is_empty(&self) -> bool {
+        self.paths.is_empty()
+    }
+
+    pub fn iter(&self) -> WorkspaceDataIterator<'_, V> {
+        WorkspaceDataIterator::new(self)
+    }
+}
+
+pub struct WorkspaceDataIterator<'a, V> {
+    iterator: slotmap::dense::Iter<'a, ProjectKey, V>,
+}
+
+impl<'a, V> WorkspaceDataIterator<'a, V> {
+    fn new(data: &'a WorkspaceData<V>) -> Self {
+        Self {
+            iterator: data.paths.iter(),
+        }
+    }
+}
+
+impl<'a, V> Iterator for WorkspaceDataIterator<'a, V> {
+    type Item = (ProjectKey, &'a V);
+
+    fn next(&mut self) -> Option<Self::Item> {
+        self.iterator.next()
+    }
+}
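
The two new trait methods above are what both the CLI (`commands/mod.rs`) and the LSP session call in this commit. Below is a minimal usage sketch, assuming the parameter types exactly as declared in this file and the crate-root re-exports of `Workspace`/`WorkspaceError`; the helper function names and the `PgTPath::new` signature are assumptions for illustration only.

use std::path::PathBuf;

use pgt_fs::PgTPath;
use pgt_workspace::workspace::{
    ProjectKey, RegisterProjectFolderParams, UnregisterProjectFolderParams,
};
use pgt_workspace::{Workspace, WorkspaceError};

// Register a folder as a project and make it the current one; the returned
// key can later be used to switch between projects.
fn open_project(
    workspace: &dyn Workspace,
    root: PathBuf,
) -> Result<ProjectKey, WorkspaceError> {
    workspace.register_project_folder(RegisterProjectFolderParams {
        path: Some(root),
        set_as_current_workspace: true,
    })
}

// Unregister a folder; the settings stored for that project are dropped.
fn close_project(workspace: &dyn Workspace, root: PathBuf) -> Result<(), WorkspaceError> {
    workspace.unregister_project_folder(UnregisterProjectFolderParams {
        path: PgTPath::new(root),
    })
}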

‎crates/pgt_workspace/src/workspace/client.rs

Lines changed: 18 additions & 1 deletion
@@ -7,7 +7,10 @@ use std::{
     sync::atomic::{AtomicU64, Ordering},
 };
 
-use super::{CloseFileParams, GetFileContentParams, IsPathIgnoredParams, OpenFileParams};
+use super::{
+    CloseFileParams, GetFileContentParams, IsPathIgnoredParams, OpenFileParams, ProjectKey,
+    RegisterProjectFolderParams, UnregisterProjectFolderParams,
+};
 
 pub struct WorkspaceClient<T> {
     transport: T,
@@ -103,6 +106,20 @@ where
         self.request("pgt/execute_statement", params)
     }
 
+    fn register_project_folder(
+        &self,
+        params: RegisterProjectFolderParams,
+    ) -> Result<ProjectKey, WorkspaceError> {
+        self.request("pgt/register_project_folder", params)
+    }
+
+    fn unregister_project_folder(
+        &self,
+        params: UnregisterProjectFolderParams,
+    ) -> Result<(), WorkspaceError> {
+        self.request("pgt/unregister_project_folder", params)
+    }
+
     fn open_file(&self, params: OpenFileParams) -> Result<(), WorkspaceError> {
         self.request("pgt/open_file", params)
     }

‎crates/pgt_workspace/src/workspace/server.rs

Lines changed: 156 additions & 73 deletions
@@ -1,14 +1,14 @@
 use std::{
     fs,
     panic::RefUnwindSafe,
-    path::Path,
+    path::{Path, PathBuf},
     sync::{Arc, RwLock},
 };
 
 use analyser::AnalyserVisitorBuilder;
 use async_helper::run_async;
+use connection_manager::ConnectionManager;
 use dashmap::DashMap;
-use db_connection::DbConnection;
 use document::Document;
 use futures::{StreamExt, stream};
 use parsed_document::{
@@ -23,8 +23,8 @@ use pgt_diagnostics::{
 use pgt_fs::{ConfigName, PgTPath};
 use pgt_typecheck::{IdentifierType, TypecheckParams, TypedIdentifier};
 use schema_cache_manager::SchemaCacheManager;
-use sqlx::Executor;
-use tracing::info;
+use sqlx::{Executor, PgPool};
+use tracing::{debug, info};
 
 use crate::{
     WorkspaceError,
@@ -37,11 +37,12 @@ use crate::{
         completions::{CompletionsResult, GetCompletionsParams, get_statement_for_completions},
         diagnostics::{PullDiagnosticsParams, PullDiagnosticsResult},
     },
-    settings::{Settings, SettingsHandle, SettingsHandleMut},
+    settings::{WorkspaceSettings, WorkspaceSettingsHandle, WorkspaceSettingsHandleMut},
 };
 
 use super::{
-    GetFileContentParams, IsPathIgnoredParams, OpenFileParams, ServerInfo, UpdateSettingsParams,
+    GetFileContentParams, IsPathIgnoredParams, OpenFileParams, ProjectKey,
+    RegisterProjectFolderParams, ServerInfo, UnregisterProjectFolderParams, UpdateSettingsParams,
     Workspace,
 };
 
@@ -51,7 +52,8 @@ mod analyser;
 mod annotation;
 mod async_helper;
 mod change;
-mod db_connection;
+mod connection_key;
+mod connection_manager;
 pub(crate) mod document;
 mod migration;
 pub(crate) mod parsed_document;
@@ -63,14 +65,14 @@ mod tree_sitter;
 
 pub(super) struct WorkspaceServer {
     /// global settings object for this workspace
-    settings: RwLock<Settings>,
+    settings: RwLock<WorkspaceSettings>,
 
     /// Stores the schema cache for this workspace
     schema_cache: SchemaCacheManager,
 
     parsed_documents: DashMap<PgTPath, ParsedDocument>,
 
-    connection: RwLock<DbConnection>,
+    connection: ConnectionManager,
 }
 
 /// The `Workspace` object is long-lived, so we want it to be able to cross
@@ -91,23 +93,60 @@ impl WorkspaceServer {
         Self {
             settings: RwLock::default(),
             parsed_documents: DashMap::default(),
-            schema_cache: SchemaCacheManager::default(),
-            connection: RwLock::default(),
+            schema_cache: SchemaCacheManager::new(),
+            connection: ConnectionManager::new(),
         }
     }
 
     /// Provides a reference to the current settings
-    fn settings(&self) -> SettingsHandle {
-        SettingsHandle::new(&self.settings)
+    fn workspaces(&self) -> WorkspaceSettingsHandle {
+        WorkspaceSettingsHandle::new(&self.settings)
     }
 
-    fn settings_mut(&self) -> SettingsHandleMut {
-        SettingsHandleMut::new(&self.settings)
+    fn workspaces_mut(&self) -> WorkspaceSettingsHandleMut {
+        WorkspaceSettingsHandleMut::new(&self.settings)
+    }
+
+    fn get_current_connection(&self) -> Option<PgPool> {
+        let settings = self.workspaces();
+        let settings = settings.settings()?;
+        self.connection.get_pool(&settings.db)
+    }
+
+    /// Register a new project in the current workspace
+    fn register_project(&self, path: PathBuf) -> ProjectKey {
+        let mut workspace = self.workspaces_mut();
+        let workspace_mut = workspace.as_mut();
+        workspace_mut.insert_project(path.clone())
+    }
+
+    /// Retrieves the current project path
+    fn get_current_project_path(&self) -> Option<PgTPath> {
+        self.workspaces().path().cloned()
+    }
+
+    /// Sets the current project of the current workspace
+    fn set_current_project(&self, project_key: ProjectKey) {
+        let mut workspace = self.workspaces_mut();
+        let workspace_mut = workspace.as_mut();
+        workspace_mut.set_current_project(project_key);
+    }
+
+    /// Checks whether the current path belongs to the current project.
+    ///
+    /// If there's a match, and the match **isn't** the current project, it returns the new key.
+    fn path_belongs_to_current_workspace(&self, path: &PgTPath) -> Option<ProjectKey> {
+        let workspaces = self.workspaces();
+        workspaces.as_ref().path_belongs_to_current_workspace(path)
     }
 
     fn is_ignored_by_migration_config(&self, path: &Path) -> bool {
-        let set = self.settings();
-        set.as_ref()
+        let settings = self.workspaces();
+        let settings = settings.settings();
+        let Some(settings) = settings else {
+            return false;
+        };
+        settings
             .migrations
             .as_ref()
             .and_then(|migration_settings| {
@@ -131,8 +170,12 @@ impl WorkspaceServer {
 
     /// Check whether a file is ignored in the top-level config `files.ignore`/`files.include`
     fn is_ignored_by_top_level_config(&self, path: &Path) -> bool {
-        let set = self.settings();
-        let settings = set.as_ref();
+        let settings = self.workspaces();
+        let settings = settings.settings();
+        let Some(settings) = settings else {
+            return false;
+        };
+
         let is_included = settings.files.included_files.is_empty()
             || is_dir(path)
             || settings.files.included_files.matches_path(path);
@@ -155,31 +198,66 @@ impl WorkspaceServer {
 }
 
 impl Workspace for WorkspaceServer {
+    fn register_project_folder(
+        &self,
+        params: RegisterProjectFolderParams,
+    ) -> Result<ProjectKey, WorkspaceError> {
+        let current_project_path = self.get_current_project_path();
+        debug!(
+            "Compare the current project with the new one {:?} {:?} {:?}",
+            current_project_path,
+            params.path.as_ref(),
+            current_project_path.as_deref() != params.path.as_ref()
+        );
+
+        let is_new_path = match (current_project_path.as_deref(), params.path.as_ref()) {
+            (Some(current_project_path), Some(params_path)) => current_project_path != params_path,
+            (Some(_), None) => {
+                // If the current project is set, but no path is provided, we assume it's a new project
+                true
+            }
+            _ => true,
+        };
+
+        if is_new_path {
+            let path = params.path.unwrap_or_default();
+            let key = self.register_project(path.clone());
+            if params.set_as_current_workspace {
+                self.set_current_project(key);
+            }
+            Ok(key)
+        } else {
+            Ok(self.workspaces().as_ref().get_current_project_key())
+        }
+    }
+
+    fn unregister_project_folder(
+        &self,
+        params: UnregisterProjectFolderParams,
+    ) -> Result<(), WorkspaceError> {
+        let mut workspace = self.workspaces_mut();
+        workspace.as_mut().remove_project(params.path.as_path());
+        Ok(())
+    }
+
     /// Update the global settings for this workspace
     ///
     /// ## Panics
     /// This function may panic if the internal settings mutex has been poisoned
     /// by another thread having previously panicked while holding the lock
     #[tracing::instrument(level = "trace", skip(self), err)]
     fn update_settings(&self, params: UpdateSettingsParams) -> Result<(), WorkspaceError> {
-        tracing::info!("Updating settings in workspace");
-
-        self.settings_mut().as_mut().merge_with_configuration(
-            params.configuration,
-            params.workspace_directory,
-            params.vcs_base_path,
-            params.gitignore_matches.as_slice(),
-        )?;
-
-        tracing::info!("Updated settings in workspace");
-        tracing::debug!("Updated settings are {:#?}", self.settings());
-
-        self.connection
-            .write()
-            .unwrap()
-            .set_conn_settings(&self.settings().as_ref().db);
-
-        tracing::info!("Updated Db connection settings");
+        let mut workspace = self.workspaces_mut();
+
+        workspace
+            .as_mut()
+            .get_current_settings_mut()
+            .merge_with_configuration(
+                params.configuration,
+                params.workspace_directory,
+                params.vcs_base_path,
+                params.gitignore_matches.as_slice(),
+            )?;
 
         Ok(())
     }
@@ -193,6 +271,10 @@ impl Workspace for WorkspaceServer {
             ParsedDocument::new(params.path.clone(), params.content, params.version)
         });
 
+        if let Some(project_key) = self.path_belongs_to_current_workspace(&params.path) {
+            self.set_current_project(project_key);
+        }
+
         Ok(())
     }
 
@@ -250,15 +332,13 @@ impl Workspace for WorkspaceServer {
             .get(&params.path)
             .ok_or(WorkspaceError::not_found())?;
 
-        let settings = self
-            .settings
-            .read()
-            .expect("Unable to read settings for Code Actions");
+        let settings = self.workspaces();
+        let settings = settings.settings();
 
-        let disabled_reason: Option<String> = if settings.db.allow_statement_executions {
-            None
-        } else {
-            Some("Statement execution not allowed against database.".into())
+        let disabled_reason = match settings {
+            Some(settings) if settings.db.allow_statement_executions => None,
+            Some(_) => Some("Statement execution is disabled in the settings.".into()),
+            None => Some("Statement execution not allowed against database.".into()),
         };
 
         let actions = parser
@@ -310,15 +390,13 @@ impl Workspace for WorkspaceServer {
             });
         };
 
-        let conn = self.connection.read().unwrap();
-        let pool = match conn.get_pool() {
-            Some(p) => p,
-            None => {
-                return Ok(ExecuteStatementResult {
-                    message: "Not connected to database.".into(),
-                });
-            }
-        };
+        let pool = self.get_current_connection();
+        if pool.is_none() {
+            return Ok(ExecuteStatementResult {
+                message: "No database connection available.".into(),
+            });
+        }
+        let pool = pool.unwrap();
 
         let result = run_async(async move { pool.execute(sqlx::query(&content)).await })??;
 
@@ -334,16 +412,29 @@ impl Workspace for WorkspaceServer {
         &self,
         params: PullDiagnosticsParams,
     ) -> Result<PullDiagnosticsResult, WorkspaceError> {
-        let settings = self.settings();
+        let settings = self.workspaces();
+
+        let settings = match settings.settings() {
+            Some(settings) => settings,
+            None => {
+                // return an empty result if no settings are available
+                // we might want to return an error here in the future
+                return Ok(PullDiagnosticsResult {
+                    diagnostics: Vec::new(),
+                    errors: 0,
+                    skipped_diagnostics: 0,
+                });
+            }
+        };
 
         // create analyser for this run
         // first, collect enabled and disabled rules from the workspace settings
-        let (enabled_rules, disabled_rules) = AnalyserVisitorBuilder::new(settings.as_ref())
+        let (enabled_rules, disabled_rules) = AnalyserVisitorBuilder::new(settings)
             .with_linter_rules(&params.only, &params.skip)
             .finish();
         // then, build a map that contains all options
         let options = AnalyserOptions {
-            rules: to_analyser_rules(settings.as_ref()),
+            rules: to_analyser_rules(settings),
         };
         // next, build the analysis filter which will be used to match rules
         let filter = AnalysisFilter {
@@ -364,23 +455,17 @@ impl Workspace for WorkspaceServer {
 
         let mut diagnostics: Vec<SDiagnostic> = parser.document_diagnostics().to_vec();
 
-        if let Some(pool) = self
-            .connection
-            .read()
-            .expect("DbConnection RwLock panicked")
-            .get_pool()
-        {
+        if let Some(pool) = self.get_current_connection() {
             let path_clone = params.path.clone();
             let schema_cache = self.schema_cache.load(pool.clone())?;
-            let schema_cache_arc = schema_cache.get_arc();
             let input = parser.iter(AsyncDiagnosticsMapper).collect::<Vec<_>>();
             // sorry for the ugly code :(
             let async_results = run_async(async move {
                 stream::iter(input)
                     .map(|(_id, range, content, ast, cst, sign)| {
                         let pool = pool.clone();
                         let path = path_clone.clone();
-                        let schema_cache = Arc::clone(&schema_cache_arc);
+                        let schema_cache = Arc::clone(&schema_cache);
                         async move {
                             if let Some(ast) = ast {
                                 pgt_typecheck::check_sql(TypecheckParams {
@@ -461,7 +546,6 @@ impl Workspace for WorkspaceServer {
                         || d.severity(),
                         |category| {
                             settings
-                                .as_ref()
                                 .get_severity_from_rule_code(category)
                                 .unwrap_or(Severity::Warning)
                         },
@@ -503,13 +587,12 @@ impl Workspace for WorkspaceServer {
             .get(&params.path)
             .ok_or(WorkspaceError::not_found())?;
 
-        let pool = match self.connection.read().unwrap().get_pool() {
-            Some(pool) => pool,
-            None => {
-                tracing::debug!("No connection to database. Skipping completions.");
-                return Ok(CompletionsResult::default());
-            }
-        };
+        let pool = self.get_current_connection();
+        if pool.is_none() {
+            tracing::debug!("No database connection available. Skipping completions.");
+            return Ok(CompletionsResult::default());
+        }
+        let pool = pool.unwrap();
 
         let schema_cache = self.schema_cache.load(pool)?;
Lines changed: 44 additions & 0 deletions
@@ -0,0 +1,44 @@
+use sqlx::PgPool;
+
+use crate::settings::DatabaseSettings;
+
+/// A unique identifier for database connection settings
+#[derive(Clone, PartialEq, Eq, Hash)]
+pub(crate) struct ConnectionKey {
+    pub host: String,
+    pub port: u16,
+    pub username: String,
+    pub database: String,
+}
+
+impl From<&DatabaseSettings> for ConnectionKey {
+    fn from(settings: &DatabaseSettings) -> Self {
+        Self {
+            host: settings.host.clone(),
+            port: settings.port,
+            username: settings.username.clone(),
+            database: settings.database.clone(),
+        }
+    }
+}
+
+impl From<&PgPool> for ConnectionKey {
+    fn from(pool: &PgPool) -> Self {
+        let conn = pool.connect_options();
+
+        match conn.get_database() {
+            None => Self {
+                host: conn.get_host().to_string(),
+                port: conn.get_port(),
+                username: conn.get_username().to_string(),
+                database: String::new(),
+            },
+            Some(db) => Self {
+                host: conn.get_host().to_string(),
+                port: conn.get_port(),
+                username: conn.get_username().to_string(),
+                database: db.to_string(),
+            },
+        }
+    }
+}
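
Since the struct above derives `Hash` and `Eq`, any two settings that point at the same host, port, username and database collapse to the same key. A hypothetical illustration of that property (the `DatabaseSettings` fields are the ones referenced in the `From` impl above):

    use std::collections::HashMap;

    fn lookup_example(a: &DatabaseSettings, b: &DatabaseSettings) {
        let mut pools_by_key: HashMap<ConnectionKey, &'static str> = HashMap::new();
        pools_by_key.insert(ConnectionKey::from(a), "pool created for `a`");

        // If `b` targets the same server and database, this is a cache hit even
        // when fields that are not part of the key (e.g. the password) differ.
        if pools_by_key.contains_key(&ConnectionKey::from(b)) {
            println!("reusing the existing pool");
        }
    }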
Lines changed: 95 additions & 0 deletions
@@ -0,0 +1,95 @@
+use std::time::{Duration, Instant};
+
+use dashmap::DashMap;
+use sqlx::{PgPool, Postgres, pool::PoolOptions, postgres::PgConnectOptions};
+
+use crate::settings::DatabaseSettings;
+
+use super::connection_key::ConnectionKey;
+
+/// Cached connection pool with last access time
+struct CachedPool {
+    pool: PgPool,
+    last_accessed: Instant,
+    idle_timeout: Duration,
+}
+
+#[derive(Default)]
+pub struct ConnectionManager {
+    pools: DashMap<ConnectionKey, CachedPool>,
+}
+
+impl ConnectionManager {
+    pub fn new() -> Self {
+        Self {
+            pools: DashMap::new(),
+        }
+    }
+
+    /// Get a connection pool for the given database settings.
+    /// If a pool already exists for these settings, it will be returned.
+    /// If not, a new pool will be created if connections are enabled.
+    /// Will also clean up idle connections that haven't been accessed for a while.
+    pub(crate) fn get_pool(&self, settings: &DatabaseSettings) -> Option<PgPool> {
+        let key = ConnectionKey::from(settings);
+
+        // Cleanup idle connections first
+        self.cleanup_idle_pools(&key);
+
+        if !settings.enable_connection {
+            tracing::info!("Database connection disabled.");
+            return None;
+        }
+
+        // If we have a cached pool, update its last_accessed time and return it
+        if let Some(mut cached_pool) = self.pools.get_mut(&key) {
+            cached_pool.last_accessed = Instant::now();
+            return Some(cached_pool.pool.clone());
+        }
+
+        // Create a new pool
+        let config = PgConnectOptions::new()
+            .host(&settings.host)
+            .port(settings.port)
+            .username(&settings.username)
+            .password(&settings.password)
+            .database(&settings.database);
+
+        let timeout = settings.conn_timeout_secs;
+
+        let pool = PoolOptions::<Postgres>::new()
+            .acquire_timeout(timeout)
+            .acquire_slow_threshold(Duration::from_secs(2))
+            .connect_lazy_with(config);
+
+        let cached_pool = CachedPool {
+            pool: pool.clone(),
+            last_accessed: Instant::now(),
+            // TODO: add this to the db settings, for now default to five minutes
+            idle_timeout: Duration::from_secs(60 * 5),
+        };
+
+        self.pools.insert(key, cached_pool);
+
+        Some(pool)
+    }
+
+    /// Remove pools that haven't been accessed for longer than the idle timeout
+    fn cleanup_idle_pools(&self, ignore_key: &ConnectionKey) {
+        let now = Instant::now();
+
+        // Use retain to keep only non-idle connections
+        self.pools.retain(|key, cached_pool| {
+            let idle_duration = now.duration_since(cached_pool.last_accessed);
+            if idle_duration > cached_pool.idle_timeout && key != ignore_key {
+                tracing::debug!(
+                    "Removing idle database connection (idle for {:?})",
+                    idle_duration
+                );
+                false
+            } else {
+                true
+            }
+        });
+    }
+}
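
A sketch of how the manager is meant to be driven, assuming a `DatabaseSettings` value is already at hand (its construction is not part of this diff). Repeated calls with equivalent settings reuse the cached pool, and pools that have sat idle longer than `idle_timeout` are evicted on the next call:

    fn connect_example(manager: &ConnectionManager, settings: &DatabaseSettings) {
        match manager.get_pool(settings) {
            Some(pool) => {
                // `connect_lazy_with` defers the actual connection, so errors only
                // surface once the pool is used, e.g. by executing a query.
                let _ = pool;
            }
            None => {
                // `enable_connection` was false; the manager deliberately returns None.
            }
        }
    }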

‎crates/pgt_workspace/src/workspace/server/db_connection.rs

Lines changed: 0 additions & 40 deletions
This file was deleted.
Lines changed: 28 additions & 78 deletions
@@ -1,97 +1,47 @@
-use std::sync::{Arc, RwLock, RwLockReadGuard};
+use std::sync::Arc;
 
+use dashmap::DashMap;
 use pgt_schema_cache::SchemaCache;
 use sqlx::PgPool;
 
 use crate::WorkspaceError;
 
-use super::async_helper::run_async;
-
-pub(crate) struct SchemaCacheHandle<'a> {
-    inner: RwLockReadGuard<'a, SchemaCacheManagerInner>,
-}
-
-impl<'a> SchemaCacheHandle<'a> {
-    pub(crate) fn new(cache: &'a RwLock<SchemaCacheManagerInner>) -> Self {
-        Self {
-            inner: cache.read().unwrap(),
-        }
-    }
-
-    pub(crate) fn wrap(inner: RwLockReadGuard<'a, SchemaCacheManagerInner>) -> Self {
-        Self { inner }
-    }
-
-    pub fn get_arc(&self) -> Arc<SchemaCache> {
-        Arc::clone(&self.inner.cache)
-    }
-}
-
-impl AsRef<SchemaCache> for SchemaCacheHandle<'_> {
-    fn as_ref(&self) -> &SchemaCache {
-        &self.inner.cache
-    }
-}
-
-#[derive(Default)]
-pub(crate) struct SchemaCacheManagerInner {
-    cache: Arc<SchemaCache>,
-    conn_str: String,
-}
+use super::{async_helper::run_async, connection_key::ConnectionKey};
 
 #[derive(Default)]
 pub struct SchemaCacheManager {
-    inner: RwLock<SchemaCacheManagerInner>,
+    schemas: DashMap<ConnectionKey, Arc<SchemaCache>>,
 }
 
 impl SchemaCacheManager {
-    pub fn load(&self, pool: PgPool) -> Result<SchemaCacheHandle, WorkspaceError> {
-        let new_conn_str = pool_to_conn_str(&pool);
-
-        {
-            // return early if the connection string is the same
-            let inner = self.inner.read().unwrap();
-            if new_conn_str == inner.conn_str {
-                tracing::info!("Same connection string, no updates.");
-                return Ok(SchemaCacheHandle::wrap(inner));
-            }
+    pub fn new() -> Self {
+        Self {
+            schemas: DashMap::new(),
         }
+    }
 
-        let maybe_refreshed = run_async(async move { SchemaCache::load(&pool).await })?;
-        let refreshed = maybe_refreshed?;
-
-        {
-            // write lock must be dropped before we return the reference below, hence the block
-            let mut inner = self.inner.write().unwrap();
+    pub fn load(&self, pool: PgPool) -> Result<Arc<SchemaCache>, WorkspaceError> {
+        let key: ConnectionKey = (&pool).into();
 
-            // Double-check that we still need to refresh (another thread might have done it)
-            if new_conn_str != inner.conn_str {
-                inner.cache = Arc::new(refreshed);
-                inner.conn_str = new_conn_str;
-                tracing::info!("Refreshed connection.");
-            }
+        if let Some(cache) = self.schemas.get(&key) {
+            return Ok(Arc::clone(&*cache));
         }
 
-        Ok(SchemaCacheHandle::new(&self.inner))
-    }
-}
-
-fn pool_to_conn_str(pool: &PgPool) -> String {
-    let conn = pool.connect_options();
-
-    match conn.get_database() {
-        None => format!(
-            "postgres://{}:<redacted_pw>@{}:{}",
-            conn.get_username(),
-            conn.get_host(),
-            conn.get_port()
-        ),
-        Some(db) => format!(
-            "postgres://{}:<redacted_pw>@{}:{}/{}",
-            conn.get_username(),
-            conn.get_host(),
-            conn.get_port(),
-            db
-        ),
+        let schema_cache = self
+            .schemas
+            .entry(key)
+            .or_try_insert_with::<WorkspaceError>(|| {
+                // This closure will only be called once per key if multiple threads
+                // try to access the same key simultaneously
+                let pool_clone = pool.clone();
+                let schema_cache =
+                    Arc::new(run_async(
+                        async move { SchemaCache::load(&pool_clone).await },
+                    )??);
+
+                Ok(schema_cache)
+            })?;
+
+        Ok(Arc::clone(&schema_cache))
     }
 }
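
The manager is now keyed by `ConnectionKey` instead of a redacted connection string. A small usage sketch, assuming an existing `PgPool`: the first `load` for a key runs `SchemaCache::load` through `run_async`, later calls return the cached `Arc<SchemaCache>`:

    fn schema_cache_example(manager: &SchemaCacheManager, pool: PgPool) -> Result<(), WorkspaceError> {
        let first = manager.load(pool.clone())?; // populates the DashMap entry
        let second = manager.load(pool)?;        // cache hit for the same ConnectionKey
        assert!(std::sync::Arc::ptr_eq(&first, &second));
        Ok(())
    }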

‎crates/pgt_workspace/src/workspace_types.rs

Lines changed: 2 additions & 1 deletion
@@ -457,9 +457,10 @@ macro_rules! workspace_method {
 }
 
 /// Returns a list of signature for all the methods in the [Workspace] trait
-pub fn methods() -> [WorkspaceMethod; 8] {
+pub fn methods() -> [WorkspaceMethod; 9] {
     [
         workspace_method!(is_path_ignored),
+        workspace_method!(register_project_folder),
         workspace_method!(get_file_content),
         workspace_method!(pull_diagnostics),
         workspace_method!(get_completions),

‎docs/schemas/0.0.0/schema.json

Lines changed: 11 additions & 0 deletions
@@ -22,6 +22,17 @@
       }
     ]
   },
+  "extends": {
+    "description": "A list of paths to other JSON files, used to extends the current configuration.",
+    "anyOf": [
+      {
+        "$ref": "#/definitions/StringSet"
+      },
+      {
+        "type": "null"
+      }
+    ]
+  },
   "files": {
     "description": "The configuration of the filesystem",
     "anyOf": [

‎docs/schemas/latest/schema.json

Lines changed: 11 additions & 0 deletions
@@ -22,6 +22,17 @@
       }
     ]
   },
+  "extends": {
+    "description": "A list of paths to other JSON files, used to extends the current configuration.",
+    "anyOf": [
+      {
+        "$ref": "#/definitions/StringSet"
+      },
+      {
+        "type": "null"
+      }
+    ]
+  },
   "files": {
     "description": "The configuration of the filesystem",
    "anyOf": [

‎justfile

Lines changed: 14 additions & 1 deletion
@@ -6,6 +6,7 @@ alias r := ready
 alias l := lint
 alias t := test
 alias rg := reset-git
+alias qm := quick-modify
 
 # Installs the tools needed to develop
 install-tools:
@@ -132,10 +133,22 @@ merge-main:
     git fetch origin main:main
     git merge main
 
+quick-create branch commit:
+    git checkout -b {{branch}}
+    git add -A
+    git commit -m "{{commit}}"
+    git push
+    gh pr create --fill
+
+quick-modify:
+    just format
+    git add -A
+    git commit -m "progress"
+    git push
 
 # Make sure to set your PGT_LOG_PATH in your shell profile.
 # You can use the PGT_LOG_LEVEL to set your log level.
 # We recommend to install `bunyan` (npm i -g bunyan) and pipe the output through there for color-coding:
 # just show-logs | bunyan
 show-logs:
-    tail -f $(ls $PGT_LOG_PATH/server.log.* | sort -t- -k2,2 -k3,3 -k4,4 | tail -n 1)
+    tail -f $(ls $PGT_LOG_PATH/server.log.* | sort -t- -k2,2 -k3,3 -k4,4 | tail -n 1)

‎packages/@postgrestools/backend-jsonrpc/src/workspace.ts

Lines changed: 27 additions & 5 deletions
@@ -19,6 +19,11 @@ export type FileKind = FileKind2[];
  * The priority of the file
  */
 export type FileKind2 = "Config" | "Ignore" | "Inspectable" | "Handleable";
+export interface RegisterProjectFolderParams {
+  path?: string;
+  setAsCurrentWorkspace: boolean;
+}
+export type ProjectKey = string;
 export interface GetFileContentParams {
   path: PgTPath;
 }
@@ -92,7 +97,7 @@ export type DiagnosticTags = DiagnosticTag[];
 /**
  * Serializable representation of a [Diagnostic](super::Diagnostic) advice
 
-See the [Visitor] trait for additional documentation on all the supported advice types.
+See the [Visitor] trait for additional documentation on all the supported advice types.
  */
 export type Advice =
   | { log: [LogCategory, MarkupBuf] }
@@ -185,6 +190,7 @@ export interface CompletionsResult {
 export interface CompletionItem {
   completion_text?: CompletionText;
   description: string;
+  detail?: string;
   kind: CompletionItemKind;
   label: string;
   preselected: boolean;
@@ -196,16 +202,22 @@ export interface CompletionItem {
 /**
  * The text that the editor should fill in. If `None`, the `label` should be used. Tables, for example, might have different completion_texts:
 
-label: "users", description: "Schema: auth", completion_text: "auth.users".
+label: "users", description: "Schema: auth", completion_text: "auth.users".
  */
 export interface CompletionText {
+  is_snippet: boolean;
   /**
   * A `range` is required because some editors replace the current token, others naively insert the text. Having a range where start == end makes it an insertion.
   */
   range: TextRange;
   text: string;
 }
-export type CompletionItemKind = "table" | "function" | "column" | "schema";
+export type CompletionItemKind =
+  | "table"
+  | "function"
+  | "column"
+  | "schema"
+  | "policy";
 export interface UpdateSettingsParams {
   configuration: PartialConfiguration;
   gitignore_matches: string[];
@@ -224,6 +236,10 @@ export interface PartialConfiguration {
   * The configuration of the database connection
   */
   db?: PartialDatabaseConfiguration;
+  /**
+  * A list of paths to other JSON files, used to extends the current configuration.
+  */
+  extends?: StringSet;
   /**
   * The configuration of the filesystem
   */
@@ -271,6 +287,7 @@ export interface PartialDatabaseConfiguration {
   */
   username?: string;
 }
+export type StringSet = string[];
 /**
  * The configuration of the filesystem
  */
@@ -338,15 +355,14 @@ export interface PartialVcsConfiguration {
 /**
  * The folder where we should check for VCS files. By default, we will use the same folder where `postgrestools.jsonc` was found.
 
-If we can't find the configuration, it will attempt to use the current working directory. If no current working directory can't be found, we won't use the VCS integration, and a diagnostic will be emitted
+If we can't find the configuration, it will attempt to use the current working directory. If no current working directory can't be found, we won't use the VCS integration, and a diagnostic will be emitted
  */
   root?: string;
   /**
   * Whether we should use the VCS ignore file. When [true], we will ignore the files specified in the ignore file.
   */
   useIgnoreFile?: boolean;
 }
-export type StringSet = string[];
 export interface Rules {
   /**
   * It enables ALL rules. The rules that belong to `nursery` won't be enabled.
@@ -425,6 +441,9 @@ export interface CloseFileParams {
 export type Configuration = PartialConfiguration;
 export interface Workspace {
   isPathIgnored(params: IsPathIgnoredParams): Promise<boolean>;
+  registerProjectFolder(
+    params: RegisterProjectFolderParams,
+  ): Promise<ProjectKey>;
   getFileContent(params: GetFileContentParams): Promise<string>;
   pullDiagnostics(
     params: PullDiagnosticsParams,
@@ -441,6 +460,9 @@ export function createWorkspace(transport: Transport): Workspace {
   isPathIgnored(params) {
     return transport.request("pgt/is_path_ignored", params);
   },
+  registerProjectFolder(params) {
+    return transport.request("pgt/register_project_folder", params);
+  },
   getFileContent(params) {
     return transport.request("pgt/get_file_content", params);
   },

‎packages/@postgrestools/backend-jsonrpc/tests/workspace.test.mjs

Lines changed: 4 additions & 1 deletion
@@ -2,7 +2,7 @@ import { resolve } from "node:path";
 import { fileURLToPath } from "node:url";
 import { describe, expect, it } from "vitest";
 
-import { createWorkspaceWithBinary } from "../dist";
+import { createWorkspaceWithBinary } from "../src";
 
 describe("Workspace API", () => {
   it("should process remote requests", async () => {
@@ -14,6 +14,9 @@ describe("Workspace API", () => {
     );
 
     const workspace = await createWorkspaceWithBinary(command);
+    workspace.registerProjectFolder({
+      setAsCurrentWorkspace: true,
+    });
     await workspace.openFile({
       path: {
         path: "test.sql",
