diff --git a/Cargo.lock b/Cargo.lock index f36aea5e..96342d36 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1841,18 +1841,6 @@ dependencies = [ "value-bag", ] -[[package]] -name = "lsp-server" -version = "0.7.7" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "550446e84739dcaf6d48a4a093973850669e13e8a34d8f8d64851041be267cd9" -dependencies = [ - "crossbeam-channel", - "log", - "serde", - "serde_json", -] - [[package]] name = "lsp-types" version = "0.94.1" @@ -1866,19 +1854,6 @@ dependencies = [ "url", ] -[[package]] -name = "lsp-types" -version = "0.95.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8e34d33a8e9b006cd3fc4fe69a921affa097bae4bb65f76271f4644f9a334365" -dependencies = [ - "bitflags 1.3.2", - "serde", - "serde_json", - "serde_repr", - "url", -] - [[package]] name = "matchers" version = "0.1.0" @@ -2078,16 +2053,6 @@ dependencies = [ "libm", ] -[[package]] -name = "num_cpus" -version = "1.16.0" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4161fcb6d602d4d2081af7c3a45852d875a03dd337a6bfdd6e06407b61342a43" -dependencies = [ - "hermit-abi 0.3.9", - "libc", -] - [[package]] name = "object" version = "0.36.5" @@ -2280,9 +2245,9 @@ dependencies = [ "pg_diagnostics", "pg_flags", "pg_fs", - "pg_lsp_new", + "pg_lsp", "pg_text_edit", - "pg_workspace_new", + "pg_workspace", "quick-junit", "rayon", "rustc-hash 2.1.0", @@ -2489,32 +2454,24 @@ name = "pg_lsp" version = "0.0.0" dependencies = [ "anyhow", - "async-channel 2.3.1", - "async-std", - "crossbeam-channel", - "dashmap 5.5.3", - "line_index", - "lsp-server", - "lsp-types 0.95.1", - "pg_base_db", - "pg_commands", + "biome_deserialize", + "futures", + "pg_analyse", "pg_completions", + "pg_configuration", + "pg_console", "pg_diagnostics", "pg_fs", - "pg_hover", - "pg_inlay_hints", - "pg_schema_cache", + "pg_lsp_converters", + "pg_text_edit", "pg_workspace", + "rustc-hash 2.1.0", "serde", "serde_json", - "sqlx", 
"text-size", - "threadpool", "tokio", - "tokio-util", "tower-lsp", "tracing", - "tracing-subscriber", ] [[package]] @@ -2527,31 +2484,6 @@ dependencies = [ "tower-lsp", ] -[[package]] -name = "pg_lsp_new" -version = "0.0.0" -dependencies = [ - "anyhow", - "biome_deserialize", - "futures", - "pg_analyse", - "pg_completions", - "pg_configuration", - "pg_console", - "pg_diagnostics", - "pg_fs", - "pg_lsp_converters", - "pg_text_edit", - "pg_workspace_new", - "rustc-hash 2.1.0", - "serde", - "serde_json", - "text-size", - "tokio", - "tower-lsp", - "tracing", -] - [[package]] name = "pg_markup" version = "0.0.0" @@ -2703,26 +2635,6 @@ dependencies = [ [[package]] name = "pg_workspace" version = "0.0.0" -dependencies = [ - "async-std", - "dashmap 5.5.3", - "pg_base_db", - "pg_fs", - "pg_hover", - "pg_lint", - "pg_query_ext", - "pg_schema_cache", - "pg_syntax", - "pg_typecheck", - "sqlx", - "text-size", - "tree-sitter", - "tree_sitter_sql", -] - -[[package]] -name = "pg_workspace_new" -version = "0.0.0" dependencies = [ "biome_deserialize", "dashmap 5.5.3", @@ -3220,7 +3132,7 @@ dependencies = [ "pg_diagnostics", "pg_query_ext", "pg_statement_splitter", - "pg_workspace_new", + "pg_workspace", "pulldown-cmark", ] @@ -3951,15 +3863,6 @@ dependencies = [ "once_cell", ] -[[package]] -name = "threadpool" -version = "1.8.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d050e60b33d41c19108b32cea32164033a9013fe3b46cbd4457559bfbf77afaa" -dependencies = [ - "num_cpus", -] - [[package]] name = "tikv-jemalloc-sys" version = "0.6.0+5.3.0-1-ge13ca993e8ccb9ba9847cc330696e02839f328f7" @@ -4144,7 +4047,7 @@ dependencies = [ "dashmap 5.5.3", "futures", "httparse", - "lsp-types 0.94.1", + "lsp-types", "memchr", "serde", "serde_json", diff --git a/Cargo.toml b/Cargo.toml index 5b6fb00a..9bf1c100 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -65,8 +65,8 @@ pg_inlay_hints = { path = "./crates/pg_inlay_hints", version = "0.0.0 pg_lexer = { path = 
"./crates/pg_lexer", version = "0.0.0" } pg_lexer_codegen = { path = "./crates/pg_lexer_codegen", version = "0.0.0" } pg_lint = { path = "./crates/pg_lint", version = "0.0.0" } +pg_lsp = { path = "./crates/pg_lsp", version = "0.0.0" } pg_lsp_converters = { path = "./crates/pg_lsp_converters", version = "0.0.0" } -pg_lsp_new = { path = "./crates/pg_lsp_new", version = "0.0.0" } pg_markup = { path = "./crates/pg_markup", version = "0.0.0" } pg_query_ext = { path = "./crates/pg_query_ext", version = "0.0.0" } pg_query_ext_codegen = { path = "./crates/pg_query_ext_codegen", version = "0.0.0" } @@ -79,7 +79,6 @@ pg_treesitter_queries = { path = "./crates/pg_treesitter_queries", version = pg_type_resolver = { path = "./crates/pg_type_resolver", version = "0.0.0" } pg_typecheck = { path = "./crates/pg_typecheck", version = "0.0.0" } pg_workspace = { path = "./crates/pg_workspace", version = "0.0.0" } -pg_workspace_new = { path = "./crates/pg_workspace_new", version = "0.0.0" } pg_test_utils = { path = "./crates/pg_test_utils" } # parser = { path = "./crates/parser", version = "0.0.0" } diff --git a/crates/pg_cli/Cargo.toml b/crates/pg_cli/Cargo.toml index 6abad15a..d4903698 100644 --- a/crates/pg_cli/Cargo.toml +++ b/crates/pg_cli/Cargo.toml @@ -24,9 +24,9 @@ pg_console = { workspace = true } pg_diagnostics = { workspace = true } pg_flags = { workspace = true } pg_fs = { workspace = true } -pg_lsp_new = { workspace = true } +pg_lsp = { workspace = true } pg_text_edit = { workspace = true } -pg_workspace_new = { workspace = true } +pg_workspace = { workspace = true } quick-junit = "0.5.0" rayon = { workspace = true } rustc-hash = { workspace = true } diff --git a/crates/pg_cli/src/changed.rs b/crates/pg_cli/src/changed.rs index 2c1888f2..300e99b2 100644 --- a/crates/pg_cli/src/changed.rs +++ b/crates/pg_cli/src/changed.rs @@ -1,7 +1,7 @@ use crate::CliDiagnostic; use pg_configuration::PartialConfiguration; use pg_fs::FileSystem; -use pg_workspace_new::DynRef; +use 
pg_workspace::DynRef; use std::ffi::OsString; pub(crate) fn get_changed_files( diff --git a/crates/pg_cli/src/cli_options.rs b/crates/pg_cli/src/cli_options.rs index 5a4300d1..24d7a3c3 100644 --- a/crates/pg_cli/src/cli_options.rs +++ b/crates/pg_cli/src/cli_options.rs @@ -22,7 +22,7 @@ pub struct CliOptions { #[bpaf(long("verbose"), switch, fallback(false))] pub verbose: bool, - /// Set the file path to the configuration file, or the directory path to find `biome.json` or `biome.jsonc`. + /// Set the file path to the configuration file, or the directory path to find `pglsp.toml`. /// If used, it disables the default configuration file resolution. #[bpaf(long("config-path"), argument("PATH"), optional)] pub config_path: Option<PathBuf>, @@ -44,7 +44,7 @@ pub struct CliOptions { #[bpaf(long("no-errors-on-unmatched"), switch)] pub no_errors_on_unmatched: bool, - /// Tell Biome to exit with an error code if some diagnostics emit warnings. + /// Tell PGLSP to exit with an error code if some diagnostics emit warnings. #[bpaf(long("error-on-warnings"), switch)] pub error_on_warnings: bool, @@ -82,7 +82,7 @@ pub struct CliOptions { fallback(Severity::default()), display_fallback )] - /// The level of diagnostics to show. In order, from the lowest to the most important: info, warn, error. Passing `--diagnostic-level=error` will cause Biome to print only diagnostics that contain only errors. + /// The level of diagnostics to show. In order, from the lowest to the most important: info, warn, error. Passing `--diagnostic-level=error` will cause PGLSP to print only diagnostics that contain only errors. 
pub diagnostic_level: Severity, } diff --git a/crates/pg_cli/src/commands/check.rs b/crates/pg_cli/src/commands/check.rs index 986060a6..72f4d007 100644 --- a/crates/pg_cli/src/commands/check.rs +++ b/crates/pg_cli/src/commands/check.rs @@ -1,12 +1,9 @@ -// use super::{determine_fix_file_mode, FixFileModeOptions, LoadEditorConfig}; use crate::cli_options::CliOptions; -// use crate::commands::{get_files_to_process_with_cli_options, CommandRunner}; use crate::{CliDiagnostic, Execution, TraversalMode}; use pg_configuration::PartialConfiguration; use pg_console::Console; -// use biome_deserialize::Merge; use pg_fs::FileSystem; -use pg_workspace_new::{configuration::LoadedConfiguration, DynRef, Workspace, WorkspaceError}; +use pg_workspace::{configuration::LoadedConfiguration, DynRef, Workspace, WorkspaceError}; use std::ffi::OsString; use super::{get_files_to_process_with_cli_options, CommandRunner}; diff --git a/crates/pg_cli/src/commands/daemon.rs b/crates/pg_cli/src/commands/daemon.rs index f35b6111..4ecf6d72 100644 --- a/crates/pg_cli/src/commands/daemon.rs +++ b/crates/pg_cli/src/commands/daemon.rs @@ -4,8 +4,8 @@ use crate::{ CliDiagnostic, CliSession, }; use pg_console::{markup, ConsoleExt}; -use pg_lsp_new::ServerFactory; -use pg_workspace_new::{workspace::WorkspaceClient, TransportError, WorkspaceError}; +use pg_lsp::ServerFactory; +use pg_workspace::{workspace::WorkspaceClient, TransportError, WorkspaceError}; use std::{env, fs, path::PathBuf}; use tokio::io; use tokio::runtime::Runtime; @@ -176,9 +176,9 @@ pub(crate) fn read_most_recent_log_file( log_path: Option<PathBuf>, log_file_name_prefix: String, ) -> io::Result<Option<String>> { - let biome_log_path = log_path.unwrap_or(default_pglsp_log_path()); + let pglsp_log_path = log_path.unwrap_or(default_pglsp_log_path()); - let most_recent = fs::read_dir(biome_log_path)? + let most_recent = fs::read_dir(pglsp_log_path)? 
.flatten() .filter(|file| file.file_type().is_ok_and(|ty| ty.is_file())) .filter_map(|file| { @@ -238,7 +238,7 @@ pub fn default_pglsp_log_path() -> PathBuf { /// Tracing filter enabling: /// - All spans and events at level info or higher -/// - All spans and events at level debug in crates whose name starts with `biome` +/// - All spans and events at level debug in crates whose name starts with `pglsp` struct LoggingFilter; /// Tracing filter used for spans emitted by `pglsp*` crates diff --git a/crates/pg_cli/src/commands/init.rs b/crates/pg_cli/src/commands/init.rs index 77aba0cc..32e9e0c5 100644 --- a/crates/pg_cli/src/commands/init.rs +++ b/crates/pg_cli/src/commands/init.rs @@ -2,7 +2,7 @@ use crate::{CliDiagnostic, CliSession}; use pg_configuration::PartialConfiguration; use pg_console::{markup, ConsoleExt}; use pg_fs::ConfigName; -use pg_workspace_new::configuration::create_config; +use pg_workspace::configuration::create_config; pub(crate) fn init(mut session: CliSession) -> Result<(), CliDiagnostic> { let fs = &mut session.app.fs; diff --git a/crates/pg_cli/src/commands/mod.rs b/crates/pg_cli/src/commands/mod.rs index 22708491..c01cd6a4 100644 --- a/crates/pg_cli/src/commands/mod.rs +++ b/crates/pg_cli/src/commands/mod.rs @@ -9,10 +9,10 @@ use bpaf::Bpaf; use pg_configuration::{partial_configuration, PartialConfiguration}; use pg_console::Console; use pg_fs::FileSystem; -use pg_workspace_new::configuration::{load_configuration, LoadedConfiguration}; -use pg_workspace_new::settings::PartialConfigurationExt; -use pg_workspace_new::workspace::{FixFileMode, UpdateSettingsParams}; -use pg_workspace_new::{DynRef, Workspace, WorkspaceError}; +use pg_workspace::configuration::{load_configuration, LoadedConfiguration}; +use pg_workspace::settings::PartialConfigurationExt; +use pg_workspace::workspace::{FixFileMode, UpdateSettingsParams}; +use pg_workspace::{DynRef, Workspace, WorkspaceError}; use std::ffi::OsString; use std::path::PathBuf; diff --git 
a/crates/pg_cli/src/commands/version.rs b/crates/pg_cli/src/commands/version.rs index 49900a23..434fa716 100644 --- a/crates/pg_cli/src/commands/version.rs +++ b/crates/pg_cli/src/commands/version.rs @@ -1,6 +1,6 @@ use pg_console::fmt::Formatter; use pg_console::{fmt, markup, ConsoleExt}; -use pg_workspace_new::workspace::ServerInfo; +use pg_workspace::workspace::ServerInfo; use crate::{CliDiagnostic, CliSession, VERSION}; diff --git a/crates/pg_cli/src/diagnostics.rs b/crates/pg_cli/src/diagnostics.rs index 8186fa48..f1f6cdd4 100644 --- a/crates/pg_cli/src/diagnostics.rs +++ b/crates/pg_cli/src/diagnostics.rs @@ -4,7 +4,7 @@ use pg_diagnostics::adapters::{BpafError, IoError, SerdeJsonError}; use pg_diagnostics::{ Advices, Category, Diagnostic, Error, LogCategory, MessageAndDescription, Severity, Visit, }; -use pg_workspace_new::WorkspaceError; +use pg_workspace::WorkspaceError; use std::process::{ExitCode, Termination}; use std::{env::current_exe, fmt::Debug}; @@ -12,10 +12,10 @@ fn command_name() -> String { current_exe() .ok() .and_then(|path| Some(path.file_name()?.to_str()?.to_string())) - .unwrap_or_else(|| String::from("biome")) + .unwrap_or_else(|| String::from("pglsp")) } -/// A diagnostic that is emitted when running biome via CLI. +/// A diagnostic that is emitted when running PGLSP via CLI. 
/// /// When displaying the diagnostic, #[derive(Debug, Diagnostic)] @@ -42,13 +42,13 @@ pub enum CliDiagnostic { FileCheck(FileCheck), /// When an argument is higher than the expected maximum OverflowNumberArgument(OverflowNumberArgument), - /// Wrapper for an underlying `biome_service` error + /// Wrapper for an underlying pglsp-service error WorkspaceError(WorkspaceError), /// Wrapper for an underlying `std::io` error IoError(IoDiagnostic), /// The daemon is not running ServerNotRunning(ServerNotRunning), - /// The end configuration (`biome.json` + other options) is incompatible with the command + /// The end configuration (`pglsp.toml` + other options) is incompatible with the command IncompatibleEndConfiguration(IncompatibleEndConfiguration), /// No files processed during the file system traversal NoFilesWereProcessed(NoFilesWereProcessed), @@ -410,7 +410,7 @@ impl CliDiagnostic { Self::ServerNotRunning(ServerNotRunning) } - /// Emitted when the end configuration (`biome.json` file + CLI arguments + LSP configuration) + /// Emitted when the end configuration (`pglsp.toml` file + CLI arguments + LSP configuration) /// results in a combination of options that doesn't allow to run the command correctly. 
/// /// A reason needs to be provided diff --git a/crates/pg_cli/src/execute/process_file/workspace_file.rs b/crates/pg_cli/src/execute/process_file/workspace_file.rs index 36a8f4d0..6f3cedc7 100644 --- a/crates/pg_cli/src/execute/process_file/workspace_file.rs +++ b/crates/pg_cli/src/execute/process_file/workspace_file.rs @@ -2,8 +2,8 @@ use crate::execute::diagnostics::{ResultExt, ResultIoExt}; use crate::execute::process_file::SharedTraversalOptions; use pg_diagnostics::{category, Error}; use pg_fs::{File, OpenOptions, PgLspPath}; -use pg_workspace_new::workspace::{ChangeParams, FileGuard, OpenFileParams}; -use pg_workspace_new::{Workspace, WorkspaceError}; +use pg_workspace::workspace::{ChangeParams, FileGuard, OpenFileParams}; +use pg_workspace::{Workspace, WorkspaceError}; use std::ffi::OsStr; use std::path::{Path, PathBuf}; diff --git a/crates/pg_cli/src/execute/traverse.rs b/crates/pg_cli/src/execute/traverse.rs index b67e9f3d..98a831c8 100644 --- a/crates/pg_cli/src/execute/traverse.rs +++ b/crates/pg_cli/src/execute/traverse.rs @@ -9,9 +9,9 @@ use pg_diagnostics::DiagnosticTags; use pg_diagnostics::{DiagnosticExt, Error, Resource, Severity}; use pg_fs::{FileSystem, PathInterner, PgLspPath}; use pg_fs::{TraversalContext, TraversalScope}; -use pg_workspace_new::dome::Dome; -use pg_workspace_new::workspace::IsPathIgnoredParams; -use pg_workspace_new::{Workspace, WorkspaceError}; +use pg_workspace::dome::Dome; +use pg_workspace::workspace::IsPathIgnoredParams; +use pg_workspace::{Workspace, WorkspaceError}; use rustc_hash::FxHashSet; use std::collections::BTreeSet; use std::sync::atomic::AtomicU32; diff --git a/crates/pg_cli/src/lib.rs b/crates/pg_cli/src/lib.rs index 256d0e22..8bed8509 100644 --- a/crates/pg_cli/src/lib.rs +++ b/crates/pg_cli/src/lib.rs @@ -9,7 +9,7 @@ use commands::check::CheckCommandPayload; use commands::CommandRunner; use pg_console::{ColorMode, Console}; use pg_fs::OsFileSystem; -use pg_workspace_new::{App, DynRef, Workspace, 
WorkspaceRef}; +use pg_workspace::{App, DynRef, Workspace, WorkspaceRef}; use std::env; mod changed; diff --git a/crates/pg_cli/src/logging.rs b/crates/pg_cli/src/logging.rs index e8cd32dd..72395023 100644 --- a/crates/pg_cli/src/logging.rs +++ b/crates/pg_cli/src/logging.rs @@ -91,12 +91,12 @@ impl Display for LoggingLevel { /// Tracing filter enabling: /// - All spans and events at level info or higher -/// - All spans and events at level debug in crates whose name starts with `biome` +/// - All spans and events at level debug in crates whose name starts with `pglsp` struct LoggingFilter { level: LoggingLevel, } -/// Tracing filter used for spans emitted by `biome*` crates +/// Tracing filter used for spans emitted by `pglsp*` crates const SELF_FILTER: LevelFilter = if cfg!(debug_assertions) { LevelFilter::TRACE } else { diff --git a/crates/pg_cli/src/main.rs b/crates/pg_cli/src/main.rs index b07c3858..c97f9291 100644 --- a/crates/pg_cli/src/main.rs +++ b/crates/pg_cli/src/main.rs @@ -6,7 +6,7 @@ use pg_cli::{ }; use pg_console::{markup, ConsoleExt, EnvConsole}; use pg_diagnostics::{set_bottom_frame, Diagnostic, PrintDiagnostic}; -use pg_workspace_new::workspace; +use pg_workspace::workspace; use std::process::{ExitCode, Termination}; use tokio::runtime::Runtime; diff --git a/crates/pg_cli/src/reporter/gitlab.rs b/crates/pg_cli/src/reporter/gitlab.rs index 9be7974e..ea3fd285 100644 --- a/crates/pg_cli/src/reporter/gitlab.rs +++ b/crates/pg_cli/src/reporter/gitlab.rs @@ -131,8 +131,8 @@ impl Display for GitLabDiagnostics<'_> { true } }) - .filter_map(|biome_diagnostic| { - let absolute_path = match biome_diagnostic.location().resource { + .filter_map(|pglsp_diagnostic| { + let absolute_path = match pglsp_diagnostic.location().resource { Some(Resource::File(file)) => Some(file), _ => None, } @@ -143,11 +143,11 @@ impl Display for GitLabDiagnostics<'_> { None => absolute_path.to_owned(), }; - let initial_fingerprint = 
self.compute_initial_fingerprint(biome_diagnostic, &path); + let initial_fingerprint = self.compute_initial_fingerprint(pglsp_diagnostic, &path); let fingerprint = hasher.rehash_until_unique(initial_fingerprint); GitLabDiagnostic::try_from_diagnostic( - biome_diagnostic, + pglsp_diagnostic, path.to_string(), fingerprint, ) diff --git a/crates/pg_cli/src/service/mod.rs b/crates/pg_cli/src/service/mod.rs index 6b137c05..43eb6264 100644 --- a/crates/pg_cli/src/service/mod.rs +++ b/crates/pg_cli/src/service/mod.rs @@ -17,7 +17,7 @@ use std::{ use anyhow::{bail, ensure, Context, Error}; use dashmap::DashMap; -use pg_workspace_new::{ +use pg_workspace::{ workspace::{TransportRequest, WorkspaceTransport}, TransportError, }; diff --git a/crates/pg_cli/src/service/unix.rs b/crates/pg_cli/src/service/unix.rs index 9a529b0c..a30d6e38 100644 --- a/crates/pg_cli/src/service/unix.rs +++ b/crates/pg_cli/src/service/unix.rs @@ -6,7 +6,7 @@ use std::{ time::Duration, }; -use pg_lsp_new::{ServerConnection, ServerFactory}; +use pg_lsp::{ServerConnection, ServerFactory}; use tokio::{ io::Interest, net::{ diff --git a/crates/pg_cli/src/service/windows.rs b/crates/pg_cli/src/service/windows.rs index ed2fe1d6..25c59641 100644 --- a/crates/pg_cli/src/service/windows.rs +++ b/crates/pg_cli/src/service/windows.rs @@ -13,7 +13,7 @@ use std::{ time::Duration, }; -use pg_lsp_new::{ServerConnection, ServerFactory}; +use pg_lsp::{ServerConnection, ServerFactory}; use tokio::{ io::{AsyncRead, AsyncWrite, ReadBuf}, net::windows::named_pipe::{ClientOptions, NamedPipeClient, NamedPipeServer, ServerOptions}, diff --git a/crates/pg_configuration/src/analyser/linter/rules.rs b/crates/pg_configuration/src/analyser/linter/rules.rs index cbd875ad..5b5deb06 100644 --- a/crates/pg_configuration/src/analyser/linter/rules.rs +++ b/crates/pg_configuration/src/analyser/linter/rules.rs @@ -46,7 +46,7 @@ impl std::str::FromStr for RuleGroup { #[cfg_attr(feature = "schema", derive(JsonSchema))] #[serde(rename_all 
= "camelCase", deny_unknown_fields)] pub struct Rules { - #[doc = r" It enables the lint rules recommended by Biome. `true` by default."] + #[doc = r" It enables the lint rules recommended by PGLSP. `true` by default."] #[serde(skip_serializing_if = "Option::is_none")] pub recommended: Option, #[doc = r" It enables ALL rules. The rules that belong to `nursery` won't be enabled."] diff --git a/crates/pg_fs/src/fs/memory.rs b/crates/pg_fs/src/fs/memory.rs index c467863f..cd807270 100644 --- a/crates/pg_fs/src/fs/memory.rs +++ b/crates/pg_fs/src/fs/memory.rs @@ -297,11 +297,11 @@ impl<'scope> TraversalScope<'scope> for MemoryTraversalScope<'scope> { if should_process_file { let _ = ctx.interner().intern_path(path.into()); - let biome_path = PgLspPath::new(path); - if !ctx.can_handle(&biome_path) { + let pglsp_path = PgLspPath::new(path); + if !ctx.can_handle(&pglsp_path) { continue; } - ctx.store_path(biome_path); + ctx.store_path(pglsp_path); } } } diff --git a/crates/pg_fs/src/fs/os.rs b/crates/pg_fs/src/fs/os.rs index 58a3263e..d5353f55 100644 --- a/crates/pg_fs/src/fs/os.rs +++ b/crates/pg_fs/src/fs/os.rs @@ -195,7 +195,7 @@ impl<'scope> TraversalScope<'scope> for OsTraversalScope<'scope> { } } -// TODO: remove in Biome 2.0, and directly use `.gitignore` +// TODO: remove in 2.0, and directly use `.gitignore` /// Default list of ignored directories, in the future will be supplanted by /// detecting and parsing .ignore files const DEFAULT_IGNORE: &[&[u8]] = &[b".git", b".svn", b".hg", b".yarn", b"node_modules"]; @@ -295,7 +295,7 @@ fn handle_any_file<'scope>( // In case the file is inside a directory that is behind a symbolic link, // the unresolved origin path is used to construct a new path. // This is required to support ignore patterns to symbolic links. 
- let biome_path = if let Some(old_origin_path) = &origin_path { + let pglsp_path = if let Some(old_origin_path) = &origin_path { if let Some(file_name) = path.file_name() { let new_origin_path = old_origin_path.join(file_name); origin_path = Some(new_origin_path.clone()); @@ -317,7 +317,7 @@ fn handle_any_file<'scope>( // doing a directory traversal, but printing an error message if the // user explicitly requests an unsupported file to be handled. // This check also works for symbolic links. - if !ctx.can_handle(&biome_path) { + if !ctx.can_handle(&pglsp_path) { return; } diff --git a/crates/pg_fs/src/path.rs b/crates/pg_fs/src/path.rs index e25c158e..0889573c 100644 --- a/crates/pg_fs/src/path.rs +++ b/crates/pg_fs/src/path.rs @@ -91,7 +91,7 @@ impl From for FileKinds { )] pub struct PgLspPath { path: PathBuf, - /// Determines the kind of the file inside Biome. Some files are considered as configuration files, others as manifest files, and others as files to handle + /// Determines the kind of the file inside PGLSP. Some files are considered as configuration files, others as manifest files, and others as files to handle kind: FileKinds, /// Whether this path (usually a file) was fixed as a result of a format/lint/check command with the `--write` filag. was_written: bool, @@ -164,7 +164,7 @@ impl PgLspPath { /// Returns the contents of a file, if it exists /// /// ## Error - /// If Biome doesn't have permissions to read the file + /// If PGLSP doesn't have permissions to read the file pub fn get_buffer_from_file(&mut self) -> String { // we assume we have permissions read_to_string(&self.path).expect("cannot read the file to format") @@ -177,7 +177,7 @@ impl PgLspPath { } /// The priority of the file. 
- /// - `biome.json` and `biome.jsonc` have the highest priority + /// - `pglsp.toml` has the highest priority /// - `package.json` and `tsconfig.json`/`jsconfig.json` have the second-highest priority, and they are considered as manifest files /// - Other files are considered as files to handle fn priority(file_name: &OsStr) -> FileKinds { diff --git a/crates/pg_lsp/Cargo.toml b/crates/pg_lsp/Cargo.toml index 861051ab..88faf022 100644 --- a/crates/pg_lsp/Cargo.toml +++ b/crates/pg_lsp/Cargo.toml @@ -11,41 +11,26 @@ repository.workspace = true version = "0.0.0" -[[bin]] -name = "pglsp" -path = "src/main.rs" -test = false - [dependencies] -anyhow = "1.0.81" -async-channel = "2.3.1" -async-std = "1.12.0" -crossbeam-channel = "0.5.12" -dashmap = "5.5.3" -lsp-server = "0.7.6" -lsp-types = "0.95.0" -serde.workspace = true -serde_json.workspace = true -text-size.workspace = true -threadpool = "1.8.1" - -line_index.workspace = true -sqlx.workspace = true -tower-lsp.workspace = true - -pg_base_db.workspace = true -pg_commands.workspace = true -pg_completions.workspace = true -pg_diagnostics.workspace = true -pg_fs.workspace = true -pg_hover.workspace = true -pg_inlay_hints.workspace = true -pg_schema_cache.workspace = true -pg_workspace.workspace = true -tokio = { version = "1.40.0", features = ["io-std", "macros", "rt-multi-thread", "sync", "time"] } -tokio-util = "0.7.12" -tracing = "0.1.40" -tracing-subscriber = "0.3.18" +anyhow = { workspace = true } +biome_deserialize = { workspace = true } +futures = "0.3.31" +pg_analyse = { workspace = true } +pg_completions = { workspace = true } +pg_configuration = { workspace = true } +pg_console = { workspace = true } +pg_diagnostics = { workspace = true } +pg_fs = { workspace = true } +pg_lsp_converters = { workspace = true } +pg_text_edit = { workspace = true } +pg_workspace = { workspace = true } +rustc-hash = { workspace = true } +serde = { workspace = true, features = ["derive"] } +serde_json = { workspace = true } 
+text-size.workspace = true +tokio = { workspace = true, features = ["rt", "io-std"] } +tower-lsp = { version = "0.20.0" } +tracing = { workspace = true, features = ["attributes"] } [dev-dependencies] diff --git a/crates/pg_lsp_new/src/capabilities.rs b/crates/pg_lsp/src/capabilities.rs similarity index 100% rename from crates/pg_lsp_new/src/capabilities.rs rename to crates/pg_lsp/src/capabilities.rs diff --git a/crates/pg_lsp/src/client/client_config_opts.rs b/crates/pg_lsp/src/client/client_config_opts.rs deleted file mode 100644 index 054d487e..00000000 --- a/crates/pg_lsp/src/client/client_config_opts.rs +++ /dev/null @@ -1,25 +0,0 @@ -use serde::Deserialize; - -#[derive(Deserialize, Debug)] -pub struct ClientConfigurationOptions { - #[serde(rename(deserialize = "databaseUrl"))] - pub(crate) db_connection_string: Option, -} - -#[cfg(test)] -mod tests { - use serde_json::json; - - use crate::client::client_config_opts::ClientConfigurationOptions; - - #[test] - fn test_json_parsing() { - let config = json!({ - "databaseUrl": "cool-shit" - }); - - let parsed: ClientConfigurationOptions = serde_json::from_value(config).unwrap(); - - assert_eq!(parsed.db_connection_string, Some("cool-shit".into())); - } -} diff --git a/crates/pg_lsp/src/client/client_flags.rs b/crates/pg_lsp/src/client/client_flags.rs deleted file mode 100644 index f8ef2445..00000000 --- a/crates/pg_lsp/src/client/client_flags.rs +++ /dev/null @@ -1,22 +0,0 @@ -use tower_lsp::lsp_types::InitializeParams; - -/// Contains information about the client's capabilities. -/// This is used to determine which features the server can use. -#[derive(Debug, Clone)] -pub struct ClientFlags { - /// If `true`, the server can pull configuration from the client. 
- pub supports_pull_opts: bool, -} - -impl ClientFlags { - pub(crate) fn from_initialize_request_params(params: &InitializeParams) -> Self { - let supports_pull_opts = params - .capabilities - .workspace - .as_ref() - .and_then(|w| w.configuration) - .unwrap_or(false); - - Self { supports_pull_opts } - } -} diff --git a/crates/pg_lsp/src/client/mod.rs b/crates/pg_lsp/src/client/mod.rs deleted file mode 100644 index 63472a43..00000000 --- a/crates/pg_lsp/src/client/mod.rs +++ /dev/null @@ -1,2 +0,0 @@ -pub mod client_config_opts; -pub mod client_flags; diff --git a/crates/pg_lsp/src/db_connection.rs b/crates/pg_lsp/src/db_connection.rs deleted file mode 100644 index 4e0bdb72..00000000 --- a/crates/pg_lsp/src/db_connection.rs +++ /dev/null @@ -1,86 +0,0 @@ -use std::sync::Arc; - -use pg_schema_cache::SchemaCache; -use pg_workspace::Workspace; -use sqlx::{postgres::PgListener, PgPool}; -use tokio::{sync::RwLock, task::JoinHandle}; - -pub(crate) struct DbConnection { - pool: PgPool, - connection_string: String, - schema_update_handle: JoinHandle<()>, - close_tx: tokio::sync::oneshot::Sender<()>, -} - -impl DbConnection { - #[tracing::instrument(name = "Setting up new Database Connection…", skip(ide))] - pub(crate) async fn new( - connection_string: String, - ide: Arc<RwLock<Workspace>>, - ) -> Result<Self, sqlx::Error> { - tracing::info!("Trying to connect to pool…"); - let pool = PgPool::connect(&connection_string).await?; - tracing::info!("Connected to Pool."); - - let mut listener = PgListener::connect_with(&pool).await?; - tracing::info!("Connected to Listener."); - - listener.listen_all(["postgres_lsp", "pgrst"]).await?; - tracing::info!("Listening!"); - - let (close_tx, close_rx) = tokio::sync::oneshot::channel::<()>(); - - let cloned_pool = pool.clone(); - - let schema_update_handle: JoinHandle<()> = tokio::spawn(async move { - let mut moved_rx = close_rx; - - loop { - tokio::select! 
{ - res = listener.recv() => { - match res { - Ok(not) => { - if not.payload() == "reload schema" { - let schema_cache = SchemaCache::load(&cloned_pool).await.unwrap(); - ide.write().await.set_schema_cache(schema_cache); - }; - } - Err(why) => { - eprintln!("Error receiving notification: {:?}", why); - break; - } - } - } - - _ = &mut moved_rx => { - return; - } - } - } - }); - tracing::info!("Set up schema update handle."); - - Ok(Self { - pool, - connection_string, - schema_update_handle, - close_tx, - }) - } - - pub(crate) fn connected_to(&self, connection_string: &str) -> bool { - connection_string == self.connection_string - } - - #[tracing::instrument(name = "Closing DB Pool", skip(self))] - pub(crate) async fn close(self) { - let _ = self.close_tx.send(()); - let _ = self.schema_update_handle.await; - - self.pool.close().await; - } - - pub(crate) fn get_pool(&self) -> PgPool { - self.pool.clone() - } -} diff --git a/crates/pg_lsp/src/debouncer.rs b/crates/pg_lsp/src/debouncer.rs deleted file mode 100644 index a970e278..00000000 --- a/crates/pg_lsp/src/debouncer.rs +++ /dev/null @@ -1,86 +0,0 @@ -use std::{ - future::Future, - pin::Pin, - sync::{atomic::AtomicBool, Arc}, -}; - -type AsyncBlock = Pin<Box<dyn Future<Output = ()> + 'static + Send>>; - -pub(crate) struct SimpleTokioDebouncer { - handle: tokio::task::JoinHandle<()>, - tx: tokio::sync::mpsc::Sender<AsyncBlock>, - shutdown_flag: Arc<AtomicBool>, -} - -impl SimpleTokioDebouncer { - pub fn new(timeout: std::time::Duration) -> Self { - let (tx, mut rx) = tokio::sync::mpsc::channel(100); - - let shutdown_flag = Arc::new(AtomicBool::new(false)); - let shutdown_flag_clone = shutdown_flag.clone(); - - let handle = tokio::spawn(async move { - let mut maybe_args: Option<AsyncBlock> = None; - let mut instant = tokio::time::Instant::now() + timeout; - - loop { - if shutdown_flag_clone.load(std::sync::atomic::Ordering::Relaxed) { - break; - } - - tokio::select! 
{ - // If the timeout is reached, execute and reset the last received action - _ = tokio::time::sleep_until(instant) => { - match maybe_args { - Some(block) => { - block.await; - maybe_args = None; - } - None => continue, - } - } - - // If a new action is received, update the action and reset the timeout - cb = rx.recv() => { - match cb { - Some(cb) => { - maybe_args = Some(cb); - instant = tokio::time::Instant::now() + timeout; - } - None => break, // channel closed - } - } - } - } - }); - - Self { - handle, - tx, - shutdown_flag, - } - } - - #[tracing::instrument(name = "Adding task to debouncer", skip(self, block))] - pub async fn debounce(&self, block: AsyncBlock) { - if self - .shutdown_flag - .load(std::sync::atomic::Ordering::Relaxed) - { - tracing::error!( - "Trying to debounce tasks, but the Debouncer is in the process of shutting down." - ); - return; - } - - self.tx.send(block).await.unwrap(); - } - - #[tracing::instrument(name = "Shutting down debouncer", skip(self))] - pub async fn shutdown(&self) { - self.shutdown_flag - .store(true, std::sync::atomic::Ordering::Relaxed); - - self.handle.abort(); // we don't care about any errors during shutdown - } -} diff --git a/crates/pg_lsp_new/src/diagnostics.rs b/crates/pg_lsp/src/diagnostics.rs similarity index 98% rename from crates/pg_lsp_new/src/diagnostics.rs rename to crates/pg_lsp/src/diagnostics.rs index 402043ed..a0a04144 100644 --- a/crates/pg_lsp_new/src/diagnostics.rs +++ b/crates/pg_lsp/src/diagnostics.rs @@ -1,7 +1,7 @@ use crate::utils::into_lsp_error; use anyhow::Error; use pg_diagnostics::print_diagnostic_to_string; -use pg_workspace_new::WorkspaceError; +use pg_workspace::WorkspaceError; use std::fmt::{Display, Formatter}; use tower_lsp::lsp_types::MessageType; diff --git a/crates/pg_lsp_new/src/documents.rs b/crates/pg_lsp/src/documents.rs similarity index 100% rename from crates/pg_lsp_new/src/documents.rs rename to crates/pg_lsp/src/documents.rs diff --git 
a/crates/pg_lsp_new/src/handlers.rs b/crates/pg_lsp/src/handlers.rs similarity index 100% rename from crates/pg_lsp_new/src/handlers.rs rename to crates/pg_lsp/src/handlers.rs diff --git a/crates/pg_lsp_new/src/handlers/completions.rs b/crates/pg_lsp/src/handlers/completions.rs similarity index 98% rename from crates/pg_lsp_new/src/handlers/completions.rs rename to crates/pg_lsp/src/handlers/completions.rs index 4efba210..f13526cd 100644 --- a/crates/pg_lsp_new/src/handlers/completions.rs +++ b/crates/pg_lsp/src/handlers/completions.rs @@ -1,6 +1,6 @@ use crate::session::Session; use anyhow::Result; -use pg_workspace_new::workspace; +use pg_workspace::workspace; use tower_lsp::lsp_types::{self, CompletionItem, CompletionItemLabelDetails}; #[tracing::instrument(level = "trace", skip_all)] diff --git a/crates/pg_lsp_new/src/handlers/text_document.rs b/crates/pg_lsp/src/handlers/text_document.rs similarity index 96% rename from crates/pg_lsp_new/src/handlers/text_document.rs rename to crates/pg_lsp/src/handlers/text_document.rs index 173d7e99..d612fa65 100644 --- a/crates/pg_lsp_new/src/handlers/text_document.rs +++ b/crates/pg_lsp/src/handlers/text_document.rs @@ -1,7 +1,7 @@ use crate::{documents::Document, session::Session, utils::apply_document_changes}; use anyhow::Result; use pg_lsp_converters::from_proto::text_range; -use pg_workspace_new::workspace::{ +use pg_workspace::workspace::{ ChangeFileParams, ChangeParams, CloseFileParams, GetFileContentParams, OpenFileParams, }; use tower_lsp::lsp_types; @@ -103,11 +103,11 @@ pub(crate) async fn did_close( params: lsp_types::DidCloseTextDocumentParams, ) -> Result<()> { let url = params.text_document.uri; - let biome_path = session.file_path(&url)?; + let pglsp_path = session.file_path(&url)?; session .workspace - .close_file(CloseFileParams { path: biome_path })?; + .close_file(CloseFileParams { path: pglsp_path })?; session.remove_document(&url); diff --git a/crates/pg_lsp/src/lib.rs b/crates/pg_lsp/src/lib.rs index 
579d584a..99db526f 100644 --- a/crates/pg_lsp/src/lib.rs +++ b/crates/pg_lsp/src/lib.rs @@ -1,6 +1,9 @@ -mod client; -mod db_connection; -mod debouncer; -pub mod server; +mod capabilities; +mod diagnostics; +mod documents; +mod handlers; +mod server; mod session; mod utils; + +pub use crate::server::{LSPServer, ServerConnection, ServerFactory}; diff --git a/crates/pg_lsp/src/main.rs b/crates/pg_lsp/src/main.rs deleted file mode 100644 index a5202626..00000000 --- a/crates/pg_lsp/src/main.rs +++ /dev/null @@ -1,31 +0,0 @@ -use std::{fs::File, path::PathBuf, str::FromStr}; - -use pg_lsp::server::LspServer; -use tower_lsp::{LspService, Server}; -use tracing_subscriber::fmt::format::FmtSpan; - -#[tokio::main] -async fn main() -> anyhow::Result<()> { - let path = PathBuf::from_str("pglsp.log").expect("Opened the log file."); - let file = File::create(path).expect("Could not open the file."); - - let subscriber = tracing_subscriber::FmtSubscriber::builder() - .with_span_events(FmtSpan::ENTER) - .with_span_events(FmtSpan::CLOSE) - .with_ansi(false) - .with_writer(file) - .finish(); - - tracing::subscriber::set_global_default(subscriber)?; - - let stdin = tokio::io::stdin(); - let stdout = tokio::io::stdout(); - - tracing::info!("Starting server."); - - let (service, socket) = LspService::new(LspServer::new); - - Server::new(stdin, stdout, socket).serve(service).await; - - Ok(()) -} diff --git a/crates/pg_lsp/src/server.rs b/crates/pg_lsp/src/server.rs index 3efb4f17..d07dab9e 100644 --- a/crates/pg_lsp/src/server.rs +++ b/crates/pg_lsp/src/server.rs @@ -1,509 +1,430 @@ -use std::sync::Arc; - -use notification::ShowMessage; -use pg_commands::CommandType; -use tokio::sync::RwLock; -use tower_lsp::jsonrpc; -use tower_lsp::lsp_types::*; -use tower_lsp::{Client, LanguageServer}; - -use crate::client::client_config_opts::ClientConfigurationOptions; -use crate::client::client_flags::ClientFlags; -use crate::debouncer::SimpleTokioDebouncer; -use crate::session::Session; -use 
crate::utils::file_path; -use crate::utils::normalize_uri; -use crate::utils::to_proto; - -pub struct LspServer { - client: Arc, - session: Arc, - client_capabilities: RwLock>, - debouncer: SimpleTokioDebouncer, +use crate::capabilities::server_capabilities; +use crate::diagnostics::{handle_lsp_error, LspError}; +use crate::handlers; +use crate::session::{ + CapabilitySet, CapabilityStatus, ClientInformation, Session, SessionHandle, SessionKey, +}; +use crate::utils::{into_lsp_error, panic_to_lsp_error}; +use futures::future::ready; +use futures::FutureExt; +use pg_diagnostics::panic::PanicError; +use pg_fs::{ConfigName, FileSystem, OsFileSystem}; +use pg_workspace::{workspace, DynRef, Workspace}; +use rustc_hash::FxHashMap; +use serde_json::json; +use std::panic::RefUnwindSafe; +use std::path::PathBuf; +use std::sync::atomic::{AtomicBool, AtomicU64, Ordering}; +use std::sync::{Arc, Mutex}; +use tokio::io::{AsyncRead, AsyncWrite}; +use tokio::sync::Notify; +use tokio::task::spawn_blocking; +use tower_lsp::jsonrpc::Result as LspResult; +use tower_lsp::{lsp_types::*, ClientSocket}; +use tower_lsp::{LanguageServer, LspService, Server}; +use tracing::{error, info}; + +pub struct LSPServer { + session: SessionHandle, + /// Map of all sessions connected to the same [ServerFactory] as this [LSPServer]. 
+ sessions: Sessions, + /// If this is true the server will broadcast a shutdown signal once the + /// last client disconnected + stop_on_disconnect: bool, + /// This shared flag is set to true once at least one session has been + /// initialized on this server instance + is_initialized: Arc, } -impl LspServer { - pub fn new(client: Client) -> Self { - tracing::info!("Setting up server."); - let s = Self { - client: Arc::new(client), - session: Arc::new(Session::new()), - client_capabilities: RwLock::new(None), - debouncer: SimpleTokioDebouncer::new(std::time::Duration::from_millis(500)), - }; - tracing::info!("Server setup complete."); - - s - } - - /// When the client sends a didChangeConfiguration notification, we need to parse the received JSON. - #[tracing::instrument( - name = "Parsing config from client", - skip(self, value), - fields(options = %value) - )] - async fn parse_config_from_client( - &self, - mut value: serde_json::Value, - ) -> Option { - let options = match value.get_mut("pglsp") { - Some(section) => section.take(), - None => value, - }; - - match serde_json::from_value::(options) { - Ok(new_options) => Some(new_options), - Err(why) => { - let message = format!( - "The texlab configuration is invalid; using the default settings instead.\nDetails: {why}" - ); - let typ = MessageType::WARNING; - self.client - .send_notification::(ShowMessageParams { message, typ }) - .await; - None - } +impl RefUnwindSafe for LSPServer {} + +impl LSPServer { + fn new( + session: SessionHandle, + sessions: Sessions, + stop_on_disconnect: bool, + is_initialized: Arc, + ) -> Self { + Self { + session, + sessions, + stop_on_disconnect, + is_initialized, } } - #[tracing::instrument(name = "Processing Config", skip(self))] - async fn process_config(&self, opts: Option) -> anyhow::Result<()> { - if opts - .as_ref() - .is_some_and(|o| o.db_connection_string.is_some()) - { - let conn_str = opts.unwrap().db_connection_string.unwrap(); - 
self.session.change_db(conn_str).await - } else { - Ok(()) - } - } + async fn setup_capabilities(&self) { + let mut capabilities = CapabilitySet::default(); - async fn parse_and_handle_config_from_client(&self, value: serde_json::Value) { - let parsed = self.parse_config_from_client(value).await; - match self.process_config(parsed).await { - Ok(_) => {} - Err(e) => { - self.client - .show_message( - MessageType::ERROR, - format!("Unable to parse received config: {e:?}"), - ) - .await; - } - }; + capabilities.add_capability( + "pglsp_did_change_extension_settings", + "workspace/didChangeConfiguration", + if self.session.can_register_did_change_configuration() { + CapabilityStatus::Enable(None) + } else { + CapabilityStatus::Disable + }, + ); + + capabilities.add_capability( + "pglsp_did_change_workspace_settings", + "workspace/didChangeWatchedFiles", + if let Some(base_path) = self.session.base_path() { + CapabilityStatus::Enable(Some(json!(DidChangeWatchedFilesRegistrationOptions { + watchers: vec![FileSystemWatcher { + glob_pattern: GlobPattern::String(format!( + "{}/pglsp.toml", + base_path.display() + )), + kind: Some(WatchKind::all()), + },], + }))) + } else { + CapabilityStatus::Disable + }, + ); + + self.session.register_capabilities(capabilities).await; } - #[tracing::instrument(name = "Requesting & Handling Configuration from Client", skip(self))] - async fn request_and_handle_config_from_client(&self) { - let config_items = vec![ConfigurationItem { - section: Some("pglsp".to_string()), - scope_uri: None, - }]; - - tracing::info!("sending workspace/configuration request"); - let config = match self.client.configuration(config_items).await { - Ok(json) => { - // The client reponse fits the requested `ConfigurationParams.items`, - // so the first value is what we're looking for. 
- json.into_iter() - .next() - .expect("workspace/configuration request did not yield expected response.") - } - Err(why) => { - let message = format!( - "Unable to pull client options via workspace/configuration request: {}", - why - ); - self.client.log_message(MessageType::ERROR, message).await; - return; - } - }; + async fn map_op_error( + &self, + result: Result, LspError>, PanicError>, + ) -> LspResult> { + match result { + Ok(result) => match result { + Ok(result) => Ok(result), + Err(err) => handle_lsp_error(err, &self.session.client).await, + }, - let parsed = self.parse_config_from_client(config).await; - match self.process_config(parsed).await { - Ok(()) => {} - Err(e) => { - self.client - .log_message( - MessageType::ERROR, - format!("Unable to process config from client: {e:?}"), - ) - .await - } - }; + Err(err) => Err(into_lsp_error(err)), + } } +} +#[tower_lsp::async_trait] +impl LanguageServer for LSPServer { + #[allow(deprecated)] #[tracing::instrument( - name="Publishing diagnostics", - skip(self, uri), - fields(%uri) + level = "info", + skip_all, + fields( + root_uri = params.root_uri.as_ref().map(display), + capabilities = debug(¶ms.capabilities), + client_info = params.client_info.as_ref().map(debug), + workspace_folders = params.workspace_folders.as_ref().map(debug), + ) )] - async fn publish_diagnostics(&self, mut uri: Url) { - normalize_uri(&mut uri); - - let url = file_path(&uri); - let diagnostics = self.session.get_diagnostics(url).await; - - let diagnostics: Vec = diagnostics - .into_iter() - .map(|(d, r)| to_proto::diagnostic(d, r)) - .collect(); - - self.client - .send_notification::(ShowMessageParams { - typ: MessageType::INFO, - message: format!("diagnostics {}", diagnostics.len()), - }) - .await; - - let params = PublishDiagnosticsParams { - uri, - diagnostics, - version: None, + async fn initialize(&self, params: InitializeParams) -> LspResult { + info!("Starting Language Server..."); + self.is_initialized.store(true, 
Ordering::Relaxed); + + let server_capabilities = server_capabilities(¶ms.capabilities); + + self.session.initialize( + params.capabilities, + params.client_info.map(|client_info| ClientInformation { + name: client_info.name, + version: client_info.version, + }), + params.root_uri, + params.workspace_folders, + ); + + // + let init = InitializeResult { + capabilities: server_capabilities, + server_info: Some(ServerInfo { + name: String::from(env!("CARGO_PKG_NAME")), + version: Some(pg_configuration::VERSION.to_string()), + }), }; - self.client - .send_notification::(params) - .await; - } - - #[tracing::instrument( - name="Publishing diagnostics via Debouncer", - skip(self, uri), - fields(%uri) - )] - async fn publish_diagnostics_debounced(&self, mut uri: Url) { - let client = Arc::clone(&self.client); - let session = Arc::clone(&self.session); - - self.debouncer - .debounce(Box::pin(async move { - normalize_uri(&mut uri); - let url = file_path(&uri); - - let diagnostics = session.get_diagnostics(url).await; - - let diagnostics: Vec = diagnostics - .into_iter() - .map(|(d, r)| to_proto::diagnostic(d, r)) - .collect(); - - client - .send_notification::(ShowMessageParams { - typ: MessageType::INFO, - message: format!("diagnostics {}", diagnostics.len()), - }) - .await; - - let params = PublishDiagnosticsParams { - uri, - diagnostics, - version: None, - }; - - client - .send_notification::(params) - .await; - })) - .await; + Ok(init) } -} -#[tower_lsp::async_trait] -impl LanguageServer for LspServer { - #[tracing::instrument(name = "initialize", skip(self, params))] - async fn initialize(&self, params: InitializeParams) -> jsonrpc::Result { - self.client - .show_message(MessageType::INFO, "Initialize Request received") - .await; + #[tracing::instrument(level = "info", skip_all)] + async fn initialized(&self, params: InitializedParams) { + let _ = params; - let flags = ClientFlags::from_initialize_request_params(¶ms); - - tracing::info!("flags: {:?}", flags); - - 
self.client_capabilities.write().await.replace(flags); - - Ok(InitializeResult { - server_info: None, - capabilities: ServerCapabilities { - text_document_sync: Some(TextDocumentSyncCapability::Options( - TextDocumentSyncOptions { - open_close: Some(true), - change: Some(TextDocumentSyncKind::INCREMENTAL), - will_save: None, - will_save_wait_until: None, - save: Some(TextDocumentSyncSaveOptions::SaveOptions(SaveOptions { - include_text: Some(false), - })), - }, - )), - hover_provider: Some(HoverProviderCapability::Simple(true)), - execute_command_provider: Some(ExecuteCommandOptions { - commands: CommandType::ALL - .iter() - .map(|c| c.id().to_string()) - .collect(), - ..Default::default() - }), - inlay_hint_provider: Some(OneOf::Left(true)), - code_action_provider: Some(CodeActionProviderCapability::Simple(true)), - completion_provider: Some(CompletionOptions::default()), - ..ServerCapabilities::default() - }, - }) - } + info!("Attempting to load the configuration from 'pglsp.toml' file"); - #[tracing::instrument(name = "initialized", skip(self, _params))] - async fn initialized(&self, _params: InitializedParams) { - let capabilities = self.client_capabilities.read().await; + futures::join!(self.session.load_workspace_settings()); - if capabilities.as_ref().unwrap().supports_pull_opts { - self.request_and_handle_config_from_client().await; - } - - self.client - .log_message(MessageType::INFO, "Postgres LSP Connected!") + let msg = format!("Server initialized with PID: {}", std::process::id()); + self.session + .client + .log_message(MessageType::INFO, msg) .await; - } - #[tracing::instrument(name = "shutdown", skip(self))] - async fn shutdown(&self) -> jsonrpc::Result<()> { - self.session.shutdown().await; - self.debouncer.shutdown().await; + self.setup_capabilities().await; - self.client - .send_notification::(ShowMessageParams { - message: "Shutdown successful.".into(), - typ: MessageType::INFO, - }) - .await; + // Diagnostics are disabled by default, so update 
them after fetching workspace config + self.session.update_all_diagnostics().await; + } + #[tracing::instrument(level = "info", skip_all)] + async fn shutdown(&self) -> LspResult<()> { Ok(()) } - #[tracing::instrument(name = "workspace/didChangeConfiguration", skip(self, params))] + #[tracing::instrument(level = "info", skip_all)] async fn did_change_configuration(&self, params: DidChangeConfigurationParams) { - let capabilities = self.client_capabilities.read().await; - - if capabilities.as_ref().unwrap().supports_pull_opts { - self.request_and_handle_config_from_client().await - } else { - self.parse_and_handle_config_from_client(params.settings) - .await - }; + let _ = params; + self.session.load_workspace_settings().await; + self.setup_capabilities().await; + self.session.update_all_diagnostics().await; } - #[tracing::instrument( - name = "textDocument/didOpen", - skip(self, params), - fields( - uri = %params.text_document.uri - ) - )] - async fn did_open(&self, params: DidOpenTextDocumentParams) { - let mut uri = params.text_document.uri; - - normalize_uri(&mut uri); - - let changed_urls = self - .session - .apply_doc_changes( - file_path(&uri), - params.text_document.version, - params.text_document.text, - ) - .await; - - for url in changed_urls { - let url = Url::from_file_path(url.as_path()).expect("Expected absolute File Path"); - self.publish_diagnostics(url).await; + #[tracing::instrument(level = "trace", skip(self))] + async fn did_change_watched_files(&self, params: DidChangeWatchedFilesParams) { + let file_paths = params + .changes + .iter() + .map(|change| change.uri.to_file_path()); + for file_path in file_paths { + match file_path { + Ok(file_path) => { + let base_path = self.session.base_path(); + if let Some(base_path) = base_path { + let possible_config_toml = file_path.strip_prefix(&base_path); + if let Ok(watched_file) = possible_config_toml { + if ConfigName::file_names() + .contains(&&*watched_file.display().to_string()) + { + 
self.session.load_workspace_settings().await; + self.setup_capabilities().await; + // self.session.update_all_diagnostics().await; + // for now we are only interested to the configuration file, + // so it's OK to exist the loop + break; + } + } + } + } + Err(_) => { + error!("The Workspace root URI {file_path:?} could not be parsed as a filesystem path"); + continue; + } + } } } - #[tracing::instrument( - name= "textDocument/didSave", - skip(self, params), - fields( - uri = %params.text_document.uri - ) - )] - async fn did_save(&self, params: DidSaveTextDocumentParams) { - let mut uri = params.text_document.uri; - normalize_uri(&mut uri); - - self.publish_diagnostics(uri).await; - - let changed_urls = self.session.recompute_and_get_changed_files().await; - for url in changed_urls { - let url = Url::from_file_path(url.as_path()).expect("Expected absolute File Path"); - self.publish_diagnostics(url).await; - } + #[tracing::instrument(level = "trace", skip(self))] + async fn did_open(&self, params: DidOpenTextDocumentParams) { + handlers::text_document::did_open(&self.session, params) + .await + .ok(); } - #[tracing::instrument( - name= "textDocument/didChange", - skip(self, params), - fields( - uri = %params.text_document.uri - ) - )] + #[tracing::instrument(level = "trace", skip(self, params))] async fn did_change(&self, params: DidChangeTextDocumentParams) { - let mut uri = params.text_document.uri; - normalize_uri(&mut uri); - - tracing::info!("{}", uri); + if let Err(e) = handlers::text_document::did_change(&self.session, params).await { + error!("{}", e); + }; + } - self.publish_diagnostics_debounced(uri).await; + #[tracing::instrument(level = "trace", skip(self))] + async fn did_save(&self, params: DidSaveTextDocumentParams) { + // handlers::text_document::did_save(&self.session, params) + // .await + // .ok(); } - #[tracing::instrument( - name= "textDocument/didClose", - skip(self, params), - fields( - uri = %params.text_document.uri - ) - )] + 
#[tracing::instrument(level = "trace", skip(self))] async fn did_close(&self, params: DidCloseTextDocumentParams) { - let mut uri = params.text_document.uri; - normalize_uri(&mut uri); - let path = file_path(&uri); - - self.session.on_file_closed(path).await + handlers::text_document::did_close(&self.session, params) + .await + .ok(); } - #[tracing::instrument( - name= "textDocument/codeAction", - skip(self, params), - fields( - uri = %params.text_document.uri - ) - )] - async fn code_action( - &self, - params: CodeActionParams, - ) -> jsonrpc::Result> { - let mut uri = params.text_document.uri; - normalize_uri(&mut uri); - - let path = file_path(&uri); - let range = params.range; - - let actions = self - .session - .get_available_code_actions_or_commands(path, range) - .await; + #[tracing::instrument(level = "trace", skip(self))] + async fn completion(&self, params: CompletionParams) -> LspResult> { + match handlers::completions::get_completions(&self.session, params) { + Ok(result) => LspResult::Ok(Some(result)), + Err(e) => LspResult::Err(into_lsp_error(e)), + } + } +} - Ok(actions) +impl Drop for LSPServer { + fn drop(&mut self) { + if let Ok(mut sessions) = self.sessions.lock() { + let _removed = sessions.remove(&self.session.key); + debug_assert!(_removed.is_some(), "Session did not exist."); + + if self.stop_on_disconnect + && sessions.is_empty() + && self.is_initialized.load(Ordering::Relaxed) + { + self.session.cancellation.notify_one(); + } + } } +} - #[tracing::instrument( - name= "inlayHint/resolve", - skip(self, params), - fields( - uri = %params.text_document.uri - ) - )] - async fn inlay_hint(&self, params: InlayHintParams) -> jsonrpc::Result>> { - let mut uri = params.text_document.uri; - normalize_uri(&mut uri); +/// Map of active sessions connected to a [ServerFactory]. +type Sessions = Arc>>; + +/// Helper method for wrapping a [Workspace] method in a `custom_method` for +/// the [LSPServer] +macro_rules! 
workspace_method { + ( $builder:ident, $method:ident ) => { + $builder = $builder.custom_method( + concat!("pglsp/", stringify!($method)), + |server: &LSPServer, params| { + let span = tracing::trace_span!(concat!("pglsp/", stringify!($method)), params = ?params).or_current(); + tracing::info!("Received request: {}", stringify!($method)); + + let workspace = server.session.workspace.clone(); + let result = spawn_blocking(move || { + let _guard = span.entered(); + workspace.$method(params) + }); + + result.map(move |result| { + // The type of `result` is `Result, JoinError>`, + // where the inner result is the return value of `$method` while the + // outer one is added by `spawn_blocking` to catch panics or + // cancellations of the task + match result { + Ok(Ok(result)) => Ok(result), + Ok(Err(err)) => Err(into_lsp_error(err)), + Err(err) => match err.try_into_panic() { + Ok(err) => Err(panic_to_lsp_error(err)), + Err(err) => Err(into_lsp_error(err)), + }, + } + }) + }, + ); + }; +} - let path = file_path(&uri); - let range = params.range; +/// Factory data structure responsible for creating [ServerConnection] handles +/// for each incoming connection accepted by the server +#[derive(Default)] +pub struct ServerFactory { + /// Synchronization primitive used to broadcast a shutdown signal to all + /// active connections + cancellation: Arc, + /// Optional [Workspace] instance shared between all clients. Currently + /// this field is always [None] (meaning each connection will get its own + /// workspace) until we figure out how to handle concurrent access to the + /// same workspace from multiple client + workspace: Option>, + + /// The sessions of the connected clients indexed by session key. + sessions: Sessions, + + /// Session key generator. Stores the key of the next session. 
+ next_session_key: AtomicU64, + + /// If this is true the server will broadcast a shutdown signal once the + /// last client disconnected + stop_on_disconnect: bool, + /// This shared flag is set to true once at least one sessions has been + /// initialized on this server instance + is_initialized: Arc, +} - let hints = self.session.get_inlay_hints(path, range).await; +impl ServerFactory { + pub fn new(stop_on_disconnect: bool) -> Self { + Self { + cancellation: Arc::default(), + workspace: None, + sessions: Sessions::default(), + next_session_key: AtomicU64::new(0), + stop_on_disconnect, + is_initialized: Arc::default(), + } + } - Ok(hints) + pub fn create(&self, config_path: Option) -> ServerConnection { + self.create_with_fs(config_path, DynRef::Owned(Box::::default())) } - #[tracing::instrument( - name= "textDocument/completion", - skip(self, params), - fields( - uri = %params.text_document_position.text_document.uri - ) - )] - async fn completion( + /// Create a new [ServerConnection] from this factory + pub fn create_with_fs( &self, - params: CompletionParams, - ) -> jsonrpc::Result> { - let mut uri = params.text_document_position.text_document.uri; - normalize_uri(&mut uri); - - let path = file_path(&uri); - let position = params.text_document_position.position; + config_path: Option, + fs: DynRef<'static, dyn FileSystem>, + ) -> ServerConnection { + let workspace = self + .workspace + .clone() + .unwrap_or_else(workspace::server_sync); + + let session_key = SessionKey(self.next_session_key.fetch_add(1, Ordering::Relaxed)); + + let mut builder = LspService::build(move |client| { + let mut session = Session::new( + session_key, + client, + workspace, + self.cancellation.clone(), + fs, + ); + if let Some(path) = config_path { + session.set_config_path(path); + } + let handle = Arc::new(session); - let completions = self.session.get_available_completions(path, position).await; + let mut sessions = self.sessions.lock().unwrap(); + sessions.insert(session_key, 
handle.clone()); - Ok(completions.map(CompletionResponse::List)) + LSPServer::new( + handle, + self.sessions.clone(), + self.stop_on_disconnect, + self.is_initialized.clone(), + ) + }); + + // "shutdown" is not part of the Workspace API + builder = builder.custom_method("pglsp/shutdown", |server: &LSPServer, (): ()| { + info!("Sending shutdown signal"); + server.session.broadcast_shutdown(); + ready(Ok(Some(()))) + }); + + workspace_method!(builder, open_file); + workspace_method!(builder, change_file); + workspace_method!(builder, close_file); + workspace_method!(builder, pull_diagnostics); + workspace_method!(builder, get_completions); + + let (service, socket) = builder.finish(); + ServerConnection { socket, service } } - #[tracing::instrument( - name= "textDocument/hover", - skip(self, params), - fields( - uri = %params.text_document_position_params.text_document.uri - ) - )] - async fn hover(&self, params: HoverParams) -> jsonrpc::Result> { - let mut uri = params.text_document_position_params.text_document.uri; - normalize_uri(&mut uri); - - let path = file_path(&uri); - let position = params.text_document_position_params.position; - - let hover_diagnostics = self - .session - .get_available_hover_diagnostics(path, position) - .await; - - Ok(hover_diagnostics) + /// Return a handle to the cancellation token for this server process + pub fn cancellation(&self) -> Arc { + self.cancellation.clone() } +} - #[tracing::instrument(name = "workspace/executeCommand", skip(self, params))] - async fn execute_command( - &self, - params: ExecuteCommandParams, - ) -> jsonrpc::Result> { - match CommandType::from_id(params.command.replace("pglsp.", "").as_str()) { - Some(CommandType::ExecuteStatement) => { - if params.arguments.is_empty() { - return jsonrpc::Result::Err(jsonrpc::Error::invalid_request()); - } +/// Handle type created by the server for each incoming connection +pub struct ServerConnection { + socket: ClientSocket, + service: LspService, +} - let params = 
params.arguments.into_iter().next().unwrap(); - let stmt = serde_json::from_value(params) - .map_err(|_| jsonrpc::Error::invalid_request())?; - - match self.session.run_stmt(stmt).await { - Ok(rows_affected) => { - self.client - .send_notification::(ShowMessageParams { - typ: MessageType::INFO, - message: format!("Success! Affected rows: {}", rows_affected), - }) - .await; - } - Err(why) => { - self.client - .send_notification::(ShowMessageParams { - typ: MessageType::ERROR, - message: format!("Error! Statement exectuion failed: {}", why), - }) - .await; - } - }; - } - None => { - self.client - .show_message( - MessageType::ERROR, - format!("Unknown command: {}", params.command), - ) - .await; - } - }; +impl ServerConnection { + /// Destructure a connection into its inner service instance and socket + pub fn into_inner(self) -> (LspService, ClientSocket) { + (self.service, self.socket) + } - Ok(None) + /// Accept an incoming connection and run the server async I/O loop to + /// completion + pub async fn accept(self, stdin: I, stdout: O) + where + I: AsyncRead + Unpin, + O: AsyncWrite, + { + Server::new(stdin, stdout, self.socket) + .serve(self.service) + .await; } } diff --git a/crates/pg_lsp/src/session.rs b/crates/pg_lsp/src/session.rs index 16d99459..9af4c83d 100644 --- a/crates/pg_lsp/src/session.rs +++ b/crates/pg_lsp/src/session.rs @@ -1,315 +1,528 @@ -use std::{collections::HashSet, sync::Arc}; - -use pg_base_db::{Change, DocumentChange}; -use pg_commands::{Command, ExecuteStatementCommand}; -use pg_completions::CompletionParams; -use pg_fs::PgLspPath; -use pg_hover::HoverParams; -use pg_workspace::diagnostics::Diagnostic; +use crate::diagnostics::LspError; +use crate::documents::Document; +use crate::utils; +use anyhow::Result; +use futures::stream::FuturesUnordered; +use futures::StreamExt; +use pg_analyse::RuleCategoriesBuilder; +use pg_configuration::ConfigurationPathHint; +use pg_diagnostics::{DiagnosticExt, Error}; +use pg_fs::{FileSystem, PgLspPath}; 
+use pg_lsp_converters::{negotiated_encoding, PositionEncoding, WideEncoding}; +use pg_workspace::configuration::{load_configuration, LoadedConfiguration}; +use pg_workspace::settings::PartialConfigurationExt; +use pg_workspace::workspace::{PullDiagnosticsParams, UpdateSettingsParams}; use pg_workspace::Workspace; -use text_size::TextSize; -use tokio::sync::RwLock; -use tower_lsp::lsp_types::{ - CodeActionOrCommand, CompletionItem, CompletionList, Hover, HoverContents, InlayHint, - InlayHintKind, InlayHintLabel, MarkedString, Position, Range, -}; - -use crate::{ - db_connection::DbConnection, - utils::{line_index_ext::LineIndexExt, to_lsp_types::to_completion_kind}, -}; - -pub struct Session { - db: RwLock>, - ide: Arc>, +use pg_workspace::{DynRef, WorkspaceError}; +use rustc_hash::FxHashMap; +use serde_json::Value; +use std::path::PathBuf; +use std::sync::atomic::Ordering; +use std::sync::atomic::{AtomicBool, AtomicU8}; +use std::sync::Arc; +use std::sync::RwLock; +use tokio::sync::Notify; +use tokio::sync::OnceCell; +use tower_lsp::lsp_types::Url; +use tower_lsp::lsp_types::{self, ClientCapabilities}; +use tower_lsp::lsp_types::{MessageType, Registration}; +use tower_lsp::lsp_types::{Unregistration, WorkspaceFolder}; +use tracing::{error, info}; + +pub(crate) struct ClientInformation { + /// The name of the client + pub(crate) name: String, + + /// The version of the client + pub(crate) version: Option, +} + +/// Key, uniquely identifying a LSP session. +#[derive(Clone, Copy, Eq, PartialEq, Hash, Debug)] +pub(crate) struct SessionKey(pub u64); + +/// Represents the state of an LSP server session. +pub(crate) struct Session { + /// The unique key identifying this session. + pub(crate) key: SessionKey, + + /// The LSP client for this session. 
+ pub(crate) client: tower_lsp::Client, + + /// The parameters provided by the client in the "initialize" request + initialize_params: OnceCell, + + pub(crate) workspace: Arc, + + configuration_status: AtomicU8, + + /// A flag to notify a message to the user when the configuration is broken, and the LSP attempts + /// to update the diagnostics + notified_broken_configuration: AtomicBool, + + /// File system to read files inside the workspace + pub(crate) fs: DynRef<'static, dyn FileSystem>, + + documents: RwLock>, + + pub(crate) cancellation: Arc, + + pub(crate) config_path: Option, +} + +/// The parameters provided by the client in the "initialize" request +struct InitializeParams { + /// The capabilities provided by the client as part of [`lsp_types::InitializeParams`] + client_capabilities: lsp_types::ClientCapabilities, + client_information: Option, + root_uri: Option, + #[allow(unused)] + workspace_folders: Option>, +} + +#[repr(u8)] +pub(crate) enum ConfigurationStatus { + /// The configuration file was properly loaded + Loaded = 0, + /// The configuration file does not exist + Missing = 1, + /// The configuration file exists but could not be loaded + Error = 2, + /// Currently loading the configuration + Loading = 3, +} + +impl ConfigurationStatus { + pub(crate) const fn is_error(&self) -> bool { + matches!(self, ConfigurationStatus::Error) + } + + pub(crate) const fn is_loaded(&self) -> bool { + matches!(self, ConfigurationStatus::Loaded) + } +} + +impl TryFrom for ConfigurationStatus { + type Error = (); + + fn try_from(value: u8) -> Result { + match value { + 0 => Ok(Self::Loaded), + 1 => Ok(Self::Missing), + 2 => Ok(Self::Error), + 3 => Ok(Self::Loading), + _ => Err(()), + } + } +} + +pub(crate) type SessionHandle = Arc; + +/// Holds the set of capabilities supported by the Language Server +/// instance and whether they are enabled or not +#[derive(Default)] +pub(crate) struct CapabilitySet { + registry: FxHashMap<&'static str, (&'static str, 
CapabilityStatus)>, +} + +/// Represents whether a capability is enabled or not, optionally holding the +/// configuration associated with the capability +pub(crate) enum CapabilityStatus { + Enable(Option), + Disable, +} + +impl CapabilitySet { + /// Insert a capability in the set + pub(crate) fn add_capability( + &mut self, + id: &'static str, + method: &'static str, + status: CapabilityStatus, + ) { + self.registry.insert(id, (method, status)); + } } impl Session { - pub fn new() -> Self { + pub(crate) fn new( + key: SessionKey, + client: tower_lsp::Client, + workspace: Arc, + cancellation: Arc, + fs: DynRef<'static, dyn FileSystem>, + ) -> Self { + let documents = Default::default(); Self { - db: RwLock::new(None), - ide: Arc::new(RwLock::new(Workspace::new())), + key, + client, + initialize_params: OnceCell::default(), + workspace, + configuration_status: AtomicU8::new(ConfigurationStatus::Missing as u8), + documents, + fs, + cancellation, + config_path: None, + notified_broken_configuration: AtomicBool::new(false), } } - #[tracing::instrument(name = "Shutting down Session", skip(self))] - pub async fn shutdown(&self) { - let mut db = self.db.write().await; - let db = db.take(); + pub(crate) fn set_config_path(&mut self, path: PathBuf) { + self.config_path = Some(path); + } - if db.is_some() { - db.unwrap().close().await; + /// Initialize this session instance with the incoming initialization parameters from the client + pub(crate) fn initialize( + &self, + client_capabilities: lsp_types::ClientCapabilities, + client_information: Option, + root_uri: Option, + workspace_folders: Option>, + ) { + let result = self.initialize_params.set(InitializeParams { + client_capabilities, + client_information, + root_uri, + workspace_folders, + }); + + if let Err(err) = result { + error!("Failed to initialize session: {err}"); } } - /// `update_db_connection` will update `Self`'s database connection. 
- /// If the passed-in connection string is the same that we're already connected to, it's a noop. - /// Otherwise, it'll first open a new connection, replace `Self`'s connection, and then close - /// the old one. - #[tracing::instrument(name = "Updating DB Connection", skip(self))] - pub async fn change_db(&self, connection_string: String) -> anyhow::Result<()> { - if self - .db - .read() - .await - .as_ref() - // if the connection is already connected to the same database, do nothing - .is_some_and(|c| c.connected_to(&connection_string)) - { - return Ok(()); + /// Register a set of capabilities with the client + pub(crate) async fn register_capabilities(&self, capabilities: CapabilitySet) { + let mut registrations = Vec::new(); + let mut unregistrations = Vec::new(); + + let mut register_methods = String::new(); + let mut unregister_methods = String::new(); + + for (id, (method, status)) in capabilities.registry { + unregistrations.push(Unregistration { + id: id.to_string(), + method: method.to_string(), + }); + + if !unregister_methods.is_empty() { + unregister_methods.push_str(", "); + } + + unregister_methods.push_str(method); + + if let CapabilityStatus::Enable(register_options) = status { + registrations.push(Registration { + id: id.to_string(), + method: method.to_string(), + register_options, + }); + + if !register_methods.is_empty() { + register_methods.push_str(", "); + } + + register_methods.push_str(method); + } } - tracing::info!("Setting up new Database connection"); - let new_db = DbConnection::new(connection_string, Arc::clone(&self.ide)).await?; - tracing::info!("Set up new connection, trying to acquire write lock…"); + if let Err(e) = self.client.unregister_capability(unregistrations).await { + error!( + "Error unregistering {unregister_methods:?} capabilities: {}", + e + ); + } else { + info!("Unregister capabilities {unregister_methods:?}"); + } - let mut current_db = self.db.write().await; - let old_db = current_db.replace(new_db); + if let 
Err(e) = self.client.register_capability(registrations).await { + error!("Error registering {register_methods:?} capabilities: {}", e); + } else { + info!("Register capabilities {register_methods:?}"); + } + } - if old_db.is_some() { - tracing::info!("Dropping previous Database Connection."); - let old_db = old_db.unwrap(); - old_db.close().await; + /// Computes diagnostics for the file matching the provided url and publishes + /// them to the client. Called from [`handlers::text_document`] when a file's + /// contents changes. + #[tracing::instrument(level = "trace", skip_all, fields(url = display(&url), diagnostic_count), err)] + pub(crate) async fn update_diagnostics(&self, url: lsp_types::Url) -> Result<(), LspError> { + let pglsp_path = self.file_path(&url)?; + let doc = self.document(&url)?; + if self.configuration_status().is_error() && !self.notified_broken_configuration() { + self.set_notified_broken_configuration(); + self.client + .show_message(MessageType::WARNING, "The configuration file has errors. 
PgLSP will report only parsing errors until the configuration is fixed.") + .await; } - tracing::info!("Successfully set up new connection."); + let categories = RuleCategoriesBuilder::default().all(); + + let diagnostics: Vec = { + let result = self.workspace.pull_diagnostics(PullDiagnosticsParams { + path: pglsp_path.clone(), + max_diagnostics: u64::MAX, + categories: categories.build(), + only: Vec::new(), + skip: Vec::new(), + })?; + + tracing::trace!("pglsp diagnostics: {:#?}", result.diagnostics); + + result + .diagnostics + .into_iter() + .filter_map(|d| { + match utils::diagnostic_to_lsp( + d, + &url, + &doc.line_index, + self.position_encoding(), + None, + ) { + Ok(diag) => Some(diag), + Err(err) => { + error!("failed to convert diagnostic to LSP: {err:?}"); + None + } + } + }) + .collect() + }; + + tracing::Span::current().record("diagnostic_count", diagnostics.len()); + + self.client + .publish_diagnostics(url, diagnostics, Some(doc.version)) + .await; + Ok(()) } - /// Runs the passed-in statement against the underlying database. 
- pub async fn run_stmt(&self, stmt: String) -> anyhow::Result { - let db = self.db.read().await; - let pool = db.as_ref().map(|d| d.get_pool()); - - let cmd = ExecuteStatementCommand::new(stmt); + /// Updates diagnostics for every [`Document`] in this [`Session`] + pub(crate) async fn update_all_diagnostics(&self) { + let mut futures: FuturesUnordered<_> = self + .documents + .read() + .unwrap() + .keys() + .map(|url| self.update_diagnostics(url.clone())) + .collect(); - match cmd.run(pool).await { - Err(e) => Err(e), - Ok(res) => Ok(res.rows_affected()), + while let Some(result) = futures.next().await { + if let Err(e) = result { + error!("Error while updating diagnostics: {}", e); + } } } - pub async fn on_file_closed(&self, path: PgLspPath) { - let ide = self.ide.read().await; - ide.remove_document(path); + /// Get a [`Document`] matching the provided [`lsp_types::Url`] + /// + /// If document does not exist, result is [WorkspaceError::NotFound] + pub(crate) fn document(&self, url: &lsp_types::Url) -> Result { + self.documents + .read() + .unwrap() + .get(url) + .cloned() + .ok_or_else(|| WorkspaceError::not_found().with_file_path(url.to_string())) } - pub async fn get_diagnostics(&self, path: PgLspPath) -> Vec<(Diagnostic, Range)> { - let ide = self.ide.read().await; - - // make sure there are documents at the provided path before - // trying to collect diagnostics. - let doc = ide.documents.get(&path); - if doc.is_none() { - tracing::info!("Doc not found, path: {:?}", &path); - return vec![]; - } + /// Set the [`Document`] for the provided [`lsp_types::Url`] + /// + /// Used by [`handlers::text_document] to synchronize documents with the client. 
+ pub(crate) fn insert_document(&self, url: lsp_types::Url, document: Document) { + self.documents.write().unwrap().insert(url, document); + } - ide.diagnostics(&path) - .into_iter() - .map(|d| { - let range = doc - .as_ref() - .unwrap() - .line_index - .line_col_lsp_range(d.range) - .unwrap(); - (d, range) - }) - .collect() + /// Remove the [`Document`] matching the provided [`lsp_types::Url`] + pub(crate) fn remove_document(&self, url: &lsp_types::Url) { + self.documents.write().unwrap().remove(url); } - pub async fn apply_doc_changes( - &self, - path: PgLspPath, - version: i32, - text: String, - ) -> HashSet { - { - let ide = self.ide.read().await; - - ide.apply_change( - path, - DocumentChange::new(version, vec![Change { range: None, text }]), - ); - } + pub(crate) fn file_path(&self, url: &lsp_types::Url) -> Result { + let path_to_file = match url.to_file_path() { + Err(_) => { + // If we can't create a path, it's probably because the file doesn't exist. + // It can be a newly created file that it's not on disk + PathBuf::from(url.path()) + } + Ok(path) => path, + }; + + Ok(PgLspPath::new(path_to_file)) + } - self.recompute_and_get_changed_files().await + /// True if the client supports dynamic registration of "workspace/didChangeConfiguration" requests + pub(crate) fn can_register_did_change_configuration(&self) -> bool { + self.initialize_params + .get() + .and_then(|c| c.client_capabilities.workspace.as_ref()) + .and_then(|c| c.did_change_configuration) + .and_then(|c| c.dynamic_registration) + == Some(true) } - pub async fn recompute_and_get_changed_files(&self) -> HashSet { - let ide = self.ide.read().await; + /// Get the current workspace folders + pub(crate) fn get_workspace_folders(&self) -> Option<&Vec> { + self.initialize_params + .get() + .and_then(|c| c.workspace_folders.as_ref()) + } - let db = self.db.read().await; - let pool = db.as_ref().map(|d| d.get_pool()); + /// Returns the base path of the workspace on the filesystem if it has one + 
pub(crate) fn base_path(&self) -> Option { + let initialize_params = self.initialize_params.get()?; - let changed_files = ide.compute(pool); + let root_uri = initialize_params.root_uri.as_ref()?; + match root_uri.to_file_path() { + Ok(base_path) => Some(base_path), + Err(()) => { + error!( + "The Workspace root URI {root_uri:?} could not be parsed as a filesystem path" + ); + None + } + } + } - changed_files.into_iter().map(|p| p.document_url).collect() + /// Returns a reference to the client information for this session + pub(crate) fn client_information(&self) -> Option<&ClientInformation> { + self.initialize_params.get()?.client_information.as_ref() } - pub async fn get_available_code_actions_or_commands( - &self, - path: PgLspPath, - range: Range, - ) -> Option> { - let ide = self.ide.read().await; - let doc = ide.documents.get(&path)?; - - let range = doc.line_index.offset_lsp_range(range).unwrap(); - - // for now, we only provide `ExcecuteStatementCommand`s. - let actions = doc - .statements_at_range(&range) - .into_iter() - .map(|stmt| { - let cmd = ExecuteStatementCommand::command_type(); - let title = format!( - "Execute '{}'", - ExecuteStatementCommand::trim_statement(stmt.text.clone(), 50) - ); - CodeActionOrCommand::Command(tower_lsp::lsp_types::Command { - title, - command: format!("pglsp.{}", cmd.id()), - arguments: Some(vec![serde_json::to_value(stmt.text.clone()).unwrap()]), - }) - }) - .collect(); + /// Returns a reference to the client capabilities for this session + pub(crate) fn client_capabilities(&self) -> Option<&ClientCapabilities> { + self.initialize_params + .get() + .map(|params| ¶ms.client_capabilities) + } - Some(actions) + /// This function attempts to read the `pglsp.toml` configuration file from + /// the root URI and update the workspace settings accordingly + #[tracing::instrument(level = "trace", skip(self))] + pub(crate) async fn load_workspace_settings(&self) { + // Providing a custom configuration path will not allow to support 
workspaces + if let Some(config_path) = &self.config_path { + let base_path = ConfigurationPathHint::FromUser(config_path.clone()); + let status = self.load_pglsp_configuration_file(base_path).await; + self.set_configuration_status(status); + } else if let Some(folders) = self.get_workspace_folders() { + info!("Detected workspace folder."); + self.set_configuration_status(ConfigurationStatus::Loading); + for folder in folders { + info!("Attempt to load the configuration file in {:?}", folder.uri); + let base_path = folder.uri.to_file_path(); + match base_path { + Ok(base_path) => { + let status = self + .load_pglsp_configuration_file(ConfigurationPathHint::FromWorkspace( + base_path, + )) + .await; + self.set_configuration_status(status); + } + Err(_) => { + error!( + "The Workspace root URI {:?} could not be parsed as a filesystem path", + folder.uri + ); + } + } + } + } else { + let base_path = match self.base_path() { + None => ConfigurationPathHint::default(), + Some(path) => ConfigurationPathHint::FromLsp(path), + }; + let status = self.load_pglsp_configuration_file(base_path).await; + self.set_configuration_status(status); + } } - pub async fn get_inlay_hints(&self, path: PgLspPath, range: Range) -> Option> { - let ide = self.ide.read().await; - let doc = ide.documents.get(&path)?; - - let range = doc.line_index.offset_lsp_range(range)?; - - let schema_cache = ide.schema_cache.read().expect("Unable to get Schema Cache"); - - let hints = doc - .statements_at_range(&range) - .into_iter() - .flat_map(|stmt| { - ::pg_inlay_hints::inlay_hints(::pg_inlay_hints::InlayHintsParams { - ast: ide.pg_query.ast(&stmt).as_ref().map(|x| x.as_ref()), - enriched_ast: ide - .pg_query - .enriched_ast(&stmt) - .as_ref() - .map(|x| x.as_ref()), - tree: ide.tree_sitter.tree(&stmt).as_ref().map(|x| x.as_ref()), - cst: ide.pg_query.cst(&stmt).as_ref().map(|x| x.as_ref()), - schema_cache: &schema_cache, - }) - }) - .map(|hint| InlayHint { - position: 
doc.line_index.line_col_lsp(hint.offset).unwrap(), - label: match hint.content { - pg_inlay_hints::InlayHintContent::FunctionArg(arg) => { - InlayHintLabel::String(match arg.name { - Some(name) => format!("{} ({})", name, arg.type_name), - None => arg.type_name.clone(), - }) + async fn load_pglsp_configuration_file( + &self, + base_path: ConfigurationPathHint, + ) -> ConfigurationStatus { + match load_configuration(&self.fs, base_path.clone()) { + Ok(loaded_configuration) => { + let LoadedConfiguration { + configuration: fs_configuration, + directory_path: configuration_path, + .. + } = loaded_configuration; + info!("Configuration loaded successfully from disk."); + info!("Update workspace settings."); + + let result = fs_configuration + .retrieve_gitignore_matches(&self.fs, configuration_path.as_deref()); + + match result { + Ok((vcs_base_path, gitignore_matches)) => { + let result = self.workspace.update_settings(UpdateSettingsParams { + workspace_directory: self.fs.working_directory(), + configuration: fs_configuration, + vcs_base_path, + gitignore_matches, + }); + + if let Err(error) = result { + error!("Failed to set workspace settings: {}", error); + self.client.log_message(MessageType::ERROR, &error).await; + ConfigurationStatus::Error + } else { + ConfigurationStatus::Loaded + } } - }, - kind: match hint.content { - pg_inlay_hints::InlayHintContent::FunctionArg(_) => { - Some(InlayHintKind::PARAMETER) + Err(err) => { + error!("Couldn't load the configuration file, reason:\n {}", err); + self.client.log_message(MessageType::ERROR, &err).await; + ConfigurationStatus::Error } - }, - text_edits: None, - tooltip: None, - padding_left: None, - padding_right: None, - data: None, - }) - .collect(); + } + } + Err(err) => { + error!("Couldn't load the configuration file, reason:\n {}", err); + self.client.log_message(MessageType::ERROR, &err).await; + ConfigurationStatus::Error + } + } + } - Some(hints) + /// Broadcast a shutdown signal to all active connections + 
pub(crate) fn broadcast_shutdown(&self) { + self.cancellation.notify_one(); } - pub async fn get_available_completions( - &self, - path: PgLspPath, - position: Position, - ) -> Option { - let ide = self.ide.read().await; - - let doc = ide.documents.get(&path)?; - let offset = doc.line_index.offset_lsp(position)?; - let (range, stmt) = doc.statement_at_offset_with_range(&offset)?; - - let schema_cache = ide.schema_cache.read().expect("No Schema Cache"); - - let completion_items: Vec = pg_completions::complete(CompletionParams { - position: offset - range.start() - TextSize::from(1), - text: stmt.text.clone(), - tree: ide.tree_sitter.tree(&stmt).as_ref().map(|t| t.as_ref()), - schema: &schema_cache, - }) - .into_iter() - .map(|item| CompletionItem { - label: item.label, - label_details: Some(tower_lsp::lsp_types::CompletionItemLabelDetails { - description: Some(item.description), - detail: None, - }), - kind: Some(to_completion_kind(item.kind)), - detail: None, - documentation: None, - deprecated: None, - preselect: None, - sort_text: None, - filter_text: None, - insert_text: None, - insert_text_format: None, - insert_text_mode: None, - text_edit: None, - additional_text_edits: None, - commit_characters: None, - data: None, - tags: None, - command: None, - }) - .collect(); - - Some(CompletionList { - is_incomplete: false, - items: completion_items, - }) + /// Retrieves information regarding the configuration status + pub(crate) fn configuration_status(&self) -> ConfigurationStatus { + self.configuration_status + .load(Ordering::Relaxed) + .try_into() + .unwrap() } - pub async fn get_available_hover_diagnostics( - &self, - path: PgLspPath, - position: Position, - ) -> Option { - let ide = self.ide.read().await; - let doc = ide.documents.get(&path)?; - - let offset = doc.line_index.offset_lsp(position)?; - - let (range, stmt) = doc.statement_at_offset_with_range(&offset)?; - let range_start = range.start(); - let hover_range = doc.line_index.line_col_lsp_range(range); 
- - let schema_cache = ide.schema_cache.read().expect("No Schema Cache"); - - ::pg_hover::hover(HoverParams { - position: offset - range_start, - source: stmt.text.as_str(), - enriched_ast: ide - .pg_query - .enriched_ast(&stmt) - .as_ref() - .map(|x| x.as_ref()), - tree: ide.tree_sitter.tree(&stmt).as_ref().map(|x| x.as_ref()), - schema_cache: schema_cache.clone(), - }) - .map(|hover| Hover { - contents: HoverContents::Scalar(MarkedString::String(hover.content)), - range: hover_range, - }) + /// Updates the status of the configuration + fn set_configuration_status(&self, status: ConfigurationStatus) { + self.notified_broken_configuration + .store(false, Ordering::Relaxed); + self.configuration_status + .store(status as u8, Ordering::Relaxed); + } + + fn notified_broken_configuration(&self) -> bool { + self.notified_broken_configuration.load(Ordering::Relaxed) + } + fn set_notified_broken_configuration(&self) { + self.notified_broken_configuration + .store(true, Ordering::Relaxed); + } + + pub fn position_encoding(&self) -> PositionEncoding { + self.initialize_params + .get() + .map_or(PositionEncoding::Wide(WideEncoding::Utf16), |params| { + negotiated_encoding(¶ms.client_capabilities) + }) } } diff --git a/crates/pg_lsp/src/utils.rs b/crates/pg_lsp/src/utils.rs index 50e9edd8..33eef1f7 100644 --- a/crates/pg_lsp/src/utils.rs +++ b/crates/pg_lsp/src/utils.rs @@ -1,77 +1,440 @@ -pub mod line_index_ext; -pub mod to_lsp_types; -pub mod to_proto; +use anyhow::{ensure, Context, Result}; +use pg_console::fmt::Termcolor; +use pg_console::fmt::{self, Formatter}; +use pg_console::MarkupBuf; +use pg_diagnostics::termcolor::NoColor; +use pg_diagnostics::{Diagnostic, DiagnosticTags, Location, PrintDescription, Severity, Visit}; +use pg_lsp_converters::line_index::LineIndex; +use pg_lsp_converters::{from_proto, to_proto, PositionEncoding}; +use pg_text_edit::{CompressedOp, DiffOp, TextEdit}; +use std::any::Any; +use std::borrow::Cow; +use std::fmt::{Debug, Display}; +use 
std::io; +use std::ops::{Add, Range}; +use text_size::{TextRange, TextSize}; +use tower_lsp::jsonrpc::Error as LspError; +use tower_lsp::lsp_types; +use tower_lsp::lsp_types::{self as lsp, CodeDescription, Url}; +use tracing::error; -use std::path::PathBuf; +pub(crate) fn text_edit( + line_index: &LineIndex, + diff: TextEdit, + position_encoding: PositionEncoding, + offset: Option, +) -> Result> { + let mut result: Vec = Vec::new(); + let mut offset = if let Some(offset) = offset { + TextSize::from(offset) + } else { + TextSize::from(0) + }; -use pg_fs::PgLspPath; -use tower_lsp::lsp_types; + for op in diff.iter() { + match op { + CompressedOp::DiffOp(DiffOp::Equal { range }) => { + offset += range.len(); + } + CompressedOp::DiffOp(DiffOp::Insert { range }) => { + let start = to_proto::position(line_index, offset, position_encoding)?; + + // Merge with a previous delete operation if possible + let last_edit = result.last_mut().filter(|text_edit| { + text_edit.range.end == start && text_edit.new_text.is_empty() + }); + + if let Some(last_edit) = last_edit { + last_edit.new_text = diff.get_text(*range).to_string(); + } else { + result.push(lsp::TextEdit { + range: lsp::Range::new(start, start), + new_text: diff.get_text(*range).to_string(), + }); + } + } + CompressedOp::DiffOp(DiffOp::Delete { range }) => { + let start = to_proto::position(line_index, offset, position_encoding)?; + offset += range.len(); + let end = to_proto::position(line_index, offset, position_encoding)?; + + result.push(lsp::TextEdit { + range: lsp::Range::new(start, end), + new_text: String::new(), + }); + } -/// Convert a `lsp_types::Url` to a `PgLspPath`. -pub(crate) fn file_path(url: &lsp_types::Url) -> PgLspPath { - let path_to_file = match url.to_file_path() { - Err(_) => { - // If we can't create a path, it's probably because the file doesn't exist. 
- // It can be a newly created file that it's not on disk - PathBuf::from(url.path()) + CompressedOp::EqualLines { line_count } => { + let mut line_col = line_index + .line_col(offset) + .expect("diff length is overflowing the line count in the original file"); + + line_col.line += line_count.get() + 1; + line_col.col = 0; + + // SAFETY: This should only happen if `line_index` wasn't built + // from the same string as the old revision of `diff` + let new_offset = line_index + .offset(line_col) + .expect("diff length is overflowing the line count in the original file"); + + offset = new_offset; + } } - Ok(path) => path, - }; + } - PgLspPath::new(path_to_file) + Ok(result) } -pub fn normalize_uri(uri: &mut lsp_types::Url) { - if let Some(mut segments) = uri.path_segments() { - if let Some(mut path) = segments.next().and_then(fix_drive_letter) { - for segment in segments { - path.push('/'); - path.push_str(segment); - } +/// Convert an [pg_diagnostics::Diagnostic] to a [lsp::Diagnostic], using the span +/// of the diagnostic's primary label as the diagnostic range. +/// Requires a [LineIndex] to convert a byte offset range to the line/col range +/// expected by LSP. 
+pub(crate) fn diagnostic_to_lsp( + diagnostic: D, + url: &lsp::Url, + line_index: &LineIndex, + position_encoding: PositionEncoding, + offset: Option, +) -> Result { + let location = diagnostic.location(); + + let span = location.span.context("diagnostic location has no span")?; + let span = if let Some(offset) = offset { + TextRange::new( + span.start().add(TextSize::from(offset)), + span.end().add(TextSize::from(offset)), + ) + } else { + span + }; + let span = to_proto::range(line_index, span, position_encoding) + .context("failed to convert diagnostic span to LSP range")?; - uri.set_path(&path); + let severity = match diagnostic.severity() { + Severity::Fatal | Severity::Error => lsp::DiagnosticSeverity::ERROR, + Severity::Warning => lsp::DiagnosticSeverity::WARNING, + Severity::Information => lsp::DiagnosticSeverity::INFORMATION, + Severity::Hint => lsp::DiagnosticSeverity::HINT, + }; + + let code = diagnostic + .category() + .map(|category| lsp::NumberOrString::String(category.name().to_string())); + + let code_description = diagnostic + .category() + .and_then(|category| category.link()) + .and_then(|link| { + let href = Url::parse(link).ok()?; + Some(CodeDescription { href }) + }); + + let message = PrintDescription(&diagnostic).to_string(); + ensure!(!message.is_empty(), "diagnostic description is empty"); + + let mut related_information = None; + let mut visitor = RelatedInformationVisitor { + url, + line_index, + position_encoding, + related_information: &mut related_information, + }; + + diagnostic.advices(&mut visitor).unwrap(); + + let tags = diagnostic.tags(); + let tags = { + let mut result = Vec::new(); + + if tags.contains(DiagnosticTags::UNNECESSARY_CODE) { + result.push(lsp::DiagnosticTag::UNNECESSARY); } + + if tags.contains(DiagnosticTags::DEPRECATED_CODE) { + result.push(lsp::DiagnosticTag::DEPRECATED); + } + + if !result.is_empty() { + Some(result) + } else { + None + } + }; + + let mut diagnostic = lsp::Diagnostic::new( + span, + 
Some(severity), + code, + Some("pg".into()), + message, + related_information, + tags, + ); + diagnostic.code_description = code_description; + Ok(diagnostic) +} + +struct RelatedInformationVisitor<'a> { + url: &'a lsp::Url, + line_index: &'a LineIndex, + position_encoding: PositionEncoding, + related_information: &'a mut Option>, +} + +impl Visit for RelatedInformationVisitor<'_> { + fn record_frame(&mut self, location: Location<'_>) -> io::Result<()> { + let span = match location.span { + Some(span) => span, + None => return Ok(()), + }; + + let range = match to_proto::range(self.line_index, span, self.position_encoding) { + Ok(range) => range, + Err(_) => return Ok(()), + }; + + let related_information = self.related_information.get_or_insert_with(Vec::new); + + related_information.push(lsp::DiagnosticRelatedInformation { + location: lsp::Location { + uri: self.url.clone(), + range, + }, + message: String::new(), + }); + + Ok(()) } +} + +/// Convert a piece of markup into a String +fn print_markup(markup: &MarkupBuf) -> String { + let mut message = Termcolor(NoColor::new(Vec::new())); + fmt::Display::fmt(markup, &mut Formatter::new(&mut message)) + // SAFETY: Writing to a memory buffer should never fail + .unwrap(); + + // SAFETY: Printing uncolored markup never generates non UTF-8 byte sequences + String::from_utf8(message.0.into_inner()).unwrap() +} - uri.set_fragment(None); +/// Helper to create a [tower_lsp::jsonrpc::Error] from a message +pub(crate) fn into_lsp_error(msg: impl Display + Debug) -> LspError { + let mut error = LspError::internal_error(); + error!("Error: {}", msg); + error.message = Cow::Owned(msg.to_string()); + error.data = Some(format!("{msg:?}").into()); + error } -fn fix_drive_letter(text: &str) -> Option { - if !text.is_ascii() { - return None; +pub(crate) fn panic_to_lsp_error(err: Box) -> LspError { + let mut error = LspError::internal_error(); + + match err.downcast::() { + Ok(msg) => { + error.message = Cow::Owned(msg.to_string()); 
+ } + Err(err) => match err.downcast::<&str>() { + Ok(msg) => { + error.message = Cow::Owned(msg.to_string()); + } + Err(_) => { + error.message = Cow::Owned(String::from("Encountered an unknown error")); + } + }, } - match &text[1..] { - ":" => Some(text.to_ascii_uppercase()), - "%3A" | "%3a" => Some(format!("{}:", text[0..1].to_ascii_uppercase())), - _ => None, + error +} + +pub(crate) fn apply_document_changes( + position_encoding: PositionEncoding, + current_content: String, + content_changes: &[lsp_types::TextDocumentContentChangeEvent], +) -> String { + // Skip to the last full document change, as it invalidates all previous changes anyways. + let mut start = content_changes + .iter() + .rev() + .position(|change| change.range.is_none()) + .map_or(0, |idx| content_changes.len() - idx - 1); + + let mut text: String = match content_changes.get(start) { + // peek at the first content change as an optimization + Some(lsp_types::TextDocumentContentChangeEvent { + range: None, text, .. + }) => { + let text = text.clone(); + start += 1; + + // The only change is a full document update + if start == content_changes.len() { + return text; + } + text + } + Some(_) => current_content, + // we received no content changes + None => return current_content, + }; + + let mut line_index = LineIndex::new(&text); + + // The changes we got must be applied sequentially, but can cross lines so we + // have to keep our line index updated. + // Some clients (e.g. Code) sort the ranges in reverse. As an optimization, we + // remember the last valid line in the index and only rebuild it if needed. 
+ let mut index_valid = u32::MAX; + for change in content_changes { + // The None case can't happen as we have handled it above already + if let Some(range) = change.range { + if index_valid <= range.end.line { + line_index = LineIndex::new(&text); + } + index_valid = range.start.line; + if let Ok(range) = from_proto::text_range(&line_index, range, position_encoding) { + text.replace_range(Range::::from(range), &change.text); + } + } } + + text } #[cfg(test)] mod tests { - use lsp_types::Url; - use super::normalize_uri; + use pg_lsp_converters::line_index::LineIndex; + use pg_lsp_converters::PositionEncoding; + use pg_text_edit::TextEdit; + use tower_lsp::lsp_types as lsp; #[test] - fn test_lowercase_drive_letter() { - let mut uri = Url::parse("file://c:/foo/bar.txt").unwrap(); - normalize_uri(&mut uri); - assert_eq!(uri.as_str(), "file:///C:/foo/bar.txt"); - } + fn test_diff_1() { + const OLD: &str = "line 1 old +line 2 +line 3 +line 4 +line 5 +line 6 +line 7 old"; - #[test] - fn test_uppercase_drive_letter() { - let mut uri = Url::parse("file://C:/foo/bar.txt").unwrap(); - normalize_uri(&mut uri); - assert_eq!(uri.as_str(), "file:///C:/foo/bar.txt"); + const NEW: &str = "line 1 new +line 2 +line 3 +line 4 +line 5 +line 6 +line 7 new"; + + let line_index = LineIndex::new(OLD); + let diff = TextEdit::from_unicode_words(OLD, NEW); + + let text_edit = super::text_edit(&line_index, diff, PositionEncoding::Utf8, None).unwrap(); + + assert_eq!( + text_edit.as_slice(), + &[ + lsp::TextEdit { + range: lsp::Range { + start: lsp::Position { + line: 0, + character: 7, + }, + end: lsp::Position { + line: 0, + character: 10, + }, + }, + new_text: String::from("new"), + }, + lsp::TextEdit { + range: lsp::Range { + start: lsp::Position { + line: 6, + character: 7 + }, + end: lsp::Position { + line: 6, + character: 10 + } + }, + new_text: String::from("new"), + }, + ] + ); } #[test] - fn test_fragment() { - let mut uri = Url::parse("foo:///bar/baz.txt#qux").unwrap(); - 
normalize_uri(&mut uri); - assert_eq!(uri.as_str(), "foo:///bar/baz.txt"); + fn test_diff_2() { + const OLD: &str = "console.log(\"Variable: \" + variable);"; + const NEW: &str = "console.log(`Variable: ${variable}`);"; + + let line_index = LineIndex::new(OLD); + let diff = TextEdit::from_unicode_words(OLD, NEW); + + let text_edit = super::text_edit(&line_index, diff, PositionEncoding::Utf8, None).unwrap(); + + assert_eq!( + text_edit.as_slice(), + &[ + lsp::TextEdit { + range: lsp::Range { + start: lsp::Position { + line: 0, + character: 12, + }, + end: lsp::Position { + line: 0, + character: 13, + }, + }, + new_text: String::from("`"), + }, + lsp::TextEdit { + range: lsp::Range { + start: lsp::Position { + line: 0, + character: 23 + }, + end: lsp::Position { + line: 0, + character: 27 + } + }, + new_text: String::from("${"), + }, + lsp::TextEdit { + range: lsp::Range { + start: lsp::Position { + line: 0, + character: 35 + }, + end: lsp::Position { + line: 0, + character: 35 + } + }, + new_text: String::from("}`"), + }, + ] + ); } + + // #[test] + // fn test_range_formatting() { + // let encoding = PositionEncoding::Wide(WideEncoding::Utf16); + // let input = "(\"Jan 1, 2018\u{2009}–\u{2009}Jan 1, 2019\");\n(\"Jan 1, 2018\u{2009}–\u{2009}Jan 1, 2019\");\nisSpreadAssignment;\n".to_string(); + // let change = TextDocumentContentChangeEvent { + // range: Some(Range::new(Position::new(0, 30), Position::new(1, 0))), + // range_length: Some(1), + // text: String::new(), + // }; + // + // let output = apply_document_changes(encoding, input, vec![change]); + // let expected = "(\"Jan 1, 2018\u{2009}–\u{2009}Jan 1, 2019\");(\"Jan 1, 2018\u{2009}–\u{2009}Jan 1, 2019\");\nisSpreadAssignment;\n"; + // + // assert_eq!(output, expected); + // } } diff --git a/crates/pg_lsp/src/utils/line_index_ext.rs b/crates/pg_lsp/src/utils/line_index_ext.rs deleted file mode 100644 index de9aed9a..00000000 --- a/crates/pg_lsp/src/utils/line_index_ext.rs +++ /dev/null @@ -1,78 +0,0 @@ -use 
line_index::{LineCol, LineColUtf16, LineIndex}; -use text_size::{TextRange, TextSize}; -use tower_lsp::lsp_types::{Position, Range}; - -pub trait LineIndexExt { - fn offset_lsp(&self, line_col: Position) -> Option; - - fn offset_lsp_range(&self, line_col: Range) -> Option; - - fn line_col_lsp(&self, offset: TextSize) -> Option; - - fn line_col_lsp_range(&self, offset: TextRange) -> Option; -} - -impl LineIndexExt for LineIndex { - fn offset_lsp(&self, line_col: Position) -> Option { - let line_col = LineColUtf16 { - line: line_col.line, - col: line_col.character, - }; - - let line_col = self.to_utf8(line_col)?; - self.offset(line_col) - } - - fn offset_lsp_range(&self, line_col: Range) -> Option { - let start = self.offset_lsp(line_col.start)?; - let end = self.offset_lsp(line_col.end)?; - Some(TextRange::new(start, end)) - } - - fn line_col_lsp(&self, offset: TextSize) -> Option { - let line_col = self.line_col(offset); - let line_col = self.to_utf16(line_col)?; - Some(Position::new(line_col.line, line_col.col)) - } - - fn line_col_lsp_range(&self, offset: TextRange) -> Option { - let start = self.line_col_lsp(offset.start())?; - let mut end = self.line_col_lsp(offset.end())?; - if end.line != start.line && end.character == 0 { - // Prefer keeping multi-line ranges on the same line - let line_end = self.offset(LineCol { - line: end.line, - col: 0, - })?; - - end = self.line_col_lsp(line_end - TextSize::from(1))?; - } - - Some(Range::new(start, end)) - } -} - -#[cfg(test)] -mod tests { - use pg_base_db::Document; - use pg_fs::PgLspPath; - use text_size::{TextRange, TextSize}; - - use crate::utils::line_index_ext::LineIndexExt; - - #[test] - fn test_line_col_lsp_range() { - let url = PgLspPath::new("test.sql"); - - let d = Document::new( - url, - Some("select 1 from contact;\nselect 1;\nalter table test drop column id;".to_string()), - ); - - println!( - "{:#?}", - d.line_index - .line_col_lsp_range(TextRange::new(TextSize::new(52), TextSize::new(66))) - ); - } -} 
diff --git a/crates/pg_lsp/src/utils/to_lsp_types.rs b/crates/pg_lsp/src/utils/to_lsp_types.rs deleted file mode 100644 index 24dcc443..00000000 --- a/crates/pg_lsp/src/utils/to_lsp_types.rs +++ /dev/null @@ -1,11 +0,0 @@ -use tower_lsp::lsp_types; - -pub fn to_completion_kind( - kind: pg_completions::CompletionItemKind, -) -> lsp_types::CompletionItemKind { - match kind { - pg_completions::CompletionItemKind::Table => lsp_types::CompletionItemKind::CLASS, - pg_completions::CompletionItemKind::Function => lsp_types::CompletionItemKind::FUNCTION, - pg_completions::CompletionItemKind::Column => lsp_types::CompletionItemKind::FIELD, - } -} diff --git a/crates/pg_lsp/src/utils/to_proto.rs b/crates/pg_lsp/src/utils/to_proto.rs deleted file mode 100644 index 94a9552d..00000000 --- a/crates/pg_lsp/src/utils/to_proto.rs +++ /dev/null @@ -1,18 +0,0 @@ -use pg_workspace::diagnostics::{Diagnostic, Severity}; -use tower_lsp::lsp_types; - -pub fn diagnostic(diagnostic: Diagnostic, range: lsp_types::Range) -> lsp_types::Diagnostic { - let severity = match diagnostic.severity { - Severity::Error => lsp_types::DiagnosticSeverity::ERROR, - Severity::Warning => lsp_types::DiagnosticSeverity::WARNING, - Severity::Information => lsp_types::DiagnosticSeverity::INFORMATION, - Severity::Hint => lsp_types::DiagnosticSeverity::HINT, - Severity::Fatal => lsp_types::DiagnosticSeverity::ERROR, - }; - - lsp_types::Diagnostic { - severity: Some(severity), - source: Some(diagnostic.source), - ..lsp_types::Diagnostic::new_simple(range, diagnostic.message) - } -} diff --git a/crates/pg_lsp_new/Cargo.toml b/crates/pg_lsp_new/Cargo.toml deleted file mode 100644 index 8e20b521..00000000 --- a/crates/pg_lsp_new/Cargo.toml +++ /dev/null @@ -1,40 +0,0 @@ -[package] -authors.workspace = true -categories.workspace = true -description = "" -edition.workspace = true -homepage.workspace = true -keywords.workspace = true -license.workspace = true -name = "pg_lsp_new" -repository.workspace = true -version = 
"0.0.0" - - -[dependencies] -anyhow = { workspace = true } -biome_deserialize = { workspace = true } -futures = "0.3.31" -pg_analyse = { workspace = true } -pg_completions = { workspace = true } -pg_configuration = { workspace = true } -pg_console = { workspace = true } -pg_diagnostics = { workspace = true } -pg_fs = { workspace = true } -pg_lsp_converters = { workspace = true } -pg_text_edit = { workspace = true } -pg_workspace_new = { workspace = true } -rustc-hash = { workspace = true } -serde = { workspace = true, features = ["derive"] } -serde_json = { workspace = true } -text-size.workspace = true -tokio = { workspace = true, features = ["rt", "io-std"] } -tower-lsp = { version = "0.20.0" } -tracing = { workspace = true, features = ["attributes"] } - -[dev-dependencies] - -[lib] -doctest = false - -[features] diff --git a/crates/pg_lsp_new/src/lib.rs b/crates/pg_lsp_new/src/lib.rs deleted file mode 100644 index 99db526f..00000000 --- a/crates/pg_lsp_new/src/lib.rs +++ /dev/null @@ -1,9 +0,0 @@ -mod capabilities; -mod diagnostics; -mod documents; -mod handlers; -mod server; -mod session; -mod utils; - -pub use crate::server::{LSPServer, ServerConnection, ServerFactory}; diff --git a/crates/pg_lsp_new/src/server.rs b/crates/pg_lsp_new/src/server.rs deleted file mode 100644 index 327ca46a..00000000 --- a/crates/pg_lsp_new/src/server.rs +++ /dev/null @@ -1,430 +0,0 @@ -use crate::capabilities::server_capabilities; -use crate::diagnostics::{handle_lsp_error, LspError}; -use crate::handlers; -use crate::session::{ - CapabilitySet, CapabilityStatus, ClientInformation, Session, SessionHandle, SessionKey, -}; -use crate::utils::{into_lsp_error, panic_to_lsp_error}; -use futures::future::ready; -use futures::FutureExt; -use pg_diagnostics::panic::PanicError; -use pg_fs::{ConfigName, FileSystem, OsFileSystem}; -use pg_workspace_new::{workspace, DynRef, Workspace}; -use rustc_hash::FxHashMap; -use serde_json::json; -use std::panic::RefUnwindSafe; -use std::path::PathBuf; 
-use std::sync::atomic::{AtomicBool, AtomicU64, Ordering}; -use std::sync::{Arc, Mutex}; -use tokio::io::{AsyncRead, AsyncWrite}; -use tokio::sync::Notify; -use tokio::task::spawn_blocking; -use tower_lsp::jsonrpc::Result as LspResult; -use tower_lsp::{lsp_types::*, ClientSocket}; -use tower_lsp::{LanguageServer, LspService, Server}; -use tracing::{error, info}; - -pub struct LSPServer { - session: SessionHandle, - /// Map of all sessions connected to the same [ServerFactory] as this [LSPServer]. - sessions: Sessions, - /// If this is true the server will broadcast a shutdown signal once the - /// last client disconnected - stop_on_disconnect: bool, - /// This shared flag is set to true once at least one session has been - /// initialized on this server instance - is_initialized: Arc, -} - -impl RefUnwindSafe for LSPServer {} - -impl LSPServer { - fn new( - session: SessionHandle, - sessions: Sessions, - stop_on_disconnect: bool, - is_initialized: Arc, - ) -> Self { - Self { - session, - sessions, - stop_on_disconnect, - is_initialized, - } - } - - async fn setup_capabilities(&self) { - let mut capabilities = CapabilitySet::default(); - - capabilities.add_capability( - "pglsp_did_change_extension_settings", - "workspace/didChangeConfiguration", - if self.session.can_register_did_change_configuration() { - CapabilityStatus::Enable(None) - } else { - CapabilityStatus::Disable - }, - ); - - capabilities.add_capability( - "pglsp_did_change_workspace_settings", - "workspace/didChangeWatchedFiles", - if let Some(base_path) = self.session.base_path() { - CapabilityStatus::Enable(Some(json!(DidChangeWatchedFilesRegistrationOptions { - watchers: vec![FileSystemWatcher { - glob_pattern: GlobPattern::String(format!( - "{}/pglsp.toml", - base_path.display() - )), - kind: Some(WatchKind::all()), - },], - }))) - } else { - CapabilityStatus::Disable - }, - ); - - self.session.register_capabilities(capabilities).await; - } - - async fn map_op_error( - &self, - result: Result, 
LspError>, PanicError>, - ) -> LspResult> { - match result { - Ok(result) => match result { - Ok(result) => Ok(result), - Err(err) => handle_lsp_error(err, &self.session.client).await, - }, - - Err(err) => Err(into_lsp_error(err)), - } - } -} - -#[tower_lsp::async_trait] -impl LanguageServer for LSPServer { - #[allow(deprecated)] - #[tracing::instrument( - level = "info", - skip_all, - fields( - root_uri = params.root_uri.as_ref().map(display), - capabilities = debug(¶ms.capabilities), - client_info = params.client_info.as_ref().map(debug), - workspace_folders = params.workspace_folders.as_ref().map(debug), - ) - )] - async fn initialize(&self, params: InitializeParams) -> LspResult { - info!("Starting Language Server..."); - self.is_initialized.store(true, Ordering::Relaxed); - - let server_capabilities = server_capabilities(¶ms.capabilities); - - self.session.initialize( - params.capabilities, - params.client_info.map(|client_info| ClientInformation { - name: client_info.name, - version: client_info.version, - }), - params.root_uri, - params.workspace_folders, - ); - - // - let init = InitializeResult { - capabilities: server_capabilities, - server_info: Some(ServerInfo { - name: String::from(env!("CARGO_PKG_NAME")), - version: Some(pg_configuration::VERSION.to_string()), - }), - }; - - Ok(init) - } - - #[tracing::instrument(level = "info", skip_all)] - async fn initialized(&self, params: InitializedParams) { - let _ = params; - - info!("Attempting to load the configuration from 'pglsp.toml' file"); - - futures::join!(self.session.load_workspace_settings()); - - let msg = format!("Server initialized with PID: {}", std::process::id()); - self.session - .client - .log_message(MessageType::INFO, msg) - .await; - - self.setup_capabilities().await; - - // Diagnostics are disabled by default, so update them after fetching workspace config - self.session.update_all_diagnostics().await; - } - - #[tracing::instrument(level = "info", skip_all)] - async fn shutdown(&self) 
-> LspResult<()> { - Ok(()) - } - - #[tracing::instrument(level = "info", skip_all)] - async fn did_change_configuration(&self, params: DidChangeConfigurationParams) { - let _ = params; - self.session.load_workspace_settings().await; - self.setup_capabilities().await; - self.session.update_all_diagnostics().await; - } - - #[tracing::instrument(level = "trace", skip(self))] - async fn did_change_watched_files(&self, params: DidChangeWatchedFilesParams) { - let file_paths = params - .changes - .iter() - .map(|change| change.uri.to_file_path()); - for file_path in file_paths { - match file_path { - Ok(file_path) => { - let base_path = self.session.base_path(); - if let Some(base_path) = base_path { - let possible_config_toml = file_path.strip_prefix(&base_path); - if let Ok(watched_file) = possible_config_toml { - if ConfigName::file_names() - .contains(&&*watched_file.display().to_string()) - { - self.session.load_workspace_settings().await; - self.setup_capabilities().await; - // self.session.update_all_diagnostics().await; - // for now we are only interested to the configuration file, - // so it's OK to exist the loop - break; - } - } - } - } - Err(_) => { - error!("The Workspace root URI {file_path:?} could not be parsed as a filesystem path"); - continue; - } - } - } - } - - #[tracing::instrument(level = "trace", skip(self))] - async fn did_open(&self, params: DidOpenTextDocumentParams) { - handlers::text_document::did_open(&self.session, params) - .await - .ok(); - } - - #[tracing::instrument(level = "trace", skip(self, params))] - async fn did_change(&self, params: DidChangeTextDocumentParams) { - if let Err(e) = handlers::text_document::did_change(&self.session, params).await { - error!("{}", e); - }; - } - - #[tracing::instrument(level = "trace", skip(self))] - async fn did_save(&self, params: DidSaveTextDocumentParams) { - // handlers::text_document::did_save(&self.session, params) - // .await - // .ok(); - } - - #[tracing::instrument(level = "trace", 
skip(self))] - async fn did_close(&self, params: DidCloseTextDocumentParams) { - handlers::text_document::did_close(&self.session, params) - .await - .ok(); - } - - #[tracing::instrument(level = "trace", skip(self))] - async fn completion(&self, params: CompletionParams) -> LspResult> { - match handlers::completions::get_completions(&self.session, params) { - Ok(result) => LspResult::Ok(Some(result)), - Err(e) => LspResult::Err(into_lsp_error(e)), - } - } -} - -impl Drop for LSPServer { - fn drop(&mut self) { - if let Ok(mut sessions) = self.sessions.lock() { - let _removed = sessions.remove(&self.session.key); - debug_assert!(_removed.is_some(), "Session did not exist."); - - if self.stop_on_disconnect - && sessions.is_empty() - && self.is_initialized.load(Ordering::Relaxed) - { - self.session.cancellation.notify_one(); - } - } - } -} - -/// Map of active sessions connected to a [ServerFactory]. -type Sessions = Arc>>; - -/// Helper method for wrapping a [Workspace] method in a `custom_method` for -/// the [LSPServer] -macro_rules! 
workspace_method { - ( $builder:ident, $method:ident ) => { - $builder = $builder.custom_method( - concat!("pglsp/", stringify!($method)), - |server: &LSPServer, params| { - let span = tracing::trace_span!(concat!("pglsp/", stringify!($method)), params = ?params).or_current(); - tracing::info!("Received request: {}", stringify!($method)); - - let workspace = server.session.workspace.clone(); - let result = spawn_blocking(move || { - let _guard = span.entered(); - workspace.$method(params) - }); - - result.map(move |result| { - // The type of `result` is `Result, JoinError>`, - // where the inner result is the return value of `$method` while the - // outer one is added by `spawn_blocking` to catch panics or - // cancellations of the task - match result { - Ok(Ok(result)) => Ok(result), - Ok(Err(err)) => Err(into_lsp_error(err)), - Err(err) => match err.try_into_panic() { - Ok(err) => Err(panic_to_lsp_error(err)), - Err(err) => Err(into_lsp_error(err)), - }, - } - }) - }, - ); - }; -} - -/// Factory data structure responsible for creating [ServerConnection] handles -/// for each incoming connection accepted by the server -#[derive(Default)] -pub struct ServerFactory { - /// Synchronization primitive used to broadcast a shutdown signal to all - /// active connections - cancellation: Arc, - /// Optional [Workspace] instance shared between all clients. Currently - /// this field is always [None] (meaning each connection will get its own - /// workspace) until we figure out how to handle concurrent access to the - /// same workspace from multiple client - workspace: Option>, - - /// The sessions of the connected clients indexed by session key. - sessions: Sessions, - - /// Session key generator. Stores the key of the next session. 
- next_session_key: AtomicU64, - - /// If this is true the server will broadcast a shutdown signal once the - /// last client disconnected - stop_on_disconnect: bool, - /// This shared flag is set to true once at least one sessions has been - /// initialized on this server instance - is_initialized: Arc, -} - -impl ServerFactory { - pub fn new(stop_on_disconnect: bool) -> Self { - Self { - cancellation: Arc::default(), - workspace: None, - sessions: Sessions::default(), - next_session_key: AtomicU64::new(0), - stop_on_disconnect, - is_initialized: Arc::default(), - } - } - - pub fn create(&self, config_path: Option) -> ServerConnection { - self.create_with_fs(config_path, DynRef::Owned(Box::::default())) - } - - /// Create a new [ServerConnection] from this factory - pub fn create_with_fs( - &self, - config_path: Option, - fs: DynRef<'static, dyn FileSystem>, - ) -> ServerConnection { - let workspace = self - .workspace - .clone() - .unwrap_or_else(workspace::server_sync); - - let session_key = SessionKey(self.next_session_key.fetch_add(1, Ordering::Relaxed)); - - let mut builder = LspService::build(move |client| { - let mut session = Session::new( - session_key, - client, - workspace, - self.cancellation.clone(), - fs, - ); - if let Some(path) = config_path { - session.set_config_path(path); - } - let handle = Arc::new(session); - - let mut sessions = self.sessions.lock().unwrap(); - sessions.insert(session_key, handle.clone()); - - LSPServer::new( - handle, - self.sessions.clone(), - self.stop_on_disconnect, - self.is_initialized.clone(), - ) - }); - - // "shutdown" is not part of the Workspace API - builder = builder.custom_method("pglsp/shutdown", |server: &LSPServer, (): ()| { - info!("Sending shutdown signal"); - server.session.broadcast_shutdown(); - ready(Ok(Some(()))) - }); - - workspace_method!(builder, open_file); - workspace_method!(builder, change_file); - workspace_method!(builder, close_file); - workspace_method!(builder, pull_diagnostics); - 
workspace_method!(builder, get_completions); - - let (service, socket) = builder.finish(); - ServerConnection { socket, service } - } - - /// Return a handle to the cancellation token for this server process - pub fn cancellation(&self) -> Arc { - self.cancellation.clone() - } -} - -/// Handle type created by the server for each incoming connection -pub struct ServerConnection { - socket: ClientSocket, - service: LspService, -} - -impl ServerConnection { - /// Destructure a connection into its inner service instance and socket - pub fn into_inner(self) -> (LspService, ClientSocket) { - (self.service, self.socket) - } - - /// Accept an incoming connection and run the server async I/O loop to - /// completion - pub async fn accept(self, stdin: I, stdout: O) - where - I: AsyncRead + Unpin, - O: AsyncWrite, - { - Server::new(stdin, stdout, self.socket) - .serve(self.service) - .await; - } -} diff --git a/crates/pg_lsp_new/src/session.rs b/crates/pg_lsp_new/src/session.rs deleted file mode 100644 index cda7fee0..00000000 --- a/crates/pg_lsp_new/src/session.rs +++ /dev/null @@ -1,528 +0,0 @@ -use crate::diagnostics::LspError; -use crate::documents::Document; -use crate::utils; -use anyhow::Result; -use futures::stream::FuturesUnordered; -use futures::StreamExt; -use pg_analyse::RuleCategoriesBuilder; -use pg_configuration::ConfigurationPathHint; -use pg_diagnostics::{DiagnosticExt, Error}; -use pg_fs::{FileSystem, PgLspPath}; -use pg_lsp_converters::{negotiated_encoding, PositionEncoding, WideEncoding}; -use pg_workspace_new::configuration::{load_configuration, LoadedConfiguration}; -use pg_workspace_new::settings::PartialConfigurationExt; -use pg_workspace_new::workspace::{PullDiagnosticsParams, UpdateSettingsParams}; -use pg_workspace_new::Workspace; -use pg_workspace_new::{DynRef, WorkspaceError}; -use rustc_hash::FxHashMap; -use serde_json::Value; -use std::path::PathBuf; -use std::sync::atomic::Ordering; -use std::sync::atomic::{AtomicBool, AtomicU8}; -use 
std::sync::Arc; -use std::sync::RwLock; -use tokio::sync::Notify; -use tokio::sync::OnceCell; -use tower_lsp::lsp_types::Url; -use tower_lsp::lsp_types::{self, ClientCapabilities}; -use tower_lsp::lsp_types::{MessageType, Registration}; -use tower_lsp::lsp_types::{Unregistration, WorkspaceFolder}; -use tracing::{error, info}; - -pub(crate) struct ClientInformation { - /// The name of the client - pub(crate) name: String, - - /// The version of the client - pub(crate) version: Option, -} - -/// Key, uniquely identifying a LSP session. -#[derive(Clone, Copy, Eq, PartialEq, Hash, Debug)] -pub(crate) struct SessionKey(pub u64); - -/// Represents the state of an LSP server session. -pub(crate) struct Session { - /// The unique key identifying this session. - pub(crate) key: SessionKey, - - /// The LSP client for this session. - pub(crate) client: tower_lsp::Client, - - /// The parameters provided by the client in the "initialize" request - initialize_params: OnceCell, - - pub(crate) workspace: Arc, - - configuration_status: AtomicU8, - - /// A flag to notify a message to the user when the configuration is broken, and the LSP attempts - /// to update the diagnostics - notified_broken_configuration: AtomicBool, - - /// File system to read files inside the workspace - pub(crate) fs: DynRef<'static, dyn FileSystem>, - - documents: RwLock>, - - pub(crate) cancellation: Arc, - - pub(crate) config_path: Option, -} - -/// The parameters provided by the client in the "initialize" request -struct InitializeParams { - /// The capabilities provided by the client as part of [`lsp_types::InitializeParams`] - client_capabilities: lsp_types::ClientCapabilities, - client_information: Option, - root_uri: Option, - #[allow(unused)] - workspace_folders: Option>, -} - -#[repr(u8)] -pub(crate) enum ConfigurationStatus { - /// The configuration file was properly loaded - Loaded = 0, - /// The configuration file does not exist - Missing = 1, - /// The configuration file exists but could not be 
loaded - Error = 2, - /// Currently loading the configuration - Loading = 3, -} - -impl ConfigurationStatus { - pub(crate) const fn is_error(&self) -> bool { - matches!(self, ConfigurationStatus::Error) - } - - pub(crate) const fn is_loaded(&self) -> bool { - matches!(self, ConfigurationStatus::Loaded) - } -} - -impl TryFrom for ConfigurationStatus { - type Error = (); - - fn try_from(value: u8) -> Result { - match value { - 0 => Ok(Self::Loaded), - 1 => Ok(Self::Missing), - 2 => Ok(Self::Error), - 3 => Ok(Self::Loading), - _ => Err(()), - } - } -} - -pub(crate) type SessionHandle = Arc; - -/// Holds the set of capabilities supported by the Language Server -/// instance and whether they are enabled or not -#[derive(Default)] -pub(crate) struct CapabilitySet { - registry: FxHashMap<&'static str, (&'static str, CapabilityStatus)>, -} - -/// Represents whether a capability is enabled or not, optionally holding the -/// configuration associated with the capability -pub(crate) enum CapabilityStatus { - Enable(Option), - Disable, -} - -impl CapabilitySet { - /// Insert a capability in the set - pub(crate) fn add_capability( - &mut self, - id: &'static str, - method: &'static str, - status: CapabilityStatus, - ) { - self.registry.insert(id, (method, status)); - } -} - -impl Session { - pub(crate) fn new( - key: SessionKey, - client: tower_lsp::Client, - workspace: Arc, - cancellation: Arc, - fs: DynRef<'static, dyn FileSystem>, - ) -> Self { - let documents = Default::default(); - Self { - key, - client, - initialize_params: OnceCell::default(), - workspace, - configuration_status: AtomicU8::new(ConfigurationStatus::Missing as u8), - documents, - fs, - cancellation, - config_path: None, - notified_broken_configuration: AtomicBool::new(false), - } - } - - pub(crate) fn set_config_path(&mut self, path: PathBuf) { - self.config_path = Some(path); - } - - /// Initialize this session instance with the incoming initialization parameters from the client - pub(crate) fn 
initialize( - &self, - client_capabilities: lsp_types::ClientCapabilities, - client_information: Option, - root_uri: Option, - workspace_folders: Option>, - ) { - let result = self.initialize_params.set(InitializeParams { - client_capabilities, - client_information, - root_uri, - workspace_folders, - }); - - if let Err(err) = result { - error!("Failed to initialize session: {err}"); - } - } - - /// Register a set of capabilities with the client - pub(crate) async fn register_capabilities(&self, capabilities: CapabilitySet) { - let mut registrations = Vec::new(); - let mut unregistrations = Vec::new(); - - let mut register_methods = String::new(); - let mut unregister_methods = String::new(); - - for (id, (method, status)) in capabilities.registry { - unregistrations.push(Unregistration { - id: id.to_string(), - method: method.to_string(), - }); - - if !unregister_methods.is_empty() { - unregister_methods.push_str(", "); - } - - unregister_methods.push_str(method); - - if let CapabilityStatus::Enable(register_options) = status { - registrations.push(Registration { - id: id.to_string(), - method: method.to_string(), - register_options, - }); - - if !register_methods.is_empty() { - register_methods.push_str(", "); - } - - register_methods.push_str(method); - } - } - - if let Err(e) = self.client.unregister_capability(unregistrations).await { - error!( - "Error unregistering {unregister_methods:?} capabilities: {}", - e - ); - } else { - info!("Unregister capabilities {unregister_methods:?}"); - } - - if let Err(e) = self.client.register_capability(registrations).await { - error!("Error registering {register_methods:?} capabilities: {}", e); - } else { - info!("Register capabilities {register_methods:?}"); - } - } - - /// Computes diagnostics for the file matching the provided url and publishes - /// them to the client. Called from [`handlers::text_document`] when a file's - /// contents changes. 
- #[tracing::instrument(level = "trace", skip_all, fields(url = display(&url), diagnostic_count), err)] - pub(crate) async fn update_diagnostics(&self, url: lsp_types::Url) -> Result<(), LspError> { - let pglsp_path = self.file_path(&url)?; - let doc = self.document(&url)?; - if self.configuration_status().is_error() && !self.notified_broken_configuration() { - self.set_notified_broken_configuration(); - self.client - .show_message(MessageType::WARNING, "The configuration file has errors. PgLSP will report only parsing errors until the configuration is fixed.") - .await; - } - - let categories = RuleCategoriesBuilder::default().all(); - - let diagnostics: Vec = { - let result = self.workspace.pull_diagnostics(PullDiagnosticsParams { - path: pglsp_path.clone(), - max_diagnostics: u64::MAX, - categories: categories.build(), - only: Vec::new(), - skip: Vec::new(), - })?; - - tracing::trace!("pglsp diagnostics: {:#?}", result.diagnostics); - - result - .diagnostics - .into_iter() - .filter_map(|d| { - match utils::diagnostic_to_lsp( - d, - &url, - &doc.line_index, - self.position_encoding(), - None, - ) { - Ok(diag) => Some(diag), - Err(err) => { - error!("failed to convert diagnostic to LSP: {err:?}"); - None - } - } - }) - .collect() - }; - - tracing::Span::current().record("diagnostic_count", diagnostics.len()); - - self.client - .publish_diagnostics(url, diagnostics, Some(doc.version)) - .await; - - Ok(()) - } - - /// Updates diagnostics for every [`Document`] in this [`Session`] - pub(crate) async fn update_all_diagnostics(&self) { - let mut futures: FuturesUnordered<_> = self - .documents - .read() - .unwrap() - .keys() - .map(|url| self.update_diagnostics(url.clone())) - .collect(); - - while let Some(result) = futures.next().await { - if let Err(e) = result { - error!("Error while updating diagnostics: {}", e); - } - } - } - - /// Get a [`Document`] matching the provided [`lsp_types::Url`] - /// - /// If document does not exist, result is 
[WorkspaceError::NotFound] - pub(crate) fn document(&self, url: &lsp_types::Url) -> Result { - self.documents - .read() - .unwrap() - .get(url) - .cloned() - .ok_or_else(|| WorkspaceError::not_found().with_file_path(url.to_string())) - } - - /// Set the [`Document`] for the provided [`lsp_types::Url`] - /// - /// Used by [`handlers::text_document] to synchronize documents with the client. - pub(crate) fn insert_document(&self, url: lsp_types::Url, document: Document) { - self.documents.write().unwrap().insert(url, document); - } - - /// Remove the [`Document`] matching the provided [`lsp_types::Url`] - pub(crate) fn remove_document(&self, url: &lsp_types::Url) { - self.documents.write().unwrap().remove(url); - } - - pub(crate) fn file_path(&self, url: &lsp_types::Url) -> Result { - let path_to_file = match url.to_file_path() { - Err(_) => { - // If we can't create a path, it's probably because the file doesn't exist. - // It can be a newly created file that it's not on disk - PathBuf::from(url.path()) - } - Ok(path) => path, - }; - - Ok(PgLspPath::new(path_to_file)) - } - - /// True if the client supports dynamic registration of "workspace/didChangeConfiguration" requests - pub(crate) fn can_register_did_change_configuration(&self) -> bool { - self.initialize_params - .get() - .and_then(|c| c.client_capabilities.workspace.as_ref()) - .and_then(|c| c.did_change_configuration) - .and_then(|c| c.dynamic_registration) - == Some(true) - } - - /// Get the current workspace folders - pub(crate) fn get_workspace_folders(&self) -> Option<&Vec> { - self.initialize_params - .get() - .and_then(|c| c.workspace_folders.as_ref()) - } - - /// Returns the base path of the workspace on the filesystem if it has one - pub(crate) fn base_path(&self) -> Option { - let initialize_params = self.initialize_params.get()?; - - let root_uri = initialize_params.root_uri.as_ref()?; - match root_uri.to_file_path() { - Ok(base_path) => Some(base_path), - Err(()) => { - error!( - "The Workspace 
root URI {root_uri:?} could not be parsed as a filesystem path" - ); - None - } - } - } - - /// Returns a reference to the client information for this session - pub(crate) fn client_information(&self) -> Option<&ClientInformation> { - self.initialize_params.get()?.client_information.as_ref() - } - - /// Returns a reference to the client capabilities for this session - pub(crate) fn client_capabilities(&self) -> Option<&ClientCapabilities> { - self.initialize_params - .get() - .map(|params| ¶ms.client_capabilities) - } - - /// This function attempts to read the `pglsp.toml` configuration file from - /// the root URI and update the workspace settings accordingly - #[tracing::instrument(level = "trace", skip(self))] - pub(crate) async fn load_workspace_settings(&self) { - // Providing a custom configuration path will not allow to support workspaces - if let Some(config_path) = &self.config_path { - let base_path = ConfigurationPathHint::FromUser(config_path.clone()); - let status = self.load_pglsp_configuration_file(base_path).await; - self.set_configuration_status(status); - } else if let Some(folders) = self.get_workspace_folders() { - info!("Detected workspace folder."); - self.set_configuration_status(ConfigurationStatus::Loading); - for folder in folders { - info!("Attempt to load the configuration file in {:?}", folder.uri); - let base_path = folder.uri.to_file_path(); - match base_path { - Ok(base_path) => { - let status = self - .load_pglsp_configuration_file(ConfigurationPathHint::FromWorkspace( - base_path, - )) - .await; - self.set_configuration_status(status); - } - Err(_) => { - error!( - "The Workspace root URI {:?} could not be parsed as a filesystem path", - folder.uri - ); - } - } - } - } else { - let base_path = match self.base_path() { - None => ConfigurationPathHint::default(), - Some(path) => ConfigurationPathHint::FromLsp(path), - }; - let status = self.load_pglsp_configuration_file(base_path).await; - self.set_configuration_status(status); - } - 
} - - async fn load_pglsp_configuration_file( - &self, - base_path: ConfigurationPathHint, - ) -> ConfigurationStatus { - match load_configuration(&self.fs, base_path.clone()) { - Ok(loaded_configuration) => { - let LoadedConfiguration { - configuration: fs_configuration, - directory_path: configuration_path, - .. - } = loaded_configuration; - info!("Configuration loaded successfully from disk."); - info!("Update workspace settings."); - - let result = fs_configuration - .retrieve_gitignore_matches(&self.fs, configuration_path.as_deref()); - - match result { - Ok((vcs_base_path, gitignore_matches)) => { - let result = self.workspace.update_settings(UpdateSettingsParams { - workspace_directory: self.fs.working_directory(), - configuration: fs_configuration, - vcs_base_path, - gitignore_matches, - }); - - if let Err(error) = result { - error!("Failed to set workspace settings: {}", error); - self.client.log_message(MessageType::ERROR, &error).await; - ConfigurationStatus::Error - } else { - ConfigurationStatus::Loaded - } - } - Err(err) => { - error!("Couldn't load the configuration file, reason:\n {}", err); - self.client.log_message(MessageType::ERROR, &err).await; - ConfigurationStatus::Error - } - } - } - Err(err) => { - error!("Couldn't load the configuration file, reason:\n {}", err); - self.client.log_message(MessageType::ERROR, &err).await; - ConfigurationStatus::Error - } - } - } - - /// Broadcast a shutdown signal to all active connections - pub(crate) fn broadcast_shutdown(&self) { - self.cancellation.notify_one(); - } - - /// Retrieves information regarding the configuration status - pub(crate) fn configuration_status(&self) -> ConfigurationStatus { - self.configuration_status - .load(Ordering::Relaxed) - .try_into() - .unwrap() - } - - /// Updates the status of the configuration - fn set_configuration_status(&self, status: ConfigurationStatus) { - self.notified_broken_configuration - .store(false, Ordering::Relaxed); - self.configuration_status - 
.store(status as u8, Ordering::Relaxed); - } - - fn notified_broken_configuration(&self) -> bool { - self.notified_broken_configuration.load(Ordering::Relaxed) - } - fn set_notified_broken_configuration(&self) { - self.notified_broken_configuration - .store(true, Ordering::Relaxed); - } - - pub fn position_encoding(&self) -> PositionEncoding { - self.initialize_params - .get() - .map_or(PositionEncoding::Wide(WideEncoding::Utf16), |params| { - negotiated_encoding(¶ms.client_capabilities) - }) - } -} diff --git a/crates/pg_lsp_new/src/utils.rs b/crates/pg_lsp_new/src/utils.rs deleted file mode 100644 index 33eef1f7..00000000 --- a/crates/pg_lsp_new/src/utils.rs +++ /dev/null @@ -1,440 +0,0 @@ -use anyhow::{ensure, Context, Result}; -use pg_console::fmt::Termcolor; -use pg_console::fmt::{self, Formatter}; -use pg_console::MarkupBuf; -use pg_diagnostics::termcolor::NoColor; -use pg_diagnostics::{Diagnostic, DiagnosticTags, Location, PrintDescription, Severity, Visit}; -use pg_lsp_converters::line_index::LineIndex; -use pg_lsp_converters::{from_proto, to_proto, PositionEncoding}; -use pg_text_edit::{CompressedOp, DiffOp, TextEdit}; -use std::any::Any; -use std::borrow::Cow; -use std::fmt::{Debug, Display}; -use std::io; -use std::ops::{Add, Range}; -use text_size::{TextRange, TextSize}; -use tower_lsp::jsonrpc::Error as LspError; -use tower_lsp::lsp_types; -use tower_lsp::lsp_types::{self as lsp, CodeDescription, Url}; -use tracing::error; - -pub(crate) fn text_edit( - line_index: &LineIndex, - diff: TextEdit, - position_encoding: PositionEncoding, - offset: Option, -) -> Result> { - let mut result: Vec = Vec::new(); - let mut offset = if let Some(offset) = offset { - TextSize::from(offset) - } else { - TextSize::from(0) - }; - - for op in diff.iter() { - match op { - CompressedOp::DiffOp(DiffOp::Equal { range }) => { - offset += range.len(); - } - CompressedOp::DiffOp(DiffOp::Insert { range }) => { - let start = to_proto::position(line_index, offset, 
position_encoding)?; - - // Merge with a previous delete operation if possible - let last_edit = result.last_mut().filter(|text_edit| { - text_edit.range.end == start && text_edit.new_text.is_empty() - }); - - if let Some(last_edit) = last_edit { - last_edit.new_text = diff.get_text(*range).to_string(); - } else { - result.push(lsp::TextEdit { - range: lsp::Range::new(start, start), - new_text: diff.get_text(*range).to_string(), - }); - } - } - CompressedOp::DiffOp(DiffOp::Delete { range }) => { - let start = to_proto::position(line_index, offset, position_encoding)?; - offset += range.len(); - let end = to_proto::position(line_index, offset, position_encoding)?; - - result.push(lsp::TextEdit { - range: lsp::Range::new(start, end), - new_text: String::new(), - }); - } - - CompressedOp::EqualLines { line_count } => { - let mut line_col = line_index - .line_col(offset) - .expect("diff length is overflowing the line count in the original file"); - - line_col.line += line_count.get() + 1; - line_col.col = 0; - - // SAFETY: This should only happen if `line_index` wasn't built - // from the same string as the old revision of `diff` - let new_offset = line_index - .offset(line_col) - .expect("diff length is overflowing the line count in the original file"); - - offset = new_offset; - } - } - } - - Ok(result) -} - -/// Convert an [pg_diagnostics::Diagnostic] to a [lsp::Diagnostic], using the span -/// of the diagnostic's primary label as the diagnostic range. -/// Requires a [LineIndex] to convert a byte offset range to the line/col range -/// expected by LSP. 
-pub(crate) fn diagnostic_to_lsp( - diagnostic: D, - url: &lsp::Url, - line_index: &LineIndex, - position_encoding: PositionEncoding, - offset: Option, -) -> Result { - let location = diagnostic.location(); - - let span = location.span.context("diagnostic location has no span")?; - let span = if let Some(offset) = offset { - TextRange::new( - span.start().add(TextSize::from(offset)), - span.end().add(TextSize::from(offset)), - ) - } else { - span - }; - let span = to_proto::range(line_index, span, position_encoding) - .context("failed to convert diagnostic span to LSP range")?; - - let severity = match diagnostic.severity() { - Severity::Fatal | Severity::Error => lsp::DiagnosticSeverity::ERROR, - Severity::Warning => lsp::DiagnosticSeverity::WARNING, - Severity::Information => lsp::DiagnosticSeverity::INFORMATION, - Severity::Hint => lsp::DiagnosticSeverity::HINT, - }; - - let code = diagnostic - .category() - .map(|category| lsp::NumberOrString::String(category.name().to_string())); - - let code_description = diagnostic - .category() - .and_then(|category| category.link()) - .and_then(|link| { - let href = Url::parse(link).ok()?; - Some(CodeDescription { href }) - }); - - let message = PrintDescription(&diagnostic).to_string(); - ensure!(!message.is_empty(), "diagnostic description is empty"); - - let mut related_information = None; - let mut visitor = RelatedInformationVisitor { - url, - line_index, - position_encoding, - related_information: &mut related_information, - }; - - diagnostic.advices(&mut visitor).unwrap(); - - let tags = diagnostic.tags(); - let tags = { - let mut result = Vec::new(); - - if tags.contains(DiagnosticTags::UNNECESSARY_CODE) { - result.push(lsp::DiagnosticTag::UNNECESSARY); - } - - if tags.contains(DiagnosticTags::DEPRECATED_CODE) { - result.push(lsp::DiagnosticTag::DEPRECATED); - } - - if !result.is_empty() { - Some(result) - } else { - None - } - }; - - let mut diagnostic = lsp::Diagnostic::new( - span, - Some(severity), - code, - 
Some("pg".into()), - message, - related_information, - tags, - ); - diagnostic.code_description = code_description; - Ok(diagnostic) -} - -struct RelatedInformationVisitor<'a> { - url: &'a lsp::Url, - line_index: &'a LineIndex, - position_encoding: PositionEncoding, - related_information: &'a mut Option>, -} - -impl Visit for RelatedInformationVisitor<'_> { - fn record_frame(&mut self, location: Location<'_>) -> io::Result<()> { - let span = match location.span { - Some(span) => span, - None => return Ok(()), - }; - - let range = match to_proto::range(self.line_index, span, self.position_encoding) { - Ok(range) => range, - Err(_) => return Ok(()), - }; - - let related_information = self.related_information.get_or_insert_with(Vec::new); - - related_information.push(lsp::DiagnosticRelatedInformation { - location: lsp::Location { - uri: self.url.clone(), - range, - }, - message: String::new(), - }); - - Ok(()) - } -} - -/// Convert a piece of markup into a String -fn print_markup(markup: &MarkupBuf) -> String { - let mut message = Termcolor(NoColor::new(Vec::new())); - fmt::Display::fmt(markup, &mut Formatter::new(&mut message)) - // SAFETY: Writing to a memory buffer should never fail - .unwrap(); - - // SAFETY: Printing uncolored markup never generates non UTF-8 byte sequences - String::from_utf8(message.0.into_inner()).unwrap() -} - -/// Helper to create a [tower_lsp::jsonrpc::Error] from a message -pub(crate) fn into_lsp_error(msg: impl Display + Debug) -> LspError { - let mut error = LspError::internal_error(); - error!("Error: {}", msg); - error.message = Cow::Owned(msg.to_string()); - error.data = Some(format!("{msg:?}").into()); - error -} - -pub(crate) fn panic_to_lsp_error(err: Box) -> LspError { - let mut error = LspError::internal_error(); - - match err.downcast::() { - Ok(msg) => { - error.message = Cow::Owned(msg.to_string()); - } - Err(err) => match err.downcast::<&str>() { - Ok(msg) => { - error.message = Cow::Owned(msg.to_string()); - } - Err(_) => { 
- error.message = Cow::Owned(String::from("Encountered an unknown error")); - } - }, - } - - error -} - -pub(crate) fn apply_document_changes( - position_encoding: PositionEncoding, - current_content: String, - content_changes: &[lsp_types::TextDocumentContentChangeEvent], -) -> String { - // Skip to the last full document change, as it invalidates all previous changes anyways. - let mut start = content_changes - .iter() - .rev() - .position(|change| change.range.is_none()) - .map_or(0, |idx| content_changes.len() - idx - 1); - - let mut text: String = match content_changes.get(start) { - // peek at the first content change as an optimization - Some(lsp_types::TextDocumentContentChangeEvent { - range: None, text, .. - }) => { - let text = text.clone(); - start += 1; - - // The only change is a full document update - if start == content_changes.len() { - return text; - } - text - } - Some(_) => current_content, - // we received no content changes - None => return current_content, - }; - - let mut line_index = LineIndex::new(&text); - - // The changes we got must be applied sequentially, but can cross lines so we - // have to keep our line index updated. - // Some clients (e.g. Code) sort the ranges in reverse. As an optimization, we - // remember the last valid line in the index and only rebuild it if needed. 
- let mut index_valid = u32::MAX; - for change in content_changes { - // The None case can't happen as we have handled it above already - if let Some(range) = change.range { - if index_valid <= range.end.line { - line_index = LineIndex::new(&text); - } - index_valid = range.start.line; - if let Ok(range) = from_proto::text_range(&line_index, range, position_encoding) { - text.replace_range(Range::::from(range), &change.text); - } - } - } - - text -} - -#[cfg(test)] -mod tests { - - use pg_lsp_converters::line_index::LineIndex; - use pg_lsp_converters::PositionEncoding; - use pg_text_edit::TextEdit; - use tower_lsp::lsp_types as lsp; - - #[test] - fn test_diff_1() { - const OLD: &str = "line 1 old -line 2 -line 3 -line 4 -line 5 -line 6 -line 7 old"; - - const NEW: &str = "line 1 new -line 2 -line 3 -line 4 -line 5 -line 6 -line 7 new"; - - let line_index = LineIndex::new(OLD); - let diff = TextEdit::from_unicode_words(OLD, NEW); - - let text_edit = super::text_edit(&line_index, diff, PositionEncoding::Utf8, None).unwrap(); - - assert_eq!( - text_edit.as_slice(), - &[ - lsp::TextEdit { - range: lsp::Range { - start: lsp::Position { - line: 0, - character: 7, - }, - end: lsp::Position { - line: 0, - character: 10, - }, - }, - new_text: String::from("new"), - }, - lsp::TextEdit { - range: lsp::Range { - start: lsp::Position { - line: 6, - character: 7 - }, - end: lsp::Position { - line: 6, - character: 10 - } - }, - new_text: String::from("new"), - }, - ] - ); - } - - #[test] - fn test_diff_2() { - const OLD: &str = "console.log(\"Variable: \" + variable);"; - const NEW: &str = "console.log(`Variable: ${variable}`);"; - - let line_index = LineIndex::new(OLD); - let diff = TextEdit::from_unicode_words(OLD, NEW); - - let text_edit = super::text_edit(&line_index, diff, PositionEncoding::Utf8, None).unwrap(); - - assert_eq!( - text_edit.as_slice(), - &[ - lsp::TextEdit { - range: lsp::Range { - start: lsp::Position { - line: 0, - character: 12, - }, - end: lsp::Position { 
- line: 0, - character: 13, - }, - }, - new_text: String::from("`"), - }, - lsp::TextEdit { - range: lsp::Range { - start: lsp::Position { - line: 0, - character: 23 - }, - end: lsp::Position { - line: 0, - character: 27 - } - }, - new_text: String::from("${"), - }, - lsp::TextEdit { - range: lsp::Range { - start: lsp::Position { - line: 0, - character: 35 - }, - end: lsp::Position { - line: 0, - character: 35 - } - }, - new_text: String::from("}`"), - }, - ] - ); - } - - // #[test] - // fn test_range_formatting() { - // let encoding = PositionEncoding::Wide(WideEncoding::Utf16); - // let input = "(\"Jan 1, 2018\u{2009}–\u{2009}Jan 1, 2019\");\n(\"Jan 1, 2018\u{2009}–\u{2009}Jan 1, 2019\");\nisSpreadAssignment;\n".to_string(); - // let change = TextDocumentContentChangeEvent { - // range: Some(Range::new(Position::new(0, 30), Position::new(1, 0))), - // range_length: Some(1), - // text: String::new(), - // }; - // - // let output = apply_document_changes(encoding, input, vec![change]); - // let expected = "(\"Jan 1, 2018\u{2009}–\u{2009}Jan 1, 2019\");(\"Jan 1, 2018\u{2009}–\u{2009}Jan 1, 2019\");\nisSpreadAssignment;\n"; - // - // assert_eq!(output, expected); - // } -} diff --git a/crates/pg_workspace/Cargo.toml b/crates/pg_workspace/Cargo.toml index b5c31f43..2dc60e99 100644 --- a/crates/pg_workspace/Cargo.toml +++ b/crates/pg_workspace/Cargo.toml @@ -12,20 +12,28 @@ version = "0.0.0" [dependencies] -async-std = "1.12.0" -dashmap = "5.5.3" -text-size = "1.1.1" - -pg_base_db.workspace = true -pg_fs.workspace = true -pg_hover.workspace = true -pg_lint.workspace = true -pg_query_ext.workspace = true -pg_schema_cache.workspace = true -pg_syntax.workspace = true -pg_typecheck.workspace = true - +biome_deserialize = "0.6.0" +dashmap = "5.5.3" +futures = "0.3.31" +ignore = { workspace = true } +pg_analyse = { workspace = true, features = ["serde"] } +pg_analyser = { workspace = true } +pg_completions = { workspace = true } +pg_configuration = { workspace = true } 
+pg_console = { workspace = true } +pg_diagnostics = { workspace = true } +pg_fs = { workspace = true, features = ["serde"] } +pg_query_ext = { workspace = true } +pg_schema_cache = { workspace = true } +pg_statement_splitter = { workspace = true } +rustc-hash = { workspace = true } +serde = { workspace = true, features = ["derive"] } +serde_json = { workspace = true, features = ["raw_value"] } sqlx.workspace = true +text-size.workspace = true +tokio = { workspace = true, features = ["rt", "rt-multi-thread"] } +toml = { workspace = true } +tracing = { workspace = true, features = ["attributes", "log"] } tree-sitter.workspace = true tree_sitter_sql.workspace = true diff --git a/crates/pg_workspace_new/src/configuration.rs b/crates/pg_workspace/src/configuration.rs similarity index 100% rename from crates/pg_workspace_new/src/configuration.rs rename to crates/pg_workspace/src/configuration.rs diff --git a/crates/pg_workspace/src/diagnostics.rs b/crates/pg_workspace/src/diagnostics.rs index a1391f85..16efcea0 100644 --- a/crates/pg_workspace/src/diagnostics.rs +++ b/crates/pg_workspace/src/diagnostics.rs @@ -1,27 +1,353 @@ -use std::fmt::Debug; -use text_size::TextRange; - -#[derive(Debug, PartialEq, Eq)] -pub struct Diagnostic { - pub message: String, - pub description: Option, - pub severity: Severity, - pub source: String, - pub range: TextRange, -} - -#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)] -/// The severity to associate to a diagnostic. -pub enum Severity { - /// Reports a hint. - Hint, - /// Reports an information. - #[default] - Information, - /// Reports a warning. - Warning, - /// Reports an error. - Error, - /// Reports a crash. 
- Fatal, +use pg_configuration::ConfigurationDiagnostic; +use pg_console::fmt::Bytes; +use pg_console::markup; +use pg_diagnostics::{ + category, Advices, Category, Diagnostic, DiagnosticTags, LogCategory, Severity, Visit, +}; +use pg_fs::FileSystemDiagnostic; +use serde::{Deserialize, Serialize}; +use std::error::Error; +use std::fmt; +use std::fmt::{Debug, Display, Formatter}; +use std::process::{ExitCode, Termination}; +use tokio::task::JoinError; + +/// Generic errors thrown during operations +#[derive(Deserialize, Diagnostic, Serialize)] +pub enum WorkspaceError { + /// Error thrown when validating the configuration. Once deserialized, further checks have to be done. + Configuration(ConfigurationDiagnostic), + /// Error when trying to access the database + DatabaseConnectionError(DatabaseConnectionError), + /// Diagnostics emitted when querying the file system + FileSystem(FileSystemDiagnostic), + /// Thrown when we can't read a generic directory + CantReadDirectory(CantReadDirectory), + /// Thrown when we can't read a generic file + CantReadFile(CantReadFile), + /// The file does not exist in the [crate::Workspace] + NotFound(NotFound), + /// Error emitted by the underlying transport layer for a remote Workspace + TransportError(TransportError), + /// Emitted when the file is ignored and should not be processed + FileIgnored(FileIgnored), + /// Emitted when a file could not be parsed because it's larger than the size limit + FileTooLarge(FileTooLarge), + /// Diagnostic raised when a file is protected + ProtectedFile(ProtectedFile), + /// Raised when there's an issue around the VCS integration + Vcs(VcsDiagnostic), + /// Error in the async runtime + RuntimeError(RuntimeError), +} + +impl WorkspaceError { + pub fn cant_read_file(path: String) -> Self { + Self::CantReadFile(CantReadFile { path }) + } + + pub fn not_found() -> Self { + Self::NotFound(NotFound) + } + + pub fn protected_file(file_path: impl Into) -> Self { + Self::ProtectedFile(ProtectedFile { + 
file_path: file_path.into(), + verbose_advice: ProtectedFileAdvice, + }) + } + + pub fn vcs_disabled() -> Self { + Self::Vcs(VcsDiagnostic::DisabledVcs(DisabledVcs {})) + } + + pub fn runtime(msg: &str) -> Self { + Self::RuntimeError(RuntimeError { + message: msg.into(), + }) + } +} + +impl Error for WorkspaceError {} + +impl Debug for WorkspaceError { + fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result { + std::fmt::Display::fmt(self, f) + } +} + +impl Display for WorkspaceError { + fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result { + Diagnostic::description(self, f) + } +} + +impl From for WorkspaceError { + fn from(err: TransportError) -> Self { + Self::TransportError(err) + } +} + +impl Termination for WorkspaceError { + fn report(self) -> ExitCode { + ExitCode::FAILURE + } +} + +impl From for WorkspaceError { + fn from(err: FileSystemDiagnostic) -> Self { + Self::FileSystem(err) + } +} + +impl From for WorkspaceError { + fn from(err: ConfigurationDiagnostic) -> Self { + Self::Configuration(err) + } +} + +#[derive(Debug, Serialize, Deserialize)] +/// Error emitted by the underlying transport layer for a remote Workspace +pub enum TransportError { + /// Error emitted by the transport layer if the connection was lost due to an I/O error + ChannelClosed, + /// Error emitted by the transport layer if a request timed out + Timeout, + /// Error caused by a serialization or deserialization issue + SerdeError(String), + /// Generic error type for RPC errors that can't be deserialized into RomeError + RPCError(String), +} + +impl Display for TransportError { + fn fmt(&self, fmt: &mut Formatter) -> fmt::Result { + self.description(fmt) + } +} + +impl Diagnostic for TransportError { + fn category(&self) -> Option<&'static Category> { + Some(category!("internalError/io")) + } + + fn severity(&self) -> Severity { + Severity::Error + } + + fn description(&self, fmt: &mut Formatter<'_>) -> fmt::Result { + match self { + TransportError::SerdeError(err) => write!(fmt, 
"serialization error: {err}"), + TransportError::ChannelClosed => fmt.write_str( + "a request to the remote workspace failed because the connection was interrupted", + ), + TransportError::Timeout => { + fmt.write_str("the request to the remote workspace timed out") + } + TransportError::RPCError(err) => fmt.write_str(err), + } + } + + fn message(&self, fmt: &mut pg_console::fmt::Formatter<'_>) -> std::io::Result<()> { + match self { + TransportError::SerdeError(err) => write!(fmt, "serialization error: {err}"), + TransportError::ChannelClosed => fmt.write_str( + "a request to the remote workspace failed because the connection was interrupted", + ), + TransportError::Timeout => { + fmt.write_str("the request to the remote workspace timed out") + } + TransportError::RPCError(err) => fmt.write_str(err), + } + } + fn tags(&self) -> DiagnosticTags { + DiagnosticTags::INTERNAL + } +} + +#[derive(Debug, Deserialize, Diagnostic, Serialize)] +pub enum VcsDiagnostic { + /// When the VCS folder couldn't be found + NoVcsFolderFound(NoVcsFolderFound), + /// VCS is disabled + DisabledVcs(DisabledVcs), +} + +#[derive(Debug, Diagnostic, Serialize, Deserialize)] +#[diagnostic( + category = "internalError/fs", + severity = Warning, + message = "Couldn't determine a directory for the VCS integration. VCS integration will be disabled." +)] +pub struct DisabledVcs {} + +#[derive(Debug, Diagnostic, Serialize, Deserialize)] +#[diagnostic( + category = "internalError/runtime", + severity = Error, + message = "An error occurred in the async runtime." 
+)] +pub struct RuntimeError { + message: String, +} + +impl From for WorkspaceError { + fn from(err: JoinError) -> Self { + Self::RuntimeError(RuntimeError { + message: err.to_string(), + }) + } +} + +#[derive(Debug, Diagnostic, Serialize, Deserialize)] +#[diagnostic( + category = "internalError/fs", + severity = Error, + message( + description = "Couldn't find the VCS folder at the following path: {path}", + message("Couldn't find the VCS folder at the following path: "{self.path}), + ) +)] +pub struct NoVcsFolderFound { + #[location(resource)] + pub path: String, +} + +impl From for WorkspaceError { + fn from(value: VcsDiagnostic) -> Self { + Self::Vcs(value) + } +} + +#[derive(Debug, Serialize, Deserialize, Diagnostic)] +#[diagnostic( + category = "database/connection", + message = "Database error: {message}" +)] +pub struct DatabaseConnectionError { + message: String, + code: Option, +} + +impl From for WorkspaceError { + fn from(err: sqlx::Error) -> Self { + let db_err = err.as_database_error(); + if let Some(db_err) = db_err { + Self::DatabaseConnectionError(DatabaseConnectionError { + message: db_err.message().to_string(), + code: db_err.code().map(|c| c.to_string()), + }) + } else { + Self::DatabaseConnectionError(DatabaseConnectionError { + message: err.to_string(), + code: None, + }) + } + } +} + +#[derive(Debug, Serialize, Deserialize, Diagnostic)] +#[diagnostic( + category = "internalError/fs", + message = "The file does not exist in the workspace.", + tags(INTERNAL) +)] +pub struct NotFound; + +#[derive(Debug, Serialize, Deserialize, Diagnostic)] +#[diagnostic( + category = "project", + severity = Information, + message( + message("The file "{self.file_path}" is protected because is handled by another tool. We won't process it."), + description = "The file {file_path} is protected because is handled by another tool. 
We won't process it.", + ), + tags(VERBOSE) +)] +pub struct ProtectedFile { + #[location(resource)] + pub file_path: String, + + #[verbose_advice] + pub verbose_advice: ProtectedFileAdvice, +} + +#[derive(Debug, Serialize, Deserialize)] +pub struct ProtectedFileAdvice; + +impl Advices for ProtectedFileAdvice { + fn record(&self, visitor: &mut dyn Visit) -> std::io::Result<()> { + visitor.record_log(LogCategory::Info, &markup! { "You can hide this diagnostic by using ""--diagnostic-level=warn"" to increase the diagnostic level shown by CLI." }) + } +} + +#[derive(Debug, Serialize, Deserialize, Diagnostic)] +#[diagnostic( + category = "internalError/fs", + message( + message("We couldn't read the following directory, maybe for permissions reasons or it doesn't exist: "{self.path}), + description = "We couldn't read the following directory, maybe for permissions reasons or it doesn't exist: {path}" + ) +)] +pub struct CantReadDirectory { + #[location(resource)] + path: String, +} + +#[derive(Debug, Serialize, Deserialize, Diagnostic)] +#[diagnostic( + category = "internalError/fs", + message( + message("We couldn't read the following file, maybe for permissions reasons or it doesn't exist: "{self.path}), + description = "We couldn't read the following file, maybe for permissions reasons or it doesn't exist: {path}" + ) +)] +pub struct CantReadFile { + #[location(resource)] + path: String, +} + +#[derive(Debug, Serialize, Deserialize, Diagnostic)] +#[diagnostic( + category = "internalError/fs", + message( + message("The file "{self.path}" was ignored."), + description = "The file {path} was ignored." 
+ ), + severity = Warning, +)] +pub struct FileIgnored { + #[location(resource)] + path: String, +} + +#[derive(Debug, Serialize, Deserialize)] +pub struct FileTooLarge { + path: String, + size: usize, + limit: usize, +} + +impl Diagnostic for FileTooLarge { + fn category(&self) -> Option<&'static Category> { + Some(category!("internalError/fs")) + } + + fn message(&self, fmt: &mut pg_console::fmt::Formatter<'_>) -> std::io::Result<()> { + fmt.write_markup( + markup!{ + "Size of "{self.path}" is "{Bytes(self.size)}" which exceeds configured maximum of "{Bytes(self.limit)}" for this project. + The file size limit exists to prevent us inadvertently slowing down and loading large files that we shouldn't. + Use the `files.maxSize` configuration to change the maximum size of files processed." + } + ) + } + + fn description(&self, fmt: &mut Formatter<'_>) -> fmt::Result { + write!(fmt, + "Size of {} is {} which exceeds configured maximum of {} for this project.\n\ + The file size limit exists to prevent us inadvertently slowing down and loading large files that we shouldn't.\n\ + Use the `files.maxSize` configuration to change the maximum size of files processed.", + self.path, Bytes(self.size), Bytes(self.limit) + ) + } } diff --git a/crates/pg_workspace_new/src/dome.rs b/crates/pg_workspace/src/dome.rs similarity index 100% rename from crates/pg_workspace_new/src/dome.rs rename to crates/pg_workspace/src/dome.rs diff --git a/crates/pg_workspace/src/lib.rs b/crates/pg_workspace/src/lib.rs index 97c668d5..9467d1fb 100644 --- a/crates/pg_workspace/src/lib.rs +++ b/crates/pg_workspace/src/lib.rs @@ -1,550 +1,98 @@ -pub mod diagnostics; -mod lint; -mod pg_query; -mod tree_sitter; -mod typecheck; - -use std::sync::{RwLock, RwLockWriteGuard}; - -use dashmap::{DashMap, DashSet}; -use diagnostics::{Diagnostic, Severity}; -use lint::Linter; -use pg_base_db::{Document, DocumentChange, StatementRef}; -use pg_fs::PgLspPath; -use pg_query::PgQueryParser; -use 
pg_schema_cache::SchemaCache; -use sqlx::PgPool; -use tree_sitter::TreeSitterParser; -use typecheck::Typechecker; +use std::ops::{Deref, DerefMut}; -pub struct Workspace { - pub documents: DashMap, - // Stores the statements that have changed since the last analysis - changed_stmts: DashSet, - pub schema_cache: RwLock, +use pg_console::Console; +use pg_fs::{FileSystem, OsFileSystem}; - pub tree_sitter: TreeSitterParser, - pub pg_query: PgQueryParser, - pub linter: Linter, - pub typechecker: Typechecker, -} - -impl Default for Workspace { - fn default() -> Self { - Self::new() - } +pub mod configuration; +pub mod diagnostics; +pub mod dome; +pub mod matcher; +pub mod settings; +pub mod workspace; + +pub use crate::diagnostics::{TransportError, WorkspaceError}; +pub use crate::workspace::Workspace; + +/// This is the main entrypoint of the application. +pub struct App<'app> { + /// A reference to the internal virtual file system + pub fs: DynRef<'app, dyn FileSystem>, + /// A reference to the internal workspace + pub workspace: WorkspaceRef<'app>, + /// A reference to the internal console, where its buffer will be used to write messages and + /// errors + pub console: &'app mut dyn Console, } -impl Workspace { - pub fn new() -> Workspace { - Workspace { - documents: DashMap::new(), - schema_cache: RwLock::new(SchemaCache::new()), - changed_stmts: DashSet::new(), - - tree_sitter: TreeSitterParser::new(), - pg_query: PgQueryParser::new(), - linter: Linter::new(), - typechecker: Typechecker::new(), - } - } - - /// Applies changes to the current state of the world - /// - /// Returns a list of changed statements - pub fn apply_change(&self, url: PgLspPath, mut change: DocumentChange) { - let mut doc = self - .documents - .entry(url.clone()) - .or_insert(Document::new(url, None)); - - change.apply(doc.value_mut()); - - let changed_stmts = change.collect_statement_changes(); - - for c in &changed_stmts { - match c { - pg_base_db::StatementChange::Added(s) => { - 
self.tree_sitter.add_statement(s); - self.pg_query.add_statement(s); - - self.changed_stmts.insert(s.to_owned()); - } - pg_base_db::StatementChange::Deleted(s) => { - self.tree_sitter.remove_statement(s); - self.pg_query.remove_statement(s); - self.linter.clear_statement_violations(s); - self.typechecker.clear_statement_errors(s); - - self.changed_stmts.insert(s.to_owned()); - } - pg_base_db::StatementChange::Modified(s) => { - self.tree_sitter.modify_statement(s); - self.pg_query.modify_statement(s); - self.linter.clear_statement_violations(&s.statement); - self.typechecker.clear_statement_errors(&s.statement); - - self.changed_stmts.remove(&s.statement); - self.changed_stmts.insert(s.new_statement().to_owned()); - } - } - } +impl<'app> App<'app> { + pub fn with_console(console: &'app mut dyn Console) -> Self { + Self::with_filesystem_and_console(DynRef::Owned(Box::::default()), console) } - pub fn remove_document(&self, url: PgLspPath) { - let r = self.documents.remove(&url); - if r.is_some() { - let doc = r.unwrap().1; - for stmt in doc.statement_refs() { - self.tree_sitter.remove_statement(&stmt); - self.pg_query.remove_statement(&stmt); - self.linter.clear_statement_violations(&stmt); - self.typechecker.clear_statement_errors(&stmt); - } - } + /// Create a new instance of the app using the specified [FileSystem] and [Console] implementation + pub fn with_filesystem_and_console( + fs: DynRef<'app, dyn FileSystem>, + console: &'app mut dyn Console, + ) -> Self { + Self::new(fs, console, WorkspaceRef::Owned(workspace::server())) } - /// Collects all diagnostics for a given document. It does not compute them, it just collects. 
- pub fn diagnostics(&self, url: &PgLspPath) -> Vec { - let mut diagnostics: Vec = vec![]; - - let doc = self.documents.get(url); - - if doc.is_none() { - return diagnostics; - } - - let doc = doc.unwrap(); - - for (range, stmt) in doc.statement_refs_with_range() { - diagnostics.extend(self.pg_query.diagnostics(&stmt, range)); - diagnostics.extend(self.linter.diagnostics(&stmt, range)); - diagnostics.extend(self.typechecker.diagnostics(&stmt, range)); + /// Create a new instance of the app using the specified [FileSystem], [Console] and [Workspace] implementation + pub fn new( + fs: DynRef<'app, dyn FileSystem>, + console: &'app mut dyn Console, + workspace: WorkspaceRef<'app>, + ) -> Self { + Self { + fs, + console, + workspace, } - - diagnostics - } - - /// Drain changed statements to kick off analysis - pub fn compute(&self, conn: Option) -> Vec { - let changed: Vec = self - .changed_stmts - .iter() - .map(|arc| (*arc).clone()) - .collect(); - - self.changed_stmts.clear(); - - changed.iter().for_each(|stmt| { - self.pg_query.compute_cst(stmt); - - if let Some(ast) = self.pg_query.ast(stmt) { - self.linter.compute_statement_violations( - stmt, - ::pg_lint::LinterParams { - ast: ast.as_ref(), - enriched_ast: self - .pg_query - .enriched_ast(stmt) - .as_ref() - .map(|a| a.as_ref()), - }, - ); - if let Some(conn) = conn.as_ref() { - self.typechecker.run_typecheck( - stmt, - ::pg_typecheck::TypecheckerParams { - conn, - sql: &stmt.text, - ast: ast.as_ref(), - enriched_ast: self - .pg_query - .enriched_ast(stmt) - .as_ref() - .map(|a| a.as_ref()), - }, - ); - } - } - }); - changed - } - - pub fn set_schema_cache(&self, cache: SchemaCache) { - let mut schema_cache: RwLockWriteGuard = self.schema_cache.write().unwrap(); - *schema_cache = cache; - - // clear all schema cache related diagnostics - // and add all statements to the changed statements - self.typechecker.clear_errors(); - self.documents - .iter() - .flat_map(|entry| entry.value().statement_refs()) - 
.for_each(|f| { - self.changed_stmts.insert(f); - }) } } -#[cfg(test)] -mod tests { - - use pg_base_db::{Change, DocumentChange}; - use text_size::{TextRange, TextSize}; - - use crate::{ - diagnostics::{Diagnostic, Severity}, - Workspace, - }; - use pg_fs::PgLspPath; - - #[test] - fn test_apply_change() { - let ide = Workspace::new(); - - ide.apply_change( - PgLspPath::new("test.sql"), - DocumentChange::new( - 1, - vec![Change { - range: None, - text: "select 1;".to_string(), - }], - ), - ); - } - - #[test] - fn test_diagnostics_within_statement() { - let ide = Workspace::new(); - - let url = PgLspPath::new("test.sql"); - - ide.apply_change( - url.clone(), - DocumentChange::new( - 1, - vec![Change { - range: None, - text: "select unknown from contact;\n\nselect 12345;\n\nalter table test drop column id;\n".to_string(), - }], - ), - ); - - ide.compute(None); - - assert_eq!(ide.diagnostics(&url).len(), 1); - - { - let doc = ide.documents.get(&PgLspPath::new("test.sql")).unwrap(); - assert_eq!(doc.statement_refs().len(), 3); - assert_eq!( - doc.statement_ref(0).text, - "select unknown from contact;".to_string() - ); - assert_eq!(doc.statement_ref(1).text, "select 12345;".to_string()); - assert_eq!( - doc.statement_ref(2).text, - "alter table test drop column id;".to_string() - ); - } - - ide.compute(None); - - assert_eq!(ide.diagnostics(&url).len(), 1); - - ide.apply_change( - PgLspPath::new("test.sql"), - DocumentChange::new( - 1, - vec![Change { - range: Some(TextRange::new(76.into(), 76.into())), - text: "a".to_string(), - }], - ), - ); - - { - let doc = ide.documents.get(&PgLspPath::new("test.sql")).unwrap(); - assert_eq!(doc.statement_refs().len(), 3); - assert_eq!( - doc.statement_ref(0).text, - "select unknown from contact;".to_string() - ); - assert_eq!(doc.statement_ref(1).text, "select 12345;".to_string()); - assert_eq!( - doc.statement_ref(2).text, - "alter table test drop column ida;".to_string() - ); - } - - // the problem is here! 
- ide.compute(None); - - assert_eq!(ide.diagnostics(&url).len(), 1); - } - - #[test] - fn test_apply_deletion_change() { - let ide = Workspace::new(); - - let url = PgLspPath::new("test.sql"); - - ide.apply_change( - url.clone(), - DocumentChange::new( - 1, - vec![Change { - range: None, - text: "select unknown from contact;\n\nselect 12345;\n\nalter table test drop column id;\n".to_string(), - }], - ), - ); - - ide.compute(None); - - assert_eq!(ide.diagnostics(&url).len(), 1); - - { - let doc = ide.documents.get(&PgLspPath::new("test.sql")).unwrap(); - assert_eq!(doc.statement_refs().len(), 3); - assert_eq!( - doc.statement_ref(0).text, - "select unknown from contact;".to_string() - ); - assert_eq!(doc.statement_ref(1).text, "select 12345;".to_string()); - assert_eq!( - doc.statement_ref(2).text, - "alter table test drop column id;".to_string() - ); - } - - ide.compute(None); - - assert_eq!(ide.diagnostics(&url).len(), 1); - - ide.apply_change( - PgLspPath::new("test.sql"), - DocumentChange::new( - 1, - vec![Change { - range: Some(TextRange::new(39.into(), 40.into())), - text: "".to_string(), - }], - ), - ); - - ide.compute(None); +pub enum WorkspaceRef<'app> { + Owned(Box), + Borrowed(&'app dyn Workspace), +} - assert_eq!(ide.diagnostics(&url).len(), 1); +impl<'app> Deref for WorkspaceRef<'app> { + type Target = dyn Workspace + 'app; - { - let doc = ide.documents.get(&PgLspPath::new("test.sql")).unwrap(); - assert_eq!(doc.statement_refs().len(), 3); - assert_eq!( - doc.statement_ref(0).text, - "select unknown from contact;".to_string() - ); - assert_eq!(doc.statement_ref(1).text, "select 1245;".to_string()); - assert_eq!( - doc.statement_ref(2).text, - "alter table test drop column id;".to_string() - ); + // False positive + #[allow(clippy::explicit_auto_deref)] + fn deref(&self) -> &Self::Target { + match self { + WorkspaceRef::Owned(inner) => &**inner, + WorkspaceRef::Borrowed(inner) => *inner, } - - ide.compute(None); - - 
assert_eq!(ide.diagnostics(&url).len(), 1); } +} - #[test] - fn test_lint() { - let ide = Workspace::new(); - let path = PgLspPath::new("test.sql"); +/// Clone of [std::borrow::Cow] specialized for storing a trait object and +/// holding a mutable reference in the `Borrowed` variant instead of requiring +/// the inner type to implement [std::borrow::ToOwned] +pub enum DynRef<'app, T: ?Sized + 'app> { + Owned(Box), + Borrowed(&'app mut T), +} - ide.apply_change( - path.clone(), - DocumentChange::new( - 1, - vec![Change { - range: None, - text: "select 1 from contact;\nselect 1;\nalter table test drop column id;" - .to_string(), - }], - ), - ); +impl<'app, T: ?Sized + 'app> Deref for DynRef<'app, T> { + type Target = T; - { - let doc = ide.documents.get(&path).unwrap(); - assert_eq!(doc.statement_ranges.len(), 3); - assert_eq!( - doc.statement_ref(0).text, - "select 1 from contact;".to_string() - ); - assert_eq!(doc.statement_ref(1).text, "select 1;".to_string()); - assert_eq!( - doc.statement_ref(2).text, - "alter table test drop column id;".to_string() - ); + fn deref(&self) -> &Self::Target { + match self { + DynRef::Owned(inner) => inner, + DynRef::Borrowed(inner) => inner, } - - ide.compute(None); - - let d = ide.diagnostics(&path); - - assert_eq!(d.len(), 1); - - assert_eq!( - d[0], - Diagnostic { - message: "Dropping a column may break existing clients.".to_string(), - description: None, - severity: Severity::Warning, - source: "lint".to_string(), - range: TextRange::new(TextSize::new(50), TextSize::new(64)), - } - ); } +} - #[test] - fn test_apply_change_with_error() { - let ide = Workspace::new(); - - let path = PgLspPath::new("test.sql"); - - ide.apply_change( - path.clone(), - DocumentChange::new( - 1, - vec![Change { - range: None, - text: "select 1;\nselect 2;".to_string(), - }], - ), - ); - - { - let doc = ide.documents.get(&path).unwrap(); - assert_eq!(doc.statement_ref(0).text, "select 1;".to_string()); - assert_eq!(doc.statement_ref(1).text, "select 
2;".to_string()); - assert_eq!( - doc.statement_ranges[0], - TextRange::new(TextSize::new(0), TextSize::new(9)) - ); - assert_eq!( - doc.statement_ranges[1], - TextRange::new(TextSize::new(10), TextSize::new(19)) - ); - } - - ide.apply_change( - path.clone(), - DocumentChange::new( - 2, - vec![Change { - range: Some(TextRange::new(7.into(), 8.into())), - text: "".to_string(), - }], - ), - ); - - { - let doc = ide.documents.get(&path).unwrap(); - - assert_eq!(doc.text, "select ;\nselect 2;"); - assert_eq!(doc.statement_refs().len(), 2); - assert_eq!(doc.statement_ref(0).text, "select ;".to_string()); - assert_eq!(doc.statement_ref(1).text, "select 2;".to_string()); - assert_eq!( - doc.statement_ranges[0], - TextRange::new(TextSize::new(0), TextSize::new(8)) - ); - assert_eq!( - doc.statement_ranges[1], - TextRange::new(TextSize::new(9), TextSize::new(18)) - ); - } - - ide.apply_change( - path.clone(), - DocumentChange::new( - 3, - vec![Change { - range: Some(TextRange::new(7.into(), 7.into())), - text: "!".to_string(), - }], - ), - ); - - { - let doc = ide.documents.get(&path).unwrap(); - - assert_eq!(doc.text, "select !;\nselect 2;"); - assert_eq!(doc.statement_refs().len(), 2); - assert_eq!( - doc.statement_ranges[0], - TextRange::new(TextSize::new(0), TextSize::new(9)) - ); - assert_eq!( - doc.statement_ranges[1], - TextRange::new(TextSize::new(10), TextSize::new(19)) - ); - } - - assert_eq!(ide.diagnostics(&PgLspPath::new("test.sql")).len(), 1); - - ide.apply_change( - path.clone(), - DocumentChange::new( - 2, - vec![Change { - range: Some(TextRange::new(7.into(), 8.into())), - text: "".to_string(), - }], - ), - ); - - { - let doc = ide.documents.get(&path).unwrap(); - - assert_eq!(doc.text, "select ;\nselect 2;"); - assert_eq!(doc.statement_refs().len(), 2); - assert_eq!( - doc.statement_ranges[0], - TextRange::new(TextSize::new(0), TextSize::new(8)) - ); - assert_eq!( - doc.statement_ranges[1], - TextRange::new(TextSize::new(9), TextSize::new(18)) - ); - } - - 
ide.apply_change( - path.clone(), - DocumentChange::new( - 3, - vec![Change { - range: Some(TextRange::new(7.into(), 7.into())), - text: "1".to_string(), - }], - ), - ); - - { - let doc = ide.documents.get(&path).unwrap(); - - assert_eq!(doc.text, "select 1;\nselect 2;"); - assert_eq!(doc.statement_refs().len(), 2); - assert_eq!( - doc.statement_ranges[0], - TextRange::new(TextSize::new(0), TextSize::new(9)) - ); - assert_eq!( - doc.statement_ranges[1], - TextRange::new(TextSize::new(10), TextSize::new(19)) - ); +impl<'app, T: ?Sized + 'app> DerefMut for DynRef<'app, T> { + fn deref_mut(&mut self) -> &mut Self::Target { + match self { + DynRef::Owned(inner) => inner, + DynRef::Borrowed(inner) => inner, } - - assert_eq!(ide.diagnostics(&PgLspPath::new("test.sql")).len(), 0); } } diff --git a/crates/pg_workspace/src/lint.rs b/crates/pg_workspace/src/lint.rs deleted file mode 100644 index 00604cb8..00000000 --- a/crates/pg_workspace/src/lint.rs +++ /dev/null @@ -1,59 +0,0 @@ -use std::sync::Arc; - -use crate::{Diagnostic, Severity}; -use dashmap::DashMap; -use pg_base_db::StatementRef; -use text_size::TextRange; - -pub struct Linter { - violations: DashMap>>, -} - -impl Default for Linter { - fn default() -> Self { - Self::new() - } -} - -impl Linter { - pub fn new() -> Linter { - Linter { - violations: DashMap::new(), - } - } - - pub fn diagnostics(&self, statement: &StatementRef, at_range: TextRange) -> Vec { - let mut diagnostics = Vec::new(); - if let Some(v) = self.violations.get(statement) { - diagnostics.extend(v.iter().flat_map(|v| { - v.messages.iter().map(|m| Diagnostic { - description: None, - source: "lint".to_string(), - range: v.range.map(|r| r + at_range.start()).unwrap_or(at_range), - severity: match m { - pg_lint::ViolationMessage::Note(_) => Severity::Warning, - pg_lint::ViolationMessage::Help(_) => Severity::Hint, - }, - message: match m { - pg_lint::ViolationMessage::Note(n) => n.to_owned(), - pg_lint::ViolationMessage::Help(n) => n.to_owned(), - 
}, - }) - })); - } - diagnostics - } - - pub fn compute_statement_violations( - &self, - statement: &StatementRef, - params: pg_lint::LinterParams<'_>, - ) { - self.violations - .insert(statement.clone(), Arc::new(pg_lint::check_sql(params))); - } - - pub fn clear_statement_violations(&self, statement: &StatementRef) { - self.violations.remove(statement); - } -} diff --git a/crates/pg_workspace_new/src/matcher/LICENCE-APACHE b/crates/pg_workspace/src/matcher/LICENCE-APACHE similarity index 100% rename from crates/pg_workspace_new/src/matcher/LICENCE-APACHE rename to crates/pg_workspace/src/matcher/LICENCE-APACHE diff --git a/crates/pg_workspace_new/src/matcher/LICENSE-MIT b/crates/pg_workspace/src/matcher/LICENSE-MIT similarity index 100% rename from crates/pg_workspace_new/src/matcher/LICENSE-MIT rename to crates/pg_workspace/src/matcher/LICENSE-MIT diff --git a/crates/pg_workspace_new/src/matcher/mod.rs b/crates/pg_workspace/src/matcher/mod.rs similarity index 100% rename from crates/pg_workspace_new/src/matcher/mod.rs rename to crates/pg_workspace/src/matcher/mod.rs diff --git a/crates/pg_workspace_new/src/matcher/pattern.rs b/crates/pg_workspace/src/matcher/pattern.rs similarity index 99% rename from crates/pg_workspace_new/src/matcher/pattern.rs rename to crates/pg_workspace/src/matcher/pattern.rs index 35c6c8cb..eecd6962 100644 --- a/crates/pg_workspace_new/src/matcher/pattern.rs +++ b/crates/pg_workspace/src/matcher/pattern.rs @@ -1043,7 +1043,7 @@ mod test { #[test] fn test_pattern_glob_brackets_not_available_by_default() { - // RODO: Remove this test when we make brackets available by default in Biome 2.0 + // RODO: Remove this test when we make brackets available by default let pattern = Pattern::parse("{foo.js,bar.js}", false).unwrap(); assert!(!pattern.matches_path(Path::new("foo.js"))); assert!(!pattern.matches_path(Path::new("bar.js"))); diff --git a/crates/pg_workspace/src/pg_query.rs b/crates/pg_workspace/src/pg_query.rs deleted file mode 100644 
index daa79e60..00000000 --- a/crates/pg_workspace/src/pg_query.rs +++ /dev/null @@ -1,91 +0,0 @@ -use std::sync::Arc; - -use crate::{Diagnostic, Severity}; -use dashmap::DashMap; -use pg_base_db::{ChangedStatement, StatementRef}; -use text_size::TextRange; - -pub struct PgQueryParser { - ast_db: DashMap>, - native_diagnostics: DashMap>, - enriched_ast_db: DashMap>, - cst_db: DashMap>, -} - -impl Default for PgQueryParser { - fn default() -> Self { - Self::new() - } -} - -impl PgQueryParser { - pub fn new() -> PgQueryParser { - PgQueryParser { - ast_db: DashMap::new(), - native_diagnostics: DashMap::new(), - enriched_ast_db: DashMap::new(), - cst_db: DashMap::new(), - } - } - - pub fn ast(&self, statement: &StatementRef) -> Option> { - self.ast_db.get(statement).map(|x| x.clone()) - } - - pub fn enriched_ast(&self, statement: &StatementRef) -> Option> { - self.enriched_ast_db.get(statement).map(|x| x.clone()) - } - - pub fn cst(&self, statement: &StatementRef) -> Option> { - self.cst_db.get(statement).map(|x| x.clone()) - } - - pub fn compute_cst(&self, statement: &StatementRef) { - if self.cst_db.contains_key(statement) { - return; - } - - if let Some(ast) = self.ast_db.get(statement) { - let r = pg_syntax::parse_syntax(&statement.text, &ast); - self.cst_db.insert(statement.clone(), Arc::new(r.cst)); - self.enriched_ast_db - .insert(statement.clone(), Arc::new(r.ast)); - } - } - - pub fn diagnostics(&self, statement: &StatementRef, at_range: TextRange) -> Vec { - let mut diagnostics = Vec::new(); - if let Some(err) = self.native_diagnostics.get(statement) { - diagnostics.push(Diagnostic { - description: None, - source: "pg_query".to_string(), - range: at_range, - severity: Severity::Error, - message: err.to_string(), - }); - } - diagnostics - } - - pub fn add_statement(&self, statement: &StatementRef) { - let r = pg_query_ext::parse(statement.text.as_str()); - if r.is_ok() { - self.ast_db.insert(statement.clone(), Arc::new(r.unwrap())); - } else { - 
self.native_diagnostics - .insert(statement.clone(), Arc::new(r.unwrap_err())); - } - } - - pub fn remove_statement(&self, statement: &StatementRef) { - self.ast_db.remove(statement); - self.native_diagnostics.remove(statement); - self.enriched_ast_db.remove(statement); - self.cst_db.remove(statement); - } - - pub fn modify_statement(&self, change: &ChangedStatement) { - self.remove_statement(&change.statement); - self.add_statement(&change.new_statement()); - } -} diff --git a/crates/pg_workspace_new/src/settings.rs b/crates/pg_workspace/src/settings.rs similarity index 100% rename from crates/pg_workspace_new/src/settings.rs rename to crates/pg_workspace/src/settings.rs diff --git a/crates/pg_workspace/src/tree_sitter.rs b/crates/pg_workspace/src/tree_sitter.rs deleted file mode 100644 index b6ba8fa8..00000000 --- a/crates/pg_workspace/src/tree_sitter.rs +++ /dev/null @@ -1,164 +0,0 @@ -use std::sync::{Arc, RwLock}; - -use dashmap::DashMap; -use pg_base_db::{ChangedStatement, StatementRef}; -use tree_sitter::InputEdit; - -pub struct TreeSitterParser { - db: DashMap>, - - parser: RwLock, -} - -impl Default for TreeSitterParser { - fn default() -> Self { - Self::new() - } -} - -impl TreeSitterParser { - pub fn new() -> TreeSitterParser { - let mut parser = tree_sitter::Parser::new(); - parser - .set_language(tree_sitter_sql::language()) - .expect("Error loading sql language"); - - TreeSitterParser { - db: DashMap::new(), - parser: RwLock::new(parser), - } - } - - pub fn tree(&self, statement: &StatementRef) -> Option> { - self.db.get(statement).map(|x| x.clone()) - } - - pub fn add_statement(&self, statement: &StatementRef) { - let mut guard = self.parser.write().expect("Error reading parser"); - // todo handle error - let tree = guard.parse(&statement.text, None).unwrap(); - drop(guard); - self.db.insert(statement.clone(), Arc::new(tree)); - } - - pub fn remove_statement(&self, statement: &StatementRef) { - self.db.remove(statement); - } - - pub fn 
modify_statement(&self, change: &ChangedStatement) { - let old = self.db.remove(&change.statement); - - if old.is_none() { - self.add_statement(&change.new_statement()); - return; - } - - // we clone the three for now, lets see if that is sufficient or if we need to mutate the - // original tree instead but that will require some kind of locking - let mut tree = old.unwrap().1.as_ref().clone(); - - let edit = edit_from_change( - change.statement.text.as_str(), - usize::from(change.range.start()), - usize::from(change.range.end()), - change.text.as_str(), - ); - - tree.edit(&edit); - - let new_stmt = change.new_statement(); - let new_text = new_stmt.text.clone(); - - let mut guard = self.parser.write().expect("Error reading parser"); - // todo handle error - self.db.insert( - new_stmt, - Arc::new(guard.parse(new_text, Some(&tree)).unwrap()), - ); - drop(guard); - } -} - -// i wont pretend to know whats going on here but it seems to work -pub fn edit_from_change( - text: &str, - start_char: usize, - end_char: usize, - replacement_text: &str, -) -> InputEdit { - let mut start_byte = 0; - let mut end_byte = 0; - let mut chars_counted = 0; - - let mut line = 0; - let mut current_line_char_start = 0; // Track start of the current line in characters - let mut column_start = 0; - let mut column_end = 0; - - for (idx, c) in text.char_indices() { - if chars_counted == start_char { - start_byte = idx; - column_start = chars_counted - current_line_char_start; - } - if chars_counted == end_char { - end_byte = idx; - // Calculate column_end based on replacement_text - let replacement_lines: Vec<&str> = replacement_text.split('\n').collect(); - if replacement_lines.len() > 1 { - // If replacement text spans multiple lines, adjust line and column_end accordingly - line += replacement_lines.len() - 1; - column_end = replacement_lines.last().unwrap().chars().count(); - } else { - // Single line replacement, adjust column_end based on replacement text length - column_end = 
column_start + replacement_text.chars().count(); - } - break; // Found both start and end - } - if c == '\n' { - line += 1; - current_line_char_start = chars_counted + 1; // Next character starts a new line - } - chars_counted += 1; - } - - // Adjust end_byte based on the byte length of the replacement text - if start_byte != end_byte { - // Ensure there's a range to replace - end_byte = start_byte + replacement_text.len(); - } else if chars_counted < text.chars().count() && end_char == chars_counted { - // For insertions at the end of text - end_byte += replacement_text.len(); - } - - let start_point = tree_sitter::Point::new(line, column_start); - let end_point = tree_sitter::Point::new(line, column_end); - - // Calculate the new end byte after the insertion - let new_end_byte = start_byte + replacement_text.len(); - - // Calculate the new end position - let new_lines = replacement_text.matches('\n').count(); // Count how many new lines are in the inserted text - let last_line_length = replacement_text - .lines() - .last() - .unwrap_or("") - .chars() - .count(); // Length of the last line in the insertion - - let new_end_position = if new_lines > 0 { - // If there are new lines, the row is offset by the number of new lines, and the column is the length of the last line - tree_sitter::Point::new(start_point.row + new_lines, last_line_length) - } else { - // If there are no new lines, the row remains the same, and the column is offset by the length of the insertion - tree_sitter::Point::new(start_point.row, start_point.column + last_line_length) - }; - - InputEdit { - start_byte, - old_end_byte: end_byte, - new_end_byte, - start_position: start_point, - old_end_position: end_point, - new_end_position, - } -} diff --git a/crates/pg_workspace/src/typecheck.rs b/crates/pg_workspace/src/typecheck.rs deleted file mode 100644 index 35741825..00000000 --- a/crates/pg_workspace/src/typecheck.rs +++ /dev/null @@ -1,63 +0,0 @@ -use std::sync::Arc; - -use crate::{Diagnostic, 
Severity}; -use dashmap::DashMap; -use pg_base_db::StatementRef; -use pg_typecheck::{check_sql, PgSeverity, TypeError, TypecheckerParams}; -use text_size::TextRange; - -pub struct Typechecker { - errors: DashMap>>, -} - -impl Default for Typechecker { - fn default() -> Self { - Self::new() - } -} - -impl Typechecker { - pub fn new() -> Typechecker { - Typechecker { - errors: DashMap::new(), - } - } - - pub fn clear_errors(&self) { - self.errors.clear(); - } - - pub fn diagnostics(&self, statement: &StatementRef, at_range: TextRange) -> Vec { - let mut diagnostics = Vec::new(); - if let Some(errs) = self.errors.get(statement) { - diagnostics.extend(errs.iter().map(|e| Diagnostic { - description: None, - source: "typecheck".to_string(), - range: e.range.map(|r| r + at_range.start()).unwrap_or(at_range), - severity: match e.severity { - PgSeverity::Error => Severity::Error, - PgSeverity::Fatal => Severity::Error, - PgSeverity::Panic => Severity::Error, - PgSeverity::Warning => Severity::Warning, - PgSeverity::Notice => Severity::Information, - PgSeverity::Debug => Severity::Information, - PgSeverity::Info => Severity::Information, - PgSeverity::Log => Severity::Information, - }, - message: e.message.to_owned(), - })); - } - diagnostics - } - - pub fn run_typecheck(&self, statement: &StatementRef, params: TypecheckerParams<'_>) { - self.errors.insert( - statement.clone(), - Arc::new(async_std::task::block_on(check_sql(params))), - ); - } - - pub fn clear_statement_errors(&self, statement: &StatementRef) { - self.errors.remove(statement); - } -} diff --git a/crates/pg_workspace_new/src/workspace.rs b/crates/pg_workspace/src/workspace.rs similarity index 100% rename from crates/pg_workspace_new/src/workspace.rs rename to crates/pg_workspace/src/workspace.rs diff --git a/crates/pg_workspace_new/src/workspace/client.rs b/crates/pg_workspace/src/workspace/client.rs similarity index 100% rename from crates/pg_workspace_new/src/workspace/client.rs rename to 
crates/pg_workspace/src/workspace/client.rs diff --git a/crates/pg_workspace_new/src/workspace/server.rs b/crates/pg_workspace/src/workspace/server.rs similarity index 99% rename from crates/pg_workspace_new/src/workspace/server.rs rename to crates/pg_workspace/src/workspace/server.rs index fd7f4416..0e58ec6e 100644 --- a/crates/pg_workspace_new/src/workspace/server.rs +++ b/crates/pg_workspace/src/workspace/server.rs @@ -151,7 +151,7 @@ impl WorkspaceServer { /// Check whether a file is ignored in the top-level config `files.ignore`/`files.include` fn is_ignored(&self, path: &Path) -> bool { let file_name = path.file_name().and_then(|s| s.to_str()); - // Never ignore Biome's config file regardless `include`/`ignore` + // Never ignore PGLSP's config file regardless `include`/`ignore` (file_name != Some(ConfigName::pglsp_toml())) && // Apply top-level `include`/`ignore (self.is_ignored_by_top_level_config(path)) @@ -170,7 +170,7 @@ impl WorkspaceServer { // `matched_path_or_any_parents` panics if `source` is not under the gitignore root. // This checks excludes absolute paths that are not a prefix of the base root. if !path.has_root() || path.starts_with(ignore.path()) { - // Because Biome passes a list of paths, + // Because PGLSP passes a list of paths, // we use `matched_path_or_any_parents` instead of `matched`. 
ignore .matched_path_or_any_parents(path, path.is_dir()) diff --git a/crates/pg_workspace_new/src/workspace/server/analyser.rs b/crates/pg_workspace/src/workspace/server/analyser.rs similarity index 100% rename from crates/pg_workspace_new/src/workspace/server/analyser.rs rename to crates/pg_workspace/src/workspace/server/analyser.rs diff --git a/crates/pg_workspace_new/src/workspace/server/change.rs b/crates/pg_workspace/src/workspace/server/change.rs similarity index 100% rename from crates/pg_workspace_new/src/workspace/server/change.rs rename to crates/pg_workspace/src/workspace/server/change.rs diff --git a/crates/pg_workspace_new/src/workspace/server/document.rs b/crates/pg_workspace/src/workspace/server/document.rs similarity index 100% rename from crates/pg_workspace_new/src/workspace/server/document.rs rename to crates/pg_workspace/src/workspace/server/document.rs diff --git a/crates/pg_workspace_new/src/workspace/server/pg_query.rs b/crates/pg_workspace/src/workspace/server/pg_query.rs similarity index 100% rename from crates/pg_workspace_new/src/workspace/server/pg_query.rs rename to crates/pg_workspace/src/workspace/server/pg_query.rs diff --git a/crates/pg_workspace_new/src/workspace/server/tree_sitter.rs b/crates/pg_workspace/src/workspace/server/tree_sitter.rs similarity index 100% rename from crates/pg_workspace_new/src/workspace/server/tree_sitter.rs rename to crates/pg_workspace/src/workspace/server/tree_sitter.rs diff --git a/crates/pg_workspace_new/Cargo.toml b/crates/pg_workspace_new/Cargo.toml deleted file mode 100644 index c48bb6e2..00000000 --- a/crates/pg_workspace_new/Cargo.toml +++ /dev/null @@ -1,45 +0,0 @@ -[package] -authors.workspace = true -categories.workspace = true -description = "" -edition.workspace = true -homepage.workspace = true -keywords.workspace = true -license.workspace = true -name = "pg_workspace_new" -repository.workspace = true -version = "0.0.0" - - -[dependencies] -biome_deserialize = "0.6.0" -dashmap = "5.5.3" 
-futures = "0.3.31" -ignore = { workspace = true } -pg_analyse = { workspace = true, features = ["serde"] } -pg_analyser = { workspace = true } -pg_completions = { workspace = true } -pg_configuration = { workspace = true } -pg_console = { workspace = true } -pg_diagnostics = { workspace = true } -pg_fs = { workspace = true, features = ["serde"] } -pg_query_ext = { workspace = true } -pg_schema_cache = { workspace = true } -pg_statement_splitter = { workspace = true } -rustc-hash = { workspace = true } -serde = { workspace = true, features = ["derive"] } -serde_json = { workspace = true, features = ["raw_value"] } -sqlx.workspace = true -text-size.workspace = true -tokio = { workspace = true, features = ["rt", "rt-multi-thread"] } -toml = { workspace = true } -tracing = { workspace = true, features = ["attributes", "log"] } -tree-sitter.workspace = true -tree_sitter_sql.workspace = true - -[dev-dependencies] - -[lib] -doctest = false - -[features] diff --git a/crates/pg_workspace_new/src/diagnostics.rs b/crates/pg_workspace_new/src/diagnostics.rs deleted file mode 100644 index 16efcea0..00000000 --- a/crates/pg_workspace_new/src/diagnostics.rs +++ /dev/null @@ -1,353 +0,0 @@ -use pg_configuration::ConfigurationDiagnostic; -use pg_console::fmt::Bytes; -use pg_console::markup; -use pg_diagnostics::{ - category, Advices, Category, Diagnostic, DiagnosticTags, LogCategory, Severity, Visit, -}; -use pg_fs::FileSystemDiagnostic; -use serde::{Deserialize, Serialize}; -use std::error::Error; -use std::fmt; -use std::fmt::{Debug, Display, Formatter}; -use std::process::{ExitCode, Termination}; -use tokio::task::JoinError; - -/// Generic errors thrown during operations -#[derive(Deserialize, Diagnostic, Serialize)] -pub enum WorkspaceError { - /// Error thrown when validating the configuration. Once deserialized, further checks have to be done. 
- Configuration(ConfigurationDiagnostic), - /// Error when trying to access the database - DatabaseConnectionError(DatabaseConnectionError), - /// Diagnostics emitted when querying the file system - FileSystem(FileSystemDiagnostic), - /// Thrown when we can't read a generic directory - CantReadDirectory(CantReadDirectory), - /// Thrown when we can't read a generic file - CantReadFile(CantReadFile), - /// The file does not exist in the [crate::Workspace] - NotFound(NotFound), - /// Error emitted by the underlying transport layer for a remote Workspace - TransportError(TransportError), - /// Emitted when the file is ignored and should not be processed - FileIgnored(FileIgnored), - /// Emitted when a file could not be parsed because it's larger than the size limit - FileTooLarge(FileTooLarge), - /// Diagnostic raised when a file is protected - ProtectedFile(ProtectedFile), - /// Raised when there's an issue around the VCS integration - Vcs(VcsDiagnostic), - /// Error in the async runtime - RuntimeError(RuntimeError), -} - -impl WorkspaceError { - pub fn cant_read_file(path: String) -> Self { - Self::CantReadFile(CantReadFile { path }) - } - - pub fn not_found() -> Self { - Self::NotFound(NotFound) - } - - pub fn protected_file(file_path: impl Into) -> Self { - Self::ProtectedFile(ProtectedFile { - file_path: file_path.into(), - verbose_advice: ProtectedFileAdvice, - }) - } - - pub fn vcs_disabled() -> Self { - Self::Vcs(VcsDiagnostic::DisabledVcs(DisabledVcs {})) - } - - pub fn runtime(msg: &str) -> Self { - Self::RuntimeError(RuntimeError { - message: msg.into(), - }) - } -} - -impl Error for WorkspaceError {} - -impl Debug for WorkspaceError { - fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result { - std::fmt::Display::fmt(self, f) - } -} - -impl Display for WorkspaceError { - fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result { - Diagnostic::description(self, f) - } -} - -impl From for WorkspaceError { - fn from(err: TransportError) -> Self { - 
Self::TransportError(err) - } -} - -impl Termination for WorkspaceError { - fn report(self) -> ExitCode { - ExitCode::FAILURE - } -} - -impl From for WorkspaceError { - fn from(err: FileSystemDiagnostic) -> Self { - Self::FileSystem(err) - } -} - -impl From for WorkspaceError { - fn from(err: ConfigurationDiagnostic) -> Self { - Self::Configuration(err) - } -} - -#[derive(Debug, Serialize, Deserialize)] -/// Error emitted by the underlying transport layer for a remote Workspace -pub enum TransportError { - /// Error emitted by the transport layer if the connection was lost due to an I/O error - ChannelClosed, - /// Error emitted by the transport layer if a request timed out - Timeout, - /// Error caused by a serialization or deserialization issue - SerdeError(String), - /// Generic error type for RPC errors that can't be deserialized into RomeError - RPCError(String), -} - -impl Display for TransportError { - fn fmt(&self, fmt: &mut Formatter) -> fmt::Result { - self.description(fmt) - } -} - -impl Diagnostic for TransportError { - fn category(&self) -> Option<&'static Category> { - Some(category!("internalError/io")) - } - - fn severity(&self) -> Severity { - Severity::Error - } - - fn description(&self, fmt: &mut Formatter<'_>) -> fmt::Result { - match self { - TransportError::SerdeError(err) => write!(fmt, "serialization error: {err}"), - TransportError::ChannelClosed => fmt.write_str( - "a request to the remote workspace failed because the connection was interrupted", - ), - TransportError::Timeout => { - fmt.write_str("the request to the remote workspace timed out") - } - TransportError::RPCError(err) => fmt.write_str(err), - } - } - - fn message(&self, fmt: &mut pg_console::fmt::Formatter<'_>) -> std::io::Result<()> { - match self { - TransportError::SerdeError(err) => write!(fmt, "serialization error: {err}"), - TransportError::ChannelClosed => fmt.write_str( - "a request to the remote workspace failed because the connection was interrupted", - ), - 
TransportError::Timeout => { - fmt.write_str("the request to the remote workspace timed out") - } - TransportError::RPCError(err) => fmt.write_str(err), - } - } - fn tags(&self) -> DiagnosticTags { - DiagnosticTags::INTERNAL - } -} - -#[derive(Debug, Deserialize, Diagnostic, Serialize)] -pub enum VcsDiagnostic { - /// When the VCS folder couldn't be found - NoVcsFolderFound(NoVcsFolderFound), - /// VCS is disabled - DisabledVcs(DisabledVcs), -} - -#[derive(Debug, Diagnostic, Serialize, Deserialize)] -#[diagnostic( - category = "internalError/fs", - severity = Warning, - message = "Couldn't determine a directory for the VCS integration. VCS integration will be disabled." -)] -pub struct DisabledVcs {} - -#[derive(Debug, Diagnostic, Serialize, Deserialize)] -#[diagnostic( - category = "internalError/runtime", - severity = Error, - message = "An error occurred in the async runtime." -)] -pub struct RuntimeError { - message: String, -} - -impl From for WorkspaceError { - fn from(err: JoinError) -> Self { - Self::RuntimeError(RuntimeError { - message: err.to_string(), - }) - } -} - -#[derive(Debug, Diagnostic, Serialize, Deserialize)] -#[diagnostic( - category = "internalError/fs", - severity = Error, - message( - description = "Couldn't find the VCS folder at the following path: {path}", - message("Couldn't find the VCS folder at the following path: "{self.path}), - ) -)] -pub struct NoVcsFolderFound { - #[location(resource)] - pub path: String, -} - -impl From for WorkspaceError { - fn from(value: VcsDiagnostic) -> Self { - Self::Vcs(value) - } -} - -#[derive(Debug, Serialize, Deserialize, Diagnostic)] -#[diagnostic( - category = "database/connection", - message = "Database error: {message}" -)] -pub struct DatabaseConnectionError { - message: String, - code: Option, -} - -impl From for WorkspaceError { - fn from(err: sqlx::Error) -> Self { - let db_err = err.as_database_error(); - if let Some(db_err) = db_err { - Self::DatabaseConnectionError(DatabaseConnectionError 
{ - message: db_err.message().to_string(), - code: db_err.code().map(|c| c.to_string()), - }) - } else { - Self::DatabaseConnectionError(DatabaseConnectionError { - message: err.to_string(), - code: None, - }) - } - } -} - -#[derive(Debug, Serialize, Deserialize, Diagnostic)] -#[diagnostic( - category = "internalError/fs", - message = "The file does not exist in the workspace.", - tags(INTERNAL) -)] -pub struct NotFound; - -#[derive(Debug, Serialize, Deserialize, Diagnostic)] -#[diagnostic( - category = "project", - severity = Information, - message( - message("The file "{self.file_path}" is protected because is handled by another tool. We won't process it."), - description = "The file {file_path} is protected because is handled by another tool. We won't process it.", - ), - tags(VERBOSE) -)] -pub struct ProtectedFile { - #[location(resource)] - pub file_path: String, - - #[verbose_advice] - pub verbose_advice: ProtectedFileAdvice, -} - -#[derive(Debug, Serialize, Deserialize)] -pub struct ProtectedFileAdvice; - -impl Advices for ProtectedFileAdvice { - fn record(&self, visitor: &mut dyn Visit) -> std::io::Result<()> { - visitor.record_log(LogCategory::Info, &markup! { "You can hide this diagnostic by using ""--diagnostic-level=warn"" to increase the diagnostic level shown by CLI." 
}) - } -} - -#[derive(Debug, Serialize, Deserialize, Diagnostic)] -#[diagnostic( - category = "internalError/fs", - message( - message("We couldn't read the following directory, maybe for permissions reasons or it doesn't exist: "{self.path}), - description = "We couldn't read the following directory, maybe for permissions reasons or it doesn't exist: {path}" - ) -)] -pub struct CantReadDirectory { - #[location(resource)] - path: String, -} - -#[derive(Debug, Serialize, Deserialize, Diagnostic)] -#[diagnostic( - category = "internalError/fs", - message( - message("We couldn't read the following file, maybe for permissions reasons or it doesn't exist: "{self.path}), - description = "We couldn't read the following file, maybe for permissions reasons or it doesn't exist: {path}" - ) -)] -pub struct CantReadFile { - #[location(resource)] - path: String, -} - -#[derive(Debug, Serialize, Deserialize, Diagnostic)] -#[diagnostic( - category = "internalError/fs", - message( - message("The file "{self.path}" was ignored."), - description = "The file {path} was ignored." - ), - severity = Warning, -)] -pub struct FileIgnored { - #[location(resource)] - path: String, -} - -#[derive(Debug, Serialize, Deserialize)] -pub struct FileTooLarge { - path: String, - size: usize, - limit: usize, -} - -impl Diagnostic for FileTooLarge { - fn category(&self) -> Option<&'static Category> { - Some(category!("internalError/fs")) - } - - fn message(&self, fmt: &mut pg_console::fmt::Formatter<'_>) -> std::io::Result<()> { - fmt.write_markup( - markup!{ - "Size of "{self.path}" is "{Bytes(self.size)}" which exceeds configured maximum of "{Bytes(self.limit)}" for this project. - The file size limit exists to prevent us inadvertently slowing down and loading large files that we shouldn't. - Use the `files.maxSize` configuration to change the maximum size of files processed." 
- } - ) - } - - fn description(&self, fmt: &mut Formatter<'_>) -> fmt::Result { - write!(fmt, - "Size of {} is {} which exceeds configured maximum of {} for this project.\n\ - The file size limit exists to prevent us inadvertently slowing down and loading large files that we shouldn't.\n\ - Use the `files.maxSize` configuration to change the maximum size of files processed.", - self.path, Bytes(self.size), Bytes(self.limit) - ) - } -} diff --git a/crates/pg_workspace_new/src/lib.rs b/crates/pg_workspace_new/src/lib.rs deleted file mode 100644 index 9467d1fb..00000000 --- a/crates/pg_workspace_new/src/lib.rs +++ /dev/null @@ -1,98 +0,0 @@ -use std::ops::{Deref, DerefMut}; - -use pg_console::Console; -use pg_fs::{FileSystem, OsFileSystem}; - -pub mod configuration; -pub mod diagnostics; -pub mod dome; -pub mod matcher; -pub mod settings; -pub mod workspace; - -pub use crate::diagnostics::{TransportError, WorkspaceError}; -pub use crate::workspace::Workspace; - -/// This is the main entrypoint of the application. 
-pub struct App<'app> { - /// A reference to the internal virtual file system - pub fs: DynRef<'app, dyn FileSystem>, - /// A reference to the internal workspace - pub workspace: WorkspaceRef<'app>, - /// A reference to the internal console, where its buffer will be used to write messages and - /// errors - pub console: &'app mut dyn Console, -} - -impl<'app> App<'app> { - pub fn with_console(console: &'app mut dyn Console) -> Self { - Self::with_filesystem_and_console(DynRef::Owned(Box::::default()), console) - } - - /// Create a new instance of the app using the specified [FileSystem] and [Console] implementation - pub fn with_filesystem_and_console( - fs: DynRef<'app, dyn FileSystem>, - console: &'app mut dyn Console, - ) -> Self { - Self::new(fs, console, WorkspaceRef::Owned(workspace::server())) - } - - /// Create a new instance of the app using the specified [FileSystem], [Console] and [Workspace] implementation - pub fn new( - fs: DynRef<'app, dyn FileSystem>, - console: &'app mut dyn Console, - workspace: WorkspaceRef<'app>, - ) -> Self { - Self { - fs, - console, - workspace, - } - } -} - -pub enum WorkspaceRef<'app> { - Owned(Box), - Borrowed(&'app dyn Workspace), -} - -impl<'app> Deref for WorkspaceRef<'app> { - type Target = dyn Workspace + 'app; - - // False positive - #[allow(clippy::explicit_auto_deref)] - fn deref(&self) -> &Self::Target { - match self { - WorkspaceRef::Owned(inner) => &**inner, - WorkspaceRef::Borrowed(inner) => *inner, - } - } -} - -/// Clone of [std::borrow::Cow] specialized for storing a trait object and -/// holding a mutable reference in the `Borrowed` variant instead of requiring -/// the inner type to implement [std::borrow::ToOwned] -pub enum DynRef<'app, T: ?Sized + 'app> { - Owned(Box), - Borrowed(&'app mut T), -} - -impl<'app, T: ?Sized + 'app> Deref for DynRef<'app, T> { - type Target = T; - - fn deref(&self) -> &Self::Target { - match self { - DynRef::Owned(inner) => inner, - DynRef::Borrowed(inner) => inner, - } - } 
-} - -impl<'app, T: ?Sized + 'app> DerefMut for DynRef<'app, T> { - fn deref_mut(&mut self) -> &mut Self::Target { - match self { - DynRef::Owned(inner) => inner, - DynRef::Borrowed(inner) => inner, - } - } -} diff --git a/xtask/codegen/src/generate_configuration.rs b/xtask/codegen/src/generate_configuration.rs index ba54a326..d7587899 100644 --- a/xtask/codegen/src/generate_configuration.rs +++ b/xtask/codegen/src/generate_configuration.rs @@ -299,7 +299,7 @@ fn generate_for_groups( #[cfg_attr(feature = "schema", derive(JsonSchema))] #[serde(rename_all = "camelCase", deny_unknown_fields)] pub struct Rules { - /// It enables the lint rules recommended by Biome. `true` by default. + /// It enables the lint rules recommended by PGLSP. `true` by default. #[serde(skip_serializing_if = "Option::is_none")] pub recommended: Option, diff --git a/xtask/rules_check/Cargo.toml b/xtask/rules_check/Cargo.toml index f436855f..812d3cbf 100644 --- a/xtask/rules_check/Cargo.toml +++ b/xtask/rules_check/Cargo.toml @@ -13,5 +13,5 @@ pg_console = { workspace = true } pg_diagnostics = { workspace = true } pg_query_ext = { workspace = true } pg_statement_splitter = { workspace = true } -pg_workspace_new = { workspace = true } +pg_workspace = { workspace = true } pulldown-cmark = "0.12.2" diff --git a/xtask/rules_check/src/lib.rs b/xtask/rules_check/src/lib.rs index ed589252..40791612 100644 --- a/xtask/rules_check/src/lib.rs +++ b/xtask/rules_check/src/lib.rs @@ -11,7 +11,7 @@ use pg_analyser::{Analyser, AnalyserConfig}; use pg_console::{markup, Console}; use pg_diagnostics::{Diagnostic, DiagnosticExt, PrintDiagnostic}; use pg_query_ext::diagnostics::SyntaxDiagnostic; -use pg_workspace_new::settings::Settings; +use pg_workspace::settings::Settings; use pulldown_cmark::{CodeBlockKind, Event, Parser, Tag, TagEnd}; pub fn check_rules() -> anyhow::Result<()> {