diff --git a/crates/mun_codegen/Cargo.toml b/crates/mun_codegen/Cargo.toml
index 725dc65a3..f4f7273e3 100644
--- a/crates/mun_codegen/Cargo.toml
+++ b/crates/mun_codegen/Cargo.toml
@@ -37,6 +37,7 @@ insta = "0.16"
 libloader = { path = "../mun_libloader", package = "mun_libloader" }
 mun_test = { path = "../mun_test" }
 runtime = { path = "../mun_runtime", package = "mun_runtime" }
+paths = {path="../mun_paths", package="mun_paths"}
 
 [build-dependencies]
 semver = "0.9.0"
diff --git a/crates/mun_codegen/src/code_gen/module_builder.rs b/crates/mun_codegen/src/code_gen/module_builder.rs
index bd54dde52..b85011a58 100644
--- a/crates/mun_codegen/src/code_gen/module_builder.rs
+++ b/crates/mun_codegen/src/code_gen/module_builder.rs
@@ -23,8 +23,8 @@ impl<'db, 'ink, 'ctx> ModuleBuilder<'db, 'ink, 'ctx> {
         let file_id = module
             .file_id(code_gen.db)
             .expect("module must have a file");
-        let assembly_name = code_gen.db.file_relative_path(file_id);
-        let assembly_module = code_gen.create_module(assembly_name);
+        let assembly_module =
+            code_gen.create_module(code_gen.db.file_relative_path(file_id).as_str());
 
         Ok(Self {
             code_gen,
diff --git a/crates/mun_codegen/src/mock.rs b/crates/mun_codegen/src/mock.rs
index 33d1b90f2..ac018ad40 100644
--- a/crates/mun_codegen/src/mock.rs
+++ b/crates/mun_codegen/src/mock.rs
@@ -2,9 +2,10 @@ use crate::{
     db::{CodeGenDatabase, CodeGenDatabaseStorage},
     OptimizationLevel,
 };
-use hir::{FileId, HirDatabase, RelativePathBuf, SourceDatabase, SourceRoot, SourceRootId};
+use hir::{FileId, HirDatabase, SourceDatabase, SourceRoot, SourceRootId};
 use mun_target::spec::Target;
 use parking_lot::Mutex;
+use paths::RelativePathBuf;
 use std::sync::Arc;
 
 /// A mock implementation of the IR database. It can be used to set up a simple test case.
@@ -83,15 +84,17 @@ impl MockDatabase {
         let text = Arc::from(text.to_owned());
         let rel_path = RelativePathBuf::from("main.mun");
         let file_id = FileId(0);
-        db.set_file_relative_path(file_id, rel_path.clone());
         db.set_file_text(file_id, text);
         db.set_file_source_root(file_id, source_root_id);
-        source_root.insert_file(file_id);
+        source_root.insert_file(file_id, rel_path);
 
         db.set_source_root(source_root_id, Arc::new(source_root));
-        db.set_optimization_level(OptimizationLevel::None);
-        db.set_package_source_root(hir::PackageId(0), source_root_id);
+        let mut packages = hir::PackageSet::default();
+        packages.add_package(source_root_id);
+        db.set_packages(Arc::new(packages));
+
+        db.set_optimization_level(OptimizationLevel::None);
 
         (db, file_id)
     }
diff --git a/crates/mun_compiler/Cargo.toml b/crates/mun_compiler/Cargo.toml
index 5521ad900..12a287716 100644
--- a/crates/mun_compiler/Cargo.toml
+++ b/crates/mun_compiler/Cargo.toml
@@ -16,7 +16,8 @@ categories = ["game-development", "mun"]
 anyhow = "1.0.31"
 mun_codegen = { version = "=0.2.0", path="../mun_codegen" }
 mun_syntax = { version = "=0.2.0", path="../mun_syntax" }
-hir = { version = "=0.2.0", path = "../mun_hir", package = "mun_hir" }
+hir = { version="=0.2.0", path="../mun_hir", package="mun_hir" }
+paths = {path="../mun_paths", package="mun_paths"}
 mun_target = { version = "=0.2.0", path="../mun_target" }
 mun_project = { version = "=0.1.0", path = "../mun_project" }
 mun_diagnostics = { version = "=0.1.0", path = "../mun_diagnostics" }
diff --git a/crates/mun_compiler/src/diagnostics_snippets.rs b/crates/mun_compiler/src/diagnostics_snippets.rs
index 9565ccb78..574e4567d 100644
--- a/crates/mun_compiler/src/diagnostics_snippets.rs
+++ b/crates/mun_compiler/src/diagnostics_snippets.rs
@@ -1,13 +1,13 @@
-use hir::{line_index::LineIndex, FileId, HirDatabase, RelativePathBuf};
+use annotate_snippets::{
+    display_list::DisplayList,
+    display_list::FormatOptions,
+    snippet::{Annotation, AnnotationType, Slice, Snippet, SourceAnnotation},
+};
+use hir::{line_index::LineIndex, FileId, HirDatabase};
 use mun_diagnostics::DiagnosticForWith;
 use mun_syntax::SyntaxError;
-
-use std::sync::Arc;
-
-use annotate_snippets::display_list::DisplayList;
-use annotate_snippets::display_list::FormatOptions;
-use annotate_snippets::snippet::{Annotation, AnnotationType, Slice, Snippet, SourceAnnotation};
-use std::collections::HashMap;
+use paths::RelativePathBuf;
+use std::{collections::HashMap, sync::Arc};
 
 /// Writes the specified syntax error to the output stream.
 pub(crate) fn emit_syntax_error(
@@ -93,7 +93,7 @@ fn emit_diagnostic(
 
     // Add primary annotations
     annotations.push(AnnotationFile {
-        relative_file_path: db.file_relative_path(file_id),
+        relative_file_path: db.file_relative_path(file_id).to_relative_path_buf(),
         source_code: db.file_text(file_id),
         line_index: db.line_index(file_id),
         annotations: vec![match diagnostic.primary_annotation() {
diff --git a/crates/mun_compiler/src/driver.rs b/crates/mun_compiler/src/driver.rs
index 3df128e2a..6831dc297 100644
--- a/crates/mun_compiler/src/driver.rs
+++ b/crates/mun_compiler/src/driver.rs
@@ -6,10 +6,11 @@ use crate::{
     PathOrInline, RelativePath,
 };
 use hir::{
-    AstDatabase, DiagnosticSink, FileId, Module, PackageId, RelativePathBuf, SourceDatabase,
-    SourceRoot, SourceRootId, Upcast,
+    AstDatabase, DiagnosticSink, FileId, Module, PackageSet, SourceDatabase, SourceRoot,
+    SourceRootId, Upcast,
 };
 use mun_codegen::{AssemblyIR, CodeGenDatabase, TargetAssembly};
+use paths::RelativePathBuf;
 use std::{path::PathBuf, sync::Arc};
 
@@ -27,7 +28,6 @@ use std::path::Path;
 use walkdir::WalkDir;
 
 pub const WORKSPACE: SourceRootId = SourceRootId(0);
-pub const PACKAGE: PackageId = PackageId(0);
 
 pub struct Driver {
     db: CompilerDatabase,
@@ -91,14 +91,16 @@ impl Driver {
         // Store the file information in the database together with the source root
         let file_id = FileId(driver.next_file_id as u32);
         driver.next_file_id += 1;
-        driver.db.set_file_relative_path(file_id, rel_path);
         driver.db.set_file_text(file_id, Arc::from(text));
         driver.db.set_file_source_root(file_id, WORKSPACE);
-        driver.source_root.insert_file(file_id);
+        driver.source_root.insert_file(file_id, rel_path);
         driver
             .db
             .set_source_root(WORKSPACE, Arc::new(driver.source_root.clone()));
-        driver.db.set_package_source_root(PACKAGE, WORKSPACE);
+
+        let mut package_set = PackageSet::default();
+        package_set.add_package(WORKSPACE);
+        driver.db.set_packages(Arc::new(package_set));
 
         Ok((driver, file_id))
     }
@@ -120,9 +122,10 @@
 
         // Iterate over all files in the source directory of the package and store their information in
         // the database
-        let source_directory = package
-            .source_directory()
-            .ok_or_else(|| anyhow::anyhow!("the source directory does not exist"))?;
+        let source_directory = package.source_directory();
+        if !source_directory.is_dir() {
+            anyhow::bail!("the source directory does not exist")
+        }
 
         for source_file_path in iter_source_files(&source_directory) {
             let relative_path = compute_source_relative_path(&source_directory, &source_file_path)?;
@@ -137,19 +140,21 @@
             })?;
 
         let file_id = driver.alloc_file_id(&relative_path)?;
-        driver
-            .db
-            .set_file_relative_path(file_id, relative_path.clone());
         driver.db.set_file_text(file_id, Arc::from(file_contents));
driver.db.set_file_source_root(file_id, WORKSPACE); - driver.source_root.insert_file(file_id); + driver + .source_root + .insert_file(file_id, relative_path.clone()); } // Store the source root in the database driver .db .set_source_root(WORKSPACE, Arc::new(driver.source_root.clone())); - driver.db.set_package_source_root(PACKAGE, WORKSPACE); + + let mut package_set = PackageSet::default(); + package_set.add_package(WORKSPACE); + driver.db.set_packages(Arc::new(package_set)); Ok((package, driver)) } @@ -378,13 +383,12 @@ impl Driver { let file_id = self.alloc_file_id(path.as_ref()).unwrap(); // Insert the new file - self.db - .set_file_relative_path(file_id, path.as_ref().to_relative_path_buf()); self.db.set_file_text(file_id, Arc::from(contents)); self.db.set_file_source_root(file_id, WORKSPACE); // Update the source root - self.source_root.insert_file(file_id); + self.source_root + .insert_file(file_id, path.as_ref().to_relative_path_buf()); self.db .set_source_root(WORKSPACE, Arc::new(self.source_root.clone())); @@ -425,8 +429,11 @@ impl Driver { .insert(file_id, to.as_ref().to_relative_path_buf()); self.path_to_file_id.remove(from.as_ref()); // FileId now belongs to to + self.source_root.remove_file(file_id); + self.source_root + .insert_file(file_id, to.as_ref().to_relative_path_buf()); self.db - .set_file_relative_path(file_id, to.as_ref().to_relative_path_buf()); + .set_source_root(WORKSPACE, Arc::new(self.source_root.clone())); file_id } diff --git a/crates/mun_compiler/src/lib.rs b/crates/mun_compiler/src/lib.rs index d763d780d..89964e4ac 100644 --- a/crates/mun_compiler/src/lib.rs +++ b/crates/mun_compiler/src/lib.rs @@ -6,8 +6,9 @@ pub mod diagnostics; mod diagnostics_snippets; mod driver; -pub use hir::{FileId, RelativePath, RelativePathBuf}; +pub use hir::FileId; pub use mun_target::spec::Target; +pub use paths::{RelativePath, RelativePathBuf}; use std::path::{Path, PathBuf}; pub use crate::driver::DisplayColor; diff --git a/crates/mun_compiler_daemon/src/lib.rs b/crates/mun_compiler_daemon/src/lib.rs index 6b9f4ae5e..fb44b952f 100644 --- a/crates/mun_compiler_daemon/src/lib.rs +++ b/crates/mun_compiler_daemon/src/lib.rs @@ -19,9 +19,8 @@ pub fn compile_and_watch_manifest( // Start watching the source directory let (watcher_tx, watcher_rx) = channel(); let mut watcher: RecommendedWatcher = Watcher::new(watcher_tx, Duration::from_millis(10))?; - let source_directory = package - .source_directory() - .expect("missing source directory"); + let source_directory = package.source_directory(); + watcher.watch(&source_directory, RecursiveMode::Recursive)?; println!("Watching: {}", source_directory.display()); diff --git a/crates/mun_hir/Cargo.toml b/crates/mun_hir/Cargo.toml index eaa02e542..2d7343555 100644 --- a/crates/mun_hir/Cargo.toml +++ b/crates/mun_hir/Cargo.toml @@ -17,9 +17,9 @@ salsa = "0.15.0" superslice = "1.0" mun_syntax = { version = "=0.2.0", path = "../mun_syntax" } mun_target = { version = "=0.2.0", path = "../mun_target" } +paths = {path="../mun_paths", package="mun_paths"} rustc-hash = "1.1" once_cell = "1.4.0" -relative-path = "1.2" ena = "0.14" drop_bomb = "0.1.4" either = "1.5.3" diff --git a/crates/mun_hir/src/code_model/package.rs b/crates/mun_hir/src/code_model/package.rs index 5959667f6..7b2dd15ce 100644 --- a/crates/mun_hir/src/code_model/package.rs +++ b/crates/mun_hir/src/code_model/package.rs @@ -1,6 +1,5 @@ use super::Module; -use crate::ids::PackageId; -use crate::{HirDatabase, ModuleId}; +use crate::{HirDatabase, ModuleId, PackageId}; /// A `Package` 
describes a single package. #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] @@ -10,10 +9,8 @@ pub struct Package { impl Package { /// Returns all the packages defined in the database - pub fn all(_db: &dyn HirDatabase) -> Vec { - // TODO: Currently we assume there is only a single package with ID 0. This has to be - // implemented when have multiple packages. See CrateGraph in rust-analyzer. - vec![Package { id: PackageId(0) }] + pub fn all(db: &dyn HirDatabase) -> Vec { + db.packages().iter().map(|id| Package { id }).collect() } /// Returns the root module of the package (represented by the `mod.rs` in the source root) diff --git a/crates/mun_hir/src/db.rs b/crates/mun_hir/src/db.rs index c80c8eeaa..1129bf56c 100644 --- a/crates/mun_hir/src/db.rs +++ b/crates/mun_hir/src/db.rs @@ -1,7 +1,7 @@ #![allow(clippy::type_repetition_in_bounds)] use crate::expr::BodySourceMap; -use crate::ids::{DefWithBodyId, FunctionId, PackageId}; +use crate::ids::{DefWithBodyId, FunctionId}; use crate::input::{SourceRoot, SourceRootId}; use crate::item_tree::{self, ItemTree}; use crate::module_tree::ModuleTree; @@ -14,12 +14,12 @@ use crate::{ ids, line_index::LineIndex, ty::InferenceResult, - AstIdMap, Body, ExprScopes, FileId, Struct, TypeAlias, + AstIdMap, Body, ExprScopes, FileId, PackageId, PackageSet, Struct, TypeAlias, }; use mun_syntax::{ast, Parse, SourceFile}; use mun_target::abi; use mun_target::spec::Target; -pub use relative_path::RelativePathBuf; +use paths::RelativePathBuf; use std::sync::Arc; // TODO(bas): In the future maybe move this to a seperate crate (mun_db?) @@ -34,22 +34,17 @@ pub trait SourceDatabase: salsa::Database { #[salsa::input] fn file_text(&self, file_id: FileId) -> Arc; - /// Path to a file, relative to the root of its source root. - #[salsa::input] - fn file_relative_path(&self, file_id: FileId) -> RelativePathBuf; - /// Source root of a file #[salsa::input] fn file_source_root(&self, file_id: FileId) -> SourceRootId; + /// Returns the relative path of a file + fn file_relative_path(&self, file_id: FileId) -> RelativePathBuf; + /// Contents of the source root #[salsa::input] fn source_root(&self, id: SourceRootId) -> Arc; - /// The source root for a specific package - #[salsa::input] - fn package_source_root(&self, package: PackageId) -> SourceRootId; - /// For a package, returns its hierarchy of modules. 
#[salsa::invoke(ModuleTree::module_tree_query)] fn module_tree(&self, package: PackageId) -> Arc; @@ -57,6 +52,10 @@ pub trait SourceDatabase: salsa::Database { /// Returns the line index of a file #[salsa::invoke(line_index_query)] fn line_index(&self, file_id: FileId) -> Arc; + + /// Returns the set of packages + #[salsa::input] + fn packages(&self) -> Arc; } /// The `AstDatabase` provides queries that transform text from the `SourceDatabase` into an @@ -158,3 +157,9 @@ fn target_data_layout(db: &dyn HirDatabase) -> Arc { .expect("unable to create TargetDataLayout from target"); Arc::new(data_layout) } + +fn file_relative_path(db: &dyn SourceDatabase, file_id: FileId) -> RelativePathBuf { + let source_root_id = db.file_source_root(file_id); + let source_root = db.source_root(source_root_id); + source_root.relative_path(file_id).to_relative_path_buf() +} diff --git a/crates/mun_hir/src/fixture.rs b/crates/mun_hir/src/fixture.rs index 2d4f3f269..da6a4db6d 100644 --- a/crates/mun_hir/src/fixture.rs +++ b/crates/mun_hir/src/fixture.rs @@ -1,10 +1,8 @@ #![cfg(test)] -use crate::ids::PackageId; -use crate::{FileId, SourceDatabase, SourceRoot, SourceRootId}; +use crate::{FileId, PackageSet, SourceDatabase, SourceRoot, SourceRootId}; pub use mun_test::Fixture; -use std::convert::TryInto; -use std::sync::Arc; +use std::{convert::TryInto, sync::Arc}; impl WithFixture for DB {} @@ -32,20 +30,21 @@ fn with_files(db: &mut dyn SourceDatabase, fixture: &str) -> Vec { let mut source_root = SourceRoot::default(); let source_root_id = SourceRootId(0); - let package_id = PackageId(0); let mut files = Vec::new(); for (idx, entry) in fixture.into_iter().enumerate() { let file_id = FileId(idx.try_into().expect("too many files")); - db.set_file_relative_path(file_id, entry.relative_path); db.set_file_text(file_id, Arc::from(entry.text)); db.set_file_source_root(file_id, source_root_id); - source_root.insert_file(file_id); + source_root.insert_file(file_id, entry.relative_path); files.push(file_id); } db.set_source_root(source_root_id, Arc::new(source_root)); - db.set_package_source_root(package_id, source_root_id); + + let mut packages = PackageSet::default(); + packages.add_package(source_root_id); + db.set_packages(Arc::new(packages)); return files; } diff --git a/crates/mun_hir/src/ids.rs b/crates/mun_hir/src/ids.rs index e64c2c0c8..d8316d0df 100644 --- a/crates/mun_hir/src/ids.rs +++ b/crates/mun_hir/src/ids.rs @@ -1,7 +1,9 @@ -use crate::item_tree::{Function, ItemTreeId, ItemTreeNode, Struct, TypeAlias}; -use crate::module_tree::LocalModuleId; -use crate::primitive_type::PrimitiveType; -use crate::DefDatabase; +use crate::{ + item_tree::{Function, ItemTreeId, ItemTreeNode, Struct, TypeAlias}, + module_tree::LocalModuleId, + primitive_type::PrimitiveType, + DefDatabase, PackageId, +}; use std::hash::{Hash, Hasher}; #[derive(Debug)] @@ -94,11 +96,6 @@ macro_rules! impl_intern { }; } -/// Represents the id of a single package, all packages have a unique id, the main package and all -/// dependent packages. -#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] -pub struct PackageId(pub u32); - /// Represents an id of a module inside a package. 
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)] pub struct ModuleId { diff --git a/crates/mun_hir/src/input.rs b/crates/mun_hir/src/input.rs index 2e422d9f9..128ee3d99 100644 --- a/crates/mun_hir/src/input.rs +++ b/crates/mun_hir/src/input.rs @@ -1,4 +1,5 @@ -use std::collections::HashSet; +use paths::{RelativePath, RelativePathBuf}; +use rustc_hash::FxHashMap; /// `FileId` is an integer which uniquely identifies a file. File paths are messy and /// system-dependent, so most of the code should work directly with `FileId`, without inspecting the @@ -20,20 +21,21 @@ pub struct SourceRootId(pub u32); #[derive(Default, Clone, Debug, PartialEq, Eq)] pub struct SourceRoot { - files: HashSet, + files: FxHashMap, } impl SourceRoot { - pub fn new() -> SourceRoot { - Default::default() + pub fn insert_file(&mut self, file_id: FileId, path: impl AsRef) { + self.files + .insert(file_id, path.as_ref().to_relative_path_buf()); } - pub fn insert_file(&mut self, file_id: FileId) { - self.files.insert(file_id); + pub fn remove_file(&mut self, file_id: FileId) -> bool { + self.files.remove(&file_id).is_some() } - pub fn remove_file(&mut self, file_id: FileId) { - self.files.remove(&file_id); + pub fn relative_path(&self, file_id: FileId) -> &RelativePath { + &self.files[&file_id] } pub fn files(&self) -> impl Iterator + '_ { - self.files.iter().copied() + self.files.keys().copied() } } diff --git a/crates/mun_hir/src/lib.rs b/crates/mun_hir/src/lib.rs index 27669309a..b440c3fb0 100644 --- a/crates/mun_hir/src/lib.rs +++ b/crates/mun_hir/src/lib.rs @@ -35,14 +35,13 @@ mod item_scope; #[cfg(test)] mod mock; mod package_defs; +mod package_set; #[cfg(test)] mod tests; mod visibility; pub use salsa; -pub use relative_path::{RelativePath, RelativePathBuf}; - pub use crate::{ db::{ AstDatabase, AstDatabaseStorage, DefDatabase, DefDatabaseStorage, HirDatabase, @@ -55,11 +54,12 @@ pub use crate::{ ArithOp, BinaryOp, Body, CmpOp, Expr, ExprId, ExprScopes, Literal, LogicOp, Ordering, Pat, PatId, RecordLitField, Statement, UnaryOp, }, - ids::{ItemLoc, ModuleId, PackageId}, + ids::{ItemLoc, ModuleId}, in_file::InFile, input::{FileId, SourceRoot, SourceRootId}, name::Name, name_resolution::PerNs, + package_set::{PackageId, PackageSet}, path::{Path, PathKind}, primitive_type::{FloatBitness, IntBitness, Signedness}, resolve::{resolver_for_expr, resolver_for_scope, Resolver, TypeNs, ValueNs}, diff --git a/crates/mun_hir/src/module_tree.rs b/crates/mun_hir/src/module_tree.rs index f8e1be50c..a18bec0f4 100644 --- a/crates/mun_hir/src/module_tree.rs +++ b/crates/mun_hir/src/module_tree.rs @@ -1,12 +1,12 @@ use crate::{ arena::{Arena, Idx}, - ids::{ModuleId, PackageId}, + ids::ModuleId, module_tree::diagnostics::ModuleTreeDiagnostic, visibility::RawVisibility, - DefDatabase, FileId, Name, SourceDatabase, Visibility, + DefDatabase, FileId, Name, PackageId, SourceDatabase, Visibility, }; use itertools::Itertools; -use relative_path::RelativePath; +use paths::RelativePath; use rustc_hash::FxHashMap; use std::sync::Arc; @@ -50,7 +50,7 @@ impl ModuleTree { let mut diagnostics = Vec::new(); // Get the sources for the package - let source_root_id = db.package_source_root(package); + let source_root_id = db.packages().as_ref()[package].source_root; let source_root = db.source_root(source_root_id); let mut modules = Arena::default(); @@ -59,7 +59,7 @@ impl ModuleTree { // Iterate over all files and add them to the module tree for (file_id, relative_path) in source_root .files() - .map(|file_id| (file_id, 
db.file_relative_path(file_id))) + .map(|file_id| (file_id, source_root.relative_path(file_id))) .sorted_by(|(_, a), (_, b)| a.cmp(b)) { // Iterate over all segments of the relative path and construct modules on the way diff --git a/crates/mun_hir/src/name_resolution/path_resolution.rs b/crates/mun_hir/src/name_resolution/path_resolution.rs index da60ee4df..5c9932351 100644 --- a/crates/mun_hir/src/name_resolution/path_resolution.rs +++ b/crates/mun_hir/src/name_resolution/path_resolution.rs @@ -1,9 +1,11 @@ -use crate::ids::{ItemDefinitionId, ModuleId, PackageId}; -use crate::item_scope::BUILTIN_SCOPE; -use crate::module_tree::LocalModuleId; -use crate::package_defs::PackageDefs; -use crate::{DefDatabase, Name, Path, PathKind, PerNs, Visibility}; -use itertools::__std_iter::successors; +use crate::{ + ids::{ItemDefinitionId, ModuleId}, + item_scope::BUILTIN_SCOPE, + module_tree::LocalModuleId, + package_defs::PackageDefs, + DefDatabase, Name, PackageId, Path, PathKind, PerNs, Visibility, +}; +use std::iter::successors; #[derive(Debug, Clone, Copy, PartialEq, Eq)] pub(super) enum ReachedFixedPoint { diff --git a/crates/mun_hir/src/package_defs.rs b/crates/mun_hir/src/package_defs.rs index 7aafb6e84..c45effca4 100644 --- a/crates/mun_hir/src/package_defs.rs +++ b/crates/mun_hir/src/package_defs.rs @@ -1,8 +1,8 @@ mod collector; use crate::{ - arena::map::ArenaMap, ids::PackageId, item_scope::ItemScope, module_tree::LocalModuleId, - module_tree::ModuleTree, DefDatabase, + arena::map::ArenaMap, item_scope::ItemScope, module_tree::LocalModuleId, + module_tree::ModuleTree, DefDatabase, PackageId, }; use std::{ops::Index, sync::Arc}; diff --git a/crates/mun_hir/src/package_defs/collector.rs b/crates/mun_hir/src/package_defs/collector.rs index e16a81ffc..dad03ad4a 100644 --- a/crates/mun_hir/src/package_defs/collector.rs +++ b/crates/mun_hir/src/package_defs/collector.rs @@ -1,14 +1,14 @@ use super::PackageDefs; use crate::{ arena::map::ArenaMap, - ids::{FunctionLoc, Intern, ItemDefinitionId, ModuleId, PackageId, StructLoc, TypeAliasLoc}, + ids::{FunctionLoc, Intern, ItemDefinitionId, ModuleId, StructLoc, TypeAliasLoc}, item_scope::ItemScope, item_tree::{ Function, ItemTree, ItemTreeId, LocalItemTreeId, ModItem, Struct, StructDefKind, TypeAlias, }, module_tree::{LocalModuleId, ModuleTree}, visibility::RawVisibility, - DefDatabase, FileId, Name, PerNs, Visibility, + DefDatabase, FileId, Name, PackageId, PerNs, Visibility, }; use std::sync::Arc; diff --git a/crates/mun_hir/src/package_set.rs b/crates/mun_hir/src/package_set.rs new file mode 100644 index 000000000..3d02e449c --- /dev/null +++ b/crates/mun_hir/src/package_set.rs @@ -0,0 +1,44 @@ +use crate::SourceRootId; +use rustc_hash::FxHashMap; +use std::ops::Index; + +/// Information regarding a package +#[derive(Debug, Clone)] +pub struct PackageData { + /// The source root that holds the source files + pub source_root: SourceRootId, +} + +/// Represents the id of a single package, all packages have a unique id, the main package and all +/// dependent packages. 
+#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] +pub struct PackageId(pub u32); + +/// Represents information about a set of packages in a compilation +#[derive(Debug, Clone, Default)] +pub struct PackageSet { + arena: FxHashMap, +} + +impl PackageSet { + /// Adds a new package to the package set + pub fn add_package(&mut self, source_root: SourceRootId) -> PackageId { + let data = PackageData { source_root }; + let package_id = PackageId(self.arena.len() as u32); + self.arena.insert(package_id, data); + package_id + } + + /// Iterates over all packages + pub fn iter(&self) -> impl Iterator + '_ { + self.arena.keys().copied() + } +} + +impl Index for PackageSet { + type Output = PackageData; + + fn index(&self, index: PackageId) -> &Self::Output { + &self.arena[&index] + } +} diff --git a/crates/mun_language_server/Cargo.toml b/crates/mun_language_server/Cargo.toml index 0a2bf6dad..0fb4fe8f3 100644 --- a/crates/mun_language_server/Cargo.toml +++ b/crates/mun_language_server/Cargo.toml @@ -15,6 +15,7 @@ categories = ["game-development", "mun"] # See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html [dependencies] +rustc-hash="1.1.0" lsp-types = "0.74" log = "0.4" serde = "1.0" @@ -24,11 +25,17 @@ async-std = "1.6" futures = "0.3" anyhow = "1.0" thiserror = "1.0" -ra_vfs = "0.6.1" salsa = "0.15.0" hir = { version = "=0.2.0", path="../mun_hir", package="mun_hir" } rayon = "1.3" num_cpus = "1.13.0" +vfs = { path = "../mun_vfs", package="mun_vfs" } +project = { path = "../mun_project", package="mun_project" } mun_target = { version = "=0.2.0", path = "../mun_target" } mun_syntax = { version = "=0.2.0", path = "../mun_syntax" } mun_diagnostics = { version = "=0.1.0", path = "../mun_diagnostics" } +crossbeam-channel = "0.5.0" +paths = {path="../mun_paths", package="mun_paths"} + +[dev-dependencies] +tempdir = "0.3.7" diff --git a/crates/mun_language_server/src/analysis.rs b/crates/mun_language_server/src/analysis.rs index 8e344a08d..9a8aa282d 100644 --- a/crates/mun_language_server/src/analysis.rs +++ b/crates/mun_language_server/src/analysis.rs @@ -55,13 +55,11 @@ impl AnalysisSnapshot { self.with_db(|db| diagnostics::diagnostics(db, file_id)) } - /// Returns all the files in the given source root - pub fn source_root_files( - &self, - source_root: hir::SourceRootId, - ) -> Cancelable> { + /// Returns all the source files of the given package + pub fn package_source_files(&self, package_id: hir::PackageId) -> Cancelable> { self.with_db(|db| { - let source_root = db.source_root(source_root); + let packages = db.packages(); + let source_root = db.source_root(packages[package_id].source_root); source_root.files().collect() }) } diff --git a/crates/mun_language_server/src/change.rs b/crates/mun_language_server/src/change.rs index b2826c19d..d9d1a9137 100644 --- a/crates/mun_language_server/src/change.rs +++ b/crates/mun_language_server/src/change.rs @@ -1,31 +1,13 @@ use crate::db::AnalysisDatabase; use hir::SourceDatabase; -use std::collections::HashMap; -use std::fmt; use std::sync::Arc; /// Represents an atomic change to the state of the `Analysis` #[derive(Default)] pub struct AnalysisChange { - new_roots: Vec<(hir::SourceRootId, hir::PackageId)>, - roots_changed: HashMap, - files_changed: Vec<(hir::FileId, Arc)>, -} - -impl fmt::Debug for AnalysisChange { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - let mut d = fmt.debug_struct("AnalysisChange"); - if !self.new_roots.is_empty() { - d.field("new_roots", 
&self.new_roots); - } - if !self.roots_changed.is_empty() { - d.field("roots_changed", &self.roots_changed); - } - if !self.files_changed.is_empty() { - d.field("files_changed", &self.files_changed.len()); - } - d.finish() - } + packages: Option, + roots: Option>, + files_changed: Vec<(hir::FileId, Option>)>, } impl AnalysisChange { @@ -34,110 +16,44 @@ impl AnalysisChange { AnalysisChange::default() } - /// Records the addition of a new root - pub fn add_root(&mut self, root_id: hir::SourceRootId, package_id: hir::PackageId) { - self.new_roots.push((root_id, package_id)); + /// Sets the packages + pub fn set_packages(&mut self, packages: hir::PackageSet) { + self.packages = Some(packages) } - /// Records the addition of a new file to a root - pub fn add_file( - &mut self, - root_id: hir::SourceRootId, - file_id: hir::FileId, - path: hir::RelativePathBuf, - text: Arc, - ) { - let file = AddFile { - file_id, - path, - text, - }; - self.roots_changed - .entry(root_id) - .or_default() - .added - .push(file); + /// Records the addition of a new root + pub fn set_roots(&mut self, roots: Vec) { + self.roots = Some(roots) } /// Records the change of content of a specific file - pub fn change_file(&mut self, file_id: hir::FileId, new_text: Arc) { + pub fn change_file(&mut self, file_id: hir::FileId, new_text: Option>) { self.files_changed.push((file_id, new_text)) } - - /// Records the removal of a file from a root - pub fn remove_file( - &mut self, - root_id: hir::SourceRootId, - file_id: hir::FileId, - path: hir::RelativePathBuf, - ) { - let file = RemoveFile { file_id, path }; - self.roots_changed - .entry(root_id) - .or_default() - .removed - .push(file); - } -} - -/// Represents the addition of a file to a source root. -#[derive(Debug)] -struct AddFile { - file_id: hir::FileId, - path: hir::RelativePathBuf, - text: Arc, -} - -/// Represents the removal of a file from a source root. -#[derive(Debug)] -struct RemoveFile { - file_id: hir::FileId, - path: hir::RelativePathBuf, -} - -/// Represents the changes to a source root. 
-#[derive(Default)] -struct RootChange { - added: Vec, - removed: Vec, -} - -impl fmt::Debug for RootChange { - fn fmt(&self, fmt: &mut fmt::Formatter) -> fmt::Result { - fmt.debug_struct("RootChange") - .field("added", &self.added.len()) - .field("removed", &self.removed.len()) - .finish() - } } impl AnalysisDatabase { /// Applies the specified change to the database pub(crate) fn apply_change(&mut self, change: AnalysisChange) { - // Add new source roots - for (root_id, package_id) in change.new_roots { - let root = hir::SourceRoot::new(); - self.set_source_root(root_id, Arc::new(root)); - self.set_package_source_root(package_id, root_id); + // Add new package set + if let Some(package_set) = change.packages { + self.set_packages(Arc::new(package_set)) } - // Modify existing source roots - for (root_id, root_change) in change.roots_changed { - let mut source_root = hir::SourceRoot::clone(&self.source_root(root_id)); - for add_file in root_change.added { - self.set_file_text(add_file.file_id, add_file.text); - self.set_file_relative_path(add_file.file_id, add_file.path.clone()); - source_root.insert_file(add_file.file_id) - } - for remove_file in root_change.removed { - self.set_file_text(remove_file.file_id, Arc::from("")); - source_root.remove_file(remove_file.file_id); + // Modify the source roots + if let Some(roots) = change.roots { + for (idx, root) in roots.into_iter().enumerate() { + let root_id = hir::SourceRootId(idx as u32); + for file_id in root.files() { + self.set_file_source_root(file_id, root_id); + } + self.set_source_root(root_id, Arc::new(root)); } - self.set_source_root(root_id, Arc::new(source_root)); } // Update changed files for (file_id, text) in change.files_changed { + let text = text.unwrap_or_else(|| Arc::from("".to_owned())); self.set_file_text(file_id, text) } } diff --git a/crates/mun_language_server/src/config.rs b/crates/mun_language_server/src/config.rs index 1a1c23902..ab3b2af48 100644 --- a/crates/mun_language_server/src/config.rs +++ b/crates/mun_language_server/src/config.rs @@ -1,17 +1,25 @@ -use std::path::PathBuf; +use crate::project_manifest::ProjectManifest; +use paths::AbsPathBuf; /// The configuration used by the language server. #[derive(Debug, Clone)] pub struct Config { pub watcher: FilesWatcher, - pub workspace_roots: Vec, + + /// The root directory of the workspace + pub root_dir: AbsPathBuf, + + /// A collection of projects discovered within the workspace + pub discovered_projects: Option>, } -impl Default for Config { - fn default() -> Self { +impl Config { + /// Constructs a new instance of a `Config` + pub fn new(root_path: AbsPathBuf) -> Self { Self { watcher: FilesWatcher::Notify, - workspace_roots: Vec::new(), + root_dir: root_path, + discovered_projects: None, } } } diff --git a/crates/mun_language_server/src/conversion.rs b/crates/mun_language_server/src/conversion.rs index ba6646bc2..b51f57289 100644 --- a/crates/mun_language_server/src/conversion.rs +++ b/crates/mun_language_server/src/conversion.rs @@ -1,7 +1,11 @@ use lsp_types::Url; use mun_syntax::{TextRange, TextUnit}; -use std::path::{Component, Path, Prefix}; -use std::str::FromStr; +use paths::AbsPathBuf; +use std::{ + convert::TryFrom, + path::{Component, Path, Prefix}, + str::FromStr, +}; /// Returns a `Url` object from a given path, will lowercase drive letters if present. /// This will only happen when processing Windows paths. 
@@ -63,3 +67,10 @@ pub fn convert_unit( character: line_col.col.into(), } } + +pub fn convert_uri(uri: &Url) -> anyhow::Result { + uri.to_file_path() + .ok() + .and_then(|path| AbsPathBuf::try_from(path).ok()) + .ok_or_else(|| anyhow::anyhow!("invalid uri: {}", uri)) +} diff --git a/crates/mun_language_server/src/lib.rs b/crates/mun_language_server/src/lib.rs index 23788a7cf..f3819d951 100644 --- a/crates/mun_language_server/src/lib.rs +++ b/crates/mun_language_server/src/lib.rs @@ -7,14 +7,17 @@ mod conversion; mod db; mod diagnostics; mod main_loop; +mod project_manifest; pub mod protocol; +mod workspace; pub use config::Config; pub use main_loop::main_loop; -use crate::config::FilesWatcher; -use serde::de::DeserializeOwned; -use serde::Serialize; +use crate::{config::FilesWatcher, project_manifest::ProjectManifest}; +use paths::AbsPathBuf; +use serde::{de::DeserializeOwned, Serialize}; +use std::convert::TryFrom; pub type Result = anyhow::Result; @@ -67,8 +70,24 @@ pub async fn run_server_async() -> Result<()> { } let config = { - let mut config = Config::default(); - + // Convert the root uri to a PathBuf + let root_dir = match initialize_params + .root_uri + .and_then(|it| it.to_file_path().ok()) + .and_then(|path| AbsPathBuf::try_from(path).ok()) + { + Some(path) => path, + None => { + // Get the current working directory as fallback + let cwd = std::env::current_dir()?; + AbsPathBuf::try_from(cwd) + .expect("could not convert current directory to an absolute path") + } + }; + + let mut config = Config::new(root_dir); + + // Determine type of watcher to use let supports_file_watcher_dynamic_registration = initialize_params .capabilities .workspace @@ -79,24 +98,26 @@ pub async fn run_server_async() -> Result<()> { config.watcher = FilesWatcher::Client; } - // Get the current working directory as fallback - let cwd = std::env::current_dir()?; - // Convert the root uri to a PathBuf - let root = initialize_params - .root_uri - .and_then(|it| it.to_file_path().ok()) - .unwrap_or(cwd); // Convert the workspace_roots, if these are empy use the root_uri or the cwd - config.workspace_roots = initialize_params + let workspace_roots = initialize_params .workspace_folders .map(|workspaces| { workspaces .into_iter() .filter_map(|it| it.uri.to_file_path().ok()) + .filter_map(|path| AbsPathBuf::try_from(path).ok()) .collect::>() }) .filter(|workspaces| !workspaces.is_empty()) - .unwrap_or_else(|| vec![root]); + .unwrap_or_else(|| vec![config.root_dir.clone()]); + + // Find all the projects in the workspace + let discovered = ProjectManifest::discover_all(workspace_roots.iter().cloned()); + log::info!("discovered projects: {:?}", discovered); + if discovered.is_empty() { + log::error!("failed to find any projects in {:?}", workspace_roots); + } + config.discovered_projects = Some(discovered); config }; diff --git a/crates/mun_language_server/src/main_loop.rs b/crates/mun_language_server/src/main_loop.rs index a95cc4206..96022fe6f 100644 --- a/crates/mun_language_server/src/main_loop.rs +++ b/crates/mun_language_server/src/main_loop.rs @@ -1,20 +1,22 @@ -use crate::analysis::{Analysis, AnalysisSnapshot, Cancelable}; -use crate::change::AnalysisChange; -use crate::config::{Config, FilesWatcher}; -use crate::conversion::{convert_range, url_from_path_with_drive_lowercasing}; -use crate::protocol::{Connection, Message, Notification, Request, RequestId}; -use crate::Result; -use anyhow::anyhow; +use crate::{ + analysis::{Analysis, AnalysisSnapshot, Cancelable}, + change::AnalysisChange, + config::Config, 
+ conversion::{convert_range, convert_uri, url_from_path_with_drive_lowercasing}, + protocol::{Connection, Message, Notification, Request, RequestId}, + Result, +}; use async_std::sync::RwLock; -use futures::channel::mpsc::{unbounded, Sender, UnboundedReceiver, UnboundedSender}; -use futures::{SinkExt, StreamExt}; -use lsp_types::notification::PublishDiagnostics; -use lsp_types::{PublishDiagnosticsParams, Url}; -use ra_vfs::{RootEntry, Vfs, VfsChange, VfsFile}; +use futures::{ + channel::mpsc::{unbounded, UnboundedReceiver, UnboundedSender}, + SinkExt, StreamExt, +}; +use lsp_types::{notification::PublishDiagnostics, PublishDiagnosticsParams, Url}; +use paths::AbsPathBuf; +use rustc_hash::FxHashSet; use serde::{de::DeserializeOwned, Serialize}; -use std::collections::HashSet; -use std::ops::Deref; -use std::sync::Arc; +use std::{cell::RefCell, collections::HashSet, ops::Deref, sync::Arc}; +use vfs::VirtualFileSystem; /// A `Task` is something that is send from async tasks to the entry point for processing. This /// enables synchronizing resources like the connection with the client. @@ -26,41 +28,56 @@ enum Task { #[derive(Debug)] enum Event { Msg(Message), - Vfs(ra_vfs::VfsTask), + Vfs(vfs::MonitorMessage), Task(Task), } /// State for the language server -struct LanguageServerState { - /// Interface to the vfs, a virtual filesystem that supports overlaying of files - pub vfs: Arc>, +pub(crate) struct LanguageServerState { + /// The connection with the client + pub connection: ConnectionState, - /// Receiver channel to apply filesystem changes on `vfs` - pub vfs_task_receiver: UnboundedReceiver, + /// The configuration passed by the client + pub config: Config, + + /// Thread pool for async execution + pub thread_pool: rayon::ThreadPool, + + /// The virtual filesystem that holds all the file contents + pub vfs: Arc>, + + /// The vfs monitor + pub vfs_monitor: Box, + + /// The receiver of vfs monitor messages + pub vfs_monitor_receiver: UnboundedReceiver, + + /// Documents that are currently kept in memory from the client + pub open_docs: FxHashSet, /// Holds the state of the analysis process pub analysis: Analysis, - /// All the roots in the workspace - pub local_source_roots: Vec, + /// All the packages known to the server + pub packages: Arc>, } /// A snapshot of the state of the language server -struct LanguageServerSnapshot { - /// Interface to the vfs, a virtual filesystem that supports overlaying of files - pub vfs: Arc>, +pub(crate) struct LanguageServerSnapshot { + /// The virtual filesystem that holds all the file contents + pub vfs: Arc>, /// Holds the state of the analysis process pub analysis: AnalysisSnapshot, - /// All the roots in the workspace - pub local_source_roots: Vec, + /// All the packages known to the server + pub packages: Arc>, } /// State maintained for the connection. This includes everything that is required to be able to /// properly communicate with the client but has nothing to do with any Mun related state. 
-struct ConnectionState { - connection: Connection, +pub(crate) struct ConnectionState { + pub(crate) connection: Connection, next_request_id: u64, pending_responses: HashSet, @@ -86,144 +103,86 @@ impl ConnectionState { } } -/// Filter used to choose which files the ra_vfs should ignore -struct MunFilter {} - -/// Implement the filter provided by ra_vfs -impl ra_vfs::Filter for MunFilter { - fn include_dir(&self, _dir_path: &ra_vfs::RelativePath) -> bool { - true - } - - fn include_file(&self, file_path: &ra_vfs::RelativePath) -> bool { - file_path.extension() == Some("mun") - } -} - impl LanguageServerState { - pub fn new(config: Config) -> Self { - // Create a channel for use by the vfs - let (task_sender, task_receiver) = futures::channel::mpsc::unbounded(); - - // Create the vfs - let task_sender = Box::new(move |t| task_sender.unbounded_send(t).unwrap()); - let vfs = Vfs::new( - config - .workspace_roots - .into_iter() - .map(|root| RootEntry::new(root.join("src"), Box::new(MunFilter {}))) - .collect(), - task_sender, - ra_vfs::Watch(config.watcher == FilesWatcher::Notify), - ); + pub fn new(connection: Connection, config: Config) -> Self { + // Construct the virtual filesystem monitor + let (vfs_monitor_sender, vfs_monitor_receiver) = unbounded::(); + let vfs_monitor_sender = RefCell::new(vfs_monitor_sender); + let vfs_monitor: vfs::NotifyMonitor = vfs::Monitor::new(Box::new(move |msg| { + async_std::task::block_on(vfs_monitor_sender.borrow_mut().send(msg)) + .expect("error sending vfs monitor message to foreground") + })); + let vfs_monitor = Box::new(vfs_monitor) as Box; + + // Create a thread pool to dispatch the async commands + // Use the num_cpus to get a nice thread count estimation + let thread_pool = rayon::ThreadPoolBuilder::new() + .num_threads(num_cpus::get()) + .build() + .expect("unable to spin up thread pool"); // Apply the initial changes - let mut source_roots = Vec::new(); let mut change = AnalysisChange::new(); - for root in vfs.1.iter() { - change.add_root(hir::SourceRootId(root.0), hir::PackageId(root.0)); - source_roots.push(hir::SourceRootId(root.0)); - } + change.set_packages(Default::default()); + change.set_roots(Default::default()); // Construct the state that will hold all the analysis let mut analysis = Analysis::new(); analysis.apply_change(change); LanguageServerState { - vfs: Arc::new(RwLock::new(vfs.0)), - vfs_task_receiver: task_receiver, + connection: ConnectionState::new(connection), + config, + vfs: Arc::new(RwLock::new(Default::default())), + vfs_monitor, + vfs_monitor_receiver, + open_docs: FxHashSet::default(), + thread_pool, analysis, - local_source_roots: source_roots, + packages: Arc::new(Vec::new()), } } -} - -/// Registers file watchers with the client to monitor all mun files in the workspaces -async fn register_client_file_watcher(connection_state: &mut ConnectionState, config: &Config) { - let registration_options = lsp_types::DidChangeWatchedFilesRegistrationOptions { - watchers: config - .workspace_roots - .iter() - .map(|root| format!("{}/**/*.mun", root.display())) - .map(|glob_pattern| lsp_types::FileSystemWatcher { - glob_pattern, - kind: None, - }) - .collect(), - }; - let registration = lsp_types::Registration { - id: "file-watcher".to_string(), - method: "workspace/didChangeWatchedFiles".to_string(), - register_options: Some(serde_json::to_value(registration_options).unwrap()), - }; - let params = lsp_types::RegistrationParams { - registrations: vec![registration], - }; - let request = build_request::( - 
connection_state.next_request_id(), - params, - ); - connection_state - .connection - .sender - .send(request.into()) - .await - .unwrap(); -} - -/// Runs the main loop of the language server. This will receive requests and handle them. -pub async fn main_loop(connection: Connection, config: Config) -> Result<()> { - log::info!("initial config: {:#?}", config); - - // Subscribe with file watchers of the client if enabled - let mut connection_state = ConnectionState::new(connection); - if config.watcher == FilesWatcher::Client { - register_client_file_watcher(&mut connection_state, &config).await - } - // Create a thread pool to dispatch the async commands - // Use the num_cpus to get a nice thread count estimation - let pool = rayon::ThreadPoolBuilder::new() - .num_threads(num_cpus::get()) - .build()?; - - // Create the state for the language server - let mut state = LanguageServerState::new(config); - let (task_sender, mut task_receiver) = unbounded::(); - loop { - // Determine what to do next. This selects from different channels, the first message to - // arrive is returned. If an error occurs on one of the channel the main loop is shutdown - // with an error. - let event = futures::select! { - msg = connection_state.connection.receiver.next() => match msg { - Some(msg) => Event::Msg(msg), - None => return Err(anyhow::anyhow!("client exited without shutdown")), - }, - task = state.vfs_task_receiver.next() => match task { - Some(task) => Event::Vfs(task), - None => return Err(anyhow::anyhow!("vfs has died")), - }, - task = task_receiver.next() => Event::Task(task.unwrap()) - }; + /// Runs the language server to completion + pub async fn run(mut self) -> Result<()> { + // Start by updating the current workspace + self.fetch_workspaces(); + + // Process events as the pass + let (task_sender, mut task_receiver) = futures::channel::mpsc::unbounded::(); + loop { + // Determine what to do next. This selects from different channels, the first message to + // arrive is returned. If an error occurs on one of the channel the main loop is shutdown + // with an error. + let event = futures::select! { + msg = self.connection.connection.receiver.next() => match msg { + Some(msg) => Event::Msg(msg), + None => return Err(anyhow::anyhow!("client exited without shutdown")), + }, + msg = self.vfs_monitor_receiver.next() => match msg { + Some(msg) => Event::Vfs(msg), + None => return Err(anyhow::anyhow!("client exited without shutdown")), + }, + task = task_receiver.next() => Event::Task(task.unwrap()), + }; - // Handle the event - match handle_event( - event, - &task_sender, - &mut connection_state, - &pool, - &mut state, - ) - .await? - { - LoopState::Continue => {} - LoopState::Shutdown => { - break; + // Handle the event + match handle_event(event, &task_sender, &mut self).await? { + LoopState::Continue => {} + LoopState::Shutdown => { + break; + } } } + + Ok(()) } +} - Ok(()) +/// Runs the main loop of the language server. This will receive requests and handle them. +pub async fn main_loop(connection: Connection, config: Config) -> Result<()> { + log::info!("initial config: {:#?}", config); + LanguageServerState::new(connection, config).run().await } /// A `LoopState` enumerator determines the state of the main loop @@ -233,8 +192,13 @@ enum LoopState { } /// Handles a received request -async fn handle_request(request: Request, connection: &mut ConnectionState) -> Result { - if connection.connection.handle_shutdown(&request).await? 
{ +async fn handle_request(request: Request, state: &mut LanguageServerState) -> Result { + if state + .connection + .connection + .handle_shutdown(&request) + .await? + { return Ok(LoopState::Shutdown); }; Ok(LoopState::Continue) @@ -243,26 +207,15 @@ async fn handle_request(request: Request, connection: &mut ConnectionState) -> R /// Handles a received notification async fn on_notification( notification: Notification, - connection: &mut ConnectionState, - state: &LanguageServerState, + state: &mut LanguageServerState, ) -> Result { let notification = // When a a text document is opened match cast_notification::(notification) { Ok(params) => { - // Get the uri - let uri = params.text_document.uri; - // And convert into a file path - let path = uri - .to_file_path() - .map_err(|()| anyhow!("invalid uri: {}", uri))?; - if state - .vfs - .write() - .await - .add_file_overlay(&path, params.text_document.text).is_some() - { - // TODO: Keep track of opened files + if let Ok(path) = convert_uri(¶ms.text_document.uri) { + state.open_docs.insert(path.clone()); + state.vfs.write().await.set_file_contents(&path, Some(params.text_document.text.into_bytes())); } return Ok(LoopState::Continue); } @@ -273,26 +226,22 @@ async fn on_notification( let notification = match cast_notification::(notification) { Ok(params) => { - let uri = params.text_document.uri; - let path = uri - .to_file_path() - .map_err(|()| anyhow!("invalid uri: {}", uri))?; - if state - .vfs - .write() - .await - .remove_file_overlay(path.as_path()) - .is_some() - { - // TODO: Keep track of opened files + if let Ok(path) = convert_uri(¶ms.text_document.uri) { + state.open_docs.remove(&path); + state.vfs_monitor.reload(&path); } let params = lsp_types::PublishDiagnosticsParams { - uri, + uri: params.text_document.uri, diagnostics: Vec::new(), version: None, }; let not = build_notification::(params); - connection.connection.sender.try_send(not.into()).unwrap(); + state + .connection + .connection + .sender + .try_send(not.into()) + .unwrap(); return Ok(LoopState::Continue); } Err(not) => not, @@ -305,23 +254,14 @@ async fn on_notification( text_document, content_changes, } = params; - //let world = state.snapshot(); - //let file_id = from_proto::file_id(&world, &text_document.uri)?; - //let line_index = world.analysis().file_line_index(file_id)?; - let uri = text_document.uri; - let path = uri - .to_file_path() - .map_err(|()| anyhow!("invalid uri: {}", uri))?; - // TODO: I assume that since we are using *FULL* as the support change mode, that get - // the text as a single change - state - .vfs - .write() - .await - .change_file_overlay(&path, |old_text| { - // TODO: Change this to incremental later - *old_text = content_changes.get(0).unwrap().text.clone(); - }); + if let Ok(path) = convert_uri(&text_document.uri) { + let new_content = content_changes.get(0).unwrap().text.clone(); + state + .vfs + .write() + .await + .set_file_contents(&path, Some(new_content.into_bytes())); + } return Ok(LoopState::Continue); } Err(not) => not, @@ -330,13 +270,10 @@ async fn on_notification( let _notification = match cast_notification::(notification) { Ok(params) => { - let mut vfs = state.vfs.write().await; for change in params.changes { - let uri = change.uri; - let path = uri - .to_file_path() - .map_err(|()| anyhow::anyhow!("invalid uri: {}", uri))?; - vfs.notify_changed(path) + if let Ok(path) = convert_uri(&change.uri) { + state.vfs_monitor.reload(&path); + } } return Ok(LoopState::Continue); } @@ -351,26 +288,25 @@ async fn on_notification( 
async fn handle_event( event: Event, task_sender: &UnboundedSender, - connection_state: &mut ConnectionState, - pool: &rayon::ThreadPool, state: &mut LanguageServerState, ) -> Result { log::info!("handling event: {:?}", event); // Process the incoming event let loop_state = match event { - Event::Task(task) => handle_task(task, &mut connection_state.connection.sender).await?, - Event::Msg(msg) => handle_lsp_message(msg, connection_state, state).await?, + Event::Task(task) => handle_task(task, state).await?, + Event::Msg(msg) => handle_lsp_message(msg, state).await?, Event::Vfs(task) => handle_vfs_task(task, state).await?, }; // Process any changes to the vfs let state_changed = state.process_vfs_changes().await; + dbg!(state_changed); if state_changed { let snapshot = state.snapshot(); let task_sender = task_sender.clone(); // Spawn the diagnostics in the threadpool - pool.spawn(move || { + state.thread_pool.spawn(move || { let _result = async_std::task::block_on(handle_diagnostics(snapshot, task_sender)); }); } @@ -383,10 +319,14 @@ async fn handle_diagnostics( state: LanguageServerSnapshot, mut sender: UnboundedSender, ) -> Cancelable<()> { + dbg!(&state.packages); + // Iterate over all files - for root in state.local_source_roots.iter() { + for (idx, _package) in state.packages.iter().enumerate() { + let package_id = hir::PackageId(idx as u32); + // Get all the files - let files = state.analysis.source_root_files(*root)?; + let files = state.analysis.package_source_files(package_id)?; // Publish all diagnostics for file in files { @@ -448,57 +388,66 @@ async fn handle_diagnostics( .unwrap(); } } - Ok(()) } /// Handles a task send by another async task -async fn handle_task(task: Task, sender: &mut Sender) -> Result { +async fn handle_task(task: Task, state: &mut LanguageServerState) -> Result { match task { - Task::Notify(notification) => sender.send(notification.into()).await?, + Task::Notify(notification) => { + state + .connection + .connection + .sender + .send(notification.into()) + .await? + } } Ok(LoopState::Continue) } /// Handles a change to the underlying virtual file system. -async fn handle_vfs_task(task: ra_vfs::VfsTask, state: &LanguageServerState) -> Result { - let mut vfs = state.vfs.write().await; - vfs.handle_task(task); +async fn handle_vfs_task( + mut task: vfs::MonitorMessage, + state: &mut LanguageServerState, +) -> Result { + loop { + match task { + vfs::MonitorMessage::Progress { .. } => {} + vfs::MonitorMessage::Loaded { files } => { + let vfs = &mut *state.vfs.write().await; + for (path, contents) in files { + vfs.set_file_contents(&path, contents); + } + } + } + + // Coalesce many VFS events into a single loop turn + task = match state.vfs_monitor_receiver.try_next() { + Ok(Some(task)) => task, + _ => break, + } + } Ok(LoopState::Continue) } /// Handles an incoming message via the language server protocol. 
-async fn handle_lsp_message( - msg: Message, - connection_state: &mut ConnectionState, - state: &LanguageServerState, -) -> Result { +async fn handle_lsp_message(msg: Message, state: &mut LanguageServerState) -> Result { match msg { - Message::Request(req) => handle_request(req, connection_state).await, + Message::Request(req) => handle_request(req, state).await, Message::Response(response) => { - let removed = connection_state.pending_responses.remove(&response.id); + let removed = state.connection.pending_responses.remove(&response.id); if !removed { log::error!("unexpected response: {:?}", response) } Ok(LoopState::Continue) } - Message::Notification(notification) => { - on_notification(notification, connection_state, state).await - } + Message::Notification(notification) => on_notification(notification, state).await, } } -/// Constructs a new request with the generic type R and the given parameters. -fn build_request(id: RequestId, params: R::Params) -> Request -where - R: lsp_types::request::Request, - R::Params: Serialize, -{ - Request::new(id, R::METHOD.to_string(), params) -} - /// Constructs a new notification with the specified parameters. fn build_notification(params: N::Params) -> Notification where @@ -517,13 +466,25 @@ where notification.try_extract(N::METHOD) } +impl LanguageServerState { + /// Sends a new request to the client + pub fn send_request(&mut self, params: R::Params) { + let request = Request::new( + self.connection.next_request_id(), + R::METHOD.to_string(), + params, + ); + async_std::task::block_on(self.connection.connection.sender.send(request.into())).unwrap(); + } +} + impl LanguageServerState { /// Creates a snapshot of the state pub fn snapshot(&self) -> LanguageServerSnapshot { LanguageServerSnapshot { - analysis: self.analysis.snapshot(), - local_source_roots: self.local_source_roots.clone(), vfs: self.vfs.clone(), + analysis: self.analysis.snapshot(), + packages: self.packages.clone(), } } @@ -532,47 +493,42 @@ impl LanguageServerState { /// otherwise false. pub async fn process_vfs_changes(&mut self) -> bool { // Get all the changes since the last time we processed - let changes = self.vfs.write().await.commit_changes(); - if changes.is_empty() { + let changed_files = { + let mut vfs = self.vfs.write().await; + vfs.take_changes() + }; + if changed_files.is_empty() { return false; } - // Construct an AnalysisChange to apply + // Construct an AnalysisChange to apply to the analysis + let vfs = self.vfs.read().await; let mut analysis_change = AnalysisChange::new(); - for change in changes { - match change { - VfsChange::AddRoot { root, files } => { - for (file, path, text) in files { - analysis_change.add_file( - hir::SourceRootId(root.0), - hir::FileId(file.0), - path, - Arc::from(text.to_string()), - ); - } - } - VfsChange::AddFile { - root, - file, - path, - text, - } => { - analysis_change.add_file( - hir::SourceRootId(root.0), - hir::FileId(file.0), - path, - Arc::from(text.to_string()), - ); - } - VfsChange::RemoveFile { root, file, path } => analysis_change.remove_file( - hir::SourceRootId(root.0), - hir::FileId(file.0), - path, - ), - VfsChange::ChangeFile { file, text } => { - analysis_change.change_file(hir::FileId(file.0), Arc::from(text.to_string())); - } + let mut has_created_or_deleted_entries = false; + for file in changed_files { + // If the file was deleted or created we have to remember that so that we update the + // source roots as well. 
+ if file.is_created_or_deleted() { + has_created_or_deleted_entries = true; } + + // Convert the contents of the file to a string + let bytes = vfs + .file_contents(file.file_id) + .map(Vec::from) + .unwrap_or_default(); + let text = match String::from_utf8(bytes).ok() { + Some(text) => Some(Arc::from(text)), + None => None, + }; + + // Notify the database about this change + analysis_change.change_file(hir::FileId(file.file_id.0), text); + } + + // If an entry was created or deleted we have to recreate all source roots + if has_created_or_deleted_entries { + analysis_change.set_roots(self.recompute_source_roots()); } // Apply the change @@ -582,9 +538,10 @@ impl LanguageServerState { } impl LanguageServerSnapshot { - /// Converts the specified `FileId` to a `Url` + /// Converts the specified `hir::FileId` to a `Url` pub async fn file_id_to_uri(&self, id: hir::FileId) -> Result { - let path = self.vfs.read().await.file2path(VfsFile(id.0)); + let vfs = self.vfs.read().await; + let path = vfs.file_path(vfs::FileId(id.0)); let url = url_from_path_with_drive_lowercasing(path)?; Ok(url) diff --git a/crates/mun_language_server/src/project_manifest.rs b/crates/mun_language_server/src/project_manifest.rs new file mode 100644 index 000000000..316574e22 --- /dev/null +++ b/crates/mun_language_server/src/project_manifest.rs @@ -0,0 +1,62 @@ +use anyhow::bail; +use paths::{AbsPath, AbsPathBuf}; +use rustc_hash::FxHashSet; +use std::convert::TryFrom; +use std::fs::read_dir; +use std::io; + +/// A wrapper around a path to a mun project +#[derive(Debug, Clone, PartialEq, Eq, Hash, Ord, PartialOrd)] +pub struct ProjectManifest { + pub path: AbsPathBuf, +} + +impl ProjectManifest { + /// Constructs a new [`ProjectManifest`] from a path + pub fn from_manifest_path(path: impl AsRef) -> anyhow::Result { + let path = path.as_ref(); + if path.ends_with(project::MANIFEST_FILENAME) { + Ok(Self { + path: path.to_path_buf(), + }) + } else { + bail!( + "project root must point to {}: {}", + project::MANIFEST_FILENAME, + path.display() + ); + } + } + + /// Find all project manifests in the given directory + pub fn discover(path: impl AsRef) -> io::Result> { + Ok(read_dir(path.as_ref())? 
+ .filter_map(Result::ok) + .map(|entry| entry.path()) + .filter(|path| { + path.is_file() + && path + .file_name() + .map(|file_name| file_name == project::MANIFEST_FILENAME) + .unwrap_or(false) + }) + .map(|path| ProjectManifest { + path: AbsPathBuf::try_from(path).expect( + "read_dir does not return absolute path when iterating an absolute path", + ), + }) + .collect()) + } + + /// Find all project manifests in a collection of paths + pub fn discover_all(paths: impl Iterator>) -> Vec { + let mut project_manifests = paths + .filter_map(|path| ProjectManifest::discover(path).ok()) + .flatten() + .collect::>() + .into_iter() + .collect::>(); + project_manifests.sort(); + project_manifests + } +} diff --git a/crates/mun_language_server/src/workspace.rs b/crates/mun_language_server/src/workspace.rs new file mode 100644 index 000000000..454b4af42 --- /dev/null +++ b/crates/mun_language_server/src/workspace.rs @@ -0,0 +1,136 @@ +use crate::{change::AnalysisChange, config::FilesWatcher, main_loop::LanguageServerState}; +use paths::{AbsPathBuf, RelativePath}; +use std::{ + convert::{TryFrom, TryInto}, + sync::Arc, +}; + +impl LanguageServerState { + /// Called to update all workspaces from the files + pub(crate) fn fetch_workspaces(&mut self) { + // Load all the manifests as packages + let packages = self + .config + .discovered_projects + .as_ref() + .into_iter() + .flatten() + .filter_map(|project| match project::Package::from_file(&project.path) { + Ok(package) => Some(package), + Err(_) => { + // TODO: Show error + None + } + }) + .collect::>(); + + // If these packages are the same as the ones we already had, there is little to do. + if *self.packages == packages { + return; + } + + // If we use the client to watch for file changes, communicate a request to the client + if self.config.watcher == FilesWatcher::Client { + let registration_options = lsp_types::DidChangeWatchedFilesRegistrationOptions { + watchers: packages + .iter() + .map(|package| format!("{}/**/*.mun", package.source_directory().display())) + .map(|glob_pattern| lsp_types::FileSystemWatcher { + glob_pattern, + kind: None, + }) + .collect(), + }; + + let registration = lsp_types::Registration { + id: "file-watcher".to_string(), + method: "workspace/didChangeWatchedFiles".to_string(), + register_options: Some(serde_json::to_value(registration_options).unwrap()), + }; + self.send_request::( + lsp_types::RegistrationParams { + registrations: vec![registration], + }, + ); + } + + let mut change = AnalysisChange::new(); + + // Construct the set of files to pass to the vfs loader + let entries_to_load = packages + .iter() + .map(|package| { + let source_dir: AbsPathBuf = package + .source_directory() + .try_into() + .expect("could not convert package root to absolute path"); + vfs::MonitorEntry::Directories(vfs::MonitorDirectories { + extensions: vec!["mun".to_owned()], + include: vec![source_dir], + exclude: vec![], + }) + }) + .collect::>(); + + let monitor_config = vfs::MonitorConfig { + watch: match self.config.watcher { + FilesWatcher::Client => vec![], + FilesWatcher::Notify => (0..entries_to_load.len()).into_iter().collect(), + }, + load: entries_to_load, + }; + + self.vfs_monitor.set_config(monitor_config); + + // Create the set of packages + let mut package_set = hir::PackageSet::default(); + for (idx, _package) in packages.iter().enumerate() { + package_set.add_package(hir::SourceRootId(idx as u32)); + } + change.set_packages(package_set); + + // Store the current set of packages and update the source roots + 
self.packages = Arc::new(packages); + change.set_roots(self.recompute_source_roots()); + + // Apply all changes to the database + self.analysis.apply_change(change); + } + + /// Recomputes all the source roots based on the `packages` + pub(crate) fn recompute_source_roots(&self) -> Vec { + // Iterate over all sources and see to which package they belong + let mut source_roots = vec![hir::SourceRoot::default(); self.packages.len()]; + + // Source directories + let source_dirs = self + .packages + .iter() + .map(|p| { + AbsPathBuf::try_from(p.source_directory()) + .expect("must be able to convert source dir to absolute path") + }) + .collect::>(); + + // Iterate over all files and find to which source directory they belong, including their + // relative path + let vfs = &*async_std::task::block_on(self.vfs.read()); + for (file_id, path) in vfs.iter() { + if let Some((idx, relative_path)) = + source_dirs + .iter() + .enumerate() + .find_map(|(index, source_dir)| { + path.strip_prefix(source_dir) + .ok() + .and_then(|path| RelativePath::from_path(path).ok()) + .map(|relative| (index, relative)) + }) + { + source_roots[idx].insert_file(hir::FileId(file_id.0), relative_path); + } + } + + source_roots + } +} diff --git a/crates/mun_language_server/tests/support.rs b/crates/mun_language_server/tests/support.rs index 56c80425f..ca716aa3c 100644 --- a/crates/mun_language_server/tests/support.rs +++ b/crates/mun_language_server/tests/support.rs @@ -3,8 +3,10 @@ use futures::{SinkExt, StreamExt}; use lsp_types::{notification::Exit, request::Shutdown}; use mun_language_server::protocol::{Connection, Message, Notification, Request}; use mun_language_server::{main_loop, Config}; +use paths::AbsPathBuf; use serde::Serialize; use serde_json::Value; +use std::convert::TryFrom; use std::time::Duration; /// An object that runs the language server main loop and enables sending and receiving messages @@ -13,6 +15,7 @@ pub struct Server { next_request_id: u64, worker: Option>, client: Connection, + _temp_path: tempdir::TempDir, } impl Server { @@ -20,7 +23,13 @@ impl Server { pub fn new() -> Self { let (connection, client) = Connection::memory(); - let config = Config::default(); + let temp_path = tempdir::TempDir::new("mun_language_server") + .expect("unable to create temporary directory"); + + let config = Config::new( + AbsPathBuf::try_from(temp_path.path().to_path_buf()) + .expect("temp_path is not an absolute path"), + ); let worker = std::thread::spawn(move || { async_std::task::block_on(async move { main_loop(connection, config).await.unwrap(); @@ -31,6 +40,7 @@ impl Server { next_request_id: Default::default(), worker: Some(worker), client, + _temp_path: temp_path, } } diff --git a/crates/mun_paths/Cargo.toml b/crates/mun_paths/Cargo.toml new file mode 100644 index 000000000..77a72d60d --- /dev/null +++ b/crates/mun_paths/Cargo.toml @@ -0,0 +1,14 @@ +[package] +name = "mun_paths" +version = "0.1.0" +authors = ["The Mun Team "] +edition = "2018" +description = "Provides convenience structures for handling relative- and absolute paths" +documentation = "https://docs.mun-lang.org/v0.2" +readme = "README.md" +homepage = "https://mun-lang.org" +repository = "https://github.com/mun-lang/mun" +license = "MIT OR Apache-2.0" + +[dependencies] +relative-path = "1.2" diff --git a/crates/mun_paths/src/abs_path.rs b/crates/mun_paths/src/abs_path.rs new file mode 100644 index 000000000..d96b54161 --- /dev/null +++ b/crates/mun_paths/src/abs_path.rs @@ -0,0 +1,125 @@ +use std::borrow::Borrow; +use 
std::convert::{TryFrom, TryInto};
+use std::ops::Deref;
+use std::path::{Path, PathBuf};
+
+/// Represents an absolute path; internally it simply wraps a `PathBuf`.
+#[derive(Debug, Clone, Ord, PartialOrd, Eq, PartialEq, Hash)]
+pub struct AbsPathBuf(PathBuf);
+
+impl From<AbsPathBuf> for PathBuf {
+    fn from(abs_path_buf: AbsPathBuf) -> Self {
+        abs_path_buf.0
+    }
+}
+
+impl Deref for AbsPathBuf {
+    type Target = AbsPath;
+
+    fn deref(&self) -> &Self::Target {
+        self.as_path()
+    }
+}
+
+impl AsRef<Path> for AbsPathBuf {
+    fn as_ref(&self) -> &Path {
+        self.0.as_path()
+    }
+}
+
+impl AsRef<PathBuf> for AbsPathBuf {
+    fn as_ref(&self) -> &PathBuf {
+        &self.0
+    }
+}
+
+impl AsRef<AbsPath> for AbsPathBuf {
+    fn as_ref(&self) -> &AbsPath {
+        AbsPath::assert_new(self.0.as_path())
+    }
+}
+
+impl TryFrom<PathBuf> for AbsPathBuf {
+    type Error = PathBuf;
+
+    fn try_from(path: PathBuf) -> Result<Self, Self::Error> {
+        if !path.is_absolute() {
+            Err(path)
+        } else {
+            Ok(AbsPathBuf(path))
+        }
+    }
+}
+
+impl PartialEq<AbsPath> for AbsPathBuf {
+    fn eq(&self, other: &AbsPath) -> bool {
+        self.as_path() == other
+    }
+}
+
+impl Borrow<AbsPath> for AbsPathBuf {
+    fn borrow(&self) -> &AbsPath {
+        self.as_path()
+    }
+}
+
+impl AbsPathBuf {
+    /// Coerces to an [`AbsPath`] slice.
+    pub fn as_path(&self) -> &AbsPath {
+        AbsPath::assert_new(self.0.as_path())
+    }
+}
+
+#[derive(Debug, Ord, PartialOrd, Eq, PartialEq, Hash)]
+#[repr(transparent)]
+pub struct AbsPath(Path);
+
+impl Deref for AbsPath {
+    type Target = Path;
+
+    fn deref(&self) -> &Self::Target {
+        &self.0
+    }
+}
+
+impl AsRef<Path> for AbsPath {
+    fn as_ref(&self) -> &Path {
+        &self.0
+    }
+}
+
+impl<'a> TryFrom<&'a Path> for &'a AbsPath {
+    type Error = &'a Path;
+
+    fn try_from(path: &'a Path) -> Result<Self, Self::Error> {
+        if !path.is_absolute() {
+            Err(path)
+        } else {
+            Ok(AbsPath::assert_new(path))
+        }
+    }
+}
+
+impl AbsPath {
+    /// Constructs a new `AbsPath` from a `Path`.
+    pub fn assert_new(path: &Path) -> &AbsPath {
+        assert!(path.is_absolute());
+        // This is a safe operation because `AbsPath` is a transparent wrapper around `Path`
+        unsafe { &*(path as *const Path as *const AbsPath) }
+    }
+
+    /// Returns the `AbsPath` without its final component, if there is one.
+    pub fn parent(&self) -> Option<&AbsPath> {
+        self.0.parent().map(AbsPath::assert_new)
+    }
+
+    /// Creates an owned [`AbsPathBuf`] with `path` adjoined to `self`.
+    pub fn join(&self, path: impl AsRef<Path>) -> AbsPathBuf {
+        self.as_ref().join(path).try_into().unwrap()
+    }
+
+    /// Converts an `AbsPath` to an owned [`AbsPathBuf`].
+    pub fn to_path_buf(&self) -> AbsPathBuf {
+        AbsPathBuf::try_from(self.0.to_path_buf()).unwrap()
+    }
+}
diff --git a/crates/mun_paths/src/lib.rs b/crates/mun_paths/src/lib.rs
new file mode 100644
index 000000000..594d0cf66
--- /dev/null
+++ b/crates/mun_paths/src/lib.rs
@@ -0,0 +1,4 @@
+pub mod abs_path;
+
+pub use abs_path::{AbsPath, AbsPathBuf};
+pub use relative_path::{RelativePath, RelativePathBuf};
diff --git a/crates/mun_project/src/package.rs b/crates/mun_project/src/package.rs
index e306f94ef..992ecac3d 100644
--- a/crates/mun_project/src/package.rs
+++ b/crates/mun_project/src/package.rs
@@ -57,14 +57,9 @@ impl Package {
         self.package_id().version()
     }
 
-    /// Returns the source directory of the package, or None if no such directory exists.
-    pub fn source_directory(&self) -> Option<PathBuf> {
-        let source_dir = self.root().join("src");
-        if source_dir.is_dir() {
-            Some(source_dir)
-        } else {
-            None
-        }
+    /// Returns the path to the source directory of the package
+    pub fn source_directory(&self) -> PathBuf {
+        self.root().join("src")
+    }
 }
diff --git a/crates/mun_project/tests/parse.rs b/crates/mun_project/tests/parse.rs
index edbc4b749..71729be50 100644
--- a/crates/mun_project/tests/parse.rs
+++ b/crates/mun_project/tests/parse.rs
@@ -23,8 +23,6 @@ fn package_from_file() {
     assert_eq!(&package.root(), &manifest_path.parent().unwrap());
     assert_eq!(format!("{}", &package), "test v0.2.0");
 
-    let source_dir = package
-        .source_directory()
-        .expect("could not locate source directory");
+    let source_dir = package.source_directory();
     assert_eq!(source_dir, manifest_path.parent().unwrap().join("src"));
 }
diff --git a/crates/mun_vfs/Cargo.toml b/crates/mun_vfs/Cargo.toml
new file mode 100644
index 000000000..7e554b722
--- /dev/null
+++ b/crates/mun_vfs/Cargo.toml
@@ -0,0 +1,19 @@
+[package]
+name = "mun_vfs"
+version = "0.1.0"
+authors = ["The Mun Team "]
+edition = "2018"
+description = "Provides an in-memory filesystem"
+documentation = "https://docs.mun-lang.org/v0.2"
+readme = "README.md"
+homepage = "https://mun-lang.org"
+repository = "https://github.com/mun-lang/mun"
+license = "MIT OR Apache-2.0"
+
+[dependencies]
+rustc-hash = "1.1.0"
+notify = "5.0.0-pre.4"
+crossbeam-channel = "0.5.0"
+log = "0.4.11"
+walkdir = "2.3.1"
+paths = {path="../mun_paths", package="mun_paths"}
diff --git a/crates/mun_vfs/src/lib.rs b/crates/mun_vfs/src/lib.rs
new file mode 100644
index 000000000..c247048b4
--- /dev/null
+++ b/crates/mun_vfs/src/lib.rs
@@ -0,0 +1,235 @@
+use std::mem;
+
+pub use monitor::{
+    Monitor, MonitorConfig, MonitorDirectories, MonitorEntry, MonitorMessage, NotifyMonitor,
+};
+
+use path_interner::PathInterner;
+use paths::{AbsPath, AbsPathBuf};
+
+mod monitor;
+mod path_interner;
+
+/// A `FileId` represents a unique identifier for a file within the `VirtualFileSystem`.
+#[derive(Debug, Copy, Clone, PartialEq, Eq, Ord, PartialOrd, Hash)]
+pub struct FileId(pub u32);
+
+/// The `VirtualFileSystem` manages a set of files and their contents. Changes to the instance
+/// are logged and can be retrieved via the `take_changes` method.
+#[derive(Default)]
+pub struct VirtualFileSystem {
+    /// Used to convert from paths to `FileId` and vice versa.
+    interner: PathInterner,
+
+    /// The content of each file, or `None` if no content is available.
+    file_contents: Vec<Option<Vec<u8>>>,
+
+    /// A record of changes to this instance.
+    changes: Vec<ChangedFile>,
+}
+
+/// A record of a change to a file
+#[derive(Debug, Clone, PartialEq, Eq, Hash)]
+pub struct ChangedFile {
+    pub file_id: FileId,
+    pub kind: ChangeKind,
+}
+
+impl ChangedFile {
+    /// Returns true if this change indicates that the file was created or deleted
+    pub fn is_created_or_deleted(&self) -> bool {
+        matches!(self.kind, ChangeKind::Create | ChangeKind::Delete)
+    }
+}
+
+/// The type of change that a file undergoes
+#[derive(Debug, Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)]
+pub enum ChangeKind {
+    Create,
+    Modify,
+    Delete,
+}
+
+impl VirtualFileSystem {
+    /// Returns `true` if there are changes that can be processed.
+    pub fn has_changes(&self) -> bool {
+        !self.changes.is_empty()
+    }
+
+    /// Returns the changes performed on the instance since the last time this function was called
+    /// or since the creation of the instance.
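+    ///
+    /// Illustrative sketch of the intended call pattern (not part of the original change;
+    /// `path` is assumed to be some `&AbsPath`):
+    ///
+    /// ```ignore
+    /// let mut vfs = VirtualFileSystem::default();
+    /// vfs.set_file_contents(path, Some(b"fn main() {}".to_vec()));
+    /// for change in vfs.take_changes() {
+    ///     // Each entry records which file changed and whether it was created/modified/deleted.
+    ///     println!("{:?}: {:?}", change.file_id, change.kind);
+    /// }
+    /// assert!(!vfs.has_changes());
+    /// ```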
+    pub fn take_changes(&mut self) -> Vec<ChangedFile> {
+        mem::take(&mut self.changes)
+    }
+
+    /// Returns the `FileId` of the file at the specified `path` or `None` if there is no data for
+    /// that file.
+    pub fn file_id(&self, path: &AbsPath) -> Option<FileId> {
+        self.interner
+            .get(path)
+            .filter(|&file_id| self.get(file_id).is_some())
+    }
+
+    /// Returns the path of the file with the specified `FileId`.
+    pub fn file_path(&self, file_id: FileId) -> &AbsPath {
+        self.interner.lookup(file_id)
+    }
+
+    /// Returns the content of the file with the specified `FileId`.
+    pub fn file_contents(&self, file_id: FileId) -> Option<&[u8]> {
+        self.get(file_id).as_deref()
+    }
+
+    /// Returns an iterator over all `FileId`s and their paths.
+    pub fn iter(&self) -> impl Iterator<Item = (FileId, &AbsPath)> + '_ {
+        self.file_contents
+            .iter()
+            .enumerate()
+            .filter(|(_, contents)| contents.is_some())
+            .map(move |(id, _)| {
+                let file_id = FileId(id as u32);
+                let path = self.interner.lookup(file_id);
+                (file_id, path)
+            })
+    }
+
+    /// Notifies this instance that the contents of the specified file have changed. Returns
+    /// `true` if the new contents actually differ from the old contents.
+    pub fn set_file_contents(&mut self, path: &AbsPath, contents: Option<Vec<u8>>) -> bool {
+        let file_id = self.alloc_file_id(path);
+        let kind = match (&self.get(file_id), &contents) {
+            (None, None) => return false,
+            (None, Some(_)) => ChangeKind::Create,
+            (Some(_), None) => ChangeKind::Delete,
+            (Some(old), Some(new)) if old == new => return false,
+            (Some(_), Some(_)) => ChangeKind::Modify,
+        };
+
+        *self.get_mut(file_id) = contents;
+        self.changes.push(ChangedFile { file_id, kind });
+        true
+    }
+
+    /// Returns the `FileId` for the specified path and ensures that we can use it with this
+    /// instance.
+    fn alloc_file_id(&mut self, path: &AbsPath) -> FileId {
+        let file_id = self.interner.intern(path);
+        let idx = file_id.0 as usize;
+        let len = self.file_contents.len().max(idx + 1);
+        self.file_contents.resize(len, None);
+        file_id
+    }
+
+    /// Returns a reference to the current content of a specific file. This function is only used
+    /// internally. Use the `file_contents` function to get the contents of a file.
+    fn get(&self, file_id: FileId) -> &Option<Vec<u8>> {
+        &self.file_contents[file_id.0 as usize]
+    }
+
+    /// Returns a mutable reference to the current content of a specific file. This function is
+    /// only used internally. Use the `set_file_contents` function to update the contents of a
+    /// file.
+    fn get_mut(&mut self, file_id: FileId) -> &mut Option<Vec<u8>> {
+        &mut self.file_contents[file_id.0 as usize]
+    }
+}
+
+#[cfg(test)]
+mod tests {
+    use std::convert::TryInto;
+    use std::path::PathBuf;
+
+    use crate::{AbsPathBuf, ChangeKind, ChangedFile, VirtualFileSystem};
+
+    #[test]
+    fn vfs() {
+        let mut vfs = VirtualFileSystem::default();
+        assert!(!vfs.has_changes());
+
+        // Construct a fake file name
+        let abs_manifest_dir: AbsPathBuf = PathBuf::from(env!("CARGO_MANIFEST_DIR"))
+            .try_into()
+            .unwrap();
+        let test_path = abs_manifest_dir.as_path().join("test");
+
+        // We should not have a FileId for this file yet
+        assert!(vfs.file_id(&test_path).is_none());
+
+        // Store some data in the vfs, this should definitely trigger a change
+        assert!(vfs.set_file_contents(&test_path, Some(vec![])));
+        assert!(vfs.has_changes());
+
+        // We should now have a FileId
+        let file_id = vfs
+            .file_id(&test_path)
+            .expect("there should be a FileId by now");
+
+        // Look up the path, it should match
+        assert_eq!(&test_path, vfs.file_path(file_id));
+
+        // Get the contents of the file
+        assert!(vfs.file_contents(file_id).is_some());
+
+        // Set the file contents to the same value, this should not trigger a change
+        assert_eq!(vfs.set_file_contents(&test_path, Some(vec![])), false);
+
+        // Actually modify the contents
+        assert!(vfs.set_file_contents(&test_path, Some(vec![0])));
+
+        // Remove the file contents, should also trigger a change
+        assert!(vfs.set_file_contents(&test_path, None));
+
+        // We should no longer have a file id because the contents were removed
+        assert_eq!(vfs.file_id(&test_path), None);
+
+        // Get the changes
+        assert!(vfs.has_changes());
+        assert_eq!(
+            vfs.take_changes(),
+            vec![
+                ChangedFile {
+                    file_id,
+                    kind: ChangeKind::Create
+                },
+                ChangedFile {
+                    file_id,
+                    kind: ChangeKind::Modify
+                },
+                ChangedFile {
+                    file_id,
+                    kind: ChangeKind::Delete
+                },
+            ]
+        );
+    }
+
+    #[test]
+    fn iter() {
+        let mut vfs = VirtualFileSystem::default();
+
+        // Construct a fake file name
+        let abs_manifest_dir: AbsPathBuf = PathBuf::from(env!("CARGO_MANIFEST_DIR"))
+            .try_into()
+            .unwrap();
+
+        // Add two files to the system
+        let test_path2 = abs_manifest_dir.as_path().join("test2");
+        let test_path = abs_manifest_dir.as_path().join("test");
+        assert!(vfs.set_file_contents(&test_path, Some(vec![0])));
+        assert!(vfs.set_file_contents(&test_path2, Some(vec![1])));
+        let file_id = vfs.file_id(&test_path).unwrap();
+        let file_id2 = vfs.file_id(&test_path2).unwrap();
+        assert_ne!(file_id, file_id2);
+
+        let mut entries = vfs
+            .iter()
+            .map(|(id, entry)| (id, entry.to_path_buf()))
+            .collect::<Vec<_>>();
+        let mut expected_entries =
+            vec![(file_id, test_path.clone()), (file_id2, test_path2.clone())];
+
+        entries.sort_by_key(|entry| entry.0);
+        expected_entries.sort_by_key(|entry| entry.0);
+
+        assert_eq!(entries, expected_entries);
+    }
+}
diff --git a/crates/mun_vfs/src/monitor.rs b/crates/mun_vfs/src/monitor.rs
new file mode 100644
index 000000000..040c9f190
--- /dev/null
+++ b/crates/mun_vfs/src/monitor.rs
@@ -0,0 +1,211 @@
+//! A `Monitor` reads and monitors files in a given set of directories. Changes are read into
+//! memory and communicated as [`MonitorMessage`]s.
+mod notify_monitor;
+
+pub use notify_monitor::NotifyMonitor;
+
+use crate::{AbsPath, AbsPathBuf};
+
+/// Describes something to be monitored by a `Monitor`.
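+///
+/// An illustrative construction of the directory-based variant, mirroring how the language
+/// server sets up monitoring for a package (the path shown is hypothetical):
+///
+/// ```ignore
+/// let entry = MonitorEntry::Directories(MonitorDirectories {
+///     extensions: vec!["mun".to_owned()],
+///     include: vec![AbsPathBuf::try_from(PathBuf::from("/projects/demo/src")).unwrap()],
+///     exclude: vec![],
+/// });
+/// ```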
+#[derive(Debug, Clone)]
+pub enum MonitorEntry {
+    /// A set of files
+    Files(Vec<AbsPathBuf>),
+
+    /// A dynamic set of files and directories
+    Directories(MonitorDirectories),
+}
+
+/// Describes a set of files to monitor. A file is included if:
+/// * its extension is listed in `extensions`
+/// * it is under an `include` path
+/// * it is not under an `exclude` path
+///
+/// If multiple include/exclude paths match, the longest one wins.
+#[derive(Debug, Clone, PartialEq, Eq)]
+pub struct MonitorDirectories {
+    /// File extensions to monitor (e.g. "mun")
+    pub extensions: Vec<String>,
+
+    /// The directories or files to monitor
+    pub include: Vec<AbsPathBuf>,
+
+    /// Paths to ignore
+    pub exclude: Vec<AbsPathBuf>,
+}
+
+/// Describes the configuration of the monitor. This can be updated with the `set_config` method
+/// on a [`Monitor`].
+#[derive(Debug, Clone)]
+pub struct MonitorConfig {
+    /// The set of entries to load
+    pub load: Vec<MonitorEntry>,
+
+    /// Indicates which entries in `load` (by index) should also be continuously monitored.
+    pub watch: Vec<usize>,
+}
+
+/// A message that might be communicated from a [`Monitor`]
+#[derive(Debug)]
+pub enum MonitorMessage {
+    /// A message that indicates the progress status of the monitor
+    Progress { total: usize, done: usize },
+
+    /// A message that indicates files have been loaded or modified. If the contents of a file
+    /// are `None`, it has been removed.
+    Loaded {
+        files: Vec<(AbsPathBuf, Option<Vec<u8>>)>,
+    },
+}
+
+pub type Sender = Box<dyn Fn(MonitorMessage) + Send>;
+
+/// A trait to monitor a set of directories and files
+/// TODO: In the future it would be nice to do this with a Future (no pun intended).
+pub trait Monitor {
+    /// Instantiates a new instance of `Self`
+    fn new(sender: Sender) -> Self
+    where
+        Self: Sized;
+
+    /// Updates the configuration of things to monitor.
+    fn set_config(&mut self, config: MonitorConfig);
+
+    /// Reloads the content of the specified file. This will trigger a new `Loaded` message to be
+    /// sent.
+    fn reload(&mut self, path: &AbsPath);
+}
+
+impl MonitorDirectories {
+    /// Returns true if, according to this instance, the file at the given `path` is contained in
+    /// this set.
+    pub fn contains_file(&self, path: impl AsRef<AbsPath>) -> bool {
+        let ext = path.as_ref().extension().unwrap_or_default();
+        if !self
+            .extensions
+            .iter()
+            .any(|include_ext| include_ext.as_str() == ext)
+        {
+            false
+        } else {
+            self.includes_path(path)
+        }
+    }
+
+    /// Returns true if, according to this instance, the directory at the given `path` is contained
+    /// in this set.
+    pub fn contains_dir(&self, path: impl AsRef<AbsPath>) -> bool {
+        self.includes_path(path)
+    }
+
+    /// Returns true if the given path is considered part of this set.
+    fn includes_path(&self, path: impl AsRef<AbsPath>) -> bool {
+        let path = path.as_ref();
+
+        // Find the longest include path that contains the specified path
+        let mut include: Option<&AbsPathBuf> = None;
+        for incl in &self.include {
+            if path.starts_with(incl) {
+                include = Some(match include {
+                    Some(prev) if prev.starts_with(incl) => prev,
+                    _ => incl,
+                })
+            }
+        }
+
+        // If there is no include path, we're done quickly
+        let include = match include {
+            Some(incl) => incl,
+            None => return false,
+        };
+
+        // Filter based on exclude paths
+        for excl in &self.exclude {
+            if path.starts_with(excl) && excl.starts_with(include) {
+                return false;
+            }
+        }
+
+        true
+    }
+}
+
+impl MonitorEntry {
+    /// Returns true if, according to this instance, the file at the given `path` is contained in
+    /// this entry.
+ pub fn contains_file(&self, path: impl AsRef) -> bool { + match self { + MonitorEntry::Files(files) => { + let path = path.as_ref(); + files.iter().any(|entry| entry == path) + } + MonitorEntry::Directories(dirs) => dirs.contains_file(path), + } + } + + /// Returns true if, according to this instance, the directory at the given `path` is contained + /// in this set. + pub fn contains_dir(&self, path: impl AsRef) -> bool { + match self { + MonitorEntry::Files(_) => false, + MonitorEntry::Directories(dirs) => dirs.contains_dir(path), + } + } +} + +#[cfg(test)] +mod tests { + use super::{AbsPathBuf, Monitor, MonitorDirectories}; + use std::convert::TryInto; + use std::path::PathBuf; + + #[test] + fn monitor_is_object_safe() { + fn _assert(_: &dyn Monitor) {} + } + + #[test] + fn test_config() { + let abs_manifest_dir: AbsPathBuf = PathBuf::from(env!("CARGO_MANIFEST_DIR")) + .try_into() + .unwrap(); + + let config = MonitorDirectories { + extensions: vec!["mun".to_owned()], + include: vec![ + abs_manifest_dir.join("src"), + abs_manifest_dir.join("src/.git/special_case"), + ], + exclude: vec![ + abs_manifest_dir.join(".git"), + abs_manifest_dir.join("src/.git"), + ], + }; + + assert_eq!( + config.contains_file(abs_manifest_dir.join("mod.mun")), + false + ); + assert_eq!( + config.contains_file(abs_manifest_dir.join("src/mod.mun")), + true + ); + assert_eq!( + config.contains_file(abs_manifest_dir.join("src/mod.rs")), + false + ); + assert_eq!( + config.contains_file(abs_manifest_dir.join(".git/src/mod.mun")), + false + ); + assert_eq!( + config.contains_file(abs_manifest_dir.join("src/.git/mod.mun")), + false + ); + assert_eq!( + config.contains_file(abs_manifest_dir.join("src/.git/special_case/mod.mun")), + true + ); + assert_eq!(config.contains_dir(abs_manifest_dir.join("src")), true); + } +} diff --git a/crates/mun_vfs/src/monitor/notify_monitor.rs b/crates/mun_vfs/src/monitor/notify_monitor.rs new file mode 100644 index 000000000..d1e0366f0 --- /dev/null +++ b/crates/mun_vfs/src/monitor/notify_monitor.rs @@ -0,0 +1,289 @@ +use super::{Monitor, MonitorConfig, MonitorDirectories, MonitorEntry, MonitorMessage}; +use crate::{AbsPath, AbsPathBuf}; +use crossbeam_channel::{never, select, unbounded, Receiver, Sender}; +use notify::{RecursiveMode, Watcher}; +use std::{convert::TryFrom, thread}; +use walkdir::WalkDir; + +/// A message that can be sent from the "foreground" to the background thread. 
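+/// (Here "foreground" refers to the thread that owns the [`NotifyMonitor`] handle; the
+/// background thread owns the actual `notify` watcher and performs the file reads.)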
+#[derive(Debug)] +enum ForegroundMessage { + /// Notifies the background tasks that the configuration has changed + ConfigChanged(MonitorConfig), + + /// Notifies the background tasks that the specified path should be reloaded + Reload(AbsPathBuf), +} + +#[derive(Debug)] +pub struct NotifyMonitor { + sender: Sender, + thread: thread::JoinHandle<()>, +} + +impl Monitor for NotifyMonitor { + fn new(sender: super::Sender) -> Self + where + Self: Sized, + { + let background_thread = NotifyThread::new(sender); + let (sender, receiver) = unbounded::(); + let thread = thread::Builder::new() + .spawn(move || background_thread.run(receiver)) + .expect("failed to spawn notify background thread"); + NotifyMonitor { sender, thread } + } + + fn set_config(&mut self, config: MonitorConfig) { + self.sender + .send(ForegroundMessage::ConfigChanged(config)) + .expect("could not send new configuration to background thread"); + } + + fn reload(&mut self, path: &AbsPath) { + self.sender + .send(ForegroundMessage::Reload(path.to_path_buf())) + .expect("could not send reload message to background thread"); + } +} + +type NotifyEvent = notify::Result; + +/// A struct that manages the notify watchers and processes the changes. +struct NotifyThread { + sender: super::Sender, + watched_entries: Vec, + watcher: Option<(notify::RecommendedWatcher, Receiver)>, +} + +/// A message to be processed by the `NotifyThread`. +enum NotifyThreadEvent { + ForegroundMessage(ForegroundMessage), + NotifyEvent(NotifyEvent), +} + +impl NotifyThread { + /// Constructs a new instance of `Self` + pub fn new(sender: super::Sender) -> Self { + NotifyThread { + sender, + watched_entries: Vec::new(), + watcher: None, + } + } + + /// Returns the next event to process. + fn next_event(&self, receiver: &Receiver) -> Option { + let watcher_receiver = self.watcher.as_ref().map(|(_, receiver)| receiver); + select! 
{ + recv(receiver) -> it => it.ok().map(NotifyThreadEvent::ForegroundMessage), + recv(watcher_receiver.unwrap_or(&never())) -> it => Some(NotifyThreadEvent::NotifyEvent(it.unwrap())), + } + } + + /// Runs the background thread until there are no more messages to receive + pub fn run(mut self, receiver: Receiver) { + while let Some(event) = self.next_event(&receiver) { + match event { + NotifyThreadEvent::ForegroundMessage(message) => match message { + ForegroundMessage::ConfigChanged(config) => self.set_config(config), + ForegroundMessage::Reload(path) => { + let contents = read(&path); + let files = vec![(path, contents)]; + self.send(MonitorMessage::Loaded { files }); + } + }, + NotifyThreadEvent::NotifyEvent(event) => { + if let Some(event) = log_notify_error(event) { + let files = event + .paths + .into_iter() + .map(|path| { + AbsPathBuf::try_from(path) + .expect("could not convert notify event path to absolute path") + }) + .filter_map(|path| { + if path.is_dir() + && self + .watched_entries + .iter() + .any(|entry| entry.contains_dir(&path)) + { + self.watch(path); + None + } else if !path.is_file() + || !self + .watched_entries + .iter() + .any(|entry| entry.contains_file(&path)) + { + None + } else { + let contents = read(&path); + Some((path, contents)) + } + }) + .collect::>(); + if !files.is_empty() { + self.send(MonitorMessage::Loaded { files }); + } + } + } + } + } + } + + /// Updates the configuration to `config` + fn set_config(&mut self, config: MonitorConfig) { + // Reset the previous watcher and possibly construct a new one + self.watcher = None; + if !config.watch.is_empty() { + let (watcher_sender, watcher_receiver) = unbounded(); + let watcher = log_notify_error(Watcher::new_immediate(move |event| { + watcher_sender + .send(event) + .expect("unable to send notify event over channel") + })); + self.watcher = watcher.map(|it| (it, watcher_receiver)); + } + + // Update progress + let total_entries = config.load.len(); + self.send(MonitorMessage::Progress { + total: total_entries, + done: 0, + }); + + // Update the current set of entries + self.watched_entries.clear(); + for (i, entry) in config.load.into_iter().enumerate() { + let watch = config.watch.contains(&i); + if watch { + self.watched_entries.push(entry.clone()); + } + + let files = self.load_entry(entry, watch); + self.send(MonitorMessage::Loaded { files }); + self.send(MonitorMessage::Progress { + total: total_entries, + done: i + 1, + }); + } + } + + /// Loads all the files from the given entry and optionally adds to the watched entries + fn load_entry( + &mut self, + entry: MonitorEntry, + watch: bool, + ) -> Vec<(AbsPathBuf, Option>)> { + match entry { + MonitorEntry::Files(files) => self.load_files_entry(files, watch), + MonitorEntry::Directories(dirs) => self.load_directories_entry(dirs, watch), + } + } + + /// Loads all the files and optionally adds to watched entries + fn load_files_entry( + &mut self, + files: Vec, + watch: bool, + ) -> Vec<(AbsPathBuf, Option>)> { + files + .into_iter() + .map(|file| { + if watch { + self.watch(&file); + } + let contents = read(&file); + (file, contents) + }) + .collect() + } + + /// Loads all the files from the specified directories and optionally starts watching them. 
+ fn load_directories_entry( + &mut self, + dirs: MonitorDirectories, + watch: bool, + ) -> Vec<(AbsPathBuf, Option>)> { + let mut result = Vec::new(); + for root in dirs.include.iter() { + let walkdir = WalkDir::new(root) + .follow_links(true) + .into_iter() + .filter_entry(|entry| { + if !entry.file_type().is_dir() { + true + } else { + let path = AbsPath::assert_new(entry.path()); + root == path + || dirs + .exclude + .iter() + .chain(&dirs.include) + .all(|dir| dir != path) + } + }); + + let files = walkdir.filter_map(Result::ok).filter_map(|entry| { + let is_dir = entry.file_type().is_dir(); + let is_file = entry.file_type().is_file(); + let abs_path = AbsPathBuf::try_from(entry.into_path()) + .expect("could not convert walkdir entry to absolute path"); + if is_dir && watch { + self.watch(&abs_path); + } + if !is_file { + None + } else { + let ext = abs_path.extension().unwrap_or_default(); + if dirs.extensions.iter().all(|entry| entry.as_str() != ext) { + None + } else { + Some(abs_path) + } + } + }); + + result.extend(files.map(|file| { + let contents = read(&file); + (file, contents) + })); + } + + result + } + + /// Sends a message to the foreground. + fn send(&mut self, message: MonitorMessage) { + (self.sender)(message); + } + + /// Start watching the file at the specified path + fn watch(&mut self, path: impl AsRef) { + if let Some((watcher, _)) = &mut self.watcher { + log_notify_error(watcher.watch(path.as_ref(), RecursiveMode::NonRecursive)); + } + } +} + +/// A helper function that reads the contents of the specified file and returns it. +fn read(path: impl AsRef) -> Option> { + std::fs::read(path.as_ref()).ok() +} + +/// A helper function to load a warning for a "notify" error. +fn log_notify_error(res: notify::Result) -> Option { + res.map_err(|err| log::warn!("notify error: {}", err)).ok() +} + +#[cfg(test)] +mod tests { + use super::{Monitor, NotifyMonitor}; + + #[test] + fn construct() { + let _monitor = NotifyMonitor::new(Box::new(|_| {})); + } +} diff --git a/crates/mun_vfs/src/path_interner.rs b/crates/mun_vfs/src/path_interner.rs new file mode 100644 index 000000000..788778170 --- /dev/null +++ b/crates/mun_vfs/src/path_interner.rs @@ -0,0 +1,67 @@ +use crate::{AbsPath, AbsPathBuf, FileId}; +use rustc_hash::FxHashMap; + +/// A struct to map file paths to `FileId`s. `FileId`s are never cleared because we assume there +/// never be too many. +#[derive(Default)] +pub(crate) struct PathInterner { + path_to_id: FxHashMap, + id_to_path: Vec, +} + +impl PathInterner { + /// Returns the `FileId` for the specified `path` or `None` if the specified path was not + /// interned. + pub fn get(&self, path: &AbsPath) -> Option { + self.path_to_id.get(path).copied() + } + + /// Interns the specified `path`, returning a unique `FileId` for the path. + pub fn intern(&mut self, path: &AbsPath) -> FileId { + if let Some(id) = self.get(path) { + id + } else { + let id = FileId(self.id_to_path.len() as u32); + self.path_to_id.insert(path.to_path_buf(), id); + self.id_to_path.push(path.to_path_buf()); + id + } + } + + /// Returns the path for the specified FileId. 
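+    ///
+    /// Panics if `id` was not previously returned by `intern` on this instance, since the
+    /// backing `Vec` is indexed directly.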
+    pub fn lookup(&self, id: FileId) -> &AbsPath {
+        &self.id_to_path[id.0 as usize]
+    }
+}
+
+#[cfg(test)]
+mod tests {
+    use super::PathInterner;
+    use crate::AbsPathBuf;
+    use std::convert::TryInto;
+    use std::path::PathBuf;
+
+    #[test]
+    fn intern() {
+        let mut interner = PathInterner::default();
+
+        let file_path_buf: PathBuf = env!("CARGO_MANIFEST_DIR").into();
+        let abs_file: AbsPathBuf = file_path_buf.try_into().unwrap();
+
+        // The path has not been interned yet, so we should not be able to find a file_id
+        assert_eq!(interner.get(&abs_file), None);
+
+        // Insert the path into the interner
+        let file_id = interner.intern(&abs_file);
+
+        // We can now get the file_id by path
+        assert_eq!(interner.get(&abs_file), Some(file_id));
+
+        // Interning the path again should return the same file_id
+        let file_id2 = interner.intern(&abs_file);
+        assert_eq!(file_id, file_id2);
+
+        // Check the path from the id
+        assert_eq!(&abs_file, interner.lookup(file_id));
+    }
+}
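Taken together, the new `mun_vfs` pieces are intended to be wired up roughly as follows: a `Monitor` loads and watches files on disk, its `Loaded` messages are written into a `VirtualFileSystem`, and a consumer (such as the language server's main loop) drains `take_changes` to find out what happened. The sketch below is illustrative only: the channel plumbing, the extra `crossbeam-channel` dependency, and the empty `MonitorConfig` are assumptions, not code from this diff.

    use mun_vfs::{Monitor, MonitorConfig, MonitorMessage, NotifyMonitor, VirtualFileSystem};

    fn main() {
        // Channel that carries messages from the monitor's background thread (illustrative).
        let (tx, rx) = crossbeam_channel::unbounded();
        let mut monitor = NotifyMonitor::new(Box::new(move |msg| {
            tx.send(msg).expect("receiver dropped")
        }));

        // A real configuration would list the package source directories to load and watch.
        monitor.set_config(MonitorConfig {
            load: vec![],
            watch: vec![],
        });

        let mut vfs = VirtualFileSystem::default();
        for msg in rx {
            // Write everything the monitor loaded into the in-memory file system ...
            if let MonitorMessage::Loaded { files } = msg {
                for (path, contents) in files {
                    vfs.set_file_contents(&path, contents);
                }
            }
            // ... and drain the accumulated changes, much like the language server does in
            // `process_vfs_changes`.
            for change in vfs.take_changes() {
                println!("{:?} -> {:?}", change.file_id, change.kind);
            }
        }
    }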