diff --git a/Cargo.lock b/Cargo.lock index 81ce69e9..ee1d5626 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -3350,6 +3350,7 @@ dependencies = [ "chrono", "digest", "dirs", + "dunce", "fluent-uri", "futures", "gix", diff --git a/core/Cargo.toml b/core/Cargo.toml index 51e80f34..db781846 100644 --- a/core/Cargo.toml +++ b/core/Cargo.toml @@ -68,6 +68,7 @@ bytes = { version = "1.11.1", default-features = false } toml_edit = { version = "0.25.4", features = ["serde"] } globset = { version = "0.4.18", default-features = false } reqwest = { version = "0.13.2", optional = true, features = ["rustls", "stream"] } +dunce = "1.0.5" [dev-dependencies] assert_cmd = "2.1.2" diff --git a/core/src/commands/sources.rs b/core/src/commands/sources.rs index ac916593..51758de1 100644 --- a/core/src/commands/sources.rs +++ b/core/src/commands/sources.rs @@ -79,14 +79,14 @@ pub fn do_sources_local_src_project_no_deps( project: &LocalSrcProject, include_index: bool, ) -> Result, LocalSourcesError> { - let unix_srcs = do_sources_project_no_deps(project, include_index)?; + let unix_sources = do_sources_project_no_deps(project, include_index)?; - let srcs: Result, _> = unix_srcs + let sources: Result, _> = unix_sources .iter() .map(|path| project.get_source_path(path)) .collect(); - Ok(srcs?) + Ok(sources?) } /// Transitively resolve a list of usages (typically the usages of some project) @@ -136,5 +136,10 @@ pub fn enumerate_projects_lock( Vec<::InterchangeProjectRead>, ResolutionError<::ReadError>, > { - lock.resolve_projects(env) + let projects = lock + .resolve_projects(env)? 
+ .into_iter() + .filter_map(|(_, project_read)| project_read) + .collect(); + Ok(projects) } diff --git a/core/src/context.rs b/core/src/context.rs index c5b6dec7..1f21e192 100644 --- a/core/src/context.rs +++ b/core/src/context.rs @@ -1,12 +1,18 @@ +#[cfg(feature = "filesystem")] +use camino::Utf8PathBuf; + #[cfg(feature = "filesystem")] use crate::{project::local_src::LocalSrcProject, workspace::Workspace}; #[derive(Debug, Default)] pub struct ProjectContext { - /// Root directory of current workspace + /// Current workspace if found #[cfg(feature = "filesystem")] pub current_workspace: Option, - /// Root directory of current project + /// Current project if found #[cfg(feature = "filesystem")] pub current_project: Option, + /// Path to current directory + #[cfg(feature = "filesystem")] + pub current_directory: Utf8PathBuf, } diff --git a/core/src/env/local_directory/metadata.rs b/core/src/env/local_directory/metadata.rs new file mode 100644 index 00000000..08d2c8b4 --- /dev/null +++ b/core/src/env/local_directory/metadata.rs @@ -0,0 +1,335 @@ +// SPDX-FileCopyrightText: © 2025 Sysand contributors +// SPDX-License-Identifier: MIT OR Apache-2.0 + +use std::{fmt::Display, str::FromStr}; + +use camino::{Utf8Path, Utf8PathBuf}; +use serde::Deserialize; +use thiserror::Error; +use toml_edit::{ArrayOfTables, DocumentMut, Item, Table, Value, value}; +use typed_path::Utf8UnixPathBuf; + +use crate::{ + context::ProjectContext, + env::local_directory::{LocalDirectoryEnvironment, LocalReadError}, + lock::{Lock, ResolutionError, Source, multiline_array}, + project::{ + local_src::{LocalSrcError, LocalSrcProject}, + utils::{FsIoError, ToUnixPathBuf, deserialize_unix_path, wrapfs}, + }, +}; + +pub const METADATA_PREFIX: &str = "# This file is automatically generated by Sysand and is not intended to be edited manually.\n\n"; +pub const CURRENT_METADATA_VERSION: &str = "0.1"; +pub const SUPPORTED_METADATA_VERSIONS: &[&str] = &[CURRENT_METADATA_VERSION]; + +#[derive(Debug, 
Error)] +pub enum LockToEnvMetadataError { + #[error(transparent)] + ResolutionError(#[from] ResolutionError), + #[error(transparent)] + Canonicalization(#[from] Box), +} + +impl Lock { + pub fn to_env_metadata( + &self, + env: &LocalDirectoryEnvironment, + ctx: &ProjectContext, + ) -> Result { + let resolved_projects = self.resolve_projects(env)?; + + let mut metadata = EnvMetadata::default(); + for (project, storage) in resolved_projects { + let usages = project + .usages + .iter() + .map(|usage| usage.resource.clone()) + .collect(); + + if let Some(storage) = storage { + let project_path = wrapfs::canonicalize(storage.root_path())?; + let env_path = wrapfs::canonicalize(env.root_path())?; + let path = project_path + .strip_prefix(env_path) + .expect("path to project in env does not share a prefix with path to env") + .to_unix_path_buf(); + metadata.projects.push(EnvProject { + publisher: project.publisher, + name: project.name, + version: project.version, + path, + identifiers: project.identifiers, + usages, + editable: false, + workspace: false, + }); + } else if let [Source::Editable { editable }, ..] 
= project.sources.as_slice() { + let workspace = ctx + .current_workspace + .iter() + .flat_map(|ws| ws.projects().iter()) + .any(|p| p.path.as_str() == editable); + metadata.projects.push(EnvProject { + publisher: project.publisher, + name: project.name, + version: project.version, + path: editable.as_str().into(), + identifiers: project.identifiers, + usages, + editable: true, + workspace, + }); + } + } + + Ok(metadata) + } +} + +#[derive(Debug, Deserialize)] +pub struct EnvMetadata { + pub version: String, + #[serde(rename = "project", default)] + pub projects: Vec, +} + +impl Default for EnvMetadata { + fn default() -> Self { + EnvMetadata { + version: CURRENT_METADATA_VERSION.to_string(), + projects: vec![], + } + } +} + +#[derive(Debug, Error)] +#[error("env metadata version `{0}` is not supported")] +pub struct UnsupportedVersionError(String); + +#[derive(Debug, Error)] +pub enum ParseError { + #[error("failed to parse env metadata file: {0}")] + Toml(#[from] toml::de::Error), + #[error(transparent)] + Unsupported(#[from] UnsupportedVersionError), +} + +impl Display for EnvMetadata { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + write!(f, "{}", self.to_toml()) + } +} + +impl FromStr for EnvMetadata { + type Err = ParseError; + + fn from_str(s: &str) -> Result { + let metadata: EnvMetadata = toml::from_str(s)?; + + if !SUPPORTED_METADATA_VERSIONS.contains(&metadata.version.as_str()) { + return Err(ParseError::Unsupported(UnsupportedVersionError( + metadata.version.clone(), + ))); + } + + Ok(metadata) + } +} + +#[derive(Debug, Error)] +pub enum AddProjectError { + #[error(transparent)] + ProjectRead(#[from] LocalSrcError), + #[error("missing project info at `{0}`")] + MissingInfo(Utf8PathBuf), +} + +impl EnvMetadata { + pub fn to_toml(&self) -> DocumentMut { + let mut doc = DocumentMut::new(); + doc.decor_mut().set_prefix(METADATA_PREFIX); + doc.insert("version", value(Value::from(&self.version))); + + let mut projects = 
ArrayOfTables::new(); + for project in &self.projects { + projects.push(project.to_toml()); + } + doc.insert("project", Item::ArrayOfTables(projects)); + + doc + } + + fn find_project(&self, identifiers: &[String], version: &String) -> Option { + for (index, project) in self.projects.iter().enumerate() { + if &project.version == version + && project + .identifiers + .iter() + .any(|iri| identifiers.contains(iri)) + { + return Some(index); + } + } + None + } + + pub fn add_project(&mut self, project: EnvProject) { + if let Some(found) = self.find_project(&project.identifiers, &project.version) { + self.projects[found].merge_identifiers(&project); + } else { + self.projects.push(project); + } + } + + pub fn remove_project, V: AsRef>(&mut self, iri: S, version: Option) { + if let Some(v) = version { + self.projects.retain(|p| { + p.version != v.as_ref() || !p.identifiers.iter().any(|i| i == iri.as_ref()) + }); + } else { + self.projects + .retain(|p| !p.identifiers.iter().any(|i| i == iri.as_ref())); + } + } + + /// Add `LocalSrcProject` to env. Must have `nominal_path` set. + pub fn add_local_project( + &mut self, + identifiers: Vec, + project: LocalSrcProject, + editable: bool, + workspace: bool, + ) -> Result<(), AddProjectError> { + let info = project + .get_info()? + .ok_or(AddProjectError::MissingInfo(project.project_path.clone()))?; + let project = EnvProject { + publisher: info.publisher, + name: Some(info.name), + version: info.version, + path: project + .nominal_path + .expect("expected nominal path for project") + .to_unix_path_buf(), + identifiers, + usages: info.usage.into_iter().map(|u| u.resource).collect(), + editable, + workspace, + }; + self.add_project(project); + + Ok(()) + } + + pub fn merge(&mut self, other: EnvMetadata) { + for project in other.projects { + self.add_project(project) + } + } +} + +/// Metadata describing a project belonging to an environment. +#[derive(Debug, Deserialize)] +pub struct EnvProject { + /// Publisher of the project. 
Intended for display purposes. + pub publisher: Option, + /// Name of the project. Intended for display purposes. + pub name: Option, + /// Version of the project. + pub version: String, + /// Path to the root directory of the project. + /// If the project is not `editable` this should be relative + /// to the env directory and otherwise it should be relative + /// to the workspace root. + #[serde(deserialize_with = "deserialize_unix_path")] + pub path: Utf8UnixPathBuf, + /// List of identifiers (IRIs) used for the project. + /// The first identifier is considered the canonical + /// identifier, and if the project is not `editable` this + /// is the IRI it is installed as. The rest are considered + /// as aliases. Can only be empty for `editable` projects. + #[serde(default)] + pub identifiers: Vec, + /// Usages of the project. Intended for tools needing to + /// track the interdependence of projects in the environment. + #[serde(default)] + pub usages: Vec, + /// Indicator of whether the project is fully installed in + /// the environment or located elsewhere. + #[serde(default)] + pub editable: bool, + /// Indicator of whether an `editable` project belongs + /// to the current workspace, i.e. whether its path + /// matches one of the projects declared by the + /// workspace. Intended for tools that need to tell + /// workspace members apart from other editable installs. 
+ #[serde(default)] + pub workspace: bool, +} + +impl EnvProject { + pub fn to_toml(&self) -> Table { + let mut table = Table::new(); + if let Some(publisher) = &self.publisher { + table.insert("publisher", value(publisher)); + } + if let Some(name) = &self.name { + table.insert("name", value(name)); + } + table.insert("version", value(&self.version)); + table.insert("path", value(self.path.as_str())); + if !self.identifiers.is_empty() { + table.insert( + "identifiers", + value(multiline_array(self.identifiers.iter())), + ); + } + if !self.usages.is_empty() { + table.insert("usages", value(multiline_array(self.usages.iter()))); + } + if self.editable { + table.insert("editable", value(true)); + } + if self.workspace { + table.insert("workspace", value(true)); + } + + table + } + + /// Adds identifiers from other project. + /// Should only be done if the underlying projects are the same. + /// In particular they must have the same version. + pub fn merge_identifiers(&mut self, other: &EnvProject) { + assert_eq!( + self.version, other.version, + "attempting to merge identifiers for projects with different versions" + ); + + for iri in &other.identifiers { + if !self.identifiers.contains(iri) { + self.identifiers.push(iri.clone()); + } + } + } +} + +#[derive(Error, Debug)] +pub enum EnvMetadataReadError { + #[error("failed to deserialize TOML file `{0}`: {1}")] + Toml(Box, toml::de::Error), + #[error(transparent)] + Io(#[from] Box), + #[error(transparent)] + Unsupported(UnsupportedVersionError), +} + +pub fn load_env_metadata>(path: P) -> Result { + let result = EnvMetadata::from_str(wrapfs::read_to_string(path.as_ref())?.as_str()); + + result.map_err(|parse_err| match parse_err { + ParseError::Toml(err) => EnvMetadataReadError::Toml(path.as_ref().to_owned().into(), err), + ParseError::Unsupported(err) => EnvMetadataReadError::Unsupported(err), + }) +} diff --git a/core/src/env/local_directory.rs b/core/src/env/local_directory/mod.rs similarity index 55% rename from 
core/src/env/local_directory.rs rename to core/src/env/local_directory/mod.rs index e20e6d85..36d3d999 100644 --- a/core/src/env/local_directory.rs +++ b/core/src/env/local_directory/mod.rs @@ -1,16 +1,14 @@ // SPDX-FileCopyrightText: © 2025 Sysand contributors // SPDX-License-Identifier: MIT OR Apache-2.0 +use std::io::{self, BufRead, BufReader, Write}; + use camino::{Utf8Path, Utf8PathBuf}; use camino_tempfile::NamedUtf8TempFile; -use sha2::Sha256; -use std::{ - fs, - io::{self, BufRead, BufReader, Read, Write}, -}; +use thiserror::Error; use crate::{ - env::{PutProjectError, ReadEnvironment, WriteEnvironment, segment_uri_generic}, + env::{PutProjectError, ReadEnvironment, WriteEnvironment}, project::{ local_src::{LocalSrcError, LocalSrcProject, PathError}, utils::{ @@ -20,7 +18,13 @@ use crate::{ }, }; -use thiserror::Error; +pub mod metadata; +mod utils; + +use utils::{ + TryMoveError, add_line_temp, path_encode_uri, remove_dir_if_empty, remove_empty_dirs, + singleton_line_temp, try_move_files, try_remove_files, +}; #[derive(Clone, Debug)] pub struct LocalDirectoryEnvironment { @@ -29,265 +33,19 @@ pub struct LocalDirectoryEnvironment { pub const DEFAULT_ENV_NAME: &str = "sysand_env"; +pub const METADATA_PATH: &str = "env.toml"; pub const ENTRIES_PATH: &str = "entries.txt"; pub const VERSIONS_PATH: &str = "versions.txt"; -/// Get a relative path corresponding to the given `uri` -pub fn path_encode_uri>(uri: S) -> Utf8PathBuf { - let mut result = Utf8PathBuf::new(); - for segment in segment_uri_generic::(uri) { - result.push(segment); - } - - result -} - -pub fn remove_dir_if_empty>(path: P) -> Result<(), FsIoError> { - match fs::remove_dir(path.as_ref()) { - Err(err) if err.kind() == io::ErrorKind::DirectoryNotEmpty => Ok(()), - r => r.map_err(|e| FsIoError::RmDir(path.to_path_buf(), e)), - } -} - -pub fn remove_empty_dirs>(path: P) -> Result<(), FsIoError> { - let mut dirs: Vec<_> = walkdir::WalkDir::new(path.as_ref()) - .into_iter() - .filter_map(|e| e.ok()) 
- .filter_map(|e| { - e.file_type() - .is_dir() - .then(|| Utf8PathBuf::from_path_buf(e.into_path()).ok()) - .flatten() - }) - .collect(); - - dirs.sort_by(|a, b| b.cmp(a)); - - for dir in dirs { - remove_dir_if_empty(&dir)?; - } - - Ok(()) -} - -#[derive(Error, Debug)] -pub enum TryMoveError { - #[error("recovered from failure: {0}")] - RecoveredIO(Box), - #[error( - "failed and may have left the directory in inconsistent state:\n{err}\nwhich was caused by:\n{cause}" - )] - CatastrophicIO { - err: Box, - cause: Box, - }, -} - -fn try_remove_files, I: Iterator>( - paths: I, -) -> Result<(), TryMoveError> { - let tempdir = camino_tempfile::tempdir() - .map_err(|e| TryMoveError::RecoveredIO(FsIoError::CreateTempFile(e).into()))?; - let mut moved: Vec = vec![]; - - for (i, path) in paths.enumerate() { - match move_fs_item(&path, tempdir.path().join(i.to_string())) { - Ok(_) => { - moved.push(path.to_path_buf()); - } - Err(cause) => { - // NOTE: This dance is to bypass the fact that std::io::error is not Clone-eable... - let mut catastrophic_error = None; - for (j, recover) in moved.iter().enumerate() { - if let Err(err) = move_fs_item(tempdir.path().join(j.to_string()), recover) { - catastrophic_error = Some(err); - break; - } - } - - if let Some(err) = catastrophic_error { - return Err(TryMoveError::CatastrophicIO { err, cause }); - } else { - return Err(TryMoveError::RecoveredIO(cause)); - } - } - } - } - - Ok(()) -} - -// Recursively copy a directory from `src` to `dst`. -// Assumes that all parents of `dst` exist. -fn copy_dir_recursive, Q: AsRef>( - src: P, - dst: Q, -) -> Result<(), Box> { - wrapfs::create_dir(&dst)?; - - for entry_result in wrapfs::read_dir(&src)? 
{ - let entry = entry_result.map_err(|e| FsIoError::ReadDir(src.to_path_buf(), e))?; - let file_type = entry - .file_type() - .map_err(|e| FsIoError::ReadDir(src.to_path_buf(), e))?; - let src_path = entry.path(); - let dst_path = dst.as_ref().join(entry.file_name()); - - if file_type.is_dir() { - copy_dir_recursive(src_path, dst_path)?; - } else { - wrapfs::copy(src_path, dst_path)?; - } - } - - Ok(()) -} - -// Rename/move a file or directory from `src` to `dst`. -fn move_fs_item, Q: AsRef>( - src: P, - dst: Q, -) -> Result<(), Box> { - match fs::rename(src.as_ref(), dst.as_ref()) { - Ok(_) => Ok(()), - Err(e) if e.kind() == io::ErrorKind::CrossesDevices => { - let metadata = wrapfs::metadata(&src)?; - if metadata.is_dir() { - copy_dir_recursive(&src, &dst)?; - wrapfs::remove_dir_all(&src)?; - } else { - wrapfs::copy(&src, &dst)?; - wrapfs::remove_file(&src)?; - } - Ok(()) - } - Err(e) => Err(FsIoError::Move(src.to_path_buf(), dst.to_path_buf(), e))?, - } -} - -fn try_move_files(paths: &Vec<(&Utf8Path, &Utf8Path)>) -> Result<(), TryMoveError> { - let tempdir = camino_tempfile::tempdir() - .map_err(|e| TryMoveError::RecoveredIO(FsIoError::CreateTempFile(e).into()))?; - - let mut last_err = None; - - // move source files out of the way - for (i, (path, _)) in paths.iter().enumerate() { - let src_path = tempdir.path().join(format!("src_{}", i)); - if let Err(e) = move_fs_item(path, src_path) { - last_err = Some(e); - break; - } - } - - // Recover moved files in case of failure - if let Some(cause) = last_err { - for (i, (path, _)) in paths.iter().enumerate() { - let src_path = tempdir.path().join(format!("src_{}", i)); - - if src_path.exists() - && let Err(err) = move_fs_item(src_path, path) - { - return Err(TryMoveError::CatastrophicIO { err, cause }); - } - } - - return Err(TryMoveError::RecoveredIO(cause)); - } - - let mut last_err = None; - - // Move target files out of the way - for (i, (_, path)) in paths.iter().enumerate() { - if path.exists() { - let trg_path 
= tempdir.path().join(format!("trg_{}", i)); - if let Err(e) = move_fs_item(path, trg_path) { - last_err = Some(e); - break; - } - } - } - - // Recover moved files in case of failure - if let Some(cause) = last_err { - for (i, (_, path)) in paths.iter().enumerate() { - let trg_path = tempdir.path().join(format!("trg_{}", i)); - - if trg_path.exists() - && let Err(err) = move_fs_item(trg_path, path) - { - return Err(TryMoveError::CatastrophicIO { err, cause }); - } - } - - for (i, (path, _)) in paths.iter().enumerate() { - let src_path = tempdir.path().join(format!("src_{}", i)); - - if src_path.exists() - && let Err(err) = move_fs_item(src_path, path) - { - return Err(TryMoveError::CatastrophicIO { err, cause }); - } - } - - return Err(TryMoveError::RecoveredIO(cause)); - } - - let mut last_err = None; - - // Try moving files to destination - for (i, (_, target)) in paths.iter().enumerate() { - let src_path = tempdir.path().join(format!("src_{}", i)); - - if let Err(e) = move_fs_item(src_path, target) { - last_err = Some(e); - break; - } - } - - // Recover moved files in case of failure - if let Some(cause) = last_err { - for (i, (_, path)) in paths.iter().enumerate() { - let src_path = tempdir.path().join(format!("src_{}", i)); - - if path.exists() - && let Err(err) = move_fs_item(path, src_path) - { - return Err(TryMoveError::CatastrophicIO { err, cause }); - } - } - - for (i, (_, path)) in paths.iter().enumerate() { - let trg_path = tempdir.path().join(format!("trg_{}", i)); - - if trg_path.exists() - && let Err(err) = move_fs_item(trg_path, path) - { - return Err(TryMoveError::CatastrophicIO { err, cause }); - } - } - - for (i, (path, _)) in paths.iter().enumerate() { - let src_path = tempdir.path().join(format!("src_{}", i)); - - if src_path.exists() - && let Err(err) = move_fs_item(src_path, path) - { - return Err(TryMoveError::CatastrophicIO { err, cause }); - } - } - - return Err(TryMoveError::RecoveredIO(cause)); - } - - Ok(()) -} - impl 
LocalDirectoryEnvironment { pub fn root_path(&self) -> &Utf8Path { &self.environment_path } + pub fn metadata_path(&self) -> Utf8PathBuf { + self.environment_path.join(METADATA_PATH) + } + pub fn entries_path(&self) -> Utf8PathBuf { self.environment_path.join(ENTRIES_PATH) } @@ -447,47 +205,6 @@ impl From for LocalWriteError { } } -fn add_line_temp>( - reader: R, - line: S, -) -> Result { - let mut temp_file = NamedUtf8TempFile::new().map_err(FsIoError::CreateTempFile)?; - - let mut line_added = false; - for this_line in BufReader::new(reader).lines() { - let this_line = this_line.map_err(|e| FsIoError::ReadFile(temp_file.to_path_buf(), e))?; - - if !line_added && line.as_ref() < this_line.as_str() { - writeln!(temp_file, "{}", line.as_ref()) - .map_err(|e| FsIoError::WriteFile(temp_file.path().into(), e))?; - line_added = true; - } - - writeln!(temp_file, "{}", this_line) - .map_err(|e| FsIoError::WriteFile(temp_file.path().into(), e))?; - - if line.as_ref() == this_line { - line_added = true; - } - } - - if !line_added { - writeln!(temp_file, "{}", line.as_ref()) - .map_err(|e| FsIoError::WriteFile(temp_file.path().into(), e))?; - } - - Ok(temp_file) -} - -fn singleton_line_temp>(line: S) -> Result { - let mut temp_file = NamedUtf8TempFile::new().map_err(FsIoError::CreateTempFile)?; - - writeln!(temp_file, "{}", line.as_ref()) - .map_err(|e| FsIoError::WriteFile(temp_file.path().into(), e))?; - - Ok(temp_file) -} - impl WriteEnvironment for LocalDirectoryEnvironment { type WriteError = LocalWriteError; diff --git a/core/src/env/local_directory/utils.rs b/core/src/env/local_directory/utils.rs new file mode 100644 index 00000000..e984a9cb --- /dev/null +++ b/core/src/env/local_directory/utils.rs @@ -0,0 +1,309 @@ +// SPDX-FileCopyrightText: © 2025 Sysand contributors +// SPDX-License-Identifier: MIT OR Apache-2.0 + +use std::{ + fs, + io::{self, BufRead, BufReader, Read, Write}, +}; + +use camino::{Utf8Path, Utf8PathBuf}; +use camino_tempfile::NamedUtf8TempFile; 
+use sha2::Sha256; +use thiserror::Error; + +use crate::{ + env::{local_directory::LocalWriteError, segment_uri_generic}, + project::utils::{FsIoError, ToPathBuf, wrapfs}, +}; + +/// Get a relative path corresponding to the given `uri` +pub fn path_encode_uri>(uri: S) -> Utf8PathBuf { + let mut result = Utf8PathBuf::new(); + for segment in segment_uri_generic::(uri) { + result.push(segment); + } + + result +} + +pub fn remove_dir_if_empty>(path: P) -> Result<(), FsIoError> { + match fs::remove_dir(path.as_ref()) { + Err(err) if err.kind() == io::ErrorKind::DirectoryNotEmpty => Ok(()), + r => r.map_err(|e| FsIoError::RmDir(path.to_path_buf(), e)), + } +} + +pub fn remove_empty_dirs>(path: P) -> Result<(), FsIoError> { + let mut dirs: Vec<_> = walkdir::WalkDir::new(path.as_ref()) + .into_iter() + .filter_map(|e| e.ok()) + .filter_map(|e| { + e.file_type() + .is_dir() + .then(|| Utf8PathBuf::from_path_buf(e.into_path()).ok()) + .flatten() + }) + .collect(); + + dirs.sort_by(|a, b| b.cmp(a)); + + for dir in dirs { + remove_dir_if_empty(&dir)?; + } + + Ok(()) +} + +#[derive(Error, Debug)] +pub enum TryMoveError { + #[error("recovered from failure: {0}")] + RecoveredIO(Box), + #[error( + "failed and may have left the directory in inconsistent state:\n{err}\nwhich was caused by:\n{cause}" + )] + CatastrophicIO { + err: Box, + cause: Box, + }, +} + +pub fn try_remove_files, I: Iterator>( + paths: I, +) -> Result<(), TryMoveError> { + let tempdir = camino_tempfile::tempdir() + .map_err(|e| TryMoveError::RecoveredIO(FsIoError::CreateTempFile(e).into()))?; + let mut moved: Vec = vec![]; + + for (i, path) in paths.enumerate() { + match move_fs_item(&path, tempdir.path().join(i.to_string())) { + Ok(_) => { + moved.push(path.to_path_buf()); + } + Err(cause) => { + // NOTE: This dance is to bypass the fact that std::io::error is not cloneable... 
+ let mut catastrophic_error = None; + for (j, recover) in moved.iter().enumerate() { + if let Err(err) = move_fs_item(tempdir.path().join(j.to_string()), recover) { + catastrophic_error = Some(err); + break; + } + } + + if let Some(err) = catastrophic_error { + return Err(TryMoveError::CatastrophicIO { err, cause }); + } else { + return Err(TryMoveError::RecoveredIO(cause)); + } + } + } + } + + Ok(()) +} + +// Recursively copy a directory from `src` to `dst`. +// Assumes that all parents of `dst` exist. +fn copy_dir_recursive, Q: AsRef>( + src: P, + dst: Q, +) -> Result<(), Box> { + wrapfs::create_dir(&dst)?; + + for entry_result in wrapfs::read_dir(&src)? { + let entry = entry_result.map_err(|e| FsIoError::ReadDir(src.to_path_buf(), e))?; + let file_type = entry + .file_type() + .map_err(|e| FsIoError::ReadDir(src.to_path_buf(), e))?; + let src_path = entry.path(); + let dst_path = dst.as_ref().join(entry.file_name()); + + if file_type.is_dir() { + copy_dir_recursive(src_path, dst_path)?; + } else { + wrapfs::copy(src_path, dst_path)?; + } + } + + Ok(()) +} + +// Rename/move a file or directory from `src` to `dst`. 
+fn move_fs_item, Q: AsRef>( + src: P, + dst: Q, +) -> Result<(), Box> { + match fs::rename(src.as_ref(), dst.as_ref()) { + Ok(_) => Ok(()), + Err(e) if e.kind() == io::ErrorKind::CrossesDevices => { + let metadata = wrapfs::metadata(&src)?; + if metadata.is_dir() { + copy_dir_recursive(&src, &dst)?; + wrapfs::remove_dir_all(&src)?; + } else { + wrapfs::copy(&src, &dst)?; + wrapfs::remove_file(&src)?; + } + Ok(()) + } + Err(e) => Err(FsIoError::Move(src.to_path_buf(), dst.to_path_buf(), e))?, + } +} + +pub fn try_move_files(paths: &Vec<(&Utf8Path, &Utf8Path)>) -> Result<(), TryMoveError> { + let tempdir = camino_tempfile::tempdir() + .map_err(|e| TryMoveError::RecoveredIO(FsIoError::CreateTempFile(e).into()))?; + + let mut last_err = None; + + // move source files out of the way + for (i, (path, _)) in paths.iter().enumerate() { + let src_path = tempdir.path().join(format!("src_{}", i)); + if let Err(e) = move_fs_item(path, src_path) { + last_err = Some(e); + break; + } + } + + // Recover moved files in case of failure + if let Some(cause) = last_err { + for (i, (path, _)) in paths.iter().enumerate() { + let src_path = tempdir.path().join(format!("src_{}", i)); + + if src_path.exists() + && let Err(err) = move_fs_item(src_path, path) + { + return Err(TryMoveError::CatastrophicIO { err, cause }); + } + } + + return Err(TryMoveError::RecoveredIO(cause)); + } + + let mut last_err = None; + + // Move target files out of the way + for (i, (_, path)) in paths.iter().enumerate() { + if path.exists() { + let trg_path = tempdir.path().join(format!("trg_{}", i)); + if let Err(e) = move_fs_item(path, trg_path) { + last_err = Some(e); + break; + } + } + } + + // Recover moved files in case of failure + if let Some(cause) = last_err { + for (i, (_, path)) in paths.iter().enumerate() { + let trg_path = tempdir.path().join(format!("trg_{}", i)); + + if trg_path.exists() + && let Err(err) = move_fs_item(trg_path, path) + { + return Err(TryMoveError::CatastrophicIO { err, cause }); 
+ } + } + + for (i, (path, _)) in paths.iter().enumerate() { + let src_path = tempdir.path().join(format!("src_{}", i)); + + if src_path.exists() + && let Err(err) = move_fs_item(src_path, path) + { + return Err(TryMoveError::CatastrophicIO { err, cause }); + } + } + + return Err(TryMoveError::RecoveredIO(cause)); + } + + let mut last_err = None; + + // Try moving files to destination + for (i, (_, target)) in paths.iter().enumerate() { + let src_path = tempdir.path().join(format!("src_{}", i)); + + if let Err(e) = move_fs_item(src_path, target) { + last_err = Some(e); + break; + } + } + + // Recover moved files in case of failure + if let Some(cause) = last_err { + for (i, (_, path)) in paths.iter().enumerate() { + let src_path = tempdir.path().join(format!("src_{}", i)); + + if path.exists() + && let Err(err) = move_fs_item(path, src_path) + { + return Err(TryMoveError::CatastrophicIO { err, cause }); + } + } + + for (i, (_, path)) in paths.iter().enumerate() { + let trg_path = tempdir.path().join(format!("trg_{}", i)); + + if trg_path.exists() + && let Err(err) = move_fs_item(trg_path, path) + { + return Err(TryMoveError::CatastrophicIO { err, cause }); + } + } + + for (i, (path, _)) in paths.iter().enumerate() { + let src_path = tempdir.path().join(format!("src_{}", i)); + + if src_path.exists() + && let Err(err) = move_fs_item(src_path, path) + { + return Err(TryMoveError::CatastrophicIO { err, cause }); + } + } + + return Err(TryMoveError::RecoveredIO(cause)); + } + + Ok(()) +} + +pub fn add_line_temp>( + reader: R, + line: S, +) -> Result { + let mut temp_file = NamedUtf8TempFile::new().map_err(FsIoError::CreateTempFile)?; + + let mut line_added = false; + for this_line in BufReader::new(reader).lines() { + let this_line = this_line.map_err(|e| FsIoError::ReadFile(temp_file.to_path_buf(), e))?; + + if !line_added && line.as_ref() < this_line.as_str() { + writeln!(temp_file, "{}", line.as_ref()) + .map_err(|e| FsIoError::WriteFile(temp_file.path().into(), 
e))?; + line_added = true; + } + + writeln!(temp_file, "{}", this_line) + .map_err(|e| FsIoError::WriteFile(temp_file.path().into(), e))?; + + if line.as_ref() == this_line { + line_added = true; + } + } + + if !line_added { + writeln!(temp_file, "{}", line.as_ref()) + .map_err(|e| FsIoError::WriteFile(temp_file.path().into(), e))?; + } + + Ok(temp_file) +} + +pub fn singleton_line_temp>(line: S) -> Result { + let mut temp_file = NamedUtf8TempFile::new().map_err(FsIoError::CreateTempFile)?; + + writeln!(temp_file, "{}", line.as_ref()) + .map_err(|e| FsIoError::WriteFile(temp_file.path().into(), e))?; + + Ok(temp_file) +} diff --git a/core/src/lock.rs b/core/src/lock.rs index 3bd95a49..405b14e3 100644 --- a/core/src/lock.rs +++ b/core/src/lock.rs @@ -18,7 +18,13 @@ use toml_edit::{ }; use typed_path::Utf8UnixPathBuf; -use crate::{env::ReadEnvironment, project::ProjectRead}; +use crate::{ + env::ReadEnvironment, + project::{ + ProjectRead, + utils::{deserialize_unix_path, serialize_unix_path}, + }, +}; pub const LOCKFILE_PREFIX: &str = "# This file is automatically generated by Sysand and is not intended to be edited manually.\n\n"; pub const CURRENT_LOCK_VERSION: &str = "0.3"; @@ -182,18 +188,33 @@ pub enum ValidationError { }, } +pub type ProjectResolution = ( + Project, + Option<::InterchangeProjectRead>, +); + impl Lock { pub fn resolve_projects( &self, env: &Env, - ) -> Result< - Vec<::InterchangeProjectRead>, - ResolutionError, - > { + ) -> Result>, ResolutionError> { let mut missing = vec![]; let mut found = vec![]; for project in &self.projects { + // Projects without sources (default for standard libraries) and + // projects with editable sources won't be installed in environment. + match project.sources.as_slice() { + [] => { + continue; + } + [Source::Editable { editable: _ }, ..] 
=> { + found.push((project.clone(), None)); + continue; + } + _ => {} + } + let checksum = &project.checksum; let mut resolved_project = None; @@ -212,8 +233,8 @@ impl Lock { } } - if let Some(success) = resolved_project { - found.push(success); + if resolved_project.is_some() { + found.push((project.clone(), resolved_project)); } else { missing.push(project.clone()); } @@ -482,21 +503,21 @@ pub enum Source { // Path must be a Unix path relative to workspace root Editable { #[serde( - deserialize_with = "parse_unix_path", + deserialize_with = "deserialize_unix_path", serialize_with = "serialize_unix_path" )] editable: Utf8UnixPathBuf, }, LocalSrc { #[serde( - deserialize_with = "parse_unix_path", + deserialize_with = "deserialize_unix_path", serialize_with = "serialize_unix_path" )] src_path: Utf8UnixPathBuf, }, LocalKpar { #[serde( - deserialize_with = "parse_unix_path", + deserialize_with = "deserialize_unix_path", serialize_with = "serialize_unix_path" )] kpar_path: Utf8UnixPathBuf, @@ -519,22 +540,6 @@ pub enum Source { }, } -fn serialize_unix_path(x: &Utf8UnixPathBuf, s: S) -> Result -where - S: serde::Serializer, -{ - s.serialize_str(x.as_str()) -} - -fn parse_unix_path<'de, D>(deserializer: D) -> Result -where - D: serde::Deserializer<'de>, -{ - let s = String::deserialize(deserializer)?; - // TODO: check that it is actually what we want - Ok(Utf8UnixPathBuf::from(s)) -} - impl Source { pub fn to_toml(&self) -> InlineTable { let mut table = InlineTable::new(); diff --git a/core/src/project/local_src.rs b/core/src/project/local_src.rs index b7a89fa7..313933a8 100644 --- a/core/src/project/local_src.rs +++ b/core/src/project/local_src.rs @@ -20,7 +20,7 @@ use crate::{ model::{InterchangeProjectInfoRaw, InterchangeProjectMetadataRaw}, project::{ ProjectMut, ProjectRead, - utils::{RelativizePathError, ToPathBuf, relativize_path, wrapfs}, + utils::{RelativizePathError, ToPathBuf, ToUnixPathBuf, relativize_path, wrapfs}, }, }; @@ -119,17 +119,7 @@ impl 
LocalSrcProject { let path = relativize_path_in(&path, project_path) .ok_or_else(|| UnixPathError::PathOutsideProject(path.to_path_buf()))?; - let mut unix_path = Utf8UnixPathBuf::new(); - for component in path.components() { - unix_path.push( - component - .as_os_str() - .to_str() - .ok_or_else(|| UnixPathError::Conversion(path.to_owned()))?, - ); - } - - Ok(unix_path) + Ok(path.to_unix_path_buf()) } pub fn get_source_path>( @@ -358,8 +348,6 @@ pub enum UnixPathError { PathOutsideProject(Utf8PathBuf), #[error("failed to canonicalize\n `{0}`:\n {1}")] Canonicalize(Utf8PathBuf, std::io::Error), - #[error("path `{0}` is not valid Unicode")] - Conversion(Utf8PathBuf), } #[derive(Error, Debug)] diff --git a/core/src/project/utils.rs b/core/src/project/utils.rs index 5ea010cd..b74bc0f8 100644 --- a/core/src/project/utils.rs +++ b/core/src/project/utils.rs @@ -1,13 +1,15 @@ // SPDX-FileCopyrightText: © 2025 Sysand contributors // SPDX-License-Identifier: MIT OR Apache-2.0 +use std::io::{self, Read}; + use camino::{Utf8Component, Utf8Path, Utf8PathBuf}; +use serde::Deserialize; use thiserror::Error; +use typed_path::Utf8UnixPathBuf; #[cfg(feature = "filesystem")] use zip::{self, result::ZipError}; -use std::io::{self, Read}; - /// A file that is guaranteed to exist as long as the lifetime. /// Intended to be used with temporary files that are automatically /// deleted; in this case, the lifetime `'a` is the lifetime of the @@ -45,6 +47,38 @@ where } } +pub trait ToUnixPathBuf { + fn to_unix_path_buf(&self) -> Utf8UnixPathBuf; +} + +impl

ToUnixPathBuf for P +where + P: AsRef, +{ + fn to_unix_path_buf(&self) -> Utf8UnixPathBuf { + let mut unix_path = Utf8UnixPathBuf::new(); + for component in self.as_ref().components() { + unix_path.push(component.as_str()); + } + unix_path + } +} + +pub fn serialize_unix_path(path: &Utf8UnixPathBuf, s: S) -> Result +where + S: serde::Serializer, +{ + s.serialize_str(path.as_str()) +} + +pub fn deserialize_unix_path<'de, D>(deserializer: D) -> Result +where + D: serde::Deserializer<'de>, +{ + let string = String::deserialize(deserializer)?; + Ok(Utf8UnixPathBuf::from(string)) +} + /// The errors arising from filesystem I/O. /// The variants defined here include relevant context where possible. #[derive(Error, Debug)] @@ -186,9 +220,15 @@ pub mod wrapfs { .map_err(|e| Box::new(FsIoError::WriteFile(path.as_ref().into(), e))) } + /// Canonicalizes UTF-8 path. If canonicalized path is not valid + /// UTF-8, returns `io::Error` of `InvalidData` kind. + /// On Windows this returns most compatible form of a path instead of UNC. 
pub fn canonicalize>(path: P) -> Result> { - path.as_ref() - .canonicalize_utf8() + dunce::canonicalize(path.as_ref()) + .and_then(|path| { + Utf8PathBuf::from_path_buf(path) + .map_err(|_| io::Error::from(io::ErrorKind::InvalidData)) + }) .map_err(|e| Box::new(FsIoError::Canonicalize(path.as_ref().into(), e))) } diff --git a/sysand/src/commands/add.rs b/sysand/src/commands/add.rs index da4d3eea..9194ca2b 100644 --- a/sysand/src/commands/add.rs +++ b/sysand/src/commands/add.rs @@ -280,6 +280,7 @@ fn resolve_deps, Policy: HTTPAuthentication>( &provided_iris, runtime, auth_policy, + ctx, )?; } Ok(()) diff --git a/sysand/src/commands/clone.rs b/sysand/src/commands/clone.rs index 9db2229a..cfce6f2a 100644 --- a/sysand/src/commands/clone.rs +++ b/sysand/src/commands/clone.rs @@ -91,6 +91,15 @@ pub fn command_clone( } }; + // Update project context with the new cloned project + // TODO: Consider under which circumstances (if any) + // the workspace should carry over. + let ctx = ProjectContext { + current_workspace: None, + current_project: Some(local_project.clone()), + current_directory: ctx.current_directory, + }; + if !no_deps { let provided_iris = if !include_std { crate::known_std_libs() @@ -147,6 +156,7 @@ pub fn command_clone( &provided_iris, runtime, auth_policy, + &ctx, )?; } diff --git a/sysand/src/commands/env.rs b/sysand/src/commands/env.rs index 4860fa8d..2211066e 100644 --- a/sysand/src/commands/env.rs +++ b/sysand/src/commands/env.rs @@ -13,7 +13,10 @@ use sysand_core::{ commands::{env::do_env_local_dir, lock::LockOutcome}, config::Config, context::ProjectContext, - env::local_directory::LocalDirectoryEnvironment, + env::local_directory::{ + LocalDirectoryEnvironment, + metadata::{EnvMetadata, load_env_metadata}, + }, lock::Lock, model::InterchangeProjectUsage, project::{ @@ -36,7 +39,9 @@ use crate::{ }; pub fn command_env>(path: P) -> Result { - Ok(do_env_local_dir(path)?) 
+ let env = do_env_local_dir(path)?; + wrapfs::write(env.metadata_path(), EnvMetadata::default().to_string())?; + Ok(env) } // TODO: Factor out provided_iris logic @@ -120,7 +125,7 @@ pub fn command_env_install( // TODO: don't use different root project resolution // mechanisms depending on no_deps if no_deps { - let (_version, storage) = + let (version, storage) = crate::commands::clone::get_project_version(&iri, version, &resolver)?; sysand_core::commands::env::do_env_install_project( &iri, @@ -129,6 +134,7 @@ pub fn command_env_install( allow_overwrite, allow_multiple, )?; + add_single_env_project(iri, version.to_string(), env)?; } else { let usages = vec![InterchangeProjectUsage { resource: fluent_uri::Iri::from_str(iri.as_ref())?, @@ -164,6 +170,7 @@ pub fn command_env_install( &provided_iris, runtime, auth_policy, + &ctx, )?; } @@ -230,14 +237,14 @@ pub fn command_env_install_path( Some(config.index_urls(index, vec![DEFAULT_INDEX_URL.to_string()], default_index)?) }; - if let Some(version) = version { - let project_version = project - .get_info()? - .ok_or_else(|| anyhow!("missing project info"))? - .version; - if version != project_version { - bail!("given version {version} does not match project version {project_version}") - } + let project_version = project + .get_info()? + .ok_or_else(|| anyhow!("missing project info"))? + .version; + if let Some(version) = version + && version != project_version + { + bail!("given version {version} does not match project version {project_version}") } // TODO: Fix this hack. 
Currently installing manually then turning project into Editable to @@ -300,18 +307,46 @@ pub fn command_env_install_path( &provided_iris, runtime, auth_policy, + &ctx, )?; + } else { + add_single_env_project(iri, project_version, env)?; } Ok(()) } -pub fn command_env_uninstall, Q: AsRef>( +fn add_single_env_project, V: AsRef>( + iri: S, + version: V, + env: LocalDirectoryEnvironment, +) -> Result<()> { + let metadata_path = env.metadata_path(); + let mut env_metadata = load_env_metadata(&metadata_path)?; + let project_path = env.project_path(&iri, version); + let project = LocalSrcProject { + nominal_path: Some(project_path.strip_prefix(env.root_path())?.to_owned()), + project_path, + }; + env_metadata.add_local_project(vec![iri.as_ref().to_owned()], project, false, false)?; + wrapfs::write(metadata_path, env_metadata.to_string())?; + + Ok(()) +} + +pub fn command_env_uninstall, V: AsRef>( iri: S, - version: Option, + version: Option, env: LocalDirectoryEnvironment, ) -> Result<()> { - sysand_core::commands::env::do_env_uninstall(iri, version, env)?; + let metadata_path = env.metadata_path(); + + sysand_core::commands::env::do_env_uninstall(&iri, version.as_ref(), env)?; + + let mut env_metadata = load_env_metadata(&metadata_path)?; + env_metadata.remove_project(iri, version); + wrapfs::write(metadata_path, env_metadata.to_string())?; + Ok(()) } diff --git a/sysand/src/commands/lock.rs b/sysand/src/commands/lock.rs index 718f448e..a64f9dea 100644 --- a/sysand/src/commands/lock.rs +++ b/sysand/src/commands/lock.rs @@ -37,7 +37,7 @@ pub fn command_lock, Policy: HTTPAuthentication, R: AsRef, auth_policy: Arc, - ctx: ProjectContext, + ctx: &ProjectContext, ) -> Result { assert!(path.as_ref().is_relative(), "{}", path.as_ref()); @@ -75,7 +75,7 @@ pub fn command_lock, Policy: HTTPAuthentication, R: AsRef, Policy: HTTPAuthentication>( lock: &Lock, project_root: P, @@ -30,6 +33,7 @@ pub fn command_sync, Policy: HTTPAuthentication>( provided_iris: &HashMap>, runtime: Arc, 
auth_policy: Arc, + ctx: &ProjectContext, ) -> Result<()> { sysand_core::commands::sync::do_sync( lock, @@ -65,5 +69,20 @@ pub fn command_sync, Policy: HTTPAuthentication>( }), provided_iris, )?; + + // TODO: Integrate the updating of metadata into `LocalDirectoryEnvironment` itself. + // This will likely require updating the `WriteEnvironment` trait to support + // multiple identifiers per project. + let lock_metadata = lock.to_env_metadata(env, ctx)?; + let env_metadata = if wrapfs::is_file(env.metadata_path())? { + let mut env_metadata = load_env_metadata(env.metadata_path())?; + env_metadata.merge(lock_metadata); + env_metadata + } else { + lock_metadata + }; + + wrapfs::write(env.metadata_path(), env_metadata.to_string())?; + Ok(()) } diff --git a/sysand/src/lib.rs b/sysand/src/lib.rs index 972ee4d5..2e47df5e 100644 --- a/sysand/src/lib.rs +++ b/sysand/src/lib.rs @@ -110,7 +110,7 @@ where fn set_panic_hook() { // TODO: use `panic::update_hook()` once it's stable - // also set bactrace style once it's stable, but take + // also set backtrace style once it's stable, but take // into account the current level let default_hook = panic::take_hook(); // panic::set_backtrace_style(panic::BacktraceStyle::Short); @@ -147,6 +147,7 @@ pub fn run_cli(args: cli::Args) -> Result<()> { let ctx = ProjectContext { current_workspace: discover_workspace(&cwd)?, current_project: discover_project(&cwd)?, + current_directory: cwd, }; let project_root = ctx .current_project @@ -154,7 +155,7 @@ pub fn run_cli(args: cli::Args) -> Result<()> { .map(|p| p.root_path().to_owned()); let current_environment = { - let dir = project_root.as_ref().unwrap_or(&cwd); + let dir = project_root.as_ref().unwrap_or(&ctx.current_directory); crate::get_env(dir)? 
}; @@ -182,7 +183,7 @@ pub fn run_cli(args: cli::Args) -> Result<()> { .unwrap(), ); - let _runtime_keepalive = runtime.clone(); + let _runtime_keep_alive = runtime.clone(); // FIXME: This is a temporary implementation to provide credentials until // https://github.com/sensmetry/sysand/pull/157 @@ -282,7 +283,7 @@ pub fn run_cli(args: cli::Args) -> Result<()> { Command::Env { command } => match command { None => { let env_dir = { - let mut p = project_root.unwrap_or(cwd); + let mut p = project_root.unwrap_or(ctx.current_directory); p.push(DEFAULT_ENV_NAME); p }; @@ -369,7 +370,7 @@ pub fn run_cli(args: cli::Args) -> Result<()> { client, runtime, basic_auth_policy, - ctx, + &ctx, ) .map(|_| ()) } else { @@ -381,7 +382,12 @@ pub fn run_cli(args: cli::Args) -> Result<()> { Command::Sync { resolution_opts } => { let mut local_environment = match current_environment { Some(env) => env, - None => command_env(project_root.as_ref().unwrap_or(&cwd).join(DEFAULT_ENV_NAME))?, + None => command_env( + project_root + .as_ref() + .unwrap_or(&ctx.current_directory) + .join(DEFAULT_ENV_NAME), + )?, }; let provided_iris = if !resolution_opts.include_std { @@ -391,7 +397,7 @@ pub fn run_cli(args: cli::Args) -> Result<()> { HashMap::default() }; - let project_root = project_root.unwrap_or(cwd); + let project_root = project_root.unwrap_or(ctx.current_directory.clone()); let lockfile = project_root.join(DEFAULT_LOCKFILE_NAME); let lock = match fs::read_to_string(&lockfile) { Ok(l) => match Lock::from_str(&l) { @@ -409,7 +415,7 @@ pub fn run_cli(args: cli::Args) -> Result<()> { client.clone(), runtime.clone(), basic_auth_policy.clone(), - ctx, + &ctx, )? 
} else { bail!("failed to read lockfile `{lockfile}`: {e}") @@ -424,9 +430,10 @@ pub fn run_cli(args: cli::Args) -> Result<()> { &provided_iris, runtime, basic_auth_policy, + &ctx, ) } - Command::PrintRoot => command_print_root(cwd), + Command::PrintRoot => command_print_root(ctx.current_directory), Command::Info { path, iri, @@ -470,7 +477,7 @@ pub fn run_cli(args: cli::Args) -> Result<()> { HashSet::default() }; - let project_root = project_root.unwrap_or(cwd); + let project_root = project_root.unwrap_or(ctx.current_directory); let overrides = get_overrides( &config, &project_root, diff --git a/sysand/tests/cli_env.rs b/sysand/tests/cli_env.rs index c8194e58..0c8cbad8 100644 --- a/sysand/tests/cli_env.rs +++ b/sysand/tests/cli_env.rs @@ -7,7 +7,9 @@ use assert_cmd::prelude::*; use camino::Utf8Path; use mockito::Server; use predicates::prelude::*; -use sysand_core::env::local_directory::DEFAULT_ENV_NAME; +use sysand_core::env::local_directory::{ + DEFAULT_ENV_NAME, ENTRIES_PATH, METADATA_PATH, VERSIONS_PATH, +}; // pub due to https://github.com/rust-lang/rust/issues/46379 mod common; @@ -31,13 +33,12 @@ fn env_init_empty_env() -> Result<(), Box> { if path.is_dir() { assert_eq!(path.strip_prefix(&cwd)?, env_path); } else { - // if path.is_file() - assert_eq!(path.strip_prefix(&cwd)?, env_path.join("entries.txt")); + assert_eq!(path.strip_prefix(&cwd)?, env_path.join(ENTRIES_PATH)); } } assert_eq!( - std::fs::File::open(cwd.join("sysand_env/entries.txt"))? + std::fs::File::open(cwd.join(DEFAULT_ENV_NAME).join(ENTRIES_PATH))? .metadata()? 
.len(), 0 @@ -75,7 +76,7 @@ fn env_install_from_local_dir() -> Result<(), Box> { .stderr(predicate::str::contains("`urn:kpar:test` 0.0.1")); assert_eq!( - std::fs::read_to_string(cwd.join(env_path).join("entries.txt"))?, + std::fs::read_to_string(cwd.join(env_path).join(ENTRIES_PATH))?, "urn:kpar:test\n" ); @@ -84,7 +85,7 @@ fn env_install_from_local_dir() -> Result<(), Box> { assert!(cwd.join(env_path).join(test_hash).is_dir()); assert_eq!( - std::fs::read_to_string(cwd.join(env_path).join(test_hash).join("versions.txt"))?, + std::fs::read_to_string(cwd.join(env_path).join(test_hash).join(VERSIONS_PATH))?, "0.0.1\n" ); @@ -125,11 +126,21 @@ fn env_install_from_local_dir() -> Result<(), Box> { let entries = std::fs::read_dir(cwd.join(env_path))?.collect::, _>>()?; - assert_eq!(entries.len(), 1); + let mut entry_names: Vec<_> = entries + .iter() + .map(|e| e.file_name().to_string_lossy().to_string()) + .collect(); - assert_eq!(entries[0].file_name(), "entries.txt"); + let entries_path_index = entry_names.iter().position(|e| e == ENTRIES_PATH).unwrap(); - assert_eq!(std::fs::read_to_string(entries[0].path())?, ""); + entry_names.sort(); + + assert_eq!(entry_names, [ENTRIES_PATH, METADATA_PATH]); + + assert_eq!( + std::fs::read_to_string(entries[entries_path_index].path())?, + "" + ); Ok(()) } @@ -190,7 +201,7 @@ fn env_install_from_http_kpar() -> Result<(), Box> { out.assert().success(); assert_eq!( - std::fs::read_to_string(cwd.join(env_path).join("entries.txt"))?, + std::fs::read_to_string(cwd.join(env_path).join(ENTRIES_PATH))?, format!("{}\n", &project_url) ); diff --git a/sysand/tests/cli_sync.rs b/sysand/tests/cli_sync.rs index 6396b094..9e6cac14 100644 --- a/sysand/tests/cli_sync.rs +++ b/sysand/tests/cli_sync.rs @@ -6,12 +6,72 @@ use indexmap::IndexMap; use mockito::Matcher; use predicates::prelude::*; use reqwest::header; -use sysand_core::commands::lock::DEFAULT_LOCKFILE_NAME; +use sysand_core::{ + commands::lock::DEFAULT_LOCKFILE_NAME, + 
env::local_directory::{DEFAULT_ENV_NAME, ENTRIES_PATH, METADATA_PATH}, +}; // pub due to https://github.com/rust-lang/rust/issues/46379 mod common; pub use common::*; +#[test] +fn sync_to_current() -> Result<(), Box> { + let (_temp_dir, cwd, out) = run_sysand( + ["init", "--version", "1.2.3", "--name", "sync_to_current"], + None, + )?; + + std::fs::write(cwd.join("test.sysml"), b"package P;\n")?; + + out.assert().success(); + + let out = run_sysand_in(&cwd, ["include", "test.sysml"], None)?; + + out.assert().success(); + + let out = run_sysand_in(&cwd, ["sync"], None)?; + + out.assert() + .success() + .stderr(predicate::str::contains("Creating")) + .stderr(predicate::str::contains("Syncing")); + + let env_path = cwd.join(DEFAULT_ENV_NAME); + + let env_metadata = std::fs::read_to_string(env_path.join(METADATA_PATH))?; + + assert_eq!( + env_metadata, + format!( + r#"# This file is automatically generated by Sysand and is not intended to be edited manually. + +version = "0.1" + +[[project]] +publisher = "untitled" +name = "sync_to_current" +version = "1.2.3" +path = "." +editable = true +"# + ) + ); + + let entries: Result, _> = std::fs::read_dir(env_path)?.collect(); + + let mut entry_names: Vec<_> = entries? + .iter() + .map(|e| e.file_name().to_string_lossy().to_string()) + .collect(); + + entry_names.sort(); + + assert_eq!(entry_names, [ENTRIES_PATH, METADATA_PATH]); + + Ok(()) +} + #[test] fn sync_to_local() -> Result<(), Box> { let (_temp_dir, cwd) = new_temp_cwd()?; @@ -62,6 +122,26 @@ sources = [ .stderr(predicate::str::contains("Syncing")) .stderr(predicate::str::contains("Installing")); + let env_metadata = std::fs::read_to_string(cwd.join(DEFAULT_ENV_NAME).join(METADATA_PATH))?; + + assert_eq!( + env_metadata, + format!( + r#"# This file is automatically generated by Sysand and is not intended to be edited manually. 
+ +version = "0.1" + +[[project]] +name = "sync_to_local" +version = "1.2.3" +path = "5ddc0a2e8aaa88ac2bfc71aa0a8d08e020bceac4a90a4b72d8fb7f97ec5bfcc5/1.2.3.kpar" +identifiers = [ + "urn:kpar:sync_to_local", +] +"# + ) + ); + let out = run_sysand_in(&cwd, ["env", "list"], None)?; out.assert() @@ -130,6 +210,26 @@ sources = [ info_mock.assert(); meta_mock.assert(); + let env_metadata = std::fs::read_to_string(cwd.join(DEFAULT_ENV_NAME).join(METADATA_PATH))?; + + assert_eq!( + env_metadata, + format!( + r#"# This file is automatically generated by Sysand and is not intended to be edited manually. + +version = "0.1" + +[[project]] +name = "sync_to_remote" +version = "1.2.3" +path = "2b95cb7c6d6c08695b0e7c4b7e9d836c21de37fb9c72b0cfa26f53fd84a1b459/1.2.3.kpar" +identifiers = [ + "urn:kpar:sync_to_remote", +] +"# + ) + ); + let out = run_sysand_in(&cwd, ["env", "list"], None)?; out.assert()