diff --git a/Cargo.lock b/Cargo.lock index 62b3952f8..0e7017628 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1571,6 +1571,20 @@ dependencies = [ "parking_lot_core", ] +[[package]] +name = "dashmap" +version = "6.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5041cc499144891f3790297212f32a74fb938e5136a14943f338ef9e0ae276cf" +dependencies = [ + "cfg-if", + "crossbeam-utils", + "hashbrown 0.14.5", + "lock_api", + "once_cell", + "parking_lot_core", +] + [[package]] name = "data-encoding" version = "2.6.0" @@ -2767,7 +2781,7 @@ version = "14.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d3b0e276cd08eb2a22e9f286a4f13a222a01be2defafa8621367515375644b99" dependencies = [ - "dashmap", + "dashmap 5.5.3", "gix-fs", "libc", "once_cell", @@ -3882,6 +3896,15 @@ version = "1.20.2" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1261fe7e33c73b354eab43b1273a57c8f967d0391e80353e51f764ac02cf6775" +[[package]] +name = "once_map" +version = "0.0.1" +dependencies = [ + "dashmap 6.1.0", + "futures", + "tokio", +] + [[package]] name = "oorandom" version = "11.1.3" @@ -4198,6 +4221,17 @@ dependencies = [ "yansi", ] +[[package]] +name = "priority-queue" +version = "2.1.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "714c75db297bc88a63783ffc6ab9f830698a6705aa0201416931759ef4c8183d" +dependencies = [ + "autocfg", + "equivalent", + "indexmap 2.2.6", +] + [[package]] name = "proc-macro-crate" version = "3.1.0" @@ -4226,6 +4260,18 @@ dependencies = [ "human_format", ] +[[package]] +name = "pubgrub" +version = "0.2.1" +source = "git+https://github.com/maciektr/pubgrub.git?branch=dev#6e12ee740000e367984d8b965c8d9d574e6bee7d" +dependencies = [ + "indexmap 2.2.6", + "log", + "priority-queue", + "rustc-hash", + "thiserror", +] + [[package]] name = "quote" version = "1.0.37" @@ -4657,9 +4703,11 @@ dependencies = [ "libloading", "ntest", "once_cell", + "once_map", "pathdiff", "petgraph", "predicates", + "pubgrub", "ra_ap_toolchain", "redb", "reqwest", @@ -4669,6 +4717,7 @@ dependencies = [ "scarb-test-support", "scarb-ui", "semver", + "semver-pubgrub", "serde", "serde-untagged", "serde-value", @@ -4685,6 +4734,7 @@ dependencies = [ "test-for-each-example", "thiserror", "tokio", + "tokio-stream", "toml", "toml_edit 0.22.16", "tracing", @@ -5008,6 +5058,15 @@ dependencies = [ "serde", ] +[[package]] +name = "semver-pubgrub" +version = "0.1.0" +source = "git+https://github.com/maciektr/semver-pubgrub.git#a12311e3f5b0aa29d78b79001ac564b49de8212b" +dependencies = [ + "pubgrub", + "semver", +] + [[package]] name = "serde" version = "1.0.210" @@ -5709,6 +5768,17 @@ dependencies = [ "tokio", ] +[[package]] +name = "tokio-stream" +version = "0.1.16" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4f4e6ce100d0eb49a2734f8c0812bcd324cf357d21810932c5df6b96ef2b86f1" +dependencies = [ + "futures-core", + "pin-project-lite", + "tokio", +] + [[package]] name = "tokio-util" version = "0.7.11" diff --git a/Cargo.toml b/Cargo.toml index 497a3e5f8..8626216c6 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -11,6 +11,7 @@ members = [ "plugins/cairo-lang-macro-attributes", "plugins/cairo-lang-macro-stable", "utils/create-output-dir", + "utils/once-map", "utils/scarb-build-metadata", "utils/scarb-stable-hash", "utils/scarb-test-support", @@ -65,6 +66,7 @@ clap = { version = "4", features = ["derive", "env", "string"] } console = "0.15" convert_case = "0.6.0" darling = "0.20" +dashmap = "6" 
data-encoding = "2" deno_task_shell = ">=0.13" derive_builder = ">=0.12" @@ -75,7 +77,7 @@ expect-test = "1.5" flate2 = { version = "1.0.34", default-features = false, features = ["zlib"] } fs4 = { version = "0.7", features = ["tokio"] } fs_extra = "1" -futures = { version = "0.3", default-features = false, features = ["std", "async-await"] } +futures = { version = "0.3", default-features = false, features = ["std", "async-await", "executor"] } gix = ">=0.55" gix-path = "0.10" glob = "0.3" @@ -93,10 +95,12 @@ ntest = "0.9" num-bigint = { version = "0.4", features = ["rand"] } num-traits = "0.2" once_cell = "1" +once_map = "0.4" pathdiff = { version = "0.2", features = ["camino"] } petgraph = "0.6" predicates = "3" proc-macro2 = "1" +pubgrub = { git = "https://github.com/maciektr/pubgrub.git", branch = "dev" } quote = "1" ra_ap_toolchain = "0.0.218" rayon = "1.10" @@ -104,6 +108,7 @@ redb = "2.1.4" reqwest = { version = "0.11", features = ["gzip", "brotli", "deflate", "json", "stream", "multipart"], default-features = false } salsa = { package = "rust-analyzer-salsa", version = "0.17.0-pre.6" } semver = { version = "1", features = ["serde"] } +semver-pubgrub = { git = "https://github.com/maciektr/semver-pubgrub.git" } serde = { version = "1", features = ["serde_derive"] } serde-untagged = "0.1" serde-value = "0.7" @@ -123,6 +128,7 @@ test-case = "3" thiserror = "1" time = "0.3" tokio = { version = "1", features = ["macros", "io-util", "process", "rt", "rt-multi-thread", "sync"] } +tokio-stream = "0.1" toml = "0.8" toml_edit = { version = "0.22", features = ["serde"] } tower-http = { version = "0.4", features = ["fs"] } @@ -140,6 +146,9 @@ xxhash-rust = { version = "0.8", features = ["xxh3"] } zip = { version = "0.6", default-features = false, features = ["deflate"] } zstd = "0.13" +[patch.'https://github.com/pubgrub-rs/pubgrub.git'] +pubgrub = { git = 'https://github.com/maciektr/pubgrub.git', branch = 'dev' } + [profile.release] lto = true diff --git a/examples/dependencies/Scarb.lock b/examples/dependencies/Scarb.lock index 90f6c90f9..be818842b 100644 --- a/examples/dependencies/Scarb.lock +++ b/examples/dependencies/Scarb.lock @@ -4,10 +4,10 @@ version = 1 [[package]] name = "alexandria_data_structures" version = "0.1.0" -source = "git+https://github.com/keep-starknet-strange/alexandria.git#3356bf0c5c1a089167d7d3c28d543e195325e596" [[package]] name = "alexandria_math" +checksum = "sha256:b62fc4b9bfbd9310a47d2e595d2c8f468354266be0827aeea9b465d9984908de" version = "0.2.0" source = "git+https://github.com/keep-starknet-strange/alexandria.git#3356bf0c5c1a089167d7d3c28d543e195325e596" dependencies = [ diff --git a/scarb/Cargo.toml b/scarb/Cargo.toml index 5f887e927..6bea69898 100644 --- a/scarb/Cargo.toml +++ b/scarb/Cargo.toml @@ -45,8 +45,8 @@ directories.workspace = true dunce.workspace = true fs4.workspace = true futures.workspace = true -gix.workspace = true gix-path.workspace = true +gix.workspace = true glob.workspace = true ignore.workspace = true include_dir.workspace = true @@ -54,8 +54,10 @@ indoc.workspace = true itertools.workspace = true libloading.workspace = true once_cell.workspace = true +once_map = { path = "../utils/once-map" } pathdiff.workspace = true petgraph.workspace = true +pubgrub.workspace = true ra_ap_toolchain.workspace = true redb.workspace = true reqwest.workspace = true @@ -63,6 +65,7 @@ scarb-build-metadata = { path = "../utils/scarb-build-metadata" } scarb-metadata = { path = "../scarb-metadata", default-features = false, features = ["builder"] } 
 scarb-stable-hash = { path = "../utils/scarb-stable-hash" }
 scarb-ui = { path = "../utils/scarb-ui" }
+semver-pubgrub.workspace = true
 semver.workspace = true
 serde-untagged.workspace = true
 serde-value.workspace = true
@@ -75,6 +78,7 @@ smol_str.workspace = true
 tar.workspace = true
 thiserror.workspace = true
 tokio.workspace = true
+tokio-stream.workspace = true
 toml.workspace = true
 toml_edit.workspace = true
 tracing-subscriber.workspace = true
diff --git a/scarb/src/core/lockfile.rs b/scarb/src/core/lockfile.rs
index 4aa47d9e2..44292b7d7 100644
--- a/scarb/src/core/lockfile.rs
+++ b/scarb/src/core/lockfile.rs
@@ -21,7 +21,7 @@ pub enum LockVersion {
     V1 = 1,
 }
 
-#[derive(Debug, Eq, PartialEq, Default, Serialize, Deserialize)]
+#[derive(Debug, Clone, Eq, PartialEq, Default, Serialize, Deserialize)]
 #[serde(rename_all = "kebab-case")]
 pub struct Lockfile {
     pub version: LockVersion,
diff --git a/scarb/src/core/registry/mod.rs b/scarb/src/core/registry/mod.rs
index c8d47c05e..0d5820adc 100644
--- a/scarb/src/core/registry/mod.rs
+++ b/scarb/src/core/registry/mod.rs
@@ -111,7 +111,7 @@ pub(crate) mod mock {
             let summary = Summary::builder()
                 .package_id(package_id)
                 .dependencies(dependencies)
-                .no_core(package_id.is_core())
+                .no_core(package_id.name == PackageName::CORE)
                 .build();
 
             let manifest = Box::new(
diff --git a/scarb/src/ops/resolve.rs b/scarb/src/ops/resolve.rs
index 6a4f3620a..4ccd54ea6 100644
--- a/scarb/src/ops/resolve.rs
+++ b/scarb/src/ops/resolve.rs
@@ -129,11 +129,19 @@ pub fn resolve_workspace_with_opts(
         read_lockfile(ws)?
     };
 
-    let resolve = resolver::resolve(&members_summaries, &patched, lockfile).await?;
+    let registry: Box<dyn Registry> = Box::new(patched);
+    let resolve = resolver::resolve(
+        &members_summaries,
+        &*registry,
+        lockfile,
+        ws.config().tokio_handle(),
+    )
+    .await?;
 
     write_lockfile(Lockfile::from_resolve(&resolve), ws)?;
 
-    let packages = collect_packages_from_resolve_graph(&resolve, &patched).await?;
+    let packages = collect_packages_from_resolve_graph(&resolve, &*registry).await?;
 
     packages
         .values()
diff --git a/scarb/src/resolver/algorithm/in_memory_index.rs b/scarb/src/resolver/algorithm/in_memory_index.rs
new file mode 100644
index 000000000..e6296a822
--- /dev/null
+++ b/scarb/src/resolver/algorithm/in_memory_index.rs
@@ -0,0 +1,43 @@
+use crate::core::{Package, PackageId, Summary};
+use crate::resolver::algorithm::provider::PubGrubPackage;
+use once_map::OnceMap;
+use std::sync::Arc;
+
+/// In-memory index of package metadata.
+#[derive(Default, Clone)]
+pub struct InMemoryIndex(Arc<SharedInMemoryIndex>);
+
+#[derive(Default)]
+struct SharedInMemoryIndex {
+    /// A map from package to the available versions (summaries) of that package.
+    packages: FxOnceMap<PubGrubPackage, Arc<VersionsResponse>>,
+
+    /// A map from package ID to the metadata for that package.
+    distributions: FxOnceMap<PackageId, Arc<MetadataResponse>>,
+}
+
+pub(crate) type FxOnceMap<K, V> = OnceMap<K, V>;
+
+impl InMemoryIndex {
+    /// Returns a reference to the package metadata map.
+    pub fn packages(&self) -> &FxOnceMap<PubGrubPackage, Arc<VersionsResponse>> {
+        &self.0.packages
+    }
+
+    /// Returns a reference to the distribution metadata map.
+    pub fn distributions(&self) -> &FxOnceMap<PackageId, Arc<MetadataResponse>> {
+        &self.0.distributions
+    }
+}
+
+#[derive(Debug)]
+pub enum VersionsResponse {
+    Found(Vec<Summary>),
+}
+
+pub enum MetadataResponse {
+    Found(Package),
+}
diff --git a/scarb/src/resolver/algorithm/mod.rs b/scarb/src/resolver/algorithm/mod.rs
index 8b1378917..4be651818 100644
--- a/scarb/src/resolver/algorithm/mod.rs
+++ b/scarb/src/resolver/algorithm/mod.rs
@@ -1 +1,272 @@
+use crate::core::lockfile::Lockfile;
+use crate::core::registry::Registry;
+use crate::core::{ManifestDependency, PackageId, PackageName, Resolve, Summary};
+use crate::resolver::algorithm::in_memory_index::{InMemoryIndex, VersionsResponse};
+use crate::resolver::algorithm::provider::{
+    rewrite_dependency_source_id, rewrite_locked_dependency, DependencyProviderError,
+    PubGrubDependencyProvider, PubGrubPackage,
+};
+use crate::resolver::algorithm::solution::build_resolve;
+use anyhow::bail;
+use futures::{FutureExt, StreamExt, TryFutureExt};
+use indoc::indoc;
+use itertools::Itertools;
+use pubgrub::error::PubGrubError;
+use pubgrub::report::{DefaultStringReporter, Reporter};
+use pubgrub::type_aliases::SelectedDependencies;
+use pubgrub::{Incompatibility, State};
+use std::collections::{HashMap, HashSet};
+use std::sync::Arc;
+use std::thread;
+use tokio::runtime::Handle;
+use tokio::sync::{mpsc, oneshot};
+use tokio_stream::wrappers::ReceiverStream;
+
+mod in_memory_index;
+mod provider;
+mod solution;
+
+#[derive(Default)]
+struct ResolverState {
+    index: InMemoryIndex,
+}
+
+impl ResolverState {
+    async fn fetch<'a>(
+        self: Arc<Self>,
+        provider: Arc<RegistryWrapper<'a>>,
+        request_stream: mpsc::Receiver<Request>,
+    ) -> Result<(), DependencyProviderError> {
+        let mut response_stream = ReceiverStream::new(request_stream)
+            .map(|request| self.process_request(request, &*provider).boxed_local())
+            // Allow as many futures as possible to start in the background.
+            // Backpressure is provided by the bounded request channel.
+            .buffer_unordered(usize::MAX);
+
+        while let Some(response) = response_stream.next().await {
+            match response? {
+                Some(Response::Package(package, summaries)) => {
+                    self.index
+                        .packages()
+                        .done(package, Arc::new(VersionsResponse::Found(summaries)));
+                }
+                None => {}
+            }
+        }
+        Ok(())
+    }
+
+    async fn process_request<'a>(
+        &self,
+        request: Request,
+        registry: &RegistryWrapper<'a>,
+    ) -> Result<Option<Response>, DependencyProviderError> {
+        match request {
+            Request::Package(package) => {
+                self.index.packages().register(package.clone());
+                let dependency: ManifestDependency = (&package).into();
+                let summaries = registry.registry.query(&dependency).await?;
+                Ok(Some(Response::Package(package, summaries)))
+            }
+        }
+    }
+}
+
+pub struct RegistryWrapper<'a> {
+    registry: &'a dyn Registry,
+}
+
+#[derive(Debug)]
+pub(crate) enum Request {
+    Package(PubGrubPackage),
+}
+
+pub(crate) enum Response {
+    Package(PubGrubPackage, Vec<Summary>),
+}
+
+pub async fn resolve<'c>(
+    summaries: &[Summary],
+    registry: &dyn Registry,
+    lockfile: Lockfile,
+    _handle: &'c Handle,
+) -> anyhow::Result<Resolve> {
+    let state = Arc::new(ResolverState::default());
+
+    let (request_sink, request_stream): (mpsc::Sender<Request>, mpsc::Receiver<Request>) =
+        mpsc::channel(300);
+
+    let registry_wrapper = Arc::new(RegistryWrapper { registry });
+
+    let requests_fut = state
+        .clone()
+        .fetch(registry_wrapper.clone(), request_stream)
+        .map_err(|err| anyhow::format_err!(err))
+        .fuse();
+
+    for summary in summaries {
+        let package: PubGrubPackage = summary.package_id.into();
+        if state.index.packages().register(package.clone()) {
+            request_sink.send(Request::Package(package)).await?;
+        }
+        for dep in summary.full_dependencies() {
+            let dep = rewrite_dependency_source_id(summary.package_id, dep)?;
+            let locked_package_id = lockfile.packages_matching(dep.clone());
+            let dep = if let Some(locked_package_id) = locked_package_id {
+                rewrite_locked_dependency(dep.clone(), locked_package_id?)
+            } else {
+                dep.clone()
+            };
+
+            let package: PubGrubPackage = (&dep).into();
+            if state.index.packages().register(package.clone()) {
+                request_sink.send(Request::Package(package)).await?;
+            }
+        }
+    }
+
+    let main_package_ids: HashSet<PackageId> =
+        HashSet::from_iter(summaries.iter().map(|sum| sum.package_id));
+
+    let (tx, rx) = oneshot::channel();
+
+    let cloned_lockfile = lockfile.clone();
+    thread::Builder::new()
+        .name("scarb-resolver".into())
+        .spawn(move || {
+            let result = || {
+                let provider = PubGrubDependencyProvider::new(
+                    main_package_ids,
+                    state,
+                    request_sink,
+                    cloned_lockfile,
+                );
+
+                // Init state
+                let main_package_ids = provider
+                    .main_package_ids()
+                    .clone()
+                    .into_iter()
+                    .collect_vec();
+
+                let Some((first, rest)) = main_package_ids.split_first() else {
+                    bail!("empty summaries");
+                };
+                let package: PubGrubPackage = (*first).into();
+                let version = first.version.clone();
+                let mut state = State::init(package.clone(), version);
+                state
+                    .unit_propagation(package.clone())
+                    .map_err(|err| anyhow::format_err!("unit propagation failed: {:?}", err))?;
+                for package_id in rest {
+                    let package: PubGrubPackage = (*package_id).into();
+                    let version = package_id.version.clone();
+                    state.add_incompatibility(Incompatibility::not_root(
+                        package.clone(),
+                        version.clone(),
+                    ));
+                    state
+                        .unit_propagation(package)
+                        .map_err(|err| anyhow::format_err!("unit propagation failed: {:?}", err))?;
+                }
+
+                // Resolve requirements
+                let solution = pubgrub::solver::resolve_state(&provider, &mut state, package)
+                    .map_err(format_error)?;
+
+                validate_solution(&solution)?;
+                build_resolve(&provider, solution)
+            };
+            let result = result();
+            tx.send(result).unwrap();
+        })?;
+
+    let resolve_fut = async move {
+        rx.await
+            .map_err(|_| anyhow::format_err!("channel closed"))
+            .and_then(|result| result)
+    };
+
+    let (_, resolve) = tokio::try_join!(requests_fut, resolve_fut)?;
+    resolve.check_checksums(&lockfile)?;
+    Ok(resolve)
+}
+
+fn format_error(err: PubGrubError<PubGrubDependencyProvider>) -> anyhow::Error {
+    match err {
+        PubGrubError::NoSolution(derivation_tree) => {
+            anyhow::format_err!(
+                "version solving failed:\n{}\n",
+                DefaultStringReporter::report(&derivation_tree)
+            )
+        }
+        PubGrubError::ErrorChoosingPackageVersion(DependencyProviderError::PackageNotFound {
+            name,
+            version,
+        }) => {
+            anyhow::format_err!("cannot find package `{name} {version}`")
+        }
+        PubGrubError::ErrorChoosingPackageVersion(DependencyProviderError::PackageQueryFailed(
+            err,
+        )) => anyhow::format_err!("{}", err).context("dependency query failed"),
+        PubGrubError::ErrorRetrievingDependencies {
+            package,
+            version,
+            source,
+        } => anyhow::Error::from(source)
+            .context(format!("cannot get dependencies of `{package}@{version}`")),
+        PubGrubError::SelfDependency { package, version } => {
+            anyhow::format_err!("self dependency found: `{}@{}`", package, version)
+        }
+        PubGrubError::ErrorInShouldCancel(err) => {
+            anyhow::format_err!("{}", err).context("should cancel failed")
+        }
+        PubGrubError::Failure(msg) => anyhow::format_err!("{}", msg).context("resolver failure"),
+        PubGrubError::ErrorChoosingPackageVersion(DependencyProviderError::ChannelClosed) => {
+            anyhow::format_err!("channel closed")
+        }
+    }
+}
+
+fn validate_solution(
+    solution: &SelectedDependencies<PubGrubDependencyProvider>,
+) -> anyhow::Result<()> {
+    // Same package, different sources.
+    let mut seen: HashMap<PackageName, PubGrubPackage> = Default::default();
+    for pkg in solution.keys() {
+        if let Some(existing) = seen.get(&pkg.name) {
+            bail!(
+                indoc! {"
+                    found dependencies on the same package `{}` coming from incompatible \
+                    sources:
+                    source 1: {}
+                    source 2: {}
+                "},
+                pkg.name,
+                existing.source_id,
+                pkg.source_id
+            );
+        }
+        seen.insert(pkg.name.clone(), pkg.clone());
+    }
+    Ok(())
+}
diff --git a/scarb/src/resolver/algorithm/provider.rs b/scarb/src/resolver/algorithm/provider.rs
new file mode 100644
index 000000000..f7ccdaa00
--- /dev/null
+++ b/scarb/src/resolver/algorithm/provider.rs
@@ -0,0 +1,449 @@
+use crate::core::lockfile::Lockfile;
+use crate::core::registry::Registry;
+use crate::core::{
+    DependencyFilter, DependencyVersionReq, ManifestDependency, PackageId, PackageName, SourceId,
+    Summary,
+};
+use crate::resolver::algorithm::in_memory_index::VersionsResponse;
+use crate::resolver::algorithm::{Request, ResolverState};
+use itertools::Itertools;
+use pubgrub::solver::{Dependencies, DependencyProvider};
+use pubgrub::version_set::VersionSet;
+use semver::Version;
+use semver_pubgrub::SemverPubgrub;
+use std::cmp::Reverse;
+use std::collections::{HashMap, HashSet};
+use std::fmt::Display;
+use std::sync::{Arc, RwLock};
+use thiserror::Error;
+use tokio::sync::mpsc;
+
+#[derive(Eq, PartialEq, Clone, Debug)]
+pub struct CustomIncompatibility(String);
+
+impl Display for CustomIncompatibility {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        write!(f, "{}", self.0)
+    }
+}
+
+#[derive(Clone, PartialEq, Eq, Hash, Debug)]
+pub struct PubGrubPackage {
+    pub name: PackageName,
+    pub source_id: SourceId,
+}
+
+impl From<&PubGrubPackage> for ManifestDependency {
+    fn from(package: &PubGrubPackage) -> Self {
+        ManifestDependency::builder()
+            .name(package.name.clone())
+            .source_id(package.source_id)
+            .version_req(DependencyVersionReq::Any)
+            .build()
+    }
+}
+
+impl From<&ManifestDependency> for PubGrubPackage {
+    fn from(dependency: &ManifestDependency) -> Self {
+        Self {
+            name: dependency.name.clone(),
+            source_id: dependency.source_id,
+        }
+    }
+}
+
+impl From<PackageId> for PubGrubPackage {
+    fn from(package_id: PackageId) -> Self {
+        Self {
+            name: package_id.name.clone(),
+            source_id: package_id.source_id,
+        }
+    }
+}
+
+impl From<&Summary> for PubGrubPackage {
+    fn from(summary: &Summary) -> Self {
+        summary.package_id.into()
+    }
+}
+
+impl Display for PubGrubPackage {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        write!(f, "{}", self.name)
+    }
+}
+
+#[allow(dead_code)]
+#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord)]
+pub enum PubGrubPriority {
+    /// The package has no specific priority.
+    ///
+    /// As such, its priority is based on the order in which the packages were added (FIFO), such
+    /// that the first package we visit is prioritized over subsequent packages.
+    ///
+    /// TODO(charlie): Prefer constrained over unconstrained packages, if they're at the same depth
+    /// in the dependency graph.
+    Unspecified(Reverse<usize>),
+
+    /// The version range is constrained to a single version (e.g., with the `==` operator).
+    Singleton(Reverse<usize>),
+
+    /// The package was specified via a direct URL.
+    DirectUrl(Reverse<usize>),
+
+    /// The package is the root package.
+    Root,
+}
+
+pub struct PubGrubDependencyProvider {
+    priority: RwLock<HashMap<PubGrubPackage, usize>>,
+    packages: RwLock<HashMap<PackageId, Summary>>,
+    main_package_ids: HashSet<PackageId>,
+    lockfile: Lockfile,
+    state: Arc<ResolverState>,
+    pub request_sink: mpsc::Sender<Request>,
+}
+
+impl PubGrubDependencyProvider {
+    pub fn new(
+        main_package_ids: HashSet<PackageId>,
+        state: Arc<ResolverState>,
+        request_sink: mpsc::Sender<Request>,
+        lockfile: Lockfile,
+    ) -> Self {
+        Self {
+            main_package_ids,
+            priority: RwLock::new(HashMap::new()),
+            packages: RwLock::new(HashMap::new()),
+            state,
+            lockfile,
+            request_sink,
+        }
+    }
+
+    pub fn main_package_ids(&self) -> &HashSet<PackageId> {
+        &self.main_package_ids
+    }
+
+    pub fn only_fetch_summary(
+        &self,
+        package_id: PackageId,
+    ) -> Result<Summary, DependencyProviderError> {
+        let summary = self.packages.read().unwrap().get(&package_id).cloned();
+        let summary = summary.map(Ok).unwrap_or_else(|| {
+            let dependency = ManifestDependency::builder()
+                .name(package_id.name.clone())
+                .source_id(package_id.source_id)
+                .version_req(DependencyVersionReq::exact(&package_id.version))
+                .build();
+            let summary = self
+                .query(dependency.clone())?
+                .into_iter()
+                .find_or_first(|summary| summary.package_id == package_id);
+            if let Some(summary) = summary.as_ref() {
+                let mut write_lock = self.packages.write().unwrap();
+                write_lock.insert(summary.package_id, summary.clone());
+                write_lock.insert(package_id, summary.clone());
+            }
+            summary.ok_or_else(|| DependencyProviderError::PackageNotFound {
+                name: dependency.name.clone().to_string(),
+                version: dependency.version_req.clone(),
+            })
+        })?;
+        Ok(summary)
+    }
+
+    pub fn fetch_summary(&self, package_id: PackageId) -> Result<Summary, DependencyProviderError> {
+        let summary = self.only_fetch_summary(package_id)?;
+        for dep in summary.dependencies.iter() {
+            let locked_package_id = self.lockfile.packages_matching(dep.clone());
+            let dep = if let Some(locked_package_id) = locked_package_id {
+                rewrite_locked_dependency(dep.clone(), locked_package_id?)
+            } else {
+                dep.clone()
+            };
+
+            let package: PubGrubPackage = (&dep).into();
+            if self.state.index.packages().register(package.clone()) {
+                self.request_sink
+                    .blocking_send(Request::Package(package))
+                    .unwrap();
+            }
+
+            let dep = rewrite_dependency_source_id(summary.package_id, &dep)?;
+            let package: PubGrubPackage = (&dep).into();
+            if self.state.index.packages().register(package.clone()) {
+                self.request_sink
+                    .blocking_send(Request::Package(package))
+                    .unwrap();
+            }
+        }
+        Ok(summary)
+    }
+
+    fn query(
+        &self,
+        dependency: ManifestDependency,
+    ) -> Result<Vec<Summary>, DependencyProviderError> {
+        let package: PubGrubPackage = (&dependency).into();
+        let summaries = self.state.index.packages().wait_blocking(&package).unwrap();
+
+        let VersionsResponse::Found(summaries) = summaries.as_ref() else {
+            todo!("no response");
+        };
+
+        {
+            let mut write_lock = self.packages.write().unwrap();
+            for summary in summaries.iter() {
+                write_lock.insert(summary.package_id, summary.clone());
+            }
+        }
+
+        // Sort from highest to lowest.
+        let summaries = summaries
+            .iter()
+            .sorted_by_key(|sum| sum.package_id.version.clone())
+            .rev()
+            .cloned()
+            .collect_vec();
+
+        Ok(summaries)
+    }
+}
+
+impl DependencyProvider for PubGrubDependencyProvider {
+    type P = PubGrubPackage;
+    type V = Version;
+    type VS = SemverPubgrub;
+    type M = CustomIncompatibility;
+
+    fn prioritize(&self, package: &Self::P, _range: &Self::VS) -> Self::Priority {
+        if self.state.index.packages().register(package.clone()) {
+            self.request_sink
+                .blocking_send(Request::Package(package.clone()))
+                .unwrap();
+        }
+
+        // Prioritize by ordering from root.
+        let priority = self.priority.read().unwrap().get(package).copied();
+        if let Some(priority) = priority {
+            return Some(PubGrubPriority::Unspecified(Reverse(priority)));
+        }
+        None
+    }
+
+    type Priority = Option<PubGrubPriority>;
+    type Err = DependencyProviderError;
+
+    fn choose_version(
+        &self,
+        package: &Self::P,
+        range: &Self::VS,
+    ) -> Result<Option<Self::V>, Self::Err> {
+        // Query available versions.
+        let dependency: ManifestDependency = package.into();
+        let summaries = self.query(dependency)?;
+
+        // Choose version.
+        let summary = summaries
+            .into_iter()
+            .find(|summary| range.contains(&summary.package_id.version));
+
+        // Store retrieved summary for selected version.
+        if let Some(summary) = summary.as_ref() {
+            self.packages
+                .write()
+                .unwrap()
+                .insert(summary.package_id, summary.clone());
+        }
+
+        Ok(summary.map(|summary| summary.package_id.version.clone()))
+    }
+
+    fn get_dependencies(
+        &self,
+        package: &Self::P,
+        version: &Self::V,
+    ) -> Result<Dependencies<Self::P, Self::VS, Self::M>, Self::Err> {
+        // Query summary.
+        let package_id = PackageId::new(package.name.clone(), version.clone(), package.source_id);
+        let summary = self.fetch_summary(package_id)?;
+
+        // Set priority for dependencies.
+        let self_priority = self
+            .priority
+            .read()
+            .unwrap()
+            .get(&PubGrubPackage {
+                name: package_id.name.clone(),
+                source_id: package_id.source_id,
+            })
+            .copied();
+        if let Some(priority) = self_priority {
+            let mut write_lock = self.priority.write().unwrap();
+            for dependency in summary.full_dependencies() {
+                let package: PubGrubPackage = dependency.into();
+                write_lock.insert(package, priority + 1);
+            }
+        }
+
+        // Convert dependencies to constraints.
+        let dep_filter =
+            DependencyFilter::propagation(self.main_package_ids.contains(&summary.package_id));
+        let deps = summary
+            .filtered_full_dependencies(dep_filter)
+            .cloned()
+            .map(|dependency| {
+                let original_dep = dependency.clone();
+                let dependency = rewrite_dependency_source_id(summary.package_id, &dependency)?;
+                let locked_package_id = self.lockfile.packages_matching(dependency.clone());
+                let dependency = if let Some(locked_package_id) = locked_package_id {
+                    rewrite_locked_dependency(dependency.clone(), locked_package_id?)
+                } else {
+                    dependency
+                };
+
+                let dep_name = dependency.name.clone().to_string();
+                // Fall back to the original dependency if the rewritten one cannot be found.
+                let summaries = self.query(dependency.clone())?;
+                let summaries = if summaries.is_empty() {
+                    self.query(original_dep.clone())?
+                } else {
+                    summaries
+                };
+                summaries
+                    .into_iter()
+                    .find(|summary| dependency.version_req.matches(&summary.package_id.version))
+                    .map(|summary| (summary.package_id, dependency.version_req.clone()))
+                    .ok_or_else(|| DependencyProviderError::PackageNotFound {
+                        name: dep_name,
+                        version: dependency.version_req.clone(),
+                    })
+            })
+            .collect::<Result<Vec<_>, DependencyProviderError>>()?;
+        let constraints = deps
+            .into_iter()
+            .map(|(package_id, req)| (package_id.into(), req.into()))
+            .collect();
+
+        Ok(Dependencies::Available(constraints))
+    }
+}
+
+impl From<DependencyVersionReq> for SemverPubgrub {
+    fn from(req: DependencyVersionReq) -> Self {
+        match req {
+            DependencyVersionReq::Req(req) => SemverPubgrub::from(&req),
+            DependencyVersionReq::Any => SemverPubgrub::empty().complement(),
+            DependencyVersionReq::Locked { exact, .. } => {
+                DependencyVersionReq::exact(&exact).into()
+            }
+        }
+    }
+}
+
+pub fn rewrite_locked_dependency(
+    dependency: ManifestDependency,
+    locked_package_id: PackageId,
+) -> ManifestDependency {
+    ManifestDependency::builder()
+        .kind(dependency.kind.clone())
+        .name(dependency.name.clone())
+        .source_id(locked_package_id.source_id)
+        .version_req(DependencyVersionReq::Locked {
+            exact: locked_package_id.version.clone(),
+            req: dependency.version_req.clone().into(),
+        })
+        .build()
+}
+
+pub fn rewrite_dependency_source_id(
+    package_id: PackageId,
+    dependency: &ManifestDependency,
+) -> Result<ManifestDependency, DependencyProviderError> {
+    // Rewrite path dependencies for git sources.
+    if package_id.source_id.is_git() && dependency.source_id.is_path() {
+        let rewritten_dep = ManifestDependency::builder()
+            .kind(dependency.kind.clone())
+            .name(dependency.name.clone())
+            .source_id(package_id.source_id)
+            .version_req(dependency.version_req.clone())
+            .build();
+
+        return Ok(rewritten_dep);
+    }
+
+    Ok(dependency.clone())
+}
+
+/// Error thrown by the PubGrub dependency provider.
+#[derive(Error, Debug)]
+#[non_exhaustive]
+pub enum DependencyProviderError {
+    /// Package not found.
+    #[error("cannot find package `{name} {version}`")]
+    PackageNotFound {
+        name: String,
+        version: DependencyVersionReq,
+    },
+    /// Package query failed.
+    #[error("{0}")]
+    PackageQueryFailed(#[from] anyhow::Error),
+    /// Channel closed.
+    #[error("channel closed")]
+    ChannelClosed,
+}
diff --git a/scarb/src/resolver/algorithm/solution.rs b/scarb/src/resolver/algorithm/solution.rs
new file mode 100644
index 000000000..ad82d3f54
--- /dev/null
+++ b/scarb/src/resolver/algorithm/solution.rs
@@ -0,0 +1,51 @@
+use crate::core::resolver::DependencyEdge;
+use crate::core::{DepKind, DependencyFilter, PackageId, Resolve, Summary, TargetKind};
+use crate::resolver::algorithm::provider::PubGrubDependencyProvider;
+use petgraph::prelude::DiGraphMap;
+use pubgrub::type_aliases::SelectedDependencies;
+use std::collections::HashMap;
+
+pub fn build_resolve(
+    provider: &PubGrubDependencyProvider,
+    solution: SelectedDependencies<PubGrubDependencyProvider>,
+) -> anyhow::Result<Resolve> {
+    let summaries: HashMap<PackageId, Summary> = solution
+        .into_iter()
+        .map(|(package, version)| {
+            let pid = PackageId::new(package.name.clone(), version.clone(), package.source_id);
+            let sum = provider
+                .only_fetch_summary(pid)
+                .map_err(|err| anyhow::format_err!("failed to get summary: {:?}", err))?;
+            Ok((sum.package_id, sum))
+        })
+        .collect::<anyhow::Result<HashMap<PackageId, Summary>>>()?;
+
+    let mut graph: DiGraphMap<PackageId, DependencyEdge> = Default::default();
+
+    for pid in summaries.keys() {
+        graph.add_node(*pid);
+    }
+
+    for summary in summaries.values() {
+        let dep_filter = DependencyFilter::propagation(
+            provider.main_package_ids().contains(&summary.package_id),
+        );
+        for dep in summary.filtered_full_dependencies(dep_filter) {
+            let dep_target_kind: Option<TargetKind> = match dep.kind.clone() {
+                DepKind::Normal => None,
+                DepKind::Target(target_kind) => Some(target_kind),
+            };
+            let Some(dep) = summaries.keys().find(|pid| pid.name == dep.name).copied() else {
+                continue;
+            };
+            let weight = graph
+                .edge_weight(summary.package_id, dep)
+                .cloned()
+                .unwrap_or_default();
+            let weight = weight.extend(dep_target_kind);
+            graph.add_edge(summary.package_id, dep, weight);
+        }
+    }
+
+    Ok(Resolve { graph, summaries })
+}
diff --git a/scarb/src/resolver/mod.rs b/scarb/src/resolver/mod.rs
index 93b789bc8..f21f043c5 100644
--- a/scarb/src/resolver/mod.rs
+++ b/scarb/src/resolver/mod.rs
@@ -1,9 +1,9 @@
-use anyhow::Result;
-
 use crate::core::lockfile::Lockfile;
 use crate::core::registry::Registry;
 use crate::core::resolver::Resolve;
 use crate::core::Summary;
+use anyhow::Result;
+use tokio::runtime::Handle;
 
 mod algorithm;
 mod primitive;
@@ -28,12 +28,14 @@ mod primitive;
 ///
 /// * `ui` - an [`Ui`] instance used to show warnings to the user.
 #[tracing::instrument(level = "trace", skip_all)]
-pub async fn resolve(
+pub async fn resolve<'c>(
     summaries: &[Summary],
     registry: &dyn Registry,
     lockfile: Lockfile,
+    handle: &'c Handle,
 ) -> Result<Resolve> {
-    primitive::resolve(summaries, registry, lockfile).await
+    // primitive::resolve(summaries, registry, lockfile, handle).await
+    algorithm::resolve(summaries, registry, lockfile, handle).await
 }
 
 #[cfg(test)]
@@ -126,7 +128,12 @@ mod tests {
             .collect_vec();
         let lockfile = Lockfile::new(locks.iter().cloned());
 
-        runtime.block_on(super::resolve(&summaries, &registry, lockfile))
+        runtime.block_on(super::resolve(
+            &summaries,
+            &registry,
+            lockfile,
+            runtime.handle(),
+        ))
     }
 
     fn package_id>(name: S) -> PackageId {
@@ -253,20 +260,7 @@ mod tests {
             ("baz v1.0.0", []),
         ],
         &[deps![("foo", "*")]],
-        // TODO(#2): Expected result is commented out.
- // Ok(pkgs![ - // "bar v1.0.0", - // "baz v1.0.0", - // "foo v1.0.0" - // ]), - Err(indoc! {" - Version solving failed: - - bar v2.0.0 cannot use baz v1.0.0, because bar requires baz ^2.0.0 - - Scarb does not have real version solving algorithm yet. - Perhaps in the future this conflict could be resolved, but currently, - please upgrade your dependencies to use latest versions of their dependencies. - "}), + Ok(pkgs!["bar v1.0.0", "baz v1.0.0", "foo v1.0.0"]), ) } @@ -285,20 +279,7 @@ mod tests { ("baz v2.1.0", []), ], &[deps![("bar", "~1.1.0"), ("foo", "~2.7")]], - // TODO(#2): Expected result is commented out. - // Ok(pkgs![ - // "bar v1.1.1", - // "baz v1.7.1", - // "foo v2.7.0" - // ]), - Err(indoc! {" - Version solving failed: - - foo v2.7.0 cannot use baz v2.1.0, because foo requires baz ~1.7.1 - - Scarb does not have real version solving algorithm yet. - Perhaps in the future this conflict could be resolved, but currently, - please upgrade your dependencies to use latest versions of their dependencies. - "}), + Ok(pkgs!["bar v1.1.1", "baz v1.7.1", "foo v2.7.0"]), ) } @@ -339,12 +320,10 @@ mod tests { ], &[deps![("top1", "1"), ("top2", "1")]], Err(indoc! {" - Version solving failed: - - top2 v1.0.0 cannot use foo v1.0.0, because top2 requires foo ^2.0.0 - - Scarb does not have real version solving algorithm yet. - Perhaps in the future this conflict could be resolved, but currently, - please upgrade your dependencies to use latest versions of their dependencies. + version solving failed: + Because there is no version of top1 in >1.0.0, <2.0.0 and top1 1.0.0 depends on foo >=1.0.0, <2.0.0, top1 >=1.0.0, <2.0.0 depends on foo >=1.0.0, <2.0.0. + And because top2 1.0.0 depends on foo >=2.0.0, <3.0.0 and there is no version of top2 in >1.0.0, <2.0.0, top1 >=1.0.0, <2.0.0, top2 >=1.0.0, <2.0.0 are incompatible. + And because root_1 1.0.0 depends on top1 >=1.0.0, <2.0.0 and root_1 1.0.0 depends on top2 >=1.0.0, <2.0.0, root_1 1.0.0 is forbidden. 
"}), ) } @@ -354,7 +333,7 @@ mod tests { check( registry![], &[deps![("foo", "1.0.0")]], - Err(r#"MockRegistry/query: cannot find foo ^1.0.0"#), + Err(r#"MockRegistry/query: cannot find foo *"#), ) } @@ -363,7 +342,7 @@ mod tests { check( registry![("foo v2.0.0", []),], &[deps![("foo", "1.0.0")]], - Err(r#"cannot find package foo"#), + Err(r#"cannot get dependencies of `root_1@1.0.0`"#), ) } @@ -372,7 +351,7 @@ mod tests { check( registry![("foo v1.0.0", []),], &[deps![("foo", "1.0.0", "git+https://example.git/foo.git")]], - Err(r#"MockRegistry/query: cannot find foo ^1.0.0 (git+https://example.git/foo.git)"#), + Err(r#"MockRegistry/query: cannot find foo * (git+https://example.git/foo.git)"#), ) } @@ -388,7 +367,7 @@ mod tests { ("b v3.8.14", []), ], &[deps![("a", "~3.6"), ("b", "~3.6")]], - Err(r#"cannot find package a"#), + Err(r#"cannot get dependencies of `root_1@1.0.0`"#), ) } @@ -408,7 +387,7 @@ mod tests { ("b v3.8.5", [("d", "2.9.0")]), ], &[deps![("a", "~3.6"), ("c", "~1.1"), ("b", "~3.6")]], - Err(r#"cannot find package a"#), + Err(r#"cannot get dependencies of `root_1@1.0.0`"#), ) } @@ -431,7 +410,7 @@ mod tests { ), ], &[deps![("e", "~1.0"), ("a", "~3.7"), ("b", "~3.7")]], - Err(r#"cannot find package e"#), + Err(r#"cannot get dependencies of `root_1@1.0.0`"#), ) } @@ -532,7 +511,7 @@ mod tests { registry![("foo v1.0.0", []),], &[deps![("foo", "2.0.0"),]], locks![("foo v1.0.0", [])], - Err("cannot find package foo"), + Err("cannot get dependencies of `root_1@1.0.0`"), ); } diff --git a/scarb/src/resolver/primitive.rs b/scarb/src/resolver/primitive.rs index 6e77327ba..cb5291fe2 100644 --- a/scarb/src/resolver/primitive.rs +++ b/scarb/src/resolver/primitive.rs @@ -1,3 +1,5 @@ +#![allow(dead_code)] + use crate::core::lockfile::Lockfile; use crate::core::registry::Registry; use crate::core::resolver::DependencyEdge; @@ -9,12 +11,14 @@ use anyhow::bail; use indoc::{formatdoc, indoc}; use petgraph::graphmap::DiGraphMap; use std::collections::{HashMap, HashSet}; +use tokio::runtime::Handle; #[tracing::instrument(level = "trace", skip_all)] -pub async fn resolve( +pub async fn resolve<'c>( summaries: &[Summary], registry: &dyn Registry, lockfile: Lockfile, + _handle: &'c Handle, ) -> anyhow::Result { // TODO(#2): This is very bad, use PubGrub here. let mut graph = DiGraphMap::::new(); diff --git a/scarb/tests/add.rs b/scarb/tests/add.rs index 472460edf..561f43f3d 100644 --- a/scarb/tests/add.rs +++ b/scarb/tests/add.rs @@ -199,7 +199,10 @@ fn runs_resolver_if_network_is_allowed() { "#}) .failure() .stdout_matches(indoc! {r#" - error: cannot find package dep + error: cannot get dependencies of `hello@1.0.0` + + Caused by: + cannot find package `dep ^1.0.0` "#}) .run(); } diff --git a/scarb/tests/git_source.rs b/scarb/tests/git_source.rs index 86fe2ef77..87e96679c 100644 --- a/scarb/tests/git_source.rs +++ b/scarb/tests/git_source.rs @@ -165,6 +165,7 @@ fn fetch_with_nested_paths() { Scarb::quick_snapbox() .arg("fetch") .current_dir(&t) + .timeout(std::time::Duration::from_secs(60 * 1)) .assert() .success(); } diff --git a/scarb/tests/git_source_network.rs b/scarb/tests/git_source_network.rs index 5c498093c..44ea6e2bd 100644 --- a/scarb/tests/git_source_network.rs +++ b/scarb/tests/git_source_network.rs @@ -41,7 +41,8 @@ fn https_something_happens() { error: failed to clone into: [..] Caused by: - process did not exit successfully: exit [..]: 128 + 0: failed to clone into: [..] 
+ 1: process did not exit successfully: exit [..]: 128 "#}); }); } @@ -76,7 +77,8 @@ fn ssh_something_happens() { error: failed to clone into: [..] Caused by: - process did not exit successfully: exit [..]: 128 + 0: failed to clone into: [..] + 1: process did not exit successfully: exit [..]: 128 "#}); }); } diff --git a/scarb/tests/http_registry.rs b/scarb/tests/http_registry.rs index d21a019cc..c44da7aae 100644 --- a/scarb/tests/http_registry.rs +++ b/scarb/tests/http_registry.rs @@ -191,10 +191,11 @@ fn not_found() { .assert() .failure() .stdout_matches(indoc! {r#" - error: failed to lookup for `baz ^1 (registry+http://[..])` in registry: registry+http://[..] + error: failed to lookup for `baz * (registry+http://[..])` in registry: registry+http://[..] Caused by: - package not found in registry: baz ^1 (registry+http://[..]) + 0: failed to lookup for `baz * (registry+http://[..])` in registry: registry+http://[..] + 1: package not found in registry: baz * (registry+http://[..]) "#}); let expected = expect![[" @@ -245,11 +246,12 @@ fn missing_config_json() { .assert() .failure() .stdout_matches(indoc! {r#" - error: failed to lookup for `baz ^1 (registry+http://[..])` in registry: registry+http://[..] + error: failed to lookup for `baz * (registry+http://[..])` in registry: registry+http://[..] Caused by: - 0: failed to fetch registry config - 1: HTTP status client error (404 Not Found) for url (http://[..]/config.json) + 0: failed to lookup for `baz * (registry+http://[..])` in registry: registry+http://[..] + 1: failed to fetch registry config + 2: HTTP status client error (404 Not Found) for url (http://[..]/config.json) "#}); let expected = expect![[" diff --git a/scarb/tests/local_registry.rs b/scarb/tests/local_registry.rs index ba5ca10cc..05d6fc9ef 100644 --- a/scarb/tests/local_registry.rs +++ b/scarb/tests/local_registry.rs @@ -63,10 +63,11 @@ fn not_found() { .assert() .failure() .stdout_matches(indoc! {r#" - error: failed to lookup for `baz ^1 (registry+file://[..])` in registry: registry+file://[..] + error: failed to lookup for `baz * (registry+file://[..])` in registry: registry+file://[..] Caused by: - package not found in registry: baz ^1 (registry+file://[..]) + 0: failed to lookup for `baz * (registry+file://[..])` in registry: registry+file://[..] + 1: package not found in registry: baz * (registry+file://[..]) "#}); } @@ -90,10 +91,11 @@ fn empty_registry() { .assert() .failure() .stdout_matches(indoc! {r#" - error: failed to lookup for `baz ^1 (registry+file://[..])` in registry: registry+file://[..] + error: failed to lookup for `baz * (registry+file://[..])` in registry: registry+file://[..] Caused by: - package not found in registry: baz ^1 (registry+file://[..]) + 0: failed to lookup for `baz * (registry+file://[..])` in registry: registry+file://[..] + 1: package not found in registry: baz * (registry+file://[..]) "#}); } @@ -120,7 +122,8 @@ fn url_pointing_to_file() { error: failed to load source: registry+file://[..] Caused by: - local registry path is not a directory: [..] + 0: failed to load source: registry+file://[..] + 1: local registry path is not a directory: [..] "#}); // Prevent the temp directory from being deleted until this point. 
diff --git a/scarb/tests/resolver_with_git.rs b/scarb/tests/resolver_with_git.rs index 9e36d5f1d..7fa688b3d 100644 --- a/scarb/tests/resolver_with_git.rs +++ b/scarb/tests/resolver_with_git.rs @@ -1,10 +1,10 @@ use assert_fs::prelude::*; use assert_fs::TempDir; use indoc::indoc; - use scarb_test_support::command::Scarb; use scarb_test_support::gitx; use scarb_test_support::project_builder::{DepBuilder, ProjectBuilder}; +use snapbox::assert_matches; #[test] fn valid_triangle() { @@ -33,15 +33,34 @@ fn valid_triangle() { .dep("proxy", &proxy) .build(&t); - Scarb::quick_snapbox() + let output = Scarb::quick_snapbox() .arg("fetch") .current_dir(&t) - .assert() - .success() - .stdout_matches(indoc! {r#" - [..] Updating git repository file://[..]/culprit - [..] Updating git repository file://[..]/proxy - "#}); + .output() + .unwrap(); + + let stderr = String::from_utf8_lossy(&output.stderr).to_string(); + assert!( + output.status.success(), + "output is not success:\n{}", + stderr.clone() + ); + + let output = String::from_utf8_lossy(&output.stdout).to_string(); + assert_matches( + indoc! {r#" + [..] Updating git repository file://[..] + [..] Updating git repository file://[..] + "#}, + &output, + ); + + assert!( + // Order is not assured. + output.contains("/proxy") && output.contains("/culprit"), + "{}", + stderr + ); } #[test] @@ -73,18 +92,33 @@ fn two_revs_of_same_dep() { .dep("proxy", &proxy) .build(&t); - Scarb::quick_snapbox() + let output = Scarb::quick_snapbox() .arg("fetch") .current_dir(&t) - .assert() - .failure() - .stdout_matches(indoc! {r#" - [..] Updating git repository file://[..]/culprit - [..] Updating git repository file://[..]/culprit - error: found dependencies on the same package `culprit` coming from incompatible sources: - source 1: git+file://[..]/culprit#[..] - source 2: git+file://[..]/culprit?branch=branchy#[..] - "#}); + .output() + .unwrap(); + + let stderr = String::from_utf8_lossy(&output.stderr).to_string(); + assert!(!output.status.success(), "{}", stderr.clone()); + + let output = String::from_utf8_lossy(&output.stdout).to_string(); + assert_matches( + indoc! {r#" + [..] Updating git repository file://[..]/culprit + [..] Updating git repository file://[..]/culprit + error: found dependencies on the same package `culprit` coming from incompatible sources: + source 1: git+file://[..]/culprit[..] + source 2: git+file://[..]/culprit[..] + "#}, + &output, + ); + + assert!( + // Order is not assured. + output.contains("culprit?branch=branchy#") && output.contains("culprit#"), + "{}", + stderr + ); } #[test] @@ -125,18 +159,40 @@ fn two_revs_of_same_dep_diamond() { .dep("dep2", &dep2) .build(&t); - Scarb::quick_snapbox() + let output = Scarb::quick_snapbox() .arg("fetch") .current_dir(&t) - .assert() - .failure() - .stdout_matches(indoc! {r#" - [..] Updating git repository file://[..]/dep1 - [..] Updating git repository file://[..]/dep2 - [..] Updating git repository file://[..]/culprit - [..] Updating git repository file://[..]/culprit + .output() + .unwrap(); + + let stderr = String::from_utf8_lossy(&output.stderr).to_string(); + assert!(!output.status.success(), "{}", stderr.clone()); + + let output = String::from_utf8_lossy(&output.stdout).to_string(); + assert_matches( + indoc! {r#" + [..] Updating git repository file://[..] + [..] Updating git repository file://[..] + [..] Updating git repository file://[..] + [..] Updating git repository file://[..] 
+            error: found dependencies on the same package `culprit` coming from incompatible sources:
+            source 1: git+file://[..]/culprit[..]
+            source 2: git+file://[..]/culprit[..]
+        "#},
+        &output,
+    );
+
+    assert!(
+        // Order is not assured.
+        output.contains("/dep1") && output.contains("/dep2") && output.contains("/culprit"),
+        "{}",
+        stderr.clone()
+    );
+
+    assert!(
+        // Order is not assured.
+        output.contains("/culprit?branch=branchy#") && output.contains("/culprit#"),
+        "{}",
+        stderr
+    );
+}
diff --git a/utils/once-map/Cargo.toml b/utils/once-map/Cargo.toml
new file mode 100644
index 000000000..4b29bf48a
--- /dev/null
+++ b/utils/once-map/Cargo.toml
@@ -0,0 +1,14 @@
+[package]
+name = "once_map"
+version = "0.0.1"
+publish = false
+edition.workspace = true
+homepage.workspace = true
+repository.workspace = true
+authors.workspace = true
+license.workspace = true
+
+[dependencies]
+dashmap.workspace = true
+tokio.workspace = true
+futures.workspace = true
diff --git a/utils/once-map/src/lib.rs b/utils/once-map/src/lib.rs
new file mode 100644
index 000000000..d26c67cb9
--- /dev/null
+++ b/utils/once-map/src/lib.rs
@@ -0,0 +1,131 @@
+use std::borrow::Borrow;
+use std::hash::{BuildHasher, Hash, RandomState};
+use std::pin::pin;
+use std::sync::Arc;
+
+use dashmap::DashMap;
+use tokio::sync::Notify;
+
+/// Run tasks only once and store the results in a parallel hash map.
+///
+/// We often have jobs `Fn(K) -> V` that we only want to run once and memoize, e.g. network
+/// requests for metadata. When multiple tasks start the same query in parallel, we want to wait
+/// until the other task is done and get a reference to the same result.
+///
+/// Note that this always clones the value out of the underlying map. Because
+/// of this, it's common to wrap the `V` in an `Arc` to make cloning cheap.
+pub struct OnceMap<K, V, H = RandomState> {
+    pub items: DashMap<K, Value<V>, H>,
+}
+
+impl<K: Eq + Hash, V: Clone, H: BuildHasher + Clone> OnceMap<K, V, H> {
+    /// Register that you want to start a job.
+    ///
+    /// If this method returns `true`, you need to start a job and call [`OnceMap::done`] eventually
+    /// or other tasks will hang. If it returns `false`, this job is already in progress and you
+    /// can [`OnceMap::wait`] for the result.
+    pub fn register(&self, key: K) -> bool {
+        let entry = self.items.entry(key);
+        match entry {
+            dashmap::mapref::entry::Entry::Occupied(_) => false,
+            dashmap::mapref::entry::Entry::Vacant(entry) => {
+                entry.insert(Value::Waiting(Arc::new(Notify::new())));
+                true
+            }
+        }
+    }
+
+    /// Submit the result of a job you registered.
+    pub fn done(&self, key: K, value: V) {
+        if let Some(Value::Waiting(notify)) = self.items.insert(key, Value::Filled(value)) {
+            notify.notify_waiters();
+        }
+    }
+
+    /// Wait for the result of a job that is running.
+    ///
+    /// Will hang if [`OnceMap::done`] isn't called for this key.
+    pub async fn wait(&self, key: &K) -> Option<V> {
+        let notify = {
+            let entry = self.items.get(key)?;
+            match entry.value() {
+                Value::Filled(value) => return Some(value.clone()),
+                Value::Waiting(notify) => notify.clone(),
+            }
+        };
+
+        // Register the waiter for calls to `notify_waiters`, so a notification sent between the
+        // re-check below and the first poll of this future is not missed.
+        let mut notification = pin!(notify.notified());
+        notification.as_mut().enable();
+
+        // Make sure the value wasn't inserted in-between us checking the map and registering the waiter.
+        if let Value::Filled(value) = self.items.get(key).expect("map is append-only").value() {
+            return Some(value.clone());
+        };
+
+        // Wait until the value is inserted.
+        notification.await;
+
+        let entry = self.items.get(key).expect("map is append-only");
+        match entry.value() {
+            Value::Filled(value) => Some(value.clone()),
+            Value::Waiting(_) => unreachable!("notify was called"),
+        }
+    }
+
+    /// Wait for the result of a job that is running, in a blocking context.
+    ///
+    /// Will hang if [`OnceMap::done`] isn't called for this key.
+    pub fn wait_blocking(&self, key: &K) -> Option<V> {
+        let notify = {
+            let entry = self.items.get(key)?;
+            match entry.value() {
+                Value::Filled(value) => return Some(value.clone()),
+                Value::Waiting(notify) => notify.clone(),
+            }
+        };
+
+        // Register the waiter for calls to `notify_waiters`, so a notification sent between the
+        // re-check below and the first poll of this future is not missed.
+        let mut notification = pin!(notify.notified());
+        notification.as_mut().enable();
+
+        // Make sure the value wasn't inserted in-between us checking the map and registering the waiter.
+        if let Value::Filled(value) = self.items.get(key).expect("map is append-only").value() {
+            return Some(value.clone());
+        };
+
+        // Wait until the value is inserted.
+        futures::executor::block_on(notification);
+
+        let entry = self.items.get(key).expect("map is append-only");
+        match entry.value() {
+            Value::Filled(value) => Some(value.clone()),
+            Value::Waiting(_) => unreachable!("notify was called"),
+        }
+    }
+
+    /// Return the result of a previous job, if any.
+    pub fn get<Q: ?Sized + Hash + Eq>(&self, key: &Q) -> Option<V>
+    where
+        K: Borrow<Q>,
+    {
+        let entry = self.items.get(key)?;
+        match entry.value() {
+            Value::Filled(value) => Some(value.clone()),
+            Value::Waiting(_) => None,
+        }
+    }
+}
+
+impl<K: Eq + Hash, V, H: BuildHasher + Clone + Default> Default for OnceMap<K, V, H> {
+    fn default() -> Self {
+        Self {
+            items: DashMap::with_hasher(H::default()),
+        }
+    }
+}
+
+#[derive(Debug)]
+pub enum Value<V> {
+    Waiting(Arc<Notify>),
+    Filled(V),
+}
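The `once_map` crate added above is the synchronization primitive behind `InMemoryIndex`: the first task to `register` a key runs the query and publishes the result with `done`, while every other task (including the PubGrub provider thread, via `wait_blocking`) receives the same shared value. A minimal sketch of that register/done/wait protocol follows; the string key and the inlined "job" are hypothetical stand-ins for a registry query and are not part of the patch.

use std::sync::Arc;

use once_map::OnceMap;

#[tokio::main]
async fn main() {
    // Shared, append-only cache; values are wrapped in `Arc` so waiters get cheap clones.
    let cache: Arc<OnceMap<String, Arc<String>>> = Arc::new(OnceMap::default());

    let key = "alexandria_math".to_string();
    if cache.register(key.clone()) {
        // This task won the registration: run the job once and publish its result.
        // (A real caller would query a registry here; a constant stands in for that.)
        cache.done(key.clone(), Arc::new("versions: 0.1.0, 0.2.0".to_string()));
    }

    // Every caller, including the one that ran the job, obtains the same value.
    let value = cache.wait(&key).await.expect("`done` was called for this key");
    assert_eq!(value.as_str(), "versions: 0.1.0, 0.2.0");
}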