diff --git a/Cargo.lock b/Cargo.lock index 80b7e8736..2301cca35 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -3617,6 +3617,7 @@ name = "pixi" version = "0.40.3" dependencies = [ "ahash", + "anyhow", "assert_matches", "async-fd-lock", "async-once-cell", @@ -3713,6 +3714,7 @@ dependencies = [ "typed-path", "url", "uv-auth", + "uv-build-frontend", "uv-cache", "uv-client", "uv-configuration", diff --git a/Cargo.toml b/Cargo.toml index 164fb23ca..da76aeb01 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -16,6 +16,7 @@ repository = "https://github.com/prefix-dev/pixi" [workspace.dependencies] ahash = "0.8.11" +anyhow = "*" assert_matches = "1.5.0" async-fd-lock = "0.2.0" async-once-cell = "0.5.4" @@ -99,6 +100,7 @@ toml_edit = "0.22.22" tracing = "0.1.41" tracing-subscriber = "0.3.19" typed-path = "0.10.0" +uv-build-frontend = { git = "https://github.com/astral-sh/uv", tag = "0.5.6" } uv-distribution-filename = { git = "https://github.com/astral-sh/uv", tag = "0.5.6" } uv-distribution-types = { git = "https://github.com/astral-sh/uv", tag = "0.5.6" } uv-install-wheel = { git = "https://github.com/astral-sh/uv", tag = "0.5.6" } @@ -206,6 +208,7 @@ performance = ["pixi_allocator"] [dependencies] ahash = { workspace = true } +anyhow = { workspace = true } assert_matches = { workspace = true } async-fd-lock = { workspace = true } async-once-cell = { workspace = true } @@ -247,6 +250,7 @@ minijinja = { workspace = true, features = ["builtins"] } once_cell = { workspace = true } parking_lot = { workspace = true } rstest = { workspace = true } +uv-build-frontend = { workspace = true } uv-distribution-filename = { workspace = true } uv-distribution-types = { workspace = true } uv-install-wheel = { workspace = true } diff --git a/pixi.toml b/pixi.toml index 64b032105..a994812db 100644 --- a/pixi.toml +++ b/pixi.toml @@ -48,7 +48,7 @@ syrupy = ">=4.8.0,<5" tomli-w = ">=1.0,<2" [feature.pytest.tasks] -test-common-wheels = { cmd = "pytest --numprocesses=auto tests/wheel_tests/", depends-on = [ +test-common-wheels = { cmd = "pytest -s --numprocesses=auto tests/wheel_tests/", depends-on = [ "build-release", ] } test-common-wheels-ci = { cmd = "pytest --numprocesses=auto --verbose tests/wheel_tests/" } diff --git a/src/cli/shell.rs b/src/cli/shell.rs index 8810d450a..4b904025d 100644 --- a/src/cli/shell.rs +++ b/src/cli/shell.rs @@ -8,13 +8,16 @@ use rattler_shell::{ shell::{CmdExe, PowerShell, Shell, ShellEnum, ShellScript}, }; -use crate::cli::cli_config::{PrefixUpdateConfig, ProjectConfig}; use crate::lock_file::UpdateMode; use crate::{ activation::CurrentEnvVarBehavior, environment::get_update_lock_file_and_prefix, project::virtual_packages::verify_current_platform_has_required_virtual_packages, prompt, Project, UpdateLockFileOptions, }; +use crate::{ + cli::cli_config::{PrefixUpdateConfig, ProjectConfig}, + project::get_activated_environment_variables, +}; use pixi_config::{ConfigCliActivation, ConfigCliPrompt}; #[cfg(target_family = "unix")] @@ -251,15 +254,15 @@ pub async fn execute(args: Args) -> miette::Result<()> { .await?; // Get the environment variables we need to set activate the environment in the shell. 
- let env = project
- .get_activated_environment_variables(
- &environment,
- CurrentEnvVarBehavior::Exclude,
- Some(&lock_file_data.lock_file),
- project.config().force_activate(),
- project.config().experimental_activation_cache_usage(),
- )
- .await?;
+ let env = get_activated_environment_variables(
+ project.env_vars(),
+ &environment,
+ CurrentEnvVarBehavior::Exclude,
+ Some(&lock_file_data.lock_file),
+ project.config().force_activate(),
+ project.config().experimental_activation_cache_usage(),
+ )
+ .await?;
tracing::debug!("Pixi environment activation:\n{:?}", env);
diff --git a/src/cli/shell_hook.rs b/src/cli/shell_hook.rs index 9763b05ad..7b9fbb79d 100644 --- a/src/cli/shell_hook.rs +++ b/src/cli/shell_hook.rs
@@ -11,7 +11,6 @@ use rattler_shell::{
use serde::Serialize;
use serde_json;
-use crate::activation::CurrentEnvVarBehavior;
use crate::environment::get_update_lock_file_and_prefix;
use crate::{
activation::get_activator,
@@ -19,6 +18,7 @@ use crate::{
project::{Environment, HasProjectRef},
prompt, Project, UpdateLockFileOptions,
};
+use crate::{activation::CurrentEnvVarBehavior, project::get_activated_environment_variables};
/// Print the pixi environment activation script.
///
@@ -107,16 +107,15 @@ async fn generate_environment_json(
force_activate: bool,
experimental_cache: bool,
) -> miette::Result<String> {
- let environment_variables = environment
- .project()
- .get_activated_environment_variables(
- environment,
- CurrentEnvVarBehavior::Exclude,
- Some(lock_file),
- force_activate,
- experimental_cache,
- )
- .await?;
+ let environment_variables = get_activated_environment_variables(
+ environment.project().env_vars(),
+ environment,
+ CurrentEnvVarBehavior::Exclude,
+ Some(lock_file),
+ force_activate,
+ experimental_cache,
+ )
+ .await?;
let shell_env = ShellEnv { environment_variables,
diff --git a/src/conda_prefix_updater.rs b/src/conda_prefix_updater.rs new file mode 100644 index 000000000..88b91648c --- /dev/null +++ b/src/conda_prefix_updater.rs
@@ -0,0 +1,115 @@
+use crate::build::BuildContext;
+use crate::environment::{self, PythonStatus};
+use crate::lock_file::IoConcurrencyLimit;
+use crate::prefix::Prefix;
+use crate::project::grouped_environment::{GroupedEnvironment, GroupedEnvironmentName};
+use crate::project::HasProjectRef;
+use futures::TryFutureExt;
+use miette::IntoDiagnostic;
+use pixi_manifest::FeaturesExt;
+use pixi_record::PixiRecord;
+use rattler::package_cache::PackageCache;
+use rattler_conda_types::Platform;
+
+/// A struct that contains the result of updating a conda prefix.
+pub struct CondaPrefixUpdated {
+    /// The name of the group that was updated.
+    pub group: GroupedEnvironmentName,
+    /// The prefix that was updated.
+    pub prefix: Prefix,
+    /// Any change to the python interpreter.
+    pub python_status: Box<PythonStatus>,
+}
+
+/// A task that updates the prefix for a given environment.
+#[derive(Clone)]
+pub struct CondaPrefixUpdater<'a> {
+    pub group: GroupedEnvironment<'a>,
+    pub platform: Platform,
+    pub package_cache: PackageCache,
+    pub io_concurrency_limit: IoConcurrencyLimit,
+    pub build_context: BuildContext,
+}
+
+impl<'a> CondaPrefixUpdater<'a> {
+    /// Creates a new prefix task.
+    pub fn new(
+        group: GroupedEnvironment<'a>,
+        platform: Platform,
+        package_cache: PackageCache,
+        io_concurrency_limit: IoConcurrencyLimit,
+        build_context: BuildContext,
+    ) -> Self {
+        Self {
+            group,
+            package_cache,
+            io_concurrency_limit,
+            build_context,
+            platform,
+        }
+    }
+
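A usage sketch of the new updater (not part of the patch; `group`, `package_cache`, `io_concurrency_limit`, `build_context` and `pixi_records` are assumed to be in scope):

    // Constructing the updater is cheap and touches nothing on disk.
    let updater = CondaPrefixUpdater::new(
        group,                  // GroupedEnvironment to instantiate
        Platform::current(),    // platform the prefix is solved for
        package_cache.clone(),
        io_concurrency_limit.clone(),
        build_context.clone(),
    );
    // Only this call performs the expensive solve/link of the conda prefix.
    let CondaPrefixUpdated { group, prefix, python_status } =
        updater.update(pixi_records).await?;

The `update` method follows below.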
+    /// Updates the prefix for the given environment.
+    pub(crate) async fn update(
+        &self,
+        pixi_records: Vec<PixiRecord>,
+    ) -> miette::Result<CondaPrefixUpdated> {
+        tracing::debug!(
+            "updating prefix for '{}'",
+            self.group.name().fancy_display()
+        );
+
+        let channels = self
+            .group
+            .channel_urls(&self.group.project().channel_config())
+            .into_diagnostic()?;
+
+        // Spawn a task to determine the currently installed packages.
+        let prefix_clone = self.group.prefix().clone();
+        let installed_packages_future =
+            tokio::task::spawn_blocking(move || prefix_clone.find_installed_packages())
+                .unwrap_or_else(|e| match e.try_into_panic() {
+                    Ok(panic) => std::panic::resume_unwind(panic),
+                    Err(_err) => Err(miette::miette!("the operation was cancelled")),
+                });
+
+        // Wait until the installed packages for this prefix are available.
+        let installed_packages = installed_packages_future.await?;
+
+        let has_existing_packages = !installed_packages.is_empty();
+        let group_name = self.group.name().clone();
+        let client = self.group.project().authenticated_client().clone();
+        let prefix = self.group.prefix();
+
+        let python_status = environment::update_prefix_conda(
+            &prefix,
+            self.package_cache.clone(),
+            client,
+            installed_packages,
+            pixi_records,
+            self.group.virtual_packages(self.platform),
+            channels,
+            self.platform,
+            &format!(
+                "{} conda prefix '{}'",
+                if has_existing_packages {
+                    "updating"
+                } else {
+                    "creating"
+                },
+                group_name.fancy_display()
+            ),
+            "  ",
+            self.io_concurrency_limit.clone().into(),
+            self.build_context.clone(),
+        )
+        .await?;
+
+        Ok(CondaPrefixUpdated {
+            group: group_name,
+            prefix,
+            python_status: Box::new(python_status),
+        })
+    }
+}
diff --git a/src/lib.rs b/src/lib.rs index 694051f53..bdad53419 100644 --- a/src/lib.rs +++ b/src/lib.rs
@@ -2,6 +2,7 @@
pub mod activation;
pub mod cli;
+pub(crate) mod conda_prefix_updater;
pub mod diff;
pub mod environment;
mod global;
@@ -19,5 +20,6 @@ mod build;
mod rlimit;
mod utils;
+pub use conda_prefix_updater::{CondaPrefixUpdated, CondaPrefixUpdater};
pub use lock_file::{load_lock_file, UpdateLockFileOptions};
pub use project::{DependencyType, Project};
diff --git a/src/lock_file/mod.rs b/src/lock_file/mod.rs index c0fe1c2e6..f001d705a 100644 --- a/src/lock_file/mod.rs +++ b/src/lock_file/mod.rs
@@ -7,6 +7,7 @@ mod satisfiability;
mod update;
mod utils;
+pub use crate::CondaPrefixUpdater;
use crate::Project;
use miette::{IntoDiagnostic, WrapErr};
pub(crate) use package_identifier::PypiPackageIdentifier;
@@ -22,7 +23,7 @@ pub use satisfiability::{
};
pub(crate) use update::{LockFileDerivedData, UpdateContext};
pub use update::{UpdateLockFileOptions, UpdateMode};
-pub(crate) use utils::filter_lock_file;
+pub(crate) use utils::{filter_lock_file, IoConcurrencyLimit};
/// A list of conda packages that are locked for a specific platform.
pub type LockedCondaPackages = Vec<RepoDataRecord>;
diff --git a/src/lock_file/resolve/build_dispatch.rs b/src/lock_file/resolve/build_dispatch.rs new file mode 100644 index 000000000..2dbb8cdeb --- /dev/null +++ b/src/lock_file/resolve/build_dispatch.rs
@@ -0,0 +1,446 @@
+//! This module contains an implementation of the `BuildContext` trait for the [`LazyBuildDispatch`] struct.
+//! It exists mainly to be able to initialize the conda prefix for PyPI resolving on demand:
+//! creating the conda prefix is a heavy operation, and it is not needed at all
+//! unless we are resolving PyPI source dependencies.
+//! 
With this implementation we only initialize a prefix once uv requests some operation that actually needs this prefix.
+//!
+//! This is especially prudent when we have multiple environments, which translates into multiple prefixes, all of which would otherwise need to be initialized.
+//! Previously we would initialize all prefixes upfront, which is unnecessary work and is sometimes not even possible for every platform.
+//! Using this implementation we can solve for a lot more platforms than we could before.
+//!
+//! The main struct of interest is [`LazyBuildDispatch`], which holds the parameters needed to create a uv `BuildContext` implementation
+//! and holds the struct that is used to instantiate the conda prefix when it is needed.
+use std::path::Path;
+
+use async_once_cell::OnceCell as AsyncCell;
+
+use once_cell::sync::OnceCell;
+
+use anyhow::Result;
+use pixi_manifest::EnvironmentName;
+use pixi_record::PixiRecord;
+use pixi_uv_conversions::{isolated_names_to_packages, names_to_build_isolation};
+use std::collections::HashMap;
+use tokio::runtime::Handle;
+use uv_build_frontend::SourceBuild;
+use uv_cache::Cache;
+use uv_client::RegistryClient;
+use uv_configuration::{
+    BuildKind, BuildOptions, BuildOutput, Concurrency, ConfigSettings, Constraints, IndexStrategy,
+    LowerBound, SourceStrategy,
+};
+use uv_dispatch::{BuildDispatch, SharedState};
+use uv_distribution_types::{
+    CachedDist, DependencyMetadata, IndexLocations, Resolution, SourceDist,
+};
+use uv_install_wheel::linker::LinkMode;
+use uv_pep508::PackageName;
+use uv_pypi_types::Requirement;
+use uv_python::{Interpreter, PythonEnvironment};
+use uv_resolver::{ExcludeNewer, FlatIndex};
+use uv_types::{BuildContext, HashStrategy};
+
+use crate::{
+    activation::CurrentEnvVarBehavior,
+    project::{get_activated_environment_variables, Environment, EnvironmentVars},
+};
+
+use crate::{CondaPrefixUpdated, CondaPrefixUpdater};
+
+/// This structure holds all the parameters needed to create a uv `BuildContext` implementation.
+pub struct UvBuildDispatchParams<'a> {
+    client: &'a RegistryClient,
+    cache: &'a Cache,
+    index_locations: &'a IndexLocations,
+    flat_index: &'a FlatIndex,
+    dependency_metadata: &'a DependencyMetadata,
+    config_settings: &'a ConfigSettings,
+    build_options: &'a BuildOptions,
+    hasher: &'a HashStrategy,
+    index_strategy: IndexStrategy,
+    constraints: Constraints,
+    shared_state: SharedState,
+    link_mode: uv_install_wheel::linker::LinkMode,
+    exclude_newer: Option<ExcludeNewer>,
+    bounds: LowerBound,
+    sources: SourceStrategy,
+    concurrency: Concurrency,
+}
+
+impl<'a> UvBuildDispatchParams<'a> {
+    #[allow(clippy::too_many_arguments)]
+    pub fn new(
+        client: &'a RegistryClient,
+        cache: &'a Cache,
+        index_locations: &'a IndexLocations,
+        flat_index: &'a FlatIndex,
+        dependency_metadata: &'a DependencyMetadata,
+        config_settings: &'a ConfigSettings,
+        build_options: &'a BuildOptions,
+        hasher: &'a HashStrategy,
+    ) -> Self {
+        Self {
+            client,
+            cache,
+            index_locations,
+            flat_index,
+            dependency_metadata,
+            config_settings,
+            build_options,
+            hasher,
+            index_strategy: IndexStrategy::default(),
+            shared_state: SharedState::default(),
+            link_mode: LinkMode::default(),
+            constraints: Constraints::default(),
+            exclude_newer: None,
+            bounds: LowerBound::default(),
+            sources: SourceStrategy::default(),
+            concurrency: Concurrency::default(),
+        }
+    }
+
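`UvBuildDispatchParams` is a consuming builder: `new` fills in defaults and each `with_*` method below overrides one of them. A call site might look like this sketch (identifiers assumed to be in scope; mirrors the later use in pypi.rs):

    // Omitted with_* calls keep the defaults set in `new`.
    let params = UvBuildDispatchParams::new(
        &registry_client, &cache, &index_locations, &flat_index,
        &dependency_metadata, &config_settings, &build_options, &hash_strategy,
    )
    .with_index_strategy(index_strategy)
    .with_shared_state(shared_state.clone())
    .with_source_strategy(source_strategy)
    .with_concurrency(concurrency);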
+    /// Set the index strategy for the build dispatch.
+    pub fn with_index_strategy(mut self, index_strategy: IndexStrategy) -> Self {
+        self.index_strategy = index_strategy;
+        self
+    }
+
+    /// Set the shared state for the build dispatch.
+    pub fn with_shared_state(mut self, shared_state: SharedState) -> Self {
+        self.shared_state = shared_state;
+        self
+    }
+
+    /// Set the source strategy for the build dispatch.
+    pub fn with_source_strategy(mut self, sources: SourceStrategy) -> Self {
+        self.sources = sources;
+        self
+    }
+
+    /// Set the concurrency amount for the build dispatch.
+    pub fn with_concurrency(mut self, concurrency: Concurrency) -> Self {
+        self.concurrency = concurrency;
+        self
+    }
+
+    /// Set the link mode for the build dispatch.
+    #[allow(dead_code)]
+    pub fn with_link_mode(mut self, link_mode: LinkMode) -> Self {
+        self.link_mode = link_mode;
+        self
+    }
+
+    /// Set the constraints for the build dispatch.
+    #[allow(dead_code)]
+    pub fn with_constraints(mut self, constraints: Constraints) -> Self {
+        self.constraints = constraints;
+        self
+    }
+
+    /// Set the exclude-newer options for the build dispatch.
+    #[allow(dead_code)]
+    pub fn with_exclude_newer(mut self, exclude_newer: Option<ExcludeNewer>) -> Self {
+        self.exclude_newer = exclude_newer;
+        self
+    }
+
+    /// Set the lower bounds handling for the build dispatch.
+    #[allow(dead_code)]
+    pub fn with_lower_bounds(mut self, lower_bounds: LowerBound) -> Self {
+        self.bounds = lower_bounds;
+        self
+    }
+}
+
+/// Handles the lazy initialization of a build dispatch.
+///
+/// A build dispatch is used to manage building Python packages from source,
+/// including setting up build environments, dependencies, and executing builds.
+///
+/// This struct helps manage resources needed for build dispatch that may need
+/// to be initialized on-demand rather than upfront.
+///
+/// Both the [`BuildDispatch`] and the conda prefix are instantiated on demand.
+pub struct LazyBuildDispatch<'a> {
+    pub params: UvBuildDispatchParams<'a>,
+    pub prefix_updater: CondaPrefixUpdater<'a>,
+    pub repodata_records: Vec<PixiRecord>,
+
+    pub build_dispatch: AsyncCell<BuildDispatch<'a>>,
+
+    // if we create a new conda prefix, we need to store the task result
+    // so that we can reuse it later
+    pub conda_task: Option<CondaPrefixUpdated>,
+
+    // project environment variables,
+    // used to compute the activated environment variables
+    pub project_env_vars: HashMap<EnvironmentName, EnvironmentVars>,
+    pub environment: Environment<'a>,
+
+    // packages that should be excluded from build isolation
+    pub no_build_isolation: Option<Vec<String>>,
+
+    // we need to tie the interpreter to the build dispatch
+    pub lazy_deps: &'a LazyBuildDispatchDependencies,
+
+    /// Whether to disallow installing the conda prefix.
+    pub disallow_install_conda_prefix: bool,
+}
+
+/// These are resources for the [`BuildDispatch`] that need to be initialized lazily,
+/// along with the build dispatch itself.
+///
+/// They need to be passed in externally; otherwise the borrows would be shorter than
+/// the lifetime of the `BuildDispatch`, which returns references into them.
+#[derive(Default)]
+pub struct LazyBuildDispatchDependencies {
+    /// The initialized python interpreter
+    interpreter: OnceCell<Interpreter>,
+    /// The non-isolated packages
+    non_isolated_packages: OnceCell<Option<Vec<PackageName>>>,
+    /// The python environment
+    python_env: OnceCell<PythonEnvironment>,
+}
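Because `interpreter()` below hands out references tied to these cells, the cells must be created by the caller and outlive the dispatch. The intended shape is roughly (hypothetical variable names):

    // The dependency cells live in the caller's frame...
    let lazy_deps = LazyBuildDispatchDependencies::default();
    let dispatch = LazyBuildDispatch::new(
        params, prefix_updater, project_env_vars, environment,
        repodata_records, no_build_isolation, &lazy_deps, false,
    );
    // ...so a reference obtained here can borrow from them safely.
    let interpreter: &uv_python::Interpreter = dispatch.interpreter();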
+impl<'a> LazyBuildDispatch<'a> {
+    /// Create a new [`LazyBuildDispatch`] instance.
+    #[allow(clippy::too_many_arguments)]
+    pub fn new(
+        params: UvBuildDispatchParams<'a>,
+        prefix_updater: CondaPrefixUpdater<'a>,
+        project_env_vars: HashMap<EnvironmentName, EnvironmentVars>,
+        environment: Environment<'a>,
+        repodata_records: Vec<PixiRecord>,
+        no_build_isolation: Option<Vec<String>>,
+        lazy_deps: &'a LazyBuildDispatchDependencies,
+        disallow_install_conda_prefix: bool,
+    ) -> Self {
+        Self {
+            params,
+            prefix_updater,
+            conda_task: None,
+            project_env_vars,
+            environment,
+            repodata_records,
+            no_build_isolation,
+            build_dispatch: AsyncCell::new(),
+            lazy_deps,
+            disallow_install_conda_prefix,
+        }
+    }
+
+    /// Lazy initialization of the `BuildDispatch`. This also implies initializing the conda prefix.
+    async fn get_or_try_init(&self) -> anyhow::Result<&BuildDispatch> {
+        self.build_dispatch
+            .get_or_try_init(async {
+                // Disallow installing if the flag is set.
+                if self.disallow_install_conda_prefix {
+                    return Err(anyhow::anyhow!(
+                        "installation of conda environment is required to solve PyPI source dependencies but `--no-install` flag has been set"
+                    ));
+                }
+                tracing::debug!(
+                    "PyPI solve requires instantiation of conda prefix for '{}'",
+                    self.prefix_updater.group.name().as_str()
+                );
+                let prefix = self
+                    .prefix_updater
+                    .update(self.repodata_records.clone())
+                    .await
+                    .map_err(|err| {
+                        anyhow::anyhow!(err).context("failed to install conda environment")
+                    })?;
+
+                // get the activation vars
+                let env_vars = get_activated_environment_variables(
+                    &self.project_env_vars,
+                    &self.environment,
+                    CurrentEnvVarBehavior::Exclude,
+                    None,
+                    false,
+                    false,
+                )
+                .await
+                .map_err(|err| {
+                    anyhow::anyhow!(err).context("failed to get activated environment variables")
+                })?;
+
+                let python_path = prefix
+                    .python_status
+                    .location()
+                    .map(|path| prefix.prefix.root().join(path))
+                    .ok_or_else(|| {
+                        anyhow::anyhow!(format!(
+                            "missing python interpreter from conda prefix {}. \n {}",
+                            prefix.prefix.root().display(),
+                            "Use `pixi add python` to install the latest python interpreter.",
+                        ))
+                    })?;
+
+                let interpreter = self
+                    .lazy_deps
+                    .interpreter
+                    .get_or_try_init(|| Interpreter::query(python_path, self.cache()))?;
+
+                let env = self
+                    .lazy_deps
+                    .python_env
+                    .get_or_init(|| PythonEnvironment::from_interpreter(interpreter.clone()));
+
+                let non_isolated_packages =
+                    self.lazy_deps.non_isolated_packages.get_or_try_init(|| {
+                        isolated_names_to_packages(self.no_build_isolation.as_deref()).map_err(
+                            |err| {
+                                anyhow::anyhow!(err).context("failed to get non-isolated packages")
+                            },
+                        )
+                    })?;
+
+                let build_isolation =
+                    names_to_build_isolation(non_isolated_packages.as_deref(), env);
+
+                let build_dispatch = BuildDispatch::new(
+                    self.params.client,
+                    self.params.cache,
+                    self.params.constraints.clone(),
+                    interpreter,
+                    self.params.index_locations,
+                    self.params.flat_index,
+                    self.params.dependency_metadata,
+                    self.params.shared_state.clone(),
+                    self.params.index_strategy,
+                    self.params.config_settings,
+                    build_isolation,
+                    self.params.link_mode,
+                    self.params.build_options,
+                    self.params.hasher,
+                    self.params.exclude_newer,
+                    self.params.bounds,
+                    self.params.sources,
+                    self.params.concurrency,
+                )
+                .with_build_extra_env_vars(env_vars);
+
+                Ok(build_dispatch)
+            })
+            .await
+    }
+}
+
+impl BuildContext for LazyBuildDispatch<'_> {
+    type SourceDistBuilder = SourceBuild;
+
+    fn interpreter(&self) -> &uv_python::Interpreter {
+        // In most cases the interpreter will already be initialized, because one of the other
+        // trait methods will have been called.
+        // But in case it is not, we initialize it here.
+        //
+        // Even though `get_or_try_init` never initializes twice, we still guard on `get()`
+        // because entering the initialization path takes time.
+        if self.lazy_deps.interpreter.get().is_none() {
+            // This will usually be called from the multi-threaded runtime, but there might be tests
+            // that call this from a current-thread runtime.
+            // In a current-thread runtime we cannot use `block_in_place`, as it will panic.
+            let handle = Handle::current();
+            match handle.runtime_flavor() {
+                tokio::runtime::RuntimeFlavor::CurrentThread => {
+                    let runtime = tokio::runtime::Builder::new_current_thread()
+                        .enable_all()
+                        .build()
+                        .expect("failed to initialize the runtime");
+                    runtime
+                        .block_on(self.get_or_try_init())
+                        .expect("failed to initialize the build dispatch");
+                }
+                // All other flavors are multi-threaded runtimes
+                _ => {
+                    tokio::task::block_in_place(move || {
+                        handle
+                            .block_on(self.get_or_try_init())
+                            .expect("failed to initialize build dispatch");
+                    });
+                }
+            }
+        }
+        self.lazy_deps
+            .interpreter
+            .get()
+            .expect("python interpreter not initialized, this is a programming error")
+    }
+
+    fn cache(&self) -> &uv_cache::Cache {
+        self.params.cache
+    }
+
+    fn git(&self) -> &uv_git::GitResolver {
+        self.params.shared_state.git()
+    }
+
+    fn capabilities(&self) -> &uv_distribution_types::IndexCapabilities {
+        self.params.shared_state.capabilities()
+    }
+
+    fn dependency_metadata(&self) -> &uv_distribution_types::DependencyMetadata {
+        self.params.dependency_metadata
+    }
+
+    fn build_options(&self) -> &uv_configuration::BuildOptions {
+        self.params.build_options
+    }
+
+    fn config_settings(&self) -> &uv_configuration::ConfigSettings {
+        self.params.config_settings
+    }
+
+    fn bounds(&self) -> uv_configuration::LowerBound {
+        self.params.bounds
+    }
+
+    fn sources(&self) -> uv_configuration::SourceStrategy {
+        self.params.sources
+    }
+
+    fn locations(&self) -> &uv_distribution_types::IndexLocations {
+        self.params.index_locations
+    }
+
+    async fn resolve<'data>(&'data self, requirements: &'data [Requirement]) -> Result<Resolution> {
+        self.get_or_try_init().await?.resolve(requirements).await
+    }
+
+    async fn install<'data>(
+        &'data self,
+        resolution: &'data Resolution,
+        venv: &'data PythonEnvironment,
+    ) -> Result<Vec<CachedDist>> {
+        self.get_or_try_init()
+            .await?
+            .install(resolution, venv)
+            .await
+    }
+
+    async fn setup_build<'data>(
+        &'data self,
+        source: &'data Path,
+        subdirectory: Option<&'data Path>,
+        install_path: &'data Path,
+        version_id: Option<String>,
+        dist: Option<&'data SourceDist>,
+        sources: SourceStrategy,
+        build_kind: BuildKind,
+        build_output: BuildOutput,
+    ) -> Result<SourceBuild> {
+        self.get_or_try_init()
+            .await?
+            .setup_build(
+                source,
+                subdirectory,
+                install_path,
+                version_id,
+                dist,
+                sources,
+                build_kind,
+                build_output,
+            )
+            .await
+    }
+}
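`interpreter` is the one synchronous entry point that may still have to run the async initialization, hence the runtime-flavor match above. Factored out, that bridge looks like this sketch (same assumptions and caveats as the code above; tokio-only, panics propagated via `expect`):

    use tokio::runtime::{Handle, RuntimeFlavor};

    // Run a future to completion from a sync context, regardless of the
    // ambient tokio runtime flavor (mirrors the match in `interpreter`).
    fn block_on_anywhere<F: std::future::Future>(fut: F) -> F::Output {
        let handle = Handle::current();
        match handle.runtime_flavor() {
            // `block_in_place` would panic on a current-thread runtime, so
            // build a throwaway runtime instead (rare, test-only path).
            RuntimeFlavor::CurrentThread => tokio::runtime::Builder::new_current_thread()
                .enable_all()
                .build()
                .expect("failed to build runtime")
                .block_on(fut),
            // Multi-threaded runtimes can safely park this worker thread.
            _ => tokio::task::block_in_place(|| handle.block_on(fut)),
        }
    }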
diff --git a/src/lock_file/resolve/mod.rs b/src/lock_file/resolve/mod.rs index d57d5fdcf..d788ee6f6 100644 --- a/src/lock_file/resolve/mod.rs +++ b/src/lock_file/resolve/mod.rs
@@ -2,6 +2,7 @@
//!
//! See [`resolve_pypi`] and [`resolve_conda`] for more information.
+pub(crate) mod build_dispatch;
pub(crate) mod conda;
pub(crate) mod pypi;
mod resolver_provider;
diff --git a/src/lock_file/resolve/pypi.rs b/src/lock_file/resolve/pypi.rs index b243ea0eb..d513e4164 100644 --- a/src/lock_file/resolve/pypi.rs +++ b/src/lock_file/resolve/pypi.rs
@@ -13,13 +13,14 @@ use indexmap::{IndexMap, IndexSet};
use indicatif::ProgressBar;
use itertools::{Either, Itertools};
use miette::{Context, IntoDiagnostic};
-use pixi_manifest::{pypi::pypi_options::PypiOptions, PyPiRequirement, SystemRequirements};
+use pixi_manifest::{
+    pypi::pypi_options::PypiOptions, EnvironmentName, PyPiRequirement, SystemRequirements,
+};
use pixi_record::PixiRecord;
use pixi_uv_conversions::{
-    as_uv_req, convert_uv_requirements_to_pep508, into_pinned_git_spec, isolated_names_to_packages,
-    names_to_build_isolation, no_build_to_build_options, pypi_options_to_index_locations,
-    to_index_strategy, to_normalize, to_requirements, to_uv_normalize, to_uv_version,
-    to_version_specifiers, ConversionError,
+    as_uv_req, convert_uv_requirements_to_pep508, into_pinned_git_spec, no_build_to_build_options,
+    pypi_options_to_index_locations, to_index_strategy, to_normalize, to_requirements,
+    to_uv_normalize, to_uv_version, to_version_specifiers, ConversionError,
};
use pypi_modifiers::{
pypi_marker_env::determine_marker_environment,
@@ -32,17 +33,15 @@ use rattler_lock::{
use typed_path::Utf8TypedPathBuf;
use url::Url;
use uv_client::{Connectivity, FlatIndexClient, RegistryClient, RegistryClientBuilder};
-use uv_configuration::{ConfigSettings, Constraints, IndexStrategy, LowerBound, Overrides};
-use uv_dispatch::{BuildDispatch, SharedState};
+use uv_configuration::{ConfigSettings, Constraints, Overrides};
+use uv_dispatch::SharedState;
use uv_distribution::DistributionDatabase;
use uv_distribution_types::{
BuiltDist, DependencyMetadata, Diagnostic, Dist, FileLocation, HashPolicy, IndexCapabilities,
IndexUrl, InstalledDist, InstalledRegistryDist, Name, Resolution, ResolvedDist, SourceDist,
};
use uv_git::GitResolver;
-use uv_install_wheel::linker::LinkMode;
use uv_pypi_types::{Conflicts, HashAlgorithm, HashDigest, RequirementSource};
-use uv_python::{Interpreter, PythonEnvironment};
use uv_requirements::LookaheadResolver;
use uv_resolver::{
AllowedYanks, DefaultResolverProvider, FlatIndex, InMemoryIndex, Manifest, Options, Preference,
@@ -52,10 +51,19 @@ use uv_types::EmptyInstalledPackages;
use crate::{
lock_file::{
-        records_by_name::HasNameVersion, resolve::resolver_provider::CondaResolverProvider,
-        LockedPypiPackages, PypiPackageIdentifier, PypiRecord, UvResolutionContext,
+        records_by_name::HasNameVersion,
+        resolve::{
+            build_dispatch::{
+                LazyBuildDispatch, LazyBuildDispatchDependencies, UvBuildDispatchParams,
+            },
+            resolver_provider::CondaResolverProvider,
+        },
+        CondaPrefixUpdater, LockedPypiPackages, PixiRecordsByName, PypiPackageIdentifier,
+        PypiRecord, UvResolutionContext,
},
+    project::{Environment, EnvironmentVars},
uv_reporter::{UvReporter, UvReporterOptions},
+    CondaPrefixUpdated,
};
#[derive(Debug, thiserror::Error)]
@@ -165,10 +173,13 @@ pub async fn resolve_pypi(
locked_pypi_packages: &[PypiRecord],
platform: rattler_conda_types::Platform,
pb: &ProgressBar,
-    python_location: &Path,
-    env_variables: &HashMap<String, String>,
project_root: &Path,
-) -> miette::Result<LockedPypiPackages> {
+    prefix_updater: CondaPrefixUpdater<'_>,
+    repodata_records: Arc<PixiRecordsByName>,
+    project_env_vars: HashMap<EnvironmentName, EnvironmentVars>,
+    environment: Environment<'_>,
+    disallow_install_conda_prefix: bool,
+) -> miette::Result<(LockedPypiPackages, Option<CondaPrefixUpdated>)> {
// Solve python packages
pb.set_message("resolving pypi dependencies");
@@ -266,13 +277,6 @@
let requires_python = uv_resolver::RequiresPython::from_specifiers(
&uv_pep440::VersionSpecifiers::from(python_specifier),
);
-    let interpreter = Interpreter::query(python_location, &context.cache)
-        .into_diagnostic()
-        .wrap_err("failed to query python interpreter")?;
-    tracing::debug!(
-        "using python interpreter (should be assumed for building only): {}",
-        interpreter.key()
-    );
tracing::info!(
"using requires python specifier (this may differ from the above): {}",
requires_python
@@ -324,12 +328,6 @@
let build_dispatch_in_memory_index = InMemoryIndex::default();
let config_settings = ConfigSettings::default();
-    let env = PythonEnvironment::from_interpreter(interpreter.clone());
-    let non_isolated_packages =
-        isolated_names_to_packages(pypi_options.no_build_isolation.as_deref()).into_diagnostic()?;
-    let build_isolation = names_to_build_isolation(non_isolated_packages.as_deref(), &env);
-    tracing::debug!("using build-isolation: {:?}", build_isolation);
-
let dependency_metadata = DependencyMetadata::default();
let options = Options {
index_strategy,
@@ -344,28 +342,32 @@
context.capabilities.clone(),
);
-    let build_dispatch = BuildDispatch::new(
+    let build_params = UvBuildDispatchParams::new(
&registry_client,
&context.cache,
-        Constraints::default(),
-        &interpreter,
&index_locations,
&flat_index,
&dependency_metadata,
-        // TODO: could use this later to add static metadata
-        shared_state,
-        IndexStrategy::default(),
&config_settings,
-        build_isolation,
-        LinkMode::default(),
&build_options,
&context.hash_strategy,
-        None,
-        LowerBound::default(),
-        context.source_strategy,
-        context.concurrency,
)
-    .with_build_extra_env_vars(env_variables.iter());
+    .with_index_strategy(index_strategy)
+    .with_shared_state(shared_state.clone())
+    .with_source_strategy(context.source_strategy)
+    .with_concurrency(context.concurrency);
+
+    let lazy_build_dispatch_dependencies = LazyBuildDispatchDependencies::default();
+    let lazy_build_dispatch = LazyBuildDispatch::new(
+        build_params,
+        prefix_updater,
+        project_env_vars,
+        environment,
+        repodata_records.records.clone(),
+        pypi_options.no_build_isolation.clone(),
+        &lazy_build_dispatch_dependencies,
+        disallow_install_conda_prefix,
+    );
// Constrain the conda packages to the specific python packages
let constraints = conda_python_packages
@@ -430,7 +432,7 @@
&lookahead_index,
DistributionDatabase::new(
&registry_client,
-            &build_dispatch,
+            &lazy_build_dispatch,
context.concurrency.downloads,
),
)
@@ -456,7 +458,7 @@
let fallback_provider = DefaultResolverProvider::new(
DistributionDatabase::new(
&registry_client,
-            &build_dispatch,
+            &lazy_build_dispatch,
context.concurrency.downloads,
),
&flat_index,
@@ -512,16 +514,20 @@
}
// Collect resolution into locked packages
-    lock_pypi_packages(
+    let locked_packages = lock_pypi_packages(
conda_python_packages,
-        &build_dispatch,
+        &lazy_build_dispatch,
&registry_client,
resolution,
&context.capabilities,
context.concurrency.downloads,
project_root,
)
-    .await
+    .await?;
+
+    let conda_task = lazy_build_dispatch.conda_task;
+
+    Ok((locked_packages, conda_task))
}
@@ -636,7 +642,7 @@ fn get_url_or_path(
/// Create a vector of locked packages from a resolution
async fn lock_pypi_packages(
conda_python_packages: CondaPythonPackages,
-    build_dispatch: &BuildDispatch<'_>,
+    pixi_build_dispatch: &LazyBuildDispatch<'_>,
registry_client: &Arc<RegistryClient>,
resolution: Resolution,
index_capabilities: &IndexCapabilities,
@@ -644,7 +650,8 @@
abs_project_root: &Path,
) -> miette::Result<Vec<PypiRecord>> {
let mut locked_packages = LockedPypiPackages::with_capacity(resolution.len());
-    let database = DistributionDatabase::new(registry_client, build_dispatch, concurrent_downloads);
+    let database =
+        DistributionDatabase::new(registry_client, pixi_build_dispatch, concurrent_downloads);
for dist in resolution.distributions() {
// If this refers to a conda package we can skip it
if conda_python_packages.contains_key(dist.name()) {
diff --git a/src/lock_file/update.rs b/src/lock_file/update.rs index cd1759b9b..b2bb0de48 100644 --- a/src/lock_file/update.rs +++ b/src/lock_file/update.rs
@@ -8,6 +8,10 @@ use std::{
time::{Duration, Instant},
};
+use crate::{
+    lock_file::CondaPrefixUpdater,
+    project::{get_activated_environment_variables, EnvironmentVars},
+};
use barrier_cell::BarrierCell;
use fancy_display::FancyDisplay;
use futures::{stream::FuturesUnordered, FutureExt, StreamExt, TryFutureExt, TryStreamExt};
@@ -61,7 +65,7 @@ use crate::{
Environment, HasProjectRef,
},
repodata::Repodata,
-    Project,
+    CondaPrefixUpdated, Project,
};
impl Project {
@@ -287,18 +291,15 @@ impl<'p> LockFileDerivedData<'p> {
};
// TODO: This can be really slow (~200ms for pixi on @ruben-arts machine).
-        let env_variables = self
-            .project
-            // Not providing a lock-file as the cache will be invalidated directly anyway,
-            // by it changing the lockfile with pypi records.
-            .get_activated_environment_variables(
-                environment,
-                CurrentEnvVarBehavior::Exclude,
-                None,
-                false,
-                false,
-            )
-            .await?;
+        let env_variables = get_activated_environment_variables(
+            self.project.env_vars(),
+            environment,
+            CurrentEnvVarBehavior::Exclude,
+            None,
+            false,
+            false,
+        )
+        .await?;
let non_isolated_packages = environment.pypi_options().no_build_isolation;
let no_build = environment
.pypi_options()
.no_build
.clone()
.unwrap_or_default();
+
// Update the prefix with Pypi records
environment::update_prefix_pypi(
environment.name(),
@@ -649,20 +651,6 @@ impl<'p> UpdateContext<'p> {
})
.collect()
}
-
-    /// Returns a future that will resolve to the solved repodata records for
-    /// the given environment or `None` if no task was spawned to
-    /// instantiate the prefix.
-    pub(crate) fn get_conda_prefix(
-        &self,
-        environment: &GroupedEnvironment<'p>,
-    ) -> Option<impl Future<Output = (Prefix, PythonStatus)>> {
-        let cell = self.instantiated_conda_prefixes.get(environment)?.clone();
-        Some(async move {
-            let prefix = cell.wait().await;
-            (prefix.0.clone(), prefix.1.clone())
-        })
-    }
}
/// If the project has any source dependencies, like `git` or `path`
@@ -1175,73 +1163,6 @@
-    // Spawn tasks to instantiate prefixes that we need to be able to solve Pypi
-    // packages.
-    //
-    // Solving Pypi packages requires a python interpreter to be present in the
-    // prefix, therefore we first need to make sure we have conda packages
-    // available, then we can instantiate the prefix with at least the required
-    // conda packages (including a python interpreter) and then we can solve the
-    // Pypi packages using the installed interpreter.
-    //
-    // We only need to instantiate the prefix for the current platform.
- for (environment, platforms) in self.outdated_envs.pypi.iter() { - // Only instantiate a prefix if any of the platforms actually contain pypi - // dependencies. If there are no pypi-dependencies than solving is also - // not required and thus a prefix is also not required. - if !platforms - .iter() - .any(|p| !environment.pypi_dependencies(Some(*p)).is_empty()) - { - continue; - } - - // If we are not allowed to install, we can't instantiate a prefix. - if self.no_install { - miette::bail!("Cannot update pypi dependencies without first installing a conda prefix that includes python."); - } - - // Check if the group is already being instantiated - let group = GroupedEnvironment::from(environment.clone()); - if self.instantiated_conda_prefixes.contains_key(&group) { - continue; - } - - // Construct a future that will resolve when we have the repodata available for - // the current platform for this group. - let records_future = self - .get_latest_group_repodata_records(&group, environment.best_platform()) - .ok_or_else(|| { - make_unsupported_pypi_platform_error(environment, current_platform) - })?; - - // Spawn a task to instantiate the environment - let environment_name = environment.name().clone(); - let pypi_env_task = spawn_create_prefix_task( - group.clone(), - self.package_cache.clone(), - records_future, - self.io_concurrency_limit.clone(), - self.build_context.clone(), - ) - .map_err(move |e| { - e.context(format!( - "failed to instantiate a prefix for '{}'", - environment_name - )) - }) - .boxed_local(); - - pending_futures.push(pypi_env_task); - let previous_cell = self - .instantiated_conda_prefixes - .insert(group, Arc::new(BarrierCell::new())); - assert!( - previous_cell.is_none(), - "cannot update the same group twice" - ) - } - // Spawn tasks to update the pypi packages. 
let mut uv_context = None; for (environment, platform) in self @@ -1269,25 +1190,22 @@ impl<'p> UpdateContext<'p> { } // Get environment variables from the activation - let env_variables = project - .get_activated_environment_variables( - environment, - CurrentEnvVarBehavior::Exclude, - None, - false, - false, - ) - .await?; - + let project_variables = self.project.env_vars().clone(); // Construct a future that will resolve when we have the repodata available let repodata_future = self - .get_latest_group_repodata_records(&group, platform) - .expect("conda records should be available now or in the future"); + .get_latest_group_repodata_records(&group, environment.best_platform()) + .ok_or_else(|| { + make_unsupported_pypi_platform_error(environment, current_platform) + })?; - // Construct a future that will resolve when we have the conda prefix available - let prefix_future = self - .get_conda_prefix(&group) - .expect("prefix should be available now or in the future"); + // Creates an object to initiate an update at a later point + let conda_prefix_updater = CondaPrefixUpdater::new( + group.clone(), + environment.best_platform(), + self.package_cache.clone(), + self.io_concurrency_limit.clone(), + self.build_context.clone(), + ); // Get the uv context let uv_context = match uv_context.as_ref() { @@ -1308,13 +1226,15 @@ impl<'p> UpdateContext<'p> { let pypi_solve_future = spawn_solve_pypi_task( uv_context, group.clone(), + environment.clone(), + project_variables, platform, repodata_future, - prefix_future, - env_variables, + conda_prefix_updater, self.pypi_solve_semaphore.clone(), project.root().to_path_buf(), locked_group_records, + self.no_install, ); pending_futures.push(pypi_solve_future.boxed_local()); @@ -1437,23 +1357,34 @@ impl<'p> UpdateContext<'p> { } } } - TaskResult::CondaPrefixUpdated(group_name, prefix, python_status, duration) => { - let group = GroupedEnvironment::from_name(project, &group_name) - .expect("grouped environment should exist"); - - self.instantiated_conda_prefixes - .get_mut(&group) - .expect("the entry for this environment should exists") - .set(Arc::new((prefix, *python_status))) - .expect("prefix should not be instantiated twice"); - - tracing::info!( - "updated conda packages in the '{}' prefix in {}", - group.name().fancy_display(), - humantime::format_duration(duration) - ); - } - TaskResult::PypiGroupSolved(group_name, platform, records, duration) => { + // TaskResult::CondaPrefixUpdated( + // group_name, + // prefix, + // python_status, + // duration, + // ) => { + // let group = GroupedEnvironment::from_name(project, &group_name) + // .expect("grouped environment should exist"); + + // self.instantiated_conda_prefixes + // .get_mut(&group) + // .expect("the entry for this environment should exists") + // .set(Arc::new((prefix, *python_status, status, prefix_task))) + // .expect("prefix should not be instantiated twice"); + + // tracing::info!( + // "updated conda packages in the '{}' prefix in {}", + // group.name().fancy_display(), + // humantime::format_duration(duration) + // ); + // } + TaskResult::PypiGroupSolved( + group_name, + platform, + records, + duration, + conda_prefix, + ) => { let group = GroupedEnvironment::from_name(project, &group_name) .expect("group should exist"); @@ -1483,6 +1414,23 @@ impl<'p> UpdateContext<'p> { ); } } + + if let Some(conda_prefix) = conda_prefix { + let group = GroupedEnvironment::from_name(project, &conda_prefix.group) + .expect("grouped environment should exist"); + + self.instantiated_conda_prefixes + 
.get_mut(&group)
+                        .expect("the entry for this environment should exist")
+                        .set(Arc::new((conda_prefix.prefix, *conda_prefix.python_status)))
+                        .expect("prefix should not be instantiated twice");
+
+                    tracing::info!(
+                        "updated conda packages in the '{}' prefix in {}",
+                        group.name().fancy_display(),
+                        humantime::format_duration(duration)
+                    );
+                }
}
TaskResult::ExtractedRecordsSubset(
environment,
@@ -1658,7 +1606,7 @@ fn make_unsupported_pypi_platform_error(
/// Represents data that is sent back from a task. This is used to communicate
/// the result of a task back to the main task which will forward the
/// information to other tasks waiting for results.
-enum TaskResult {
+pub enum TaskResult {
/// The conda dependencies for a grouped environment have been solved.
CondaGroupSolved(
GroupedEnvironmentName,
Arc<PixiRecordsByName>,
Platform,
Duration,
),
-    /// A prefix was updated with the latest conda packages
-    CondaPrefixUpdated(GroupedEnvironmentName, Prefix, Box<PythonStatus>, Duration),
/// The pypi dependencies for a grouped environment have been solved.
PypiGroupSolved(
GroupedEnvironmentName,
Platform,
PypiRecordsByName,
Duration,
+        Option<CondaPrefixUpdated>,
),
/// The records for a specific environment have been extracted from a
@@ -2122,38 +2068,40 @@ async fn spawn_extract_environment_task(
/// A task that solves the pypi dependencies for a given environment.
#[allow(clippy::too_many_arguments)]
-async fn spawn_solve_pypi_task(
+async fn spawn_solve_pypi_task<'p>(
resolution_context: UvResolutionContext,
-    environment: GroupedEnvironment<'_>,
+    grouped_environment: GroupedEnvironment<'p>,
+    environment: Environment<'p>,
+    project_variables: HashMap<EnvironmentName, EnvironmentVars>,
platform: Platform,
repodata_records: impl Future<Output = Arc<PixiRecordsByName>>,
-    prefix: impl Future<Output = (Prefix, PythonStatus)>,
-    env_variables: &HashMap<String, String>,
+    prefix_task: CondaPrefixUpdater<'p>,
semaphore: Arc<Semaphore>,
project_root: PathBuf,
locked_pypi_packages: Arc<PypiRecordsByName>,
+    disallow_install_conda_prefix: bool,
) -> miette::Result<TaskResult> {
// Get the Pypi dependencies for this environment
-    let dependencies = environment.pypi_dependencies(Some(platform));
+    let dependencies = grouped_environment.pypi_dependencies(Some(platform));
if dependencies.is_empty() {
return Ok(TaskResult::PypiGroupSolved(
-            environment.name().clone(),
+            grouped_environment.name().clone(),
platform,
PypiRecordsByName::default(),
Duration::from_millis(0),
+            None,
));
}
// Get the system requirements for this environment
-    let system_requirements = environment.system_requirements();
+    let system_requirements = grouped_environment.system_requirements();
-    // Wait until the conda records and prefix are available.
+    // Wait until the conda records are available.
- let (repodata_records, (prefix, python_status), _guard) = - tokio::join!(repodata_records, prefix, semaphore.acquire_owned()); + let (repodata_records, _guard) = tokio::join!(repodata_records, semaphore.acquire_owned()); - let environment_name = environment.name().clone(); + let environment_name = grouped_environment.name().clone(); - let pypi_name_mapping_location = environment.project().pypi_name_mapping_source()?; + let pypi_name_mapping_location = grouped_environment.project().pypi_name_mapping_source()?; let mut pixi_records = repodata_records.records.clone(); let locked_pypi_records = locked_pypi_packages.records.clone(); @@ -2169,8 +2117,7 @@ async fn spawn_solve_pypi_task( .await?; let pypi_options = environment.pypi_options(); - // let (pypi_packages, duration) = tokio::spawn( - let (pypi_packages, duration) = async move { + let (pypi_packages, duration, prefix_task_result) = async move { let pb = SolveProgressBar::new( global_multi_progress().add(ProgressBar::hidden()), platform, @@ -2178,16 +2125,6 @@ async fn spawn_solve_pypi_task( ); pb.start(); - let python_path = python_status - .location() - .map(|path| prefix.root().join(path)) - .ok_or_else(|| { - miette::miette!( - help = "Use `pixi add python` to install the latest python interpreter.", - "missing python interpreter from environment" - ) - })?; - let start = Instant::now(); let dependencies: Vec<(uv_normalize::PackageName, IndexSet<_>)> = dependencies @@ -2198,7 +2135,7 @@ async fn spawn_solve_pypi_task( let index_map = IndexMap::from_iter(dependencies); - let records = lock_file::resolve_pypi( + let (records, prefix_task_result) = lock_file::resolve_pypi( resolution_context, &pypi_options, index_map, @@ -2207,9 +2144,12 @@ async fn spawn_solve_pypi_task( &locked_pypi_records, platform, &pb.pb, - &python_path, - env_variables, &project_root, + prefix_task, + repodata_records, + project_variables, + environment, + disallow_install_conda_prefix, ) .await .with_context(|| { @@ -2223,102 +2163,24 @@ async fn spawn_solve_pypi_task( pb.finish(); - Ok::<(_, _), miette::Report>((PypiRecordsByName::from_iter(records), end - start)) + Ok::<(_, _, _), miette::Report>(( + PypiRecordsByName::from_iter(records), + end - start, + prefix_task_result, + )) } .instrument(tracing::info_span!( "resolve_pypi", - group = %environment.name().as_str(), + group = %grouped_environment.name().as_str(), platform = %platform )) .await?; Ok(TaskResult::PypiGroupSolved( - environment.name().clone(), + grouped_environment.name().clone(), platform, pypi_packages, duration, - )) -} - -/// Updates the prefix for the given environment. -/// -/// This function will wait until the conda records for the prefix are -/// available. -async fn spawn_create_prefix_task( - group: GroupedEnvironment<'_>, - package_cache: PackageCache, - pixi_records: impl Future>, - io_concurrency_limit: IoConcurrencyLimit, - build_context: BuildContext, -) -> miette::Result { - let group_name = group.name().clone(); - let prefix = group.prefix(); - let client = group.project().authenticated_client().clone(); - let channels = group - .channel_urls(&group.project().channel_config()) - .into_diagnostic()?; - - // Spawn a task to determine the currently installed packages. 
-    let installed_packages_future = tokio::spawn({
-        let prefix = prefix.clone();
-        async move { prefix.find_installed_packages() }
-    })
-    .unwrap_or_else(|e| match e.try_into_panic() {
-        Ok(panic) => std::panic::resume_unwind(panic),
-        Err(_err) => Err(miette::miette!("the operation was cancelled")),
-    });
-
-    // Wait until the conda records are available and until the installed packages
-    // for this prefix are available.
-    let (pixi_records, installed_packages) =
-        tokio::try_join!(pixi_records.map(Ok), installed_packages_future)?;
-
-    let build_virtual_packages = group.virtual_packages(Platform::current());
-
-    // Spawn a background task to update the prefix
-    let (python_status, duration) = tokio::spawn({
-        let prefix = prefix.clone();
-        let group_name = group_name.clone();
-        async move {
-            let start = Instant::now();
-            let has_existing_packages = !installed_packages.is_empty();
-            let python_status = environment::update_prefix_conda(
-                &prefix,
-                package_cache,
-                client,
-                installed_packages,
-                pixi_records.records.clone(),
-                build_virtual_packages,
-                channels,
-                Platform::current(),
-                &format!(
-                    "{} python environment to solve pypi packages for '{}'",
-                    if has_existing_packages {
-                        "updating"
-                    } else {
-                        "creating"
-                    },
-                    group_name.fancy_display()
-                ),
-                "  ",
-                io_concurrency_limit.into(),
-                build_context,
-            )
-            .await?;
-            let end = Instant::now();
-            Ok((python_status, end - start))
-        }
-    })
-    .await
-    .unwrap_or_else(|e| match e.try_into_panic() {
-        Ok(panic) => std::panic::resume_unwind(panic),
-        Err(_err) => Err(miette::miette!("the operation was cancelled")),
-    })?;
-
-    Ok(TaskResult::CondaPrefixUpdated(
-        group_name,
-        prefix,
-        Box::new(python_status),
-        duration,
+        prefix_task_result,
))
}
diff --git a/src/project/mod.rs b/src/project/mod.rs index d76552c8c..3f438872c 100644 --- a/src/project/mod.rs +++ b/src/project/mod.rs
@@ -224,6 +224,10 @@ impl Project {
.collect()
}
+    pub fn env_vars(&self) -> &HashMap<EnvironmentName, EnvironmentVars> {
+        &self.env_vars
+    }
+
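The accessor above is what the free-function version of `get_activated_environment_variables` (moved further down in this file) hangs off: callers now borrow only the per-environment cells instead of the whole `Project`. The mechanical call-site change, repeated throughout this patch, is:

    // Before: the method borrowed all of `project` across the await.
    // let env = project
    //     .get_activated_environment_variables(&env, behavior, None, false, false)
    //     .await?;
    // After: only the env-var map is borrowed.
    let env = get_activated_environment_variables(
        project.env_vars(), // &HashMap<EnvironmentName, EnvironmentVars>
        &environment,
        CurrentEnvVarBehavior::Exclude,
        None,  // lock file
        false, // force_activate
        false, // experimental_cache
    )
    .await?;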
/// Constructs a project from a manifest.
pub fn from_str(manifest_path: &Path, content: &str) -> miette::Result<Self> {
let manifest = Manifest::from_str(manifest_path, content)?;
@@ -472,67 +476,6 @@
.ok_or_else(|| miette::miette!("unknown environment '{environment_name}'"))
}
-    /// Get or initialize the activated environment variables
-    pub async fn get_activated_environment_variables(
-        &self,
-        environment: &Environment<'_>,
-        current_env_var_behavior: CurrentEnvVarBehavior,
-        lock_file: Option<&LockFile>,
-        force_activate: bool,
-        experimental_cache: bool,
-    ) -> miette::Result<&HashMap<String, String>> {
-        let vars = self.env_vars.get(environment.name()).ok_or_else(|| {
-            miette::miette!(
-                "{} environment should be already created during project creation",
-                environment.name()
-            )
-        })?;
-        match current_env_var_behavior {
-            CurrentEnvVarBehavior::Clean => {
-                vars.clean()
-                    .get_or_try_init(async {
-                        initialize_env_variables(
-                            environment,
-                            current_env_var_behavior,
-                            lock_file,
-                            force_activate,
-                            experimental_cache,
-                        )
-                        .await
-                    })
-                    .await
-            }
-            CurrentEnvVarBehavior::Exclude => {
-                vars.pixi_only()
-                    .get_or_try_init(async {
-                        initialize_env_variables(
-                            environment,
-                            current_env_var_behavior,
-                            lock_file,
-                            force_activate,
-                            experimental_cache,
-                        )
-                        .await
-                    })
-                    .await
-            }
-            CurrentEnvVarBehavior::Include => {
-                vars.full()
-                    .get_or_try_init(async {
-                        initialize_env_variables(
-                            environment,
-                            current_env_var_behavior,
-                            lock_file,
-                            force_activate,
-                            experimental_cache,
-                        )
-                        .await
-                    })
-                    .await
-            }
-        }
-    }
-
/// Returns all the solve groups in the project.
pub(crate) fn solve_groups(&self) -> Vec<SolveGroup> {
self.manifest
@@ -1111,6 +1054,67 @@ pub(crate) fn find_project_manifest(current_dir: impl AsRef<Path>) -> Option<PathBuf> {
+/// Get or initialize the activated environment variables.
+pub async fn get_activated_environment_variables<'a>(
+    project_env_vars: &'a HashMap<EnvironmentName, EnvironmentVars>,
+    environment: &Environment<'_>,
+    current_env_var_behavior: CurrentEnvVarBehavior,
+    lock_file: Option<&LockFile>,
+    force_activate: bool,
+    experimental_cache: bool,
+) -> miette::Result<&'a HashMap<String, String>> {
+    let vars = project_env_vars.get(environment.name()).ok_or_else(|| {
+        miette::miette!(
+            "{} environment should be already created during project creation",
+            environment.name()
+        )
+    })?;
+    match current_env_var_behavior {
+        CurrentEnvVarBehavior::Clean => {
+            vars.clean()
+                .get_or_try_init(async {
+                    initialize_env_variables(
+                        environment,
+                        current_env_var_behavior,
+                        lock_file,
+                        force_activate,
+                        experimental_cache,
+                    )
+                    .await
+                })
+                .await
+        }
+        CurrentEnvVarBehavior::Exclude => {
+            vars.pixi_only()
+                .get_or_try_init(async {
+                    initialize_env_variables(
+                        environment,
+                        current_env_var_behavior,
+                        lock_file,
+                        force_activate,
+                        experimental_cache,
+                    )
+                    .await
+                })
+                .await
+        }
+        CurrentEnvVarBehavior::Include => {
+            vars.full()
+                .get_or_try_init(async {
+                    initialize_env_variables(
+                        environment,
+                        current_env_var_behavior,
+                        lock_file,
+                        force_activate,
+                        experimental_cache,
+                    )
+                    .await
+                })
+                .await
+        }
+    }
+}
+
/// Create a symlink from the directory to the custom target directory
#[cfg(not(windows))]
fn create_symlink(target_dir: &Path, symlink_dir: &Path) {
diff --git a/src/task/executable_task.rs b/src/task/executable_task.rs index afeaf33f3..d43413db4 100644 --- a/src/task/executable_task.rs +++ b/src/task/executable_task.rs
@@ -17,7 +17,7 @@ use tokio::task::JoinHandle;
use super::task_hash::{InputHashesError, TaskCache, TaskHash};
use crate::{
lock_file::LockFileDerivedData,
-    project::Environment,
+    project::{get_activated_environment_variables, Environment},
task::task_graph::{TaskGraph, TaskId},
Project,
};
@@ -373,7 +373,8 @@
CurrentEnvVarBehavior::Include
};
let mut activation_env = await_in_progress("activating environment", |_| {
-        environment.project().get_activated_environment_variables(
+        get_activated_environment_variables(
+            environment.project().env_vars(),
environment,
env_var_behavior,
lock_file,
diff --git a/tests/data/pixi_tomls/two_envs_one_sdist.toml b/tests/data/pixi_tomls/two_envs_one_sdist.toml new file mode 100644 index 000000000..e2e36405a --- /dev/null +++ b/tests/data/pixi_tomls/two_envs_one_sdist.toml
@@ -0,0 +1,26 @@
+[project]
+channels = ["https://prefix.dev/conda-forge"]
+name = "pypi-lazy-sdist"
+platforms = ["linux-64", "osx-arm64", "osx-64", "win-64"]
+
+[dependencies]
+python = "*"
+
+[pypi-dependencies]
+boltons = "*"
+
+[feature.py39.dependencies]
+python = "3.9"
+
+[feature.py39.pypi-dependencies]
+boltons = "==24.1.0"
+
+[feature.py310.dependencies]
+python = "3.10"
+
+[feature.py310.pypi-dependencies]
+sdist = "==0.0.0"
+
+[environments]
+py310 = ["py310"]
+py39 = ["py39"]
diff --git a/tests/integration_python/test_edge_cases.py b/tests/integration_python/test_edge_cases.py index 92f842813..fc621a2e2 100644 --- a/tests/integration_python/test_edge_cases.py +++ b/tests/integration_python/test_edge_cases.py
@@ -36,3 +36,29 @@ def test_python_mismatch(pixi: Path, tmp_pixi_workspace: Path) -> None:
[pixi, "install", "--manifest-path", manifest],
ExitCode.SUCCESS,
)
+
+
+@pytest.mark.slow
+def test_prefix_only_created_when_sdist(
+    pixi: Path, tmp_pixi_workspace: Path, tmp_path: Path
+) -> None:
+    """Test that the prefix is only created when the source distribution is used"""
+    test_data = Path(__file__).parent.parent / "data/pixi_tomls/two_envs_one_sdist.toml"
+    manifest = tmp_pixi_workspace.joinpath("pixi.toml")
+    toml = test_data.read_text()
+    manifest.write_text(toml)
+
+    # Install
+    verify_cli_command(
+        [pixi, "install", "--manifest-path", manifest],
+        ExitCode.SUCCESS,
+        # We need an uncached version, otherwise prefix creation might be skipped
+        env={"PIXI_CACHE_DIR": str(tmp_path)},
+    )
+
+    # Test if the `py310` prefix is created but the `py39` one is not
+    py39 = tmp_pixi_workspace / ".pixi" / "envs" / "py39"
+    py310 = tmp_pixi_workspace / ".pixi" / "envs" / "py310"
+
+    assert not py39.exists()
+    assert py310.exists()
diff --git a/tests/integration_rust/add_tests.rs b/tests/integration_rust/add_tests.rs index 112a6fe53..d77de3bec 100644 --- a/tests/integration_rust/add_tests.rs +++ b/tests/integration_rust/add_tests.rs
@@ -249,18 +249,12 @@ async fn add_pypi_functionality() {
.await
.unwrap();
-    // Add a pypi package but without installing should fail
-    pixi.add("pipx")
+    // Add a pypi package that is a wheel;
+    // adding it without installing should succeed
+    pixi.add("pipx==1.7.1")
.set_type(DependencyType::PypiDependency)
.with_install(false)
.await
-        .unwrap_err();
-
-    // Add a pypi package
-    pixi.add("pipx")
-        .set_type(DependencyType::PypiDependency)
-        .with_install(true)
-        .await
.unwrap();
// Add a pypi package to a target with short hash
diff --git a/tests/integration_rust/common/mod.rs b/tests/integration_rust/common/mod.rs index f17560411..e3035a43d 100644 --- a/tests/integration_rust/common/mod.rs +++ b/tests/integration_rust/common/mod.rs
@@ -169,6 +169,7 @@ impl LockFileExt for LockFile {
requirement: pep508_rs::Requirement,
) -> bool {
let Some(env) = self.environment(environment) else {
+            eprintln!("environment not found: {}", environment);
return false;
};
let package_found = env
@@ -261,6 +262,14 @@ impl PixiControl {
Ok(self.tmpdir.path().join(env.dir()))
}
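`env_path`, added just below, generalizes `default_env_path` to named environments; the Rust counterpart of the Python test above could use it like this sketch (hypothetical test body):

    // Check that only the sdist-bearing environment got a prefix on disk.
    let py39 = pixi.env_path("py39")?;
    let py310 = pixi.env_path("py310")?;
    assert!(!py39.exists());
    assert!(py310.exists());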
+    /// Get the path to the environment with the given name.
+    pub fn env_path(&self, env_name: &str) -> miette::Result<PathBuf> {
+        let project = self.project()?;
+        let env = project.environment(env_name);
+        let env = env.ok_or_else(|| miette::miette!("{} environment not found", env_name))?;
+        Ok(self.tmpdir.path().join(env.dir()))
+    }
+
pub fn manifest_path(&self) -> PathBuf {
// Either pixi.toml or pyproject.toml
if self.project_path().join(consts::PROJECT_MANIFEST).exists() {
diff --git a/tests/integration_rust/install_tests.rs b/tests/integration_rust/install_tests.rs index f11ac9567..4f3f7ef9d 100644 --- a/tests/integration_rust/install_tests.rs +++ b/tests/integration_rust/install_tests.rs
@@ -761,3 +761,35 @@ async fn test_ensure_gitignore_file_creation() {
".pixi/.gitignore file does not contain the expected content"
);
}
+
+#[tokio::test(flavor = "multi_thread", worker_threads = 1)]
+#[cfg_attr(not(feature = "slow_integration_tests"), ignore)]
+async fn pypi_prefix_is_not_created_when_whl() {
+    let pixi = PixiControl::new().unwrap();
+    pixi.init().await.unwrap();
+
+    // Add and update lockfile with this version of python
+    pixi.add("python==3.11").with_install(false).await.unwrap();
+
+    // Add a pypi dependency that is a wheel
+    pixi.add_multiple(vec!["boltons==24.1.0"])
+        .set_type(pixi::DependencyType::PypiDependency)
+        // we don't want to install the package,
+        // we just want to check that the prefix is not created
+        .with_install(false)
+        .await
+        .unwrap();
+
+    // Check the locked boltons dependencies
+    let lock = pixi.lock_file().await.unwrap();
+    assert!(lock.contains_pep508_requirement(
+        consts::DEFAULT_ENVIRONMENT_NAME,
+        Platform::current(),
+        pep508_rs::Requirement::from_str("boltons==24.1.0").unwrap()
+    ));
+
+    let default_env_prefix = pixi.default_env_path().unwrap();
+
+    // Check that the prefix is not created
+    assert!(!default_env_prefix.exists());
+}
diff --git a/tests/integration_rust/test_activation.rs b/tests/integration_rust/test_activation.rs index 259ae1437..9dadc7506 100644 --- a/tests/integration_rust/test_activation.rs +++ b/tests/integration_rust/test_activation.rs
@@ -1,5 +1,5 @@
use crate::common::PixiControl;
-use pixi::activation::CurrentEnvVarBehavior;
+use pixi::{activation::CurrentEnvVarBehavior, project::get_activated_environment_variables};
#[cfg(windows)]
const HOME: &str = "HOMEPATH";
@@ -14,16 +14,16 @@ async fn test_pixi_only_env_activation() {
let project = pixi.project().unwrap();
let default_env = project.default_environment();
-    let pixi_only_env = project
-        .get_activated_environment_variables(
-            &default_env,
-            CurrentEnvVarBehavior::Exclude,
-            None,
-            false,
-            false,
-        )
-        .await
-        .unwrap();
+    let pixi_only_env = get_activated_environment_variables(
+        project.env_vars(),
+        &default_env,
+        CurrentEnvVarBehavior::Exclude,
+        None,
+        false,
+        false,
+    )
+    .await
+    .unwrap();
std::env::set_var("DIRTY_VAR", "Dookie");
@@ -43,16 +43,16 @@
std::env::set_var("DIRTY_VAR", "Dookie");
-    let full_env = project
-        .get_activated_environment_variables(
-            &default_env,
-            CurrentEnvVarBehavior::Include,
-            None,
-            false,
-            false,
-        )
-        .await
-        .unwrap();
+    let full_env = get_activated_environment_variables(
+        project.env_vars(),
+        &default_env,
+        CurrentEnvVarBehavior::Include,
+        None,
+        false,
+        false,
+    )
+    .await
+    .unwrap();
assert!(full_env.get("CONDA_PREFIX").is_some());
assert!(full_env.get("DIRTY_VAR").is_some());
assert!(full_env.get(HOME).is_some());
@@ -69,16 +69,16 @@
std::env::set_var("DIRTY_VAR", 
"Dookie"); - let clean_env = project - .get_activated_environment_variables( - &default_env, - CurrentEnvVarBehavior::Clean, - None, - false, - false, - ) - .await - .unwrap(); + let clean_env = get_activated_environment_variables( + project.env_vars(), + &default_env, + CurrentEnvVarBehavior::Clean, + None, + false, + false, + ) + .await + .unwrap(); assert!(clean_env.get("CONDA_PREFIX").is_some()); assert!(clean_env.get("DIRTY_VAR").is_none());