diff --git a/Cargo.lock b/Cargo.lock index 92d4491..2488956 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -2642,6 +2642,12 @@ dependencies = [ "web-time", ] +[[package]] +name = "indoc" +version = "2.0.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f4c7245a08504955605670dbf141fceab975f15ca21570696aebe9d2e71576bd" + [[package]] name = "inout" version = "0.1.3" @@ -3824,6 +3830,34 @@ dependencies = [ "url", ] +[[package]] +name = "pixi-build-pyext" +version = "0.1.6" +dependencies = [ + "async-trait", + "chrono", + "fs-err", + "indexmap 2.7.1", + "insta", + "miette", + "minijinja", + "pixi-build-backend", + "pixi_build_type_conversions", + "pixi_build_types", + "pixi_manifest", + "pyo3", + "rattler-build", + "rattler_conda_types", + "rattler_package_streaming", + "rattler_virtual_packages", + "rstest", + "serde", + "serde_json", + "tempfile", + "tokio", + "url", +] + [[package]] name = "pixi-build-python" version = "0.1.5" @@ -4238,6 +4272,69 @@ dependencies = [ "unicase", ] +[[package]] +name = "pyo3" +version = "0.24.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "17da310086b068fbdcefbba30aeb3721d5bb9af8db4987d6735b2183ca567229" +dependencies = [ + "cfg-if 1.0.0", + "indoc", + "libc", + "memoffset", + "once_cell", + "portable-atomic", + "pyo3-build-config", + "pyo3-ffi", + "pyo3-macros", + "unindent", +] + +[[package]] +name = "pyo3-build-config" +version = "0.24.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e27165889bd793000a098bb966adc4300c312497ea25cf7a690a9f0ac5aa5fc1" +dependencies = [ + "once_cell", + "target-lexicon", +] + +[[package]] +name = "pyo3-ffi" +version = "0.24.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "05280526e1dbf6b420062f3ef228b78c0c54ba94e157f5cb724a609d0f2faabc" +dependencies = [ + "libc", + "pyo3-build-config", +] + +[[package]] +name = "pyo3-macros" +version = "0.24.1" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "5c3ce5686aa4d3f63359a5100c62a127c9f15e8398e5fdeb5deef1fed5cd5f44" +dependencies = [ + "proc-macro2", + "pyo3-macros-backend", + "quote", + "syn", +] + +[[package]] +name = "pyo3-macros-backend" +version = "0.24.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f4cf6faa0cbfb0ed08e89beb8103ae9724eb4750e3a78084ba4017cbe94f3855" +dependencies = [ + "heck 0.5.0", + "proc-macro2", + "pyo3-build-config", + "quote", + "syn", +] + [[package]] name = "pyproject-toml" version = "0.13.4" @@ -5949,6 +6046,12 @@ dependencies = [ "xattr", ] +[[package]] +name = "target-lexicon" +version = "0.13.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e502f78cdbb8ba4718f566c418c52bc729126ffd16baee5baa718cf25dd5a69a" + [[package]] name = "tempfile" version = "3.16.0" @@ -6408,6 +6511,12 @@ version = "0.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "1fc81956842c57dac11422a97c3b8195a1ff727f06e85c84ed2e8aa277c9a0fd" +[[package]] +name = "unindent" +version = "0.2.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7264e107f553ccae879d21fbea1d6724ac785e8c3bfc762137959b5802826ef3" + [[package]] name = "unsafe-libyaml" version = "0.2.11" diff --git a/crates/pixi-build-pyext/Cargo.toml b/crates/pixi-build-pyext/Cargo.toml new file mode 100644 index 0000000..bf6cccf --- /dev/null +++ b/crates/pixi-build-pyext/Cargo.toml @@ -0,0 +1,32 @@ +[package] +name = "pixi-build-pyext" +version = "0.1.6" +edition.workspace = true + +[dependencies] +async-trait = { workspace = true } +chrono = { workspace = true } +indexmap = { workspace = true } +miette = { workspace = true } +minijinja = { workspace = true, features = ["json"] } +rattler_conda_types = { workspace = true } +rattler_package_streaming = { workspace = true } +rattler-build = { workspace = true } +serde = { workspace = true, features = ["derive"] } 
+serde_json = { workspace = true } +tempfile = { workspace = true } +tokio = { workspace = true, features = ["macros"] } + +pixi-build-backend = { workspace = true } + +pixi_build_types = { workspace = true } +pixi_manifest = { workspace = true } +pixi_build_type_conversions = { workspace = true } +pyo3 = { version = "0.24.1", features = ["auto-initialize"] } +url.workspace = true +rattler_virtual_packages.workspace = true +fs-err = { workspace = true, features = ["tokio"] } + +[dev-dependencies] +insta = { version = "1.42.1", features = ["yaml", "redactions", "filters"] } +rstest = { workspace = true } diff --git a/crates/pixi-build-pyext/pixi.toml b/crates/pixi-build-pyext/pixi.toml new file mode 100644 index 0000000..24ceb3b --- /dev/null +++ b/crates/pixi-build-pyext/pixi.toml @@ -0,0 +1,31 @@ +[workspace] +authors = ["Nichita Morcotilo "] +channels = ["conda-forge"] +description = "Showcases how to build a rust project with pixi" +name = "pixi-build-pyext" +platforms = ["win-64", "linux-64", "osx-64", "osx-arm64"] +preview = ["pixi-build"] + +[dependencies] +pixi-build-pyext = { path = "." 
}
+
+[package]
+authors = ["Nichita Morcotilo "]
+description = "Showcases how to build a rust project with pixi"
+name = "pixi-build-pyext"
+version = "0.1.6"
+
+[package.build]
+backend = { name = "pixi-build-rust", version = "*" }
+channels = ["https://prefix.dev/pixi-build-backends", "conda-forge"]
+
+[package.build-dependencies]
+python = "3.12.*"
+
+[package.host-dependencies]
+python = "3.12.*"
+
+[package.run-dependencies]
+# python = "3.12.*"
+# "ruamel.yaml" = "0.18.*"
+# requests = "2.32.*"
diff --git a/crates/pixi-build-pyext/src/config.rs b/crates/pixi-build-pyext/src/config.rs
new file mode 100644
index 0000000..cd92102
--- /dev/null
+++ b/crates/pixi-build-pyext/src/config.rs
@@ -0,0 +1,12 @@
+use serde::{Deserialize, Serialize};
+use std::{collections::HashMap, path::PathBuf};
+
+#[derive(Debug, Default, Deserialize, Serialize)]
+#[serde(rename_all = "kebab-case")]
+pub struct PyExtConfig {
+    pub debug_dir: Option<PathBuf>,
+    pub python_script: PathBuf,
+
+    #[serde(flatten)]
+    pub options: HashMap<String, serde_json::Value>,
+}
diff --git a/crates/pixi-build-pyext/src/main.rs b/crates/pixi-build-pyext/src/main.rs
new file mode 100644
index 0000000..8ffc54b
--- /dev/null
+++ b/crates/pixi-build-pyext/src/main.rs
@@ -0,0 +1,13 @@
+mod config;
+mod protocol;
+mod rattler_build;
+
+use protocol::RattlerBuildBackendInstantiator;
+
+#[tokio::main]
+pub async fn main() {
+    if let Err(err) = pixi_build_backend::cli::main(RattlerBuildBackendInstantiator::new).await {
+        eprintln!("{err:?}");
+        std::process::exit(1);
+    }
+}
diff --git a/crates/pixi-build-pyext/src/protocol.rs b/crates/pixi-build-pyext/src/protocol.rs
new file mode 100644
index 0000000..55ea216
--- /dev/null
+++ b/crates/pixi-build-pyext/src/protocol.rs
@@ -0,0 +1,616 @@
+use std::str::FromStr;
+
+use fs_err::tokio as tokio_fs;
+use miette::{Context, IntoDiagnostic};
+use pixi_build_backend::{
+    protocol::{Protocol, ProtocolInstantiator},
+    tools::RattlerBuild,
+    utils::TemporaryRenderedRecipe,
+};
+use pixi_build_types::{
procedures::{ + conda_build::{CondaBuildParams, CondaBuildResult, CondaBuiltPackage}, + conda_metadata::{CondaMetadataParams, CondaMetadataResult}, + initialize::{InitializeParams, InitializeResult}, + negotiate_capabilities::{NegotiateCapabilitiesParams, NegotiateCapabilitiesResult}, + }, + BackendCapabilities, CondaPackageMetadata, ProjectModelV1, +}; +use rattler_build::{ + build::run_build, + console_utils::LoggingOutputHandler, + hash::HashInfo, + metadata::PlatformWithVirtualPackages, + recipe::{parser::BuildString, Jinja}, + render::resolved_dependencies::DependencyInfo, + selectors::SelectorConfig, + tool_configuration::Configuration, +}; +use rattler_conda_types::{ChannelConfig, MatchSpec, Platform}; +use rattler_virtual_packages::VirtualPackageOverrides; +use url::Url; + +use crate::{config::PyExtConfig, rattler_build::RattlerBuildBackend}; +pub struct RattlerBuildBackendInstantiator { + logging_output_handler: LoggingOutputHandler, +} + +impl RattlerBuildBackendInstantiator { + /// This type implements [`ProtocolInstantiator`] and can be used to + /// initialize a new [`RattlerBuildBackend`]. 
+ pub fn new(logging_output_handler: LoggingOutputHandler) -> RattlerBuildBackendInstantiator { + RattlerBuildBackendInstantiator { + logging_output_handler, + } + } +} + +#[async_trait::async_trait] +impl Protocol for RattlerBuildBackend { + async fn conda_get_metadata( + &self, + params: CondaMetadataParams, + ) -> miette::Result { + log_conda_get_metadata(&self.config, ¶ms).await?; + + // Create the work directory if it does not exist + tokio_fs::create_dir_all(¶ms.work_directory) + .await + .into_diagnostic()?; + + let host_platform = params + .host_platform + .as_ref() + .map(|p| p.platform) + .unwrap_or(Platform::current()); + + let build_platform = params + .build_platform + .as_ref() + .map(|p| p.platform) + .unwrap_or(Platform::current()); + + let selector_config = RattlerBuild::selector_config_from(¶ms); + + let rattler_build_tool = RattlerBuild::new( + self.recipe_source.clone(), + selector_config, + params.work_directory.clone(), + ); + + let channel_config = ChannelConfig { + channel_alias: params.channel_configuration.base_url, + root_dir: self + .recipe_source + .path + .parent() + .expect("should have parent") + .to_path_buf(), + }; + + let channels = params + .channel_base_urls + .unwrap_or_else(|| vec![Url::from_str("https://prefix.dev/conda-forge").unwrap()]); + + let discovered_outputs = + rattler_build_tool.discover_outputs(¶ms.variant_configuration)?; + + let host_vpkgs = params + .host_platform + .as_ref() + .map(|p| p.virtual_packages.clone()) + .unwrap_or_default(); + + let host_vpkgs = RattlerBuild::detect_virtual_packages(host_vpkgs)?; + + let build_vpkgs = params + .build_platform + .as_ref() + .map(|p| p.virtual_packages.clone()) + .unwrap_or_default(); + + let build_vpkgs = RattlerBuild::detect_virtual_packages(build_vpkgs)?; + + let outputs = rattler_build_tool.get_outputs( + &discovered_outputs, + channels, + build_vpkgs, + host_vpkgs, + host_platform, + build_platform, + )?; + + let tool_config = Configuration::builder() + 
.with_opt_cache_dir(self.cache_dir.clone()) + .with_logging_output_handler(self.logging_output_handler.clone()) + .with_channel_config(channel_config.clone()) + .with_testing(false) + .with_keep_build(true) + .finish(); + + let mut solved_packages = vec![]; + + eprintln!("before outputs "); + + for output in outputs { + let temp_recipe = TemporaryRenderedRecipe::from_output(&output)?; + let tool_config = &tool_config; + let output = temp_recipe + .within_context_async(move || async move { + output + .resolve_dependencies(tool_config) + .await + .into_diagnostic() + }) + .await?; + + let finalized_deps = &output + .finalized_dependencies + .as_ref() + .expect("dependencies should be resolved at this point") + .run; + + let selector_config = output.build_configuration.selector_config(); + + let jinja = Jinja::new(selector_config.clone()).with_context(&output.recipe.context); + + let hash = HashInfo::from_variant(output.variant(), output.recipe.build().noarch()); + let build_string = output.recipe.build().string().resolve( + &hash, + output.recipe.build().number(), + &jinja, + ); + + let conda = CondaPackageMetadata { + name: output.name().clone(), + version: output.version().clone().into(), + build: build_string.to_string(), + build_number: output.recipe.build.number, + subdir: output.build_configuration.target_platform, + depends: finalized_deps + .depends + .iter() + .map(DependencyInfo::spec) + .map(MatchSpec::to_string) + .collect(), + constraints: finalized_deps + .constraints + .iter() + .map(DependencyInfo::spec) + .map(MatchSpec::to_string) + .collect(), + license: output.recipe.about.license.map(|l| l.to_string()), + license_family: output.recipe.about.license_family, + noarch: output.recipe.build.noarch, + }; + solved_packages.push(conda); + } + + Ok(CondaMetadataResult { + packages: solved_packages, + input_globs: None, + }) + } + + async fn conda_build(&self, params: CondaBuildParams) -> miette::Result { + log_conda_build(&self.config, ¶ms).await?; + + // 
Create the work directory if it does not exist + tokio_fs::create_dir_all(¶ms.work_directory) + .await + .into_diagnostic()?; + + let host_platform = params + .host_platform + .as_ref() + .map(|p| p.platform) + .unwrap_or(Platform::current()); + + let build_platform = Platform::current(); + + let selector_config = SelectorConfig { + target_platform: build_platform, + host_platform, + build_platform, + hash: None, + variant: Default::default(), + experimental: true, + allow_undefined: false, + }; + + let host_vpkgs = params + .host_platform + .as_ref() + .map(|p| p.virtual_packages.clone()) + .unwrap_or_default(); + + let host_vpkgs = match host_vpkgs { + Some(vpkgs) => vpkgs, + None => { + PlatformWithVirtualPackages::detect(&VirtualPackageOverrides::from_env()) + .into_diagnostic()? + .virtual_packages + } + }; + + let build_vpkgs = params + .build_platform_virtual_packages + .clone() + .unwrap_or_default(); + + let channel_config = ChannelConfig { + channel_alias: params.channel_configuration.base_url, + root_dir: self + .recipe_source + .path + .parent() + .expect("should have parent") + .to_path_buf(), + }; + + let channels = params + .channel_base_urls + .unwrap_or_else(|| vec![Url::from_str("https://prefix.dev/conda-forge").unwrap()]); + + let rattler_build_tool = RattlerBuild::new( + self.recipe_source.clone(), + selector_config, + params.work_directory.clone(), + ); + + let discovered_outputs = + rattler_build_tool.discover_outputs(¶ms.variant_configuration)?; + + let outputs = rattler_build_tool.get_outputs( + &discovered_outputs, + channels, + build_vpkgs, + host_vpkgs, + host_platform, + build_platform, + )?; + + let mut built = vec![]; + + let tool_config = Configuration::builder() + .with_opt_cache_dir(self.cache_dir.clone()) + .with_logging_output_handler(self.logging_output_handler.clone()) + .with_channel_config(channel_config.clone()) + .with_testing(false) + .with_keep_build(true) + .finish(); + + for output in outputs { + let temp_recipe = 
TemporaryRenderedRecipe::from_output(&output)?;
+
+            let tool_config = &tool_config;
+
+            let mut output_with_build_string = output.clone();
+
+            let selector_config = output.build_configuration.selector_config();
+
+            let jinja = Jinja::new(selector_config.clone()).with_context(&output.recipe.context);
+
+            let hash = HashInfo::from_variant(output.variant(), output.recipe.build().noarch());
+            let build_string = output.recipe.build().string().resolve(
+                &hash,
+                output.recipe.build().number(),
+                &jinja,
+            );
+            output_with_build_string.recipe.build.string =
+                BuildString::Resolved(build_string.to_string());
+
+            let (output, build_path) = temp_recipe
+                .within_context_async(move || async move {
+                    run_build(output_with_build_string, tool_config).await
+                })
+                .await?;
+
+            built.push(CondaBuiltPackage {
+                output_file: build_path,
+                input_globs: Vec::from([self.recipe_source.name.clone()]),
+                name: output.name().as_normalized().to_string(),
+                version: output.version().to_string(),
+                build: build_string.to_string(),
+                subdir: output.target_platform().to_string(),
+            });
+        }
+        Ok(CondaBuildResult { packages: built })
+    }
+}
+
+#[async_trait::async_trait]
+impl ProtocolInstantiator for RattlerBuildBackendInstantiator {
+    async fn initialize(
+        &self,
+        params: InitializeParams,
+    ) -> miette::Result<(Box<dyn Protocol + Send + Sync>, InitializeResult)> {
+        let config = if let Some(config) = &params.configuration {
+            serde_json::from_value(config.clone())
+                .into_diagnostic()
+                .context("failed to parse configuration")?
+ } else { + PyExtConfig::default() + }; + + let project_model = params + .project_model + .ok_or_else(|| miette::miette!("project model is required"))?; + + let project_model = project_model + .into_v1() + .ok_or_else(|| miette::miette!("project model v1 is required"))?; + + log_initialize(&config, &project_model).await?; + + let instance = RattlerBuildBackend::new( + params.manifest_path.as_path(), + self.logging_output_handler.clone(), + params.cache_directory, + config, + project_model, + )?; + + Ok((Box::new(instance), InitializeResult {})) + } + + async fn negotiate_capabilities( + _params: NegotiateCapabilitiesParams, + ) -> miette::Result { + Ok(NegotiateCapabilitiesResult { + capabilities: default_capabilities(), + }) + } +} + +pub(crate) fn default_capabilities() -> BackendCapabilities { + BackendCapabilities { + provides_conda_metadata: Some(true), + provides_conda_build: Some(true), + highest_supported_project_model: Some( + pixi_build_types::VersionedProjectModel::highest_version(), + ), + } +} + +async fn log_initialize( + config: &PyExtConfig, + project_model: &ProjectModelV1, +) -> miette::Result<()> { + let Some(ref debug_dir) = config.debug_dir else { + return Ok(()); + }; + + let project_model_json = serde_json::to_string_pretty(&project_model) + .into_diagnostic() + .context("failed to serialize project model to JSON")?; + + let project_model_path = debug_dir.join("project_model.json"); + tokio_fs::write(&project_model_path, project_model_json) + .await + .into_diagnostic() + .context("failed to write project model JSON to file")?; + Ok(()) +} + +async fn log_conda_get_metadata( + config: &PyExtConfig, + params: &CondaMetadataParams, +) -> miette::Result<()> { + let Some(ref debug_dir) = config.debug_dir else { + return Ok(()); + }; + + let json = serde_json::to_string_pretty(¶ms) + .into_diagnostic() + .context("failed to serialize parameters to JSON")?; + + tokio_fs::create_dir_all(&debug_dir) + .await + .into_diagnostic() + .context("failed 
to create data directory")?; + + let path = debug_dir.join("conda_metadata_params.json"); + tokio_fs::write(&path, json) + .await + .into_diagnostic() + .context("failed to write JSON to file")?; + Ok(()) +} + +async fn log_conda_build(config: &PyExtConfig, params: &CondaBuildParams) -> miette::Result<()> { + let Some(ref debug_dir) = config.debug_dir else { + return Ok(()); + }; + + let json = serde_json::to_string_pretty(¶ms) + .into_diagnostic() + .context("failed to serialize parameters to JSON")?; + + tokio_fs::create_dir_all(&debug_dir) + .await + .into_diagnostic() + .context("failed to create data directory")?; + + let path = debug_dir.join("conda_build_params.json"); + tokio_fs::write(&path, json) + .await + .into_diagnostic() + .context("failed to write JSON to file")?; + Ok(()) +} + +#[cfg(test)] +mod tests { + use std::{ + path::{Path, PathBuf}, + str::FromStr, + }; + + use pixi_build_types::{ + procedures::{ + conda_build::CondaBuildParams, conda_metadata::CondaMetadataParams, + initialize::InitializeParams, + }, + ChannelConfiguration, + }; + use rattler_build::console_utils::LoggingOutputHandler; + use tempfile::tempdir; + use url::Url; + + use super::*; + + #[tokio::test] + async fn test_conda_get_metadata() { + // get cargo manifest dir + let manifest_dir = PathBuf::from(env!("CARGO_MANIFEST_DIR")); + let recipe = manifest_dir.join("../../tests/recipe/boltons/recipe.yaml"); + + let factory = RattlerBuildBackendInstantiator::new(LoggingOutputHandler::default()) + .initialize(InitializeParams { + manifest_path: recipe, + project_model: None, + configuration: None, + cache_directory: None, + }) + .await + .unwrap(); + + let current_dir = std::env::current_dir().unwrap(); + + let result = factory + .0 + .conda_get_metadata(CondaMetadataParams { + host_platform: None, + build_platform: None, + channel_configuration: ChannelConfiguration { + base_url: Url::from_str("https://prefix.dev").unwrap(), + }, + channel_base_urls: None, + work_directory: 
current_dir,
+                variant_configuration: None,
+            })
+            .await
+            .unwrap();
+
+        assert_eq!(result.packages.len(), 1);
+    }
+
+    #[tokio::test]
+    async fn test_conda_build() {
+        // get cargo manifest dir
+        let manifest_dir = PathBuf::from(env!("CARGO_MANIFEST_DIR"));
+        let recipe = manifest_dir.join("../../tests/recipe/boltons/recipe.yaml");
+
+        let factory = RattlerBuildBackendInstantiator::new(LoggingOutputHandler::default())
+            .initialize(InitializeParams {
+                manifest_path: recipe,
+                project_model: None,
+                configuration: None,
+                cache_directory: None,
+            })
+            .await
+            .unwrap();
+
+        let current_dir = tempdir().unwrap();
+
+        let result = factory
+            .0
+            .conda_build(CondaBuildParams {
+                build_platform_virtual_packages: None,
+                host_platform: None,
+                channel_base_urls: None,
+                channel_configuration: ChannelConfiguration {
+                    base_url: Url::from_str("https://prefix.dev").unwrap(),
+                },
+                outputs: None,
+                work_directory: current_dir.into_path(),
+                variant_configuration: None,
+                editable: false,
+            })
+            .await
+            .unwrap();
+
+        assert_eq!(result.packages[0].name, "boltons-with-extra");
+    }
+
+    const FAKE_RECIPE: &str = r#"
+    package:
+      name: foobar
+      version: 0.1.0
+    "#;
+
+    async fn try_initialize(
+        manifest_path: impl AsRef<Path>,
+    ) -> miette::Result<RattlerBuildBackend> {
+        RattlerBuildBackend::new(
+            manifest_path.as_ref(),
+            LoggingOutputHandler::default(),
+            None,
+            RattlerBuildBackendConfig::default(),
+        )
+    }
+
+    #[tokio::test]
+    async fn test_recipe_discovery() {
+        let tmp = tempdir().unwrap();
+        let recipe = tmp.path().join("recipe.yaml");
+        std::fs::write(&recipe, FAKE_RECIPE).unwrap();
+        assert_eq!(
+            try_initialize(&tmp.path().join("pixi.toml"))
+                .await
+                .unwrap()
+                .recipe_source
+                .path,
+            recipe
+        );
+        assert_eq!(
+            try_initialize(&recipe).await.unwrap().recipe_source.path,
+            recipe
+        );
+
+        let tmp = tempdir().unwrap();
+        let recipe = tmp.path().join("recipe.yml");
+        std::fs::write(&recipe, FAKE_RECIPE).unwrap();
+        assert_eq!(
+            try_initialize(&tmp.path().join("pixi.toml"))
+                .await
.unwrap() + .recipe_source + .path, + recipe + ); + assert_eq!( + try_initialize(&recipe).await.unwrap().recipe_source.path, + recipe + ); + + let tmp = tempdir().unwrap(); + let recipe_dir = tmp.path().join("recipe"); + let recipe = recipe_dir.join("recipe.yaml"); + std::fs::create_dir(recipe_dir).unwrap(); + std::fs::write(&recipe, FAKE_RECIPE).unwrap(); + assert_eq!( + try_initialize(&tmp.path().join("pixi.toml")) + .await + .unwrap() + .recipe_source + .path, + recipe + ); + + let tmp = tempdir().unwrap(); + let recipe_dir = tmp.path().join("recipe"); + let recipe = recipe_dir.join("recipe.yml"); + std::fs::create_dir(recipe_dir).unwrap(); + std::fs::write(&recipe, FAKE_RECIPE).unwrap(); + assert_eq!( + try_initialize(&tmp.path().join("pixi.toml")) + .await + .unwrap() + .recipe_source + .path, + recipe + ); + } +} diff --git a/crates/pixi-build-pyext/src/rattler_build.rs b/crates/pixi-build-pyext/src/rattler_build.rs new file mode 100644 index 0000000..3662526 --- /dev/null +++ b/crates/pixi-build-pyext/src/rattler_build.rs @@ -0,0 +1,107 @@ +use std::{ + ffi::CString, + io::Write, + path::{Path, PathBuf}, +}; + +use miette::IntoDiagnostic; +use pixi_build_backend::source::Source; +use pixi_build_types::ProjectModelV1; +use pyo3::{ + types::{PyAnyMethods as _, PyModule}, + PyResult, Python, +}; +use rattler_build::console_utils::LoggingOutputHandler; +use tempfile::NamedTempFile; + +use crate::config::PyExtConfig; + +pub struct RattlerBuildBackend { + pub(crate) logging_output_handler: LoggingOutputHandler, + /// In case of rattler-build, manifest is the raw recipe + /// We need to apply later the selectors to get the final recipe + pub(crate) recipe_source: Source, + pub(crate) cache_dir: Option, + pub(crate) config: PyExtConfig, + + _temp_recipe_file: NamedTempFile, +} + +impl RattlerBuildBackend { + /// Returns a new instance of [`RattlerBuildBackend`] by reading the + /// manifest at the given path. 
+    pub fn new(
+        manifest_path: &Path,
+        logging_output_handler: LoggingOutputHandler,
+        cache_dir: Option<PathBuf>,
+        config: PyExtConfig,
+        project_model: ProjectModelV1,
+    ) -> miette::Result<Self> {
+        // Locate the recipe
+        // Create a temporary file to hold the generated recipe
+        // Try to place it relative to the manifest path for context, otherwise use system temp
+        let mut temp_file = NamedTempFile::with_suffix(".yaml")
+            .into_diagnostic()
+            .map_err(|e| miette::miette!("Failed to create temporary file: {}", e))?;
+
+        eprintln!("Manifest path: {}", manifest_path.display());
+        let pyscript = PathBuf::from("backend.py");
+        let py_script_path = manifest_path.parent().unwrap().join(&pyscript);
+        eprintln!("Python script path: {}", py_script_path.display());
+        eprintln!("Python script path: xx {:?}", &pyscript);
+
+        let py_file_content = fs_err::read_to_string(&py_script_path)
+            .into_diagnostic()
+            .map_err(|e| miette::miette!("Failed to read Python script: {}", e))?;
+        let c_str = CString::new(py_file_content).unwrap();
+
+        // Call the Python function
+        let generated_recipe_content = Python::with_gil(|py| -> PyResult<String> {
+            let spec = PyModule::from_code(
+                py,
+                &c_str,
+                &CString::new("recipe_generator.py").unwrap(),
+                &CString::new("recipe_generator").unwrap(),
+            )?;
+
+            let func = spec.getattr("generate_recipe")?;
+
+            let project_model_json =
+                serde_json::to_string(&project_model).expect("Failed to serialize project model");
+            let config_json = serde_json::to_string(&config).expect("Failed to serialize config");
+
+            let args = (project_model_json, config_json);
+            let result = func.call1(args)?;
+            result.extract::<String>()
+        })
+        .map_err(|e| {
+            miette::miette!(
+                "Python error generating recipe via '{}.{}': {}",
+                "recipe_generator",
+                "generate_recipe",
+                e
+            )
+        })?;
+
+        eprintln!("Generated recipe content:\n{}\n", generated_recipe_content);
+
+        // Write the generated recipe to the temporary file
+        temp_file
+            .write_all(generated_recipe_content.as_bytes())
.into_diagnostic() + .map_err(|e| miette::miette!("Failed to write to temporary file: {}", e))?; + + // Load the manifest from the source directory + let manifest_root = manifest_path.parent().expect("manifest must have a root"); + let recipe_source = Source::from_rooted_path(manifest_root, temp_file.path().to_path_buf()) + .into_diagnostic()?; + + Ok(Self { + recipe_source, + logging_output_handler, + cache_dir, + config, + _temp_recipe_file: temp_file, + }) + } +} diff --git a/crates/pixi-build-python/src/python.rs b/crates/pixi-build-python/src/python.rs index ca5101e..734cbbc 100644 --- a/crates/pixi-build-python/src/python.rs +++ b/crates/pixi-build-python/src/python.rs @@ -239,8 +239,11 @@ pub(crate) fn construct_configuration( params: BuildConfigurationParams, ) -> BuildConfiguration { BuildConfiguration { - // TODO: NoArch?? - target_platform: Platform::NoArch, + target_platform: if recipe.build.noarch == NoArchType::none() { + params.host_platform.platform + } else { + Platform::NoArch + }, host_platform: params.host_platform, build_platform: params.build_platform, hash: HashInfo::from_variant(¶ms.variant, &recipe.build.noarch),