diff --git a/.gitignore b/.gitignore index b2c30b0a..03f07a72 100644 --- a/.gitignore +++ b/.gitignore @@ -5,3 +5,4 @@ src/new/includes.rs src/new/templates/**/Cargo.lock src/new/templates/**/*.wasm src/new/templates/**/*.zip +**/.DS_Store diff --git a/Cargo.lock b/Cargo.lock index 2b5d746f..b2695d9d 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -1,6 +1,6 @@ # This file is automatically @generated by Cargo. # It is not intended for manual editing. -version = 3 +version = 4 [[package]] name = "addr2line" @@ -2281,7 +2281,7 @@ dependencies = [ [[package]] name = "kit" -version = "1.0.2" +version = "1.1.0" dependencies = [ "alloy", "alloy-sol-macro", diff --git a/Cargo.toml b/Cargo.toml index 88c31954..44327389 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -1,7 +1,7 @@ [package] name = "kit" authors = ["Sybil Technologies AG"] -version = "1.0.2" +version = "1.1.0" edition = "2021" description = "Development toolkit for Hyperware" homepage = "https://hyperware.ai" @@ -52,8 +52,7 @@ semver = "1.0" serde = { version = "1.0", features = ["derive"] } serde_json = "1.0" sha2 = "0.10.8" -syn = { version = "2.0", features = ["full", "visit", "extra-traits"] } -#syn = { version = "2.0", features = ["full", "visit"] } +syn = { version = "2.0", features = ["full", "visit", "parsing", "extra-traits"] } thiserror = "1.0" tokio = { version = "1.28", features = [ "macros", diff --git a/build.rs b/build.rs index 5c3ae138..1bdf8338 100644 --- a/build.rs +++ b/build.rs @@ -50,7 +50,10 @@ fn visit_dirs(dir: &Path, output_buffer: &mut Vec) -> io::Result<()> { let path = entry.path(); if path.is_dir() { let dir_name = path.file_name().and_then(|s| s.to_str()); - if dir_name == Some("home") || dir_name == Some("target") { + if dir_name == Some("home") + || dir_name == Some("target") + || dir_name == Some(".mypy_cache") + { continue; } visit_dirs(&path, output_buffer)?; @@ -106,6 +109,9 @@ fn add_branch_name(repo: &git2::Repository) -> anyhow::Result<()> { } fn main() -> anyhow::Result<()> { + // Always run this script + println!("cargo:rerun-if-changed=NULL"); + make_new_includes()?; // write version info into binary diff --git a/src/build/caller_utils_generator.rs b/src/build/caller_utils_generator.rs new file mode 100644 index 00000000..c450252e --- /dev/null +++ b/src/build/caller_utils_generator.rs @@ -0,0 +1,885 @@ +use std::collections::HashMap; +use std::fs; +use std::path::{Path, PathBuf}; + +use color_eyre::{ + eyre::{bail, WrapErr}, + Result, +}; +use tracing::{debug, info, instrument, warn}; + +use toml::Value; +use walkdir::WalkDir; + +// Convert kebab-case to snake_case +pub fn to_snake_case(s: &str) -> String { + s.replace('-', "_") +} + +// Convert kebab-case to PascalCase +pub fn to_pascal_case(s: &str) -> String { + let parts = s.split('-'); + let mut result = String::new(); + + for part in parts { + if !part.is_empty() { + let mut chars = part.chars(); + if let Some(first_char) = chars.next() { + result.push(first_char.to_uppercase().next().unwrap()); + result.extend(chars); + } + } + } + + result +} + +// Find the world name in the world WIT file, prioritizing types-prefixed worlds +#[instrument(level = "trace", skip_all)] +fn find_world_names(api_dir: &Path) -> Result> { + debug!(dir = ?api_dir, "Looking for world names..."); + let mut world_names = Vec::new(); + + // Look for world definition files + for entry in WalkDir::new(api_dir) + .max_depth(1) + .into_iter() + .filter_map(Result::ok) + { + let path = entry.path(); + + if path.is_file() && path.extension().map_or(false, |ext| ext == "wit") { 
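+            // Read each candidate .wit file and look for a `world` declaration,
+            // e.g. a (hypothetical) line such as `world types-my-process {`;
+            // only worlds whose names begin with "types-" are collected below.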
+ if let Ok(content) = fs::read_to_string(path) { + if content.contains("world ") { + debug!(file = %path.display(), "Analyzing potential world definition file"); + + // Extract the world name + let lines: Vec<&str> = content.lines().collect(); + + if let Some(world_line) = + lines.iter().find(|line| line.trim().starts_with("world ")) + { + debug!(line = %world_line, "Found world line"); + + if let Some(world_name) = world_line.trim().split_whitespace().nth(1) { + let clean_name = world_name.trim_end_matches(" {"); + debug!(name = %clean_name, "Extracted potential world name"); + + // Check if this is a types-prefixed world + if clean_name.starts_with("types-") { + world_names.push(clean_name.to_string()); + debug!(name = %clean_name, "Found types-prefixed world"); + } + } + } + } + } + } + } + + if world_names.is_empty() { + bail!("No world name found in any WIT file. Cannot generate caller-utils without a world name.") + } + Ok(world_names) +} + +// Convert WIT type to Rust type - IMPROVED with more Rust primitives +fn wit_type_to_rust(wit_type: &str) -> String { + match wit_type { + // Integer types + "s8" => "i8".to_string(), + "u8" => "u8".to_string(), + "s16" => "i16".to_string(), + "u16" => "u16".to_string(), + "s32" => "i32".to_string(), + "u32" => "u32".to_string(), + "s64" => "i64".to_string(), + "u64" => "u64".to_string(), + // Floating point types + "f32" => "f32".to_string(), + "f64" => "f64".to_string(), + // Other primitive types + "string" => "String".to_string(), + "str" => "&str".to_string(), + "char" => "char".to_string(), + "bool" => "bool".to_string(), + "_" => "()".to_string(), + // Special types + "address" => "WitAddress".to_string(), + // Collection types with generics + t if t.starts_with("list<") => { + let inner_type = &t[5..t.len() - 1]; + format!("Vec<{}>", wit_type_to_rust(inner_type)) + } + t if t.starts_with("option<") => { + let inner_type = &t[7..t.len() - 1]; + format!("Option<{}>", wit_type_to_rust(inner_type)) + } + t if t.starts_with("result<") => { + let inner_part = &t[7..t.len() - 1]; + if let Some(comma_pos) = inner_part.find(',') { + let ok_type = &inner_part[..comma_pos].trim(); + let err_type = &inner_part[comma_pos + 1..].trim(); + format!( + "Result<{}, {}>", + wit_type_to_rust(ok_type), + wit_type_to_rust(err_type) + ) + } else { + format!("Result<{}, ()>", wit_type_to_rust(inner_part)) + } + } + t if t.starts_with("tuple<") => { + let inner_types = &t[6..t.len() - 1]; + let rust_types: Vec = inner_types + .split(", ") + .map(|t| wit_type_to_rust(t)) + .collect(); + format!("({})", rust_types.join(", ")) + } + // Custom types (in kebab-case) need to be converted to PascalCase + _ => to_pascal_case(wit_type).to_string(), + } +} + +// Generate default value for Rust type - IMPROVED with additional types +fn generate_default_value(rust_type: &str) -> String { + match rust_type { + // Integer types + "i8" | "u8" | "i16" | "u16" | "i32" | "u32" | "i64" | "u64" | "isize" | "usize" => { + "0".to_string() + } + // Floating point types + "f32" | "f64" => "0.0".to_string(), + // String types + "String" => "String::new()".to_string(), + "&str" => "\"\"".to_string(), + // Other primitive types + "bool" => "false".to_string(), + "char" => "'\\0'".to_string(), + "()" => "()".to_string(), + // Collection types + t if t.starts_with("Vec<") => "Vec::new()".to_string(), + t if t.starts_with("Option<") => "None".to_string(), + t if t.starts_with("Result<") => { + // For Result, default to Ok with the default value of the success type + if let 
Some(success_type_end) = t.find(',') { + let success_type = &t[7..success_type_end]; + format!("Ok({})", generate_default_value(success_type)) + } else { + "Ok(())".to_string() + } + } + //t if t.starts_with("HashMap<") => "HashMap::new()".to_string(), + t if t.starts_with("(") => { + // Generate default tuple with default values for each element + let inner_part = t.trim_start_matches('(').trim_end_matches(')'); + let parts: Vec<_> = inner_part.split(", ").collect(); + let default_values: Vec<_> = parts + .iter() + .map(|part| generate_default_value(part)) + .collect(); + format!("({})", default_values.join(", ")) + } + // For custom types, assume they implement Default + _ => format!("{}::default()", rust_type), + } +} + +// Structure to represent a field in a WIT signature struct +#[derive(Debug)] +struct SignatureField { + name: String, + wit_type: String, +} + +// Structure to represent a WIT signature struct +#[derive(Debug)] +struct SignatureStruct { + function_name: String, + attr_type: String, + fields: Vec, +} + +// Find all interface imports in the world WIT file +#[instrument(level = "trace", skip_all)] +fn find_interfaces_in_world(api_dir: &Path) -> Result> { + debug!(dir = ?api_dir, "Finding interface imports in world definitions"); + let mut interfaces = Vec::new(); + + // Find world definition files + for entry in WalkDir::new(api_dir) + .max_depth(1) + .into_iter() + .filter_map(Result::ok) + { + let path = entry.path(); + + if path.is_file() && path.extension().map_or(false, |ext| ext == "wit") { + if let Ok(content) = fs::read_to_string(path) { + if content.contains("world ") { + debug!(file = %path.display(), "Analyzing world definition file for imports"); + + // Extract import statements + for line in content.lines() { + let line = line.trim(); + if line.starts_with("import ") && line.ends_with(";") { + let interface = line + .trim_start_matches("import ") + .trim_end_matches(";") + .trim(); + + interfaces.push(interface.to_string()); + debug!(interface = %interface, "Found interface import"); + } + } + } + } + } + } + debug!(count = interfaces.len(), interfaces = ?interfaces, "Found interface imports"); + Ok(interfaces) +} + +// Parse WIT file to extract function signatures and type definitions +#[instrument(level = "trace", skip_all)] +fn parse_wit_file(file_path: &Path) -> Result<(Vec, Vec)> { + debug!(file = %file_path.display(), "Parsing WIT file"); + + let content = fs::read_to_string(file_path) + .with_context(|| format!("Failed to read WIT file: {}", file_path.display()))?; + + let mut signatures = Vec::new(); + let mut type_names = Vec::new(); + + // Simple parser for WIT files to extract record definitions and types + let lines: Vec<_> = content.lines().collect(); + let mut i = 0; + + while i < lines.len() { + let line = lines[i].trim(); + + // Look for record definitions that aren't signature structs + if line.starts_with("record ") && !line.contains("-signature-") { + let record_name = line + .trim_start_matches("record ") + .trim_end_matches(" {") + .trim(); + debug!(name = %record_name, "Found type definition (record)"); + type_names.push(record_name.to_string()); + } + // Look for variant definitions (enums) + else if line.starts_with("variant ") { + let variant_name = line + .trim_start_matches("variant ") + .trim_end_matches(" {") + .trim(); + debug!(name = %variant_name, "Found type definition (variant)"); + type_names.push(variant_name.to_string()); + } + // Look for signature record definitions + else if line.starts_with("record ") && 
line.contains("-signature-") { + let record_name = line + .trim_start_matches("record ") + .trim_end_matches(" {") + .trim(); + debug!(name = %record_name, "Found signature record"); + + // Extract function name and attribute type + let parts: Vec<_> = record_name.split("-signature-").collect(); + if parts.len() != 2 { + warn!(name = %record_name, "Unexpected signature record name format, skipping"); + i += 1; + continue; + } + + let function_name = parts[0].to_string(); + let attr_type = parts[1].to_string(); + debug!(function = %function_name, attr_type = %attr_type, "Extracted function name and type"); + + // Parse fields + let mut fields = Vec::new(); + i += 1; + + while i < lines.len() && !lines[i].trim().starts_with("}") { + let field_line = lines[i].trim(); + + // Skip comments and empty lines + if field_line.starts_with("//") || field_line.is_empty() { + i += 1; + continue; + } + + // Parse field definition + let field_parts: Vec<_> = field_line.split(':').collect(); + if field_parts.len() == 2 { + let field_name = field_parts[0].trim().to_string(); + let field_type = field_parts[1].trim().trim_end_matches(',').to_string(); + + debug!(name = %field_name, wit_type = %field_type, "Found field"); + fields.push(SignatureField { + name: field_name, + wit_type: field_type, + }); + } + + i += 1; + } + + signatures.push(SignatureStruct { + function_name, + attr_type, + fields, + }); + } + + i += 1; + } + + debug!( + file = %file_path.display(), + signatures = signatures.len(), + types = type_names.len(), + "Finished parsing WIT file" + ); + Ok((signatures, type_names)) +} + +// Generate a Rust async function from a signature struct +fn generate_async_function(signature: &SignatureStruct) -> String { + // Convert function name from kebab-case to snake_case + let snake_function_name = to_snake_case(&signature.function_name); + + // Get pascal case version for the JSON request format + let pascal_function_name = to_pascal_case(&signature.function_name); + + // Function full name with attribute type + let full_function_name = format!("{}_{}_rpc", snake_function_name, signature.attr_type); + debug!(name = %full_function_name, "Generating function stub"); + + // Extract parameters and return type + let mut params = Vec::new(); + let mut param_names = Vec::new(); + let mut return_type = "()".to_string(); + let mut target_param = ""; + + for field in &signature.fields { + let field_name_snake = to_snake_case(&field.name); + let rust_type = wit_type_to_rust(&field.wit_type); + debug!(field = %field.name, wit_type = %field.wit_type, rust_type = %rust_type, "Processing field"); + + if field.name == "target" { + if field.wit_type == "string" { + target_param = "&str"; + } else { + // Use hyperware_process_lib::Address instead of WitAddress + target_param = "&Address"; + } + } else if field.name == "returning" { + return_type = rust_type; + debug!(return_type = %return_type, "Identified return type"); + } else { + params.push(format!("{}: {}", field_name_snake, rust_type)); + param_names.push(field_name_snake); + debug!(param_name = param_names.last().unwrap(), "Added parameter"); + } + } + + // First parameter is always target + let all_params = if target_param.is_empty() { + warn!( + "No 'target' parameter found in signature for {}", + full_function_name + ); + params.join(", ") + } else { + format!( + "target: {}{}", + target_param, + if params.is_empty() { "" } else { ", " } + ) + ¶ms.join(", ") + }; + + // Wrap the return type in a Result<_, AppSendError> + let wrapped_return_type = 
format!("Result<{}, AppSendError>", return_type); + + // For HTTP endpoints, generate commented-out implementation + if signature.attr_type == "http" { + debug!("Generating commented-out stub for HTTP endpoint"); + let default_value = generate_default_value(&return_type); + + // Add underscore prefix to all parameters for HTTP stubs + let all_params_with_underscore = if target_param.is_empty() { + params + .iter() + .map(|param| { + let parts: Vec<&str> = param.split(':').collect(); + if parts.len() == 2 { + format!("_{}: {}", parts[0], parts[1]) + } else { + warn!(param = %param, "Could not parse parameter for underscore prefix"); + format!("_{}", param) + } + }) + .collect::>() + .join(", ") + } else { + let target_with_underscore = format!("_target: {}", target_param); + if params.is_empty() { + target_with_underscore + } else { + let params_with_underscore = params + .iter() + .map(|param| { + let parts: Vec<&str> = param.split(':').collect(); + if parts.len() == 2 { + format!("_{}: {}", parts[0], parts[1]) + } else { + warn!(param = %param, "Could not parse parameter for underscore prefix"); + format!("_{}", param) + } + }) + .collect::>() + .join(", "); + format!("{}, {}", target_with_underscore, params_with_underscore) + } + }; + + return format!( + "// /// Generated stub for `{}` {} RPC call\n// /// HTTP endpoint - uncomment to implement\n// pub async fn {}({}) -> {} {{\n// // TODO: Implement HTTP endpoint\n// Ok({})\n// }}", + signature.function_name, + signature.attr_type, + full_function_name, + all_params_with_underscore, + wrapped_return_type, + default_value + ); + } + + // Format JSON parameters correctly + let json_params = if param_names.is_empty() { + // No parameters case + debug!("Generating JSON with no parameters"); + format!("json!({{\"{}\" : {{}}}})", pascal_function_name) + } else if param_names.len() == 1 { + // Single parameter case + debug!(param = %param_names[0], "Generating JSON with single parameter"); + format!( + "json!({{\"{}\": {}}})", + pascal_function_name, param_names[0] + ) + } else { + // Multiple parameters case - use tuple format + debug!(params = ?param_names, "Generating JSON with multiple parameters (tuple)"); + format!( + "json!({{\"{}\": ({})}})", + pascal_function_name, + param_names.join(", ") + ) + }; + + // Generate function with implementation using send + debug!("Generating standard RPC stub implementation"); + format!( + "/// Generated stub for `{}` {} RPC call\npub async fn {}({}) -> {} {{\n let body = {};\n let body = serde_json::to_vec(&body).unwrap();\n let request = Request::to(target)\n .body(body);\n send::<{}>(request).await\n}}", + signature.function_name, + signature.attr_type, + full_function_name, + all_params, + wrapped_return_type, + json_params, + return_type + ) +} + +// Create the caller-utils crate with a single lib.rs file +#[instrument(level = "trace", skip_all)] +fn create_caller_utils_crate(api_dir: &Path, base_dir: &Path) -> Result<()> { + // Path to the new crate + let caller_utils_dir = base_dir.join("target").join("caller-utils"); + debug!( + path = %caller_utils_dir.display(), + "Creating caller-utils crate" + ); + + // Create directories + fs::create_dir_all(&caller_utils_dir)?; + fs::create_dir_all(caller_utils_dir.join("src"))?; + debug!("Created project directory structure"); + + // Create Cargo.toml with updated dependencies + let cargo_toml = r#"[package] +name = "caller-utils" +version = "0.1.0" +edition = "2021" +publish = false + +[dependencies] +anyhow = "1.0" +process_macros = "0.1.0" +futures-util 
= "0.3" +serde = { version = "1.0", features = ["derive"] } +serde_json = "1.0" +hyperware_app_common = { git = "https://github.com/hyperware-ai/hyperprocess-macro", rev = "b6ad495" } +once_cell = "1.20.2" +futures = "0.3" +uuid = { version = "1.0" } +wit-bindgen = "0.41.0" + +[lib] +crate-type = ["cdylib", "lib"] +"#; + + fs::write(caller_utils_dir.join("Cargo.toml"), cargo_toml) + .with_context(|| "Failed to write caller-utils Cargo.toml")?; + + debug!("Created Cargo.toml for caller-utils"); + + // Get the world name (preferably the types- version) + let world_names = find_world_names(api_dir)?; + debug!("Using world names for code generation: {:?}", world_names); + let world_name = if world_names.len() == 0 { + "" + } else if world_names.len() == 1 { + &world_names[0] + } else { + let path = api_dir.join("types.wit"); + let mut content = "world types {\n".to_string(); + for world_name in world_names { + content.push_str(&format!(" include {world_name};\n")); + } + content.push_str("}\n"); + fs::write(&path, &content)?; + "types" + }; + + // Get all interfaces from the world file + let interface_imports = find_interfaces_in_world(api_dir)?; + + // Store all types from each interface + let mut interface_types: HashMap> = HashMap::new(); + + // Find all WIT files in the api directory to generate stubs + let mut wit_files = Vec::new(); + for entry in WalkDir::new(api_dir) + .max_depth(1) + .into_iter() + .filter_map(Result::ok) + { + let path = entry.path(); + if path.is_file() && path.extension().map_or(false, |ext| ext == "wit") { + // Exclude world definition files + if let Ok(content) = fs::read_to_string(path) { + if !content.contains("world ") { + debug!(file = %path.display(), "Adding WIT file for parsing"); + wit_files.push(path.to_path_buf()); + } else { + debug!(file = %path.display(), "Skipping world definition WIT file"); + } + } + } + } + + debug!( + count = wit_files.len(), + "Found WIT interface files for stub generation" + ); + + // Generate content for each module and collect types + let mut module_contents = HashMap::::new(); + + for wit_file in &wit_files { + // Extract the interface name from the file name + let interface_name = wit_file.file_stem().unwrap().to_string_lossy(); + let snake_interface_name = to_snake_case(&interface_name); + + debug!( + interface = %interface_name, module = %snake_interface_name, file = %wit_file.display(), + "Processing interface" + ); + + // Parse the WIT file to extract signature structs and types + match parse_wit_file(wit_file) { + Ok((signatures, types)) => { + // Store types for this interface + interface_types.insert(interface_name.to_string(), types); + + if signatures.is_empty() { + debug!(file = %wit_file.display(), "No signature records found, skipping module generation for this file."); + continue; + } + + // Generate module content + let mut mod_content = String::new(); + + // Add function implementations + for signature in &signatures { + let function_impl = generate_async_function(signature); + mod_content.push_str(&function_impl); + mod_content.push_str("\n\n"); + } + + // Store the module content + module_contents.insert(snake_interface_name.clone(), mod_content); + + debug!( + interface = %interface_name, module = %snake_interface_name.as_str(), count = signatures.len(), + "Generated module content" + ); + } + Err(e) => { + warn!(file = %wit_file.display(), error = %e, "Error parsing WIT file, skipping"); + } + } + } + + // Create import statements for each interface using "hyperware::process::{interface_name}::*" + // 
Use a HashSet to track which interfaces we've already processed to avoid duplicates + let mut processed_interfaces = std::collections::HashSet::new(); + let mut interface_use_statements = Vec::new(); + + for interface_name in &interface_imports { + // Convert to snake case for module name + let snake_interface_name = to_snake_case(interface_name); + + // Only add the import if we haven't processed this interface yet + if processed_interfaces.insert(snake_interface_name.clone()) { + // Create wildcard import for this interface + interface_use_statements.push(format!( + "pub use crate::hyperware::process::{}::*;", + snake_interface_name + )); + } + } + + // Create single lib.rs with all modules inline + let mut lib_rs = String::new(); + + lib_rs.push_str("wit_bindgen::generate!({\n"); + lib_rs.push_str(" path: \"target/wit\",\n"); + lib_rs.push_str(&format!(" world: \"{}\",\n", world_name)); + lib_rs.push_str(" generate_unused_types: true,\n"); + lib_rs.push_str(" additional_derives: [serde::Deserialize, serde::Serialize, process_macros::SerdeJsonInto],\n"); + lib_rs.push_str("});\n\n"); + + lib_rs.push_str("/// Generated caller utilities for RPC function stubs\n\n"); + + // Add global imports + lib_rs.push_str("pub use hyperware_app_common::AppSendError;\n"); + lib_rs.push_str("pub use hyperware_app_common::send;\n"); + lib_rs.push_str("use hyperware_app_common::hyperware_process_lib as hyperware_process_lib;\n"); + lib_rs.push_str("use hyperware_process_lib::{Address, Request};\n"); + lib_rs.push_str("use serde_json::json;\n\n"); + + // Add interface use statements + if !interface_use_statements.is_empty() { + lib_rs.push_str("// Import types from each interface\n"); + for use_stmt in interface_use_statements { + lib_rs.push_str(&format!("{}\n", use_stmt)); + } + lib_rs.push_str("\n"); + } + + // Add all modules with their content + for (module_name, module_content) in module_contents { + lib_rs.push_str(&format!( + "/// Generated RPC stubs for the {} interface\n", + module_name + )); + lib_rs.push_str(&format!("pub mod {} {{\n", module_name)); + lib_rs.push_str(" use crate::*;\n\n"); + lib_rs.push_str(&format!(" {}\n", module_content.replace("\n", "\n "))); + lib_rs.push_str("}\n\n"); + } + + // Write lib.rs + let lib_rs_path = caller_utils_dir.join("src").join("lib.rs"); + debug!("Writing generated code to {}", lib_rs_path.display()); + + fs::write(&lib_rs_path, lib_rs) + .with_context(|| format!("Failed to write lib.rs: {}", lib_rs_path.display()))?; + + // Create target/wit directory and copy all WIT files + let target_wit_dir = caller_utils_dir.join("target").join("wit"); + debug!("Creating directory: {}", target_wit_dir.display()); + + // Remove the directory if it exists to ensure clean state + if target_wit_dir.exists() { + debug!("Removing existing target/wit directory"); + fs::remove_dir_all(&target_wit_dir)?; + } + + fs::create_dir_all(&target_wit_dir)?; + + // Copy all WIT files to target/wit + for entry in WalkDir::new(api_dir) + .max_depth(1) + .into_iter() + .filter_map(Result::ok) + { + let path = entry.path(); + if path.is_file() && path.extension().map_or(false, |ext| ext == "wit") { + let file_name = path.file_name().unwrap(); + let target_path = target_wit_dir.join(file_name); + fs::copy(path, &target_path).with_context(|| { + format!( + "Failed to copy {} to {}", + path.display(), + target_path.display() + ) + })?; + debug!( + "Copied {} to target/wit directory", + file_name.to_string_lossy() + ); + } + } + + Ok(()) +} + +// Update workspace Cargo.toml to include the 
caller-utils crate +#[instrument(level = "trace", skip_all)] +fn update_workspace_cargo_toml(base_dir: &Path) -> Result<()> { + let workspace_cargo_toml = base_dir.join("Cargo.toml"); + debug!( + path = %workspace_cargo_toml.display(), + "Updating workspace Cargo.toml" + ); + + if !workspace_cargo_toml.exists() { + warn!( + path = %workspace_cargo_toml.display(), + "Workspace Cargo.toml not found, skipping update." + ); + return Ok(()); + } + + let content = fs::read_to_string(&workspace_cargo_toml).with_context(|| { + format!( + "Failed to read workspace Cargo.toml: {}", + workspace_cargo_toml.display() + ) + })?; + + // Parse the TOML content + let mut parsed_toml: Value = content + .parse() + .with_context(|| "Failed to parse workspace Cargo.toml")?; + + // Check if there's a workspace section + if let Some(workspace) = parsed_toml.get_mut("workspace") { + if let Some(members) = workspace.get_mut("members") { + if let Some(members_array) = members.as_array_mut() { + // Check if caller-utils is already in the members list + let caller_utils_exists = members_array + .iter() + .any(|m| m.as_str().map_or(false, |s| s == "target/caller-utils")); + + if !caller_utils_exists { + members_array.push(Value::String("target/caller-utils".to_string())); + + // Write back the updated TOML + let updated_content = toml::to_string_pretty(&parsed_toml) + .with_context(|| "Failed to serialize updated workspace Cargo.toml")?; + + fs::write(&workspace_cargo_toml, updated_content).with_context(|| { + format!( + "Failed to write updated workspace Cargo.toml: {}", + workspace_cargo_toml.display() + ) + })?; + + debug!("Successfully updated workspace Cargo.toml"); + } else { + debug!( + "Workspace Cargo.toml already up-to-date regarding caller-utils member." + ); + } + } + } + } + + Ok(()) +} + +// Add caller-utils as a dependency to hyperware:process crates +#[instrument(level = "trace", skip_all)] +pub fn add_caller_utils_to_projects(projects: &[PathBuf]) -> Result<()> { + for project_path in projects { + let cargo_toml_path = project_path.join("Cargo.toml"); + debug!( + project = ?project_path.file_name().unwrap_or_default(), + path = %cargo_toml_path.display(), + "Processing project" + ); + + let content = fs::read_to_string(&cargo_toml_path).with_context(|| { + format!( + "Failed to read project Cargo.toml: {}", + cargo_toml_path.display() + ) + })?; + + let mut parsed_toml: Value = content.parse().with_context(|| { + format!( + "Failed to parse project Cargo.toml: {}", + cargo_toml_path.display() + ) + })?; + + // Add caller-utils to dependencies if not already present + if let Some(dependencies) = parsed_toml.get_mut("dependencies") { + if let Some(deps_table) = dependencies.as_table_mut() { + if !deps_table.contains_key("caller-utils") { + deps_table.insert( + "caller-utils".to_string(), + Value::Table({ + let mut t = toml::map::Map::new(); + t.insert( + "path".to_string(), + Value::String("../target/caller-utils".to_string()), + ); + t + }), + ); + + // Write back the updated TOML + let updated_content = + toml::to_string_pretty(&parsed_toml).with_context(|| { + format!( + "Failed to serialize updated project Cargo.toml: {}", + cargo_toml_path.display() + ) + })?; + + fs::write(&cargo_toml_path, updated_content).with_context(|| { + format!( + "Failed to write updated project Cargo.toml: {}", + cargo_toml_path.display() + ) + })?; + + debug!(project = ?project_path.file_name().unwrap_or_default(), "Successfully added caller-utils dependency"); + } else { + debug!(project = 
?project_path.file_name().unwrap_or_default(), "caller-utils dependency already exists"); + } + } + } + } + + Ok(()) +} + +// Create caller-utils crate and integrate with the workspace +#[instrument(level = "trace", skip_all)] +pub fn create_caller_utils(base_dir: &Path, api_dir: &Path) -> Result<()> { + // Step 1: Create the caller-utils crate + create_caller_utils_crate(api_dir, base_dir)?; + + // Step 2: Update workspace Cargo.toml + update_workspace_cargo_toml(base_dir)?; + + info!("Successfully created caller-utils and copied the imports"); + Ok(()) +} diff --git a/src/build/mod.rs b/src/build/mod.rs index 9e2382e3..e0fb2db7 100644 --- a/src/build/mod.rs +++ b/src/build/mod.rs @@ -32,6 +32,9 @@ use crate::KIT_CACHE; mod rewrite; use rewrite::copy_and_rewrite_package; +mod caller_utils_generator; +mod wit_generator; + const PY_VENV_NAME: &str = "process_env"; const JAVASCRIPT_SRC_PATH: &str = "src/lib.js"; const PYTHON_SRC_PATH: &str = "src/lib.py"; @@ -166,6 +169,25 @@ pub fn remove_missing_features(cargo_toml_path: &Path, features: Vec<&str>) -> R .collect()) } +#[instrument(level = "trace", skip_all)] +pub fn get_process_name(cargo_toml_path: &Path) -> Result { + let cargo_toml_content = fs::read_to_string(cargo_toml_path)?; + let cargo_toml: toml::Value = cargo_toml_content.parse()?; + + if let Some(process_name) = cargo_toml + .get("package") + .and_then(|p| p.get("name")) + .and_then(|n| n.as_str()) + { + let process_name = process_name.replace("_", "-"); + Ok(process_name.to_string()) + } else { + Err(eyre!( + "No package.name field in Cargo.toml at {cargo_toml_path:?}" + )) + } +} + /// Check if the first element is empty and there are no more elements #[instrument(level = "trace", skip_all)] fn is_only_empty_string(splitted: &Vec<&str>) -> bool { @@ -649,8 +671,6 @@ fn get_most_recent_modified_time( return Err(eyre!("Didn't find required dirs: {must_exist_dirs:?}")); } - debug!("get_most_recent_modified_time: most_recent: {most_recent:?}, most_recent_excluded: {most_recent_excluded:?}"); - Ok((most_recent, most_recent_excluded)) } @@ -898,18 +918,24 @@ async fn compile_rust_wasm_process( features: &str, verbose: bool, ) -> Result<()> { + let Some(package_dir) = process_dir.parent() else { + return Err(eyre!( + "Could not derive package dir from process_dir ({process_dir:?}) parent" + )); + }; + let process_name = get_process_name(&process_dir.join("Cargo.toml"))?; info!("Compiling Rust Hyperware process in {:?}...", process_dir); // Paths - let wit_dir = process_dir.join("target").join("wit"); - let bindings_dir = process_dir + let wit_dir = package_dir.join("target").join("wit"); + let bindings_dir = package_dir .join("target") .join("bindings") - .join(process_dir.file_name().unwrap()); + .join(package_dir.file_name().unwrap()); fs::create_dir_all(&bindings_dir)?; // Check and download wasi_snapshot_preview1.wasm if it does not exist - let wasi_snapshot_file = process_dir + let wasi_snapshot_file = package_dir .join("target") .join("wasi_snapshot_preview1.wasm"); let wasi_snapshot_url = format!( @@ -932,6 +958,8 @@ async fn compile_rust_wasm_process( let mut args = vec![ "+stable", "build", + "-p", + &process_name, "--release", "--no-default-features", "--target", @@ -947,7 +975,7 @@ async fn compile_rust_wasm_process( } else { features.len() }; - let features = remove_missing_features(&process_dir.join("Cargo.toml"), features)?; + let features = remove_missing_features(&package_dir.join("Cargo.toml"), features)?; if !test_only && original_length != features.len() { info!( 
"process {:?} missing features; using {:?}", @@ -960,7 +988,7 @@ async fn compile_rust_wasm_process( args.push(&features); } let result = run_command( - Command::new("cargo").args(&args).current_dir(process_dir), + Command::new("cargo").args(&args).current_dir(package_dir), verbose, )?; @@ -978,7 +1006,7 @@ async fn compile_rust_wasm_process( // For use inside of process_dir // Run `wasm-tools component new`, putting output in pkg/ // and rewriting all `_`s to `-`s - // cargo hates `-`s and so outputs with `_`s; Kimap hates + // cargo hates `-`s and so outputs with `_`s; Hypermap hates // `_`s and so we convert to and enforce all `-`s let wasm_file_name_cab = process_dir .file_name() @@ -990,7 +1018,7 @@ async fn compile_rust_wasm_process( let wasm_file_prefix = Path::new("target/wasm32-wasip1/release"); let wasm_file_cab = wasm_file_prefix.join(&format!("{wasm_file_name_cab}.wasm")); - let wasm_file_pkg = format!("../pkg/{wasm_file_name_hep}.wasm"); + let wasm_file_pkg = format!("pkg/{wasm_file_name_hep}.wasm"); let wasm_file_pkg = Path::new(&wasm_file_pkg); let wasi_snapshot_file = Path::new("target/wasi_snapshot_preview1.wasm"); @@ -1006,7 +1034,7 @@ async fn compile_rust_wasm_process( "--adapt", wasi_snapshot_file.to_str().unwrap(), ]) - .current_dir(process_dir), + .current_dir(package_dir), verbose, )?; @@ -1067,11 +1095,11 @@ async fn compile_and_copy_ui( #[instrument(level = "trace", skip_all)] async fn build_wit_dir( - process_dir: &Path, + package_dir: &Path, apis: &HashMap>, wit_version: Option, ) -> Result<()> { - let wit_dir = process_dir.join("target").join("wit"); + let wit_dir = package_dir.join("target").join("wit"); if wit_dir.exists() { fs::remove_dir_all(&wit_dir)?; } @@ -1090,29 +1118,21 @@ async fn build_wit_dir( async fn compile_package_item( path: PathBuf, features: String, - apis: HashMap>, world: String, - wit_version: Option, + is_rust_process: bool, + is_py_process: bool, + is_js_process: bool, verbose: bool, ) -> Result<()> { - if path.is_dir() { - let is_rust_process = path.join(RUST_SRC_PATH).exists(); - let is_py_process = path.join(PYTHON_SRC_PATH).exists(); - let is_js_process = path.join(JAVASCRIPT_SRC_PATH).exists(); - if is_rust_process || is_py_process || is_js_process { - build_wit_dir(&path, &apis, wit_version).await?; - } - - if is_rust_process { - compile_rust_wasm_process(&path, &features, verbose).await?; - } else if is_py_process { - let python = get_python_version(None, None)? - .ok_or_else(|| eyre!("kit requires Python 3.10 or newer"))?; - compile_python_wasm_process(&path, &python, &world, verbose).await?; - } else if is_js_process { - let valid_node = get_newest_valid_node_version(None, None)?; - compile_javascript_wasm_process(&path, valid_node, &world, verbose).await?; - } + if is_rust_process { + compile_rust_wasm_process(&path, &features, verbose).await?; + } else if is_py_process { + let python = get_python_version(None, None)? + .ok_or_else(|| eyre!("kit requires Python 3.10 or newer"))?; + compile_python_wasm_process(&path, &python, &world, verbose).await?; + } else if is_js_process { + let valid_node = get_newest_valid_node_version(None, None)?; + compile_javascript_wasm_process(&path, valid_node, &world, verbose).await?; } Ok(()) } @@ -1161,6 +1181,7 @@ async fn fetch_dependencies( include: &HashSet, exclude: &HashSet, rewrite: bool, + hyperapp: bool, force: bool, verbose: bool, ) -> Result<()> { @@ -1178,6 +1199,7 @@ async fn fetch_dependencies( vec![], // TODO: what about deps-of-deps? 
vec![], rewrite, + hyperapp, false, force, verbose, @@ -1215,6 +1237,7 @@ async fn fetch_dependencies( local_dep_deps, vec![], rewrite, + hyperapp, false, force, verbose, @@ -1531,8 +1554,10 @@ async fn compile_package( include: &HashSet, exclude: &HashSet, rewrite: bool, + hyperapp: bool, force: bool, verbose: bool, + hyperapp_processed_projects: Option>, ignore_deps: bool, // for internal use; may cause problems when adding recursive deps ) -> Result<()> { let metadata = read_and_update_metadata(package_dir)?; @@ -1540,7 +1565,9 @@ async fn compile_package( let (mut apis, dependencies) = check_and_populate_dependencies(package_dir, &metadata, skip_deps_check, verbose).await?; + info!("dependencies: {dependencies:?}"); if !ignore_deps && !dependencies.is_empty() { + info!("fetching dependencies..."); fetch_dependencies( package_dir, &dependencies.iter().map(|s| s.to_string()).collect(), @@ -1554,10 +1581,11 @@ async fn compile_package( include, exclude, rewrite, + hyperapp, force, verbose, ) - .await?; + .await? } let wit_world = default_world @@ -1566,8 +1594,11 @@ async fn compile_package( }) .to_string(); + build_wit_dir(&package_dir, &apis, metadata.properties.wit_version).await?; + let mut tasks = tokio::task::JoinSet::new(); let features = features.to_string(); + let mut to_compile = HashSet::new(); for entry in fs::read_dir(package_dir)? { let Ok(entry) = entry else { continue; @@ -1576,12 +1607,35 @@ async fn compile_package( if !is_cluded(&path, include, exclude) { continue; } + if !path.is_dir() { + continue; + } + + let is_rust_process = path.join(RUST_SRC_PATH).exists(); + let is_py_process = path.join(PYTHON_SRC_PATH).exists(); + let is_js_process = path.join(JAVASCRIPT_SRC_PATH).exists(); + if is_rust_process || is_py_process || is_js_process { + to_compile.insert((path, is_rust_process, is_py_process, is_js_process)); + } + } + + let api_dir = package_dir.join("target").join("wit"); + //info!("{processed_project:?} {api_dir:?}"); + if let Some(ref processed_projects) = hyperapp_processed_projects { + caller_utils_generator::create_caller_utils(package_dir, &api_dir)?; + for processed_project in processed_projects { + caller_utils_generator::add_caller_utils_to_projects(&[processed_project.clone()])?; + } + } + + for (path, is_rust_process, is_py_process, is_js_process) in to_compile { tasks.spawn(compile_package_item( path, features.clone(), - apis.clone(), wit_world.clone(), - metadata.properties.wit_version, + is_rust_process, + is_py_process, + is_js_process, verbose.clone(), )); } @@ -1661,6 +1715,7 @@ pub async fn execute( local_dependencies: Vec, add_paths_to_api: Vec, rewrite: bool, + hyperapp: bool, reproducible: bool, force: bool, verbose: bool, @@ -1753,6 +1808,19 @@ pub async fn execute( copy_and_rewrite_package(package_dir)? 
}; + let hyperapp_processed_projects = if !hyperapp { + None + } else { + let api_dir = live_dir.join("api"); + let (processed_projects, interfaces) = + wit_generator::generate_wit_files(&live_dir, &api_dir)?; + if interfaces.is_empty() { + None + } else { + Some(processed_projects) + } + }; + let ui_dirs = get_ui_dirs(&live_dir, &include, &exclude)?; if !no_ui && !ui_dirs.is_empty() { if !skip_deps_check { @@ -1779,8 +1847,10 @@ pub async fn execute( &include, &exclude, rewrite, + hyperapp, force, verbose, + hyperapp_processed_projects, ignore_deps, ) .await?; diff --git a/src/build/wit_generator.rs b/src/build/wit_generator.rs new file mode 100644 index 00000000..6c1f3258 --- /dev/null +++ b/src/build/wit_generator.rs @@ -0,0 +1,1190 @@ +use std::collections::{HashMap, HashSet}; +use std::fs; +use std::path::{Path, PathBuf}; + +use color_eyre::{ + eyre::{bail, eyre, WrapErr}, + Result, +}; +use syn::{self, Attribute, ImplItem, Item, Type}; +use toml::Value; +use tracing::{debug, info, instrument, warn}; +use walkdir::WalkDir; + +// Helper functions for naming conventions +fn to_kebab_case(s: &str) -> String { + // First, handle the case where the input has underscores + if s.contains('_') { + return s.replace('_', "-"); + } + + let mut result = String::with_capacity(s.len() + 5); // Extra capacity for hyphens + let chars: Vec = s.chars().collect(); + + for (i, &c) in chars.iter().enumerate() { + if c.is_uppercase() { + // Add hyphen if: + // 1. Not the first character + // 2. Previous character is lowercase + // 3. Or next character is lowercase (to handle acronyms like HTML) + if i > 0 + && (chars[i - 1].is_lowercase() + || (i < chars.len() - 1 && chars[i + 1].is_lowercase())) + { + result.push('-'); + } + result.push(c.to_lowercase().next().unwrap()); + } else { + result.push(c); + } + } + + result +} + +// Validates a name doesn't contain numbers or "stream" +fn validate_name(name: &str, kind: &str) -> Result<()> { + // Check for numbers + if name.chars().any(|c| c.is_digit(10)) { + bail!( + "Error: {} name '{}' contains numbers, which is not allowed", + kind, + name + ); + } + + // Check for "stream" + if name.to_lowercase().contains("stream") { + bail!( + "Error: {} name '{}' contains 'stream', which is not allowed", + kind, + name + ); + } + + Ok(()) +} + +// Check if a field name starts with an underscore, and if so, strip it and print a warning. +fn check_and_strip_leading_underscore(field_name: String) -> String { + if let Some(stripped) = field_name.strip_prefix('_') { + warn!(field_name = %field_name, + "field_name is prefixed with an underscore, which is not allowed in WIT. Function signatures should not include unused parameters." 
+ ); + stripped.to_string() + } else { + field_name + } +} + +// Remove "State" suffix from a name +fn remove_state_suffix(name: &str) -> String { + if name.ends_with("State") { + let len = name.len(); + return name[0..len - 5].to_string(); + } + name.to_string() +} + +// Extract wit_world from the #[hyperprocess] attribute using the format in the debug representation +#[instrument(level = "trace", skip_all)] +fn extract_wit_world(attrs: &[Attribute]) -> Result { + for attr in attrs { + if attr.path().is_ident("hyperprocess") { + // Convert attribute to string representation + let attr_str = format!("{:?}", attr); + debug!(attr_str = %attr_str, "Attribute string"); + + // Look for wit_world in the attribute string + if let Some(pos) = attr_str.find("wit_world") { + debug!(pos = %pos, "Found wit_world"); + + // Find the literal value after wit_world by looking for lit: "value" + let lit_pattern = "lit: \""; + if let Some(lit_pos) = attr_str[pos..].find(lit_pattern) { + let start_pos = pos + lit_pos + lit_pattern.len(); + + // Find the closing quote of the literal + if let Some(quote_pos) = attr_str[start_pos..].find('\"') { + let world_name = &attr_str[start_pos..(start_pos + quote_pos)]; + debug!(wit_world = %world_name, "Extracted wit_world"); + return Ok(world_name.to_string()); + } + } + } + } + } + bail!("wit_world not found in hyperprocess attribute") +} + +// Convert Rust type to WIT type, including downstream types +#[instrument(level = "trace", skip_all)] +fn rust_type_to_wit(ty: &Type, used_types: &mut HashSet) -> Result { + match ty { + Type::Path(type_path) => { + if type_path.path.segments.is_empty() { + return Err(eyre!("Failed to parse path type: {ty:?}")); + } + + let ident = &type_path.path.segments.last().unwrap().ident; + let type_name = ident.to_string(); + + match type_name.as_str() { + "i8" => Ok("s8".to_string()), + "u8" => Ok("u8".to_string()), + "i16" => Ok("s16".to_string()), + "u16" => Ok("u16".to_string()), + "i32" => Ok("s32".to_string()), + "u32" => Ok("u32".to_string()), + "i64" => Ok("s64".to_string()), + "u64" => Ok("u64".to_string()), + "f32" => Ok("f32".to_string()), + "f64" => Ok("f64".to_string()), + "String" => Ok("string".to_string()), + "bool" => Ok("bool".to_string()), + "Vec" => { + if let syn::PathArguments::AngleBracketed(args) = + &type_path.path.segments.last().unwrap().arguments + { + if let Some(syn::GenericArgument::Type(inner_ty)) = args.args.first() { + let inner_type = rust_type_to_wit(inner_ty, used_types)?; + Ok(format!("list<{}>", inner_type)) + } else { + Err(eyre!("Failed to parse Vec inner type")) + } + } else { + Err(eyre!("Failed to parse Vec inner type!")) + } + } + "Option" => { + if let syn::PathArguments::AngleBracketed(args) = + &type_path.path.segments.last().unwrap().arguments + { + if let Some(syn::GenericArgument::Type(inner_ty)) = args.args.first() { + let inner_type = rust_type_to_wit(inner_ty, used_types)?; + Ok(format!("option<{}>", inner_type)) + } else { + Err(eyre!("Failed to parse Option inner type")) + } + } else { + Err(eyre!("Failed to parse Option inner type!")) + } + } + "Result" => { + if let syn::PathArguments::AngleBracketed(args) = + &type_path.path.segments.last().unwrap().arguments + { + if args.args.len() >= 2 { + if let ( + Some(syn::GenericArgument::Type(ok_ty)), + Some(syn::GenericArgument::Type(err_ty)), + ) = (args.args.first(), args.args.get(1)) + { + let ok_type = rust_type_to_wit(ok_ty, used_types)?; + let err_type = rust_type_to_wit(err_ty, used_types)?; + Ok(format!("result<{}, {}>", ok_type, 
err_type)) + } else { + Err(eyre!("Failed to parse Result generic arguments")) + } + } else { + Err(eyre!("Result requires two type arguments")) + } + } else { + Err(eyre!("Failed to parse Result type arguments")) + } + } + // TODO: fix and enable + //"HashMap" | "BTreeMap" => { + // if let syn::PathArguments::AngleBracketed(args) = + // &type_path.path.segments.last().unwrap().arguments + // { + // if args.args.len() >= 2 { + // if let ( + // Some(syn::GenericArgument::Type(key_ty)), + // Some(syn::GenericArgument::Type(val_ty)), + // ) = (args.args.first(), args.args.get(1)) + // { + // let key_type = rust_type_to_wit(key_ty, used_types)?; + // let val_type = rust_type_to_wit(val_ty, used_types)?; + // // For HashMaps, we'll generate a list of tuples where each tuple contains a key and value + // Ok(format!("list>", key_type, val_type)) + // } else { + // Ok("list>".to_string()) + // } + // } else { + // Ok("list>".to_string()) + // } + // } else { + // Ok("list>".to_string()) + // } + //} + custom => { + // Validate custom type name + validate_name(custom, "Type")?; + + // Convert custom type to kebab-case and add to used types + let kebab_custom = to_kebab_case(custom); + used_types.insert(kebab_custom.clone()); + Ok(kebab_custom) + } + } + } + Type::Reference(type_ref) => { + // Handle references by using the underlying type + rust_type_to_wit(&type_ref.elem, used_types) + } + Type::Tuple(type_tuple) => { + if type_tuple.elems.is_empty() { + // Empty tuple is unit in WIT + Ok("unit".to_string()) + } else { + // Create a tuple representation in WIT + let mut elem_types = Vec::new(); + for elem in &type_tuple.elems { + elem_types.push(rust_type_to_wit(elem, used_types)?); + } + Ok(format!("tuple<{}>", elem_types.join(", "))) + } + } + _ => return Err(eyre!("Failed to parse type: {ty:?}")), + } +} + +// Find all Rust files in a crate directory +fn find_rust_files(crate_path: &Path) -> Vec { + let mut rust_files = Vec::new(); + let src_dir = crate_path.join("src"); + + debug!(src_dir = %src_dir.display(), "Finding Rust files"); + + if !src_dir.exists() || !src_dir.is_dir() { + warn!(src_dir = %src_dir.display(), "No src directory found"); + return rust_files; + } + + for entry in WalkDir::new(src_dir).into_iter().filter_map(Result::ok) { + let path = entry.path(); + if path.is_file() && path.extension().map_or(false, |ext| ext == "rs") { + debug!(path = %path.display(), "Found Rust file"); + rust_files.push(path.to_path_buf()); + } + } + + debug!(count = %rust_files.len(), "Found Rust files"); + rust_files +} + +// Collect **only used** type definitions (structs and enums) from a file +#[instrument(level = "trace", skip_all)] +fn collect_type_definitions_from_file( + file_path: &Path, + used_types: &HashSet, // Accept the set of used types +) -> Result> { + debug!( + file_path = %file_path.display(), + "Collecting used type definitions from file" + ); + + let content = fs::read_to_string(file_path) + .with_context(|| format!("Failed to read file: {}", file_path.display()))?; + + let ast = syn::parse_file(&content) + .with_context(|| format!("Failed to parse file: {}", file_path.display()))?; + + let mut type_defs = HashMap::new(); + + for item in &ast.items { + match item { + Item::Struct(item_struct) => { + // Validate struct name doesn't contain numbers or "stream" + let orig_name = item_struct.ident.to_string(); + + // Skip trying to validate if name contains "__" as these are likely internal types + if orig_name.contains("__") { + // This skip can remain, as internal types are 
unlikely to be in `used_types` anyway + warn!(name = %orig_name, "Skipping likely internal struct"); + continue; + } + + match validate_name(&orig_name, "Struct") { + Ok(_) => { + // Use kebab-case for struct name + let name = to_kebab_case(&orig_name); + + // --- Check if this type is used --- + if !used_types.contains(&name) { + // Skip this struct if not in the used set + continue; + } + // --- End Check --- + + debug!(original_name = %orig_name, kebab_name = %name, "Found used struct"); + + // Proceed with field processing only if the struct is used + let fields: Vec = match &item_struct.fields { + syn::Fields::Named(fields) => { + // Note: The `rust_type_to_wit` calls here still use a *local* `used_types` + // set for *recursive* type discovery *within this struct's definition*. + // This is necessary for correctly formatting types like list. + // The main `used_types` set (passed as argument) determines *if* this struct + // definition is included at all. + let mut local_used_types_for_fields = HashSet::new(); // Renamed for clarity + let mut field_strings = Vec::new(); + + for f in &fields.named { + if let Some(field_ident) = &f.ident { + let field_orig_name = field_ident.to_string(); + match validate_name(&field_orig_name, "Field") { + Ok(_) => { + let field_name = to_kebab_case(&field_orig_name); + if field_name.is_empty() { + warn!( + struct_name = %name, field_original_name = %field_orig_name, + "Skipping field with empty name conversion" + ); + continue; + } + + // This call populates `local_used_types_for_fields` if needed, + // but its primary goal here is WIT type string generation. + let field_type = match rust_type_to_wit( + &f.ty, + &mut local_used_types_for_fields, // Pass the local set + ) { + Ok(ty) => ty, + Err(e) => { + warn!(struct_name = %name, field_name = %field_name, error = %e, "Error converting field type"); + // Propagate error if field type conversion fails + return Err(e); + } + }; + + debug!( + " Field: {} -> {}", + field_name, field_type + ); + field_strings.push(format!( + " {}: {}", + field_name, field_type + )); + } + Err(e) => { + warn!(struct_name = %name, error = %e, "Skipping field with invalid name"); + // Decide if you want to continue or error out + continue; + } + } + } + } + field_strings + } + _ => Vec::new(), // Handle tuple structs, unit structs if needed + }; + + // Add the struct definition only if it has fields (or adjust logic if empty records are valid) + if !fields.is_empty() { + type_defs.insert( + name.clone(), + format!(" record {} {{\n{}\n }}", name, fields.join(",\n")), + ); + } else { + warn!(name = %name, "Skipping used struct with no convertible fields"); + } + } + Err(e) => { + // Struct name validation failed, skip regardless of usage + warn!(error = %e, "Skipping struct with invalid name"); + continue; + } + } + } + Item::Enum(item_enum) => { + // Validate enum name doesn't contain numbers or "stream" + let orig_name = item_enum.ident.to_string(); + + // Skip trying to validate if name contains "__" + if orig_name.contains("__") { + debug!(name = %orig_name, "Skipping likely internal enum"); + continue; + } + + match validate_name(&orig_name, "Enum") { + Ok(_) => { + // Use kebab-case for enum name + let name = to_kebab_case(&orig_name); + + // --- Check if this type is used --- + if !used_types.contains(&name) { + debug!(original_name = %orig_name, kebab_name = %name, "Skipping type not present in any function signature"); + continue; // Skip this enum if not in the used set + } + // --- End Check --- + + 
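+                        // For illustration only (hypothetical type, not from this repo),
+                        // and assuming it appears in a #[hyperprocess] function signature:
+                        //     enum Status { Active, Suspended(String) }
+                        // is emitted by the loop below as the WIT variant
+                        //     variant status {
+                        //         active,
+                        //         suspended(string)
+                        //     }
+                        // Struct-like or multi-field tuple variants cause the whole
+                        // enum to be skipped.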
debug!(original_name = %orig_name, kebab_name = %name, "Found used enum"); + + // Proceed with variant processing only if the enum is used + let mut variants = Vec::new(); + let mut skip_enum = false; + + for v in &item_enum.variants { + let variant_orig_name = v.ident.to_string(); + match validate_name(&variant_orig_name, "Enum variant") { + Ok(_) => { + match &v.fields { + syn::Fields::Unnamed(fields) + if fields.unnamed.len() == 1 => + { + // Similar to structs, use a local set for inner type resolution + let mut local_used_types_for_variant = HashSet::new(); + match rust_type_to_wit( + &fields.unnamed.first().unwrap().ty, + &mut local_used_types_for_variant, // Pass local set + ) { + Ok(ty) => { + let variant_name = + to_kebab_case(&variant_orig_name); + debug!(original_name = %variant_orig_name, kebab_name = %variant_name, ty_str = %ty, "Found enum variant with type"); + variants.push(format!( + " {}({})", + variant_name, ty + )); + } + Err(e) => { + warn!(enum_name = %name, variant_name = %variant_orig_name, error = %e, "Error converting variant type"); + // Propagate error if variant type conversion fails + return Err(e); + } + } + } + syn::Fields::Unit => { + let variant_name = to_kebab_case(&variant_orig_name); + debug!(original_name = %variant_orig_name, kebab_name = %variant_name, "Found unit enum variant"); + variants.push(format!(" {}", variant_name)); + } + _ => { + warn!(enum_name = %name, variant_name = %variant_orig_name, "Skipping complex variant in used enum"); + skip_enum = true; // Skip the whole enum if one variant is complex + break; + } + } + } + Err(e) => { + warn!(enum_name = %name, error = %e, "Skipping variant with invalid name in used enum"); + skip_enum = true; // Skip the whole enum if one variant name is invalid + break; + } + } + } + + // Add the enum definition only if it wasn't skipped and has variants + if !skip_enum && !variants.is_empty() { + type_defs.insert( + name.clone(), + format!( + " variant {} {{\n{}\n }}", + name, + variants.join(",\n") + ), + ); + } else { + warn!(name = %name, "Skipping used enum due to complex/invalid variants or no variants"); + } + } + Err(e) => { + // Enum name validation failed, skip regardless of usage + warn!(error = %e, "Skipping enum with invalid name"); + continue; + } + } + } + _ => {} // Handle other top-level items like functions, impls, etc. 
if needed + } + } + + debug!( + count = %type_defs.len(), file_path = %file_path.display(), + "Collected used type definitions from this file" + ); + Ok(type_defs) +} + +// Find all relevant Rust projects +fn find_rust_projects(base_dir: &Path) -> Vec { + let mut projects = Vec::new(); + debug!(base_dir = %base_dir.display(), "Scanning for Rust projects"); + + for entry in WalkDir::new(base_dir) + .max_depth(1) + .into_iter() + .filter_map(Result::ok) + { + let path = entry.path(); + + if !path.is_dir() || path == base_dir { + continue; + } + let cargo_toml = path.join("Cargo.toml"); + debug!(path = %cargo_toml.display(), "Checking path"); + + if !cargo_toml.exists() { + continue; + } + // Try to read and parse Cargo.toml + let Ok(content) = fs::read_to_string(&cargo_toml) else { + continue; + }; + let Ok(cargo_data) = content.parse::() else { + continue; + }; + // Check for the specific metadata + let Some(metadata) = cargo_data + .get("package") + .and_then(|p| p.get("metadata")) + .and_then(|m| m.get("component")) + else { + warn!(path = %cargo_toml.display(), "No package.metadata.component metadata found"); + continue; + }; + let Some(package) = metadata.get("package") else { + continue; + }; + let Some(package_str) = package.as_str() else { + continue; + }; + debug!(package = %package_str, "Found package.metadata.component.package"); + if package_str == "hyperware:process" { + debug!(path = %path.display(), "Adding project"); + projects.push(path.to_path_buf()); + } + } + + debug!(count = %projects.len(), "Found relevant Rust projects"); + projects +} + +// Helper function to generate signature struct for specific attribute type +#[instrument(level = "trace", skip_all)] +fn generate_signature_struct( + kebab_name: &str, + attr_type: &str, + method: &syn::ImplItemFn, + used_types: &mut HashSet, +) -> Result { + // Create signature struct name with attribute type + let signature_struct_name = format!("{}-signature-{}", kebab_name, attr_type); + + // Generate comment for this specific function + let comment = format!( + " // Function signature for: {} ({})", + kebab_name, attr_type + ); + + // Create struct fields that directly represent function parameters + let mut struct_fields = Vec::new(); + + // Add target parameter based on attribute type + if attr_type == "http" { + struct_fields.push(" target: string".to_string()); + } else { + // remote or local + struct_fields.push(" target: address".to_string()); + } + + // Process function parameters (skip &self and &mut self) + for arg in &method.sig.inputs { + if let syn::FnArg::Typed(pat_type) = arg { + if let syn::Pat::Ident(pat_ident) = &*pat_type.pat { + // Skip &self and &mut self + if pat_ident.ident == "self" { + continue; + } + + // Get original param name and convert to kebab-case + let param_orig_name = pat_ident.ident.to_string(); + + // Validate parameter name + match validate_name(¶m_orig_name, "Parameter") { + Ok(_) => { + let param_name = check_and_strip_leading_underscore(param_orig_name); + let param_name = to_kebab_case(¶m_name); + + // Rust type to WIT type + match rust_type_to_wit(&pat_type.ty, used_types) { + Ok(param_type) => { + // Add field directly to the struct + struct_fields + .push(format!(" {}: {}", param_name, param_type)); + } + Err(e) => { + warn!(param_name = %param_name, error = %e, "Error converting parameter type"); + return Err(e); + } + } + } + Err(e) => { + warn!(error = %e, "Skipping parameter with invalid name"); + return Err(e); + } + } + } + } + } + + // Add return type field + match 
&method.sig.output { + syn::ReturnType::Type(_, ty) => match rust_type_to_wit(&*ty, used_types) { + Ok(return_type) => { + struct_fields.push(format!(" returning: {}", return_type)); + } + Err(e) => { + warn!(struct_name = %signature_struct_name, error = %e, "Error converting return type"); + return Err(e); + } + }, + _ => { + // For unit return type + struct_fields.push(" returning: unit".to_string()); + } + } + + // Combine everything into a record definition + let record_def = format!( + "{}\n record {} {{\n{}\n }}", + comment, + signature_struct_name, + struct_fields.join(",\n") + ); + + Ok(record_def) +} + +// Helper trait to get TypePath from Type +trait AsTypePath { + fn as_type_path(&self) -> Option<&syn::TypePath>; +} + +impl AsTypePath for syn::Type { + fn as_type_path(&self) -> Option<&syn::TypePath> { + match self { + syn::Type::Path(tp) => Some(tp), + _ => None, + } + } +} + +// Process a single Rust project and generate WIT files +#[instrument(level = "trace", skip_all)] +fn process_rust_project(project_path: &Path, api_dir: &Path) -> Result> { + debug!(project_path = %project_path.display(), "Processing project"); + + // Find lib.rs for this project + let lib_rs = project_path.join("src").join("lib.rs"); + + if !lib_rs.exists() { + warn!(project_path = %project_path.display(), "No lib.rs found for project"); + return Ok(None); + } + + // Find all Rust files in the project + let rust_files = find_rust_files(project_path); + + // Parse lib.rs to find the hyperprocess attribute and interface details first + let lib_content = fs::read_to_string(&lib_rs).with_context(|| { + format!( + "Failed to read lib.rs for project: {}", + project_path.display() + ) + })?; + + let ast = syn::parse_file(&lib_content).with_context(|| { + format!( + "Failed to parse lib.rs for project: {}", + project_path.display() + ) + })?; + + let mut wit_world = None; + let mut interface_name = None; + let mut kebab_interface_name = None; + let mut impl_item_with_hyperprocess = None; + + debug!("Scanning for impl blocks with hyperprocess attribute"); + for item in &ast.items { + let Item::Impl(impl_item) = item else { + continue; + }; + // Check if this impl block has a #[hyperprocess] attribute + if let Some(attr) = impl_item + .attrs + .iter() + .find(|attr| attr.path().is_ident("hyperprocess")) + { + debug!("Found hyperprocess attribute"); + + // Extract the wit_world name + match extract_wit_world(&[attr.clone()]) { + Ok(world_name) => { + debug!(wit_world = %world_name, "Extracted wit_world"); + wit_world = Some(world_name); + + // Get the interface name from the impl type + interface_name = impl_item.self_ty.as_ref().as_type_path().map(|tp| { + if let Some(last_segment) = tp.path.segments.last() { + last_segment.ident.to_string() + } else { + "Unknown".to_string() + } + }); + + // Check for "State" suffix and remove it + let Some(ref name) = interface_name else { + continue; + }; + // Validate the interface name + if let Err(e) = validate_name(name, "Interface") { + warn!(interface_name = %name, error = %e, "Interface name validation failed"); + continue; // Skip this impl block if validation fails + } + + // Remove State suffix if present + let base_name = remove_state_suffix(name); + + // Convert to kebab-case for file name and interface name + kebab_interface_name = Some(to_kebab_case(&base_name)); + + debug!(interface_name = ?interface_name, base_name = %base_name, kebab_name = ?kebab_interface_name, "Interface details"); + + // Save the impl item for later processing + impl_item_with_hyperprocess 
= Some(impl_item.clone()); + break; // Assume only one hyperprocess impl block per lib.rs + } + Err(e) => warn!("Failed to extract wit_world: {}", e), + } + } + } + + // Prepare to collect signature structs and used types + let mut signature_structs = Vec::new(); + let mut used_types = HashSet::new(); // This will be populated now + + // Analyze the functions within the identified impl block (if found) + if let Some(ref impl_item) = &impl_item_with_hyperprocess { + if let Some(ref _kebab_name) = &kebab_interface_name { + // Ensure kebab_name is available but acknowledge unused in this block + for item in &impl_item.items { + let ImplItem::Fn(method) = item else { + continue; + }; + let method_name = method.sig.ident.to_string(); + debug!(method_name = %method_name, "Examining method"); + + // Check for attribute types + let has_remote = method + .attrs + .iter() + .any(|attr| attr.path().is_ident("remote")); + let has_local = method + .attrs + .iter() + .any(|attr| attr.path().is_ident("local")); + let has_http = method.attrs.iter().any(|attr| attr.path().is_ident("http")); + let has_init = method.attrs.iter().any(|attr| attr.path().is_ident("init")); + + if has_remote || has_local || has_http || has_init { + debug!(remote = %has_remote, local = %has_local, http = %has_http, init = %has_init, "Method attributes"); + + // Validate function name + match validate_name(&method_name, "Function") { + Ok(_) => { + // Convert function name to kebab-case + let func_kebab_name = to_kebab_case(&method_name); // Use different var name + + debug!(original_name = %method_name, kebab_name = %func_kebab_name, "Processing method"); + + if has_init { + debug!(method_name = %method_name, "Found initialization function"); + continue; + } + // This will populate `used_types` + if has_remote { + match generate_signature_struct( + &func_kebab_name, // Pass func kebab name + "remote", + method, + &mut used_types, // Pass the main set + ) { + Ok(remote_struct) => signature_structs.push(remote_struct), + Err(e) => { + warn!(method_name = %method_name, error = %e, "Error generating remote signature struct"); + } + } + } + + if has_local { + match generate_signature_struct( + &func_kebab_name, // Pass func kebab name + "local", + method, + &mut used_types, // Pass the main set + ) { + Ok(local_struct) => signature_structs.push(local_struct), + Err(e) => { + warn!(method_name = %method_name, error = %e, "Error generating local signature struct"); + } + } + } + + if has_http { + match generate_signature_struct( + &func_kebab_name, // Pass func kebab name + "http", + method, + &mut used_types, // Pass the main set + ) { + Ok(http_struct) => signature_structs.push(http_struct), + Err(e) => { + warn!(method_name = %method_name, error = %e, "Error generating HTTP signature struct"); + } + } + } + } + Err(e) => { + warn!(" Skipping method with invalid name: {}", e); + warn!(method_name = %method_name, error = %e, "Skipping method with invalid name"); + } + } + } else { + warn!(" Method {} does not have the [remote], [local], [http] or [init] attribute, it should not be in the Impl block", method_name); + warn!(method_name = %method_name, "Method missing required attribute ([remote], [local], [http], or [init])"); + } + } + } + } + + // Collect **only used** type definitions from all Rust files + let mut all_type_defs = HashMap::new(); // Now starts empty, filled by collector + for file_path in &rust_files { + // Pass the populated used_types set to the collector + match collect_type_definitions_from_file(file_path, 
&used_types) { + Ok(file_type_defs) => { + for (name, def) in file_type_defs { + // Since the collector only returns used types, we can insert directly + all_type_defs.insert(name, def); + } + } + Err(e) => { + warn!(file_path = %file_path.display(), error = %e, "Error collecting type definitions from file"); + // Continue with other files + } + } + } + + debug!(count = %all_type_defs.len(), "Collected used type definitions"); + + // Now generate the WIT content for the interface + if let (Some(ref iface_name), Some(ref kebab_name), Some(ref _impl_item)) = ( + // impl_item no longer needed here + &interface_name, + &kebab_interface_name, + &impl_item_with_hyperprocess, // Keep this condition to ensure an interface was found + ) { + // No need to filter anymore, all_type_defs contains only used types + let mut type_defs: Vec = all_type_defs.into_values().collect(); // Collect values directly + + type_defs.sort(); // Sort for consistent output + + // Generate the final WIT content + if signature_structs.is_empty() && type_defs.is_empty() { + // Check both sigs and types + warn!(interface_name = %iface_name, "No functions or used types found for interface"); + } else { + // Start with the interface comment + let mut content = " // This interface contains function signature definitions that will be used\n // by the hyper-bindgen macro to generate async function bindings.\n //\n // NOTE: This is currently a hacky workaround since WIT async functions are not\n // available until WASI Preview 3. Once Preview 3 is integrated into Hyperware,\n // we should switch to using proper async WIT function signatures instead of\n // this struct-based approach with hyper-bindgen generating the async stubs.\n".to_string(); + + // Add standard imports + content.push_str("\n use standard.{address};\n\n"); + + // Add type definitions if any + if !type_defs.is_empty() { + content.push_str(&type_defs.join("\n\n")); + content.push_str("\n\n"); + } + + // Add signature structs if any (moved after types for potentially better readability) + if !signature_structs.is_empty() { + content.push_str(&signature_structs.join("\n\n")); + } + + // Wrap in interface block + let final_content = + format!("interface {} {{\n{}\n}}\n", kebab_name, content.trim_end()); // Trim trailing whitespace + debug!(interface_name = %iface_name, signature_count = %signature_structs.len(), type_def_count = %type_defs.len(), "Generated interface content"); + + // Write the interface file with kebab-case name + let interface_file = api_dir.join(format!("{}.wit", kebab_name)); + debug!(path = %interface_file.display(), "Writing WIT file"); + + fs::write(&interface_file, &final_content) + .with_context(|| format!("Failed to write {}", interface_file.display()))?; + + debug!("Successfully wrote WIT file"); + } + } else { + warn!("No valid hyperprocess interface found in lib.rs"); + } + + // Return statement remains the same logic + if let (Some(wit_world), Some(_), Some(kebab_iface)) = + (wit_world, interface_name, kebab_interface_name) + { + debug!(interface = %kebab_iface, "Returning import statement for interface"); + // Use kebab-case interface name for import + Ok(Some((kebab_iface, wit_world))) + } else { + warn!("No valid interface found or wit_world extracted."); // Updated message + Ok(None) + } +} + +#[instrument(level = "trace", skip_all)] +fn rewrite_wit( + api_dir: &Path, + new_imports: &Vec, + wit_worlds: &mut HashSet, + updated_world: &mut bool, +) -> Result<()> { + debug!(api_dir = %api_dir.display(), "Rewriting WIT world files"); + 
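+    // Illustrative sketch (editorial comment, not part of the original source): the
+    // world files handled below follow the shape emitted by `generate_wit_file`, roughly
+    //
+    //   world chat-template-dot-os-v0 {
+    //       import chat;
+    //       include process-v1;
+    //   }
+    //
+    // while a world whose name starts with `types-` gets `include lib;` in place of
+    // `include process-v1;`. The world and interface names here are examples taken
+    // from the templates elsewhere in this diff, not values this function hard-codes.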
// handle existing api files + for entry in WalkDir::new(api_dir) + .max_depth(1) + .into_iter() + .filter_map(Result::ok) + { + let path = entry.path(); + + if path.is_file() && path.extension().map_or(false, |ext| ext == "wit") { + debug!(path = %path.display(), "Checking WIT file"); + + let Ok(content) = fs::read_to_string(path) else { + continue; + }; + if !content.contains("world ") { + continue; + } + debug!("Found world definition file"); + + // Extract the world name and existing imports + let lines: Vec<&str> = content.lines().collect(); + let mut world_name = None; + let mut existing_imports = Vec::new(); + let mut include_lines = HashSet::new(); + + for line in &lines { + let trimmed = line.trim(); + + if trimmed.starts_with("world ") { + if let Some(name) = trimmed.split_whitespace().nth(1) { + world_name = Some(name.trim_end_matches(" {").to_string()); + } + } else if trimmed.starts_with("import ") { + existing_imports.push(trimmed.to_string()); + } else if trimmed.starts_with("include ") { + include_lines.insert(trimmed.to_string()); + } + } + + let Some(world_name) = world_name else { + continue; + }; + + debug!(world_name = %world_name, "Extracted world name"); + + // Check if this world name matches the one we're looking for + if wit_worlds.remove(&world_name) || wit_worlds.contains(&world_name[6..]) { + let world_content = generate_wit_file( + &world_name, + new_imports, + &existing_imports, + &mut include_lines, + )?; + + debug!(path = %path.display(), "Writing updated world definition"); + // Write the updated world file + fs::write(path, world_content).with_context(|| { + format!("Failed to write updated world file: {}", path.display()) + })?; + + debug!("Successfully updated world definition"); // INFO -> DEBUG + *updated_world = true; + } + } + } + + // handle non-existing api files + for wit_world in wit_worlds.iter() { + for prefix in ["", "types-"] { + let wit_world = format!("{prefix}{wit_world}"); + let world_content = + generate_wit_file(&wit_world, new_imports, &Vec::new(), &mut HashSet::new())?; + + let path = api_dir.join(format!("{wit_world}.wit")); + debug!(path = %path.display(), wit_world = %wit_world, "Writing new world definition"); + // Write the updated world file + fs::write(&path, world_content).with_context(|| { + format!("Failed to write updated world file: {}", path.display()) + })?; + + debug!("Successfully created new world definition for {wit_world}"); + } + *updated_world = true; + } + + Ok(()) +} + +fn generate_wit_file( + world_name: &str, + new_imports: &Vec, + existing_imports: &Vec, + include_lines: &mut HashSet, +) -> Result { + // Determine the include line based on world name + // If world name starts with "types-", use "include lib;" instead + if world_name.starts_with("types-") { + if !include_lines.contains("include lib;") { + include_lines.insert("include lib;".to_string()); + } + } else { + // Keep existing include or default to process-v1 + if include_lines.is_empty() { + include_lines.insert("include process-v1;".to_string()); + } + } + + // Combine existing imports with new imports + let mut all_imports = existing_imports.clone(); + + for import in new_imports { + let import_stmt = import.trim(); + if !all_imports.iter().any(|i| i.trim() == import_stmt) { + all_imports.push(import_stmt.to_string()); + } + } + + // Make sure all imports have proper indentation + let all_imports_with_indent: Vec = all_imports + .iter() + .map(|import| { + if import.starts_with(" ") { + import.clone() + } else { + format!(" {}", import.trim()) + 
} + }) + .collect(); + + let imports_section = all_imports_with_indent.join("\n"); + + // Create updated world content with proper indentation + let include_lines: String = include_lines.iter().map(|l| format!(" {l}\n")).collect(); + let world_content = format!("world {world_name} {{\n{imports_section}\n{include_lines}}}"); + + return Ok(world_content); +} + +// Generate WIT files from Rust code +#[instrument(level = "trace", skip_all)] +pub fn generate_wit_files(base_dir: &Path, api_dir: &Path) -> Result<(Vec, Vec)> { + // Keep INFO for start + info!("Generating WIT files..."); + fs::create_dir_all(&api_dir)?; + + // Find all relevant Rust projects + let projects = find_rust_projects(base_dir); + let mut processed_projects = Vec::new(); + + if projects.is_empty() { + warn!("No relevant Rust projects found."); + return Ok((Vec::new(), Vec::new())); + } + + // Process each project and collect world imports + let mut new_imports = Vec::new(); + let mut interfaces = Vec::new(); + + let mut wit_worlds = HashSet::new(); + for project_path in &projects { + match process_rust_project(project_path, api_dir) { + Ok(Some((interface, wit_world))) => { + new_imports.push(format!(" import {interface};")); + + interfaces.push(interface); + processed_projects.push(project_path.clone()); + + wit_worlds.insert(wit_world); + } + Ok(None) => { + warn!(project_path = %project_path.display(), "No import statement generated for project"); + } + Err(e) => { + warn!(project_path = %project_path.display(), error = %e, "Error processing project"); + } + } + } + + debug!(count = %new_imports.len(), "Collected number of new imports"); + + // Check for existing world definition files and update them + debug!("Looking for existing world definition files"); + let mut updated_world = false; + + rewrite_wit(api_dir, &new_imports, &mut wit_worlds, &mut updated_world)?; + + // If no world definitions were found, create a default one + if !updated_world && !new_imports.is_empty() { + // Define default world name + let default_world = "async-app-template-dot-os-v0"; + warn!(default_world = %default_world, "No existing world definitions found, creating default"); + + // Create world content with process-v1 include and proper indentation for imports + let imports_with_indent: Vec = new_imports + .iter() + .map(|import| { + if import.starts_with(" ") { + import.clone() + } else { + format!(" {}", import.trim()) + } + }) + .collect(); + + // Determine include based on world name + let include_line = if default_world.starts_with("types-") { + "include lib;" + } else { + "include process-v1;" + }; + + let world_content = format!( + "world {} {{\n{}\n {}\n}}", + default_world, + imports_with_indent.join("\n"), + include_line + ); + + let world_file = api_dir.join(format!("{}.wit", default_world)); + debug!(path = %world_file.display(), "Writing default world definition"); + + fs::write(&world_file, world_content).with_context(|| { + format!( + "Failed to write default world file: {}", + world_file.display() + ) + })?; + + debug!("Successfully created default world definition"); + } + + info!("WIT files generated successfully in the 'api' directory."); + Ok((processed_projects, interfaces)) +} diff --git a/src/build_start_package/mod.rs b/src/build_start_package/mod.rs index d3707196..6e5469ff 100644 --- a/src/build_start_package/mod.rs +++ b/src/build_start_package/mod.rs @@ -22,6 +22,7 @@ pub async fn execute( local_dependencies: Vec, add_paths_to_api: Vec, rewrite: bool, + hyperapp: bool, reproducible: bool, force: bool, verbose: 
bool, @@ -40,6 +41,7 @@ pub async fn execute( local_dependencies, add_paths_to_api, rewrite, + hyperapp, reproducible, force, verbose, diff --git a/src/main.rs b/src/main.rs index 87574074..a49c9139 100644 --- a/src/main.rs +++ b/src/main.rs @@ -235,6 +235,7 @@ async fn execute( .map(|s| PathBuf::from(s)) .collect(); let rewrite = matches.get_one::("REWRITE").unwrap(); + let hyperapp = matches.get_one::("HYPERAPP").unwrap(); let reproducible = matches.get_one::("REPRODUCIBLE").unwrap(); let force = matches.get_one::("FORCE").unwrap(); let verbose = matches.get_one::("VERBOSE").unwrap(); @@ -253,6 +254,7 @@ async fn execute( local_dependencies, add_paths_to_api, *rewrite, + *hyperapp, *reproducible, *force, *verbose, @@ -298,6 +300,7 @@ async fn execute( .map(|s| PathBuf::from(s)) .collect(); let rewrite = matches.get_one::("REWRITE").unwrap(); + let hyperapp = matches.get_one::("HYPERAPP").unwrap(); let reproducible = matches.get_one::("REPRODUCIBLE").unwrap(); let force = matches.get_one::("FORCE").unwrap(); let verbose = matches.get_one::("VERBOSE").unwrap(); @@ -316,6 +319,7 @@ async fn execute( local_dependencies, add_paths_to_api, *rewrite, + *hyperapp, *reproducible, *force, *verbose, @@ -758,6 +762,12 @@ async fn make_app(current_dir: &std::ffi::OsString) -> Result { .help("Rewrite the package (disables `Spawn!()`) [default: don't rewrite]") .required(false) ) + .arg(Arg::new("HYPERAPP") + .action(ArgAction::SetTrue) + .long("hyperapp") + .help("Build using the Hyperapp framework [default: don't use Hyperapp framework]") + .required(false) + ) .arg(Arg::new("REPRODUCIBLE") .action(ArgAction::SetTrue) .short('r') @@ -865,6 +875,12 @@ async fn make_app(current_dir: &std::ffi::OsString) -> Result { .help("Rewrite the package (disables `Spawn!()`) [default: don't rewrite]") .required(false) ) + .arg(Arg::new("HYPERAPP") + .action(ArgAction::SetTrue) + .long("hyperapp") + .help("Build using the Hyperapp framework [default: don't use Hyperapp framework]") + .required(false) + ) .arg(Arg::new("REPRODUCIBLE") .action(ArgAction::SetTrue) .short('r') diff --git a/src/new/mod.rs b/src/new/mod.rs index 86b6013f..1b5f1e82 100644 --- a/src/new/mod.rs +++ b/src/new/mod.rs @@ -283,12 +283,12 @@ pub fn execute( if !is_hypermap_safe(&package_name, false) { let error = if !is_from_dir { eyre!( - "`package_name` '{}' must be Kimap safe (a-z, 0-9, - allowed).", + "`package_name` '{}' must be Hypermap safe (a-z, 0-9, - allowed).", package_name ) } else { eyre!( - "`package_name` (derived from given directory {:?}) '{}' must be Kimap safe (a-z, 0-9, - allowed).", + "`package_name` (derived from given directory {:?}) '{}' must be Hypermap safe (a-z, 0-9, - allowed).", new_dir, package_name, ) @@ -297,7 +297,7 @@ pub fn execute( } if !is_hypermap_safe(&publisher, true) { return Err(eyre!( - "`publisher` '{}' must be Kimap safe (a-z, 0-9, -, . allowed).", + "`publisher` '{}' must be Hypermap safe (a-z, 0-9, -, . 
allowed).", publisher )); } diff --git a/src/new/templates/rust/no-ui/blank/blank/src/lib.rs b/src/new/templates/rust/no-ui/blank/blank/src/lib.rs index d3ef2112..df935449 100644 --- a/src/new/templates/rust/no-ui/blank/blank/src/lib.rs +++ b/src/new/templates/rust/no-ui/blank/blank/src/lib.rs @@ -1,7 +1,7 @@ use hyperware_process_lib::{await_message, call_init, println, Address}; wit_bindgen::generate!({ - path: "target/wit", + path: "../target/wit", world: "process-v1", }); diff --git a/src/new/templates/rust/no-ui/chat/chat/src/lib.rs b/src/new/templates/rust/no-ui/chat/chat/src/lib.rs index 1a4988f4..f5089f9f 100644 --- a/src/new/templates/rust/no-ui/chat/chat/src/lib.rs +++ b/src/new/templates/rust/no-ui/chat/chat/src/lib.rs @@ -4,10 +4,12 @@ use crate::hyperware::process::chat::{ ChatMessage, Request as ChatRequest, Response as ChatResponse, SendRequest, }; use hyperware_process_lib::logging::{error, info, init_logging, Level}; -use hyperware_process_lib::{await_message, call_init, println, Address, Message, Request, Response}; +use hyperware_process_lib::{ + await_message, call_init, println, Address, Message, Request, Response, +}; wit_bindgen::generate!({ - path: "target/wit", + path: "../target/wit", world: "chat-template-dot-os-v0", generate_unused_types: true, additional_derives: [serde::Deserialize, serde::Serialize, process_macros::SerdeJsonInto], diff --git a/src/new/templates/rust/no-ui/chat/send/src/lib.rs b/src/new/templates/rust/no-ui/chat/send/src/lib.rs index 19942550..ea2a31a6 100644 --- a/src/new/templates/rust/no-ui/chat/send/src/lib.rs +++ b/src/new/templates/rust/no-ui/chat/send/src/lib.rs @@ -1,8 +1,12 @@ -use crate::hyperware::process::chat::{Request as ChatRequest, Response as ChatResponse, SendRequest}; -use hyperware_process_lib::{await_next_message_body, call_init, println, Address, Message, Request}; +use crate::hyperware::process::chat::{ + Request as ChatRequest, Response as ChatResponse, SendRequest, +}; +use hyperware_process_lib::{ + await_next_message_body, call_init, println, Address, Message, Request, +}; wit_bindgen::generate!({ - path: "target/wit", + path: "../target/wit", world: "chat-template-dot-os-v0", generate_unused_types: true, additional_derives: [serde::Deserialize, serde::Serialize], diff --git a/src/new/templates/rust/no-ui/chat/test/chat-test/chat-test/src/lib.rs b/src/new/templates/rust/no-ui/chat/test/chat-test/chat-test/src/lib.rs index c0ebfcfb..41dbdc88 100644 --- a/src/new/templates/rust/no-ui/chat/test/chat-test/chat-test/src/lib.rs +++ b/src/new/templates/rust/no-ui/chat/test/chat-test/chat-test/src/lib.rs @@ -1,18 +1,24 @@ -use crate::hyperware::process::chat::{ChatMessage, Request as ChatRequest, Response as ChatResponse, SendRequest}; -use crate::hyperware::process::tester::{Request as TesterRequest, Response as TesterResponse, RunRequest, FailResponse}; +use crate::hyperware::process::chat::{ + ChatMessage, Request as ChatRequest, Response as ChatResponse, SendRequest, +}; +use crate::hyperware::process::tester::{ + FailResponse, Request as TesterRequest, Response as TesterResponse, RunRequest, +}; -use hyperware_process_lib::{await_message, call_init, print_to_terminal, println, Address, ProcessId, Request, Response}; +use hyperware_process_lib::{ + await_message, call_init, print_to_terminal, println, Address, ProcessId, Request, Response, +}; mod tester_lib; wit_bindgen::generate!({ - path: "target/wit", + path: "../target/wit", world: "chat-test-template-dot-os-v0", generate_unused_types: true, additional_derives: 
[PartialEq, serde::Deserialize, serde::Serialize, process_macros::SerdeJsonInto], }); -fn handle_message (our: &Address) -> anyhow::Result<()> { +fn handle_message(our: &Address) -> anyhow::Result<()> { let message = await_message().unwrap(); if !message.is_request() { @@ -60,15 +66,19 @@ fn handle_message (our: &Address) -> anyhow::Result<()> { target: node_names[1].clone(), message: message.clone(), })) - .send_and_await_response(15)?.unwrap(); + .send_and_await_response(15)? + .unwrap(); // Get history from receiver & test print_to_terminal(0, "chat_test: c"); let response = Request::new() .target(their_chat_address.clone()) .body(ChatRequest::History(our.node.clone())) - .send_and_await_response(15)?.unwrap(); - if response.is_request() { fail!("chat_test"); }; + .send_and_await_response(15)? + .unwrap(); + if response.is_request() { + fail!("chat_test"); + }; let ChatResponse::History(messages) = response.body().try_into()? else { fail!("chat_test"); }; @@ -96,12 +106,12 @@ fn init(our: Address) { loop { match handle_message(&our) { - Ok(()) => {}, + Ok(()) => {} Err(e) => { print_to_terminal(0, format!("chat_test: error: {e:?}").as_str()); fail!("chat_test"); - }, + } }; } } diff --git a/src/new/templates/rust/no-ui/echo/echo/src/lib.rs b/src/new/templates/rust/no-ui/echo/echo/src/lib.rs index b2d367a6..842ddba7 100644 --- a/src/new/templates/rust/no-ui/echo/echo/src/lib.rs +++ b/src/new/templates/rust/no-ui/echo/echo/src/lib.rs @@ -2,7 +2,7 @@ use hyperware_process_lib::logging::{error, info, init_logging, Level}; use hyperware_process_lib::{await_message, call_init, println, Address, Message, Response}; wit_bindgen::generate!({ - path: "target/wit", + path: "../target/wit", world: "process-v1", }); diff --git a/src/new/templates/rust/no-ui/echo/test/echo-test/echo-test/src/lib.rs b/src/new/templates/rust/no-ui/echo/test/echo-test/echo-test/src/lib.rs index 34d9f9cd..87c15c29 100644 --- a/src/new/templates/rust/no-ui/echo/test/echo-test/echo-test/src/lib.rs +++ b/src/new/templates/rust/no-ui/echo/test/echo-test/echo-test/src/lib.rs @@ -9,7 +9,7 @@ use hyperware_process_lib::{ mod tester_lib; wit_bindgen::generate!({ - path: "target/wit", + path: "../target/wit", world: "echo-test-template-dot-os-v0", generate_unused_types: true, additional_derives: [PartialEq, serde::Deserialize, serde::Serialize, process_macros::SerdeJsonInto], diff --git a/src/new/templates/rust/no-ui/fibonacci/fibonacci/src/lib.rs b/src/new/templates/rust/no-ui/fibonacci/fibonacci/src/lib.rs index 22d97ee2..f9ba4376 100644 --- a/src/new/templates/rust/no-ui/fibonacci/fibonacci/src/lib.rs +++ b/src/new/templates/rust/no-ui/fibonacci/fibonacci/src/lib.rs @@ -5,7 +5,7 @@ use hyperware_process_lib::logging::{error, info, init_logging, Level}; use hyperware_process_lib::{await_message, call_init, Address, Message, Response}; wit_bindgen::generate!({ - path: "target/wit", + path: "../target/wit", world: "fibonacci-template-dot-os-v0", generate_unused_types: true, additional_derives: [serde::Deserialize, serde::Serialize, process_macros::SerdeJsonInto], diff --git a/src/new/templates/rust/no-ui/fibonacci/number/src/lib.rs b/src/new/templates/rust/no-ui/fibonacci/number/src/lib.rs index c5357da5..fd7d42a4 100644 --- a/src/new/templates/rust/no-ui/fibonacci/number/src/lib.rs +++ b/src/new/templates/rust/no-ui/fibonacci/number/src/lib.rs @@ -1,10 +1,12 @@ -use crate::hyperware::process::fibonacci::{Request as FibonacciRequest, Response as FibonacciResponse}; +use crate::hyperware::process::fibonacci::{ + Request as 
FibonacciRequest, Response as FibonacciResponse, +}; use hyperware_process_lib::{ await_next_message_body, call_init, println, Address, Message, Request, }; wit_bindgen::generate!({ - path: "target/wit", + path: "../target/wit", world: "fibonacci-template-dot-os-v0", generate_unused_types: true, additional_derives: [serde::Deserialize, serde::Serialize], diff --git a/src/new/templates/rust/no-ui/fibonacci/test/fibonacci-test/fibonacci-test/src/lib.rs b/src/new/templates/rust/no-ui/fibonacci/test/fibonacci-test/fibonacci-test/src/lib.rs index c006603a..dc990637 100644 --- a/src/new/templates/rust/no-ui/fibonacci/test/fibonacci-test/fibonacci-test/src/lib.rs +++ b/src/new/templates/rust/no-ui/fibonacci/test/fibonacci-test/fibonacci-test/src/lib.rs @@ -1,12 +1,16 @@ use crate::hyperware::process::fibonacci::{Request as FibRequest, Response as FibResponse}; -use crate::hyperware::process::tester::{Request as TesterRequest, Response as TesterResponse, RunRequest, FailResponse}; +use crate::hyperware::process::tester::{ + FailResponse, Request as TesterRequest, Response as TesterResponse, RunRequest, +}; -use hyperware_process_lib::{await_message, call_init, print_to_terminal, Address, ProcessId, Request, Response}; +use hyperware_process_lib::{ + await_message, call_init, print_to_terminal, Address, ProcessId, Request, Response, +}; mod tester_lib; wit_bindgen::generate!({ - path: "target/wit", + path: "../target/wit", world: "fibonacci-test-template-dot-os-v0", generate_unused_types: true, additional_derives: [PartialEq, serde::Deserialize, serde::Serialize, process_macros::SerdeJsonInto], @@ -16,8 +20,11 @@ fn test_number(n: u32, address: &Address) -> anyhow::Result { let response = Request::new() .target(address) .body(FibRequest::Number(n)) - .send_and_await_response(15)?.unwrap(); - if response.is_request() { fail!("fibonacci_test"); }; + .send_and_await_response(15)? + .unwrap(); + if response.is_request() { + fail!("fibonacci_test"); + }; let FibResponse::Number(fib_number) = response.body().try_into()? else { fail!("fibonacci_test"); }; @@ -28,15 +35,18 @@ fn test_numbers(n: u32, n_trials: u32, address: &Address) -> anyhow::Result let response = Request::new() .target(address) .body(FibRequest::Numbers((n, n_trials))) - .send_and_await_response(15)?.unwrap(); - if response.is_request() { fail!("fibonacci_test"); }; + .send_and_await_response(15)? + .unwrap(); + if response.is_request() { + fail!("fibonacci_test"); + }; let FibResponse::Numbers((fib_number, _)) = response.body().try_into()? 
else { fail!("fibonacci_test"); }; Ok(fib_number) } -fn handle_message (our: &Address) -> anyhow::Result<()> { +fn handle_message(our: &Address) -> anyhow::Result<()> { let message = await_message().unwrap(); if !message.is_request() { @@ -93,12 +103,12 @@ fn init(our: Address) { loop { match handle_message(&our) { - Ok(()) => {}, + Ok(()) => {} Err(e) => { print_to_terminal(0, format!("fibonacci_test: error: {e:?}").as_str()); fail!("fibonacci_test"); - }, + } }; } } diff --git a/src/new/templates/rust/no-ui/file-transfer/download/src/lib.rs b/src/new/templates/rust/no-ui/file-transfer/download/src/lib.rs index 50cf7d6e..2e99ea04 100644 --- a/src/new/templates/rust/no-ui/file-transfer/download/src/lib.rs +++ b/src/new/templates/rust/no-ui/file-transfer/download/src/lib.rs @@ -5,7 +5,7 @@ use hyperware_process_lib::{ }; wit_bindgen::generate!({ - path: "target/wit", + path: "../target/wit", world: "file-transfer-template-dot-os-v0", generate_unused_types: true, additional_derives: [serde::Deserialize, serde::Serialize, process_macros::SerdeJsonInto], diff --git a/src/new/templates/rust/no-ui/file-transfer/file-transfer-worker-api/src/lib.rs b/src/new/templates/rust/no-ui/file-transfer/file-transfer-worker-api/src/lib.rs index 1b6fa5b9..7d444673 100644 --- a/src/new/templates/rust/no-ui/file-transfer/file-transfer-worker-api/src/lib.rs +++ b/src/new/templates/rust/no-ui/file-transfer/file-transfer-worker-api/src/lib.rs @@ -5,7 +5,7 @@ use crate::hyperware::process::standard::Address as WitAddress; use hyperware_process_lib::{our_capabilities, spawn, Address, OnExit, Request, Response}; wit_bindgen::generate!({ - path: "target/wit", + path: "../target/wit", world: "file-transfer-worker-api-v0", generate_unused_types: true, additional_derives: [serde::Deserialize, serde::Serialize, process_macros::SerdeJsonInto], diff --git a/src/new/templates/rust/no-ui/file-transfer/file-transfer-worker/src/lib.rs b/src/new/templates/rust/no-ui/file-transfer/file-transfer-worker/src/lib.rs index aca5faef..bd87a312 100644 --- a/src/new/templates/rust/no-ui/file-transfer/file-transfer-worker/src/lib.rs +++ b/src/new/templates/rust/no-ui/file-transfer/file-transfer-worker/src/lib.rs @@ -11,7 +11,7 @@ use hyperware_process_lib::{ }; wit_bindgen::generate!({ - path: "target/wit", + path: "../target/wit", world: "file-transfer-template-dot-os-v0", generate_unused_types: true, additional_derives: [serde::Deserialize, serde::Serialize, process_macros::SerdeJsonInto], diff --git a/src/new/templates/rust/no-ui/file-transfer/file-transfer/src/lib.rs b/src/new/templates/rust/no-ui/file-transfer/file-transfer/src/lib.rs index 3a8a70c4..ec8e8715 100644 --- a/src/new/templates/rust/no-ui/file-transfer/file-transfer/src/lib.rs +++ b/src/new/templates/rust/no-ui/file-transfer/file-transfer/src/lib.rs @@ -14,7 +14,7 @@ use hyperware_process_lib::{ }; wit_bindgen::generate!({ - path: "target/wit", + path: "../target/wit", world: "file-transfer-template-dot-os-v0", generate_unused_types: true, additional_derives: [serde::Deserialize, serde::Serialize, process_macros::SerdeJsonInto], diff --git a/src/new/templates/rust/no-ui/file-transfer/list-files/src/lib.rs b/src/new/templates/rust/no-ui/file-transfer/list-files/src/lib.rs index 66076f9f..284a3182 100644 --- a/src/new/templates/rust/no-ui/file-transfer/list-files/src/lib.rs +++ b/src/new/templates/rust/no-ui/file-transfer/list-files/src/lib.rs @@ -1,10 +1,12 @@ use crate::hyperware::process::file_transfer::{ Request as TransferRequest, Response as TransferResponse, }; -use 
hyperware_process_lib::{await_next_message_body, call_init, println, Address, Message, Request}; +use hyperware_process_lib::{ + await_next_message_body, call_init, println, Address, Message, Request, +}; wit_bindgen::generate!({ - path: "target/wit", + path: "../target/wit", world: "file-transfer-template-dot-os-v0", generate_unused_types: true, additional_derives: [serde::Deserialize, serde::Serialize, process_macros::SerdeJsonInto], diff --git a/src/new/templates/rust/no-ui/file-transfer/test/file-transfer-test/file-transfer-test/src/lib.rs b/src/new/templates/rust/no-ui/file-transfer/test/file-transfer-test/file-transfer-test/src/lib.rs index 8a3ec5b3..19cee03e 100644 --- a/src/new/templates/rust/no-ui/file-transfer/test/file-transfer-test/file-transfer-test/src/lib.rs +++ b/src/new/templates/rust/no-ui/file-transfer/test/file-transfer-test/file-transfer-test/src/lib.rs @@ -17,7 +17,7 @@ use hyperware_process_lib::{ mod tester_lib; wit_bindgen::generate!({ - path: "target/wit", + path: "../target/wit", world: "file-transfer-test-template-dot-os-v0", generate_unused_types: true, additional_derives: [PartialEq, serde::Deserialize, serde::Serialize, process_macros::SerdeJsonInto], diff --git a/src/new/templates/rust/ui/chat/chat/src/lib.rs b/src/new/templates/rust/ui/chat/chat/src/lib.rs index a8d4b455..9a56bb2a 100644 --- a/src/new/templates/rust/ui/chat/chat/src/lib.rs +++ b/src/new/templates/rust/ui/chat/chat/src/lib.rs @@ -14,7 +14,7 @@ use hyperware_process_lib::{ }; wit_bindgen::generate!({ - path: "target/wit", + path: "../target/wit", world: "chat-template-dot-os-v0", generate_unused_types: true, additional_derives: [serde::Deserialize, serde::Serialize, process_macros::SerdeJsonInto], diff --git a/src/new/templates/rust/ui/chat/test/chat-test/chat-test/src/lib.rs b/src/new/templates/rust/ui/chat/test/chat-test/chat-test/src/lib.rs index c0ebfcfb..41dbdc88 100644 --- a/src/new/templates/rust/ui/chat/test/chat-test/chat-test/src/lib.rs +++ b/src/new/templates/rust/ui/chat/test/chat-test/chat-test/src/lib.rs @@ -1,18 +1,24 @@ -use crate::hyperware::process::chat::{ChatMessage, Request as ChatRequest, Response as ChatResponse, SendRequest}; -use crate::hyperware::process::tester::{Request as TesterRequest, Response as TesterResponse, RunRequest, FailResponse}; +use crate::hyperware::process::chat::{ + ChatMessage, Request as ChatRequest, Response as ChatResponse, SendRequest, +}; +use crate::hyperware::process::tester::{ + FailResponse, Request as TesterRequest, Response as TesterResponse, RunRequest, +}; -use hyperware_process_lib::{await_message, call_init, print_to_terminal, println, Address, ProcessId, Request, Response}; +use hyperware_process_lib::{ + await_message, call_init, print_to_terminal, println, Address, ProcessId, Request, Response, +}; mod tester_lib; wit_bindgen::generate!({ - path: "target/wit", + path: "../target/wit", world: "chat-test-template-dot-os-v0", generate_unused_types: true, additional_derives: [PartialEq, serde::Deserialize, serde::Serialize, process_macros::SerdeJsonInto], }); -fn handle_message (our: &Address) -> anyhow::Result<()> { +fn handle_message(our: &Address) -> anyhow::Result<()> { let message = await_message().unwrap(); if !message.is_request() { @@ -60,15 +66,19 @@ fn handle_message (our: &Address) -> anyhow::Result<()> { target: node_names[1].clone(), message: message.clone(), })) - .send_and_await_response(15)?.unwrap(); + .send_and_await_response(15)? 
+ .unwrap(); // Get history from receiver & test print_to_terminal(0, "chat_test: c"); let response = Request::new() .target(their_chat_address.clone()) .body(ChatRequest::History(our.node.clone())) - .send_and_await_response(15)?.unwrap(); - if response.is_request() { fail!("chat_test"); }; + .send_and_await_response(15)? + .unwrap(); + if response.is_request() { + fail!("chat_test"); + }; let ChatResponse::History(messages) = response.body().try_into()? else { fail!("chat_test"); }; @@ -96,12 +106,12 @@ fn init(our: Address) { loop { match handle_message(&our) { - Ok(()) => {}, + Ok(()) => {} Err(e) => { print_to_terminal(0, format!("chat_test: error: {e:?}").as_str()); fail!("chat_test"); - }, + } }; } } diff --git a/src/run_tests/mod.rs b/src/run_tests/mod.rs index fbc6afca..3becc77e 100644 --- a/src/run_tests/mod.rs +++ b/src/run_tests/mod.rs @@ -355,6 +355,7 @@ async fn build_packages( let url = format!("http://localhost:{port}"); + // TODO: add hyperapp setting to tests.toml for dependency_package_path in &test.dependency_package_paths { let path = match expand_home_path(&dependency_package_path) { Some(p) => p, @@ -377,6 +378,7 @@ async fn build_packages( dependency_package_paths.clone(), vec![], // TODO false, + test.hyperapp.unwrap_or_default(), false, false, false, @@ -402,6 +404,7 @@ async fn build_packages( dependency_package_paths.clone(), vec![], // TODO false, + test.hyperapp.unwrap_or_default(), false, false, false, @@ -424,6 +427,7 @@ async fn build_packages( dependency_package_paths.clone(), vec![], // TODO false, + test.hyperapp.unwrap_or_default(), false, false, false, diff --git a/src/run_tests/types.rs b/src/run_tests/types.rs index 1735155e..b129fb17 100644 --- a/src/run_tests/types.rs +++ b/src/run_tests/types.rs @@ -32,6 +32,7 @@ pub struct Test { pub timeout_secs: u64, pub fakechain_router: u16, pub nodes: Vec, + pub hyperapp: Option, } #[derive(Debug, Clone, Serialize, Deserialize)]
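For reference, a minimal sketch of how the new optional `hyperapp` setting might appear in a `tests.toml` entry, given the `hyperapp: Option<bool>` field added to `Test` in `src/run_tests/types.rs`. The `[[tests]]` table name and the example values are assumptions inferred from the other fields visible in this diff, not confirmed by it:

    [[tests]]
    # ... other test keys (package paths, nodes, etc.) omitted ...
    timeout_secs = 60
    fakechain_router = 8545
    # New in this change: opt the test's package builds into the Hyperapp framework.
    # Omitting the key yields None, which test.hyperapp.unwrap_or_default() treats as false.
    hyperapp = true

On the command line, the same behavior is exposed through the new `--hyperapp` flag registered on the build-related subcommands in `src/main.rs` (e.g. `kit build --hyperapp`); the exact subcommand names are assumed from context rather than shown in this diff.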