diff --git a/src/analysis/ast_cache.rs b/src/analysis/ast_cache.rs index 5ee3183..b64457f 100644 --- a/src/analysis/ast_cache.rs +++ b/src/analysis/ast_cache.rs @@ -1,5 +1,5 @@ use std::collections::HashMap; -use std::path::PathBuf; +use std::path::{Path, PathBuf}; use syn::File as SynFile; use walkdir::WalkDir; @@ -33,6 +33,15 @@ impl AstCache { } } + fn should_skip_path(path: &Path) -> bool { + path.components().any(|component| { + component + .as_os_str() + .to_str() + .is_some_and(|part| part == "target" || part == ".git") + }) + } + /// Parse and cache all Rust files in the given project path pub fn parse_and_cache_all_files( &mut self, @@ -47,14 +56,11 @@ impl AstCache { let entry = entry?; let path = entry.path(); - if path.is_file() && path.extension().is_some_and(|ext| ext == "rs") { - // Skip target directory and other build artifacts - if path.to_string_lossy().contains("/target/") - || path.to_string_lossy().contains("/.git/") - { - continue; - } + if Self::should_skip_path(path) { + continue; + } + if path.is_file() && path.extension().is_some_and(|ext| ext == "rs") { if verbose { println!("šŸ“„ Parsing file: {}", path.display()); } @@ -145,6 +151,11 @@ mod tests { use super::*; use std::fs; use std::io::Write; + use std::sync::atomic::{AtomicU64, Ordering}; + use std::sync::{Arc, Mutex}; + use std::thread; + + static TEMP_DIR_COUNTER: AtomicU64 = AtomicU64::new(0); fn temp_dir() -> String { use std::time::{SystemTime, UNIX_EPOCH}; @@ -152,15 +163,21 @@ mod tests { .duration_since(UNIX_EPOCH) .unwrap() .as_nanos(); - format!("./test_ast_cache_{}_{}", std::process::id(), timestamp) + let counter = TEMP_DIR_COUNTER.fetch_add(1, Ordering::Relaxed); + format!( + "./test_ast_cache_{}_{}_{}", + std::process::id(), + timestamp, + counter + ) } - fn cleanup_dir(dir: &str) { - let _ = fs::remove_dir_all(dir); + fn cleanup_dir(dir: impl AsRef) { + let _ = fs::remove_dir_all(dir.as_ref()); } - fn create_rust_file(dir: &str, name: &str, content: &str) -> PathBuf { 
- let path = PathBuf::from(format!("{}/{}", dir, name)); + fn create_rust_file(dir: impl AsRef, name: &str, content: &str) -> PathBuf { + let path = dir.as_ref().join(name); if let Some(parent) = path.parent() { fs::create_dir_all(parent).unwrap(); } @@ -169,6 +186,30 @@ mod tests { path } + #[test] + fn test_temp_dir_helper_is_unique_under_concurrency() { + let seen = Arc::new(Mutex::new(std::collections::HashSet::new())); + let mut handles = Vec::new(); + + for _ in 0..32 { + let seen = Arc::clone(&seen); + handles.push(thread::spawn(move || { + for _ in 0..1000 { + let dir = temp_dir(); + let mut guard = seen.lock().unwrap(); + assert!( + guard.insert(dir), + "temp_dir helper returned a duplicate path" + ); + } + })); + } + + for handle in handles { + handle.join().unwrap(); + } + } + mod parsed_file { use super::*; @@ -213,7 +254,6 @@ mod tests { #[test] fn test_parse_and_cache_single_file() { let dir = temp_dir(); - fs::create_dir_all(&dir).unwrap(); let path = create_rust_file(&dir, "test.rs", "fn main() {}"); let mut cache = AstCache::new(); @@ -221,21 +261,18 @@ mod tests { assert!(result.is_ok()); assert_eq!(cache.len(), 1); assert!(cache.contains(&path)); - cleanup_dir(&dir); } #[test] fn test_parse_invalid_syntax_errors() { let dir = temp_dir(); - fs::create_dir_all(&dir).unwrap(); let path = create_rust_file(&dir, "invalid.rs", "fn main( {"); let mut cache = AstCache::new(); let result = cache.parse_and_cache_file(&path); assert!(result.is_err()); assert_eq!(cache.len(), 0); - cleanup_dir(&dir); } @@ -254,7 +291,6 @@ mod tests { #[test] fn test_parse_and_cache_all_files() { let dir = temp_dir(); - fs::create_dir_all(&dir).unwrap(); create_rust_file(&dir, "lib.rs", "pub fn hello() {}"); create_rust_file(&dir, "main.rs", "fn main() {}"); @@ -264,14 +300,13 @@ mod tests { let result = cache.parse_and_cache_all_files(&dir, false); assert!(result.is_ok()); assert_eq!(cache.len(), 3); - cleanup_dir(&dir); } #[test] fn test_parse_skips_target_directory() { let 
dir = temp_dir(); - fs::create_dir_all(format!("{}/target", dir)).unwrap(); + fs::create_dir_all(Path::new(&dir).join("target")).unwrap(); create_rust_file(&dir, "lib.rs", "pub fn hello() {}"); create_rust_file(&dir, "target/debug.rs", "fn debug() {}"); @@ -281,14 +316,13 @@ mod tests { // Should only have lib.rs, not target/debug.rs assert_eq!(cache.len(), 1); - cleanup_dir(&dir); } #[test] fn test_parse_skips_git_directory() { let dir = temp_dir(); - fs::create_dir_all(format!("{}/.git", dir)).unwrap(); + fs::create_dir_all(Path::new(&dir).join(".git")).unwrap(); create_rust_file(&dir, "lib.rs", "pub fn hello() {}"); create_rust_file(&dir, ".git/hooks.rs", "fn hook() {}"); @@ -297,14 +331,12 @@ mod tests { cache.parse_and_cache_all_files(&dir, false).unwrap(); assert_eq!(cache.len(), 1); - cleanup_dir(&dir); } #[test] fn test_parse_continues_on_syntax_error() { let dir = temp_dir(); - fs::create_dir_all(&dir).unwrap(); create_rust_file(&dir, "valid.rs", "fn main() {}"); create_rust_file(&dir, "invalid.rs", "fn main( {"); @@ -315,21 +347,18 @@ mod tests { assert!(result.is_ok()); // Should have 2 valid files, skip the invalid one assert_eq!(cache.len(), 2); - cleanup_dir(&dir); } #[test] fn test_parse_with_verbose_output() { let dir = temp_dir(); - fs::create_dir_all(&dir).unwrap(); create_rust_file(&dir, "lib.rs", "pub fn hello() {}"); let mut cache = AstCache::new(); // Just verify it doesn't panic with verbose=true let result = cache.parse_and_cache_all_files(&dir, true); assert!(result.is_ok()); - cleanup_dir(&dir); } } @@ -474,42 +503,36 @@ mod tests { let result = cache.parse_and_cache_all_files(&dir, false); assert!(result.is_ok()); assert_eq!(cache.len(), 0); - cleanup_dir(&dir); } #[test] fn test_directory_with_only_non_rust_files() { let dir = temp_dir(); - fs::create_dir_all(&dir).unwrap(); create_rust_file(&dir, "readme.txt", "Hello"); create_rust_file(&dir, "config.json", "{}"); let mut cache = AstCache::new(); cache.parse_and_cache_all_files(&dir, 
false).unwrap(); assert_eq!(cache.len(), 0); - cleanup_dir(&dir); } #[test] fn test_parse_empty_rust_file() { let dir = temp_dir(); - fs::create_dir_all(&dir).unwrap(); let path = create_rust_file(&dir, "empty.rs", ""); let mut cache = AstCache::new(); let result = cache.parse_and_cache_file(&path); assert!(result.is_ok()); assert_eq!(cache.len(), 1); - cleanup_dir(&dir); } #[test] fn test_cache_same_file_twice() { let dir = temp_dir(); - fs::create_dir_all(&dir).unwrap(); let path = create_rust_file(&dir, "test.rs", "fn main() {}"); let mut cache = AstCache::new(); @@ -518,7 +541,6 @@ mod tests { // Should still be 1 (overwritten) assert_eq!(cache.len(), 1); - cleanup_dir(&dir); } } diff --git a/src/analysis/dependency_graph.rs b/src/analysis/dependency_graph.rs index 7f93a8f..de74da5 100644 --- a/src/analysis/dependency_graph.rs +++ b/src/analysis/dependency_graph.rs @@ -66,8 +66,10 @@ impl TypeDependencyGraph { let mut sorted = Vec::new(); let mut visited = HashSet::new(); let mut visiting = HashSet::new(); + let mut type_names: Vec<&String> = types.iter().collect(); + type_names.sort_unstable(); - for type_name in types { + for type_name in type_names { if !visited.contains(type_name) { self.topological_visit(type_name, &mut sorted, &mut visited, &mut visiting); } @@ -101,7 +103,10 @@ impl TypeDependencyGraph { // Visit dependencies first if let Some(deps) = self.dependencies.get(type_name) { - for dep in deps { + let mut sorted_deps: Vec<&String> = deps.iter().collect(); + sorted_deps.sort_unstable(); + + for dep in sorted_deps { self.topological_visit(dep, sorted, visited, visiting); } } @@ -111,6 +116,39 @@ impl TypeDependencyGraph { sorted.push(type_name.to_string()); } + fn sorted_commands<'a>(&self, commands: &'a [CommandInfo]) -> Vec<&'a CommandInfo> { + let mut sorted: Vec<_> = commands.iter().collect(); + sorted.sort_by(|a, b| { + a.name + .cmp(&b.name) + .then_with(|| a.file_path.cmp(&b.file_path)) + .then_with(|| a.line_number.cmp(&b.line_number)) + 
}); + sorted + } + + fn sorted_resolved_type_names(&self) -> Vec<&String> { + let mut names: Vec<_> = self.resolved_types.keys().collect(); + names.sort_unstable(); + names + } + + fn sorted_dependencies_for(&self, type_name: &str) -> Vec<&String> { + let mut deps: Vec<_> = self + .dependencies + .get(type_name) + .map(|deps| deps.iter().collect()) + .unwrap_or_default(); + deps.sort_unstable(); + deps + } + + fn sorted_dependency_owners(&self) -> Vec<&String> { + let mut owners: Vec<_> = self.dependencies.keys().collect(); + owners.sort_unstable(); + owners + } + /// Build visualization of the dependency graph pub fn visualize_dependencies(&self, entry_commands: &[crate::models::CommandInfo]) -> String { let mut output = String::new(); @@ -119,7 +157,7 @@ impl TypeDependencyGraph { // Show command entry points output.push_str("šŸ“‹ Command Entry Points:\n"); - for cmd in entry_commands { + for cmd in self.sorted_commands(entry_commands) { output.push_str(&format!( "• {} ({}:{})\n", cmd.name, cmd.file_path, cmd.line_number @@ -135,7 +173,8 @@ impl TypeDependencyGraph { } output.push_str("\nšŸ—ļø Discovered Types:\n"); - for (type_name, struct_info) in &self.resolved_types { + for type_name in self.sorted_resolved_type_names() { + let struct_info = &self.resolved_types[type_name]; let type_kind = if struct_info.is_enum { "enum" } else { @@ -150,17 +189,19 @@ impl TypeDependencyGraph { )); // Show dependencies - if let Some(deps) = self.dependencies.get(type_name) { - if !deps.is_empty() { - let deps_list: Vec = deps.iter().cloned().collect(); - output.push_str(&format!(" └─ depends on: {}\n", deps_list.join(", "))); - } + let deps_list: Vec = self + .sorted_dependencies_for(type_name) + .into_iter() + .cloned() + .collect(); + if !deps_list.is_empty() { + output.push_str(&format!(" └─ depends on: {}\n", deps_list.join(", "))); } } // Show dependency chains output.push_str("\nšŸ”— Dependency Chains:\n"); - for type_name in self.resolved_types.keys() { + for 
type_name in self.sorted_resolved_type_names() { self.show_dependency_chain(type_name, &mut output, 0); } @@ -179,12 +220,10 @@ impl TypeDependencyGraph { let indent_str = " ".repeat(indent); output.push_str(&format!("{}ā”œā”€ {}\n", indent_str, type_name)); - if let Some(deps) = self.dependencies.get(type_name) { - for dep in deps { - if indent < 3 { - // Prevent too deep recursion in visualization - self.show_dependency_chain(dep, output, indent + 1); - } + for dep in self.sorted_dependencies_for(type_name) { + if indent < 3 { + // Prevent too deep recursion in visualization + self.show_dependency_chain(dep, output, indent + 1); } } } @@ -198,7 +237,7 @@ impl TypeDependencyGraph { output.push('\n'); // Add command nodes - for command in commands { + for command in self.sorted_commands(commands) { output.push_str(&format!( " \"{}\" [color=blue, style=filled, fillcolor=lightblue];\n", command.name @@ -206,12 +245,12 @@ impl TypeDependencyGraph { } // Add type nodes - for type_name in self.resolved_types.keys() { + for type_name in self.sorted_resolved_type_names() { output.push_str(&format!(" \"{}\" [color=green];\n", type_name)); } // Add edges from commands to their parameter/return types - for command in commands { + for command in self.sorted_commands(commands) { for param in &command.parameters { if self.resolved_types.contains_key(¶m.rust_type) { output.push_str(&format!( @@ -229,8 +268,8 @@ impl TypeDependencyGraph { } // Add type dependency edges - for (type_name, deps) in &self.dependencies { - for dep in deps { + for type_name in self.sorted_dependency_owners() { + for dep in self.sorted_dependencies_for(type_name) { output.push_str(&format!(" \"{}\" -> \"{}\";\n", type_name, dep)); } } @@ -256,6 +295,10 @@ mod tests { } } + fn create_test_command(name: &str, file: &str, line: usize) -> CommandInfo { + CommandInfo::new_for_test(name, file, line, vec![], "String", false, vec![]) + } + #[test] fn test_new_graph() { let graph = TypeDependencyGraph::new(); @@ 
-390,9 +433,7 @@ mod tests { types.insert("Post".to_string()); let sorted = graph.topological_sort_types(&types); - assert_eq!(sorted.len(), 2); - assert!(sorted.contains(&"User".to_string())); - assert!(sorted.contains(&"Post".to_string())); + assert_eq!(sorted, vec!["Post", "User"]); } #[test] @@ -433,11 +474,7 @@ mod tests { assert_eq!(sorted[0], "A"); // D must come last (depends on everything) assert_eq!(sorted[3], "D"); - // B and C can be in either order but after A and before D - let b_pos = sorted.iter().position(|x| x == "B").unwrap(); - let c_pos = sorted.iter().position(|x| x == "C").unwrap(); - assert!(b_pos > 0 && b_pos < 3); - assert!(c_pos > 0 && c_pos < 3); + assert_eq!(sorted, vec!["A", "B", "C", "D"]); } #[test] @@ -549,4 +586,66 @@ mod tests { let sorted = graph.topological_sort_types(&types); assert_eq!(sorted, vec!["User", "Post"]); } + + #[test] + fn test_visualize_dependencies_is_deterministic() { + let mut graph1 = TypeDependencyGraph::new(); + graph1.add_resolved_type("Beta".to_string(), create_test_struct("Beta", "beta.rs")); + graph1.add_resolved_type("Alpha".to_string(), create_test_struct("Alpha", "alpha.rs")); + graph1.add_dependency("Beta".to_string(), "Delta".to_string()); + graph1.add_dependency("Beta".to_string(), "Gamma".to_string()); + graph1.add_dependency("Alpha".to_string(), "Gamma".to_string()); + + let mut graph2 = TypeDependencyGraph::new(); + graph2.add_resolved_type("Alpha".to_string(), create_test_struct("Alpha", "alpha.rs")); + graph2.add_resolved_type("Beta".to_string(), create_test_struct("Beta", "beta.rs")); + graph2.add_dependency("Alpha".to_string(), "Gamma".to_string()); + graph2.add_dependency("Beta".to_string(), "Gamma".to_string()); + graph2.add_dependency("Beta".to_string(), "Delta".to_string()); + + let commands1 = vec![ + create_test_command("beta_command", "beta.rs", 20), + create_test_command("alpha_command", "alpha.rs", 10), + ]; + let commands2 = vec![ + create_test_command("alpha_command", "alpha.rs", 
10), + create_test_command("beta_command", "beta.rs", 20), + ]; + + assert_eq!( + graph1.visualize_dependencies(&commands1), + graph2.visualize_dependencies(&commands2) + ); + } + + #[test] + fn test_generate_dot_graph_is_deterministic() { + let mut graph1 = TypeDependencyGraph::new(); + graph1.add_resolved_type("Beta".to_string(), create_test_struct("Beta", "beta.rs")); + graph1.add_resolved_type("Alpha".to_string(), create_test_struct("Alpha", "alpha.rs")); + graph1.add_dependency("Beta".to_string(), "Delta".to_string()); + graph1.add_dependency("Beta".to_string(), "Gamma".to_string()); + graph1.add_dependency("Alpha".to_string(), "Gamma".to_string()); + + let mut graph2 = TypeDependencyGraph::new(); + graph2.add_resolved_type("Alpha".to_string(), create_test_struct("Alpha", "alpha.rs")); + graph2.add_resolved_type("Beta".to_string(), create_test_struct("Beta", "beta.rs")); + graph2.add_dependency("Alpha".to_string(), "Gamma".to_string()); + graph2.add_dependency("Beta".to_string(), "Gamma".to_string()); + graph2.add_dependency("Beta".to_string(), "Delta".to_string()); + + let commands1 = vec![ + create_test_command("beta_command", "beta.rs", 20), + create_test_command("alpha_command", "alpha.rs", 10), + ]; + let commands2 = vec![ + create_test_command("alpha_command", "alpha.rs", 10), + create_test_command("beta_command", "beta.rs", 20), + ]; + + assert_eq!( + graph1.generate_dot_graph(&commands1), + graph2.generate_dot_graph(&commands2) + ); + } } diff --git a/src/analysis/mod.rs b/src/analysis/mod.rs index 2f81286..f95cbf9 100644 --- a/src/analysis/mod.rs +++ b/src/analysis/mod.rs @@ -84,7 +84,8 @@ impl CommandAnalyzer { .parse_and_cache_all_files(project_path, verbose)?; // Extract commands from cached ASTs - let file_paths: Vec = self.ast_cache.keys().cloned().collect(); + let mut file_paths: Vec = self.ast_cache.keys().cloned().collect(); + file_paths.sort_unstable(); let mut commands = Vec::new(); let mut type_names_to_discover = HashSet::new(); diff --git 
a/src/bin/cargo-tauri-typegen.rs b/src/bin/cargo-tauri-typegen.rs index cd1b6c6..92b5233 100644 --- a/src/bin/cargo-tauri-typegen.rs +++ b/src/bin/cargo-tauri-typegen.rs @@ -215,6 +215,7 @@ fn run_generate( // Check cache to see if regeneration is needed (unless force is set) let discovered_structs = analyzer.get_discovered_structs(); + let discovered_events = analyzer.get_discovered_events(); let needs_regeneration = if config.should_force() { if config.is_verbose() { println!("šŸ”„ Force flag set, regenerating bindings"); @@ -225,6 +226,7 @@ fn run_generate( &config.output_path, &commands, discovered_structs, + discovered_events, &config, ) .unwrap_or(true) // On error, assume regeneration is needed @@ -273,7 +275,7 @@ fn run_generate( } // Save cache after successful generation - let cache = GenerationCache::new(&commands, discovered_structs, &config)?; + let cache = GenerationCache::new(&commands, discovered_structs, discovered_events, &config)?; if let Err(e) = cache.save(&config.output_path) { eprintln!("Warning: Failed to save generation cache: {}", e); } diff --git a/src/build/generation_cache.rs b/src/build/generation_cache.rs index 060b77d..5098c6f 100644 --- a/src/build/generation_cache.rs +++ b/src/build/generation_cache.rs @@ -1,5 +1,5 @@ use crate::interface::config::GenerateConfig; -use crate::models::{CommandInfo, StructInfo}; +use crate::models::{CommandInfo, EventInfo, StructInfo}; use serde::{Deserialize, Serialize}; use std::collections::HashMap; use std::fs; @@ -28,6 +28,8 @@ pub struct GenerationCache { commands_hash: String, /// Hash of all discovered structs structs_hash: String, + /// Hash of all discovered events + events_hash: String, /// Hash of configuration settings that affect output config_hash: String, /// Combined hash for quick comparison @@ -35,23 +37,27 @@ pub struct GenerationCache { } impl GenerationCache { - const CURRENT_VERSION: u32 = 1; + const CURRENT_VERSION: u32 = 2; /// Create a new cache from current generation state 
pub fn new( commands: &[CommandInfo], structs: &HashMap, + events: &[EventInfo], config: &GenerateConfig, ) -> Result { let commands_hash = Self::hash_commands(commands)?; let structs_hash = Self::hash_structs(structs)?; + let events_hash = Self::hash_events(events)?; let config_hash = Self::hash_config(config)?; - let combined_hash = Self::combine_hashes(&commands_hash, &structs_hash, &config_hash)?; + let combined_hash = + Self::combine_hashes(&commands_hash, &structs_hash, &events_hash, &config_hash)?; Ok(Self { version: Self::CURRENT_VERSION, commands_hash, structs_hash, + events_hash, config_hash, combined_hash, }) @@ -84,6 +90,7 @@ impl GenerationCache { output_dir: P, commands: &[CommandInfo], structs: &HashMap, + events: &[EventInfo], config: &GenerateConfig, ) -> Result { // Try to load previous cache @@ -101,7 +108,7 @@ impl GenerationCache { } // Generate current cache - let current_cache = Self::new(commands, structs, config)?; + let current_cache = Self::new(commands, structs, events, config)?; // Compare combined hashes Ok(previous_cache.combined_hash != current_cache.combined_hash) @@ -118,7 +125,7 @@ impl GenerationCache { #[derive(Serialize)] struct CommandHashData<'a> { name: &'a str, - file_path: &'a str, + serde_rename_all: Option<&'a str>, parameters: Vec>, return_type: &'a str, is_async: bool, @@ -130,42 +137,75 @@ impl GenerationCache { name: &'a str, rust_type: &'a str, is_optional: bool, + serde_rename: Option<&'a str>, } #[derive(Serialize)] struct ChannelHashData<'a> { parameter_name: &'a str, message_type: &'a str, + serde_rename: Option<&'a str>, } - let hash_data: Vec = commands + let mut serialized_commands: Vec = commands .iter() - .map(|cmd| CommandHashData { - name: &cmd.name, - file_path: &cmd.file_path, - parameters: cmd - .parameters - .iter() - .map(|p| ParameterHashData { - name: &p.name, - rust_type: &p.rust_type, - is_optional: p.is_optional, - }) - .collect(), - return_type: &cmd.return_type, - is_async: cmd.is_async, - 
channels: cmd - .channels - .iter() - .map(|c| ChannelHashData { - parameter_name: &c.parameter_name, - message_type: &c.message_type, - }) - .collect(), + .map(|cmd| { + serde_json::to_string(&CommandHashData { + name: &cmd.name, + serde_rename_all: cmd + .serde_rename_all + .as_ref() + .map(|rule| rule.to_rename_all_str()), + parameters: cmd + .parameters + .iter() + .map(|p| ParameterHashData { + name: &p.name, + rust_type: &p.rust_type, + is_optional: p.is_optional, + serde_rename: p.serde_rename.as_deref(), + }) + .collect(), + return_type: &cmd.return_type, + is_async: cmd.is_async, + channels: cmd + .channels + .iter() + .map(|c| ChannelHashData { + parameter_name: &c.parameter_name, + message_type: &c.message_type, + serde_rename: c.serde_rename.as_deref(), + }) + .collect(), + }) }) - .collect(); + .collect::>()?; + serialized_commands.sort_unstable(); - let json = serde_json::to_string(&hash_data)?; + let json = serde_json::to_string(&serialized_commands)?; + Ok(Self::compute_hash(&json)) + } + + /// Generate a deterministic hash of events + fn hash_events(events: &[EventInfo]) -> Result { + #[derive(Serialize)] + struct EventHashData<'a> { + event_name: &'a str, + payload_type: &'a str, + } + + let mut serialized_events: Vec = events + .iter() + .map(|event| { + serde_json::to_string(&EventHashData { + event_name: &event.event_name, + payload_type: &event.payload_type, + }) + }) + .collect::>()?; + serialized_events.sort_unstable(); + + let json = serde_json::to_string(&serialized_events)?; Ok(Self::compute_hash(&json)) } @@ -174,9 +214,11 @@ impl GenerationCache { #[derive(Serialize)] struct StructHashData<'a> { name: &'a str, - file_path: &'a str, is_enum: bool, + serde_rename_all: Option<&'a str>, + serde_tag: Option<&'a str>, fields: Vec>, + enum_variants: Vec>, } #[derive(Serialize)] @@ -185,32 +227,62 @@ impl GenerationCache { rust_type: &'a str, is_optional: bool, is_public: bool, + validator_attributes: Option<&'a 
crate::models::ValidatorAttributes>, + serde_rename: Option<&'a str>, + type_structure: &'a crate::models::TypeStructure, } - // Sort by name for deterministic ordering - let mut sorted_structs: Vec<_> = structs.values().collect(); - sorted_structs.sort_by(|a, b| a.name.cmp(&b.name)); + #[derive(Serialize)] + struct EnumVariantHashData<'a> { + name: &'a str, + serde_rename: Option<&'a str>, + kind: &'a crate::models::EnumVariantKind, + } - let hash_data: Vec = sorted_structs - .iter() - .map(|s| StructHashData { - name: &s.name, - file_path: &s.file_path, - is_enum: s.is_enum, - fields: s - .fields - .iter() - .map(|f| FieldHashData { - name: &f.name, - rust_type: &f.rust_type, - is_optional: f.is_optional, - is_public: f.is_public, - }) - .collect(), + let mut serialized_structs: Vec = structs + .values() + .map(|s| { + serde_json::to_string(&StructHashData { + name: &s.name, + is_enum: s.is_enum, + serde_rename_all: s + .serde_rename_all + .as_ref() + .map(|rule| rule.to_rename_all_str()), + serde_tag: s.serde_tag.as_deref(), + fields: s + .fields + .iter() + .map(|f| FieldHashData { + name: &f.name, + rust_type: &f.rust_type, + is_optional: f.is_optional, + is_public: f.is_public, + validator_attributes: f.validator_attributes.as_ref(), + serde_rename: f.serde_rename.as_deref(), + type_structure: &f.type_structure, + }) + .collect(), + enum_variants: s + .enum_variants + .as_ref() + .map(|variants| { + variants + .iter() + .map(|variant| EnumVariantHashData { + name: &variant.name, + serde_rename: variant.serde_rename.as_deref(), + kind: &variant.kind, + }) + .collect() + }) + .unwrap_or_default(), + }) }) - .collect(); + .collect::>()?; + serialized_structs.sort_unstable(); - let json = serde_json::to_string(&hash_data)?; + let json = serde_json::to_string(&serialized_structs)?; Ok(Self::compute_hash(&json)) } @@ -220,15 +292,24 @@ impl GenerationCache { struct ConfigHashData<'a> { validation_library: &'a str, include_private: bool, - type_mappings: Option<&'a 
HashMap>, + type_mappings: Option>, default_parameter_case: &'a str, default_field_case: &'a str, } + let type_mappings = config.type_mappings.as_ref().map(|mappings| { + let mut canonical: Vec<_> = mappings + .iter() + .map(|(key, value)| (key.as_str(), value.as_str())) + .collect(); + canonical.sort_unstable(); + canonical + }); + let hash_data = ConfigHashData { validation_library: &config.validation_library, include_private: config.include_private.unwrap_or(false), - type_mappings: config.type_mappings.as_ref(), + type_mappings, default_parameter_case: &config.default_parameter_case, default_field_case: &config.default_field_case, }; @@ -238,8 +319,13 @@ impl GenerationCache { } /// Combine multiple hashes into a single hash - fn combine_hashes(commands: &str, structs: &str, config: &str) -> Result { - let combined = format!("{}{}{}", commands, structs, config); + fn combine_hashes( + commands: &str, + structs: &str, + events: &str, + config: &str, + ) -> Result { + let combined = format!("{}{}{}{}", commands, structs, events, config); Ok(Self::compute_hash(&combined)) } @@ -257,6 +343,11 @@ impl GenerationCache { #[cfg(test)] mod tests { use super::*; + use crate::models::{ + EnumVariantInfo, EnumVariantKind, FieldInfo, LengthConstraint, ParameterInfo, + TypeStructure, ValidatorAttributes, + }; + use serde_rename_rule::RenameRule; // Test utilities already imported from parent module use tempfile::TempDir; @@ -281,13 +372,23 @@ mod tests { CommandInfo::new_for_test(name, "test.rs", 1, vec![], "String", false, vec![]) } + fn create_test_event(name: &str) -> EventInfo { + EventInfo { + event_name: name.to_string(), + payload_type: "String".to_string(), + payload_type_structure: crate::models::TypeStructure::Primitive("string".to_string()), + file_path: "events.rs".to_string(), + line_number: 1, + } + } + #[test] fn test_cache_creation() { let commands = vec![create_test_command("test_command")]; let structs = HashMap::new(); let config = create_test_config(); - 
let cache = GenerationCache::new(&commands, &structs, &config).unwrap(); + let cache = GenerationCache::new(&commands, &structs, &[], &config).unwrap(); assert_eq!(cache.version, GenerationCache::CURRENT_VERSION); assert!(!cache.commands_hash.is_empty()); @@ -303,7 +404,7 @@ mod tests { let structs = HashMap::new(); let config = create_test_config(); - let cache = GenerationCache::new(&commands, &structs, &config).unwrap(); + let cache = GenerationCache::new(&commands, &structs, &[], &config).unwrap(); cache.save(temp_dir.path()).unwrap(); let loaded_cache = GenerationCache::load(temp_dir.path()).unwrap(); @@ -321,7 +422,7 @@ mod tests { let config = create_test_config(); let needs_regen = - GenerationCache::needs_regeneration(temp_dir.path(), &commands, &structs, &config) + GenerationCache::needs_regeneration(temp_dir.path(), &commands, &structs, &[], &config) .unwrap(); assert!(needs_regen); @@ -335,12 +436,12 @@ mod tests { let config = create_test_config(); // Save initial cache - let cache = GenerationCache::new(&commands, &structs, &config).unwrap(); + let cache = GenerationCache::new(&commands, &structs, &[], &config).unwrap(); cache.save(temp_dir.path()).unwrap(); // Check if regeneration needed with same data let needs_regen = - GenerationCache::needs_regeneration(temp_dir.path(), &commands, &structs, &config) + GenerationCache::needs_regeneration(temp_dir.path(), &commands, &structs, &[], &config) .unwrap(); assert!(!needs_regen); @@ -354,15 +455,20 @@ mod tests { let config = create_test_config(); // Save initial cache - let cache = GenerationCache::new(&commands, &structs, &config).unwrap(); + let cache = GenerationCache::new(&commands, &structs, &[], &config).unwrap(); cache.save(temp_dir.path()).unwrap(); // Change commands let new_commands = vec![create_test_command("different_command")]; - let needs_regen = - GenerationCache::needs_regeneration(temp_dir.path(), &new_commands, &structs, &config) - .unwrap(); + let needs_regen = 
GenerationCache::needs_regeneration( + temp_dir.path(), + &new_commands, + &structs, + &[], + &config, + ) + .unwrap(); assert!(needs_regen); } @@ -375,16 +481,21 @@ mod tests { let config = create_test_config(); // Save initial cache - let cache = GenerationCache::new(&commands, &structs, &config).unwrap(); + let cache = GenerationCache::new(&commands, &structs, &[], &config).unwrap(); cache.save(temp_dir.path()).unwrap(); // Change config let mut new_config = config; new_config.validation_library = "zod".to_string(); - let needs_regen = - GenerationCache::needs_regeneration(temp_dir.path(), &commands, &structs, &new_config) - .unwrap(); + let needs_regen = GenerationCache::needs_regeneration( + temp_dir.path(), + &commands, + &structs, + &[], + &new_config, + ) + .unwrap(); assert!(needs_regen); } @@ -395,8 +506,8 @@ mod tests { let structs = HashMap::new(); let config = create_test_config(); - let cache1 = GenerationCache::new(&commands, &structs, &config).unwrap(); - let cache2 = GenerationCache::new(&commands, &structs, &config).unwrap(); + let cache1 = GenerationCache::new(&commands, &structs, &[], &config).unwrap(); + let cache2 = GenerationCache::new(&commands, &structs, &[], &config).unwrap(); assert_eq!(cache1.combined_hash, cache2.combined_hash); assert_eq!(cache1.commands_hash, cache2.commands_hash); @@ -424,7 +535,7 @@ mod tests { // Should need regeneration due to version mismatch let needs_regen = - GenerationCache::needs_regeneration(temp_dir.path(), &commands, &structs, &config) + GenerationCache::needs_regeneration(temp_dir.path(), &commands, &structs, &[], &config) .unwrap(); assert!(needs_regen); @@ -436,7 +547,7 @@ mod tests { let structs: HashMap = HashMap::new(); let config = create_test_config(); - let cache = GenerationCache::new(&commands, &structs, &config).unwrap(); + let cache = GenerationCache::new(&commands, &structs, &[], &config).unwrap(); // Should still create valid hashes even with empty data 
assert!(!cache.commands_hash.is_empty()); @@ -498,14 +609,302 @@ mod tests { structs2.insert("StructB".to_string(), struct_b); structs2.insert("StructA".to_string(), struct_a); - let cache1 = GenerationCache::new(&commands, &structs1, &config).unwrap(); - let cache2 = GenerationCache::new(&commands, &structs2, &config).unwrap(); + let cache1 = GenerationCache::new(&commands, &structs1, &[], &config).unwrap(); + let cache2 = GenerationCache::new(&commands, &structs2, &[], &config).unwrap(); // Hash should be the same regardless of insertion order assert_eq!(cache1.structs_hash, cache2.structs_hash); assert_eq!(cache1.combined_hash, cache2.combined_hash); } + #[test] + fn command_hash_order_independence() { + let config = create_test_config(); + let structs = HashMap::new(); + + let commands1 = vec![ + create_test_command("alpha_command"), + create_test_command("beta_command"), + ]; + let commands2 = vec![ + create_test_command("beta_command"), + create_test_command("alpha_command"), + ]; + + let cache1 = GenerationCache::new(&commands1, &structs, &[], &config).unwrap(); + let cache2 = GenerationCache::new(&commands2, &structs, &[], &config).unwrap(); + + assert_eq!(cache1.commands_hash, cache2.commands_hash); + assert_eq!(cache1.combined_hash, cache2.combined_hash); + } + + #[test] + fn command_hash_ignores_source_location() { + let config = create_test_config(); + let structs = HashMap::new(); + + let command1 = CommandInfo::new_for_test( + "test_command", + "src/alpha.rs", + 10, + vec![], + "String", + false, + vec![], + ); + let command2 = CommandInfo::new_for_test( + "test_command", + "src/beta.rs", + 200, + vec![], + "String", + false, + vec![], + ); + + let cache1 = GenerationCache::new(&[command1], &structs, &[], &config).unwrap(); + let cache2 = GenerationCache::new(&[command2], &structs, &[], &config).unwrap(); + + assert_eq!(cache1.commands_hash, cache2.commands_hash); + assert_eq!(cache1.combined_hash, cache2.combined_hash); + } + + #[test] + fn 
event_hash_ignores_source_location() { + let config = create_test_config(); + let commands = vec![create_test_command("test_command")]; + let structs = HashMap::new(); + + let event1 = EventInfo { + event_name: "alpha-ready".to_string(), + payload_type: "String".to_string(), + payload_type_structure: crate::models::TypeStructure::Primitive("string".to_string()), + file_path: "src/alpha.rs".to_string(), + line_number: 10, + }; + let event2 = EventInfo { + event_name: "alpha-ready".to_string(), + payload_type: "String".to_string(), + payload_type_structure: crate::models::TypeStructure::Primitive("string".to_string()), + file_path: "src/beta.rs".to_string(), + line_number: 200, + }; + + let cache1 = GenerationCache::new(&commands, &structs, &[event1], &config).unwrap(); + let cache2 = GenerationCache::new(&commands, &structs, &[event2], &config).unwrap(); + + assert_eq!(cache1.events_hash, cache2.events_hash); + assert_eq!(cache1.combined_hash, cache2.combined_hash); + } + + #[test] + fn struct_hash_ignores_source_location() { + let config = create_test_config(); + let commands = vec![create_test_command("test_command")]; + + let struct1 = StructInfo { + name: "Payload".to_string(), + fields: vec![FieldInfo { + name: "value".to_string(), + rust_type: "String".to_string(), + is_optional: false, + is_public: true, + validator_attributes: None, + serde_rename: None, + type_structure: TypeStructure::Primitive("string".to_string()), + }], + file_path: "src/alpha.rs".to_string(), + is_enum: false, + serde_rename_all: None, + serde_tag: None, + enum_variants: None, + }; + let struct2 = StructInfo { + file_path: "src/beta.rs".to_string(), + ..struct1.clone() + }; + + let mut structs1 = HashMap::new(); + structs1.insert("Payload".to_string(), struct1); + + let mut structs2 = HashMap::new(); + structs2.insert("Payload".to_string(), struct2); + + let cache1 = GenerationCache::new(&commands, &structs1, &[], &config).unwrap(); + let cache2 = GenerationCache::new(&commands, 
&structs2, &[], &config).unwrap(); + + assert_eq!(cache1.structs_hash, cache2.structs_hash); + assert_eq!(cache1.combined_hash, cache2.combined_hash); + } + + #[test] + fn command_hash_changes_with_serde_metadata() { + let config = create_test_config(); + let structs = HashMap::new(); + + let mut command1 = CommandInfo::new_for_test( + "test_command", + "src/test.rs", + 10, + vec![ParameterInfo { + name: "user_id".to_string(), + rust_type: "String".to_string(), + is_optional: false, + type_structure: TypeStructure::Primitive("string".to_string()), + serde_rename: None, + }], + "String", + false, + vec![crate::models::ChannelInfo::new_for_test( + "progress_updates", + "String", + "test_command", + "src/test.rs", + 10, + )], + ); + let mut command2 = CommandInfo::new_for_test( + "test_command", + "src/test.rs", + 10, + vec![ParameterInfo { + name: "user_id".to_string(), + rust_type: "String".to_string(), + is_optional: false, + type_structure: TypeStructure::Primitive("string".to_string()), + serde_rename: Some("userIdExplicit".to_string()), + }], + "String", + false, + vec![crate::models::ChannelInfo::new_for_test( + "progress_updates", + "String", + "test_command", + "src/test.rs", + 10, + )], + ); + command1.serde_rename_all = Some(RenameRule::SnakeCase); + command2.channels[0].serde_rename = Some("progressUpdates".to_string()); + + let cache1 = GenerationCache::new(&[command1], &structs, &[], &config).unwrap(); + let cache2 = GenerationCache::new(&[command2], &structs, &[], &config).unwrap(); + + assert_ne!(cache1.commands_hash, cache2.commands_hash); + assert_ne!(cache1.combined_hash, cache2.combined_hash); + } + + #[test] + fn struct_hash_changes_with_field_metadata() { + let config = create_test_config(); + let commands = vec![create_test_command("test_command")]; + + let struct1 = StructInfo { + name: "Payload".to_string(), + fields: vec![FieldInfo { + name: "created_at".to_string(), + rust_type: "String".to_string(), + is_optional: false, + is_public: true, 
+ validator_attributes: None, + serde_rename: None, + type_structure: TypeStructure::Primitive("string".to_string()), + }], + file_path: "src/payload.rs".to_string(), + is_enum: false, + serde_rename_all: None, + serde_tag: None, + enum_variants: None, + }; + let struct2 = StructInfo { + fields: vec![FieldInfo { + name: "created_at".to_string(), + rust_type: "String".to_string(), + is_optional: false, + is_public: true, + validator_attributes: Some(ValidatorAttributes { + length: Some(LengthConstraint { + min: Some(1), + max: None, + message: Some("required".to_string()), + }), + range: None, + email: false, + url: false, + custom_message: Some("required".to_string()), + }), + serde_rename: Some("createdAt".to_string()), + type_structure: TypeStructure::Primitive("string".to_string()), + }], + serde_rename_all: Some(RenameRule::CamelCase), + ..struct1.clone() + }; + + let mut structs1 = HashMap::new(); + structs1.insert("Payload".to_string(), struct1); + + let mut structs2 = HashMap::new(); + structs2.insert("Payload".to_string(), struct2); + + let cache1 = GenerationCache::new(&commands, &structs1, &[], &config).unwrap(); + let cache2 = GenerationCache::new(&commands, &structs2, &[], &config).unwrap(); + + assert_ne!(cache1.structs_hash, cache2.structs_hash); + assert_ne!(cache1.combined_hash, cache2.combined_hash); + } + + #[test] + fn struct_hash_changes_with_enum_metadata() { + let config = create_test_config(); + let commands = vec![create_test_command("test_command")]; + + let base_variant = EnumVariantInfo { + name: "ReadyState".to_string(), + kind: EnumVariantKind::Struct(vec![FieldInfo { + name: "event_id".to_string(), + rust_type: "String".to_string(), + is_optional: false, + is_public: true, + validator_attributes: None, + serde_rename: None, + type_structure: TypeStructure::Primitive("string".to_string()), + }]), + serde_rename: None, + }; + let renamed_variant = EnumVariantInfo { + serde_rename: Some("ready_state".to_string()), + ..base_variant.clone() 
+ }; + + let enum1 = StructInfo { + name: "StatusEvent".to_string(), + fields: vec![], + file_path: "src/status.rs".to_string(), + is_enum: true, + serde_rename_all: None, + serde_tag: None, + enum_variants: Some(vec![base_variant]), + }; + let enum2 = StructInfo { + serde_rename_all: Some(RenameRule::SnakeCase), + serde_tag: Some("kind".to_string()), + enum_variants: Some(vec![renamed_variant]), + ..enum1.clone() + }; + + let mut structs1 = HashMap::new(); + structs1.insert("StatusEvent".to_string(), enum1); + + let mut structs2 = HashMap::new(); + structs2.insert("StatusEvent".to_string(), enum2); + + let cache1 = GenerationCache::new(&commands, &structs1, &[], &config).unwrap(); + let cache2 = GenerationCache::new(&commands, &structs2, &[], &config).unwrap(); + + assert_ne!(cache1.structs_hash, cache2.structs_hash); + assert_ne!(cache1.combined_hash, cache2.combined_hash); + } + #[test] fn test_needs_regeneration_with_corrupted_cache_file() { let temp_dir = TempDir::new().unwrap(); @@ -519,7 +918,7 @@ mod tests { // Should need regeneration because cache is unreadable let needs_regen = - GenerationCache::needs_regeneration(temp_dir.path(), &commands, &structs, &config) + GenerationCache::needs_regeneration(temp_dir.path(), &commands, &structs, &[], &config) .unwrap(); assert!(needs_regen); @@ -537,14 +936,62 @@ mod tests { let config2 = create_test_config(); // No type mappings - let cache1 = GenerationCache::new(&commands, &structs, &config1).unwrap(); - let cache2 = GenerationCache::new(&commands, &structs, &config2).unwrap(); + let cache1 = GenerationCache::new(&commands, &structs, &[], &config1).unwrap(); + let cache2 = GenerationCache::new(&commands, &structs, &[], &config2).unwrap(); // Config hash should differ when type_mappings differ assert_ne!(cache1.config_hash, cache2.config_hash); assert_ne!(cache1.combined_hash, cache2.combined_hash); } + #[test] + fn config_hash_type_mappings_order_independence() { + let commands = 
vec![create_test_command("test_command")]; + let structs = HashMap::new(); + + let mut config1 = create_test_config(); + let mut mappings1 = HashMap::new(); + mappings1.insert("First".to_string(), "string".to_string()); + mappings1.insert("Second".to_string(), "number".to_string()); + config1.type_mappings = Some(mappings1); + + let mut config2 = create_test_config(); + let mut mappings2 = HashMap::new(); + mappings2.insert("Second".to_string(), "number".to_string()); + mappings2.insert("First".to_string(), "string".to_string()); + config2.type_mappings = Some(mappings2); + + let cache1 = GenerationCache::new(&commands, &structs, &[], &config1).unwrap(); + let cache2 = GenerationCache::new(&commands, &structs, &[], &config2).unwrap(); + + assert_eq!(cache1.config_hash, cache2.config_hash); + assert_eq!(cache1.combined_hash, cache2.combined_hash); + } + + #[test] + fn events_change_requires_regeneration() { + let temp_dir = TempDir::new().unwrap(); + let commands = vec![create_test_command("test_command")]; + let structs = HashMap::new(); + let config = create_test_config(); + let initial_events = vec![create_test_event("alpha-ready")]; + let changed_events = vec![create_test_event("beta-ready")]; + + let cache = GenerationCache::new(&commands, &structs, &initial_events, &config).unwrap(); + cache.save(temp_dir.path()).unwrap(); + + let needs_regen = GenerationCache::needs_regeneration( + temp_dir.path(), + &commands, + &structs, + &changed_events, + &config, + ) + .unwrap(); + + assert!(needs_regen); + } + #[test] fn test_cache_with_channels() { use crate::models::ChannelInfo; @@ -566,9 +1013,9 @@ mod tests { let cmd_without_channel = create_test_command("test_command"); - let cache_with = GenerationCache::new(&[cmd_with_channel], &structs, &config).unwrap(); + let cache_with = GenerationCache::new(&[cmd_with_channel], &structs, &[], &config).unwrap(); let cache_without = - GenerationCache::new(&[cmd_without_channel], &structs, &config).unwrap(); + 
GenerationCache::new(&[cmd_without_channel], &structs, &[], &config).unwrap(); // Commands hash should differ when channels differ assert_ne!(cache_with.commands_hash, cache_without.commands_hash); @@ -583,7 +1030,7 @@ mod tests { let structs = HashMap::new(); let config = create_test_config(); - let cache = GenerationCache::new(&commands, &structs, &config).unwrap(); + let cache = GenerationCache::new(&commands, &structs, &[], &config).unwrap(); // Should create nested directories cache.save(&nested_output).unwrap(); diff --git a/src/build/mod.rs b/src/build/mod.rs index a595d7f..c7a648b 100644 --- a/src/build/mod.rs +++ b/src/build/mod.rs @@ -200,6 +200,7 @@ impl BuildSystem { // Check cache to see if regeneration is needed (unless force is set) let discovered_structs = analyzer.get_discovered_structs(); + let discovered_events = analyzer.get_discovered_events(); if config.should_force() { self.logger.verbose("Force flag set, regenerating bindings"); } else { @@ -207,6 +208,7 @@ impl BuildSystem { &config.output_path, &commands, discovered_structs, + discovered_events, config, ) { Ok(false) => { @@ -252,7 +254,7 @@ impl BuildSystem { } // Save cache after successful generation - let cache = GenerationCache::new(&commands, discovered_structs, config)?; + let cache = GenerationCache::new(&commands, discovered_structs, discovered_events, config)?; if let Err(e) = cache.save(&config.output_path) { self.logger .warning(&format!("Failed to save generation cache: {}", e)); @@ -292,8 +294,40 @@ impl BuildSystem { #[cfg(test)] mod tests { use super::*; + use crate::interface::config::GenerateConfig; + use std::path::Path; use tempfile::TempDir; + fn create_build_config(project_path: &Path, output_path: &Path) -> GenerateConfig { + GenerateConfig { + project_path: project_path.to_string_lossy().to_string(), + output_path: output_path.to_string_lossy().to_string(), + validation_library: "none".to_string(), + verbose: Some(false), + visualize_deps: Some(false), + 
include_private: Some(false), + type_mappings: None, + exclude_patterns: None, + include_patterns: None, + default_parameter_case: "camelCase".to_string(), + default_field_case: "snake_case".to_string(), + force: Some(false), + } + } + + fn run_generation(build_system: &BuildSystem, config: &GenerateConfig) -> Vec { + let generated_files = build_system.generate_bindings(config).unwrap(); + let mut output_manager = OutputManager::new(&config.output_path); + output_manager + .finalize_generation(&generated_files) + .unwrap(); + generated_files + } + + fn read_generated(output_path: &Path, file_name: &str) -> String { + std::fs::read_to_string(output_path.join(file_name)).unwrap() + } + #[test] fn test_build_system_creation() { let build_system = BuildSystem::new(true, false); @@ -328,18 +362,16 @@ mod tests { std::fs::create_dir_all(&custom_src_path).unwrap(); // Create a tauri.conf.json with typegen plugin configuration - let config_content = format!( - r#"{{ - "plugins": {{ - "typegen": {{ - "projectPath": "{}", + let config_content = serde_json::json!({ + "plugins": { + "typegen": { + "projectPath": custom_src_path.to_string_lossy().to_string(), "outputPath": "./custom-output", "validationLibrary": "zod" - }} - }} - }}"#, - custom_src_path.to_string_lossy() - ); + } + } + }) + .to_string(); std::fs::write(&tauri_config_path, &config_content).unwrap(); let project_info = ProjectInfo { @@ -365,14 +397,12 @@ mod tests { std::fs::create_dir_all(&project_path).unwrap(); // Create a standalone typegen.json configuration - let config_content = format!( - r#"{{ - "project_path": "{}", + let config_content = serde_json::json!({ + "project_path": project_path.to_string_lossy().to_string(), "output_path": "./standalone-output", "validation_library": "zod" - }}"#, - project_path.to_string_lossy() - ); + }) + .to_string(); std::fs::write(&typegen_config_path, config_content).unwrap(); let project_info = ProjectInfo { @@ -432,4 +462,354 @@ mod tests { .logger 
.should_log(crate::interface::output::LogLevel::Debug)); } + + #[test] + fn test_generate_bindings_skips_unrelated_rust_changes() { + let temp_dir = TempDir::new().unwrap(); + let project_path = temp_dir.path().join("src-tauri"); + let output_path = temp_dir.path().join("generated"); + std::fs::create_dir_all(&project_path).unwrap(); + + let source_file = project_path.join("main.rs"); + std::fs::write( + &source_file, + r#" + use serde::{Deserialize, Serialize}; + use tauri::Manager; + + #[derive(Debug, Clone, Serialize, Deserialize)] + pub struct Payload { + pub value: String, + } + + fn helper_text() -> &'static str { + "one" + } + + #[tauri::command] + pub fn fetch_payload() -> Result { + Ok(Payload { + value: helper_text().to_string(), + }) + } + + #[tauri::command] + pub fn emit_event(app: tauri::AppHandle) -> Result<(), String> { + app.emit("stable-event", Payload { + value: helper_text().to_string(), + }).ok(); + Ok(()) + } + "#, + ) + .unwrap(); + + let config = create_build_config(&project_path, &output_path); + let build_system = BuildSystem::new(false, false); + + run_generation(&build_system, &config); + + let commands_before = read_generated(&output_path, "commands.ts"); + let types_before = read_generated(&output_path, "types.ts"); + let events_before = read_generated(&output_path, "events.ts"); + let index_before = read_generated(&output_path, "index.ts"); + + std::fs::write( + &source_file, + r#" + use serde::{Deserialize, Serialize}; + use tauri::Manager; + + #[derive(Debug, Clone, Serialize, Deserialize)] + pub struct Payload { + pub value: String, + } + + fn helper_text() -> &'static str { + "two" + } + + #[tauri::command] + pub fn fetch_payload() -> Result { + Ok(Payload { + value: helper_text().to_string(), + }) + } + + #[tauri::command] + pub fn emit_event(app: tauri::AppHandle) -> Result<(), String> { + app.emit("stable-event", Payload { + value: helper_text().to_string(), + }).ok(); + Ok(()) + } + "#, + ) + .unwrap(); + + 
run_generation(&build_system, &config); + + assert_eq!(commands_before, read_generated(&output_path, "commands.ts")); + assert_eq!(types_before, read_generated(&output_path, "types.ts")); + assert_eq!(events_before, read_generated(&output_path, "events.ts")); + assert_eq!(index_before, read_generated(&output_path, "index.ts")); + } + + #[test] + fn test_generate_bindings_skips_source_location_only_changes() { + let temp_dir = TempDir::new().unwrap(); + let project_path = temp_dir.path().join("src-tauri"); + let output_path = temp_dir.path().join("generated"); + std::fs::create_dir_all(&project_path).unwrap(); + + let source_file = project_path.join("main.rs"); + std::fs::write( + &source_file, + r#" + use serde::{Deserialize, Serialize}; + use tauri::Manager; + + #[derive(Debug, Clone, Serialize, Deserialize)] + pub struct Payload { + pub value: String, + } + + #[tauri::command] + pub fn fetch_payload() -> Result { + Ok(Payload { + value: "one".to_string(), + }) + } + + #[tauri::command] + pub fn emit_event(app: tauri::AppHandle) -> Result<(), String> { + app.emit("stable-event", Payload { + value: "one".to_string(), + }).ok(); + Ok(()) + } + "#, + ) + .unwrap(); + + let config = create_build_config(&project_path, &output_path); + let build_system = BuildSystem::new(false, false); + + run_generation(&build_system, &config); + + let commands_before = read_generated(&output_path, "commands.ts"); + let types_before = read_generated(&output_path, "types.ts"); + let events_before = read_generated(&output_path, "events.ts"); + + std::fs::write( + &source_file, + r#" + use serde::{Deserialize, Serialize}; + use tauri::Manager; + + // Unrelated comment that shifts every discovered item downward. + // The generated bindings should stay byte-stable. 
+ + #[derive(Debug, Clone, Serialize, Deserialize)] + pub struct Payload { + pub value: String, + } + + #[tauri::command] + pub fn fetch_payload() -> Result { + Ok(Payload { + value: "one".to_string(), + }) + } + + #[tauri::command] + pub fn emit_event(app: tauri::AppHandle) -> Result<(), String> { + app.emit("stable-event", Payload { + value: "one".to_string(), + }).ok(); + Ok(()) + } + "#, + ) + .unwrap(); + + run_generation(&build_system, &config); + + assert_eq!(commands_before, read_generated(&output_path, "commands.ts")); + assert_eq!(types_before, read_generated(&output_path, "types.ts")); + assert_eq!(events_before, read_generated(&output_path, "events.ts")); + } + + #[test] + fn test_generate_bindings_regenerates_when_commands_change() { + let temp_dir = TempDir::new().unwrap(); + let project_path = temp_dir.path().join("src-tauri"); + let output_path = temp_dir.path().join("generated"); + std::fs::create_dir_all(&project_path).unwrap(); + + let source_file = project_path.join("main.rs"); + std::fs::write( + &source_file, + r#" + #[tauri::command] + pub fn first_command() -> Result { + Ok("one".to_string()) + } + "#, + ) + .unwrap(); + + let config = create_build_config(&project_path, &output_path); + let build_system = BuildSystem::new(false, false); + + run_generation(&build_system, &config); + let commands_before = read_generated(&output_path, "commands.ts"); + + std::fs::write( + &source_file, + r#" + #[tauri::command] + pub fn second_command() -> Result { + Ok("two".to_string()) + } + "#, + ) + .unwrap(); + + run_generation(&build_system, &config); + let commands_after = read_generated(&output_path, "commands.ts"); + + assert_ne!(commands_before, commands_after); + assert!(commands_after.contains("secondCommand")); + assert!(!commands_after.contains("firstCommand")); + } + + #[test] + fn test_generate_bindings_regenerates_when_structs_change() { + let temp_dir = TempDir::new().unwrap(); + let project_path = temp_dir.path().join("src-tauri"); + let 
output_path = temp_dir.path().join("generated"); + std::fs::create_dir_all(&project_path).unwrap(); + + let source_file = project_path.join("main.rs"); + std::fs::write( + &source_file, + r#" + use serde::{Deserialize, Serialize}; + + #[derive(Debug, Clone, Serialize, Deserialize)] + pub struct Payload { + pub value: String, + } + + #[tauri::command] + pub fn fetch_payload() -> Result { + Ok(Payload { + value: "one".to_string(), + }) + } + "#, + ) + .unwrap(); + + let config = create_build_config(&project_path, &output_path); + let build_system = BuildSystem::new(false, false); + + run_generation(&build_system, &config); + let types_before = read_generated(&output_path, "types.ts"); + + std::fs::write( + &source_file, + r#" + use serde::{Deserialize, Serialize}; + + #[derive(Debug, Clone, Serialize, Deserialize)] + pub struct Payload { + pub value: String, + pub count: i32, + } + + #[tauri::command] + pub fn fetch_payload() -> Result { + Ok(Payload { + value: "one".to_string(), + count: 2, + }) + } + "#, + ) + .unwrap(); + + run_generation(&build_system, &config); + let types_after = read_generated(&output_path, "types.ts"); + + assert_ne!(types_before, types_after); + assert!(types_after.contains("count: number")); + } + + #[test] + fn test_generate_bindings_regenerates_when_events_change() { + let temp_dir = TempDir::new().unwrap(); + let project_path = temp_dir.path().join("src-tauri"); + let output_path = temp_dir.path().join("generated"); + std::fs::create_dir_all(&project_path).unwrap(); + + let source_file = project_path.join("main.rs"); + std::fs::write( + &source_file, + r#" + use serde::{Deserialize, Serialize}; + use tauri::Manager; + + #[derive(Debug, Clone, Serialize, Deserialize)] + pub struct Payload { + pub value: String, + } + + #[tauri::command] + pub fn emit_event(app: tauri::AppHandle) -> Result<(), String> { + app.emit("first-event", Payload { + value: "one".to_string(), + }).ok(); + Ok(()) + } + "#, + ) + .unwrap(); + + let config = 
create_build_config(&project_path, &output_path); + + let build_system = BuildSystem::new(false, false); + run_generation(&build_system, &config); + let events_before = read_generated(&output_path, "events.ts"); + + std::fs::write( + &source_file, + r#" + use serde::{Deserialize, Serialize}; + use tauri::Manager; + + #[derive(Debug, Clone, Serialize, Deserialize)] + pub struct Payload { + pub value: String, + } + + #[tauri::command] + pub fn emit_event(app: tauri::AppHandle) -> Result<(), String> { + app.emit("second-event", Payload { + value: "two".to_string(), + }).ok(); + Ok(()) + } + "#, + ) + .unwrap(); + + run_generation(&build_system, &config); + + let events_after = read_generated(&output_path, "events.ts"); + assert_ne!(events_before, events_after); + assert!(events_after.contains("second-event")); + assert!(!events_after.contains("first-event")); + } } diff --git a/src/generators/base/file_writer.rs b/src/generators/base/file_writer.rs index 0fe9f0d..6b9b48f 100644 --- a/src/generators/base/file_writer.rs +++ b/src/generators/base/file_writer.rs @@ -94,22 +94,98 @@ impl FileWriter { mod tests { use super::*; use std::fs; + use std::path::{Path, PathBuf}; + use std::sync::atomic::{AtomicU64, Ordering}; + use std::sync::{Arc, Mutex}; + use std::thread; + use std::time::{Duration, SystemTime, UNIX_EPOCH}; + + static TEMP_DIR_COUNTER: AtomicU64 = AtomicU64::new(0); - // Helper to create a unique temp directory for tests fn temp_dir() -> String { - use std::time::{SystemTime, UNIX_EPOCH}; let timestamp = SystemTime::now() .duration_since(UNIX_EPOCH) .unwrap() .as_nanos(); - let dir = format!("./test_output_{}_{}", std::process::id(), timestamp); - let _ = fs::remove_dir_all(&dir); // Clean up from previous runs - dir + let counter = TEMP_DIR_COUNTER.fetch_add(1, Ordering::Relaxed); + format!( + "./test_output_{}_{}_{}", + std::process::id(), + timestamp, + counter + ) + } + + struct TestDir { + path: PathBuf, + } + + impl TestDir { + fn new() -> Self { + let 
path = PathBuf::from(temp_dir()); + fs::create_dir_all(&path).unwrap(); + Self { path } + } + + fn path(&self) -> &Path { + &self.path + } + + fn path_str(&self) -> &str { + self.path.to_str().unwrap() + } + } + + impl Drop for TestDir { + fn drop(&mut self) { + for attempt in 0..5 { + match fs::remove_dir_all(&self.path) { + Ok(()) => return, + Err(err) if err.kind() == std::io::ErrorKind::NotFound => return, + Err(_) if attempt < 4 => thread::sleep(Duration::from_millis(10)), + Err(err) => panic!( + "failed to remove test directory {}: {err}", + self.path.display() + ), + } + } + } } - // Helper to cleanup temp directory - fn cleanup_dir(dir: &str) { - let _ = fs::remove_dir_all(dir); + #[test] + fn test_temp_dir_helper_is_unique_under_concurrency() { + let seen = Arc::new(Mutex::new(std::collections::HashSet::new())); + let mut handles = Vec::new(); + + for _ in 0..32 { + let seen = Arc::clone(&seen); + handles.push(thread::spawn(move || { + for _ in 0..1000 { + let dir = temp_dir(); + let mut guard = seen.lock().unwrap(); + assert!( + guard.insert(dir), + "temp_dir helper returned a duplicate path" + ); + } + })); + } + + for handle in handles { + handle.join().unwrap(); + } + } + + #[test] + fn test_test_dir_cleans_up_on_drop() { + let path = { + let dir = TestDir::new(); + let path = dir.path().to_path_buf(); + assert!(path.exists()); + path + }; + + assert!(!path.exists(), "test directory should be removed on drop"); } mod initialization { @@ -117,29 +193,27 @@ mod tests { #[test] fn test_new_creates_directory() { - let dir = temp_dir(); - let writer = FileWriter::new(&dir); + let dir = TestDir::new(); + let writer = FileWriter::new(dir.path_str()); assert!(writer.is_ok()); - assert!(Path::new(&dir).exists()); - cleanup_dir(&dir); + assert!(dir.path().exists()); } #[test] fn test_new_with_nested_path() { - let dir = format!("{}/nested/path", temp_dir()); - let writer = FileWriter::new(&dir); + let root = TestDir::new(); + let dir = 
root.path().join("nested").join("path"); + let writer = FileWriter::new(dir.to_str().unwrap()); assert!(writer.is_ok()); - assert!(Path::new(&dir).exists()); - cleanup_dir(&temp_dir()); + assert!(dir.exists()); } #[test] fn test_new_with_existing_directory() { - let dir = temp_dir(); - fs::create_dir_all(&dir).unwrap(); - let writer = FileWriter::new(&dir); + let dir = TestDir::new(); + fs::create_dir_all(dir.path()).unwrap(); + let writer = FileWriter::new(dir.path_str()); assert!(writer.is_ok()); - cleanup_dir(&dir); } } @@ -148,74 +222,67 @@ mod tests { #[test] fn test_write_typescript_file() { - let dir = temp_dir(); - let mut writer = FileWriter::new(&dir).unwrap(); + let dir = TestDir::new(); + let mut writer = FileWriter::new(dir.path_str()).unwrap(); let result = writer.write_typescript_file("test.ts", "export const x = 1;"); assert!(result.is_ok()); - let file_path = format!("{}/test.ts", dir); - assert!(Path::new(&file_path).exists()); + let file_path = dir.path().join("test.ts"); + assert!(file_path.exists()); let content = fs::read_to_string(&file_path).unwrap(); assert_eq!(content, "export const x = 1;"); - - cleanup_dir(&dir); } #[test] fn test_write_types_file() { - let dir = temp_dir(); - let mut writer = FileWriter::new(&dir).unwrap(); + let dir = TestDir::new(); + let mut writer = FileWriter::new(dir.path_str()).unwrap(); let result = writer.write_types_file("export type User = { name: string };"); assert!(result.is_ok()); assert!(writer.file_exists("types.ts")); - cleanup_dir(&dir); } #[test] fn test_write_commands_file() { - let dir = temp_dir(); - let mut writer = FileWriter::new(&dir).unwrap(); + let dir = TestDir::new(); + let mut writer = FileWriter::new(dir.path_str()).unwrap(); let result = writer.write_commands_file("export const commands = {};"); assert!(result.is_ok()); assert!(writer.file_exists("commands.ts")); - cleanup_dir(&dir); } #[test] fn test_write_index_file() { - let dir = temp_dir(); - let mut writer = 
FileWriter::new(&dir).unwrap(); + let dir = TestDir::new(); + let mut writer = FileWriter::new(dir.path_str()).unwrap(); let result = writer.write_index_file("export * from './types';"); assert!(result.is_ok()); assert!(writer.file_exists("index.ts")); - cleanup_dir(&dir); } #[test] fn test_write_schemas_file() { - let dir = temp_dir(); - let mut writer = FileWriter::new(&dir).unwrap(); + let dir = TestDir::new(); + let mut writer = FileWriter::new(dir.path_str()).unwrap(); let result = writer.write_schemas_file("import { z } from 'zod';"); assert!(result.is_ok()); assert!(writer.file_exists("schemas.ts")); - cleanup_dir(&dir); } #[test] fn test_write_events_file() { - let dir = temp_dir(); - let mut writer = FileWriter::new(&dir).unwrap(); + let dir = TestDir::new(); + let mut writer = FileWriter::new(dir.path_str()).unwrap(); let result = writer.write_events_file("export const events = {};"); assert!(result.is_ok()); assert!(writer.file_exists("events.ts")); - cleanup_dir(&dir); } #[test] fn test_write_multiple_files() { - let dir = temp_dir(); - let mut writer = FileWriter::new(&dir).unwrap(); + let dir = TestDir::new(); + let mut writer = FileWriter::new(dir.path_str()).unwrap(); writer.write_types_file("types").unwrap(); writer.write_commands_file("commands").unwrap(); @@ -231,8 +298,6 @@ mod tests { assert!(writer .get_generated_files() .contains(&"index.ts".to_string())); - - cleanup_dir(&dir); } } @@ -241,40 +306,35 @@ mod tests { #[test] fn test_get_generated_files_empty() { - let dir = temp_dir(); - let writer = FileWriter::new(&dir).unwrap(); + let dir = TestDir::new(); + let writer = FileWriter::new(dir.path_str()).unwrap(); assert!(writer.get_generated_files().is_empty()); - cleanup_dir(&dir); } #[test] fn test_get_generated_files_after_writing() { - let dir = temp_dir(); - let mut writer = FileWriter::new(&dir).unwrap(); + let dir = TestDir::new(); + let mut writer = FileWriter::new(dir.path_str()).unwrap(); 
writer.write_types_file("content").unwrap(); let files = writer.get_generated_files(); assert_eq!(files.len(), 1); assert_eq!(files[0], "types.ts"); - - cleanup_dir(&dir); } #[test] fn test_get_output_path() { - let dir = temp_dir(); - let writer = FileWriter::new(&dir).unwrap(); - assert_eq!(writer.get_output_path(), dir); - cleanup_dir(&dir); + let dir = TestDir::new(); + let writer = FileWriter::new(dir.path_str()).unwrap(); + assert_eq!(writer.get_output_path(), dir.path_str()); } #[test] fn test_get_file_path() { - let dir = temp_dir(); - let writer = FileWriter::new(&dir).unwrap(); + let dir = TestDir::new(); + let writer = FileWriter::new(dir.path_str()).unwrap(); let path = writer.get_file_path("test.ts"); - assert_eq!(path, format!("{}/test.ts", dir)); - cleanup_dir(&dir); + assert_eq!(path, format!("{}/test.ts", dir.path_str())); } } @@ -283,60 +343,54 @@ mod tests { #[test] fn test_file_exists_false() { - let dir = temp_dir(); - let writer = FileWriter::new(&dir).unwrap(); + let dir = TestDir::new(); + let writer = FileWriter::new(dir.path_str()).unwrap(); assert!(!writer.file_exists("nonexistent.ts")); - cleanup_dir(&dir); } #[test] fn test_file_exists_true() { - let dir = temp_dir(); - let mut writer = FileWriter::new(&dir).unwrap(); + let dir = TestDir::new(); + let mut writer = FileWriter::new(dir.path_str()).unwrap(); writer.write_types_file("content").unwrap(); assert!(writer.file_exists("types.ts")); - cleanup_dir(&dir); } #[test] fn test_delete_file() { - let dir = temp_dir(); - let mut writer = FileWriter::new(&dir).unwrap(); + let dir = TestDir::new(); + let mut writer = FileWriter::new(dir.path_str()).unwrap(); writer.write_types_file("content").unwrap(); assert!(writer.file_exists("types.ts")); let result = writer.delete_file("types.ts"); assert!(result.is_ok()); assert!(!writer.file_exists("types.ts")); - - cleanup_dir(&dir); } #[test] fn test_delete_nonexistent_file() { - let dir = temp_dir(); - let writer = FileWriter::new(&dir).unwrap(); 
+ let dir = TestDir::new(); + let writer = FileWriter::new(dir.path_str()).unwrap(); let result = writer.delete_file("nonexistent.ts"); assert!(result.is_ok()); // Should not error - cleanup_dir(&dir); } #[test] fn test_ensure_directory_exists() { - let dir = format!("{}/ensure_test", temp_dir()); - let result = FileWriter::ensure_directory_exists(&dir); + let root = TestDir::new(); + let dir = root.path().join("ensure_test"); + let result = FileWriter::ensure_directory_exists(dir.to_str().unwrap()); assert!(result.is_ok()); - assert!(Path::new(&dir).exists()); - cleanup_dir(&temp_dir()); + assert!(dir.exists()); } #[test] fn test_ensure_directory_exists_already_exists() { - let dir = temp_dir(); - fs::create_dir_all(&dir).unwrap(); - let result = FileWriter::ensure_directory_exists(&dir); + let dir = TestDir::new(); + fs::create_dir_all(dir.path()).unwrap(); + let result = FileWriter::ensure_directory_exists(dir.path_str()); assert!(result.is_ok()); - cleanup_dir(&dir); } } @@ -345,21 +399,19 @@ mod tests { #[test] fn test_write_empty_content() { - let dir = temp_dir(); - let mut writer = FileWriter::new(&dir).unwrap(); + let dir = TestDir::new(); + let mut writer = FileWriter::new(dir.path_str()).unwrap(); let result = writer.write_typescript_file("empty.ts", ""); assert!(result.is_ok()); let content = fs::read_to_string(writer.get_file_path("empty.ts")).unwrap(); assert_eq!(content, ""); - - cleanup_dir(&dir); } #[test] fn test_overwrite_existing_file() { - let dir = temp_dir(); - let mut writer = FileWriter::new(&dir).unwrap(); + let dir = TestDir::new(); + let mut writer = FileWriter::new(dir.path_str()).unwrap(); writer.write_types_file("first").unwrap(); writer.write_types_file("second").unwrap(); @@ -369,14 +421,12 @@ mod tests { // File should only appear once in generated_files list assert_eq!(writer.get_generated_files().len(), 2); - - cleanup_dir(&dir); } #[test] fn test_write_large_content() { - let dir = temp_dir(); - let mut writer = 
FileWriter::new(&dir).unwrap(); + let dir = TestDir::new(); + let mut writer = FileWriter::new(dir.path_str()).unwrap(); let large_content = "x".repeat(100_000); let result = writer.write_typescript_file("large.ts", &large_content); @@ -384,8 +434,6 @@ mod tests { let content = fs::read_to_string(writer.get_file_path("large.ts")).unwrap(); assert_eq!(content.len(), 100_000); - - cleanup_dir(&dir); } } } diff --git a/src/generators/mod.rs b/src/generators/mod.rs index 995a158..80df821 100644 --- a/src/generators/mod.rs +++ b/src/generators/mod.rs @@ -175,9 +175,16 @@ impl TypeCollector { config: &GenerateConfig, ) -> Vec { let type_resolver = analyzer.get_type_resolver(); - - commands - .iter() + let mut sorted_commands: Vec<_> = commands.iter().collect(); + sorted_commands.sort_by(|a, b| { + a.name + .cmp(&b.name) + .then_with(|| a.file_path.cmp(&b.file_path)) + .then_with(|| a.line_number.cmp(&b.line_number)) + }); + + sorted_commands + .into_iter() .map(|cmd| { CommandContext::new(config).from_command_info(cmd, visitor, &|rust_type: &str| { type_resolver.borrow_mut().parse_type_structure(rust_type) @@ -195,9 +202,17 @@ impl TypeCollector { config: &GenerateConfig, ) -> Vec { let type_resolver = analyzer.get_type_resolver(); - - events - .iter() + let mut sorted_events: Vec<_> = events.iter().collect(); + sorted_events.sort_by(|a, b| { + a.event_name + .cmp(&b.event_name) + .then_with(|| a.file_path.cmp(&b.file_path)) + .then_with(|| a.line_number.cmp(&b.line_number)) + .then_with(|| a.payload_type.cmp(&b.payload_type)) + }); + + sorted_events + .into_iter() .map(|event| { EventContext::new(config).from_event_info(event, visitor, &|rust_type: &str| { type_resolver.borrow_mut().parse_type_structure(rust_type) @@ -213,8 +228,15 @@ impl TypeCollector { visitor: &V, config: &GenerateConfig, ) -> Vec { - used_structs - .iter() + let mut sorted_structs: Vec<_> = used_structs.iter().collect(); + sorted_structs.sort_by(|(name_a, struct_a), (name_b, struct_b)| { + name_a + 
.cmp(name_b) + .then_with(|| struct_a.file_path.cmp(&struct_b.file_path)) + }); + + sorted_structs + .into_iter() .map(|(name, struct_info)| { StructContext::new(config).from_struct_info(name, struct_info, visitor) }) diff --git a/src/generators/ts/generator.rs b/src/generators/ts/generator.rs index 27496fe..64992a1 100644 --- a/src/generators/ts/generator.rs +++ b/src/generators/ts/generator.rs @@ -37,21 +37,26 @@ impl TypeScriptBindingsGenerator { let visitor = TypeScriptVisitor::with_config(config); // Convert structs to context wrappers - let struct_context = self + let struct_contexts = self .collector .create_struct_contexts(used_structs, &visitor, config); // Convert commands to context wrappers - let command_context = self + let command_contexts = self .collector .create_command_contexts(commands, &visitor, analyzer, config); + let param_commands = command_contexts + .iter() + .filter(|command| !command.parameters.is_empty() || !command.channels.is_empty()) + .cloned() + .collect::>(); // Render main types.ts template let mut context = Context::new(); context.insert("header", &self.generate_file_header()); context.insert("has_channels", &has_channels); - context.insert("structs", &struct_context); - context.insert("commands", &command_context); + context.insert("structs", &struct_contexts); + context.insert("param_commands", ¶m_commands); self.render("typescript/types.ts.tera", &context) .unwrap_or_else(|e| { @@ -89,9 +94,14 @@ impl TypeScriptBindingsGenerator { /// Generate index.ts file fn generate_index_file(&self, generated_files: &[String]) -> String { + let modules = generated_files + .iter() + .filter(|file| file.as_str() != "index.ts") + .cloned() + .collect::>(); let mut context = Context::new(); context.insert("header", &self.generate_file_header()); - context.insert("files", generated_files); + context.insert("modules", &modules); self.render("typescript/index.ts.tera", &context) .unwrap_or_else(|e| { @@ -209,6 +219,10 @@ impl Default for 
TypeScriptBindingsGenerator { #[cfg(test)] mod tests { use super::*; + use crate::analysis::CommandAnalyzer; + use crate::models::{EventInfo, FieldInfo, StructInfo, TypeStructure}; + use crate::GenerateConfig; + use std::collections::HashMap; mod initialization { use super::*; @@ -290,6 +304,7 @@ mod tests { mod helper_methods { use super::*; + use crate::models::{ParameterInfo, TypeStructure}; #[test] fn test_generate_index_file_with_empty_files() { @@ -306,5 +321,331 @@ mod tests { let result = gen.generate_index_file(&files); assert!(!result.is_empty()); } + + #[test] + fn test_generate_index_file_skips_index_without_blank_lines() { + let gen = TypeScriptBindingsGenerator::new(); + let files = vec![ + "types.ts".to_string(), + "index.ts".to_string(), + "commands.ts".to_string(), + ]; + let result = result_without_timestamp(&gen.generate_index_file(&files)); + + assert!(result.contains(" */\n\nexport * from './types';")); + assert!(result.contains("export * from './types';\nexport * from './commands';")); + assert!(!result.contains("export * from './types';\n\nexport * from './commands';")); + } + + #[test] + fn test_generate_command_bindings_avoid_blank_lines_between_functions() { + let gen = TypeScriptBindingsGenerator::new(); + let analyzer = CommandAnalyzer::new(); + let config = GenerateConfig::default(); + let commands = vec![ + CommandInfo::new_for_test( + "alpha_command", + "a.rs", + 1, + vec![ParameterInfo { + name: "value".to_string(), + rust_type: "String".to_string(), + is_optional: false, + type_structure: TypeStructure::Primitive("string".to_string()), + serde_rename: None, + }], + "Alpha", + false, + vec![], + ), + CommandInfo::new_for_test("beta_command", "b.rs", 1, vec![], "Beta", false, vec![]), + ]; + let rendered = result_without_timestamp( + &gen.generate_command_bindings(&commands, &analyzer, &config), + ); + + assert!( + rendered.contains( + "import * as types from './types';\n\nexport async function alphaCommand" + ), + "unexpected 
render:\n{rendered}" + ); + assert!( + rendered.contains( + "return invoke('alpha_command', params);\n}\n\nexport async function betaCommand" + ), + "unexpected render:\n{rendered}" + ); + assert!( + !rendered.contains( + "return invoke('alpha_command', params);\n}\n\n\nexport async function betaCommand" + ), + "unexpected render:\n{rendered}" + ); + } + + #[test] + fn test_generate_events_file_has_single_blank_line_between_listeners() { + let gen = TypeScriptBindingsGenerator::new(); + let analyzer = CommandAnalyzer::new(); + let config = GenerateConfig::default(); + let events = vec![ + EventInfo { + event_name: "alpha-ready".to_string(), + payload_type: "String".to_string(), + payload_type_structure: TypeStructure::Primitive("string".to_string()), + file_path: "a.rs".to_string(), + line_number: 1, + }, + EventInfo { + event_name: "beta-ready".to_string(), + payload_type: "String".to_string(), + payload_type_structure: TypeStructure::Primitive("string".to_string()), + file_path: "b.rs".to_string(), + line_number: 2, + }, + ]; + let rendered = + result_without_timestamp(&gen.generate_events_file(&events, &analyzer, &config)); + + assert!( + rendered.contains(" });\n}\n\n/**\n * Listen for 'beta-ready' events"), + "unexpected render:\n{rendered}" + ); + assert!( + !rendered.contains(" });\n}\n\n\n/**\n * Listen for 'beta-ready' events"), + "unexpected render:\n{rendered}" + ); + } + + #[test] + fn test_generate_types_file_keeps_blank_line_after_header() { + let gen = TypeScriptBindingsGenerator::new(); + let analyzer = CommandAnalyzer::new(); + let config = GenerateConfig::default(); + let rendered = result_without_timestamp(&gen.generate_types_file_content( + &[], + &HashMap::new(), + &analyzer, + &config, + )); + + assert!( + rendered.contains(" */\n\n"), + "unexpected render:\n{rendered}" + ); + } + + #[test] + fn test_generate_types_file_compacts_param_interfaces() { + let gen = TypeScriptBindingsGenerator::new(); + let analyzer = CommandAnalyzer::new(); + let 
config = GenerateConfig::default(); + let commands = vec![CommandInfo::new_for_test( + "abort_loopback_fetch", + "test.rs", + 1, + vec![ParameterInfo { + name: "request_id".to_string(), + rust_type: "String".to_string(), + is_optional: false, + type_structure: TypeStructure::Primitive("string".to_string()), + serde_rename: None, + }], + "void", + false, + vec![], + )]; + let rendered = result_without_timestamp(&gen.generate_types_file_content( + &commands, + &HashMap::new(), + &analyzer, + &config, + )); + + assert!( + rendered.contains( + "export interface AbortLoopbackFetchParams {\n requestId: string;\n [key: string]: unknown;\n}" + ), + "unexpected render:\n{rendered}" + ); + } + + fn result_without_timestamp(content: &str) -> String { + content + .lines() + .map(|line| { + if line.starts_with(" * Generated at:") { + " * Generated at: ".to_string() + } else { + line.to_string() + } + }) + .collect::>() + .join("\n") + } + } + + mod determinism { + use super::*; + + fn create_test_config() -> GenerateConfig { + GenerateConfig { + project_path: ".".to_string(), + output_path: "./generated".to_string(), + validation_library: "none".to_string(), + verbose: Some(false), + visualize_deps: Some(false), + include_private: Some(false), + type_mappings: None, + exclude_patterns: None, + include_patterns: None, + default_parameter_case: "camelCase".to_string(), + default_field_case: "snake_case".to_string(), + force: Some(false), + } + } + + fn create_test_struct(name: &str, rust_type: &str, ts_type: &str) -> StructInfo { + StructInfo { + name: name.to_string(), + fields: vec![FieldInfo { + name: "value".to_string(), + rust_type: rust_type.to_string(), + is_optional: false, + is_public: true, + type_structure: TypeStructure::Primitive(ts_type.to_string()), + serde_rename: None, + validator_attributes: None, + }], + file_path: format!("{name}.rs"), + is_enum: false, + serde_rename_all: None, + serde_tag: None, + enum_variants: None, + } + } + + fn 
create_test_event(event_name: &str, file_path: &str, line_number: usize) -> EventInfo { + EventInfo { + event_name: event_name.to_string(), + payload_type: "String".to_string(), + payload_type_structure: TypeStructure::Primitive("string".to_string()), + file_path: file_path.to_string(), + line_number, + } + } + + fn normalize_generated_output(content: &str) -> String { + content + .lines() + .map(|line| { + if line.starts_with(" * Generated at:") { + " * Generated at: ".to_string() + } else { + line.to_string() + } + }) + .collect::>() + .join("\n") + } + + #[test] + fn deterministic_output_for_reversed_inputs() { + let generator = TypeScriptBindingsGenerator::new(); + let analyzer = CommandAnalyzer::new(); + let config = create_test_config(); + + let commands1 = vec![ + CommandInfo::new_for_test( + "alpha_command", + "b.rs", + 1, + vec![], + "Alpha", + false, + vec![], + ), + CommandInfo::new_for_test("beta_command", "a.rs", 1, vec![], "Beta", false, vec![]), + ]; + let commands2 = vec![ + CommandInfo::new_for_test("beta_command", "a.rs", 1, vec![], "Beta", false, vec![]), + CommandInfo::new_for_test( + "alpha_command", + "b.rs", + 1, + vec![], + "Alpha", + false, + vec![], + ), + ]; + + let mut structs1 = HashMap::new(); + structs1.insert( + "Alpha".to_string(), + create_test_struct("Alpha", "String", "string"), + ); + structs1.insert( + "Beta".to_string(), + create_test_struct("Beta", "i32", "number"), + ); + + let mut structs2 = HashMap::new(); + structs2.insert( + "Beta".to_string(), + create_test_struct("Beta", "i32", "number"), + ); + structs2.insert( + "Alpha".to_string(), + create_test_struct("Alpha", "String", "string"), + ); + + let events1 = vec![ + create_test_event("beta-ready", "b.rs", 20), + create_test_event("alpha-ready", "a.rs", 10), + ]; + let events2 = vec![ + create_test_event("alpha-ready", "a.rs", 10), + create_test_event("beta-ready", "b.rs", 20), + ]; + + let types1 = + generator.generate_types_file_content(&commands1, &structs1, 
&analyzer, &config); + let types2 = + generator.generate_types_file_content(&commands2, &structs2, &analyzer, &config); + let commands_file1 = + generator.generate_command_bindings(&commands1, &analyzer, &config); + let commands_file2 = + generator.generate_command_bindings(&commands2, &analyzer, &config); + let events_file1 = generator.generate_events_file(&events1, &analyzer, &config); + let events_file2 = generator.generate_events_file(&events2, &analyzer, &config); + + assert_eq!( + normalize_generated_output(&types1), + normalize_generated_output(&types2) + ); + assert_eq!( + normalize_generated_output(&commands_file1), + normalize_generated_output(&commands_file2) + ); + assert_eq!( + normalize_generated_output(&events_file1), + normalize_generated_output(&events_file2) + ); + + for (file_name, content) in [ + ("types.ts", &types1), + ("commands.ts", &commands_file1), + ("events.ts", &events_file1), + ] { + let normalized = normalize_generated_output(content); + assert!( + !normalized.contains("\n\n\n"), + "unexpected blank lines in {file_name}:\n{normalized}" + ); + assert!(content.ends_with('\n')); + } + } } } diff --git a/src/generators/ts/templates/commands.ts.tera b/src/generators/ts/templates/commands.ts.tera index ee02aef..718897c 100644 --- a/src/generators/ts/templates/commands.ts.tera +++ b/src/generators/ts/templates/commands.ts.tera @@ -1,11 +1,11 @@ -{{ header }} -{% if has_channels -%} +{{ header }}{% if has_channels %} import { invoke, Channel } from '@tauri-apps/api/core'; -{% else -%} +{% else %} import { invoke } from '@tauri-apps/api/core'; {% endif -%} -import * as types from './types'; +import * as types from './types';{% if commands | length > 0 %} -{% for command in commands -%} -{% include "typescript/partials/command_function.ts.tera" %} -{% endfor -%} +{% for command in commands %}{% include "typescript/partials/command_function.ts.tera" %}{% if not loop.last %} + +{% endif %}{% endfor %} +{% endif -%} diff --git 
a/src/generators/ts/templates/events.ts.tera b/src/generators/ts/templates/events.ts.tera index 83aa059..bccd89b 100644 --- a/src/generators/ts/templates/events.ts.tera +++ b/src/generators/ts/templates/events.ts.tera @@ -5,7 +5,11 @@ */ import { listen, type UnlistenFn, type Event } from '@tauri-apps/api/event'; import * as types from './types'; +{% if events | length > 0 %} -{% for event in events -%} -{% include "typescript/partials/event_listener.ts.tera" %} -{% endfor -%} +{%- for event in events %} +{% include "typescript/partials/event_listener.ts.tera" -%} +{% if not loop.last %} +{%- endif %} +{%- endfor %} +{% endif %} diff --git a/src/generators/ts/templates/index.ts.tera b/src/generators/ts/templates/index.ts.tera index 6de479d..ad8614f 100644 --- a/src/generators/ts/templates/index.ts.tera +++ b/src/generators/ts/templates/index.ts.tera @@ -1,6 +1,4 @@ -{{ header}} -{% for file in files -%} -{%- if file != "index.ts" -%} -export * from './{{ file | replace(from=".ts", to="") }}'; -{% endif -%} -{%- endfor -%} +{{ header }} +{% for module in modules -%} +export * from './{{ module | replace(from=".ts", to="") }}'; +{% endfor -%} diff --git a/src/generators/ts/templates/partials/command_function.ts.tera b/src/generators/ts/templates/partials/command_function.ts.tera index b2134a0..0e96c4d 100644 --- a/src/generators/ts/templates/partials/command_function.ts.tera +++ b/src/generators/ts/templates/partials/command_function.ts.tera @@ -1,15 +1,11 @@ {%- set has_params = command.parameters | length > 0 -%} {%- set has_channels = command.channels | length > 0 -%} - -{# Determine parameter signature #} -{%- if has_params or has_channels -%} +{#- Params and channels both flow through the shared generated Params interface. 
-#}{%- if has_params or has_channels -%} export async function {{ command.tsFunctionName }}(params: types.{{ command.tsTypeName }}Params): Promise<{{ command.returnTypeTs | add_types_prefix }}> { return invoke('{{ command.name }}', params); } -{%- else -%} -{# No parameters at all #} +{#- Commands with no inputs can invoke directly without a Params wrapper. -#}{%- else -%} export async function {{ command.tsFunctionName }}(): Promise<{{ command.returnTypeTs | add_types_prefix }}> { return invoke('{{ command.name }}'); } -{%- endif %} - +{%- endif -%} diff --git a/src/generators/ts/templates/partials/enum.tera b/src/generators/ts/templates/partials/enum.tera index b945166..d01f2ae 100644 --- a/src/generators/ts/templates/partials/enum.tera +++ b/src/generators/ts/templates/partials/enum.tera @@ -1,11 +1,11 @@ {% if struct.isSimpleEnum -%} {# Simple enum: string literal union #} -export type {{ name }} = {% for field in fields -%} -"{{ field.serializedName }}"{% if not loop.last %} | {% endif %} +export type {{ struct.name }} = {% for variant in struct.enumVariants -%} +"{{ variant.serializedName }}"{% if not loop.last %} | {% endif %} {%- endfor %}; {% else -%} {# Complex enum: discriminated union #} -export type {{ name }} = +export type {{ struct.name }} = {% for variant in struct.enumVariants -%} | {% if variant.kind == "unit" -%} { {{ struct.discriminatorTag }}: "{{ variant.serializedName }}" } diff --git a/src/generators/ts/templates/partials/event_listener.ts.tera b/src/generators/ts/templates/partials/event_listener.ts.tera index 712bcfc..0ace79e 100644 --- a/src/generators/ts/templates/partials/event_listener.ts.tera +++ b/src/generators/ts/templates/partials/event_listener.ts.tera @@ -10,4 +10,3 @@ export async function {{ event.tsFunctionName }}( handler(event.payload); }); } - diff --git a/src/generators/ts/templates/partials/interface.tera b/src/generators/ts/templates/partials/interface.tera index cccd8f3..6d0e982 100644 --- 
a/src/generators/ts/templates/partials/interface.tera +++ b/src/generators/ts/templates/partials/interface.tera @@ -1,5 +1,4 @@ -export interface {{ name }} { - {%- for field in fields %} - {{ field.serializedName }}{% if field.isOptional %}?{% endif %}: {{ field.typescriptType }}; - {%- endfor %} +export interface {{ struct.name }} { +{% for field in struct.fields %} {{ field.serializedName }}{% if field.isOptional %}?{% endif %}: {{ field.typescriptType }}; +{% endfor -%} } diff --git a/src/generators/ts/templates/partials/param_interface.ts.tera b/src/generators/ts/templates/partials/param_interface.ts.tera index ff28520..1fe263c 100644 --- a/src/generators/ts/templates/partials/param_interface.ts.tera +++ b/src/generators/ts/templates/partials/param_interface.ts.tera @@ -1,12 +1,5 @@ -{%- if command.parameters | length > 0 or command.channels | length > 0 -%} export interface {{ command.tsTypeName }}Params { - {%- for param in command.parameters %} - {{ param.serializedName }}{% if param.isOptional %}?{% endif %}: {{ param.typescriptType }}; - {%- endfor %} - {%- for channel in command.channels %} - {{ channel.serializedParameterName }}: Channel<{{ channel.typescriptMessageType }}>; - {%- endfor %} - [key: string]: unknown; +{% for param in command.parameters %} {{ param.serializedName }}{% if param.isOptional %}?{% endif %}: {{ param.typescriptType }}; +{% endfor %}{% for channel in command.channels %} {{ channel.serializedParameterName }}: Channel<{{ channel.typescriptMessageType }}>; +{% endfor %} [key: string]: unknown; } - -{% endif -%} diff --git a/src/generators/ts/templates/types.ts.tera b/src/generators/ts/templates/types.ts.tera index d4192d5..f83f92f 100644 --- a/src/generators/ts/templates/types.ts.tera +++ b/src/generators/ts/templates/types.ts.tera @@ -1,20 +1,7 @@ {{ header }} -{% if has_channels -%} +{% if has_channels %} import type { Channel } from '@tauri-apps/api/core'; -{% endif %} - -{% for struct in structs -%} -{%- set name = struct.name 
-%} -{%- set fields = struct.fields -%} -{%- set isEnum = struct.isEnum -%} -{% if isEnum -%} -{% include "typescript/partials/enum.tera" %} -{% else -%} -{% include "typescript/partials/interface.tera" %} -{% endif -%} -{% endfor -%} - -{% for command in commands -%} -{% include "typescript/partials/param_interface.ts.tera" %} -{% endfor -%} +{% endif %}{% for struct in structs %}{% if struct.isEnum %}{% include "typescript/partials/enum.tera" %}{% else %}{% include "typescript/partials/interface.tera" %}{% endif %}{% if not loop.last or param_commands | length > 0 %} +{% endif %}{% endfor %}{% for command in param_commands %}{% include "typescript/partials/param_interface.ts.tera" %}{% if not loop.last %} +{% endif %}{% endfor %} diff --git a/src/generators/zod/generator.rs b/src/generators/zod/generator.rs index c14d141..5beeabc 100644 --- a/src/generators/zod/generator.rs +++ b/src/generators/zod/generator.rs @@ -125,13 +125,18 @@ impl ZodBindingsGenerator { let type_names: HashSet = used_structs.keys().cloned().collect(); let sorted_types = analyzer.topological_sort_types(&type_names); - // Generate struct schemas - let mut struct_schemas = String::new(); - for name in &sorted_types { - if let Some(struct_info) = used_structs.get(name) { - struct_schemas.push_str(&self.generate_struct_schema(name, struct_info, config)); - } - } + // Render struct schemas up front so the template only handles section layout. 
+ let sections = sorted_types + .iter() + .filter_map(|name| { + used_structs.get(name).map(|struct_info| { + self.generate_struct_schema(name, struct_info, config) + .trim() + .to_string() + }) + }) + .filter(|section| !section.is_empty()) + .collect::>(); // Convert commands to context wrappers let visitor = ZodVisitor::with_config(config); @@ -148,28 +153,17 @@ impl ZodBindingsGenerator { } } - // Generate parameter schemas using template - let param_schemas = { - let mut context = Context::new(); - context.insert("commands", &command_contexts); - self.render("zod/partials/param_schemas.ts.tera", &context) - .unwrap_or_else(|e| { - eprintln!("Template rendering failed for param schemas: {}", e); - String::new() - }) - }; - - // Generate type aliases using template - let type_aliases = { - let mut context = Context::new(); - context.insert("commands", &command_contexts); - context.insert("struct_names", &sorted_types); - self.render("zod/partials/type_aliases.ts.tera", &context) - .unwrap_or_else(|e| { - eprintln!("Template rendering failed for type aliases: {}", e); - String::new() - }) - }; + // Split command contexts by the template fragments they actually need. 
+ let commands_with_params = command_contexts + .iter() + .filter(|command| !command.parameters.is_empty()) + .cloned() + .collect::>(); + let commands_with_type_aliases = command_contexts + .iter() + .filter(|command| !command.parameters.is_empty() || !command.channels.is_empty()) + .cloned() + .collect::>(); // Render main types.ts template let mut context = Context::new(); @@ -178,9 +172,9 @@ impl ZodBindingsGenerator { "has_channels", &commands.iter().any(|cmd| !cmd.channels.is_empty()), ); - context.insert("struct_schemas", &struct_schemas); - context.insert("param_schemas", ¶m_schemas); - context.insert("type_aliases", &type_aliases); + context.insert("struct_sections", §ions); + context.insert("commands_with_params", &commands_with_params); + context.insert("commands_with_type_aliases", &commands_with_type_aliases); self.render("zod/types.ts.tera", &context) .unwrap_or_else(|e| { @@ -222,9 +216,14 @@ impl ZodBindingsGenerator { /// Generate index.ts file fn generate_index_file(&self, generated_files: &[String]) -> String { + let modules = generated_files + .iter() + .filter(|file| file.as_str() != "index.ts") + .cloned() + .collect::>(); let mut context = Context::new(); context.insert("header", &self.generate_file_header()); - context.insert("files", generated_files); + context.insert("modules", &modules); self.render("zod/index.ts.tera", &context) .unwrap_or_else(|e| { @@ -341,7 +340,9 @@ impl Default for ZodBindingsGenerator { #[cfg(test)] mod tests { use super::*; - use crate::models::{FieldInfo, TypeStructure}; + use crate::analysis::CommandAnalyzer; + use crate::models::{EventInfo, FieldInfo, TypeStructure}; + use std::collections::HashMap; mod initialization { use super::*; @@ -551,6 +552,7 @@ mod tests { mod helper_methods { use super::*; + use crate::models::{ChannelInfo, ParameterInfo, TypeStructure}; #[test] fn test_generate_index_file_with_empty_files() { @@ -567,5 +569,383 @@ mod tests { let result = gen.generate_index_file(&files); 
assert!(!result.is_empty()); } + + #[test] + fn test_generate_index_file_skips_index_without_blank_lines() { + let gen = ZodBindingsGenerator::new(); + let files = vec![ + "types.ts".to_string(), + "index.ts".to_string(), + "commands.ts".to_string(), + ]; + let result = result_without_timestamp(&gen.generate_index_file(&files)); + + assert!(result.contains(" */\n\nexport * from './types';")); + assert!(result.contains("export * from './types';\nexport * from './commands';")); + assert!(!result.contains("export * from './types';\n\nexport * from './commands';")); + } + + #[test] + fn test_generate_command_bindings_avoid_blank_lines_between_functions() { + let gen = ZodBindingsGenerator::new(); + let analyzer = CommandAnalyzer::new(); + let config = GenerateConfig { + project_path: ".".to_string(), + output_path: "./output".to_string(), + validation_library: "zod".to_string(), + visualize_deps: Some(false), + verbose: Some(false), + include_private: Some(false), + type_mappings: None, + exclude_patterns: None, + include_patterns: None, + default_parameter_case: "camelCase".to_string(), + default_field_case: "snake_case".to_string(), + force: Some(false), + }; + let commands = vec![ + CommandInfo::new_for_test( + "alpha_command", + "a.rs", + 1, + vec![ParameterInfo { + name: "value".to_string(), + rust_type: "String".to_string(), + is_optional: false, + type_structure: TypeStructure::Primitive("string".to_string()), + serde_rename: None, + }], + "Alpha", + false, + vec![], + ), + CommandInfo::new_for_test("beta_command", "b.rs", 1, vec![], "Beta", false, vec![]), + ]; + let rendered = result_without_timestamp( + &gen.generate_command_bindings(&commands, &analyzer, &config), + ); + + assert!( + rendered.contains("}\n\nexport async function alphaCommand"), + "unexpected render:\n{rendered}" + ); + assert!( + rendered.contains(" }\n}\n\nexport async function betaCommand"), + "unexpected render:\n{rendered}" + ); + assert!( + !rendered.contains(" }\n}\n\n\nexport async 
function betaCommand"), + "unexpected render:\n{rendered}" + ); + } + + #[test] + fn test_generate_events_file_has_single_blank_line_between_listeners() { + let gen = ZodBindingsGenerator::new(); + let analyzer = CommandAnalyzer::new(); + let config = GenerateConfig { + project_path: ".".to_string(), + output_path: "./output".to_string(), + validation_library: "zod".to_string(), + visualize_deps: Some(false), + verbose: Some(false), + include_private: Some(false), + type_mappings: None, + exclude_patterns: None, + include_patterns: None, + default_parameter_case: "camelCase".to_string(), + default_field_case: "snake_case".to_string(), + force: Some(false), + }; + let events = vec![ + EventInfo { + event_name: "alpha-ready".to_string(), + payload_type: "String".to_string(), + payload_type_structure: TypeStructure::Primitive("string".to_string()), + file_path: "a.rs".to_string(), + line_number: 1, + }, + EventInfo { + event_name: "beta-ready".to_string(), + payload_type: "String".to_string(), + payload_type_structure: TypeStructure::Primitive("string".to_string()), + file_path: "b.rs".to_string(), + line_number: 2, + }, + ]; + let rendered = + result_without_timestamp(&gen.generate_events_file(&events, &analyzer, &config)); + + assert!( + rendered.contains(" });\n}\n\n/**\n * Listen for 'beta-ready' events"), + "unexpected render:\n{rendered}" + ); + assert!( + !rendered.contains(" });\n}\n\n\n/**\n * Listen for 'beta-ready' events"), + "unexpected render:\n{rendered}" + ); + } + + #[test] + fn test_generate_types_file_keeps_blank_line_after_header() { + let gen = ZodBindingsGenerator::new(); + let analyzer = CommandAnalyzer::new(); + let config = GenerateConfig { + project_path: ".".to_string(), + output_path: "./output".to_string(), + validation_library: "zod".to_string(), + visualize_deps: Some(false), + verbose: Some(false), + include_private: Some(false), + type_mappings: None, + exclude_patterns: None, + include_patterns: None, + default_parameter_case: 
"camelCase".to_string(), + default_field_case: "snake_case".to_string(), + force: Some(false), + }; + let rendered = result_without_timestamp(&gen.generate_types_file_content( + &[], + &HashMap::new(), + &analyzer, + &config, + )); + + assert!( + rendered.contains(" */\n\nimport { z } from 'zod';"), + "unexpected render:\n{rendered}" + ); + } + + #[test] + fn test_generate_types_file_compacts_channel_interfaces() { + let gen = ZodBindingsGenerator::new(); + let analyzer = CommandAnalyzer::new(); + let config = GenerateConfig { + project_path: ".".to_string(), + output_path: "./output".to_string(), + validation_library: "zod".to_string(), + visualize_deps: Some(false), + verbose: Some(false), + include_private: Some(false), + type_mappings: None, + exclude_patterns: None, + include_patterns: None, + default_parameter_case: "camelCase".to_string(), + default_field_case: "snake_case".to_string(), + force: Some(false), + }; + let commands = vec![CommandInfo::new_for_test( + "abort_loopback_fetch", + "test.rs", + 1, + vec![ParameterInfo { + name: "request_id".to_string(), + rust_type: "String".to_string(), + is_optional: false, + type_structure: TypeStructure::Primitive("string".to_string()), + serde_rename: None, + }], + "void", + false, + vec![ChannelInfo::new_for_test( + "updates", + "String", + "abort_loopback_fetch", + "test.rs", + 1, + )], + )]; + let rendered = result_without_timestamp(&gen.generate_types_file_content( + &commands, + &HashMap::new(), + &analyzer, + &config, + )); + + assert!( + rendered.contains( + "export interface AbortLoopbackFetchParams extends z.infer {\n updates: Channel;\n}" + ), + "unexpected render:\n{rendered}" + ); + } + + fn result_without_timestamp(content: &str) -> String { + content + .lines() + .map(|line| { + if line.starts_with(" * Generated at:") { + " * Generated at: ".to_string() + } else { + line.to_string() + } + }) + .collect::>() + .join("\n") + } + } + + mod whitespace { + use super::*; + + fn create_test_config() -> 
GenerateConfig { + GenerateConfig { + project_path: ".".to_string(), + output_path: "./output".to_string(), + validation_library: "zod".to_string(), + visualize_deps: Some(false), + verbose: Some(false), + include_private: Some(false), + type_mappings: None, + exclude_patterns: None, + include_patterns: None, + default_parameter_case: "camelCase".to_string(), + default_field_case: "snake_case".to_string(), + force: Some(false), + } + } + + fn create_test_struct(name: &str, rust_type: &str, ts_type: &str) -> StructInfo { + StructInfo { + name: name.to_string(), + fields: vec![FieldInfo { + name: "value".to_string(), + rust_type: rust_type.to_string(), + is_optional: false, + is_public: true, + type_structure: TypeStructure::Primitive(ts_type.to_string()), + serde_rename: None, + validator_attributes: None, + }], + file_path: format!("{name}.rs"), + is_enum: false, + serde_rename_all: None, + serde_tag: None, + enum_variants: None, + } + } + + fn create_test_event(event_name: &str, file_path: &str, line_number: usize) -> EventInfo { + EventInfo { + event_name: event_name.to_string(), + payload_type: "String".to_string(), + payload_type_structure: TypeStructure::Primitive("string".to_string()), + file_path: file_path.to_string(), + line_number, + } + } + + fn normalize_generated_output(content: &str) -> String { + content + .lines() + .map(|line| { + if line.starts_with(" * Generated at:") { + " * Generated at: ".to_string() + } else { + line.to_string() + } + }) + .collect::>() + .join("\n") + } + + #[test] + fn deterministic_output_for_reversed_inputs() { + let generator = ZodBindingsGenerator::new(); + let analyzer = CommandAnalyzer::new(); + let config = create_test_config(); + + let commands1 = vec![ + CommandInfo::new_for_test( + "alpha_command", + "b.rs", + 1, + vec![], + "Alpha", + false, + vec![], + ), + CommandInfo::new_for_test("beta_command", "a.rs", 1, vec![], "Beta", false, vec![]), + ]; + let commands2 = vec![ + CommandInfo::new_for_test("beta_command", 
"a.rs", 1, vec![], "Beta", false, vec![]), + CommandInfo::new_for_test( + "alpha_command", + "b.rs", + 1, + vec![], + "Alpha", + false, + vec![], + ), + ]; + + let mut structs1 = HashMap::new(); + structs1.insert( + "Alpha".to_string(), + create_test_struct("Alpha", "String", "string"), + ); + structs1.insert( + "Beta".to_string(), + create_test_struct("Beta", "i32", "number"), + ); + + let mut structs2 = HashMap::new(); + structs2.insert( + "Beta".to_string(), + create_test_struct("Beta", "i32", "number"), + ); + structs2.insert( + "Alpha".to_string(), + create_test_struct("Alpha", "String", "string"), + ); + + let events1 = vec![ + create_test_event("beta-ready", "b.rs", 20), + create_test_event("alpha-ready", "a.rs", 10), + ]; + let events2 = vec![ + create_test_event("alpha-ready", "a.rs", 10), + create_test_event("beta-ready", "b.rs", 20), + ]; + + let types1 = + generator.generate_types_file_content(&commands1, &structs1, &analyzer, &config); + let types2 = + generator.generate_types_file_content(&commands2, &structs2, &analyzer, &config); + let commands_file1 = + generator.generate_command_bindings(&commands1, &analyzer, &config); + let commands_file2 = + generator.generate_command_bindings(&commands2, &analyzer, &config); + let events_file1 = generator.generate_events_file(&events1, &analyzer, &config); + let events_file2 = generator.generate_events_file(&events2, &analyzer, &config); + + assert_eq!( + normalize_generated_output(&types1), + normalize_generated_output(&types2) + ); + assert_eq!( + normalize_generated_output(&commands_file1), + normalize_generated_output(&commands_file2) + ); + assert_eq!( + normalize_generated_output(&events_file1), + normalize_generated_output(&events_file2) + ); + + for (file_name, content) in [ + ("types.ts", &types1), + ("commands.ts", &commands_file1), + ("events.ts", &events_file1), + ] { + let normalized = normalize_generated_output(content); + assert!( + !normalized.contains("\n\n\n"), + "unexpected blank lines in 
{file_name}:\n{normalized}" + ); + assert!(content.ends_with('\n')); + } + } } } diff --git a/src/generators/zod/templates/commands.ts.tera b/src/generators/zod/templates/commands.ts.tera index 94d04ce..aba6386 100644 --- a/src/generators/zod/templates/commands.ts.tera +++ b/src/generators/zod/templates/commands.ts.tera @@ -1,7 +1,6 @@ -{{ header }} -{% if has_channels -%} +{{ header }}{% if has_channels %} import { invoke, Channel } from '@tauri-apps/api/core'; -{% else -%} +{% else %} import { invoke } from '@tauri-apps/api/core'; {% endif -%} import { ZodError } from 'zod'; @@ -19,8 +18,9 @@ export interface CommandHooks { /** Called after command settles (success or error) */ onSettled?: () => void; -} +}{% if commands | length > 0 %} -{% for command in commands -%} -{% include "zod/partials/command_function.ts.tera" %} -{% endfor -%} +{% for command in commands %}{% include "zod/partials/command_function.ts.tera" %}{% if not loop.last %} + +{% endif %}{% endfor %} +{% endif -%} diff --git a/src/generators/zod/templates/events.ts.tera b/src/generators/zod/templates/events.ts.tera index e35f93e..0d5b8a7 100644 --- a/src/generators/zod/templates/events.ts.tera +++ b/src/generators/zod/templates/events.ts.tera @@ -5,7 +5,11 @@ */ import { listen, type UnlistenFn, type Event } from '@tauri-apps/api/event'; import * as types from './types'; +{% if events | length > 0 %} -{% for event in events -%} -{% include "zod/partials/event_listener.ts.tera" %} -{% endfor -%} +{%- for event in events %} +{% include "zod/partials/event_listener.ts.tera" -%} +{% if not loop.last %} +{%- endif %} +{%- endfor %} +{% endif %} diff --git a/src/generators/zod/templates/index.ts.tera b/src/generators/zod/templates/index.ts.tera index 6de479d..ad8614f 100644 --- a/src/generators/zod/templates/index.ts.tera +++ b/src/generators/zod/templates/index.ts.tera @@ -1,6 +1,4 @@ -{{ header}} -{% for file in files -%} -{%- if file != "index.ts" -%} -export * from './{{ file | replace(from=".ts", 
to="") }}'; -{% endif -%} -{%- endfor -%} +{{ header }} +{% for module in modules -%} +export * from './{{ module | replace(from=".ts", to="") }}'; +{% endfor -%} diff --git a/src/generators/zod/templates/partials/command_function.ts.tera b/src/generators/zod/templates/partials/command_function.ts.tera index 8aaa0df..3769ac8 100644 --- a/src/generators/zod/templates/partials/command_function.ts.tera +++ b/src/generators/zod/templates/partials/command_function.ts.tera @@ -1,9 +1,7 @@ {%- set has_params = command.parameters | length > 0 -%} {%- set has_channels = command.channels | length > 0 -%} {%- set return_type = command.returnTypeTs | add_types_prefix -%} - -{# Determine parameter signature #} -{%- if has_params or has_channels -%} +{#- Params and channels share the generated Params type so validation can stay in one path. -#}{%- if has_params or has_channels -%} export async function {{ command.tsFunctionName }}(params: types.{{ command.tsTypeName }}Params, hooks?: CommandHooks<{{ return_type }}>): Promise<{{ return_type }}> { try { {%- if has_params %} @@ -14,13 +12,13 @@ export async function {{ command.tsFunctionName }}(params: types.{{ command.tsTy throw result.error; } {%- if has_channels %} - {# Build channel references #} + {#- Build channel references. -#} const data = await invoke<{{ return_type }}>('{{ command.name }}', { ...result.data, {% for channel in command.channels %}{{ channel.serializedParameterName }}: params.{{ channel.serializedParameterName }}{% if not loop.last %}, {% endif %}{% endfor %} }); {%- else %} const data = await invoke<{{ return_type }}>('{{ command.name }}', result.data); {%- endif %} {%- else %} - {# Only channels, no validation #} + {#- Channel-only commands skip Zod parsing and pass params through unchanged. 
-#} const data = await invoke<{{ return_type }}>('{{ command.name }}', params); {%- endif %} hooks?.onSuccess?.(data); @@ -38,8 +36,7 @@ export async function {{ command.tsFunctionName }}(params: types.{{ command.tsTy hooks?.onSettled?.(); } } -{%- else -%} -{# No parameters at all #} +{#- Commands with no inputs can skip both Params generation and schema validation. -#}{%- else -%} export async function {{ command.tsFunctionName }}(hooks?: CommandHooks<{{ return_type }}>): Promise<{{ return_type }}> { try { const data = await invoke<{{ return_type }}>('{{ command.name }}'); @@ -52,5 +49,4 @@ export async function {{ command.tsFunctionName }}(hooks?: CommandHooks<{{ retur hooks?.onSettled?.(); } } -{%- endif %} - +{%- endif -%} diff --git a/src/generators/zod/templates/partials/event_listener.ts.tera b/src/generators/zod/templates/partials/event_listener.ts.tera index 712bcfc..0ace79e 100644 --- a/src/generators/zod/templates/partials/event_listener.ts.tera +++ b/src/generators/zod/templates/partials/event_listener.ts.tera @@ -10,4 +10,3 @@ export async function {{ event.tsFunctionName }}( handler(event.payload); }); } - diff --git a/src/generators/zod/templates/partials/param_schemas.ts.tera b/src/generators/zod/templates/partials/param_schemas.ts.tera index b9af89c..6f877b5 100644 --- a/src/generators/zod/templates/partials/param_schemas.ts.tera +++ b/src/generators/zod/templates/partials/param_schemas.ts.tera @@ -1,10 +1,4 @@ -{% for command in commands -%} -{%- if command.parameters | length > 0 -%} export const {{ command.tsTypeName }}ParamsSchema = z.object({ - {% for param in command.parameters -%} - {{ param.serializedName }}: {{ param.typescriptType }}{% if param.isOptional %}.optional(){% endif %}, - {%- endfor %} +{% for param in command.parameters %} {{ param.serializedName }}: {{ param.typescriptType }}{% if param.isOptional %}.optional(){% endif %}, +{% endfor -%} }); - -{% endif -%} -{%- endfor -%} diff --git 
a/src/generators/zod/templates/partials/type_aliases.ts.tera b/src/generators/zod/templates/partials/type_aliases.ts.tera index d91c159..e47e2c3 100644 --- a/src/generators/zod/templates/partials/type_aliases.ts.tera +++ b/src/generators/zod/templates/partials/type_aliases.ts.tera @@ -1,21 +1,13 @@ -{%- for command in commands %} -{%- if command.parameters | length == 0 and command.channels | length > 0 %} -{# Only channels, generate interface manually #} +{# Channel-only commands need a manual interface because there is no schema to infer from. #}{% if command.parameters | length == 0 and command.channels | length > 0 %} export interface {{ command.tsTypeName }}Params { - {%- for channel in command.channels %} - {{ channel.serializedParameterName }}: Channel<{{ channel.typescriptMessageType }}>; - {%- endfor %} - [key: string]: unknown; +{% for channel in command.channels %} {{ channel.serializedParameterName }}: Channel<{{ channel.typescriptMessageType }}>; +{% endfor -%} [key: string]: unknown; } -{%- elif command.parameters | length > 0 and command.channels | length == 0 %} -{# Only regular params #} +{# Parameter-only commands can infer the Params type straight from the schema. #}{% elif command.parameters | length > 0 and command.channels | length == 0 %} export type {{ command.tsTypeName }}Params = z.infer<typeof {{ command.tsTypeName }}ParamsSchema>; -{%- elif command.parameters | length > 0 and command.channels | length > 0 %} -{# Both params and channels #} +{# Mixed commands extend the inferred schema type with channel fields. 
#}{% else %} export interface {{ command.tsTypeName }}Params extends z.infer<typeof {{ command.tsTypeName }}ParamsSchema> { - {%- for channel in command.channels %} - {{ channel.serializedParameterName }}: Channel<{{ channel.typescriptMessageType }}>; - {%- endfor %} +{% for channel in command.channels %} {{ channel.serializedParameterName }}: Channel<{{ channel.typescriptMessageType }}>; +{% endfor -%} } -{%- endif %} -{%- endfor %} +{% endif %} diff --git a/src/generators/zod/templates/types.ts.tera b/src/generators/zod/templates/types.ts.tera index b3a4699..2b70ac1 100644 --- a/src/generators/zod/templates/types.ts.tera +++ b/src/generators/zod/templates/types.ts.tera @@ -1,9 +1,9 @@ {{ header }} -import { z } from 'zod'; -{% if has_channels %} +import { z } from 'zod';{% if has_channels %} import type { Channel } from '@tauri-apps/api/core'; {% endif %} -{{ struct_schemas }} -{{ param_schemas }} -{{ type_aliases }} +{% for section in struct_sections %}{{ section }}{% if not loop.last or commands_with_params | length > 0 or commands_with_type_aliases | length > 0 %} +{% endif %}{% endfor %}{% for command in commands_with_params %}{% include "zod/partials/param_schemas.ts.tera" %}{% if not loop.last or commands_with_type_aliases | length > 0 %} +{% endif %}{% endfor %}{% for command in commands_with_type_aliases %}{% include "zod/partials/type_aliases.ts.tera" %}{% if not loop.last %} +{% endif %}{% endfor %}