summaryrefslogtreecommitdiff
path: root/gen
diff options
context:
space:
mode:
Diffstat (limited to 'gen')
-rw-r--r--gen/constants.rs29
-rw-r--r--gen/env.rs136
-rw-r--r--gen/gen_commands_file.rs188
-rw-r--r--gen/gen_compile_info.rs37
-rw-r--r--gen/gen_iscc_script.rs25
-rw-r--r--gen/gen_mod_files.rs96
-rw-r--r--gen/gen_override_renderer.rs188
-rw-r--r--gen/gen_renderers_file.rs97
-rw-r--r--gen/gen_specific_renderer.rs383
-rw-r--r--gen/resolve_types.rs114
10 files changed, 1293 insertions, 0 deletions
diff --git a/gen/constants.rs b/gen/constants.rs
new file mode 100644
index 0000000..e26317f
--- /dev/null
+++ b/gen/constants.rs
@@ -0,0 +1,29 @@
// Source directories scanned by the generators.
pub const COMMANDS_PATH: &str = "./src/cmds/cmd/";
pub const RENDERERS_PATH: &str = "./src/cmds/renderer/";

// Template -> generated-file pairs. Each generator reads the `*.template`
// file and writes the corresponding output path.
pub const COMPILE_INFO_RS_TEMPLATE: &str = "./templates/compile_info.rs.template";
pub const COMPILE_INFO_RS: &str = "./src/data/compile_info.rs";

// Inno Setup installer script (Windows packaging).
pub const SETUP_JV_CLI_ISS_TEMPLATE: &str = "./templates/setup_jv_cli.iss.template";
pub const SETUP_JV_CLI_ISS: &str = "./scripts/setup/windows/setup_jv_cli.iss";

pub const COMMAND_LIST_TEMPLATE: &str = "./templates/_commands.rs.template";
pub const COMMAND_LIST: &str = "./src/systems/cmd/_commands.rs";

pub const OVERRIDE_RENDERER_DISPATCHER_TEMPLATE: &str =
    "./templates/_override_renderer_dispatcher.rs.template";
pub const OVERRIDE_RENDERER_DISPATCHER: &str =
    "./src/systems/render/_override_renderer_dispatcher.rs";

pub const OVERRIDE_RENDERER_ENTRY_TEMPLATE: &str =
    "./templates/_override_renderer_entry.rs.template";
pub const OVERRIDE_RENDERER_ENTRY: &str = "./src/systems/render/_override_renderer_entry.rs";

pub const SPECIFIC_RENDERER_MATCHING_TEMPLATE: &str =
    "./templates/_specific_renderer_matching.rs.template";
pub const SPECIFIC_RENDERER_MATCHING: &str = "./src/systems/render/_specific_renderer_matching.rs";

// TOML registry that declares commands, renderers, and collect targets.
pub const REGISTRY_TOML: &str = "./.cargo/registry.toml";

// Markers delimiting the repeatable section inside every template file;
// the generators slice out the text between them and instantiate it once
// per command/renderer.
pub const TEMPLATE_START: &str = "// -- TEMPLATE START --";
pub const TEMPLATE_END: &str = "// -- TEMPLATE END --";
diff --git a/gen/env.rs b/gen/env.rs
new file mode 100644
index 0000000..c45830e
--- /dev/null
+++ b/gen/env.rs
@@ -0,0 +1,136 @@
+use std::process::Command;
+
+pub fn get_author() -> Result<String, Box<dyn std::error::Error>> {
+ let cargo_toml_path = std::path::Path::new("Cargo.toml");
+ let cargo_toml_content = std::fs::read_to_string(cargo_toml_path)?;
+ let cargo_toml: toml::Value = toml::from_str(&cargo_toml_content)?;
+
+ if let Some(package) = cargo_toml.get("package") {
+ if let Some(authors) = package.get("authors") {
+ if let Some(authors_array) = authors.as_array() {
+ if let Some(first_author) = authors_array.get(0) {
+ if let Some(author_str) = first_author.as_str() {
+ return Ok(author_str.to_string());
+ }
+ }
+ }
+ }
+ }
+
+ Err("Author not found in Cargo.toml".into())
+}
+
+pub fn get_site() -> Result<String, Box<dyn std::error::Error>> {
+ let cargo_toml_path = std::path::Path::new("Cargo.toml");
+ let cargo_toml_content = std::fs::read_to_string(cargo_toml_path)?;
+ let cargo_toml: toml::Value = toml::from_str(&cargo_toml_content)?;
+
+ if let Some(package) = cargo_toml.get("package") {
+ if let Some(homepage) = package.get("homepage") {
+ if let Some(site_str) = homepage.as_str() {
+ return Ok(site_str.to_string());
+ }
+ }
+ }
+
+ Err("Homepage not found in Cargo.toml".into())
+}
+
/// Map a target triple (e.g. `x86_64-pc-windows-msvc`) to a human-readable
/// platform name. Unrecognized triples yield `"Unknown"`.
pub fn get_platform(target: &str) -> String {
    // Checked in order; the first substring match wins, which matches the
    // original if/else-if chain (e.g. `*-linux-android` -> "Linux").
    const PLATFORMS: [(&str, &str); 6] = [
        ("windows", "Windows"),
        ("linux", "Linux"),
        ("darwin", "macOS"),
        ("macos", "macOS"),
        ("android", "Android"),
        ("ios", "iOS"),
    ];

    PLATFORMS
        .iter()
        .find(|(needle, _)| target.contains(needle))
        .map(|(_, name)| (*name).to_string())
        .unwrap_or_else(|| "Unknown".to_string())
}
+
/// Describe the active Rust toolchain as `"<rustc --version> (<channel>)"`.
///
/// Falls back to `"unknown (stable)"` if `rustc` cannot be invoked; the
/// channel is inferred from the version string itself.
pub fn get_toolchain() -> String {
    let version = std::process::Command::new("rustc")
        .arg("--version")
        .output()
        .ok()
        .and_then(|out| String::from_utf8(out.stdout).ok())
        .map(|v| v.trim().to_string())
        .unwrap_or_else(|| "unknown".to_string());

    let channel = match () {
        _ if version.contains("nightly") => "nightly",
        _ if version.contains("beta") => "beta",
        _ => "stable",
    };

    format!("{} ({})", version, channel)
}
+
+pub fn get_version() -> String {
+ let cargo_toml_path = std::path::Path::new("Cargo.toml");
+ let cargo_toml_content = match std::fs::read_to_string(cargo_toml_path) {
+ Ok(content) => content,
+ Err(_) => return "unknown".to_string(),
+ };
+
+ let cargo_toml: toml::Value = match toml::from_str(&cargo_toml_content) {
+ Ok(value) => value,
+ Err(_) => return "unknown".to_string(),
+ };
+
+ if let Some(workspace) = cargo_toml.get("workspace") {
+ if let Some(package) = workspace.get("package") {
+ if let Some(version) = package.get("version") {
+ if let Some(version_str) = version.as_str() {
+ return version_str.to_string();
+ }
+ }
+ }
+ }
+
+ "unknown".to_string()
+}
+
/// Name of the currently checked-out git branch.
///
/// Uses `git branch --show-current`; on a detached HEAD (empty output) it
/// falls back to `git rev-parse --abbrev-ref HEAD`. Errors if git cannot
/// be run or both commands fail.
pub fn get_git_branch() -> Result<String, Box<dyn std::error::Error>> {
    let output = Command::new("git")
        .args(["branch", "--show-current"])
        .output()?;

    if output.status.success() {
        let branch = String::from_utf8(output.stdout)?.trim().to_string();
        if !branch.is_empty() {
            return Ok(branch);
        }

        // Detached HEAD: `--show-current` prints nothing, so ask for the
        // symbolic ref instead.
        let fallback = Command::new("git")
            .args(["rev-parse", "--abbrev-ref", "HEAD"])
            .output()?;
        if fallback.status.success() {
            let head_ref = String::from_utf8(fallback.stdout)?.trim().to_string();
            return Ok(head_ref);
        }
    }

    Err("Failed to get git branch".into())
}
+
/// Full SHA of the current git HEAD commit, or an error if git fails.
pub fn get_git_commit() -> Result<String, Box<dyn std::error::Error>> {
    let output = Command::new("git").args(["rev-parse", "HEAD"]).output()?;

    if !output.status.success() {
        return Err("Failed to get git commit".into());
    }

    Ok(String::from_utf8(output.stdout)?.trim().to_string())
}
diff --git a/gen/gen_commands_file.rs b/gen/gen_commands_file.rs
new file mode 100644
index 0000000..a6b7212
--- /dev/null
+++ b/gen/gen_commands_file.rs
@@ -0,0 +1,188 @@
+use std::path::PathBuf;
+
+use string_proc::pascal_case;
+
+use crate::r#gen::constants::{
+ COMMAND_LIST, COMMAND_LIST_TEMPLATE, COMMANDS_PATH, REGISTRY_TOML, TEMPLATE_END, TEMPLATE_START,
+};
+
/// Generate registry file from Registry.toml configuration
///
/// Builds `COMMAND_LIST` from two sources, concurrently:
/// 1. explicit `[cmd.*]` entries in `.cargo/registry.toml` (fields `node`
///    and `type`), and
/// 2. auto-discovered non-underscore `*.rs` files under `COMMANDS_PATH`,
///    mapped to `cmds::cmd::<file>::JV<Pascal>Command`.
///
/// The repeated section of the template (between TEMPLATE_START/END) is
/// instantiated once per command via `<<KEY>>`/`<<NODE_NAME>>`/
/// `<<COMMAND_TYPE>>` placeholders. Panics (via `unwrap`) on missing
/// files, malformed TOML, or missing markers — this runs at build time,
/// where a loud failure is intended.
pub async fn generate_commands_file(repo_root: &PathBuf) {
    let template_path = repo_root.join(COMMAND_LIST_TEMPLATE);
    let output_path = repo_root.join(COMMAND_LIST);
    let config_path = repo_root.join(REGISTRY_TOML);

    // Read the template
    let template = tokio::fs::read_to_string(&template_path).await.unwrap();

    // Read and parse the TOML configuration
    let config_content = tokio::fs::read_to_string(&config_path).await.unwrap();
    let config: toml::Value = toml::from_str(&config_content).unwrap();

    // Collect all command configurations
    let mut commands = Vec::new();
    let mut nodes = Vec::new();

    // Collect commands from registry.toml and COMMANDS_PATH in parallel
    let (registry_collected, auto_collected) = tokio::join!(
        async {
            // Source 1: explicit [cmd.*] tables from registry.toml.
            // Entries missing `node` or `type` are silently skipped.
            let mut commands = Vec::new();
            let mut nodes = Vec::new();

            let Some(table) = config.as_table() else {
                return (commands, nodes);
            };

            let Some(cmd_table_value) = table.get("cmd") else {
                return (commands, nodes);
            };

            let Some(cmd_table) = cmd_table_value.as_table() else {
                return (commands, nodes);
            };

            for (key, cmd_value) in cmd_table {
                let Some(cmd_config) = cmd_value.as_table() else {
                    continue;
                };

                let Some(node_value) = cmd_config.get("node") else {
                    continue;
                };

                let Some(node_str) = node_value.as_str() else {
                    continue;
                };

                let Some(cmd_type_value) = cmd_config.get("type") else {
                    continue;
                };

                let Some(cmd_type_str) = cmd_type_value.as_str() else {
                    continue;
                };

                // Dotted node paths become space-separated words.
                let n = node_str.replace(".", " ");
                nodes.push(n.clone());
                commands.push((key.to_string(), n, cmd_type_str.to_string()));
            }

            (commands, nodes)
        },
        async {
            // Source 2: auto-discovered command modules in COMMANDS_PATH.
            let mut commands = Vec::new();
            let mut nodes = Vec::new();
            let commands_dir = repo_root.join(COMMANDS_PATH);
            if commands_dir.exists() && commands_dir.is_dir() {
                let mut entries = tokio::fs::read_dir(&commands_dir).await.unwrap();
                while let Some(entry) = entries.next_entry().await.unwrap() {
                    let path = entry.path();

                    if !path.is_file() {
                        continue;
                    }

                    let extension = match path.extension() {
                        Some(ext) => ext,
                        None => continue,
                    };

                    if extension != "rs" {
                        continue;
                    }

                    let file_name = match path.file_stem().and_then(|s| s.to_str()) {
                        Some(name) => name,
                        None => continue,
                    };

                    // Skip files that start with underscore
                    if file_name.starts_with('_') {
                        continue;
                    }

                    // Convert filename to PascalCase
                    let pascal_name = pascal_case!(file_name);

                    let key = file_name.to_string();
                    let node = file_name.replace(".", " ").replace("_", " ");
                    // Command type follows the JV<Pascal>Command naming
                    // convention within the command module.
                    let cmd_type = format!("cmds::cmd::{}::JV{}Command", file_name, pascal_name);

                    nodes.push(node.clone());
                    commands.push((key, node, cmd_type));
                }
            }
            (commands, nodes)
        }
    );

    // Combine the results
    let (mut registry_commands, mut registry_nodes) = registry_collected;
    let (mut auto_commands, mut auto_nodes) = auto_collected;

    commands.append(&mut registry_commands);
    commands.append(&mut auto_commands);
    nodes.append(&mut registry_nodes);
    nodes.append(&mut auto_nodes);

    // Extract the node_if template from the template content
    const PROCESS_MARKER: &str = "// PROCESS";
    const LINE: &str = "<<LINE>>";
    const NODES: &str = "<<NODES>>";

    let template_start_index = template
        .find(TEMPLATE_START)
        .ok_or("Template start marker not found")
        .unwrap();
    let template_end_index = template
        .find(TEMPLATE_END)
        .ok_or("Template end marker not found")
        .unwrap();

    let template_slice = &template[template_start_index..template_end_index + TEMPLATE_END.len()];
    let node_if_template = template_slice
        .trim_start_matches(TEMPLATE_START)
        .trim_end_matches(TEMPLATE_END)
        .trim_matches('\n');

    // Generate the match arms for each command
    let match_arms: String = commands
        .iter()
        .map(|(key, node, cmd_type)| {
            node_if_template
                .replace("<<KEY>>", key)
                .replace("<<NODE_NAME>>", node)
                .replace("<<COMMAND_TYPE>>", cmd_type)
                .trim_matches('\n')
                .to_string()
        })
        .collect::<Vec<_>>()
        .join("\n");

    // Render the node list as a Rust array literal of owned strings.
    let nodes_str = format!(
        "[\n        {}\n    ]",
        nodes
            .iter()
            .map(|node| format!("\"{}\".to_string()", node))
            .collect::<Vec<_>>()
            .join(", ")
    );

    // Replace the template section with the generated match arms
    // (the original template section is removed, empty lines stripped,
    // then the remaining placeholders substituted).
    let final_content = template
        .replace(node_if_template, "")
        .replace(TEMPLATE_START, "")
        .replace(TEMPLATE_END, "")
        .replace(PROCESS_MARKER, &match_arms)
        .lines()
        .filter(|line| !line.trim().is_empty())
        .collect::<Vec<_>>()
        .join("\n")
        .replace(LINE, "")
        .replace(NODES, nodes_str.as_str());

    // Write the generated code
    tokio::fs::write(output_path, final_content).await.unwrap();

    println!("Generated registry file with {} commands", commands.len());
}
diff --git a/gen/gen_compile_info.rs b/gen/gen_compile_info.rs
new file mode 100644
index 0000000..5af030c
--- /dev/null
+++ b/gen/gen_compile_info.rs
@@ -0,0 +1,37 @@
+use std::path::PathBuf;
+
+use crate::r#gen::{
+ constants::{COMPILE_INFO_RS, COMPILE_INFO_RS_TEMPLATE},
+ env::{get_git_branch, get_git_commit, get_platform, get_toolchain, get_version},
+};
+
+/// Generate compile info
+pub async fn generate_compile_info(repo_root: &PathBuf) {
+ // Read the template code
+ let template_code = tokio::fs::read_to_string(repo_root.join(COMPILE_INFO_RS_TEMPLATE))
+ .await
+ .unwrap();
+
+ let date = chrono::Local::now().format("%Y-%m-%d %H:%M:%S").to_string();
+ let target = std::env::var("TARGET").unwrap_or_else(|_| "unknown".to_string());
+ let platform = get_platform(&target);
+ let toolchain = get_toolchain();
+ let version = get_version();
+ let branch = get_git_branch().unwrap_or_else(|_| "unknown".to_string());
+ let commit = get_git_commit().unwrap_or_else(|_| "unknown".to_string());
+
+ let generated_code = template_code
+ .replace("{date}", &date)
+ .replace("{target}", &target)
+ .replace("{platform}", &platform)
+ .replace("{toolchain}", &toolchain)
+ .replace("{version}", &version)
+ .replace("{branch}", &branch)
+ .replace("{commit}", &commit);
+
+ // Write the generated code
+ let compile_info_path = repo_root.join(COMPILE_INFO_RS);
+ tokio::fs::write(compile_info_path, generated_code)
+ .await
+ .unwrap();
+}
diff --git a/gen/gen_iscc_script.rs b/gen/gen_iscc_script.rs
new file mode 100644
index 0000000..1eddcca
--- /dev/null
+++ b/gen/gen_iscc_script.rs
@@ -0,0 +1,25 @@
+use std::path::PathBuf;
+
+use crate::r#gen::{
+ constants::{SETUP_JV_CLI_ISS, SETUP_JV_CLI_ISS_TEMPLATE},
+ env::{get_author, get_site, get_version},
+};
+
+/// Generate Inno Setup installer script (Windows only)
+pub async fn generate_installer_script(repo_root: &PathBuf) {
+ let template_path = repo_root.join(SETUP_JV_CLI_ISS_TEMPLATE);
+ let output_path = repo_root.join(SETUP_JV_CLI_ISS);
+
+ let template = tokio::fs::read_to_string(&template_path).await.unwrap();
+
+ let author = get_author().unwrap();
+ let version = get_version();
+ let site = get_site().unwrap();
+
+ let generated = template
+ .replace("<<<AUTHOR>>>", &author)
+ .replace("<<<VERSION>>>", &version)
+ .replace("<<<SITE>>>", &site);
+
+ tokio::fs::write(output_path, generated).await.unwrap();
+}
diff --git a/gen/gen_mod_files.rs b/gen/gen_mod_files.rs
new file mode 100644
index 0000000..6e44eac
--- /dev/null
+++ b/gen/gen_mod_files.rs
@@ -0,0 +1,96 @@
+use std::path::PathBuf;
+
+use crate::r#gen::constants::REGISTRY_TOML;
+
/// Generate collect files from directory structure
///
/// For every `[collect.*]` entry in `.cargo/registry.toml` with a `path`
/// field (e.g. `path = "src/renderers.rs"`), scans the sibling directory
/// of the same stem (`src/renderers/`), and writes a module file
/// containing one `pub mod <name>;` per non-underscore `.rs` file found.
/// Panics (via `unwrap`) on unreadable config or output paths — this is a
/// build-time generator.
pub async fn generate_collect_files(repo_root: &PathBuf) {
    // Read and parse the TOML configuration
    let config_path = repo_root.join(REGISTRY_TOML);
    let config_content = tokio::fs::read_to_string(&config_path).await.unwrap();
    let config: toml::Value = toml::from_str(&config_content).unwrap();

    // Process each collect configuration
    let collect_table = config.get("collect").and_then(|v| v.as_table());

    // No [collect] section means nothing to generate.
    let collect_table = match collect_table {
        Some(table) => table,
        None => return,
    };

    for (_collect_name, collect_config) in collect_table {
        let config_table = match collect_config.as_table() {
            Some(table) => table,
            None => continue,
        };

        // Entries without a string `path` are skipped.
        let path_str = match config_table.get("path").and_then(|v| v.as_str()) {
            Some(path) => path,
            None => continue,
        };

        let output_path = repo_root.join(path_str);

        // Extract directory name from the path (e.g. "src/renderers.rs" -> "renderers")
        let dir_name = match output_path.file_stem().and_then(|s| s.to_str()) {
            Some(name) => name.to_string(),
            None => continue,
        };

        // Get the directory path for this collect type
        // e.g. for "src/renderers.rs", we want "src/renderers/"
        let output_parent = output_path.parent().unwrap_or_else(|| repo_root.as_path());
        let dir_path = output_parent.join(&dir_name);

        // Collect all .rs files in the directory (excluding the output file itself)
        let mut modules = Vec::new();

        if dir_path.exists() && dir_path.is_dir() {
            // NOTE(review): synchronous std::fs inside an async fn — fine
            // for a build script, but blocks the executor thread.
            for entry in std::fs::read_dir(&dir_path).unwrap() {
                let entry = entry.unwrap();
                let path = entry.path();

                if !path.is_file() {
                    continue;
                }

                let extension = match path.extension() {
                    Some(ext) => ext,
                    None => continue,
                };

                if extension != "rs" {
                    continue;
                }

                let file_name = match path.file_stem().and_then(|s| s.to_str()) {
                    Some(name) => name,
                    None => continue,
                };

                // Skip files that start with underscore
                if !file_name.starts_with('_') {
                    modules.push(file_name.to_string());
                }
            }
        }

        // Sort modules alphabetically (deterministic output regardless of
        // filesystem iteration order).
        modules.sort();

        // Generate the content
        let mut content = String::new();
        for module in &modules {
            content.push_str(&format!("pub mod {};\n", module));
        }

        // Write the file
        tokio::fs::write(&output_path, content).await.unwrap();

        println!(
            "Generated {} with {} modules: {:?}",
            path_str,
            modules.len(),
            modules
        );
    }
}
diff --git a/gen/gen_override_renderer.rs b/gen/gen_override_renderer.rs
new file mode 100644
index 0000000..2ac97bd
--- /dev/null
+++ b/gen/gen_override_renderer.rs
@@ -0,0 +1,188 @@
+use std::{collections::HashSet, path::PathBuf};
+
+use regex::Regex;
+use tokio::fs;
+
+use crate::r#gen::{
+ constants::{
+ COMMANDS_PATH, OVERRIDE_RENDERER_ENTRY, OVERRIDE_RENDERER_ENTRY_TEMPLATE, TEMPLATE_END,
+ TEMPLATE_START,
+ },
+ resolve_types::resolve_type_paths,
+};
+
/// Generate the override-renderer entry file.
///
/// Scans every command source under `COMMANDS_PATH` for `cmd_output!`
/// output types (see `collect_all_possible_types`), then instantiates the
/// template section between TEMPLATE_START/END once per type — replacing
/// `JVOutputTypeName` with the bare type name and `JVOutputType` with the
/// full path — and splices the result over the `// MATCHING` marker.
/// Panics (via `unwrap`) on missing template or markers.
pub async fn generate_override_renderer(repo_root: &PathBuf) {
    let template_path = repo_root.join(OVERRIDE_RENDERER_ENTRY_TEMPLATE);
    let output_path = repo_root.join(OVERRIDE_RENDERER_ENTRY);
    // NOTE(review): uses COMMANDS_PATH relative to the CWD, not repo_root —
    // assumes the generator runs from the repo root; verify against caller.
    let all_possible_types = collect_all_possible_types(&PathBuf::from(COMMANDS_PATH)).await;

    // Read the template
    let template = tokio::fs::read_to_string(&template_path).await.unwrap();

    // Extract the template section from the template content
    const MATCH_MARKER: &str = "// MATCHING";

    let template_start_index = template
        .find(TEMPLATE_START)
        .ok_or("Template start marker not found")
        .unwrap();
    let template_end_index = template
        .find(TEMPLATE_END)
        .ok_or("Template end marker not found")
        .unwrap();

    let template_slice = &template[template_start_index..template_end_index + TEMPLATE_END.len()];
    let renderer_template = template_slice
        .trim_start_matches(TEMPLATE_START)
        .trim_end_matches(TEMPLATE_END)
        .trim_matches('\n');

    // Generate the match arms for each renderer
    let match_arms: String = all_possible_types
        .iter()
        .map(|type_name| {
            // Bare name (last path segment) vs. fully-qualified path.
            let name = type_name.split("::").last().unwrap_or(type_name);
            renderer_template
                .replace("JVOutputTypeName", name)
                .replace("JVOutputType", type_name)
                .trim_matches('\n')
                .to_string()
        })
        .collect::<Vec<String>>()
        .join("\n");

    // Replace the template section with the generated match arms
    let final_content = template
        .replace(renderer_template, "")
        .replace(TEMPLATE_START, "")
        .replace(TEMPLATE_END, "")
        .replace(MATCH_MARKER, &match_arms)
        .lines()
        .filter(|line| !line.trim().is_empty())
        .collect::<Vec<_>>()
        .join("\n");

    // Write the generated code
    tokio::fs::write(output_path, final_content).await.unwrap();
}
+
+pub async fn collect_all_possible_types(dir: &PathBuf) -> HashSet<String> {
+ let mut all_types = HashSet::new();
+ let mut dirs_to_visit = vec![dir.clone()];
+
+ while let Some(current_dir) = dirs_to_visit.pop() {
+ let entries_result = fs::read_dir(&current_dir).await;
+ if entries_result.is_err() {
+ continue;
+ }
+
+ let mut entries = entries_result.unwrap();
+
+ loop {
+ let entry_result = entries.next_entry().await;
+ if entry_result.is_err() {
+ break;
+ }
+
+ let entry_opt = entry_result.unwrap();
+ if entry_opt.is_none() {
+ break;
+ }
+
+ let entry = entry_opt.unwrap();
+ let path = entry.path();
+
+ if path.is_dir() {
+ dirs_to_visit.push(path);
+ continue;
+ }
+
+ let is_rs_file = path.extension().map(|ext| ext == "rs").unwrap_or(false);
+
+ if !is_rs_file {
+ continue;
+ }
+
+ let code_result = fs::read_to_string(&path).await;
+ if code_result.is_err() {
+ continue;
+ }
+
+ let code = code_result.unwrap();
+ let types_opt = resolve_type_paths(&code, get_output_types(&code).unwrap());
+
+ if let Some(types) = types_opt {
+ for type_name in types {
+ all_types.insert(type_name);
+ }
+ }
+ }
+ }
+
+ all_types
+}
+
+pub fn get_output_types(code: &String) -> Option<Vec<String>> {
+ let mut output_types = Vec::new();
+
+ // Find all cmd_output! macros
+ let cmd_output_re = Regex::new(r"cmd_output!\s*\(\s*[^,]+,\s*([^)]+)\s*\)").ok()?;
+ for cap in cmd_output_re.captures_iter(code) {
+ let type_name = cap[1].trim();
+ output_types.push(type_name.to_string());
+ }
+
+ Some(output_types)
+}
+
#[cfg(test)]
mod tests {
    use super::*;

    // End-to-end check of the extraction pipeline: `cmd_output!` type
    // tokens are found in source order, then only those with a matching
    // `use` statement resolve to full paths (NotExist is dropped).
    #[test]
    fn test_get_output_types() {
        const SITUATION: &str = "
        use crate::{
            cmd_output,
            cmds::out::{
                JVCustomOutput, JVCustomOutput2
            },
            systems::cmd::{
                cmd_system::JVCommandContext,
                errors::{CmdExecuteError, CmdPrepareError},
                workspace_reader::LocalWorkspaceReader,
            },
        };
        use cmd_system_macros::exec;
        use other::cmds::output::JVCustomOutputOutside;

        async fn exec() -> Result<(), CmdExecuteError> {
            cmd_output!(output, JVCustomOutput)
            cmd_output!(output, JVCustomOutput2)
            cmd_output!(output, JVCustomOutputNotExist)
            cmd_output!(output, JVCustomOutputOutside)
        }
        ";

        let result = get_output_types(&SITUATION.to_string());
        assert!(result.is_some(), "Parse failed");
        let result = result.unwrap();
        let expected = vec![
            "JVCustomOutput".to_string(),
            "JVCustomOutput2".to_string(),
            "JVCustomOutputNotExist".to_string(),
            "JVCustomOutputOutside".to_string(),
        ];
        assert_eq!(result, expected);

        // JVCustomOutputNotExist has no `use` entry, so resolution skips it.
        let result = resolve_type_paths(&SITUATION.to_string(), expected);
        assert!(result.is_some(), "Parse failed");
        let result = result.unwrap();
        let expected = vec![
            "crate::cmds::out::JVCustomOutput".to_string(),
            "crate::cmds::out::JVCustomOutput2".to_string(),
            "other::cmds::output::JVCustomOutputOutside".to_string(),
        ];
        assert_eq!(result, expected);
    }
}
diff --git a/gen/gen_renderers_file.rs b/gen/gen_renderers_file.rs
new file mode 100644
index 0000000..497d258
--- /dev/null
+++ b/gen/gen_renderers_file.rs
@@ -0,0 +1,97 @@
+use std::path::PathBuf;
+
+use crate::r#gen::constants::{
+ OVERRIDE_RENDERER_DISPATCHER, OVERRIDE_RENDERER_DISPATCHER_TEMPLATE, REGISTRY_TOML,
+ TEMPLATE_END, TEMPLATE_START,
+};
+
/// Generate renderer list file from Registry.toml configuration
///
/// Reads `[renderer.*]` entries (fields `name` and `type`) from
/// `.cargo/registry.toml`, instantiates the template section between
/// TEMPLATE_START/END once per renderer — replacing `<<NAME>>` and the
/// `RendererType` placeholder — and splices the result over the
/// `// MATCH` marker. Returns silently if the config has no renderer
/// table; panics (via `unwrap`) on missing files or markers.
pub async fn generate_renderers_file(repo_root: &PathBuf) {
    let template_path = repo_root.join(OVERRIDE_RENDERER_DISPATCHER_TEMPLATE);
    let output_path = repo_root.join(OVERRIDE_RENDERER_DISPATCHER);
    let config_path = repo_root.join(REGISTRY_TOML);

    // Read the template
    let template = tokio::fs::read_to_string(&template_path).await.unwrap();

    // Read and parse the TOML configuration
    let config_content = tokio::fs::read_to_string(&config_path).await.unwrap();
    let config: toml::Value = toml::from_str(&config_content).unwrap();

    // Collect all renderer configurations
    let mut renderers = Vec::new();

    let Some(table) = config.as_table() else {
        return;
    };
    let Some(renderer_table) = table.get("renderer") else {
        return;
    };
    let Some(renderer_table) = renderer_table.as_table() else {
        return;
    };

    // Entries missing `name` or `type` are silently skipped.
    for (_, renderer_value) in renderer_table {
        let Some(renderer_config) = renderer_value.as_table() else {
            continue;
        };
        let Some(name) = renderer_config.get("name").and_then(|v| v.as_str()) else {
            continue;
        };
        let Some(renderer_type) = renderer_config.get("type").and_then(|v| v.as_str()) else {
            continue;
        };

        renderers.push((name.to_string(), renderer_type.to_string()));
    }

    // Extract the template section from the template content
    const MATCH_MARKER: &str = "// MATCH";

    let template_start_index = template
        .find(TEMPLATE_START)
        .ok_or("Template start marker not found")
        .unwrap();
    let template_end_index = template
        .find(TEMPLATE_END)
        .ok_or("Template end marker not found")
        .unwrap();

    let template_slice = &template[template_start_index..template_end_index + TEMPLATE_END.len()];
    let renderer_template = template_slice
        .trim_start_matches(TEMPLATE_START)
        .trim_end_matches(TEMPLATE_END)
        .trim_matches('\n');

    // Generate the match arms for each renderer
    let match_arms: String = renderers
        .iter()
        .map(|(name, renderer_type)| {
            renderer_template
                .replace("<<NAME>>", name)
                .replace("RendererType", renderer_type)
                .trim_matches('\n')
                .to_string()
        })
        .collect::<Vec<String>>()
        .join("\n");

    // Replace the template section with the generated match arms
    let final_content = template
        .replace(renderer_template, "")
        .replace(TEMPLATE_START, "")
        .replace(TEMPLATE_END, "")
        .replace(MATCH_MARKER, &match_arms)
        .lines()
        .filter(|line| !line.trim().is_empty())
        .collect::<Vec<_>>()
        .join("\n");

    // Write the generated code
    tokio::fs::write(output_path, final_content).await.unwrap();

    println!(
        "Generated renderer list file with {} renderers",
        renderers.len()
    );
}
diff --git a/gen/gen_specific_renderer.rs b/gen/gen_specific_renderer.rs
new file mode 100644
index 0000000..0c66631
--- /dev/null
+++ b/gen/gen_specific_renderer.rs
@@ -0,0 +1,383 @@
+use std::{collections::HashMap, path::PathBuf};
+
+use regex::Regex;
+
+use crate::r#gen::{
+ constants::{
+ RENDERERS_PATH, SPECIFIC_RENDERER_MATCHING, SPECIFIC_RENDERER_MATCHING_TEMPLATE,
+ TEMPLATE_END, TEMPLATE_START,
+ },
+ resolve_types::resolve_type_paths,
+};
+
// Prefix for fully-qualified renderer type paths (renderers live in this crate).
const RENDERER_TYPE_PREFIX: &str = "crate::";
+
/// Generate the specific-renderer matching file.
///
/// Scans `RENDERERS_PATH` for files with a `#[result_renderer(..)]`
/// annotation, building a map from full renderer type path to full output
/// type path (see `collect_renderers`). The template section between
/// TEMPLATE_START/END is instantiated once per pair — replacing
/// `OutputTypeName` (bare name), `OutputType` (full path), and
/// `RendererType` — and spliced over the `// MATCHING` marker.
/// Iteration order follows the HashMap, so arm order is unspecified.
/// Panics (via `unwrap`) on missing template or markers.
pub async fn generate_specific_renderer(repo_root: &PathBuf) {
    // Matches
    // HashMap<RendererTypeFullName, OutputTypeFullName>
    let mut renderer_matches: HashMap<String, String> = HashMap::new();

    let renderer_path = repo_root.join(RENDERERS_PATH);

    collect_renderers(&renderer_path, &mut renderer_matches);

    let template_path = repo_root.join(SPECIFIC_RENDERER_MATCHING_TEMPLATE);
    let output_path = repo_root.join(SPECIFIC_RENDERER_MATCHING);

    // Read the template
    let template = tokio::fs::read_to_string(&template_path).await.unwrap();

    // Extract the template section from the template content
    const MATCH_MARKER: &str = "// MATCHING";

    let template_start_index = template
        .find(TEMPLATE_START)
        .ok_or("Template start marker not found")
        .unwrap();
    let template_end_index = template
        .find(TEMPLATE_END)
        .ok_or("Template end marker not found")
        .unwrap();

    let template_slice = &template[template_start_index..template_end_index + TEMPLATE_END.len()];
    let renderer_template = template_slice
        .trim_start_matches(TEMPLATE_START)
        .trim_end_matches(TEMPLATE_END)
        .trim_matches('\n');

    // Generate the match arms for each renderer
    let match_arms: String = renderer_matches
        .iter()
        .map(|(renderer, output)| {
            // Bare output name is the last path segment.
            let output_name = output.split("::").last().unwrap_or(output);
            renderer_template
                .replace("OutputTypeName", output_name)
                .replace("OutputType", output)
                .replace("RendererType", renderer)
                .trim_matches('\n')
                .to_string()
        })
        .collect::<Vec<String>>()
        .join("\n");

    // Replace the template section with the generated match arms
    let final_content = template
        .replace(renderer_template, "")
        .replace(TEMPLATE_START, "")
        .replace(TEMPLATE_END, "")
        .replace(MATCH_MARKER, &match_arms)
        .lines()
        .filter(|line| !line.trim().is_empty())
        .collect::<Vec<_>>()
        .join("\n");

    // Write the generated code
    tokio::fs::write(output_path, final_content).await.unwrap();
}
+
+fn collect_renderers(dir_path: &PathBuf, matches: &mut HashMap<String, String>) {
+ if let Ok(entries) = std::fs::read_dir(dir_path) {
+ for entry in entries {
+ if let Ok(entry) = entry {
+ let path = entry.path();
+ if path.is_dir() {
+ collect_renderers(&path, matches);
+ } else if path.is_file() && path.extension().map_or(false, |ext| ext == "rs") {
+ process_rs_file(&path, matches);
+ }
+ }
+ }
+ }
+}
+
+fn process_rs_file(file_path: &PathBuf, matches: &mut HashMap<String, String>) {
+ let content = match std::fs::read_to_string(file_path) {
+ Ok(content) => content,
+ Err(_) => return,
+ };
+
+ let renderer_info = match get_renderer_types(&content) {
+ Some(info) => info,
+ None => return,
+ };
+
+ let (renderer_type, output_type) = renderer_info;
+
+ let full_renderer_type = build_full_renderer_type(file_path, &renderer_type);
+ let full_output_type = resolve_type_paths(&content, vec![output_type])
+ .unwrap()
+ .get(0)
+ .unwrap()
+ .clone();
+
+ matches.insert(full_renderer_type, full_output_type);
+}
+
+fn build_full_renderer_type(file_path: &PathBuf, renderer_type: &str) -> String {
+ let relative_path = file_path
+ .strip_prefix(std::env::current_dir().unwrap())
+ .unwrap_or(file_path);
+ let relative_path = relative_path.with_extension("");
+ let path_str = relative_path.to_string_lossy();
+
+ // Normalize path separators and remove "./" prefix if present
+ let normalized_path = path_str
+ .replace('\\', "/")
+ .trim_start_matches("./")
+ .to_string();
+
+ let mut module_path = normalized_path.split('/').collect::<Vec<&str>>().join("::");
+
+ if module_path.starts_with("src") {
+ module_path = module_path.trim_start_matches("src").to_string();
+ if module_path.starts_with("::") {
+ module_path = module_path.trim_start_matches("::").to_string();
+ }
+ }
+
+ format!("{}{}::{}", RENDERER_TYPE_PREFIX, module_path, renderer_type)
+}
+
+pub fn get_renderer_types(code: &String) -> Option<(String, String)> {
+ let renderer_re = Regex::new(r"#\[result_renderer\(([^)]+)\)\]").unwrap();
+
+ let func_re =
+ Regex::new(r"(?:pub\s+)?(?:async\s+)?fn\s+\w+\s*\(\s*(?:mut\s+)?\w+\s*:\s*&([^),]+)\s*")
+ .unwrap();
+
+ let code_without_comments = code
+ .lines()
+ .filter(|line| !line.trim_start().starts_with("//"))
+ .collect::<Vec<&str>>()
+ .join("\n");
+
+ let renderer_captures = renderer_re.captures(&code_without_comments);
+ let func_captures = func_re.captures(&code_without_comments);
+
+ match (renderer_captures, func_captures) {
+ (Some(renderer_cap), Some(func_cap)) => {
+ let renderer_type = renderer_cap[1].trim().to_string();
+ let output_type = func_cap[1].trim().to_string();
+ Some((renderer_type, output_type))
+ }
+ _ => None,
+ }
+}
+
#[cfg(test)]
mod tests {
    // Each test feeds a small source snippet to `get_renderer_types` and
    // checks the extracted (renderer, output) pair; negative tests cover
    // a mismatched annotation and a parameterless function.
    use super::*;

    #[test]
    fn test1() {
        const SITUATION: &str = "
        #[result_renderer(MyRenderer)]
        pub async fn render(data: &SomeOutput) -> Result<JVRenderResult, CmdRenderError>
        ";

        let result = get_renderer_types(&SITUATION.to_string());
        assert!(result.is_some(), "Parse failed");
        let (renderer, output) = result.unwrap();
        assert_eq!(renderer, "MyRenderer");
        assert_eq!(output, "SomeOutput");
    }

    #[test]
    fn test2() {
        const SITUATION: &str = "
        #[result_renderer(MyRenderer)]
        pub async fn some_render(output: &SomeOutput) -> Result<JVRenderResult, CmdRenderError>
        ";

        let result = get_renderer_types(&SITUATION.to_string());
        assert!(result.is_some(), "Parse failed");
        let (renderer, output) = result.unwrap();
        assert_eq!(renderer, "MyRenderer");
        assert_eq!(output, "SomeOutput");
    }

    #[test]
    fn test3() {
        const SITUATION: &str = "
        #[result_renderer(MyRenderer)]
        async fn some_render(output: &SomeOutput) -> Result<JVRenderResult, CmdRenderError>
        ";

        let result = get_renderer_types(&SITUATION.to_string());
        assert!(result.is_some(), "Parse failed");
        let (renderer, output) = result.unwrap();
        assert_eq!(renderer, "MyRenderer");
        assert_eq!(output, "SomeOutput");
    }

    // Note: `async pub fn` is not valid Rust, but the regex still
    // extracts the parameter type from the text.
    #[test]
    fn test4() {
        const SITUATION: &str = "
        #[result_renderer(MyRenderer)]
        async pub fn some_render(output: &SomeOutput) -> Result<JVRenderResult, CmdRenderError>
        ";

        let result = get_renderer_types(&SITUATION.to_string());
        assert!(result.is_some(), "Parse failed");
        let (renderer, output) = result.unwrap();
        assert_eq!(renderer, "MyRenderer");
        assert_eq!(output, "SomeOutput");
    }

    #[test]
    fn test5() {
        const SITUATION: &str = "
        #[result_renderer(MyRenderer)]
        fn some_render(output: &SomeOutput2) -> Result<JVRenderResult, CmdRenderError>
        ";

        let result = get_renderer_types(&SITUATION.to_string());
        assert!(result.is_some(), "Parse failed");
        let (renderer, output) = result.unwrap();
        assert_eq!(renderer, "MyRenderer");
        assert_eq!(output, "SomeOutput2");
    }

    #[test]
    fn test6() {
        const SITUATION: &str = "
        #[result__renderer(MyRenderer)]
        fn some_render(output: &SomeOutput2) -> Result<JVRenderResult, CmdRenderError>
        ";

        let result = get_renderer_types(&SITUATION.to_string());
        assert!(
            result.is_none(),
            "Should fail to parse when annotation doesn't match"
        );
    }

    #[test]
    fn test7() {
        const SITUATION: &str = "
        #[result_renderer(MyRenderer)]
        fn some_render() -> Result<JVRenderResult, CmdRenderError>
        ";

        let result = get_renderer_types(&SITUATION.to_string());
        assert!(
            result.is_none(),
            "Should fail to parse when no function parameter"
        );
    }

    // Only the first `&T` parameter is treated as the output type.
    #[test]
    fn test8() {
        const SITUATION: &str = "
        #[result_renderer(MyRenderer)]
        fn some_render(output: &SomeOutput, context: &Context) -> Result<JVRenderResult, CmdRenderError>
        ";

        let result = get_renderer_types(&SITUATION.to_string());
        assert!(result.is_some(), "Parse failed");
        let (renderer, output) = result.unwrap();
        assert_eq!(renderer, "MyRenderer");
        assert_eq!(output, "SomeOutput");
    }

    // Generic parameters are preserved verbatim.
    #[test]
    fn test9() {
        const SITUATION: &str = "
        #[result_renderer(MyRenderer<T>)]
        fn some_render(output: &SomeOutput<T>) -> Result<JVRenderResult, CmdRenderError>
        ";

        let result = get_renderer_types(&SITUATION.to_string());
        assert!(result.is_some(), "Parse failed");
        let (renderer, output) = result.unwrap();
        assert_eq!(renderer, "MyRenderer<T>");
        assert_eq!(output, "SomeOutput<T>");
    }

    // Lifetimes are preserved verbatim too.
    #[test]
    fn test10() {
        const SITUATION: &str = "
        #[result_renderer(MyRenderer<'a>)]
        fn some_render(output: &SomeOutput<'a>) -> Result<JVRenderResult, CmdRenderError>
        ";

        let result = get_renderer_types(&SITUATION.to_string());
        assert!(result.is_some(), "Parse failed");
        let (renderer, output) = result.unwrap();
        assert_eq!(renderer, "MyRenderer<'a>");
        assert_eq!(output, "SomeOutput<'a>");
    }

    // Extra whitespace around tokens is trimmed.
    #[test]
    fn test11() {
        const SITUATION: &str = "
        #[result_renderer( MyRenderer )]
        fn some_render( output : & SomeOutput ) -> Result<JVRenderResult, CmdRenderError>
        ";

        let result = get_renderer_types(&SITUATION.to_string());
        assert!(result.is_some(), "Parse failed");
        let (renderer, output) = result.unwrap();
        assert_eq!(renderer, "MyRenderer");
        assert_eq!(output, "SomeOutput");
    }

    #[test]
    fn test12() {
        const SITUATION: &str = "
        #[result_renderer(AnotherRenderer)]
        fn some_render(output: &DifferentOutput) -> Result<JVRenderResult, CmdRenderError>
        ";

        let result = get_renderer_types(&SITUATION.to_string());
        assert!(result.is_some(), "Parse failed");
        let (renderer, output) = result.unwrap();
        assert_eq!(renderer, "AnotherRenderer");
        assert_eq!(output, "DifferentOutput");
    }

    // Commented-out annotations must be ignored.
    #[test]
    fn test13() {
        const SITUATION: &str = "
        // #[result_renderer(WrongRenderer)]
        #[result_renderer(CorrectRenderer)]
        fn some_render(output: &CorrectOutput) -> Result<JVRenderResult, CmdRenderError>
        ";

        let result = get_renderer_types(&SITUATION.to_string());
        assert!(result.is_some(), "Parse failed");
        let (renderer, output) = result.unwrap();
        assert_eq!(renderer, "CorrectRenderer");
        assert_eq!(output, "CorrectOutput");
    }

    // Signatures spread over multiple lines still match.
    #[test]
    fn test14() {
        const SITUATION: &str = "
        #[result_renderer(MultiLineRenderer)]
        fn some_render(
            output: &MultiLineOutput
        ) -> Result<JVRenderResult, CmdRenderError>
        ";

        let result = get_renderer_types(&SITUATION.to_string());
        assert!(result.is_some(), "Parse failed");
        let (renderer, output) = result.unwrap();
        assert_eq!(renderer, "MultiLineRenderer");
        assert_eq!(output, "MultiLineOutput");
    }

    // A `mut` binding on the parameter is tolerated.
    #[test]
    fn test15() {
        const SITUATION: &str = "
        #[result_renderer(MutRenderer)]
        fn some_render(mut output: &MutOutput) -> Result<JVRenderResult, CmdRenderError>
        ";

        let result = get_renderer_types(&SITUATION.to_string());
        assert!(result.is_some(), "Parse failed");
        let (renderer, output) = result.unwrap();
        assert_eq!(renderer, "MutRenderer");
        assert_eq!(output, "MutOutput");
    }
}
diff --git a/gen/resolve_types.rs b/gen/resolve_types.rs
new file mode 100644
index 0000000..6079abc
--- /dev/null
+++ b/gen/resolve_types.rs
@@ -0,0 +1,114 @@
+use regex::Regex;
+
/// Maps bare type names to their fully-qualified paths by scanning the
/// `use` declarations found in `code`.
///
/// `type_names` are resolved in order; names that no `use` statement brings
/// into scope are silently skipped, so the result may be shorter than the
/// input. The return is always `Some(...)` today, but `Option` is kept so
/// existing callers that `?` the result keep compiling.
///
/// Note: takes `&str` (any `&String` argument still coerces), and `use`
/// statements are located with a word-boundary check, so identifiers that
/// merely end in "use" (e.g. `_abuse`) are not misparsed — the previous
/// regex `use\s+` had no such boundary.
pub fn resolve_type_paths(code: &str, type_names: Vec<String>) -> Option<Vec<String>> {
    let mut type_mappings = std::collections::HashMap::new();

    // Build name -> full-path mappings from every `use ...;` statement.
    for stmt in extract_use_statements(code) {
        let stmt = stmt.trim();

        if let Some(pos) = stmt.find("::{") {
            // Grouped import: `path::{A, sub::{B, C}}`.
            let base_path = &stmt[..pos];
            let content = &stmt[pos + 3..stmt.len() - 1];
            process_nested_use(base_path, content, &mut type_mappings);
        } else if let Some(pos) = stmt.rfind("::") {
            // Plain import: the last segment is the name brought into scope.
            type_mappings.insert(stmt[pos + 2..].to_string(), stmt.to_string());
        } else {
            // Single-segment import, e.g. `use foo;`.
            type_mappings.insert(stmt.to_string(), stmt.to_string());
        }
    }

    // Resolve the requested names, dropping those we never saw imported.
    let resolved = type_names
        .into_iter()
        .filter_map(|name| type_mappings.get(&name).cloned())
        .collect();

    Some(resolved)
}

/// Collects the body of every `use ...;` statement in `code`: the text
/// between the `use` keyword and the terminating `;`, trimmed. A `use`
/// token embedded in a longer identifier is ignored; an unterminated
/// statement ends the scan.
fn extract_use_statements(code: &str) -> Vec<String> {
    let mut statements = Vec::new();
    let mut rest = code;

    while let Some(idx) = rest.find("use") {
        // Word boundary before the keyword...
        let boundary_before = rest[..idx]
            .chars()
            .next_back()
            .map_or(true, |c| !c.is_alphanumeric() && c != '_');
        // ...and mandatory whitespace after it (mirrors `use\s+`).
        let after = &rest[idx + 3..];
        let whitespace_after = after.starts_with(|c: char| c.is_whitespace());

        if boundary_before && whitespace_after {
            match after.find(';') {
                Some(end) => {
                    statements.push(after[..end].trim().to_string());
                    rest = &after[end + 1..];
                }
                // No terminator: nothing more to collect.
                None => break,
            }
        } else {
            // False hit inside an identifier; skip past it.
            rest = &rest[idx + 3..];
        }
    }

    statements
}

/// Expands the brace-grouped part of a `use` statement, inserting one
/// `name -> full path` mapping per imported item. `content` is the text
/// inside the braces of `base_path::{ ... }`; groups may nest arbitrarily.
fn process_nested_use(
    base_path: &str,
    content: &str,
    mappings: &mut std::collections::HashMap<String, String>,
) {
    // Split `content` on top-level commas only; commas inside nested `{}`
    // groups belong to the inner group.
    let mut items = Vec::new();
    let mut current_item = String::new();
    let mut brace_depth = 0;

    for c in content.chars() {
        match c {
            '{' => {
                brace_depth += 1;
                current_item.push(c);
            }
            '}' => {
                brace_depth -= 1;
                current_item.push(c);
            }
            ',' if brace_depth == 0 => {
                items.push(current_item.trim().to_string());
                current_item.clear();
            }
            _ => current_item.push(c),
        }
    }
    if !current_item.trim().is_empty() {
        items.push(current_item.trim().to_string());
    }

    for item in items {
        if item.is_empty() {
            continue;
        }

        if let Some(pos) = item.find("::{") {
            // Nested group: recurse with an extended base path.
            let sub_path = &item[..pos];
            let sub_content = &item[pos + 3..item.len() - 1];
            let new_base = if base_path.is_empty() {
                sub_path.to_string()
            } else {
                format!("{}::{}", base_path, sub_path)
            };
            process_nested_use(&new_base, sub_content, mappings);
        } else {
            // Leaf item: the last segment (or the item itself) is the name.
            let full_path = if base_path.is_empty() {
                item.to_string()
            } else {
                format!("{}::{}", base_path, item)
            };
            let name = match item.rfind("::") {
                Some(pos) => &item[pos + 2..],
                None => item.as_str(),
            };
            mappings.insert(name.to_string(), full_path);
        }
    }
}