Created
March 23, 2026 21:55
-
-
Save bitemyapp/77b6b1a85baae06c240de7bf4503c341 to your computer and use it in GitHub Desktop.
Simple Markdown LLM prompt scaffold generator
This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
| #!/usr/bin/env -S rust-script | |
| //! ```cargo | |
| //! [dependencies] | |
| //! anyhow = "1" | |
| //! clap = { version = "4.5", features = ["derive"] } | |
| //! ``` | |
| use std::fs; | |
| use std::io::{self, Write}; | |
| use std::path::PathBuf; | |
| use anyhow::{Context, Result}; | |
| use clap::Parser; | |
// Command-line interface, parsed by clap's derive API.
//
// NOTE(review): the `///` doc comments below are not just documentation —
// clap renders them as the per-flag help text in `--help` output, so they
// are runtime-visible strings and must not be reworded casually.
#[derive(Debug, Parser)]
#[command(
    name = "generate-llm-prompt",
    version,
    about = "Generate a markdown template for LLM prompts with source file sections"
)]
struct Args {
    /// Source files to include in the template
    // `required = true`: clap itself rejects an empty invocation with a
    // usage error, so `files` is guaranteed non-empty inside `main`.
    #[arg(required = true)]
    files: Vec<PathBuf>,
    /// Output file (defaults to stdout)
    // `short, long` derives `-o` / `--output` from the field name.
    #[arg(short, long)]
    output: Option<PathBuf>,
    /// Title for the markdown document
    // Used verbatim as the top-level `# …` markdown heading.
    #[arg(short, long, default_value = "LLM Prompt")]
    title: String,
    /// Include full file paths instead of just filenames
    // Boolean flag (`--full-paths`): affects only section headings,
    // not which file contents are read.
    #[arg(long)]
    full_paths: bool,
}
| fn main() -> Result<()> { | |
| let args = Args::parse(); | |
| if args.files.is_empty() { | |
| eprintln!("Error: No source files provided"); | |
| std::process::exit(1); | |
| } | |
| let mut output: Box<dyn Write> = if let Some(path) = &args.output { | |
| Box::new( | |
| fs::File::create(path) | |
| .with_context(|| format!("Failed to create output file: {}", path.display()))?, | |
| ) | |
| } else { | |
| Box::new(io::stdout()) | |
| }; | |
| // Write title | |
| writeln!(output, "# {}", args.title)?; | |
| writeln!(output)?; | |
| // Write prompt section | |
| writeln!(output, "<!-- Write your prompt here -->")?; | |
| writeln!(output)?; | |
| writeln!(output)?; | |
| writeln!(output)?; | |
| // Write source file sections | |
| for file_path in &args.files { | |
| let display_name = if args.full_paths { | |
| file_path.display().to_string() | |
| } else { | |
| file_path | |
| .file_name() | |
| .map(|n| n.to_string_lossy().to_string()) | |
| .unwrap_or_else(|| file_path.display().to_string()) | |
| }; | |
| writeln!(output, "## {}", display_name)?; | |
| writeln!(output)?; | |
| writeln!(output, "```")?; | |
| // Read and include file contents | |
| match fs::read_to_string(file_path) { | |
| Ok(contents) => { | |
| write!(output, "{}", contents)?; | |
| // Ensure there's a newline at the end | |
| if !contents.ends_with('\n') { | |
| writeln!(output)?; | |
| } | |
| } | |
| Err(e) => { | |
| writeln!(output, "// Error reading file: {}", e)?; | |
| } | |
| } | |
| writeln!(output, "```")?; | |
| writeln!(output)?; | |
| } | |
| if let Some(path) = &args.output { | |
| eprintln!("Generated LLM prompt template: {}", path.display()); | |
| } | |
| Ok(()) | |
| } |
Sign up for free
to join this conversation on GitHub.
Already have an account?
Sign in to comment