pub async fn enhance_prompt_with_llm(
prompt: &str,
config: &AppConfig,
context: Option<PromptContext>,
) -> Result<String, AppError>
Enhance a user prompt using the configured LLM provider
This function takes a user’s prompt and uses an LLM to improve it by:
- Adding relevant context and specificity
- Structuring complex requests into clear steps
- Including success criteria when appropriate
- Maintaining the user’s original intent and tone
- Leveraging provided context (session history, files, project info)
# Arguments
- `prompt` - The original user prompt to enhance
- `config` - Application configuration (for LLM provider selection)
- `context` - Optional context information (session history, files, etc.)
# Returns
Returns the enhanced prompt as a String, or an error if enhancement fails.
# Example
use gestura_core::prompt_enhancement::{enhance_prompt_with_llm, PromptContext};
use gestura_core::config::AppConfig;
use gestura_core::AppConfigSecurityExt;
let config = AppConfig::load_async().await;
let original = "fix the bug";
// Without context
let enhanced = enhance_prompt_with_llm(original, &config, None).await?;
// With context
let context = PromptContext::new()
.with_session_history(vec![
("user".to_string(), "I'm working on the login feature".to_string()),
("assistant".to_string(), "I can help with that".to_string()),
]);
let enhanced = enhance_prompt_with_llm(original, &config, Some(context)).await?;
println!("Enhanced: {}", enhanced);