gestura_core/
llm_overrides.rs

//! Session-scoped LLM override resolution helpers.
//!
//! This module centralizes the business logic for applying a per-session provider/model
//! override (see [`crate::agent_sessions::SessionLlmConfig`]) to an in-memory [`crate::config::AppConfig`].
//!
//! Goals:
//! - Keep GUI/CLI thin (they provide session data + platform-specific secret lookup).
//! - Ensure provider/model precedence and compatibility checks are consistent.

10use crate::agent_sessions::{AgentSession, SessionLlmConfig};
11use crate::config::{
12    AnthropicConfig, AppConfig, GeminiConfig, GrokConfig, OllamaConfig, OpenAiConfig,
13};
14use crate::config_env;
15use crate::llm_validation;
16
/// The effective provider/model after applying session overrides.
///
/// Returned by the `apply_*` helpers in this module so adapters (GUI/CLI) can
/// bind or preselect provider/model UI state without re-deriving precedence.
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct EffectiveLlmConfig {
    /// Effective provider id (e.g. `"openai"`, `"anthropic"`, `"ollama"`).
    pub provider: String,
    /// Effective model id for the provider.
    pub model: String,
}

26fn canonical_known_provider(provider: &str) -> Option<&'static str> {
27    let trimmed = provider.trim();
28    KNOWN_LLM_PROVIDERS
29        .iter()
30        .copied()
31        .find(|candidate| candidate.eq_ignore_ascii_case(trimmed))
32}
33
34fn provider_has_runtime_configuration(cfg: &AppConfig, provider: &str) -> bool {
35    match provider {
36        "openai" => cfg
37            .llm
38            .openai
39            .as_ref()
40            .is_some_and(|config| !config.api_key.trim().is_empty()),
41        "anthropic" => cfg
42            .llm
43            .anthropic
44            .as_ref()
45            .is_some_and(|config| !config.api_key.trim().is_empty()),
46        "grok" => cfg
47            .llm
48            .grok
49            .as_ref()
50            .is_some_and(|config| !config.api_key.trim().is_empty()),
51        "gemini" => cfg
52            .llm
53            .gemini
54            .as_ref()
55            .is_some_and(|config| !config.api_key.trim().is_empty()),
56        "ollama" => cfg.llm.ollama.is_some(),
57        _ => false,
58    }
59}
60
61fn fallback_primary_provider(cfg: &AppConfig) -> &'static str {
62    for provider in ["anthropic", "openai", "gemini", "grok"] {
63        if provider_has_runtime_configuration(cfg, provider) {
64            return provider;
65        }
66    }
67
68    if provider_has_runtime_configuration(cfg, "ollama") {
69        return "ollama";
70    }
71
72    "anthropic"
73}
74
75fn normalize_active_provider(cfg: &mut AppConfig) {
76    if let Some(provider) = canonical_known_provider(&cfg.llm.primary) {
77        cfg.llm.primary = provider.to_string();
78        return;
79    }
80
81    let invalid_provider = cfg.llm.primary.trim().to_string();
82    let fallback = fallback_primary_provider(cfg);
83    tracing::warn!(
84        provider = %invalid_provider,
85        fallback_provider = %fallback,
86        "Repairing invalid active LLM provider before resolving session overrides"
87    );
88    cfg.llm.primary = fallback.to_string();
89}
90
91/// Apply CLI-provided provider and/or model overrides to an in-memory config.
92///
93/// This is a convenience wrapper around [`apply_session_llm_overrides`] intended for
94/// thin adapters (CLI basic mode, CLI TUI) so they don't re-implement precedence,
95/// provider/model compatibility checks, or provider-config creation.
96///
97/// The inputs map directly to CLI flags:
98/// - `provider_arg`: `--provider <provider>` style argument (provider id)
99/// - `model_arg`: `--model <model>` style argument (model id)
100///
101/// This function does **not** persist any changes to disk.
102pub fn apply_cli_llm_overrides(
103    cfg: &mut AppConfig,
104    provider_arg: Option<&str>,
105    model_arg: Option<&str>,
106) -> EffectiveLlmConfig {
107    let provider = provider_arg.map(str::trim).filter(|s| !s.is_empty());
108    let model = model_arg.map(str::trim).filter(|s| !s.is_empty());
109
110    let session = SessionLlmConfig {
111        provider: provider.map(|s| s.to_string()),
112        model: model.map(|s| s.to_string()),
113    };
114
115    apply_cli_session_llm_overrides(cfg, Some(&session))
116}
117
118/// Apply a CLI `--model` argument to the config.
119///
120/// The CLI supports either:
121/// - `"provider:model"` (e.g. `"openai:gpt-4o"`) to override both provider and model
122/// - `"model"` (e.g. `"claude-3-5-sonnet-20241022"`) to override the model for the
123///   currently selected provider
124///
125/// This function does **not** persist any changes to disk.
126pub fn apply_cli_model_arg_overrides(
127    cfg: &mut AppConfig,
128    model_arg: Option<&str>,
129) -> EffectiveLlmConfig {
130    let session = model_arg.and_then(parse_cli_model_arg);
131    apply_cli_session_llm_overrides(cfg, session.as_ref())
132}
133
/// Apply a CLI `--provider` argument to the config.
///
/// Convenience delegate to [`apply_cli_llm_overrides`] with no model override;
/// blank/whitespace provider arguments are ignored by the delegate.
///
/// This function does **not** persist any changes to disk.
pub fn apply_cli_provider_arg_override(
    cfg: &mut AppConfig,
    provider_arg: Option<&str>,
) -> EffectiveLlmConfig {
    apply_cli_llm_overrides(cfg, provider_arg, None)
}

/// Apply session-scoped LLM overrides to an in-memory config and return the effective provider/model.
///
/// This function does **not** persist any changes to disk.
///
/// Behavior:
/// - If the session overrides the provider, set `cfg.llm.primary`.
/// - If the session overrides the model, apply it to the active provider's config.
/// - If the model override is obviously incompatible with the provider, ignore it and fall back.
///
/// `api_key_lookup` is adapter-supplied (GUI keychain, CLI env, etc.) and is only used when
/// we need to create a provider config to attach a model override.
pub fn apply_session_llm_overrides(
    cfg: &mut AppConfig,
    session_llm: Option<&SessionLlmConfig>,
    api_key_lookup: impl Fn(&str) -> Option<String>,
) -> EffectiveLlmConfig {
    // Repair an invalid `cfg.llm.primary` first so every check below compares
    // against a known, canonical provider id.
    normalize_active_provider(cfg);

    if let Some(session_llm) = session_llm {
        // Provider override: only a known provider may replace the primary;
        // anything else is logged and ignored.
        if let Some(provider) = session_llm.provider.as_deref().map(str::trim)
            && !provider.is_empty()
        {
            if let Some(provider) = canonical_known_provider(provider) {
                cfg.llm.primary = provider.to_string();
            } else {
                tracing::warn!(
                    provider = %provider,
                    "Ignoring invalid session-scoped LLM provider override"
                );
            }
        }

        // Model override: validated against the (possibly just-overridden)
        // active provider, so order relative to the provider branch matters.
        if let Some(model) = session_llm.model.as_deref().map(str::trim)
            && !model.is_empty()
        {
            if !llm_validation::is_model_compatible_with_provider(&cfg.llm.primary, model) {
                tracing::warn!(
                    provider = %cfg.llm.primary,
                    model = %model,
                    "Ignoring incompatible session-scoped LLM model override"
                );
            } else {
                apply_model_override(cfg, model, &api_key_lookup);
            }
        }
    }

    let provider = cfg.llm.primary.clone();

    // Ensure the active provider has a materialized provider config with a non-empty model.
    //
    // Why: `AppConfig::default()` intentionally does not materialize every provider config
    // object, and GUI/CLI adapters rely on `EffectiveLlmConfig` to preselect dropdown values.
    // Without this, switching providers (or rejecting an incompatible model override) can
    // yield an empty effective model string.
    cfg.llm.ensure_provider_config(&provider);

    let model = get_model_for_provider(cfg, &provider).unwrap_or_default();
    EffectiveLlmConfig { provider, model }
}

205/// Parse the CLI `--model` argument into a session-style provider/model override.
206///
207/// Returns `None` if the argument is empty/whitespace.
208fn parse_cli_model_arg(model_arg: &str) -> Option<SessionLlmConfig> {
209    let arg = model_arg.trim();
210    if arg.is_empty() {
211        return None;
212    }
213
214    if let Some((provider, model)) = arg.split_once(':') {
215        let provider = provider.trim();
216        let model = model.trim();
217        Some(SessionLlmConfig {
218            provider: (!provider.is_empty()).then(|| provider.to_string()),
219            model: (!model.is_empty()).then(|| model.to_string()),
220        })
221    } else {
222        Some(SessionLlmConfig {
223            provider: None,
224            model: Some(arg.to_string()),
225        })
226    }
227}
228
/// Parse a CLI-style model selector (e.g. `"provider:model"` or `"model"`) into a
/// [`SessionLlmConfig`].
///
/// This is a small, shared helper for thin adapters (CLI basic mode, CLI TUI, GUI)
/// that want to persist a session-scoped override using the same parsing rules as
/// [`apply_cli_model_arg_overrides`]. It is a public re-export of the private
/// parser so the rules cannot drift between adapters.
///
/// Returns `None` for empty/whitespace-only input.
pub fn session_llm_config_from_cli_model_arg(model_arg: &str) -> Option<SessionLlmConfig> {
    parse_cli_model_arg(model_arg)
}

241/// Apply session-style LLM overrides for CLI adapters.
242///
243/// CLI adapters don't have access to GUI keychain storage, so we resolve API keys
244/// from (1) already-loaded config values and (2) environment variables.
245/// Apply session-style LLM overrides for CLI adapters.
246///
247/// CLI adapters don't have access to GUI keychain storage, so we resolve API keys
248/// from (1) already-loaded config values and (2) environment variables.
249///
250/// This is a convenience wrapper around [`apply_session_llm_overrides`] that provides
251/// a standard CLI API-key lookup strategy.
252pub fn apply_cli_session_llm_overrides(
253    cfg: &mut AppConfig,
254    session_llm: Option<&SessionLlmConfig>,
255) -> EffectiveLlmConfig {
256    let openai_key = cfg.llm.openai.as_ref().map(|c| c.api_key.clone());
257    let anthropic_key = cfg.llm.anthropic.as_ref().map(|c| c.api_key.clone());
258    let gemini_key = cfg.llm.gemini.as_ref().map(|c| c.api_key.clone());
259    let grok_key = cfg.llm.grok.as_ref().map(|c| c.api_key.clone());
260
261    let api_key_lookup = move |provider: &str| match provider {
262        "openai" => openai_key
263            .clone()
264            .filter(|k| !k.trim().is_empty())
265            .or_else(|| config_env::get_env("OPENAI_API_KEY")),
266        "anthropic" => anthropic_key
267            .clone()
268            .filter(|k| !k.trim().is_empty())
269            .or_else(|| config_env::get_env("ANTHROPIC_API_KEY")),
270        "gemini" => gemini_key
271            .clone()
272            .filter(|k| !k.trim().is_empty())
273            .or_else(|| config_env::get_env("GEMINI_API_KEY")),
274        "grok" => grok_key
275            .clone()
276            .filter(|k| !k.trim().is_empty())
277            .or_else(|| config_env::get_env("GROK_API_KEY")),
278        _ => None,
279    };
280
281    apply_session_llm_overrides(cfg, session_llm, api_key_lookup)
282}
283
284fn apply_model_override(
285    cfg: &mut AppConfig,
286    model: &str,
287    api_key_lookup: &impl Fn(&str) -> Option<String>,
288) {
289    match cfg.llm.primary.as_str() {
290        "openai" => {
291            let openai = cfg.llm.openai.get_or_insert_with(|| OpenAiConfig {
292                api_key: api_key_lookup("openai").unwrap_or_default(),
293                model: model.to_string(),
294                base_url: None,
295            });
296            openai.model = model.to_string();
297        }
298        "anthropic" => {
299            let anthropic = cfg.llm.anthropic.get_or_insert_with(|| AnthropicConfig {
300                api_key: api_key_lookup("anthropic").unwrap_or_default(),
301                model: model.to_string(),
302                base_url: None,
303                thinking_budget_tokens: None,
304            });
305            anthropic.model = model.to_string();
306        }
307        "grok" => {
308            let grok = cfg.llm.grok.get_or_insert_with(|| GrokConfig {
309                api_key: api_key_lookup("grok").unwrap_or_default(),
310                model: model.to_string(),
311                base_url: None,
312            });
313            grok.model = model.to_string();
314        }
315        "gemini" => {
316            let gemini = cfg.llm.gemini.get_or_insert_with(|| GeminiConfig {
317                api_key: api_key_lookup("gemini").unwrap_or_default(),
318                model: model.to_string(),
319                base_url: None,
320            });
321            gemini.model = model.to_string();
322        }
323        "ollama" => {
324            let ollama = cfg.llm.ollama.get_or_insert_with(|| OllamaConfig {
325                base_url: "http://localhost:11434".into(),
326                model: model.to_string(),
327            });
328            ollama.model = model.to_string();
329        }
330        _ => {}
331    }
332}
333
334fn get_model_for_provider(cfg: &AppConfig, provider: &str) -> Option<String> {
335    match provider {
336        "openai" => cfg.llm.openai.as_ref().map(|c| c.model.clone()),
337        "anthropic" => cfg.llm.anthropic.as_ref().map(|c| c.model.clone()),
338        "gemini" => cfg.llm.gemini.as_ref().map(|c| c.model.clone()),
339        "grok" => cfg.llm.grok.as_ref().map(|c| c.model.clone()),
340        "ollama" => cfg.llm.ollama.as_ref().map(|c| c.model.clone()),
341        _ => None,
342    }
343}
344
// ---------------------------------------------------------------------------
// Provider constants & legacy-aware model selector
// ---------------------------------------------------------------------------

/// Known LLM provider identifiers.
///
/// Used to disambiguate `provider`-only strings from `model`-only strings in
/// legacy-aware selectors (CLI, TUI, session persistence).
///
/// NOTE: this is an identity list, not a priority order — fallback precedence
/// lives in `fallback_primary_provider`.
pub const KNOWN_LLM_PROVIDERS: [&str; 5] = ["openai", "anthropic", "grok", "gemini", "ollama"];

355/// Returns `true` when `provider` matches one of the [`KNOWN_LLM_PROVIDERS`]
356/// (case-insensitive comparison).
357pub fn is_known_llm_provider(provider: &str) -> bool {
358    KNOWN_LLM_PROVIDERS
359        .iter()
360        .any(|p| p.eq_ignore_ascii_case(provider.trim()))
361}
362
363/// Parse a legacy-aware selector string into a session override.
364///
365/// Supported formats:
366/// - `"provider:model"` → provider + model
367/// - `"provider"` (if it matches a known provider) → provider-only
368/// - anything else → model-only (delegates to [`session_llm_config_from_cli_model_arg`])
369///
370/// Returns `None` for empty / whitespace-only input.
371pub fn parse_model_selector_legacy_aware(spec: &str) -> Option<SessionLlmConfig> {
372    let s = spec.trim();
373    if s.is_empty() {
374        return None;
375    }
376
377    if s.contains(':') {
378        return session_llm_config_from_cli_model_arg(s);
379    }
380
381    if is_known_llm_provider(s) {
382        return Some(SessionLlmConfig {
383            provider: Some(s.to_ascii_lowercase()),
384            model: None,
385        });
386    }
387
388    session_llm_config_from_cli_model_arg(s)
389}
390
// ---------------------------------------------------------------------------
// Session-scoped LLM override resolution (shared business logic)
// ---------------------------------------------------------------------------

395/// Resolve the session-scoped LLM override from an [`AgentSession`].
396///
397/// Precedence:
398/// 1. `session.state.llm_config` (canonical persisted override)
399/// 2. Legacy `session.model` (parsed via [`parse_model_selector_legacy_aware`])
400pub fn resolve_session_llm_override(session: &AgentSession) -> Option<SessionLlmConfig> {
401    if let Some(cfg) = session.state.llm_config.as_ref() {
402        return Some(cfg.clone());
403    }
404
405    session
406        .model
407        .as_deref()
408        .and_then(parse_model_selector_legacy_aware)
409}
410
411/// Apply session-scoped LLM overrides (for basic/non-TUI CLI mode) and return
412/// a cloned config together with the effective provider/model.
413///
414/// This resolves the override from the session, clones `base_config`, applies
415/// the override via [`apply_cli_session_llm_overrides`], and returns both.
416pub fn apply_basic_mode_session_llm_overrides(
417    base_config: &AppConfig,
418    session: &AgentSession,
419) -> (AppConfig, EffectiveLlmConfig) {
420    let session_llm = resolve_session_llm_override(session);
421    let mut config = base_config.clone();
422    let effective = apply_cli_session_llm_overrides(&mut config, session_llm.as_ref());
423    (config, effective)
424}
425
426/// Normalize and (optionally) migrate session-scoped LLM selection state.
427///
428/// This is the **core** normalization logic — it performs validation and
429/// canonicalization but **does not** produce any user-facing output. Callers
430/// (CLI, GUI) are responsible for presenting errors.
431///
432/// Returns:
433/// - `Ok(true)` if the session was modified and should be persisted.
434/// - `Ok(false)` if no changes were necessary.
435/// - `Err(msg)` if validation failed (e.g. incompatible provider/model).
436pub fn normalize_session_llm_override(
437    config: &AppConfig,
438    session: &mut AgentSession,
439    cli_model_arg: Option<&str>,
440) -> std::result::Result<bool, String> {
441    let explicit_cli_arg = cli_model_arg.is_some_and(|s| !s.trim().is_empty());
442
443    let mut session_llm = if let Some(arg) = cli_model_arg.filter(|s| !s.trim().is_empty()) {
444        parse_model_selector_legacy_aware(arg)
445    } else if session.state.llm_config.is_some() {
446        session.state.llm_config.clone()
447    } else {
448        session
449            .model
450            .as_deref()
451            .and_then(parse_model_selector_legacy_aware)
452    };
453
454    let Some(mut session_llm_cfg) = session_llm.take() else {
455        return Ok(false);
456    };
457
458    // If this is an explicit CLI request, validate it before persisting.
459    if explicit_cli_arg {
460        let provider_for_validation = session_llm_cfg
461            .provider
462            .as_deref()
463            .map(str::trim)
464            .filter(|s| !s.is_empty())
465            .map(|s| s.to_string())
466            .unwrap_or_else(|| config.llm.primary.clone());
467
468        if let Some(model) = session_llm_cfg
469            .model
470            .as_deref()
471            .map(str::trim)
472            .filter(|s| !s.is_empty())
473        {
474            llm_validation::validate_model_for_provider(&provider_for_validation, model)?;
475        }
476
477        // If provider-only was requested, keep it provider-only until we resolve defaults below.
478        if session_llm_cfg.provider.is_none() {
479            session_llm_cfg.provider = Some(provider_for_validation);
480        }
481    }
482
483    let mut tmp_config = config.clone();
484    let effective = apply_cli_session_llm_overrides(&mut tmp_config, Some(&session_llm_cfg));
485    if effective.provider.trim().is_empty() || effective.model.trim().is_empty() {
486        return Ok(false);
487    }
488
489    let canonical = SessionLlmConfig {
490        provider: Some(effective.provider.clone()),
491        model: Some(effective.model.clone()),
492    };
493    let legacy = format!("{}:{}", effective.provider, effective.model);
494
495    let mut changed = false;
496    let same_canonical = session.state.llm_config.as_ref().is_some_and(|c| {
497        c.provider.as_deref() == canonical.provider.as_deref()
498            && c.model.as_deref() == canonical.model.as_deref()
499    });
500    if !same_canonical {
501        session.state.llm_config = Some(canonical);
502        changed = true;
503    }
504    if session.model.as_deref() != Some(legacy.as_str()) {
505        session.model = Some(legacy);
506        changed = true;
507    }
508
509    Ok(changed)
510}
511
512#[cfg(test)]
513mod tests {
514    use super::*;
515
    // `"provider:model"` splits into both fields.
    #[test]
    fn parse_cli_model_arg_parses_provider_and_model() {
        let parsed = parse_cli_model_arg("openai:gpt-4o").expect("should parse");
        assert_eq!(parsed.provider.as_deref(), Some("openai"));
        assert_eq!(parsed.model.as_deref(), Some("gpt-4o"));
    }

    // A bare string (no ':') is treated as a model-only override.
    #[test]
    fn parse_cli_model_arg_parses_model_only() {
        let parsed = parse_cli_model_arg("claude-3-5-sonnet").expect("should parse");
        assert_eq!(parsed.provider.as_deref(), None);
        assert_eq!(parsed.model.as_deref(), Some("claude-3-5-sonnet"));
    }

    // Whitespace-only `--model` leaves the config's primary provider in effect.
    #[test]
    fn apply_cli_model_arg_overrides_ignores_empty() {
        let mut cfg = AppConfig::default();
        let eff = apply_cli_model_arg_overrides(&mut cfg, Some("  "));
        assert_eq!(eff.provider, cfg.llm.primary);
    }

    // Provider-only override switches the primary and still yields a
    // non-empty default model for that provider.
    #[test]
    fn provider_override_applies_and_model_falls_back() {
        let mut cfg = AppConfig::default();
        let session = SessionLlmConfig {
            provider: Some("ollama".into()),
            model: None,
        };

        let eff = apply_session_llm_overrides(&mut cfg, Some(&session), |_| None);
        assert_eq!(eff.provider, "ollama");
        assert!(!eff.model.is_empty());
        assert_eq!(cfg.llm.primary, "ollama");
    }

    // A model belonging to a different provider family is rejected.
    #[test]
    fn incompatible_model_override_is_ignored() {
        let mut cfg = AppConfig::default();
        // Use a provider with no config in default (anthropic is default primary, config None)
        let session = SessionLlmConfig {
            provider: Some("openai".into()),
            model: Some("grok-2".into()),
        };

        let eff = apply_session_llm_overrides(&mut cfg, Some(&session), |_| None);
        assert_eq!(eff.provider, "openai");
        // The incompatible model override is ignored, but we still ensure the provider has a
        // default model so adapters can bind/preselect reliably.
        assert!(!eff.model.is_empty());
        assert_ne!(eff.model, "grok-2");
        assert_eq!(cfg.llm.primary, "openai");
        assert!(cfg.llm.openai.is_some());
    }

    // Compatibility checks also reject legacy completion-only OpenAI models.
    #[test]
    fn openai_completion_only_model_override_is_ignored() {
        let mut cfg = AppConfig::default();
        let session = SessionLlmConfig {
            provider: Some("openai".into()),
            model: Some("text-davinci-003".into()),
        };

        let eff = apply_session_llm_overrides(&mut cfg, Some(&session), |_| None);
        assert_eq!(eff.provider, "openai");
        assert!(!eff.model.is_empty());
        assert_ne!(eff.model, "text-davinci-003");
        assert_eq!(cfg.llm.primary, "openai");
        assert!(cfg.llm.openai.is_some());
        assert_ne!(cfg.llm.openai.as_ref().unwrap().model, "text-davinci-003");
    }

    // A compatible model override materializes the provider config (using the
    // adapter-supplied API key lookup) and records the model.
    #[test]
    fn model_override_creates_provider_config_and_sets_model() {
        let mut cfg = AppConfig::default();
        let session = SessionLlmConfig {
            provider: Some("openai".into()),
            model: Some("gpt-4o".into()),
        };

        let eff = apply_session_llm_overrides(&mut cfg, Some(&session), |_| Some("k".into()));
        assert_eq!(eff.provider, "openai");
        assert_eq!(eff.model, "gpt-4o");
        assert_eq!(cfg.llm.primary, "openai");
        assert_eq!(cfg.llm.openai.as_ref().unwrap().model, "gpt-4o");
    }

    // Provider recognition is case-insensitive and rejects unknown/empty ids.
    #[test]
    fn known_llm_providers_recognized() {
        assert!(is_known_llm_provider("openai"));
        assert!(is_known_llm_provider("ANTHROPIC"));
        assert!(is_known_llm_provider("Ollama"));
        assert!(!is_known_llm_provider("unknown"));
        assert!(!is_known_llm_provider(""));
    }

    // An invalid `cfg.llm.primary` is repaired to a configured provider even
    // when no session override is supplied.
    #[test]
    fn apply_session_llm_overrides_repairs_invalid_global_provider() {
        let mut cfg = AppConfig::default();
        cfg.llm.primary = "echo".to_string();
        cfg.llm.openai = Some(OpenAiConfig {
            api_key: "sk-openai".to_string(),
            ..Default::default()
        });

        let effective = apply_session_llm_overrides(&mut cfg, None, |_| None);

        assert_eq!(effective.provider, "openai");
        assert_eq!(cfg.llm.primary, "openai");
        assert!(!effective.model.trim().is_empty());
    }

    // A session override naming an unknown provider is ignored, leaving the
    // valid primary untouched.
    #[test]
    fn apply_session_llm_overrides_ignores_invalid_session_provider_override() {
        let mut cfg = AppConfig::default();
        cfg.llm.primary = "openai".to_string();
        cfg.llm.openai = Some(OpenAiConfig {
            api_key: "sk-openai".to_string(),
            ..Default::default()
        });
        let session = SessionLlmConfig {
            provider: Some("echo".to_string()),
            model: None,
        };

        let effective = apply_session_llm_overrides(&mut cfg, Some(&session), |_| None);

        assert_eq!(effective.provider, "openai");
        assert_eq!(cfg.llm.primary, "openai");
    }

    // A bare known-provider name parses as a provider-only override.
    #[test]
    fn parse_model_selector_legacy_aware_provider_only() {
        let cfg = parse_model_selector_legacy_aware("anthropic").unwrap();
        assert_eq!(cfg.provider.as_deref(), Some("anthropic"));
        assert_eq!(cfg.model, None);
    }

    // "provider:model" parses both halves.
    #[test]
    fn parse_model_selector_legacy_aware_provider_model() {
        let cfg = parse_model_selector_legacy_aware("openai:gpt-4o").unwrap();
        assert_eq!(cfg.provider.as_deref(), Some("openai"));
        assert_eq!(cfg.model.as_deref(), Some("gpt-4o"));
    }

    // A non-provider bare string parses as a model-only override.
    #[test]
    fn parse_model_selector_legacy_aware_model_only() {
        let cfg = parse_model_selector_legacy_aware("gpt-4o").unwrap();
        // Falls through to session_llm_config_from_cli_model_arg which treats it as model-only
        assert_eq!(cfg.provider, None);
        assert_eq!(cfg.model.as_deref(), Some("gpt-4o"));
    }

    // Empty / whitespace-only input yields no override.
    #[test]
    fn parse_model_selector_legacy_aware_empty() {
        assert!(parse_model_selector_legacy_aware("").is_none());
        assert!(parse_model_selector_legacy_aware("  ").is_none());
    }
673}