// gestura_core/llm_validation.rs
use gestura_core_llm::openai::{
13 is_openai_model_incompatible_with_agent_session, looks_like_openai_model,
14 openai_agent_session_model_message,
15};
16
17pub fn infer_provider_from_model_id(model_id: &str) -> Option<&'static str> {
28 let m = model_id.trim().to_ascii_lowercase();
29 if m.is_empty() {
30 return None;
31 }
32
33 if m.starts_with("grok-") {
34 return Some("grok");
35 }
36
37 if m.starts_with("claude-") {
38 return Some("anthropic");
39 }
40
41 if m.starts_with("gemini-") {
42 return Some("gemini");
43 }
44
45 if looks_like_openai_model(&m) {
48 return Some("openai");
49 }
50
51 None
52}
53
54pub fn is_model_compatible_with_provider(provider: &str, model_id: &str) -> bool {
63 let p = provider.trim().to_ascii_lowercase();
64 if p.is_empty() {
65 return true;
66 }
67
68 if p == "ollama" {
69 return true;
70 }
71
72 if p == "openai" && is_openai_model_incompatible_with_agent_session(model_id) {
73 return false;
74 }
75
76 match infer_provider_from_model_id(model_id) {
77 Some(inferred) => inferred == p,
78 None => true,
79 }
80}
81
82pub fn validate_model_for_provider(provider: &str, model_id: &str) -> Result<(), String> {
84 let normalized_provider = provider.trim().to_ascii_lowercase();
85 if normalized_provider == "openai" && is_openai_model_incompatible_with_agent_session(model_id)
86 {
87 return Err(openai_agent_session_model_message(model_id));
88 }
89
90 if is_model_compatible_with_provider(&normalized_provider, model_id) {
91 return Ok(());
92 }
93
94 let inferred = infer_provider_from_model_id(model_id).unwrap_or("unknown");
95 Err(format!(
96 "Invalid model for provider: provider='{}' model='{}' (looks like provider='{}')",
97 provider.trim(),
98 model_id.trim(),
99 inferred
100 ))
101}
102
#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn rejects_obvious_cross_provider_pairs() {
        // Each pair mixes a provider with a model that clearly belongs to
        // a different provider family.
        let cross_pairs = [
            ("openai", "grok-2"),
            ("grok", "gpt-4o"),
            ("anthropic", "grok-3"),
            ("openai", "claude-sonnet-4-20250514"),
            ("openai", "gemini-2.0-flash"),
            ("gemini", "gpt-4o"),
        ];
        for (provider, model) in cross_pairs {
            assert!(!is_model_compatible_with_provider(provider, model));
        }
    }

    #[test]
    fn accepts_matching_prefixes() {
        // Model ids whose family matches the chosen provider must pass,
        // including fine-tuned OpenAI ids (ft: prefix).
        let matching_pairs = [
            ("grok", "grok-2"),
            ("anthropic", "claude-sonnet-4-20250514"),
            ("openai", "gpt-4o"),
            ("openai", "o1-mini"),
            ("openai", "o4-mini"),
            ("openai", "codex-mini-latest"),
            ("openai", "ft:gpt-4o-mini:gestura:agent-123"),
            ("gemini", "gemini-2.0-flash"),
            ("gemini", "gemini-1.5-pro"),
        ];
        for (provider, model) in matching_pairs {
            assert!(is_model_compatible_with_provider(provider, model));
        }
    }

    #[test]
    fn allows_unknown_models_by_default() {
        // Ids with no recognizable family are permitted for any provider.
        assert!(is_model_compatible_with_provider(
            "openai",
            "my-custom-model"
        ));
        assert!(is_model_compatible_with_provider(
            "anthropic",
            "some-enterprise-model"
        ));
    }

    #[test]
    fn rejects_openai_completion_only_models_for_sessions() {
        // Completion-, audio-, and image-only OpenAI models must be rejected
        // and produce the dedicated agent-session routing message.
        let session_incompatible = [
            "text-davinci-003",
            "gpt-3.5-turbo-instruct",
            "davinci-002",
            "ft:babbage-002:gestura:legacy-123",
            "davinci:ft-gestura-legacy-456",
            "gpt-4o-transcribe",
            "gpt-4o-audio-preview",
            "gpt-realtime",
            "gpt-image-1",
        ];
        for model in session_incompatible {
            assert!(
                !is_model_compatible_with_provider("openai", model),
                "expected {model} to be rejected for OpenAI sessions"
            );
            let message = validate_model_for_provider("openai", model).unwrap_err();
            assert!(message.contains("automatically routes OpenAI agent requests"));
        }
    }

    #[test]
    fn accepts_openai_responses_models_for_sessions() {
        for model in ["gpt-5.4", "gpt-5.3-codex", "codex-1"] {
            assert!(
                is_model_compatible_with_provider("openai", model),
                "expected {model} to be accepted for OpenAI sessions"
            );
            validate_model_for_provider("openai", model).unwrap();
        }
    }

    #[test]
    fn infers_provider_for_fine_tuned_openai_chat_models() {
        // All of these id shapes should be classified as OpenAI.
        let openai_ids = [
            "ft:gpt-4o-mini:gestura:abc123",
            "o5-mini",
            "codex-mini-latest",
            "gpt-5.3-codex",
        ];
        for id in openai_ids {
            assert_eq!(infer_provider_from_model_id(id), Some("openai"));
        }
    }

    #[test]
    fn ollama_is_always_compatible() {
        // Ollama serves arbitrary local models, so nothing is rejected.
        assert!(is_model_compatible_with_provider("ollama", "grok-2"));
        assert!(is_model_compatible_with_provider(
            "ollama",
            "claude-sonnet-4-20250514"
        ));
    }
}