// gestura_core_streaming/config.rs
1//! Streaming-specific configuration types.
2//!
3//! These mirror the subset of `AppConfig::llm` fields needed by the streaming module.
4//! Core provides `From<&AppConfig> for StreamingConfig` to bridge the two.
5
6use serde::{Deserialize, Serialize};
7
8/// Configuration for streaming LLM requests.
9///
10/// This type captures the minimal subset of application configuration
11/// required by the streaming module: which provider to use and their credentials.
12#[derive(Debug, Clone, Default, Serialize, Deserialize, PartialEq, Eq)]
13pub struct StreamingConfig {
14 /// Primary provider id: "openai" | "anthropic" | "gemini" | "grok" | "ollama"
15 #[serde(default)]
16 pub primary: String,
17 /// Fallback provider id (optional): used when primary fails
18 #[serde(default)]
19 pub fallback: Option<String>,
20 /// OpenAI provider configuration
21 pub openai: Option<OpenAiProviderConfig>,
22 /// Anthropic provider configuration
23 pub anthropic: Option<AnthropicProviderConfig>,
24 /// Gemini (Google Generative Language API) provider configuration
25 pub gemini: Option<GeminiProviderConfig>,
26 /// Grok provider configuration
27 pub grok: Option<GrokProviderConfig>,
28 /// Ollama provider configuration
29 pub ollama: Option<OllamaProviderConfig>,
30}
31
32/// OpenAI provider credentials and settings.
33#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq, Default)]
34pub struct OpenAiProviderConfig {
35 /// API key for authentication
36 #[serde(default)]
37 pub api_key: String,
38 /// Optional custom base URL (e.g. for Azure OpenAI)
39 pub base_url: Option<String>,
40 /// Model to use (e.g. "gpt-4o")
41 #[serde(default)]
42 pub model: String,
43}
44
45/// Anthropic provider credentials and settings.
46#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq, Default)]
47pub struct AnthropicProviderConfig {
48 /// API key for authentication
49 #[serde(default)]
50 pub api_key: String,
51 /// Optional custom base URL
52 pub base_url: Option<String>,
53 /// Model to use (e.g. "claude-sonnet-4-20250514")
54 #[serde(default)]
55 pub model: String,
56 /// Optional: enable Anthropic "extended thinking" streaming.
57 #[serde(default)]
58 pub thinking_budget_tokens: Option<u32>,
59}
60
61/// Grok (xAI) provider credentials and settings.
62#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq, Default)]
63pub struct GrokProviderConfig {
64 /// API key for authentication
65 #[serde(default)]
66 pub api_key: String,
67 /// Optional custom base URL
68 pub base_url: Option<String>,
69 /// Model to use
70 #[serde(default)]
71 pub model: String,
72}
73
74/// Gemini (Google Generative Language API) provider credentials and settings.
75#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq, Default)]
76pub struct GeminiProviderConfig {
77 /// API key for authentication (passed as query parameter)
78 #[serde(default)]
79 pub api_key: String,
80 /// Optional custom base URL
81 pub base_url: Option<String>,
82 /// Model to use (e.g. "gemini-2.0-flash")
83 #[serde(default)]
84 pub model: String,
85}
86
87/// Ollama (local) provider settings.
88#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)]
89pub struct OllamaProviderConfig {
90 /// Base URL for the Ollama server
91 pub base_url: String,
92 /// Model to use
93 pub model: String,
94}