32 changes: 16 additions & 16 deletions desktop/Backend-Rust/src/models/chat_completions.rs
@@ -231,23 +231,16 @@ pub struct AnthropicUsage {
 #[serde(tag = "type")]
 pub enum AnthropicStreamEvent {
     #[serde(rename = "message_start")]
-    MessageStart {
-        message: AnthropicStreamMessage,
-    },
+    MessageStart { message: AnthropicStreamMessage },
     #[serde(rename = "content_block_start")]
     ContentBlockStart {
         index: usize,
         content_block: AnthropicContentBlock,
     },
     #[serde(rename = "content_block_delta")]
-    ContentBlockDelta {
-        index: usize,
-        delta: AnthropicDelta,
-    },
+    ContentBlockDelta { index: usize, delta: AnthropicDelta },
     #[serde(rename = "content_block_stop")]
-    ContentBlockStop {
-        index: usize,
-    },
+    ContentBlockStop { index: usize },
     #[serde(rename = "message_delta")]
     MessageDelta {
         delta: AnthropicMessageDelta,
@@ -258,9 +251,7 @@ pub enum AnthropicStreamEvent {
     #[serde(rename = "ping")]
     Ping {},
     #[serde(rename = "error")]
-    Error {
-        error: AnthropicStreamError,
-    },
+    Error { error: AnthropicStreamError },
 }
 
 #[derive(Debug, Clone, Deserialize)]
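
For context on the collapsed variants: `#[serde(tag = "type")]` is serde's internally tagged representation, so the JSON `type` field selects the variant named by each `#[serde(rename = ...)]`, matching Anthropic's SSE event names. A minimal, self-contained sketch of the mechanism, using a trimmed-down enum rather than the real types:

use serde::Deserialize;

// Trimmed-down mirror of the AnthropicStreamEvent pattern; the field shapes
// are illustrative, not the real AnthropicDelta/AnthropicContentBlock types.
#[derive(Debug, Deserialize)]
#[serde(tag = "type")]
enum StreamEvent {
    #[serde(rename = "content_block_stop")]
    ContentBlockStop { index: usize },
    #[serde(rename = "ping")]
    Ping {},
}

fn main() {
    // serde reads the "type" field to pick the variant, then fills its fields.
    let event: StreamEvent =
        serde_json::from_str(r#"{"type":"content_block_stop","index":0}"#).unwrap();
    println!("{event:?}"); // ContentBlockStop { index: 0 }
}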
@@ -339,6 +330,16 @@ pub const MODEL_ROUTES: &[ModelRoute] = &[
         upstream_model: "claude-sonnet-4-6",
         provider: Provider::Anthropic,
     },
+    ModelRoute {
+        public_model: "omi-haiku",
+        upstream_model: "claude-haiku-4-5-20251001",
+        provider: Provider::Anthropic,
+    },
+    ModelRoute {
+        public_model: "claude-haiku-4-5-20251001",
+        upstream_model: "claude-haiku-4-5-20251001",
+        provider: Provider::Anthropic,
+    },
 ];
 
 pub fn resolve_model(model: &str) -> Option<&'static ModelRoute> {
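
With the two new routes, both the `omi-haiku` alias and the raw `claude-haiku-4-5-20251001` id resolve to the same upstream model. The body of `resolve_model` is outside this diff; given the `&'static` slice, a linear scan over the route table is the obvious shape, sketched here as an assumption rather than the actual implementation:

// Sketch only: the PR does not show this body, but a linear scan over the
// static route table matches the signature above.
pub fn resolve_model(model: &str) -> Option<&'static ModelRoute> {
    MODEL_ROUTES.iter().find(|route| route.public_model == model)
}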
@@ -358,9 +359,8 @@ pub fn map_stop_reason(anthropic_reason: Option<&str>) -> Option<String> {
 }
 
 pub fn anthropic_usage_to_openai(usage: &AnthropicUsage) -> Usage {
-    let prompt_tokens = usage.input_tokens
-        + usage.cache_creation_input_tokens
-        + usage.cache_read_input_tokens;
+    let prompt_tokens =
+        usage.input_tokens + usage.cache_creation_input_tokens + usage.cache_read_input_tokens;
     let completion_tokens = usage.output_tokens;
     Usage {
         prompt_tokens,
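
The reformatted expression is behavior-neutral: the OpenAI-style `prompt_tokens` still folds cache-creation and cache-read token counts into the plain input count. With illustrative values, input_tokens = 120, cache_creation_input_tokens = 2_000, and cache_read_input_tokens = 8_000 give prompt_tokens = 120 + 2_000 + 8_000 = 10_120, while completion_tokens passes through unchanged.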
15 changes: 14 additions & 1 deletion desktop/Backend-Rust/src/routes/chat_completions.rs
@@ -70,6 +70,12 @@ fn model_cost(upstream_model: &str) -> ModelCost {
             cache_read_per_token: 1.50 / 1_000_000.0,
             cache_write_per_token: 18.75 / 1_000_000.0,
         },
+        "claude-haiku-4-5-20251001" => ModelCost {
+            input_per_token: 0.80 / 1_000_000.0,
+            output_per_token: 4.00 / 1_000_000.0,
+            cache_read_per_token: 0.08 / 1_000_000.0,
+            cache_write_per_token: 1.00 / 1_000_000.0,
+        },
         _ => ModelCost {
             input_per_token: 3.0 / 1_000_000.0,
             output_per_token: 15.0 / 1_000_000.0,
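
The new arm prices haiku input at $0.80 and output at $4.00 per million tokens, with cache reads at a tenth of the input rate. Assuming the caller multiplies these per-token rates by the usage counts (the aggregation code is not part of this diff), a request with 10,000 input and 500 output tokens would cost 10_000 × 0.80/1e6 + 500 × 4.00/1e6 = $0.008 + $0.002 = $0.010.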
@@ -926,6 +932,14 @@ mod tests {
         assert_eq!(route.upstream_model, "claude-opus-4-6");
     }
 
+    #[test]
+    fn test_resolve_model_haiku() {
+        let route = resolve_model("omi-haiku").unwrap();
+        assert_eq!(route.public_model, "omi-haiku");
+        assert_eq!(route.upstream_model, "claude-haiku-4-5-20251001");
+        assert_eq!(route.provider, Provider::Anthropic);
+    }
+
     #[test]
     fn test_resolve_model_claude_aliases() {
         let route = resolve_model("claude-opus-4-6").unwrap();
@@ -948,7 +962,6 @@
     fn test_resolve_model_unknown() {
         assert!(resolve_model("gpt-4").is_none());
         assert!(resolve_model("").is_none());
-        assert!(resolve_model("omi-haiku").is_none());
     }
 
     #[test]
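
Note the deleted assertion is the counterpart of the new route: `omi-haiku` is no longer an unknown model, so it moves from the negative test into its own positive one. Assuming a standard cargo layout under desktop/Backend-Rust, the new and updated cases should be runnable locally with a test-name filter:

cd desktop/Backend-Rust && cargo test resolve_model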