2024-03-03 15:20:30 +01:00
|
|
|
use jrest::expect;
|
|
|
|
|
use mistralai_client::v1::{
|
2024-03-09 11:28:50 +01:00
|
|
|
chat::{ChatMessage, ChatMessageRole, ChatParams, ChatResponseChoiceFinishReason},
|
2024-03-03 15:20:30 +01:00
|
|
|
client::Client,
|
2024-03-04 03:14:23 +01:00
|
|
|
constants::Model,
|
2026-03-20 17:57:44 +00:00
|
|
|
tool::{Tool, ToolChoice},
|
2024-03-03 15:20:30 +01:00
|
|
|
};
|
|
|
|
|
|
2024-03-09 11:28:50 +01:00
|
|
|
mod setup;
|
|
|
|
|
|
2024-03-03 15:20:30 +01:00
|
|
|
#[test]
fn test_client_chat() {
    // Integration test: sends a deterministic chat request to the live
    // Mistral API and checks the shape and content of the response.
    setup::setup();

    let api_client = Client::new(None, None, None, None).unwrap();

    let chat_model = Model::mistral_small_latest();
    let prompt_messages = vec![ChatMessage::new_user_message(
        "Guess the next word: \"Eiffel ...\"?",
    )];
    // Zero temperature plus a fixed seed keeps the completion reproducible,
    // so the "Tower" content assertion below is stable across runs.
    let params = ChatParams {
        temperature: Some(0.0),
        random_seed: Some(42),
        ..Default::default()
    };

    let response = api_client
        .chat(chat_model, prompt_messages, Some(params))
        .unwrap();

    // Envelope checks.
    expect!(response.object).to_be("chat.completion".to_string());
    expect!(response.choices.len()).to_be(1);

    // Single-choice checks.
    let choice = &response.choices[0];
    expect!(choice.index).to_be(0);
    expect!(choice.message.role.clone()).to_be(ChatMessageRole::Assistant);
    expect!(choice.message.content.text().contains("Tower")).to_be(true);
    expect!(choice.finish_reason.clone()).to_be(ChatResponseChoiceFinishReason::Stop);

    // Token accounting should always be populated for a completed request.
    expect!(response.usage.prompt_tokens).to_be_greater_than(0);
    expect!(response.usage.completion_tokens).to_be_greater_than(0);
    expect!(response.usage.total_tokens).to_be_greater_than(0);
}
|
|
|
|
|
|
|
|
|
|
#[test]
fn test_client_chat_with_function_calling() {
    // Integration test: declares one tool and verifies that the live
    // Mistral API chooses to answer with a tool call rather than text.
    setup::setup();

    // JSON-schema description of the single tool's parameters.
    let tool_parameters = serde_json::json!({
        "type": "object",
        "properties": {
            "city": {
                "type": "string",
                "description": "The name of the city."
            }
        },
        "required": ["city"]
    });
    let tools = vec![Tool::new(
        "get_city_temperature".to_string(),
        "Get the current temperature in a city.".to_string(),
        tool_parameters,
    )];

    let api_client = Client::new(None, None, None, None).unwrap();

    let chat_model = Model::mistral_small_latest();
    let prompt_messages = vec![ChatMessage::new_user_message(
        "What's the current temperature in Paris?",
    )];
    // Deterministic sampling; `ToolChoice::Auto` lets the model decide to
    // invoke the declared tool, which the prompt is crafted to trigger.
    let params = ChatParams {
        temperature: Some(0.0),
        random_seed: Some(42),
        tool_choice: Some(ToolChoice::Auto),
        tools: Some(tools),
        ..Default::default()
    };

    let response = api_client
        .chat(chat_model, prompt_messages, Some(params))
        .unwrap();

    // Envelope checks.
    expect!(response.object).to_be("chat.completion".to_string());
    expect!(response.choices.len()).to_be(1);

    // Single-choice checks: the model should stop to request a tool call.
    let choice = &response.choices[0];
    expect!(choice.index).to_be(0);
    expect!(choice.message.role.clone()).to_be(ChatMessageRole::Assistant);
    expect!(choice.finish_reason.clone()).to_be(ChatResponseChoiceFinishReason::ToolCalls);

    // Token accounting should always be populated for a completed request.
    expect!(response.usage.prompt_tokens).to_be_greater_than(0);
    expect!(response.usage.completion_tokens).to_be_greater_than(0);
    expect!(response.usage.total_tokens).to_be_greater_than(0);
}
|