Files
mistralai-client-rs/tests/v1_chat_multimodal_api_test.rs
Sienna Meridian Satterwhite a29c3c0109 feat: add Agents API, Conversations API, and multimodal support (v1.1.0)
Agents API (beta): create, get, update, delete, list agents with tools,
handoffs, completion args, and guardrails support.

Conversations API (beta): create, append, history, messages, restart,
delete, list conversations. Supports agent-backed and model-only
conversations with function calling and handoff execution modes.

Multimodal: ChatMessageContent enum (Text/Parts) with ContentPart
variants for text and image_url. Backwards-compatible constructors.
new_user_message_with_images() for mixed content messages.

Chat: reasoning field on ChatResponseChoice for Magistral models.
HTTP: PATCH methods for agent updates.

81 tests (30 live API integration + 35 serde unit + 16 existing).
2026-03-21 20:58:25 +00:00

157 lines
4.6 KiB
Rust

use mistralai_client::v1::{
chat::{
ChatMessage, ChatParams, ChatResponseChoiceFinishReason, ContentPart, ImageUrl,
},
client::Client,
constants::Model,
};
mod setup;
/// Builds a client for the live-API tests; all four options are `None`, so
/// credentials/endpoint come from the environment (loaded via `setup::setup`).
fn make_client() -> Client {
    let client = Client::new(None, None, None, None);
    client.unwrap()
}
#[test]
fn test_multimodal_chat_with_image_url() {
    setup::setup();
    let client = make_client();

    // One user message mixing a text part with a small, publicly hosted image.
    let parts = vec![
        ContentPart::Text {
            text: "Describe this image in one sentence.".to_string(),
        },
        ContentPart::ImageUrl {
            image_url: ImageUrl {
                url: "https://picsum.photos/id/237/200/300".to_string(),
                detail: None,
            },
        },
    ];
    let message = ChatMessage::new_user_message_with_images(parts);

    let params = ChatParams {
        temperature: Some(0.0),
        max_tokens: Some(100),
        ..Default::default()
    };
    let response = client
        .chat(
            Model::new("pixtral-large-latest".to_string()),
            vec![message],
            Some(params),
        )
        .unwrap();

    // The model should finish naturally, produce a non-empty description,
    // and report token usage.
    assert_eq!(
        response.choices[0].finish_reason,
        ChatResponseChoiceFinishReason::Stop
    );
    let description = response.choices[0].message.content.text();
    assert!(!description.is_empty(), "Expected non-empty description");
    assert!(response.usage.total_tokens > 0);
}
#[tokio::test]
async fn test_multimodal_chat_with_image_url_async() {
    setup::setup();
    let client = make_client();

    // Same text+image payload shape as the sync test, sent through chat_async.
    let parts = vec![
        ContentPart::Text {
            text: "What colors do you see in this image? Reply in one sentence.".to_string(),
        },
        ContentPart::ImageUrl {
            image_url: ImageUrl {
                url: "https://picsum.photos/id/237/200/300".to_string(),
                detail: None,
            },
        },
    ];
    let message = ChatMessage::new_user_message_with_images(parts);

    let params = ChatParams {
        temperature: Some(0.0),
        max_tokens: Some(100),
        ..Default::default()
    };
    let result = client
        .chat_async(
            Model::new("pixtral-large-latest".to_string()),
            vec![message],
            Some(params),
        )
        .await
        .unwrap();

    // A non-empty answer plus reported usage is enough for the async path.
    let answer = result.choices[0].message.content.text();
    assert!(!answer.is_empty(), "Expected non-empty description");
    assert!(result.usage.total_tokens > 0);
}
#[test]
fn test_mixed_text_and_image_messages() {
    setup::setup();
    let client = make_client();

    // Conversation of two user turns: a plain-text message, then a text+image one.
    let intro = ChatMessage::new_user_message("I'm going to show you an image next.");
    let with_image = ChatMessage::new_user_message_with_images(vec![
        ContentPart::Text {
            text: "Here it is. What do you see?".to_string(),
        },
        ContentPart::ImageUrl {
            image_url: ImageUrl {
                url: "https://picsum.photos/id/237/200/300".to_string(),
                detail: None,
            },
        },
    ]);

    let params = ChatParams {
        temperature: Some(0.0),
        max_tokens: Some(100),
        ..Default::default()
    };
    let response = client
        .chat(
            Model::new("pixtral-large-latest".to_string()),
            vec![intro, with_image],
            Some(params),
        )
        .unwrap();

    // Any non-empty reply shows mixed message kinds serialize and round-trip.
    assert!(!response.choices[0].message.content.text().is_empty());
}
#[test]
fn test_text_only_message_still_works() {
    setup::setup();
    let client = make_client();

    // Regression guard: plain-text messages (the common case) must keep
    // working on top of the new ChatMessageContent representation.
    let question = ChatMessage::new_user_message("What is 7 + 8?");
    let params = ChatParams {
        max_tokens: Some(50),
        temperature: Some(0.0),
        ..Default::default()
    };
    let response = client
        .chat(Model::mistral_small_latest(), vec![question], Some(params))
        .unwrap();

    let text = response.choices[0].message.content.text();
    assert!(text.contains("15"), "Expected '15', got: {text}");
}
#[test]
fn test_reasoning_field_presence() {
    setup::setup();
    let client = make_client();

    // A non-Magistral model is expected to come back with reasoning == None
    // (or the field absent entirely); the point of this test is only that the
    // response deserializes cleanly either way.
    let question = ChatMessage::new_user_message("What is 2 + 2?");
    let params = ChatParams {
        max_tokens: Some(50),
        temperature: Some(0.0),
        ..Default::default()
    };
    let response = client
        .chat(Model::mistral_small_latest(), vec![question], Some(params))
        .unwrap();

    // Touch the field without asserting a value — presence/absence are both valid.
    let _ = response.choices[0].reasoning.as_ref();
}