- Agents API (beta): create, get, update, delete, and list agents, with support for tools, handoffs, completion args, and guardrails.
- Conversations API (beta): create, append, history, messages, restart, delete, and list conversations; supports agent-backed and model-only conversations with function-calling and handoff execution modes.
- Multimodal: ChatMessageContent enum (Text/Parts) with ContentPart variants for text and image_url; constructors remain backwards compatible, and new_user_message_with_images() builds mixed-content messages.
- Chat: reasoning field on ChatResponseChoice for Magistral models.
- HTTP: PATCH methods for agent updates.
- Tests: 81 total (30 live API integration + 35 serde unit + 16 existing).
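For the multimodal additions, here is a minimal sketch of the new constructor and content enum. The exact signature of new_user_message_with_images() is assumed (a text prompt plus a list of image URLs), so treat this as illustrative rather than definitive:

```rust
use mistralai_client::v1::chat::{ChatMessage, ChatMessageContent};

fn main() {
    // Mixed text + image message via the new constructor. The argument
    // shapes here are assumptions, not confirmed signatures.
    let message = ChatMessage::new_user_message_with_images(
        "What is shown in this picture?",
        vec!["https://example.com/photo.jpg".to_string()],
    );

    // Content is now an enum: Text for plain strings, Parts for a list of
    // ContentPart values (text and image_url variants).
    match &message.content {
        ChatMessageContent::Text(text) => println!("plain text: {text}"),
        ChatMessageContent::Parts(parts) => println!("{} content parts", parts.len()),
    }

    // Existing plain-text constructors are unchanged (backwards compatible).
    let _text_only = ChatMessage::new_user_message("Hello!");
}
```

The integration tests below exercise the plain-text constructor and function calling end to end against the live API.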
use jrest::expect;

use mistralai_client::v1::{
    chat::{ChatMessage, ChatMessageRole, ChatParams, ChatResponseChoiceFinishReason},
    client::Client,
    constants::Model,
    tool::{Tool, ToolChoice},
};

mod setup;

#[tokio::test]
async fn test_client_chat_async() {
    setup::setup();

    let client = Client::new(None, None, None, None).unwrap();

    let model = Model::mistral_small_latest();
    let messages = vec![ChatMessage::new_user_message(
        "Guess the next word: \"Eiffel ...\"?",
    )];
    let options = ChatParams {
        temperature: Some(0.0),
        random_seed: Some(42),
        ..Default::default()
    };

    let response = client
        .chat_async(model, messages, Some(options))
        .await
        .unwrap();

    expect!(response.object).to_be("chat.completion".to_string());

    expect!(response.choices.len()).to_be(1);
    expect!(response.choices[0].index).to_be(0);
    expect!(response.choices[0].finish_reason.clone()).to_be(ChatResponseChoiceFinishReason::Stop);

    expect!(response.choices[0].message.role.clone()).to_be(ChatMessageRole::Assistant);
    expect!(response.choices[0]
        .message
        .content
        .text()
        .contains("Tower"))
    .to_be(true);

    expect!(response.usage.prompt_tokens).to_be_greater_than(0);
    expect!(response.usage.completion_tokens).to_be_greater_than(0);
    expect!(response.usage.total_tokens).to_be_greater_than(0);
}

#[tokio::test]
async fn test_client_chat_async_with_function_calling() {
    setup::setup();

    let tools = vec![Tool::new(
        "get_city_temperature".to_string(),
        "Get the current temperature in a city.".to_string(),
        serde_json::json!({
            "type": "object",
            "properties": {
                "city": {
                    "type": "string",
                    "description": "The name of the city."
                }
            },
            "required": ["city"]
        }),
    )];

    let client = Client::new(None, None, None, None).unwrap();

    let model = Model::mistral_small_latest();
    let messages = vec![ChatMessage::new_user_message(
        "What's the current temperature in Paris?",
    )];
    let options = ChatParams {
        temperature: Some(0.0),
        random_seed: Some(42),
        tool_choice: Some(ToolChoice::Any),
        tools: Some(tools),
        ..Default::default()
    };

    let response = client
        .chat_async(model, messages, Some(options))
        .await
        .unwrap();

    expect!(response.object).to_be("chat.completion".to_string());

    expect!(response.choices.len()).to_be(1);
    expect!(response.choices[0].index).to_be(0);
    expect!(response.choices[0].finish_reason.clone())
        .to_be(ChatResponseChoiceFinishReason::ToolCalls);

    expect!(response.choices[0].message.role.clone()).to_be(ChatMessageRole::Assistant);

    expect!(response.usage.prompt_tokens).to_be_greater_than(0);
    expect!(response.usage.completion_tokens).to_be_greater_than(0);
    expect!(response.usage.total_tokens).to_be_greater_than(0);
}
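The function-calling test only asserts that the model finished with a tool-call request; it never reads the call back out. A hedged sketch of doing so follows. The field names (tool_calls, function.name) are assumed from the OpenAI-style schema this client mirrors; check the crate's ChatMessage definition before relying on them:

```rust
use mistralai_client::v1::chat::ChatResponse;

/// Hedged sketch: return the name of the first tool the model asked to call,
/// or None if the response carried no tool calls. Field names are assumptions.
fn first_requested_tool(response: &ChatResponse) -> Option<String> {
    let calls = response.choices.first()?.message.tool_calls.as_ref()?;
    calls.first().map(|call| call.function.name.clone())
}
```

In the test above, a helper like this would be expected to return Some("get_city_temperature") once finish_reason is ToolCalls.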