use jrest::expect;

use mistralai_client::v1::{
    chat::{ChatMessage, ChatParams},
    client::Client,
    constants::Model,
};

/// Exercises every exported model constant end-to-end: sends one
/// deterministic chat request per model and checks the response shape.
///
/// NOTE(review): this is a live integration test — `Client::new` reads
/// credentials from the environment and `chat` performs a network call,
/// so it requires valid API access to pass.
#[test]
fn test_model_constants() {
    // Model constants under test; each gets one chat round-trip.
    let models_under_test = [
        Model::mistral_small_latest(),
        Model::mistral_large_latest(),
        Model::open_mistral_nemo(),
        Model::codestral_latest(),
    ];

    // All-default client configuration (key/endpoint resolved internally).
    let client = Client::new(None, None, None, None).unwrap();

    // Deterministic request: zero temperature plus a fixed seed.
    let prompt = vec![ChatMessage::new_user_message("A number between 0 and 100?")];
    let params = ChatParams {
        temperature: Some(0.0),
        random_seed: Some(42),
        ..Default::default()
    };

    for model in models_under_test {
        let response = client
            .chat(model.clone(), prompt.clone(), Some(params.clone()))
            .unwrap();

        // The echoed model must match the one requested.
        expect!(response.model).to_be(model);
        expect!(response.object).to_be("chat.completion".to_string());
        // Exactly one choice, indexed 0, carrying non-empty content.
        expect!(response.choices.len()).to_be(1);
        expect!(response.choices[0].index).to_be(0);
        expect!(response.choices[0].message.content.len()).to_be_greater_than(0);
    }
}
|