5 Commits

Author SHA1 Message Date
Ivan Gabriele
0c097aa56d ci(release): v0.11.0 2024-06-22 14:05:11 +02:00
Ivan Gabriele
e6539c0ccf docs(changelog): update 2024-06-22 14:05:04 +02:00
Ivan Gabriele
30156c5273 test: remove useless setup in constants test 2024-06-22 14:02:52 +02:00
Ivan Gabriele
ecd0c3028f feat(constants): add OpenMixtral8x22b, MistralTiny & CodestralLatest to Model enum 2024-06-22 13:22:57 +02:00
Ivan Gabriele
0df67b1b25 fix(chat): implement Clone trait for ChatParams & ResponseFormat 2024-06-22 13:09:15 +02:00
5 changed files with 59 additions and 3 deletions

View File

@@ -1,3 +1,12 @@
## [0.11.0](https://github.com/ivangabriele/mistralai-client-rs/compare/v0.10.0...v0.11.0) (2024-06-22)
### Features
* **constants:** add OpenMixtral8x22b, MistralTiny & CodestralLatest to Model enum ([ecd0c30](https://github.com/ivangabriele/mistralai-client-rs/commit/ecd0c3028fdcfab32b867eb1eed86182f5f4ab81))
### Bug Fixes
* **chat:** implement Clone trait for ChatParams & ResponseFormat ([0df67b1](https://github.com/ivangabriele/mistralai-client-rs/commit/0df67b1b2571fb04b636ce015a2daabe629ff352))
## [0.10.0](https://github.com/ivangabriele/mistralai-client-rs/compare/v0.9.0...v0.10.0) (2024-06-07)
### ⚠ BREAKING CHANGES

View File

@@ -2,7 +2,7 @@
name = "mistralai-client"
description = "Mistral AI API client library for Rust (unofficial)."
license = "Apache-2.0"
version = "0.10.0"
version = "0.11.0"
edition = "2021"
rust-version = "1.76.0"

View File

@@ -46,7 +46,7 @@ pub enum ChatMessageRole {
/// The format that the model must output.
///
/// See the [API documentation](https://docs.mistral.ai/api/#operation/createChatCompletion) for more information.
#[derive(Debug, Serialize, Deserialize)]
#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct ResponseFormat {
#[serde(rename = "type")]
pub type_: String,
@@ -65,7 +65,7 @@ impl ResponseFormat {
/// The parameters for the chat request.
///
/// See the [API documentation](https://docs.mistral.ai/api/#operation/createChatCompletion) for more information.
#[derive(Debug)]
#[derive(Clone, Debug)]
pub struct ChatParams {
/// The maximum number of tokens to generate in the completion.
///

View File

@@ -8,12 +8,18 @@ pub enum Model {
OpenMistral7b,
#[serde(rename = "open-mixtral-8x7b")]
OpenMixtral8x7b,
#[serde(rename = "open-mixtral-8x22b")]
OpenMixtral8x22b,
#[serde(rename = "mistral-tiny")]
MistralTiny,
#[serde(rename = "mistral-small-latest")]
MistralSmallLatest,
#[serde(rename = "mistral-medium-latest")]
MistralMediumLatest,
#[serde(rename = "mistral-large-latest")]
MistralLargeLatest,
#[serde(rename = "codestral-latest")]
CodestralLatest,
}
#[derive(Clone, Debug, Eq, PartialEq, Deserialize, Serialize)]

View File

@@ -0,0 +1,41 @@
use jrest::expect;
use mistralai_client::v1::{
chat::{ChatMessage, ChatParams},
client::Client,
constants::Model,
};
// Integration smoke test: sends one chat completion request per `Model`
// variant and checks the shape of each response.
//
// NOTE(review): `Client::new(None, None, None, None)` presumably falls back to
// an API key from the environment (e.g. MISTRAL_API_KEY) — this test needs
// network access and a valid key; confirm it is kept out of offline CI runs.
#[test]
fn test_model_constant() {
    // One entry per `Model` variant; a newly added variant that is missing
    // here must be caught in review — the compiler cannot enforce
    // exhaustiveness on a `vec!` literal.
    let models = vec![
        Model::OpenMistral7b,
        Model::OpenMixtral8x7b,
        Model::OpenMixtral8x22b,
        Model::MistralTiny,
        Model::MistralSmallLatest,
        Model::MistralMediumLatest,
        Model::MistralLargeLatest,
        Model::CodestralLatest,
    ];
    let client = Client::new(None, None, None, None).unwrap();
    let messages = vec![ChatMessage::new_user_message("A number between 0 and 100?")];
    // temperature 0.0 plus a fixed seed — presumably to make responses as
    // reproducible as the API allows; TODO confirm the server honors
    // `random_seed`.
    let options = ChatParams {
        temperature: 0.0,
        random_seed: Some(42),
        ..Default::default()
    };
    for model in models {
        // `model`/`messages`/`options` are cloned because `chat` takes its
        // arguments by value and they are reused on later iterations (and
        // `model` is checked again below).
        let response = client
            .chat(model.clone(), messages.clone(), Some(options.clone()))
            .unwrap();
        // The API must echo the requested model back and return exactly one
        // choice with non-empty content.
        expect!(response.model).to_be(model);
        expect!(response.object).to_be("chat.completion".to_string());
        expect!(response.choices.len()).to_be(1);
        expect!(response.choices[0].index).to_be(0);
        expect!(response.choices[0].message.content.len()).to_be_greater_than(0);
    }
}