14 Commits

Author         SHA1        Message                                                            Date
Ivan Gabriele  0386b95b7b  chore: Release mistralai-client version 0.3.0                      2024-03-04 03:25:41 +01:00
Ivan Gabriele  c61f2278bb  docs(changelog): update                                            2024-03-04 03:25:35 +01:00
Ivan Gabriele  4c8e330c95  ci(release): fix pre-release-replacements regex                    2024-03-04 03:25:20 +01:00
Ivan Gabriele  64c7f2feb5  ci(release): fix typo in pre-release-replacements prop             2024-03-04 03:21:13 +01:00
Ivan Gabriele  f44d951247  feat!: add client.embeddings() method                              2024-03-04 03:16:59 +01:00
                           BREAKING CHANGE: Models are now enforced by `Model` & `EmbedModel` enums.
Ivan Gabriele  4e702aa48e  refactor: rename ListModels* to ModelList*                         2024-03-04 01:56:48 +01:00
Ivan Gabriele  809af31dd0  ci(release): fix changelog version replacement                     2024-03-03 19:49:27 +01:00
Ivan Gabriele  7016cffb05  chore: Release mistralai-client version 0.2.0                      2024-03-03 19:45:03 +01:00
Ivan Gabriele  43cf87529e  docs(changelog): update                                            2024-03-03 19:44:54 +01:00
Ivan Gabriele  7627a336cc  ci(release): add missing --execute option in Makefile definition   2024-03-03 19:44:43 +01:00
Ivan Gabriele  4983f69cd5  docs(changelog): update                                            2024-03-03 19:43:13 +01:00
Ivan Gabriele  814b9918b3  feat: add client.list_models() method                              2024-03-03 19:42:00 +01:00
Ivan Gabriele  7de2b19b98  feat!: simplify chat completion call                               2024-03-03 19:14:01 +01:00
                           BREAKING CHANGE: Chat completions must now be called directly from client.chat() without building a request in between.
Ivan Gabriele  8cb2c3cd0c  ci(release): setup                                                 2024-03-03 19:03:06 +01:00
14 changed files with 356 additions and 138 deletions

CHANGELOG.md (new file)

@@ -0,0 +1,28 @@
## [0.3.0](https://github.com/ivangabriele/mistralai-client-rs/compare/v0.2.0...v0.3.0) (2024-03-04)
### ⚠ BREAKING CHANGES
* Models are now enforced by `Model` & `EmbedModel` enums.
### Features
* add client.embeddings() method ([f44d951](https://github.com/ivangabriele/mistralai-client-rs/commit/f44d95124767c3a3f14c78c4be3d9c203fac49ad))
## [0.2.0](https://github.com/ivangabriele/mistralai-client-rs/compare/v0.1.0...v0.2.0) (2024-03-03)
### ⚠ BREAKING CHANGES
* Chat completions must now be called directly from client.chat() without building a request in between.
### Features
* add client.list_models() method ([814b991](https://github.com/ivangabriele/mistralai-client-rs/commit/814b9918b3aca78bfd606b5b9bb470b70ea2a5c6))
* simplify chat completion call ([7de2b19](https://github.com/ivangabriele/mistralai-client-rs/commit/7de2b19b981f1d65fe5c566fcaf521e4f2a9ced1))
## [0.1.0](https://github.com/ivangabriele/mistralai-client-rs/compare/7d3b438d16e9936591b6454525968c5c2cdfd6ad...v0.1.0) (2024-03-03)
### Features
* add chat completion without streaming ([7d3b438](https://github.com/ivangabriele/mistralai-client-rs/commit/7d3b438d16e9936591b6454525968c5c2cdfd6ad))
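Taken together, the two releases above change the public API in the same direction; a minimal migration sketch, assembled from the README and test diffs below (not code taken verbatim from the repository):

```rs
use mistralai_client::v1::{
    chat_completion::{ChatCompletionMessage, ChatCompletionMessageRole},
    client::Client,
    constants::Model,
};

fn main() {
    let client = Client::new(None, None, None, None);

    // 0.1.0: let model = OPEN_MISTRAL_7B.to_string();
    //        let request = ChatCompletionRequest::new(model, messages, None);
    //        let result = client.chat(request).unwrap();
    // 0.3.0: models are enum values and client.chat() takes the pieces directly.
    let model = Model::OpenMistral7b;
    let messages = vec![ChatCompletionMessage {
        role: ChatCompletionMessageRole::user,
        content: "Just guess the next word: \"Eiffel ...\"?".to_string(),
    }];
    let response = client.chat(model, messages, None).unwrap();
    println!("{}", response.choices[0].message.content);
}
```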

Cargo.toml

@@ -2,7 +2,7 @@
name = "mistralai-client"
description = "Mistral AI API client library for Rust (unofficial)."
license = "Apache-2.0"
version = "0.1.0"
version = "0.3.0"
edition = "2021"
rust-version = "1.76.0"

Makefile

@@ -1,6 +1,26 @@
.PHONY: test
define RELEASE_TEMPLATE
conventional-changelog -p conventionalcommits -i CHANGELOG.md -s
git add .
git commit -m "docs(changelog): update"
git push origin HEAD
cargo release $(1) --execute
git push origin HEAD --tags
endef
test:
cargo test --no-fail-fast
test-cover:
cargo tarpaulin --frozen --no-fail-fast --out Xml --skip-clean
test-watch:
cargo watch -x "test -- --nocapture"
release-patch:
$(call RELEASE_TEMPLATE,patch)
release-minor:
$(call RELEASE_TEMPLATE,minor)
release-major:
$(call RELEASE_TEMPLATE,major)
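For reference, `make release-minor` expands `$(call RELEASE_TEMPLATE,minor)` into the recipe below, with `$(1)` substituted by `minor` (a sketch of the expansion, not captured make output):

```sh
conventional-changelog -p conventionalcommits -i CHANGELOG.md -s
git add .
git commit -m "docs(changelog): update"
git push origin HEAD
cargo release minor --execute
git push origin HEAD --tags
```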

README.md

@@ -26,8 +26,8 @@ Rust client for the Mistral AI API.
- [x] Chat without streaming
- [ ] Chat with streaming
- [ ] Embedding
- [ ] List models
- [x] Embedding
- [x] List models
- [ ] Function Calling
## Installation
@@ -63,20 +63,17 @@ fn main() {
### Chat without streaming
```rs
use mistralai::v1::{
chat_completion::{
ChatCompletionMessage, ChatCompletionMessageRole, ChatCompletionRequest,
ChatCompletionRequestOptions,
},
use mistralai_client::v1::{
chat_completion::{ChatCompletionMessage, ChatCompletionMessageRole, ChatCompletionParams},
client::Client,
constants::OPEN_MISTRAL_7B,
constants::Model,
};
fn main() {
// This example assumes you have set the `MISTRAL_API_KEY` environment variable.
let client = Client::new(None, None, None, None);
let model = OPEN_MISTRAL_7B.to_string();
let model = Model::OpenMistral7b;
let messages = vec![ChatCompletionMessage {
role: ChatCompletionMessageRole::user,
content: "Just guess the next word: \"Eiffel ...\"?".to_string(),
@@ -87,8 +84,7 @@ fn main() {
..Default::default()
};
let chat_completion_request = ChatCompletionRequest::new(model, messages, Some(options));
let result = client.chat(chat_completion_request).unwrap();
let result = client.chat(model, messages, Some(options)).unwrap();
println!("Assistant: {}", result.choices[0].message.content);
// => "Assistant: Tower. [...]"
}
@@ -100,8 +96,37 @@ _In progress._
### Embeddings
_In progress._
```rs
use mistralai_client::v1::{client::Client, constants::EmbedModel};
fn main() {
// This example assumes you have set the `MISTRAL_API_KEY` environment variable.
let client: Client = Client::new(None, None, None, None);
let model = EmbedModel::MistralEmbed;
let input = vec!["Embed this sentence.", "As well as this one."]
.iter()
.map(|s| s.to_string())
.collect();
let options = None;
let response = client.embeddings(model, input, options).unwrap();
println!("Embeddings: {:?}", response.data);
// => "Embeddings: [{...}, {...}]"
}
```
### List models
_In progress._
```rs
use mistralai_client::v1::client::Client;
fn main() {
// This example assumes you have set the `MISTRAL_API_KEY` environment variable.
let client = Client::new(None, None, None, None);
let result = client.list_models().unwrap();
println!("First Model ID: {:?}", result.data[0].id);
// => "First Model ID: open-mistral-7b"
}
```

release.toml (new file)

@@ -0,0 +1,2 @@
allow-branch = ["main"]
pre-release-replacements = [{ file = "CHANGELOG.md", search = "## \\[\\]", replace = "## [{{version}}]" }]
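Context for the two `ci(release)` fixes above: the changelog generated by `conventional-changelog` contains a versionless `## []` heading, which this replacement lets `cargo release` rewrite with the released version. A minimal Rust sketch of the same substitution, using the `regex` crate purely for illustration (cargo-release applies the replacement itself, based on release.toml):

```rs
// Illustration only: the `regex` crate is assumed here as a dependency;
// it is not part of this diff.
use regex::Regex;

fn main() {
    let changelog = "## []\n### Features\n* add client.embeddings() method\n";
    // Same pattern as in release.toml: "## \\[\\]".
    let re = Regex::new(r"## \[\]").unwrap();
    let updated = re.replace(changelog, "## [0.3.0]");
    assert!(updated.starts_with("## [0.3.0]"));
    println!("{updated}");
}
```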

src/v1/chat_completion.rs

@@ -1,9 +1,9 @@
use serde::{Deserialize, Serialize};
use crate::v1::common;
use crate::v1::{common, constants};
#[derive(Debug)]
pub struct ChatCompletionRequestOptions {
pub struct ChatCompletionParams {
pub tools: Option<String>,
pub temperature: Option<f32>,
pub max_tokens: Option<u32>,
@@ -12,7 +12,7 @@ pub struct ChatCompletionRequestOptions {
pub stream: Option<bool>,
pub safe_prompt: Option<bool>,
}
impl Default for ChatCompletionRequestOptions {
impl Default for ChatCompletionParams {
fn default() -> Self {
Self {
tools: None,
@@ -29,7 +29,7 @@ impl Default for ChatCompletionRequestOptions {
#[derive(Debug, Serialize, Deserialize)]
pub struct ChatCompletionRequest {
pub messages: Vec<ChatCompletionMessage>,
pub model: String,
pub model: constants::Model,
#[serde(skip_serializing_if = "Option::is_none")]
pub tools: Option<String>,
#[serde(skip_serializing_if = "Option::is_none")]
@@ -44,18 +44,18 @@ pub struct ChatCompletionRequest {
pub stream: Option<bool>,
#[serde(skip_serializing_if = "Option::is_none")]
pub safe_prompt: Option<bool>,
// TODO Check that prop (seen in official Python client but not in API doc).
// TODO Check this prop (seen in official Python client but not in API doc).
// pub tool_choice: Option<String>,
// TODO Check that prop (seen in official Python client but not in API doc).
// TODO Check this prop (seen in official Python client but not in API doc).
// pub response_format: Option<String>,
}
impl ChatCompletionRequest {
pub fn new(
model: String,
model: constants::Model,
messages: Vec<ChatCompletionMessage>,
options: Option<ChatCompletionRequestOptions>,
options: Option<ChatCompletionParams>,
) -> Self {
let ChatCompletionRequestOptions {
let ChatCompletionParams {
tools,
temperature,
max_tokens,
@@ -85,7 +85,7 @@ pub struct ChatCompletionResponse {
pub object: String,
/// Unix timestamp (in seconds).
pub created: u32,
pub model: String,
pub model: constants::Model,
pub choices: Vec<ChatCompletionChoice>,
pub usage: common::ResponseUsage,
}
@@ -95,7 +95,7 @@ pub struct ChatCompletionChoice {
pub index: u32,
pub message: ChatCompletionMessage,
pub finish_reason: String,
// TODO Check that prop (seen in API responses but undocumented).
// TODO Check this prop (seen in API responses but undocumented).
// pub logprobs: ???
}

src/v1/client.rs

@@ -2,8 +2,12 @@ use crate::v1::error::APIError;
use minreq::Response;
use crate::v1::{
chat_completion::{ChatCompletionRequest, ChatCompletionResponse},
constants::API_URL_BASE,
chat_completion::{
ChatCompletionMessage, ChatCompletionParams, ChatCompletionRequest, ChatCompletionResponse,
},
constants::{EmbedModel, Model, API_URL_BASE},
embedding::{EmbeddingRequest, EmbeddingRequestOptions, EmbeddingResponse},
model_list::ModelListResponse,
};
pub struct Client {
@@ -36,7 +40,7 @@ impl Client {
pub fn build_request(&self, request: minreq::Request) -> minreq::Request {
let authorization = format!("Bearer {}", self.api_key);
let user_agent = format!(
"ivangabriele/mistral-client-rs/{}",
"ivangabriele/mistralai-client-rs/{}",
env!("CARGO_PKG_VERSION")
);
@@ -51,20 +55,26 @@ impl Client {
pub fn get(&self, path: &str) -> Result<Response, APIError> {
let url = format!("{}{}", self.endpoint, path);
let request = self.build_request(minreq::post(url));
let request = self.build_request(minreq::get(url));
let result = request.send();
match result {
Ok(res) => {
if (200..=299).contains(&res.status_code) {
Ok(res)
Ok(response) => {
print!("{:?}", response.as_str().unwrap());
if (200..=299).contains(&response.status_code) {
Ok(response)
} else {
Err(APIError {
message: format!("{}: {}", res.status_code, res.as_str().unwrap()),
message: format!(
"{}: {}",
response.status_code,
response.as_str().unwrap()
),
})
}
}
Err(e) => Err(self.new_error(e)),
Err(error) => Err(self.new_error(error)),
}
}
@@ -80,64 +90,63 @@ impl Client {
let result = request.with_json(params).unwrap().send();
match result {
Ok(res) => {
print!("{:?}", res.as_str().unwrap());
Ok(response) => {
print!("{:?}", response.as_str().unwrap());
if (200..=299).contains(&res.status_code) {
Ok(res)
if (200..=299).contains(&response.status_code) {
Ok(response)
} else {
Err(APIError {
message: format!("{}: {}", res.status_code, res.as_str().unwrap()),
message: format!(
"{}: {}",
response.status_code,
response.as_str().unwrap()
),
})
}
}
Err(e) => Err(self.new_error(e)),
Err(error) => Err(self.new_error(error)),
}
}
pub fn delete(&self, path: &str) -> Result<Response, APIError> {
let url = format!("{}{}", self.endpoint, path);
let request = self.build_request(minreq::post(url));
pub fn chat(
&self,
model: Model,
messages: Vec<ChatCompletionMessage>,
options: Option<ChatCompletionParams>,
) -> Result<ChatCompletionResponse, APIError> {
let request = ChatCompletionRequest::new(model, messages, options);
let result = request.send();
match result {
Ok(res) => {
if (200..=299).contains(&res.status_code) {
Ok(res)
} else {
Err(APIError {
message: format!("{}: {}", res.status_code, res.as_str().unwrap()),
})
}
}
Err(e) => Err(self.new_error(e)),
}
}
// pub fn completion(&self, req: CompletionRequest) -> Result<CompletionResponse, APIError> {
// let res = self.post("/completions", &req)?;
// let r = res.json::<CompletionResponse>();
// match r {
// Ok(r) => Ok(r),
// Err(e) => Err(self.new_error(e)),
// }
// }
// pub fn embedding(&self, req: EmbeddingRequest) -> Result<EmbeddingResponse, APIError> {
// let res = self.post("/embeddings", &req)?;
// let r = res.json::<EmbeddingResponse>();
// match r {
// Ok(r) => Ok(r),
// Err(e) => Err(self.new_error(e)),
// }
// }
pub fn chat(&self, request: ChatCompletionRequest) -> Result<ChatCompletionResponse, APIError> {
let response = self.post("/chat/completions", &request)?;
let result = response.json::<ChatCompletionResponse>();
match result {
Ok(r) => Ok(r),
Err(e) => Err(self.new_error(e)),
Ok(response) => Ok(response),
Err(error) => Err(self.new_error(error)),
}
}
pub fn embeddings(
&self,
model: EmbedModel,
input: Vec<String>,
options: Option<EmbeddingRequestOptions>,
) -> Result<EmbeddingResponse, APIError> {
let request = EmbeddingRequest::new(model, input, options);
let response = self.post("/embeddings", &request)?;
let result = response.json::<EmbeddingResponse>();
match result {
Ok(response) => Ok(response),
Err(error) => Err(self.new_error(error)),
}
}
pub fn list_models(&self) -> Result<ModelListResponse, APIError> {
let response = self.get("/models")?;
let result = response.json::<ModelListResponse>();
match result {
Ok(response) => Ok(response),
Err(error) => Err(self.new_error(error)),
}
}
@@ -146,30 +155,4 @@ impl Client {
message: err.to_string(),
}
}
// fn query_params(
// limit: Option<i64>,
// order: Option<String>,
// after: Option<String>,
// before: Option<String>,
// mut url: String,
// ) -> String {
// let mut params = vec![];
// if let Some(limit) = limit {
// params.push(format!("limit={}", limit));
// }
// if let Some(order) = order {
// params.push(format!("order={}", order));
// }
// if let Some(after) = after {
// params.push(format!("after={}", after));
// }
// if let Some(before) = before {
// params.push(format!("before={}", before));
// }
// if !params.is_empty() {
// url = format!("{}?{}", url, params.join("&"));
// }
// url
// }
}
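One pattern worth noting in this file: `get()` and `post()` now duplicate the same status-range check and error formatting. A hypothetical helper sketching that shared logic (not part of this diff):

```rs
// Hypothetical helper, not in the repository: it mirrors the
// (200..=299).contains(&status_code) branch used by get() and post().
fn check_status(status_code: i32, body: &str) -> Result<(), String> {
    if (200..=299).contains(&status_code) {
        Ok(())
    } else {
        Err(format!("{}: {}", status_code, body))
    }
}

fn main() {
    assert!(check_status(200, "{}").is_ok());
    assert_eq!(
        check_status(401, "Unauthorized").unwrap_err(),
        "401: Unauthorized"
    );
}
```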

src/v1/constants.rs

@@ -1,7 +1,23 @@
use serde::{Deserialize, Serialize};
pub const API_URL_BASE: &str = "https://api.mistral.ai/v1";
pub const OPEN_MISTRAL_7B: &str = "open-mistral-7b";
pub const OPEN_MISTRAL_8X7B: &str = "open-mixtral-8x7b";
pub const MISTRAL_SMALL_LATEST: &str = "mistral-small-latest";
pub const MISTRAL_MEDIUM_LATEST: &str = "mistral-medium-latest";
pub const MISTRAL_LARGE_LATEST: &str = "mistral-large-latest";
#[derive(Clone, Debug, Eq, PartialEq, Deserialize, Serialize)]
pub enum Model {
#[serde(rename = "open-mistral-7b")]
OpenMistral7b,
#[serde(rename = "open-mistral-8x7b")]
OpenMistral8x7b,
#[serde(rename = "mistral-small-latest")]
MistralSmallLatest,
#[serde(rename = "mistral-medium-latest")]
MistralMediumLatest,
#[serde(rename = "mistral-large-latest")]
MistralLargeLatest,
}
#[derive(Clone, Debug, Eq, PartialEq, Deserialize, Serialize)]
pub enum EmbedModel {
#[serde(rename = "mistral-embed")]
MistralEmbed,
}
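The `#[serde(rename = ...)]` attributes are what make the enum-typed `model` fields wire-compatible: each variant serializes to the exact model-ID string the API expects. A quick sketch, assuming `serde_json` as a dev-dependency (the client itself serializes requests through minreq's with_json()):

```rs
use mistralai_client::v1::constants::{EmbedModel, Model};

fn main() {
    // serde_json is assumed here purely for illustration.
    let chat_model = serde_json::to_string(&Model::OpenMistral7b).unwrap();
    assert_eq!(chat_model, r#""open-mistral-7b""#);

    let embed_model = serde_json::to_string(&EmbedModel::MistralEmbed).unwrap();
    assert_eq!(embed_model, r#""mistral-embed""#);

    // Responses deserialize back into the enums the same way.
    let parsed: Model = serde_json::from_str(r#""mistral-large-latest""#).unwrap();
    assert_eq!(parsed, Model::MistralLargeLatest);
}
```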

src/v1/embedding.rs (new file)

@@ -0,0 +1,60 @@
use serde::{Deserialize, Serialize};
use crate::v1::{common, constants};
#[derive(Debug)]
pub struct EmbeddingRequestOptions {
pub encoding_format: Option<EmbeddingRequestEncodingFormat>,
}
impl Default for EmbeddingRequestOptions {
fn default() -> Self {
Self {
encoding_format: None,
}
}
}
#[derive(Debug, Serialize, Deserialize)]
pub struct EmbeddingRequest {
pub model: constants::EmbedModel,
pub input: Vec<String>,
#[serde(skip_serializing_if = "Option::is_none")]
pub encoding_format: Option<EmbeddingRequestEncodingFormat>,
}
impl EmbeddingRequest {
pub fn new(
model: constants::EmbedModel,
input: Vec<String>,
options: Option<EmbeddingRequestOptions>,
) -> Self {
let EmbeddingRequestOptions { encoding_format } = options.unwrap_or_default();
Self {
model,
input,
encoding_format,
}
}
}
#[derive(Clone, Debug, Eq, PartialEq, Deserialize, Serialize)]
#[allow(non_camel_case_types)]
pub enum EmbeddingRequestEncodingFormat {
float,
}
#[derive(Clone, Debug, Deserialize, Serialize)]
pub struct EmbeddingResponse {
pub id: String,
pub object: String,
pub model: constants::EmbedModel,
pub data: Vec<EmbeddingResponseDataItem>,
pub usage: common::ResponseUsage,
}
#[derive(Clone, Debug, Deserialize, Serialize)]
pub struct EmbeddingResponseDataItem {
pub index: u32,
pub embedding: Vec<f32>,
pub object: String,
}
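`EmbeddingRequest::new` follows the same convention as `ChatCompletionRequest::new`: a `None` options argument falls back to `EmbeddingRequestOptions::default()`. A short sketch using only the types added in this file:

```rs
use mistralai_client::v1::{
    constants::EmbedModel,
    embedding::{EmbeddingRequest, EmbeddingRequestEncodingFormat, EmbeddingRequestOptions},
};

fn main() {
    // None is equivalent to EmbeddingRequestOptions::default().
    let request = EmbeddingRequest::new(
        EmbedModel::MistralEmbed,
        vec!["Embed this sentence.".to_string()],
        None,
    );
    assert!(request.encoding_format.is_none());

    // Or set the only encoding format currently defined.
    let request = EmbeddingRequest::new(
        EmbedModel::MistralEmbed,
        vec!["As well as this one.".to_string()],
        Some(EmbeddingRequestOptions {
            encoding_format: Some(EmbeddingRequestEncodingFormat::float),
        }),
    );
    assert!(request.encoding_format.is_some());
}
```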

src/v1/mod.rs

@@ -2,4 +2,6 @@ pub mod chat_completion;
pub mod client;
pub mod common;
pub mod constants;
pub mod embedding;
pub mod error;
pub mod model_list;

src/v1/model_list.rs (new file)

@@ -0,0 +1,39 @@
use serde::{Deserialize, Serialize};
#[derive(Clone, Debug, Deserialize, Serialize)]
pub struct ModelListResponse {
pub object: String,
pub data: Vec<ModelListData>,
}
#[derive(Clone, Debug, Deserialize, Serialize)]
pub struct ModelListData {
pub id: String,
pub object: String,
/// Unix timestamp (in seconds).
pub created: u32,
pub owned_by: String,
pub permission: Vec<ModelListDataPermission>,
// TODO Check this prop (seen in API responses but undocumented).
// pub root: ???,
// TODO Check this prop (seen in API responses but undocumented).
// pub parent: ???,
}
#[derive(Clone, Debug, Deserialize, Serialize)]
pub struct ModelListDataPermission {
pub id: String,
pub object: String,
/// Unix timestamp (in seconds).
pub created: u32,
pub allow_create_engine: bool,
pub allow_sampling: bool,
pub allow_logprobs: bool,
pub allow_search_indices: bool,
pub allow_view: bool,
pub allow_fine_tuning: bool,
pub organization: String,
pub is_blocking: bool,
// TODO Check this prop (seen in API responses but undocumented).
// pub group: ???,
}
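These structs exist to deserialize the `GET /models` payload: `client.list_models()` simply calls `response.json::<ModelListResponse>()`. A round-trip sketch with a hand-written JSON fragment (illustrative values, not a captured API response; `serde_json` assumed):

```rs
use mistralai_client::v1::model_list::ModelListResponse;

fn main() {
    // Illustrative payload shaped after the struct fields above.
    let json = r#"{
        "object": "list",
        "data": [{
            "id": "open-mistral-7b",
            "object": "model",
            "created": 1709500000,
            "owned_by": "mistralai",
            "permission": []
        }]
    }"#;

    let response: ModelListResponse = serde_json::from_str(json).unwrap();
    assert_eq!(response.object, "list");
    assert_eq!(response.data[0].id, "open-mistral-7b");
}
```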

tests: chat completion (file name not shown)

@@ -1,11 +1,8 @@
use jrest::expect;
use mistralai_client::v1::{
chat_completion::{
ChatCompletionMessage, ChatCompletionMessageRole, ChatCompletionRequest,
ChatCompletionRequestOptions,
},
chat_completion::{ChatCompletionMessage, ChatCompletionMessageRole, ChatCompletionParams},
client::Client,
constants::OPEN_MISTRAL_7B,
constants::Model,
};
#[test]
@@ -17,37 +14,27 @@ fn test_chat_completion() {
let client = Client::new(None, None, None, None);
let model = OPEN_MISTRAL_7B.to_string();
let model = Model::OpenMistral7b;
let messages = vec![ChatCompletionMessage {
role: ChatCompletionMessageRole::user,
content: "Just guess the next word: \"Eiffel ...\"?".to_string(),
}];
let options = ChatCompletionRequestOptions {
let options = ChatCompletionParams {
temperature: Some(0.0),
random_seed: Some(42),
..Default::default()
};
let chat_completion_request = ChatCompletionRequest::new(model, messages, Some(options));
let result = client.chat(chat_completion_request);
let response = client.chat(model, messages, Some(options)).unwrap();
match result {
Ok(res) => {
expect!(res.model).to_be("open-mistral-7b".to_string());
expect!(res.object).to_be("chat.completion".to_string());
expect!(res.choices.len()).to_be(1);
expect!(res.choices[0].index).to_be(0);
expect!(res.choices[0].message.role.clone())
.to_be(ChatCompletionMessageRole::assistant);
expect!(res.choices[0].message.content.clone()).to_be(
"Tower. The Eiffel Tower is a famous landmark in Paris, France.".to_string(),
);
expect!(res.usage.prompt_tokens).to_be_greater_than(0);
expect!(res.usage.completion_tokens).to_be_greater_than(0);
expect!(res.usage.total_tokens).to_be_greater_than(21);
}
Err(err) => {
panic!("Error: {}", err);
}
}
expect!(response.model).to_be(Model::OpenMistral7b);
expect!(response.object).to_be("chat.completion".to_string());
expect!(response.choices.len()).to_be(1);
expect!(response.choices[0].index).to_be(0);
expect!(response.choices[0].message.role.clone()).to_be(ChatCompletionMessageRole::assistant);
expect!(response.choices[0].message.content.clone())
.to_be("Tower. The Eiffel Tower is a famous landmark in Paris, France.".to_string());
expect!(response.usage.prompt_tokens).to_be_greater_than(0);
expect!(response.usage.completion_tokens).to_be_greater_than(0);
expect!(response.usage.total_tokens).to_be_greater_than(0);
}

tests: embeddings (new file; file name not shown)

@@ -0,0 +1,31 @@
use jrest::expect;
use mistralai_client::v1::{client::Client, constants::EmbedModel};
#[test]
fn test_embeddings() {
extern crate dotenv;
use dotenv::dotenv;
dotenv().ok();
let client: Client = Client::new(None, None, None, None);
let model = EmbedModel::MistralEmbed;
let input = vec!["Embed this sentence.", "As well as this one."]
.iter()
.map(|s| s.to_string())
.collect();
let options = None;
let response = client.embeddings(model, input, options).unwrap();
expect!(response.model).to_be(EmbedModel::MistralEmbed);
expect!(response.object).to_be("list".to_string());
expect!(response.data.len()).to_be(2);
expect!(response.data[0].index).to_be(0);
expect!(response.data[0].object.clone()).to_be("embedding".to_string());
expect!(response.data[0].embedding.len()).to_be_greater_than(0);
expect!(response.usage.prompt_tokens).to_be_greater_than(0);
expect!(response.usage.completion_tokens).to_be(0);
expect!(response.usage.total_tokens).to_be_greater_than(0);
}

tests: list models (new file; file name not shown)

@@ -0,0 +1,25 @@
use jrest::expect;
use mistralai_client::v1::client::Client;
#[test]
fn test_list_models() {
extern crate dotenv;
use dotenv::dotenv;
dotenv().ok();
let client = Client::new(None, None, None, None);
let response = client.list_models().unwrap();
expect!(response.object).to_be("list".to_string());
expect!(response.data.len()).to_be_greater_than(0);
// let open_mistral_7b_data_item = response
// .data
// .iter()
// .find(|item| item.id == "open-mistral-7b")
// .unwrap();
// expect!(open_mistral_7b_data_item.id).to_be("open-mistral-7b".to_string());
}