13 Commits

Author SHA1 Message Date
Ivan Gabriele
7c96a4a88d ci(release): v0.5.0 2024-03-04 06:39:54 +01:00
Ivan Gabriele
14437bf609 docs(changelog): update 2024-03-04 06:39:47 +01:00
Ivan Gabriele
3c228914f7 feat: add client.embeddings_async() method 2024-03-04 06:39:21 +01:00
Ivan Gabriele
b69f7c617c feat: add client.list_models_async() method 2024-03-04 06:33:38 +01:00
Ivan Gabriele
75788b9395 refactor: migrate to reqwest-only 2024-03-04 06:33:38 +01:00
renovate[bot]
a862b92c98 chore(deps): update codecov/codecov-action action to v4 (#2)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2024-03-04 05:04:56 +01:00
Ivan Gabriele
47c9b9b4fe ci(release): v0.4.0 2024-03-04 04:58:33 +01:00
Ivan Gabriele
791bef34b3 docs(changelog): update 2024-03-04 04:58:26 +01:00
Ivan Gabriele
1dd59f6704 feat: add client.chat_async() method 2024-03-04 04:57:48 +01:00
Ivan Gabriele
33876183e4 feat!: wrap Client::new() return in a Result
BREAKING CHANGE: `Client::new()` now returns a `Result`.
2024-03-04 04:43:22 +01:00
Ivan Gabriele
1deab88251 feat!: add missing api key error
BREAKING CHANGE: `APIError` is renamed to `ApiError`.
2024-03-04 04:30:13 +01:00
Ivan Gabriele
b0a3f10c9f ci(release): add custom pre-release-commit-message 2024-03-04 03:32:23 +01:00
Ivan Gabriele
bbba6b9878 docs(readme): fix list models example 2024-03-04 03:27:20 +01:00
17 changed files with 460 additions and 138 deletions

View File

@@ -1,3 +1,3 @@
# This key is only used for development purposes. # This key is only used for development purposes.
# You'll only need one if you want to contribute to this library. # You'll only need one if you want to contribute to this library.
MISTRAL_API_KEY= export MISTRAL_API_KEY=

View File

@@ -22,7 +22,7 @@ jobs:
env: env:
MISTRAL_API_KEY: ${{ secrets.MISTRAL_API_KEY }} MISTRAL_API_KEY: ${{ secrets.MISTRAL_API_KEY }}
- name: Upload tests coverage - name: Upload tests coverage
uses: codecov/codecov-action@v3 uses: codecov/codecov-action@v4
with: with:
fail_ci_if_error: true fail_ci_if_error: true
token: ${{ secrets.CODECOV_TOKEN }} token: ${{ secrets.CODECOV_TOKEN }}

View File

@@ -1,3 +1,25 @@
## [0.5.0](https://github.com/ivangabriele/mistralai-client-rs/compare/v0.4.0...v) (2024-03-04)
### Features
* add client.embeddings_async() method ([3c22891](https://github.com/ivangabriele/mistralai-client-rs/commit/3c228914f78b0edd4a592091265b88d0bc55568b))
* add client.list_models_async() method ([b69f7c6](https://github.com/ivangabriele/mistralai-client-rs/commit/b69f7c617c15dd63abb61d004636512916d766bb))
## [0.4.0](https://github.com/ivangabriele/mistralai-client-rs/compare/v0.3.0...v) (2024-03-04)
### ⚠ BREAKING CHANGES
* `Client::new()` now returns a `Result`.
* `APIError` is renamed to `ApiError`.
### Features
* add client.chat_async() method ([1dd59f6](https://github.com/ivangabriele/mistralai-client-rs/commit/1dd59f67048c10458ab0382af8fdfe4ed21c82fa))
* add missing api key error ([1deab88](https://github.com/ivangabriele/mistralai-client-rs/commit/1deab88251fc706e0415a5e416ab9aee4b52f6f3))
* wrap Client::new() return in a Result ([3387618](https://github.com/ivangabriele/mistralai-client-rs/commit/33876183e41340f426aa1dd1b6d8b5c05c8e15b9))
## [0.3.0](https://github.com/ivangabriele/mistralai-client-rs/compare/v0.2.0...v) (2024-03-04) ## [0.3.0](https://github.com/ivangabriele/mistralai-client-rs/compare/v0.2.0...v) (2024-03-04)

View File

@@ -27,8 +27,15 @@ Then run:
git clone https://github.com/ivangabriele/mistralai-client-rs.git # or your fork git clone https://github.com/ivangabriele/mistralai-client-rs.git # or your fork
cd ./mistralai-client-rs cd ./mistralai-client-rs
cargo build cargo build
cp .env.example .env
``` ```
Then edit the `.env` file to set your `MISTRAL_API_KEY`.
> [!NOTE]
> All tests use either the `open-mistral-7b` or `mistral-embed` models and only consume a few dozen tokens.
> So you would have to run them thousands of times to even reach a single dollar of usage.
### Optional requirements ### Optional requirements
- [cargo-watch](https://github.com/watchexec/cargo-watch#install) for `make test-*-watch`. - [cargo-watch](https://github.com/watchexec/cargo-watch#install) for `make test-*-watch`.
@@ -51,5 +58,4 @@ Help us keep this project open and inclusive. Please read and follow our [Code o
## Commit Message Format ## Commit Message Format
This repository follow the [Conventional Commits](https://www.conventionalcommits.org/en/v1.0.0/) specification and This repository follow the [Conventional Commits](https://www.conventionalcommits.org/en/v1.0.0/) specification.
specificaly the [Angular Commit Message Guidelines](https://github.com/angular/angular/blob/main/CONTRIBUTING.md#commit).

View File

@@ -2,7 +2,7 @@
name = "mistralai-client" name = "mistralai-client"
description = "Mistral AI API client library for Rust (unofficial)." description = "Mistral AI API client library for Rust (unofficial)."
license = "Apache-2.0" license = "Apache-2.0"
version = "0.3.0" version = "0.5.0"
edition = "2021" edition = "2021"
rust-version = "1.76.0" rust-version = "1.76.0"
@@ -15,12 +15,11 @@ readme = "README.md"
repository = "https://github.com/ivangabriele/mistralai-client-rs" repository = "https://github.com/ivangabriele/mistralai-client-rs"
[dependencies] [dependencies]
minreq = { version = "2.11.0", features = ["https-rustls", "json-using-serde"] } reqwest = { version = "0.11.24", features = ["json", "blocking"] }
serde = { version = "1.0.197", features = ["derive"] } serde = { version = "1.0.197", features = ["derive"] }
serde_json = "1.0.114" serde_json = "1.0.114"
thiserror = "1.0.57" thiserror = "1.0.57"
tokio = { version = "1.36.0", features = ["full"] } tokio = { version = "1.36.0", features = ["full"] }
[dev-dependencies] [dev-dependencies]
dotenv = "0.15.0"
jrest = "0.2.3" jrest = "0.2.3"

View File

@@ -1,7 +1,9 @@
SHELL := /bin/bash
.PHONY: test .PHONY: test
define RELEASE_TEMPLATE define RELEASE_TEMPLATE
conventional-changelog -p conventionalcommits -i CHANGELOG.md -s conventional-changelog -p conventionalcommits -i ./CHANGELOG.md -s
git add . git add .
git commit -m "docs(changelog): update" git commit -m "docs(changelog): update"
git push origin HEAD git push origin HEAD
@@ -9,13 +11,6 @@ define RELEASE_TEMPLATE
git push origin HEAD --tags git push origin HEAD --tags
endef endef
test:
cargo test --no-fail-fast
test-cover:
cargo tarpaulin --frozen --no-fail-fast --out Xml --skip-clean
test-watch:
cargo watch -x "test -- --nocapture"
release-patch: release-patch:
$(call RELEASE_TEMPLATE,patch) $(call RELEASE_TEMPLATE,patch)
@@ -24,3 +19,10 @@ release-minor:
release-major: release-major:
$(call RELEASE_TEMPLATE,major) $(call RELEASE_TEMPLATE,major)
test:
@source ./.env && cargo test --all-targets --no-fail-fast
test-cover:
cargo tarpaulin --all-targets --frozen --no-fail-fast --out Xml --skip-clean
test-watch:
cargo watch -x "test -- --all-targets --nocapture"

View File

@@ -16,19 +16,26 @@ Rust client for the Mistral AI API.
- [As a client argument](#as-a-client-argument) - [As a client argument](#as-a-client-argument)
- [Usage](#usage) - [Usage](#usage)
- [Chat without streaming](#chat-without-streaming) - [Chat without streaming](#chat-without-streaming)
- [Chat without streaming (async)](#chat-without-streaming-async)
- [Chat with streaming](#chat-with-streaming) - [Chat with streaming](#chat-with-streaming)
- [Embeddings](#embeddings) - [Embeddings](#embeddings)
- [Embeddings (async)](#embeddings-async)
- [List models](#list-models) - [List models](#list-models)
- [List models (async)](#list-models-async)
--- ---
## Supported APIs ## Supported APIs
- [x] Chat without streaming - [x] Chat without streaming
- [x] Chat without streaming (async)
- [ ] Chat with streaming - [ ] Chat with streaming
- [x] Embedding - [x] Embedding
- [x] Embedding (async)
- [x] List models - [x] List models
- [x] List models (async)
- [ ] Function Calling - [ ] Function Calling
- [ ] Function Calling (async)
## Installation ## Installation
@@ -54,7 +61,7 @@ use mistralai_client::v1::client::Client;
fn main() { fn main() {
let api_key = "your_api_key"; let api_key = "your_api_key";
let client = Client::new(Some(api_key), None, None, None); let client = Client::new(Some(api_key), None, None, None).unwrap();
} }
``` ```
@@ -71,7 +78,7 @@ use mistralai_client::v1::{
fn main() { fn main() {
// This example suppose you have set the `MISTRAL_API_KEY` environment variable. // This example suppose you have set the `MISTRAL_API_KEY` environment variable.
let client = Client::new(None, None, None, None); let client = Client::new(None, None, None, None).unwrap();
let model = Model::OpenMistral7b; let model = Model::OpenMistral7b;
let messages = vec![ChatCompletionMessage { let messages = vec![ChatCompletionMessage {
@@ -90,6 +97,37 @@ fn main() {
} }
``` ```
### Chat without streaming (async)
```rs
use mistralai_client::v1::{
chat_completion::{ChatCompletionMessage, ChatCompletionMessageRole, ChatCompletionRequestOptions},
client::Client,
constants::Model,
};
#[tokio::main]
async fn main() {
// This example supposes you have set the `MISTRAL_API_KEY` environment variable.
let client = Client::new(None, None, None, None).unwrap();
let model = Model::OpenMistral7b;
let messages = vec![ChatCompletionMessage {
role: ChatCompletionMessageRole::user,
content: "Just guess the next word: \"Eiffel ...\"?".to_string(),
}];
let options = ChatCompletionRequestOptions {
temperature: Some(0.0),
random_seed: Some(42),
..Default::default()
};
let result = client.chat_async(model, messages, Some(options)).await.unwrap();
println!("Assistant: {}", result.choices[0].message.content);
// => "Assistant: Tower. [...]"
}
```
### Chat with streaming ### Chat with streaming
_In progress._ _In progress._
@@ -101,7 +139,7 @@ use mistralai_client::v1::{client::Client, constants::EmbedModel};
fn main() { fn main() {
// This example suppose you have set the `MISTRAL_API_KEY` environment variable. // This example suppose you have set the `MISTRAL_API_KEY` environment variable.
let client: Client = Client::new(None, None, None, None); let client: Client = Client::new(None, None, None, None).unwrap();
let model = EmbedModel::MistralEmbed; let model = EmbedModel::MistralEmbed;
let input = vec!["Embed this sentence.", "As well as this one."] let input = vec!["Embed this sentence.", "As well as this one."]
@@ -116,6 +154,29 @@ fn main() {
} }
``` ```
### Embeddings (async)
```rs
use mistralai_client::v1::{client::Client, constants::EmbedModel};
#[tokio::main]
async fn main() {
// This example supposes you have set the `MISTRAL_API_KEY` environment variable.
let client: Client = Client::new(None, None, None, None).unwrap();
let model = EmbedModel::MistralEmbed;
let input = vec!["Embed this sentence.", "As well as this one."]
.iter()
.map(|s| s.to_string())
.collect();
let options = None;
let response = client.embeddings_async(model, input, options).await.unwrap();
println!("Embeddings: {:?}", response.data);
// => "Embeddings: [{...}, {...}]"
}
```
### List models ### List models
```rs ```rs
@@ -123,9 +184,25 @@ use mistralai_client::v1::client::Client;
fn main() { fn main() {
// This example suppose you have set the `MISTRAL_API_KEY` environment variable. // This example suppose you have set the `MISTRAL_API_KEY` environment variable.
let client = Client::new(None, None, None, None); let client = Client::new(None, None, None, None).unwrap();
let result = client.list_models(model, messages, Some(options)).unwrap(); let result = client.list_models().unwrap();
println!("First Model ID: {:?}", result.data[0].id);
// => "First Model ID: open-mistral-7b"
}
```
### List models (async)
```rs
use mistralai_client::v1::client::Client;
#[tokio::main]
async fn main() {
// This example supposes you have set the `MISTRAL_API_KEY` environment variable.
let client = Client::new(None, None, None, None).unwrap();
let result = client.list_models_async().await.unwrap();
println!("First Model ID: {:?}", result.data[0].id); println!("First Model ID: {:?}", result.data[0].id);
// => "First Model ID: open-mistral-7b" // => "First Model ID: open-mistral-7b"
} }

View File

@@ -1,2 +1,4 @@
# https://github.com/crate-ci/cargo-release/blob/master/docs/reference.md
allow-branch = ["main"] allow-branch = ["main"]
pre-release-commit-message = "ci(release): v{{version}}"
pre-release-replacements = [{ file = "CHANGELOG.md", search = "## \\[\\]", replace = "## [{{version}}]" }] pre-release-replacements = [{ file = "CHANGELOG.md", search = "## \\[\\]", replace = "## [{{version}}]" }]

View File

@@ -1,5 +1,5 @@
use crate::v1::error::APIError; use crate::v1::error::ApiError;
use minreq::Response; use reqwest::Error as ReqwestError;
use crate::v1::{ use crate::v1::{
chat_completion::{ chat_completion::{
@@ -7,6 +7,7 @@ use crate::v1::{
}, },
constants::{EmbedModel, Model, API_URL_BASE}, constants::{EmbedModel, Model, API_URL_BASE},
embedding::{EmbeddingRequest, EmbeddingRequestOptions, EmbeddingResponse}, embedding::{EmbeddingRequest, EmbeddingRequestOptions, EmbeddingResponse},
error::ClientError,
model_list::ModelListResponse, model_list::ModelListResponse,
}; };
@@ -23,90 +24,21 @@ impl Client {
endpoint: Option<String>, endpoint: Option<String>,
max_retries: Option<u32>, max_retries: Option<u32>,
timeout: Option<u32>, timeout: Option<u32>,
) -> Self { ) -> Result<Self, ClientError> {
let api_key = api_key.unwrap_or(std::env::var("MISTRAL_API_KEY").unwrap()); let api_key = api_key.unwrap_or(match std::env::var("MISTRAL_API_KEY") {
Ok(api_key_from_env) => api_key_from_env,
Err(_) => return Err(ClientError::ApiKeyError),
});
let endpoint = endpoint.unwrap_or(API_URL_BASE.to_string()); let endpoint = endpoint.unwrap_or(API_URL_BASE.to_string());
let max_retries = max_retries.unwrap_or(5); let max_retries = max_retries.unwrap_or(5);
let timeout = timeout.unwrap_or(120); let timeout = timeout.unwrap_or(120);
Self { Ok(Self {
api_key, api_key,
endpoint, endpoint,
max_retries, max_retries,
timeout, timeout,
} })
}
pub fn build_request(&self, request: minreq::Request) -> minreq::Request {
let authorization = format!("Bearer {}", self.api_key);
let user_agent = format!(
"ivangabriele/mistralai-client-rs/{}",
env!("CARGO_PKG_VERSION")
);
let request = request
.with_header("Authorization", authorization)
.with_header("Accept", "application/json")
.with_header("Content-Type", "application/json")
.with_header("User-Agent", user_agent);
request
}
pub fn get(&self, path: &str) -> Result<Response, APIError> {
let url = format!("{}{}", self.endpoint, path);
let request = self.build_request(minreq::get(url));
let result = request.send();
match result {
Ok(response) => {
print!("{:?}", response.as_str().unwrap());
if (200..=299).contains(&response.status_code) {
Ok(response)
} else {
Err(APIError {
message: format!(
"{}: {}",
response.status_code,
response.as_str().unwrap()
),
})
}
}
Err(error) => Err(self.new_error(error)),
}
}
pub fn post<T: serde::ser::Serialize + std::fmt::Debug>(
&self,
path: &str,
params: &T,
) -> Result<Response, APIError> {
// print!("{:?}", params);
let url = format!("{}{}", self.endpoint, path);
let request = self.build_request(minreq::post(url));
let result = request.with_json(params).unwrap().send();
match result {
Ok(response) => {
print!("{:?}", response.as_str().unwrap());
if (200..=299).contains(&response.status_code) {
Ok(response)
} else {
Err(APIError {
message: format!(
"{}: {}",
response.status_code,
response.as_str().unwrap()
),
})
}
}
Err(error) => Err(self.new_error(error)),
}
} }
pub fn chat( pub fn chat(
@@ -114,14 +46,30 @@ impl Client {
model: Model, model: Model,
messages: Vec<ChatCompletionMessage>, messages: Vec<ChatCompletionMessage>,
options: Option<ChatCompletionParams>, options: Option<ChatCompletionParams>,
) -> Result<ChatCompletionResponse, APIError> { ) -> Result<ChatCompletionResponse, ApiError> {
let request = ChatCompletionRequest::new(model, messages, options); let request = ChatCompletionRequest::new(model, messages, options);
let response = self.post("/chat/completions", &request)?; let response = self.post_sync("/chat/completions", &request)?;
let result = response.json::<ChatCompletionResponse>(); let result = response.json::<ChatCompletionResponse>();
match result { match result {
Ok(response) => Ok(response), Ok(response) => Ok(response),
Err(error) => Err(self.new_error(error)), Err(error) => Err(self.to_api_error(error)),
}
}
pub async fn chat_async(
&self,
model: Model,
messages: Vec<ChatCompletionMessage>,
options: Option<ChatCompletionParams>,
) -> Result<ChatCompletionResponse, ApiError> {
let request = ChatCompletionRequest::new(model, messages, options);
let response = self.post_async("/chat/completions", &request).await?;
let result = response.json::<ChatCompletionResponse>().await;
match result {
Ok(response) => Ok(response),
Err(error) => Err(self.to_api_error(error)),
} }
} }
@@ -130,28 +78,193 @@ impl Client {
model: EmbedModel, model: EmbedModel,
input: Vec<String>, input: Vec<String>,
options: Option<EmbeddingRequestOptions>, options: Option<EmbeddingRequestOptions>,
) -> Result<EmbeddingResponse, APIError> { ) -> Result<EmbeddingResponse, ApiError> {
let request = EmbeddingRequest::new(model, input, options); let request = EmbeddingRequest::new(model, input, options);
let response = self.post("/embeddings", &request)?; let response = self.post_sync("/embeddings", &request)?;
let result = response.json::<EmbeddingResponse>(); let result = response.json::<EmbeddingResponse>();
match result { match result {
Ok(response) => Ok(response), Ok(response) => Ok(response),
Err(error) => Err(self.new_error(error)), Err(error) => Err(self.to_api_error(error)),
} }
} }
pub fn list_models(&self) -> Result<ModelListResponse, APIError> { pub async fn embeddings_async(
let response = self.get("/models")?; &self,
model: EmbedModel,
input: Vec<String>,
options: Option<EmbeddingRequestOptions>,
) -> Result<EmbeddingResponse, ApiError> {
let request = EmbeddingRequest::new(model, input, options);
let response = self.post_async("/embeddings", &request).await?;
let result = response.json::<EmbeddingResponse>().await;
match result {
Ok(response) => Ok(response),
Err(error) => Err(self.to_api_error(error)),
}
}
pub fn list_models(&self) -> Result<ModelListResponse, ApiError> {
let response = self.get_sync("/models")?;
let result = response.json::<ModelListResponse>(); let result = response.json::<ModelListResponse>();
match result { match result {
Ok(response) => Ok(response), Ok(response) => Ok(response),
Err(error) => Err(self.new_error(error)), Err(error) => Err(self.to_api_error(error)),
} }
} }
fn new_error(&self, err: minreq::Error) -> APIError { pub async fn list_models_async(&self) -> Result<ModelListResponse, ApiError> {
APIError { let response = self.get_async("/models").await?;
let result = response.json::<ModelListResponse>().await;
match result {
Ok(response) => Ok(response),
Err(error) => Err(self.to_api_error(error)),
}
}
fn build_request_sync(
&self,
request: reqwest::blocking::RequestBuilder,
) -> reqwest::blocking::RequestBuilder {
let user_agent = format!(
"ivangabriele/mistralai-client-rs/{}",
env!("CARGO_PKG_VERSION")
);
let request_builder = request
.bearer_auth(&self.api_key)
.header("Accept", "application/json")
.header("Content-Type", "application/json")
.header("User-Agent", user_agent);
request_builder
}
fn build_request_async(&self, request: reqwest::RequestBuilder) -> reqwest::RequestBuilder {
let user_agent = format!(
"ivangabriele/mistralai-client-rs/{}",
env!("CARGO_PKG_VERSION")
);
let request_builder = request
.bearer_auth(&self.api_key)
.header("Accept", "application/json")
.header("Content-Type", "application/json")
.header("User-Agent", user_agent);
request_builder
}
fn get_sync(&self, path: &str) -> Result<reqwest::blocking::Response, ApiError> {
let client_sync = reqwest::blocking::Client::new();
let url = format!("{}{}", self.endpoint, path);
let request = self.build_request_sync(client_sync.get(url));
let result = request.send();
match result {
Ok(response) => {
if response.status().is_success() {
Ok(response)
} else {
let status = response.status();
let text = response.text().unwrap();
Err(ApiError {
message: format!("{}: {}", status, text),
})
}
}
Err(error) => Err(ApiError {
message: error.to_string(),
}),
}
}
async fn get_async(&self, path: &str) -> Result<reqwest::Response, ApiError> {
let reqwest_client = reqwest::Client::new();
let url = format!("{}{}", self.endpoint, path);
let request_builder = reqwest_client.get(url);
let request = self.build_request_async(request_builder);
let result = request.send().await.map_err(|e| self.to_api_error(e));
match result {
Ok(response) => {
if response.status().is_success() {
Ok(response)
} else {
let status = response.status();
let text = response.text().await.unwrap_or_default();
Err(ApiError {
message: format!("{}: {}", status, text),
})
}
}
Err(error) => Err(ApiError {
message: error.to_string(),
}),
}
}
fn post_sync<T: serde::ser::Serialize + std::fmt::Debug>(
&self,
path: &str,
params: &T,
) -> Result<reqwest::blocking::Response, ApiError> {
let reqwest_client = reqwest::blocking::Client::new();
let url = format!("{}{}", self.endpoint, path);
let request_builder = reqwest_client.post(url).json(params);
let request = self.build_request_sync(request_builder);
let result = request.send();
match result {
Ok(response) => {
if response.status().is_success() {
Ok(response)
} else {
let status = response.status();
let text = response.text().unwrap_or_default();
Err(ApiError {
message: format!("{}: {}", status, text),
})
}
}
Err(error) => Err(ApiError {
message: error.to_string(),
}),
}
}
async fn post_async<T: serde::ser::Serialize + std::fmt::Debug>(
&self,
path: &str,
params: &T,
) -> Result<reqwest::Response, ApiError> {
let reqwest_client = reqwest::Client::new();
let url = format!("{}{}", self.endpoint, path);
let request_builder = reqwest_client.post(url).json(params);
let request = self.build_request_async(request_builder);
let result = request.send().await.map_err(|e| self.to_api_error(e));
match result {
Ok(response) => {
if response.status().is_success() {
Ok(response)
} else {
let status = response.status();
let text = response.text().await.unwrap_or_default();
Err(ApiError {
message: format!("{}: {}", status, text),
})
}
}
Err(error) => Err(ApiError {
message: error.to_string(),
}),
}
}
fn to_api_error(&self, err: ReqwestError) -> ApiError {
ApiError {
message: err.to_string(), message: err.to_string(),
} }
} }

View File

@@ -2,14 +2,20 @@ use std::error::Error;
use std::fmt; use std::fmt;
#[derive(Debug)] #[derive(Debug)]
pub struct APIError { pub struct ApiError {
pub message: String, pub message: String,
} }
impl fmt::Display for ApiError {
impl fmt::Display for APIError {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "APIError: {}", self.message) write!(f, "ApiError: {}", self.message)
} }
} }
impl Error for ApiError {}
impl Error for APIError {} #[derive(Debug, PartialEq, thiserror::Error)]
pub enum ClientError {
#[error("You must either set the `MISTRAL_API_KEY` environment variable or specify it in `Client::new(api_key, ...)`.")]
ApiKeyError,
#[error("Failed to read the response text.")]
ReadResponseTextError,
}

View File

@@ -0,0 +1,38 @@
use jrest::expect;
use mistralai_client::v1::{
chat_completion::{ChatCompletionMessage, ChatCompletionMessageRole, ChatCompletionParams},
client::Client,
constants::Model,
};
#[tokio::test]
async fn test_client_chat_async() {
let client = Client::new(None, None, None, None).unwrap();
let model = Model::OpenMistral7b;
let messages = vec![ChatCompletionMessage {
role: ChatCompletionMessageRole::user,
content: "Just guess the next word: \"Eiffel ...\"?".to_string(),
}];
let options = ChatCompletionParams {
temperature: Some(0.0),
random_seed: Some(42),
..Default::default()
};
let response = client
.chat_async(model, messages, Some(options))
.await
.unwrap();
expect!(response.model).to_be(Model::OpenMistral7b);
expect!(response.object).to_be("chat.completion".to_string());
expect!(response.choices.len()).to_be(1);
expect!(response.choices[0].index).to_be(0);
expect!(response.choices[0].message.role.clone()).to_be(ChatCompletionMessageRole::assistant);
expect!(response.choices[0].message.content.clone())
.to_be("Tower. The Eiffel Tower is a famous landmark in Paris, France.".to_string());
expect!(response.usage.prompt_tokens).to_be_greater_than(0);
expect!(response.usage.completion_tokens).to_be_greater_than(0);
expect!(response.usage.total_tokens).to_be_greater_than(0);
}

View File

@@ -6,13 +6,8 @@ use mistralai_client::v1::{
}; };
#[test] #[test]
fn test_chat_completion() { fn test_client_chat() {
extern crate dotenv; let client = Client::new(None, None, None, None).unwrap();
use dotenv::dotenv;
dotenv().ok();
let client = Client::new(None, None, None, None);
let model = Model::OpenMistral7b; let model = Model::OpenMistral7b;
let messages = vec![ChatCompletionMessage { let messages = vec![ChatCompletionMessage {

View File

@@ -0,0 +1,29 @@
use jrest::expect;
use mistralai_client::v1::{client::Client, constants::EmbedModel};
#[tokio::test]
async fn test_client_embeddings_async() {
let client: Client = Client::new(None, None, None, None).unwrap();
let model = EmbedModel::MistralEmbed;
let input = vec!["Embed this sentence.", "As well as this one."]
.iter()
.map(|s| s.to_string())
.collect();
let options = None;
let response = client
.embeddings_async(model, input, options)
.await
.unwrap();
expect!(response.model).to_be(EmbedModel::MistralEmbed);
expect!(response.object).to_be("list".to_string());
expect!(response.data.len()).to_be(2);
expect!(response.data[0].index).to_be(0);
expect!(response.data[0].object.clone()).to_be("embedding".to_string());
expect!(response.data[0].embedding.len()).to_be_greater_than(0);
expect!(response.usage.prompt_tokens).to_be_greater_than(0);
expect!(response.usage.completion_tokens).to_be(0);
expect!(response.usage.total_tokens).to_be_greater_than(0);
}

View File

@@ -2,13 +2,8 @@ use jrest::expect;
use mistralai_client::v1::{client::Client, constants::EmbedModel}; use mistralai_client::v1::{client::Client, constants::EmbedModel};
#[test] #[test]
fn test_embeddings() { fn test_client_embeddings() {
extern crate dotenv; let client: Client = Client::new(None, None, None, None).unwrap();
use dotenv::dotenv;
dotenv().ok();
let client: Client = Client::new(None, None, None, None);
let model = EmbedModel::MistralEmbed; let model = EmbedModel::MistralEmbed;
let input = vec!["Embed this sentence.", "As well as this one."] let input = vec!["Embed this sentence.", "As well as this one."]

View File

@@ -0,0 +1,20 @@
use jrest::expect;
use mistralai_client::v1::client::Client;
#[tokio::test]
async fn test_client_list_models_async() {
let client = Client::new(None, None, None, None).unwrap();
let response = client.list_models_async().await.unwrap();
expect!(response.object).to_be("list".to_string());
expect!(response.data.len()).to_be_greater_than(0);
// let open_mistral_7b_data_item = response
// .data
// .iter()
// .find(|item| item.id == "open-mistral-7b")
// .unwrap();
// expect!(open_mistral_7b_data_item.id).to_be("open-mistral-7b".to_string());
}

View File

@@ -2,13 +2,8 @@ use jrest::expect;
use mistralai_client::v1::client::Client; use mistralai_client::v1::client::Client;
#[test] #[test]
fn test_list_models() { fn test_client_list_models() {
extern crate dotenv; let client = Client::new(None, None, None, None).unwrap();
use dotenv::dotenv;
dotenv().ok();
let client = Client::new(None, None, None, None);
let response = client.list_models().unwrap(); let response = client.list_models().unwrap();

View File

@@ -1,12 +1,13 @@
use jrest::expect; use jrest::expect;
use mistralai_client::v1::client::Client; use mistralai_client::v1::{client::Client, error::ClientError};
#[test] #[test]
fn test_client_new_with_none_params() { fn test_client_new_with_none_params() {
let maybe_original_mistral_api_key = std::env::var("MISTRAL_API_KEY").ok(); let maybe_original_mistral_api_key = std::env::var("MISTRAL_API_KEY").ok();
std::env::remove_var("MISTRAL_API_KEY");
std::env::set_var("MISTRAL_API_KEY", "test_api_key_from_env"); std::env::set_var("MISTRAL_API_KEY", "test_api_key_from_env");
let client = Client::new(None, None, None, None); let client = Client::new(None, None, None, None).unwrap();
expect!(client.api_key).to_be("test_api_key_from_env".to_string()); expect!(client.api_key).to_be("test_api_key_from_env".to_string());
expect!(client.endpoint).to_be("https://api.mistral.ai/v1".to_string()); expect!(client.endpoint).to_be("https://api.mistral.ai/v1".to_string());
@@ -24,6 +25,7 @@ fn test_client_new_with_none_params() {
#[test] #[test]
fn test_client_new_with_all_params() { fn test_client_new_with_all_params() {
let maybe_original_mistral_api_key = std::env::var("MISTRAL_API_KEY").ok(); let maybe_original_mistral_api_key = std::env::var("MISTRAL_API_KEY").ok();
std::env::remove_var("MISTRAL_API_KEY");
std::env::set_var("MISTRAL_API_KEY", "test_api_key_from_env"); std::env::set_var("MISTRAL_API_KEY", "test_api_key_from_env");
let api_key = Some("test_api_key_from_param".to_string()); let api_key = Some("test_api_key_from_param".to_string());
@@ -36,7 +38,8 @@ fn test_client_new_with_all_params() {
endpoint.clone(), endpoint.clone(),
max_retries.clone(), max_retries.clone(),
timeout.clone(), timeout.clone(),
); )
.unwrap();
expect!(client.api_key).to_be(api_key.unwrap()); expect!(client.api_key).to_be(api_key.unwrap());
expect!(client.endpoint).to_be(endpoint.unwrap()); expect!(client.endpoint).to_be(endpoint.unwrap());
@@ -50,3 +53,23 @@ fn test_client_new_with_all_params() {
None => std::env::remove_var("MISTRAL_API_KEY"), None => std::env::remove_var("MISTRAL_API_KEY"),
} }
} }
#[test]
fn test_client_new_with_missing_api_key() {
let maybe_original_mistral_api_key = std::env::var("MISTRAL_API_KEY").ok();
std::env::remove_var("MISTRAL_API_KEY");
let call = || Client::new(None, None, None, None);
match call() {
Ok(_) => panic!("Expected `ClientError::ApiKeyError` but got Ok."),
Err(error) => assert_eq!(error, ClientError::ApiKeyError),
}
match maybe_original_mistral_api_key {
Some(original_mistral_api_key) => {
std::env::set_var("MISTRAL_API_KEY", original_mistral_api_key)
}
None => std::env::remove_var("MISTRAL_API_KEY"),
}
}