20 Commits

Author SHA1 Message Date
Ivan Gabriele
9430d42382 ci(release): v0.7.0 2024-03-05 02:49:59 +01:00
Ivan Gabriele
e7d844dce9 docs(changelog): update 2024-03-05 02:49:52 +01:00
Ivan Gabriele
29566f7948 ci(github): split documentation tests into a separate job 2024-03-05 02:48:24 +01:00
Ivan Gabriele
72bae8817a docs: add client.chat*() documentation 2024-03-05 02:40:49 +01:00
Ivan Gabriele
08b042506d test(coverage): migrate from tarpaulin to llvm-cov 2024-03-05 02:34:50 +01:00
Ivan Gabriele
efcd93953a build(makefile): add --skip-clean option to test-cover command 2024-03-05 01:36:02 +01:00
Ivan Gabriele
ea99a075ef build(makefile): remove wrong --nocapture option from test-doc command 2024-03-05 00:59:13 +01:00
Ivan Gabriele
ccf3d1431a build(makefile): add doc command 2024-03-05 00:55:07 +01:00
Ivan Gabriele
a8bfb5333f ci(github): add documentation tests 2024-03-05 00:50:21 +01:00
Ivan Gabriele
ef5d475e2d fix!: fix failure when api key as param and not env
BREAKING CHANGE:

- Rename `ClientError.ApiKeyError` to `ClientError.MissingApiKey`.
- Rename `ClientError.ReadResponseTextError` to `ClientError.UnreadableResponseText`.
2024-03-04 21:12:08 +01:00
Ivan Gabriele
5217fcfb94 ci(release): v0.6.0 2024-03-04 08:20:46 +01:00
Ivan Gabriele
6b1cc5c058 docs(changelog): update 2024-03-04 08:20:38 +01:00
Ivan Gabriele
4a4219d3ea feat!: add client.chat_stream() method
BREAKING CHANGE: You can't set the `stream` option for `client.chat*()`.

Either use `client.chat_stream()` if you want to use streams
or use `client.chat()` / `client.chat_async()` otherwise.
2024-03-04 08:16:10 +01:00
Ivan Gabriele
f91e794d71 refactor: remove useless error mappers 2024-03-04 06:54:24 +01:00
Ivan Gabriele
7c96a4a88d ci(release): v0.5.0 2024-03-04 06:39:54 +01:00
Ivan Gabriele
14437bf609 docs(changelog): update 2024-03-04 06:39:47 +01:00
Ivan Gabriele
3c228914f7 feat: add client.embeddings_async() method 2024-03-04 06:39:21 +01:00
Ivan Gabriele
b69f7c617c feat: add client.list_models_async() method 2024-03-04 06:33:38 +01:00
Ivan Gabriele
75788b9395 refactor: migrate to reqwest-only 2024-03-04 06:33:38 +01:00
renovate[bot]
a862b92c98 chore(deps): update codecov/codecov-action action to v4 (#2)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2024-03-04 05:04:56 +01:00
16 changed files with 752 additions and 168 deletions

View File

@@ -6,10 +6,6 @@ jobs:
test:
name: Test
runs-on: ubuntu-latest
container:
image: xd009642/tarpaulin
# https://github.com/xd009642/tarpaulin#github-actions
options: --security-opt seccomp=unconfined
steps:
- name: Checkout
uses: actions/checkout@v4
@@ -17,12 +13,30 @@ jobs:
uses: actions-rs/toolchain@v1
with:
toolchain: 1.76.0
- name: Install cargo-llvm-cov
uses: taiki-e/install-action@cargo-llvm-cov
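# cargo-llvm-cov replaces tarpaulin here, so the job no longer needs the
# tarpaulin container image or its seccomp workaround (removed above).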
- name: Run tests (with coverage)
run: make test-cover
run: cargo llvm-cov --lcov --output-path ./lcov.info
env:
MISTRAL_API_KEY: ${{ secrets.MISTRAL_API_KEY }}
- name: Upload tests coverage
uses: codecov/codecov-action@v3
uses: codecov/codecov-action@v4
with:
fail_ci_if_error: true
files: ./lcov.info
token: ${{ secrets.CODECOV_TOKEN }}
test_documentation:
name: Test Documentation
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@v4
- name: Setup Rust
uses: actions-rs/toolchain@v1
with:
toolchain: 1.76.0
- name: Run documentation tests
run: make test-doc
env:
MISTRAL_API_KEY: ${{ secrets.MISTRAL_API_KEY }}

View File

@@ -1,3 +1,37 @@
## [0.7.0](https://github.com/ivangabriele/mistralai-client-rs/compare/v0.6.0...v) (2024-03-05)
### ⚠ BREAKING CHANGES
* Rename `ClientError.ApiKeyError` to `ClientError.MissingApiKey`.
* Rename `ClientError.ReadResponseTextError` to `ClientError.UnreadableResponseText`.
### Bug Fixes
* fix failure when api key as param and not env ([ef5d475](https://github.com/ivangabriele/mistralai-client-rs/commit/ef5d475e2d0e3fe040c44d6adabf7249e9962835))
## [0.6.0](https://github.com/ivangabriele/mistralai-client-rs/compare/v0.5.0...v) (2024-03-04)
### ⚠ BREAKING CHANGES
* You can't set the `stream` option for `client.chat*()`.
Either use `client.chat_stream()` if you want to use streams
or use `client.chat()` / `client.chat_async()` otherwise.
### Features
* add client.chat_stream() method ([4a4219d](https://github.com/ivangabriele/mistralai-client-rs/commit/4a4219d3eaa8f0ae953ee6182b36bf464d1c4a21))
## [0.5.0](https://github.com/ivangabriele/mistralai-client-rs/compare/v0.4.0...v) (2024-03-04)
### Features
* add client.embeddings_async() method ([3c22891](https://github.com/ivangabriele/mistralai-client-rs/commit/3c228914f78b0edd4a592091265b88d0bc55568b))
* add client.list_models_async() method ([b69f7c6](https://github.com/ivangabriele/mistralai-client-rs/commit/b69f7c617c15dd63abb61d004636512916d766bb))
## [0.4.0](https://github.com/ivangabriele/mistralai-client-rs/compare/v0.3.0...v) (2024-03-04)

View File

@@ -38,7 +38,8 @@ Then edit the `.env` file to set your `MISTRAL_API_KEY`.
### Optional requirements
- [cargo-watch](https://github.com/watchexec/cargo-watch#install) for `make test-*-watch`.
- [cargo-llvm-cov](https://github.com/taiki-e/cargo-llvm-cov?tab=readme-ov-file#installation) for `make test-cover`.
- [cargo-watch](https://github.com/watchexec/cargo-watch#install) for `make test-watch`.
### Test

View File

@@ -2,7 +2,7 @@
name = "mistralai-client"
description = "Mistral AI API client library for Rust (unofficial)."
license = "Apache-2.0"
version = "0.4.0"
version = "0.7.0"
edition = "2021"
rust-version = "1.76.0"
@@ -15,10 +15,12 @@ readme = "README.md"
repository = "https://github.com/ivangabriele/mistralai-client-rs"
[dependencies]
minreq = { version = "2.11.0", features = ["https-rustls", "json-using-serde"] }
reqwest = { version = "0.11.24", features = ["json"] }
futures = "0.3.30"
reqwest = { version = "0.11.24", features = ["json", "blocking", "stream"] }
serde = { version = "1.0.197", features = ["derive"] }
serde_json = "1.0.114"
strum = "0.26.1"
strum_macros = "0.26.1"
thiserror = "1.0.57"
tokio = { version = "1.36.0", features = ["full"] }

View File

@@ -2,6 +2,17 @@ SHELL := /bin/bash
.PHONY: test
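# Loads ./.env for local runs; in CI the MISTRAL_API_KEY secret is injected
# directly as an environment variable, so sourcing is skipped.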
define source_env_if_not_ci
@if [ -z "$${CI}" ]; then \
if [ -f ./.env ]; then \
source ./.env; \
else \
echo "No .env file found"; \
exit 1; \
fi \
fi
endef
define RELEASE_TEMPLATE
conventional-changelog -p conventionalcommits -i ./CHANGELOG.md -s
git add .
@@ -11,6 +22,10 @@ define RELEASE_TEMPLATE
git push origin HEAD --tags
endef
doc:
cargo doc
open ./target/doc/mistralai_client/index.html
release-patch:
$(call RELEASE_TEMPLATE,patch)
@@ -21,8 +36,10 @@ release-major:
$(call RELEASE_TEMPLATE,major)
test:
@source ./.env && cargo test --all-targets --no-fail-fast
@$(source_env_if_not_ci) && cargo test --no-fail-fast
test-cover:
cargo tarpaulin --all-targets --frozen --no-fail-fast --out Xml --skip-clean
@$(source_env_if_not_ci) && cargo llvm-cov
test-doc:
@$(source_env_if_not_ci) && cargo test --doc --no-fail-fast
test-watch:
cargo watch -x "test -- --all-targets --nocapture"
@source ./.env && cargo watch -x "test -- --nocapture"

View File

@@ -17,7 +17,7 @@ Rust client for the Mistral AI API.
- [Usage](#usage)
- [Chat without streaming](#chat-without-streaming)
- [Chat without streaming (async)](#chat-without-streaming-async)
- [Chat with streaming](#chat-with-streaming)
- [Chat with streaming (async)](#chat-with-streaming-async)
- [Embeddings](#embeddings)
- [Embeddings (async)](#embeddings-async)
- [List models](#list-models)
@@ -29,11 +29,11 @@ Rust client for the Mistral AI API.
- [x] Chat without streaming
- [x] Chat without streaming (async)
- [ ] Chat with streaming
- [x] Chat with streaming
- [x] Embedding
- [ ] Embedding (async)
- [x] Embedding (async)
- [x] List models
- [ ] List models (async)
- [x] List models (async)
- [ ] Function Calling
- [ ] Function Calling (async)
@@ -71,7 +71,7 @@ fn main() {
```rs
use mistralai_client::v1::{
chat_completion::{ChatCompletionMessage, ChatCompletionMessageRole, ChatCompletionRequestOptions},
chat_completion::{ChatCompletionParams, ChatMessage, ChatMessageRole},
client::Client,
constants::Model,
};
@@ -81,8 +81,8 @@ fn main() {
let client = Client::new(None, None, None, None).unwrap();
let model = Model::OpenMistral7b;
let messages = vec![ChatCompletionMessage {
role: ChatCompletionMessageRole::user,
let messages = vec![ChatMessage {
role: ChatMessageRole::user,
content: "Just guess the next word: \"Eiffel ...\"?".to_string(),
}];
let options = ChatCompletionRequestOptions {
@@ -101,7 +101,7 @@ fn main() {
```rs
use mistralai_client::v1::{
chat_completion::{ChatCompletionMessage, ChatCompletionMessageRole, ChatCompletionRequestOptions},
chat_completion::{ChatCompletionParams, ChatMessage, ChatMessageRole},
client::Client,
constants::Model,
};
@@ -112,8 +112,8 @@ async fn main() {
let client = Client::new(None, None, None, None).unwrap();
let model = Model::OpenMistral7b;
let messages = vec![ChatCompletionMessage {
role: ChatCompletionMessageRole::user,
let messages = vec![ChatMessage {
role: ChatMessageRole::user,
content: "Just guess the next word: \"Eiffel ...\"?".to_string(),
}];
let options = ChatCompletionRequestOptions {
@@ -122,15 +122,50 @@ async fn main() {
..Default::default()
};
let result = client.chat(model, messages, Some(options)).await.unwrap();
let result = client.chat_async(model, messages, Some(options)).await.unwrap();
println!("Assistant: {}", result.choices[0].message.content);
// => "Assistant: Tower. [...]"
}
```
### Chat with streaming
### Chat with streaming (async)
_In progress._
```rs
use futures::stream::StreamExt;
use mistralai_client::v1::{
chat_completion::{ChatCompletionParams, ChatMessage, ChatMessageRole},
client::Client,
constants::Model,
};
#[tokio::main]
async fn main() {
    // This example assumes you have set the `MISTRAL_API_KEY` environment variable.
let client = Client::new(None, None, None, None).unwrap();
let model = Model::OpenMistral7b;
let messages = vec![ChatMessage {
role: ChatMessageRole::user,
content: "Just guess the next word: \"Eiffel ...\"?".to_string(),
}];
let options = ChatCompletionParams {
temperature: Some(0.0),
random_seed: Some(42),
..Default::default()
};
let stream_result = client.chat_stream(model, messages, Some(options)).await;
let mut stream = stream_result.expect("Failed to create stream.");
while let Some(chunk_result) = stream.next().await {
match chunk_result {
Ok(chunk) => {
println!("Assistant (message chunk): {}", chunk.choices[0].delta.content);
}
Err(e) => eprintln!("Error processing chunk: {:?}", e),
}
}
}
```
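Each chunk carries only the incremental `delta.content` for that step; concatenate the chunks' contents to rebuild the full assistant message.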
### Embeddings
@@ -156,7 +191,26 @@ fn main() {
### Embeddings (async)
_In progress._
```rs
use mistralai_client::v1::{client::Client, constants::EmbedModel};
#[tokio::main]
async fn main() {
    // This example assumes you have set the `MISTRAL_API_KEY` environment variable.
let client: Client = Client::new(None, None, None, None).unwrap();
let model = EmbedModel::MistralEmbed;
let input = vec!["Embed this sentence.", "As well as this one."]
.iter()
.map(|s| s.to_string())
.collect();
let options = None;
let response = client.embeddings_async(model, input, options).await.unwrap();
println!("Embeddings: {:?}", response.data);
// => "Embeddings: [{...}, {...}]"
}
```
### List models
@@ -175,4 +229,16 @@ fn main() {
### List models (async)
_In progress._
```rs
use mistralai_client::v1::client::Client;
#[tokio::main]
async fn main() {
    // This example assumes you have set the `MISTRAL_API_KEY` environment variable.
    let client = Client::new(None, None, None, None).unwrap();
    let result = client.list_models_async().await.unwrap();
println!("First Model ID: {:?}", result.data[0].id);
// => "First Model ID: open-mistral-7b"
}
```

View File

@@ -1 +1,4 @@
//! This crate provides easy bindings and types for the Mistral AI API.
/// The v1 module contains the types and methods for the v1 API endpoints.
pub mod v1;

View File

@@ -2,6 +2,25 @@ use serde::{Deserialize, Serialize};
use crate::v1::{common, constants};
// -----------------------------------------------------------------------------
// Definitions
#[derive(Clone, Debug, Deserialize, Serialize)]
pub struct ChatMessage {
pub role: ChatMessageRole,
pub content: String,
}
#[derive(Clone, Debug, strum_macros::Display, Eq, PartialEq, Deserialize, Serialize)]
#[allow(non_camel_case_types)]
pub enum ChatMessageRole {
assistant,
user,
}
// -----------------------------------------------------------------------------
// Request
#[derive(Debug)]
pub struct ChatCompletionParams {
pub tools: Option<String>,
@@ -9,7 +28,6 @@ pub struct ChatCompletionParams {
pub max_tokens: Option<u32>,
pub top_p: Option<f32>,
pub random_seed: Option<u32>,
pub stream: Option<bool>,
pub safe_prompt: Option<bool>,
}
impl Default for ChatCompletionParams {
@@ -20,7 +38,6 @@ impl Default for ChatCompletionParams {
max_tokens: None,
top_p: None,
random_seed: None,
stream: None,
safe_prompt: None,
}
}
@@ -28,7 +45,7 @@ impl Default for ChatCompletionParams {
#[derive(Debug, Serialize, Deserialize)]
pub struct ChatCompletionRequest {
pub messages: Vec<ChatCompletionMessage>,
pub messages: Vec<ChatMessage>,
pub model: constants::Model,
#[serde(skip_serializing_if = "Option::is_none")]
pub tools: Option<String>,
@@ -40,8 +57,7 @@ pub struct ChatCompletionRequest {
pub top_p: Option<f32>,
#[serde(skip_serializing_if = "Option::is_none")]
pub random_seed: Option<u32>,
#[serde(skip_serializing_if = "Option::is_none")]
pub stream: Option<bool>,
pub stream: bool,
#[serde(skip_serializing_if = "Option::is_none")]
pub safe_prompt: Option<bool>,
// TODO Check this prop (seen in official Python client but not in API doc).
@@ -52,7 +68,8 @@ pub struct ChatCompletionRequest {
impl ChatCompletionRequest {
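    // `stream` is now passed explicitly by the client (false for chat()/chat_async(),
    // true for chat_stream()) rather than via `ChatCompletionParams`.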
pub fn new(
model: constants::Model,
messages: Vec<ChatCompletionMessage>,
messages: Vec<ChatMessage>,
stream: bool,
options: Option<ChatCompletionParams>,
) -> Self {
let ChatCompletionParams {
@@ -61,7 +78,6 @@ impl ChatCompletionRequest {
max_tokens,
top_p,
random_seed,
stream,
safe_prompt,
} = options.unwrap_or_default();
@@ -79,6 +95,9 @@ impl ChatCompletionRequest {
}
}
// -----------------------------------------------------------------------------
// Response
#[derive(Clone, Debug, Deserialize, Serialize)]
pub struct ChatCompletionResponse {
pub id: String,
@@ -86,28 +105,45 @@ pub struct ChatCompletionResponse {
/// Unix timestamp (in seconds).
pub created: u32,
pub model: constants::Model,
pub choices: Vec<ChatCompletionChoice>,
pub choices: Vec<ChatCompletionResponseChoice>,
pub usage: common::ResponseUsage,
}
#[derive(Clone, Debug, Deserialize, Serialize)]
pub struct ChatCompletionChoice {
pub struct ChatCompletionResponseChoice {
pub index: u32,
pub message: ChatCompletionMessage,
pub message: ChatMessage,
pub finish_reason: String,
// TODO Check this prop (seen in API responses but undocumented).
// pub logprobs: ???
}
#[derive(Clone, Debug, Deserialize, Serialize)]
pub struct ChatCompletionMessage {
pub role: ChatCompletionMessageRole,
pub content: String,
// -----------------------------------------------------------------------------
// Stream
#[derive(Debug, Deserialize)]
pub struct ChatCompletionStreamChunk {
pub id: String,
pub object: String,
/// Unix timestamp (in seconds).
pub created: u32,
pub model: constants::Model,
pub choices: Vec<ChatCompletionStreamChunkChoice>,
// TODO Check this prop (seen in API responses but undocumented).
// pub usage: ???,
}
#[derive(Clone, Debug, Eq, PartialEq, Deserialize, Serialize)]
#[allow(non_camel_case_types)]
pub enum ChatCompletionMessageRole {
assistant,
user,
#[derive(Debug, Deserialize)]
pub struct ChatCompletionStreamChunkChoice {
pub index: u32,
pub delta: ChatCompletionStreamChunkChoiceDelta,
pub finish_reason: Option<String>,
// TODO Check this prop (seen in API responses but undocumented).
// pub logprobs: ???,
}
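// `role` appears to be set only on the first chunk of a stream (where `content`
// is empty), as exercised by the assertions in tests/client_chat_stream.rs.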
#[derive(Debug, Deserialize)]
pub struct ChatCompletionStreamChunkChoiceDelta {
pub role: Option<ChatMessageRole>,
pub content: String,
}

View File

@@ -1,10 +1,13 @@
use futures::stream::StreamExt;
use futures::Stream;
use reqwest::Error as ReqwestError;
use serde_json::from_str;
use crate::v1::error::ApiError;
use minreq::Response;
use reqwest::{Client as ReqwestClient, Error as ReqwestError};
use crate::v1::{
chat_completion::{
ChatCompletionMessage, ChatCompletionParams, ChatCompletionRequest, ChatCompletionResponse,
ChatCompletionParams, ChatCompletionRequest, ChatCompletionResponse, ChatMessage,
},
constants::{EmbedModel, Model, API_URL_BASE},
embedding::{EmbeddingRequest, EmbeddingRequestOptions, EmbeddingResponse},
@@ -12,6 +15,8 @@ use crate::v1::{
model_list::ModelListResponse,
};
use super::chat_completion::ChatCompletionStreamChunk;
pub struct Client {
pub api_key: String,
pub endpoint: String,
@@ -20,16 +25,39 @@ pub struct Client {
}
impl Client {
/// Constructs a new `Client`.
///
/// # Arguments
///
/// * `api_key` - An optional API key.
/// If not provided, the method will try to use the `MISTRAL_API_KEY` environment variable.
/// * `endpoint` - An optional custom API endpoint. Defaults to the official API endpoint if not provided.
/// * `max_retries` - Optional maximum number of retries for failed requests. Defaults to `5`.
/// * `timeout` - Optional timeout in seconds for requests. Defaults to `120`.
///
/// # Examples
///
/// ```
/// use mistralai_client::v1::client::Client;
///
/// let client = Client::new(Some("your_api_key_here".to_string()), None, Some(3), Some(60));
/// assert!(client.is_ok());
/// ```
///
/// # Errors
///
/// This method fails whenever neither the `api_key` is provided
/// nor the `MISTRAL_API_KEY` environment variable is set.
pub fn new(
api_key: Option<String>,
endpoint: Option<String>,
max_retries: Option<u32>,
timeout: Option<u32>,
) -> Result<Self, ClientError> {
let api_key = api_key.unwrap_or(match std::env::var("MISTRAL_API_KEY") {
Ok(api_key_from_env) => api_key_from_env,
Err(_) => return Err(ClientError::ApiKeyError),
});
let api_key = match api_key {
Some(api_key_from_param) => api_key_from_param,
None => std::env::var("MISTRAL_API_KEY").map_err(|_| ClientError::MissingApiKey)?,
};
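        // The explicit param takes precedence; the env var is only consulted as a
        // fallback, so passing an api_key no longer requires MISTRAL_API_KEY to be set.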
let endpoint = endpoint.unwrap_or(API_URL_BASE.to_string());
let max_retries = max_retries.unwrap_or(5);
let timeout = timeout.unwrap_or(120);
@@ -42,127 +70,185 @@ impl Client {
})
}
pub fn build_request(&self, request: minreq::Request) -> minreq::Request {
let authorization = format!("Bearer {}", self.api_key);
let user_agent = format!(
"ivangabriele/mistralai-client-rs/{}",
env!("CARGO_PKG_VERSION")
);
let request = request
.with_header("Authorization", authorization)
.with_header("Accept", "application/json")
.with_header("Content-Type", "application/json")
.with_header("User-Agent", user_agent);
request
}
pub fn get_sync(&self, path: &str) -> Result<Response, ApiError> {
let url = format!("{}{}", self.endpoint, path);
let request = self.build_request(minreq::get(url));
let result = request.send();
match result {
Ok(response) => {
print!("{:?}", response.as_str().unwrap());
if (200..=299).contains(&response.status_code) {
Ok(response)
} else {
Err(ApiError {
message: format!(
"{}: {}",
response.status_code,
response.as_str().unwrap()
),
})
}
}
Err(error) => Err(self.new_minreq_error(error)),
}
}
pub fn post_sync<T: serde::ser::Serialize + std::fmt::Debug>(
&self,
path: &str,
params: &T,
) -> Result<Response, ApiError> {
// print!("{:?}", params);
let url = format!("{}{}", self.endpoint, path);
let request = self.build_request(minreq::post(url));
let result = request.with_json(params).unwrap().send();
match result {
Ok(response) => {
print!("{:?}", response.as_str().unwrap());
if (200..=299).contains(&response.status_code) {
Ok(response)
} else {
Err(ApiError {
message: format!(
"{}: {}",
response.status_code,
response.as_str().unwrap()
),
})
}
}
Err(error) => Err(self.new_minreq_error(error)),
}
}
/// Synchronously sends a chat completion request and returns the response.
///
/// # Arguments
///
/// * `model` - The [Model] to use for the chat completion.
/// * `messages` - A vector of [ChatMessage] to send as part of the chat.
/// * `options` - Optional [ChatCompletionParams] to customize the request.
///
/// # Returns
///
/// Returns a [Result] containing the `ChatCompletionResponse` if the request is successful,
/// or an [ApiError] if there is an error.
///
/// # Examples
///
/// ```
/// use mistralai_client::v1::{
/// chat_completion::{ChatMessage, ChatMessageRole},
/// client::Client,
/// constants::Model,
/// };
///
/// let client = Client::new(None, None, None, None).unwrap();
/// let messages = vec![ChatMessage {
/// role: ChatMessageRole::user,
/// content: "Hello, world!".to_string(),
/// }];
/// let response = client.chat(Model::OpenMistral7b, messages, None).unwrap();
/// println!("{}: {}", response.choices[0].message.role, response.choices[0].message.content);
/// ```
pub fn chat(
&self,
model: Model,
messages: Vec<ChatCompletionMessage>,
messages: Vec<ChatMessage>,
options: Option<ChatCompletionParams>,
) -> Result<ChatCompletionResponse, ApiError> {
let request = ChatCompletionRequest::new(model, messages, options);
let request = ChatCompletionRequest::new(model, messages, false, options);
let response = self.post_sync("/chat/completions", &request)?;
let result = response.json::<ChatCompletionResponse>();
match result {
Ok(response) => Ok(response),
Err(error) => Err(self.new_minreq_error(error)),
Err(error) => Err(self.to_api_error(error)),
}
}
/// Asynchronously sends a chat completion request and returns the response.
///
/// # Arguments
///
/// * `model` - The [Model] to use for the chat completion.
/// * `messages` - A vector of [ChatMessage] to send as part of the chat.
/// * `options` - Optional [ChatCompletionParams] to customize the request.
///
/// # Returns
///
    /// Returns a [Result] containing the `ChatCompletionResponse` if the request is successful,
/// or an [ApiError] if there is an error.
///
/// # Examples
///
/// ```
/// use mistralai_client::v1::{
/// chat_completion::{ChatMessage, ChatMessageRole},
/// client::Client,
/// constants::Model,
/// };
///
/// #[tokio::main]
/// async fn main() {
/// let client = Client::new(None, None, None, None).unwrap();
/// let messages = vec![ChatMessage {
/// role: ChatMessageRole::user,
/// content: "Hello, world!".to_string(),
/// }];
/// let response = client.chat_async(Model::OpenMistral7b, messages, None).await.unwrap();
/// println!("{}: {}", response.choices[0].message.role, response.choices[0].message.content);
/// }
/// ```
pub async fn chat_async(
&self,
model: Model,
messages: Vec<ChatCompletionMessage>,
messages: Vec<ChatMessage>,
options: Option<ChatCompletionParams>,
) -> Result<ChatCompletionResponse, ApiError> {
let request = ChatCompletionRequest::new(model, messages, options);
let client = ReqwestClient::new();
let request = ChatCompletionRequest::new(model, messages, false, options);
let response = client
.post(format!("{}{}", self.endpoint, "/chat/completions"))
.json(&request)
.bearer_auth(&self.api_key)
.header(
"User-Agent",
format!("mistralai-client-rs/{}", env!("CARGO_PKG_VERSION")),
)
.send()
let response = self.post_async("/chat/completions", &request).await?;
let result = response.json::<ChatCompletionResponse>().await;
match result {
Ok(response) => Ok(response),
Err(error) => Err(self.to_api_error(error)),
}
}
/// Asynchronously sends a chat completion request and returns a stream of message chunks.
///
/// # Arguments
///
/// * `model` - The [Model] to use for the chat completion.
/// * `messages` - A vector of [ChatMessage] to send as part of the chat.
/// * `options` - Optional [ChatCompletionParams] to customize the request.
///
/// # Returns
///
/// Returns a [Result] containing a `Stream` of `ChatCompletionStreamChunk` if the request is successful,
/// or an [ApiError] if there is an error.
///
/// # Examples
///
/// ```
/// use futures::stream::StreamExt;
/// use mistralai_client::v1::{
/// chat_completion::{ChatMessage, ChatMessageRole},
/// client::Client,
/// constants::Model,
/// };
///
/// #[tokio::main]
/// async fn main() {
/// let client = Client::new(None, None, None, None).unwrap();
/// let messages = vec![ChatMessage {
/// role: ChatMessageRole::user,
/// content: "Hello, world!".to_string(),
/// }];
/// let mut stream = client.chat_stream(Model::OpenMistral7b, messages, None).await.unwrap();
/// while let Some(chunk_result) = stream.next().await {
/// match chunk_result {
/// Ok(chunk) => {
/// print!("{}", chunk.choices[0].delta.content);
/// }
/// Err(error) => {
/// println!("Error: {}", error.message);
/// }
/// }
/// }
    /// }
    /// ```
pub async fn chat_stream(
&self,
model: Model,
messages: Vec<ChatMessage>,
options: Option<ChatCompletionParams>,
) -> Result<impl Stream<Item = Result<ChatCompletionStreamChunk, ApiError>>, ApiError> {
let request = ChatCompletionRequest::new(model, messages, true, options);
let response = self
.post_stream("/chat/completions", &request)
.await
.map_err(|e| self.new_reqwest_error(e))?;
if response.status().is_success() {
response
.json::<ChatCompletionResponse>()
.await
.map_err(|e| self.new_reqwest_error(e))
} else {
.map_err(|e| ApiError {
message: e.to_string(),
})?;
if !response.status().is_success() {
let status = response.status();
let text = response.text().await.unwrap_or_default();
Err(ApiError {
return Err(ApiError {
message: format!("{}: {}", status, text),
})
});
}
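        // Each SSE event is expected to arrive as a single `data: {...}` payload;
        // the `data: ` prefix is stripped before deserializing the JSON chunk.
        // (Note: a payload split across network reads would fail to parse here.)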
let deserialized_stream =
response
.bytes_stream()
.map(|item| -> Result<ChatCompletionStreamChunk, ApiError> {
match item {
Ok(bytes) => {
let text = String::from_utf8(bytes.to_vec()).map_err(|e| ApiError {
message: e.to_string(),
})?;
let text_trimmed = text.trim_start_matches("data: ");
from_str(&text_trimmed).map_err(|e| ApiError {
message: e.to_string(),
})
}
Err(e) => Err(ApiError {
message: e.to_string(),
}),
}
});
Ok(deserialized_stream)
}
pub fn embeddings(
@@ -177,7 +263,23 @@ impl Client {
let result = response.json::<EmbeddingResponse>();
match result {
Ok(response) => Ok(response),
Err(error) => Err(self.new_minreq_error(error)),
Err(error) => Err(self.to_api_error(error)),
}
}
pub async fn embeddings_async(
&self,
model: EmbedModel,
input: Vec<String>,
options: Option<EmbeddingRequestOptions>,
) -> Result<EmbeddingResponse, ApiError> {
let request = EmbeddingRequest::new(model, input, options);
let response = self.post_async("/embeddings", &request).await?;
let result = response.json::<EmbeddingResponse>().await;
match result {
Ok(response) => Ok(response),
Err(error) => Err(self.to_api_error(error)),
}
}
@@ -186,17 +288,204 @@ impl Client {
let result = response.json::<ModelListResponse>();
match result {
Ok(response) => Ok(response),
Err(error) => Err(self.new_minreq_error(error)),
Err(error) => Err(self.to_api_error(error)),
}
}
fn new_minreq_error(&self, err: minreq::Error) -> ApiError {
ApiError {
message: err.to_string(),
pub async fn list_models_async(&self) -> Result<ModelListResponse, ApiError> {
let response = self.get_async("/models").await?;
let result = response.json::<ModelListResponse>().await;
match result {
Ok(response) => Ok(response),
Err(error) => Err(self.to_api_error(error)),
}
}
fn new_reqwest_error(&self, err: ReqwestError) -> ApiError {
fn build_request_sync(
&self,
request: reqwest::blocking::RequestBuilder,
) -> reqwest::blocking::RequestBuilder {
let user_agent = format!(
"ivangabriele/mistralai-client-rs/{}",
env!("CARGO_PKG_VERSION")
);
let request_builder = request
.bearer_auth(&self.api_key)
.header("Accept", "application/json")
.header("Content-Type", "application/json")
.header("User-Agent", user_agent);
request_builder
}
fn build_request_async(&self, request: reqwest::RequestBuilder) -> reqwest::RequestBuilder {
let user_agent = format!(
"ivangabriele/mistralai-client-rs/{}",
env!("CARGO_PKG_VERSION")
);
let request_builder = request
.bearer_auth(&self.api_key)
.header("Accept", "application/json")
.header("Content-Type", "application/json")
.header("User-Agent", user_agent);
request_builder
}
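    // Same headers as the async builder, except `Accept: text/event-stream`,
    // so the API responds with Server-Sent Events rather than a single JSON body.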
fn build_request_stream(&self, request: reqwest::RequestBuilder) -> reqwest::RequestBuilder {
let user_agent = format!(
"ivangabriele/mistralai-client-rs/{}",
env!("CARGO_PKG_VERSION")
);
let request_builder = request
.bearer_auth(&self.api_key)
.header("Accept", "text/event-stream")
.header("Content-Type", "application/json")
.header("User-Agent", user_agent);
request_builder
}
fn get_sync(&self, path: &str) -> Result<reqwest::blocking::Response, ApiError> {
let reqwest_client = reqwest::blocking::Client::new();
let url = format!("{}{}", self.endpoint, path);
let request = self.build_request_sync(reqwest_client.get(url));
let result = request.send();
match result {
Ok(response) => {
if response.status().is_success() {
Ok(response)
} else {
let status = response.status();
let text = response.text().unwrap();
Err(ApiError {
message: format!("{}: {}", status, text),
})
}
}
Err(error) => Err(ApiError {
message: error.to_string(),
}),
}
}
async fn get_async(&self, path: &str) -> Result<reqwest::Response, ApiError> {
let reqwest_client = reqwest::Client::new();
let url = format!("{}{}", self.endpoint, path);
let request_builder = reqwest_client.get(url);
let request = self.build_request_async(request_builder);
let result = request.send().await;
match result {
Ok(response) => {
if response.status().is_success() {
Ok(response)
} else {
let status = response.status();
let text = response.text().await.unwrap_or_default();
Err(ApiError {
message: format!("{}: {}", status, text),
})
}
}
Err(error) => Err(ApiError {
message: error.to_string(),
}),
}
}
fn post_sync<T: serde::ser::Serialize + std::fmt::Debug>(
&self,
path: &str,
params: &T,
) -> Result<reqwest::blocking::Response, ApiError> {
let reqwest_client = reqwest::blocking::Client::new();
let url = format!("{}{}", self.endpoint, path);
let request_builder = reqwest_client.post(url).json(params);
let request = self.build_request_sync(request_builder);
let result = request.send();
match result {
Ok(response) => {
if response.status().is_success() {
Ok(response)
} else {
let status = response.status();
let text = response.text().unwrap_or_default();
Err(ApiError {
message: format!("{}: {}", status, text),
})
}
}
Err(error) => Err(ApiError {
message: error.to_string(),
}),
}
}
async fn post_async<T: serde::ser::Serialize + std::fmt::Debug>(
&self,
path: &str,
params: &T,
) -> Result<reqwest::Response, ApiError> {
let reqwest_client = reqwest::Client::new();
let url = format!("{}{}", self.endpoint, path);
let request_builder = reqwest_client.post(url).json(params);
let request = self.build_request_async(request_builder);
let result = request.send().await;
match result {
Ok(response) => {
if response.status().is_success() {
Ok(response)
} else {
let status = response.status();
let text = response.text().await.unwrap_or_default();
Err(ApiError {
message: format!("{}: {}", status, text),
})
}
}
Err(error) => Err(ApiError {
message: error.to_string(),
}),
}
}
async fn post_stream<T: serde::ser::Serialize + std::fmt::Debug>(
&self,
path: &str,
params: &T,
) -> Result<reqwest::Response, ApiError> {
let reqwest_client = reqwest::Client::new();
let url = format!("{}{}", self.endpoint, path);
let request_builder = reqwest_client.post(url).json(params);
let request = self.build_request_stream(request_builder);
let result = request.send().await;
match result {
Ok(response) => {
if response.status().is_success() {
Ok(response)
} else {
let status = response.status();
let text = response.text().await.unwrap_or_default();
Err(ApiError {
message: format!("{}: {}", status, text),
})
}
}
Err(error) => Err(ApiError {
message: error.to_string(),
}),
}
}
fn to_api_error(&self, err: ReqwestError) -> ApiError {
ApiError {
message: err.to_string(),
}

View File

@@ -15,5 +15,7 @@ impl Error for ApiError {}
#[derive(Debug, PartialEq, thiserror::Error)]
pub enum ClientError {
#[error("You must either set the `MISTRAL_API_KEY` environment variable or specify it in `Client::new(api_key, ...).")]
ApiKeyError,
MissingApiKey,
#[error("Failed to read the response text.")]
UnreadableResponseText,
}

View File

@@ -1,6 +1,6 @@
use jrest::expect;
use mistralai_client::v1::{
chat_completion::{ChatCompletionMessage, ChatCompletionMessageRole, ChatCompletionParams},
chat_completion::{ChatCompletionParams, ChatMessage, ChatMessageRole},
client::Client,
constants::Model,
};
@@ -10,8 +10,8 @@ async fn test_client_chat_async() {
let client = Client::new(None, None, None, None).unwrap();
let model = Model::OpenMistral7b;
let messages = vec![ChatCompletionMessage {
role: ChatCompletionMessageRole::user,
let messages = vec![ChatMessage {
role: ChatMessageRole::user,
content: "Just guess the next word: \"Eiffel ...\"?".to_string(),
}];
let options = ChatCompletionParams {
@@ -29,7 +29,7 @@ async fn test_client_chat_async() {
expect!(response.object).to_be("chat.completion".to_string());
expect!(response.choices.len()).to_be(1);
expect!(response.choices[0].index).to_be(0);
expect!(response.choices[0].message.role.clone()).to_be(ChatCompletionMessageRole::assistant);
expect!(response.choices[0].message.role.clone()).to_be(ChatMessageRole::assistant);
expect!(response.choices[0].message.content.clone())
.to_be("Tower. The Eiffel Tower is a famous landmark in Paris, France.".to_string());
expect!(response.usage.prompt_tokens).to_be_greater_than(0);

View File

@@ -0,0 +1,40 @@
use futures::stream::StreamExt;
use jrest::expect;
use mistralai_client::v1::{
chat_completion::{ChatCompletionParams, ChatMessage, ChatMessageRole},
client::Client,
constants::Model,
};
#[tokio::test]
async fn test_client_chat_stream() {
let client = Client::new(None, None, None, None).unwrap();
let model = Model::OpenMistral7b;
let messages = vec![ChatMessage {
role: ChatMessageRole::user,
content: "Just guess the next word: \"Eiffel ...\"?".to_string(),
}];
let options = ChatCompletionParams {
temperature: Some(0.0),
random_seed: Some(42),
..Default::default()
};
let stream_result = client.chat_stream(model, messages, Some(options)).await;
let mut stream = stream_result.expect("Failed to create stream.");
while let Some(chunk_result) = stream.next().await {
match chunk_result {
Ok(chunk) => {
if chunk.choices[0].delta.role == Some(ChatMessageRole::assistant)
|| chunk.choices[0].finish_reason == Some("stop".to_string())
{
expect!(chunk.choices[0].delta.content.len()).to_be(0);
} else {
expect!(chunk.choices[0].delta.content.len()).to_be_greater_than(0);
}
}
Err(e) => eprintln!("Error processing chunk: {:?}", e),
}
}
}

View File

@@ -1,6 +1,6 @@
use jrest::expect;
use mistralai_client::v1::{
chat_completion::{ChatCompletionMessage, ChatCompletionMessageRole, ChatCompletionParams},
chat_completion::{ChatCompletionParams, ChatMessage, ChatMessageRole},
client::Client,
constants::Model,
};
@@ -10,8 +10,8 @@ fn test_client_chat() {
let client = Client::new(None, None, None, None).unwrap();
let model = Model::OpenMistral7b;
let messages = vec![ChatCompletionMessage {
role: ChatCompletionMessageRole::user,
let messages = vec![ChatMessage {
role: ChatMessageRole::user,
content: "Just guess the next word: \"Eiffel ...\"?".to_string(),
}];
let options = ChatCompletionParams {
@@ -26,7 +26,7 @@ fn test_client_chat() {
expect!(response.object).to_be("chat.completion".to_string());
expect!(response.choices.len()).to_be(1);
expect!(response.choices[0].index).to_be(0);
expect!(response.choices[0].message.role.clone()).to_be(ChatCompletionMessageRole::assistant);
expect!(response.choices[0].message.role.clone()).to_be(ChatMessageRole::assistant);
expect!(response.choices[0].message.content.clone())
.to_be("Tower. The Eiffel Tower is a famous landmark in Paris, France.".to_string());
expect!(response.usage.prompt_tokens).to_be_greater_than(0);

View File

@@ -0,0 +1,29 @@
use jrest::expect;
use mistralai_client::v1::{client::Client, constants::EmbedModel};
#[tokio::test]
async fn test_client_embeddings_async() {
let client: Client = Client::new(None, None, None, None).unwrap();
let model = EmbedModel::MistralEmbed;
let input = vec!["Embed this sentence.", "As well as this one."]
.iter()
.map(|s| s.to_string())
.collect();
let options = None;
let response = client
.embeddings_async(model, input, options)
.await
.unwrap();
expect!(response.model).to_be(EmbedModel::MistralEmbed);
expect!(response.object).to_be("list".to_string());
expect!(response.data.len()).to_be(2);
expect!(response.data[0].index).to_be(0);
expect!(response.data[0].object.clone()).to_be("embedding".to_string());
expect!(response.data[0].embedding.len()).to_be_greater_than(0);
expect!(response.usage.prompt_tokens).to_be_greater_than(0);
expect!(response.usage.completion_tokens).to_be(0);
expect!(response.usage.total_tokens).to_be_greater_than(0);
}

View File

@@ -0,0 +1,20 @@
use jrest::expect;
use mistralai_client::v1::client::Client;
#[tokio::test]
async fn test_client_list_models_async() {
let client = Client::new(None, None, None, None).unwrap();
let response = client.list_models_async().await.unwrap();
expect!(response.object).to_be("list".to_string());
expect!(response.data.len()).to_be_greater_than(0);
// let open_mistral_7b_data_item = response
// .data
// .iter()
// .find(|item| item.id == "open-mistral-7b")
// .unwrap();
// expect!(open_mistral_7b_data_item.id).to_be("open-mistral-7b".to_string());
}

View File

@@ -26,6 +26,37 @@ fn test_client_new_with_none_params() {
fn test_client_new_with_all_params() {
let maybe_original_mistral_api_key = std::env::var("MISTRAL_API_KEY").ok();
std::env::remove_var("MISTRAL_API_KEY");
let api_key = Some("test_api_key_from_param".to_string());
let endpoint = Some("https://example.org".to_string());
let max_retries = Some(10);
let timeout = Some(20);
let client = Client::new(
api_key.clone(),
endpoint.clone(),
max_retries.clone(),
timeout.clone(),
)
.unwrap();
expect!(client.api_key).to_be(api_key.unwrap());
expect!(client.endpoint).to_be(endpoint.unwrap());
expect!(client.max_retries).to_be(max_retries.unwrap());
expect!(client.timeout).to_be(timeout.unwrap());
match maybe_original_mistral_api_key {
Some(original_mistral_api_key) => {
std::env::set_var("MISTRAL_API_KEY", original_mistral_api_key)
}
None => std::env::remove_var("MISTRAL_API_KEY"),
}
}
#[test]
fn test_client_new_with_api_key_as_both_env_and_param() {
let maybe_original_mistral_api_key = std::env::var("MISTRAL_API_KEY").ok();
std::env::remove_var("MISTRAL_API_KEY");
std::env::set_var("MISTRAL_API_KEY", "test_api_key_from_env");
let api_key = Some("test_api_key_from_param".to_string());
@@ -62,8 +93,8 @@ fn test_client_new_with_missing_api_key() {
let call = || Client::new(None, None, None, None);
match call() {
Ok(_) => panic!("Expected `ClientError::ApiKeyError` but got Ok.`"),
Err(error) => assert_eq!(error, ClientError::ApiKeyError),
Ok(_) => panic!("Expected `ClientError::MissingApiKey` but got Ok.`"),
Err(error) => assert_eq!(error, ClientError::MissingApiKey),
}
match maybe_original_mistral_api_key {