|
|
|
# Mistral AI Rust Client
|
|
|
|
|
|
|
|
|
[](https://crates.io/crates/mistralai-client)
|
|
|
|
|
[](https://docs.rs/mistralai-client/latest/mistralai-client)
|
|
|
|
[](https://github.com/ivangabriele/mistralai-client-rs/actions?query=branch%3Amain+workflow%3ATest++)
|
|
|
|
|
[](https://app.codecov.io/github/ivangabriele/mistralai-client-rs)
|
|
|
|
|
|
|
|
|
|
Rust client for the Mistral AI API.
|
|
|
|
|
|
|
|
|
|
---
|
|
|
|
|
|
|
|
|
|
- [Supported APIs](#supported-apis)
|
|
|
|
|
- [Installation](#installation)
|
|
|
|
|
- [Mistral API Key](#mistral-api-key)
|
|
|
|
|
- [As an environment variable](#as-an-environment-variable)
|
|
|
|
|
- [As a client argument](#as-a-client-argument)
|
|
|
|
|
- [Usage](#usage)
|
|
|
|
|
- [Chat without streaming](#chat-without-streaming)
|
|
|
|
|
- [Chat with streaming](#chat-with-streaming)
|
|
|
|
|
- [Embeddings](#embeddings)
|
|
|
|
|
- [List models](#list-models)
|
|
|
|
|
|
|
|
|
|
---
|
|
|
|
|
|
|
|
|
|
## Supported APIs
|
|
|
|
|
|
|
|
|
|
- [x] Chat without streaming
|
|
|
|
|
- [ ] Chat with streaming
|
|
|
|
- [x] Embeddings
|
|
|
|
- [x] List models
|
|
|
|
- [ ] Function Calling
|
|
|
|
|
|
|
|
|
|
## Installation
|
|
|
|
|
|
|
|
|
|
You can install the library in your project using:
|
|
|
|
|
|
|
|
|
|
```sh
|
|
|
|
|
cargo add mistralai-client
|
|
|
|
|
```
|
|
|
|
|
|
|
|
|
|
### Mistral API Key
|
|
|
|
|
|
|
|
|
|
You can get your Mistral API Key here: <https://docs.mistral.ai/#api-access>.
|
|
|
|
|
|
|
|
|
|
#### As an environment variable
|
|
|
|
|
|
|
|
|
|
Just set the `MISTRAL_API_KEY` environment variable.
|
|
|
|
|
|
|
|
|
|
#### As a client argument
|
|
|
|
|
|
|
|
|
|
```rs
|
|
|
|
|
use mistralai_client::v1::client::Client;
|
|
|
|
|
|
|
|
|
|
fn main() {
|
|
|
|
|
let api_key = "your_api_key";
|
|
|
|
|
|
|
|
|
|
let client = Client::new(Some(api_key), None, None, None);
|
|
|
|
|
}
|
|
|
|
|
```
|
|
|
|
|
|
|
|
|
|
## Usage
|
|
|
|
|
|
|
|
|
|
### Chat without streaming
|
|
|
|
|
|
|
|
|
|
```rs
|
|
|
|
use mistralai_client::v1::{
|
|
|
|
|
chat_completion::{ChatCompletionMessage, ChatCompletionMessageRole, ChatCompletionRequestOptions},
|
|
|
|
client::Client,
|
|
|
|
constants::Model,
|
|
|
|
};
|
|
|
|
|
|
|
|
|
|
fn main() {
|
|
|
|
|
// This example assumes you have set the `MISTRAL_API_KEY` environment variable.
|
|
|
|
|
let client = Client::new(None, None, None, None);
|
|
|
|
|
|
|
|
|
let model = Model::OpenMistral7b;
|
|
|
|
let messages = vec![ChatCompletionMessage {
|
|
|
|
|
role: ChatCompletionMessageRole::user,
|
|
|
|
|
content: "Just guess the next word: \"Eiffel ...\"?".to_string(),
|
|
|
|
|
}];
|
|
|
|
|
let options = ChatCompletionRequestOptions {
|
|
|
|
|
temperature: Some(0.0),
|
|
|
|
|
random_seed: Some(42),
|
|
|
|
|
..Default::default()
|
|
|
|
|
};
|
|
|
|
|
|
|
|
|
let result = client.chat(model, messages, Some(options)).unwrap();
|
|
|
|
println!("Assistant: {}", result.choices[0].message.content);
|
|
|
|
|
// => "Assistant: Tower. [...]"
|
|
|
|
|
}
|
|
|
|
|
```
|
|
|
|
|
|
|
|
|
|
### Chat with streaming
|
|
|
|
|
|
|
|
|
|
_In progress._
|
|
|
|
|
|
|
|
|
|
### Embeddings
|
|
|
|
|
|
|
|
|
```rs
|
|
|
|
|
use mistralai_client::v1::{client::Client, constants::EmbedModel};
|
|
|
|
|
|
|
|
|
|
fn main() {
|
|
|
|
|
// This example assumes you have set the `MISTRAL_API_KEY` environment variable.
|
|
|
|
|
let client: Client = Client::new(None, None, None, None);
|
|
|
|
|
|
|
|
|
|
let model = EmbedModel::MistralEmbed;
|
|
|
|
|
let input = vec!["Embed this sentence.", "As well as this one."]
|
|
|
|
|
.iter()
|
|
|
|
|
.map(|s| s.to_string())
|
|
|
|
|
.collect();
|
|
|
|
|
let options = None;
|
|
|
|
|
|
|
|
|
|
let response = client.embeddings(model, input, options).unwrap();
|
|
|
|
|
println!("Embeddings: {:?}", response.data);
|
|
|
|
|
// => "Embeddings: [{...}, {...}]"
|
|
|
|
|
}
|
|
|
|
|
```
|
|
|
|
|
|
|
|
|
### List models
|
|
|
|
|
|
|
|
|
```rs
|
|
|
|
|
use mistralai_client::v1::client::Client;
|
|
|
|
|
|
|
|
|
|
fn main() {
|
|
|
|
|
// This example assumes you have set the `MISTRAL_API_KEY` environment variable.
|
|
|
|
|
let client = Client::new(None, None, None, None);
|
|
|
|
|
|
|
|
|
let result = client.list_models().unwrap();
|
|
|
|
println!("First Model ID: {:?}", result.data[0].id);
|
|
|
|
|
// => "First Model ID: open-mistral-7b"
|
|
|
|
|
}
|
|
|
|
|
```
|