# Mistral AI Rust Client

Rust client for the [Mistral AI API](https://docs.mistral.ai/api/).

## Supported APIs

- [x] Chat completions (sync, async, streaming)
- [x] Function calling / tool use
- [x] FIM (fill-in-the-middle) code completions
- [x] Embeddings (sync, async)
- [x] Models (list, get, delete)
- [x] Files (upload, list, get, delete, download URL)
- [x] Fine-tuning jobs (create, list, get, cancel, start)
- [x] Batch jobs (create, list, get, cancel)
- [x] OCR (document text extraction)
- [x] Audio transcription
- [x] Moderations & classifications
- [x] Agent completions

## Installation

```sh
cargo add mistralai-client
```

### API Key

Get your key at <https://console.mistral.ai/api-keys>.

```rs
use mistralai_client::v1::client::Client;

// From the MISTRAL_API_KEY environment variable:
let client = Client::new(None, None, None, None).unwrap();

// Or pass the key directly:
let client = Client::new(Some("your_api_key".to_string()), None, None, None).unwrap();
```

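`Client::new` returns a `Result`, so a missing or invalid key can be handled without panicking. A minimal sketch; it only assumes the error type implements `Debug`, which the `unwrap()` calls above already rely on:

```rs
use mistralai_client::v1::client::Client;

fn main() {
    // Handle a missing MISTRAL_API_KEY instead of panicking on unwrap().
    match Client::new(None, None, None, None) {
        Ok(_client) => println!("Client ready."),
        Err(error) => eprintln!("Could not create client: {:?}", error),
    }
}
```
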
## Usage

### Chat

```rs
use mistralai_client::v1::{
    chat::{ChatMessage, ChatParams},
    client::Client,
    constants::Model,
};

fn main() {
    let client = Client::new(None, None, None, None).unwrap();

    let model = Model::mistral_small_latest();
    let messages = vec![ChatMessage::new_user_message("What is the Eiffel Tower?")];
    let options = ChatParams {
        temperature: Some(0.7),
        ..Default::default()
    };

    let result = client.chat(model, messages, Some(options)).unwrap();
    println!("{}", result.choices[0].message.content);
}
```

### Chat (async)

```rs
use mistralai_client::v1::{
    chat::{ChatMessage, ChatParams},
    client::Client,
    constants::Model,
};

#[tokio::main]
async fn main() {
    let client = Client::new(None, None, None, None).unwrap();

    let model = Model::mistral_small_latest();
    let messages = vec![ChatMessage::new_user_message("What is the Eiffel Tower?")];

    let result = client.chat_async(model, messages, None).await.unwrap();
    println!("{}", result.choices[0].message.content);
}
```

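Because `chat_async` returns a future, several requests can be awaited concurrently. A sketch using `tokio::join!`, assuming `chat_async` borrows the client immutably so the same client can be shared by both calls:

```rs
use mistralai_client::v1::{chat::ChatMessage, client::Client, constants::Model};

#[tokio::main]
async fn main() {
    let client = Client::new(None, None, None, None).unwrap();

    // Fire two independent chat requests and await them concurrently.
    let (capital, tower) = tokio::join!(
        client.chat_async(
            Model::mistral_small_latest(),
            vec![ChatMessage::new_user_message("What is the capital of France?")],
            None,
        ),
        client.chat_async(
            Model::mistral_small_latest(),
            vec![ChatMessage::new_user_message("How tall is the Eiffel Tower?")],
            None,
        ),
    );

    let capital = capital.unwrap();
    let tower = tower.unwrap();
    println!("{}", capital.choices[0].message.content);
    println!("{}", tower.choices[0].message.content);
}
```
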
### Chat with streaming

```rs
use futures::stream::StreamExt;
use mistralai_client::v1::{
    chat::{ChatMessage, ChatParams},
    client::Client,
    constants::Model,
};
use std::io::{self, Write};

#[tokio::main]
async fn main() {
    let client = Client::new(None, None, None, None).unwrap();

    let model = Model::mistral_small_latest();
    let messages = vec![ChatMessage::new_user_message("Tell me a short story.")];

    let stream = client.chat_stream(model, messages, None).await.unwrap();
    stream
        .for_each(|chunk_result| async {
            match chunk_result {
                Ok(chunks) => chunks.iter().for_each(|chunk| {
                    if let Some(content) = &chunk.choices[0].delta.content {
                        print!("{}", content);
                        io::stdout().flush().unwrap();
                    }
                }),
                Err(error) => eprintln!("Error: {:?}", error),
            }
        })
        .await;
    println!();
}
```

### Function calling

```rs
use mistralai_client::v1::{
    chat::{ChatMessage, ChatParams},
    client::Client,
    constants::Model,
    tool::{Function, Tool, ToolChoice},
};
use serde::Deserialize;
use std::any::Any;

#[derive(Debug, Deserialize)]
struct GetWeatherArgs {
    city: String,
}

struct GetWeatherFunction;

#[async_trait::async_trait]
impl Function for GetWeatherFunction {
    async fn execute(&self, arguments: String) -> Box<dyn Any + Send> {
        let args: GetWeatherArgs = serde_json::from_str(&arguments).unwrap();
        Box::new(format!("20°C in {}", args.city))
    }
}

fn main() {
    let tools = vec![Tool::new(
        "get_weather".to_string(),
        "Get the weather in a city.".to_string(),
        serde_json::json!({
            "type": "object",
            "properties": {
                "city": { "type": "string", "description": "City name" }
            },
            "required": ["city"]
        }),
    )];

    let mut client = Client::new(None, None, None, None).unwrap();
    client.register_function("get_weather".to_string(), Box::new(GetWeatherFunction));

    let messages = vec![ChatMessage::new_user_message("What's the weather in Paris?")];
    let options = ChatParams {
        tool_choice: Some(ToolChoice::Auto),
        tools: Some(tools),
        ..Default::default()
    };

    client.chat(Model::mistral_small_latest(), messages, Some(options)).unwrap();
    let result = client
        .get_last_function_call_result()
        .unwrap()
        .downcast::<String>()
        .unwrap();
    println!("{}", result);
}
```

### FIM (code completion)

```rs
use mistralai_client::v1::{client::Client, constants::Model, fim::FimParams};

fn main() {
    let client = Client::new(None, None, None, None).unwrap();

    let options = FimParams {
        suffix: Some("\n return result".to_string()),
        ..Default::default()
    };

    let result = client
        .fim(Model::codestral_latest(), "def fibonacci(".to_string(), Some(options))
        .unwrap();
    println!("{}", result.choices[0].message.content);
}
```

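The FIM response contains only the generated middle piece. A sketch, reusing the call above, that stitches prompt, completion, and suffix back into one snippet:

```rs
use mistralai_client::v1::{client::Client, constants::Model, fim::FimParams};

fn main() {
    let client = Client::new(None, None, None, None).unwrap();

    let prompt = "def fibonacci(".to_string();
    let suffix = "\n return result".to_string();
    let options = FimParams {
        suffix: Some(suffix.clone()),
        ..Default::default()
    };

    let result = client
        .fim(Model::codestral_latest(), prompt.clone(), Some(options))
        .unwrap();

    // Reassemble prompt + generated middle + suffix into a single snippet.
    let completion = &result.choices[0].message.content;
    println!("{}{}{}", prompt, completion, suffix);
}
```
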
### Embeddings

```rs
use mistralai_client::v1::{client::Client, constants::Model};

fn main() {
    let client = Client::new(None, None, None, None).unwrap();

    let input = vec!["Hello world".to_string(), "Goodbye world".to_string()];
    let response = client.embeddings(Model::mistral_embed(), input, None).unwrap();
    println!("Dimensions: {}", response.data[0].embedding.len());
}
```

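A common follow-up is comparing the returned vectors. A sketch computing cosine similarity, assuming the `embedding` field is a `Vec<f32>` (check the crate's embedding response type):

```rs
use mistralai_client::v1::{client::Client, constants::Model};

fn main() {
    let client = Client::new(None, None, None, None).unwrap();

    let input = vec!["Hello world".to_string(), "Goodbye world".to_string()];
    let response = client.embeddings(Model::mistral_embed(), input, None).unwrap();

    // Cosine similarity between the two returned vectors (assumes Vec<f32>).
    let (a, b) = (&response.data[0].embedding, &response.data[1].embedding);
    let dot: f32 = a.iter().zip(b.iter()).map(|(x, y)| x * y).sum();
    let norm = |v: &Vec<f32>| v.iter().map(|x| x * x).sum::<f32>().sqrt();
    let similarity = dot / (norm(a) * norm(b));
    println!("Cosine similarity: {}", similarity);
}
```
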
### List models

```rs
use mistralai_client::v1::client::Client;

fn main() {
    let client = Client::new(None, None, None, None).unwrap();

    let models = client.list_models().unwrap();
    for model in &models.data {
        println!("{}", model.id);
    }
}
```

### OCR

```rs
use mistralai_client::v1::{
    client::Client,
    constants::Model,
    ocr::{OcrDocument, OcrRequest},
};

fn main() {
    let client = Client::new(None, None, None, None).unwrap();

    let request = OcrRequest {
        model: Model::mistral_ocr_latest(),
        document: OcrDocument::from_url("https://example.com/document.pdf"),
        pages: Some(vec![0]),
        table_format: None,
        include_image_base64: None,
        image_limit: None,
    };

    let response = client.ocr(&request).unwrap();
    println!("{}", response.pages[0].markdown);
}
```

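To extract a whole document instead of a single page, leave `pages` unset and iterate over the returned pages. A sketch based only on the fields used above:

```rs
use mistralai_client::v1::{
    client::Client,
    constants::Model,
    ocr::{OcrDocument, OcrRequest},
};

fn main() {
    let client = Client::new(None, None, None, None).unwrap();

    let request = OcrRequest {
        model: Model::mistral_ocr_latest(),
        document: OcrDocument::from_url("https://example.com/document.pdf"),
        pages: None, // no page filter: extract every page
        table_format: None,
        include_image_base64: None,
        image_limit: None,
    };

    let response = client.ocr(&request).unwrap();
    for page in &response.pages {
        println!("{}", page.markdown);
    }
}
```
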
## Available Models

Use `Model::new("any-model-id")` for any model, or use the built-in constructors:

| Constructor | Model ID |
|---|---|
| `Model::mistral_large_latest()` | `mistral-large-latest` |
| `Model::mistral_medium_latest()` | `mistral-medium-latest` |
| `Model::mistral_small_latest()` | `mistral-small-latest` |
| `Model::mistral_small_4()` | `mistral-small-4-0-26-03` |
| `Model::codestral_latest()` | `codestral-latest` |
| `Model::magistral_medium_latest()` | `magistral-medium-latest` |
| `Model::magistral_small_latest()` | `magistral-small-latest` |
| `Model::mistral_embed()` | `mistral-embed` |
| `Model::mistral_ocr_latest()` | `mistral-ocr-latest` |
| `Model::mistral_moderation_latest()` | `mistral-moderation-26-03` |
| `Model::pixtral_large()` | `pixtral-large-2411` |
| `Model::voxtral_mini_transcribe()` | `voxtral-mini-transcribe-2-26-02` |

See `constants.rs` for the full list.

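For a model without a dedicated constructor, wrap its ID with `Model::new`. A short sketch (the model ID below is only a placeholder):

```rs
use mistralai_client::v1::{chat::ChatMessage, client::Client, constants::Model};

fn main() {
    let client = Client::new(None, None, None, None).unwrap();

    // Any model ID accepted by the API can be wrapped this way.
    let model = Model::new("your-model-id");
    let messages = vec![ChatMessage::new_user_message("Hello!")];

    let result = client.chat(model, messages, None).unwrap();
    println!("{}", result.choices[0].message.content);
}
```
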
## License

Apache-2.0