d5eb16dffc8c472500ee86e611eba5dd56bc3890
- Change author to Sunbeam Studios - Remove GitHub-specific files (.github/, renovate, README.template) - Remove .env.example (replaced by .envrc workflow) - Replace Makefile with justfile - Update CHANGELOG with v1.0.0 fork notes - Update Cargo.toml homepage to sunbeam.pt
Mistral AI Rust Client
Rust client for the Mistral AI API.
Supported APIs
- Chat completions (sync, async, streaming)
- Function calling / tool use
- FIM (fill-in-the-middle) code completions
- Embeddings (sync, async)
- Models (list, get, delete)
- Files (upload, list, get, delete, download URL)
- Fine-tuning jobs (create, list, get, cancel, start)
- Batch jobs (create, list, get, cancel)
- OCR (document text extraction)
- Audio transcription
- Moderations & classifications
- Agent completions
Installation
cargo add mistralai-client
API Key
Get your key at https://console.mistral.ai/api-keys.
use mistralai_client::v1::client::Client;
// From MISTRAL_API_KEY environment variable:
let client = Client::new(None, None, None, None).unwrap();
// Or pass directly:
let client = Client::new(Some("your_api_key".to_string()), None, None, None).unwrap();
Usage
Chat
use mistralai_client::v1::{
chat::{ChatMessage, ChatParams},
client::Client,
constants::Model,
};
fn main() {
let client = Client::new(None, None, None, None).unwrap();
let model = Model::mistral_small_latest();
let messages = vec![ChatMessage::new_user_message("What is the Eiffel Tower?")];
let options = ChatParams {
temperature: Some(0.7),
..Default::default()
};
let result = client.chat(model, messages, Some(options)).unwrap();
println!("{}", result.choices[0].message.content);
}
Chat (async)
use mistralai_client::v1::{
chat::ChatMessage,
client::Client,
constants::Model,
};
#[tokio::main]
async fn main() {
let client = Client::new(None, None, None, None).unwrap();
let model = Model::mistral_small_latest();
let messages = vec![ChatMessage::new_user_message("What is the Eiffel Tower?")];
let result = client.chat_async(model, messages, None).await.unwrap();
println!("{}", result.choices[0].message.content);
}
Chat with streaming
use futures::stream::StreamExt;
use mistralai_client::v1::{
chat::ChatMessage,
client::Client,
constants::Model,
};
use std::io::{self, Write};
#[tokio::main]
async fn main() {
let client = Client::new(None, None, None, None).unwrap();
let model = Model::mistral_small_latest();
let messages = vec![ChatMessage::new_user_message("Tell me a short story.")];
let stream = client.chat_stream(model, messages, None).await.unwrap();
stream
.for_each(|chunk_result| async {
match chunk_result {
Ok(chunks) => chunks.iter().for_each(|chunk| {
if let Some(content) = &chunk.choices[0].delta.content {
print!("{}", content);
io::stdout().flush().unwrap();
}
}),
Err(error) => eprintln!("Error: {:?}", error),
}
})
.await;
println!();
}
Function calling
use mistralai_client::v1::{
chat::{ChatMessage, ChatParams},
client::Client,
constants::Model,
tool::{Function, Tool, ToolChoice},
};
use serde::Deserialize;
use std::any::Any;
#[derive(Debug, Deserialize)]
struct GetWeatherArgs { city: String }
struct GetWeatherFunction;
#[async_trait::async_trait]
impl Function for GetWeatherFunction {
async fn execute(&self, arguments: String) -> Box<dyn Any + Send> {
let args: GetWeatherArgs = serde_json::from_str(&arguments).unwrap();
Box::new(format!("20°C in {}", args.city))
}
}
fn main() {
let tools = vec![Tool::new(
"get_weather".to_string(),
"Get the weather in a city.".to_string(),
serde_json::json!({
"type": "object",
"properties": {
"city": { "type": "string", "description": "City name" }
},
"required": ["city"]
}),
)];
let mut client = Client::new(None, None, None, None).unwrap();
client.register_function("get_weather".to_string(), Box::new(GetWeatherFunction));
let messages = vec![ChatMessage::new_user_message("What's the weather in Paris?")];
let options = ChatParams {
tool_choice: Some(ToolChoice::Auto),
tools: Some(tools),
..Default::default()
};
client.chat(Model::mistral_small_latest(), messages, Some(options)).unwrap();
let result = client.get_last_function_call_result().unwrap().downcast::<String>().unwrap();
println!("{}", result);
}
FIM (code completion)
use mistralai_client::v1::{client::Client, constants::Model, fim::FimParams};
fn main() {
let client = Client::new(None, None, None, None).unwrap();
let options = FimParams {
suffix: Some("\n return result".to_string()),
..Default::default()
};
let result = client.fim(Model::codestral_latest(), "def fibonacci(".to_string(), Some(options)).unwrap();
println!("{}", result.choices[0].message.content);
}
Embeddings
use mistralai_client::v1::{client::Client, constants::Model};
fn main() {
let client = Client::new(None, None, None, None).unwrap();
let input = vec!["Hello world".to_string(), "Goodbye world".to_string()];
let response = client.embeddings(Model::mistral_embed(), input, None).unwrap();
println!("Dimensions: {}", response.data[0].embedding.len());
}
List models
use mistralai_client::v1::client::Client;
fn main() {
let client = Client::new(None, None, None, None).unwrap();
let models = client.list_models().unwrap();
for model in &models.data {
println!("{}", model.id);
}
}
OCR
use mistralai_client::v1::{
client::Client,
constants::Model,
ocr::{OcrDocument, OcrRequest},
};
fn main() {
let client = Client::new(None, None, None, None).unwrap();
let request = OcrRequest {
model: Model::mistral_ocr_latest(),
document: OcrDocument::from_url("https://example.com/document.pdf"),
pages: Some(vec![0]),
table_format: None,
include_image_base64: None,
image_limit: None,
};
let response = client.ocr(&request).unwrap();
println!("{}", response.pages[0].markdown);
}
Available Models
Use `Model::new("any-model-id")` to reference any model by its ID, or use one of the built-in constructors:
| Constructor | Model ID |
|---|---|
| `Model::mistral_large_latest()` | `mistral-large-latest` |
| `Model::mistral_medium_latest()` | `mistral-medium-latest` |
| `Model::mistral_small_latest()` | `mistral-small-latest` |
| `Model::mistral_small_4()` | `mistral-small-4-0-26-03` |
| `Model::codestral_latest()` | `codestral-latest` |
| `Model::magistral_medium_latest()` | `magistral-medium-latest` |
| `Model::magistral_small_latest()` | `magistral-small-latest` |
| `Model::mistral_embed()` | `mistral-embed` |
| `Model::mistral_ocr_latest()` | `mistral-ocr-latest` |
| `Model::mistral_moderation_latest()` | `mistral-moderation-26-03` |
| `Model::pixtral_large()` | `pixtral-large-2411` |
| `Model::voxtral_mini_transcribe()` | `voxtral-mini-transcribe-2-26-02` |
See constants.rs for the full list.
License
Apache-2.0
Languages
Rust
99.8%
Just
0.2%