diff --git a/.gitignore b/.gitignore index be294f9..cbf6f41 100644 --- a/.gitignore +++ b/.gitignore @@ -23,3 +23,4 @@ Cargo.lock /cobertura.xml .env +.envrc diff --git a/README.md b/README.md index a88429c..fd0b497 100644 --- a/README.md +++ b/README.md @@ -1,87 +1,43 @@ # Mistral AI Rust Client -[![Crates.io Package](https://img.shields.io/crates/v/mistralai-client?style=for-the-badge)](https://crates.io/crates/mistralai-client) -[![Docs.rs Documentation](https://img.shields.io/docsrs/mistralai-client/latest?style=for-the-badge)](https://docs.rs/mistralai-client/latest/mistralai-client) -[![Test Workflow Status](https://img.shields.io/github/actions/workflow/status/ivangabriele/mistralai-client-rs/test.yml?label=CI&style=for-the-badge)](https://github.com/ivangabriele/mistralai-client-rs/actions?query=branch%3Amain+workflow%3ATest++) -[![Code Coverage](https://img.shields.io/codecov/c/github/ivangabriele/mistralai-client-rs/main?label=Cov&style=for-the-badge)](https://app.codecov.io/github/ivangabriele/mistralai-client-rs) +Rust client for the [Mistral AI API](https://docs.mistral.ai/api/). -Rust client for the Mistral AI API. - -> [!IMPORTANT] -> While we are in v0, minor versions may introduce breaking changes. -> Please, refer to the [CHANGELOG.md](./CHANGELOG.md) for more information. 
- ---- - -- [Supported APIs](#supported-apis) -- [Installation](#installation) - - [Mistral API Key](#mistral-api-key) - - [As an environment variable](#as-an-environment-variable) - - [As a client argument](#as-a-client-argument) -- [Usage](#usage) - - [Chat](#chat) - - [Chat (async)](#chat-async) - - [Chat with streaming (async)](#chat-with-streaming-async) - - [Chat with Function Calling](#chat-with-function-calling) - - [Chat with Function Calling (async)](#chat-with-function-calling-async) - - [Embeddings](#embeddings) - - [Embeddings (async)](#embeddings-async) - - [List models](#list-models) - - [List models (async)](#list-models-async) -- [Contributing](#contributing) - ---- +> **Fork** of [ivangabriele/mistralai-client-rs](https://github.com/ivangabriele/mistralai-client-rs), +> updated to the latest Mistral API with all current endpoints and models. ## Supported APIs -- [x] Chat without streaming -- [x] Chat without streaming (async) -- [x] Chat with streaming -- [x] Embedding -- [x] Embedding (async) -- [x] List models -- [x] List models (async) -- [x] Function Calling -- [x] Function Calling (async) +- [x] Chat completions (sync, async, streaming) +- [x] Function calling / tool use +- [x] FIM (fill-in-the-middle) code completions +- [x] Embeddings (sync, async) +- [x] Models (list, get, delete) +- [x] Files (upload, list, get, delete, download URL) +- [x] Fine-tuning jobs (create, list, get, cancel, start) +- [x] Batch jobs (create, list, get, cancel) +- [x] OCR (document text extraction) +- [x] Audio transcription +- [x] Moderations & classifications +- [x] Agent completions ## Installation -You can install the library in your project using: - ```sh cargo add mistralai-client ``` -### Mistral API Key +### API Key -You can get your Mistral API Key there: . - -#### As an environment variable - -Just set the `MISTRAL_API_KEY` environment variable. +Get your key at <https://console.mistral.ai/api-keys/>. 
```rs use mistralai_client::v1::client::Client; -fn main() { - let client = Client::new(None, None, None, None); -} -``` +// From MISTRAL_API_KEY environment variable: +let client = Client::new(None, None, None, None).unwrap(); -```sh -MISTRAL_API_KEY=your_api_key cargo run -``` - -#### As a client argument - -```rs -use mistralai_client::v1::client::Client; - -fn main() { - let api_key = "your_api_key"; - - let client = Client::new(Some(api_key), None, None, None).unwrap(); -} +// Or pass directly: +let client = Client::new(Some("your_api_key".to_string()), None, None, None).unwrap(); ``` ## Usage @@ -90,30 +46,23 @@ fn main() { ```rs use mistralai_client::v1::{ - chat::{ChatMessage, ChatMessageRole, ChatParams}, + chat::{ChatMessage, ChatParams}, client::Client, constants::Model, }; fn main() { - // This example suppose you have set the `MISTRAL_API_KEY` environment variable. let client = Client::new(None, None, None, None).unwrap(); - let model = Model::OpenMistral7b; - let messages = vec![ChatMessage { - role: ChatMessageRole::User, - content: "Just guess the next word: \"Eiffel ...\"?".to_string(), - tool_calls: None, - }]; + let model = Model::mistral_small_latest(); + let messages = vec![ChatMessage::new_user_message("What is the Eiffel Tower?")]; let options = ChatParams { - temperature: 0.0, - random_seed: Some(42), + temperature: Some(0.7), ..Default::default() }; let result = client.chat(model, messages, Some(options)).unwrap(); - println!("Assistant: {}", result.choices[0].message.content); - // => "Assistant: Tower. The Eiffel Tower is a famous landmark in Paris, France." + println!("{}", result.choices[0].message.content); } ``` @@ -121,46 +70,29 @@ fn main() { ```rs use mistralai_client::v1::{ - chat::{ChatMessage, ChatMessageRole, ChatParams}, + chat::{ChatMessage, ChatParams}, client::Client, constants::Model, }; #[tokio::main] async fn main() { - // This example suppose you have set the `MISTRAL_API_KEY` environment variable. 
let client = Client::new(None, None, None, None).unwrap(); - let model = Model::OpenMistral7b; - let messages = vec![ChatMessage { - role: ChatMessageRole::User, - content: "Just guess the next word: \"Eiffel ...\"?".to_string(), - tool_calls: None, - }]; - let options = ChatParams { - temperature: 0.0, - random_seed: Some(42), - ..Default::default() - }; + let model = Model::mistral_small_latest(); + let messages = vec![ChatMessage::new_user_message("What is the Eiffel Tower?")]; - let result = client - .chat_async(model, messages, Some(options)) - .await - .unwrap(); - println!( - "{:?}: {}", - result.choices[0].message.role, result.choices[0].message.content - ); - // => "Assistant: Tower. The Eiffel Tower is a famous landmark in Paris, France." + let result = client.chat_async(model, messages, None).await.unwrap(); + println!("{}", result.choices[0].message.content); } ``` -### Chat with streaming (async) +### Chat with streaming ```rs use futures::stream::StreamExt; use mistralai_client::v1::{ - chat::{ChatMessage, ChatMessageRole, ChatParams}, + chat::{ChatMessage, ChatParams}, client::Client, constants::Model, }; @@ -168,244 +100,111 @@ use std::io::{self, Write}; #[tokio::main] async fn main() { - // This example suppose you have set the `MISTRAL_API_KEY` environment variable. 
let client = Client::new(None, None, None, None).unwrap(); - let model = Model::OpenMistral7b; - let messages = vec![ChatMessage { - role: ChatMessageRole::User, - content: "Tell me a short happy story.".to_string(), - tool_calls: None, - }]; - let options = ChatParams { - temperature: 0.0, - random_seed: Some(42), - ..Default::default() - }; + let model = Model::mistral_small_latest(); + let messages = vec![ChatMessage::new_user_message("Tell me a short story.")]; - let stream_result = client - .chat_stream(model, messages, Some(options)) - .await - .unwrap(); - stream_result + let stream = client.chat_stream(model, messages, None).await.unwrap(); + stream .for_each(|chunk_result| async { match chunk_result { Ok(chunks) => chunks.iter().for_each(|chunk| { - print!("{}", chunk.choices[0].delta.content); - io::stdout().flush().unwrap(); - // => "Once upon a time, [...]" + if let Some(content) = &chunk.choices[0].delta.content { + print!("{}", content); + io::stdout().flush().unwrap(); + } }), - Err(error) => { - eprintln!("Error processing chunk: {:?}", error) - } + Err(error) => eprintln!("Error: {:?}", error), } }) .await; - print!("\n") // To persist the last chunk output. 
+ println!(); } ``` -### Chat with Function Calling +### Function calling ```rs use mistralai_client::v1::{ - chat::{ChatMessage, ChatMessageRole, ChatParams}, + chat::{ChatMessage, ChatParams}, client::Client, constants::Model, - tool::{Function, Tool, ToolChoice, ToolFunctionParameter, ToolFunctionParameterType}, + tool::{Function, Tool, ToolChoice}, }; use serde::Deserialize; use std::any::Any; #[derive(Debug, Deserialize)] -struct GetCityTemperatureArguments { - city: String, -} +struct GetWeatherArgs { city: String } -struct GetCityTemperatureFunction; +struct GetWeatherFunction; #[async_trait::async_trait] -impl Function for GetCityTemperatureFunction { +impl Function for GetWeatherFunction { async fn execute(&self, arguments: String) -> Box { - // Deserialize arguments, perform the logic, and return the result - let GetCityTemperatureArguments { city } = serde_json::from_str(&arguments).unwrap(); - - let temperature = match city.as_str() { - "Paris" => "20°C", - _ => "Unknown city", - }; - - Box::new(temperature.to_string()) + let args: GetWeatherArgs = serde_json::from_str(&arguments).unwrap(); + Box::new(format!("20°C in {}", args.city)) } } fn main() { let tools = vec![Tool::new( - "get_city_temperature".to_string(), - "Get the current temperature in a city.".to_string(), - vec![ToolFunctionParameter::new( - "city".to_string(), - "The name of the city.".to_string(), - ToolFunctionParameterType::String, - )], + "get_weather".to_string(), + "Get the weather in a city.".to_string(), + serde_json::json!({ + "type": "object", + "properties": { + "city": { "type": "string", "description": "City name" } + }, + "required": ["city"] + }), )]; - // This example suppose you have set the `MISTRAL_API_KEY` environment variable. 
let mut client = Client::new(None, None, None, None).unwrap(); - client.register_function( - "get_city_temperature".to_string(), - Box::new(GetCityTemperatureFunction), - ); + client.register_function("get_weather".to_string(), Box::new(GetWeatherFunction)); - let model = Model::MistralSmallLatest; - let messages = vec![ChatMessage { - role: ChatMessageRole::User, - content: "What's the temperature in Paris?".to_string(), - tool_calls: None, - }]; + let messages = vec![ChatMessage::new_user_message("What's the weather in Paris?")]; let options = ChatParams { - temperature: 0.0, - random_seed: Some(42), tool_choice: Some(ToolChoice::Auto), tools: Some(tools), ..Default::default() }; - client.chat(model, messages, Some(options)).unwrap(); - let temperature = client - .get_last_function_call_result() - .unwrap() - .downcast::() - .unwrap(); - println!("The temperature in Paris is: {}.", temperature); - // => "The temperature in Paris is: 20°C." + client.chat(Model::mistral_small_latest(), messages, Some(options)).unwrap(); + let result = client.get_last_function_call_result().unwrap().downcast::().unwrap(); + println!("{}", result); } ``` -### Chat with Function Calling (async) +### FIM (code completion) ```rs -use mistralai_client::v1::{ - chat::{ChatMessage, ChatMessageRole, ChatParams}, - client::Client, - constants::Model, - tool::{Function, Tool, ToolChoice, ToolFunctionParameter, ToolFunctionParameterType}, -}; -use serde::Deserialize; -use std::any::Any; +use mistralai_client::v1::{client::Client, constants::Model, fim::FimParams}; -#[derive(Debug, Deserialize)] -struct GetCityTemperatureArguments { - city: String, -} +fn main() { + let client = Client::new(None, None, None, None).unwrap(); -struct GetCityTemperatureFunction; -#[async_trait::async_trait] -impl Function for GetCityTemperatureFunction { - async fn execute(&self, arguments: String) -> Box { - // Deserialize arguments, perform the logic, and return the result - let GetCityTemperatureArguments { 
city } = serde_json::from_str(&arguments).unwrap(); - - let temperature = match city.as_str() { - "Paris" => "20°C", - _ => "Unknown city", - }; - - Box::new(temperature.to_string()) - } -} - -#[tokio::main] -async fn main() { - let tools = vec![Tool::new( - "get_city_temperature".to_string(), - "Get the current temperature in a city.".to_string(), - vec![ToolFunctionParameter::new( - "city".to_string(), - "The name of the city.".to_string(), - ToolFunctionParameterType::String, - )], - )]; - - // This example suppose you have set the `MISTRAL_API_KEY` environment variable. - let mut client = Client::new(None, None, None, None).unwrap(); - client.register_function( - "get_city_temperature".to_string(), - Box::new(GetCityTemperatureFunction), - ); - - let model = Model::MistralSmallLatest; - let messages = vec![ChatMessage { - role: ChatMessageRole::User, - content: "What's the temperature in Paris?".to_string(), - tool_calls: None, - }]; - let options = ChatParams { - temperature: 0.0, - random_seed: Some(42), - tool_choice: Some(ToolChoice::Auto), - tools: Some(tools), + let options = FimParams { + suffix: Some("\n return result".to_string()), ..Default::default() }; - client - .chat_async(model, messages, Some(options)) - .await - .unwrap(); - let temperature = client - .get_last_function_call_result() - .unwrap() - .downcast::() - .unwrap(); - println!("The temperature in Paris is: {}.", temperature); - // => "The temperature in Paris is: 20°C." + let result = client.fim(Model::codestral_latest(), "def fibonacci(".to_string(), Some(options)).unwrap(); + println!("{}", result.choices[0].message.content); } ``` ### Embeddings ```rs -use mistralai_client::v1::{client::Client, constants::EmbedModel}; +use mistralai_client::v1::{client::Client, constants::Model}; fn main() { - // This example suppose you have set the `MISTRAL_API_KEY` environment variable. 
- let client: Client = Client::new(None, None, None, None).unwrap(); + let client = Client::new(None, None, None, None).unwrap(); - let model = EmbedModel::MistralEmbed; - let input = vec!["Embed this sentence.", "As well as this one."] - .iter() - .map(|s| s.to_string()) - .collect(); - let options = None; - - let response = client.embeddings(model, input, options).unwrap(); - println!("First Embedding: {:?}", response.data[0]); - // => "First Embedding: {...}" -} -``` - -### Embeddings (async) - -```rs -use mistralai_client::v1::{client::Client, constants::EmbedModel}; - -#[tokio::main] -async fn main() { - // This example suppose you have set the `MISTRAL_API_KEY` environment variable. - let client: Client = Client::new(None, None, None, None).unwrap(); - - let model = EmbedModel::MistralEmbed; - let input = vec!["Embed this sentence.", "As well as this one."] - .iter() - .map(|s| s.to_string()) - .collect(); - let options = None; - - let response = client - .embeddings_async(model, input, options) - .await - .unwrap(); - println!("First Embedding: {:?}", response.data[0]); - // => "First Embedding: {...}" + let input = vec!["Hello world".to_string(), "Goodbye world".to_string()]; + let response = client.embeddings(Model::mistral_embed(), input, None).unwrap(); + println!("Dimensions: {}", response.data[0].embedding.len()); } ``` @@ -415,31 +214,62 @@ async fn main() { use mistralai_client::v1::client::Client; fn main() { - // This example suppose you have set the `MISTRAL_API_KEY` environment variable. 
let client = Client::new(None, None, None, None).unwrap(); - let result = client.list_models().unwrap(); - println!("First Model ID: {:?}", result.data[0].id); - // => "First Model ID: open-mistral-7b" + let models = client.list_models().unwrap(); + for model in &models.data { + println!("{}", model.id); + } } ``` -### List models (async) +### OCR ```rs -use mistralai_client::v1::client::Client; +use mistralai_client::v1::{ + client::Client, + constants::Model, + ocr::{OcrDocument, OcrRequest}, +}; -#[tokio::main] -async fn main() { - // This example suppose you have set the `MISTRAL_API_KEY` environment variable. +fn main() { let client = Client::new(None, None, None, None).unwrap(); - let result = client.list_models_async().await.unwrap(); - println!("First Model ID: {:?}", result.data[0].id); - // => "First Model ID: open-mistral-7b" + let request = OcrRequest { + model: Model::mistral_ocr_latest(), + document: OcrDocument::from_url("https://example.com/document.pdf"), + pages: Some(vec![0]), + table_format: None, + include_image_base64: None, + image_limit: None, + }; + + let response = client.ocr(&request).unwrap(); + println!("{}", response.pages[0].markdown); } ``` -## Contributing +## Available Models -Please read [CONTRIBUTING.md](./CONTRIBUTING.md) for details on how to contribute to this library. 
+Use `Model::new("any-model-id")` for any model, or use the built-in constructors: + +| Constructor | Model ID | +|---|---| +| `Model::mistral_large_latest()` | `mistral-large-latest` | +| `Model::mistral_medium_latest()` | `mistral-medium-latest` | +| `Model::mistral_small_latest()` | `mistral-small-latest` | +| `Model::mistral_small_4()` | `mistral-small-4-0-26-03` | +| `Model::codestral_latest()` | `codestral-latest` | +| `Model::magistral_medium_latest()` | `magistral-medium-latest` | +| `Model::magistral_small_latest()` | `magistral-small-latest` | +| `Model::mistral_embed()` | `mistral-embed` | +| `Model::mistral_ocr_latest()` | `mistral-ocr-latest` | +| `Model::mistral_moderation_latest()` | `mistral-moderation-26-03` | +| `Model::pixtral_large()` | `pixtral-large-2411` | +| `Model::voxtral_mini_transcribe()` | `voxtral-mini-transcribe-2-26-02` | + +See `constants.rs` for the full list. + +## License + +Apache-2.0 diff --git a/examples/chat.rs b/examples/chat.rs index ad3be09..a924d84 100644 --- a/examples/chat.rs +++ b/examples/chat.rs @@ -1,5 +1,5 @@ use mistralai_client::v1::{ - chat::{ChatMessage, ChatMessageRole, ChatParams}, + chat::{ChatMessage, ChatParams}, client::Client, constants::Model, }; @@ -8,14 +8,12 @@ fn main() { // This example suppose you have set the `MISTRAL_API_KEY` environment variable. 
let client = Client::new(None, None, None, None).unwrap(); - let model = Model::OpenMistral7b; - let messages = vec![ChatMessage { - role: ChatMessageRole::User, - content: "Just guess the next word: \"Eiffel ...\"?".to_string(), - tool_calls: None, - }]; + let model = Model::mistral_small_latest(); + let messages = vec![ChatMessage::new_user_message( + "Just guess the next word: \"Eiffel ...\"?", + )]; let options = ChatParams { - temperature: 0.0, + temperature: Some(0.0), random_seed: Some(42), ..Default::default() }; diff --git a/examples/chat_async.rs b/examples/chat_async.rs index a3f35a5..6b62848 100644 --- a/examples/chat_async.rs +++ b/examples/chat_async.rs @@ -1,5 +1,5 @@ use mistralai_client::v1::{ - chat::{ChatMessage, ChatMessageRole, ChatParams}, + chat::{ChatMessage, ChatParams}, client::Client, constants::Model, }; @@ -9,14 +9,12 @@ async fn main() { // This example suppose you have set the `MISTRAL_API_KEY` environment variable. let client = Client::new(None, None, None, None).unwrap(); - let model = Model::OpenMistral7b; - let messages = vec![ChatMessage { - role: ChatMessageRole::User, - content: "Just guess the next word: \"Eiffel ...\"?".to_string(), - tool_calls: None, - }]; + let model = Model::mistral_small_latest(); + let messages = vec![ChatMessage::new_user_message( + "Just guess the next word: \"Eiffel ...\"?", + )]; let options = ChatParams { - temperature: 0.0, + temperature: Some(0.0), random_seed: Some(42), ..Default::default() }; @@ -29,5 +27,4 @@ async fn main() { "{:?}: {}", result.choices[0].message.role, result.choices[0].message.content ); - // => "Assistant: Tower. The Eiffel Tower is a famous landmark in Paris, France." 
} diff --git a/examples/chat_with_function_calling.rs b/examples/chat_with_function_calling.rs index 67660fb..b14fbd1 100644 --- a/examples/chat_with_function_calling.rs +++ b/examples/chat_with_function_calling.rs @@ -1,8 +1,8 @@ use mistralai_client::v1::{ - chat::{ChatMessage, ChatMessageRole, ChatParams}, + chat::{ChatMessage, ChatParams}, client::Client, constants::Model, - tool::{Function, Tool, ToolChoice, ToolFunctionParameter, ToolFunctionParameterType}, + tool::{Function, Tool, ToolChoice}, }; use serde::Deserialize; use std::any::Any; @@ -16,7 +16,6 @@ struct GetCityTemperatureFunction; #[async_trait::async_trait] impl Function for GetCityTemperatureFunction { async fn execute(&self, arguments: String) -> Box { - // Deserialize arguments, perform the logic, and return the result let GetCityTemperatureArguments { city } = serde_json::from_str(&arguments).unwrap(); let temperature = match city.as_str() { @@ -32,11 +31,16 @@ fn main() { let tools = vec![Tool::new( "get_city_temperature".to_string(), "Get the current temperature in a city.".to_string(), - vec![ToolFunctionParameter::new( - "city".to_string(), - "The name of the city.".to_string(), - ToolFunctionParameterType::String, - )], + serde_json::json!({ + "type": "object", + "properties": { + "city": { + "type": "string", + "description": "The name of the city." + } + }, + "required": ["city"] + }), )]; // This example suppose you have set the `MISTRAL_API_KEY` environment variable. 
@@ -46,14 +50,12 @@ fn main() { Box::new(GetCityTemperatureFunction), ); - let model = Model::MistralSmallLatest; - let messages = vec![ChatMessage { - role: ChatMessageRole::User, - content: "What's the temperature in Paris?".to_string(), - tool_calls: None, - }]; + let model = Model::mistral_small_latest(); + let messages = vec![ChatMessage::new_user_message( + "What's the temperature in Paris?", + )]; let options = ChatParams { - temperature: 0.0, + temperature: Some(0.0), random_seed: Some(42), tool_choice: Some(ToolChoice::Auto), tools: Some(tools), diff --git a/examples/chat_with_function_calling_async.rs b/examples/chat_with_function_calling_async.rs index 5b91329..330f114 100644 --- a/examples/chat_with_function_calling_async.rs +++ b/examples/chat_with_function_calling_async.rs @@ -1,8 +1,8 @@ use mistralai_client::v1::{ - chat::{ChatMessage, ChatMessageRole, ChatParams}, + chat::{ChatMessage, ChatParams}, client::Client, constants::Model, - tool::{Function, Tool, ToolChoice, ToolFunctionParameter, ToolFunctionParameterType}, + tool::{Function, Tool, ToolChoice}, }; use serde::Deserialize; use std::any::Any; @@ -16,7 +16,6 @@ struct GetCityTemperatureFunction; #[async_trait::async_trait] impl Function for GetCityTemperatureFunction { async fn execute(&self, arguments: String) -> Box { - // Deserialize arguments, perform the logic, and return the result let GetCityTemperatureArguments { city } = serde_json::from_str(&arguments).unwrap(); let temperature = match city.as_str() { @@ -33,11 +32,16 @@ async fn main() { let tools = vec![Tool::new( "get_city_temperature".to_string(), "Get the current temperature in a city.".to_string(), - vec![ToolFunctionParameter::new( - "city".to_string(), - "The name of the city.".to_string(), - ToolFunctionParameterType::String, - )], + serde_json::json!({ + "type": "object", + "properties": { + "city": { + "type": "string", + "description": "The name of the city." 
+ } + }, + "required": ["city"] + }), )]; // This example suppose you have set the `MISTRAL_API_KEY` environment variable. @@ -47,14 +51,12 @@ async fn main() { Box::new(GetCityTemperatureFunction), ); - let model = Model::MistralSmallLatest; - let messages = vec![ChatMessage { - role: ChatMessageRole::User, - content: "What's the temperature in Paris?".to_string(), - tool_calls: None, - }]; + let model = Model::mistral_small_latest(); + let messages = vec![ChatMessage::new_user_message( + "What's the temperature in Paris?", + )]; let options = ChatParams { - temperature: 0.0, + temperature: Some(0.0), random_seed: Some(42), tool_choice: Some(ToolChoice::Auto), tools: Some(tools), diff --git a/examples/chat_with_streaming.rs b/examples/chat_with_streaming.rs index f5ad8d4..bedab71 100644 --- a/examples/chat_with_streaming.rs +++ b/examples/chat_with_streaming.rs @@ -1,6 +1,6 @@ use futures::stream::StreamExt; use mistralai_client::v1::{ - chat::{ChatMessage, ChatMessageRole, ChatParams}, + chat::{ChatMessage, ChatParams}, client::Client, constants::Model, }; @@ -11,14 +11,10 @@ async fn main() { // This example suppose you have set the `MISTRAL_API_KEY` environment variable. 
let client = Client::new(None, None, None, None).unwrap(); - let model = Model::OpenMistral7b; - let messages = vec![ChatMessage { - role: ChatMessageRole::User, - content: "Tell me a short happy story.".to_string(), - tool_calls: None, - }]; + let model = Model::mistral_small_latest(); + let messages = vec![ChatMessage::new_user_message("Tell me a short happy story.")]; let options = ChatParams { - temperature: 0.0, + temperature: Some(0.0), random_seed: Some(42), ..Default::default() }; @@ -31,9 +27,10 @@ async fn main() { .for_each(|chunk_result| async { match chunk_result { Ok(chunks) => chunks.iter().for_each(|chunk| { - print!("{}", chunk.choices[0].delta.content); - io::stdout().flush().unwrap(); - // => "Once upon a time, [...]" + if let Some(content) = &chunk.choices[0].delta.content { + print!("{}", content); + io::stdout().flush().unwrap(); + } }), Err(error) => { eprintln!("Error processing chunk: {:?}", error) @@ -41,5 +38,5 @@ async fn main() { } }) .await; - print!("\n") // To persist the last chunk output. + println!(); } diff --git a/examples/embeddings.rs b/examples/embeddings.rs index 898e7d4..7359d99 100644 --- a/examples/embeddings.rs +++ b/examples/embeddings.rs @@ -1,10 +1,10 @@ -use mistralai_client::v1::{client::Client, constants::EmbedModel}; +use mistralai_client::v1::{client::Client, constants::Model}; fn main() { // This example suppose you have set the `MISTRAL_API_KEY` environment variable. 
let client: Client = Client::new(None, None, None, None).unwrap(); - let model = EmbedModel::MistralEmbed; + let model = Model::mistral_embed(); let input = vec!["Embed this sentence.", "As well as this one."] .iter() .map(|s| s.to_string()) @@ -13,5 +13,4 @@ fn main() { let response = client.embeddings(model, input, options).unwrap(); println!("First Embedding: {:?}", response.data[0]); - // => "First Embedding: {...}" } diff --git a/examples/embeddings_async.rs b/examples/embeddings_async.rs index a93d374..1987bb5 100644 --- a/examples/embeddings_async.rs +++ b/examples/embeddings_async.rs @@ -1,11 +1,11 @@ -use mistralai_client::v1::{client::Client, constants::EmbedModel}; +use mistralai_client::v1::{client::Client, constants::Model}; #[tokio::main] async fn main() { // This example suppose you have set the `MISTRAL_API_KEY` environment variable. let client: Client = Client::new(None, None, None, None).unwrap(); - let model = EmbedModel::MistralEmbed; + let model = Model::mistral_embed(); let input = vec!["Embed this sentence.", "As well as this one."] .iter() .map(|s| s.to_string()) @@ -17,5 +17,4 @@ async fn main() { let response = client .embeddings_async(model, input, options) .await .unwrap(); println!("First Embedding: {:?}", response.data[0]); - // => "First Embedding: {...}" } diff --git a/examples/fim.rs b/examples/fim.rs new file mode 100644 index 0000000..7d7274f --- /dev/null +++ b/examples/fim.rs @@ -0,0 +1,21 @@ +use mistralai_client::v1::{ + client::Client, + constants::Model, + fim::FimParams, +}; + +fn main() { + // This example assumes you have set the `MISTRAL_API_KEY` environment variable. 
+ let client = Client::new(None, None, None, None).unwrap(); + + let model = Model::codestral_latest(); + let prompt = "def fibonacci(n):".to_string(); + let options = FimParams { + suffix: Some("\n    return result".to_string()), + temperature: Some(0.0), + ..Default::default() + }; + + let response = client.fim(model, prompt, Some(options)).unwrap(); + println!("Completion: {}", response.choices[0].message.content); +} diff --git a/examples/ocr.rs b/examples/ocr.rs new file mode 100644 index 0000000..f077e7d --- /dev/null +++ b/examples/ocr.rs @@ -0,0 +1,25 @@ +use mistralai_client::v1::{ + client::Client, + constants::Model, + ocr::{OcrDocument, OcrRequest}, +}; + +fn main() { + // This example assumes you have set the `MISTRAL_API_KEY` environment variable. + let client = Client::new(None, None, None, None).unwrap(); + + let request = OcrRequest { + model: Model::mistral_ocr_latest(), + document: OcrDocument::from_url("https://arxiv.org/pdf/2201.04234"), + pages: Some(vec![0]), + table_format: None, + include_image_base64: None, + image_limit: None, + }; + + let response = client.ocr(&request).unwrap(); + for page in &response.pages { + println!("--- Page {} ---", page.index); + println!("{}", &page.markdown[..200.min(page.markdown.len())]); + } +}