- Rewrite README with all new endpoints and current model names
- Add available models table with constructors
- Add FIM and OCR examples
- Update all examples for string-based Model type
- Update streaming example for Option<String> delta content
- Use serde_json::json!() for tool schemas in examples
- Add .envrc to .gitignore
43 lines · 1.3 KiB · Rust
use futures::stream::StreamExt;
|
|
use mistralai_client::v1::{
|
|
chat::{ChatMessage, ChatParams},
|
|
client::Client,
|
|
constants::Model,
|
|
};
|
|
use std::io::{self, Write};
|
|
|
|
#[tokio::main]
|
|
async fn main() {
|
|
// This example suppose you have set the `MISTRAL_API_KEY` environment variable.
|
|
let client = Client::new(None, None, None, None).unwrap();
|
|
|
|
let model = Model::mistral_small_latest();
|
|
let messages = vec![ChatMessage::new_user_message("Tell me a short happy story.")];
|
|
let options = ChatParams {
|
|
temperature: Some(0.0),
|
|
random_seed: Some(42),
|
|
..Default::default()
|
|
};
|
|
|
|
let stream_result = client
|
|
.chat_stream(model, messages, Some(options))
|
|
.await
|
|
.unwrap();
|
|
stream_result
|
|
.for_each(|chunk_result| async {
|
|
match chunk_result {
|
|
Ok(chunks) => chunks.iter().for_each(|chunk| {
|
|
if let Some(content) = &chunk.choices[0].delta.content {
|
|
print!("{}", content);
|
|
io::stdout().flush().unwrap();
|
|
}
|
|
}),
|
|
Err(error) => {
|
|
eprintln!("Error processing chunk: {:?}", error)
|
|
}
|
|
}
|
|
})
|
|
.await;
|
|
println!();
|
|
}
|