docs: add client.chat*() documentation

This commit is contained in:
Ivan Gabriele
2024-03-05 02:40:49 +01:00
parent 08b042506d
commit 72bae8817a
2 changed files with 83 additions and 3 deletions

View File

@@ -1 +1,4 @@
//! This crate provides easy bindings and types for MistralAI's API.
/// The v1 module contains the types and methods for the v1 API endpoints.
pub mod v1;

View File

@@ -70,6 +70,36 @@ impl Client {
})
}
/// Synchronously sends a chat completion request and returns the response.
///
/// # Arguments
///
/// * `model` - The [Model] to use for the chat completion.
/// * `messages` - A vector of [ChatMessage] to send as part of the chat.
/// * `options` - Optional [ChatCompletionParams] to customize the request.
///
/// # Returns
///
/// Returns a [Result] containing the `ChatCompletionResponse` if the request is successful,
/// or an [ApiError] if there is an error.
///
/// # Examples
///
/// ```
/// use mistralai_client::v1::{
/// chat_completion::{ChatMessage, ChatMessageRole},
/// client::Client,
/// constants::Model,
/// };
///
/// let client = Client::new(None, None, None, None).unwrap();
/// let messages = vec![ChatMessage {
/// role: ChatMessageRole::user,
/// content: "Hello, world!".to_string(),
/// }];
/// let response = client.chat(Model::OpenMistral7b, messages, None).unwrap();
/// println!("{}: {}", response.choices[0].message.role, response.choices[0].message.content);
/// ```
pub fn chat(
&self,
model: Model,
@@ -90,9 +120,14 @@ impl Client {
///
/// # Arguments
///
/// * `model` - The model to use for the chat completion.
/// * `messages` - A vector of `ChatMessage` to send as part of the chat.
/// * `options` - Optional `ChatCompletionParams` to customize the request.
/// * `model` - The [Model] to use for the chat completion.
/// * `messages` - A vector of [ChatMessage] to send as part of the chat.
/// * `options` - Optional [ChatCompletionParams] to customize the request.
///
/// # Returns
///
/// Returns a [Result] containing a `Stream` of `ChatCompletionStreamChunk` if the request is successful,
/// or an [ApiError] if there is an error.
///
/// # Examples
///
@@ -130,6 +165,48 @@ impl Client {
}
}
/// Asynchronously sends a chat completion request and returns a stream of message chunks.
///
/// # Arguments
///
/// * `model` - The [Model] to use for the chat completion.
/// * `messages` - A vector of [ChatMessage] to send as part of the chat.
/// * `options` - Optional [ChatCompletionParams] to customize the request.
///
/// # Returns
///
/// Returns a [Result] containing a `Stream` of `ChatCompletionStreamChunk` if the request is successful,
/// or an [ApiError] if there is an error.
///
/// # Examples
///
/// ```
/// use futures::stream::StreamExt;
/// use mistralai_client::v1::{
/// chat_completion::{ChatMessage, ChatMessageRole},
/// client::Client,
/// constants::Model,
/// };
///
/// #[tokio::main]
/// async fn main() {
/// let client = Client::new(None, None, None, None).unwrap();
/// let messages = vec![ChatMessage {
/// role: ChatMessageRole::user,
/// content: "Hello, world!".to_string(),
/// }];
/// let mut stream = client.chat_stream(Model::OpenMistral7b, messages, None).await.unwrap();
/// while let Some(chunk_result) = stream.next().await {
/// match chunk_result {
/// Ok(chunk) => {
/// print!("{}", chunk.choices[0].delta.content);
/// }
/// Err(error) => {
/// println!("Error: {}", error.message);
/// }
/// }
/// }
/// }
/// ```
pub async fn chat_stream(
&self,
model: Model,