feat!: add function calling support to client.chat() & client.chat_async()

BREAKING CHANGE: This version reworks most of the public API (module and type renames, new message constructors). Check the README examples for updated usage.
Author: Ivan Gabriele
Date: 2024-03-09 11:28:50 +01:00
parent 9430d42382
commit 74bf8a96ee
30 changed files with 1510 additions and 322 deletions
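The headline breaking changes, as reflected in the test diffs below: the chat_completion module is now chat, ChatCompletionParams is now ChatParams, and messages are built with the ChatMessage::new_user_message constructor instead of a struct literal. A minimal migration sketch based solely on those tests (the prompt string and model choice are illustrative):

use mistralai_client::v1::{
    chat::{ChatMessage, ChatParams},
    client::Client,
    constants::Model,
};

fn main() {
    // Passing four `None`s mirrors the tests; the client presumably
    // falls back to environment configuration (e.g. the API key).
    let client = Client::new(None, None, None, None).unwrap();

    // Before: ChatMessage { role: ChatMessageRole::user, content: "...".to_string() }
    // After:  the new constructor.
    let messages = vec![ChatMessage::new_user_message("Hello!")];

    // Before: ChatCompletionParams { ... } — now ChatParams.
    let options = ChatParams {
        temperature: Some(0.0),
        random_seed: Some(42),
        ..Default::default()
    };

    let response = client
        .chat(Model::OpenMistral7b, messages, Some(options))
        .unwrap();
    println!("{}", response.choices[0].message.content);
}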

tests/setup.rs (new file, +3)

@@ -0,0 +1,3 @@
+pub fn setup() {
+    let _ = env_logger::builder().is_test(true).try_init();
+}
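This shared helper initializes env_logger for the integration tests: is_test(true) routes log output through the test harness's capture, and try_init (rather than init) makes repeated calls harmless, since only the first global-logger installation can succeed and later ones simply return Err. A sketch of how a test file consumes it (the log::debug! call assumes the log crate is available as a dev-dependency):

// In any integration test file under tests/:
mod setup;

#[test]
fn some_test() {
    // Safe to call in every test: `try_init` ignores the error when
    // another test already installed the global logger.
    setup::setup();

    log::debug!("visible with RUST_LOG=debug and `cargo test -- --nocapture`");
}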


@@ -1,20 +1,24 @@
 use jrest::expect;
 use mistralai_client::v1::{
-    chat_completion::{ChatCompletionParams, ChatMessage, ChatMessageRole},
+    chat::{ChatMessage, ChatMessageRole, ChatParams, ChatResponseChoiceFinishReason},
     client::Client,
     constants::Model,
+    tool::{Tool, ToolChoice, ToolFunctionParameter, ToolFunctionParameterType},
 };
+mod setup;
 #[tokio::test]
 async fn test_client_chat_async() {
+    setup::setup();
     let client = Client::new(None, None, None, None).unwrap();
     let model = Model::OpenMistral7b;
-    let messages = vec![ChatMessage {
-        role: ChatMessageRole::user,
-        content: "Just guess the next word: \"Eiffel ...\"?".to_string(),
-    }];
-    let options = ChatCompletionParams {
+    let messages = vec![ChatMessage::new_user_message(
+        "Just guess the next word: \"Eiffel ...\"?",
+    )];
+    let options = ChatParams {
         temperature: Some(0.0),
         random_seed: Some(42),
         ..Default::default()
@@ -27,11 +31,70 @@ async fn test_client_chat_async() {
     expect!(response.model).to_be(Model::OpenMistral7b);
     expect!(response.object).to_be("chat.completion".to_string());
     expect!(response.choices.len()).to_be(1);
     expect!(response.choices[0].index).to_be(0);
-    expect!(response.choices[0].message.role.clone()).to_be(ChatMessageRole::assistant);
+    expect!(response.choices[0].finish_reason.clone()).to_be(ChatResponseChoiceFinishReason::Stop);
+    expect!(response.choices[0].message.role.clone()).to_be(ChatMessageRole::Assistant);
     expect!(response.choices[0].message.content.clone())
         .to_be("Tower. The Eiffel Tower is a famous landmark in Paris, France.".to_string());
     expect!(response.usage.prompt_tokens).to_be_greater_than(0);
     expect!(response.usage.completion_tokens).to_be_greater_than(0);
     expect!(response.usage.total_tokens).to_be_greater_than(0);
 }
+
+#[tokio::test]
+async fn test_client_chat_async_with_function_calling() {
+    setup::setup();
+
+    let tools = vec![Tool::new(
+        "get_city_temperature".to_string(),
+        "Get the current temperature in a city.".to_string(),
+        vec![ToolFunctionParameter::new(
+            "city".to_string(),
+            "The name of the city.".to_string(),
+            ToolFunctionParameterType::String,
+        )],
+    )];
+
+    let client = Client::new(None, None, None, None).unwrap();
+    let model = Model::MistralSmallLatest;
+    let messages = vec![ChatMessage::new_user_message(
+        "What's the current temperature in Paris?",
+    )];
+    let options = ChatParams {
+        temperature: Some(0.0),
+        random_seed: Some(42),
+        tool_choice: Some(ToolChoice::Any),
+        tools: Some(tools),
+        ..Default::default()
+    };
+
+    let response = client
+        .chat_async(model, messages, Some(options))
+        .await
+        .unwrap();
+
+    expect!(response.model).to_be(Model::MistralSmallLatest);
+    expect!(response.object).to_be("chat.completion".to_string());
+    expect!(response.choices.len()).to_be(1);
+    expect!(response.choices[0].index).to_be(0);
+    expect!(response.choices[0].finish_reason.clone())
+        .to_be(ChatResponseChoiceFinishReason::ToolCalls);
+    expect!(response.choices[0].message.role.clone()).to_be(ChatMessageRole::Assistant);
+    expect!(response.choices[0].message.content.clone()).to_be("".to_string());
+    // expect!(response.choices[0].message.tool_calls.clone()).to_be(Some(vec![ToolCall {
+    //     function: ToolCallFunction {
+    //         name: "get_city_temperature".to_string(),
+    //         arguments: "{\"city\": \"Paris\"}".to_string(),
+    //     },
+    // }]));
+    expect!(response.usage.prompt_tokens).to_be_greater_than(0);
+    expect!(response.usage.completion_tokens).to_be_greater_than(0);
+    expect!(response.usage.total_tokens).to_be_greater_than(0);
+}
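Putting the new pieces together outside of a test: a sketch of the full function-calling round trip, assuming tool_calls is an Option<Vec<ToolCall>> whose items expose function.name and JSON-encoded function.arguments, as the commented-out assertion above suggests:

use mistralai_client::v1::{
    chat::{ChatMessage, ChatParams},
    client::Client,
    constants::Model,
    tool::{Tool, ToolChoice, ToolFunctionParameter, ToolFunctionParameterType},
};

#[tokio::main]
async fn main() {
    let client = Client::new(None, None, None, None).unwrap();

    // Declare the function the model is allowed to call.
    let tools = vec![Tool::new(
        "get_city_temperature".to_string(),
        "Get the current temperature in a city.".to_string(),
        vec![ToolFunctionParameter::new(
            "city".to_string(),
            "The name of the city.".to_string(),
            ToolFunctionParameterType::String,
        )],
    )];

    let options = ChatParams {
        tool_choice: Some(ToolChoice::Any), // `Any` forces a tool call (see note further below)
        tools: Some(tools),
        ..Default::default()
    };

    let messages = vec![ChatMessage::new_user_message(
        "What's the current temperature in Paris?",
    )];
    let response = client
        .chat_async(Model::MistralSmallLatest, messages, Some(options))
        .await
        .unwrap();

    // Assumption: `tool_calls` mirrors the shape in the commented-out
    // assertion above — each call carries a name plus JSON arguments.
    if let Some(calls) = &response.choices[0].message.tool_calls {
        for call in calls {
            println!("call {} with {}", call.function.name, call.function.arguments);
        }
    }
}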


@@ -1,40 +1,40 @@
-use futures::stream::StreamExt;
-use jrest::expect;
-use mistralai_client::v1::{
-    chat_completion::{ChatCompletionParams, ChatMessage, ChatMessageRole},
-    client::Client,
-    constants::Model,
-};
+// use futures::stream::StreamExt;
+// use jrest::expect;
+// use mistralai_client::v1::{
+//     chat_completion::{ChatParams, ChatMessage, ChatMessageRole},
+//     client::Client,
+//     constants::Model,
+// };
-#[tokio::test]
-async fn test_client_chat_stream() {
-    let client = Client::new(None, None, None, None).unwrap();
+// #[tokio::test]
+// async fn test_client_chat_stream() {
+//     let client = Client::new(None, None, None, None).unwrap();
-    let model = Model::OpenMistral7b;
-    let messages = vec![ChatMessage {
-        role: ChatMessageRole::user,
-        content: "Just guess the next word: \"Eiffel ...\"?".to_string(),
-    }];
-    let options = ChatCompletionParams {
-        temperature: Some(0.0),
-        random_seed: Some(42),
-        ..Default::default()
-    };
+//     let model = Model::OpenMistral7b;
+//     let messages = vec![ChatMessage::new_user_message(
+//         "Just guess the next word: \"Eiffel ...\"?",
+//     )];
+//     let options = ChatParams {
+//         temperature: Some(0.0),
+//         random_seed: Some(42),
+//         ..Default::default()
+//     };
-    let stream_result = client.chat_stream(model, messages, Some(options)).await;
-    let mut stream = stream_result.expect("Failed to create stream.");
-    while let Some(chunk_result) = stream.next().await {
-        match chunk_result {
-            Ok(chunk) => {
-                if chunk.choices[0].delta.role == Some(ChatMessageRole::assistant)
-                    || chunk.choices[0].finish_reason == Some("stop".to_string())
-                {
-                    expect!(chunk.choices[0].delta.content.len()).to_be(0);
-                } else {
-                    expect!(chunk.choices[0].delta.content.len()).to_be_greater_than(0);
-                }
-            }
-            Err(e) => eprintln!("Error processing chunk: {:?}", e),
-        }
-    }
-}
+//     let stream_result = client.chat_stream(model, messages, Some(options)).await;
+//     let mut stream = stream_result.expect("Failed to create stream.");
+//     while let Some(maybe_chunk_result) = stream.next().await {
+//         match maybe_chunk_result {
+//             Some(Ok(chunk)) => {
+//                 if chunk.choices[0].delta.role == Some(ChatMessageRole::Assistant)
+//                     || chunk.choices[0].finish_reason == Some("stop".to_string())
+//                 {
+//                     expect!(chunk.choices[0].delta.content.len()).to_be(0);
+//                 } else {
+//                     expect!(chunk.choices[0].delta.content.len()).to_be_greater_than(0);
+//                 }
+//             }
+//             Some(Err(error)) => eprintln!("Error processing chunk: {:?}", error),
+//             None => (),
+//         }
+//     }
+// }


@@ -1,20 +1,24 @@
 use jrest::expect;
 use mistralai_client::v1::{
-    chat_completion::{ChatCompletionParams, ChatMessage, ChatMessageRole},
+    chat::{ChatMessage, ChatMessageRole, ChatParams, ChatResponseChoiceFinishReason},
     client::Client,
     constants::Model,
+    tool::{Tool, ToolChoice, ToolFunctionParameter, ToolFunctionParameterType},
 };
+mod setup;
 #[test]
 fn test_client_chat() {
+    setup::setup();
     let client = Client::new(None, None, None, None).unwrap();
     let model = Model::OpenMistral7b;
-    let messages = vec![ChatMessage {
-        role: ChatMessageRole::user,
-        content: "Just guess the next word: \"Eiffel ...\"?".to_string(),
-    }];
-    let options = ChatCompletionParams {
+    let messages = vec![ChatMessage::new_user_message(
+        "Just guess the next word: \"Eiffel ...\"?",
+    )];
+    let options = ChatParams {
         temperature: Some(0.0),
         random_seed: Some(42),
         ..Default::default()
@@ -26,9 +30,53 @@ fn test_client_chat() {
     expect!(response.object).to_be("chat.completion".to_string());
     expect!(response.choices.len()).to_be(1);
     expect!(response.choices[0].index).to_be(0);
-    expect!(response.choices[0].message.role.clone()).to_be(ChatMessageRole::assistant);
+    expect!(response.choices[0].message.role.clone()).to_be(ChatMessageRole::Assistant);
     expect!(response.choices[0].message.content.clone())
         .to_be("Tower. The Eiffel Tower is a famous landmark in Paris, France.".to_string());
+    expect!(response.choices[0].finish_reason.clone()).to_be(ChatResponseChoiceFinishReason::Stop);
     expect!(response.usage.prompt_tokens).to_be_greater_than(0);
     expect!(response.usage.completion_tokens).to_be_greater_than(0);
     expect!(response.usage.total_tokens).to_be_greater_than(0);
 }
+
+#[test]
+fn test_client_chat_with_function_calling() {
+    setup::setup();
+
+    let tools = vec![Tool::new(
+        "get_city_temperature".to_string(),
+        "Get the current temperature in a city.".to_string(),
+        vec![ToolFunctionParameter::new(
+            "city".to_string(),
+            "The name of the city.".to_string(),
+            ToolFunctionParameterType::String,
+        )],
+    )];
+
+    let client = Client::new(None, None, None, None).unwrap();
+    let model = Model::MistralSmallLatest;
+    let messages = vec![ChatMessage::new_user_message(
+        "What's the current temperature in Paris?",
+    )];
+    let options = ChatParams {
+        temperature: Some(0.0),
+        random_seed: Some(42),
+        tool_choice: Some(ToolChoice::Auto),
+        tools: Some(tools),
+        ..Default::default()
+    };
+
+    let response = client.chat(model, messages, Some(options)).unwrap();
+
+    expect!(response.model).to_be(Model::MistralSmallLatest);
+    expect!(response.object).to_be("chat.completion".to_string());
+    expect!(response.choices.len()).to_be(1);
+    expect!(response.choices[0].index).to_be(0);
+    expect!(response.choices[0].message.role.clone()).to_be(ChatMessageRole::Assistant);
+    expect!(response.choices[0].message.content.clone()).to_be("".to_string());
+    expect!(response.choices[0].finish_reason.clone())
+        .to_be(ChatResponseChoiceFinishReason::ToolCalls);
+    expect!(response.usage.prompt_tokens).to_be_greater_than(0);
+    expect!(response.usage.completion_tokens).to_be_greater_than(0);
+    expect!(response.usage.total_tokens).to_be_greater_than(0);
+}
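Note the only substantive difference from the async variant above: this test passes ToolChoice::Auto where the async one passes ToolChoice::Any. Going by the Mistral API's tool_choice semantics (an assumption; this diff only shows the two variants in use), Any forces the model to call one of the provided tools, while Auto lets it choose between a tool call and a plain-text answer:

use mistralai_client::v1::{chat::ChatParams, tool::ToolChoice};

// Hypothetical helper illustrating the two policies exercised by the tests.
fn tool_params(force_tool_call: bool) -> ChatParams {
    ChatParams {
        // Any  -> the model must respond with a tool call.
        // Auto -> the model decides; it may answer in plain text instead.
        tool_choice: Some(if force_tool_call {
            ToolChoice::Any
        } else {
            ToolChoice::Auto
        }),
        ..Default::default()
    }
}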


@@ -9,12 +9,4 @@ async fn test_client_list_models_async() {
     expect!(response.object).to_be("list".to_string());
     expect!(response.data.len()).to_be_greater_than(0);
-    // let open_mistral_7b_data_item = response
-    //     .data
-    //     .iter()
-    //     .find(|item| item.id == "open-mistral-7b")
-    //     .unwrap();
-    // expect!(open_mistral_7b_data_item.id).to_be("open-mistral-7b".to_string());
 }


@@ -9,12 +9,4 @@ fn test_client_list_models() {
     expect!(response.object).to_be("list".to_string());
     expect!(response.data.len()).to_be_greater_than(0);
-    // let open_mistral_7b_data_item = response
-    //     .data
-    //     .iter()
-    //     .find(|item| item.id == "open-mistral-7b")
-    //     .unwrap();
-    // expect!(open_mistral_7b_data_item.id).to_be("open-mistral-7b".to_string());
 }