feat(chat)!: change safe_prompt, temperature & top_p to non-Option types

BREAKING CHANGE:
- `Chat::ChatParams.safe_prompt` & `Chat::ChatRequest.safe_prompt` are now `bool` instead of `Option<bool>`. Default is `false`.
- `Chat::ChatParams.temperature` & `Chat::ChatRequest.temperature` are now `f32` instead of `Option<f32>`. Default is `0.7`.
- `Chat::ChatParams.top_p` & `Chat::ChatRequest.top_p` are now `f32` instead of `Option<f32>`. Default is `1.0`.
This commit is contained in:
Ivan Gabriele
2024-06-07 15:53:25 +02:00
parent e61ace9a18
commit cf68a77320
11 changed files with 85 additions and 39 deletions

View File

@@ -19,7 +19,7 @@ async fn test_client_chat_async() {
"Guess the next word: \"Eiffel ...\"?",
)];
let options = ChatParams {
-            temperature: Some(0.0),
+            temperature: 0.0,
random_seed: Some(42),
..Default::default()
};
@@ -37,7 +37,12 @@ async fn test_client_chat_async() {
expect!(response.choices[0].finish_reason.clone()).to_be(ChatResponseChoiceFinishReason::Stop);
expect!(response.choices[0].message.role.clone()).to_be(ChatMessageRole::Assistant);
-    expect!(response.choices[0].message.content.clone()).to_start_with("Tower".to_string());
+    expect!(response.choices[0]
+        .message
+        .content
+        .clone()
+        .contains("Tower"))
+    .to_be(true);
expect!(response.usage.prompt_tokens).to_be_greater_than(0);
expect!(response.usage.completion_tokens).to_be_greater_than(0);
@@ -65,7 +70,7 @@ async fn test_client_chat_async_with_function_calling() {
"What's the current temperature in Paris?",
)];
let options = ChatParams {
-            temperature: Some(0.0),
+            temperature: 0.0,
random_seed: Some(42),
tool_choice: Some(ToolChoice::Any),
tools: Some(tools),

View File

@@ -19,7 +19,7 @@ fn test_client_chat() {
"Guess the next word: \"Eiffel ...\"?",
)];
let options = ChatParams {
-            temperature: Some(0.0),
+            temperature: 0.0,
random_seed: Some(42),
..Default::default()
};
@@ -31,7 +31,12 @@ fn test_client_chat() {
expect!(response.choices.len()).to_be(1);
expect!(response.choices[0].index).to_be(0);
expect!(response.choices[0].message.role.clone()).to_be(ChatMessageRole::Assistant);
-    expect!(response.choices[0].message.content.clone()).to_start_with("Tower".to_string());
+    expect!(response.choices[0]
+        .message
+        .content
+        .clone()
+        .contains("Tower"))
+    .to_be(true);
expect!(response.choices[0].finish_reason.clone()).to_be(ChatResponseChoiceFinishReason::Stop);
expect!(response.usage.prompt_tokens).to_be_greater_than(0);
expect!(response.usage.completion_tokens).to_be_greater_than(0);
@@ -59,7 +64,7 @@ fn test_client_chat_with_function_calling() {
"What's the current temperature in Paris?",
)];
let options = ChatParams {
-            temperature: Some(0.0),
+            temperature: 0.0,
random_seed: Some(42),
tool_choice: Some(ToolChoice::Auto),
tools: Some(tools),