Compare commits
47 Commits
| Author | SHA1 | Date |
|---|---|---|
| | 79a410b298 | |
| | 4a45ad337f | |
| | 2114916941 | |
| | 9bfbf2e9dc | |
| | 67aa5bbaef | |
| | 415fd98167 | |
| | 8e9f7a5386 | |
| | 3afeec1d58 | |
| | 0c097aa56d | |
| | e6539c0ccf | |
| | 30156c5273 | |
| | ecd0c3028f | |
| | 0df67b1b25 | |
| | f7d012b280 | |
| | 5b5bd2d68e | |
| | 2fc0642a5e | |
| | cf68a77320 | |
| | e61ace9a18 | |
| | 64034402ca | |
| | 85c3611afb | |
| | da5fe54115 | |
| | 7a5e0679c1 | |
| | 99d9d099e2 | |
| | 91fb775132 | |
| | 7474aa6730 | |
| | 6a99eca49c | |
| | fccd59c0cc | |
| | a463cb3106 | |
| | 8bee874bd4 | |
| | 16464a4c3d | |
| | a4c2d4623d | |
| | ab91154d35 | |
| | 74bf8a96ee | |
| | 9430d42382 | |
| | e7d844dce9 | |
| | 29566f7948 | |
| | 72bae8817a | |
| | 08b042506d | |
| | efcd93953a | |
| | ea99a075ef | |
| | ccf3d1431a | |
| | a8bfb5333f | |
| | ef5d475e2d | |
| | 5217fcfb94 | |
| | 6b1cc5c058 | |
| | 4a4219d3ea | |
| | f91e794d71 | |
15  .github/ISSUE_TEMPLATE/bug_report.md  vendored

@@ -1,17 +1,16 @@
---
name: Bug report
about: Create a report to help us improve
title: ''
labels: ''
assignees: ''
title: ""
labels: "bug"
assignees: ""
---

**Describe the bug**

A clear and concise description of what the bug is.
...

**To Reproduce**
**Reproduction**

Steps to reproduce the behavior:

@@ -20,7 +19,7 @@ Steps to reproduce the behavior:

**Expected behavior**

A clear and concise description of what you expected to happen.
...

**Screenshots**

@@ -32,4 +31,4 @@ If applicable, what version did you use?

**Environment**

Add useful information about your configuration and environment here.
If applicable, add relevant information about your config and environment here.
25  .github/ISSUE_TEMPLATE/feature_request.md  vendored

@@ -1,24 +1,19 @@
---
name: Feature request
about: Suggest an idea for this project
title: ''
labels: ''
assignees: ''
about: Suggest a new idea for the project.
title: ""
labels: "enhancement"
assignees: ""
---

**Is your feature request related to a problem? Please describe.**
**Is your feature request related to some problems?**

A clear and concise description of what the problem is. Ex. I'm always frustrated when [...]
- _Ex. I'm always frustrated when..._

**Describe the solution you'd like**
**What are the solutions you'd like?**

A clear and concise description of what you want to happen.
- _Ex. A new option to..._

**Describe alternatives you've considered**
**Anything else?**

A clear and concise description of any alternative solutions or features you've considered.

**Additional context**

Add any other context or screenshots about the feature request here.
- ...
43  .github/workflows/test.yml  vendored

@@ -1,15 +1,13 @@
name: Test

on: push
on:
  pull_request:
  push:

jobs:
  test:
    name: Test
    runs-on: ubuntu-latest
    container:
      image: xd009642/tarpaulin
      # https://github.com/xd009642/tarpaulin#github-actions
      options: --security-opt seccomp=unconfined
    steps:
      - name: Checkout
        uses: actions/checkout@v4
@@ -17,12 +15,45 @@ jobs:
        uses: actions-rs/toolchain@v1
        with:
          toolchain: 1.76.0
      - name: Install cargo-llvm-cov
        uses: taiki-e/install-action@cargo-llvm-cov
      - name: Run tests (with coverage)
        run: make test-cover
        run: cargo llvm-cov --lcov --output-path ./lcov.info
        env:
          MISTRAL_API_KEY: ${{ secrets.MISTRAL_API_KEY }}
      - name: Upload tests coverage
        uses: codecov/codecov-action@v4
        with:
          fail_ci_if_error: true
          files: ./lcov.info
          token: ${{ secrets.CODECOV_TOKEN }}

  test_documentation:
    name: Test Documentation
    runs-on: ubuntu-latest
    steps:
      - name: Checkout
        uses: actions/checkout@v4
      - name: Setup Rust
        uses: actions-rs/toolchain@v1
        with:
          toolchain: 1.76.0
      - name: Run documentation tests
        run: make test-doc
        env:
          MISTRAL_API_KEY: ${{ secrets.MISTRAL_API_KEY }}

  test_examples:
    name: Test Examples
    runs-on: ubuntu-latest
    steps:
      - name: Checkout
        uses: actions/checkout@v4
      - name: Setup Rust
        uses: actions-rs/toolchain@v1
        with:
          toolchain: 1.76.0
      - name: Run examples
        run: make test-examples
        env:
          MISTRAL_API_KEY: ${{ secrets.MISTRAL_API_KEY }}
91  CHANGELOG.md

@@ -1,3 +1,94 @@
## [0.13.0](https://github.com/ivangabriele/mistralai-client-rs/compare/v0.12.0...v) (2024-08-21)

### ⚠ BREAKING CHANGES

* **client:** `v1::model_list::ModelListData` struct has been updated.

### Bug Fixes

* **client:** remove the `Content-Type` from the headers of the reqwest builders. ([#14](https://github.com/ivangabriele/mistralai-client-rs/issues/14)) ([9bfbf2e](https://github.com/ivangabriele/mistralai-client-rs/commit/9bfbf2e9dc7b48103ac56923fb8b3ac9a5e2d9cf)), closes [#13](https://github.com/ivangabriele/mistralai-client-rs/issues/13)
* **client:** update ModelListData struct following API changes ([2114916](https://github.com/ivangabriele/mistralai-client-rs/commit/2114916941e1ff5aa242290df5f092c0d4954afc))

## [0.12.0](https://github.com/ivangabriele/mistralai-client-rs/compare/v0.11.0...v) (2024-07-24)

### Features

* implement the Debug trait for Client ([#11](https://github.com/ivangabriele/mistralai-client-rs/issues/11)) ([3afeec1](https://github.com/ivangabriele/mistralai-client-rs/commit/3afeec1d586022e43c7b10906acec5e65927ba7d))
* mark Function trait as Send ([#12](https://github.com/ivangabriele/mistralai-client-rs/issues/12)) ([8e9f7a5](https://github.com/ivangabriele/mistralai-client-rs/commit/8e9f7a53863879b2ad618e9e5707b198e4f3b135))

## [0.11.0](https://github.com/ivangabriele/mistralai-client-rs/compare/v0.10.0...v) (2024-06-22)

### Features

* **constants:** add OpenMixtral8x22b, MistralTiny & CodestralLatest to Model enum ([ecd0c30](https://github.com/ivangabriele/mistralai-client-rs/commit/ecd0c3028fdcfab32b867eb1eed86182f5f4ab81))

### Bug Fixes

* **chat:** implement Clone trait for ChatParams & ResponseFormat ([0df67b1](https://github.com/ivangabriele/mistralai-client-rs/commit/0df67b1b2571fb04b636ce015a2daabe629ff352))

## [0.10.0](https://github.com/ivangabriele/mistralai-client-rs/compare/v0.9.0...v) (2024-06-07)

### ⚠ BREAKING CHANGES

* **chat:** - `Chat::ChatParams.safe_prompt` & `Chat::ChatRequest.safe_prompt` are now `bool` instead of `Option<bool>`. Default is `false`.
- `Chat::ChatParams.temperature` & `Chat::ChatRequest.temperature` are now `f32` instead of `Option<f32>`. Default is `0.7`.
- `Chat::ChatParams.top_p` & `Chat::ChatRequest.top_p` are now `f32` instead of `Option<f32>`. Default is `1.0`.

### Features

* **chat:** add response_format for JSON return values ([85c3611](https://github.com/ivangabriele/mistralai-client-rs/commit/85c3611afbbe8df30dfc7512cc381ed304ce4024))
* **chat:** add the 'system' and 'tool' message roles ([#10](https://github.com/ivangabriele/mistralai-client-rs/issues/10)) ([2fc0642](https://github.com/ivangabriele/mistralai-client-rs/commit/2fc0642a5e4c024b15710acaab7735480e8dfe6a))
* **chat:** change safe_prompt, temperature & top_p to non-Option types ([cf68a77](https://github.com/ivangabriele/mistralai-client-rs/commit/cf68a773201ebe0e802face52af388711acf0c27))

### Bug Fixes

* **chat:** skip serializing tool_calls if null, to avoid 422 error ([da5fe54](https://github.com/ivangabriele/mistralai-client-rs/commit/da5fe54115ce622379776661a440e2708b24810c))

## [0.9.0](https://github.com/ivangabriele/mistralai-client-rs/compare/v0.8.0...v) (2024-04-13)

### ⚠ BREAKING CHANGES

* `Model.OpenMistral8x7b` has been renamed to `Model.OpenMixtral8x7b`.

### Bug Fixes

* **deps:** update rust crate reqwest to 0.12.0 ([#6](https://github.com/ivangabriele/mistralai-client-rs/issues/6)) ([fccd59c](https://github.com/ivangabriele/mistralai-client-rs/commit/fccd59c0cc783edddec1b404363faabb009eecd6))
* fix typo in OpenMixtral8x7b model name ([#8](https://github.com/ivangabriele/mistralai-client-rs/issues/8)) ([6a99eca](https://github.com/ivangabriele/mistralai-client-rs/commit/6a99eca49c0cc8e3764a56f6dfd7762ec44a4c3b))

## [0.8.0](https://github.com/ivangabriele/mistralai-client-rs/compare/v0.7.0...v) (2024-03-09)

### ⚠ BREAKING CHANGES

* Too many to count in this version. Check the README examples.

### Features

* add function calling support to client.chat() & client.chat_async() ([74bf8a9](https://github.com/ivangabriele/mistralai-client-rs/commit/74bf8a96ee31f9d54ee3d7404619e803a182918b))

## [0.7.0](https://github.com/ivangabriele/mistralai-client-rs/compare/v0.6.0...v) (2024-03-05)

### ⚠ BREAKING CHANGES

* - Rename `ClientError.ApiKeyError` to `MissingApiKey`.
- Rename `ClientError.ReadResponseTextError` to `ClientError.UnreadableResponseText`.

### Bug Fixes

* fix failure when api key as param and not env ([ef5d475](https://github.com/ivangabriele/mistralai-client-rs/commit/ef5d475e2d0e3fe040c44d6adabf7249e9962835))

## [0.6.0](https://github.com/ivangabriele/mistralai-client-rs/compare/v0.5.0...v) (2024-03-04)

### ⚠ BREAKING CHANGES

* You can't set the `stream` option for `client.chat*()`.

Either use `client.chat_stream()` if you want to use streams
or use `client.chat()` / `client.chat_async()` otherwise.

### Features

* add client.chat_stream() method ([4a4219d](https://github.com/ivangabriele/mistralai-client-rs/commit/4a4219d3eaa8f0ae953ee6182b36bf464d1c4a21))

## [0.5.0](https://github.com/ivangabriele/mistralai-client-rs/compare/v0.4.0...v) (2024-03-04)
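As an illustrative aside (a minimal sketch, not taken from the repository), the 0.10.0 breaking change above means chat options are now built with plain values rather than `Option`s, assuming the `ChatParams` shape introduced in `src/v1/chat.rs` further down this diff:

```rs
use mistralai_client::v1::chat::ChatParams;

fn main() {
    // Since 0.10.0, `safe_prompt`, `temperature` and `top_p` are plain values
    // (defaults: false, 0.7 and 1.0) instead of Options.
    let options = ChatParams {
        temperature: 0.2,
        top_p: 0.9,
        safe_prompt: false,
        ..Default::default()
    };
    println!("{:?}", options);
}
```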
@@ -4,7 +4,10 @@
- [Requirements](#requirements)
- [First setup](#first-setup)
- [Optional requirements](#optional-requirements)
- [Local Development](#local-development)
- [Test](#test)
- [Documentation](#documentation)
- [Readme](#readme)
- [Code of Conduct](#code-of-conduct)
- [Commit Message Format](#commit-message-format)

@@ -38,7 +41,10 @@ Then edit the `.env` file to set your `MISTRAL_API_KEY`.

### Optional requirements

- [cargo-watch](https://github.com/watchexec/cargo-watch#install) for `make test-*-watch`.
- [cargo-llvm-cov](https://github.com/taiki-e/cargo-llvm-cov?tab=readme-ov-file#installation) for `make test-cover`
- [cargo-watch](https://github.com/watchexec/cargo-watch#install) for `make test-watch`.

## Local Development

### Test

@@ -52,6 +58,16 @@ or

make test-watch
```

## Documentation

### Readme

> [!IMPORTANT]
> Do not edit the `README.md` file directly. It is generated from the `README.template.md` file.

1. Edit the `README.template.md` file.
2. Run `make readme` to generate/update the `README.md` file.

## Code of Conduct

Help us keep this project open and inclusive. Please read and follow our [Code of Conduct](./CODE_OF_CONDUCT.md).
11  Cargo.toml

@@ -2,7 +2,7 @@
name = "mistralai-client"
description = "Mistral AI API client library for Rust (unofficial)."
license = "Apache-2.0"
version = "0.5.0"
version = "0.13.0"

edition = "2021"
rust-version = "1.76.0"

@@ -15,11 +15,18 @@ readme = "README.md"
repository = "https://github.com/ivangabriele/mistralai-client-rs"

[dependencies]
reqwest = { version = "0.11.24", features = ["json", "blocking"] }
async-stream = "0.3.5"
async-trait = "0.1.77"
env_logger = "0.11.3"
futures = "0.3.30"
log = "0.4.21"
reqwest = { version = "0.12.0", features = ["json", "blocking", "stream"] }
serde = { version = "1.0.197", features = ["derive"] }
serde_json = "1.0.114"
strum = "0.26.1"
thiserror = "1.0.57"
tokio = { version = "1.36.0", features = ["full"] }
tokio-stream = "0.1.14"

[dev-dependencies]
jrest = "0.2.3"
58  Makefile

@@ -1,9 +1,20 @@
SHELL := /bin/bash

.PHONY: test
.PHONY: doc readme test

define source_env_if_not_ci
	@if [ -z "$${CI}" ]; then \
		if [ -f ./.env ]; then \
			source ./.env; \
		else \
			echo "No .env file found"; \
			exit 1; \
		fi \
	fi
endef

define RELEASE_TEMPLATE
	conventional-changelog -p conventionalcommits -i ./CHANGELOG.md -s
	npx conventional-changelog-cli -p conventionalcommits -i ./CHANGELOG.md -s
	git add .
	git commit -m "docs(changelog): update"
	git push origin HEAD
@@ -11,18 +22,51 @@ define RELEASE_TEMPLATE
	git push origin HEAD --tags
endef

doc:
	cargo doc
	open ./target/doc/mistralai_client/index.html

readme:
	@echo "Generating README.md from template..."
	@> README.md # Clear README.md content before starting
	@while IFS= read -r line || [[ -n "$$line" ]]; do \
		if [[ $$line == *"<CODE>"* && $$line == *"</CODE>"* ]]; then \
			example_path=$$(echo $$line | sed -n 's/.*<CODE>\(.*\)<\/CODE>.*/\1/p'); \
			if [ -f $$example_path ]; then \
				echo '```rs' >> README.md; \
				cat $$example_path >> README.md; \
				echo '```' >> README.md; \
			else \
				echo "Error: Example $$example_path not found." >&2; \
			fi; \
		else \
			echo "$$line" >> README.md; \
		fi; \
	done < README.template.md
	@echo "README.md has been generated."

release-patch:
	$(call RELEASE_TEMPLATE,patch)

release-minor:
	$(call RELEASE_TEMPLATE,minor)

release-major:
	$(call RELEASE_TEMPLATE,major)

test:
	@source ./.env && cargo test --all-targets --no-fail-fast
	@$(source_env_if_not_ci) && \
		cargo test --no-fail-fast
test-cover:
	cargo tarpaulin --all-targets --frozen --no-fail-fast --out Xml --skip-clean
	@$(source_env_if_not_ci) && \
		cargo llvm-cov
test-doc:
	@$(source_env_if_not_ci) && \
		cargo test --doc --no-fail-fast
test-examples:
	@$(source_env_if_not_ci) && \
		for example in $$(ls examples/*.rs | sed 's/examples\/\(.*\)\.rs/\1/'); do \
			echo "Running $$example"; \
			cargo run --example $$example; \
		done
test-watch:
	cargo watch -x "test -- --all-targets --nocapture"
	@source ./.env && \
		cargo watch -x "test -- --nocapture"
328  README.md

@@ -7,6 +7,10 @@

Rust client for the Mistral AI API.

> [!IMPORTANT]
> While we are in v0, minor versions may introduce breaking changes.
> Please, refer to the [CHANGELOG.md](./CHANGELOG.md) for more information.

---

- [Supported APIs](#supported-apis)

@@ -15,13 +19,16 @@ Rust client for the Mistral AI API.
- [As an environment variable](#as-an-environment-variable)
- [As a client argument](#as-a-client-argument)
- [Usage](#usage)
- [Chat without streaming](#chat-without-streaming)
- [Chat without streaming (async)](#chat-without-streaming-async)
- [Chat with streaming](#chat-with-streaming)
- [Chat](#chat)
- [Chat (async)](#chat-async)
- [Chat with streaming (async)](#chat-with-streaming-async)
- [Chat with Function Calling](#chat-with-function-calling)
- [Chat with Function Calling (async)](#chat-with-function-calling-async)
- [Embeddings](#embeddings)
- [Embeddings (async)](#embeddings-async)
- [List models](#list-models)
- [List models (async)](#list-models-async)
- [Contributing](#contributing)

---

@@ -29,13 +36,13 @@ Rust client for the Mistral AI API.

- [x] Chat without streaming
- [x] Chat without streaming (async)
- [ ] Chat with streaming
- [x] Chat with streaming
- [x] Embedding
- [x] Embedding (async)
- [x] List models
- [x] List models (async)
- [ ] Function Calling
- [ ] Function Calling (async)
- [x] Function Calling
- [x] Function Calling (async)

## Installation

@@ -53,6 +60,18 @@ You can get your Mistral API Key there: <https://docs.mistral.ai/#api-access>.

Just set the `MISTRAL_API_KEY` environment variable.

```rs
use mistralai_client::v1::client::Client;

fn main() {
    let client = Client::new(None, None, None, None);
}
```

```sh
MISTRAL_API_KEY=your_api_key cargo run
```

#### As a client argument

```rs
@@ -67,11 +86,11 @@ fn main() {

## Usage

### Chat without streaming
### Chat

```rs
use mistralai_client::v1::{
    chat_completion::{ChatCompletionMessage, ChatCompletionMessageRole, ChatCompletionRequestOptions},
    chat::{ChatMessage, ChatMessageRole, ChatParams},
    client::Client,
    constants::Model,
};
@@ -81,27 +100,28 @@ fn main() {
    let client = Client::new(None, None, None, None).unwrap();

    let model = Model::OpenMistral7b;
    let messages = vec![ChatCompletionMessage {
        role: ChatCompletionMessageRole::user,
    let messages = vec![ChatMessage {
        role: ChatMessageRole::User,
        content: "Just guess the next word: \"Eiffel ...\"?".to_string(),
        tool_calls: None,
    }];
    let options = ChatCompletionRequestOptions {
        temperature: Some(0.0),
    let options = ChatParams {
        temperature: 0.0,
        random_seed: Some(42),
        ..Default::default()
    };

    let result = client.chat(model, messages, Some(options)).unwrap();
    println!("Assistant: {}", result.choices[0].message.content);
    // => "Assistant: Tower. [...]"
    // => "Assistant: Tower. The Eiffel Tower is a famous landmark in Paris, France."
}
```

### Chat without streaming (async)
### Chat (async)

```rs
use mistralai_client::v1::{
    chat_completion::{ChatCompletionMessage, ChatCompletionMessageRole, ChatCompletionRequestOptions},
    chat::{ChatMessage, ChatMessageRole, ChatParams},
    client::Client,
    constants::Model,
};
@@ -112,25 +132,234 @@ async fn main() {
    let client = Client::new(None, None, None, None).unwrap();

    let model = Model::OpenMistral7b;
    let messages = vec![ChatCompletionMessage {
        role: ChatCompletionMessageRole::user,
    let messages = vec![ChatMessage {
        role: ChatMessageRole::User,
        content: "Just guess the next word: \"Eiffel ...\"?".to_string(),
        tool_calls: None,
    }];
    let options = ChatCompletionRequestOptions {
        temperature: Some(0.0),
    let options = ChatParams {
        temperature: 0.0,
        random_seed: Some(42),
        ..Default::default()
    };

    let result = client.chat_async(model, messages, Some(options)).await.unwrap();
    println!("Assistant: {}", result.choices[0].message.content);
    // => "Assistant: Tower. [...]"
    let result = client
        .chat_async(model, messages, Some(options))
        .await
        .unwrap();
    println!(
        "{:?}: {}",
        result.choices[0].message.role, result.choices[0].message.content
    );
    // => "Assistant: Tower. The Eiffel Tower is a famous landmark in Paris, France."
}
```

### Chat with streaming
### Chat with streaming (async)

_In progress._
```rs
use futures::stream::StreamExt;
use mistralai_client::v1::{
    chat::{ChatMessage, ChatMessageRole, ChatParams},
    client::Client,
    constants::Model,
};
use std::io::{self, Write};

#[tokio::main]
async fn main() {
    // This example suppose you have set the `MISTRAL_API_KEY` environment variable.
    let client = Client::new(None, None, None, None).unwrap();

    let model = Model::OpenMistral7b;
    let messages = vec![ChatMessage {
        role: ChatMessageRole::User,
        content: "Tell me a short happy story.".to_string(),
        tool_calls: None,
    }];
    let options = ChatParams {
        temperature: 0.0,
        random_seed: Some(42),
        ..Default::default()
    };

    let stream_result = client
        .chat_stream(model, messages, Some(options))
        .await
        .unwrap();
    stream_result
        .for_each(|chunk_result| async {
            match chunk_result {
                Ok(chunks) => chunks.iter().for_each(|chunk| {
                    print!("{}", chunk.choices[0].delta.content);
                    io::stdout().flush().unwrap();
                    // => "Once upon a time, [...]"
                }),
                Err(error) => {
                    eprintln!("Error processing chunk: {:?}", error)
                }
            }
        })
        .await;
    print!("\n") // To persist the last chunk output.
}
```

### Chat with Function Calling

```rs
use mistralai_client::v1::{
    chat::{ChatMessage, ChatMessageRole, ChatParams},
    client::Client,
    constants::Model,
    tool::{Function, Tool, ToolChoice, ToolFunctionParameter, ToolFunctionParameterType},
};
use serde::Deserialize;
use std::any::Any;

#[derive(Debug, Deserialize)]
struct GetCityTemperatureArguments {
    city: String,
}

struct GetCityTemperatureFunction;
#[async_trait::async_trait]
impl Function for GetCityTemperatureFunction {
    async fn execute(&self, arguments: String) -> Box<dyn Any + Send> {
        // Deserialize arguments, perform the logic, and return the result
        let GetCityTemperatureArguments { city } = serde_json::from_str(&arguments).unwrap();

        let temperature = match city.as_str() {
            "Paris" => "20°C",
            _ => "Unknown city",
        };

        Box::new(temperature.to_string())
    }
}

fn main() {
    let tools = vec![Tool::new(
        "get_city_temperature".to_string(),
        "Get the current temperature in a city.".to_string(),
        vec![ToolFunctionParameter::new(
            "city".to_string(),
            "The name of the city.".to_string(),
            ToolFunctionParameterType::String,
        )],
    )];

    // This example suppose you have set the `MISTRAL_API_KEY` environment variable.
    let mut client = Client::new(None, None, None, None).unwrap();
    client.register_function(
        "get_city_temperature".to_string(),
        Box::new(GetCityTemperatureFunction),
    );

    let model = Model::MistralSmallLatest;
    let messages = vec![ChatMessage {
        role: ChatMessageRole::User,
        content: "What's the temperature in Paris?".to_string(),
        tool_calls: None,
    }];
    let options = ChatParams {
        temperature: 0.0,
        random_seed: Some(42),
        tool_choice: Some(ToolChoice::Auto),
        tools: Some(tools),
        ..Default::default()
    };

    client.chat(model, messages, Some(options)).unwrap();
    let temperature = client
        .get_last_function_call_result()
        .unwrap()
        .downcast::<String>()
        .unwrap();
    println!("The temperature in Paris is: {}.", temperature);
    // => "The temperature in Paris is: 20°C."
}
```

### Chat with Function Calling (async)

```rs
use mistralai_client::v1::{
    chat::{ChatMessage, ChatMessageRole, ChatParams},
    client::Client,
    constants::Model,
    tool::{Function, Tool, ToolChoice, ToolFunctionParameter, ToolFunctionParameterType},
};
use serde::Deserialize;
use std::any::Any;

#[derive(Debug, Deserialize)]
struct GetCityTemperatureArguments {
    city: String,
}

struct GetCityTemperatureFunction;
#[async_trait::async_trait]
impl Function for GetCityTemperatureFunction {
    async fn execute(&self, arguments: String) -> Box<dyn Any + Send> {
        // Deserialize arguments, perform the logic, and return the result
        let GetCityTemperatureArguments { city } = serde_json::from_str(&arguments).unwrap();

        let temperature = match city.as_str() {
            "Paris" => "20°C",
            _ => "Unknown city",
        };

        Box::new(temperature.to_string())
    }
}

#[tokio::main]
async fn main() {
    let tools = vec![Tool::new(
        "get_city_temperature".to_string(),
        "Get the current temperature in a city.".to_string(),
        vec![ToolFunctionParameter::new(
            "city".to_string(),
            "The name of the city.".to_string(),
            ToolFunctionParameterType::String,
        )],
    )];

    // This example suppose you have set the `MISTRAL_API_KEY` environment variable.
    let mut client = Client::new(None, None, None, None).unwrap();
    client.register_function(
        "get_city_temperature".to_string(),
        Box::new(GetCityTemperatureFunction),
    );

    let model = Model::MistralSmallLatest;
    let messages = vec![ChatMessage {
        role: ChatMessageRole::User,
        content: "What's the temperature in Paris?".to_string(),
        tool_calls: None,
    }];
    let options = ChatParams {
        temperature: 0.0,
        random_seed: Some(42),
        tool_choice: Some(ToolChoice::Auto),
        tools: Some(tools),
        ..Default::default()
    };

    client
        .chat_async(model, messages, Some(options))
        .await
        .unwrap();
    let temperature = client
        .get_last_function_call_result()
        .unwrap()
        .downcast::<String>()
        .unwrap();
    println!("The temperature in Paris is: {}.", temperature);
    // => "The temperature in Paris is: 20°C."
}
```

### Embeddings

@@ -139,18 +368,18 @@ use mistralai_client::v1::{client::Client, constants::EmbedModel};

fn main() {
    // This example suppose you have set the `MISTRAL_API_KEY` environment variable.
    let client: Client = Client::new(None, None, None, None).unwrap();
    let client: Client = Client::new(None, None, None, None).unwrap();

    let model = EmbedModel::MistralEmbed;
    let input = vec!["Embed this sentence.", "As well as this one."]
        .iter()
        .map(|s| s.to_string())
        .collect();
    let options = None;
    let model = EmbedModel::MistralEmbed;
    let input = vec!["Embed this sentence.", "As well as this one."]
        .iter()
        .map(|s| s.to_string())
        .collect();
    let options = None;

    let response = client.embeddings(model, input, options).unwrap();
    println!("Embeddings: {:?}", response.data);
    // => "Embeddings: [{...}, {...}]"
    let response = client.embeddings(model, input, options).unwrap();
    println!("First Embedding: {:?}", response.data[0]);
    // => "First Embedding: {...}"
}
```

@@ -162,18 +391,21 @@ use mistralai_client::v1::{client::Client, constants::EmbedModel};
#[tokio::main]
async fn main() {
    // This example suppose you have set the `MISTRAL_API_KEY` environment variable.
    let client: Client = Client::new(None, None, None, None).unwrap();
    let client: Client = Client::new(None, None, None, None).unwrap();

    let model = EmbedModel::MistralEmbed;
    let input = vec!["Embed this sentence.", "As well as this one."]
        .iter()
        .map(|s| s.to_string())
        .collect();
    let options = None;
    let model = EmbedModel::MistralEmbed;
    let input = vec!["Embed this sentence.", "As well as this one."]
        .iter()
        .map(|s| s.to_string())
        .collect();
    let options = None;

    let response = client.embeddings_async(model, input, options).await.unwrap();
    println!("Embeddings: {:?}", response.data);
    // => "Embeddings: [{...}, {...}]"
    let response = client
        .embeddings_async(model, input, options)
        .await
        .unwrap();
    println!("First Embedding: {:?}", response.data[0]);
    // => "First Embedding: {...}"
}
```

@@ -200,10 +432,14 @@ use mistralai_client::v1::client::Client;
#[tokio::main]
async fn main() {
    // This example suppose you have set the `MISTRAL_API_KEY` environment variable.
    let client = Client::new(None, None, None, None).await.unwrap();
    let client = Client::new(None, None, None, None).unwrap();

    let result = client.list_models_async().unwrap();
    let result = client.list_models_async().await.unwrap();
    println!("First Model ID: {:?}", result.data[0].id);
    // => "First Model ID: open-mistral-7b"
}
```

## Contributing

Please read [CONTRIBUTING.md](./CONTRIBUTING.md) for details on how to contribute to this library.
127  README.template.md  Normal file

@@ -0,0 +1,127 @@
# Mistral AI Rust Client

[](https://crates.io/crates/mistralai-client)
[](https://docs.rs/mistralai-client/latest/mistralai-client)
[](https://github.com/ivangabriele/mistralai-client-rs/actions?query=branch%3Amain+workflow%3ATest++)
[](https://app.codecov.io/github/ivangabriele/mistralai-client-rs)

Rust client for the Mistral AI API.

> [!IMPORTANT]
> While we are in v0, minor versions may introduce breaking changes.
> Please, refer to the [CHANGELOG.md](./CHANGELOG.md) for more information.

---

- [Supported APIs](#supported-apis)
- [Installation](#installation)
- [Mistral API Key](#mistral-api-key)
- [As an environment variable](#as-an-environment-variable)
- [As a client argument](#as-a-client-argument)
- [Usage](#usage)
- [Chat](#chat)
- [Chat (async)](#chat-async)
- [Chat with streaming (async)](#chat-with-streaming-async)
- [Chat with Function Calling](#chat-with-function-calling)
- [Chat with Function Calling (async)](#chat-with-function-calling-async)
- [Embeddings](#embeddings)
- [Embeddings (async)](#embeddings-async)
- [List models](#list-models)
- [List models (async)](#list-models-async)
- [Contributing](#contributing)

---

## Supported APIs

- [x] Chat without streaming
- [x] Chat without streaming (async)
- [x] Chat with streaming
- [x] Embedding
- [x] Embedding (async)
- [x] List models
- [x] List models (async)
- [x] Function Calling
- [x] Function Calling (async)

## Installation

You can install the library in your project using:

```sh
cargo add mistralai-client
```

### Mistral API Key

You can get your Mistral API Key there: <https://docs.mistral.ai/#api-access>.

#### As an environment variable

Just set the `MISTRAL_API_KEY` environment variable.

```rs
use mistralai_client::v1::client::Client;

fn main() {
    let client = Client::new(None, None, None, None);
}
```

```sh
MISTRAL_API_KEY=your_api_key cargo run
```

#### As a client argument

```rs
use mistralai_client::v1::client::Client;

fn main() {
    let api_key = "your_api_key";

    let client = Client::new(Some(api_key), None, None, None).unwrap();
}
```

## Usage

### Chat

<CODE>examples/chat.rs</CODE>

### Chat (async)

<CODE>examples/chat_async.rs</CODE>

### Chat with streaming (async)

<CODE>examples/chat_with_streaming.rs</CODE>

### Chat with Function Calling

<CODE>examples/chat_with_function_calling.rs</CODE>

### Chat with Function Calling (async)

<CODE>examples/chat_with_function_calling_async.rs</CODE>

### Embeddings

<CODE>examples/embeddings.rs</CODE>

### Embeddings (async)

<CODE>examples/embeddings_async.rs</CODE>

### List models

<CODE>examples/list_models.rs</CODE>

### List models (async)

<CODE>examples/list_models_async.rs</CODE>

## Contributing

Please read [CONTRIBUTING.md](./CONTRIBUTING.md) for details on how to contribute to this library.
26  examples/chat.rs  Normal file

@@ -0,0 +1,26 @@
use mistralai_client::v1::{
    chat::{ChatMessage, ChatMessageRole, ChatParams},
    client::Client,
    constants::Model,
};

fn main() {
    // This example suppose you have set the `MISTRAL_API_KEY` environment variable.
    let client = Client::new(None, None, None, None).unwrap();

    let model = Model::OpenMistral7b;
    let messages = vec![ChatMessage {
        role: ChatMessageRole::User,
        content: "Just guess the next word: \"Eiffel ...\"?".to_string(),
        tool_calls: None,
    }];
    let options = ChatParams {
        temperature: 0.0,
        random_seed: Some(42),
        ..Default::default()
    };

    let result = client.chat(model, messages, Some(options)).unwrap();
    println!("Assistant: {}", result.choices[0].message.content);
    // => "Assistant: Tower. The Eiffel Tower is a famous landmark in Paris, France."
}
33  examples/chat_async.rs  Normal file

@@ -0,0 +1,33 @@
use mistralai_client::v1::{
    chat::{ChatMessage, ChatMessageRole, ChatParams},
    client::Client,
    constants::Model,
};

#[tokio::main]
async fn main() {
    // This example suppose you have set the `MISTRAL_API_KEY` environment variable.
    let client = Client::new(None, None, None, None).unwrap();

    let model = Model::OpenMistral7b;
    let messages = vec![ChatMessage {
        role: ChatMessageRole::User,
        content: "Just guess the next word: \"Eiffel ...\"?".to_string(),
        tool_calls: None,
    }];
    let options = ChatParams {
        temperature: 0.0,
        random_seed: Some(42),
        ..Default::default()
    };

    let result = client
        .chat_async(model, messages, Some(options))
        .await
        .unwrap();
    println!(
        "{:?}: {}",
        result.choices[0].message.role, result.choices[0].message.content
    );
    // => "Assistant: Tower. The Eiffel Tower is a famous landmark in Paris, France."
}
71  examples/chat_with_function_calling.rs  Normal file

@@ -0,0 +1,71 @@
use mistralai_client::v1::{
    chat::{ChatMessage, ChatMessageRole, ChatParams},
    client::Client,
    constants::Model,
    tool::{Function, Tool, ToolChoice, ToolFunctionParameter, ToolFunctionParameterType},
};
use serde::Deserialize;
use std::any::Any;

#[derive(Debug, Deserialize)]
struct GetCityTemperatureArguments {
    city: String,
}

struct GetCityTemperatureFunction;
#[async_trait::async_trait]
impl Function for GetCityTemperatureFunction {
    async fn execute(&self, arguments: String) -> Box<dyn Any + Send> {
        // Deserialize arguments, perform the logic, and return the result
        let GetCityTemperatureArguments { city } = serde_json::from_str(&arguments).unwrap();

        let temperature = match city.as_str() {
            "Paris" => "20°C",
            _ => "Unknown city",
        };

        Box::new(temperature.to_string())
    }
}

fn main() {
    let tools = vec![Tool::new(
        "get_city_temperature".to_string(),
        "Get the current temperature in a city.".to_string(),
        vec![ToolFunctionParameter::new(
            "city".to_string(),
            "The name of the city.".to_string(),
            ToolFunctionParameterType::String,
        )],
    )];

    // This example suppose you have set the `MISTRAL_API_KEY` environment variable.
    let mut client = Client::new(None, None, None, None).unwrap();
    client.register_function(
        "get_city_temperature".to_string(),
        Box::new(GetCityTemperatureFunction),
    );

    let model = Model::MistralSmallLatest;
    let messages = vec![ChatMessage {
        role: ChatMessageRole::User,
        content: "What's the temperature in Paris?".to_string(),
        tool_calls: None,
    }];
    let options = ChatParams {
        temperature: 0.0,
        random_seed: Some(42),
        tool_choice: Some(ToolChoice::Auto),
        tools: Some(tools),
        ..Default::default()
    };

    client.chat(model, messages, Some(options)).unwrap();
    let temperature = client
        .get_last_function_call_result()
        .unwrap()
        .downcast::<String>()
        .unwrap();
    println!("The temperature in Paris is: {}.", temperature);
    // => "The temperature in Paris is: 20°C."
}
75  examples/chat_with_function_calling_async.rs  Normal file

@@ -0,0 +1,75 @@
use mistralai_client::v1::{
    chat::{ChatMessage, ChatMessageRole, ChatParams},
    client::Client,
    constants::Model,
    tool::{Function, Tool, ToolChoice, ToolFunctionParameter, ToolFunctionParameterType},
};
use serde::Deserialize;
use std::any::Any;

#[derive(Debug, Deserialize)]
struct GetCityTemperatureArguments {
    city: String,
}

struct GetCityTemperatureFunction;
#[async_trait::async_trait]
impl Function for GetCityTemperatureFunction {
    async fn execute(&self, arguments: String) -> Box<dyn Any + Send> {
        // Deserialize arguments, perform the logic, and return the result
        let GetCityTemperatureArguments { city } = serde_json::from_str(&arguments).unwrap();

        let temperature = match city.as_str() {
            "Paris" => "20°C",
            _ => "Unknown city",
        };

        Box::new(temperature.to_string())
    }
}

#[tokio::main]
async fn main() {
    let tools = vec![Tool::new(
        "get_city_temperature".to_string(),
        "Get the current temperature in a city.".to_string(),
        vec![ToolFunctionParameter::new(
            "city".to_string(),
            "The name of the city.".to_string(),
            ToolFunctionParameterType::String,
        )],
    )];

    // This example suppose you have set the `MISTRAL_API_KEY` environment variable.
    let mut client = Client::new(None, None, None, None).unwrap();
    client.register_function(
        "get_city_temperature".to_string(),
        Box::new(GetCityTemperatureFunction),
    );

    let model = Model::MistralSmallLatest;
    let messages = vec![ChatMessage {
        role: ChatMessageRole::User,
        content: "What's the temperature in Paris?".to_string(),
        tool_calls: None,
    }];
    let options = ChatParams {
        temperature: 0.0,
        random_seed: Some(42),
        tool_choice: Some(ToolChoice::Auto),
        tools: Some(tools),
        ..Default::default()
    };

    client
        .chat_async(model, messages, Some(options))
        .await
        .unwrap();
    let temperature = client
        .get_last_function_call_result()
        .unwrap()
        .downcast::<String>()
        .unwrap();
    println!("The temperature in Paris is: {}.", temperature);
    // => "The temperature in Paris is: 20°C."
}
45  examples/chat_with_streaming.rs  Normal file

@@ -0,0 +1,45 @@
use futures::stream::StreamExt;
use mistralai_client::v1::{
    chat::{ChatMessage, ChatMessageRole, ChatParams},
    client::Client,
    constants::Model,
};
use std::io::{self, Write};

#[tokio::main]
async fn main() {
    // This example suppose you have set the `MISTRAL_API_KEY` environment variable.
    let client = Client::new(None, None, None, None).unwrap();

    let model = Model::OpenMistral7b;
    let messages = vec![ChatMessage {
        role: ChatMessageRole::User,
        content: "Tell me a short happy story.".to_string(),
        tool_calls: None,
    }];
    let options = ChatParams {
        temperature: 0.0,
        random_seed: Some(42),
        ..Default::default()
    };

    let stream_result = client
        .chat_stream(model, messages, Some(options))
        .await
        .unwrap();
    stream_result
        .for_each(|chunk_result| async {
            match chunk_result {
                Ok(chunks) => chunks.iter().for_each(|chunk| {
                    print!("{}", chunk.choices[0].delta.content);
                    io::stdout().flush().unwrap();
                    // => "Once upon a time, [...]"
                }),
                Err(error) => {
                    eprintln!("Error processing chunk: {:?}", error)
                }
            }
        })
        .await;
    print!("\n") // To persist the last chunk output.
}
17  examples/embeddings.rs  Normal file

@@ -0,0 +1,17 @@
use mistralai_client::v1::{client::Client, constants::EmbedModel};

fn main() {
    // This example suppose you have set the `MISTRAL_API_KEY` environment variable.
    let client: Client = Client::new(None, None, None, None).unwrap();

    let model = EmbedModel::MistralEmbed;
    let input = vec!["Embed this sentence.", "As well as this one."]
        .iter()
        .map(|s| s.to_string())
        .collect();
    let options = None;

    let response = client.embeddings(model, input, options).unwrap();
    println!("First Embedding: {:?}", response.data[0]);
    // => "First Embedding: {...}"
}
21  examples/embeddings_async.rs  Normal file

@@ -0,0 +1,21 @@
use mistralai_client::v1::{client::Client, constants::EmbedModel};

#[tokio::main]
async fn main() {
    // This example suppose you have set the `MISTRAL_API_KEY` environment variable.
    let client: Client = Client::new(None, None, None, None).unwrap();

    let model = EmbedModel::MistralEmbed;
    let input = vec!["Embed this sentence.", "As well as this one."]
        .iter()
        .map(|s| s.to_string())
        .collect();
    let options = None;

    let response = client
        .embeddings_async(model, input, options)
        .await
        .unwrap();
    println!("First Embedding: {:?}", response.data[0]);
    // => "First Embedding: {...}"
}
10  examples/list_models.rs  Normal file

@@ -0,0 +1,10 @@
use mistralai_client::v1::client::Client;

fn main() {
    // This example suppose you have set the `MISTRAL_API_KEY` environment variable.
    let client = Client::new(None, None, None, None).unwrap();

    let result = client.list_models().unwrap();
    println!("First Model ID: {:?}", result.data[0].id);
    // => "First Model ID: open-mistral-7b"
}
11  examples/list_models_async.rs  Normal file

@@ -0,0 +1,11 @@
use mistralai_client::v1::client::Client;

#[tokio::main]
async fn main() {
    // This example suppose you have set the `MISTRAL_API_KEY` environment variable.
    let client = Client::new(None, None, None, None).unwrap();

    let result = client.list_models_async().await.unwrap();
    println!("First Model ID: {:?}", result.data[0].id);
    // => "First Model ID: open-mistral-7b"
}
@@ -1 +1,4 @@
//! This crate provides a easy bindings and types for MistralAI's API.

/// The v1 module contains the types and methods for the v1 API endpoints.
pub mod v1;
216  src/v1/chat.rs  Normal file

@@ -0,0 +1,216 @@
use serde::{Deserialize, Serialize};

use crate::v1::{common, constants, tool};

// -----------------------------------------------------------------------------
// Definitions

#[derive(Clone, Debug, Deserialize, Serialize)]
pub struct ChatMessage {
    pub role: ChatMessageRole,
    pub content: String,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub tool_calls: Option<Vec<tool::ToolCall>>,
}
impl ChatMessage {
    pub fn new_assistant_message(content: &str, tool_calls: Option<Vec<tool::ToolCall>>) -> Self {
        Self {
            role: ChatMessageRole::Assistant,
            content: content.to_string(),
            tool_calls,
        }
    }

    pub fn new_user_message(content: &str) -> Self {
        Self {
            role: ChatMessageRole::User,
            content: content.to_string(),
            tool_calls: None,
        }
    }
}

/// See the [Mistral AI API documentation](https://docs.mistral.ai/capabilities/completion/#chat-messages) for more information.
#[derive(Clone, Debug, Eq, PartialEq, Deserialize, Serialize)]
pub enum ChatMessageRole {
    #[serde(rename = "system")]
    System,
    #[serde(rename = "assistant")]
    Assistant,
    #[serde(rename = "user")]
    User,
    #[serde(rename = "tool")]
    Tool,
}

/// The format that the model must output.
///
/// See the [API documentation](https://docs.mistral.ai/api/#operation/createChatCompletion) for more information.
#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct ResponseFormat {
    #[serde(rename = "type")]
    pub type_: String,
}
impl ResponseFormat {
    pub fn json_object() -> Self {
        Self {
            type_: "json_object".to_string(),
        }
    }
}

// -----------------------------------------------------------------------------
// Request

/// The parameters for the chat request.
///
/// See the [API documentation](https://docs.mistral.ai/api/#operation/createChatCompletion) for more information.
#[derive(Clone, Debug)]
pub struct ChatParams {
    /// The maximum number of tokens to generate in the completion.
    ///
    /// Defaults to `None`.
    pub max_tokens: Option<u32>,
    /// The seed to use for random sampling. If set, different calls will generate deterministic results.
    ///
    /// Defaults to `None`.
    pub random_seed: Option<u32>,
    /// The format that the model must output.
    ///
    /// Defaults to `None`.
    pub response_format: Option<ResponseFormat>,
    /// Whether to inject a safety prompt before all conversations.
    ///
    /// Defaults to `false`.
    pub safe_prompt: bool,
    /// What sampling temperature to use, between `Some(0.0)` and `Some(1.0)`.
    ///
    /// Defaults to `0.7`.
    pub temperature: f32,
    /// Specifies if/how functions are called.
    ///
    /// Defaults to `None`.
    pub tool_choice: Option<tool::ToolChoice>,
    /// A list of available tools for the model.
    ///
    /// Defaults to `None`.
    pub tools: Option<Vec<tool::Tool>>,
    /// Nucleus sampling, where the model considers the results of the tokens with `top_p` probability mass.
    ///
    /// Defaults to `1.0`.
    pub top_p: f32,
}
impl Default for ChatParams {
    fn default() -> Self {
        Self {
            max_tokens: None,
            random_seed: None,
            safe_prompt: false,
            response_format: None,
            temperature: 0.7,
            tool_choice: None,
            tools: None,
            top_p: 1.0,
        }
    }
}
impl ChatParams {
    pub fn json_default() -> Self {
        Self {
            max_tokens: None,
            random_seed: None,
            safe_prompt: false,
            response_format: None,
            temperature: 0.7,
            tool_choice: None,
            tools: None,
            top_p: 1.0,
        }
    }
}

#[derive(Debug, Serialize, Deserialize)]
pub struct ChatRequest {
    pub messages: Vec<ChatMessage>,
    pub model: constants::Model,

    #[serde(skip_serializing_if = "Option::is_none")]
    pub max_tokens: Option<u32>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub random_seed: Option<u32>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub response_format: Option<ResponseFormat>,
    pub safe_prompt: bool,
    pub stream: bool,
    pub temperature: f32,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub tool_choice: Option<tool::ToolChoice>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub tools: Option<Vec<tool::Tool>>,
    pub top_p: f32,
}
impl ChatRequest {
    pub fn new(
        model: constants::Model,
        messages: Vec<ChatMessage>,
        stream: bool,
        options: Option<ChatParams>,
    ) -> Self {
        let ChatParams {
            max_tokens,
            random_seed,
            safe_prompt,
            temperature,
            tool_choice,
            tools,
            top_p,
            response_format,
        } = options.unwrap_or_default();

        Self {
            messages,
            model,

            max_tokens,
            random_seed,
            safe_prompt,
            stream,
            temperature,
            tool_choice,
            tools,
            top_p,
            response_format,
        }
    }
}

// -----------------------------------------------------------------------------
// Response

#[derive(Clone, Debug, Deserialize, Serialize)]
pub struct ChatResponse {
    pub id: String,
    pub object: String,
    /// Unix timestamp (in seconds).
    pub created: u32,
    pub model: constants::Model,
    pub choices: Vec<ChatResponseChoice>,
    pub usage: common::ResponseUsage,
}

#[derive(Clone, Debug, Deserialize, Serialize)]
pub struct ChatResponseChoice {
    pub index: u32,
    pub message: ChatMessage,
    pub finish_reason: ChatResponseChoiceFinishReason,
    // TODO Check this prop (seen in API responses but undocumented).
    // pub logprobs: ???
}

#[derive(Clone, Debug, Eq, PartialEq, Deserialize, Serialize)]
pub enum ChatResponseChoiceFinishReason {
    #[serde(rename = "stop")]
    Stop,
    #[serde(rename = "tool_calls")]
    ToolCalls,
}
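As a minimal, hypothetical usage sketch for the new `chat` module above (an editor's illustration, not taken from the diff), the `ChatMessage::new_user_message` constructor and `ResponseFormat::json_object()` can be combined to build JSON-mode options:

```rs
use mistralai_client::v1::chat::{ChatMessage, ChatParams, ResponseFormat};

fn main() {
    // Build a user message with the new constructor instead of filling the struct by hand.
    let messages = vec![ChatMessage::new_user_message(
        "Reply with a JSON object containing a `word` key: \"Eiffel ...\"?",
    )];

    // Ask the API to return a JSON object.
    let options = ChatParams {
        response_format: Some(ResponseFormat::json_object()),
        ..Default::default()
    };

    println!("{} message(s), options: {:?}", messages.len(), options);
}
```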
@@ -1,113 +0,0 @@
use serde::{Deserialize, Serialize};

use crate::v1::{common, constants};

#[derive(Debug)]
pub struct ChatCompletionParams {
    pub tools: Option<String>,
    pub temperature: Option<f32>,
    pub max_tokens: Option<u32>,
    pub top_p: Option<f32>,
    pub random_seed: Option<u32>,
    pub stream: Option<bool>,
    pub safe_prompt: Option<bool>,
}
impl Default for ChatCompletionParams {
    fn default() -> Self {
        Self {
            tools: None,
            temperature: None,
            max_tokens: None,
            top_p: None,
            random_seed: None,
            stream: None,
            safe_prompt: None,
        }
    }
}

#[derive(Debug, Serialize, Deserialize)]
pub struct ChatCompletionRequest {
    pub messages: Vec<ChatCompletionMessage>,
    pub model: constants::Model,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub tools: Option<String>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub temperature: Option<f32>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub max_tokens: Option<u32>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub top_p: Option<f32>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub random_seed: Option<u32>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub stream: Option<bool>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub safe_prompt: Option<bool>,
    // TODO Check this prop (seen in official Python client but not in API doc).
    // pub tool_choice: Option<String>,
    // TODO Check this prop (seen in official Python client but not in API doc).
    // pub response_format: Option<String>,
}
impl ChatCompletionRequest {
    pub fn new(
        model: constants::Model,
        messages: Vec<ChatCompletionMessage>,
        options: Option<ChatCompletionParams>,
    ) -> Self {
        let ChatCompletionParams {
            tools,
            temperature,
            max_tokens,
            top_p,
            random_seed,
            stream,
            safe_prompt,
        } = options.unwrap_or_default();

        Self {
            messages,
            model,
            tools,
            temperature,
            max_tokens,
            top_p,
            random_seed,
            stream,
            safe_prompt,
        }
    }
}

#[derive(Clone, Debug, Deserialize, Serialize)]
pub struct ChatCompletionResponse {
    pub id: String,
    pub object: String,
    /// Unix timestamp (in seconds).
    pub created: u32,
    pub model: constants::Model,
    pub choices: Vec<ChatCompletionChoice>,
    pub usage: common::ResponseUsage,
}

#[derive(Clone, Debug, Deserialize, Serialize)]
pub struct ChatCompletionChoice {
    pub index: u32,
    pub message: ChatCompletionMessage,
    pub finish_reason: String,
    // TODO Check this prop (seen in API responses but undocumented).
    // pub logprobs: ???
}

#[derive(Clone, Debug, Deserialize, Serialize)]
pub struct ChatCompletionMessage {
    pub role: ChatCompletionMessageRole,
    pub content: String,
}

#[derive(Clone, Debug, Eq, PartialEq, Deserialize, Serialize)]
#[allow(non_camel_case_types)]
pub enum ChatCompletionMessageRole {
    assistant,
    user,
}
57
src/v1/chat_stream.rs
Normal file
57
src/v1/chat_stream.rs
Normal file
@@ -0,0 +1,57 @@
use serde::{Deserialize, Serialize};
use serde_json::from_str;

use crate::v1::{chat, common, constants, error};

// -----------------------------------------------------------------------------
// Response

#[derive(Clone, Debug, Deserialize, Serialize)]
pub struct ChatStreamChunk {
    pub id: String,
    pub object: String,
    /// Unix timestamp (in seconds).
    pub created: u32,
    pub model: constants::Model,
    pub choices: Vec<ChatStreamChunkChoice>,
    pub usage: Option<common::ResponseUsage>,
    // TODO Check this prop (seen in API responses but undocumented).
    // pub logprobs: ???,
}

#[derive(Clone, Debug, Deserialize, Serialize)]
pub struct ChatStreamChunkChoice {
    pub index: u32,
    pub delta: ChatStreamChunkChoiceDelta,
    pub finish_reason: Option<String>,
    // TODO Check this prop (seen in API responses but undocumented).
    // pub logprobs: ???,
}

#[derive(Clone, Debug, Deserialize, Serialize)]
pub struct ChatStreamChunkChoiceDelta {
    pub role: Option<chat::ChatMessageRole>,
    pub content: String,
}

/// Extracts serialized chunks from a stream message.
pub fn get_chunk_from_stream_message_line(
    line: &str,
) -> Result<Option<Vec<ChatStreamChunk>>, error::ApiError> {
    if line.trim() == "data: [DONE]" {
        return Ok(None);
    }

    let chunk_as_json = line.trim_start_matches("data: ").trim();
    if chunk_as_json.is_empty() {
        return Ok(Some(vec![]));
    }

    // Attempt to deserialize the JSON string into ChatStreamChunk
    match from_str::<ChatStreamChunk>(chunk_as_json) {
        Ok(chunk) => Ok(Some(vec![chunk])),
        Err(e) => Err(error::ApiError {
            message: e.to_string(),
        }),
    }
}
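To make the line-by-line contract of `get_chunk_from_stream_message_line` concrete, here is a minimal, hedged sketch of feeding it a raw SSE message split into lines; the JSON chunk below is made up for illustration, not a captured API response.

```rust
use mistralai_client::v1::chat_stream::get_chunk_from_stream_message_line;

fn main() {
    // A raw stream message: one serialized chunk followed by the end-of-stream sentinel.
    let message = concat!(
        "data: {\"id\":\"cmpl-0\",\"object\":\"chat.completion.chunk\",\"created\":0,",
        "\"model\":\"open-mistral-7b\",\"choices\":[],\"usage\":null}\n",
        "data: [DONE]"
    );

    for line in message.lines() {
        match get_chunk_from_stream_message_line(line) {
            Ok(Some(chunks)) => println!("parsed {} chunk(s)", chunks.len()),
            Ok(None) => println!("end of stream"),
            Err(error) => eprintln!("could not parse chunk: {:?}", error),
        }
    }
}
```

Note that an empty `data:` payload yields `Ok(Some(vec![]))`, so callers can flatten the per-line results without special-casing keep-alive lines.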
src/v1/client.rs (539 changed lines)
@@ -1,128 +1,377 @@
|
||||
use crate::v1::error::ApiError;
|
||||
use futures::stream::StreamExt;
|
||||
use futures::Stream;
|
||||
use log::debug;
|
||||
use reqwest::Error as ReqwestError;
|
||||
|
||||
use crate::v1::{
|
||||
chat_completion::{
|
||||
ChatCompletionMessage, ChatCompletionParams, ChatCompletionRequest, ChatCompletionResponse,
|
||||
},
|
||||
constants::{EmbedModel, Model, API_URL_BASE},
|
||||
embedding::{EmbeddingRequest, EmbeddingRequestOptions, EmbeddingResponse},
|
||||
error::ClientError,
|
||||
model_list::ModelListResponse,
|
||||
use std::{
|
||||
any::Any,
|
||||
collections::HashMap,
|
||||
sync::{Arc, Mutex},
|
||||
};
|
||||
|
||||
use crate::v1::{chat, chat_stream, constants, embedding, error, model_list, tool, utils};
|
||||
|
||||
#[derive(Debug)]
|
||||
pub struct Client {
|
||||
pub api_key: String,
|
||||
pub endpoint: String,
|
||||
pub max_retries: u32,
|
||||
pub timeout: u32,
|
||||
|
||||
functions: Arc<Mutex<HashMap<String, Box<dyn tool::Function>>>>,
|
||||
last_function_call_result: Arc<Mutex<Option<Box<dyn Any + Send>>>>,
|
||||
}
|
||||
|
||||
impl Client {
|
||||
/// Constructs a new `Client`.
|
||||
///
|
||||
/// # Arguments
|
||||
///
|
||||
/// * `api_key` - An optional API key.
|
||||
/// If not provided, the method will try to use the `MISTRAL_API_KEY` environment variable.
|
||||
/// * `endpoint` - An optional custom API endpoint. Defaults to the official API endpoint if not provided.
|
||||
/// * `max_retries` - Optional maximum number of retries for failed requests. Defaults to `5`.
|
||||
/// * `timeout` - Optional timeout in seconds for requests. Defaults to `120`.
|
||||
///
|
||||
/// # Examples
|
||||
///
|
||||
/// ```
|
||||
/// use mistralai_client::v1::client::Client;
|
||||
///
|
||||
/// let client = Client::new(Some("your_api_key_here".to_string()), None, Some(3), Some(60));
|
||||
/// assert!(client.is_ok());
|
||||
/// ```
|
||||
///
|
||||
/// # Errors
|
||||
///
|
||||
/// This method fails whenever neither the `api_key` is provided
|
||||
/// nor the `MISTRAL_API_KEY` environment variable is set.
|
||||
pub fn new(
|
||||
api_key: Option<String>,
|
||||
endpoint: Option<String>,
|
||||
max_retries: Option<u32>,
|
||||
timeout: Option<u32>,
|
||||
) -> Result<Self, ClientError> {
|
||||
let api_key = api_key.unwrap_or(match std::env::var("MISTRAL_API_KEY") {
|
||||
Ok(api_key_from_env) => api_key_from_env,
|
||||
Err(_) => return Err(ClientError::ApiKeyError),
|
||||
});
|
||||
let endpoint = endpoint.unwrap_or(API_URL_BASE.to_string());
|
||||
) -> Result<Self, error::ClientError> {
|
||||
let api_key = match api_key {
|
||||
Some(api_key_from_param) => api_key_from_param,
|
||||
None => {
|
||||
std::env::var("MISTRAL_API_KEY").map_err(|_| error::ClientError::MissingApiKey)?
|
||||
}
|
||||
};
|
||||
let endpoint = endpoint.unwrap_or(constants::API_URL_BASE.to_string());
|
||||
let max_retries = max_retries.unwrap_or(5);
|
||||
let timeout = timeout.unwrap_or(120);
|
||||
|
||||
let functions: Arc<_> = Arc::new(Mutex::new(HashMap::new()));
|
||||
let last_function_call_result = Arc::new(Mutex::new(None));
|
||||
|
||||
Ok(Self {
|
||||
api_key,
|
||||
endpoint,
|
||||
max_retries,
|
||||
timeout,
|
||||
|
||||
functions,
|
||||
last_function_call_result,
|
||||
})
|
||||
}
|
||||
|
||||
/// Synchronously sends a chat completion request and returns the response.
|
||||
///
|
||||
/// # Arguments
|
||||
///
|
||||
/// * `model` - The [Model] to use for the chat completion.
|
||||
/// * `messages` - A vector of [ChatMessage] to send as part of the chat.
|
||||
/// * `options` - Optional [ChatParams] to customize the request.
|
||||
///
|
||||
/// # Returns
|
||||
///
|
||||
/// Returns a [Result] containing the `ChatResponse` if the request is successful,
|
||||
/// or an [ApiError] if there is an error.
|
||||
///
|
||||
/// # Examples
|
||||
///
|
||||
/// ```
|
||||
/// use mistralai_client::v1::{
|
||||
/// chat::{ChatMessage, ChatMessageRole},
|
||||
/// client::Client,
|
||||
/// constants::Model,
|
||||
/// };
|
||||
///
|
||||
/// let client = Client::new(None, None, None, None).unwrap();
|
||||
/// let messages = vec![ChatMessage {
|
||||
/// role: ChatMessageRole::User,
|
||||
/// content: "Hello, world!".to_string(),
|
||||
/// tool_calls: None,
|
||||
/// }];
|
||||
/// let response = client.chat(Model::OpenMistral7b, messages, None).unwrap();
|
||||
/// println!("{:?}: {}", response.choices[0].message.role, response.choices[0].message.content);
|
||||
/// ```
|
||||
pub fn chat(
|
||||
&self,
|
||||
model: Model,
|
||||
messages: Vec<ChatCompletionMessage>,
|
||||
options: Option<ChatCompletionParams>,
|
||||
) -> Result<ChatCompletionResponse, ApiError> {
|
||||
let request = ChatCompletionRequest::new(model, messages, options);
|
||||
model: constants::Model,
|
||||
messages: Vec<chat::ChatMessage>,
|
||||
options: Option<chat::ChatParams>,
|
||||
) -> Result<chat::ChatResponse, error::ApiError> {
|
||||
let request = chat::ChatRequest::new(model, messages, false, options);
|
||||
|
||||
let response = self.post_sync("/chat/completions", &request)?;
|
||||
let result = response.json::<ChatCompletionResponse>();
|
||||
let result = response.json::<chat::ChatResponse>();
|
||||
match result {
|
||||
Ok(response) => Ok(response),
|
||||
Ok(data) => {
|
||||
utils::debug_pretty_json_from_struct("Response Data", &data);
|
||||
|
||||
self.call_function_if_any(data.clone());
|
||||
|
||||
Ok(data)
|
||||
}
|
||||
Err(error) => Err(self.to_api_error(error)),
|
||||
}
|
||||
}
|
||||
|
||||
/// Asynchronously sends a chat completion request and returns the response.
|
||||
///
|
||||
/// # Arguments
|
||||
///
|
||||
/// * `model` - The [Model] to use for the chat completion.
|
||||
/// * `messages` - A vector of [ChatMessage] to send as part of the chat.
|
||||
/// * `options` - Optional [ChatParams] to customize the request.
|
||||
///
|
||||
/// # Returns
|
||||
///
|
||||
/// Returns a [Result] containing the `ChatResponse` if the request is successful,
|
||||
/// or an [ApiError] if there is an error.
|
||||
///
|
||||
/// # Examples
|
||||
///
|
||||
/// ```
|
||||
/// use mistralai_client::v1::{
|
||||
/// chat::{ChatMessage, ChatMessageRole},
|
||||
/// client::Client,
|
||||
/// constants::Model,
|
||||
/// };
|
||||
///
|
||||
/// #[tokio::main]
|
||||
/// async fn main() {
|
||||
/// let client = Client::new(None, None, None, None).unwrap();
|
||||
/// let messages = vec![ChatMessage {
|
||||
/// role: ChatMessageRole::User,
|
||||
/// content: "Hello, world!".to_string(),
|
||||
/// tool_calls: None,
|
||||
/// }];
|
||||
/// let response = client.chat_async(Model::OpenMistral7b, messages, None).await.unwrap();
|
||||
/// println!("{:?}: {}", response.choices[0].message.role, response.choices[0].message.content);
|
||||
/// }
|
||||
/// ```
|
||||
pub async fn chat_async(
|
||||
&self,
|
||||
model: Model,
|
||||
messages: Vec<ChatCompletionMessage>,
|
||||
options: Option<ChatCompletionParams>,
|
||||
) -> Result<ChatCompletionResponse, ApiError> {
|
||||
let request = ChatCompletionRequest::new(model, messages, options);
|
||||
model: constants::Model,
|
||||
messages: Vec<chat::ChatMessage>,
|
||||
options: Option<chat::ChatParams>,
|
||||
) -> Result<chat::ChatResponse, error::ApiError> {
|
||||
let request = chat::ChatRequest::new(model, messages, false, options);
|
||||
|
||||
let response = self.post_async("/chat/completions", &request).await?;
|
||||
let result = response.json::<ChatCompletionResponse>().await;
|
||||
let result = response.json::<chat::ChatResponse>().await;
|
||||
match result {
|
||||
Ok(response) => Ok(response),
|
||||
Ok(data) => {
|
||||
utils::debug_pretty_json_from_struct("Response Data", &data);
|
||||
|
||||
self.call_function_if_any_async(data.clone()).await;
|
||||
|
||||
Ok(data)
|
||||
}
|
||||
Err(error) => Err(self.to_api_error(error)),
|
||||
}
|
||||
}
|
||||
|
||||
/// Asynchronously sends a chat completion request and returns a stream of message chunks.
|
||||
///
|
||||
/// # Arguments
|
||||
///
|
||||
/// * `model` - The [Model] to use for the chat completion.
|
||||
/// * `messages` - A vector of [ChatMessage] to send as part of the chat.
|
||||
/// * `options` - Optional [ChatParams] to customize the request.
|
||||
///
|
||||
/// # Returns
|
||||
///
|
||||
/// Returns a [Result] containing a `Stream` of `ChatStreamChunk` if the request is successful,
|
||||
/// or an [ApiError] if there is an error.
|
||||
///
|
||||
/// # Examples
|
||||
///
|
||||
/// ```
|
||||
/// use futures::stream::StreamExt;
|
||||
/// use mistralai_client::v1::{
|
||||
/// chat::{ChatMessage, ChatMessageRole},
|
||||
/// client::Client,
|
||||
/// constants::Model,
|
||||
/// };
|
||||
/// use std::io::{self, Write};
|
||||
///
|
||||
/// #[tokio::main]
|
||||
/// async fn main() {
|
||||
/// let client = Client::new(None, None, None, None).unwrap();
|
||||
/// let messages = vec![ChatMessage {
|
||||
/// role: ChatMessageRole::User,
|
||||
/// content: "Hello, world!".to_string(),
|
||||
/// tool_calls: None,
|
||||
/// }];
|
||||
///
|
||||
/// let stream_result = client
|
||||
/// .chat_stream(Model::OpenMistral7b, messages, None)
|
||||
/// .await
|
||||
/// .unwrap();
|
||||
/// stream_result
|
||||
/// .for_each(|chunk_result| async {
|
||||
/// match chunk_result {
|
||||
/// Ok(chunks) => chunks.iter().for_each(|chunk| {
|
||||
/// print!("{}", chunk.choices[0].delta.content);
|
||||
/// io::stdout().flush().unwrap();
|
||||
/// // => "Once upon a time, [...]"
|
||||
/// }),
|
||||
/// Err(error) => {
|
||||
/// eprintln!("Error processing chunk: {:?}", error)
|
||||
/// }
|
||||
/// }
|
||||
/// })
|
||||
/// .await;
|
||||
/// print!("\n") // To persist the last chunk output.
|
||||
/// }
/// ```
|
||||
pub async fn chat_stream(
|
||||
&self,
|
||||
model: constants::Model,
|
||||
messages: Vec<chat::ChatMessage>,
|
||||
options: Option<chat::ChatParams>,
|
||||
) -> Result<
|
||||
impl Stream<Item = Result<Vec<chat_stream::ChatStreamChunk>, error::ApiError>>,
|
||||
error::ApiError,
|
||||
> {
|
||||
let request = chat::ChatRequest::new(model, messages, true, options);
|
||||
let response = self
|
||||
.post_stream("/chat/completions", &request)
|
||||
.await
|
||||
.map_err(|e| error::ApiError {
|
||||
message: e.to_string(),
|
||||
})?;
|
||||
if !response.status().is_success() {
|
||||
let status = response.status();
|
||||
let text = response.text().await.unwrap_or_default();
|
||||
return Err(error::ApiError {
|
||||
message: format!("{}: {}", status, text),
|
||||
});
|
||||
}
|
||||
|
||||
let deserialized_stream = response.bytes_stream().then(|bytes_result| async move {
|
||||
match bytes_result {
|
||||
Ok(bytes) => match String::from_utf8(bytes.to_vec()) {
|
||||
Ok(message) => {
|
||||
let chunks = message
|
||||
.lines()
|
||||
.filter_map(
|
||||
|line| match chat_stream::get_chunk_from_stream_message_line(line) {
|
||||
Ok(Some(chunks)) => Some(chunks),
|
||||
Ok(None) => None,
|
||||
Err(_error) => None,
|
||||
},
|
||||
)
|
||||
.flatten()
|
||||
.collect();
|
||||
|
||||
Ok(chunks)
|
||||
}
|
||||
Err(e) => Err(error::ApiError {
|
||||
message: e.to_string(),
|
||||
}),
|
||||
},
|
||||
Err(e) => Err(error::ApiError {
|
||||
message: e.to_string(),
|
||||
}),
|
||||
}
|
||||
});
|
||||
|
||||
Ok(deserialized_stream)
|
||||
}
|
||||
|
||||
pub fn embeddings(
|
||||
&self,
|
||||
model: EmbedModel,
|
||||
model: constants::EmbedModel,
|
||||
input: Vec<String>,
|
||||
options: Option<EmbeddingRequestOptions>,
|
||||
) -> Result<EmbeddingResponse, ApiError> {
|
||||
let request = EmbeddingRequest::new(model, input, options);
|
||||
options: Option<embedding::EmbeddingRequestOptions>,
|
||||
) -> Result<embedding::EmbeddingResponse, error::ApiError> {
|
||||
let request = embedding::EmbeddingRequest::new(model, input, options);
|
||||
|
||||
let response = self.post_sync("/embeddings", &request)?;
|
||||
let result = response.json::<EmbeddingResponse>();
|
||||
let result = response.json::<embedding::EmbeddingResponse>();
|
||||
match result {
|
||||
Ok(response) => Ok(response),
|
||||
Ok(data) => {
|
||||
utils::debug_pretty_json_from_struct("Response Data", &data);
|
||||
|
||||
Ok(data)
|
||||
}
|
||||
Err(error) => Err(self.to_api_error(error)),
|
||||
}
|
||||
}
|
||||
|
||||
pub async fn embeddings_async(
|
||||
&self,
|
||||
model: EmbedModel,
|
||||
model: constants::EmbedModel,
|
||||
input: Vec<String>,
|
||||
options: Option<EmbeddingRequestOptions>,
|
||||
) -> Result<EmbeddingResponse, ApiError> {
|
||||
let request = EmbeddingRequest::new(model, input, options);
|
||||
options: Option<embedding::EmbeddingRequestOptions>,
|
||||
) -> Result<embedding::EmbeddingResponse, error::ApiError> {
|
||||
let request = embedding::EmbeddingRequest::new(model, input, options);
|
||||
|
||||
let response = self.post_async("/embeddings", &request).await?;
|
||||
let result = response.json::<EmbeddingResponse>().await;
|
||||
let result = response.json::<embedding::EmbeddingResponse>().await;
|
||||
match result {
|
||||
Ok(response) => Ok(response),
|
||||
Ok(data) => {
|
||||
utils::debug_pretty_json_from_struct("Response Data", &data);
|
||||
|
||||
Ok(data)
|
||||
}
|
||||
Err(error) => Err(self.to_api_error(error)),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn list_models(&self) -> Result<ModelListResponse, ApiError> {
|
||||
pub fn get_last_function_call_result(&self) -> Option<Box<dyn Any + Send>> {
|
||||
let mut result_lock = self.last_function_call_result.lock().unwrap();
|
||||
|
||||
result_lock.take()
|
||||
}
|
||||
|
||||
pub fn list_models(&self) -> Result<model_list::ModelListResponse, error::ApiError> {
|
||||
let response = self.get_sync("/models")?;
|
||||
let result = response.json::<ModelListResponse>();
|
||||
let result = response.json::<model_list::ModelListResponse>();
|
||||
match result {
|
||||
Ok(response) => Ok(response),
|
||||
Ok(data) => {
|
||||
utils::debug_pretty_json_from_struct("Response Data", &data);
|
||||
|
||||
Ok(data)
|
||||
}
|
||||
Err(error) => Err(self.to_api_error(error)),
|
||||
}
|
||||
}
|
||||
|
||||
pub async fn list_models_async(&self) -> Result<ModelListResponse, ApiError> {
|
||||
pub async fn list_models_async(
|
||||
&self,
|
||||
) -> Result<model_list::ModelListResponse, error::ApiError> {
|
||||
let response = self.get_async("/models").await?;
|
||||
let result = response.json::<ModelListResponse>().await;
|
||||
let result = response.json::<model_list::ModelListResponse>().await;
|
||||
match result {
|
||||
Ok(response) => Ok(response),
|
||||
Ok(data) => {
|
||||
utils::debug_pretty_json_from_struct("Response Data", &data);
|
||||
|
||||
Ok(data)
|
||||
}
|
||||
Err(error) => Err(self.to_api_error(error)),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn register_function(&mut self, name: String, function: Box<dyn tool::Function>) {
|
||||
let mut functions = self.functions.lock().unwrap();
|
||||
|
||||
functions.insert(name, function);
|
||||
}
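A hedged sketch of how a handler plugs into `register_function` and is later read back via `get_last_function_call_result`; `CityTemperature` and its `String` result are illustrative stand-ins rather than types provided by the crate, and `MISTRAL_API_KEY` is assumed to be set.

```rust
use async_trait::async_trait;
use std::any::Any;

use mistralai_client::v1::{client::Client, tool};

// A hypothetical handler; the crate only requires it to implement `tool::Function`.
struct CityTemperature;

#[async_trait]
impl tool::Function for CityTemperature {
    async fn execute(&self, arguments: String) -> Box<dyn Any + Send> {
        // `arguments` is the raw JSON string produced by the model, e.g. {"city": "Paris"}.
        Box::new(format!("20°C for {}", arguments))
    }
}

fn main() {
    // Requires the MISTRAL_API_KEY environment variable (or an explicit key).
    let mut client = Client::new(None, None, None, None).unwrap();
    client.register_function("get_city_temperature".to_string(), Box::new(CityTemperature));

    // After a `chat`/`chat_async` call whose response contains a tool call,
    // the stored result can be taken out again and downcast to the concrete type.
    if let Some(result) = client.get_last_function_call_result() {
        if let Some(text) = result.downcast_ref::<String>() {
            println!("{}", text);
        }
    }
}
```

This mirrors how `call_function_if_any` dispatches on the first tool call of the first choice: the handler is looked up by the function name the model emitted, executed, and its boxed result cached on the client.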
|
||||
|
||||
fn build_request_sync(
|
||||
&self,
|
||||
request: reqwest::blocking::RequestBuilder,
|
||||
@@ -135,7 +384,6 @@ impl Client {
|
||||
let request_builder = request
|
||||
.bearer_auth(&self.api_key)
|
||||
.header("Accept", "application/json")
|
||||
.header("Content-Type", "application/json")
|
||||
.header("User-Agent", user_agent);
|
||||
|
||||
request_builder
|
||||
@@ -150,16 +398,90 @@ impl Client {
|
||||
let request_builder = request
|
||||
.bearer_auth(&self.api_key)
|
||||
.header("Accept", "application/json")
|
||||
.header("Content-Type", "application/json")
|
||||
.header("User-Agent", user_agent);
|
||||
|
||||
request_builder
|
||||
}
|
||||
|
||||
fn get_sync(&self, path: &str) -> Result<reqwest::blocking::Response, ApiError> {
|
||||
let client_sync = reqwest::blocking::Client::new();
|
||||
fn build_request_stream(&self, request: reqwest::RequestBuilder) -> reqwest::RequestBuilder {
|
||||
let user_agent = format!(
|
||||
"ivangabriele/mistralai-client-rs/{}",
|
||||
env!("CARGO_PKG_VERSION")
|
||||
);
|
||||
|
||||
let request_builder = request
|
||||
.bearer_auth(&self.api_key)
|
||||
.header("Accept", "text/event-stream")
|
||||
.header("User-Agent", user_agent);
|
||||
|
||||
request_builder
|
||||
}
|
||||
|
||||
fn call_function_if_any(&self, response: chat::ChatResponse) -> () {
|
||||
let next_result = match response.choices.get(0) {
|
||||
Some(first_choice) => match first_choice.message.tool_calls.to_owned() {
|
||||
Some(tool_calls) => match tool_calls.get(0) {
|
||||
Some(first_tool_call) => {
|
||||
let functions = self.functions.lock().unwrap();
|
||||
match functions.get(&first_tool_call.function.name) {
|
||||
Some(function) => {
|
||||
let runtime = tokio::runtime::Runtime::new().unwrap();
|
||||
let result = runtime.block_on(async {
|
||||
function
|
||||
.execute(first_tool_call.function.arguments.to_owned())
|
||||
.await
|
||||
});
|
||||
|
||||
Some(result)
|
||||
}
|
||||
None => None,
|
||||
}
|
||||
}
|
||||
None => None,
|
||||
},
|
||||
None => None,
|
||||
},
|
||||
None => None,
|
||||
};
|
||||
|
||||
let mut last_result_lock = self.last_function_call_result.lock().unwrap();
|
||||
*last_result_lock = next_result;
|
||||
}
|
||||
|
||||
async fn call_function_if_any_async(&self, response: chat::ChatResponse) -> () {
|
||||
let next_result = match response.choices.get(0) {
|
||||
Some(first_choice) => match first_choice.message.tool_calls.to_owned() {
|
||||
Some(tool_calls) => match tool_calls.get(0) {
|
||||
Some(first_tool_call) => {
|
||||
let functions = self.functions.lock().unwrap();
|
||||
match functions.get(&first_tool_call.function.name) {
|
||||
Some(function) => {
|
||||
let result = function
|
||||
.execute(first_tool_call.function.arguments.to_owned())
|
||||
.await;
|
||||
|
||||
Some(result)
|
||||
}
|
||||
None => None,
|
||||
}
|
||||
}
|
||||
None => None,
|
||||
},
|
||||
None => None,
|
||||
},
|
||||
None => None,
|
||||
};
|
||||
|
||||
let mut last_result_lock = self.last_function_call_result.lock().unwrap();
|
||||
*last_result_lock = next_result;
|
||||
}
|
||||
|
||||
fn get_sync(&self, path: &str) -> Result<reqwest::blocking::Response, error::ApiError> {
|
||||
let reqwest_client = reqwest::blocking::Client::new();
|
||||
let url = format!("{}{}", self.endpoint, path);
|
||||
let request = self.build_request_sync(client_sync.get(url));
|
||||
debug!("Request URL: {}", url);
|
||||
|
||||
let request = self.build_request_sync(reqwest_client.get(url));
|
||||
|
||||
let result = request.send();
|
||||
match result {
|
||||
@@ -167,51 +489,62 @@ impl Client {
|
||||
if response.status().is_success() {
|
||||
Ok(response)
|
||||
} else {
|
||||
let status = response.status();
|
||||
let text = response.text().unwrap();
|
||||
Err(ApiError {
|
||||
message: format!("{}: {}", status, text),
|
||||
let response_status = response.status();
|
||||
let response_body = response.text().unwrap_or_default();
|
||||
debug!("Response Status: {}", &response_status);
|
||||
utils::debug_pretty_json_from_string("Response Data", &response_body);
|
||||
|
||||
Err(error::ApiError {
|
||||
message: format!("{}: {}", response_status, response_body),
|
||||
})
|
||||
}
|
||||
}
|
||||
Err(error) => Err(ApiError {
|
||||
Err(error) => Err(error::ApiError {
|
||||
message: error.to_string(),
|
||||
}),
|
||||
}
|
||||
}
|
||||
|
||||
async fn get_async(&self, path: &str) -> Result<reqwest::Response, ApiError> {
|
||||
async fn get_async(&self, path: &str) -> Result<reqwest::Response, error::ApiError> {
|
||||
let reqwest_client = reqwest::Client::new();
|
||||
let url = format!("{}{}", self.endpoint, path);
|
||||
debug!("Request URL: {}", url);
|
||||
|
||||
let request_builder = reqwest_client.get(url);
|
||||
let request = self.build_request_async(request_builder);
|
||||
|
||||
let result = request.send().await.map_err(|e| self.to_api_error(e));
|
||||
let result = request.send().await;
|
||||
match result {
|
||||
Ok(response) => {
|
||||
if response.status().is_success() {
|
||||
Ok(response)
|
||||
} else {
|
||||
let status = response.status();
|
||||
let text = response.text().await.unwrap_or_default();
|
||||
Err(ApiError {
|
||||
message: format!("{}: {}", status, text),
|
||||
let response_status = response.status();
|
||||
let response_body = response.text().await.unwrap_or_default();
|
||||
debug!("Response Status: {}", &response_status);
|
||||
utils::debug_pretty_json_from_string("Response Data", &response_body);
|
||||
|
||||
Err(error::ApiError {
|
||||
message: format!("{}: {}", response_status, response_body),
|
||||
})
|
||||
}
|
||||
}
|
||||
Err(error) => Err(ApiError {
|
||||
Err(error) => Err(error::ApiError {
|
||||
message: error.to_string(),
|
||||
}),
|
||||
}
|
||||
}
|
||||
|
||||
fn post_sync<T: serde::ser::Serialize + std::fmt::Debug>(
|
||||
fn post_sync<T: std::fmt::Debug + serde::ser::Serialize>(
|
||||
&self,
|
||||
path: &str,
|
||||
params: &T,
|
||||
) -> Result<reqwest::blocking::Response, ApiError> {
|
||||
) -> Result<reqwest::blocking::Response, error::ApiError> {
|
||||
let reqwest_client = reqwest::blocking::Client::new();
|
||||
let url = format!("{}{}", self.endpoint, path);
|
||||
debug!("Request URL: {}", url);
|
||||
utils::debug_pretty_json_from_struct("Request Body", params);
|
||||
|
||||
let request_builder = reqwest_client.post(url).json(params);
|
||||
let request = self.build_request_sync(request_builder);
|
||||
|
||||
@@ -221,14 +554,17 @@ impl Client {
|
||||
if response.status().is_success() {
|
||||
Ok(response)
|
||||
} else {
|
||||
let status = response.status();
|
||||
let text = response.text().unwrap_or_default();
|
||||
Err(ApiError {
|
||||
message: format!("{}: {}", status, text),
|
||||
let response_status = response.status();
|
||||
let response_body = response.text().unwrap_or_default();
|
||||
debug!("Response Status: {}", &response_status);
|
||||
utils::debug_pretty_json_from_string("Response Data", &response_body);
|
||||
|
||||
Err(error::ApiError {
|
||||
message: format!("{}: {}", response_body, response_status),
|
||||
})
|
||||
}
|
||||
}
|
||||
Err(error) => Err(ApiError {
|
||||
Err(error) => Err(error::ApiError {
|
||||
message: error.to_string(),
|
||||
}),
|
||||
}
|
||||
@@ -238,33 +574,74 @@ impl Client {
|
||||
&self,
|
||||
path: &str,
|
||||
params: &T,
|
||||
) -> Result<reqwest::Response, ApiError> {
|
||||
) -> Result<reqwest::Response, error::ApiError> {
|
||||
let reqwest_client = reqwest::Client::new();
|
||||
let url = format!("{}{}", self.endpoint, path);
|
||||
debug!("Request URL: {}", url);
|
||||
utils::debug_pretty_json_from_struct("Request Body", params);
|
||||
|
||||
let request_builder = reqwest_client.post(url).json(params);
|
||||
let request = self.build_request_async(request_builder);
|
||||
|
||||
let result = request.send().await.map_err(|e| self.to_api_error(e));
|
||||
let result = request.send().await;
|
||||
match result {
|
||||
Ok(response) => {
|
||||
if response.status().is_success() {
|
||||
Ok(response)
|
||||
} else {
|
||||
let status = response.status();
|
||||
let text = response.text().await.unwrap_or_default();
|
||||
Err(ApiError {
|
||||
message: format!("{}: {}", status, text),
|
||||
let response_status = response.status();
|
||||
let response_body = response.text().await.unwrap_or_default();
|
||||
debug!("Response Status: {}", &response_status);
|
||||
utils::debug_pretty_json_from_string("Response Data", &response_body);
|
||||
|
||||
Err(error::ApiError {
|
||||
message: format!("{}: {}", response_status, response_body),
|
||||
})
|
||||
}
|
||||
}
|
||||
Err(error) => Err(ApiError {
|
||||
Err(error) => Err(error::ApiError {
|
||||
message: error.to_string(),
|
||||
}),
|
||||
}
|
||||
}
|
||||
|
||||
fn to_api_error(&self, err: ReqwestError) -> ApiError {
|
||||
ApiError {
|
||||
async fn post_stream<T: serde::ser::Serialize + std::fmt::Debug>(
|
||||
&self,
|
||||
path: &str,
|
||||
params: &T,
|
||||
) -> Result<reqwest::Response, error::ApiError> {
|
||||
let reqwest_client = reqwest::Client::new();
|
||||
let url = format!("{}{}", self.endpoint, path);
|
||||
debug!("Request URL: {}", url);
|
||||
utils::debug_pretty_json_from_struct("Request Body", params);
|
||||
|
||||
let request_builder = reqwest_client.post(url).json(params);
|
||||
let request = self.build_request_stream(request_builder);
|
||||
|
||||
let result = request.send().await;
|
||||
match result {
|
||||
Ok(response) => {
|
||||
if response.status().is_success() {
|
||||
Ok(response)
|
||||
} else {
|
||||
let response_status = response.status();
|
||||
let response_body = response.text().await.unwrap_or_default();
|
||||
debug!("Response Status: {}", &response_status);
|
||||
utils::debug_pretty_json_from_string("Response Data", &response_body);
|
||||
|
||||
Err(error::ApiError {
|
||||
message: format!("{}: {}", response_status, response_body),
|
||||
})
|
||||
}
|
||||
}
|
||||
Err(error) => Err(error::ApiError {
|
||||
message: error.to_string(),
|
||||
}),
|
||||
}
|
||||
}
|
||||
|
||||
fn to_api_error(&self, err: ReqwestError) -> error::ApiError {
|
||||
error::ApiError {
|
||||
message: err.to_string(),
|
||||
}
|
||||
}
|
||||
|
||||
src/v1/constants.rs
@@ -6,14 +6,20 @@ pub const API_URL_BASE: &str = "https://api.mistral.ai/v1";
pub enum Model {
    #[serde(rename = "open-mistral-7b")]
    OpenMistral7b,
    #[serde(rename = "open-mistral-8x7b")]
    OpenMistral8x7b,
    #[serde(rename = "open-mixtral-8x7b")]
    OpenMixtral8x7b,
    #[serde(rename = "open-mixtral-8x22b")]
    OpenMixtral8x22b,
    #[serde(rename = "mistral-tiny")]
    MistralTiny,
    #[serde(rename = "mistral-small-latest")]
    MistralSmallLatest,
    #[serde(rename = "mistral-medium-latest")]
    MistralMediumLatest,
    #[serde(rename = "mistral-large-latest")]
    MistralLargeLatest,
    #[serde(rename = "codestral-latest")]
    CodestralLatest,
}

#[derive(Clone, Debug, Eq, PartialEq, Deserialize, Serialize)]
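As a quick illustration of what the `serde(rename = ...)` attributes provide, a minimal sketch (assuming `serde_json`, which the crate already uses):

```rust
use mistralai_client::v1::constants::Model;

fn main() {
    // The rename attributes map each variant to the identifier the API expects.
    let as_json = serde_json::to_string(&Model::MistralSmallLatest).unwrap();
    assert_eq!(as_json, "\"mistral-small-latest\"");
}
```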
src/v1/embedding.rs
@@ -2,6 +2,9 @@ use serde::{Deserialize, Serialize};

use crate::v1::{common, constants};

// -----------------------------------------------------------------------------
// Request

#[derive(Debug)]
pub struct EmbeddingRequestOptions {
    pub encoding_format: Option<EmbeddingRequestEncodingFormat>,
@@ -43,6 +46,9 @@ pub enum EmbeddingRequestEncodingFormat {
    float,
}

// -----------------------------------------------------------------------------
// Response

#[derive(Clone, Debug, Deserialize, Serialize)]
pub struct EmbeddingResponse {
    pub id: String,
src/v1/error.rs
@@ -14,8 +14,10 @@ impl Error for ApiError {}

#[derive(Debug, PartialEq, thiserror::Error)]
pub enum ClientError {
    #[error("You must either set the `MISTRAL_API_KEY` environment variable or specify it in `Client::new(api_key, ...).")]
    ApiKeyError,
    #[error(
        "You must either set the `MISTRAL_API_KEY` environment variable or specify it in `Client::new(api_key, ...)`."
    )]
    MissingApiKey,
    #[error("Failed to read the response text.")]
    ReadResponseTextError,
    UnreadableResponseText,
}
src/v1/mod.rs
@@ -1,7 +1,10 @@
pub mod chat_completion;
pub mod chat;
pub mod chat_stream;
pub mod client;
pub mod common;
pub mod constants;
pub mod embedding;
pub mod error;
pub mod model_list;
pub mod tool;
pub mod utils;
@@ -1,11 +1,15 @@
|
||||
use serde::{Deserialize, Serialize};
|
||||
|
||||
// -----------------------------------------------------------------------------
|
||||
// Response
|
||||
|
||||
#[derive(Clone, Debug, Deserialize, Serialize)]
|
||||
pub struct ModelListResponse {
|
||||
pub object: String,
|
||||
pub data: Vec<ModelListData>,
|
||||
}
|
||||
|
||||
/// See: https://docs.mistral.ai/api/#tag/models
|
||||
#[derive(Clone, Debug, Deserialize, Serialize)]
|
||||
pub struct ModelListData {
|
||||
pub id: String,
|
||||
@@ -13,27 +17,21 @@ pub struct ModelListData {
|
||||
/// Unix timestamp (in seconds).
|
||||
pub created: u32,
|
||||
pub owned_by: String,
|
||||
pub permission: Vec<ModelListDataPermission>,
|
||||
// TODO Check this prop (seen in API responses but undocumented).
|
||||
// pub root: ???,
|
||||
// TODO Check this prop (seen in API responses but undocumented).
|
||||
// pub parent: ???,
|
||||
pub root: Option<String>,
|
||||
pub archived: bool,
|
||||
pub name: String,
|
||||
pub description: String,
|
||||
pub capabilities: ModelListDataCapabilies,
|
||||
pub max_context_length: u32,
|
||||
pub aliases: Vec<String>,
|
||||
/// ISO 8601 date (`YYYY-MM-DDTHH:MM:SSZ`).
|
||||
pub deprecation: Option<String>,
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, Deserialize, Serialize)]
|
||||
pub struct ModelListDataPermission {
|
||||
pub id: String,
|
||||
pub object: String,
|
||||
/// Unix timestamp (in seconds).
|
||||
pub created: u32,
|
||||
pub allow_create_engine: bool,
|
||||
pub allow_sampling: bool,
|
||||
pub allow_logprobs: bool,
|
||||
pub allow_search_indices: bool,
|
||||
pub allow_view: bool,
|
||||
pub allow_fine_tuning: bool,
|
||||
pub organization: String,
|
||||
pub is_blocking: bool,
|
||||
// TODO Check this prop (seen in API responses but undocumented).
|
||||
// pub group: ???,
|
||||
pub struct ModelListDataCapabilies {
|
||||
pub completion_chat: bool,
|
||||
pub completion_fim: bool,
|
||||
pub function_calling: bool,
|
||||
pub fine_tuning: bool,
|
||||
}
|
||||
|
||||
src/v1/tool.rs (new file, 144 lines)
@@ -0,0 +1,144 @@
|
||||
use async_trait::async_trait;
|
||||
use serde::{Deserialize, Serialize};
|
||||
use std::{any::Any, collections::HashMap, fmt::Debug};
|
||||
|
||||
// -----------------------------------------------------------------------------
|
||||
// Definitions
|
||||
|
||||
#[derive(Clone, Debug, Eq, PartialEq, Deserialize, Serialize)]
|
||||
pub struct ToolCall {
|
||||
pub function: ToolCallFunction,
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, Eq, PartialEq, Deserialize, Serialize)]
|
||||
pub struct ToolCallFunction {
|
||||
pub name: String,
|
||||
pub arguments: String,
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, Deserialize, Serialize)]
|
||||
pub struct Tool {
|
||||
pub r#type: ToolType,
|
||||
pub function: ToolFunction,
|
||||
}
|
||||
impl Tool {
|
||||
pub fn new(
|
||||
function_name: String,
|
||||
function_description: String,
|
||||
function_parameters: Vec<ToolFunctionParameter>,
|
||||
) -> Self {
|
||||
let properties: HashMap<String, ToolFunctionParameterProperty> = function_parameters
|
||||
.into_iter()
|
||||
.map(|param| {
|
||||
(
|
||||
param.name,
|
||||
ToolFunctionParameterProperty {
|
||||
r#type: param.r#type,
|
||||
description: param.description,
|
||||
},
|
||||
)
|
||||
})
|
||||
.collect();
|
||||
let property_names = properties.keys().cloned().collect();
|
||||
|
||||
let parameters = ToolFunctionParameters {
|
||||
r#type: ToolFunctionParametersType::Object,
|
||||
properties,
|
||||
required: property_names,
|
||||
};
|
||||
|
||||
Self {
|
||||
r#type: ToolType::Function,
|
||||
function: ToolFunction {
|
||||
name: function_name,
|
||||
description: function_description,
|
||||
parameters,
|
||||
},
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// -----------------------------------------------------------------------------
|
||||
// Request
|
||||
|
||||
#[derive(Clone, Debug, Deserialize, Serialize)]
|
||||
pub struct ToolFunction {
|
||||
name: String,
|
||||
description: String,
|
||||
parameters: ToolFunctionParameters,
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, Deserialize, Serialize)]
|
||||
pub struct ToolFunctionParameter {
|
||||
name: String,
|
||||
description: String,
|
||||
r#type: ToolFunctionParameterType,
|
||||
}
|
||||
impl ToolFunctionParameter {
|
||||
pub fn new(name: String, description: String, r#type: ToolFunctionParameterType) -> Self {
|
||||
Self {
|
||||
name,
|
||||
r#type,
|
||||
description,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, Deserialize, Serialize)]
|
||||
pub struct ToolFunctionParameters {
|
||||
r#type: ToolFunctionParametersType,
|
||||
properties: HashMap<String, ToolFunctionParameterProperty>,
|
||||
required: Vec<String>,
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, Deserialize, Serialize)]
|
||||
pub struct ToolFunctionParameterProperty {
|
||||
r#type: ToolFunctionParameterType,
|
||||
description: String,
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, Eq, PartialEq, Deserialize, Serialize)]
|
||||
pub enum ToolFunctionParametersType {
|
||||
#[serde(rename = "object")]
|
||||
Object,
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, Eq, PartialEq, Deserialize, Serialize)]
|
||||
pub enum ToolFunctionParameterType {
|
||||
#[serde(rename = "string")]
|
||||
String,
|
||||
}
|
||||
|
||||
#[derive(Clone, Debug, Eq, PartialEq, Deserialize, Serialize)]
|
||||
pub enum ToolType {
|
||||
#[serde(rename = "function")]
|
||||
Function,
|
||||
}
|
||||
|
||||
/// An enum representing how functions should be called.
|
||||
#[derive(Clone, Debug, Eq, PartialEq, Deserialize, Serialize)]
|
||||
pub enum ToolChoice {
|
||||
/// The model is forced to call a function.
|
||||
#[serde(rename = "any")]
|
||||
Any,
|
||||
/// The model can choose to either generate a message or call a function.
|
||||
#[serde(rename = "auto")]
|
||||
Auto,
|
||||
/// The model won't call a function and will generate a message instead.
|
||||
#[serde(rename = "none")]
|
||||
None,
|
||||
}
|
||||
|
||||
// -----------------------------------------------------------------------------
|
||||
// Custom
|
||||
|
||||
#[async_trait]
|
||||
pub trait Function: Send {
|
||||
async fn execute(&self, arguments: String) -> Box<dyn Any + Send>;
|
||||
}
|
||||
|
||||
impl Debug for dyn Function {
|
||||
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
|
||||
write!(f, "Function()")
|
||||
}
|
||||
}
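For reference, a minimal sketch of building a `Tool` with the `Tool::new` constructor defined above, mirroring the usage in the tests further down; pretty-printing it with `serde_json` (already a dependency of the crate) shows the JSON-schema-style `parameters` object it assembles.

```rust
use mistralai_client::v1::tool::{Tool, ToolFunctionParameter, ToolFunctionParameterType};

fn main() {
    // Every parameter becomes a `properties` entry and is also listed under `required`.
    let tool = Tool::new(
        "get_city_temperature".to_string(),
        "Get the current temperature in a city.".to_string(),
        vec![ToolFunctionParameter::new(
            "city".to_string(),
            "The name of the city.".to_string(),
            ToolFunctionParameterType::String,
        )],
    );

    println!("{}", serde_json::to_string_pretty(&tool).unwrap());
}
```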
|
||||
src/v1/utils.rs (new file, 32 lines)
@@ -0,0 +1,32 @@
use std::fmt::Debug;

use log::debug;
use serde::Serialize;

pub fn prettify_json_string(json: &String) -> String {
    match serde_json::from_str::<serde_json::Value>(&json) {
        Ok(json_value) => {
            serde_json::to_string_pretty(&json_value).unwrap_or_else(|_| json.to_owned())
        }
        Err(_) => json.to_owned(),
    }
}

pub fn prettify_json_struct<T: Debug + Serialize>(value: T) -> String {
    match serde_json::to_string_pretty(&value) {
        Ok(pretty_json) => pretty_json,
        Err(_) => format!("{:?}", value),
    }
}

pub fn debug_pretty_json_from_string(label: &str, json: &String) -> () {
    let pretty_json = prettify_json_string(json);

    debug!("{label}: {}", pretty_json);
}

pub fn debug_pretty_json_from_struct<T: Debug + Serialize>(label: &str, value: &T) -> () {
    let pretty_json = prettify_json_struct(value);

    debug!("{label}: {}", pretty_json);
}
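These helpers only produce output when a `debug`-level logger is installed. A minimal sketch, assuming `env_logger` (which the test setup below relies on) and an illustrative `Payload` struct:

```rust
use mistralai_client::v1::utils::debug_pretty_json_from_struct;
use serde::Serialize;

#[derive(Debug, Serialize)]
struct Payload {
    model: String,
}

fn main() {
    // RUST_LOG=debug must be set for the `debug!` output to appear.
    env_logger::init();

    let payload = Payload {
        model: "open-mistral-7b".to_string(),
    };
    debug_pretty_json_from_struct("Request Body", &payload);
}
```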
tests/setup.rs (new file, 3 lines)
@@ -0,0 +1,3 @@
pub fn setup() {
    let _ = env_logger::builder().is_test(true).try_init();
}
@@ -1,21 +1,25 @@
|
||||
use jrest::expect;
|
||||
use mistralai_client::v1::{
|
||||
chat_completion::{ChatCompletionMessage, ChatCompletionMessageRole, ChatCompletionParams},
|
||||
chat::{ChatMessage, ChatMessageRole, ChatParams, ChatResponseChoiceFinishReason},
|
||||
client::Client,
|
||||
constants::Model,
|
||||
tool::{Tool, ToolChoice, ToolFunctionParameter, ToolFunctionParameterType},
|
||||
};
|
||||
|
||||
mod setup;
|
||||
|
||||
#[tokio::test]
|
||||
async fn test_client_chat_async() {
|
||||
setup::setup();
|
||||
|
||||
let client = Client::new(None, None, None, None).unwrap();
|
||||
|
||||
let model = Model::OpenMistral7b;
|
||||
let messages = vec![ChatCompletionMessage {
|
||||
role: ChatCompletionMessageRole::user,
|
||||
content: "Just guess the next word: \"Eiffel ...\"?".to_string(),
|
||||
}];
|
||||
let options = ChatCompletionParams {
|
||||
temperature: Some(0.0),
|
||||
let messages = vec![ChatMessage::new_user_message(
|
||||
"Guess the next word: \"Eiffel ...\"?",
|
||||
)];
|
||||
let options = ChatParams {
|
||||
temperature: 0.0,
|
||||
random_seed: Some(42),
|
||||
..Default::default()
|
||||
};
|
||||
@@ -27,11 +31,74 @@ async fn test_client_chat_async() {
|
||||
|
||||
expect!(response.model).to_be(Model::OpenMistral7b);
|
||||
expect!(response.object).to_be("chat.completion".to_string());
|
||||
|
||||
expect!(response.choices.len()).to_be(1);
|
||||
expect!(response.choices[0].index).to_be(0);
|
||||
expect!(response.choices[0].message.role.clone()).to_be(ChatCompletionMessageRole::assistant);
|
||||
expect!(response.choices[0].message.content.clone())
|
||||
.to_be("Tower. The Eiffel Tower is a famous landmark in Paris, France.".to_string());
|
||||
expect!(response.choices[0].finish_reason.clone()).to_be(ChatResponseChoiceFinishReason::Stop);
|
||||
|
||||
expect!(response.choices[0].message.role.clone()).to_be(ChatMessageRole::Assistant);
|
||||
expect!(response.choices[0]
|
||||
.message
|
||||
.content
|
||||
.clone()
|
||||
.contains("Tower"))
|
||||
.to_be(true);
|
||||
|
||||
expect!(response.usage.prompt_tokens).to_be_greater_than(0);
|
||||
expect!(response.usage.completion_tokens).to_be_greater_than(0);
|
||||
expect!(response.usage.total_tokens).to_be_greater_than(0);
|
||||
}
|
||||
|
||||
#[tokio::test]
|
||||
async fn test_client_chat_async_with_function_calling() {
|
||||
setup::setup();
|
||||
|
||||
let tools = vec![Tool::new(
|
||||
"get_city_temperature".to_string(),
|
||||
"Get the current temperature in a city.".to_string(),
|
||||
vec![ToolFunctionParameter::new(
|
||||
"city".to_string(),
|
||||
"The name of the city.".to_string(),
|
||||
ToolFunctionParameterType::String,
|
||||
)],
|
||||
)];
|
||||
|
||||
let client = Client::new(None, None, None, None).unwrap();
|
||||
|
||||
let model = Model::MistralSmallLatest;
|
||||
let messages = vec![ChatMessage::new_user_message(
|
||||
"What's the current temperature in Paris?",
|
||||
)];
|
||||
let options = ChatParams {
|
||||
temperature: 0.0,
|
||||
random_seed: Some(42),
|
||||
tool_choice: Some(ToolChoice::Any),
|
||||
tools: Some(tools),
|
||||
..Default::default()
|
||||
};
|
||||
|
||||
let response = client
|
||||
.chat_async(model, messages, Some(options))
|
||||
.await
|
||||
.unwrap();
|
||||
|
||||
expect!(response.model).to_be(Model::MistralSmallLatest);
|
||||
expect!(response.object).to_be("chat.completion".to_string());
|
||||
|
||||
expect!(response.choices.len()).to_be(1);
|
||||
expect!(response.choices[0].index).to_be(0);
|
||||
expect!(response.choices[0].finish_reason.clone())
|
||||
.to_be(ChatResponseChoiceFinishReason::ToolCalls);
|
||||
|
||||
expect!(response.choices[0].message.role.clone()).to_be(ChatMessageRole::Assistant);
|
||||
expect!(response.choices[0].message.content.clone()).to_be("".to_string());
|
||||
// expect!(response.choices[0].message.tool_calls.clone()).to_be(Some(vec![ToolCall {
|
||||
// function: ToolCallFunction {
|
||||
// name: "get_city_temperature".to_string(),
|
||||
// arguments: "{\"city\": \"Paris\"}".to_string(),
|
||||
// },
|
||||
// }]));
|
||||
|
||||
expect!(response.usage.prompt_tokens).to_be_greater_than(0);
|
||||
expect!(response.usage.completion_tokens).to_be_greater_than(0);
|
||||
expect!(response.usage.total_tokens).to_be_greater_than(0);
|
||||
|
||||
tests/v1_client_chat_stream_test.rs (new file, 40 lines)
@@ -0,0 +1,40 @@
|
||||
// use futures::stream::StreamExt;
|
||||
// use jrest::expect;
|
||||
// use mistralai_client::v1::{
|
||||
// chat_completion::{ChatParams, ChatMessage, ChatMessageRole},
|
||||
// client::Client,
|
||||
// constants::Model,
|
||||
// };
|
||||
|
||||
// #[tokio::test]
|
||||
// async fn test_client_chat_stream() {
|
||||
// let client = Client::new(None, None, None, None).unwrap();
|
||||
|
||||
// let model = Model::OpenMistral7b;
|
||||
// let messages = vec![ChatMessage::new_user_message(
|
||||
// "Just guess the next word: \"Eiffel ...\"?",
|
||||
// )];
|
||||
// let options = ChatParams {
|
||||
// temperature: Some(0.0),
|
||||
// random_seed: Some(42),
|
||||
// ..Default::default()
|
||||
// };
|
||||
|
||||
// let stream_result = client.chat_stream(model, messages, Some(options)).await;
|
||||
// let mut stream = stream_result.expect("Failed to create stream.");
|
||||
// while let Some(maybe_chunk_result) = stream.next().await {
|
||||
// match maybe_chunk_result {
|
||||
// Some(Ok(chunk)) => {
|
||||
// if chunk.choices[0].delta.role == Some(ChatMessageRole::Assistant)
|
||||
// || chunk.choices[0].finish_reason == Some("stop".to_string())
|
||||
// {
|
||||
// expect!(chunk.choices[0].delta.content.len()).to_be(0);
|
||||
// } else {
|
||||
// expect!(chunk.choices[0].delta.content.len()).to_be_greater_than(0);
|
||||
// }
|
||||
// }
|
||||
// Some(Err(error)) => eprintln!("Error processing chunk: {:?}", error),
|
||||
// None => (),
|
||||
// }
|
||||
// }
|
||||
// }
|
||||
@@ -1,21 +1,25 @@
|
||||
use jrest::expect;
|
||||
use mistralai_client::v1::{
|
||||
chat_completion::{ChatCompletionMessage, ChatCompletionMessageRole, ChatCompletionParams},
|
||||
chat::{ChatMessage, ChatMessageRole, ChatParams, ChatResponseChoiceFinishReason},
|
||||
client::Client,
|
||||
constants::Model,
|
||||
tool::{Tool, ToolChoice, ToolFunctionParameter, ToolFunctionParameterType},
|
||||
};
|
||||
|
||||
mod setup;
|
||||
|
||||
#[test]
|
||||
fn test_client_chat() {
|
||||
setup::setup();
|
||||
|
||||
let client = Client::new(None, None, None, None).unwrap();
|
||||
|
||||
let model = Model::OpenMistral7b;
|
||||
let messages = vec![ChatCompletionMessage {
|
||||
role: ChatCompletionMessageRole::user,
|
||||
content: "Just guess the next word: \"Eiffel ...\"?".to_string(),
|
||||
}];
|
||||
let options = ChatCompletionParams {
|
||||
temperature: Some(0.0),
|
||||
let messages = vec![ChatMessage::new_user_message(
|
||||
"Guess the next word: \"Eiffel ...\"?",
|
||||
)];
|
||||
let options = ChatParams {
|
||||
temperature: 0.0,
|
||||
random_seed: Some(42),
|
||||
..Default::default()
|
||||
};
|
||||
@@ -26,9 +30,57 @@ fn test_client_chat() {
|
||||
expect!(response.object).to_be("chat.completion".to_string());
|
||||
expect!(response.choices.len()).to_be(1);
|
||||
expect!(response.choices[0].index).to_be(0);
|
||||
expect!(response.choices[0].message.role.clone()).to_be(ChatCompletionMessageRole::assistant);
|
||||
expect!(response.choices[0].message.content.clone())
|
||||
.to_be("Tower. The Eiffel Tower is a famous landmark in Paris, France.".to_string());
|
||||
expect!(response.choices[0].message.role.clone()).to_be(ChatMessageRole::Assistant);
|
||||
expect!(response.choices[0]
|
||||
.message
|
||||
.content
|
||||
.clone()
|
||||
.contains("Tower"))
|
||||
.to_be(true);
|
||||
expect!(response.choices[0].finish_reason.clone()).to_be(ChatResponseChoiceFinishReason::Stop);
|
||||
expect!(response.usage.prompt_tokens).to_be_greater_than(0);
|
||||
expect!(response.usage.completion_tokens).to_be_greater_than(0);
|
||||
expect!(response.usage.total_tokens).to_be_greater_than(0);
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_client_chat_with_function_calling() {
|
||||
setup::setup();
|
||||
|
||||
let tools = vec![Tool::new(
|
||||
"get_city_temperature".to_string(),
|
||||
"Get the current temperature in a city.".to_string(),
|
||||
vec![ToolFunctionParameter::new(
|
||||
"city".to_string(),
|
||||
"The name of the city.".to_string(),
|
||||
ToolFunctionParameterType::String,
|
||||
)],
|
||||
)];
|
||||
|
||||
let client = Client::new(None, None, None, None).unwrap();
|
||||
|
||||
let model = Model::MistralSmallLatest;
|
||||
let messages = vec![ChatMessage::new_user_message(
|
||||
"What's the current temperature in Paris?",
|
||||
)];
|
||||
let options = ChatParams {
|
||||
temperature: 0.0,
|
||||
random_seed: Some(42),
|
||||
tool_choice: Some(ToolChoice::Auto),
|
||||
tools: Some(tools),
|
||||
..Default::default()
|
||||
};
|
||||
|
||||
let response = client.chat(model, messages, Some(options)).unwrap();
|
||||
|
||||
expect!(response.model).to_be(Model::MistralSmallLatest);
|
||||
expect!(response.object).to_be("chat.completion".to_string());
|
||||
expect!(response.choices.len()).to_be(1);
|
||||
expect!(response.choices[0].index).to_be(0);
|
||||
expect!(response.choices[0].message.role.clone()).to_be(ChatMessageRole::Assistant);
|
||||
expect!(response.choices[0].message.content.clone()).to_be("".to_string());
|
||||
expect!(response.choices[0].finish_reason.clone())
|
||||
.to_be(ChatResponseChoiceFinishReason::ToolCalls);
|
||||
expect!(response.usage.prompt_tokens).to_be_greater_than(0);
|
||||
expect!(response.usage.completion_tokens).to_be_greater_than(0);
|
||||
expect!(response.usage.total_tokens).to_be_greater_than(0);
|
||||
|
||||
@@ -9,12 +9,4 @@ async fn test_client_list_models_async() {
|
||||
|
||||
expect!(response.object).to_be("list".to_string());
|
||||
expect!(response.data.len()).to_be_greater_than(0);
|
||||
|
||||
// let open_mistral_7b_data_item = response
|
||||
// .data
|
||||
// .iter()
|
||||
// .find(|item| item.id == "open-mistral-7b")
|
||||
// .unwrap();
|
||||
|
||||
// expect!(open_mistral_7b_data_item.id).to_be("open-mistral-7b".to_string());
|
||||
}
|
||||
|
||||
@@ -9,12 +9,4 @@ fn test_client_list_models() {
|
||||
|
||||
expect!(response.object).to_be("list".to_string());
|
||||
expect!(response.data.len()).to_be_greater_than(0);
|
||||
|
||||
// let open_mistral_7b_data_item = response
|
||||
// .data
|
||||
// .iter()
|
||||
// .find(|item| item.id == "open-mistral-7b")
|
||||
// .unwrap();
|
||||
|
||||
// expect!(open_mistral_7b_data_item.id).to_be("open-mistral-7b".to_string());
|
||||
}
|
||||
|
||||
@@ -1,6 +1,11 @@
|
||||
use jrest::expect;
|
||||
use mistralai_client::v1::{client::Client, error::ClientError};
|
||||
|
||||
#[derive(Debug)]
|
||||
struct _Foo {
|
||||
_client: Client,
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_client_new_with_none_params() {
|
||||
let maybe_original_mistral_api_key = std::env::var("MISTRAL_API_KEY").ok();
|
||||
@@ -26,6 +31,37 @@ fn test_client_new_with_none_params() {
|
||||
fn test_client_new_with_all_params() {
|
||||
let maybe_original_mistral_api_key = std::env::var("MISTRAL_API_KEY").ok();
|
||||
std::env::remove_var("MISTRAL_API_KEY");
|
||||
|
||||
let api_key = Some("test_api_key_from_param".to_string());
|
||||
let endpoint = Some("https://example.org".to_string());
|
||||
let max_retries = Some(10);
|
||||
let timeout = Some(20);
|
||||
|
||||
let client = Client::new(
|
||||
api_key.clone(),
|
||||
endpoint.clone(),
|
||||
max_retries.clone(),
|
||||
timeout.clone(),
|
||||
)
|
||||
.unwrap();
|
||||
|
||||
expect!(client.api_key).to_be(api_key.unwrap());
|
||||
expect!(client.endpoint).to_be(endpoint.unwrap());
|
||||
expect!(client.max_retries).to_be(max_retries.unwrap());
|
||||
expect!(client.timeout).to_be(timeout.unwrap());
|
||||
|
||||
match maybe_original_mistral_api_key {
|
||||
Some(original_mistral_api_key) => {
|
||||
std::env::set_var("MISTRAL_API_KEY", original_mistral_api_key)
|
||||
}
|
||||
None => std::env::remove_var("MISTRAL_API_KEY"),
|
||||
}
|
||||
}
|
||||
|
||||
#[test]
|
||||
fn test_client_new_with_api_key_as_both_env_and_param() {
|
||||
let maybe_original_mistral_api_key = std::env::var("MISTRAL_API_KEY").ok();
|
||||
std::env::remove_var("MISTRAL_API_KEY");
|
||||
std::env::set_var("MISTRAL_API_KEY", "test_api_key_from_env");
|
||||
|
||||
let api_key = Some("test_api_key_from_param".to_string());
|
||||
@@ -62,8 +98,8 @@ fn test_client_new_with_missing_api_key() {
|
||||
let call = || Client::new(None, None, None, None);
|
||||
|
||||
match call() {
|
||||
Ok(_) => panic!("Expected `ClientError::ApiKeyError` but got Ok.`"),
|
||||
Err(error) => assert_eq!(error, ClientError::ApiKeyError),
|
||||
Ok(_) => panic!("Expected `ClientError::MissingApiKey` but got Ok.`"),
|
||||
Err(error) => assert_eq!(error, ClientError::MissingApiKey),
|
||||
}
|
||||
|
||||
match maybe_original_mistral_api_key {
|
||||
|
||||
tests/v1_constants_test.rs (new file, 41 lines)
@@ -0,0 +1,41 @@
|
||||
use jrest::expect;
|
||||
use mistralai_client::v1::{
|
||||
chat::{ChatMessage, ChatParams},
|
||||
client::Client,
|
||||
constants::Model,
|
||||
};
|
||||
|
||||
#[test]
|
||||
fn test_model_constant() {
|
||||
let models = vec![
|
||||
Model::OpenMistral7b,
|
||||
Model::OpenMixtral8x7b,
|
||||
Model::OpenMixtral8x22b,
|
||||
Model::MistralTiny,
|
||||
Model::MistralSmallLatest,
|
||||
Model::MistralMediumLatest,
|
||||
Model::MistralLargeLatest,
|
||||
Model::CodestralLatest,
|
||||
];
|
||||
|
||||
let client = Client::new(None, None, None, None).unwrap();
|
||||
|
||||
let messages = vec![ChatMessage::new_user_message("A number between 0 and 100?")];
|
||||
let options = ChatParams {
|
||||
temperature: 0.0,
|
||||
random_seed: Some(42),
|
||||
..Default::default()
|
||||
};
|
||||
|
||||
for model in models {
|
||||
let response = client
|
||||
.chat(model.clone(), messages.clone(), Some(options.clone()))
|
||||
.unwrap();
|
||||
|
||||
expect!(response.model).to_be(model);
|
||||
expect!(response.object).to_be("chat.completion".to_string());
|
||||
expect!(response.choices.len()).to_be(1);
|
||||
expect!(response.choices[0].index).to_be(0);
|
||||
expect!(response.choices[0].message.content.len()).to_be_greater_than(0);
|
||||
}
|
||||
}
|
||||
tests/v1_tool_test.rs (new file, 7 lines)
@@ -0,0 +1,7 @@
use mistralai_client::v1::client::Client;

trait _Trait: Send {}
struct _Foo {
    _dummy: Client,
}
impl _Trait for _Foo {}