59 Commits

Author SHA1 Message Date
Ivan Gabriele
0c097aa56d ci(release): v0.11.0 2024-06-22 14:05:11 +02:00
Ivan Gabriele
e6539c0ccf docs(changelog): update 2024-06-22 14:05:04 +02:00
Ivan Gabriele
30156c5273 test: remove useless setup in constants test 2024-06-22 14:02:52 +02:00
Ivan Gabriele
ecd0c3028f feat(constants): add OpenMixtral8x22b, MistralTiny & CodestralLatest to Model enum 2024-06-22 13:22:57 +02:00
Ivan Gabriele
0df67b1b25 fix(chat): implement Clone trait for ChatParams & ResponseFormat 2024-06-22 13:09:15 +02:00
Ivan Gabriele
f7d012b280 ci(release): v0.10.0 2024-06-07 16:55:50 +02:00
Ivan Gabriele
5b5bd2d68e docs(changelog): update 2024-06-07 16:55:39 +02:00
Xavier Gillard
2fc0642a5e feat(chat): add the 'system' and 'tool' message roles (#10)
* add the 'system' and 'tool' message roles (see: https://docs.mistral.ai/capabilities/completion/ )

* docs(chat): add official doc link in ChatMessageRole

* ci(github): listen to pull_request event in Test workflow

---------

Co-authored-by: Ivan Gabriele <ivan.gabriele@protonmail.com>
2024-06-07 16:49:55 +02:00
Ivan Gabriele
cf68a77320 feat(chat)!: change safe_prompt, temperature & top_p to non-Option types
BREAKING CHANGE:
- `Chat::ChatParams.safe_prompt` & `Chat::ChatRequest.safe_prompt` are now `bool` instead of `Option<bool>`. Default is `false`.
- `Chat::ChatParams.temperature` & `Chat::ChatRequest.temperature` are now `f32` instead of `Option<f32>`. Default is `0.7`.
- `Chat::ChatParams.top_p` & `Chat::ChatRequest.top_p` are now `f32` instead of `Option<f32>`. Default is `1.0`.
2024-06-07 16:00:10 +02:00
Ivan Gabriele
e61ace9a18 test(chat): simplify chat response message content check to cover variations 2024-06-07 14:36:49 +02:00
Ivan Gabriele
64034402ca build(makefile): fix env file loading 2024-06-07 14:36:48 +02:00
Nick Anderson
85c3611afb feat(chat): add response_format for JSON return values 2024-06-07 14:36:37 +02:00
seurimas
da5fe54115 fix(chat): skip serializing tool_calls if null, to avoid 422 error 2024-06-07 14:12:22 +02:00
Ivan Gabriele
7a5e0679c1 ci(release): v0.9.0 2024-04-13 14:06:55 +02:00
Ivan Gabriele
99d9d099e2 docs(changelog): update 2024-04-13 13:56:20 +02:00
Ivan Gabriele
91fb775132 ci(makefile): fix conventional-changelog-cli suffix 2024-04-13 13:55:22 +02:00
Ivan Gabriele
7474aa6730 ci(makefile): prefix conventional-changelog with npx 2024-04-13 13:54:05 +02:00
Ivan Gabriele
6a99eca49c fix!: fix typo in OpenMixtral8x7b model name (#8)
BREAKING CHANGE: `Model.OpenMistral8x7b` has been renamed to `Model.OpenMixtral8x7b`.
2024-04-13 13:49:54 +02:00
renovate[bot]
fccd59c0cc fix(deps): update rust crate reqwest to 0.12.0 (#6)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2024-04-13 13:38:35 +02:00
Ivan Gabriele
a463cb3106 docs(github): update issue templates 2024-03-10 04:36:08 +01:00
Ivan Gabriele
8bee874bd4 ci(release): v0.8.0 2024-03-09 11:45:06 +01:00
Ivan Gabriele
16464a4c3d docs(changelog): update 2024-03-09 11:44:57 +01:00
Ivan Gabriele
a4c2d4623d ci(github): fix examples test 2024-03-09 11:43:06 +01:00
Ivan Gabriele
ab91154d35 docs(readme): update examples 2024-03-09 11:41:39 +01:00
Ivan Gabriele
74bf8a96ee feat!: add function calling support to client.chat() & client.chat_async()
BREAKING CHANGE: Too many to count in this version. Check the README examples.
2024-03-09 11:40:07 +01:00
Ivan Gabriele
9430d42382 ci(release): v0.7.0 2024-03-05 02:49:59 +01:00
Ivan Gabriele
e7d844dce9 docs(changelog): update 2024-03-05 02:49:52 +01:00
Ivan Gabriele
29566f7948 ci(github): split documentation tests into a separate job 2024-03-05 02:48:24 +01:00
Ivan Gabriele
72bae8817a docs: add client.chat*() documentation 2024-03-05 02:40:49 +01:00
Ivan Gabriele
08b042506d test(coverage): migrate from tarpaulin to llvm-cov 2024-03-05 02:34:50 +01:00
Ivan Gabriele
efcd93953a build(makefile): add --skip-clean option to test-cover command 2024-03-05 01:36:02 +01:00
Ivan Gabriele
ea99a075ef build(makefile): remove wrong --nocapture option from test-doc command 2024-03-05 00:59:13 +01:00
Ivan Gabriele
ccf3d1431a build(makefile): add doc command 2024-03-05 00:55:07 +01:00
Ivan Gabriele
a8bfb5333f ci(github): add documentation tests 2024-03-05 00:50:21 +01:00
Ivan Gabriele
ef5d475e2d fix!: fix failure when api key as param and not env
BREAKING CHANGE:

- Rename `ClientError.ApiKeyError` to `MissingApiKey`.
- Rename `ClientError.ReadResponseTextError` to `ClientError.UnreadableResponseText`.
2024-03-04 21:12:08 +01:00
Ivan Gabriele
5217fcfb94 ci(release): v0.6.0 2024-03-04 08:20:46 +01:00
Ivan Gabriele
6b1cc5c058 docs(changelog): update 2024-03-04 08:20:38 +01:00
Ivan Gabriele
4a4219d3ea feat!: add client.chat_stream() method
BREAKING CHANGE: You can't set the `stream` option for `client.chat*()`.

Either use `client.chat_stream()` if you want to use streams
or use `client.chat()` / `client.chat_async()` otherwise.
2024-03-04 08:16:10 +01:00
Ivan Gabriele
f91e794d71 refactor: remove useless error mappers 2024-03-04 06:54:24 +01:00
Ivan Gabriele
7c96a4a88d ci(release): v0.5.0 2024-03-04 06:39:54 +01:00
Ivan Gabriele
14437bf609 docs(changelog): update 2024-03-04 06:39:47 +01:00
Ivan Gabriele
3c228914f7 feat: add client.embeddings_async() method 2024-03-04 06:39:21 +01:00
Ivan Gabriele
b69f7c617c feat: add client.list_models_async() method 2024-03-04 06:33:38 +01:00
Ivan Gabriele
75788b9395 refactor: migrate to reqwest-only 2024-03-04 06:33:38 +01:00
renovate[bot]
a862b92c98 chore(deps): update codecov/codecov-action action to v4 (#2)
Co-authored-by: renovate[bot] <29139614+renovate[bot]@users.noreply.github.com>
2024-03-04 05:04:56 +01:00
Ivan Gabriele
47c9b9b4fe ci(release): v0.4.0 2024-03-04 04:58:33 +01:00
Ivan Gabriele
791bef34b3 docs(changelog): update 2024-03-04 04:58:26 +01:00
Ivan Gabriele
1dd59f6704 feat: add client.chat_async() method 2024-03-04 04:57:48 +01:00
Ivan Gabriele
33876183e4 feat!: wrap Client::new() return in a Result
BREAKING CHANGE: `Client::new()` now returns a `Result`.
2024-03-04 04:43:22 +01:00
Ivan Gabriele
1deab88251 feat!: add missing api key error
BREAKING CHANGE: `APIError` is renamed to `ApiError`.
2024-03-04 04:30:13 +01:00
Ivan Gabriele
b0a3f10c9f ci(release): add custom pre-release-commit-message 2024-03-04 03:32:23 +01:00
Ivan Gabriele
bbba6b9878 docs(readme): fix list models example 2024-03-04 03:27:20 +01:00
Ivan Gabriele
0386b95b7b chore: Release mistralai-client version 0.3.0 2024-03-04 03:25:41 +01:00
Ivan Gabriele
c61f2278bb docs(changelog): update 2024-03-04 03:25:35 +01:00
Ivan Gabriele
4c8e330c95 ci(release): fix pre-release-replacements regex 2024-03-04 03:25:20 +01:00
Ivan Gabriele
64c7f2feb5 ci(release): fix type in pre-release-replacements prop 2024-03-04 03:21:13 +01:00
Ivan Gabriele
f44d951247 feat!: add client.embeddings() method
BREAKING CHANGE: Models are now enforced by `Model` & `EmbedModel` enums.
2024-03-04 03:16:59 +01:00
Ivan Gabriele
4e702aa48e refactor: rename ListModels* to ModelList* 2024-03-04 01:56:48 +01:00
Ivan Gabriele
809af31dd0 ci(release): fix changelog version replacement 2024-03-03 19:49:27 +01:00
44 changed files with 2628 additions and 387 deletions

View File

@@ -1,3 +1,3 @@
# This key is only used for development purposes.
# You'll only need one if you want to contribute to this library.
MISTRAL_API_KEY=
export MISTRAL_API_KEY=

View File

@@ -1,17 +1,16 @@
---
name: Bug report
about: Create a report to help us improve
title: ''
labels: ''
assignees: ''
title: ""
labels: "bug"
assignees: ""
---
**Describe the bug**
A clear and concise description of what the bug is.
...
**To Reproduce**
**Reproduction**
Steps to reproduce the behavior:
@@ -20,7 +19,7 @@ Steps to reproduce the behavior:
**Expected behavior**
A clear and concise description of what you expected to happen.
...
**Screenshots**
@@ -32,4 +31,4 @@ If applicable, what version did you use?
**Environment**
Add useful information about your configuration and environment here.
If applicable, add relevant information about your config and environment here.

View File

@@ -1,24 +1,19 @@
---
name: Feature request
about: Suggest an idea for this project
title: ''
labels: ''
assignees: ''
about: Suggest a new idea for the project.
title: ""
labels: "enhancement"
assignees: ""
---
**Is your feature request related to a problem? Please describe.**
**Is your feature request related to some problems?**
A clear and concise description of what the problem is. Ex. I'm always frustrated when [...]
- _Ex. I'm always frustrated when..._
**Describe the solution you'd like**
**What are the solutions you'd like?**
A clear and concise description of what you want to happen.
- _Ex. A new option to..._
**Describe alternatives you've considered**
**Anything else?**
A clear and concise description of any alternative solutions or features you've considered.
**Additional context**
Add any other context or screenshots about the feature request here.
- ...

View File

@@ -1,15 +1,13 @@
name: Test
on: push
on:
pull_request:
push:
jobs:
test:
name: Test
runs-on: ubuntu-latest
container:
image: xd009642/tarpaulin
# https://github.com/xd009642/tarpaulin#github-actions
options: --security-opt seccomp=unconfined
steps:
- name: Checkout
uses: actions/checkout@v4
@@ -17,12 +15,45 @@ jobs:
uses: actions-rs/toolchain@v1
with:
toolchain: 1.76.0
- name: Install cargo-llvm-cov
uses: taiki-e/install-action@cargo-llvm-cov
- name: Run tests (with coverage)
run: make test-cover
run: cargo llvm-cov --lcov --output-path ./lcov.info
env:
MISTRAL_API_KEY: ${{ secrets.MISTRAL_API_KEY }}
- name: Upload tests coverage
uses: codecov/codecov-action@v3
uses: codecov/codecov-action@v4
with:
fail_ci_if_error: true
files: ./lcov.info
token: ${{ secrets.CODECOV_TOKEN }}
test_documentation:
name: Test Documentation
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@v4
- name: Setup Rust
uses: actions-rs/toolchain@v1
with:
toolchain: 1.76.0
- name: Run documentation tests
run: make test-doc
env:
MISTRAL_API_KEY: ${{ secrets.MISTRAL_API_KEY }}
test_examples:
name: Test Examples
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@v4
- name: Setup Rust
uses: actions-rs/toolchain@v1
with:
toolchain: 1.76.0
- name: Run examples
run: make test-examples
env:
MISTRAL_API_KEY: ${{ secrets.MISTRAL_API_KEY }}

View File

@@ -1,16 +1,112 @@
## [](https://github.com/ivangabriele/mistralai-client-rs/compare/v0.1.0...v) (2024-03-03)
## [0.11.0](https://github.com/ivangabriele/mistralai-client-rs/compare/v0.10.0...v) (2024-06-22)
### Features
* **constants:** add OpenMixtral8x22b, MistralTiny & CodestralLatest to Model enum ([ecd0c30](https://github.com/ivangabriele/mistralai-client-rs/commit/ecd0c3028fdcfab32b867eb1eed86182f5f4ab81))
### Bug Fixes
* **chat:** implement Clone trait for ChatParams & ResponseFormat ([0df67b1](https://github.com/ivangabriele/mistralai-client-rs/commit/0df67b1b2571fb04b636ce015a2daabe629ff352))
## [0.10.0](https://github.com/ivangabriele/mistralai-client-rs/compare/v0.9.0...v) (2024-06-07)
### ⚠ BREAKING CHANGES
* **chat:** - `Chat::ChatParams.safe_prompt` & `Chat::ChatRequest.safe_prompt` are now `bool` instead of `Option<bool>`. Default is `false`.
- `Chat::ChatParams.temperature` & `Chat::ChatRequest.temperature` are now `f32` instead of `Option<f32>`. Default is `0.7`.
- `Chat::ChatParams.top_p` & `Chat::ChatRequest.top_p` are now `f32` instead of `Option<f32>`. Default is `1.0`.
### Features
* **chat:** add response_format for JSON return values ([85c3611](https://github.com/ivangabriele/mistralai-client-rs/commit/85c3611afbbe8df30dfc7512cc381ed304ce4024))
* **chat:** add the 'system' and 'tool' message roles ([#10](https://github.com/ivangabriele/mistralai-client-rs/issues/10)) ([2fc0642](https://github.com/ivangabriele/mistralai-client-rs/commit/2fc0642a5e4c024b15710acaab7735480e8dfe6a))
* **chat:** change safe_prompt, temperature & top_p to non-Option types ([cf68a77](https://github.com/ivangabriele/mistralai-client-rs/commit/cf68a773201ebe0e802face52af388711acf0c27))
### Bug Fixes
* **chat:** skip serializing tool_calls if null, to avoid 422 error ([da5fe54](https://github.com/ivangabriele/mistralai-client-rs/commit/da5fe54115ce622379776661a440e2708b24810c))
## [0.9.0](https://github.com/ivangabriele/mistralai-client-rs/compare/v0.8.0...v) (2024-04-13)
### ⚠ BREAKING CHANGES
* Chat completions must now be called directly from client.chat() without building a request in between.
* `Model.OpenMistral8x7b` has been renamed to `Model.OpenMixtral8x7b`.
### Bug Fixes
* **deps:** update rust crate reqwest to 0.12.0 ([#6](https://github.com/ivangabriele/mistralai-client-rs/issues/6)) ([fccd59c](https://github.com/ivangabriele/mistralai-client-rs/commit/fccd59c0cc783edddec1b404363faabb009eecd6))
* fix typo in OpenMixtral8x7b model name ([#8](https://github.com/ivangabriele/mistralai-client-rs/issues/8)) ([6a99eca](https://github.com/ivangabriele/mistralai-client-rs/commit/6a99eca49c0cc8e3764a56f6dfd7762ec44a4c3b))
## [0.8.0](https://github.com/ivangabriele/mistralai-client-rs/compare/v0.7.0...v) (2024-03-09)
### ⚠ BREAKING CHANGES
* Too many to count in this version. Check the README examples.
### Features
* add client.list_models() method ([814b991](https://github.com/ivangabriele/mistralai-client-rs/commit/814b9918b3aca78bfd606b5b9bb470b70ea2a5c6))
* simplify chat completion call ([7de2b19](https://github.com/ivangabriele/mistralai-client-rs/commit/7de2b19b981f1d65fe5c566fcaf521e4f2a9ced1))
* add function calling support to client.chat() & client.chat_async() ([74bf8a9](https://github.com/ivangabriele/mistralai-client-rs/commit/74bf8a96ee31f9d54ee3d7404619e803a182918b))
## [](https://github.com/ivangabriele/mistralai-client-rs/compare/v0.1.0...v) (2024-03-03)
## [0.7.0](https://github.com/ivangabriele/mistralai-client-rs/compare/v0.6.0...v) (2024-03-05)
### ⚠ BREAKING CHANGES
* - Rename `ClientError.ApiKeyError` to `MissingApiKey`.
- Rename `ClientError.ReadResponseTextError` to `ClientError.UnreadableResponseText`.
### Bug Fixes
* fix failure when api key as param and not env ([ef5d475](https://github.com/ivangabriele/mistralai-client-rs/commit/ef5d475e2d0e3fe040c44d6adabf7249e9962835))
## [0.6.0](https://github.com/ivangabriele/mistralai-client-rs/compare/v0.5.0...v) (2024-03-04)
### ⚠ BREAKING CHANGES
* You can't set the `stream` option for `client.chat*()`.
Either use `client.chat_stream()` if you want to use streams
or use `client.chat()` / `client.chat_async()` otherwise.
### Features
* add client.chat_stream() method ([4a4219d](https://github.com/ivangabriele/mistralai-client-rs/commit/4a4219d3eaa8f0ae953ee6182b36bf464d1c4a21))
## [0.5.0](https://github.com/ivangabriele/mistralai-client-rs/compare/v0.4.0...v) (2024-03-04)
### Features
* add client.embeddings_async() method ([3c22891](https://github.com/ivangabriele/mistralai-client-rs/commit/3c228914f78b0edd4a592091265b88d0bc55568b))
* add client.list_models_async() method ([b69f7c6](https://github.com/ivangabriele/mistralai-client-rs/commit/b69f7c617c15dd63abb61d004636512916d766bb))
## [0.4.0](https://github.com/ivangabriele/mistralai-client-rs/compare/v0.3.0...v) (2024-03-04)
### ⚠ BREAKING CHANGES
* `Client::new()` now returns a `Result`.
* `APIError` is renamed to `ApiError`.
### Features
* add client.chat_async() method ([1dd59f6](https://github.com/ivangabriele/mistralai-client-rs/commit/1dd59f67048c10458ab0382af8fdfe4ed21c82fa))
* add missing api key error ([1deab88](https://github.com/ivangabriele/mistralai-client-rs/commit/1deab88251fc706e0415a5e416ab9aee4b52f6f3))
* wrap Client::new() return in a Result ([3387618](https://github.com/ivangabriele/mistralai-client-rs/commit/33876183e41340f426aa1dd1b6d8b5c05c8e15b9))
## [0.3.0](https://github.com/ivangabriele/mistralai-client-rs/compare/v0.2.0...v) (2024-03-04)
### ⚠ BREAKING CHANGES
* Models are now enforced by `Model` & `EmbedModel` enums.
### Features
* add client.embeddings() method ([f44d951](https://github.com/ivangabriele/mistralai-client-rs/commit/f44d95124767c3a3f14c78c4be3d9c203fac49ad))
## [0.2.0](https://github.com/ivangabriele/mistralai-client-rs/compare/v0.1.0...v) (2024-03-03)
### ⚠ BREAKING CHANGES

View File

@@ -4,7 +4,10 @@
- [Requirements](#requirements)
- [First setup](#first-setup)
- [Optional requirements](#optional-requirements)
- [Local Development](#local-development)
- [Test](#test)
- [Documentation](#documentation)
- [Readme](#readme)
- [Code of Conduct](#code-of-conduct)
- [Commit Message Format](#commit-message-format)
@@ -27,11 +30,21 @@ Then run:
git clone https://github.com/ivangabriele/mistralai-client-rs.git # or your fork
cd ./mistralai-client-rs
cargo build
cp .env.example .env
```
Then edit the `.env` file to set your `MISTRAL_API_KEY`.
> [!NOTE]
> All tests use either the `open-mistral-7b` or `mistral-embed` models and only consume a few dozen tokens.
> So you would have to run them thousands of times to even reach a single dollar of usage.
### Optional requirements
- [cargo-watch](https://github.com/watchexec/cargo-watch#install) for `make test-*-watch`.
- [cargo-llvm-cov](https://github.com/taiki-e/cargo-llvm-cov?tab=readme-ov-file#installation) for `make test-cover`
- [cargo-watch](https://github.com/watchexec/cargo-watch#install) for `make test-watch`.
## Local Development
### Test
@@ -45,11 +58,20 @@ or
make test-watch
```
## Documentation
### Readme
> [!IMPORTANT]
> Do not edit the `README.md` file directly. It is generated from the `README.template.md` file.
1. Edit the `README.template.md` file.
2. Run `make readme` to generate/update the `README.md` file.
## Code of Conduct
Help us keep this project open and inclusive. Please read and follow our [Code of Conduct](./CODE_OF_CONDUCT.md).
## Commit Message Format
This repository follow the [Conventional Commits](https://www.conventionalcommits.org/en/v1.0.0/) specification and
specificaly the [Angular Commit Message Guidelines](https://github.com/angular/angular/blob/main/CONTRIBUTING.md#commit).
This repository follows the [Conventional Commits](https://www.conventionalcommits.org/en/v1.0.0/) specification.

View File

@@ -2,7 +2,7 @@
name = "mistralai-client"
description = "Mistral AI API client library for Rust (unofficial)."
license = "Apache-2.0"
version = "0.2.0"
version = "0.11.0"
edition = "2021"
rust-version = "1.76.0"
@@ -15,12 +15,18 @@ readme = "README.md"
repository = "https://github.com/ivangabriele/mistralai-client-rs"
[dependencies]
minreq = { version = "2.11.0", features = ["https-rustls", "json-using-serde"] }
async-stream = "0.3.5"
async-trait = "0.1.77"
env_logger = "0.11.3"
futures = "0.3.30"
log = "0.4.21"
reqwest = { version = "0.12.0", features = ["json", "blocking", "stream"] }
serde = { version = "1.0.197", features = ["derive"] }
serde_json = "1.0.114"
strum = "0.26.1"
thiserror = "1.0.57"
tokio = { version = "1.36.0", features = ["full"] }
tokio-stream = "0.1.14"
[dev-dependencies]
dotenv = "0.15.0"
jrest = "0.2.3"

View File

@@ -1,7 +1,20 @@
.PHONY: test
SHELL := /bin/bash
.PHONY: doc readme test
define source_env_if_not_ci
@if [ -z "$${CI}" ]; then \
if [ -f ./.env ]; then \
source ./.env; \
else \
echo "No .env file found"; \
exit 1; \
fi \
fi
endef
define RELEASE_TEMPLATE
conventional-changelog -p conventionalcommits -i CHANGELOG.md -s
npx conventional-changelog-cli -p conventionalcommits -i ./CHANGELOG.md -s
git add .
git commit -m "docs(changelog): update"
git push origin HEAD
@@ -9,18 +22,51 @@ define RELEASE_TEMPLATE
git push origin HEAD --tags
endef
test:
cargo test --no-fail-fast
test-cover:
cargo tarpaulin --frozen --no-fail-fast --out Xml --skip-clean
test-watch:
cargo watch -x "test -- --nocapture"
doc:
cargo doc
open ./target/doc/mistralai_client/index.html
readme:
@echo "Generating README.md from template..."
@> README.md # Clear README.md content before starting
@while IFS= read -r line || [[ -n "$$line" ]]; do \
if [[ $$line == *"<CODE>"* && $$line == *"</CODE>"* ]]; then \
example_path=$$(echo $$line | sed -n 's/.*<CODE>\(.*\)<\/CODE>.*/\1/p'); \
if [ -f $$example_path ]; then \
echo '```rs' >> README.md; \
cat $$example_path >> README.md; \
echo '```' >> README.md; \
else \
echo "Error: Example $$example_path not found." >&2; \
fi; \
else \
echo "$$line" >> README.md; \
fi; \
done < README.template.md
@echo "README.md has been generated."
release-patch:
$(call RELEASE_TEMPLATE,patch)
release-minor:
$(call RELEASE_TEMPLATE,minor)
release-major:
$(call RELEASE_TEMPLATE,major)
test:
@$(source_env_if_not_ci) && \
cargo test --no-fail-fast
test-cover:
@$(source_env_if_not_ci) && \
cargo llvm-cov
test-doc:
@$(source_env_if_not_ci) && \
cargo test --doc --no-fail-fast
test-examples:
@$(source_env_if_not_ci) && \
for example in $$(ls examples/*.rs | sed 's/examples\/\(.*\)\.rs/\1/'); do \
echo "Running $$example"; \
cargo run --example $$example; \
done
test-watch:
@source ./.env && \
cargo watch -x "test -- --nocapture"

373
README.md
View File

@@ -7,6 +7,10 @@
Rust client for the Mistral AI API.
> [!IMPORTANT]
> While we are in v0, minor versions may introduce breaking changes.
> Please, refer to the [CHANGELOG.md](./CHANGELOG.md) for more information.
---
- [Supported APIs](#supported-apis)
@@ -15,20 +19,30 @@ Rust client for the Mistral AI API.
- [As an environment variable](#as-an-environment-variable)
- [As a client argument](#as-a-client-argument)
- [Usage](#usage)
- [Chat without streaming](#chat-without-streaming)
- [Chat with streaming](#chat-with-streaming)
- [Chat](#chat)
- [Chat (async)](#chat-async)
- [Chat with streaming (async)](#chat-with-streaming-async)
- [Chat with Function Calling](#chat-with-function-calling)
- [Chat with Function Calling (async)](#chat-with-function-calling-async)
- [Embeddings](#embeddings)
- [Embeddings (async)](#embeddings-async)
- [List models](#list-models)
- [List models (async)](#list-models-async)
- [Contributing](#contributing)
---
## Supported APIs
- [x] Chat without streaming
- [ ] Chat with streaming
- [ ] Embedding
- [x] Chat without streaming (async)
- [x] Chat with streaming
- [x] Embedding
- [x] Embedding (async)
- [x] List models
- [ ] Function Calling
- [x] List models (async)
- [x] Function Calling
- [x] Function Calling (async)
## Installation
@@ -46,6 +60,18 @@ You can get your Mistral API Key there: <https://docs.mistral.ai/#api-access>.
Just set the `MISTRAL_API_KEY` environment variable.
```rs
use mistralai_client::v1::client::Client;
fn main() {
let client = Client::new(None, None, None, None);
}
```
```sh
MISTRAL_API_KEY=your_api_key cargo run
```
#### As a client argument
```rs
@@ -54,49 +80,334 @@ use mistralai_client::v1::client::Client;
fn main() {
let api_key = "your_api_key";
let client = Client::new(Some(api_key), None, None, None);
let client = Client::new(Some(api_key), None, None, None).unwrap();
}
```
## Usage
### Chat without streaming
### Chat
```rs
use mistralai_client::v1::{
chat_completion::{ChatCompletionMessage, ChatCompletionMessageRole, ChatCompletionRequestOptions},
chat::{ChatMessage, ChatMessageRole, ChatParams},
client::Client,
constants::OPEN_MISTRAL_7B,
constants::Model,
};
fn main() {
    // This example supposes you have set the `MISTRAL_API_KEY` environment variable.
let client = Client::new(None, None, None, None);
let client = Client::new(None, None, None, None).unwrap();
let model = OPEN_MISTRAL_7B.to_string();
let messages = vec![ChatCompletionMessage {
role: ChatCompletionMessageRole::user,
let model = Model::OpenMistral7b;
let messages = vec![ChatMessage {
role: ChatMessageRole::User,
content: "Just guess the next word: \"Eiffel ...\"?".to_string(),
tool_calls: None,
}];
let options = ChatCompletionRequestOptions {
temperature: Some(0.0),
let options = ChatParams {
temperature: 0.0,
random_seed: Some(42),
..Default::default()
};
let result = client.chat(model, messages, Some(options)).unwrap();
println!("Assistant: {}", result.choices[0].message.content);
// => "Assistant: Tower. [...]"
// => "Assistant: Tower. The Eiffel Tower is a famous landmark in Paris, France."
}
```
### Chat with streaming
### Chat (async)
_In progress._
```rs
use mistralai_client::v1::{
chat::{ChatMessage, ChatMessageRole, ChatParams},
client::Client,
constants::Model,
};
#[tokio::main]
async fn main() {
    // This example supposes you have set the `MISTRAL_API_KEY` environment variable.
let client = Client::new(None, None, None, None).unwrap();
let model = Model::OpenMistral7b;
let messages = vec![ChatMessage {
role: ChatMessageRole::User,
content: "Just guess the next word: \"Eiffel ...\"?".to_string(),
tool_calls: None,
}];
let options = ChatParams {
temperature: 0.0,
random_seed: Some(42),
..Default::default()
};
let result = client
.chat_async(model, messages, Some(options))
.await
.unwrap();
println!(
"{:?}: {}",
result.choices[0].message.role, result.choices[0].message.content
);
// => "Assistant: Tower. The Eiffel Tower is a famous landmark in Paris, France."
}
```
### Chat with streaming (async)
```rs
use futures::stream::StreamExt;
use mistralai_client::v1::{
chat::{ChatMessage, ChatMessageRole, ChatParams},
client::Client,
constants::Model,
};
use std::io::{self, Write};
#[tokio::main]
async fn main() {
    // This example supposes you have set the `MISTRAL_API_KEY` environment variable.
let client = Client::new(None, None, None, None).unwrap();
let model = Model::OpenMistral7b;
let messages = vec![ChatMessage {
role: ChatMessageRole::User,
content: "Tell me a short happy story.".to_string(),
tool_calls: None,
}];
let options = ChatParams {
temperature: 0.0,
random_seed: Some(42),
..Default::default()
};
let stream_result = client
.chat_stream(model, messages, Some(options))
.await
.unwrap();
stream_result
.for_each(|chunk_result| async {
match chunk_result {
Ok(chunks) => chunks.iter().for_each(|chunk| {
print!("{}", chunk.choices[0].delta.content);
io::stdout().flush().unwrap();
// => "Once upon a time, [...]"
}),
Err(error) => {
eprintln!("Error processing chunk: {:?}", error)
}
}
})
.await;
print!("\n") // To persist the last chunk output.
}
```
### Chat with Function Calling
```rs
use mistralai_client::v1::{
chat::{ChatMessage, ChatMessageRole, ChatParams},
client::Client,
constants::Model,
tool::{Function, Tool, ToolChoice, ToolFunctionParameter, ToolFunctionParameterType},
};
use serde::Deserialize;
use std::any::Any;
#[derive(Debug, Deserialize)]
struct GetCityTemperatureArguments {
city: String,
}
struct GetCityTemperatureFunction;
#[async_trait::async_trait]
impl Function for GetCityTemperatureFunction {
async fn execute(&self, arguments: String) -> Box<dyn Any + Send> {
// Deserialize arguments, perform the logic, and return the result
let GetCityTemperatureArguments { city } = serde_json::from_str(&arguments).unwrap();
let temperature = match city.as_str() {
"Paris" => "20°C",
_ => "Unknown city",
};
Box::new(temperature.to_string())
}
}
fn main() {
let tools = vec![Tool::new(
"get_city_temperature".to_string(),
"Get the current temperature in a city.".to_string(),
vec![ToolFunctionParameter::new(
"city".to_string(),
"The name of the city.".to_string(),
ToolFunctionParameterType::String,
)],
)];
    // This example supposes you have set the `MISTRAL_API_KEY` environment variable.
let mut client = Client::new(None, None, None, None).unwrap();
client.register_function(
"get_city_temperature".to_string(),
Box::new(GetCityTemperatureFunction),
);
let model = Model::MistralSmallLatest;
let messages = vec![ChatMessage {
role: ChatMessageRole::User,
content: "What's the temperature in Paris?".to_string(),
tool_calls: None,
}];
let options = ChatParams {
temperature: 0.0,
random_seed: Some(42),
tool_choice: Some(ToolChoice::Auto),
tools: Some(tools),
..Default::default()
};
client.chat(model, messages, Some(options)).unwrap();
let temperature = client
.get_last_function_call_result()
.unwrap()
.downcast::<String>()
.unwrap();
println!("The temperature in Paris is: {}.", temperature);
// => "The temperature in Paris is: 20°C."
}
```
### Chat with Function Calling (async)
```rs
use mistralai_client::v1::{
chat::{ChatMessage, ChatMessageRole, ChatParams},
client::Client,
constants::Model,
tool::{Function, Tool, ToolChoice, ToolFunctionParameter, ToolFunctionParameterType},
};
use serde::Deserialize;
use std::any::Any;
#[derive(Debug, Deserialize)]
struct GetCityTemperatureArguments {
city: String,
}
struct GetCityTemperatureFunction;
#[async_trait::async_trait]
impl Function for GetCityTemperatureFunction {
async fn execute(&self, arguments: String) -> Box<dyn Any + Send> {
// Deserialize arguments, perform the logic, and return the result
let GetCityTemperatureArguments { city } = serde_json::from_str(&arguments).unwrap();
let temperature = match city.as_str() {
"Paris" => "20°C",
_ => "Unknown city",
};
Box::new(temperature.to_string())
}
}
#[tokio::main]
async fn main() {
let tools = vec![Tool::new(
"get_city_temperature".to_string(),
"Get the current temperature in a city.".to_string(),
vec![ToolFunctionParameter::new(
"city".to_string(),
"The name of the city.".to_string(),
ToolFunctionParameterType::String,
)],
)];
    // This example supposes you have set the `MISTRAL_API_KEY` environment variable.
let mut client = Client::new(None, None, None, None).unwrap();
client.register_function(
"get_city_temperature".to_string(),
Box::new(GetCityTemperatureFunction),
);
let model = Model::MistralSmallLatest;
let messages = vec![ChatMessage {
role: ChatMessageRole::User,
content: "What's the temperature in Paris?".to_string(),
tool_calls: None,
}];
let options = ChatParams {
temperature: 0.0,
random_seed: Some(42),
tool_choice: Some(ToolChoice::Auto),
tools: Some(tools),
..Default::default()
};
client
.chat_async(model, messages, Some(options))
.await
.unwrap();
let temperature = client
.get_last_function_call_result()
.unwrap()
.downcast::<String>()
.unwrap();
println!("The temperature in Paris is: {}.", temperature);
// => "The temperature in Paris is: 20°C."
}
```
### Embeddings
_In progress._
```rs
use mistralai_client::v1::{client::Client, constants::EmbedModel};
fn main() {
    // This example supposes you have set the `MISTRAL_API_KEY` environment variable.
let client: Client = Client::new(None, None, None, None).unwrap();
let model = EmbedModel::MistralEmbed;
let input = vec!["Embed this sentence.", "As well as this one."]
.iter()
.map(|s| s.to_string())
.collect();
let options = None;
let response = client.embeddings(model, input, options).unwrap();
println!("First Embedding: {:?}", response.data[0]);
// => "First Embedding: {...}"
}
```
### Embeddings (async)
```rs
use mistralai_client::v1::{client::Client, constants::EmbedModel};
#[tokio::main]
async fn main() {
    // This example supposes you have set the `MISTRAL_API_KEY` environment variable.
let client: Client = Client::new(None, None, None, None).unwrap();
let model = EmbedModel::MistralEmbed;
let input = vec!["Embed this sentence.", "As well as this one."]
.iter()
.map(|s| s.to_string())
.collect();
let options = None;
let response = client
.embeddings_async(model, input, options)
.await
.unwrap();
println!("First Embedding: {:?}", response.data[0]);
// => "First Embedding: {...}"
}
```
### List models
@@ -105,10 +416,30 @@ use mistralai_client::v1::client::Client;
fn main() {
    // This example supposes you have set the `MISTRAL_API_KEY` environment variable.
let client = Client::new(None, None, None, None);
let client = Client::new(None, None, None, None).unwrap();
let result = client.list_models(model, messages, Some(options)).unwrap();
let result = client.list_models().unwrap();
println!("First Model ID: {:?}", result.data[0].id);
// => "First Model ID: open-mistral-7b"
}
```
### List models (async)
```rs
use mistralai_client::v1::client::Client;
#[tokio::main]
async fn main() {
    // This example supposes you have set the `MISTRAL_API_KEY` environment variable.
let client = Client::new(None, None, None, None).unwrap();
let result = client.list_models_async().await.unwrap();
println!("First Model ID: {:?}", result.data[0].id);
// => "First Model ID: open-mistral-7b"
}
```
## Contributing
Please read [CONTRIBUTING.md](./CONTRIBUTING.md) for details on how to contribute to this library.

127
README.template.md Normal file
View File

@@ -0,0 +1,127 @@
# Mistral AI Rust Client
[![Crates.io Package](https://img.shields.io/crates/v/mistralai-client?style=for-the-badge)](https://crates.io/crates/mistralai-client)
[![Docs.rs Documentation](https://img.shields.io/docsrs/mistralai-client/latest?style=for-the-badge)](https://docs.rs/mistralai-client/latest/mistralai-client)
[![Test Workflow Status](https://img.shields.io/github/actions/workflow/status/ivangabriele/mistralai-client-rs/test.yml?label=CI&style=for-the-badge)](https://github.com/ivangabriele/mistralai-client-rs/actions?query=branch%3Amain+workflow%3ATest++)
[![Code Coverage](https://img.shields.io/codecov/c/github/ivangabriele/mistralai-client-rs/main?label=Cov&style=for-the-badge)](https://app.codecov.io/github/ivangabriele/mistralai-client-rs)
Rust client for the Mistral AI API.
> [!IMPORTANT]
> While we are in v0, minor versions may introduce breaking changes.
> Please, refer to the [CHANGELOG.md](./CHANGELOG.md) for more information.
---
- [Supported APIs](#supported-apis)
- [Installation](#installation)
- [Mistral API Key](#mistral-api-key)
- [As an environment variable](#as-an-environment-variable)
- [As a client argument](#as-a-client-argument)
- [Usage](#usage)
- [Chat](#chat)
- [Chat (async)](#chat-async)
- [Chat with streaming (async)](#chat-with-streaming-async)
- [Chat with Function Calling](#chat-with-function-calling)
- [Chat with Function Calling (async)](#chat-with-function-calling-async)
- [Embeddings](#embeddings)
- [Embeddings (async)](#embeddings-async)
- [List models](#list-models)
- [List models (async)](#list-models-async)
- [Contributing](#contributing)
---
## Supported APIs
- [x] Chat without streaming
- [x] Chat without streaming (async)
- [x] Chat with streaming
- [x] Embedding
- [x] Embedding (async)
- [x] List models
- [x] List models (async)
- [x] Function Calling
- [x] Function Calling (async)
## Installation
You can install the library in your project using:
```sh
cargo add mistralai-client
```
### Mistral API Key
You can get your Mistral API key here: <https://docs.mistral.ai/#api-access>.
#### As an environment variable
Just set the `MISTRAL_API_KEY` environment variable.
```rs
use mistralai_client::v1::client::Client;
fn main() {
    // Reads the API key from the `MISTRAL_API_KEY` environment variable.
    // `Client::new` returns a `Result` (it fails when no key is available),
    // so the snippet now unwraps it like every other example in this README.
    let client = Client::new(None, None, None, None).unwrap();
}
```
```sh
MISTRAL_API_KEY=your_api_key cargo run
```
#### As a client argument
```rs
use mistralai_client::v1::client::Client;
fn main() {
    // An explicitly passed key takes precedence over the `MISTRAL_API_KEY`
    // environment variable. `Client::new` expects `Option<String>`, so the
    // `&str` literal must be converted into an owned `String` first —
    // `Some(api_key)` with a `&str` would not compile.
    let api_key = "your_api_key".to_string();
    let client = Client::new(Some(api_key), None, None, None).unwrap();
}
```
## Usage
### Chat
<CODE>examples/chat.rs</CODE>
### Chat (async)
<CODE>examples/chat_async.rs</CODE>
### Chat with streaming (async)
<CODE>examples/chat_with_streaming.rs</CODE>
### Chat with Function Calling
<CODE>examples/chat_with_function_calling.rs</CODE>
### Chat with Function Calling (async)
<CODE>examples/chat_with_function_calling_async.rs</CODE>
### Embeddings
<CODE>examples/embeddings.rs</CODE>
### Embeddings (async)
<CODE>examples/embeddings_async.rs</CODE>
### List models
<CODE>examples/list_models.rs</CODE>
### List models (async)
<CODE>examples/list_models_async.rs</CODE>
## Contributing
Please read [CONTRIBUTING.md](./CONTRIBUTING.md) for details on how to contribute to this library.

26
examples/chat.rs Normal file
View File

@@ -0,0 +1,26 @@
use mistralai_client::v1::{
chat::{ChatMessage, ChatMessageRole, ChatParams},
client::Client,
constants::Model,
};
fn main() {
// This example suppose you have set the `MISTRAL_API_KEY` environment variable.
let client = Client::new(None, None, None, None).unwrap();
let model = Model::OpenMistral7b;
let messages = vec![ChatMessage {
role: ChatMessageRole::User,
content: "Just guess the next word: \"Eiffel ...\"?".to_string(),
tool_calls: None,
}];
let options = ChatParams {
temperature: 0.0,
random_seed: Some(42),
..Default::default()
};
let result = client.chat(model, messages, Some(options)).unwrap();
println!("Assistant: {}", result.choices[0].message.content);
// => "Assistant: Tower. The Eiffel Tower is a famous landmark in Paris, France."
}

33
examples/chat_async.rs Normal file
View File

@@ -0,0 +1,33 @@
use mistralai_client::v1::{
chat::{ChatMessage, ChatMessageRole, ChatParams},
client::Client,
constants::Model,
};
#[tokio::main]
async fn main() {
// This example suppose you have set the `MISTRAL_API_KEY` environment variable.
let client = Client::new(None, None, None, None).unwrap();
let model = Model::OpenMistral7b;
let messages = vec![ChatMessage {
role: ChatMessageRole::User,
content: "Just guess the next word: \"Eiffel ...\"?".to_string(),
tool_calls: None,
}];
let options = ChatParams {
temperature: 0.0,
random_seed: Some(42),
..Default::default()
};
let result = client
.chat_async(model, messages, Some(options))
.await
.unwrap();
println!(
"{:?}: {}",
result.choices[0].message.role, result.choices[0].message.content
);
// => "Assistant: Tower. The Eiffel Tower is a famous landmark in Paris, France."
}

View File

@@ -0,0 +1,71 @@
use mistralai_client::v1::{
chat::{ChatMessage, ChatMessageRole, ChatParams},
client::Client,
constants::Model,
tool::{Function, Tool, ToolChoice, ToolFunctionParameter, ToolFunctionParameterType},
};
use serde::Deserialize;
use std::any::Any;
/// Arguments deserialized from the model's `get_city_temperature` tool call.
#[derive(Debug, Deserialize)]
struct GetCityTemperatureArguments {
    city: String,
}

/// Unit struct implementing the `get_city_temperature` handler (see `impl Function` below).
struct GetCityTemperatureFunction;
#[async_trait::async_trait]
impl Function for GetCityTemperatureFunction {
async fn execute(&self, arguments: String) -> Box<dyn Any + Send> {
// Deserialize arguments, perform the logic, and return the result
let GetCityTemperatureArguments { city } = serde_json::from_str(&arguments).unwrap();
let temperature = match city.as_str() {
"Paris" => "20°C",
_ => "Unknown city",
};
Box::new(temperature.to_string())
}
}
/// Synchronous function-calling example: declares a tool, registers its handler,
/// and reads back the handler's result after the chat round-trip.
fn main() {
    // Declare the tool schema the model may call: one function taking a `city` string.
    let tools = vec![Tool::new(
        "get_city_temperature".to_string(),
        "Get the current temperature in a city.".to_string(),
        vec![ToolFunctionParameter::new(
            "city".to_string(),
            "The name of the city.".to_string(),
            ToolFunctionParameterType::String,
        )],
    )];

    // Assumes the `MISTRAL_API_KEY` environment variable is set.
    let mut client = Client::new(None, None, None, None).unwrap();
    // Register the Rust handler executed when the model calls the tool.
    client.register_function(
        "get_city_temperature".to_string(),
        Box::new(GetCityTemperatureFunction),
    );

    let model = Model::MistralSmallLatest;
    let messages = vec![ChatMessage {
        role: ChatMessageRole::User,
        content: "What's the temperature in Paris?".to_string(),
        tool_calls: None,
    }];
    let options = ChatParams {
        temperature: 0.0,
        random_seed: Some(42),
        // Let the model decide whether to call one of the provided tools.
        tool_choice: Some(ToolChoice::Auto),
        tools: Some(tools),
        ..Default::default()
    };

    // The client runs the registered handler automatically when the model requests it.
    client.chat(model, messages, Some(options)).unwrap();
    // Retrieve the handler's boxed return value and downcast it back to `String`.
    let temperature = client
        .get_last_function_call_result()
        .unwrap()
        .downcast::<String>()
        .unwrap();
    println!("The temperature in Paris is: {}.", temperature);
    // => "The temperature in Paris is: 20°C."
}

View File

@@ -0,0 +1,75 @@
use mistralai_client::v1::{
chat::{ChatMessage, ChatMessageRole, ChatParams},
client::Client,
constants::Model,
tool::{Function, Tool, ToolChoice, ToolFunctionParameter, ToolFunctionParameterType},
};
use serde::Deserialize;
use std::any::Any;
/// Arguments deserialized from the model's `get_city_temperature` tool call.
#[derive(Debug, Deserialize)]
struct GetCityTemperatureArguments {
    city: String,
}

/// Unit struct implementing the `get_city_temperature` handler.
struct GetCityTemperatureFunction;

#[async_trait::async_trait]
impl Function for GetCityTemperatureFunction {
    async fn execute(&self, arguments: String) -> Box<dyn Any + Send> {
        // Deserialize arguments, perform the logic, and return the result.
        let GetCityTemperatureArguments { city } = serde_json::from_str(&arguments).unwrap();
        let temperature = match city.as_str() {
            "Paris" => "20°C",
            _ => "Unknown city",
        };

        // Boxed as `Any` so handlers can return arbitrary result types.
        Box::new(temperature.to_string())
    }
}
/// Asynchronous function-calling example: identical to the sync version, but
/// using `chat_async`.
#[tokio::main]
async fn main() {
    // Declare the tool schema the model may call: one function taking a `city` string.
    let tools = vec![Tool::new(
        "get_city_temperature".to_string(),
        "Get the current temperature in a city.".to_string(),
        vec![ToolFunctionParameter::new(
            "city".to_string(),
            "The name of the city.".to_string(),
            ToolFunctionParameterType::String,
        )],
    )];

    // Assumes the `MISTRAL_API_KEY` environment variable is set.
    let mut client = Client::new(None, None, None, None).unwrap();
    // Register the Rust handler executed when the model calls the tool.
    client.register_function(
        "get_city_temperature".to_string(),
        Box::new(GetCityTemperatureFunction),
    );

    let model = Model::MistralSmallLatest;
    let messages = vec![ChatMessage {
        role: ChatMessageRole::User,
        content: "What's the temperature in Paris?".to_string(),
        tool_calls: None,
    }];
    let options = ChatParams {
        temperature: 0.0,
        random_seed: Some(42),
        // Let the model decide whether to call one of the provided tools.
        tool_choice: Some(ToolChoice::Auto),
        tools: Some(tools),
        ..Default::default()
    };

    // The client runs the registered handler automatically when the model requests it.
    client
        .chat_async(model, messages, Some(options))
        .await
        .unwrap();
    // Retrieve the handler's boxed return value and downcast it back to `String`.
    let temperature = client
        .get_last_function_call_result()
        .unwrap()
        .downcast::<String>()
        .unwrap();
    println!("The temperature in Paris is: {}.", temperature);
    // => "The temperature in Paris is: 20°C."
}

View File

@@ -0,0 +1,45 @@
use futures::stream::StreamExt;
use mistralai_client::v1::{
chat::{ChatMessage, ChatMessageRole, ChatParams},
client::Client,
constants::Model,
};
use std::io::{self, Write};
#[tokio::main]
async fn main() {
// This example suppose you have set the `MISTRAL_API_KEY` environment variable.
let client = Client::new(None, None, None, None).unwrap();
let model = Model::OpenMistral7b;
let messages = vec![ChatMessage {
role: ChatMessageRole::User,
content: "Tell me a short happy story.".to_string(),
tool_calls: None,
}];
let options = ChatParams {
temperature: 0.0,
random_seed: Some(42),
..Default::default()
};
let stream_result = client
.chat_stream(model, messages, Some(options))
.await
.unwrap();
stream_result
.for_each(|chunk_result| async {
match chunk_result {
Ok(chunks) => chunks.iter().for_each(|chunk| {
print!("{}", chunk.choices[0].delta.content);
io::stdout().flush().unwrap();
// => "Once upon a time, [...]"
}),
Err(error) => {
eprintln!("Error processing chunk: {:?}", error)
}
}
})
.await;
print!("\n") // To persist the last chunk output.
}

17
examples/embeddings.rs Normal file
View File

@@ -0,0 +1,17 @@
use mistralai_client::v1::{client::Client, constants::EmbedModel};
/// Synchronous embeddings example.
fn main() {
    // Assumes the `MISTRAL_API_KEY` environment variable is set.
    let client: Client = Client::new(None, None, None, None).unwrap();

    let model = EmbedModel::MistralEmbed;
    // The API expects owned `String`s, so convert the borrowed literals.
    let sentences = ["Embed this sentence.", "As well as this one."];
    let input: Vec<String> = sentences.iter().map(|s| s.to_string()).collect();

    let response = client.embeddings(model, input, None).unwrap();
    println!("First Embedding: {:?}", response.data[0]);
    // => "First Embedding: {...}"
}

View File

@@ -0,0 +1,21 @@
use mistralai_client::v1::{client::Client, constants::EmbedModel};
/// Asynchronous embeddings example.
#[tokio::main]
async fn main() {
    // Assumes the `MISTRAL_API_KEY` environment variable is set.
    let client: Client = Client::new(None, None, None, None).unwrap();

    let model = EmbedModel::MistralEmbed;
    // Convert the borrowed literals into the owned `String`s the API expects.
    let input = vec!["Embed this sentence.", "As well as this one."]
        .iter()
        .map(|s| s.to_string())
        .collect();
    let options = None;

    let response = client
        .embeddings_async(model, input, options)
        .await
        .unwrap();
    println!("First Embedding: {:?}", response.data[0]);
    // => "First Embedding: {...}"
}

10
examples/list_models.rs Normal file
View File

@@ -0,0 +1,10 @@
use mistralai_client::v1::client::Client;
/// Synchronous model-listing example.
fn main() {
    // Assumes the `MISTRAL_API_KEY` environment variable is set.
    let client = Client::new(None, None, None, None).unwrap();

    let models = client.list_models().unwrap();
    println!("First Model ID: {:?}", models.data[0].id);
    // => "First Model ID: open-mistral-7b"
}

View File

@@ -0,0 +1,11 @@
use mistralai_client::v1::client::Client;
/// Asynchronous model-listing example.
#[tokio::main]
async fn main() {
    // Assumes the `MISTRAL_API_KEY` environment variable is set.
    let client = Client::new(None, None, None, None).unwrap();
    let result = client.list_models_async().await.unwrap();
    println!("First Model ID: {:?}", result.data[0].id);
    // => "First Model ID: open-mistral-7b"
}

View File

@@ -1 +1,4 @@
# https://github.com/crate-ci/cargo-release/blob/master/docs/reference.md
allow-branch = ["main"]
pre-release-commit-message = "ci(release): v{{version}}"
pre-release-replacements = [{ file = "CHANGELOG.md", search = "## \\[\\]", replace = "## [{{version}}]" }]

View File

@@ -1 +1,4 @@
//! This crate provides easy-to-use bindings and types for MistralAI's API.
/// The v1 module contains the types and methods for the v1 API endpoints.
pub mod v1;

216
src/v1/chat.rs Normal file
View File

@@ -0,0 +1,216 @@
use serde::{Deserialize, Serialize};
use crate::v1::{common, constants, tool};
// -----------------------------------------------------------------------------
// Definitions
/// A single message exchanged in a chat conversation.
#[derive(Clone, Debug, Deserialize, Serialize)]
pub struct ChatMessage {
    /// Who authored the message (system/assistant/user/tool).
    pub role: ChatMessageRole,
    /// The message text.
    pub content: String,
    /// Tool calls requested by the model, if any.
    /// Omitted from the JSON payload when `None` (sending `null` triggers a 422 from the API).
    #[serde(skip_serializing_if = "Option::is_none")]
    pub tool_calls: Option<Vec<tool::ToolCall>>,
}
impl ChatMessage {
    /// Builds an `assistant` message, optionally carrying tool calls emitted by the model.
    pub fn new_assistant_message(content: &str, tool_calls: Option<Vec<tool::ToolCall>>) -> Self {
        Self {
            role: ChatMessageRole::Assistant,
            content: content.to_string(),
            tool_calls,
        }
    }

    /// Builds a `system` message (instructions injected ahead of the conversation).
    ///
    /// Added for consistency: `ChatMessageRole::System` exists but had no constructor.
    pub fn new_system_message(content: &str) -> Self {
        Self {
            role: ChatMessageRole::System,
            content: content.to_string(),
            tool_calls: None,
        }
    }

    /// Builds a `tool` message (a tool-call result sent back to the model).
    ///
    /// Added for consistency: `ChatMessageRole::Tool` exists but had no constructor.
    pub fn new_tool_message(content: &str) -> Self {
        Self {
            role: ChatMessageRole::Tool,
            content: content.to_string(),
            tool_calls: None,
        }
    }

    /// Builds a `user` message.
    pub fn new_user_message(content: &str) -> Self {
        Self {
            role: ChatMessageRole::User,
            content: content.to_string(),
            tool_calls: None,
        }
    }
}
/// The author role of a [ChatMessage].
///
/// See the [Mistral AI API documentation](https://docs.mistral.ai/capabilities/completion/#chat-messages) for more information.
#[derive(Clone, Debug, Eq, PartialEq, Deserialize, Serialize)]
pub enum ChatMessageRole {
    /// Serialized as `"system"`.
    #[serde(rename = "system")]
    System,
    /// Serialized as `"assistant"`.
    #[serde(rename = "assistant")]
    Assistant,
    /// Serialized as `"user"`.
    #[serde(rename = "user")]
    User,
    /// Serialized as `"tool"`.
    #[serde(rename = "tool")]
    Tool,
}
/// The format that the model must output.
///
/// See the [API documentation](https://docs.mistral.ai/api/#operation/createChatCompletion) for more information.
#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct ResponseFormat {
    /// Serialized as `"type"` (renamed because `type` is a Rust keyword).
    #[serde(rename = "type")]
    pub type_: String,
}
impl ResponseFormat {
pub fn json_object() -> Self {
Self {
type_: "json_object".to_string(),
}
}
}
// -----------------------------------------------------------------------------
// Request
/// The parameters for the chat request.
///
/// Every field has a default (see the `Default` impl below), so the idiomatic way
/// to build one is struct-update syntax:
/// `ChatParams { temperature: 0.0, ..Default::default() }`.
///
/// See the [API documentation](https://docs.mistral.ai/api/#operation/createChatCompletion) for more information.
#[derive(Clone, Debug)]
pub struct ChatParams {
    /// The maximum number of tokens to generate in the completion.
    ///
    /// Defaults to `None`.
    pub max_tokens: Option<u32>,
    /// The seed to use for random sampling. If set, different calls will generate deterministic results.
    ///
    /// Defaults to `None`.
    pub random_seed: Option<u32>,
    /// The format that the model must output (e.g. [ResponseFormat::json_object]).
    ///
    /// Defaults to `None`.
    pub response_format: Option<ResponseFormat>,
    /// Whether to inject a safety prompt before all conversations.
    ///
    /// Defaults to `false`.
    pub safe_prompt: bool,
    /// What sampling temperature to use, between `Some(0.0)` and `Some(1.0)`.
    ///
    /// Defaults to `0.7`.
    pub temperature: f32,
    /// Specifies if/how functions are called.
    ///
    /// Defaults to `None`.
    pub tool_choice: Option<tool::ToolChoice>,
    /// A list of available tools for the model.
    ///
    /// Defaults to `None`.
    pub tools: Option<Vec<tool::Tool>>,
    /// Nucleus sampling, where the model considers the results of the tokens with `top_p` probability mass.
    ///
    /// Defaults to `1.0`.
    pub top_p: f32,
}
impl Default for ChatParams {
fn default() -> Self {
Self {
max_tokens: None,
random_seed: None,
safe_prompt: false,
response_format: None,
temperature: 0.7,
tool_choice: None,
tools: None,
top_p: 1.0,
}
}
}
impl ChatParams {
pub fn json_default() -> Self {
Self {
max_tokens: None,
random_seed: None,
safe_prompt: false,
response_format: None,
temperature: 0.7,
tool_choice: None,
tools: None,
top_p: 1.0,
}
}
}
/// The JSON body sent to the `/chat/completions` endpoint.
#[derive(Debug, Serialize, Deserialize)]
pub struct ChatRequest {
    pub messages: Vec<ChatMessage>,
    pub model: constants::Model,
    // The optional fields below are omitted from the payload entirely when `None`.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub max_tokens: Option<u32>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub random_seed: Option<u32>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub response_format: Option<ResponseFormat>,
    pub safe_prompt: bool,
    /// `true` requests an SSE stream; `false` a single JSON response.
    pub stream: bool,
    pub temperature: f32,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub tool_choice: Option<tool::ToolChoice>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub tools: Option<Vec<tool::Tool>>,
    pub top_p: f32,
}
impl ChatRequest {
    /// Assembles a request body from the model, the conversation so far, the
    /// streaming flag, and optional [ChatParams] (falling back to defaults).
    pub fn new(
        model: constants::Model,
        messages: Vec<ChatMessage>,
        stream: bool,
        options: Option<ChatParams>,
    ) -> Self {
        let params = options.unwrap_or_default();

        Self {
            messages,
            model,
            max_tokens: params.max_tokens,
            random_seed: params.random_seed,
            response_format: params.response_format,
            safe_prompt: params.safe_prompt,
            stream,
            temperature: params.temperature,
            tool_choice: params.tool_choice,
            tools: params.tools,
            top_p: params.top_p,
        }
    }
}
// -----------------------------------------------------------------------------
// Response
/// A complete (non-streamed) response from the `/chat/completions` endpoint.
#[derive(Clone, Debug, Deserialize, Serialize)]
pub struct ChatResponse {
    pub id: String,
    pub object: String,
    /// Unix timestamp (in seconds).
    pub created: u32,
    pub model: constants::Model,
    pub choices: Vec<ChatResponseChoice>,
    pub usage: common::ResponseUsage,
}

/// One generated completion within a [ChatResponse].
#[derive(Clone, Debug, Deserialize, Serialize)]
pub struct ChatResponseChoice {
    pub index: u32,
    pub message: ChatMessage,
    pub finish_reason: ChatResponseChoiceFinishReason,
    // TODO Check this prop (seen in API responses but undocumented).
    // pub logprobs: ???
}

/// The reason the model stopped generating a choice.
#[derive(Clone, Debug, Eq, PartialEq, Deserialize, Serialize)]
pub enum ChatResponseChoiceFinishReason {
    /// Serialized as `"stop"`.
    #[serde(rename = "stop")]
    Stop,
    /// Serialized as `"tool_calls"`: the model is requesting tool execution.
    #[serde(rename = "tool_calls")]
    ToolCalls,
}

View File

@@ -1,113 +0,0 @@
use serde::{Deserialize, Serialize};
use crate::v1::common;
#[derive(Debug)]
pub struct ChatCompletionParams {
pub tools: Option<String>,
pub temperature: Option<f32>,
pub max_tokens: Option<u32>,
pub top_p: Option<f32>,
pub random_seed: Option<u32>,
pub stream: Option<bool>,
pub safe_prompt: Option<bool>,
}
impl Default for ChatCompletionParams {
fn default() -> Self {
Self {
tools: None,
temperature: None,
max_tokens: None,
top_p: None,
random_seed: None,
stream: None,
safe_prompt: None,
}
}
}
#[derive(Debug, Serialize, Deserialize)]
pub struct ChatCompletionRequest {
pub messages: Vec<ChatCompletionMessage>,
pub model: String,
#[serde(skip_serializing_if = "Option::is_none")]
pub tools: Option<String>,
#[serde(skip_serializing_if = "Option::is_none")]
pub temperature: Option<f32>,
#[serde(skip_serializing_if = "Option::is_none")]
pub max_tokens: Option<u32>,
#[serde(skip_serializing_if = "Option::is_none")]
pub top_p: Option<f32>,
#[serde(skip_serializing_if = "Option::is_none")]
pub random_seed: Option<u32>,
#[serde(skip_serializing_if = "Option::is_none")]
pub stream: Option<bool>,
#[serde(skip_serializing_if = "Option::is_none")]
pub safe_prompt: Option<bool>,
// TODO Check this prop (seen in official Python client but not in API doc).
// pub tool_choice: Option<String>,
// TODO Check this prop (seen in official Python client but not in API doc).
// pub response_format: Option<String>,
}
impl ChatCompletionRequest {
pub fn new(
model: String,
messages: Vec<ChatCompletionMessage>,
options: Option<ChatCompletionParams>,
) -> Self {
let ChatCompletionParams {
tools,
temperature,
max_tokens,
top_p,
random_seed,
stream,
safe_prompt,
} = options.unwrap_or_default();
Self {
messages,
model,
tools,
temperature,
max_tokens,
top_p,
random_seed,
stream,
safe_prompt,
}
}
}
#[derive(Clone, Debug, Deserialize, Serialize)]
pub struct ChatCompletionResponse {
pub id: String,
pub object: String,
/// Unix timestamp (in seconds).
pub created: u32,
pub model: String,
pub choices: Vec<ChatCompletionChoice>,
pub usage: common::ResponseUsage,
}
#[derive(Clone, Debug, Deserialize, Serialize)]
pub struct ChatCompletionChoice {
pub index: u32,
pub message: ChatCompletionMessage,
pub finish_reason: String,
// TODO Check this prop (seen in API responses but undocumented).
// pub logprobs: ???
}
#[derive(Clone, Debug, Deserialize, Serialize)]
pub struct ChatCompletionMessage {
pub role: ChatCompletionMessageRole,
pub content: String,
}
#[derive(Clone, Debug, Eq, PartialEq, Deserialize, Serialize)]
#[allow(non_camel_case_types)]
pub enum ChatCompletionMessageRole {
assistant,
user,
}

57
src/v1/chat_stream.rs Normal file
View File

@@ -0,0 +1,57 @@
use serde::{Deserialize, Serialize};
use serde_json::from_str;
use crate::v1::{chat, common, constants, error};
// -----------------------------------------------------------------------------
// Response
/// One parsed SSE chunk of a streamed chat completion.
#[derive(Clone, Debug, Deserialize, Serialize)]
pub struct ChatStreamChunk {
    pub id: String,
    pub object: String,
    /// Unix timestamp (in seconds).
    pub created: u32,
    pub model: constants::Model,
    pub choices: Vec<ChatStreamChunkChoice>,
    /// Optional — NOTE(review): presumably only present on the final chunk; confirm against the API.
    pub usage: Option<common::ResponseUsage>,
    // TODO Check this prop (seen in API responses but undocumented).
    // pub logprobs: ???,
}

/// One choice inside a [ChatStreamChunk].
#[derive(Clone, Debug, Deserialize, Serialize)]
pub struct ChatStreamChunkChoice {
    pub index: u32,
    pub delta: ChatStreamChunkChoiceDelta,
    /// `None` while streaming; set on the terminating chunk of a choice.
    pub finish_reason: Option<String>,
    // TODO Check this prop (seen in API responses but undocumented).
    // pub logprobs: ???,
}

/// The incremental content carried by a streamed chunk.
#[derive(Clone, Debug, Deserialize, Serialize)]
pub struct ChatStreamChunkChoiceDelta {
    /// Only present on the first delta of a choice — TODO confirm.
    pub role: Option<chat::ChatMessageRole>,
    pub content: String,
}
/// Extracts serialized chunks from a stream message.
pub fn get_chunk_from_stream_message_line(
line: &str,
) -> Result<Option<Vec<ChatStreamChunk>>, error::ApiError> {
if line.trim() == "data: [DONE]" {
return Ok(None);
}
let chunk_as_json = line.trim_start_matches("data: ").trim();
if chunk_as_json.is_empty() {
return Ok(Some(vec![]));
}
// Attempt to deserialize the JSON string into ChatStreamChunk
match from_str::<ChatStreamChunk>(chunk_as_json) {
Ok(chunk) => Ok(Some(vec![chunk])),
Err(e) => Err(error::ApiError {
message: e.to_string(),
}),
}
}

View File

@@ -1,161 +1,649 @@
use crate::v1::error::APIError;
use minreq::Response;
use crate::v1::{
chat_completion::{
ChatCompletionMessage, ChatCompletionParams, ChatCompletionRequest, ChatCompletionResponse,
},
constants::API_URL_BASE,
list_models::ListModelsResponse,
use futures::stream::StreamExt;
use futures::Stream;
use log::debug;
use reqwest::Error as ReqwestError;
use std::{
any::Any,
collections::HashMap,
sync::{Arc, Mutex},
};
use crate::v1::{chat, chat_stream, constants, embedding, error, model_list, tool, utils};
/// A Mistral AI API client: credentials, connection settings, and the registry
/// of Rust handlers used for automatic function calling.
pub struct Client {
    pub api_key: String,
    pub endpoint: String,
    /// NOTE(review): stored but not visibly used in this chunk — confirm retries are applied.
    pub max_retries: u32,
    /// NOTE(review): stored but not visibly used in this chunk — confirm it configures request timeouts.
    pub timeout: u32,
    // Behind Arc<Mutex<..>> so handlers can be looked up / results stored from `&self` methods.
    functions: Arc<Mutex<HashMap<String, Box<dyn tool::Function>>>>,
    last_function_call_result: Arc<Mutex<Option<Box<dyn Any + Send>>>>,
}
impl Client {
/// Constructs a new `Client`.
///
/// # Arguments
///
/// * `api_key` - An optional API key.
/// If not provided, the method will try to use the `MISTRAL_API_KEY` environment variable.
/// * `endpoint` - An optional custom API endpoint. Defaults to the official API endpoint if not provided.
/// * `max_retries` - Optional maximum number of retries for failed requests. Defaults to `5`.
/// * `timeout` - Optional timeout in seconds for requests. Defaults to `120`.
///
/// # Examples
///
/// ```
/// use mistralai_client::v1::client::Client;
///
/// let client = Client::new(Some("your_api_key_here".to_string()), None, Some(3), Some(60));
/// assert!(client.is_ok());
/// ```
///
/// # Errors
///
/// This method fails whenever neither the `api_key` is provided
/// nor the `MISTRAL_API_KEY` environment variable is set.
pub fn new(
api_key: Option<String>,
endpoint: Option<String>,
max_retries: Option<u32>,
timeout: Option<u32>,
) -> Self {
let api_key = api_key.unwrap_or(std::env::var("MISTRAL_API_KEY").unwrap());
let endpoint = endpoint.unwrap_or(API_URL_BASE.to_string());
) -> Result<Self, error::ClientError> {
let api_key = match api_key {
Some(api_key_from_param) => api_key_from_param,
None => {
std::env::var("MISTRAL_API_KEY").map_err(|_| error::ClientError::MissingApiKey)?
}
};
let endpoint = endpoint.unwrap_or(constants::API_URL_BASE.to_string());
let max_retries = max_retries.unwrap_or(5);
let timeout = timeout.unwrap_or(120);
Self {
let functions: Arc<_> = Arc::new(Mutex::new(HashMap::new()));
let last_function_call_result = Arc::new(Mutex::new(None));
Ok(Self {
api_key,
endpoint,
max_retries,
timeout,
functions,
last_function_call_result,
})
}
/// Synchronously sends a chat completion request and returns the response.
///
/// # Arguments
///
/// * `model` - The [Model] to use for the chat completion.
/// * `messages` - A vector of [ChatMessage] to send as part of the chat.
/// * `options` - Optional [ChatParams] to customize the request.
///
/// # Returns
///
/// Returns a [Result] containing the `ChatResponse` if the request is successful,
/// or an [ApiError] if there is an error.
///
/// # Examples
///
/// ```
/// use mistralai_client::v1::{
/// chat::{ChatMessage, ChatMessageRole},
/// client::Client,
/// constants::Model,
/// };
///
/// let client = Client::new(None, None, None, None).unwrap();
/// let messages = vec![ChatMessage {
/// role: ChatMessageRole::User,
/// content: "Hello, world!".to_string(),
/// tool_calls: None,
/// }];
/// let response = client.chat(Model::OpenMistral7b, messages, None).unwrap();
/// println!("{:?}: {}", response.choices[0].message.role, response.choices[0].message.content);
/// ```
pub fn chat(
    &self,
    model: constants::Model,
    messages: Vec<chat::ChatMessage>,
    options: Option<chat::ChatParams>,
) -> Result<chat::ChatResponse, error::ApiError> {
    // `stream: false` — this is the blocking, non-streaming endpoint.
    let request = chat::ChatRequest::new(model, messages, false, options);
    let response = self.post_sync("/chat/completions", &request)?;

    let result = response.json::<chat::ChatResponse>();
    match result {
        Ok(data) => {
            utils::debug_pretty_json_from_struct("Response Data", &data);
            // Runs a registered function handler if the model answered with a tool call.
            self.call_function_if_any(data.clone());

            Ok(data)
        }
        Err(error) => Err(self.to_api_error(error)),
    }
}
pub fn build_request(&self, request: minreq::Request) -> minreq::Request {
let authorization = format!("Bearer {}", self.api_key);
/// Asynchronously sends a chat completion request and returns the response.
///
/// # Arguments
///
/// * `model` - The [Model] to use for the chat completion.
/// * `messages` - A vector of [ChatMessage] to send as part of the chat.
/// * `options` - Optional [ChatParams] to customize the request.
///
/// # Returns
///
/// Returns a [Result] containing a `Stream` of `ChatStreamChunk` if the request is successful,
/// or an [ApiError] if there is an error.
///
/// # Examples
///
/// ```
/// use mistralai_client::v1::{
/// chat::{ChatMessage, ChatMessageRole},
/// client::Client,
/// constants::Model,
/// };
///
/// #[tokio::main]
/// async fn main() {
/// let client = Client::new(None, None, None, None).unwrap();
/// let messages = vec![ChatMessage {
/// role: ChatMessageRole::User,
/// content: "Hello, world!".to_string(),
/// tool_calls: None,
/// }];
/// let response = client.chat_async(Model::OpenMistral7b, messages, None).await.unwrap();
/// println!("{:?}: {}", response.choices[0].message.role, response.choices[0].message.content);
/// }
/// ```
pub async fn chat_async(
    &self,
    model: constants::Model,
    messages: Vec<chat::ChatMessage>,
    options: Option<chat::ChatParams>,
) -> Result<chat::ChatResponse, error::ApiError> {
    // `stream: false` — single JSON response; use `chat_stream` for SSE.
    let request = chat::ChatRequest::new(model, messages, false, options);
    let response = self.post_async("/chat/completions", &request).await?;

    let result = response.json::<chat::ChatResponse>().await;
    match result {
        Ok(data) => {
            utils::debug_pretty_json_from_struct("Response Data", &data);
            // Runs a registered function handler if the model answered with a tool call.
            self.call_function_if_any_async(data.clone()).await;

            Ok(data)
        }
        Err(error) => Err(self.to_api_error(error)),
    }
}
/// Asynchronously sends a chat completion request and returns a stream of message chunks.
///
/// # Arguments
///
/// * `model` - The [Model] to use for the chat completion.
/// * `messages` - A vector of [ChatMessage] to send as part of the chat.
/// * `options` - Optional [ChatParams] to customize the request.
///
/// # Returns
///
/// Returns a [Result] containing a `Stream` of `ChatStreamChunk` if the request is successful,
/// or an [ApiError] if there is an error.
///
/// # Examples
///
/// ```
/// use futures::stream::StreamExt;
/// use mistralai_client::v1::{
/// chat::{ChatMessage, ChatMessageRole},
/// client::Client,
/// constants::Model,
/// };
/// use std::io::{self, Write};
///
/// #[tokio::main]
/// async fn main() {
/// let client = Client::new(None, None, None, None).unwrap();
/// let messages = vec![ChatMessage {
/// role: ChatMessageRole::User,
/// content: "Hello, world!".to_string(),
/// tool_calls: None,
/// }];
///
/// let stream_result = client
/// .chat_stream(Model::OpenMistral7b,messages, None)
/// .await
/// .unwrap();
/// stream_result
/// .for_each(|chunk_result| async {
/// match chunk_result {
/// Ok(chunks) => chunks.iter().for_each(|chunk| {
/// print!("{}", chunk.choices[0].delta.content);
/// io::stdout().flush().unwrap();
/// // => "Once upon a time, [...]"
/// }),
/// Err(error) => {
/// eprintln!("Error processing chunk: {:?}", error)
/// }
/// }
/// })
/// .await;
/// print!("\n") // To persist the last chunk output.
/// }
/// ```
pub async fn chat_stream(
    &self,
    model: constants::Model,
    messages: Vec<chat::ChatMessage>,
    options: Option<chat::ChatParams>,
) -> Result<
    impl Stream<Item = Result<Vec<chat_stream::ChatStreamChunk>, error::ApiError>>,
    error::ApiError,
> {
    // `stream: true` switches the API to Server-Sent Events output.
    let request = chat::ChatRequest::new(model, messages, true, options);
    let response = self
        .post_stream("/chat/completions", &request)
        .await
        .map_err(|e| error::ApiError {
            message: e.to_string(),
        })?;
    // Surface HTTP-level failures before handing the byte stream to the caller.
    if !response.status().is_success() {
        let status = response.status();
        let text = response.text().await.unwrap_or_default();
        return Err(error::ApiError {
            message: format!("{}: {}", status, text),
        });
    }

    // Decode each raw byte chunk as UTF-8, split it into SSE lines, and parse
    // every `data: …` line into a `ChatStreamChunk`.
    let deserialized_stream = response.bytes_stream().then(|bytes_result| async move {
        match bytes_result {
            Ok(bytes) => match String::from_utf8(bytes.to_vec()) {
                Ok(message) => {
                    let chunks = message
                        .lines()
                        .filter_map(
                            // NOTE(review): `[DONE]` markers AND parse errors are both
                            // silently dropped here — consider surfacing parse errors.
                            |line| match chat_stream::get_chunk_from_stream_message_line(line) {
                                Ok(Some(chunks)) => Some(chunks),
                                Ok(None) => None,
                                Err(_error) => None,
                            },
                        )
                        .flatten()
                        .collect();

                    Ok(chunks)
                }
                Err(e) => Err(error::ApiError {
                    message: e.to_string(),
                }),
            },
            Err(e) => Err(error::ApiError {
                message: e.to_string(),
            }),
        }
    });

    Ok(deserialized_stream)
}
/// Synchronously requests embeddings for the given input strings.
///
/// # Errors
///
/// Returns an [ApiError] when the HTTP call or response deserialization fails.
pub fn embeddings(
    &self,
    model: constants::EmbedModel,
    input: Vec<String>,
    options: Option<embedding::EmbeddingRequestOptions>,
) -> Result<embedding::EmbeddingResponse, error::ApiError> {
    let request = embedding::EmbeddingRequest::new(model, input, options);
    let response = self.post_sync("/embeddings", &request)?;

    let result = response.json::<embedding::EmbeddingResponse>();
    match result {
        Ok(data) => {
            utils::debug_pretty_json_from_struct("Response Data", &data);

            Ok(data)
        }
        Err(error) => Err(self.to_api_error(error)),
    }
}
/// Asynchronously requests embeddings for the given input strings.
///
/// # Errors
///
/// Returns an [ApiError] when the HTTP call or response deserialization fails.
pub async fn embeddings_async(
    &self,
    model: constants::EmbedModel,
    input: Vec<String>,
    options: Option<embedding::EmbeddingRequestOptions>,
) -> Result<embedding::EmbeddingResponse, error::ApiError> {
    let request = embedding::EmbeddingRequest::new(model, input, options);
    let response = self.post_async("/embeddings", &request).await?;

    let result = response.json::<embedding::EmbeddingResponse>().await;
    match result {
        Ok(data) => {
            utils::debug_pretty_json_from_struct("Response Data", &data);

            Ok(data)
        }
        Err(error) => Err(self.to_api_error(error)),
    }
}
/// Takes (and clears) the result of the most recent automatic function call, if any.
pub fn get_last_function_call_result(&self) -> Option<Box<dyn Any + Send>> {
    // `take()` leaves `None` behind, so each result can only be consumed once.
    let mut result_lock = self.last_function_call_result.lock().unwrap();
    result_lock.take()
}
/// Synchronously fetches the list of available models from `/models`.
///
/// # Errors
///
/// Returns an [ApiError] when the HTTP call or response deserialization fails.
pub fn list_models(&self) -> Result<model_list::ModelListResponse, error::ApiError> {
    let response = self.get_sync("/models")?;

    let result = response.json::<model_list::ModelListResponse>();
    match result {
        Ok(data) => {
            utils::debug_pretty_json_from_struct("Response Data", &data);

            Ok(data)
        }
        Err(error) => Err(self.to_api_error(error)),
    }
}
/// Asynchronously fetches the list of available models from `/models`.
///
/// # Errors
///
/// Returns an [ApiError] when the HTTP call or response deserialization fails.
pub async fn list_models_async(
    &self,
) -> Result<model_list::ModelListResponse, error::ApiError> {
    let response = self.get_async("/models").await?;

    let result = response.json::<model_list::ModelListResponse>().await;
    match result {
        Ok(data) => {
            utils::debug_pretty_json_from_struct("Response Data", &data);

            Ok(data)
        }
        Err(error) => Err(self.to_api_error(error)),
    }
}
/// Registers an async callback under `name` so it can be executed when the
/// model emits a matching tool call.
pub fn register_function(&mut self, name: String, function: Box<dyn tool::Function>) {
    self.functions.lock().unwrap().insert(name, function);
}
fn build_request_sync(
&self,
request: reqwest::blocking::RequestBuilder,
) -> reqwest::blocking::RequestBuilder {
let user_agent = format!(
"ivangabriele/mistralai-client-rs/{}",
env!("CARGO_PKG_VERSION")
);
let request = request
.with_header("Authorization", authorization)
.with_header("Accept", "application/json")
.with_header("Content-Type", "application/json")
.with_header("User-Agent", user_agent);
let request_builder = request
.bearer_auth(&self.api_key)
.header("Accept", "application/json")
.header("Content-Type", "application/json")
.header("User-Agent", user_agent);
request
request_builder
}
pub fn get(&self, path: &str) -> Result<Response, APIError> {
/// Attaches bearer authentication and the standard JSON headers to an
/// async request builder.
fn build_request_async(&self, request: reqwest::RequestBuilder) -> reqwest::RequestBuilder {
    // Identifies this client, e.g. "ivangabriele/mistralai-client-rs/0.11.0".
    let user_agent = format!(
        "ivangabriele/mistralai-client-rs/{}",
        env!("CARGO_PKG_VERSION")
    );

    request
        .bearer_auth(&self.api_key)
        .header("Accept", "application/json")
        .header("Content-Type", "application/json")
        .header("User-Agent", user_agent)
}
/// Attaches bearer authentication and the headers required for a streaming
/// (server-sent events) request to an async request builder.
fn build_request_stream(&self, request: reqwest::RequestBuilder) -> reqwest::RequestBuilder {
    let user_agent = format!(
        "ivangabriele/mistralai-client-rs/{}",
        env!("CARGO_PKG_VERSION")
    );

    request
        .bearer_auth(&self.api_key)
        // Streaming responses arrive as SSE rather than a single JSON body.
        .header("Accept", "text/event-stream")
        .header("Content-Type", "application/json")
        .header("User-Agent", user_agent)
}
/// Synchronous tool-call dispatch: if the first choice of `response`
/// carries a tool call whose function was registered, execute it on a
/// throwaway Tokio runtime and store the result in
/// `last_function_call_result` (overwriting any previous value).
fn call_function_if_any(&self, response: chat::ChatResponse) -> () {
    // Only the first choice and its first tool call are considered.
    let first_tool_call = response
        .choices
        .get(0)
        .and_then(|choice| choice.message.tool_calls.to_owned())
        .and_then(|tool_calls| tool_calls.into_iter().next());

    let next_result = first_tool_call.and_then(|tool_call| {
        let functions = self.functions.lock().unwrap();

        functions.get(&tool_call.function.name).map(|function| {
            // `Function::execute` is async, so block on it from this sync path.
            let runtime = tokio::runtime::Runtime::new().unwrap();

            runtime.block_on(function.execute(tool_call.function.arguments.to_owned()))
        })
    });

    let mut last_result_lock = self.last_function_call_result.lock().unwrap();
    *last_result_lock = next_result;
}
/// Async tool-call dispatch: if the first choice of `response` carries a
/// tool call whose function was registered via `register_function`, await
/// its execution and store the result in `last_function_call_result`
/// (overwriting any previous value; `None` when nothing matched).
async fn call_function_if_any_async(&self, response: chat::ChatResponse) -> () {
    // Only the first choice and its first tool call are considered.
    let next_result = match response.choices.get(0) {
        Some(first_choice) => match first_choice.message.tool_calls.to_owned() {
            Some(tool_calls) => match tool_calls.get(0) {
                Some(first_tool_call) => {
                    // NOTE(review): the std mutex guard is held across the
                    // `.await` below — confirm this cannot deadlock in callers.
                    let functions = self.functions.lock().unwrap();
                    match functions.get(&first_tool_call.function.name) {
                        Some(function) => {
                            // Arguments are forwarded as the raw JSON string.
                            let result = function
                                .execute(first_tool_call.function.arguments.to_owned())
                                .await;
                            Some(result)
                        }
                        None => None,
                    }
                }
                None => None,
            },
            None => None,
        },
        None => None,
    };
    let mut last_result_lock = self.last_function_call_result.lock().unwrap();
    *last_result_lock = next_result;
}
fn get_sync(&self, path: &str) -> Result<reqwest::blocking::Response, error::ApiError> {
let reqwest_client = reqwest::blocking::Client::new();
let url = format!("{}{}", self.endpoint, path);
let request = self.build_request(minreq::get(url));
debug!("Request URL: {}", url);
let request = self.build_request_sync(reqwest_client.get(url));
let result = request.send();
match result {
Ok(response) => {
if (200..=299).contains(&response.status_code) {
if response.status().is_success() {
Ok(response)
} else {
Err(APIError {
message: format!(
"{}: {}",
response.status_code,
response.as_str().unwrap()
),
let response_status = response.status();
let response_body = response.text().unwrap_or_default();
debug!("Response Status: {}", &response_status);
utils::debug_pretty_json_from_string("Response Data", &response_body);
Err(error::ApiError {
message: format!("{}: {}", response_status, response_body),
})
}
}
Err(error) => Err(self.new_error(error)),
Err(error) => Err(error::ApiError {
message: error.to_string(),
}),
}
}
pub fn post<T: serde::ser::Serialize + std::fmt::Debug>(
/// Sends an authenticated async GET to `{endpoint}{path}` and maps any
/// non-success HTTP status or transport error to an `ApiError`.
async fn get_async(&self, path: &str) -> Result<reqwest::Response, error::ApiError> {
    let url = format!("{}{}", self.endpoint, path);
    debug!("Request URL: {}", url);

    let reqwest_client = reqwest::Client::new();
    let request = self.build_request_async(reqwest_client.get(url));

    match request.send().await {
        Err(error) => Err(error::ApiError {
            message: error.to_string(),
        }),
        Ok(response) if response.status().is_success() => Ok(response),
        Ok(response) => {
            // Non-2xx: log the status and body, then surface both in the error.
            let response_status = response.status();
            let response_body = response.text().await.unwrap_or_default();
            debug!("Response Status: {}", &response_status);
            utils::debug_pretty_json_from_string("Response Data", &response_body);

            Err(error::ApiError {
                message: format!("{}: {}", response_status, response_body),
            })
        }
    }
}
fn post_sync<T: std::fmt::Debug + serde::ser::Serialize>(
&self,
path: &str,
params: &T,
) -> Result<Response, APIError> {
// print!("{:?}", params);
) -> Result<reqwest::blocking::Response, error::ApiError> {
let reqwest_client = reqwest::blocking::Client::new();
let url = format!("{}{}", self.endpoint, path);
let request = self.build_request(minreq::post(url));
debug!("Request URL: {}", url);
utils::debug_pretty_json_from_struct("Request Body", params);
let result = request.with_json(params).unwrap().send();
match result {
Ok(response) => {
// print!("{:?}", response.as_str().unwrap());
if (200..=299).contains(&response.status_code) {
Ok(response)
} else {
Err(APIError {
message: format!(
"{}: {}",
response.status_code,
response.as_str().unwrap()
),
})
}
}
Err(error) => Err(self.new_error(error)),
}
}
pub fn delete(&self, path: &str) -> Result<Response, APIError> {
let url = format!("{}{}", self.endpoint, path);
let request = self.build_request(minreq::post(url));
let request_builder = reqwest_client.post(url).json(params);
let request = self.build_request_sync(request_builder);
let result = request.send();
match result {
Ok(response) => {
if (200..=299).contains(&response.status_code) {
if response.status().is_success() {
Ok(response)
} else {
Err(APIError {
message: format!(
"{}: {}",
response.status_code,
response.as_str().unwrap()
),
let response_status = response.status();
let response_body = response.text().unwrap_or_default();
debug!("Response Status: {}", &response_status);
utils::debug_pretty_json_from_string("Response Data", &response_body);
Err(error::ApiError {
message: format!("{}: {}", response_body, response_status),
})
}
}
Err(error) => Err(self.new_error(error)),
Err(error) => Err(error::ApiError {
message: error.to_string(),
}),
}
}
pub fn chat(
async fn post_async<T: serde::ser::Serialize + std::fmt::Debug>(
&self,
model: String,
messages: Vec<ChatCompletionMessage>,
options: Option<ChatCompletionParams>,
) -> Result<ChatCompletionResponse, APIError> {
let request = ChatCompletionRequest::new(model, messages, options);
path: &str,
params: &T,
) -> Result<reqwest::Response, error::ApiError> {
let reqwest_client = reqwest::Client::new();
let url = format!("{}{}", self.endpoint, path);
debug!("Request URL: {}", url);
utils::debug_pretty_json_from_struct("Request Body", params);
let response = self.post("/chat/completions", &request)?;
let result = response.json::<ChatCompletionResponse>();
let request_builder = reqwest_client.post(url).json(params);
let request = self.build_request_async(request_builder);
let result = request.send().await;
match result {
Ok(response) => Ok(response),
Err(error) => Err(self.new_error(error)),
Ok(response) => {
if response.status().is_success() {
Ok(response)
} else {
let response_status = response.status();
let response_body = response.text().await.unwrap_or_default();
debug!("Response Status: {}", &response_status);
utils::debug_pretty_json_from_string("Response Data", &response_body);
Err(error::ApiError {
message: format!("{}: {}", response_status, response_body),
})
}
}
Err(error) => Err(error::ApiError {
message: error.to_string(),
}),
}
}
pub fn list_models(&self) -> Result<ListModelsResponse, APIError> {
let response = self.get("/models")?;
let result = response.json::<ListModelsResponse>();
async fn post_stream<T: serde::ser::Serialize + std::fmt::Debug>(
&self,
path: &str,
params: &T,
) -> Result<reqwest::Response, error::ApiError> {
let reqwest_client = reqwest::Client::new();
let url = format!("{}{}", self.endpoint, path);
debug!("Request URL: {}", url);
utils::debug_pretty_json_from_struct("Request Body", params);
let request_builder = reqwest_client.post(url).json(params);
let request = self.build_request_stream(request_builder);
let result = request.send().await;
match result {
Ok(response) => Ok(response),
Err(error) => Err(self.new_error(error)),
Ok(response) => {
if response.status().is_success() {
Ok(response)
} else {
let response_status = response.status();
let response_body = response.text().await.unwrap_or_default();
debug!("Response Status: {}", &response_status);
utils::debug_pretty_json_from_string("Response Data", &response_body);
Err(error::ApiError {
message: format!("{}: {}", response_status, response_body),
})
}
}
Err(error) => Err(error::ApiError {
message: error.to_string(),
}),
}
}
fn new_error(&self, err: minreq::Error) -> APIError {
APIError {
fn to_api_error(&self, err: ReqwestError) -> error::ApiError {
error::ApiError {
message: err.to_string(),
}
}

View File

@@ -1,7 +1,29 @@
use serde::{Deserialize, Serialize};
pub const API_URL_BASE: &str = "https://api.mistral.ai/v1";
pub const OPEN_MISTRAL_7B: &str = "open-mistral-7b";
pub const OPEN_MISTRAL_8X7B: &str = "open-mixtral-8x7b";
pub const MISTRAL_SMALL_LATEST: &str = "mistral-small-latest";
pub const MISTRAL_MEDIUM_LATEST: &str = "mistral-medium-latest";
pub const MISTRAL_LARGE_LATEST: &str = "mistral-large-latest";
/// Chat model identifiers accepted by the Mistral API.
///
/// Each variant serializes to the exact model-name string the API expects.
#[derive(Clone, Debug, Eq, PartialEq, Deserialize, Serialize)]
pub enum Model {
    #[serde(rename = "open-mistral-7b")]
    OpenMistral7b,
    #[serde(rename = "open-mixtral-8x7b")]
    OpenMixtral8x7b,
    #[serde(rename = "open-mixtral-8x22b")]
    OpenMixtral8x22b,
    #[serde(rename = "mistral-tiny")]
    MistralTiny,
    #[serde(rename = "mistral-small-latest")]
    MistralSmallLatest,
    #[serde(rename = "mistral-medium-latest")]
    MistralMediumLatest,
    #[serde(rename = "mistral-large-latest")]
    MistralLargeLatest,
    #[serde(rename = "codestral-latest")]
    CodestralLatest,
}
/// Embedding model identifiers accepted by the Mistral API.
#[derive(Clone, Debug, Eq, PartialEq, Deserialize, Serialize)]
pub enum EmbedModel {
    #[serde(rename = "mistral-embed")]
    MistralEmbed,
}

66
src/v1/embedding.rs Normal file
View File

@@ -0,0 +1,66 @@
use serde::{Deserialize, Serialize};
use crate::v1::{common, constants};
// -----------------------------------------------------------------------------
// Request
/// Optional parameters for an embeddings request.
///
/// The hand-written `Default` impl was redundant — `#[derive(Default)]`
/// produces the identical `encoding_format: None` value.
#[derive(Debug, Default)]
pub struct EmbeddingRequestOptions {
    /// Requested encoding of the returned vectors; `None` lets the API
    /// apply its own default.
    pub encoding_format: Option<EmbeddingRequestEncodingFormat>,
}
/// Serialized body of a `POST /embeddings` request.
#[derive(Debug, Serialize, Deserialize)]
pub struct EmbeddingRequest {
    pub model: constants::EmbedModel,
    pub input: Vec<String>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub encoding_format: Option<EmbeddingRequestEncodingFormat>,
}

impl EmbeddingRequest {
    /// Builds a request body; `None` options fall back to the defaults.
    pub fn new(
        model: constants::EmbedModel,
        input: Vec<String>,
        options: Option<EmbeddingRequestOptions>,
    ) -> Self {
        let encoding_format = options.unwrap_or_default().encoding_format;

        Self {
            model,
            input,
            encoding_format,
        }
    }
}
/// Wire format for returned embedding vectors.
///
/// Lowercase variant names are kept intentionally to match the API's
/// serialized values.
#[derive(Clone, Debug, Eq, PartialEq, Deserialize, Serialize)]
#[allow(non_camel_case_types)]
pub enum EmbeddingRequestEncodingFormat {
    float,
}
// -----------------------------------------------------------------------------
// Response
/// Response payload of `POST /embeddings`.
#[derive(Clone, Debug, Deserialize, Serialize)]
pub struct EmbeddingResponse {
    pub id: String,
    pub object: String,
    pub model: constants::EmbedModel,
    /// One embedding entry per input string.
    pub data: Vec<EmbeddingResponseDataItem>,
    pub usage: common::ResponseUsage,
}
/// A single embedding result within an `EmbeddingResponse`.
#[derive(Clone, Debug, Deserialize, Serialize)]
pub struct EmbeddingResponseDataItem {
    /// Position of the corresponding input in the request's `input` list.
    pub index: u32,
    pub embedding: Vec<f32>,
    pub object: String,
}

View File

@@ -2,14 +2,22 @@ use std::error::Error;
use std::fmt;
#[derive(Debug)]
pub struct APIError {
pub struct ApiError {
pub message: String,
}
impl fmt::Display for APIError {
impl fmt::Display for ApiError {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "APIError: {}", self.message)
write!(f, "ApiError: {}", self.message)
}
}
impl Error for ApiError {}
impl Error for APIError {}
/// Errors that can occur while constructing a client or reading responses
/// (distinct from `ApiError`, which covers failed API calls).
#[derive(Debug, PartialEq, thiserror::Error)]
pub enum ClientError {
    #[error(
        "You must either set the `MISTRAL_API_KEY` environment variable or specify it in `Client::new(api_key, ...)."
    )]
    MissingApiKey,
    #[error("Failed to read the response text.")]
    UnreadableResponseText,
}

View File

@@ -1,6 +1,10 @@
// Public API surface of the `v1` module.
//
// NOTE(review): the stale pre-rename declarations (`chat_completion`,
// `list_models`) were still interleaved here from the diff; only the
// current module set is kept.
pub mod chat;
pub mod chat_stream;
pub mod client;
pub mod common;
pub mod constants;
pub mod embedding;
pub mod error;
pub mod model_list;
pub mod tool;
pub mod utils;

View File

@@ -1,19 +1,22 @@
use serde::{Deserialize, Serialize};
// -----------------------------------------------------------------------------
// Response
#[derive(Clone, Debug, Deserialize, Serialize)]
pub struct ListModelsResponse {
pub struct ModelListResponse {
pub object: String,
pub data: Vec<ListModelsModel>,
pub data: Vec<ModelListData>,
}
#[derive(Clone, Debug, Deserialize, Serialize)]
pub struct ListModelsModel {
pub struct ModelListData {
pub id: String,
pub object: String,
/// Unix timestamp (in seconds).
pub created: u32,
pub owned_by: String,
pub permission: Vec<ListModelsModelPermission>,
pub permission: Vec<ModelListDataPermission>,
// TODO Check this prop (seen in API responses but undocumented).
// pub root: ???,
// TODO Check this prop (seen in API responses but undocumented).
@@ -21,7 +24,7 @@ pub struct ListModelsModel {
}
#[derive(Clone, Debug, Deserialize, Serialize)]
pub struct ListModelsModelPermission {
pub struct ModelListDataPermission {
pub id: String,
pub object: String,
/// Unix timestamp (in seconds).

138
src/v1/tool.rs Normal file
View File

@@ -0,0 +1,138 @@
use async_trait::async_trait;
use serde::{Deserialize, Serialize};
use std::{any::Any, collections::HashMap};
// -----------------------------------------------------------------------------
// Definitions
/// A tool invocation emitted by the model in a chat response.
#[derive(Clone, Debug, Eq, PartialEq, Deserialize, Serialize)]
pub struct ToolCall {
    pub function: ToolCallFunction,
}
/// The function the model asked to invoke.
#[derive(Clone, Debug, Eq, PartialEq, Deserialize, Serialize)]
pub struct ToolCallFunction {
    pub name: String,
    /// Raw JSON-encoded arguments string as produced by the model.
    pub arguments: String,
}
/// A tool (currently always a function) advertised to the model.
#[derive(Clone, Debug, Deserialize, Serialize)]
pub struct Tool {
    pub r#type: ToolType,
    pub function: ToolFunction,
}

impl Tool {
    /// Builds a function tool whose parameter schema is derived from
    /// `function_parameters`; every declared parameter is marked required.
    pub fn new(
        function_name: String,
        function_description: String,
        function_parameters: Vec<ToolFunctionParameter>,
    ) -> Self {
        let mut properties: HashMap<String, ToolFunctionParameterProperty> = HashMap::new();
        for parameter in function_parameters {
            let property = ToolFunctionParameterProperty {
                r#type: parameter.r#type,
                description: parameter.description,
            };
            properties.insert(parameter.name, property);
        }

        let required: Vec<String> = properties.keys().cloned().collect();
        let parameters = ToolFunctionParameters {
            r#type: ToolFunctionParametersType::Object,
            properties,
            required,
        };

        Self {
            r#type: ToolType::Function,
            function: ToolFunction {
                name: function_name,
                description: function_description,
                parameters,
            },
        }
    }
}
// -----------------------------------------------------------------------------
// Request
/// Declaration of a callable function, as serialized into the request.
#[derive(Clone, Debug, Deserialize, Serialize)]
pub struct ToolFunction {
    name: String,
    description: String,
    parameters: ToolFunctionParameters,
}
/// Declaration of a single named function parameter.
#[derive(Clone, Debug, Deserialize, Serialize)]
pub struct ToolFunctionParameter {
    name: String,
    description: String,
    r#type: ToolFunctionParameterType,
}

impl ToolFunctionParameter {
    /// Creates a parameter declaration.
    pub fn new(name: String, description: String, r#type: ToolFunctionParameterType) -> Self {
        Self {
            name,
            description,
            r#type,
        }
    }
}
/// JSON-schema-shaped parameter object for a function tool.
#[derive(Clone, Debug, Deserialize, Serialize)]
pub struct ToolFunctionParameters {
    r#type: ToolFunctionParametersType,
    properties: HashMap<String, ToolFunctionParameterProperty>,
    /// Names of required properties; `Tool::new` requires all of them.
    required: Vec<String>,
}
/// Schema entry for one parameter inside `ToolFunctionParameters.properties`.
#[derive(Clone, Debug, Deserialize, Serialize)]
pub struct ToolFunctionParameterProperty {
    r#type: ToolFunctionParameterType,
    description: String,
}
/// Top-level type of the parameters schema (always a JSON object here).
#[derive(Clone, Debug, Eq, PartialEq, Deserialize, Serialize)]
pub enum ToolFunctionParametersType {
    #[serde(rename = "object")]
    Object,
}
/// JSON type of a single function parameter (only strings are supported so far).
#[derive(Clone, Debug, Eq, PartialEq, Deserialize, Serialize)]
pub enum ToolFunctionParameterType {
    #[serde(rename = "string")]
    String,
}
/// Kind of tool being declared (only functions are supported so far).
#[derive(Clone, Debug, Eq, PartialEq, Deserialize, Serialize)]
pub enum ToolType {
    #[serde(rename = "function")]
    Function,
}
/// An enum representing how functions should be called.
///
/// Serialized values ("any" / "auto" / "none") match the API's `tool_choice` field.
#[derive(Clone, Debug, Eq, PartialEq, Deserialize, Serialize)]
pub enum ToolChoice {
    /// The model is forced to call a function.
    #[serde(rename = "any")]
    Any,
    /// The model can choose to either generate a message or call a function.
    #[serde(rename = "auto")]
    Auto,
    /// The model won't call a function and will generate a message instead.
    #[serde(rename = "none")]
    None,
}
// -----------------------------------------------------------------------------
// Custom
/// A user-supplied async callback backing a registered tool function
/// (see `Client::register_function`).
#[async_trait]
pub trait Function {
    /// Runs the function with the raw JSON `arguments` string from the
    /// model's tool call; the opaque boxed result is stored on the client.
    async fn execute(&self, arguments: String) -> Box<dyn Any + Send>;
}

32
src/v1/utils.rs Normal file
View File

@@ -0,0 +1,32 @@
use std::fmt::Debug;
use log::debug;
use serde::Serialize;
/// Pretty-prints a JSON string; returns the input unchanged when it is not
/// valid JSON (or when re-serialization fails).
///
/// Takes `&str` instead of the former `&String` — callers passing
/// `&String` still compile via deref coercion.
pub fn prettify_json_string(json: &str) -> String {
    match serde_json::from_str::<serde_json::Value>(json) {
        Ok(json_value) => {
            serde_json::to_string_pretty(&json_value).unwrap_or_else(|_| json.to_owned())
        }
        Err(_) => json.to_owned(),
    }
}
/// Serializes `value` to pretty-printed JSON, falling back to its `Debug`
/// representation when serialization fails.
pub fn prettify_json_struct<T: Debug + Serialize>(value: T) -> String {
    serde_json::to_string_pretty(&value).unwrap_or_else(|_| format!("{:?}", value))
}
pub fn debug_pretty_json_from_string(label: &str, json: &String) -> () {
let pretty_json = prettify_json_string(json);
debug!("{label}: {}", pretty_json);
}
/// Logs `value` at debug level under `label` as pretty-printed JSON.
pub fn debug_pretty_json_from_struct<T: Debug + Serialize>(label: &str, value: &T) -> () {
    debug!("{label}: {}", prettify_json_struct(value));
}

3
tests/setup.rs Normal file
View File

@@ -0,0 +1,3 @@
/// Test helper: initializes `env_logger` in test mode.
/// The `Err` returned on repeated initialization is deliberately ignored.
pub fn setup() {
    let _ = env_logger::builder().is_test(true).try_init();
}

View File

@@ -1,40 +0,0 @@
use jrest::expect;
use mistralai_client::v1::{
chat_completion::{ChatCompletionMessage, ChatCompletionMessageRole, ChatCompletionParams},
client::Client,
constants::OPEN_MISTRAL_7B,
};
/// Live integration test of the synchronous chat endpoint
/// (requires `MISTRAL_API_KEY`, loaded from `.env` when present).
#[test]
fn test_chat_completion() {
    extern crate dotenv;
    use dotenv::dotenv;
    dotenv().ok();

    let client = Client::new(None, None, None, None);

    let model = OPEN_MISTRAL_7B.to_string();
    let messages = vec![ChatCompletionMessage {
        role: ChatCompletionMessageRole::user,
        content: "Just guess the next word: \"Eiffel ...\"?".to_string(),
    }];
    // Zero temperature + fixed seed keep the exact-content assertion stable.
    let options = ChatCompletionParams {
        temperature: Some(0.0),
        random_seed: Some(42),
        ..Default::default()
    };
    let response = client.chat(model, messages, Some(options)).unwrap();

    expect!(response.model).to_be("open-mistral-7b".to_string());
    expect!(response.object).to_be("chat.completion".to_string());
    expect!(response.choices.len()).to_be(1);
    expect!(response.choices[0].index).to_be(0);
    expect!(response.choices[0].message.role.clone()).to_be(ChatCompletionMessageRole::assistant);
    expect!(response.choices[0].message.content.clone())
        .to_be("Tower. The Eiffel Tower is a famous landmark in Paris, France.".to_string());
    expect!(response.usage.prompt_tokens).to_be_greater_than(0);
    expect!(response.usage.completion_tokens).to_be_greater_than(0);
    expect!(response.usage.total_tokens).to_be_greater_than(21);
}

View File

@@ -0,0 +1,105 @@
use jrest::expect;
use mistralai_client::v1::{
chat::{ChatMessage, ChatMessageRole, ChatParams, ChatResponseChoiceFinishReason},
client::Client,
constants::Model,
tool::{Tool, ToolChoice, ToolFunctionParameter, ToolFunctionParameterType},
};
mod setup;
/// Live integration test of the async chat endpoint with a deterministic seed.
#[tokio::test]
async fn test_client_chat_async() {
    setup::setup();

    let client = Client::new(None, None, None, None).unwrap();

    let model = Model::OpenMistral7b;
    let messages = vec![ChatMessage::new_user_message(
        "Guess the next word: \"Eiffel ...\"?",
    )];
    let options = ChatParams {
        temperature: 0.0,
        random_seed: Some(42),
        ..Default::default()
    };
    let response = client
        .chat_async(model, messages, Some(options))
        .await
        .unwrap();

    expect!(response.model).to_be(Model::OpenMistral7b);
    expect!(response.object).to_be("chat.completion".to_string());
    expect!(response.choices.len()).to_be(1);
    expect!(response.choices[0].index).to_be(0);
    expect!(response.choices[0].finish_reason.clone()).to_be(ChatResponseChoiceFinishReason::Stop);
    expect!(response.choices[0].message.role.clone()).to_be(ChatMessageRole::Assistant);
    // Keyword check only, so minor model-output drift doesn't break the test.
    expect!(response.choices[0]
        .message
        .content
        .clone()
        .contains("Tower"))
    .to_be(true);
    expect!(response.usage.prompt_tokens).to_be_greater_than(0);
    expect!(response.usage.completion_tokens).to_be_greater_than(0);
    expect!(response.usage.total_tokens).to_be_greater_than(0);
}
/// Live integration test of async chat with `tool_choice: Any`, forcing the
/// model to emit a tool call instead of message content.
#[tokio::test]
async fn test_client_chat_async_with_function_calling() {
    setup::setup();

    let tools = vec![Tool::new(
        "get_city_temperature".to_string(),
        "Get the current temperature in a city.".to_string(),
        vec![ToolFunctionParameter::new(
            "city".to_string(),
            "The name of the city.".to_string(),
            ToolFunctionParameterType::String,
        )],
    )];

    let client = Client::new(None, None, None, None).unwrap();

    let model = Model::MistralSmallLatest;
    let messages = vec![ChatMessage::new_user_message(
        "What's the current temperature in Paris?",
    )];
    let options = ChatParams {
        temperature: 0.0,
        random_seed: Some(42),
        tool_choice: Some(ToolChoice::Any),
        tools: Some(tools),
        ..Default::default()
    };
    let response = client
        .chat_async(model, messages, Some(options))
        .await
        .unwrap();

    expect!(response.model).to_be(Model::MistralSmallLatest);
    expect!(response.object).to_be("chat.completion".to_string());
    expect!(response.choices.len()).to_be(1);
    expect!(response.choices[0].index).to_be(0);
    expect!(response.choices[0].finish_reason.clone())
        .to_be(ChatResponseChoiceFinishReason::ToolCalls);
    expect!(response.choices[0].message.role.clone()).to_be(ChatMessageRole::Assistant);
    // With a forced tool call the message content is expected to be empty.
    expect!(response.choices[0].message.content.clone()).to_be("".to_string());
    // expect!(response.choices[0].message.tool_calls.clone()).to_be(Some(vec![ToolCall {
    //     function: ToolCallFunction {
    //         name: "get_city_temperature".to_string(),
    //         arguments: "{\"city\": \"Paris\"}".to_string(),
    //     },
    // }]));
    expect!(response.usage.prompt_tokens).to_be_greater_than(0);
    expect!(response.usage.completion_tokens).to_be_greater_than(0);
    expect!(response.usage.total_tokens).to_be_greater_than(0);
}

View File

@@ -0,0 +1,40 @@
// use futures::stream::StreamExt;
// use jrest::expect;
// use mistralai_client::v1::{
// chat_completion::{ChatParams, ChatMessage, ChatMessageRole},
// client::Client,
// constants::Model,
// };
// #[tokio::test]
// async fn test_client_chat_stream() {
// let client = Client::new(None, None, None, None).unwrap();
// let model = Model::OpenMistral7b;
// let messages = vec![ChatMessage::new_user_message(
// "Just guess the next word: \"Eiffel ...\"?",
// )];
// let options = ChatParams {
// temperature: Some(0.0),
// random_seed: Some(42),
// ..Default::default()
// };
// let stream_result = client.chat_stream(model, messages, Some(options)).await;
// let mut stream = stream_result.expect("Failed to create stream.");
// while let Some(maybe_chunk_result) = stream.next().await {
// match maybe_chunk_result {
// Some(Ok(chunk)) => {
// if chunk.choices[0].delta.role == Some(ChatMessageRole::Assistant)
// || chunk.choices[0].finish_reason == Some("stop".to_string())
// {
// expect!(chunk.choices[0].delta.content.len()).to_be(0);
// } else {
// expect!(chunk.choices[0].delta.content.len()).to_be_greater_than(0);
// }
// }
// Some(Err(error)) => eprintln!("Error processing chunk: {:?}", error),
// None => (),
// }
// }
// }

View File

@@ -0,0 +1,87 @@
use jrest::expect;
use mistralai_client::v1::{
chat::{ChatMessage, ChatMessageRole, ChatParams, ChatResponseChoiceFinishReason},
client::Client,
constants::Model,
tool::{Tool, ToolChoice, ToolFunctionParameter, ToolFunctionParameterType},
};
mod setup;
/// Live integration test of the synchronous chat endpoint with a deterministic seed.
#[test]
fn test_client_chat() {
    setup::setup();

    let client = Client::new(None, None, None, None).unwrap();

    let model = Model::OpenMistral7b;
    let messages = vec![ChatMessage::new_user_message(
        "Guess the next word: \"Eiffel ...\"?",
    )];
    let options = ChatParams {
        temperature: 0.0,
        random_seed: Some(42),
        ..Default::default()
    };
    let response = client.chat(model, messages, Some(options)).unwrap();

    expect!(response.model).to_be(Model::OpenMistral7b);
    expect!(response.object).to_be("chat.completion".to_string());
    expect!(response.choices.len()).to_be(1);
    expect!(response.choices[0].index).to_be(0);
    expect!(response.choices[0].message.role.clone()).to_be(ChatMessageRole::Assistant);
    // Keyword check only, so minor model-output drift doesn't break the test.
    expect!(response.choices[0]
        .message
        .content
        .clone()
        .contains("Tower"))
    .to_be(true);
    expect!(response.choices[0].finish_reason.clone()).to_be(ChatResponseChoiceFinishReason::Stop);
    expect!(response.usage.prompt_tokens).to_be_greater_than(0);
    expect!(response.usage.completion_tokens).to_be_greater_than(0);
    expect!(response.usage.total_tokens).to_be_greater_than(0);
}
/// Live integration test of synchronous chat with `tool_choice: Auto`; the
/// prompt is expected to trigger the declared tool.
#[test]
fn test_client_chat_with_function_calling() {
    setup::setup();

    let tools = vec![Tool::new(
        "get_city_temperature".to_string(),
        "Get the current temperature in a city.".to_string(),
        vec![ToolFunctionParameter::new(
            "city".to_string(),
            "The name of the city.".to_string(),
            ToolFunctionParameterType::String,
        )],
    )];

    let client = Client::new(None, None, None, None).unwrap();

    let model = Model::MistralSmallLatest;
    let messages = vec![ChatMessage::new_user_message(
        "What's the current temperature in Paris?",
    )];
    let options = ChatParams {
        temperature: 0.0,
        random_seed: Some(42),
        tool_choice: Some(ToolChoice::Auto),
        tools: Some(tools),
        ..Default::default()
    };
    let response = client.chat(model, messages, Some(options)).unwrap();

    expect!(response.model).to_be(Model::MistralSmallLatest);
    expect!(response.object).to_be("chat.completion".to_string());
    expect!(response.choices.len()).to_be(1);
    expect!(response.choices[0].index).to_be(0);
    expect!(response.choices[0].message.role.clone()).to_be(ChatMessageRole::Assistant);
    // When the model calls the tool, the message content is empty.
    expect!(response.choices[0].message.content.clone()).to_be("".to_string());
    expect!(response.choices[0].finish_reason.clone())
        .to_be(ChatResponseChoiceFinishReason::ToolCalls);
    expect!(response.usage.prompt_tokens).to_be_greater_than(0);
    expect!(response.usage.completion_tokens).to_be_greater_than(0);
    expect!(response.usage.total_tokens).to_be_greater_than(0);
}

View File

@@ -0,0 +1,29 @@
use jrest::expect;
use mistralai_client::v1::{client::Client, constants::EmbedModel};
/// Live integration test of the async embeddings endpoint with two inputs.
#[tokio::test]
async fn test_client_embeddings_async() {
    let client: Client = Client::new(None, None, None, None).unwrap();

    let model = EmbedModel::MistralEmbed;
    let input = vec!["Embed this sentence.", "As well as this one."]
        .iter()
        .map(|s| s.to_string())
        .collect();
    let options = None;
    let response = client
        .embeddings_async(model, input, options)
        .await
        .unwrap();

    expect!(response.model).to_be(EmbedModel::MistralEmbed);
    expect!(response.object).to_be("list".to_string());
    // One embedding per input, in request order.
    expect!(response.data.len()).to_be(2);
    expect!(response.data[0].index).to_be(0);
    expect!(response.data[0].object.clone()).to_be("embedding".to_string());
    expect!(response.data[0].embedding.len()).to_be_greater_than(0);
    expect!(response.usage.prompt_tokens).to_be_greater_than(0);
    expect!(response.usage.completion_tokens).to_be(0);
    expect!(response.usage.total_tokens).to_be_greater_than(0);
}

View File

@@ -0,0 +1,26 @@
use jrest::expect;
use mistralai_client::v1::{client::Client, constants::EmbedModel};
/// Live integration test of the synchronous embeddings endpoint with two inputs.
#[test]
fn test_client_embeddings() {
    let client: Client = Client::new(None, None, None, None).unwrap();

    let model = EmbedModel::MistralEmbed;
    let input = vec!["Embed this sentence.", "As well as this one."]
        .iter()
        .map(|s| s.to_string())
        .collect();
    let options = None;
    let response = client.embeddings(model, input, options).unwrap();

    expect!(response.model).to_be(EmbedModel::MistralEmbed);
    expect!(response.object).to_be("list".to_string());
    // One embedding per input, in request order.
    expect!(response.data.len()).to_be(2);
    expect!(response.data[0].index).to_be(0);
    expect!(response.data[0].object.clone()).to_be("embedding".to_string());
    expect!(response.data[0].embedding.len()).to_be_greater_than(0);
    expect!(response.usage.prompt_tokens).to_be_greater_than(0);
    expect!(response.usage.completion_tokens).to_be(0);
    expect!(response.usage.total_tokens).to_be_greater_than(0);
}

View File

@@ -0,0 +1,12 @@
use jrest::expect;
use mistralai_client::v1::client::Client;
/// Live integration test: the async models listing returns a non-empty list.
#[tokio::test]
async fn test_client_list_models_async() {
    let client = Client::new(None, None, None, None).unwrap();

    let response = client.list_models_async().await.unwrap();

    expect!(response.object).to_be("list".to_string());
    expect!(response.data.len()).to_be_greater_than(0);
}

View File

@@ -2,13 +2,8 @@ use jrest::expect;
use mistralai_client::v1::client::Client;
#[test]
fn test_list_models() {
extern crate dotenv;
use dotenv::dotenv;
dotenv().ok();
let client = Client::new(None, None, None, None);
fn test_client_list_models() {
let client = Client::new(None, None, None, None).unwrap();
let response = client.list_models().unwrap();

106
tests/v1_client_new_test.rs Normal file
View File

@@ -0,0 +1,106 @@
use jrest::expect;
use mistralai_client::v1::{client::Client, error::ClientError};
/// With no explicit params, the client reads the API key from the
/// environment and applies its documented defaults. The original
/// `MISTRAL_API_KEY` value is saved and restored around the test.
#[test]
fn test_client_new_with_none_params() {
    let maybe_original_mistral_api_key = std::env::var("MISTRAL_API_KEY").ok();
    std::env::remove_var("MISTRAL_API_KEY");
    std::env::set_var("MISTRAL_API_KEY", "test_api_key_from_env");

    let client = Client::new(None, None, None, None).unwrap();

    expect!(client.api_key).to_be("test_api_key_from_env".to_string());
    expect!(client.endpoint).to_be("https://api.mistral.ai/v1".to_string());
    expect!(client.max_retries).to_be(5);
    expect!(client.timeout).to_be(120);

    match maybe_original_mistral_api_key {
        Some(original_mistral_api_key) => {
            std::env::set_var("MISTRAL_API_KEY", original_mistral_api_key)
        }
        None => std::env::remove_var("MISTRAL_API_KEY"),
    }
}
/// All constructor params are explicitly provided and must be taken as-is
/// (no environment variable is set).
#[test]
fn test_client_new_with_all_params() {
    let maybe_original_mistral_api_key = std::env::var("MISTRAL_API_KEY").ok();
    std::env::remove_var("MISTRAL_API_KEY");

    let api_key = Some("test_api_key_from_param".to_string());
    let endpoint = Some("https://example.org".to_string());
    let max_retries = Some(10);
    let timeout = Some(20);
    let client = Client::new(
        api_key.clone(),
        endpoint.clone(),
        max_retries.clone(),
        timeout.clone(),
    )
    .unwrap();

    expect!(client.api_key).to_be(api_key.unwrap());
    expect!(client.endpoint).to_be(endpoint.unwrap());
    expect!(client.max_retries).to_be(max_retries.unwrap());
    expect!(client.timeout).to_be(timeout.unwrap());

    match maybe_original_mistral_api_key {
        Some(original_mistral_api_key) => {
            std::env::set_var("MISTRAL_API_KEY", original_mistral_api_key)
        }
        None => std::env::remove_var("MISTRAL_API_KEY"),
    }
}
/// When the API key is supplied both via environment and parameter, the
/// parameter must win.
#[test]
fn test_client_new_with_api_key_as_both_env_and_param() {
    let maybe_original_mistral_api_key = std::env::var("MISTRAL_API_KEY").ok();
    std::env::remove_var("MISTRAL_API_KEY");
    std::env::set_var("MISTRAL_API_KEY", "test_api_key_from_env");

    let api_key = Some("test_api_key_from_param".to_string());
    let endpoint = Some("https://example.org".to_string());
    let max_retries = Some(10);
    let timeout = Some(20);
    let client = Client::new(
        api_key.clone(),
        endpoint.clone(),
        max_retries.clone(),
        timeout.clone(),
    )
    .unwrap();

    // The explicit parameter takes precedence over the environment variable.
    expect!(client.api_key).to_be(api_key.unwrap());
    expect!(client.endpoint).to_be(endpoint.unwrap());
    expect!(client.max_retries).to_be(max_retries.unwrap());
    expect!(client.timeout).to_be(timeout.unwrap());

    match maybe_original_mistral_api_key {
        Some(original_mistral_api_key) => {
            std::env::set_var("MISTRAL_API_KEY", original_mistral_api_key)
        }
        None => std::env::remove_var("MISTRAL_API_KEY"),
    }
}
#[test]
fn test_client_new_with_missing_api_key() {
    // Snapshot and clear the env key so `Client::new` sees no API key at all.
    let maybe_original_mistral_api_key = std::env::var("MISTRAL_API_KEY").ok();
    std::env::remove_var("MISTRAL_API_KEY");

    // Construction must fail with the dedicated error variant. The
    // intermediate closure was removed (it added nothing), and the stray
    // trailing backtick in the panic message is fixed.
    match Client::new(None, None, None, None) {
        Ok(_) => panic!("Expected `ClientError::MissingApiKey` but got `Ok`."),
        Err(error) => assert_eq!(error, ClientError::MissingApiKey),
    }

    // Restore the caller's environment.
    match maybe_original_mistral_api_key {
        Some(original_mistral_api_key) => {
            std::env::set_var("MISTRAL_API_KEY", original_mistral_api_key)
        }
        None => std::env::remove_var("MISTRAL_API_KEY"),
    }
}

View File

@@ -1,52 +0,0 @@
use jrest::expect;
use mistralai_client::v1::client::Client;
#[test]
fn test_client_new_with_none_params() {
    // Save any pre-existing key so it can be restored after the test.
    let maybe_original_mistral_api_key = std::env::var("MISTRAL_API_KEY").ok();
    std::env::set_var("MISTRAL_API_KEY", "test_api_key_from_env");
    // NOTE(review): historical snapshot — this `Client::new` predates the
    // fallible constructor and returns `Client` directly, not a `Result`.
    let client = Client::new(None, None, None, None);
    // Defaults expected when every parameter is `None`.
    expect!(client.api_key).to_be("test_api_key_from_env".to_string());
    expect!(client.endpoint).to_be("https://api.mistral.ai/v1".to_string());
    expect!(client.max_retries).to_be(5);
    expect!(client.timeout).to_be(120);
    // Restore the caller's environment: reinstate the original key or clear
    // the test value if there was none.
    match maybe_original_mistral_api_key {
        Some(original_mistral_api_key) => {
            std::env::set_var("MISTRAL_API_KEY", original_mistral_api_key)
        }
        None => std::env::remove_var("MISTRAL_API_KEY"),
    }
}
#[test]
fn test_client_new_with_all_params() {
    // Save any pre-existing key so it can be restored after the test.
    let maybe_original_mistral_api_key = std::env::var("MISTRAL_API_KEY").ok();
    std::env::set_var("MISTRAL_API_KEY", "test_api_key_from_env");
    let api_key = Some("test_api_key_from_param".to_string());
    let endpoint = Some("https://example.org".to_string());
    let max_retries = Some(10);
    let timeout = Some(20);
    // NOTE(review): historical snapshot — this `Client::new` predates the
    // fallible constructor and returns `Client` directly, not a `Result`.
    let client = Client::new(
        api_key.clone(),
        endpoint.clone(),
        max_retries.clone(),
        timeout.clone(),
    );
    // Every explicitly-provided parameter must be taken as-is (the param
    // value wins over the env var set above).
    expect!(client.api_key).to_be(api_key.unwrap());
    expect!(client.endpoint).to_be(endpoint.unwrap());
    expect!(client.max_retries).to_be(max_retries.unwrap());
    expect!(client.timeout).to_be(timeout.unwrap());
    // Restore the caller's environment.
    match maybe_original_mistral_api_key {
        Some(original_mistral_api_key) => {
            std::env::set_var("MISTRAL_API_KEY", original_mistral_api_key)
        }
        None => std::env::remove_var("MISTRAL_API_KEY"),
    }
}

View File

@@ -0,0 +1,41 @@
use jrest::expect;
use mistralai_client::v1::{
chat::{ChatMessage, ChatParams},
client::Client,
constants::Model,
};
#[test]
fn test_model_constant() {
    // Every `Model` variant the client is expected to complete a chat with.
    let all_models = [
        Model::OpenMistral7b,
        Model::OpenMixtral8x7b,
        Model::OpenMixtral8x22b,
        Model::MistralTiny,
        Model::MistralSmallLatest,
        Model::MistralMediumLatest,
        Model::MistralLargeLatest,
        Model::CodestralLatest,
    ];

    let client = Client::new(None, None, None, None).unwrap();

    // Deterministic request: zero temperature plus a fixed seed.
    let params = ChatParams {
        temperature: 0.0,
        random_seed: Some(42),
        ..Default::default()
    };
    let messages = vec![ChatMessage::new_user_message("A number between 0 and 100?")];

    for model in all_models {
        let response = client
            .chat(model.clone(), messages.clone(), Some(params.clone()))
            .unwrap();

        // The response must echo the requested model and carry exactly one
        // non-empty completion choice.
        expect!(response.model).to_be(model);
        expect!(response.object).to_be("chat.completion".to_string());
        expect!(response.choices.len()).to_be(1);
        expect!(response.choices[0].index).to_be(0);
        expect!(response.choices[0].message.content.len()).to_be_greater_than(0);
    }
}