chore: checkpoint before Python removal

This commit is contained in:
2026-03-26 22:33:59 +00:00
parent 683cec9307
commit e568ddf82a
29972 changed files with 11269302 additions and 2 deletions

View File

@@ -0,0 +1 @@
{"files":{".cargo_vcs_info.json":"881072ee56e770a771f0a8ae43e5b6e8ddb351de6e85e83bf401b09e7a81b042",".github/workflows/main.yml":"c5daa56313bf0c231b99852ed42d8898a5eeddd6a87252f3b676c0001c1793ce","CHANGELOG.md":"d67fa4379ab2e844f67f2c0393df86b7af7fa2759ed806a8380863b73a9af04a","Cargo.toml":"f9874896c3270a5d861062f33a0546e6cb0d277c5f41e980377b759b8410c35b","Cargo.toml.orig":"e4c7cb947c01c1b972958d4e9e69af26a225526c0e2e61c2e11a1ef1450e38fd","LICENSE-APACHE":"b40930bbcf80744c86c46a12bc9da056641d722716c378f5659b9e555ef833e1","LICENSE-MIT":"87f7527b1066337751f7f4d717089bf514ff8f7ee7d66c4c5b891b09e744ec11","README.md":"d365aa7166ef9b36b072c4974728a8ee1678d4f38fbf1cb1697a2d02a4c85d17","README.tpl":"bfee2e50704a35849794bc862f7150fc67ac5648e48b7e1fdf18a7f91725d51f","benches/bench.rs":"15d4b58ad84cfe6c7af0d96f8aecef69e4e7cb23c7100b66a7c74dc1b714c2cb","benches/generator/mod.rs":"ee2048c04deb952b9c1bef15b776a62ee1ad92cb4a1c6f30fb85531a78daf206","codecov.yml":"a212c3e24e3ef6fb40f66aeaf193a66ba489ea9c4ec19a3a790cc92bc8280cc8","release.toml":"f6d947829797e436571122ccb016dcd7f32b5f543c9abcd66939ffbba1702049","specs/merge_tests.json":"a5bdf1ef083612f525b9705519e12725e8b5e94f5b9f228aa292d727d6d43aa2","specs/revert_tests.json":"bdf77e96478c29507077dad7437ec7e4a461118c085aee1df9267287eea8f8d0","specs/spec_tests.json":"9b560eb025fe8d05ae5abbca2af4911227ab4ef4a4ffa10e6d822bee249f4f21","specs/tests.json":"d241228a887f0b95532f716e714ad83d44ba00cc1dd65725fb90d7ae81858dbf","src/diff.rs":"c8aa40023836cdef3d4a7eb4cb03717ab8ae1a265121547d23eb21c81d03aba1","src/lib.rs":"fbbf84065c5bfb337e67049e4ad8807caf996ecf0e0b6b0bb529c225de093dad","tests/basic.rs":"598053dbdee364a6300f78b1543cec0ce7cedf00e58001f5b19062722f89fc0e","tests/errors.yaml":"ff4509bb0e7f092c48f3a941041f2332b33c3dd59e18e016871d6dd9a02e5529","tests/schemars.json":"885bf8d0eca3048dc973ea51f39e8b770cb47d96f09ce19087c820cadcb44065","tests/schemars.rs":"ea1a4b8bb7b79152b9096ba7e7efd548a0608085978514869e9ed32712b46f26","tests/suite.rs":"69
46d943195e52a4c8ca0ff3231a7408b2d41a9433f49bff84c234cedc2c50ee","tests/utoipa.json":"3185ba738576da247c0ab0a6240aff8ae57c3087fc48575642e28436ab46285a","tests/utoipa.rs":"fcaac1bfca344f8ef76ef2ee75f8f88d1851fd4edbfcf21eda8fb601c7b744d1","update-readme.sh":"8513af38a42e1988ae85014805208d5af8ed68a0a30afabd7a85693d11480307"},"package":"f300e415e2134745ef75f04562dd0145405c2f7fd92065db029ac4b16b57fe90"}

View File

@@ -0,0 +1,6 @@
{
"git": {
"sha1": "85a7f20c3b2e05d5463a4e15bc1462fc7d7c5618"
},
"path_in_vcs": ""
}

View File

@@ -0,0 +1,92 @@
# CI workflow: build, bench (nightly), test with coverage, and lint.
on:
  push:
    branches:
      - main
      - actions-*
  pull_request:
    branches: [ "main" ]
name: Build
jobs:
  check:
    name: Build
    runs-on: ubuntu-latest
    steps:
      - name: Checkout sources
        uses: actions/checkout@v3
      - name: Install stable toolchain
        uses: actions-rs/toolchain@v1
        with:
          profile: minimal
          toolchain: stable
          override: true
      - name: Run cargo build
        run: cargo build --verbose
  bench:
    name: Bench
    runs-on: ubuntu-latest
    steps:
      - name: Checkout sources
        uses: actions/checkout@v3
      - name: Install nightly toolchain
        uses: actions-rs/toolchain@v1
        with:
          profile: minimal
          toolchain: nightly
          override: true
      - name: Run cargo bench
        run: cargo +nightly bench --verbose
  test:
    name: Test
    runs-on: ubuntu-latest
    container:
      image: xd009642/tarpaulin:0.22.0
      options: --security-opt seccomp=unconfined
    steps:
      - name: Checkout sources
        uses: actions/checkout@v3
      - name: Install stable toolchain
        uses: actions-rs/toolchain@v1
        with:
          profile: minimal
          toolchain: stable
          override: true
      - name: Run cargo test
        run: cargo tarpaulin --verbose --all-features --workspace --timeout 120 --out Xml
      - name: Codecov
        uses: codecov/codecov-action@v3.1.4
        with:
          token: ${{ secrets.CODECOV_TOKEN }}
          verbose: true
          fail_ci_if_error: true
  lints:
    name: Lints
    runs-on: ubuntu-latest
    steps:
      - name: Checkout sources
        # Bumped from @v2 for consistency with the other jobs (v2 runs on
        # the deprecated Node.js 12 runtime).
        uses: actions/checkout@v3
      - name: Install stable toolchain
        uses: actions-rs/toolchain@v1
        with:
          profile: minimal
          toolchain: stable
          override: true
          components: rustfmt, clippy
      - name: Run cargo fmt
        run: cargo fmt --all -- --check
      - name: Run cargo clippy
        run: cargo clippy -- -D warnings

17
vendor/json-patch/CHANGELOG.md vendored Normal file
View File

@@ -0,0 +1,17 @@
# Changelog
## 0.3.0 (2022-12-10)
### Breaking Changes
- Removed `json_patch::patch_unsafe` operation, as it does not provide enough value over the regular `patch`.
- Error types changed to include some context.
- Removed `json_patch::from_value`. Use `serde_json::from_value` instead.
## 0.2.7 (2022-12-09)
### Fixed
- Fixed incorrect diffing for the whole document. Previously, differ would incorrectly yield path of `"/"` when the
whole document is replaced. The correct path should be `""`. This is a breaking change.
[#18](https://github.com/idubrov/json-patch/pull/18)

99
vendor/json-patch/Cargo.toml vendored Normal file
View File

@@ -0,0 +1,99 @@
# THIS FILE IS AUTOMATICALLY GENERATED BY CARGO
#
# When uploading crates to the registry Cargo will automatically
# "normalize" Cargo.toml files for maximal compatibility
# with all versions of Cargo and also rewrite `path` dependencies
# to registry (e.g., crates.io) dependencies.
#
# If you are reading this file be aware that the original Cargo.toml
# will likely look very different (and much more reasonable).
# See Cargo.toml.orig for the original contents.
[package]
edition = "2021"
name = "json-patch"
version = "4.1.0"
authors = ["Ivan Dubrov <dubrov.ivan@gmail.com>"]
build = false
autobins = false
autoexamples = false
autotests = false
autobenches = false
description = "RFC 6902, JavaScript Object Notation (JSON) Patch"
readme = "README.md"
keywords = [
"json",
"json-patch",
]
categories = []
license = "MIT/Apache-2.0"
repository = "https://github.com/idubrov/json-patch"
[lib]
name = "json_patch"
path = "src/lib.rs"
[[test]]
name = "basic"
path = "tests/basic.rs"
[[test]]
name = "schemars"
path = "tests/schemars.rs"
[[test]]
name = "suite"
path = "tests/suite.rs"
[[test]]
name = "utoipa"
path = "tests/utoipa.rs"
[[bench]]
name = "bench"
path = "benches/bench.rs"
[dependencies.jsonptr]
version = "0.7.1"
[dependencies.schemars]
version = "1.0.4"
optional = true
[dependencies.serde]
version = "1.0.159"
features = ["derive"]
[dependencies.serde_json]
version = "1.0.95"
[dependencies.thiserror]
version = "1.0.40"
[dependencies.utoipa]
version = "5"
optional = true
[dev-dependencies.expectorate]
version = "1.0"
[dev-dependencies.rand]
version = "0.8.5"
[dev-dependencies.schemars]
version = "1.0.4"
[dev-dependencies.serde_json]
version = "1.0.95"
features = ["preserve_order"]
[dev-dependencies.serde_yaml]
version = "0.9.19"
[dev-dependencies.utoipa]
version = "5"
features = ["debug"]
[features]
default = ["diff"]
diff = []

201
vendor/json-patch/LICENSE-APACHE vendored Normal file
View File

@@ -0,0 +1,201 @@
Apache License
Version 2.0, January 2004
http://www.apache.org/licenses/
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
1. Definitions.
"License" shall mean the terms and conditions for use, reproduction,
and distribution as defined by Sections 1 through 9 of this document.
"Licensor" shall mean the copyright owner or entity authorized by
the copyright owner that is granting the License.
"Legal Entity" shall mean the union of the acting entity and all
other entities that control, are controlled by, or are under common
control with that entity. For the purposes of this definition,
"control" means (i) the power, direct or indirect, to cause the
direction or management of such entity, whether by contract or
otherwise, or (ii) ownership of fifty percent (50%) or more of the
outstanding shares, or (iii) beneficial ownership of such entity.
"You" (or "Your") shall mean an individual or Legal Entity
exercising permissions granted by this License.
"Source" form shall mean the preferred form for making modifications,
including but not limited to software source code, documentation
source, and configuration files.
"Object" form shall mean any form resulting from mechanical
transformation or translation of a Source form, including but
not limited to compiled object code, generated documentation,
and conversions to other media types.
"Work" shall mean the work of authorship, whether in Source or
Object form, made available under the License, as indicated by a
copyright notice that is included in or attached to the work
(an example is provided in the Appendix below).
"Derivative Works" shall mean any work, whether in Source or Object
form, that is based on (or derived from) the Work and for which the
editorial revisions, annotations, elaborations, or other modifications
represent, as a whole, an original work of authorship. For the purposes
of this License, Derivative Works shall not include works that remain
separable from, or merely link (or bind by name) to the interfaces of,
the Work and Derivative Works thereof.
"Contribution" shall mean any work of authorship, including
the original version of the Work and any modifications or additions
to that Work or Derivative Works thereof, that is intentionally
submitted to Licensor for inclusion in the Work by the copyright owner
or by an individual or Legal Entity authorized to submit on behalf of
the copyright owner. For the purposes of this definition, "submitted"
means any form of electronic, verbal, or written communication sent
to the Licensor or its representatives, including but not limited to
communication on electronic mailing lists, source code control systems,
and issue tracking systems that are managed by, or on behalf of, the
Licensor for the purpose of discussing and improving the Work, but
excluding communication that is conspicuously marked or otherwise
designated in writing by the copyright owner as "Not a Contribution."
"Contributor" shall mean Licensor and any individual or Legal Entity
on behalf of whom a Contribution has been received by Licensor and
subsequently incorporated within the Work.
2. Grant of Copyright License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
copyright license to reproduce, prepare Derivative Works of,
publicly display, publicly perform, sublicense, and distribute the
Work and such Derivative Works in Source or Object form.
3. Grant of Patent License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
(except as stated in this section) patent license to make, have made,
use, offer to sell, sell, import, and otherwise transfer the Work,
where such license applies only to those patent claims licensable
by such Contributor that are necessarily infringed by their
Contribution(s) alone or by combination of their Contribution(s)
with the Work to which such Contribution(s) was submitted. If You
institute patent litigation against any entity (including a
cross-claim or counterclaim in a lawsuit) alleging that the Work
or a Contribution incorporated within the Work constitutes direct
or contributory patent infringement, then any patent licenses
granted to You under this License for that Work shall terminate
as of the date such litigation is filed.
4. Redistribution. You may reproduce and distribute copies of the
Work or Derivative Works thereof in any medium, with or without
modifications, and in Source or Object form, provided that You
meet the following conditions:
(a) You must give any other recipients of the Work or
Derivative Works a copy of this License; and
(b) You must cause any modified files to carry prominent notices
stating that You changed the files; and
(c) You must retain, in the Source form of any Derivative Works
that You distribute, all copyright, patent, trademark, and
attribution notices from the Source form of the Work,
excluding those notices that do not pertain to any part of
the Derivative Works; and
(d) If the Work includes a "NOTICE" text file as part of its
distribution, then any Derivative Works that You distribute must
include a readable copy of the attribution notices contained
within such NOTICE file, excluding those notices that do not
pertain to any part of the Derivative Works, in at least one
of the following places: within a NOTICE text file distributed
as part of the Derivative Works; within the Source form or
documentation, if provided along with the Derivative Works; or,
within a display generated by the Derivative Works, if and
wherever such third-party notices normally appear. The contents
of the NOTICE file are for informational purposes only and
do not modify the License. You may add Your own attribution
notices within Derivative Works that You distribute, alongside
or as an addendum to the NOTICE text from the Work, provided
that such additional attribution notices cannot be construed
as modifying the License.
You may add Your own copyright statement to Your modifications and
may provide additional or different license terms and conditions
for use, reproduction, or distribution of Your modifications, or
for any such Derivative Works as a whole, provided Your use,
reproduction, and distribution of the Work otherwise complies with
the conditions stated in this License.
5. Submission of Contributions. Unless You explicitly state otherwise,
any Contribution intentionally submitted for inclusion in the Work
by You to the Licensor shall be under the terms and conditions of
this License, without any additional terms or conditions.
Notwithstanding the above, nothing herein shall supersede or modify
the terms of any separate license agreement you may have executed
with Licensor regarding such Contributions.
6. Trademarks. This License does not grant permission to use the trade
names, trademarks, service marks, or product names of the Licensor,
except as required for reasonable and customary use in describing the
origin of the Work and reproducing the content of the NOTICE file.
7. Disclaimer of Warranty. Unless required by applicable law or
agreed to in writing, Licensor provides the Work (and each
Contributor provides its Contributions) on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied, including, without limitation, any warranties or conditions
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
PARTICULAR PURPOSE. You are solely responsible for determining the
appropriateness of using or redistributing the Work and assume any
risks associated with Your exercise of permissions under this License.
8. Limitation of Liability. In no event and under no legal theory,
whether in tort (including negligence), contract, or otherwise,
unless required by applicable law (such as deliberate and grossly
negligent acts) or agreed to in writing, shall any Contributor be
liable to You for damages, including any direct, indirect, special,
incidental, or consequential damages of any character arising as a
result of this License or out of the use or inability to use the
Work (including but not limited to damages for loss of goodwill,
work stoppage, computer failure or malfunction, or any and all
other commercial damages or losses), even if such Contributor
has been advised of the possibility of such damages.
9. Accepting Warranty or Additional Liability. While redistributing
the Work or Derivative Works thereof, You may choose to offer,
and charge a fee for, acceptance of support, warranty, indemnity,
or other liability obligations and/or rights consistent with this
License. However, in accepting such obligations, You may act only
on Your own behalf and on Your sole responsibility, not on behalf
of any other Contributor, and only if You agree to indemnify,
defend, and hold each Contributor harmless for any liability
incurred by, or claims asserted against, such Contributor by reason
of your accepting any such warranty or additional liability.
END OF TERMS AND CONDITIONS
APPENDIX: How to apply the Apache License to your work.
To apply the Apache License to your work, attach the following
boilerplate notice, with the fields enclosed by brackets "{}"
replaced with your own identifying information. (Don't include
the brackets!) The text should be enclosed in the appropriate
comment syntax for the file format. We also recommend that a
file or class name and description of purpose be included on the
same "printed page" as the copyright notice for easier
identification within third-party archives.
Copyright {yyyy} {name of copyright owner}
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.

21
vendor/json-patch/LICENSE-MIT vendored Normal file
View File

@@ -0,0 +1,21 @@
MIT License
Copyright (c) 2017 Ivan Dubrov
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

96
vendor/json-patch/README.md vendored Normal file
View File

@@ -0,0 +1,96 @@
[![crates.io](https://img.shields.io/crates/v/json-patch.svg)](https://crates.io/crates/json-patch)
[![crates.io](https://img.shields.io/crates/d/json-patch.svg)](https://crates.io/crates/json-patch)
[![Build](https://github.com/idubrov/json-patch/actions/workflows/main.yml/badge.svg)](https://github.com/idubrov/json-patch/actions)
[![Codecov](https://codecov.io/gh/idubrov/json-patch/branch/main/graph/badge.svg?token=hdcr6yfBfa)](https://codecov.io/gh/idubrov/json-patch)
# json-patch
A [JSON Patch (RFC 6902)](https://tools.ietf.org/html/rfc6902) and
[JSON Merge Patch (RFC 7396)](https://tools.ietf.org/html/rfc7396) implementation for Rust.
## Usage
Add this to your *Cargo.toml*:
```toml
[dependencies]
json-patch = "*"
```
## Examples
Create and patch document using JSON Patch:
```rust
use json_patch::{Patch, patch};
use serde_json::{from_value, json};
let mut doc = json!([
{ "name": "Andrew" },
{ "name": "Maxim" }
]);
let p: Patch = from_value(json!([
{ "op": "test", "path": "/0/name", "value": "Andrew" },
{ "op": "add", "path": "/0/happy", "value": true }
])).unwrap();
patch(&mut doc, &p).unwrap();
assert_eq!(doc, json!([
{ "name": "Andrew", "happy": true },
{ "name": "Maxim" }
]));
```
Create and patch document using JSON Merge Patch:
```rust
use json_patch::merge;
use serde_json::json;
let mut doc = json!({
"title": "Goodbye!",
"author" : {
"givenName" : "John",
"familyName" : "Doe"
},
"tags":[ "example", "sample" ],
"content": "This will be unchanged"
});
let patch = json!({
"title": "Hello!",
"phoneNumber": "+01-123-456-7890",
"author": {
"familyName": null
},
"tags": [ "example" ]
});
merge(&mut doc, &patch);
assert_eq!(doc, json!({
"title": "Hello!",
"author" : {
"givenName" : "John"
},
"tags": [ "example" ],
"content": "This will be unchanged",
"phoneNumber": "+01-123-456-7890"
}));
```
## License
Licensed under either of
* Apache License, Version 2.0, ([LICENSE-APACHE](LICENSE-APACHE) or http://www.apache.org/licenses/LICENSE-2.0)
* MIT license ([LICENSE-MIT](LICENSE-MIT) or http://opensource.org/licenses/MIT)
at your option.
### Contribution
Unless you explicitly state otherwise, any contribution intentionally submitted
for inclusion in the work by you, as defined in the Apache-2.0 license, shall be dual licensed as above, without any
additional terms or conditions.

23
vendor/json-patch/README.tpl vendored Normal file
View File

@@ -0,0 +1,23 @@
[![crates.io](https://img.shields.io/crates/v/json-patch.svg)](https://crates.io/crates/json-patch)
[![crates.io](https://img.shields.io/crates/d/json-patch.svg)](https://crates.io/crates/json-patch)
[![Build](https://github.com/idubrov/json-patch/actions/workflows/main.yml/badge.svg)](https://github.com/idubrov/json-patch/actions)
[![Codecov](https://codecov.io/gh/idubrov/json-patch/branch/main/graph/badge.svg?token=hdcr6yfBfa)](https://codecov.io/gh/idubrov/json-patch)
# {{crate}}
{{readme}}
## License
Licensed under either of
* Apache License, Version 2.0, ([LICENSE-APACHE](LICENSE-APACHE) or http://www.apache.org/licenses/LICENSE-2.0)
* MIT license ([LICENSE-MIT](LICENSE-MIT) or http://opensource.org/licenses/MIT)
at your option.
### Contribution
Unless you explicitly state otherwise, any contribution intentionally submitted
for inclusion in the work by you, as defined in the Apache-2.0 license, shall be dual licensed as above, without any
additional terms or conditions.

47
vendor/json-patch/benches/bench.rs vendored Normal file
View File

@@ -0,0 +1,47 @@
#![feature(test)]
extern crate test;
use json_patch::*;
use rand::SeedableRng;
use test::Bencher;
mod generator;
/// Benchmarks applying batches of add/remove patches to a randomly
/// generated JSON document via the safe `patch` entry point.
#[bench]
fn bench_add_removes(b: &mut Bencher) {
    // Deterministic seed so the generated document and patches are
    // reproducible across benchmark runs.
    let mut rng = rand::rngs::StdRng::from_seed(Default::default());
    let params = generator::Params::default();
    let doc = params.gen(&mut rng);
    let patches = generator::gen_add_remove_patches(&doc, &mut rng, 10, 10);
    b.iter(|| {
        // Work on a fresh copy so every iteration starts from the same state.
        let mut scratch = doc.clone();
        let mut outcome = Ok(());
        for op in &patches {
            // Apply each patch in-place, short-circuiting after the first error.
            outcome = outcome.and_then(|_| patch(&mut scratch, op));
        }
    });
}
// NOTE(review): this benchmark is gated behind a "nightly" feature that is
// not declared in Cargo.toml's [features] table, so it is never compiled.
// It also uses `rand::StdRng` (moved to `rand::rngs::StdRng` in rand 0.5+)
// and `patch_unsafe`, which the CHANGELOG says was removed in 0.3.0 —
// confirm whether this block should be deleted or updated.
#[cfg(feature = "nightly")]
#[bench]
fn bench_add_removes_unsafe(b: &mut Bencher) {
    // Deterministic seed so the benchmark input is reproducible.
    let mut rng = rand::StdRng::from_seed(Default::default());
    let params = generator::Params {
        ..Default::default()
    };
    let doc = params.gen(&mut rng);
    let patches = generator::gen_add_remove_patches(&doc, &mut rng, 10, 10);
    b.iter(|| {
        // Fresh copy per iteration so every run patches the same document.
        let mut doc = doc.clone();
        let mut result = Ok(());
        for ref p in &patches {
            // Patch mutable
            result = result.and_then(|_| patch_unsafe(&mut doc, p));
        }
    });
}

View File

@@ -0,0 +1,127 @@
use json_patch::{AddOperation, Patch, PatchOperation, RemoveOperation};
use jsonptr::PointerBuf;
use rand::distributions::Alphanumeric;
use rand::prelude::*;
use serde_json::{Map, Value};
/// Tuning knobs for the random JSON document generator.
pub struct Params {
    /// Maximum number of elements in a generated array (arrays hold 1..=array_size items).
    pub array_size: usize,
    /// Maximum number of entries in a generated object (objects hold 1..=map_size entries).
    pub map_size: usize,
    /// Maximum length of generated string scalar values.
    pub value_size: usize,
    /// Maximum nesting depth of the generated document.
    pub depth: usize,
    /// Maximum length of generated object keys.
    pub key_size: usize,
}

impl Default for Params {
    /// Defaults sized to produce moderately large benchmark documents.
    fn default() -> Self {
        Self {
            array_size: 6,
            map_size: 6,
            value_size: 100,
            depth: 8,
            key_size: 20,
        }
    }
}
/// Produces a random alphanumeric string of length 1..=max_len.
fn rand_str<R: Rng>(rng: &mut R, max_len: usize) -> String {
    // Draw the length first so the RNG stream order matches the rest of
    // the generator.
    let target_len = rng.gen::<usize>() % max_len + 1;
    let chars = rng.sample_iter(&Alphanumeric).take(target_len).map(char::from);
    chars.collect()
}
/// Picks a random JSON scalar: null, a random string, `false`, or a random u64.
fn rand_literal<R: Rng>(rng: &mut R, value_size: usize) -> Value {
    let variant = rng.gen::<u32>() % 4;
    match variant {
        0 => Value::Null,
        1 => Value::String(rand_str(rng, value_size)),
        // NOTE(review): booleans are always `false` here — presumably fine
        // for benchmark data, but confirm it is intentional.
        2 => Value::Bool(false),
        3 => Value::from(rng.gen::<u64>()),
        // `variant` is always in 0..4, so this arm can never run.
        _ => panic!(),
    }
}
impl Params {
pub fn gen<R: Rng>(&self, rng: &mut R) -> Value {
self.gen_internal(self.depth, rng)
}
fn gen_internal<R: Rng>(&self, depth: usize, rng: &mut R) -> Value {
if depth == 0 {
rand_literal(rng, self.value_size)
} else if rng.gen::<bool>() {
// Generate random array
let len = (rng.gen::<usize>() % self.array_size) + 1;
let vec: Vec<Value> = (0..len)
.map(|_| self.gen_internal(depth - 1, rng))
.collect();
Value::from(vec)
} else {
// Generate random object
let len = (rng.gen::<usize>() % self.map_size) + 1;
let map: Map<String, Value> = (0..len)
.map(|_| {
(
rand_str(rng, self.key_size),
self.gen_internal(depth - 1, rng),
)
})
.collect();
Value::from(map)
}
}
}
/// Builds `patches` random patch batches; each batch removes and then
/// re-adds (as null) `operations` randomly chosen leaves of `value`.
pub fn gen_add_remove_patches<R: Rng>(
    value: &Value,
    rnd: &mut R,
    patches: usize,
    operations: usize,
) -> Vec<Patch> {
    let leaves = all_leaves(value);
    let mut batches = Vec::new();
    for _ in 0..patches {
        let mut ops = Vec::new();
        for _ in 0..operations {
            // Pick a random leaf; remove it, then put a null back in its place.
            let target = leaves.choose(rnd).unwrap();
            ops.push(PatchOperation::Remove(RemoveOperation {
                path: (*target).clone(),
            }));
            ops.push(PatchOperation::Add(AddOperation {
                path: (*target).clone(),
                value: Value::Null,
            }));
        }
        batches.push(Patch(ops));
    }
    batches
}
/// Collects the JSON pointer of every scalar leaf in `value`.
fn all_leaves(value: &Value) -> Vec<PointerBuf> {
    let mut leaves = Vec::new();
    collect_leaves(value, &mut PointerBuf::new(), &mut leaves);
    leaves
}
/// Depth-first walk appending the pointer of each scalar leaf to `result`.
/// `prefix` is used as a mutable scratch path and is restored after each
/// recursive step.
fn collect_leaves(value: &Value, prefix: &mut PointerBuf, result: &mut Vec<PointerBuf>) {
    match value {
        Value::Array(arr) => {
            for (idx, item) in arr.iter().enumerate() {
                prefix.push_back(idx);
                collect_leaves(item, prefix, result);
                prefix.pop_back();
            }
        }
        Value::Object(map) => {
            for (key, item) in map.iter() {
                prefix.push_back(key);
                collect_leaves(item, prefix, result);
                prefix.pop_back();
            }
        }
        // Scalar: record the path that leads here.
        _ => result.push(prefix.clone()),
    }
}

6
vendor/json-patch/codecov.yml vendored Normal file
View File

@@ -0,0 +1,6 @@
coverage:
status:
project: off
patch: off
ignore:
- "benches"

1
vendor/json-patch/release.toml vendored Normal file
View File

@@ -0,0 +1 @@
pre-release-hook = "./update-readme.sh"

View File

@@ -0,0 +1,92 @@
[
{
"comment": "1. introduction",
"doc": {
"a": "b",
"c": {
"d": "e",
"f": "g"
}
},
"patch": {
"a": "z",
"c": {
"f": null
}
},
"expected": {
"a": "z",
"c": {
"d": "e"
}
},
"merge": true
},
{
"comment": "3. example",
"doc": {
"title": "Goodbye!",
"author": {
"givenName": "John",
"familyName": "Doe"
},
"tags": [
"example",
"sample"
],
"content": "This will be unchanged"
},
"patch": {
"title": "Hello!",
"phoneNumber": "+01-123-456-7890",
"author": {
"familyName": null
},
"tags": [
"example"
]
},
"expected": {
"title": "Hello!",
"author": {
"givenName": "John"
},
"tags": [
"example"
],
"content": "This will be unchanged",
"phoneNumber": "+01-123-456-7890"
},
"merge": true
},
{
"comment": "replacing non-object",
"doc": {
"title": "Goodbye!",
"author": {
"givenName": "John"
},
"tags": [
"example",
"sample"
],
"content": "This will be unchanged"
},
"patch": {
"tags": {
"kind": "example"
}
},
"expected": {
"title": "Goodbye!",
"author": {
"givenName": "John"
},
"tags": {
"kind": "example"
},
"content": "This will be unchanged"
},
"merge": true
}
]

View File

@@ -0,0 +1,286 @@
[
{
"comment": "Can revert add (replace key)",
"doc": {
"foo": {
"bar": {
"baz": true
}
}
},
"patch": [
{
"op": "add",
"path": "/foo",
"value": false
},
{
"op": "remove",
"path": "/foo/bar"
}
],
"error": "invalid pointer"
},
{
"comment": "Can revert add (insert into array)",
"doc": {
"foo": [1, 2, 3]
},
"patch": [
{
"op": "add",
"path": "/foo/1",
"value": false
},
{
"op": "remove",
"path": "/foo/bar"
}
],
"error": "invalid pointer"
},
{
"comment": "Can revert add (insert last element into array)",
"doc": {
"foo": [1, 2, 3]
},
"patch": [
{
"op": "add",
"path": "/foo/-",
"value": false
},
{
"op": "remove",
"path": "/foo/bar"
}
],
"error": "invalid pointer"
},
{
"comment": "Can revert remove (object)",
"doc": {
"foo": {
"bar": {
"baz": true
}
}
},
"patch": [
{
"op": "remove",
"path": "/foo"
},
{
"op": "remove",
"path": "/foo/bar"
}
],
"error": "invalid pointer"
},
{
"comment": "Can revert remove (array)",
"doc": {
"foo": [1, 2, 3]
},
"patch": [
{
"op": "remove",
"path": "/foo/1"
},
{
"op": "remove",
"path": "/foo/bar"
}
],
"error": "invalid pointer"
},
{
"comment": "Can revert replace (replace key)",
"doc": {
"foo": {
"bar": {
"baz": true
}
}
},
"patch": [
{
"op": "replace",
"path": "/foo",
"value": false
},
{
"op": "remove",
"path": "/foo/bar"
}
],
"error": "invalid pointer"
},
{
"comment": "Can revert replace (replace array element)",
"doc": {
"foo": [1, 2, 3]
},
"patch": [
{
"op": "replace",
"path": "/foo/1",
"value": false
},
{
"op": "remove",
"path": "/foo/bar"
}
],
"error": "invalid pointer"
},
{
"comment": "Can revert move (move into key)",
"doc": {
"foo": {
"bar": {
"baz": true
}
},
"abc": {
"def": {
"ghi": false
}
}
},
"patch": [
{
"op": "move",
"from": "/abc",
"path": "/foo",
"value": false
},
{
"op": "remove",
"path": "/foo/bar"
}
],
"error": "invalid pointer"
},
{
"comment": "Can revert move (move into array)",
"doc": {
"foo": [1, 2, 3],
"abc": {
"def": {
"ghi": false
}
}
},
"patch": [
{
"op": "move",
"path": "/foo/1",
"from": "/abc"
},
{
"op": "remove",
"path": "/foo/bar"
}
],
"error": "invalid pointer"
},
{
"comment": "Can revert move (move into last element of an array)",
"doc": {
"foo": [1, 2, 3],
"abc": {
"def": {
"ghi": false
}
}
},
"patch": [
{
"op": "move",
"path": "/foo/-",
"from": "/abc"
},
{
"op": "remove",
"path": "/foo/bar"
}
],
"error": "invalid pointer"
},
{
"comment": "Can revert copy (copy into key)",
"doc": {
"foo": {
"bar": {
"baz": true
}
},
"abc": {
"def": {
"ghi": false
}
}
},
"patch": [
{
"op": "copy",
"from": "/abc",
"path": "/foo",
"value": false
},
{
"op": "remove",
"path": "/foo/bar"
}
],
"error": "invalid pointer"
},
{
"comment": "Can revert copy (copy into array)",
"doc": {
"foo": [1, 2, 3],
"abc": {
"def": {
"ghi": false
}
}
},
"patch": [
{
"op": "copy",
"path": "/foo/1",
"from": "/abc"
},
{
"op": "remove",
"path": "/foo/bar"
}
],
"error": "invalid pointer"
},
{
"comment": "Can revert copy (copy into last element of an array)",
"doc": {
"foo": [1, 2, 3],
"abc": {
"def": {
"ghi": false
}
}
},
"patch": [
{
"op": "copy",
"path": "/foo/-",
"from": "/abc"
},
{
"op": "remove",
"path": "/foo/bar"
}
],
"error": "invalid pointer"
}
]

343
vendor/json-patch/specs/spec_tests.json vendored Normal file
View File

@@ -0,0 +1,343 @@
[
{
"comment": "4.1. add with missing object",
"doc": {
"q": {
"bar": 2
}
},
"patch": [
{
"op": "add",
"path": "/a/b",
"value": 1
}
],
"error": "path /a does not exist -- missing objects are not created recursively"
},
{
"comment": "A.1. Adding an Object Member",
"doc": {
"foo": "bar"
},
"patch": [
{
"op": "add",
"path": "/baz",
"value": "qux"
}
],
"expected": {
"baz": "qux",
"foo": "bar"
}
},
{
"comment": "A.2. Adding an Array Element",
"doc": {
"foo": [
"bar",
"baz"
]
},
"patch": [
{
"op": "add",
"path": "/foo/1",
"value": "qux"
}
],
"expected": {
"foo": [
"bar",
"qux",
"baz"
]
}
},
{
"comment": "A.3. Removing an Object Member",
"doc": {
"baz": "qux",
"foo": "bar"
},
"patch": [
{
"op": "remove",
"path": "/baz"
}
],
"expected": {
"foo": "bar"
}
},
{
"comment": "A.4. Removing an Array Element",
"doc": {
"foo": [
"bar",
"qux",
"baz"
]
},
"patch": [
{
"op": "remove",
"path": "/foo/1"
}
],
"expected": {
"foo": [
"bar",
"baz"
]
}
},
{
"comment": "A.5. Replacing a Value",
"doc": {
"baz": "qux",
"foo": "bar"
},
"patch": [
{
"op": "replace",
"path": "/baz",
"value": "boo"
}
],
"expected": {
"baz": "boo",
"foo": "bar"
}
},
{
"comment": "A.6. Moving a Value",
"doc": {
"foo": {
"bar": "baz",
"waldo": "fred"
},
"qux": {
"corge": "grault"
}
},
"patch": [
{
"op": "move",
"from": "/foo/waldo",
"path": "/qux/thud"
}
],
"expected": {
"foo": {
"bar": "baz"
},
"qux": {
"corge": "grault",
"thud": "fred"
}
}
},
{
"comment": "A.7. Moving an Array Element",
"doc": {
"foo": [
"all",
"grass",
"cows",
"eat"
]
},
"patch": [
{
"op": "move",
"from": "/foo/1",
"path": "/foo/3"
}
],
"expected": {
"foo": [
"all",
"cows",
"eat",
"grass"
]
}
},
{
"comment": "A.8. Testing a Value: Success",
"doc": {
"baz": "qux",
"foo": [
"a",
2,
"c"
]
},
"patch": [
{
"op": "test",
"path": "/baz",
"value": "qux"
},
{
"op": "test",
"path": "/foo/1",
"value": 2
}
],
"expected": {
"baz": "qux",
"foo": [
"a",
2,
"c"
]
}
},
{
"comment": "A.9. Testing a Value: Error",
"doc": {
"baz": "qux"
},
"patch": [
{
"op": "test",
"path": "/baz",
"value": "bar"
}
],
"error": "string not equivalent"
},
{
"comment": "A.10. Adding a nested Member Object",
"doc": {
"foo": "bar"
},
"patch": [
{
"op": "add",
"path": "/child",
"value": {
"grandchild": {}
}
}
],
"expected": {
"foo": "bar",
"child": {
"grandchild": {
}
}
}
},
{
"comment": "A.11. Ignoring Unrecognized Elements",
"doc": {
"foo": "bar"
},
"patch": [
{
"op": "add",
"path": "/baz",
"value": "qux",
"xyz": 123
}
],
"expected": {
"foo": "bar",
"baz": "qux"
}
},
{
"comment": "A.12. Adding to a Non-existent Target",
"doc": {
"foo": "bar"
},
"patch": [
{
"op": "add",
"path": "/baz/bat",
"value": "qux"
}
],
"error": "add to a non-existent target"
},
{
"comment": "A.13 Invalid JSON Patch Document",
"doc": {
"foo": "bar"
},
"patch": [
{
"op": "add",
"path": "/baz",
"value": "qux",
"op": "remove"
}
],
"error": "operation has two 'op' members",
"disabled": true
},
{
"comment": "A.14. ~ Escape Ordering",
"doc": {
"/": 9,
"~1": 10
},
"patch": [
{
"op": "test",
"path": "/~01",
"value": 10
}
],
"expected": {
"/": 9,
"~1": 10
}
},
{
"comment": "A.15. Comparing Strings and Numbers",
"doc": {
"/": 9,
"~1": 10
},
"patch": [
{
"op": "test",
"path": "/~01",
"value": "10"
}
],
"error": "number is not equal to string"
},
{
"comment": "A.16. Adding an Array Value",
"doc": {
"foo": [
"bar"
]
},
"patch": [
{
"op": "add",
"path": "/foo/-",
"value": [
"abc",
"def"
]
}
],
"expected": {
"foo": [
"bar",
[
"abc",
"def"
]
]
}
}
]

1877
vendor/json-patch/specs/tests.json vendored Normal file

File diff suppressed because it is too large Load Diff

358
vendor/json-patch/src/diff.rs vendored Normal file
View File

@@ -0,0 +1,358 @@
use crate::Patch;
use jsonptr::PointerBuf;
use serde_json::{Map, Value};
/// Recursively computes the difference between `left` and `right`, appending
/// the resulting operations to `patch`. `pointer` tracks the current location
/// inside the document being diffed.
fn diff_impl(left: &Value, right: &Value, pointer: &mut PointerBuf, patch: &mut super::Patch) {
    match (left, right) {
        (Value::Object(lhs), Value::Object(rhs)) => diff_object(lhs, rhs, pointer, patch),
        (Value::Array(lhs), Value::Array(rhs)) => diff_array(lhs, rhs, pointer, patch),
        _ if left == right => {
            // Identical values: no operation required.
        }
        _ => {
            // Scalars (or mismatched container kinds) that differ are handled
            // with a single `replace` at the current location.
            let op = super::PatchOperation::Replace(super::ReplaceOperation {
                path: pointer.clone(),
                value: right.clone(),
            });
            patch.0.push(op);
        }
    }
}
/// Diffs two JSON arrays element-by-element, producing `remove` operations
/// when `left` is longer and `add` operations when `right` is longer.
///
/// `shift` counts elements already removed: each removal makes subsequent
/// elements occupy a smaller index in the patched document, so the emitted
/// pointer index is `idx - shift` rather than `idx`.
fn diff_array(left: &[Value], right: &[Value], pointer: &mut PointerBuf, patch: &mut Patch) {
    let len = left.len().max(right.len());
    let mut shift = 0usize;
    for idx in 0..len {
        // Index as it will appear in the document after earlier removals.
        pointer.push_back(idx - shift);
        match (left.get(idx), right.get(idx)) {
            (Some(left), Some(right)) => {
                // Both array have an element at this index
                diff_impl(left, right, pointer, patch);
            }
            (Some(_left), None) => {
                // The left array has an element at this index, but not the right
                shift += 1;
                patch
                    .0
                    .push(super::PatchOperation::Remove(super::RemoveOperation {
                        path: pointer.clone(),
                    }));
            }
            (None, Some(right)) => {
                // The right array has an element at this index, but not the left
                patch
                    .0
                    .push(super::PatchOperation::Add(super::AddOperation {
                        path: pointer.clone(),
                        value: right.clone(),
                    }));
            }
            (None, None) => {
                // `len` is the max of both lengths, so at least one side
                // always has an element at `idx`.
                unreachable!()
            }
        }
        pointer.pop_back();
    }
}
/// Diffs two JSON objects: keys present in `right` generate `add` operations
/// (or recurse for replacement), keys present only in `left` generate
/// `remove` operations.
fn diff_object(
    left: &Map<String, Value>,
    right: &Map<String, Value>,
    pointer: &mut PointerBuf,
    patch: &mut Patch,
) {
    // First pass: every key of `right` either recurses into a shared value or
    // becomes an `add`.
    for (key, new_value) in right {
        pointer.push_back(key);
        if let Some(old_value) = left.get(key) {
            diff_impl(old_value, new_value, pointer, patch);
        } else {
            patch
                .0
                .push(super::PatchOperation::Add(super::AddOperation {
                    path: pointer.clone(),
                    value: new_value.clone(),
                }));
        }
        pointer.pop_back();
    }
    // Second pass: keys that exist only on the left must be removed.
    for key in left.keys() {
        if !right.contains_key(key) {
            pointer.push_back(key);
            patch
                .0
                .push(super::PatchOperation::Remove(super::RemoveOperation {
                    path: pointer.clone(),
                }));
            pointer.pop_back();
        }
    }
}
/// Diff two JSON documents and generate a JSON Patch (RFC 6902).
///
/// # Example
/// Diff two JSONs:
///
/// ```rust
/// #[macro_use]
/// use json_patch::{Patch, patch, diff};
/// use serde_json::{json, from_value};
///
/// # pub fn main() {
/// let left = json!({
/// "title": "Goodbye!",
/// "author" : {
/// "givenName" : "John",
/// "familyName" : "Doe"
/// },
/// "tags":[ "example", "sample" ],
/// "content": "This will be unchanged"
/// });
///
/// let right = json!({
/// "title": "Hello!",
/// "author" : {
/// "givenName" : "John"
/// },
/// "tags": [ "example" ],
/// "content": "This will be unchanged",
/// "phoneNumber": "+01-123-456-7890"
/// });
///
/// let p = diff(&left, &right);
/// assert_eq!(p, from_value::<Patch>(json!([
/// { "op": "replace", "path": "/title", "value": "Hello!" },
/// { "op": "remove", "path": "/author/familyName" },
/// { "op": "remove", "path": "/tags/1" },
/// { "op": "add", "path": "/phoneNumber", "value": "+01-123-456-7890" },
/// ])).unwrap());
///
/// let mut doc = left.clone();
/// patch(&mut doc, &p).unwrap();
/// assert_eq!(doc, right);
///
/// # }
/// ```
pub fn diff(left: &Value, right: &Value) -> super::Patch {
    let mut operations = super::Patch::default();
    let mut location = PointerBuf::new();
    diff_impl(left, right, &mut location, &mut operations);
    operations
}
#[cfg(test)]
mod tests {
    use serde_json::{json, Value};

    // Diffing an object against a scalar (null) yields one whole-document replace.
    #[test]
    pub fn replace_all() {
        let mut left = json!({"title": "Hello!"});
        let patch = super::diff(&left, &Value::Null);
        assert_eq!(
            patch,
            serde_json::from_value(json!([
                { "op": "replace", "path": "", "value": null },
            ]))
            .unwrap()
        );
        crate::patch(&mut left, &patch).unwrap();
    }

    // An empty object key is addressed by the pointer "/" (not "").
    #[test]
    pub fn diff_empty_key() {
        let mut left = json!({"title": "Something", "": "Hello!"});
        let right = json!({"title": "Something", "": "Bye!"});
        let patch = super::diff(&left, &right);
        assert_eq!(
            patch,
            serde_json::from_value(json!([
                { "op": "replace", "path": "/", "value": "Bye!" },
            ]))
            .unwrap()
        );
        crate::patch(&mut left, &patch).unwrap();
        assert_eq!(left, right);
    }

    // Diffing from null yields one whole-document replace with the new value.
    #[test]
    pub fn add_all() {
        let right = json!({"title": "Hello!"});
        let patch = super::diff(&Value::Null, &right);
        assert_eq!(
            patch,
            serde_json::from_value(json!([
                { "op": "replace", "path": "", "value": { "title": "Hello!" } },
            ]))
            .unwrap()
        );
        let mut left = Value::Null;
        crate::patch(&mut left, &patch).unwrap();
        assert_eq!(left, right);
    }

    // Removals target post-removal indices: both elements are removed at /0.
    #[test]
    pub fn remove_all() {
        let mut left = json!(["hello", "bye"]);
        let right = json!([]);
        let patch = super::diff(&left, &right);
        assert_eq!(
            patch,
            serde_json::from_value(json!([
                { "op": "remove", "path": "/0" },
                { "op": "remove", "path": "/0" },
            ]))
            .unwrap()
        );
        crate::patch(&mut left, &patch).unwrap();
        assert_eq!(left, right);
    }

    // Trailing removals shift down to the same index (/1 twice).
    #[test]
    pub fn remove_tail() {
        let mut left = json!(["hello", "bye", "hi"]);
        let right = json!(["hello"]);
        let patch = super::diff(&left, &right);
        assert_eq!(
            patch,
            serde_json::from_value(json!([
                { "op": "remove", "path": "/1" },
                { "op": "remove", "path": "/1" },
            ]))
            .unwrap()
        );
        crate::patch(&mut left, &patch).unwrap();
        assert_eq!(left, right);
    }

    // Elements beyond the left array's length become adds at increasing indices.
    #[test]
    pub fn add_tail() {
        let mut left = json!(["hello"]);
        let right = json!(["hello", "bye", "hi"]);
        let patch = super::diff(&left, &right);
        assert_eq!(
            patch,
            serde_json::from_value(json!([
                { "op": "add", "path": "/1", "value": "bye" },
                { "op": "add", "path": "/2", "value": "hi" }
            ]))
            .unwrap()
        );
        crate::patch(&mut left, &patch).unwrap();
        assert_eq!(left, right);
    }

    // Array vs object at the root is a single whole-document replace.
    #[test]
    pub fn replace_object() {
        let mut left = json!(["hello", "bye"]);
        let right = json!({"hello": "bye"});
        let patch = super::diff(&left, &right);
        assert_eq!(
            patch,
            serde_json::from_value(json!([
                { "op": "replace", "path": "", "value": {"hello": "bye"} }
            ]))
            .unwrap()
        );
        crate::patch(&mut left, &patch).unwrap();
        assert_eq!(left, right);
    }

    // Keys containing `/` and `~` must round-trip via RFC 6901 escaping.
    #[test]
    fn escape_json_keys() {
        let mut left = json!({
            "/slashed/path/with/~": 1
        });
        let right = json!({
            "/slashed/path/with/~": 2,
        });
        let patch = super::diff(&left, &right);
        crate::patch(&mut left, &patch).unwrap();
        assert_eq!(left, right);
    }

    // Object replaced by array: one replace at the containing path.
    #[test]
    pub fn replace_object_array() {
        let mut left = json!({ "style": { "ref": {"name": "name"} } });
        let right = json!({ "style": [{ "ref": {"hello": "hello"} }]});
        let patch = crate::diff(&left, &right);
        assert_eq!(
            patch,
            serde_json::from_value(json!([
                { "op": "replace", "path": "/style", "value": [{ "ref": {"hello": "hello"} }] },
            ]))
            .unwrap()
        );
        crate::patch(&mut left, &patch).unwrap();
        assert_eq!(left, right);
    }

    // Array replaced by object: one replace at the containing path.
    #[test]
    pub fn replace_array_object() {
        let mut left = json!({ "style": [{ "ref": {"hello": "hello"} }]});
        let right = json!({ "style": { "ref": {"name": "name"} } });
        let patch = crate::diff(&left, &right);
        assert_eq!(
            patch,
            serde_json::from_value(json!([
                { "op": "replace", "path": "/style", "value": { "ref": {"name": "name"} } },
            ]))
            .unwrap()
        );
        crate::patch(&mut left, &patch).unwrap();
        assert_eq!(left, right);
    }

    // A key present only on the left becomes a remove.
    #[test]
    pub fn remove_keys() {
        let mut left = json!({"first": 1, "second": 2, "third": 3});
        let right = json!({"first": 1, "second": 2});
        let patch = super::diff(&left, &right);
        assert_eq!(
            patch,
            serde_json::from_value(json!([
                { "op": "remove", "path": "/third" }
            ]))
            .unwrap()
        );
        crate::patch(&mut left, &patch).unwrap();
        assert_eq!(left, right);
    }

    // A key present only on the right becomes an add.
    #[test]
    pub fn add_keys() {
        let mut left = json!({"first": 1, "second": 2});
        let right = json!({"first": 1, "second": 2, "third": 3});
        let patch = super::diff(&left, &right);
        assert_eq!(
            patch,
            serde_json::from_value(json!([
                { "op": "add", "path": "/third", "value": 3 }
            ]))
            .unwrap()
        );
        crate::patch(&mut left, &patch).unwrap();
        assert_eq!(left, right);
    }
}

683
vendor/json-patch/src/lib.rs vendored Normal file
View File

@@ -0,0 +1,683 @@
//! A [JSON Patch (RFC 6902)](https://tools.ietf.org/html/rfc6902) and
//! [JSON Merge Patch (RFC 7396)](https://tools.ietf.org/html/rfc7396) implementation for Rust.
//!
//! # Usage
//!
//! Add this to your *Cargo.toml*:
//! ```toml
//! [dependencies]
//! json-patch = "*"
//! ```
//!
//! # Examples
//! Create and patch document using JSON Patch:
//!
//! ```rust
//! #[macro_use]
//! use json_patch::{Patch, patch};
//! use serde_json::{from_value, json};
//!
//! # pub fn main() {
//! let mut doc = json!([
//! { "name": "Andrew" },
//! { "name": "Maxim" }
//! ]);
//!
//! let p: Patch = from_value(json!([
//! { "op": "test", "path": "/0/name", "value": "Andrew" },
//! { "op": "add", "path": "/0/happy", "value": true }
//! ])).unwrap();
//!
//! patch(&mut doc, &p).unwrap();
//! assert_eq!(doc, json!([
//! { "name": "Andrew", "happy": true },
//! { "name": "Maxim" }
//! ]));
//!
//! # }
//! ```
//!
//! Create and patch document using JSON Merge Patch:
//!
//! ```rust
//! #[macro_use]
//! use json_patch::merge;
//! use serde_json::json;
//!
//! # pub fn main() {
//! let mut doc = json!({
//! "title": "Goodbye!",
//! "author" : {
//! "givenName" : "John",
//! "familyName" : "Doe"
//! },
//! "tags":[ "example", "sample" ],
//! "content": "This will be unchanged"
//! });
//!
//! let patch = json!({
//! "title": "Hello!",
//! "phoneNumber": "+01-123-456-7890",
//! "author": {
//! "familyName": null
//! },
//! "tags": [ "example" ]
//! });
//!
//! merge(&mut doc, &patch);
//! assert_eq!(doc, json!({
//! "title": "Hello!",
//! "author" : {
//! "givenName" : "John"
//! },
//! "tags": [ "example" ],
//! "content": "This will be unchanged",
//! "phoneNumber": "+01-123-456-7890"
//! }));
//! # }
//! ```
#![warn(missing_docs)]
use jsonptr::{index::Index, Pointer, PointerBuf};
use serde::{Deserialize, Serialize};
use serde_json::{Map, Value};
use std::fmt::{self, Display, Formatter};
use thiserror::Error;
// So users can instance `jsonptr::PointerBuf` and others without
// having to explicitly match our `jsonptr` version.
pub use jsonptr;
#[cfg(feature = "diff")]
mod diff;
#[cfg(feature = "diff")]
pub use self::diff::diff;
/// Adapts a `fmt::Write` sink to the `io::Write` interface, which is what
/// `serde_json::to_writer` requires. Used by the `impl_display!` macro below.
struct WriteAdapter<'a>(&'a mut dyn fmt::Write);
impl std::io::Write for WriteAdapter<'_> {
    fn write(&mut self, buf: &[u8]) -> Result<usize, std::io::Error> {
        // serde_json only emits valid UTF-8, so this conversion should not fail here.
        let s = std::str::from_utf8(buf).unwrap();
        self.0
            .write_str(s)
            .map_err(|_| std::io::Error::from(std::io::ErrorKind::Other))?;
        Ok(buf.len())
    }
    fn flush(&mut self) -> Result<(), std::io::Error> {
        // Nothing is buffered; flushing is a no-op.
        Ok(())
    }
}
/// Implements `Display` for `$name` by serializing it to JSON through serde;
/// the alternate flag (`{:#}`) selects pretty-printed output.
macro_rules! impl_display {
    ($name:ident) => {
        impl Display for $name {
            fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
                let alternate = f.alternate();
                if alternate {
                    serde_json::to_writer_pretty(WriteAdapter(f), self)
                        .map_err(|_| std::fmt::Error)?;
                } else {
                    serde_json::to_writer(WriteAdapter(f), self).map_err(|_| std::fmt::Error)?;
                }
                Ok(())
            }
        }
    };
}
/// Representation of JSON Patch (list of patch operations)
#[derive(Clone, Debug, Default, Deserialize, Eq, PartialEq, Serialize)]
#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))]
#[cfg_attr(feature = "utoipa", derive(utoipa::ToSchema))]
pub struct Patch(pub Vec<PatchOperation>);
// `Display` renders the patch as its JSON array form (`{:#}` pretty-prints).
impl_display!(Patch);
// Lets a `Patch` be used anywhere a slice of operations is expected
// (e.g. passed directly to `patch()`).
impl std::ops::Deref for Patch {
    type Target = [PatchOperation];
    fn deref(&self) -> &[PatchOperation] {
        &self.0
    }
}
/// JSON Patch 'add' operation representation
#[derive(Clone, Debug, Default, Deserialize, Eq, PartialEq, Serialize)]
#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))]
#[cfg_attr(feature = "utoipa", derive(utoipa::ToSchema))]
pub struct AddOperation {
    /// JSON-Pointer value [RFC6901](https://tools.ietf.org/html/rfc6901) that references a location
    /// within the target document where the operation is performed.
    #[cfg_attr(feature = "schemars", schemars(schema_with = "String::json_schema"))]
    #[cfg_attr(feature = "utoipa", schema(value_type = String))]
    pub path: PointerBuf,
    /// Value to add to the target location.
    pub value: Value,
}
// `Display` renders the operation as its JSON object form (`{:#}` pretty-prints).
impl_display!(AddOperation);
/// JSON Patch 'remove' operation representation
#[derive(Clone, Debug, Default, Deserialize, Eq, PartialEq, Serialize)]
#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))]
#[cfg_attr(feature = "utoipa", derive(utoipa::ToSchema))]
pub struct RemoveOperation {
    /// JSON-Pointer value [RFC6901](https://tools.ietf.org/html/rfc6901) that references a location
    /// within the target document where the operation is performed.
    #[cfg_attr(feature = "schemars", schemars(schema_with = "String::json_schema"))]
    #[cfg_attr(feature = "utoipa", schema(value_type = String))]
    pub path: PointerBuf,
}
// `Display` renders the operation as its JSON object form (`{:#}` pretty-prints).
impl_display!(RemoveOperation);
/// JSON Patch 'replace' operation representation
#[derive(Clone, Debug, Default, Deserialize, Eq, PartialEq, Serialize)]
#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))]
#[cfg_attr(feature = "utoipa", derive(utoipa::ToSchema))]
pub struct ReplaceOperation {
    /// JSON-Pointer value [RFC6901](https://tools.ietf.org/html/rfc6901) that references a location
    /// within the target document where the operation is performed.
    #[cfg_attr(feature = "schemars", schemars(schema_with = "String::json_schema"))]
    #[cfg_attr(feature = "utoipa", schema(value_type = String))]
    pub path: PointerBuf,
    /// Value to replace with.
    pub value: Value,
}
// `Display` renders the operation as its JSON object form (`{:#}` pretty-prints).
impl_display!(ReplaceOperation);
/// JSON Patch 'move' operation representation
#[derive(Clone, Debug, Default, Deserialize, Eq, PartialEq, Serialize)]
#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))]
#[cfg_attr(feature = "utoipa", derive(utoipa::ToSchema))]
pub struct MoveOperation {
    /// JSON-Pointer value [RFC6901](https://tools.ietf.org/html/rfc6901) that references a location
    /// to move value from.
    #[cfg_attr(feature = "schemars", schemars(schema_with = "String::json_schema"))]
    #[cfg_attr(feature = "utoipa", schema(value_type = String))]
    pub from: PointerBuf,
    /// JSON-Pointer value [RFC6901](https://tools.ietf.org/html/rfc6901) that references a location
    /// within the target document where the operation is performed.
    #[cfg_attr(feature = "schemars", schemars(schema_with = "String::json_schema"))]
    #[cfg_attr(feature = "utoipa", schema(value_type = String))]
    pub path: PointerBuf,
}
// `Display` renders the operation as its JSON object form (`{:#}` pretty-prints).
impl_display!(MoveOperation);
/// JSON Patch 'copy' operation representation
#[derive(Clone, Debug, Default, Deserialize, Eq, PartialEq, Serialize)]
#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))]
#[cfg_attr(feature = "utoipa", derive(utoipa::ToSchema))]
pub struct CopyOperation {
    /// JSON-Pointer value [RFC6901](https://tools.ietf.org/html/rfc6901) that references a location
    /// to copy value from.
    #[cfg_attr(feature = "schemars", schemars(schema_with = "String::json_schema"))]
    #[cfg_attr(feature = "utoipa", schema(value_type = String))]
    pub from: PointerBuf,
    /// JSON-Pointer value [RFC6901](https://tools.ietf.org/html/rfc6901) that references a location
    /// within the target document where the operation is performed.
    #[cfg_attr(feature = "schemars", schemars(schema_with = "String::json_schema"))]
    #[cfg_attr(feature = "utoipa", schema(value_type = String))]
    pub path: PointerBuf,
}
// `Display` renders the operation as its JSON object form (`{:#}` pretty-prints).
impl_display!(CopyOperation);
/// JSON Patch 'test' operation representation
#[derive(Clone, Debug, Default, Deserialize, Eq, PartialEq, Serialize)]
#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))]
#[cfg_attr(feature = "utoipa", derive(utoipa::ToSchema))]
pub struct TestOperation {
    /// JSON-Pointer value [RFC6901](https://tools.ietf.org/html/rfc6901) that references a location
    /// within the target document where the operation is performed.
    #[cfg_attr(feature = "schemars", schemars(schema_with = "String::json_schema"))]
    #[cfg_attr(feature = "utoipa", schema(value_type = String))]
    pub path: PointerBuf,
    /// Value to test against.
    pub value: Value,
}
// `Display` renders the operation as its JSON object form (`{:#}` pretty-prints).
impl_display!(TestOperation);
/// JSON Patch single patch operation
// Serialized/deserialized with an internal `"op"` tag whose value is the
// lowercase variant name, matching the RFC 6902 wire format.
#[derive(Debug, Serialize, Deserialize, Clone, PartialEq, Eq)]
#[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))]
#[cfg_attr(feature = "utoipa", derive(utoipa::ToSchema))]
#[serde(tag = "op")]
#[serde(rename_all = "lowercase")]
pub enum PatchOperation {
    /// 'add' operation
    Add(AddOperation),
    /// 'remove' operation
    Remove(RemoveOperation),
    /// 'replace' operation
    Replace(ReplaceOperation),
    /// 'move' operation
    Move(MoveOperation),
    /// 'copy' operation
    Copy(CopyOperation),
    /// 'test' operation
    Test(TestOperation),
}
// `Display` renders the operation as its JSON object form (`{:#}` pretty-prints).
impl_display!(PatchOperation);
impl PatchOperation {
    /// Returns a reference to the path the operation applies to.
    ///
    /// For `move` and `copy` this is the destination `path`, not `from`.
    pub fn path(&self) -> &Pointer {
        match self {
            Self::Add(op) => &op.path,
            Self::Remove(op) => &op.path,
            Self::Replace(op) => &op.path,
            Self::Move(op) => &op.path,
            Self::Copy(op) => &op.path,
            Self::Test(op) => &op.path,
        }
    }
}
impl Default for PatchOperation {
    fn default() -> Self {
        // Defaults to a `test` operation with the default (empty) path and
        // `null` value.
        PatchOperation::Test(TestOperation::default())
    }
}
/// The kind of failure encountered while applying a single JSON Patch
/// operation; wrapped with positional context in [`PatchError`].
#[derive(Debug, Error)]
#[non_exhaustive]
pub enum PatchErrorKind {
    /// `test` operation failed because values did not match.
    #[error("value did not match")]
    TestFailed,
    /// `from` JSON pointer in a `move` or a `copy` operation was incorrect.
    #[error("\"from\" path is invalid")]
    InvalidFromPointer,
    /// `path` JSON pointer is incorrect.
    #[error("path is invalid")]
    InvalidPointer,
    /// `move` operation failed because target is inside the `from` location.
    #[error("cannot move the value inside itself")]
    CannotMoveInsideItself,
}
// A pointer token that fails to parse as an array index is an invalid pointer.
impl From<jsonptr::index::ParseIndexError> for PatchErrorKind {
    fn from(_: jsonptr::index::ParseIndexError) -> Self {
        Self::InvalidPointer
    }
}
// An array index outside the array's bounds is likewise an invalid pointer.
impl From<jsonptr::index::OutOfBoundsError> for PatchErrorKind {
    fn from(_: jsonptr::index::OutOfBoundsError) -> Self {
        Self::InvalidPointer
    }
}
/// This type represents all possible errors that can occur when applying JSON patch
// The message renders the failing operation's index as a pointer-like segment,
// e.g. "operation '/3' failed at path '/a/b': path is invalid".
#[derive(Debug, Error)]
#[error("operation '/{operation}' failed at path '{path}': {kind}")]
#[non_exhaustive]
pub struct PatchError {
    /// Index of the operation that has failed.
    pub operation: usize,
    /// `path` of the operation.
    pub path: PointerBuf,
    /// Kind of the error.
    pub kind: PatchErrorKind,
}
/// Wraps a low-level [`PatchErrorKind`] with the index and path of the
/// operation that failed.
fn translate_error(kind: PatchErrorKind, operation: usize, path: &Pointer) -> PatchError {
    PatchError {
        kind,
        operation,
        path: path.to_owned(),
    }
}
/// RFC 6902 `add`: inserts `value` at `path`, returning any value it
/// displaced (an overwritten object member, or the whole document when the
/// path is empty).
fn add(doc: &mut Value, path: &Pointer, value: Value) -> Result<Option<Value>, PatchErrorKind> {
    let Some((parent, last)) = path.split_back() else {
        // An empty pointer addresses the root: replace the document wholesale.
        return Ok(Some(std::mem::replace(doc, value)));
    };
    let target = doc
        .pointer_mut(parent.as_str())
        .ok_or(PatchErrorKind::InvalidPointer)?;
    match target {
        Value::Object(obj) => Ok(obj.insert(last.decoded().into_owned(), value)),
        Value::Array(arr) => {
            // `for_len_incl` allows an index equal to the length (and `-`),
            // i.e. appending to the array.
            let idx = last.to_index()?.for_len_incl(arr.len())?;
            arr.insert(idx, value);
            Ok(None)
        }
        _ => Err(PatchErrorKind::InvalidPointer),
    }
}
/// RFC 6902 `remove`: deletes and returns the value at `path`. When
/// `allow_last` is set, a trailing `-` token removes the array's last
/// element (used when undoing an append).
fn remove(doc: &mut Value, path: &Pointer, allow_last: bool) -> Result<Value, PatchErrorKind> {
    let Some((parent, last)) = path.split_back() else {
        // Removing the document root is not a valid operation.
        return Err(PatchErrorKind::InvalidPointer);
    };
    let target = doc
        .pointer_mut(parent.as_str())
        .ok_or(PatchErrorKind::InvalidPointer)?;
    match target {
        Value::Object(obj) => obj
            .remove(last.decoded().as_ref())
            .ok_or(PatchErrorKind::InvalidPointer),
        // XXX: is this really correct? semantically it seems off, `-` refers
        // to an empty position, not the last element
        Value::Array(arr) if allow_last && matches!(last.to_index(), Ok(Index::Next)) => {
            Ok(arr.pop().unwrap())
        }
        Value::Array(arr) => {
            let idx = last.to_index()?.for_len(arr.len())?;
            Ok(arr.remove(idx))
        }
        _ => Err(PatchErrorKind::InvalidPointer),
    }
}
/// RFC 6902 `replace`: swaps in `value` at an existing `path`, returning the
/// value that was there before.
fn replace(doc: &mut Value, path: &Pointer, value: Value) -> Result<Value, PatchErrorKind> {
    match doc.pointer_mut(path.as_str()) {
        Some(target) => Ok(std::mem::replace(target, value)),
        None => Err(PatchErrorKind::InvalidPointer),
    }
}
/// RFC 6902 `move`: removes the value at `from` and adds it at `path`.
/// Returns whatever value the destination add displaced.
fn mov(
    doc: &mut Value,
    from: &Pointer,
    path: &Pointer,
    allow_last: bool,
) -> Result<Option<Value>, PatchErrorKind> {
    // Moving a value into one of its own descendants is forbidden.
    if path.starts_with(from) && path.len() != from.len() {
        return Err(PatchErrorKind::CannotMoveInsideItself);
    }
    // A missing source location is reported as an invalid `from` pointer.
    let removed = match remove(doc, from, allow_last) {
        Ok(val) => val,
        Err(PatchErrorKind::InvalidPointer) => return Err(PatchErrorKind::InvalidFromPointer),
        Err(err) => return Err(err),
    };
    add(doc, path, removed)
}
/// RFC 6902 `copy`: clones the value at `from` and adds the clone at `path`.
/// Returns whatever value the destination add displaced.
fn copy(doc: &mut Value, from: &Pointer, path: &Pointer) -> Result<Option<Value>, PatchErrorKind> {
    let Some(source) = doc.pointer(from.as_str()) else {
        return Err(PatchErrorKind::InvalidFromPointer);
    };
    // Clone first so the mutable add below does not conflict with the borrow.
    let source = source.clone();
    add(doc, path, source)
}
/// RFC 6902 `test`: succeeds only when the value at `path` is deeply equal
/// to `expected`.
fn test(doc: &Value, path: &Pointer, expected: &Value) -> Result<(), PatchErrorKind> {
    let actual = doc
        .pointer(path.as_str())
        .ok_or(PatchErrorKind::InvalidPointer)?;
    if actual == expected {
        Ok(())
    } else {
        Err(PatchErrorKind::TestFailed)
    }
}
/// Patch provided JSON document (given as `serde_json::Value`) in-place. If any of the patch is
/// failed, all previous operations are reverted. In case of internal error resulting in panic,
/// document might be left in inconsistent state.
///
/// # Example
/// Create and patch document:
///
/// ```rust
/// #[macro_use]
/// use json_patch::{Patch, patch};
/// use serde_json::{from_value, json};
///
/// # pub fn main() {
/// let mut doc = json!([
/// { "name": "Andrew" },
/// { "name": "Maxim" }
/// ]);
///
/// let p: Patch = from_value(json!([
/// { "op": "test", "path": "/0/name", "value": "Andrew" },
/// { "op": "add", "path": "/0/happy", "value": true }
/// ])).unwrap();
///
/// patch(&mut doc, &p).unwrap();
/// assert_eq!(doc, json!([
/// { "name": "Andrew", "happy": true },
/// { "name": "Maxim" }
/// ]));
///
/// # }
/// ```
pub fn patch(doc: &mut Value, patch: &[PatchOperation]) -> Result<(), PatchError> {
    let mut undo_stack = Vec::with_capacity(patch.len());
    match apply_patches(doc, patch, Some(&mut undo_stack)) {
        Ok(()) => Ok(()),
        Err(err) => {
            // Roll back everything applied so far. Each undo entry was
            // recorded from a successful forward operation, so replaying
            // them must succeed.
            if let Err(e) = undo_patches(doc, &undo_stack) {
                unreachable!("unable to undo applied patches: {e}")
            }
            Err(err)
        }
    }
}
/// Patch provided JSON document (given as `serde_json::Value`) in-place. Different from [`patch`]
/// if any patch failed, the document is left in an inconsistent state. In case of internal error
/// resulting in panic, document might be left in inconsistent state.
///
/// # Example
/// Create and patch document:
///
/// ```rust
/// #[macro_use]
/// use json_patch::{Patch, patch_unsafe};
/// use serde_json::{from_value, json};
///
/// # pub fn main() {
/// let mut doc = json!([
/// { "name": "Andrew" },
/// { "name": "Maxim" }
/// ]);
///
/// let p: Patch = from_value(json!([
/// { "op": "test", "path": "/0/name", "value": "Andrew" },
/// { "op": "add", "path": "/0/happy", "value": true }
/// ])).unwrap();
///
/// patch_unsafe(&mut doc, &p).unwrap();
/// assert_eq!(doc, json!([
/// { "name": "Andrew", "happy": true },
/// { "name": "Maxim" }
/// ]));
///
/// # }
/// ```
pub fn patch_unsafe(doc: &mut Value, patch: &[PatchOperation]) -> Result<(), PatchError> {
    // No undo stack: on error, already-applied operations are left in place
    // ("unsafe" refers to atomicity, not memory safety).
    apply_patches(doc, patch, None)
}
/// Undoes operations performed by `apply_patches`. This is useful to recover the original document
/// in case of an error.
fn undo_patches(doc: &mut Value, undo_patches: &[PatchOperation]) -> Result<(), PatchError> {
    // Replay the undo entries in reverse order, so each entry sees the
    // document state it was recorded against.
    for (operation, patch) in undo_patches.iter().enumerate().rev() {
        match patch {
            PatchOperation::Add(op) => {
                add(doc, &op.path, op.value.clone())
                    .map_err(|e| translate_error(e, operation, &op.path))?;
            }
            PatchOperation::Remove(op) => {
                // `allow_last: true` so an undo entry whose path ends in `-`
                // (recorded for an append) can be reverted.
                remove(doc, &op.path, true).map_err(|e| translate_error(e, operation, &op.path))?;
            }
            PatchOperation::Replace(op) => {
                replace(doc, &op.path, op.value.clone())
                    .map_err(|e| translate_error(e, operation, &op.path))?;
            }
            PatchOperation::Move(op) => {
                mov(doc, &op.from, &op.path, true)
                    .map_err(|e| translate_error(e, operation, &op.path))?;
            }
            PatchOperation::Copy(op) => {
                copy(doc, &op.from, &op.path)
                    .map_err(|e| translate_error(e, operation, &op.path))?;
            }
            // `apply_patches` never records a `test` as an undo entry.
            _ => unreachable!(),
        }
    }
    Ok(())
}
// Apply patches while tracking all the changes being made so they can be reverted back in case
// subsequent patches fail. The inverse of all state changes is recorded in the `undo_stack` which
// can be reapplied using `undo_patches` to get back to the original document.
fn apply_patches(
    doc: &mut Value,
    patches: &[PatchOperation],
    undo_stack: Option<&mut Vec<PatchOperation>>,
) -> Result<(), PatchError> {
    for (operation, patch) in patches.iter().enumerate() {
        match patch {
            PatchOperation::Add(ref op) => {
                let prev = add(doc, &op.path, op.value.clone())
                    .map_err(|e| translate_error(e, operation, &op.path))?;
                // `Some(&mut ref mut ...)` reborrows the stack instead of
                // moving it out of the `Option`, so it stays available on
                // later loop iterations.
                if let Some(&mut ref mut undo_stack) = undo_stack {
                    undo_stack.push(match prev {
                        // Nothing was displaced: undo by removing the new value.
                        None => PatchOperation::Remove(RemoveOperation {
                            path: op.path.clone(),
                        }),
                        // A value was displaced: undo by putting it back.
                        Some(v) => PatchOperation::Add(AddOperation {
                            path: op.path.clone(),
                            value: v,
                        }),
                    })
                }
            }
            PatchOperation::Remove(ref op) => {
                let prev = remove(doc, &op.path, false)
                    .map_err(|e| translate_error(e, operation, &op.path))?;
                // Undo a removal by re-adding the removed value.
                if let Some(&mut ref mut undo_stack) = undo_stack {
                    undo_stack.push(PatchOperation::Add(AddOperation {
                        path: op.path.clone(),
                        value: prev,
                    }))
                }
            }
            PatchOperation::Replace(ref op) => {
                let prev = replace(doc, &op.path, op.value.clone())
                    .map_err(|e| translate_error(e, operation, &op.path))?;
                // Undo a replace by replacing back with the previous value.
                if let Some(&mut ref mut undo_stack) = undo_stack {
                    undo_stack.push(PatchOperation::Replace(ReplaceOperation {
                        path: op.path.clone(),
                        value: prev,
                    }))
                }
            }
            PatchOperation::Move(ref op) => {
                let prev = mov(doc, &op.from, &op.path, false)
                    .map_err(|e| translate_error(e, operation, &op.path))?;
                if let Some(&mut ref mut undo_stack) = undo_stack {
                    // If the destination displaced a value, restore it first
                    // (entries are replayed in reverse by `undo_patches`, so
                    // this runs after the move below is reversed).
                    if let Some(prev) = prev {
                        undo_stack.push(PatchOperation::Add(AddOperation {
                            path: op.path.clone(),
                            value: prev,
                        }));
                    }
                    // Undo the move by moving the value back where it came from.
                    undo_stack.push(PatchOperation::Move(MoveOperation {
                        from: op.path.clone(),
                        path: op.from.clone(),
                    }));
                }
            }
            PatchOperation::Copy(ref op) => {
                let prev = copy(doc, &op.from, &op.path)
                    .map_err(|e| translate_error(e, operation, &op.path))?;
                if let Some(&mut ref mut undo_stack) = undo_stack {
                    undo_stack.push(match prev {
                        // Nothing was displaced: undo by removing the copy.
                        None => PatchOperation::Remove(RemoveOperation {
                            path: op.path.clone(),
                        }),
                        // A value was displaced: undo by putting it back.
                        Some(v) => PatchOperation::Add(AddOperation {
                            path: op.path.clone(),
                            value: v,
                        }),
                    })
                }
            }
            PatchOperation::Test(ref op) => {
                // `test` never mutates the document, so nothing to record.
                test(doc, &op.path, &op.value)
                    .map_err(|e| translate_error(e, operation, &op.path))?;
            }
        }
    }
    Ok(())
}
/// Patch provided JSON document (given as `serde_json::Value`) in place with JSON Merge Patch
/// (RFC 7396).
///
/// # Example
/// Create and patch document:
///
/// ```rust
/// #[macro_use]
/// use json_patch::merge;
/// use serde_json::json;
///
/// # pub fn main() {
/// let mut doc = json!({
/// "title": "Goodbye!",
/// "author" : {
/// "givenName" : "John",
/// "familyName" : "Doe"
/// },
/// "tags":[ "example", "sample" ],
/// "content": "This will be unchanged"
/// });
///
/// let patch = json!({
/// "title": "Hello!",
/// "phoneNumber": "+01-123-456-7890",
/// "author": {
/// "familyName": null
/// },
/// "tags": [ "example" ]
/// });
///
/// merge(&mut doc, &patch);
/// assert_eq!(doc, json!({
/// "title": "Hello!",
/// "author" : {
/// "givenName" : "John"
/// },
/// "tags": [ "example" ],
/// "content": "This will be unchanged",
/// "phoneNumber": "+01-123-456-7890"
/// }));
/// # }
/// ```
pub fn merge(doc: &mut Value, patch: &Value) {
    let Some(patch_obj) = patch.as_object() else {
        // A non-object patch replaces the target entirely (RFC 7396).
        *doc = patch.clone();
        return;
    };
    // Merging into a non-object target starts from an empty object.
    if !doc.is_object() {
        *doc = Value::Object(Map::new());
    }
    let target = doc.as_object_mut().unwrap();
    for (key, value) in patch_obj {
        if value.is_null() {
            // `null` in a merge patch means "delete this member".
            target.remove(key.as_str());
        } else {
            merge(target.entry(key.as_str()).or_insert(Value::Null), value);
        }
    }
}

Binary file not shown.

View File

@@ -0,0 +1 @@
{"name":"json-patch","vers":"4.1.0","deps":[{"name":"jsonptr","req":"^0.7.1","features":[],"optional":false,"default_features":true,"target":null,"kind":"normal","registry":"https://github.com/rust-lang/crates.io-index","package":null,"public":null,"artifact":null,"bindep_target":null,"lib":false},{"name":"schemars","req":"^1.0.4","features":[],"optional":true,"default_features":true,"target":null,"kind":"normal","registry":"https://github.com/rust-lang/crates.io-index","package":null,"public":null,"artifact":null,"bindep_target":null,"lib":false},{"name":"serde","req":"^1.0.159","features":["derive"],"optional":false,"default_features":true,"target":null,"kind":"normal","registry":"https://github.com/rust-lang/crates.io-index","package":null,"public":null,"artifact":null,"bindep_target":null,"lib":false},{"name":"serde_json","req":"^1.0.95","features":[],"optional":false,"default_features":true,"target":null,"kind":"normal","registry":"https://github.com/rust-lang/crates.io-index","package":null,"public":null,"artifact":null,"bindep_target":null,"lib":false},{"name":"thiserror","req":"^1.0.40","features":[],"optional":false,"default_features":true,"target":null,"kind":"normal","registry":"https://github.com/rust-lang/crates.io-index","package":null,"public":null,"artifact":null,"bindep_target":null,"lib":false},{"name":"utoipa","req":"^5","features":[],"optional":true,"default_features":true,"target":null,"kind":"normal","registry":"https://github.com/rust-lang/crates.io-index","package":null,"public":null,"artifact":null,"bindep_target":null,"lib":false},{"name":"expectorate","req":"^1.0","features":[],"optional":false,"default_features":true,"target":null,"kind":"dev","registry":"https://github.com/rust-lang/crates.io-index","package":null,"public":null,"artifact":null,"bindep_target":null,"lib":false},{"name":"rand","req":"^0.8.5","features":[],"optional":false,"default_features":true,"target":null,"kind":"dev","registry":"https://github.com/rust-lang/crates.io-
index","package":null,"public":null,"artifact":null,"bindep_target":null,"lib":false},{"name":"schemars","req":"^1.0.4","features":[],"optional":false,"default_features":true,"target":null,"kind":"dev","registry":"https://github.com/rust-lang/crates.io-index","package":null,"public":null,"artifact":null,"bindep_target":null,"lib":false},{"name":"serde_json","req":"^1.0.95","features":["preserve_order"],"optional":false,"default_features":true,"target":null,"kind":"dev","registry":"https://github.com/rust-lang/crates.io-index","package":null,"public":null,"artifact":null,"bindep_target":null,"lib":false},{"name":"serde_yaml","req":"^0.9.19","features":[],"optional":false,"default_features":true,"target":null,"kind":"dev","registry":"https://github.com/rust-lang/crates.io-index","package":null,"public":null,"artifact":null,"bindep_target":null,"lib":false},{"name":"utoipa","req":"^5","features":["debug"],"optional":false,"default_features":true,"target":null,"kind":"dev","registry":"https://github.com/rust-lang/crates.io-index","package":null,"public":null,"artifact":null,"bindep_target":null,"lib":false}],"features":{"default":["diff"],"diff":[]},"features2":null,"cksum":"8fcc73c14417591102111710b1bf0d3a9552470b11ee5d50d37449170c9a7293","yanked":null,"links":null,"rust_version":null,"v":2}

222
vendor/json-patch/tests/basic.rs vendored Normal file
View File

@@ -0,0 +1,222 @@
use json_patch::{
AddOperation, CopyOperation, MoveOperation, Patch, PatchOperation, RemoveOperation,
ReplaceOperation, TestOperation,
};
use serde_json::{from_str, from_value, json, Value};
// A `serde_json::Value` patch document deserializes into the expected operations.
#[test]
fn parse_from_value() {
    let json = json!([{"op": "add", "path": "/a/b", "value": 1}, {"op": "remove", "path": "/c"}]);
    let patch: Patch = from_value(json).unwrap();
    let expected = Patch(vec![
        PatchOperation::Add(AddOperation {
            path: "/a/b".parse().unwrap(),
            value: Value::from(1),
        }),
        PatchOperation::Remove(RemoveOperation {
            path: "/c".parse().unwrap(),
        }),
    ]);
    assert_eq!(patch, expected);
    // The equivalent JSON text must also parse successfully.
    let _patch: Patch =
        from_str(r#"[{"op": "add", "path": "/a/b", "value": 1}, {"op": "remove", "path": "/c"}]"#)
            .unwrap();
}
// A JSON text patch document deserializes into the expected operations.
#[test]
fn parse_from_string() {
    let patch: Patch =
        from_str(r#"[{"op": "add", "path": "/a/b", "value": 1}, {"op": "remove", "path": "/c"}]"#)
            .unwrap();
    let expected = Patch(vec![
        PatchOperation::Add(AddOperation {
            path: "/a/b".parse().unwrap(),
            value: Value::from(1),
        }),
        PatchOperation::Remove(RemoveOperation {
            path: "/c".parse().unwrap(),
        }),
    ]);
    assert_eq!(patch, expected);
}
// Parsing and re-serializing a patch must reproduce the input byte-for-byte.
#[test]
fn serialize_patch() {
    let s = r#"[{"op":"add","path":"/a/b","value":1},{"op":"remove","path":"/c"}]"#;
    let patch: Patch = from_str(s).unwrap();
    assert_eq!(serde_json::to_string(&patch).unwrap(), s);
}
/// `Display` for an `add` operation: compact by default, pretty with `{:#}`.
#[test]
fn display_add_operation() {
    let operation = PatchOperation::Add(AddOperation {
        path: "/a/b/c".parse().unwrap(),
        value: json!(["hello", "bye"]),
    });
    let compact = operation.to_string();
    assert_eq!(
        compact,
        r#"{"op":"add","path":"/a/b/c","value":["hello","bye"]}"#
    );
    let pretty = format!("{:#}", operation);
    assert_eq!(
        pretty,
        r#"{
  "op": "add",
  "path": "/a/b/c",
  "value": [
    "hello",
    "bye"
  ]
}"#
    );
}
/// `Display` for a `remove` operation: compact by default, pretty with `{:#}`.
#[test]
fn display_remove_operation() {
    let operation = PatchOperation::Remove(RemoveOperation {
        path: "/a/b/c".parse().unwrap(),
    });
    let compact = operation.to_string();
    assert_eq!(compact, r#"{"op":"remove","path":"/a/b/c"}"#);
    let pretty = format!("{:#}", operation);
    assert_eq!(
        pretty,
        r#"{
  "op": "remove",
  "path": "/a/b/c"
}"#
    );
}
/// `Display` for a `replace` operation: compact by default, pretty with `{:#}`.
#[test]
fn display_replace_operation() {
    let operation = PatchOperation::Replace(ReplaceOperation {
        path: "/a/b/c".parse().unwrap(),
        value: json!(42),
    });
    let compact = operation.to_string();
    assert_eq!(compact, r#"{"op":"replace","path":"/a/b/c","value":42}"#);
    let pretty = format!("{:#}", operation);
    assert_eq!(
        pretty,
        r#"{
  "op": "replace",
  "path": "/a/b/c",
  "value": 42
}"#
    );
}
/// `Display` for a `move` operation: compact by default, pretty with `{:#}`.
#[test]
fn display_move_operation() {
    let operation = PatchOperation::Move(MoveOperation {
        from: "/a/b/c".parse().unwrap(),
        path: "/a/b/d".parse().unwrap(),
    });
    let compact = operation.to_string();
    assert_eq!(compact, r#"{"op":"move","from":"/a/b/c","path":"/a/b/d"}"#);
    let pretty = format!("{:#}", operation);
    assert_eq!(
        pretty,
        r#"{
  "op": "move",
  "from": "/a/b/c",
  "path": "/a/b/d"
}"#
    );
}
/// `Display` for a `copy` operation: compact by default, pretty with `{:#}`.
#[test]
fn display_copy_operation() {
    let operation = PatchOperation::Copy(CopyOperation {
        from: "/a/b/d".parse().unwrap(),
        path: "/a/b/e".parse().unwrap(),
    });
    let compact = operation.to_string();
    assert_eq!(compact, r#"{"op":"copy","from":"/a/b/d","path":"/a/b/e"}"#);
    let pretty = format!("{:#}", operation);
    assert_eq!(
        pretty,
        r#"{
  "op": "copy",
  "from": "/a/b/d",
  "path": "/a/b/e"
}"#
    );
}
/// `Display` for a `test` operation: compact by default, pretty with `{:#}`.
#[test]
fn display_test_operation() {
    let operation = PatchOperation::Test(TestOperation {
        path: "/a/b/c".parse().unwrap(),
        value: json!("hello"),
    });
    let compact = operation.to_string();
    assert_eq!(compact, r#"{"op":"test","path":"/a/b/c","value":"hello"}"#);
    let pretty = format!("{:#}", operation);
    assert_eq!(
        pretty,
        r#"{
  "op": "test",
  "path": "/a/b/c",
  "value": "hello"
}"#
    );
}
/// `Display` for a whole `Patch`: a compact JSON array by default,
/// pretty-printed with the alternate flag `{:#}`.
#[test]
fn display_patch() {
    let patch = Patch(vec![
        PatchOperation::Add(AddOperation {
            path: "/a/b/c".parse().unwrap(),
            value: json!(["hello", "bye"]),
        }),
        PatchOperation::Remove(RemoveOperation {
            path: "/a/b/c".parse().unwrap(),
        }),
    ]);
    let compact = patch.to_string();
    assert_eq!(
        compact,
        r#"[{"op":"add","path":"/a/b/c","value":["hello","bye"]},{"op":"remove","path":"/a/b/c"}]"#
    );
    let pretty = format!("{:#}", patch);
    assert_eq!(
        pretty,
        r#"[
  {
    "op": "add",
    "path": "/a/b/c",
    "value": [
      "hello",
      "bye"
    ]
  },
  {
    "op": "remove",
    "path": "/a/b/c"
  }
]"#
    );
}
/// An empty (default) patch displays as an empty JSON array.
#[test]
fn display_patch_default() {
    let empty = Patch::default();
    assert_eq!(empty.to_string(), "[]");
}
/// The default operation displays as a `test` of `null` at the empty path.
#[test]
fn display_patch_operation_default() {
    let default_op = PatchOperation::default();
    assert_eq!(
        default_op.to_string(),
        r#"{"op":"test","path":"","value":null}"#
    );
}

126
vendor/json-patch/tests/errors.yaml vendored Normal file
View File

@@ -0,0 +1,126 @@
- doc: &1
first: "Hello"
second: "Bye"
third:
- "first"
- "second"
patch:
- op: add
path: "/first"
value: "Hello!!!"
- op: add
path: "/third/00"
value: "value"
error: "operation '/1' failed at path '/third/00': path is invalid"
- doc: *1
patch:
- op: add
path: "/third/01"
value: "value"
error: "operation '/0' failed at path '/third/01': path is invalid"
- doc: *1
patch:
- op: add
path: "/third/1~1"
value: "value"
error: "operation '/0' failed at path '/third/1~1': path is invalid"
- doc: *1
patch:
- op: add
path: "/third/1.0"
value: "value"
error: "operation '/0' failed at path '/third/1.0': path is invalid"
- doc: *1
patch:
- op: add
path: "/third/1e2"
value: "value"
error: "operation '/0' failed at path '/third/1e2': path is invalid"
- doc: *1
patch:
- op: add
path: "/third/+1"
value: "value"
error: "operation '/0' failed at path '/third/+1': path is invalid"
- doc: *1
patch:
- op: copy
from: "/third/1~1"
path: "/fourth"
error: 'operation ''/0'' failed at path ''/fourth'': "from" path is invalid'
- doc: *1
patch:
- op: move
from: "/third/1~1"
path: "/fourth"
error: 'operation ''/0'' failed at path ''/fourth'': "from" path is invalid'
- doc: *1
patch:
- op: move
from: "/third"
path: "/third/0"
error: "operation '/0' failed at path '/third/0': cannot move the value inside itself"
- doc: *1
patch:
- op: add
path: "/invalid/add/path"
value: true
error: "operation '/0' failed at path '/invalid/add/path': path is invalid"
- doc: *1
patch:
- op: remove
path: "/invalid/remove/path"
value: true
error: "operation '/0' failed at path '/invalid/remove/path': path is invalid"
- doc: *1
patch:
- op: replace
path: "/invalid/replace/path"
value: true
error: "operation '/0' failed at path '/invalid/replace/path': path is invalid"
- doc: *1
patch:
- op: test
path: "/invalid/test/path"
value: true
error: "operation '/0' failed at path '/invalid/test/path': path is invalid"
- doc: *1
patch:
- op: add
path: "first"
value: true
error: "json pointer failed to parse; does not start with a slash ('/') and is not empty"
- doc: *1
patch:
- op: replace
path: "first"
value: true
error: "json pointer failed to parse; does not start with a slash ('/') and is not empty"
- doc: *1
patch:
- op: remove
path: "first"
value: true
error: "json pointer failed to parse; does not start with a slash ('/') and is not empty"
- doc: *1
patch:
- op: add
path: "/first/add_to_primitive"
value: true
error: "operation '/0' failed at path '/first/add_to_primitive': path is invalid"
- doc: *1
patch:
- op: remove
path: "/remove_non_existent"
error: "operation '/0' failed at path '/remove_non_existent': path is invalid"
- doc: *1
patch:
- op: remove
path: "/first/remove_from_primitive"
error: "operation '/0' failed at path '/first/remove_from_primitive': path is invalid"
- doc: *1
patch:
- op: test
path: "/first"
value: "Other"
error: "operation '/0' failed at path '/first': value did not match"

149
vendor/json-patch/tests/schemars.json vendored Normal file
View File

@@ -0,0 +1,149 @@
{
"$schema": "http://json-schema.org/draft-07/schema#",
"title": "PatchOperation",
"description": "JSON Patch single patch operation",
"oneOf": [
{
"description": "'add' operation",
"type": "object",
"required": [
"op",
"path",
"value"
],
"properties": {
"op": {
"type": "string",
"enum": [
"add"
]
},
"path": {
"description": "JSON-Pointer value [RFC6901](https://tools.ietf.org/html/rfc6901) that references a location within the target document where the operation is performed.",
"type": "string"
},
"value": {
"description": "Value to add to the target location."
}
}
},
{
"description": "'remove' operation",
"type": "object",
"required": [
"op",
"path"
],
"properties": {
"op": {
"type": "string",
"enum": [
"remove"
]
},
"path": {
"description": "JSON-Pointer value [RFC6901](https://tools.ietf.org/html/rfc6901) that references a location within the target document where the operation is performed.",
"type": "string"
}
}
},
{
"description": "'replace' operation",
"type": "object",
"required": [
"op",
"path",
"value"
],
"properties": {
"op": {
"type": "string",
"enum": [
"replace"
]
},
"path": {
"description": "JSON-Pointer value [RFC6901](https://tools.ietf.org/html/rfc6901) that references a location within the target document where the operation is performed.",
"type": "string"
},
"value": {
"description": "Value to replace with."
}
}
},
{
"description": "'move' operation",
"type": "object",
"required": [
"from",
"op",
"path"
],
"properties": {
"from": {
"description": "JSON-Pointer value [RFC6901](https://tools.ietf.org/html/rfc6901) that references a location to move value from.",
"type": "string"
},
"op": {
"type": "string",
"enum": [
"move"
]
},
"path": {
"description": "JSON-Pointer value [RFC6901](https://tools.ietf.org/html/rfc6901) that references a location within the target document where the operation is performed.",
"type": "string"
}
}
},
{
"description": "'copy' operation",
"type": "object",
"required": [
"from",
"op",
"path"
],
"properties": {
"from": {
"description": "JSON-Pointer value [RFC6901](https://tools.ietf.org/html/rfc6901) that references a location to copy value from.",
"type": "string"
},
"op": {
"type": "string",
"enum": [
"copy"
]
},
"path": {
"description": "JSON-Pointer value [RFC6901](https://tools.ietf.org/html/rfc6901) that references a location within the target document where the operation is performed.",
"type": "string"
}
}
},
{
"description": "'test' operation",
"type": "object",
"required": [
"op",
"path",
"value"
],
"properties": {
"op": {
"type": "string",
"enum": [
"test"
]
},
"path": {
"description": "JSON-Pointer value [RFC6901](https://tools.ietf.org/html/rfc6901) that references a location within the target document where the operation is performed.",
"type": "string"
},
"value": {
"description": "Value to test against."
}
}
}
]
}

9
vendor/json-patch/tests/schemars.rs vendored Normal file
View File

@@ -0,0 +1,9 @@
/// Golden-file check: the JSON Schema generated for `PatchOperation`
/// must match `tests/schemars.json` (compared via `expectorate`).
#[cfg(feature = "schemars")]
#[test]
fn schema() {
    use json_patch::*;
    let generated = schemars::schema_for!(PatchOperation);
    let pretty = serde_json::to_string_pretty(&generated).unwrap();
    expectorate::assert_contents("tests/schemars.json", &pretty);
}

133
vendor/json-patch/tests/suite.rs vendored Normal file
View File

@@ -0,0 +1,133 @@
use json_patch::Patch;
use serde::Deserialize;
use serde_json::Value;
/// Hand-written error cases; error messages must match the fixture exactly.
#[test]
fn errors() {
    let fixture = "tests/errors.yaml";
    run_specs(fixture, Errors::ExactMatch, PatchKind::Patch);
}
/// General patch cases; only the presence of an error is checked, not its text.
#[test]
fn tests() {
    let fixture = "specs/tests.json";
    run_specs(fixture, Errors::IgnoreContent, PatchKind::Patch);
}
/// Specification-derived patch cases; error text is not compared.
#[test]
fn spec_tests() {
    let fixture = "specs/spec_tests.json";
    run_specs(fixture, Errors::IgnoreContent, PatchKind::Patch);
}
/// Revert-oriented patch cases; error text is not compared.
#[test]
fn revert_tests() {
    let fixture = "specs/revert_tests.json";
    run_specs(fixture, Errors::IgnoreContent, PatchKind::Patch);
}
/// Merge-patch cases, applied with `json_patch::merge` instead of `patch`.
#[test]
fn merge_tests() {
    let fixture = "specs/merge_tests.json";
    run_specs(fixture, Errors::IgnoreContent, PatchKind::MergePatch);
}
/// How an expected error string in a spec file is checked against the
/// error actually produced.
#[derive(PartialEq, Eq, Clone, Copy)]
enum Errors {
    // The produced error message must equal the fixture's string exactly.
    ExactMatch,
    // Only the fact that an error occurred is checked, not its text.
    IgnoreContent,
}
/// Which patching algorithm a spec file exercises.
#[derive(PartialEq, Eq, Clone, Copy)]
enum PatchKind {
    // Applied via `json_patch::patch` after parsing into a `Patch`.
    Patch,
    // Applied via `json_patch::merge` on the raw JSON value.
    MergePatch,
}
/// One test case deserialized from a JSON or YAML spec file.
#[derive(Debug, Deserialize)]
struct PatchTestCase {
    // Optional human-readable description, included in failure messages.
    comment: Option<String>,
    // Document the patch is applied to.
    doc: Value,
    // The patch, kept as a raw `Value`; parsed into `Patch` only for
    // `PatchKind::Patch` (merge patches stay arbitrary JSON).
    patch: Value,
    // Expected resulting document; when absent, `doc` must be unchanged.
    expected: Option<Value>,
    // Expected error message; when present, applying the patch must fail.
    error: Option<String>,
    // Skip this case entirely when true.
    #[serde(default)]
    disabled: bool,
}
/// Applies the test case's patch to a copy of its document.
///
/// Returns the resulting document, or the error message when patching fails.
/// For `PatchKind::Patch` a failed application must leave the document
/// unchanged (atomicity), which is asserted here.
fn run_patch_test_case(tc: &PatchTestCase, kind: PatchKind) -> Result<Value, String> {
    let mut doc = tc.doc.clone();
    match kind {
        PatchKind::MergePatch => {
            // Merge-patch application has no failure path (`merge` returns `()`).
            json_patch::merge(&mut doc, &tc.patch);
            Ok(doc)
        }
        PatchKind::Patch => {
            let patch: Patch =
                serde_json::from_value(tc.patch.clone()).map_err(|err| err.to_string())?;
            match json_patch::patch(&mut doc, &patch) {
                Ok(()) => Ok(doc),
                Err(err) => {
                    // On error the original document must be untouched.
                    assert_eq!(
                        tc.doc, doc,
                        "no changes should be made to the original document"
                    );
                    Err(err.to_string())
                }
            }
        }
    }
}
fn run_specs(path: &str, errors: Errors, kind: PatchKind) {
let cases = std::fs::read_to_string(path).unwrap();
let is_yaml = path.ends_with(".yaml") || path.ends_with(".yml");
let cases: Vec<PatchTestCase> = if is_yaml {
serde_yaml::from_str(&cases).unwrap()
} else {
serde_json::from_str(&cases).unwrap()
};
for (idx, tc) in cases.into_iter().enumerate() {
if tc.disabled {
continue;
}
match run_patch_test_case(&tc, kind) {
Ok(actual) => {
if let Some(error) = tc.error {
panic!(
"expected to fail with an error: {}, got document {:?}",
error, actual
);
} else {
let comment = tc.comment.as_deref().unwrap_or("");
let expected = if let Some(ref expected) = tc.expected {
expected
} else {
&tc.doc
};
assert_eq!(
*expected, actual,
"\nActual does not match expected in test case {}: {}",
idx, comment
);
}
}
Err(actual_error) => {
if let Some(expected_error) = tc.error {
if errors == Errors::ExactMatch {
assert_eq!(actual_error, expected_error, "Expected test case {} to fail with an error:\n{}\n\nbut instead failed with an error:\n{}", idx, expected_error, actual_error);
}
} else {
panic!(
"Patch expected to succeed, but failed with an error:\n{}",
actual_error
);
}
}
}
}
}

288
vendor/json-patch/tests/utoipa.json vendored Normal file
View File

@@ -0,0 +1,288 @@
{
"openapi": "3.0.3",
"info": {
"title": "json-patch",
"description": "RFC 6902, JavaScript Object Notation (JSON) Patch",
"contact": {
"name": "Ivan Dubrov",
"email": "dubrov.ivan@gmail.com"
},
"license": {
"name": "MIT/Apache-2.0"
},
"version": "0.0.0"
},
"paths": {
"foo": {
"get": {
"tags": [
"crate"
],
"operationId": "get_foo",
"requestBody": {
"content": {
"application/json": {
"schema": {
"$ref": "#/components/schemas/Patch"
}
}
},
"required": true
},
"responses": {
"200": {
"description": "Patch completed"
},
"406": {
"description": "Not accepted"
}
}
}
}
},
"components": {
"schemas": {
"AddOperation": {
"type": "object",
"description": "JSON Patch 'add' operation representation",
"required": [
"path",
"value"
],
"properties": {
"path": {
"type": "string",
"description": "JSON-Pointer value [RFC6901](https://tools.ietf.org/html/rfc6901) that references a location\nwithin the target document where the operation is performed."
},
"value": {
"description": "Value to add to the target location."
}
}
},
"CopyOperation": {
"type": "object",
"description": "JSON Patch 'copy' operation representation",
"required": [
"from",
"path"
],
"properties": {
"from": {
"type": "string",
"description": "JSON-Pointer value [RFC6901](https://tools.ietf.org/html/rfc6901) that references a location\nto copy value from."
},
"path": {
"type": "string",
"description": "JSON-Pointer value [RFC6901](https://tools.ietf.org/html/rfc6901) that references a location\nwithin the target document where the operation is performed."
}
}
},
"MoveOperation": {
"type": "object",
"description": "JSON Patch 'move' operation representation",
"required": [
"from",
"path"
],
"properties": {
"from": {
"type": "string",
"description": "JSON-Pointer value [RFC6901](https://tools.ietf.org/html/rfc6901) that references a location\nto move value from."
},
"path": {
"type": "string",
"description": "JSON-Pointer value [RFC6901](https://tools.ietf.org/html/rfc6901) that references a location\nwithin the target document where the operation is performed."
}
}
},
"Patch": {
"type": "array",
"items": {
"$ref": "#/components/schemas/PatchOperation"
},
"description": "Representation of JSON Patch (list of patch operations)"
},
"PatchOperation": {
"oneOf": [
{
"allOf": [
{
"$ref": "#/components/schemas/AddOperation"
},
{
"type": "object",
"required": [
"op"
],
"properties": {
"op": {
"type": "string",
"enum": [
"add"
]
}
}
}
]
},
{
"allOf": [
{
"$ref": "#/components/schemas/RemoveOperation"
},
{
"type": "object",
"required": [
"op"
],
"properties": {
"op": {
"type": "string",
"enum": [
"remove"
]
}
}
}
]
},
{
"allOf": [
{
"$ref": "#/components/schemas/ReplaceOperation"
},
{
"type": "object",
"required": [
"op"
],
"properties": {
"op": {
"type": "string",
"enum": [
"replace"
]
}
}
}
]
},
{
"allOf": [
{
"$ref": "#/components/schemas/MoveOperation"
},
{
"type": "object",
"required": [
"op"
],
"properties": {
"op": {
"type": "string",
"enum": [
"move"
]
}
}
}
]
},
{
"allOf": [
{
"$ref": "#/components/schemas/CopyOperation"
},
{
"type": "object",
"required": [
"op"
],
"properties": {
"op": {
"type": "string",
"enum": [
"copy"
]
}
}
}
]
},
{
"allOf": [
{
"$ref": "#/components/schemas/TestOperation"
},
{
"type": "object",
"required": [
"op"
],
"properties": {
"op": {
"type": "string",
"enum": [
"test"
]
}
}
}
]
}
],
"description": "JSON Patch single patch operation",
"discriminator": {
"propertyName": "op"
}
},
"RemoveOperation": {
"type": "object",
"description": "JSON Patch 'remove' operation representation",
"required": [
"path"
],
"properties": {
"path": {
"type": "string",
"description": "JSON-Pointer value [RFC6901](https://tools.ietf.org/html/rfc6901) that references a location\nwithin the target document where the operation is performed."
}
}
},
"ReplaceOperation": {
"type": "object",
"description": "JSON Patch 'replace' operation representation",
"required": [
"path",
"value"
],
"properties": {
"path": {
"type": "string",
"description": "JSON-Pointer value [RFC6901](https://tools.ietf.org/html/rfc6901) that references a location\nwithin the target document where the operation is performed."
},
"value": {
"description": "Value to replace with."
}
}
},
"TestOperation": {
"type": "object",
"description": "JSON Patch 'test' operation representation",
"required": [
"path",
"value"
],
"properties": {
"path": {
"type": "string",
"description": "JSON-Pointer value [RFC6901](https://tools.ietf.org/html/rfc6901) that references a location\nwithin the target document where the operation is performed."
},
"value": {
"description": "Value to test against."
}
}
}
}
}
}

40
vendor/json-patch/tests/utoipa.rs vendored Normal file
View File

@@ -0,0 +1,40 @@
/// Golden-file check: the utoipa-generated OpenAPI document must match
/// `tests/utoipa.json` (compared via `expectorate`).
#[cfg(feature = "utoipa")]
#[test]
fn schema() {
    use json_patch::*;
    use utoipa::OpenApi;
    // Minimal endpoint so the generated document exercises `Patch` as a
    // request body and pulls its schema into the components section.
    #[utoipa::path(
        get,
        path = "foo",
        request_body = Patch,
        responses(
            (status = 200, description = "Patch completed"),
            (status = 406, description = "Not accepted"),
        ),
    )]
    #[allow(unused)]
    fn get_foo(body: Patch) {}
    // OpenAPI document listing every operation schema exported by the crate.
    #[derive(OpenApi, Default)]
    #[openapi(
        paths(get_foo),
        components(schemas(
            AddOperation,
            CopyOperation,
            MoveOperation,
            PatchOperation,
            RemoveOperation,
            ReplaceOperation,
            TestOperation,
            Patch,
        ))
    )]
    struct ApiDoc;
    let mut doc = ApiDoc::openapi();
    // Pin the version so the golden file does not change with the crate version.
    doc.info.version = "0.0.0".to_string();
    let json = doc.to_pretty_json().unwrap();
    expectorate::assert_contents("tests/utoipa.json", &json);
}

2
vendor/json-patch/update-readme.sh vendored Executable file
View File

@@ -0,0 +1,2 @@
#!/bin/sh
# Regenerate README.md from the crate-level docs via `cargo readme`.
# Write to a temporary file first: a plain `cargo readme > README.md`
# truncates README.md before the command runs, so a failing `cargo readme`
# would leave the README empty. `set -eu` aborts on any error or unset var.
set -eu
cargo readme > README.md.tmp
mv README.md.tmp README.md