chore: checkpoint before Python removal

This commit is contained in:
2026-03-26 22:33:59 +00:00
parent 683cec9307
commit e568ddf82a
29972 changed files with 11269302 additions and 2 deletions

1
vendor/jsonptr/.cargo-checksum.json vendored Normal file
View File

@@ -0,0 +1 @@
{"files":{".cargo/config.toml":"382efa67bcc1312cc23cd5c9c2ec470549d0687165f72eb85620021a0e70a881",".cargo_vcs_info.json":"4e04561b5069df2d1a7001cd53da5827094b21314a00631d714dfc87b9287cfb",".github/DOCS.md":"6a3c2248bb234e7fde0ce6ee9d986c7c0dbd25f33807b1808a62490d47c799be",".github/codecov.yml":"47c451cf94e3fe4f7edf46e42cb9b86c9d65eb7b65ca66f2e50ec936437a21e1",".github/dependabot.yml":"0721729dc23500350d26acccee5ed86474da5b75c909533994922f6b6cd0d4cb",".github/workflows/check.yml":"0a6d2476c5804fb3b44300d60640b661d432fb1fb7093e90b4dc6e1e867af6a2",".github/workflows/nostd.yml":"194dbc384506ad8aa4a7756eca23f8ec4ffe1bef06cb1465a96a80848640492b",".github/workflows/safety.yml":"5dd1de3e4dcfab36626f972b34f76f005d3511ae2d68fcb605927a9fb40165a3",".github/workflows/scheduled.yml":"5b8d25904e105c07bb79593abb86e6d28826275ea2f6b7fd1d8f1679444ffccb",".github/workflows/test.yml":"0dcd112459606c8a8ed5ddeee3e18a87f9ed6c517d8ecfab8963b828d7c8cee5",".taplo.toml":"c145e393eb1d7eb30c92925edf7c9b95af6d6ee310ce2a37e0900da36044e6fe","CHANGELOG.md":"92ce91d46c9350151c5a21bb52ace6326d085e385286198b57e4f766c66f8545","Cargo.toml":"8bd1d8f3d4f1a6a49aaf09feb6197559176ad04ec70c8849860b7ab4f11dd0be","Cargo.toml.orig":"c5156b43f7b8793529be74feba8423e2cc3bd515adb23ffb5e6baf3ce199fe0e","LICENSE-APACHE":"2cf31f2ae7f3feba667d50fc1d4d21ecf217703c04c3068b83c9a73b6dde723a","LICENSE-MIT":"2bc107bdc18a74263d20bdefdb8073e17bc1a01b6b374dc5835683a33ee5ba7d","README.md":"ee78b7b08ab8e2ee7884c931f70764c419e3203ea7f8c3184cdbc9acf2588b6b","src/arbitrary.rs":"53c583026478052b91013b516d8a85c6fd137aeb0705f209b444991bbc4e32d6","src/assign.rs":"96855e522977d85df6d0a18d7538f3150c554ab31095e374b17b5e9fd14b7f3e","src/component.rs":"f7cfcc02255c0755be3dd82a4b2940c183e083922c7b047b03672d6da35e1fb3","src/delete.rs":"6e9e009a196498c8897927238a307cd86821faf99b5373e98d25edb2d5cd9472","src/diagnostic.rs":"0cd58cffba09db778eb62fba0328c5eb0910d1f3f91aa57b5de290a80332af81","src/index.rs":"8e7948b52ab4d0c94cd3b3d3ec845cdeb38b64d62cf7
7f94d403f1e21d749fcf","src/lib.rs":"34b97027b8432d392f493109511d07f0ba83e0d36441355bec4468f6cc73c8b9","src/pointer.rs":"1c4259b7562322cc6a3533c98a4dd3582d82027217db580b283f6d796ab9bc3a","src/pointer/slice.rs":"8261847ac6c6ece54a35774267e6a9bb842f05ff27afb58535917dc41a98d0a3","src/resolve.rs":"6ceae51bd47f1d656d8bed1ecb324ffbb655b61fb81003c88ba1186ce0c61273","src/token.rs":"6632b2be443bc20eb852d478f1254d0840633e45ac50e9a48620fe348e23cd43"},"package":"a5a3cc660ba5d72bce0b3bb295bf20847ccbb40fd423f3f05b61273672e561fe"}

5
vendor/jsonptr/.cargo/config.toml vendored Normal file
View File

@@ -0,0 +1,5 @@
[alias]
cov = "llvm-cov --lcov --output-path lcov.info"
[build]
rustdocflags = ["--cfg", "docsrs"]

6
vendor/jsonptr/.cargo_vcs_info.json vendored Normal file
View File

@@ -0,0 +1,6 @@
{
"git": {
"sha1": "c0201c7c45ab3f9982601cfe6947c64a0106d589"
},
"path_in_vcs": ""
}

23
vendor/jsonptr/.github/DOCS.md vendored Normal file
View File

@@ -0,0 +1,23 @@
# Github config and workflows
In this folder there is configuration for codecoverage, dependabot, and ci
workflows that check the library more deeply than the default configurations.
This folder can be or was merged using a --allow-unrelated-histories merge
strategy from <https://github.com/jonhoo/rust-ci-conf/> which provides a
reasonably sensible base for writing your own ci on. By using this strategy
the history of the CI repo is included in your repo, and future updates to
the CI can be merged later.
To perform this merge run:
```shell
git remote add ci https://github.com/jonhoo/rust-ci-conf.git
git fetch ci
git merge --allow-unrelated-histories ci/main
```
An overview of the files in this project is available at:
<https://www.youtube.com/watch?v=xUH-4y92jPg&t=491s>, which contains some
rationale for decisions and runs through an example of solving minimal version
and OpenSSL issues.

23
vendor/jsonptr/.github/codecov.yml vendored Normal file
View File

@@ -0,0 +1,23 @@
# ref: https://docs.codecov.com/docs/codecovyml-reference
coverage:
# Hold ourselves to a high bar
range: 85..100
round: down
precision: 1
status:
# ref: https://docs.codecov.com/docs/commit-status
project:
default:
# Avoid false negatives
threshold: 1%
# Test files aren't important for coverage
ignore:
- "tests"
- "arbitrary.rs"
- "src/arbitrary.rs"
# Make comments less noisy
comment:
layout: "files"
require_changes: true

19
vendor/jsonptr/.github/dependabot.yml vendored Normal file
View File

@@ -0,0 +1,19 @@
version: 2
updates:
- package-ecosystem: github-actions
directory: /
schedule:
interval: daily
- package-ecosystem: cargo
directory: /
schedule:
interval: daily
ignore:
- dependency-name: "*"
# patch and minor updates don't matter for libraries as consumers of this library build
# with their own lockfile, rather than the version specified in this library's lockfile
# remove this ignore rule if your package has binaries to ensure that the binaries are
# built with the exact set of dependencies and those are up to date.
update-types:
- "version-update:semver-patch"
- "version-update:semver-minor"

View File

@@ -0,0 +1,138 @@
# This workflow runs whenever a PR is opened or updated, or a commit is pushed to main. It runs
# several checks:
# - fmt: checks that the code is formatted according to rustfmt
# - clippy: checks that the code does not contain any clippy warnings
# - doc: checks that the code can be documented without errors
# - hack: check combinations of feature flags
# - msrv: check that the msrv specified in the crate is correct
permissions:
contents: read
# This configuration allows maintainers of this repo to create a branch and pull request based on
# the new branch. Restricting the push trigger to the main branch ensures that the PR only gets
# built once.
on:
push:
branches: [main]
pull_request:
# If new code is pushed to a PR branch, then cancel in progress workflows for that PR. Ensures that
# we don't waste CI time, and returns results quicker https://github.com/jonhoo/rust-ci-conf/pull/5
concurrency:
group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }}
cancel-in-progress: true
name: check
jobs:
fmt:
runs-on: ubuntu-latest
name: stable / fmt
steps:
- uses: actions/checkout@v4
with:
submodules: true
- name: Install stable
uses: dtolnay/rust-toolchain@stable
with:
components: rustfmt
- name: cargo fmt --check
run: cargo fmt --check
clippy:
runs-on: ubuntu-latest
name: ${{ matrix.toolchain }} / clippy
permissions:
contents: read
checks: write
strategy:
fail-fast: false
matrix:
# Get early warning of new lints which are regularly introduced in beta channels.
toolchain: [stable, beta]
steps:
- uses: actions/checkout@v4
with:
submodules: true
- name: Install ${{ matrix.toolchain }}
uses: dtolnay/rust-toolchain@master
with:
toolchain: ${{ matrix.toolchain }}
components: clippy
- name: cargo clippy
uses: giraffate/clippy-action@v1
with:
reporter: "github-pr-check"
github_token: ${{ secrets.GITHUB_TOKEN }}
semver:
runs-on: ubuntu-latest
name: semver
steps:
- uses: actions/checkout@v4
with:
submodules: true
- name: Install stable
uses: dtolnay/rust-toolchain@stable
with:
components: rustfmt
- name: cargo-semver-checks
uses: obi1kenobi/cargo-semver-checks-action@v2
doc:
# run docs generation on nightly rather than stable. This enables features like
# https://doc.rust-lang.org/beta/unstable-book/language-features/doc-cfg.html which allows an
# API be documented as only available in some specific platforms.
runs-on: ubuntu-latest
name: nightly / doc
steps:
- uses: actions/checkout@v4
with:
submodules: true
- name: Install nightly
uses: dtolnay/rust-toolchain@nightly
- name: cargo doc
run: cargo doc --no-deps --all-features
env:
RUSTDOCFLAGS: --cfg docsrs
hack:
# cargo-hack checks combinations of feature flags to ensure that features are all additive
# which is required for feature unification
runs-on: ubuntu-latest
name: ubuntu / stable / features
steps:
- uses: actions/checkout@v4
with:
submodules: true
- name: Install stable
uses: dtolnay/rust-toolchain@stable
- name: cargo install cargo-hack
uses: taiki-e/install-action@cargo-hack
# intentionally no target specifier; see https://github.com/jonhoo/rust-ci-conf/pull/4
# --feature-powerset runs for every combination of features
- name: cargo hack
run: cargo hack --feature-powerset check
msrv:
# check that we can build using the minimal rust version that is specified by this crate
runs-on: ubuntu-latest
# we use a matrix here just because env can't be used in job names
# https://docs.github.com/en/actions/learn-github-actions/contexts#context-availability
# TODO: would be nice to just parse this from the manifest
strategy:
matrix:
msrv: ["1.79"]
name: ubuntu / ${{ matrix.msrv }}
steps:
- uses: actions/checkout@v4
with:
submodules: true
- name: Install ${{ matrix.msrv }}
uses: dtolnay/rust-toolchain@master
with:
toolchain: ${{ matrix.msrv }}
- name: cargo +${{ matrix.msrv }} check
run: cargo check
toml-fmt:
runs-on: ubuntu-latest
name: toml / fmt
steps:
- uses: actions/checkout@v4
with:
submodules: true
- name: Install taplo
uses: docker://docker.io/tamasfe/taplo:latest
with:
args: fmt --check --diff

View File

@@ -0,0 +1,30 @@
# This workflow checks whether the library is able to run without the std library (e.g., embedded).
# This entire file should be removed if this crate does not support no-std. See check.yml for
# information about how the concurrency cancellation and workflow triggering works
permissions:
contents: read
on:
push:
branches: [main]
pull_request:
concurrency:
group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }}
cancel-in-progress: true
name: no-std
jobs:
nostd:
runs-on: ubuntu-latest
name: ${{ matrix.target }}
strategy:
matrix:
target: [thumbv7m-none-eabi, aarch64-unknown-none]
steps:
- uses: actions/checkout@v4
with:
submodules: true
- name: Install stable
uses: dtolnay/rust-toolchain@stable
- name: rustup target add ${{ matrix.target }}
run: rustup target add ${{ matrix.target }}
- name: cargo check
run: cargo check --target ${{ matrix.target }} --no-default-features

View File

@@ -0,0 +1,70 @@
# This workflow runs checks for unsafe code. In crates that don't have any unsafe code, this can be
# removed. Runs:
# - miri - detects undefined behavior and memory leaks
# - address sanitizer - detects memory errors
# - leak sanitizer - detects memory leaks
# See check.yml for information about how the concurrency cancellation and workflow triggering works
permissions:
contents: read
on:
push:
branches: [main]
pull_request:
concurrency:
group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }}
cancel-in-progress: true
name: safety
jobs:
sanitizers:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
with:
submodules: true
- name: Install nightly
uses: dtolnay/rust-toolchain@nightly
- run: |
# to get the symbolizer for debug symbol resolution
sudo apt install llvm
# to fix buggy leak analyzer:
# https://github.com/japaric/rust-san#unrealiable-leaksanitizer
# ensure there's a profile.dev section
if ! grep -qE '^[ \t]*[profile.dev]' Cargo.toml; then
echo >> Cargo.toml
echo '[profile.dev]' >> Cargo.toml
fi
# remove pre-existing opt-levels in profile.dev
sed -i '/^\s*\[profile.dev\]/,/^\s*\[/ {/^\s*opt-level/d}' Cargo.toml
# now set opt-level to 1
sed -i '/^\s*\[profile.dev\]/a opt-level = 1' Cargo.toml
cat Cargo.toml
name: Enable debug symbols
- name: cargo test -Zsanitizer=address
# only --lib --tests b/c of https://github.com/rust-lang/rust/issues/53945
run: cargo test --lib --tests --all-features --target x86_64-unknown-linux-gnu
env:
ASAN_OPTIONS: "detect_odr_violation=0:detect_leaks=0"
RUSTFLAGS: "-Z sanitizer=address"
- name: cargo test -Zsanitizer=leak
if: always()
run: cargo test --all-features --target x86_64-unknown-linux-gnu
env:
LSAN_OPTIONS: "suppressions=lsan-suppressions.txt"
RUSTFLAGS: "-Z sanitizer=leak"
miri:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
with:
submodules: true
- run: |
echo "NIGHTLY=nightly-$(curl -s https://rust-lang.github.io/rustup-components-history/x86_64-unknown-linux-gnu/miri)" >> $GITHUB_ENV
- name: Install ${{ env.NIGHTLY }}
uses: dtolnay/rust-toolchain@master
with:
toolchain: ${{ env.NIGHTLY }}
components: miri
- name: cargo miri test
run: cargo miri test -- --skip pointer::tests::qc
env:
MIRIFLAGS: ""

View File

@@ -0,0 +1,58 @@
# Run scheduled (rolling) jobs on a nightly basis, as your crate may break independently of any
# given PR. E.g., updates to rust nightly and updates to this crates dependencies. See check.yml for
# information about how the concurrency cancellation and workflow triggering works
permissions:
contents: read
on:
push:
branches: [main]
pull_request:
schedule:
- cron: '7 7 * * *'
concurrency:
group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }}
cancel-in-progress: true
name: rolling
jobs:
# https://twitter.com/mycoliza/status/1571295690063753218
nightly:
runs-on: ubuntu-latest
name: ubuntu / nightly
steps:
- uses: actions/checkout@v4
with:
submodules: true
- name: Install nightly
uses: dtolnay/rust-toolchain@nightly
- name: cargo generate-lockfile
if: hashFiles('Cargo.lock') == ''
run: cargo generate-lockfile
- name: cargo test --locked
run: cargo test --locked --all-features --all-targets
# https://twitter.com/alcuadrado/status/1571291687837732873
update:
# This action checks that updating the dependencies of this crate to the latest available that
# satisfy the versions in Cargo.toml does not break this crate. This is important as consumers
# of this crate will generally use the latest available crates. This is subject to the standard
# Cargo semver rules (i.e cargo does not update to a new major version unless explicitly told
# to).
runs-on: ubuntu-latest
name: ubuntu / beta / updated
# There's no point running this if no Cargo.lock was checked in in the first place, since we'd
# just redo what happened in the regular test job. Unfortunately, hashFiles only works in if on
# steps, so we repeat it.
steps:
- uses: actions/checkout@v4
with:
submodules: true
- name: Install beta
if: hashFiles('Cargo.lock') != ''
uses: dtolnay/rust-toolchain@beta
- name: cargo update
if: hashFiles('Cargo.lock') != ''
run: cargo update
- name: cargo test
if: hashFiles('Cargo.lock') != ''
run: cargo test --locked --all-features --all-targets
env:
RUSTFLAGS: -D deprecated

View File

@@ -0,0 +1,156 @@
# This is the main CI workflow that runs the test suite on all pushes to main and all pull requests.
# It runs the following jobs:
# - required: runs the test suite on ubuntu with stable and beta rust toolchains
# - minimal: runs the test suite with the minimal versions of the dependencies that satisfy the
# requirements of this crate, and its dependencies
# - os-check: runs the test suite on mac and windows
# - coverage: runs the test suite and collects coverage information
# See check.yml for information about how the concurrency cancellation and workflow triggering works
permissions:
contents: read
on:
push:
branches: [main]
pull_request:
concurrency:
group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }}
cancel-in-progress: true
name: test
jobs:
required:
runs-on: ubuntu-latest
name: ubuntu / ${{ matrix.toolchain }}
strategy:
matrix:
# run on stable and beta to ensure that tests won't break on the next version of the rust
# toolchain
toolchain: [stable, beta]
steps:
- uses: actions/checkout@v4
with:
submodules: true
- name: Install ${{ matrix.toolchain }}
uses: dtolnay/rust-toolchain@master
with:
toolchain: ${{ matrix.toolchain }}
- name: cargo generate-lockfile
# enable this ci template to run regardless of whether the lockfile is checked in or not
if: hashFiles('Cargo.lock') == ''
run: cargo generate-lockfile
# https://twitter.com/jonhoo/status/1571290371124260865
- name: cargo test --locked
run: cargo test --locked --all-features --all-targets
# https://github.com/rust-lang/cargo/issues/6669
- name: cargo test --doc
run: cargo test --locked --all-features --doc
minimal:
# This action chooses the oldest version of the dependencies permitted by Cargo.toml to ensure
# that this crate is compatible with the minimal version that this crate and its dependencies
# require. This will pickup issues where this create relies on functionality that was introduced
# later than the actual version specified (e.g., when we choose just a major version, but a
# method was added after this version).
#
# This particular check can be difficult to get to succeed as often transitive dependencies may
# be incorrectly specified (e.g., a dependency specifies 1.0 but really requires 1.1.5). There
# is an alternative flag available -Zdirect-minimal-versions that uses the minimal versions for
# direct dependencies of this crate, while selecting the maximal versions for the transitive
# dependencies. Alternatively, you can add a line in your Cargo.toml to artificially increase
# the minimal dependency, which you do with e.g.:
# ```toml
# # for minimal-versions
# [target.'cfg(any())'.dependencies]
# openssl = { version = "0.10.55", optional = true } # needed to allow foo to build with -Zminimal-versions
# ```
# The optional = true is necessary in case that dependency isn't otherwise transitively required
# by your library, and the target bit is so that this dependency edge never actually affects
# Cargo build order. See also
# https://github.com/jonhoo/fantoccini/blob/fde336472b712bc7ebf5b4e772023a7ba71b2262/Cargo.toml#L47-L49.
# This action is run on ubuntu with the stable toolchain, as it is not expected to fail
runs-on: ubuntu-latest
name: ubuntu / stable / minimal-versions
steps:
- uses: actions/checkout@v4
with:
submodules: true
- name: Install stable
uses: dtolnay/rust-toolchain@stable
- name: Install nightly for -Zminimal-versions
uses: dtolnay/rust-toolchain@nightly
- name: rustup default stable
run: rustup default stable
- name: cargo update -Zminimal-versions
run: cargo +nightly update -Zminimal-versions
- name: cargo test
run: cargo test --locked --all-features --all-targets
os-check:
# run cargo test on mac and windows
runs-on: ${{ matrix.os }}
name: ${{ matrix.os }} / stable
strategy:
fail-fast: false
matrix:
os: [macos-latest, windows-latest]
steps:
# if your project needs OpenSSL, uncomment this to fix Windows builds.
# it's commented out by default as the install command takes 5-10m.
# - run: echo "VCPKG_ROOT=$env:VCPKG_INSTALLATION_ROOT" | Out-File -FilePath $env:GITHUB_ENV -Append
# if: runner.os == 'Windows'
# - run: vcpkg install openssl:x64-windows-static-md
# if: runner.os == 'Windows'
- uses: actions/checkout@v4
with:
submodules: true
- name: Install stable
uses: dtolnay/rust-toolchain@stable
- name: cargo generate-lockfile
if: hashFiles('Cargo.lock') == ''
run: cargo generate-lockfile
- name: cargo test
run: cargo test --locked --all-features --all-targets
coverage:
# use llvm-cov to build and collect coverage and outputs in a format that
# is compatible with codecov.io
#
# note that codecov as of v4 requires that CODECOV_TOKEN from
#
# https://app.codecov.io/gh/<user or org>/<project>/settings
#
# is set in two places on your repo:
#
# - https://github.com/jonhoo/guardian/settings/secrets/actions
# - https://github.com/jonhoo/guardian/settings/secrets/dependabot
#
# (the former is needed for codecov uploads to work with Dependabot PRs)
#
# PRs coming from forks of your repo will not have access to the token, but
# for those, codecov allows uploading coverage reports without a token.
# it's all a little weird and inconvenient. see
#
# https://github.com/codecov/feedback/issues/112
#
# for lots of more discussion
runs-on: ubuntu-latest
name: ubuntu / stable / coverage
steps:
- uses: actions/checkout@v4
with:
submodules: true
- name: Install stable
uses: dtolnay/rust-toolchain@stable
with:
components: llvm-tools-preview
- name: cargo install cargo-llvm-cov
uses: taiki-e/install-action@cargo-llvm-cov
- name: cargo generate-lockfile
if: hashFiles('Cargo.lock') == ''
run: cargo generate-lockfile
- name: cargo llvm-cov
run: cargo llvm-cov --locked --all-features --lcov --output-path lcov.info
- name: Record Rust version
run: echo "RUST=$(rustc --version)" >> "$GITHUB_ENV"
- name: Upload to codecov.io
uses: codecov/codecov-action@v4
with:
fail_ci_if_error: true
token: ${{ secrets.CODECOV_TOKEN }}
env_vars: OS,RUST

24
vendor/jsonptr/.taplo.toml vendored Normal file
View File

@@ -0,0 +1,24 @@
include = ["**/*.toml", "**/Cargo.toml", "Cargo.toml"]
[formatting]
align_comments = true
align_entries = true
allowed_blank_lines = 1
indent_entries = false
reorder_arrays = false
reorder_keys = true
trailing_newline = true
[[rule]]
formatting.align_entries = true
formatting.array_auto_expand = false
formatting.reorder_arrays = true
formatting.reorder_keys = true
include = ["Cargo.toml", "**/Cargo.toml"]
keys = [
"dependencies",
"dev-dependencies",
"build-dependencies",
"workspace.dependencies",
"workspace.dev-dependencies",
"workspace.build-dependencies",
]

218
vendor/jsonptr/CHANGELOG.md vendored Normal file
View File

@@ -0,0 +1,218 @@
# Changelog
All notable changes to this project will be documented in this file.
The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/),
and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html).
## [0.7.1] 2025-02-16
### Changed
- Removes accidentally enabled default features `"miette"` and `"toml"`
## [0.7.0] 2025-02-13
### Added
- Adds method `into_buf` for `Box<Pointer>` and `impl From<PathBuf> for Box<Pointer>`.
- Adds unsafe associated methods `Pointer::new_unchecked` and `PointerBuf::new_unchecked` for
external zero-cost construction.
- Adds `Pointer::starts_with` and `Pointer::ends_with` for prefix and suffix matching.
- Adds new `ParseIndexError` variant to express the presence non-digit characters in the token.
- Adds `Token::is_next` for checking if a token represents the `-` character.
- Adds `InvalidEncoding` to represent the two possible encoding errors when decoding a token.
- Adds `diagnotic::Diagnostic` trait to facilitate error reporting and
`miette` integration. All errors intended for usage with `assign::Assign` or
`resolve::Resolve` must implement this trait.
- Adds `diagnostic::Report<T>` to capture the input for `PointerBuf::parse`
and to facilitate `miette` integration for all errors.
- Adds `"miette"` feature flag to enable `miette` integration for error reporting.
### Changed
- `Pointer::get` now accepts ranges and can produce `Pointer` segments as output (similar to
`slice::get`).
- Bumps minimum Rust version to 1.79.
- `PointerBuf::parse` now returns `RichParseError`, an alias to
`Report<ParseError>` which contains the allocated string as well as the
error. Use `Report::original` for matches or `Report::
- Renames `ParseError::NoLeadingBackslash` to `ParseError::NoLeadingSlash`
(sorry for the churn, I spaced hard - @chanced).
- Adds field `position` to variants of `resolve::Error` and `assign::Error` to indicate the
token index of where the error occurred.
- Renames `ParseError::is_no_leading_backslash` to `ParseError::is_no_leading_slash`.
- Renames `assign::AssignError` to `assign::Error`
- Renames `resolve::ResolveError` to `resolve::Error`
- Renames `InvalidEncodingError` to `EncodingError`
### Fixed
- Make validation of array indices conform to RFC 6901 in the presence of non-digit characters.
### Deprecated
- `ParseError::is_no_leading_backslash` renamed to `ParseError::is_no_leading_slash`.
- `assign::AssignError` renamed to `assign::Error`
- `resolve::ResolveError` renamed to `resolve::Error`
- `InvalidEncodingError` renamed to `EncodingError`
## [0.6.2] 2024-09-30
### Added
- Adds methods `len` and `is_empty` to `Pointer`
## [0.6.1] 2024-09-26
## Added
- Adds fluid methods `with_trailing_token`, `with_leading_token`, `concat` to `Pointer`.
## [0.6.0] - 2024-08-06
### Fixed
- `Token::to_index` now fails if the token contains leading zeros, as mandated by the RFC.
### Changed
- `ParseIndexError` is now an enum to reflect the new failure mode when parsing indices.
## [0.5.1]
### Changed
- README tweak.
## [0.5.0]
This is a breaking release including:
- [#30](https://github.com/chanced/jsonptr/pull/30) and [#37](https://github.com/chanced/jsonptr/pull/37) by [@asmello](https://github.com/asmello)
- [#41](https://github.com/chanced/jsonptr/pull/41) by [@chanced](https://github.com/chanced) & [@asmello](https://github.com/asmello)
### Added
- New slice type `Pointer` that enables zero-copy usage patterns
- New const constructor `const fn Pointer::from_static` for compile-time allocated `Pointer`s
- Zero-allocation `Pointer::root` singleton pointer
- [Quickcheck](https://docs.rs/quickcheck/latest/quickcheck/index.html)-based testing
- New methods: `Pointer::split_front`, `Pointer::split_back`, `Pointer::parent`, `Pointer::strip_suffix`
- Implemented `Display` and `Debug` for `ParseError`
- Adds `Pointer::split_at` which utilizes character offsets to split a pointer at a separator
- Adds specific error types `ParseError`, `ResolveError`, `AssignError`
### Changed
- JSON Pointers with leading `"#"` are no longer accepted. Previously, the erroneous leading hashtag was allowed during parsing but discarded.
- `Assign`, `Resolve`, `ResolveMut`, `Delete` all now use associated types `Value` and `Error`, allowing for more impls other than JSON
- Debug implementation now preserves type information (e.g. prints `PathBuf("/foo/bar")` instead of `"/foo/bar"`) - `Display` remains the same
- Original `Pointer` type renamed to `PointerBuf`
- Error types now use character `offset` indexing instead of owned copies of `Pointer` and `Token`.
- `Pointer::root` is now `PointerBuf::new`
- `Pointer::new` is now `PointerBuf::from_tokens` (and takes an `IntoIterator` argument - arrays still work)
- `Pointer::union` is now `PointerBuf::intersection`
- `Token` type has been simplified and is now by default a borrowed type (use `Token::to_owned` or `Token::into_owned` to make it owned)
- `Assign::assign` now returns `Result<Option<Assign::Value>, AssignError>`, where `Option<Assign::Value>` is the replaced value
### Fixed
- Fixes [#28](https://github.com/chanced/jsonptr/pull/28): `Pointer::union` is misleadingly named
### Removed
- Removes `Assignment`
- Removes `MaybePointer`
- Removes `Error`
- Removes `impl Deref<Target=&str>` from `Pointer`
- Removes optional dependencies of `url`, `fluent-uri` and `uniresid` as well as the `TryFrom` implementations for their respective types
- Removed `Token::as_key` and `Token::as_str` - use `Token::decoded().as_ref()` to achieve the same effect
- Several redundant or error-prone trait implementations were removed from `Token`
## [0.4.7] 2024-03-18
- Fixes issue with `pop_front` on a token with an empty string leaving the pointer in an invalid state. #25 by [@wngr](https://github.com/wngr)
- Fixes issue with `pop_back` on a token with an empty string. #26 by [@asmello](https://github.com/asmello)
## [0.4.6] 2024-03-24
- Fixes `Pointer::last` panicking for empty/root pointers #23 by [@wngr](https://github.com/wngr)
## [0.4.5] 2024-02-23
### Fixed
- Fixes issue with `Pointer::push_back` that does not allow for empty strings
to be appended as tokens. #21 fixed by [@wngr](https://github.com/wngr)
## [0.4.3] 2023-08-20
### Added
- Adds `parse` method to `Pointer` which calls the currently existing `FromStr`
impl
## [0.4.2] 2023-06-23
### Added
- implements `IntoIterator` for `&Pointer`
## [0.4.1] 2023-06-21
### Added
- implements `Borrow<[u8]>` and `AsRef<[u8]>` for `Pointer`
## [0.4.0] 2023-05-31
### Added
- Adds `CHANGELOG.md` which will be better upkept moving forward.
- Adds `MaybePointer` to assist with deserialization which should not fail fast.
### Changed
- `Pointer::new` now accepts a generic list, so `&["example"]` can be replaced by `["example"]`. For untyped, empty slices (i.e. `Pointer::new(&[])`), use `Pointer::default()`.
- `std` is now enabled by default.
### Removed
- Removes optional `MalformedPointerError` from `Pointer`.
## [0.3.6] 2023-05-23
### Changed
- Adds quotes around `Pointer` debug output (#11)
### Fixed
- Adds missing `impl std::error::Error` for `Error`, `NotFoundError`, `MalformedError`
- Fixes build for `std` feature flag
## [0.3.4] 2023-05-11
### Added
- Adds feature flag `fluent-uri` for `From<fluent_uri::Uri<_>` impl (#3)
## [0.2.0] 2023-02-24
### Changed
- `std` is now optional
- Adds feature flags `"uniresid"`, `"url"` to enable implementing `From<Uri>`, `From<Url>` (respectively).
### Removed
- Removes `Cargo.lock`
- Makes `uniresid` and `uri` optional
## [0.1.0] - 2022-06-12
### Fixed
- Fixes root pointer representation `""` rather than the erroneous `"/"`
- Fixes an issue where encoded tokens were not being resolved properly

100
vendor/jsonptr/Cargo.toml vendored Normal file
View File

@@ -0,0 +1,100 @@
# THIS FILE IS AUTOMATICALLY GENERATED BY CARGO
#
# When uploading crates to the registry Cargo will automatically
# "normalize" Cargo.toml files for maximal compatibility
# with all versions of Cargo and also rewrite `path` dependencies
# to registry (e.g., crates.io) dependencies.
#
# If you are reading this file be aware that the original Cargo.toml
# will likely look very different (and much more reasonable).
# See Cargo.toml.orig for the original contents.
[package]
edition = "2021"
rust-version = "1.79.0"
name = "jsonptr"
version = "0.7.1"
authors = [
"chance dinkins",
"André Sá de Mello <codasm@pm.me>",
"Oliver Wangler <oliver@wngr.de>",
]
build = false
autobins = false
autoexamples = false
autotests = false
autobenches = false
description = "Data structures and logic for resolving, assigning, and deleting by JSON Pointers (RFC 6901)"
homepage = "https://github.com/chanced/jsonptr"
documentation = "https://docs.rs/jsonptr"
readme = "README.md"
keywords = [
"json-pointer",
"rfc-6901",
"6901",
]
license = "MIT OR Apache-2.0"
repository = "https://github.com/chanced/jsonptr"
[lib]
name = "jsonptr"
path = "src/lib.rs"
[dependencies.miette]
version = "7.4.0"
features = ["fancy"]
optional = true
[dependencies.serde]
version = "1.0.203"
features = ["alloc"]
optional = true
[dependencies.serde_json]
version = "1.0.119"
features = ["alloc"]
optional = true
[dependencies.toml]
version = "0.8"
optional = true
[dev-dependencies.quickcheck]
version = "1.0.3"
[dev-dependencies.quickcheck_macros]
version = "1.0.0"
[features]
assign = []
default = [
"std",
"serde",
"json",
"resolve",
"assign",
"delete",
]
delete = ["resolve"]
json = [
"dep:serde_json",
"serde",
]
miette = [
"dep:miette",
"std",
]
resolve = []
std = [
"serde/std",
"serde_json?/std",
]
toml = [
"dep:toml",
"serde",
"std",
]
[target."cfg(any())".dependencies.syn]
version = "1.0.109"
optional = true

201
vendor/jsonptr/LICENSE-APACHE vendored Normal file
View File

@@ -0,0 +1,201 @@
Apache License
Version 2.0, January 2004
http://www.apache.org/licenses/
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
1. Definitions.
"License" shall mean the terms and conditions for use, reproduction,
and distribution as defined by Sections 1 through 9 of this document.
"Licensor" shall mean the copyright owner or entity authorized by
the copyright owner that is granting the License.
"Legal Entity" shall mean the union of the acting entity and all
other entities that control, are controlled by, or are under common
control with that entity. For the purposes of this definition,
"control" means (i) the power, direct or indirect, to cause the
direction or management of such entity, whether by contract or
otherwise, or (ii) ownership of fifty percent (50%) or more of the
outstanding shares, or (iii) beneficial ownership of such entity.
"You" (or "Your") shall mean an individual or Legal Entity
exercising permissions granted by this License.
"Source" form shall mean the preferred form for making modifications,
including but not limited to software source code, documentation
source, and configuration files.
"Object" form shall mean any form resulting from mechanical
transformation or translation of a Source form, including but
not limited to compiled object code, generated documentation,
and conversions to other media types.
"Work" shall mean the work of authorship, whether in Source or
Object form, made available under the License, as indicated by a
copyright notice that is included in or attached to the work
(an example is provided in the Appendix below).
"Derivative Works" shall mean any work, whether in Source or Object
form, that is based on (or derived from) the Work and for which the
editorial revisions, annotations, elaborations, or other modifications
represent, as a whole, an original work of authorship. For the purposes
of this License, Derivative Works shall not include works that remain
separable from, or merely link (or bind by name) to the interfaces of,
the Work and Derivative Works thereof.
"Contribution" shall mean any work of authorship, including
the original version of the Work and any modifications or additions
to that Work or Derivative Works thereof, that is intentionally
submitted to Licensor for inclusion in the Work by the copyright owner
or by an individual or Legal Entity authorized to submit on behalf of
the copyright owner. For the purposes of this definition, "submitted"
means any form of electronic, verbal, or written communication sent
to the Licensor or its representatives, including but not limited to
communication on electronic mailing lists, source code control systems,
and issue tracking systems that are managed by, or on behalf of, the
Licensor for the purpose of discussing and improving the Work, but
excluding communication that is conspicuously marked or otherwise
designated in writing by the copyright owner as "Not a Contribution."
"Contributor" shall mean Licensor and any individual or Legal Entity
on behalf of whom a Contribution has been received by Licensor and
subsequently incorporated within the Work.
2. Grant of Copyright License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
copyright license to reproduce, prepare Derivative Works of,
publicly display, publicly perform, sublicense, and distribute the
Work and such Derivative Works in Source or Object form.
3. Grant of Patent License. Subject to the terms and conditions of
this License, each Contributor hereby grants to You a perpetual,
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
(except as stated in this section) patent license to make, have made,
use, offer to sell, sell, import, and otherwise transfer the Work,
where such license applies only to those patent claims licensable
by such Contributor that are necessarily infringed by their
Contribution(s) alone or by combination of their Contribution(s)
with the Work to which such Contribution(s) was submitted. If You
institute patent litigation against any entity (including a
cross-claim or counterclaim in a lawsuit) alleging that the Work
or a Contribution incorporated within the Work constitutes direct
or contributory patent infringement, then any patent licenses
granted to You under this License for that Work shall terminate
as of the date such litigation is filed.
4. Redistribution. You may reproduce and distribute copies of the
Work or Derivative Works thereof in any medium, with or without
modifications, and in Source or Object form, provided that You
meet the following conditions:
(a) You must give any other recipients of the Work or
Derivative Works a copy of this License; and
(b) You must cause any modified files to carry prominent notices
stating that You changed the files; and
(c) You must retain, in the Source form of any Derivative Works
that You distribute, all copyright, patent, trademark, and
attribution notices from the Source form of the Work,
excluding those notices that do not pertain to any part of
the Derivative Works; and
(d) If the Work includes a "NOTICE" text file as part of its
distribution, then any Derivative Works that You distribute must
include a readable copy of the attribution notices contained
within such NOTICE file, excluding those notices that do not
pertain to any part of the Derivative Works, in at least one
of the following places: within a NOTICE text file distributed
as part of the Derivative Works; within the Source form or
documentation, if provided along with the Derivative Works; or,
within a display generated by the Derivative Works, if and
wherever such third-party notices normally appear. The contents
of the NOTICE file are for informational purposes only and
do not modify the License. You may add Your own attribution
notices within Derivative Works that You distribute, alongside
or as an addendum to the NOTICE text from the Work, provided
that such additional attribution notices cannot be construed
as modifying the License.
You may add Your own copyright statement to Your modifications and
may provide additional or different license terms and conditions
for use, reproduction, or distribution of Your modifications, or
for any such Derivative Works as a whole, provided Your use,
reproduction, and distribution of the Work otherwise complies with
the conditions stated in this License.
5. Submission of Contributions. Unless You explicitly state otherwise,
any Contribution intentionally submitted for inclusion in the Work
by You to the Licensor shall be under the terms and conditions of
this License, without any additional terms or conditions.
Notwithstanding the above, nothing herein shall supersede or modify
the terms of any separate license agreement you may have executed
with Licensor regarding such Contributions.
6. Trademarks. This License does not grant permission to use the trade
names, trademarks, service marks, or product names of the Licensor,
except as required for reasonable and customary use in describing the
origin of the Work and reproducing the content of the NOTICE file.
7. Disclaimer of Warranty. Unless required by applicable law or
agreed to in writing, Licensor provides the Work (and each
Contributor provides its Contributions) on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
implied, including, without limitation, any warranties or conditions
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
PARTICULAR PURPOSE. You are solely responsible for determining the
appropriateness of using or redistributing the Work and assume any
risks associated with Your exercise of permissions under this License.
8. Limitation of Liability. In no event and under no legal theory,
whether in tort (including negligence), contract, or otherwise,
unless required by applicable law (such as deliberate and grossly
negligent acts) or agreed to in writing, shall any Contributor be
liable to You for damages, including any direct, indirect, special,
incidental, or consequential damages of any character arising as a
result of this License or out of the use or inability to use the
Work (including but not limited to damages for loss of goodwill,
work stoppage, computer failure or malfunction, or any and all
other commercial damages or losses), even if such Contributor
has been advised of the possibility of such damages.
9. Accepting Warranty or Additional Liability. While redistributing
the Work or Derivative Works thereof, You may choose to offer,
and charge a fee for, acceptance of support, warranty, indemnity,
or other liability obligations and/or rights consistent with this
License. However, in accepting such obligations, You may act only
on Your own behalf and on Your sole responsibility, not on behalf
of any other Contributor, and only if You agree to indemnify,
defend, and hold each Contributor harmless for any liability
incurred by, or claims asserted against, such Contributor by reason
of your accepting any such warranty or additional liability.
END OF TERMS AND CONDITIONS
APPENDIX: How to apply the Apache License to your work.
To apply the Apache License to your work, attach the following
boilerplate notice, with the fields enclosed by brackets "[]"
replaced with your own identifying information. (Don't include
the brackets!) The text should be enclosed in the appropriate
comment syntax for the file format. We also recommend that a
file or class name and description of purpose be included on the
same "printed page" as the copyright notice for easier
identification within third-party archives.
Copyright 2024 Chance Dinkins
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.

21
vendor/jsonptr/LICENSE-MIT vendored Normal file
View File

@@ -0,0 +1,21 @@
MIT License
Copyright (c) 2022 Chance Dinkins
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.

240
vendor/jsonptr/README.md vendored Normal file
View File

@@ -0,0 +1,240 @@
<div class="rustdoc-hidden">
# jsonptr - JSON Pointers (RFC 6901) for Rust
</div>
[<img alt="github" src="https://img.shields.io/badge/github-chanced/jsonptr-62D1FC?style=for-the-badge&labelColor=777&logo=github" height="21">](https://github.com/chanced/jsonptr)
[<img alt="crates.io" src="https://img.shields.io/crates/v/jsonptr.svg?style=for-the-badge&color=fc8d62&logo=rust" height="21">](https://crates.io/crates/jsonptr)
[<img alt="docs.rs" src="https://img.shields.io/badge/docs.rs-jsonptr-f0f0f0?style=for-the-badge&labelColor=777&logo=docs.rs" height="21">](https://docs.rs/jsonptr)
[<img alt="build status" src="https://img.shields.io/github/actions/workflow/status/chanced/jsonptr/test.yml?branch=main&style=for-the-badge" height="21">](https://github.com/chanced/jsonptr/actions?query=branch%3Amain)
[<img alt="code coverage" src="https://img.shields.io/codecov/c/github/chanced/jsonptr?style=for-the-badge&color=CBB88D" height="21">](https://codecov.io/gh/chanced/jsonptr)
JSON Pointers ([RFC 6901](https://datatracker.ietf.org/doc/html/rfc6901))
defines a string syntax for identifying a specific location within a JSON, or
similar, document. This crate provides two types, [`Pointer`] and [`PointerBuf`]
(akin to [`Path`] and [`PathBuf`]), for working with them abstractly.
A pointer is composed of zero or more [`Token`]s, single segments which
represent a field of an object or an [`index`] of an array, and are bounded by
either `'/'` or the end of the string. Tokens are lightly encoded, where `'~'`
is escaped as `"~0"` due to it signaling encoding and `'/'` is escaped as `"~1"`
because `'/'` separates tokens and would split the token into two otherwise.
[`Token`]s can be iterated over using either [`Tokens`], returned from the
[`tokens`] method of a pointer or [`Components`], returned from the
[`components`] method. The difference being that `Tokens` iterates over each
token in the pointer, while `Components` iterates over [`Component`]s, which can
represent the root of the document or a single token of the pointer.
Operations [`resolve`], [`assign`] and [`delete`] are provided as traits with
corresponding methods on pointer types. Implementations of each trait are
provided for value types of the crates [`serde_json`] and [`toml`]. All
operations are enabled by default but are gated by [feature
flags](#feature-flags).
## Usage
### Parsing and General Usage
To parse a [`Pointer`] from a string, use either [`Pointer::parse`], for
potentially fallible parsing, or the `const fn` `from_static` to produce a
`&'static Pointer` from a string that is known to be valid.
```rust
use jsonptr::Pointer;
let ptr = Pointer::parse("/examples/0/name").unwrap();
let static_ptr = Pointer::from_static("/examples/0/name");
assert_eq!(ptr, static_ptr);
assert_eq!(ptr.get(1..).unwrap(), Pointer::parse("/0/name").unwrap());
let parent = ptr.parent().unwrap();
assert_eq!(parent, Pointer::parse("/examples/0").unwrap());
let (token, remaining) = ptr.split_front().unwrap();
assert_eq!(token.decoded(), "examples");
assert_eq!(remaining, Pointer::parse("/0/name").unwrap());
```
[`PointerBuf`]s can be parsed using [`PointerBuf::parse`] or constructed from an
iterator of [`Token`]s with the [`from_tokens`] method:
```rust
use jsonptr::PointerBuf;
let mut buf = PointerBuf::parse("/examples/0/name").unwrap();
let from_tokens = PointerBuf::from_tokens(["examples", "0", "name"]);
assert_eq!(&buf, &from_tokens);
buf.push_front("pointer");
buf.push_front("~");
buf.push_back("/");
assert_eq!(buf.as_str(), "/~0/pointer/examples/0/name/~1");
```
### Token Iteration
Iterating over the tokens or components of a pointer:
```rust
use jsonptr::{Pointer, Component, Token};
let ptr = Pointer::from_static("/path/to/value");
// Using the `tokens` method:
let tokens: Vec<_> = ptr.tokens().collect();
assert_eq!(tokens, vec![Token::new("path"), Token::new("to"), Token::new("value")]);
// Using the `components` method:
let mut components = ptr.components();
assert_eq!(components.next(), Some(Component::Root));
assert_eq!(components.next(), Some(Component::Token(Token::new("path"))));
assert_eq!(components.next(), Some(Component::Token(Token::new("to"))));
assert_eq!(components.next(), Some(Component::Token(Token::new("value"))));
```
### Resolving Values
To get a value at the location of a pointer, use either the [`Resolve`] and
[`ResolveMut`] traits or [`Pointer::resolve`] and [`Pointer::resolve_mut`]
methods. See the [`resolve`] mod for more information.
```rust
use jsonptr::Pointer;
use serde_json::json;
let ptr = Pointer::parse("/foo/bar").unwrap();
let data = json!({"foo": { "bar": 34 }});
let bar = ptr.resolve(&data).unwrap();
assert_eq!(bar, &json!(34));
```
### Assigning Values
Values can be set, with path expansion, using the either the [`Assign`] trait or
[`Pointer::assign`]. See [`assign`] for more information.
```rust
use jsonptr::Pointer;
use serde_json::json;
let ptr = Pointer::parse("/secret/universe").unwrap();
let mut data = json!({"secret": { "universe": 42 }});
let replaced = ptr.assign(&mut data, json!(34)).unwrap();
assert_eq!(replaced, Some(json!(42)));
assert_eq!(data, json!({"secret": { "universe": 34 }}));
```
### Deleting Values
Values can be removed with the either the [`Delete`] trait or
[`Pointer::delete`]. See [`delete`] for more information.
```rust
use jsonptr::Pointer;
use serde_json::json;
let ptr = Pointer::parse("/secret/universe").unwrap();
let mut data = json!({"secret": { "universe": 42 }});
let deleted = ptr.delete(&mut data);
assert_eq!(deleted, Some(json!(42)));
assert_eq!(data, json!({"secret": {}}));
```
### Error Reporting
Any error produced by function calls into methods of traits or types of this
crate can be converted into a [`Report`] which contains the original error
and the [`String`] which failed to parse or the [`PointerBuf`] which failed to
resolve or assign.
```rust
use jsonptr::{Pointer, Diagnose};
let ptr_str = "foo/bar";
let err /* Result<&Pointer, Report<ParseError>> */ = Pointer::parse(ptr_str).diagnose(ptr_str).unwrap_err();
assert!(err.original().is_no_leading_slash());
```
In the case of [`PointerBuf::parse`], the [`ParseError`] is always wrapped in a
[`Report`] so that the input `String` is not dropped.
```rust
use jsonptr::{PointerBuf};
let ptr_str = "foo/bar";
let err /* Result<&PointerBuf, Report<ParseError>> */ = PointerBuf::parse(ptr_str).unwrap_err();
assert!(err.original().is_no_leading_slash());
```
## Feature Flags
| Flag | Description | Enables | Default |
| :---------: | ----------------------------------------------------------------------------------------------------------------------------------------- | --------------- | :-----: |
| `"std"` | Implements `std::error::Error` for error types | | ✓ |
| `"serde"` | Enables [`serde`] support for types | | ✓ |
| `"json"` | Implements ops for [`serde_json::Value`] | `"serde"` | ✓ |
| `"toml"` | Implements ops for [`toml::Value`] | `"std"`, `toml` | |
| `"assign"` | Enables the [`assign`] module and related pointer methods, providing a means to assign a value to a specific location within a document | | ✓ |
| `"resolve"` | Enables the [`resolve`] module and related pointer methods, providing a means to resolve a value at a specific location within a document | | ✓ |
| `"delete"` | Enables the [`delete`] module and related pointer methods, providing a means to delete a value at a specific location within a document | `"resolve"` | ✓ |
| `"miette"` | Enables integration with [`miette`](https://docs.rs/miette) for error reporting | `"std"` | |
<div class="rustdoc-hidden">
## License
Licensed under either of
- Apache License, Version 2.0
([LICENSE-APACHE](LICENSE-APACHE) or http://www.apache.org/licenses/LICENSE-2.0)
- MIT license
([LICENSE-MIT](LICENSE-MIT) or http://opensource.org/licenses/MIT)
at your convenience.
## Contribution
Contributions and feedback are always welcome and appreciated. If you find an
issue, please open a ticket or a pull request.
Unless you explicitly state otherwise, any contribution intentionally submitted
for inclusion in the work by you, as defined in the Apache-2.0 license, shall be
dual licensed as above, without any additional terms or conditions.
[LICENSE-APACHE]: LICENSE-APACHE
[LICENSE-MIT]: LICENSE-MIT
</div>
[`Pointer::components`]: https://docs.rs/jsonptr/latest/jsonptr/struct.Pointer.html#method.components
[`Pointer::tokens`]: https://docs.rs/jsonptr/latest/jsonptr/struct.Pointer.html#method.tokens
[`Pointer`]: https://docs.rs/jsonptr/latest/jsonptr/struct.Pointer.html
[`Pointer::parse`]: https://docs.rs/jsonptr/latest/jsonptr/struct.Pointer.html#method.parse
[`Pointer::resolve`]: https://docs.rs/jsonptr/latest/jsonptr/struct.Pointer.html#method.resolve
[`Pointer::resolve_mut`]: https://docs.rs/jsonptr/latest/jsonptr/struct.Pointer.html#method.resolve_mut
[`Pointer::assign`]: https://docs.rs/jsonptr/latest/jsonptr/struct.Pointer.html#method.assign
[`Pointer::delete`]: https://docs.rs/jsonptr/latest/jsonptr/struct.Pointer.html#method.delete
[`PointerBuf::parse`]: https://docs.rs/jsonptr/latest/jsonptr/struct.PointerBuf.html#method.parse
[`from_tokens`]: https://docs.rs/jsonptr/latest/jsonptr/struct.PointerBuf.html#method.from_tokens
[`PointerBuf`]: https://docs.rs/jsonptr/latest/jsonptr/struct.PointerBuf.html
[`Token`]: https://docs.rs/jsonptr/latest/jsonptr/struct.Token.html
[`Tokens`]: https://docs.rs/jsonptr/latest/jsonptr/struct.Tokens.html
[`Components`]: https://docs.rs/jsonptr/latest/jsonptr/struct.Components.html
[`Component`]: https://docs.rs/jsonptr/latest/jsonptr/enum.Component.html
[`index`]: https://docs.rs/jsonptr/latest/jsonptr/index/index.html
[`tokens`]: https://docs.rs/jsonptr/latest/jsonptr/struct.Pointer.html#method.tokens
[`components`]: https://docs.rs/jsonptr/latest/jsonptr/struct.Pointer.html#method.components
[`resolve`]: https://docs.rs/jsonptr/latest/jsonptr/resolve/index.html
[`assign`]: https://docs.rs/jsonptr/latest/jsonptr/assign/index.html
[`delete`]: https://docs.rs/jsonptr/latest/jsonptr/delete/index.html
[`Resolve`]: https://docs.rs/jsonptr/latest/jsonptr/resolve/trait.Resolve.html
[`ResolveMut`]: https://docs.rs/jsonptr/latest/jsonptr/resolve/trait.ResolveMut.html
[`Assign`]: https://docs.rs/jsonptr/latest/jsonptr/assign/trait.Assign.html
[`Delete`]: https://docs.rs/jsonptr/latest/jsonptr/delete/trait.Delete.html
[`serde`]: https://docs.rs/serde/1.0/serde/index
[`serde_json`]: https://docs.rs/serde_json/1.0/serde_json/enum.Value.html
[`serde_json::Value`]: https://docs.rs/serde_json/1.0/serde_json/enum.Value.html
[`toml`]: https://docs.rs/toml/0.8/toml/enum.Value.html
[`toml::Value`]: https://docs.rs/toml/0.8/toml/enum.Value.html
[`Path`]: https://doc.rust-lang.org/std/path/struct.Path.html
[`PathBuf`]: https://doc.rust-lang.org/std/path/struct.PathBuf.html

32
vendor/jsonptr/src/arbitrary.rs vendored Normal file
View File

@@ -0,0 +1,32 @@
use crate::{PointerBuf, Token};
use alloc::{boxed::Box, string::String, vec::Vec};
use quickcheck::Arbitrary;
impl Arbitrary for Token<'static> {
    /// Produces a token from an arbitrary, unencoded string.
    fn arbitrary(g: &mut quickcheck::Gen) -> Self {
        let raw = String::arbitrary(g);
        Self::new(raw)
    }
    /// Shrinks by shrinking the decoded string and re-encoding each candidate.
    fn shrink(&self) -> Box<dyn Iterator<Item = Self>> {
        let decoded = self.decoded().into_owned();
        Box::new(decoded.shrink().map(Self::new))
    }
}
impl Arbitrary for PointerBuf {
    /// Builds a pointer from a bounded number of arbitrary tokens.
    fn arbitrary(g: &mut quickcheck::Gen) -> Self {
        let len = usize::arbitrary(g) % g.size();
        let tokens: Vec<_> = (0..len).map(|_| Token::arbitrary(g)).collect();
        Self::from_tokens(tokens)
    }
    /// Shrinks by removing one token at a time, yielding each pointer that
    /// results from dropping a single token.
    fn shrink(&self) -> Box<dyn Iterator<Item = Self>> {
        let tokens: Vec<_> = self.tokens().map(Token::into_owned).collect();
        let count = self.count();
        Box::new((0..count).map(move |skip| {
            let kept: Vec<_> = tokens
                .iter()
                .enumerate()
                .filter(|&(i, _)| i != skip)
                .map(|(_, t)| t.clone())
                .collect();
            Self::from_tokens(kept)
        }))
    }
}

991
vendor/jsonptr/src/assign.rs vendored Normal file
View File

@@ -0,0 +1,991 @@
//! # Assign values based on JSON [`Pointer`]s
//!
//! This module provides the [`Assign`] trait which allows for the assignment of
//! values based on a JSON Pointer.
//!
//! This module is enabled by default with the `"assign"` feature flag.
//!
//! # Expansion
//! The path will automatically be expanded if the [`Pointer`] is not fully
//! exhausted before reaching a non-existent key in the case of objects, index
//! in the case of arrays, or a scalar value (including `null`) based upon a
//! best-guess effort on the meaning of each [`Token`](crate::Token):
//! - If the [`Token`](crate::Token) is equal to `"0"` or `"-"`, the token will
//! be considered an index of an array.
//! - All tokens not equal to `"0"` or `"-"` will be considered keys of an
//! object.
//!
//! ## Usage
//! [`Assign`] can be used directly or through the [`assign`](Pointer::assign)
//! method of [`Pointer`].
//!
//! ```rust
//! use jsonptr::Pointer;
//! use serde_json::json;
//! let mut data = json!({"foo": "bar"});
//! let ptr = Pointer::from_static("/foo");
//! let replaced = ptr.assign(&mut data, "baz").unwrap();
//! assert_eq!(replaced, Some(json!("bar")));
//! assert_eq!(data, json!({"foo": "baz"}));
//! ```
//! ## Provided implementations
//!
//! | Lang | value type | feature flag | Default |
//! | ----- |: ----------------- :|: ---------- :| ------- |
//! | JSON | `serde_json::Value` | `"json"` | ✓ |
//! | TOML | `toml::Value` | `"toml"` | |
//!
use crate::{
diagnostic::{diagnostic_url, Diagnostic, Label},
index::{OutOfBoundsError, ParseIndexError},
Pointer, PointerBuf,
};
use alloc::{boxed::Box, string::ToString};
use core::{
fmt::{self, Debug},
iter::once,
};
/// Implemented by types which can internally assign a
/// ([`Value`](`Assign::Value`)) at a path represented by a JSON [`Pointer`].
///
/// ## Expansion
/// For provided implementations (`"json"`, and `"toml"`) the path will
/// automatically be expanded if the [`Pointer`] is not fully exhausted
/// before reaching a non-existent key in the case of objects, index in the case
/// of arrays, or a scalar value (including `null`) based upon a best-guess
/// effort on the meaning of each [`Token`](crate::Token):
///
/// - If the [`Token`](crate::Token) is equal to `"0"` or `"-"`, the token will
/// be considered an index of an array.
/// - All tokens not equal to `"0"` or `"-"` will be considered keys of an
/// object.
///
/// ## Examples
///
/// ### Successful assignment with replacement
/// This example demonstrates a successful assignment with replacement.
/// ```rust
/// use jsonptr::{Pointer, assign::Assign};
/// use serde_json::{json, Value};
///
/// let mut data = json!({"foo": "bar"});
/// let ptr = Pointer::from_static("/foo");
///
/// let replaced = data.assign(&ptr, "baz").unwrap();
/// assert_eq!(replaced, Some(json!("bar")));
/// assert_eq!(data, json!({"foo": "baz"}));
/// ```
///
/// ### Successful assignment with path expansion
/// This example demonstrates path expansion, including an array index (`"0"`)
/// ```rust
/// # use jsonptr::{Pointer, assign::Assign};
/// # use serde_json::{json, Value};
/// let ptr = Pointer::from_static("/foo/bar/0/baz");
/// let mut data = serde_json::json!({"foo": "bar"});
///
/// let replaced = data.assign(ptr, json!("qux")).unwrap();
///
/// assert_eq!(&data, &json!({"foo": {"bar": [{"baz": "qux"}]}}));
/// assert_eq!(replaced, Some(json!("bar")));
/// ```
///
/// ### Successful assignment with `"-"` token
///
/// This example performs path expansion using the special `"-"` token (per RFC
/// 6901) to represent the next element in an array.
///
/// ```rust
/// # use jsonptr::{Pointer, assign::Assign};
/// # use serde_json::{json, Value};
/// let ptr = Pointer::from_static("/foo/bar/-/baz");
/// let mut data = json!({"foo": "bar"});
///
/// let replaced = data.assign(ptr, json!("qux")).unwrap();
/// assert_eq!(&data, &json!({"foo": {"bar": [{"baz": "qux"}]}}));
/// assert_eq!(replaced, Some(json!("bar")));
/// ```
pub trait Assign {
    /// The type of value that this implementation can operate on.
    type Value;
    /// Error associated with `Assign`
    type Error;
    /// Assigns a value based on the path provided by a JSON Pointer,
    /// returning the replaced value, if any.
    ///
    /// # Errors
    /// Returns [`Self::Error`] if the assignment fails.
    fn assign<V>(&mut self, ptr: &Pointer, value: V) -> Result<Option<Self::Value>, Self::Error>
    where
        V: Into<Self::Value>;
}
/// Alias for [`Error`] - indicates a value assignment failed.
///
/// Deprecated since 0.7.0: use [`Error`] directly.
#[deprecated(since = "0.7.0", note = "renamed to `Error`")]
pub type AssignError = Error;
/// Possible error returned from [`Assign`] implementations for
/// [`serde_json::Value`] and
/// [`toml::Value`](https://docs.rs/toml/0.8.14/toml/index.html).
#[derive(Debug, PartialEq, Eq)]
pub enum Error {
    /// A [`Token`](crate::Token) within the [`Pointer`] failed to be parsed as
    /// an array index.
    FailedToParseIndex {
        /// Position (token index) of the token which failed to parse as an
        /// [`Index`](crate::index::Index)
        position: usize,
        /// Offset (in bytes) of the partial pointer starting with the invalid index.
        offset: usize,
        /// The source [`ParseIndexError`]
        source: ParseIndexError,
    },
    /// A [`Token`](crate::Token) within the [`Pointer`] contains an
    /// [`Index`](crate::index::Index) which is out of bounds.
    ///
    /// The current or resulting array's length is less than the index.
    OutOfBounds {
        /// Position (token index) of the token whose index is out of bounds.
        position: usize,
        /// Offset (in bytes) of the partial pointer starting with the
        /// out-of-bounds index.
        offset: usize,
        /// The source [`OutOfBoundsError`]
        source: OutOfBoundsError,
    },
}
impl Error {
    /// The position (token index) of the [`Token`](crate::Token) which caused
    /// the failure.
    pub fn position(&self) -> usize {
        match self {
            Self::FailedToParseIndex { position, .. } | Self::OutOfBounds { position, .. } => {
                *position
            }
        }
    }
    /// Offset (in bytes) of the partial pointer starting with the invalid token.
    pub fn offset(&self) -> usize {
        match self {
            Self::FailedToParseIndex { offset, .. } | Self::OutOfBounds { offset, .. } => *offset,
        }
    }
    /// Returns `true` if the error is [`OutOfBounds`].
    ///
    /// [`OutOfBounds`]: Error::OutOfBounds
    #[must_use]
    pub fn is_out_of_bounds(&self) -> bool {
        match self {
            Self::OutOfBounds { .. } => true,
            Self::FailedToParseIndex { .. } => false,
        }
    }
    /// Returns `true` if the error is [`FailedToParseIndex`].
    ///
    /// [`FailedToParseIndex`]: Error::FailedToParseIndex
    #[must_use]
    pub fn is_failed_to_parse_index(&self) -> bool {
        match self {
            Self::FailedToParseIndex { .. } => true,
            Self::OutOfBounds { .. } => false,
        }
    }
}
impl fmt::Display for Error {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // Both variants share a common prefix; only the reason differs.
        let offset = self.offset();
        let reason = match self {
            Self::FailedToParseIndex { .. } => "failed to parse as an array index",
            Self::OutOfBounds { .. } => "is out of bounds",
        };
        write!(
            f,
            "assign failed: json pointer token at offset {offset} {reason}"
        )
    }
}
impl Diagnostic for Error {
    type Subject = PointerBuf;
    fn url() -> &'static str {
        diagnostic_url!(enum assign::Error)
    }
    /// Produces a single label pointing at the offending token within the
    /// pointer that failed to assign.
    fn labels(&self, origin: &Self::Subject) -> Option<Box<dyn Iterator<Item = Label>>> {
        let token = origin.get(self.position())?;
        // Skip past the leading '/' of the failing token unless doing so
        // would run off the end of the pointer string.
        let mut offset = self.offset();
        if offset + 1 < origin.as_str().len() {
            offset += 1;
        }
        let text = match self {
            Error::FailedToParseIndex { .. } => "expected array index or '-'".to_string(),
            Error::OutOfBounds { source, .. } => {
                format!("{} is out of bounds (len: {})", source.index, source.length)
            }
        };
        let len = token.encoded().len();
        Some(Box::new(once(Label::new(text, offset, len))))
    }
}
#[cfg(feature = "miette")]
impl miette::Diagnostic for Error {
    /// Forwards to the crate-level [`Diagnostic`] url for this error type.
    fn url<'a>(&'a self) -> Option<Box<dyn fmt::Display + 'a>> {
        let url = <Self as Diagnostic>::url();
        Some(Box::new(url))
    }
}
#[cfg(feature = "std")]
impl std::error::Error for Error {
    /// Exposes the underlying parse or bounds error as the source.
    fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
        let source: &(dyn std::error::Error + 'static) = match self {
            Self::FailedToParseIndex { source, .. } => source,
            Self::OutOfBounds { source, .. } => source,
        };
        Some(source)
    }
}
#[cfg(feature = "json")]
mod json {
use super::{Assign, Assigned, Error};
use crate::{Pointer, Token};
use alloc::{
string::{String, ToString},
vec::Vec,
};
use core::mem;
use serde_json::{map::Entry, Map, Value};
/// Wraps `value` in one container per remaining token, walking the pointer
/// right-to-left: tokens `"0"` and `"-"` become single-element arrays, all
/// other tokens become single-entry objects keyed by the token.
fn expand(mut remaining: &Pointer, mut value: Value) -> Value {
    while let Some((parent, token)) = remaining.split_back() {
        remaining = parent;
        if matches!(token.encoded(), "0" | "-") {
            value = Value::Array(vec![value]);
        } else {
            let mut wrapper = Map::new();
            wrapper.insert(token.to_string(), value);
            value = Value::Object(wrapper);
        }
    }
    value
}
impl Assign for Value {
    type Value = Value;
    type Error = Error;
    /// Assigns `value` at `ptr`, returning the replaced value, if any.
    fn assign<V>(&mut self, ptr: &Pointer, value: V) -> Result<Option<Self::Value>, Self::Error>
    where
        V: Into<Self::Value>,
    {
        let value = value.into();
        assign_value(ptr, self, value)
    }
}
pub(crate) fn assign_value(
mut ptr: &Pointer,
mut dest: &mut Value,
mut value: Value,
) -> Result<Option<Value>, Error> {
let mut offset = 0;
let mut position = 0;
while let Some((token, tail)) = ptr.split_front() {
let tok_len = token.encoded().len();
let assigned = match dest {
Value::Array(array) => assign_array(token, tail, array, value, position, offset)?,
Value::Object(obj) => assign_object(token, tail, obj, value),
_ => assign_scalar(ptr, dest, value),
};
match assigned {
Assigned::Done(assignment) => {
return Ok(assignment);
}
Assigned::Continue {
next_dest: next_value,
same_value: same_src,
} => {
value = same_src;
dest = next_value;
ptr = tail;
}
}
offset += 1 + tok_len;
position += 1;
}
// Pointer is root, we can replace `dest` directly
let replaced = Some(core::mem::replace(dest, value));
Ok(replaced)
}
/// Handles a single assignment step when the current destination is a JSON
/// array.
///
/// The token must parse as an index no greater than the array's length
/// (`-` selects the slot one past the end). `position` and `offset` locate
/// the token within the pointer for error reporting.
#[allow(clippy::needless_pass_by_value)]
fn assign_array<'v>(
    token: Token<'_>,
    remaining: &Pointer,
    array: &'v mut Vec<Value>,
    src: Value,
    position: usize,
    offset: usize,
) -> Result<Assigned<'v, Value>, Error> {
    // parsing the index
    let idx = token
        .to_index()
        .map_err(|source| Error::FailedToParseIndex {
            position,
            offset,
            source,
        })?
        .for_len_incl(array.len())
        .map_err(|source| Error::OutOfBounds {
            position,
            offset,
            source,
        })?;
    debug_assert!(idx <= array.len());
    if idx < array.len() {
        // element exists in the array, we either need to replace it or continue
        // depending on whether this is the last token or not
        if remaining.is_root() {
            // last token, we replace the value and call it a day
            Ok(Assigned::Done(Some(mem::replace(&mut array[idx], src))))
        } else {
            // not the last token, we continue with a mut ref to the element as
            // the next value
            Ok(Assigned::Continue {
                next_dest: &mut array[idx],
                same_value: src,
            })
        }
    } else {
        // element does not exist in the array (idx == len, i.e. the `-` or
        // next-index case): we create the remaining path and append the value
        let src = expand(remaining, src);
        array.push(src);
        Ok(Assigned::Done(None))
    }
}
/// Handles a single assignment step when the current destination is a JSON
/// object: the token is treated as a key, which is replaced, descended into,
/// or created depending on whether it exists and whether tokens remain.
#[allow(clippy::needless_pass_by_value)]
fn assign_object<'v>(
    token: Token<'_>,
    remaining: &Pointer,
    obj: &'v mut Map<String, Value>,
    src: Value,
) -> Assigned<'v, Value> {
    // Look up (or create) the slot named by this token.
    match obj.entry(token.to_string()) {
        Entry::Occupied(occupied) => {
            let slot = occupied.into_mut();
            if remaining.is_root() {
                // Final token: swap in the new value and report the old one.
                Assigned::Done(Some(mem::replace(slot, src)))
            } else {
                // More tokens remain: descend into the existing value.
                Assigned::Continue {
                    next_dest: slot,
                    same_value: src,
                }
            }
        }
        Entry::Vacant(vacant) => {
            // No such key: materialize the rest of the path around `src` and
            // insert the result.
            vacant.insert(expand(remaining, src));
            Assigned::Done(None)
        }
    }
}
/// Replaces a non-container value with whatever structure the remaining
/// pointer (current token included) describes, returning the old scalar.
fn assign_scalar<'v>(
    remaining: &Pointer,
    scalar: &'v mut Value,
    value: Value,
) -> Assigned<'v, Value> {
    Assigned::Done(Some(mem::replace(scalar, expand(remaining, value))))
}
}
#[cfg(feature = "toml")]
mod toml {
use super::{Assign, Assigned, Error};
use crate::{Pointer, Token};
use alloc::{string::String, vec, vec::Vec};
use core::mem;
use toml::{map::Entry, map::Map, Value};
/// Builds the nested TOML structure described by `remaining`, wrapping
/// `value` so that it ends up at the path's leaf: working backwards from the
/// last token, `"0"`/`"-"` tokens become single-element arrays and any other
/// token a single-entry table.
fn expand(mut remaining: &Pointer, mut value: Value) -> Value {
    while let Some((ptr, tok)) = remaining.split_back() {
        remaining = ptr;
        match tok.encoded() {
            "0" | "-" => {
                value = Value::Array(vec![value]);
            }
            _ => {
                let mut obj = Map::new();
                obj.insert(tok.to_string(), value);
                value = Value::Table(obj);
            }
        }
    }
    value
}
impl Assign for Value {
    type Value = Value;
    type Error = Error;
    /// Assigns `value` at the location `ptr` points to within `self`,
    /// creating intermediate tables/arrays as needed, and returns the value
    /// that was replaced, if any.
    fn assign<V>(&mut self, ptr: &Pointer, value: V) -> Result<Option<Self::Value>, Self::Error>
    where
        V: Into<Self::Value>,
    {
        assign_value(ptr, self, value.into())
    }
}
/// Walks `ptr` token by token, descending into `dest` and delegating each
/// step to the handler matching the kind of value encountered.
///
/// Tracks `offset` (byte offset of the current token within the pointer) and
/// `position` (zero-based token index) so that errors can report where in the
/// pointer they occurred.
pub(crate) fn assign_value(
    mut ptr: &Pointer,
    mut dest: &mut Value,
    mut value: Value,
) -> Result<Option<Value>, Error> {
    let mut offset = 0;
    let mut position = 0;
    while let Some((token, tail)) = ptr.split_front() {
        let tok_len = token.encoded().len();
        let assigned = match dest {
            Value::Array(array) => assign_array(token, tail, array, value, position, offset)?,
            Value::Table(tbl) => assign_object(token, tail, tbl, value),
            // Scalars receive the full remaining pointer (current token
            // included) because the scalar itself is replaced wholesale.
            _ => assign_scalar(ptr, dest, value),
        };
        match assigned {
            Assigned::Done(assignment) => {
                return Ok(assignment);
            }
            Assigned::Continue {
                next_dest: next_value,
                same_value: same_src,
            } => {
                value = same_src;
                dest = next_value;
                ptr = tail;
            }
        }
        // +1 accounts for the `/` separator preceding each token.
        offset += 1 + tok_len;
        position += 1;
    }
    // Pointer is root, we can replace `dest` directly
    let replaced = Some(mem::replace(dest, value));
    Ok(replaced)
}
/// Handles a single assignment step when the current destination is a TOML
/// array.
///
/// The token must parse as an index no greater than the array's length
/// (`-` selects the slot one past the end). `position` and `offset` locate
/// the token within the pointer for error reporting.
#[allow(clippy::needless_pass_by_value)]
fn assign_array<'v>(
    token: Token<'_>,
    remaining: &Pointer,
    array: &'v mut Vec<Value>,
    src: Value,
    position: usize,
    offset: usize,
) -> Result<Assigned<'v, Value>, Error> {
    // parsing the index
    let idx = token
        .to_index()
        .map_err(|source| Error::FailedToParseIndex {
            position,
            offset,
            source,
        })?
        .for_len_incl(array.len())
        .map_err(|source| Error::OutOfBounds {
            position,
            offset,
            source,
        })?;
    debug_assert!(idx <= array.len());
    if idx < array.len() {
        // element exists in the array, we either need to replace it or continue
        // depending on whether this is the last token or not
        if remaining.is_root() {
            // last token, we replace the value and call it a day
            Ok(Assigned::Done(Some(mem::replace(&mut array[idx], src))))
        } else {
            // not the last token, we continue with a mut ref to the element as
            // the next value
            Ok(Assigned::Continue {
                next_dest: &mut array[idx],
                same_value: src,
            })
        }
    } else {
        // element does not exist in the array (idx == len, i.e. the `-` or
        // next-index case): we create the remaining path and append the value
        let src = expand(remaining, src);
        array.push(src);
        Ok(Assigned::Done(None))
    }
}
/// Handles a single assignment step when the current destination is a TOML
/// table: the token is treated as a key, which is replaced, descended into,
/// or created depending on whether it exists and whether tokens remain.
#[allow(clippy::needless_pass_by_value)]
fn assign_object<'v>(
    token: Token<'_>,
    remaining: &Pointer,
    obj: &'v mut Map<String, Value>,
    src: Value,
) -> Assigned<'v, Value> {
    // grabbing the entry of the token
    match obj.entry(token.to_string()) {
        Entry::Occupied(entry) => {
            // if the entry exists, we either replace it or continue
            let entry = entry.into_mut();
            if remaining.is_root() {
                // if this is the last token, we are done
                // grab the old value and replace it with the new one
                Assigned::Done(Some(mem::replace(entry, src)))
            } else {
                // if this is not the last token, we continue with a mutable
                // reference to the entry as the next value
                Assigned::Continue {
                    same_value: src,
                    next_dest: entry,
                }
            }
        }
        Entry::Vacant(entry) => {
            // if the entry does not exist, we create a value based on the
            // remaining path with the src value as a leaf and assign it to the
            // entry
            entry.insert(expand(remaining, src));
            Assigned::Done(None)
        }
    }
}
/// Replaces a non-container value with whatever structure the remaining
/// pointer (current token included) describes, returning the old scalar.
fn assign_scalar<'v>(
    remaining: &Pointer,
    scalar: &'v mut Value,
    value: Value,
) -> Assigned<'v, Value> {
    // Build the replacement structure first, then swap it in, handing back
    // the previous scalar.
    let replacement = expand(remaining, value);
    let previous = mem::replace(scalar, replacement);
    Assigned::Done(Some(previous))
}
}
/// Outcome of a single assignment step.
enum Assigned<'v, V> {
    /// Assignment finished; holds the value that was replaced, if any.
    Done(Option<V>),
    /// Descend further: `next_dest` is the container element to recurse into
    /// and `same_value` carries the value still to be assigned.
    Continue { next_dest: &'v mut V, same_value: V },
}
#[cfg(test)]
#[allow(clippy::too_many_lines)]
mod tests {
    use super::{Assign, Error};
    use crate::{
        index::{InvalidCharacterError, OutOfBoundsError, ParseIndexError},
        Pointer,
    };
    use alloc::vec;
    use core::fmt::{Debug, Display};

    /// One table-driven assignment case: assigning `assign` at `ptr` within
    /// `data` must return `expected` and leave `data` equal to
    /// `expected_data`.
    #[derive(Debug)]
    struct Test<V: Assign> {
        data: V,
        ptr: &'static str,
        assign: V,
        expected_data: V,
        expected: Result<Option<V>, V::Error>,
    }
    impl<V> Test<V>
    where
        V: Assign + Clone + PartialEq + Display + Debug,
        V::Value: Debug + PartialEq + From<V>,
        V::Error: Debug + PartialEq,
        Result<Option<V>, V::Error>: PartialEq<Result<Option<V::Value>, V::Error>>,
    {
        /// Executes the case; `i` is the case index reported on failure.
        fn run(self, i: usize) {
            let Test {
                ptr,
                mut data,
                assign,
                expected_data,
                expected,
                ..
            } = self;
            let ptr = Pointer::from_static(ptr);
            let replaced = ptr.assign(&mut data, assign.clone());
            assert_eq!(
                &expected_data, &data,
                "test #{i}:\n\ndata: \n{data:#?}\n\nexpected_data\n{expected_data:#?}"
            );
            assert_eq!(&expected, &replaced);
        }
    }
    #[test]
    #[cfg(feature = "json")]
    fn assign_json() {
        use serde_json::json;
        [
            // 0
            Test {
                ptr: "/foo",
                data: json!({}),
                assign: json!("bar"),
                expected_data: json!({"foo": "bar"}),
                expected: Ok(None),
            },
            // 1: root pointer replaces the whole document
            Test {
                ptr: "",
                data: json!({"foo": "bar"}),
                assign: json!("baz"),
                expected_data: json!("baz"),
                expected: Ok(Some(json!({"foo": "bar"}))),
            },
            // 2
            Test {
                ptr: "/foo",
                data: json!({"foo": "bar"}),
                assign: json!("baz"),
                expected_data: json!({"foo": "baz"}),
                expected: Ok(Some(json!("bar"))),
            },
            // 3: scalar replaced by the expanded remaining path
            Test {
                ptr: "/foo/bar",
                data: json!({"foo": "bar"}),
                assign: json!("baz"),
                expected_data: json!({"foo": {"bar": "baz"}}),
                expected: Ok(Some(json!("bar"))),
            },
            // 4
            Test {
                ptr: "/foo/bar",
                data: json!({}),
                assign: json!("baz"),
                expected_data: json!({"foo": {"bar": "baz"}}),
                expected: Ok(None),
            },
            // 5: empty token is a valid object key
            Test {
                ptr: "/",
                data: json!({}),
                assign: json!("foo"),
                expected_data: json!({"": "foo"}),
                expected: Ok(None),
            },
            // 6: "-" is an ordinary key on objects
            Test {
                ptr: "/-",
                data: json!({}),
                assign: json!("foo"),
                expected_data: json!({"-": "foo"}),
                expected: Ok(None),
            },
            // 7: "-" on a scalar expands to an array
            Test {
                ptr: "/-",
                data: json!(null),
                assign: json!(34),
                expected_data: json!([34]),
                expected: Ok(Some(json!(null))),
            },
            // 8
            Test {
                ptr: "/foo/-",
                data: json!({"foo": "bar"}),
                assign: json!("baz"),
                expected_data: json!({"foo": ["baz"]}),
                expected: Ok(Some(json!("bar"))),
            },
            // 9
            Test {
                ptr: "/foo/-/bar",
                assign: "baz".into(),
                data: json!({}),
                expected: Ok(None),
                expected_data: json!({"foo":[{"bar": "baz"}]}),
            },
            // 10: "-" appends to an existing array
            Test {
                ptr: "/foo/-/bar",
                assign: "qux".into(),
                data: json!({"foo":[{"bar":"baz" }]}),
                expected: Ok(None),
                expected_data: json!({"foo":[{"bar":"baz"},{"bar":"qux"}]}),
            },
            // 11
            Test {
                ptr: "/foo/-/bar",
                data: json!({"foo":[{"bar":"baz"},{"bar":"qux"}]}),
                assign: "quux".into(),
                expected: Ok(None),
                expected_data: json!({"foo":[{"bar":"baz"},{"bar":"qux"},{"bar":"quux"}]}),
            },
            // 12
            Test {
                ptr: "/foo/0/bar",
                data: json!({"foo":[{"bar":"baz"},{"bar":"qux"},{"bar":"quux"}]}),
                assign: "grault".into(),
                expected: Ok(Some("baz".into())),
                expected_data: json!({"foo":[{"bar":"grault"},{"bar":"qux"},{"bar":"quux"}]}),
            },
            // 13: numeric token acts as a key on objects
            Test {
                ptr: "/0",
                data: json!({}),
                assign: json!("foo"),
                expected_data: json!({"0": "foo"}),
                expected: Ok(None),
            },
            // 14: "1" on a scalar becomes an object key, not an index
            Test {
                ptr: "/1",
                data: json!(null),
                assign: json!("foo"),
                expected_data: json!({"1": "foo"}),
                expected: Ok(Some(json!(null))),
            },
            // 15
            Test {
                ptr: "/0",
                data: json!([]),
                expected_data: json!(["foo"]),
                assign: json!("foo"),
                expected: Ok(None),
            },
            // 16: consecutive empty tokens
            Test {
                ptr: "///bar",
                data: json!({"":{"":{"bar": 42}}}),
                assign: json!(34),
                expected_data: json!({"":{"":{"bar":34}}}),
                expected: Ok(Some(json!(42))),
            },
            // 17: index past the end of the array
            Test {
                ptr: "/1",
                data: json!([]),
                assign: json!("foo"),
                expected: Err(Error::OutOfBounds {
                    position: 0,
                    offset: 0,
                    source: OutOfBoundsError {
                        index: 1,
                        length: 0,
                    },
                }),
                expected_data: json!([]),
            },
            // 18
            Test {
                ptr: "/0",
                data: json!(["foo"]),
                assign: json!("bar"),
                expected: Ok(Some(json!("foo"))),
                expected_data: json!(["bar"]),
            },
            // 19: trailing non-digit in an array index
            Test {
                ptr: "/12a",
                data: json!([]),
                assign: json!("foo"),
                expected: Err(Error::FailedToParseIndex {
                    position: 0,
                    offset: 0,
                    source: ParseIndexError::InvalidCharacter(InvalidCharacterError {
                        source: "12a".into(),
                        offset: 2,
                    }),
                }),
                expected_data: json!([]),
            },
            // 20: leading zeros are rejected
            Test {
                ptr: "/002",
                data: json!([]),
                assign: json!("foo"),
                expected: Err(Error::FailedToParseIndex {
                    position: 0,
                    offset: 0,
                    source: ParseIndexError::LeadingZeros,
                }),
                expected_data: json!([]),
            },
            // 21: explicit '+' sign is rejected
            Test {
                ptr: "/+23",
                data: json!([]),
                assign: json!("foo"),
                expected: Err(Error::FailedToParseIndex {
                    position: 0,
                    offset: 0,
                    source: ParseIndexError::InvalidCharacter(InvalidCharacterError {
                        source: "+23".into(),
                        offset: 0,
                    }),
                }),
                expected_data: json!([]),
            },
        ]
        .into_iter()
        .enumerate()
        .for_each(|(i, t)| t.run(i));
    }
    #[test]
    #[cfg(feature = "toml")]
    fn assign_toml() {
        use toml::{toml, Table, Value};
        [
            // 0
            Test {
                data: Value::Table(toml::Table::new()),
                ptr: "/foo",
                assign: "bar".into(),
                expected_data: toml! { "foo" = "bar" }.into(),
                expected: Ok(None),
            },
            // 1: root pointer replaces the whole document
            Test {
                data: toml! {foo = "bar"}.into(),
                ptr: "",
                assign: "baz".into(),
                expected_data: "baz".into(),
                expected: Ok(Some(toml! {foo = "bar"}.into())),
            },
            // 2
            Test {
                data: toml! { foo = "bar"}.into(),
                ptr: "/foo",
                assign: "baz".into(),
                expected_data: toml! {foo = "baz"}.into(),
                expected: Ok(Some("bar".into())),
            },
            // 3: scalar replaced by the expanded remaining path
            Test {
                data: toml! { foo = "bar"}.into(),
                ptr: "/foo/bar",
                assign: "baz".into(),
                expected_data: toml! {foo = { bar = "baz"}}.into(),
                expected: Ok(Some("bar".into())),
            },
            // 4: empty token is a valid table key
            Test {
                data: Table::new().into(),
                ptr: "/",
                assign: "foo".into(),
                expected_data: toml! {"" = "foo"}.into(),
                expected: Ok(None),
            },
            // 5: "-" is an ordinary key on tables
            Test {
                data: Table::new().into(),
                ptr: "/-",
                assign: "foo".into(),
                expected_data: toml! {"-" = "foo"}.into(),
                expected: Ok(None),
            },
            // 6: "-" on a scalar expands to an array
            Test {
                data: "data".into(),
                ptr: "/-",
                assign: 34.into(),
                expected_data: Value::Array(vec![34.into()]),
                expected: Ok(Some("data".into())),
            },
            // 7
            Test {
                data: toml! {foo = "bar"}.into(),
                ptr: "/foo/-",
                assign: "baz".into(),
                expected_data: toml! {foo = ["baz"]}.into(),
                expected: Ok(Some("bar".into())),
            },
            // 8: numeric token acts as a key on tables
            Test {
                data: Table::new().into(),
                ptr: "/0",
                assign: "foo".into(),
                expected_data: toml! {"0" = "foo"}.into(),
                expected: Ok(None),
            },
            // 9
            Test {
                data: 21.into(),
                ptr: "/1",
                assign: "foo".into(),
                expected_data: toml! {"1" = "foo"}.into(),
                expected: Ok(Some(21.into())),
            },
            // 10
            Test {
                data: Value::Array(vec![]),
                ptr: "/0",
                expected_data: vec![Value::from("foo")].into(),
                assign: "foo".into(),
                expected: Ok(None),
            },
            // 11
            Test {
                ptr: "/foo/-/bar",
                assign: "baz".into(),
                data: Table::new().into(),
                expected: Ok(None),
                expected_data: toml! { "foo" = [{"bar" = "baz"}] }.into(),
            },
            // 12: "-" appends to an existing array
            Test {
                ptr: "/foo/-/bar",
                assign: "qux".into(),
                data: toml! {"foo" = [{"bar" = "baz"}] }.into(),
                expected: Ok(None),
                expected_data: toml! {"foo" = [{"bar" = "baz"}, {"bar" = "qux"}]}.into(),
            },
            // 13
            Test {
                ptr: "/foo/-/bar",
                data: toml! {"foo" = [{"bar" = "baz"}, {"bar" = "qux"}]}.into(),
                assign: "quux".into(),
                expected: Ok(None),
                expected_data: toml! {"foo" = [{"bar" = "baz"}, {"bar" = "qux"}, {"bar" = "quux"}]}
                    .into(),
            },
            // 14
            Test {
                ptr: "/foo/0/bar",
                data: toml! {"foo" = [{"bar" = "baz"}, {"bar" = "qux"}, {"bar" = "quux"}]}.into(),
                assign: "grault".into(),
                expected: Ok(Some("baz".into())),
                expected_data:
                    toml! {"foo" = [{"bar" = "grault"}, {"bar" = "qux"}, {"bar" = "quux"}]}.into(),
            },
            // 15: "-" appends to an empty array
            Test {
                data: Value::Array(vec![]),
                ptr: "/-",
                assign: "foo".into(),
                expected: Ok(None),
                expected_data: vec!["foo"].into(),
            },
            // 16: index past the end of the array
            Test {
                data: Value::Array(vec![]),
                ptr: "/1",
                assign: "foo".into(),
                expected: Err(Error::OutOfBounds {
                    position: 0,
                    offset: 0,
                    source: OutOfBoundsError {
                        index: 1,
                        length: 0,
                    },
                }),
                expected_data: Value::Array(vec![]),
            },
            // 17: non-numeric token on an array
            Test {
                data: Value::Array(vec![]),
                ptr: "/a",
                assign: "foo".into(),
                expected: Err(Error::FailedToParseIndex {
                    position: 0,
                    offset: 0,
                    source: ParseIndexError::InvalidCharacter(InvalidCharacterError {
                        source: "a".into(),
                        offset: 0,
                    }),
                }),
                expected_data: Value::Array(vec![]),
            },
        ]
        .into_iter()
        .enumerate()
        .for_each(|(i, t)| t.run(i));
    }
}

75
vendor/jsonptr/src/component.rs vendored Normal file
View File

@@ -0,0 +1,75 @@
use crate::{Pointer, Token, Tokens};
/// A single [`Token`] or the root of a JSON Pointer
///
/// Note: the derived ordering follows variant declaration order, so `Root`
/// compares less than any `Token`.
#[derive(Debug, PartialEq, Eq, PartialOrd, Ord)]
pub enum Component<'t> {
    /// The document root
    Root,
    /// A segment of a JSON Pointer
    Token(Token<'t>),
}
impl<'t> From<Token<'t>> for Component<'t> {
    /// Wraps a token as a non-root pointer component.
    fn from(token: Token<'t>) -> Self {
        Self::Token(token)
    }
}
/// An iterator over the [`Component`]s of a JSON Pointer
#[derive(Debug)]
pub struct Components<'t> {
    // Remaining pointer tokens, consumed after the root has been emitted.
    tokens: Tokens<'t>,
    // Whether `Component::Root` has been yielded yet.
    sent_root: bool,
}
impl<'t> Iterator for Components<'t> {
    type Item = Component<'t>;

    /// Yields [`Component::Root`] exactly once, then each pointer token in
    /// order.
    fn next(&mut self) -> Option<Self::Item> {
        if self.sent_root {
            self.tokens.next().map(Component::Token)
        } else {
            self.sent_root = true;
            Some(Component::Root)
        }
    }
}
impl<'t> From<&'t Pointer> for Components<'t> {
    /// Creates a component iterator positioned before the root marker.
    fn from(pointer: &'t Pointer) -> Self {
        Self {
            sent_root: false,
            tokens: pointer.tokens(),
        }
    }
}
#[cfg(test)]
mod tests {
    use super::*;

    /// Verifies that `Components` always yields `Root` first, followed by
    /// one `Token` per pointer segment, for empty, single-token, and
    /// multi-token pointers.
    #[test]
    fn components() {
        let ptr = Pointer::from_static("");
        let components: Vec<_> = Components::from(ptr).collect();
        assert_eq!(components, vec![Component::Root]);
        let ptr = Pointer::from_static("/foo");
        let components = ptr.components().collect::<Vec<_>>();
        assert_eq!(
            components,
            vec![Component::Root, Component::Token("foo".into())]
        );
        let ptr = Pointer::from_static("/foo/bar/-/0/baz");
        let components = ptr.components().collect::<Vec<_>>();
        assert_eq!(
            components,
            vec![
                Component::Root,
                Component::from(Token::from("foo")),
                Component::Token("bar".into()),
                Component::Token("-".into()),
                Component::Token("0".into()),
                Component::Token("baz".into())
            ]
        );
    }
}

344
vendor/jsonptr/src/delete.rs vendored Normal file
View File

@@ -0,0 +1,344 @@
//! # Delete values based on JSON Pointers
//!
//! This module provides the [`Delete`] trait which is implemented by types that
//! can internally remove a value based on a JSON Pointer.
//!
//! The rules of deletion are determined by the implementation, with the
//! provided implementations (`"json"` & `"toml"`) operating as follows:
//! - If the [`Pointer`] can be resolved, then the [`Value`](`Delete::Value`) is
//! deleted and returned as `Some(value)`.
//! - If the [`Pointer`] fails to resolve for any reason, `Ok(None)` is
//! returned.
//! - If the [`Pointer`] is root, `value` is replaced:
//! - `"json"` - `serde_json::Value::Null`
//! - `"toml"` - an empty `toml::Table` (`Table::default()`)
//!
//! This module is enabled by default with the `"delete"` feature flag.
//!
//! ## Usage
//! Deleting a resolved pointer:
//! ```rust
//! use jsonptr::{Pointer, delete::Delete};
//! use serde_json::json;
//!
//! let mut data = json!({ "foo": { "bar": { "baz": "qux" } } });
//! let ptr = Pointer::from_static("/foo/bar/baz");
//! assert_eq!(data.delete(&ptr), Some("qux".into()));
//! assert_eq!(data, json!({ "foo": { "bar": {} } }));
//! ```
//! Deleting a non-existent Pointer returns `None`:
//! ```rust
//! use jsonptr::{ Pointer, delete::Delete };
//! use serde_json::json;
//!
//! let mut data = json!({});
//! let ptr = Pointer::from_static("/foo/bar/baz");
//! assert_eq!(ptr.delete(&mut data), None);
//! assert_eq!(data, json!({}));
//! ```
//! Deleting a root pointer replaces the value with `Value::Null`:
//! ```rust
//! use jsonptr::{Pointer, delete::Delete};
//! use serde_json::json;
//!
//! let mut data = json!({ "foo": { "bar": "baz" } });
//! let ptr = Pointer::root();
//! assert_eq!(data.delete(&ptr), Some(json!({ "foo": { "bar": "baz" } })));
//! assert!(data.is_null());
//! ```
//!
//! ## Provided implementations
//!
//! | Lang | value type | feature flag | Default |
//! | ----- | :-----------------: | :----------: | ------- |
//! | JSON | `serde_json::Value` | `"json"` | ✓ |
//! | TOML | `toml::Value` | `"toml"` | |
use crate::Pointer;
/*
░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░
╔══════════════════════════════════════════════════════════════════════════════╗
║ ║
║ Delete ║
║ ¯¯¯¯¯¯¯¯ ║
╚══════════════════════════════════════════════════════════════════════════════╝
░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░
*/
/// Delete is implemented by types which can internally remove a value based on
/// a JSON Pointer
pub trait Delete {
    /// The type of value that this implementation can operate on.
    type Value;
    /// Attempts to internally delete a value based upon a [Pointer].
    ///
    /// Returns the removed value, or `None` if the pointer did not resolve.
    fn delete(&mut self, ptr: &Pointer) -> Option<Self::Value>;
}
/*
░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░
╔══════════════════════════════════════════════════════════════════════════════╗
║ ║
║ json impl ║
║ ¯¯¯¯¯¯¯¯¯¯¯ ║
╚══════════════════════════════════════════════════════════════════════════════╝
░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░
*/
#[cfg(feature = "json")]
mod json {
    use super::Delete;
    use crate::Pointer;
    use core::mem;
    use serde_json::Value;
    impl Delete for Value {
        type Value = Value;
        /// Removes and returns the value addressed by `ptr`.
        ///
        /// Returns `None` when the pointer does not resolve. Deleting the
        /// root replaces the document with `Value::Null` and returns the
        /// previous document.
        fn delete(&mut self, ptr: &Pointer) -> Option<Self::Value> {
            let Some((parent_ptr, last)) = ptr.split_back() else {
                // deleting at root
                return Some(mem::replace(self, Value::Null));
            };
            parent_ptr
                .resolve_mut(self)
                .ok()
                .and_then(|parent| match parent {
                    Value::Array(children) => {
                        let idx = last.to_index().ok()?.for_len_incl(children.len()).ok()?;
                        // `for_len_incl` admits `idx == len` (e.g. the `-`
                        // token); `Vec::remove` would panic there, so treat
                        // it as an unresolvable pointer instead, matching the
                        // module contract of returning `None` on failure.
                        if idx < children.len() {
                            Some(children.remove(idx))
                        } else {
                            None
                        }
                    }
                    Value::Object(children) => children.remove(last.decoded().as_ref()),
                    _ => None,
                })
        }
    }
}
/*
░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░
╔══════════════════════════════════════════════════════════════════════════════╗
║ ║
║ toml impl ║
║ ¯¯¯¯¯¯¯¯¯¯¯ ║
╚══════════════════════════════════════════════════════════════════════════════╝
░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░
*/
#[cfg(feature = "toml")]
mod toml {
    use super::Delete;
    use crate::Pointer;
    use core::mem;
    use toml::{Table, Value};
    impl Delete for Value {
        type Value = Value;
        /// Removes and returns the value addressed by `ptr`.
        ///
        /// Returns `None` when the pointer does not resolve. Deleting the
        /// root replaces the document with an empty `Table` and returns the
        /// previous document.
        fn delete(&mut self, ptr: &Pointer) -> Option<Self::Value> {
            let Some((parent_ptr, last)) = ptr.split_back() else {
                // deleting at root
                return Some(mem::replace(self, Table::default().into()));
            };
            parent_ptr
                .resolve_mut(self)
                .ok()
                .and_then(|parent| match parent {
                    Value::Array(children) => {
                        let idx = last.to_index().ok()?.for_len_incl(children.len()).ok()?;
                        // `for_len_incl` admits `idx == len` (e.g. the `-`
                        // token); `Vec::remove` would panic there, so treat
                        // it as an unresolvable pointer instead, matching the
                        // module contract of returning `None` on failure.
                        if idx < children.len() {
                            Some(children.remove(idx))
                        } else {
                            None
                        }
                    }
                    Value::Table(children) => children.remove(last.decoded().as_ref()),
                    _ => None,
                })
        }
    }
}
/*
░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░
╔══════════════════════════════════════════════════════════════════════════════╗
║ ║
║ Tests ║
║ ¯¯¯¯¯¯¯ ║
╚══════════════════════════════════════════════════════════════════════════════╝
░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░
*/
#[cfg(test)]
mod tests {
    use super::Delete;
    use crate::Pointer;
    use core::fmt;
    use serde_json::json;

    /// One table-driven deletion case: deleting `ptr` from `data` must
    /// return `expected_deleted` and leave `data` equal to `expected_data`.
    struct Test<V> {
        data: V,
        ptr: &'static str,
        expected_data: V,
        expected_deleted: Option<V>,
    }
    impl<V> Test<V>
    where
        V: Delete<Value = V> + Clone + PartialEq + fmt::Display + fmt::Debug,
    {
        /// Runs every case in order, passing its index along.
        fn all(tests: impl IntoIterator<Item = Test<V>>) {
            tests.into_iter().enumerate().for_each(|(i, t)| t.run(i));
        }
        /// Executes a single case.
        fn run(self, _i: usize) {
            let Test {
                mut data,
                ptr,
                expected_data,
                expected_deleted,
            } = self;
            let ptr = Pointer::from_static(ptr);
            let deleted = ptr.delete(&mut data);
            assert_eq!(expected_data, data);
            assert_eq!(expected_deleted, deleted);
        }
    }
    /*
    ╔═══════════════════════════════════════════════════╗
    ║                       json                        ║
    ╚═══════════════════════════════════════════════════╝
    */
    #[test]
    #[cfg(feature = "json")]
    fn delete_json() {
        Test::all([
            // 0
            Test {
                ptr: "/foo",
                data: json!({"foo": "bar"}),
                expected_data: json!({}),
                expected_deleted: Some(json!("bar")),
            },
            // 1
            Test {
                ptr: "/foo/bar",
                data: json!({"foo": {"bar": "baz"}}),
                expected_data: json!({"foo": {}}),
                expected_deleted: Some(json!("baz")),
            },
            // 2
            Test {
                ptr: "/foo/bar",
                data: json!({"foo": "bar"}),
                expected_data: json!({"foo": "bar"}),
                expected_deleted: None,
            },
            // 3
            Test {
                ptr: "/foo/bar",
                data: json!({"foo": {"bar": "baz"}}),
                expected_data: json!({"foo": {}}),
                expected_deleted: Some(json!("baz")),
            },
            // 4
            Test {
                ptr: "/foo/bar/0",
                data: json!({"foo": {"bar": ["baz", "qux"]}}),
                expected_data: json!({"foo": {"bar": ["qux"]}}),
                expected_deleted: Some(json!("baz")),
            },
            // 5
            Test {
                ptr: "/foo/0",
                data: json!({"foo": "bar"}),
                expected_data: json!({"foo": "bar"}),
                expected_deleted: None,
            },
            // 6
            Test {
                ptr: "/foo/bar/0/baz",
                data: json!({"foo": { "bar": [{"baz": "qux", "remaining": "field"}]}}),
                expected_data: json!({"foo": { "bar": [{"remaining": "field"}]} }),
                expected_deleted: Some(json!("qux")),
            },
            // 7
            // issue #18 - unable to delete root token https://github.com/chanced/jsonptr/issues/18
            Test {
                ptr: "/Example",
                data: json!({"Example": 21, "test": "test"}),
                expected_data: json!({"test": "test"}),
                expected_deleted: Some(json!(21)),
            },
            // 8: deleting the root replaces the document with null
            Test {
                ptr: "",
                data: json!({"Example": 21, "test": "test"}),
                expected_data: json!(null),
                expected_deleted: Some(json!({"Example": 21, "test": "test"})),
            },
        ]);
    }
    /*
    ╔═══════════════════════════════════════════════════╗
    ║                       toml                        ║
    ╚═══════════════════════════════════════════════════╝
    */
    #[test]
    #[cfg(feature = "toml")]
    fn delete_toml() {
        use toml::{toml, Table, Value};
        Test::all([
            // 0
            Test {
                data: toml! {"foo" = "bar"}.into(),
                ptr: "/foo",
                expected_data: Value::Table(Table::new()),
                expected_deleted: Some("bar".into()),
            },
            // 1
            Test {
                data: toml! {"foo" = {"bar" = "baz"}}.into(),
                ptr: "/foo/bar",
                expected_data: toml! {"foo" = {}}.into(),
                expected_deleted: Some("baz".into()),
            },
            // 2
            Test {
                data: toml! {"foo" = "bar"}.into(),
                ptr: "/foo/bar",
                expected_data: toml! {"foo" = "bar"}.into(),
                expected_deleted: None,
            },
            // 3
            Test {
                data: toml! {"foo" = {"bar" = "baz"}}.into(),
                ptr: "/foo/bar",
                expected_data: toml! {"foo" = {}}.into(),
                expected_deleted: Some("baz".into()),
            },
            // 4
            Test {
                data: toml! {"foo" = {"bar" = ["baz", "qux"]}}.into(),
                ptr: "/foo/bar/0",
                expected_data: toml! {"foo" = {"bar" = ["qux"]}}.into(),
                expected_deleted: Some("baz".into()),
            },
            // 5
            Test {
                data: toml! {"foo" = "bar"}.into(),
                ptr: "/foo/0",
                expected_data: toml! {"foo" = "bar"}.into(),
                expected_deleted: None,
            },
            // 6
            Test {
                data: toml! {"foo" = { "bar" = [{"baz" = "qux", "remaining" = "field"}]}}.into(),
                ptr: "/foo/bar/0/baz",
                expected_data: toml! {"foo" = { "bar" = [{"remaining" = "field"}]} }.into(),
                expected_deleted: Some("qux".into()),
            },
            // 7
            // issue #18 - unable to delete root token https://github.com/chanced/jsonptr/issues/18
            Test {
                data: toml! {"Example" = 21 "test" = "test"}.into(),
                ptr: "/Example",
                expected_data: toml! {"test" = "test"}.into(),
                expected_deleted: Some(21.into()),
            },
        ]);
    }
}

280
vendor/jsonptr/src/diagnostic.rs vendored Normal file
View File

@@ -0,0 +1,280 @@
//! Error reporting data structures and miette integration.
//!
use alloc::{boxed::Box, string::String};
use core::{fmt, ops::Deref};
/// Implemented by errors which can be converted into a [`Report`].
pub trait Diagnostic: Sized {
    /// The value which caused the error.
    type Subject: Deref;
    /// Combine the error with its subject to generate a [`Report`].
    fn into_report(self, subject: impl Into<Self::Subject>) -> Report<Self> {
        Report::new(self, subject.into())
    }
    /// The docs.rs URL for this error
    fn url() -> &'static str;
    /// Returns the labels for the given [`Self::Subject`], if applicable.
    fn labels(&self, subject: &Self::Subject) -> Option<Box<dyn Iterator<Item = Label>>>;
}
/// A label for a span within a json pointer or malformed string.
#[derive(Debug, PartialEq, Eq, Clone)]
pub struct Label {
    // Human-readable description of the span.
    text: String,
    // Byte offset of the span within the subject.
    offset: usize,
    // Byte length of the span.
    len: usize,
}
impl Label {
    /// Creates a new instance of a [`Label`] from its parts
    ///
    /// `offset` and `len` are byte positions within the subject string.
    pub fn new(text: String, offset: usize, len: usize) -> Self {
        Self { text, offset, len }
    }
}
#[cfg(feature = "miette")]
impl From<Label> for miette::LabeledSpan {
    /// Converts a [`Label`] into miette's labeled-span representation.
    fn from(value: Label) -> Self {
        miette::LabeledSpan::new(Some(value.text), value.offset, value.len)
    }
}
/// An enriched error wrapper which captures the original error and the subject
/// (`String` or `PointerBuf`) which caused it, for reporting purposes.
///
/// This type serves two roles:
///
/// 1. **[`PointerBuf::parse`]**: Captures the [`ParseError`] along with the
///    input `String`.
///
/// 2. **Reporting:** Provides enriched reporting capabilities, including
///    (optional) `miette` integration, for `ParseError` and associated errors
///    of `assign::Assign` and `resolve::Resolve` implementations
#[derive(Debug, Clone)]
pub struct Report<T: Diagnostic> {
    // The original error.
    source: T,
    // The input which produced the error.
    subject: T::Subject,
}
impl<T: Diagnostic> Report<T> {
    // Private constructor; reports are built via `Diagnostic::into_report`.
    fn new(source: T, subject: T::Subject) -> Self {
        Self { source, subject }
    }
    /// The value which caused the error.
    pub fn subject(&self) -> &<T::Subject as Deref>::Target {
        &self.subject
    }
    /// The error which occurred.
    pub fn original(&self) -> &T {
        &self.source
    }
    /// The original parts of the [`Report`].
    pub fn decompose(self) -> (T, T::Subject) {
        (self.source, self.subject)
    }
    /// Consumes the [`Report`] and returns the original error `T`.
    pub fn into_original(self) -> T {
        self.source
    }
}
impl<T: Diagnostic> core::ops::Deref for Report<T> {
    type Target = T;
    /// Dereferences to the wrapped error, so the report can be used wherever
    /// the original error type is expected.
    fn deref(&self) -> &Self::Target {
        &self.source
    }
}
impl<T: Diagnostic + fmt::Display> fmt::Display for Report<T> {
    /// Formats as the wrapped error; calling `Display::fmt` directly forwards
    /// any formatter flags (width, alignment, etc.) to it.
    fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
        fmt::Display::fmt(&self.source, f)
    }
}
#[cfg(feature = "std")]
impl<T> std::error::Error for Report<T>
where
    T: Diagnostic + fmt::Debug + std::error::Error + 'static,
    T::Subject: fmt::Debug,
{
    /// Forwards to the wrapped error's own source.
    fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
        self.source.source()
    }
}
#[cfg(feature = "miette")]
impl<T> miette::Diagnostic for Report<T>
where
    T: Diagnostic + fmt::Debug + std::error::Error + 'static,
    T::Subject: fmt::Debug + miette::SourceCode,
{
    /// The docs.rs URL of the wrapped error type.
    fn url<'a>(&'a self) -> Option<Box<dyn core::fmt::Display + 'a>> {
        Some(Box::new(T::url()))
    }
    /// The subject (pointer/string) is presented as the report's source code.
    fn source_code(&self) -> Option<&dyn miette::SourceCode> {
        Some(&self.subject)
    }
    /// Spans produced by the wrapped error, converted to miette's format.
    fn labels(&self) -> Option<Box<dyn Iterator<Item = miette::LabeledSpan> + '_>> {
        Some(Box::new(T::labels(self, &self.subject)?.map(Into::into)))
    }
}
// Builds, at compile time, the docs.rs URL for an error type of this crate.
// The first four arms normalize the input (enum/struct, optionally
// module-qualified) and delegate to the final arm, which concatenates the
// URL segments with `concat!`.
macro_rules! diagnostic_url {
    (enum $type:ident) => {
        $crate::diagnostic::diagnostic_url!("enum", "", $type)
    };
    (struct $type:ident) => {
        $crate::diagnostic::diagnostic_url!("struct", "", $type)
    };
    (enum $mod:ident::$type:ident) => {
        $crate::diagnostic::diagnostic_url!("enum", concat!("/", stringify!($mod)), $type)
    };
    (struct $mod:ident::$type:ident) => {
        $crate::diagnostic::diagnostic_url!("struct", concat!("/", stringify!($mod)), $type)
    };
    // Final form: kind is "enum"/"struct", $mod is "" or "/<module>".
    ($kind:literal, $mod:expr, $type:ident) => {
        concat!(
            "https://docs.rs/jsonptr/",
            env!("CARGO_PKG_VERSION"),
            "/jsonptr",
            $mod,
            "/",
            $kind,
            ".",
            stringify!($type),
            ".html",
        )
    };
}
pub(crate) use diagnostic_url;
/// An extension trait for `Result<_, E>`, where `E` is an implementation of
/// [`Diagnostic`], that converts `E` into [`Report<E>`](`Report`), yielding
/// `Result<_, Report<E>>`.
pub trait Diagnose<'s, T> {
    /// The error type returned from `diagnose` and `diagnose_with`.
    type Error: Diagnostic;
    /// If the `Result` is an `Err`, converts the error into a [`Report`] with
    /// the supplied `subject`.
    ///
    /// ## Example
    /// ```
    /// use core::any::{Any, TypeId};
    /// use jsonptr::{Pointer, ParseError, Diagnose, Report};
    /// let subj = "invalid/pointer";
    /// let err = Pointer::parse(subj).diagnose(subj).unwrap_err();
    /// assert_eq!(err.type_id(),TypeId::of::<Report<ParseError>>());
    /// ```
    #[allow(clippy::missing_errors_doc)]
    fn diagnose(
        self,
        subject: impl Into<<Self::Error as Diagnostic>::Subject>,
    ) -> Result<T, Report<Self::Error>>;
    /// If the `Result` is an `Err`, converts the error into a [`Report`] with
    /// the subject returned from `f`
    ///
    /// ## Example
    /// ```
    /// use core::any::{Any, TypeId};
    /// use jsonptr::{Pointer, ParseError, Diagnose, Report};
    /// let subj = "invalid/pointer";
    /// let err = Pointer::parse(subj).diagnose_with(|| subj).unwrap_err();
    ///
    /// assert_eq!(err.type_id(),TypeId::of::<Report<ParseError>>());
    /// ```
    #[allow(clippy::missing_errors_doc)]
    fn diagnose_with<F, S>(self, f: F) -> Result<T, Report<Self::Error>>
    where
        F: FnOnce() -> S,
        S: Into<<Self::Error as Diagnostic>::Subject>;
}
impl<T, E> Diagnose<'_, T> for Result<T, E>
where
    E: Diagnostic,
{
    type Error = E;

    fn diagnose(
        self,
        subject: impl Into<<Self::Error as Diagnostic>::Subject>,
    ) -> Result<T, Report<Self::Error>> {
        match self {
            Ok(value) => Ok(value),
            // Attach the subject to the error, producing an enriched report.
            Err(error) => Err(error.into_report(subject.into())),
        }
    }

    fn diagnose_with<F, S>(self, f: F) -> Result<T, Report<Self::Error>>
    where
        F: FnOnce() -> S,
        S: Into<<Self::Error as Diagnostic>::Subject>,
    {
        // Produce the subject, then defer to `diagnose`.
        let subject = f();
        self.diagnose(subject)
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    use crate::{Pointer, PointerBuf};

    /// Exercises `diagnose` on assignment failures and renders the resulting
    /// miette reports (smoke test: output is printed, not asserted).
    #[test]
    #[cfg(all(
        feature = "assign",
        feature = "miette",
        feature = "serde",
        feature = "json"
    ))]
    fn assign_error() {
        let mut v = serde_json::json!({"foo": {"bar": ["0"]}});
        let ptr = PointerBuf::parse("/foo/bar/invalid/cannot/reach").unwrap();
        let report = ptr.assign(&mut v, "qux").diagnose(ptr).unwrap_err();
        println!("{:?}", miette::Report::from(report));
        let ptr = PointerBuf::parse("/foo/bar/3/cannot/reach").unwrap();
        let report = ptr.assign(&mut v, "qux").diagnose(ptr).unwrap_err();
        println!("{:?}", miette::Report::from(report));
    }
    /// Exercises `diagnose` on resolution failures and renders the resulting
    /// miette reports (smoke test: output is printed, not asserted).
    #[test]
    #[cfg(all(
        feature = "resolve",
        feature = "miette",
        feature = "serde",
        feature = "json"
    ))]
    fn resolve_error() {
        let v = serde_json::json!({"foo": {"bar": ["0"]}});
        let ptr = PointerBuf::parse("/foo/bar/invalid/cannot/reach").unwrap();
        let report = ptr.resolve(&v).diagnose(ptr).unwrap_err();
        println!("{:?}", miette::Report::from(report));
        let ptr = PointerBuf::parse("/foo/bar/3/cannot/reach").unwrap();
        let report = ptr.resolve(&v).diagnose(ptr).unwrap_err();
        println!("{:?}", miette::Report::from(report));
    }
    /// Exercises reports for parse errors, both via `diagnose` and via the
    /// `Report` returned directly from `PointerBuf::parse`.
    #[test]
    #[cfg(feature = "miette")]
    fn parse_error() {
        let invalid = "/foo/bar/invalid~3~encoding/cannot/reach";
        let report = Pointer::parse(invalid).diagnose(invalid).unwrap_err();
        println!("{:?}", miette::Report::from(report));
        let report = PointerBuf::parse("/foo/bar/invalid~3~encoding/cannot/reach").unwrap_err();
        let report = miette::Report::from(report);
        println!("{report:?}");
    }
}

470
vendor/jsonptr/src/index.rs vendored Normal file
View File

@@ -0,0 +1,470 @@
//! Abstract index representation for RFC 6901.
//!
//! [RFC 6901](https://datatracker.ietf.org/doc/html/rfc6901) defines two valid
//! ways to represent array indices as Pointer tokens: non-negative integers,
//! and the character `-`, which stands for the index after the last existing
//! array member. While attempting to use `-` to resolve an array value will
//! always be out of bounds, the token can be useful when paired with utilities
//! which can mutate a value, such as this crate's [`assign`](crate::assign)
//! functionality or JSON Patch [RFC
//! 6902](https://datatracker.ietf.org/doc/html/rfc6902), as it provides a way
//! to express where to put the new element when extending an array.
//!
//! While this crate doesn't implement RFC 6902, it still must consider
//! non-numerical indices as valid, and provide a mechanism for manipulating
//! them. This is what this module provides.
//!
//! The main use of the `Index` type is when resolving a [`Token`] instance as a
//! concrete index for a given array length:
//!
//! ```
//! # use jsonptr::{index::Index, Token};
//! assert_eq!(Token::new("1").to_index(), Ok(Index::Num(1)));
//! assert_eq!(Token::new("-").to_index(), Ok(Index::Next));
//! assert!(Token::new("a").to_index().is_err());
//!
//! assert_eq!(Index::Num(0).for_len(1), Ok(0));
//! assert!(Index::Num(1).for_len(1).is_err());
//! assert!(Index::Next.for_len(1).is_err());
//!
//! assert_eq!(Index::Num(1).for_len_incl(1), Ok(1));
//! assert_eq!(Index::Next.for_len_incl(1), Ok(1));
//! assert!(Index::Num(2).for_len_incl(1).is_err());
//!
//! assert_eq!(Index::Num(42).for_len_unchecked(30), 42);
//! assert_eq!(Index::Next.for_len_unchecked(30), 30);
//! ```
use crate::Token;
use alloc::string::String;
use core::{fmt, num::ParseIntError, str::FromStr};
/// Represents an abstract index into an array.
///
/// If provided an upper bound with [`Self::for_len`] or [`Self::for_len_incl`],
/// will produce a concrete numerical index.
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub enum Index {
    /// A non-negative integer value, parsed from a digit-only token such as
    /// `"1"`.
    Num(usize),
    /// The `-` token, the position of the next would-be item in the array
    Next,
}
impl Index {
    /// Bounds the index for a given array length (exclusive).
    ///
    /// Only indices strictly less than `length` are accepted, which
    /// guarantees the resolved numerical index can be used to access an
    /// existing array element.
    ///
    /// [`Self::Next`] resolves to the array length itself and is therefore
    /// always rejected here.
    ///
    /// See also [`Self::for_len_incl`] for an alternative if you wish to
    /// accept [`Self::Next`] (or its numerical equivalent) as valid.
    ///
    /// # Examples
    ///
    /// ```
    /// # use jsonptr::index::Index;
    /// assert_eq!(Index::Num(0).for_len(1), Ok(0));
    /// assert!(Index::Num(1).for_len(1).is_err());
    /// assert!(Index::Next.for_len(1).is_err());
    /// ```
    /// # Errors
    /// Returns [`OutOfBoundsError`] if the index is out of bounds.
    pub fn for_len(&self, length: usize) -> Result<usize, OutOfBoundsError> {
        // `Next` resolves to `length`, which fails the strict `<` check below.
        let index = self.for_len_unchecked(length);
        if index < length {
            Ok(index)
        } else {
            Err(OutOfBoundsError { length, index })
        }
    }
    /// Bounds the index for a given array length (inclusive).
    ///
    /// The upper range is inclusive, so an index pointing to the position
    /// _after_ the last element is considered valid. Be careful when using
    /// the resulting numerical index for accessing an array.
    ///
    /// [`Self::Next`] is always considered valid.
    ///
    /// See also [`Self::for_len`] for an alternative if you wish to ensure
    /// that the resolved index can be used to access an existing array
    /// element.
    ///
    /// # Examples
    ///
    /// ```
    /// # use jsonptr::index::Index;
    /// assert_eq!(Index::Num(1).for_len_incl(1), Ok(1));
    /// assert_eq!(Index::Next.for_len_incl(1), Ok(1));
    /// assert!(Index::Num(2).for_len_incl(1).is_err());
    /// ```
    ///
    /// # Errors
    /// Returns [`OutOfBoundsError`] if the index is out of bounds.
    pub fn for_len_incl(&self, length: usize) -> Result<usize, OutOfBoundsError> {
        // `Next` resolves to `length`, which always passes the `<=` check.
        let index = self.for_len_unchecked(length);
        if index <= length {
            Ok(index)
        } else {
            Err(OutOfBoundsError { length, index })
        }
    }
    /// Resolves the index for a given array length.
    ///
    /// No bound checking will take place. If you wish to ensure the
    /// index can be used to access an existing element in the array, use
    /// [`Self::for_len`] - or use [`Self::for_len_incl`] if you wish to
    /// accept [`Self::Next`] as valid as well.
    ///
    /// # Examples
    ///
    /// ```
    /// # use jsonptr::index::Index;
    /// assert_eq!(Index::Num(42).for_len_unchecked(30), 42);
    /// assert_eq!(Index::Next.for_len_unchecked(30), 30);
    ///
    /// // no bounds checks
    /// assert_eq!(Index::Num(34).for_len_unchecked(40), 34);
    /// assert_eq!(Index::Next.for_len_unchecked(34), 34);
    /// ```
    pub fn for_len_unchecked(&self, length: usize) -> usize {
        match self {
            Self::Num(idx) => *idx,
            Self::Next => length,
        }
    }
}
impl fmt::Display for Index {
    /// Writes the RFC 6901 token form: the decimal number, or `-` for `Next`.
    fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
        if let Self::Num(index) = self {
            write!(f, "{index}")
        } else {
            f.write_str("-")
        }
    }
}
impl From<usize> for Index {
    // A plain number always maps to the concrete `Num` variant.
    fn from(value: usize) -> Self {
        Self::Num(value)
    }
}
impl FromStr for Index {
    type Err = ParseIndexError;
    /// Parses an RFC 6901 array-index token: `-` or a digit-only integer
    /// with no leading zeros.
    fn from_str(s: &str) -> Result<Self, Self::Err> {
        // `-` is RFC 6901's token for the next, not-yet-existing element.
        if s == "-" {
            return Ok(Index::Next);
        }
        // RFC 6901 disallows leading zeros (e.g. "007"); a lone "0" is fine.
        if s.starts_with('0') && s.len() > 1 {
            return Err(ParseIndexError::LeadingZeros);
        }
        // Reject any non-digit outright. This notably catches a leading `+`
        // sign, which is valid for representing a `usize` but not allowed in
        // RFC 6901 array indices.
        if let Some(offset) = s.chars().position(|c| !c.is_ascii_digit()) {
            return Err(ParseIndexError::InvalidCharacter(InvalidCharacterError {
                source: String::from(s),
                offset,
            }));
        }
        // All-digit input; may still overflow `usize`, yielding
        // `InvalidInteger`.
        s.parse::<usize>()
            .map(Index::Num)
            .map_err(ParseIndexError::from)
    }
}
impl TryFrom<&Token<'_>> for Index {
type Error = ParseIndexError;
fn try_from(value: &Token) -> Result<Self, Self::Error> {
Index::from_str(value.encoded())
}
}
impl TryFrom<&str> for Index {
type Error = ParseIndexError;
fn try_from(value: &str) -> Result<Self, Self::Error> {
Index::from_str(value)
}
}
impl TryFrom<Token<'_>> for Index {
type Error = ParseIndexError;
fn try_from(value: Token) -> Result<Self, Self::Error> {
Index::from_str(value.encoded())
}
}
// Generates `TryFrom<$t> for Index` for string-like owned types by
// delegating to the `FromStr` implementation above.
macro_rules! derive_try_from {
    ($($t:ty),+ $(,)?) => {
        $(
            impl TryFrom<$t> for Index {
                type Error = ParseIndexError;
                fn try_from(value: $t) -> Result<Self, Self::Error> {
                    Index::from_str(&value)
                }
            }
        )*
    }
}
derive_try_from!(String, &String);
/*
░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░
╔══════════════════════════════════════════════════════════════════════════════╗
║ ║
║ OutOfBoundsError ║
║ ¯¯¯¯¯¯¯¯¯¯¯¯¯¯¯¯¯¯ ║
╚══════════════════════════════════════════════════════════════════════════════╝
░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░
*/
/// Indicates that an `Index` is not within the given bounds.
///
/// Returned by [`Index::for_len`] and [`Index::for_len_incl`].
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct OutOfBoundsError {
    /// The provided array length.
    ///
    /// If the range is inclusive, the resolved numerical index will be strictly
    /// less than this value, otherwise it could be equal to it.
    pub length: usize,
    /// The resolved numerical index.
    ///
    /// Note that [`Index::Next`] always resolves to the given array length,
    /// so it is only valid when the range is inclusive.
    pub index: usize,
}
impl fmt::Display for OutOfBoundsError {
    /// Renders as `index <index> out of bounds (len: <length>)`.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        let Self { length, index } = self;
        write!(f, "index {index} out of bounds (len: {length})")
    }
}
#[cfg(feature = "std")]
// No underlying cause to expose, so the default `Error` methods suffice.
impl std::error::Error for OutOfBoundsError {}
/*
░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░
╔══════════════════════════════════════════════════════════════════════════════╗
║ ║
║ ParseIndexError ║
║ ¯¯¯¯¯¯¯¯¯¯¯¯¯¯¯¯¯ ║
╚══════════════════════════════════════════════════════════════════════════════╝
░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░
*/
/// Indicates that the `Token` could not be parsed as valid RFC 6901 array index.
#[derive(Debug, Clone, PartialEq, Eq)]
pub enum ParseIndexError {
    /// The Token does not represent a valid integer (e.g. it overflows
    /// `usize`); wraps the underlying [`ParseIntError`].
    InvalidInteger(ParseIntError),
    /// The Token contains leading zeros (e.g. `"007"`), which RFC 6901
    /// disallows.
    LeadingZeros,
    /// The Token contains a non-digit character (e.g. a `+` sign).
    InvalidCharacter(InvalidCharacterError),
}
impl From<ParseIntError> for ParseIndexError {
    // Integer-parsing failures map directly to the `InvalidInteger` variant.
    fn from(source: ParseIntError) -> Self {
        Self::InvalidInteger(source)
    }
}
impl fmt::Display for ParseIndexError {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match self {
ParseIndexError::InvalidInteger(_) => {
write!(f, "failed to parse token as an integer")
}
ParseIndexError::LeadingZeros => write!(
f,
"token contained leading zeros, which are disallowed by RFC 6901"
),
ParseIndexError::InvalidCharacter(_) => {
write!(f, "failed to parse token as an index")
}
}
}
}
#[cfg(feature = "std")]
impl std::error::Error for ParseIndexError {
    /// Exposes the wrapped cause for the variants that carry one.
    fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
        match self {
            Self::LeadingZeros => None,
            Self::InvalidInteger(source) => Some(source),
            Self::InvalidCharacter(source) => Some(source),
        }
    }
}
/// Indicates that a non-digit character was found when parsing the RFC 6901 array index.
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct InvalidCharacterError {
    // the full token text that failed to parse
    pub(crate) source: String,
    // offset of the offending character, counted in characters (not bytes)
    pub(crate) offset: usize,
}
impl InvalidCharacterError {
    /// Returns the offset of the character in the string.
    ///
    /// This offset is given in characters, not in bytes.
    pub fn offset(&self) -> usize {
        self.offset
    }
    /// Returns the source string.
    pub fn source(&self) -> &str {
        self.source.as_str()
    }
    /// Returns the offending character.
    #[allow(clippy::missing_panics_doc)]
    pub fn char(&self) -> char {
        // invariant: `offset` was produced by scanning `source`, so a
        // character is always present there
        let mut chars = self.source.chars();
        chars.nth(self.offset).expect("char was found at offset")
    }
}
impl fmt::Display for InvalidCharacterError {
    /// Names the offending character in the message.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        let bad = self.char();
        write!(
            f,
            "token contains the non-digit character '{bad}', which is disallowed by RFC 6901"
        )
    }
}
#[cfg(feature = "std")]
// No underlying cause to expose, so the default `Error` methods suffice.
impl std::error::Error for InvalidCharacterError {}
/*
░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░
╔══════════════════════════════════════════════════════════════════════════════╗
║ ║
║ Tests ║
║ ¯¯¯¯¯¯¯ ║
╚══════════════════════════════════════════════════════════════════════════════╝
░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░
*/
#[cfg(test)]
mod tests {
    use super::*;
    use crate::Token;
    // Conversions into `Index` from primitive, token, and string-like types.
    #[test]
    fn index_from_usize() {
        let index = Index::from(5usize);
        assert_eq!(index, Index::Num(5));
    }
    #[test]
    fn index_try_from_token_num() {
        let token = Token::new("3");
        let index = Index::try_from(&token).unwrap();
        assert_eq!(index, Index::Num(3));
    }
    #[test]
    fn index_try_from_token_next() {
        let token = Token::new("-");
        let index = Index::try_from(&token).unwrap();
        assert_eq!(index, Index::Next);
    }
    #[test]
    fn index_try_from_str_num() {
        let index = Index::try_from("42").unwrap();
        assert_eq!(index, Index::Num(42));
    }
    #[test]
    fn index_try_from_str_next() {
        let index = Index::try_from("-").unwrap();
        assert_eq!(index, Index::Next);
    }
    #[test]
    fn index_try_from_string_num() {
        let index = Index::try_from(String::from("7")).unwrap();
        assert_eq!(index, Index::Num(7));
    }
    #[test]
    fn index_try_from_string_next() {
        let index = Index::try_from(String::from("-")).unwrap();
        assert_eq!(index, Index::Next);
    }
    // Bounds resolution: inclusive, unchecked, and exclusive.
    #[test]
    fn index_for_len_incl_valid() {
        assert_eq!(Index::Num(0).for_len_incl(1), Ok(0));
        assert_eq!(Index::Next.for_len_incl(2), Ok(2));
    }
    #[test]
    fn index_for_len_incl_out_of_bounds() {
        Index::Num(2).for_len_incl(1).unwrap_err();
    }
    #[test]
    fn index_for_len_unchecked() {
        // no bounds checks: out-of-range values pass through unchanged
        assert_eq!(Index::Num(10).for_len_unchecked(5), 10);
        assert_eq!(Index::Next.for_len_unchecked(3), 3);
    }
    // `Display` renders the RFC 6901 token form.
    #[test]
    fn display_index_num() {
        let index = Index::Num(5);
        assert_eq!(index.to_string(), "5");
    }
    #[test]
    fn display_index_next() {
        assert_eq!(Index::Next.to_string(), "-");
    }
    #[test]
    fn for_len() {
        assert_eq!(Index::Num(0).for_len(1), Ok(0));
        assert!(Index::Num(1).for_len(1).is_err());
        assert!(Index::Next.for_len(1).is_err());
    }
    #[test]
    fn try_from_token() {
        let token = Token::new("3");
        let index = <Index as TryFrom<Token>>::try_from(token).unwrap();
        assert_eq!(index, Index::Num(3));
        let token = Token::new("-");
        let index = Index::try_from(&token).unwrap();
        assert_eq!(index, Index::Next);
    }
}

85
vendor/jsonptr/src/lib.rs vendored Normal file
View File

@@ -0,0 +1,85 @@
// rustdoc + README hack: https://linebender.org/blog/doc-include
//! <style>.rustdoc-hidden { display: none; }</style>
//! [`Pointer`]: https://docs.rs/jsonptr/latest/jsonptr/struct.Pointer.html
//! [`Pointer::tokens`]: crate::Pointer::tokens
//! [`Pointer::components`]: crate::Pointer::components
//! [`Pointer::parse`]: crate::Pointer::parse
//! [`Pointer::resolve`]: crate::Pointer::resolve
//! [`Pointer::resolve_mut`]: crate::Pointer::resolve_mut
//! [`Pointer::assign`]: crate::Pointer::assign
//! [`Pointer::delete`]: crate::Pointer::delete
//! [`PointerBuf::parse`]: crate::PointerBuf::parse
//! [`PointerBuf`]: crate::PointerBuf
//! [`from_tokens`]: crate::PointerBuf::from_tokens
//! [`Token`]: crate::Token
//! [`Tokens`]: crate::Tokens
//! [`Components`]: crate::Components
//! [`Component`]: crate::Component
//! [`index`]: crate::index
//! [`tokens`]: crate::Pointer::tokens
//! [`components`]: crate::Pointer::components
//! [`resolve`]: crate::resolve
//! [`assign`]: crate::assign
//! [`delete`]: crate::delete
//! [`Resolve`]: crate::resolve::Resolve
//! [`ResolveMut`]: crate::resolve::ResolveMut
//! [`Assign`]: crate::assign::Assign
//! [`Delete`]: crate::delete::Delete
//! [`serde`]: https://docs.rs/serde/1.0/serde/index
//! [`serde_json`]: https://docs.rs/serde_json/1.0/serde_json/enum.Value.html
//! [`serde_json::Value`]: https://docs.rs/serde_json/1.0/serde_json/enum.Value.html
//! [`toml`]: https://docs.rs/toml/0.8/toml/enum.Value.html
//! [`toml::Value`]: https://docs.rs/toml/0.8/toml/enum.Value.html
//! [`Path`]: https://doc.rust-lang.org/std/path/struct.Path.html
//! [`PathBuf`]: https://doc.rust-lang.org/std/path/struct.PathBuf.html
#![doc = include_str!("../README.md")]
#![warn(missing_docs)]
#![deny(clippy::all, clippy::pedantic)]
#![cfg_attr(not(feature = "std"), no_std)]
#![allow(
clippy::module_name_repetitions,
clippy::into_iter_without_iter,
clippy::needless_pass_by_value,
clippy::expect_fun_call,
clippy::must_use_candidate,
clippy::similar_names
)]
#[cfg_attr(not(feature = "std"), macro_use)]
extern crate alloc;
#[cfg(feature = "assign")]
pub mod assign;
#[cfg(feature = "assign")]
pub use assign::Assign;
#[cfg(feature = "delete")]
pub mod delete;
#[cfg(feature = "delete")]
pub use delete::Delete;
#[cfg(feature = "resolve")]
pub mod resolve;
#[cfg(feature = "resolve")]
pub use resolve::{Resolve, ResolveMut};
pub mod diagnostic;
pub use diagnostic::{Diagnose, Report};
mod pointer;
pub use pointer::{ParseError, Pointer, PointerBuf, RichParseError};
mod token;
pub use token::{EncodingError, InvalidEncoding, Token, Tokens};
#[allow(deprecated)]
pub use token::InvalidEncodingError;
pub mod index;
mod component;
pub use component::{Component, Components};
#[cfg(test)]
mod arbitrary;

2353
vendor/jsonptr/src/pointer.rs vendored Normal file

File diff suppressed because it is too large Load Diff

500
vendor/jsonptr/src/pointer/slice.rs vendored Normal file
View File

@@ -0,0 +1,500 @@
use super::Pointer;
use crate::Token;
use core::ops::Bound;
/// Sealed indexing trait backing `Pointer::get`: implemented for `usize`
/// (yields a single [`Token`]) and for the standard range types (yield a
/// sub-[`Pointer`] slice).
pub trait PointerIndex<'p>: private::Sealed {
    /// The value produced by a successful lookup.
    type Output: 'p;
    /// Returns the token or sub-pointer addressed by `self`, or `None` if it
    /// is out of bounds for `pointer`.
    fn get(self, pointer: &'p Pointer) -> Option<Self::Output>;
}
impl<'p> PointerIndex<'p> for usize {
    type Output = Token<'p>;
    // A single index yields the `self`-th token (0-based), if present.
    fn get(self, pointer: &'p Pointer) -> Option<Self::Output> {
        pointer.tokens().nth(self)
    }
}
impl<'p> PointerIndex<'p> for core::ops::Range<usize> {
    type Output = &'p Pointer;
    // Walks the tokens once, recording the byte offsets where the start and
    // end token indices begin, then slices the underlying string between
    // them. Either offset remaining `None` means the bound is out of range.
    fn get(self, pointer: &'p Pointer) -> Option<Self::Output> {
        if self.end < self.start {
            // never valid
            return None;
        }
        let mut idx = 0;
        let mut offset = 0;
        let mut start_offset = None;
        let mut end_offset = None;
        for token in pointer.tokens() {
            if idx == self.start {
                start_offset = Some(offset);
            }
            if idx == self.end {
                end_offset = Some(offset);
                break;
            }
            idx += 1;
            // also include the `/` separator
            offset += token.encoded().len() + 1;
        }
        // edge case where end is last token index + 1
        // this is valid because range is exclusive
        if idx == self.end {
            end_offset = Some(offset);
        }
        let slice = &pointer.0.as_bytes()[start_offset?..end_offset?];
        // SAFETY: start and end offsets are token boundaries, so the slice is
        // valid utf-8 (and also a valid json pointer!)
        Some(unsafe { Pointer::new_unchecked(core::str::from_utf8_unchecked(slice)) })
    }
}
impl<'p> PointerIndex<'p> for core::ops::RangeFrom<usize> {
    type Output = &'p Pointer;
    // Finds the byte offset where token `start` begins and slices from there
    // to the end of the pointer.
    fn get(self, pointer: &'p Pointer) -> Option<Self::Output> {
        {
            let mut offset = 0;
            let mut start_offset = None;
            for (idx, token) in pointer.tokens().enumerate() {
                if idx == self.start {
                    start_offset = Some(offset);
                    break;
                }
                // also include the `/` separator
                offset += token.encoded().len() + 1;
            }
            // `start_offset` stays `None` when `start` is past the last token
            let slice = &pointer.0.as_bytes()[start_offset?..];
            // SAFETY: start offset is token boundary, so the slice is valid
            // utf-8 (and also a valid json pointer!)
            Some(unsafe { Pointer::new_unchecked(core::str::from_utf8_unchecked(slice)) })
        }
    }
}
impl<'p> PointerIndex<'p> for core::ops::RangeTo<usize> {
    type Output = &'p Pointer;
    // Finds the byte offset where token `end` begins and slices from the
    // start of the pointer up to (excluding) it.
    fn get(self, pointer: &'p Pointer) -> Option<Self::Output> {
        {
            let mut idx = 0;
            let mut offset = 0;
            let mut end_offset = None;
            for token in pointer.tokens() {
                if idx == self.end {
                    end_offset = Some(offset);
                    break;
                }
                idx += 1;
                // also include the `/` separator
                offset += token.encoded().len() + 1;
            }
            // edge case where end is last token index + 1
            // this is valid because range is exclusive
            if idx == self.end {
                end_offset = Some(offset);
            }
            let slice = &pointer.0.as_bytes()[..end_offset?];
            // SAFETY: start and end offsets are token boundaries, so the slice is
            // valid utf-8 (and also a valid json pointer!)
            Some(unsafe { Pointer::new_unchecked(core::str::from_utf8_unchecked(slice)) })
        }
    }
}
impl<'p> PointerIndex<'p> for core::ops::RangeFull {
    type Output = &'p Pointer;
    // A full range always yields the entire pointer, including the root.
    fn get(self, pointer: &'p Pointer) -> Option<Self::Output> {
        Some(pointer)
    }
}
impl<'p> PointerIndex<'p> for core::ops::RangeInclusive<usize> {
    type Output = &'p Pointer;
    // Like the exclusive `Range` impl, but the end offset is taken *after*
    // advancing past the `end` token so that token is included in the slice.
    fn get(self, pointer: &'p Pointer) -> Option<Self::Output> {
        let (start, end) = self.into_inner();
        if end < start {
            // never valid
            return None;
        }
        let mut offset = 0;
        let mut start_offset = None;
        let mut end_offset = None;
        for (idx, token) in pointer.tokens().enumerate() {
            if idx == start {
                start_offset = Some(offset);
            }
            // also include the `/` separator
            offset += token.encoded().len() + 1;
            // since the range is inclusive, we wish to slice up until the end
            // of the token whose index is `end`, so we increment offset first
            // before checking for a match
            if idx == end {
                end_offset = Some(offset);
                break;
            }
        }
        // notice that we don't use an inclusive range here, because we already
        // acounted for the included end token when computing `end_offset` above
        let slice = &pointer.0.as_bytes()[start_offset?..end_offset?];
        // SAFETY: start and end offsets are token boundaries, so the slice is
        // valid utf-8 (and also a valid json pointer!)
        Some(unsafe { Pointer::new_unchecked(core::str::from_utf8_unchecked(slice)) })
    }
}
impl<'p> PointerIndex<'p> for core::ops::RangeToInclusive<usize> {
    type Output = &'p Pointer;
    // Slices from the start of the pointer through the end of token
    // `self.end` (inclusive).
    fn get(self, pointer: &'p Pointer) -> Option<Self::Output> {
        {
            let mut offset = 0;
            let mut end_offset = None;
            for (idx, token) in pointer.tokens().enumerate() {
                // also include the `/` separator
                offset += token.encoded().len() + 1;
                // since the range is inclusive, we wish to slice up until the end
                // of the token whose index is `end`, so we increment offset first
                // before checking for a match
                if idx == self.end {
                    end_offset = Some(offset);
                    break;
                }
            }
            // notice that we don't use an inclusive range here, because we already
            // acounted for the included end token when computing `end_offset` above
            let slice = &pointer.0.as_bytes()[..end_offset?];
            // SAFETY: start and end offsets are token boundaries, so the slice is
            // valid utf-8 (and also a valid json pointer!)
            Some(unsafe { Pointer::new_unchecked(core::str::from_utf8_unchecked(slice)) })
        }
    }
}
impl<'p> PointerIndex<'p> for (Bound<usize>, Bound<usize>) {
    type Output = &'p Pointer;
    // Normalizes the pair of bounds into a concrete range type and delegates
    // to the corresponding `PointerIndex` implementation.
    fn get(self, pointer: &'p Pointer) -> Option<Self::Output> {
        match self {
            (Bound::Included(start), Bound::Included(end)) => pointer.get(start..=end),
            (Bound::Included(start), Bound::Excluded(end)) => pointer.get(start..end),
            (Bound::Included(start), Bound::Unbounded) => pointer.get(start..),
            // `checked_add` avoids an overflow panic when the excluded start
            // is `usize::MAX`; such a start can never be in bounds, so `None`
            // is the correct answer.
            (Bound::Excluded(start), Bound::Included(end)) => {
                pointer.get(start.checked_add(1)?..=end)
            }
            (Bound::Excluded(start), Bound::Excluded(end)) => {
                pointer.get(start.checked_add(1)?..end)
            }
            (Bound::Excluded(start), Bound::Unbounded) => pointer.get(start.checked_add(1)?..),
            (Bound::Unbounded, Bound::Included(end)) => pointer.get(..=end),
            (Bound::Unbounded, Bound::Excluded(end)) => pointer.get(..end),
            (Bound::Unbounded, Bound::Unbounded) => pointer.get(..),
        }
    }
}
mod private {
    use core::ops;
    /// Seals `PointerIndex` so that only the index types listed below may
    /// implement it.
    pub trait Sealed {}
    impl Sealed for usize {}
    impl Sealed for ops::Range<usize> {}
    impl Sealed for ops::RangeTo<usize> {}
    impl Sealed for ops::RangeFrom<usize> {}
    impl Sealed for ops::RangeFull {}
    impl Sealed for ops::RangeInclusive<usize> {}
    impl Sealed for ops::RangeToInclusive<usize> {}
    impl Sealed for (ops::Bound<usize>, ops::Bound<usize>) {}
}
#[cfg(test)]
mod tests {
    use core::ops::Bound;
    use crate::{Pointer, Token};
    // Each test exercises one `PointerIndex` implementation against three
    // shapes of pointer: multi-token, the single-empty-token pointer `"/"`,
    // and the root pointer `""` (which has zero tokens).
    #[test]
    fn get_single() {
        let ptr = Pointer::from_static("/foo/bar/qux");
        let s = ptr.get(0);
        assert_eq!(s, Some(Token::new("foo")));
        let s = ptr.get(1);
        assert_eq!(s, Some(Token::new("bar")));
        let s = ptr.get(2);
        assert_eq!(s, Some(Token::new("qux")));
        let s = ptr.get(3);
        assert_eq!(s, None);
        // "/" has exactly one token: the empty string
        let ptr = Pointer::from_static("/");
        let s = ptr.get(0);
        assert_eq!(s, Some(Token::new("")));
        let s = ptr.get(1);
        assert_eq!(s, None);
        // the root pointer has no tokens at all
        let ptr = Pointer::from_static("");
        let s = ptr.get(0);
        assert_eq!(s, None);
        let s = ptr.get(1);
        assert_eq!(s, None);
    }
    #[allow(clippy::reversed_empty_ranges)]
    #[test]
    fn get_range() {
        let ptr = Pointer::from_static("/foo/bar/qux");
        let s = ptr.get(0..3);
        assert_eq!(s, Some(ptr));
        let s = ptr.get(0..2);
        assert_eq!(s, Some(Pointer::from_static("/foo/bar")));
        let s = ptr.get(0..1);
        assert_eq!(s, Some(Pointer::from_static("/foo")));
        let s = ptr.get(0..0);
        assert_eq!(s, Some(Pointer::from_static("")));
        let s = ptr.get(1..3);
        assert_eq!(s, Some(Pointer::from_static("/bar/qux")));
        let s = ptr.get(1..2);
        assert_eq!(s, Some(Pointer::from_static("/bar")));
        let s = ptr.get(1..1);
        assert_eq!(s, Some(Pointer::from_static("")));
        // reversed and out-of-bounds ranges are never valid
        let s = ptr.get(1..0);
        assert_eq!(s, None);
        let s = ptr.get(0..4);
        assert_eq!(s, None);
        let s = ptr.get(2..4);
        assert_eq!(s, None);
        let ptr = Pointer::from_static("/");
        let s = ptr.get(0..1);
        assert_eq!(s, Some(ptr));
        let s = ptr.get(0..0);
        assert_eq!(s, Some(Pointer::root()));
        let s = ptr.get(1..0);
        assert_eq!(s, None);
        let s = ptr.get(0..2);
        assert_eq!(s, None);
        let s = ptr.get(1..2);
        assert_eq!(s, None);
        let s = ptr.get(1..1);
        assert_eq!(s, None);
        let ptr = Pointer::root();
        let s = ptr.get(0..1);
        assert_eq!(s, None);
        let s = ptr.get(0..0);
        assert_eq!(s, None);
        let s = ptr.get(1..0);
        assert_eq!(s, None);
        let s = ptr.get(1..1);
        assert_eq!(s, None);
    }
    #[test]
    fn get_from_range() {
        let ptr = Pointer::from_static("/foo/bar/qux");
        let s = ptr.get(0..);
        assert_eq!(s, Some(ptr));
        let s = ptr.get(1..);
        assert_eq!(s, Some(Pointer::from_static("/bar/qux")));
        let s = ptr.get(2..);
        assert_eq!(s, Some(Pointer::from_static("/qux")));
        let s = ptr.get(3..);
        assert_eq!(s, None);
        let ptr = Pointer::from_static("/");
        let s = ptr.get(0..);
        assert_eq!(s, Some(ptr));
        let s = ptr.get(1..);
        assert_eq!(s, None);
        let ptr = Pointer::from_static("");
        let s = ptr.get(0..);
        assert_eq!(s, None);
    }
    #[test]
    fn get_to_range() {
        let ptr = Pointer::from_static("/foo/bar/qux");
        let s = ptr.get(..4);
        assert_eq!(s, None);
        let s = ptr.get(..3);
        assert_eq!(s, Some(ptr));
        let s = ptr.get(..2);
        assert_eq!(s, Some(Pointer::from_static("/foo/bar")));
        let s = ptr.get(..1);
        assert_eq!(s, Some(Pointer::from_static("/foo")));
        let s = ptr.get(..0);
        assert_eq!(s, Some(Pointer::from_static("")));
        let ptr = Pointer::from_static("/");
        let s = ptr.get(..0);
        assert_eq!(s, Some(Pointer::from_static("")));
        let s = ptr.get(..1);
        assert_eq!(s, Some(ptr));
        let s = ptr.get(..2);
        assert_eq!(s, None);
        // `..0` on the root pointer is valid: it selects zero tokens
        let ptr = Pointer::from_static("");
        let s = ptr.get(..0);
        assert_eq!(s, Some(ptr));
        let s = ptr.get(..1);
        assert_eq!(s, None);
    }
    #[test]
    fn get_full_range() {
        // `..` always succeeds, returning the pointer unchanged
        let ptr = Pointer::from_static("/foo/bar");
        let s = ptr.get(..);
        assert_eq!(s, Some(ptr));
        let ptr = Pointer::from_static("/");
        let s = ptr.get(..);
        assert_eq!(s, Some(ptr));
        let ptr = Pointer::from_static("");
        let s = ptr.get(..);
        assert_eq!(s, Some(ptr));
    }
    #[allow(clippy::reversed_empty_ranges)]
    #[test]
    fn get_range_inclusive() {
        let ptr = Pointer::from_static("/foo/bar/qux");
        let s = ptr.get(0..=3);
        assert_eq!(s, None);
        let s = ptr.get(0..=2);
        assert_eq!(s, Some(ptr));
        let s = ptr.get(0..=1);
        assert_eq!(s, Some(Pointer::from_static("/foo/bar")));
        let s = ptr.get(0..=0);
        assert_eq!(s, Some(Pointer::from_static("/foo")));
        let s = ptr.get(1..=3);
        assert_eq!(s, None);
        let s = ptr.get(1..=2);
        assert_eq!(s, Some(Pointer::from_static("/bar/qux")));
        let s = ptr.get(1..=1);
        assert_eq!(s, Some(Pointer::from_static("/bar")));
        let s = ptr.get(1..=0);
        assert_eq!(s, None);
        let ptr = Pointer::from_static("/");
        let s = ptr.get(0..=0);
        assert_eq!(s, Some(ptr));
        let s = ptr.get(1..=0);
        assert_eq!(s, None);
        let s = ptr.get(0..=1);
        assert_eq!(s, None);
        let s = ptr.get(1..=1);
        assert_eq!(s, None);
        // inclusive ranges can never address the token-less root pointer
        let ptr = Pointer::root();
        let s = ptr.get(0..=1);
        assert_eq!(s, None);
        let s = ptr.get(0..=0);
        assert_eq!(s, None);
        let s = ptr.get(1..=0);
        assert_eq!(s, None);
        let s = ptr.get(1..=1);
        assert_eq!(s, None);
    }
    #[test]
    fn get_to_range_inclusive() {
        let ptr = Pointer::from_static("/foo/bar/qux");
        let s = ptr.get(..=3);
        assert_eq!(s, None);
        let s = ptr.get(..=2);
        assert_eq!(s, Some(ptr));
        let s = ptr.get(..=1);
        assert_eq!(s, Some(Pointer::from_static("/foo/bar")));
        let s = ptr.get(..=0);
        assert_eq!(s, Some(Pointer::from_static("/foo")));
        let ptr = Pointer::from_static("/");
        let s = ptr.get(..=0);
        assert_eq!(s, Some(ptr));
        let s = ptr.get(..=1);
        assert_eq!(s, None);
        let ptr = Pointer::from_static("");
        let s = ptr.get(..=0);
        assert_eq!(s, None);
        let s = ptr.get(..=1);
        assert_eq!(s, None);
    }
    #[test]
    fn get_by_explicit_bounds() {
        let ptr = Pointer::from_static("/foo/bar/qux");
        let s = ptr.get((Bound::Excluded(0), Bound::Included(2)));
        assert_eq!(s, Some(Pointer::from_static("/bar/qux")));
        let s = ptr.get((Bound::Excluded(0), Bound::Excluded(2)));
        assert_eq!(s, Some(Pointer::from_static("/bar")));
        let s = ptr.get((Bound::Excluded(0), Bound::Unbounded));
        assert_eq!(s, Some(Pointer::from_static("/bar/qux")));
        let s = ptr.get((Bound::Included(0), Bound::Included(2)));
        assert_eq!(s, Some(Pointer::from_static("/foo/bar/qux")));
        let s = ptr.get((Bound::Included(0), Bound::Excluded(2)));
        assert_eq!(s, Some(Pointer::from_static("/foo/bar")));
        let s = ptr.get((Bound::Included(0), Bound::Unbounded));
        assert_eq!(s, Some(Pointer::from_static("/foo/bar/qux")));
        let s = ptr.get((Bound::Unbounded, Bound::Included(2)));
        assert_eq!(s, Some(Pointer::from_static("/foo/bar/qux")));
        let s = ptr.get((Bound::Unbounded, Bound::Excluded(2)));
        assert_eq!(s, Some(Pointer::from_static("/foo/bar")));
        let s = ptr.get((Bound::Unbounded, Bound::Unbounded));
        assert_eq!(s, Some(Pointer::from_static("/foo/bar/qux")));
        let ptr = Pointer::from_static("/foo/bar");
        let s = ptr.get((Bound::Excluded(0), Bound::Included(2)));
        assert_eq!(s, None);
        let s = ptr.get((Bound::Excluded(0), Bound::Excluded(2)));
        assert_eq!(s, Some(Pointer::from_static("/bar")));
        let s = ptr.get((Bound::Excluded(0), Bound::Unbounded));
        assert_eq!(s, Some(Pointer::from_static("/bar")));
        let s = ptr.get((Bound::Included(0), Bound::Included(2)));
        assert_eq!(s, None);
        let s = ptr.get((Bound::Included(0), Bound::Excluded(2)));
        assert_eq!(s, Some(ptr));
        let s = ptr.get((Bound::Included(0), Bound::Unbounded));
        assert_eq!(s, Some(ptr));
        let s = ptr.get((Bound::Unbounded, Bound::Included(2)));
        assert_eq!(s, None);
        let s = ptr.get((Bound::Unbounded, Bound::Excluded(2)));
        assert_eq!(s, Some(ptr));
        let s = ptr.get((Bound::Unbounded, Bound::Unbounded));
        assert_eq!(s, Some(ptr));
        // testing only the start excluded case a bit more exhaustively since
        // other cases just delegate directly (so are covered by other tests)
        let ptr = Pointer::from_static("/");
        let s = ptr.get((Bound::Excluded(0), Bound::Included(0)));
        assert_eq!(s, None);
        let s = ptr.get((Bound::Excluded(0), Bound::Excluded(0)));
        assert_eq!(s, None);
        let s = ptr.get((Bound::Excluded(0), Bound::Unbounded));
        assert_eq!(s, None);
        let ptr = Pointer::from_static("");
        let s = ptr.get((Bound::Excluded(0), Bound::Included(0)));
        assert_eq!(s, None);
        let s = ptr.get((Bound::Excluded(0), Bound::Excluded(0)));
        assert_eq!(s, None);
        let s = ptr.get((Bound::Excluded(0), Bound::Unbounded));
        assert_eq!(s, None);
    }
}

861
vendor/jsonptr/src/resolve.rs vendored Normal file
View File

@@ -0,0 +1,861 @@
//! # Resolve values based on JSON [`Pointer`]s
//!
//! This module provides the [`Resolve`] and [`ResolveMut`] traits which are
//! implemented by types that can internally resolve a value based on a JSON
//! Pointer.
//!
//! This module is enabled by default with the `"resolve"` feature flag.
//!
//! ## Usage
//! [`Resolve`] and [`ResolveMut`] can be used directly or through the
//! [`resolve`](Pointer::resolve) and [`resolve_mut`](Pointer::resolve_mut)
//! methods on [`Pointer`] and [`PointerBuf`](crate::PointerBuf).
//!
//! ```rust
//! use jsonptr::{Pointer, Resolve, ResolveMut};
//! use serde_json::json;
//!
//! let ptr = Pointer::from_static("/foo/1");
//! let mut data = json!({"foo": ["bar", "baz"]});
//!
//! let value = ptr.resolve(&data).unwrap();
//! assert_eq!(value, &json!("baz"));
//!
//! let value = data.resolve_mut(ptr).unwrap();
//! assert_eq!(value, &json!("baz"));
//! ```
//!
//! ## Provided implementations
//!
//! | Lang | value type | feature flag | Default |
//! | ----- |: ----------------- :|: ---------- :| ------- |
//! | JSON | `serde_json::Value` | `"json"` | ✓ |
//! | TOML | `toml::Value` | `"toml"` | |
//!
//!
use crate::{
diagnostic::{diagnostic_url, Diagnostic, Label},
index::{OutOfBoundsError, ParseIndexError},
Pointer, PointerBuf, Token,
};
use alloc::{boxed::Box, string::ToString};
use core::iter::once;
/// A trait implemented by types which can resolve a reference to a value type
/// from a path represented by a JSON [`Pointer`].
pub trait Resolve {
    /// The type of value that this implementation can operate on.
    type Value;
    /// Error associated with `Resolve`
    type Error;
    /// Resolve a reference to `Self::Value` based on the path in a [`Pointer`].
    ///
    /// ## Errors
    /// Returns a [`Self::Error`](Resolve::Error) if the [`Pointer`] cannot
    /// be resolved.
    fn resolve(&self, ptr: &Pointer) -> Result<&Self::Value, Self::Error>;
}
/// A trait implemented by types which can resolve a mutable reference to a
/// value type from a path represented by a JSON [`Pointer`].
pub trait ResolveMut {
    /// The type of value that is being resolved.
    type Value;
    /// Error associated with `ResolveMut`
    type Error;
    /// Resolve a mutable reference to a `Self::Value` based on the path
    /// in a JSON Pointer.
    ///
    /// ## Errors
    /// Returns a [`Self::Error`](ResolveMut::Error) if the [`Pointer`] cannot
    /// be resolved.
    fn resolve_mut(&mut self, ptr: &Pointer) -> Result<&mut Self::Value, Self::Error>;
}
// TODO: should ResolveError be deprecated?
/// Alias for [`Error`], kept so code written against the older name keeps
/// compiling.
pub type ResolveError = Error;
/// Indicates that the `Pointer` could not be resolved.
#[derive(Debug, PartialEq, Eq)]
pub enum Error {
    /// `Pointer` could not be resolved because a `Token` for an array index is
    /// not a valid integer or dash (`"-"`).
    ///
    /// ## Example
    /// ```rust
    /// # use serde_json::json;
    /// # use jsonptr::Pointer;
    /// let data = json!({ "foo": ["bar"] });
    /// let ptr = Pointer::from_static("/foo/invalid");
    /// assert!(ptr.resolve(&data).unwrap_err().is_failed_to_parse_index());
    /// ```
    FailedToParseIndex {
        /// Position (index) of the token which failed to parse as an [`Index`](crate::index::Index)
        position: usize,
        /// Offset of the partial pointer starting with the invalid index.
        offset: usize,
        /// The source `ParseIndexError`
        source: ParseIndexError,
    },
    /// A [`Token`] within the [`Pointer`] contains an [`Index`] which is out of
    /// bounds.
    ///
    /// ## Example
    /// ```rust
    /// # use serde_json::json;
    /// # use jsonptr::Pointer;
    /// let data = json!({ "foo": ["bar"] });
    /// let ptr = Pointer::from_static("/foo/1");
    /// assert!(ptr.resolve(&data).unwrap_err().is_out_of_bounds());
    /// ```
    OutOfBounds {
        /// Position (index) of the token whose index is out of bounds.
        position: usize,
        /// Offset of the partial pointer starting with the out-of-bounds index.
        offset: usize,
        /// The source `OutOfBoundsError`
        source: OutOfBoundsError,
    },
    /// `Pointer` could not be resolved as a segment of the path was not found.
    ///
    /// ## Example
    /// ```rust
    /// # use serde_json::json;
    /// # use jsonptr::Pointer;
    /// let data = json!({ "foo": "bar" });
    /// let ptr = Pointer::from_static("/bar");
    /// assert!(ptr.resolve(&data).unwrap_err().is_not_found());
    /// ```
    NotFound {
        /// Position (index) of the token which was not found.
        position: usize,
        /// Offset of the pointer starting with the `Token` which was not found.
        offset: usize,
    },
    /// `Pointer` could not be resolved as the path contains a scalar value
    /// before fully exhausting the path.
    ///
    /// ## Example
    /// ```rust
    /// # use serde_json::json;
    /// # use jsonptr::Pointer;
    /// let data = json!({ "foo": "bar" });
    /// let ptr = Pointer::from_static("/foo/unreachable");
    /// let err = ptr.resolve(&data).unwrap_err();
    /// assert!(err.is_unreachable());
    /// ```
    Unreachable {
        /// Position (index) of the token which was unreachable.
        position: usize,
        /// Offset of the pointer which was unreachable.
        offset: usize,
    },
}
impl Error {
    /// Offset of the partial pointer starting with the token which caused the
    /// error.
    pub fn offset(&self) -> usize {
        match self {
            Self::FailedToParseIndex { offset, .. }
            | Self::OutOfBounds { offset, .. }
            | Self::NotFound { offset, .. }
            | Self::Unreachable { offset, .. } => *offset,
        }
    }
    /// Position (index) of the token which caused the error.
    pub fn position(&self) -> usize {
        match self {
            Self::FailedToParseIndex { position, .. }
            | Self::OutOfBounds { position, .. }
            | Self::NotFound { position, .. }
            | Self::Unreachable { position, .. } => *position,
        }
    }
    /// Returns `true` if this error is `Unreachable`; otherwise returns
    /// `false`.
    pub fn is_unreachable(&self) -> bool {
        matches!(self, Self::Unreachable { .. })
    }
    /// Returns `true` if this error is `NotFound`; otherwise returns
    /// `false`.
    pub fn is_not_found(&self) -> bool {
        matches!(self, Self::NotFound { .. })
    }
    /// Returns `true` if this error is `OutOfBounds`; otherwise returns
    /// `false`.
    pub fn is_out_of_bounds(&self) -> bool {
        matches!(self, Self::OutOfBounds { .. })
    }
    /// Returns `true` if this error is `FailedToParseIndex`; otherwise returns
    /// `false`.
    pub fn is_failed_to_parse_index(&self) -> bool {
        matches!(self, Self::FailedToParseIndex { .. })
    }
}
impl Diagnostic for Error {
type Subject = PointerBuf;
fn url() -> &'static str {
diagnostic_url!(enum assign::Error)
}
fn labels(&self, origin: &Self::Subject) -> Option<Box<dyn Iterator<Item = Label>>> {
let position = self.position();
let token = origin.get(position)?;
let offset = if self.offset() + 1 < origin.as_str().len() {
self.offset() + 1
} else {
self.offset()
};
let len = token.encoded().len();
let text = match self {
Error::FailedToParseIndex { .. } => "not an array index".to_string(),
Error::OutOfBounds { source, .. } => source.to_string(),
Error::NotFound { .. } => "not found in value".to_string(),
Error::Unreachable { .. } => "unreachable".to_string(),
};
Some(Box::new(once(Label::new(text, offset, len))))
}
}
impl core::fmt::Display for Error {
    /// Writes a human-readable description of the resolution failure.
    fn fmt(&self, f: &mut core::fmt::Formatter<'_>) -> core::fmt::Result {
        // every variant carries the byte offset of the offending token
        let offset = self.offset();
        match self {
            Self::FailedToParseIndex { .. } => write!(
                f,
                "resolve failed: json pointer token at offset {offset} failed to parse as an index"
            ),
            Self::OutOfBounds { .. } => write!(
                f,
                "resolve failed: json pointer token at offset {offset} is out of bounds"
            ),
            Self::NotFound { .. } => write!(
                f,
                "resolve failed: json pointer token at {offset} was not found in value"
            ),
            Self::Unreachable { .. } => write!(
                f,
                "resolve failed: json pointer token at {offset} is unreachable (previous token resolved to a scalar or null value)"
            ),
        }
    }
}
#[cfg(feature = "std")]
impl std::error::Error for Error {
    /// Exposes the underlying parse/bounds error for the variants that carry
    /// one; `NotFound` and `Unreachable` have no source.
    fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
        match self {
            Self::FailedToParseIndex { source, .. } => Some(source),
            Self::OutOfBounds { source, .. } => Some(source),
            Self::NotFound { .. } | Self::Unreachable { .. } => None,
        }
    }
}
#[cfg(feature = "json")]
mod json {
    //! [`Resolve`] and [`ResolveMut`] implementations for `serde_json::Value`.
    use super::{parse_index, Error, Pointer, Resolve, ResolveMut};
    use serde_json::Value;
    impl Resolve for Value {
        type Value = Value;
        type Error = Error;
        /// Walks `ptr` token by token, descending through arrays and objects.
        fn resolve(&self, mut ptr: &Pointer) -> Result<&Value, Self::Error> {
            // byte offset of the current token within the original pointer
            let mut offset = 0;
            // index of the current token
            let mut position = 0;
            let mut value = self;
            while let Some((token, rem)) = ptr.split_front() {
                let tok_len = token.encoded().len();
                ptr = rem;
                value = match value {
                    Value::Array(array) => {
                        // delegate parsing & bounds checking to the shared
                        // helper so all implementations construct errors
                        // identically (previously inlined here)
                        let idx = parse_index(token, array.len(), position, offset)?;
                        Ok(&array[idx])
                    }
                    Value::Object(map) => map
                        .get(token.decoded().as_ref())
                        .ok_or(Error::NotFound { position, offset }),
                    // found a leaf node but the pointer hasn't been exhausted
                    _ => Err(Error::Unreachable { position, offset }),
                }?;
                // advance past the '/' separator plus the encoded token
                offset += 1 + tok_len;
                position += 1;
            }
            Ok(value)
        }
    }
    impl ResolveMut for Value {
        type Value = Value;
        type Error = Error;
        /// Mutable counterpart of [`Resolve::resolve`]; identical traversal.
        fn resolve_mut(&mut self, mut ptr: &Pointer) -> Result<&mut Value, Error> {
            let mut offset = 0;
            let mut position = 0;
            let mut value = self;
            while let Some((token, rem)) = ptr.split_front() {
                let tok_len = token.encoded().len();
                ptr = rem;
                value = match value {
                    Value::Array(array) => {
                        let idx = parse_index(token, array.len(), position, offset)?;
                        Ok(&mut array[idx])
                    }
                    Value::Object(map) => map
                        .get_mut(token.decoded().as_ref())
                        .ok_or(Error::NotFound { position, offset }),
                    // found a leaf node but the pointer hasn't been exhausted
                    _ => Err(Error::Unreachable { position, offset }),
                }?;
                offset += 1 + tok_len;
                position += 1;
            }
            Ok(value)
        }
    }
}
/// Parses `token` as an array index and bounds-checks it against `array_len`.
///
/// `position` (token index) and `offset` (byte offset of the token within the
/// pointer) are used only to populate the returned error variants.
///
/// ## Errors
/// - [`Error::FailedToParseIndex`] if `token` is not a valid array index
/// - [`Error::OutOfBounds`] if the index is out of bounds for `array_len`
///   (per [`Index::for_len`](crate::index::Index::for_len))
fn parse_index(
    token: Token,
    array_len: usize,
    position: usize,
    offset: usize,
) -> Result<usize, Error> {
    token
        .to_index()
        .map_err(|source| Error::FailedToParseIndex {
            position,
            offset,
            source,
        })?
        .for_len(array_len)
        .map_err(|source| Error::OutOfBounds {
            position,
            offset,
            source,
        })
}
#[cfg(feature = "toml")]
mod toml {
    //! [`Resolve`] and [`ResolveMut`] implementations for `toml::Value`.
    use super::{parse_index, Error, Resolve, ResolveMut};
    use crate::Pointer;
    use toml::Value;
    impl Resolve for Value {
        type Value = Value;
        type Error = Error;
        /// Walks `ptr` token by token, descending through arrays and tables.
        fn resolve(&self, mut ptr: &Pointer) -> Result<&Value, Self::Error> {
            // byte offset of the current token within the original pointer
            let mut offset = 0;
            // index of the current token
            let mut position = 0;
            let mut value = self;
            while let Some((token, rem)) = ptr.split_front() {
                let tok_len = token.encoded().len();
                ptr = rem;
                value = match value {
                    Value::Array(array) => {
                        // delegate parsing & bounds checking to the shared
                        // helper so all implementations construct errors
                        // identically (previously inlined here)
                        let idx = parse_index(token, array.len(), position, offset)?;
                        Ok(&array[idx])
                    }
                    Value::Table(table) => table
                        .get(token.decoded().as_ref())
                        .ok_or(Error::NotFound { position, offset }),
                    // found a leaf node but the pointer hasn't been exhausted
                    _ => Err(Error::Unreachable { position, offset }),
                }?;
                // advance past the '/' separator plus the encoded token
                offset += 1 + tok_len;
                position += 1;
            }
            Ok(value)
        }
    }
    impl ResolveMut for Value {
        type Value = Value;
        type Error = Error;
        /// Mutable counterpart of [`Resolve::resolve`]; identical traversal.
        fn resolve_mut(&mut self, mut ptr: &Pointer) -> Result<&mut Value, Error> {
            let mut offset = 0;
            let mut position = 0;
            let mut value = self;
            while let Some((token, rem)) = ptr.split_front() {
                let tok_len = token.encoded().len();
                ptr = rem;
                value = match value {
                    Value::Array(array) => {
                        let idx = parse_index(token, array.len(), position, offset)?;
                        Ok(&mut array[idx])
                    }
                    Value::Table(table) => table
                        .get_mut(token.decoded().as_ref())
                        .ok_or(Error::NotFound { position, offset }),
                    // found a leaf node but the pointer hasn't been exhausted
                    _ => Err(Error::Unreachable { position, offset }),
                }?;
                offset += 1 + tok_len;
                position += 1;
            }
            Ok(value)
        }
    }
}
#[cfg(test)]
mod tests {
use super::{Error, Resolve, ResolveMut};
use crate::{
index::{OutOfBoundsError, ParseIndexError},
Pointer,
};
use core::fmt;
#[test]
fn resolve_error_is_unreachable() {
let err = Error::FailedToParseIndex {
position: 0,
offset: 0,
source: ParseIndexError::InvalidInteger("invalid".parse::<usize>().unwrap_err()),
};
assert!(!err.is_unreachable());
let err = Error::OutOfBounds {
position: 0,
offset: 0,
source: OutOfBoundsError {
index: 1,
length: 0,
},
};
assert!(!err.is_unreachable());
let err = Error::NotFound {
position: 0,
offset: 0,
};
assert!(!err.is_unreachable());
let err = Error::Unreachable {
position: 0,
offset: 0,
};
assert!(err.is_unreachable());
}
#[test]
fn resolve_error_is_not_found() {
let err = Error::FailedToParseIndex {
position: 0,
offset: 0,
source: ParseIndexError::InvalidInteger("invalid".parse::<usize>().unwrap_err()),
};
assert!(!err.is_not_found());
let err = Error::OutOfBounds {
position: 0,
offset: 0,
source: OutOfBoundsError {
index: 1,
length: 0,
},
};
assert!(!err.is_not_found());
let err = Error::NotFound {
position: 0,
offset: 0,
};
assert!(err.is_not_found());
let err = Error::Unreachable {
position: 0,
offset: 0,
};
assert!(!err.is_not_found());
}
#[test]
fn resolve_error_is_out_of_bounds() {
let err = Error::FailedToParseIndex {
position: 0,
offset: 0,
source: ParseIndexError::InvalidInteger("invalid".parse::<usize>().unwrap_err()),
};
assert!(!err.is_out_of_bounds());
let err = Error::OutOfBounds {
position: 0,
offset: 0,
source: OutOfBoundsError {
index: 1,
length: 0,
},
};
assert!(err.is_out_of_bounds());
let err = Error::NotFound {
position: 0,
offset: 0,
};
assert!(!err.is_out_of_bounds());
let err = Error::Unreachable {
position: 0,
offset: 0,
};
assert!(!err.is_out_of_bounds());
}
#[test]
fn resolve_error_is_failed_to_parse_index() {
let err = Error::FailedToParseIndex {
position: 0,
offset: 0,
source: ParseIndexError::InvalidInteger("invalid".parse::<usize>().unwrap_err()),
};
assert!(err.is_failed_to_parse_index());
let err = Error::OutOfBounds {
position: 0,
offset: 0,
source: OutOfBoundsError {
index: 1,
length: 0,
},
};
assert!(!err.is_failed_to_parse_index());
let err = Error::NotFound {
position: 0,
offset: 0,
};
assert!(!err.is_failed_to_parse_index());
let err = Error::Unreachable {
position: 0,
offset: 0,
};
assert!(!err.is_failed_to_parse_index());
}
/*
╔═══════════════════════════════════════════════════╗
║ json ║
╚═══════════════════════════════════════════════════╝
*/
#[test]
#[cfg(feature = "json")]
// Exercises resolution against every RFC 6901 special-character escape
// (`~0`, `~1`) and odd object keys, plus the `Unreachable` / `NotFound`
// error paths.
fn resolve_json() {
    use serde_json::json;
    let data = &json!({
        "array": ["bar", "baz"],
        "object": {
            "object": {"baz": {"qux": "quux"}},
            "strings": ["zero", "one", "two"],
            "nothing": null,
            "bool": true,
            "objects": [{"field": "zero"}, {"field": "one"}, {"field": "two"}]
        },
        "": 0,
        "a/b": 1,
        "c%d": 2,
        "e^f": 3,
        "g|h": 4,
        "i\\j": 5,
        "k\"l": 6,
        " ": 7,
        "m~n": 8
    });
    Test::all([
        // 0
        Test {
            ptr: "",
            data,
            expected: Ok(data),
        },
        // 1
        Test {
            ptr: "/array",
            data,
            expected: Ok(data.get("array").unwrap()), // ["bar", "baz"]
        },
        // 2
        Test {
            ptr: "/array/0",
            data,
            expected: Ok(data.get("array").unwrap().get(0).unwrap()), // "bar"
        },
        // 3
        Test {
            ptr: "/a~1b",
            data,
            expected: Ok(data.get("a/b").unwrap()), // 1
        },
        // 4
        Test {
            ptr: "/c%d",
            data,
            expected: Ok(data.get("c%d").unwrap()), // 2
        },
        // 5
        Test {
            ptr: "/e^f",
            data,
            expected: Ok(data.get("e^f").unwrap()), // 3
        },
        // 6
        Test {
            ptr: "/g|h",
            data,
            expected: Ok(data.get("g|h").unwrap()), // 4
        },
        // 7
        Test {
            ptr: "/i\\j",
            data,
            expected: Ok(data.get("i\\j").unwrap()), // 5
        },
        // 8
        Test {
            ptr: "/k\"l",
            data,
            expected: Ok(data.get("k\"l").unwrap()), // 6
        },
        // 9
        Test {
            ptr: "/ ",
            data,
            expected: Ok(data.get(" ").unwrap()), // 7
        },
        // 10
        Test {
            ptr: "/m~0n",
            data,
            expected: Ok(data.get("m~n").unwrap()), // 8
        },
        // 11
        Test {
            ptr: "/object/bool/unresolvable",
            data,
            expected: Err(Error::Unreachable {
                position: 2,
                offset: 12,
            }),
        },
        // 12
        Test {
            ptr: "/object/not_found",
            data,
            expected: Err(Error::NotFound {
                position: 1,
                offset: 7,
            }),
        },
    ]);
}
/*
╔═══════════════════════════════════════════════════╗
║ toml ║
╚═══════════════════════════════════════════════════╝
*/
#[test]
#[cfg(feature = "toml")]
// Mirror of `resolve_json` for `toml::Value` (TOML has no null, so the
// "nothing" entry is absent).
fn resolve_toml() {
    use toml::{toml, Value};
    let data = &Value::Table(toml! {
        "array" = ["bar", "baz"]
        "object" = {
            "object" = {"baz" = {"qux" = "quux"}},
            "strings" = ["zero", "one", "two"],
            "bool" = true,
            "objects" = [{"field" = "zero"}, {"field" = "one"}, {"field" = "two"}]
        }
        "" = 0
        "a/b" = 1
        "c%d" = 2
        "e^f" = 3
        "g|h" = 4
        "i\\j" = 5
        "k\"l" = 6
        " " = 7
        "m~n" = 8
    });
    Test::all([
        Test {
            ptr: "",
            data,
            expected: Ok(data),
        },
        Test {
            ptr: "/array",
            data,
            expected: Ok(data.get("array").unwrap()), // ["bar", "baz"]
        },
        Test {
            ptr: "/array/0",
            data,
            expected: Ok(data.get("array").unwrap().get(0).unwrap()), // "bar"
        },
        Test {
            ptr: "/a~1b",
            data,
            expected: Ok(data.get("a/b").unwrap()), // 1
        },
        Test {
            ptr: "/c%d",
            data,
            expected: Ok(data.get("c%d").unwrap()), // 2
        },
        Test {
            ptr: "/e^f",
            data,
            expected: Ok(data.get("e^f").unwrap()), // 3
        },
        Test {
            ptr: "/g|h",
            data,
            expected: Ok(data.get("g|h").unwrap()), // 4
        },
        Test {
            ptr: "/i\\j",
            data,
            expected: Ok(data.get("i\\j").unwrap()), // 5
        },
        Test {
            ptr: "/k\"l",
            data,
            expected: Ok(data.get("k\"l").unwrap()), // 6
        },
        Test {
            ptr: "/ ",
            data,
            expected: Ok(data.get(" ").unwrap()), // 7
        },
        Test {
            ptr: "/m~0n",
            data,
            expected: Ok(data.get("m~n").unwrap()), // 8
        },
        Test {
            ptr: "/object/bool/unresolvable",
            data,
            expected: Err(Error::Unreachable {
                position: 2,
                offset: 12,
            }),
        },
        Test {
            ptr: "/object/not_found",
            data,
            expected: Err(Error::NotFound {
                position: 1,
                offset: 7,
            }),
        },
    ]);
}
/// A single resolution test case: resolving `ptr` against `data` must yield
/// `expected` through both `Resolve` and `ResolveMut`.
struct Test<'v, V> {
    ptr: &'static str,
    expected: Result<&'v V, Error>,
    data: &'v V,
}
impl<'v, V> Test<'v, V>
where
    V: Resolve<Value = V, Error = Error>
        + ResolveMut<Value = V, Error = Error>
        + Clone
        + PartialEq
        + fmt::Display
        + fmt::Debug,
{
    /// Runs every test case in order.
    fn all(tests: impl IntoIterator<Item = Test<'v, V>>) {
        tests.into_iter().enumerate().for_each(|(i, t)| t.run(i));
    }
    /// Executes this case against both the immutable and mutable resolvers.
    fn run(self, _i: usize) {
        // (removed a dead `_ = self;` statement that preceded this binding)
        let Test {
            ptr,
            data,
            expected,
        } = self;
        let ptr = Pointer::from_static(ptr);
        // cloning the data & expected to make comparison easier
        let mut data = data.clone();
        let expected = expected.cloned();
        // testing Resolve
        let res = data.resolve(ptr).cloned();
        assert_eq!(&res, &expected);
        // testing ResolveMut
        let res = data.resolve_mut(ptr).cloned();
        assert_eq!(&res, &expected);
    }
}
}

520
vendor/jsonptr/src/token.rs vendored Normal file
View File

@@ -0,0 +1,520 @@
use core::str::Split;
use crate::index::{Index, ParseIndexError};
use alloc::{
borrow::Cow,
fmt,
string::{String, ToString},
vec::Vec,
};
/// RFC 6901 escaped representation of `~` within a token.
const ENCODED_TILDE: &[u8] = b"~0";
/// RFC 6901 escaped representation of `/` within a token.
const ENCODED_SLASH: &[u8] = b"~1";
/// Escape prefix byte (`~`) introducing an encoded character.
const ENC_PREFIX: u8 = b'~';
/// Byte following `~` which encodes a literal `~`.
const TILDE_ENC: u8 = b'0';
/// Byte following `~` which encodes a literal `/`.
const SLASH_ENC: u8 = b'1';
/*
░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░
╔══════════════════════════════════════════════════════════════════════════════╗
║ ║
║ Token ║
║ ¯¯¯¯¯¯¯ ║
╚══════════════════════════════════════════════════════════════════════════════╝
░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░
*/
/// A `Token` is a segment of a JSON [`Pointer`](crate::Pointer), preceded by `'/'` (`%x2F`).
///
/// `Token`s can represent a key in a JSON object or an index in an array.
///
/// - Indexes should not contain leading zeros.
/// - When dealing with arrays or path expansion for assignment, `"-"` represents
///   the next, non-existent index in a JSON array.
#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub struct Token<'a> {
    // RFC 6901 encoded form; decoding happens lazily in `decoded()`
    inner: Cow<'a, str>,
}
impl<'a> Token<'a> {
    /// Constructs a `Token` from an RFC 6901 encoded string.
    ///
    /// This is like [`Self::from_encoded`], except that no validation is
    /// performed on the input string.
    ///
    /// ## Safety
    /// Input string must be RFC 6901 encoded.
    pub(crate) unsafe fn from_encoded_unchecked(inner: impl Into<Cow<'a, str>>) -> Self {
        Self {
            inner: inner.into(),
        }
    }
    /// Constructs a `Token` from an RFC 6901 encoded string.
    ///
    /// To be valid, the string must not contain any `/` characters, and any `~`
    /// characters must be followed by either `0` or `1`.
    ///
    /// This function does not allocate.
    ///
    /// # Examples
    ///
    /// ```
    /// # use jsonptr::Token;
    /// assert_eq!(Token::from_encoded("~1foo~1~0bar").unwrap().decoded(), "/foo/~bar");
    /// let err = Token::from_encoded("foo/oops~bar").unwrap_err();
    /// assert_eq!(err.offset, 3);
    /// ```
    ///
    /// ## Errors
    /// Returns `InvalidEncodingError` if the input string is not a valid RFC
    /// 6901 (`~` must be followed by `0` or `1`)
    pub fn from_encoded(s: &'a str) -> Result<Self, EncodingError> {
        let mut escaped = false;
        for (offset, b) in s.bytes().enumerate() {
            match b {
                b'/' => {
                    return Err(EncodingError {
                        offset,
                        source: InvalidEncoding::Slash,
                    })
                }
                ENC_PREFIX => {
                    // fix: `~` immediately following another `~` means the
                    // previous `~` was not followed by `0` or `1`; previously
                    // inputs such as "~~0" were incorrectly accepted
                    if escaped {
                        return Err(EncodingError {
                            offset,
                            source: InvalidEncoding::Tilde,
                        });
                    }
                    escaped = true;
                }
                TILDE_ENC | SLASH_ENC if escaped => {
                    escaped = false;
                }
                _ => {
                    if escaped {
                        return Err(EncodingError {
                            offset,
                            source: InvalidEncoding::Tilde,
                        });
                    }
                }
            }
        }
        if escaped {
            // fix: a trailing `~` has nothing following it, which is a tilde
            // encoding error — this branch previously (mis)reported
            // `InvalidEncoding::Slash`
            return Err(EncodingError {
                offset: s.len(),
                source: InvalidEncoding::Tilde,
            });
        }
        Ok(Self { inner: s.into() })
    }
    /// Constructs a `Token` from an arbitrary string.
    ///
    /// If the string contains a `/` or a `~`, then it will be assumed not
    /// encoded, in which case this function will encode it, allocating a new
    /// string.
    ///
    /// If the string is already encoded per RFC 6901, use
    /// [`Self::from_encoded`] instead, otherwise it will end up double-encoded.
    ///
    /// # Examples
    ///
    /// ```
    /// # use jsonptr::Token;
    /// assert_eq!(Token::new("/foo/~bar").encoded(), "~1foo~1~0bar");
    /// ```
    pub fn new(s: impl Into<Cow<'a, str>>) -> Self {
        let s = s.into();
        if let Some(i) = s.bytes().position(|b| b == b'/' || b == b'~') {
            let input = s.as_bytes();
            // we could take advantage of [`Cow::into_owned`] here, but it would
            // mean copying over the entire string, only to overwrite a portion
            // of it... so instead we explicitly allocate a new buffer and copy
            // only the prefix until the first encoded character
            // NOTE: the output is at least as large as the input + 1, so we
            // allocate that much capacity ahead of time
            let mut bytes = Vec::with_capacity(input.len() + 1);
            bytes.extend_from_slice(&input[..i]);
            for &b in &input[i..] {
                match b {
                    b'/' => {
                        bytes.extend_from_slice(ENCODED_SLASH);
                    }
                    b'~' => {
                        bytes.extend_from_slice(ENCODED_TILDE);
                    }
                    other => {
                        bytes.push(other);
                    }
                }
            }
            Self {
                // SAFETY: we started from a valid UTF-8 sequence of bytes,
                // and only replaced some ASCII characters with other two ASCII
                // characters, so the output is guaranteed valid UTF-8.
                inner: Cow::Owned(unsafe { String::from_utf8_unchecked(bytes) }),
            }
        } else {
            Self { inner: s }
        }
    }
    /// Converts into an owned copy of this token.
    ///
    /// If the token is not already owned, this will clone the referenced string
    /// slice.
    pub fn into_owned(self) -> Token<'static> {
        Token {
            inner: Cow::Owned(self.inner.into_owned()),
        }
    }
    /// Extracts an owned copy of this token.
    ///
    /// If the token is not already owned, this will clone the referenced string
    /// slice.
    ///
    /// This method is like [`Self::into_owned`], except it doesn't take
    /// ownership of the original `Token`.
    pub fn to_owned(&self) -> Token<'static> {
        Token {
            inner: Cow::Owned(self.inner.clone().into_owned()),
        }
    }
    /// Returns the encoded string representation of the `Token`.
    ///
    /// # Examples
    ///
    /// ```
    /// # use jsonptr::Token;
    /// assert_eq!(Token::new("~bar").encoded(), "~0bar");
    /// ```
    pub fn encoded(&self) -> &str {
        &self.inner
    }
    /// Returns the decoded string representation of the `Token`.
    ///
    /// # Examples
    ///
    /// ```
    /// # use jsonptr::Token;
    /// assert_eq!(Token::new("~bar").decoded(), "~bar");
    /// ```
    pub fn decoded(&self) -> Cow<'_, str> {
        if let Some(i) = self.inner.bytes().position(|b| b == ENC_PREFIX) {
            let input = self.inner.as_bytes();
            // we could take advantage of [`Cow::into_owned`] here, but it would
            // mean copying over the entire string, only to overwrite a portion
            // of it... so instead we explicitly allocate a new buffer and copy
            // only the prefix until the first encoded character
            // NOTE: the output is at least as large as the input + 1, so we
            // allocate that much capacity ahead of time
            let mut bytes = Vec::with_capacity(input.len() + 1);
            bytes.extend_from_slice(&input[..i]);
            // we start from the first escaped character
            let mut escaped = true;
            for &b in &input[i + 1..] {
                match b {
                    ENC_PREFIX => {
                        escaped = true;
                    }
                    TILDE_ENC if escaped => {
                        bytes.push(b'~');
                        escaped = false;
                    }
                    SLASH_ENC if escaped => {
                        bytes.push(b'/');
                        escaped = false;
                    }
                    other => {
                        bytes.push(other);
                    }
                }
            }
            // SAFETY: we start from a valid String, and only write valid UTF-8
            // byte sequences into it.
            Cow::Owned(unsafe { String::from_utf8_unchecked(bytes) })
        } else {
            // if there are no encoded characters, we don't need to allocate!
            self.inner.clone()
        }
    }
    /// Attempts to parse the given `Token` as an array index.
    ///
    /// Per [RFC 6901](https://datatracker.ietf.org/doc/html/rfc6901#section-4),
    /// the acceptable values are non-negative integers and the `-` character,
    /// which stands for the next, non-existent member after the last array
    /// element.
    ///
    /// ## Examples
    ///
    /// ```
    /// # use jsonptr::{index::Index, Token};
    /// assert_eq!(Token::new("-").to_index(), Ok(Index::Next));
    /// assert_eq!(Token::new("0").to_index(), Ok(Index::Num(0)));
    /// assert_eq!(Token::new("2").to_index(), Ok(Index::Num(2)));
    /// assert!(Token::new("a").to_index().is_err());
    /// assert!(Token::new("-1").to_index().is_err());
    /// ```
    /// ## Errors
    /// Returns [`ParseIndexError`] if the token is not a valid array index.
    pub fn to_index(&self) -> Result<Index, ParseIndexError> {
        self.try_into()
    }
    /// Returns `true` if the `Token` is `-`, which stands for the next array
    /// index.
    ///
    /// See also [`Self::to_index`].
    pub fn is_next(&self) -> bool {
        matches!(self.to_index(), Ok(Index::Next))
    }
}
/// Implements `From<$ty> for Token<'static>` for integer types; their decimal
/// form contains no `/` or `~`, so it is already RFC 6901 encoded.
macro_rules! impl_from_num {
    ($($ty:ty),*) => {
        $(
            impl From<$ty> for Token<'static> {
                fn from(v: $ty) -> Self {
                    // SAFETY: only used for integer types, which are always valid
                    unsafe { Token::from_encoded_unchecked(v.to_string()) }
                }
            }
        )*
    };
}
impl_from_num!(u8, u16, u32, u64, u128, usize, i8, i16, i32, i64, i128, isize);
// String-like conversions go through `Token::new`, which encodes `/` and `~`
// if present (allocating only in that case).
impl<'a> From<&'a str> for Token<'a> {
    fn from(value: &'a str) -> Self {
        Token::new(value)
    }
}
impl<'a> From<&'a String> for Token<'a> {
    fn from(value: &'a String) -> Self {
        Token::new(value)
    }
}
impl From<String> for Token<'static> {
    fn from(value: String) -> Self {
        Token::new(value)
    }
}
impl<'a> From<&Token<'a>> for Token<'a> {
    fn from(value: &Token<'a>) -> Self {
        value.clone()
    }
}
impl alloc::fmt::Display for Token<'_> {
    /// Writes the decoded form of the token.
    fn fmt(&self, f: &mut alloc::fmt::Formatter<'_>) -> alloc::fmt::Result {
        f.write_str(&self.decoded())
    }
}
/*
░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░
╔══════════════════════════════════════════════════════════════════════════════╗
║ ║
║ Tokens ║
║ ¯¯¯¯¯¯¯¯ ║
╚══════════════════════════════════════════════════════════════════════════════╝
░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░
*/
/// An iterator over the [`Token`]s of a [`Pointer`](crate::Pointer).
#[derive(Debug)]
pub struct Tokens<'a> {
    // the encoded pointer body, pre-split on '/'
    inner: Split<'a, char>,
}
impl<'a> Iterator for Tokens<'a> {
    type Item = Token<'a>;
    fn next(&mut self) -> Option<Self::Item> {
        self.inner
            .next()
            // SAFETY: source pointer is encoded
            .map(|s| unsafe { Token::from_encoded_unchecked(s) })
    }
}
impl<'t> Tokens<'t> {
    /// Constructs a `Tokens` iterator over an already-split, encoded pointer.
    pub(crate) fn new(inner: Split<'t, char>) -> Self {
        Self { inner }
    }
}
/*
░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░
╔══════════════════════════════════════════════════════════════════════════════╗
║ ║
║ InvalidEncodingError ║
║ ¯¯¯¯¯¯¯¯¯¯¯¯¯¯¯¯¯¯¯¯¯¯ ║
╚══════════════════════════════════════════════════════════════════════════════╝
░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░
*/
#[deprecated(since = "0.7.0", note = "renamed to `EncodingError`")]
/// Deprecated alias for [`EncodingError`].
pub type InvalidEncodingError = EncodingError;
/*
░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░
╔══════════════════════════════════════════════════════════════════════════════╗
║ ║
║ EncodingError ║
║ ¯¯¯¯¯¯¯¯¯¯¯¯¯¯¯ ║
╚══════════════════════════════════════════════════════════════════════════════╝
░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░
*/
/// A token within a json pointer contained invalid encoding: either an
/// unescaped `/`, or a `~` not followed by `0` or `1` (see
/// [`InvalidEncoding`] for the specific kind).
#[derive(Debug, PartialEq, Eq)]
pub struct EncodingError {
    /// offset of the erroneous character from within the `Token`
    pub offset: usize,
    /// the specific encoding error
    pub source: InvalidEncoding,
}
#[cfg(feature = "std")]
impl std::error::Error for EncodingError {
    fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
        Some(&self.source)
    }
}
impl fmt::Display for EncodingError {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(
            f,
            "token contains invalid encoding at offset {}",
            self.offset
        )
    }
}
/*
░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░
╔══════════════════════════════════════════════════════════════════════════════╗
║ ║
║ InvalidEncoding ║
║ ¯¯¯¯¯¯¯¯¯¯¯¯¯¯¯¯¯ ║
╚══════════════════════════════════════════════════════════════════════════════╝
░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░
*/
/// Represents the specific type of invalid encoding error.
#[derive(Debug, PartialEq, Eq, Clone, Copy)]
pub enum InvalidEncoding {
    /// `~` not followed by `0` or `1`
    Tilde,
    /// non-encoded `/` found in token
    Slash,
}
impl fmt::Display for InvalidEncoding {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        match self {
            InvalidEncoding::Tilde => write!(f, "tilde (~) not followed by 0 or 1"),
            InvalidEncoding::Slash => write!(f, "slash (/) found in token"),
        }
    }
}
#[cfg(feature = "std")]
impl std::error::Error for InvalidEncoding {}
/*
░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░
╔══════════════════════════════════════════════════════════════════════════════╗
║ ║
║ Tests ║
║ ¯¯¯¯¯¯¯ ║
╚══════════════════════════════════════════════════════════════════════════════╝
░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░░
*/
#[cfg(test)]
mod tests {
    use crate::Pointer;
    use super::*;
    use quickcheck_macros::quickcheck;
    // `From` impls encode string inputs and pass integers through verbatim.
    #[test]
    fn from() {
        assert_eq!(Token::from("/").encoded(), "~1");
        assert_eq!(Token::from("~/").encoded(), "~0~1");
        assert_eq!(Token::from(34u32).encoded(), "34");
        assert_eq!(Token::from(34u64).encoded(), "34");
        assert_eq!(Token::from(String::from("foo")).encoded(), "foo");
        assert_eq!(Token::from(&Token::new("foo")).encoded(), "foo");
    }
    #[test]
    fn to_index() {
        assert_eq!(Token::new("-").to_index(), Ok(Index::Next));
        assert_eq!(Token::new("0").to_index(), Ok(Index::Num(0)));
        assert_eq!(Token::new("2").to_index(), Ok(Index::Num(2)));
        assert!(Token::new("a").to_index().is_err());
        assert!(Token::new("-1").to_index().is_err());
    }
    // `new` treats its input as unencoded, so "~1" gets double-encoded.
    #[test]
    fn new() {
        assert_eq!(Token::new("~1").encoded(), "~01");
        assert_eq!(Token::new("a/b").encoded(), "a~1b");
    }
    #[test]
    fn from_encoded() {
        assert_eq!(Token::from_encoded("~1").unwrap().encoded(), "~1");
        assert_eq!(Token::from_encoded("~0~1").unwrap().encoded(), "~0~1");
        let t = Token::from_encoded("a~1b").unwrap();
        assert_eq!(t.decoded(), "a/b");
        assert!(Token::from_encoded("a/b").is_err());
        assert!(Token::from_encoded("a~a").is_err());
    }
    #[test]
    fn into_owned() {
        let token = Token::from_encoded("foo~0").unwrap().into_owned();
        assert_eq!(token.encoded(), "foo~0");
    }
    // Round-trip property: encoding then re-parsing yields an equal token.
    #[quickcheck]
    fn encode_decode(s: String) -> bool {
        let token = Token::new(s);
        let decoded = Token::from_encoded(token.encoded()).unwrap();
        token == decoded
    }
    #[test]
    fn tokens() {
        let pointer = Pointer::from_static("/a/b/c");
        let tokens: Vec<Token> = pointer.tokens().collect();
        assert_eq!(tokens, unsafe {
            vec![
                Token::from_encoded_unchecked("a"),
                Token::from_encoded_unchecked("b"),
                Token::from_encoded_unchecked("c"),
            ]
        });
    }
    #[test]
    fn is_next() {
        let token = Token::new("-");
        assert!(token.is_next());
        let token = Token::new("0");
        assert!(!token.is_next());
        let token = Token::new("a");
        assert!(!token.is_next());
        let token = Token::new("");
        assert!(!token.is_next());
    }
}

Binary file not shown.

View File

View File

@@ -0,0 +1 @@
{"name":"jsonptr","vers":"0.7.1","deps":[{"name":"miette","req":"^7.4.0","features":["fancy"],"optional":true,"default_features":true,"target":null,"kind":"normal","registry":"https://github.com/rust-lang/crates.io-index","package":null,"public":null,"artifact":null,"bindep_target":null,"lib":false},{"name":"serde","req":"^1.0.203","features":["alloc"],"optional":true,"default_features":true,"target":null,"kind":"normal","registry":"https://github.com/rust-lang/crates.io-index","package":null,"public":null,"artifact":null,"bindep_target":null,"lib":false},{"name":"serde_json","req":"^1.0.119","features":["alloc"],"optional":true,"default_features":true,"target":null,"kind":"normal","registry":"https://github.com/rust-lang/crates.io-index","package":null,"public":null,"artifact":null,"bindep_target":null,"lib":false},{"name":"toml","req":"^0.8","features":[],"optional":true,"default_features":true,"target":null,"kind":"normal","registry":"https://github.com/rust-lang/crates.io-index","package":null,"public":null,"artifact":null,"bindep_target":null,"lib":false},{"name":"quickcheck","req":"^1.0.3","features":[],"optional":false,"default_features":true,"target":null,"kind":"dev","registry":"https://github.com/rust-lang/crates.io-index","package":null,"public":null,"artifact":null,"bindep_target":null,"lib":false},{"name":"quickcheck_macros","req":"^1.0.0","features":[],"optional":false,"default_features":true,"target":null,"kind":"dev","registry":"https://github.com/rust-lang/crates.io-index","package":null,"public":null,"artifact":null,"bindep_target":null,"lib":false},{"name":"syn","req":"^1.0.109","features":[],"optional":true,"default_features":true,"target":"cfg(any())","kind":"normal","registry":"https://github.com/rust-lang/crates.io-index","package":null,"public":null,"artifact":null,"bindep_target":null,"lib":false}],"features":{"assign":[],"default":["std","serde","json","resolve","assign","delete"],"delete":["resolve"],"json":["dep:serde_json","serde"],"miet
te":["dep:miette","std"],"resolve":[],"std":["serde/std","serde_json?/std"],"toml":["dep:toml","serde","std"]},"features2":null,"cksum":"1d1ad9eec21cdca73a22f502a346defc51f573cb24dc3bff5c09d6a809357444","yanked":null,"links":null,"rust_version":null,"v":2}

Binary file not shown.