Compare commits

No commits in common. "main" and "2.1.0" have entirely different histories.
main...2.1.0

127 changed files with 1026 additions and 6269 deletions

View File

@ -1,14 +1,17 @@
BasedOnStyle: LLVM
---
BasedOnStyle: LLVM
AccessModifierOffset: -2
#AlignConsecutiveAssignments: true
#AlignConsecutiveDeclarations: true
AllowShortFunctionsOnASingleLine: Inline
BreakBeforeBraces: Linux
ColumnLimit: 0
ColumnLimit: 0
ConstructorInitializerAllOnOneLineOrOnePerLine: true
IndentWidth: 4
IndentWidth: 4
IndentPPDirectives: AfterHash
ObjCBlockIndentWidth: 0
SpaceAfterCStyleCast: true
TabWidth: 4
TabWidth: 4
AccessModifierOffset: -4
UseTab: ForIndentation
UseTab: ForIndentation
...

View File

@ -1 +0,0 @@
1

.distro/.gitignore
View File

@ -1,4 +0,0 @@
/main.fmf
/plans/main.fmf
/tests/main.fmf
*.tar.gz

View File

@ -1,61 +0,0 @@
Name: json-schema-validator
Summary: JSON schema validator for JSON for Modern C++
Version: 0.0.0
Release: %autorelease
License: MIT
URL: https://github.com/pboettch/json-schema-validator
Source: https://github.com/pboettch/json-schema-validator/archive/refs/tags/v%{version}.tar.gz
BuildRequires: ninja-build
BuildRequires: cmake
BuildRequires: gcc-c++
BuildRequires: json-devel
%description
Json schema validator library for C++ projects using nlohmann/json
%package devel
Summary: Development files for JSON schema validator
Requires: json-schema-validator%{?_isa} = %{version}-%{release}
Requires: json-devel
%description devel
Json schema validator development files for C++ projects using nlohmann/json
%prep
%autosetup -n json-schema-validator-%{version}
%build
%cmake \
-DJSON_VALIDATOR_SHARED_LIBS=ON \
-DJSON_VALIDATOR_INSTALL=ON \
-DJSON_VALIDATOR_BUILD_EXAMPLES=OFF \
-DJSON_VALIDATOR_BUILD_TESTS=ON
%cmake_build
%install
%cmake_install
%check
%ctest
%files
%doc README.md
%license LICENSE
%{_libdir}/libnlohmann_json_validator.so.*
%files devel
%{_libdir}/libnlohmann_json_validator.so
%{_includedir}/nlohmann/json-schema.hpp
%{_libdir}/cmake/nlohmann_json_schema_validator
%changelog
%autochangelog

View File

@ -1,3 +0,0 @@
Filters = [
"unknown-key",
]

View File

@ -1,12 +0,0 @@
summary:
Basic importing tests
prepare+:
- name: Include minimum fetching packages
how: install
package:
- git
discover+:
how: fmf
filter: "tag: import"
execute:
how: tmt

View File

@ -1,4 +0,0 @@
discover:
how: fmf
dist-git-source: true
path: .distro

View File

@ -1,14 +0,0 @@
summary:
Perform rpmlint and rpminspect tests
prepare:
- name: Download the source rpm
how: shell
script: cd /tmp && curl -O ${PACKIT_SRPM_URL}
- name: Download rpm packages
how: shell
script: cd /tmp && dnf download ${PACKIT_COPR_RPMS}
discover+:
how: fmf
filter: "tag: rpmlint"
execute:
how: tmt

View File

@ -1,7 +0,0 @@
summary:
Basic smoke tests
discover+:
how: fmf
filter: "tag: smoke"
execute:
how: tmt

View File

@ -1,15 +0,0 @@
# This is a simple project that tests using cmake to load the installed libraries
cmake_minimum_required(VERSION 3.14)
project(test_fetch_content LANGUAGES CXX)
FetchContent_Declare(nlohmann_json_schema_validator
GIT_REPOSITORY https://github.com/pboettch/json-schema-validator
GIT_TAG main
)
FetchContent_MakeAvailable(nlohmann_json_schema_validator)
if (NOT TARGET nlohmann_json_schema_validator::validator)
message(FATAL_ERROR "Missing target nlohmann_json_schema_validator::validator")
endif ()

View File

@ -1,11 +0,0 @@
# This is a simple project that tests using cmake to load the installed libraries
cmake_minimum_required(VERSION 3.14)
project(test_find_package LANGUAGES CXX)
set(CMAKE_FIND_DEBUG_MODE ON)
find_package(nlohmann_json_schema_validator REQUIRED)
if (NOT TARGET nlohmann_json_schema_validator::validator)
message(FATAL_ERROR "Missing target nlohmann_json_schema_validator::validator")
endif ()

View File

@ -1,11 +0,0 @@
# Common test variables
tag:
- import
tier: 0
path: /tests/import
# Define tests
/find_package:
test: ./test_find_package.sh
/FetchContent:
test: ./test_FetchContent.sh

View File

@ -1,4 +0,0 @@
#!/bin/bash -eux
tmp_dir=$(mktemp -d)
cmake -S ./FetchContent -B ${tmp_dir}

View File

@ -1,4 +0,0 @@
#!/bin/bash -eux
tmp_dir=$(mktemp -d)
cmake -S ./find_package -B ${tmp_dir}

View File

@ -1,13 +0,0 @@
# Common test variables
tag:
- rpmlint
tier: 0
path: /
# Define tests
/rpmlint:
summary: Rpmlint spec and rpmfiles
test: rpmlint -c packit.toml -r json-schema-validator.rpmlintrc ./*.spec /tmp/*.rpm
/rpminspect-rpms:
summary: Rpminspect the rpms
test: ls /tmp/*.rpm | xargs -L1 rpminspect-fedora -E metadata,disttag

View File

@ -1,9 +0,0 @@
# Common test variables
tag:
- smoke
tier: 0
path: /
# Define tests
/version:
test: echo "TODO: Write a minimum working example"

View File

@ -1,31 +0,0 @@
name: release
run-name: Release
on:
push:
tags:
- "v[0-9]+.[0-9]+.[0-9]+"
- "v[0-9]+.[0-9]+.[0-9]+-rc[0-9]+"
jobs:
tests:
uses: ./.github/workflows/test.yaml
secrets: inherit
build_conan:
runs-on: ubuntu-latest
container: ghcr.io/nlohmann/json-ci:v2.4.0
steps:
- run: echo "🎉 The job was automatically triggered by a ${{ github.event_name }} event."
- run: echo "🐧 This job is now running on a ${{ runner.os }} server hosted by GitHub."
- run: echo "🔎 Branch name is ${{ github.ref }} and repository is ${{ github.repository }}."
- name: Clone json-schema-validator
uses: actions/checkout@v3
- uses: actions/setup-python@v4
with:
python-version: '3.10'
- run: python -m pip install --upgrade conan
- run: conan config init
- run: conan profile update settings.compiler.libcxx=libstdc++11 default
- name: conan create package
run: conan create .

View File

@ -1,94 +0,0 @@
name: test
run-name: Tests
on:
push:
branches: [ main ]
pull_request:
branches: [ main ]
# Make it able to be used in other workflows
workflow_call:
defaults:
run:
shell: bash
jobs:
pre-commit:
name: Check pre-commit
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- uses: actions/setup-python@v4
- uses: pre-commit/action@v3.0.0
test:
name: Run ctests
needs: [ pre-commit ]
continue-on-error: ${{ matrix.experimental }}
strategy:
fail-fast: false
matrix:
toolchain: [ gcc, llvm, intel ]
json_version: [ v3.11.2, v3.8.0 ]
experimental: [ false ]
include:
- toolchain: llvm
compiler_version: 15
- toolchain: gcc
compiler_version: latest
env:
NLOHMANN_JSON_VERSION: ${{ matrix.json_version }}
runs-on: ubuntu-latest
container: ghcr.io/nlohmann/json-ci:v2.4.0
steps:
- name: Activate Intel compilers
# Not elegant, it will propagate all environment variable.
# Intel does not provide a way to output the environment variables to a file
# Note: PATH needs to be exported to GITHUB_PATH otherwise it can be overwritten
run: |
source /opt/intel/oneapi/setvars.sh
printenv >> $GITHUB_ENV
echo $PATH >> $GITHUB_PATH
if: matrix.toolchain == 'intel'
- name: Setup gcc toolchain
run: |
update-alternatives --install /usr/bin/g++ g++ $(which g++-${{ matrix.compiler_version }}) 999
if: matrix.compiler_version && matrix.toolchain == 'gcc'
- name: Setup llvm toolchain
run: |
update-alternatives --install /usr/bin/clang++ clang++ $(which clang++-${{ matrix.compiler_version }}) 999
if: matrix.compiler_version && matrix.toolchain == 'llvm'
- uses: actions/checkout@v3
# container version is < 3.25 which does not have workflows
- name: Get a working cmake version
uses: lukka/get-cmake@v3.25.2
- name: Run CMake ${{ matrix.toolchain }}-ci workflow with nlohmann/json version ${{ matrix.json_version }}
uses: lukka/run-cmake@v10.5
with:
workflowPreset: "${{ matrix.toolchain }}-ci"
coverage:
name: Run coverage tests
needs: [ test ]
runs-on: ubuntu-latest
container: ghcr.io/nlohmann/json-ci:v2.4.0
if: ${{ github.event_name == 'push' || github.event_name == 'pull_request' }}
steps:
- uses: actions/checkout@v3
- name: Get latest cmake version
uses: lukka/get-cmake@latest
- name: Get test coverage
uses: lukka/run-cmake@v10.5
with:
workflowPreset: ci-coverage
- name: Get lcov data
uses: danielealbano/lcov-action@v3
with:
# Note lcov-action prepends and appends wild-cards *. Account for those
# https://github.com/danielealbano/lcov-action/issues/11
remove_patterns: /test/,/cmake-build*/
- name: Upload coverage to Codecov
uses: codecov/codecov-action@v3
with:
files: coverage.info
verbose: true

.gitignore
View File

@ -1,7 +1,3 @@
build*/
*.sw?
cmake-build-*
venv
env
compile_commands.json
.vs/*

View File

@ -1,79 +0,0 @@
specfile_path: .distro/json-schema-validator.spec
files_to_sync:
- src: .distro/json-schema-validator.spec
dest: json-schema-validator.spec
- .packit.yaml
- src: .distro/json-schema-validator.rpmlintrc
dest: json-schema-validator.rpmlintrc
# tmt setup
- src: .distro/.fmf/
dest: .fmf/
- src: .distro/plans/
dest: plans/
filters:
- "- .distro/plans/main.fmf.dist-git"
- "- .distro/plans/rpmlint.fmf"
- src: .distro/plans/main.fmf.dist-git
dest: plans/main.fmf
upstream_package_name: json-schema-validator
downstream_package_name: json-schema-validator
update_release: false
upstream_tag_template: v{version}
jobs:
- job: copr_build
trigger: pull_request
owner: lecris
project: json-schema-validator
update_release: true
release_suffix: "{PACKIT_RPMSPEC_RELEASE}"
targets:
- fedora-development
- job: tests
trigger: pull_request
targets:
- fedora-development
fmf_path: .distro
- job: copr_build
trigger: commit
branch: main
owner: lecris
project: nightly
# TODO: Remove when upstream issue is resolved
# https://github.com/packit/packit/issues/1924
additional_repos:
- copr://@scikit-build/release
targets:
- fedora-development-x86_64
- fedora-latest-x86_64
- fedora-development-aarch64
- fedora-latest-aarch64
- job: copr_build
trigger: release
owner: lecris
project: release
targets:
- fedora-development-x86_64
- fedora-latest-x86_64
- fedora-development-aarch64
- fedora-latest-aarch64
- job: tests
trigger: commit
branch: main
targets:
- fedora-development
- fedora-latest
fmf_path: .distro
- job: propose_downstream
trigger: release
dist_git_branches:
- fedora-development
- fedora-latest
- job: koji_build
trigger: commit
dist_git_branches:
- fedora-all
- job: bodhi_update
trigger: commit
dist_git_branches:
- fedora-branched

View File

@ -1,33 +0,0 @@
repos:
- repo: https://github.com/Takishima/cmake-pre-commit-hooks
rev: v1.8.1
hooks:
- id: clang-format
args:
- '-i'
- id: clang-tidy
args:
# TODO: Remove when upstream issue is fixed
# https://gitlab.kitware.com/cmake/cmake/-/issues/24827
# https://github.com/Takishima/cmake-pre-commit-hooks/issues/63
- '-Bcmake-build-pre-commit'
- '--preset'
- 'pre-commit'
stages: [ manual ]
- repo: https://github.com/pre-commit/pre-commit-hooks
rev: v4.4.0
hooks:
- id: trailing-whitespace
- id: end-of-file-fixer
- id: check-yaml
- repo: https://github.com/executablebooks/mdformat
rev: 0.7.16
hooks:
- id: mdformat
additional_dependencies:
- mdformat-gfm
- mdformat-tables
- repo: https://github.com/python-jsonschema/check-jsonschema
rev: 0.23.0
hooks:
- id: check-github-workflows

.travis.yml
View File

@ -0,0 +1,69 @@
#########################
# project configuration #
#########################
# C++ project
language: cpp
dist: trusty
sudo: required
group: edge
matrix:
include:
- os: linux
compiler: gcc
env: COMPILER=g++-4.9
addons:
apt:
sources: ['ubuntu-toolchain-r-test']
packages: ['g++-4.9', 'ninja-build']
- os: linux
compiler: gcc
env: COMPILER=g++-5
addons:
apt:
sources: ['ubuntu-toolchain-r-test']
packages: ['g++-5', 'ninja-build']
- os: linux
compiler: gcc
env: COMPILER=g++-6
addons:
apt:
sources: ['ubuntu-toolchain-r-test']
packages: ['g++-6', 'ninja-build']
- os: linux
compiler: gcc
env: COMPILER=g++-7
addons:
apt:
sources: ['ubuntu-toolchain-r-test']
packages: ['g++-7', 'ninja-build']
script:
# get CMake and Ninja (only for systems with brew - macOS)
- |
if [[ (-x $(which brew)) ]]; then
brew update
brew install cmake ninja
brew upgrade cmake
fi
# make sure CXX is correctly set
- if [[ "${COMPILER}" != "" ]]; then export CXX=${COMPILER}; fi
# show OS/compiler version
- uname -a
- cmake --version
- $CXX --version
# put json.hpp to nlohmann
- mkdir -p nlohmann && wget https://github.com/nlohmann/json/releases/download/v3.6.0/json.hpp -O nlohmann/json.hpp
# compile and execute unit tests
- mkdir -p build && cd build
- cmake .. -Dnlohmann_json_DIR=.. ${CMAKE_OPTIONS} -GNinja && cmake --build . --config Release
- ctest -C Release -V -j
- cd ..

View File

@ -1,220 +1,185 @@
cmake_minimum_required(VERSION 3.14)
# CMake version compatibility
# TODO: Remove when bumping cmake >= 3.25
if (POLICY CMP0140)
# Enables: return(PROPAGATE)
cmake_policy(SET CMP0140 NEW)
endif ()
#[==============================================================================================[
# Basic project definition #
]==============================================================================================]
# TODO: CMake >= 3.19 can use string(JSON VERSION GET "${METADATA}" "version") to load from JSON
set(PROJECT_VERSION 2.4.0)
# TODO: Version 3, rename the project and namespace to something more compact
project(nlohmann_json_schema_validator
VERSION ${PROJECT_VERSION}
DESCRIPTION "Json validator for nlohmann::json library"
HOMEPAGE_URL "https://github.com/pboettch/json-schema-validator"
LANGUAGES CXX)
# TODO: Remove when bumping cmake >= 3.21
if (NOT DEFINED nlohmann_json_schema_validator_IS_TOP_LEVEL)
if (CMAKE_PROJECT_NAME STREQUAL PROJECT_NAME)
set(PROJECT_IS_TOP_LEVEL ON)
else ()
set(PROJECT_IS_TOP_LEVEL OFF)
endif ()
endif ()
LANGUAGES CXX)
#[==============================================================================================[
# Options #
]==============================================================================================]
set(PROJECT_VERSION 2.1.0)
option(JSON_VALIDATOR_INSTALL "JsonValidator: Install targets" ${PROJECT_IS_TOP_LEVEL})
option(JSON_VALIDATOR_BUILD_TESTS "JsonValidator: Build tests" ${PROJECT_IS_TOP_LEVEL})
option(JSON_VALIDATOR_BUILD_EXAMPLES "JsonValidator: Build examples" ${PROJECT_IS_TOP_LEVEL})
option(JSON_VALIDATOR_SHARED_LIBS "JsonValidator: Build as shared library" ${PROJECT_IS_TOP_LEVEL})
option(JSON_VALIDATOR_TEST_COVERAGE "JsonValidator: Build with test coverage" OFF)
mark_as_advanced(JSON_VALIDATOR_TEST_COVERAGE)
# Get a default JSON_FETCH_VERSION from environment variables to workaround the CI
if (DEFINED ENV{NLOHMANN_JSON_VERSION})
set(JSON_FETCH_VERSION_DEFAULT $ENV{NLOHMANN_JSON_VERSION})
else ()
set(JSON_FETCH_VERSION_DEFAULT v3.11.2)
endif ()
set(JSON_FETCH_VERSION ${JSON_FETCH_VERSION_DEFAULT} CACHE STRING "Fetch nlohmann::json version")
cmake_minimum_required(VERSION 3.2)
#[==============================================================================================[
# Project configuration #
]==============================================================================================]
option(BUILD_TESTS "Build tests" ON)
option(BUILD_EXAMPLES "Build examples" ON)
# Include cmake modules
include(FetchContent)
if (JSON_VALIDATOR_INSTALL)
include(GNUInstallDirs)
include(CMakePackageConfigHelpers)
endif ()
# the library
add_library(nlohmann_json_schema_validator
src/json-schema-draft7.json.cpp
src/json-uri.cpp
src/json-validator.cpp
src/json-patch.cpp
src/string-format-check.cpp)
# Default to release build
if (NOT CMAKE_BUILD_TYPE)
set(CMAKE_BUILD_TYPE Release)
endif ()
target_include_directories(nlohmann_json_schema_validator
PUBLIC
$<INSTALL_INTERFACE:include>
$<BUILD_INTERFACE:${CMAKE_CURRENT_SOURCE_DIR}/src>)
# Enable cmake's BUILD_SHARED_LIBS
set(BUILD_SHARED_LIBS ${nlohmann_json_schema_validator_SHARED_LIBS})
target_compile_features(nlohmann_json_schema_validator
PUBLIC
cxx_range_for) # for C++11 - flags
if (JSON_VALIDATOR_TEST_COVERAGE)
if (CMAKE_CXX_COMPILER_ID STREQUAL Clang)
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -fprofile-instr-generate -fcoverage-mapping")
elseif (CMAKE_CXX_COMPILER_ID STREQUAL GNU)
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} --coverage")
else ()
message(WARNING
"JsonValidator: Other toolchain coverage flags unknown.\n"
"Using --coverage as default")
set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} --coverage")
endif ()
endif ()
set_target_properties(nlohmann_json_schema_validator
PROPERTIES
VERSION ${PROJECT_VERSION}
SOVERSION 1)
#[==============================================================================================[
# External packages #
]==============================================================================================]
# if used as a sub-directory, do not create install-rules -
# because of the dependency to nlohmann_json.
set(JSON_VALIDATOR_INSTALL ON)
set(fetch_packages "")
if (NOT TARGET nlohmann_json)
# Fetch/Find nlohmann_json
# TODO: Remove when bumping cmake >= 3.24
if (CMAKE_VERSION VERSION_GREATER_EQUAL 3.24)
FetchContent_Declare(nlohmann_json
GIT_REPOSITORY https://github.com/nlohmann/json
GIT_TAG ${JSON_FETCH_VERSION}
FIND_PACKAGE_ARGS
)
list(APPEND fetch_packages nlohmann_json)
else ()
# Try to get system installed version
find_package(nlohmann_json QUIET)
if (NOT nlohmann_json_FOUND)
# If failed fetch the desired version
FetchContent_Declare(nlohmann_json
GIT_REPOSITORY https://github.com/nlohmann/json
GIT_TAG ${JSON_FETCH_VERSION}
)
list(APPEND fetch_packages nlohmann_json)
endif ()
endif ()
endif ()
# here we decice how nlohmann::json is found and used to build this project
# Handle configure flags
if (JSON_VALIDATOR_INSTALL)
# TODO: This is not ideal, this package should not be installing nlohmann::json
# Currently required in order to satisfy cmake exporter
set(JSON_Install ON CACHE BOOL "")
endif ()
# first, check whether a nlohmann_json::nlohmann_json target exists already
# -> we are used as a sub-directory from within another project
if(TARGET nlohmann_json::nlohmann_json)
message(STATUS "Found nlohmann_json::nlohmann_json-target - linking with it")
target_link_libraries(
nlohmann_json_schema_validator
PUBLIC nlohmann_json::nlohmann_json)
# Get all dependencies
FetchContent_MakeAvailable(${fetch_packages})
if (JSON_VALIDATOR_INSTALL AND NOT nlohmann_json_FOUND AND JSON_Install)
# TODO: This is not ideal
message(WARNING
"JsonValidator: No nlohmann::json found on the system and nlohmann_json_schema_validator will be installed\n"
"This will also install nlohmann::json in its typical installation path\n"
"This is not ideal because it might overwrite system installed")
endif ()
set(JSON_VALIDATOR_INSTALL OFF)
#[==============================================================================================[
# Main definition #
]==============================================================================================]
elseif(TARGET nlohmann_json) # or nlohmann_json, we are used a sub-project next to nlohmann-json's git repo
message(STATUS "Found nlohmann_json-target - linking with it")
target_link_libraries(
nlohmann_json_schema_validator
PUBLIC nlohmann_json)
set(JSON_VALIDATOR_INSTALL OFF)
message(STATUS "JsonValidator: Configured for ${CMAKE_BUILD_TYPE}")
if (DEFINED nlohmann_json_VERSION)
message(STATUS "JsonValidator: Using nlohmann/json version: ${nlohmann_json_VERSION}")
else ()
message(STATUS "JsonValidator: nlohmann_json_VERSION is not set. Possible value: ${JSON_FETCH_VERSION}")
endif ()
else()
if (NOT IS_ABSOLUTE ${nlohmann_json_DIR}) # make nlohmann_json_DIR absolute
get_filename_component(nlohmann_json_DIR
"${CMAKE_CURRENT_BINARY_DIR}/${nlohmann_json_DIR}"
REALPATH)
endif()
## Main targets
add_library(nlohmann_json_schema_validator)
add_library(nlohmann_json_schema_validator::validator ALIAS nlohmann_json_schema_validator)
set_target_properties(nlohmann_json_schema_validator PROPERTIES
VERSION ${PROJECT_VERSION}
SOVERSION ${PROJECT_VERSION_MAJOR}
EXPORT_NAME validator
# TODO: Version 3, simplify the library name
# OUTPUT_NAME nlohmann_json_validator
)
set(nlohmann_json_orignal_DIR ${nlohmann_json_DIR}) # save path for later use
# Main definitions in here
add_subdirectory(src)
# find nlohmann_json-cmake-package
find_package(nlohmann_json QUIET)
# Enable examples
if(TARGET nlohmann_json::nlohmann_json)
message(STATUS "Found nlohmann_json-cmake-package - linking with it")
target_link_libraries(
nlohmann_json_schema_validator
PUBLIC nlohmann_json::nlohmann_json)
else()
# Enable testings
if (JSON_VALIDATOR_BUILD_TESTS)
# find nlohmann/json.hpp
message(STATUS ${nlohmann_json_orignal_DIR})
find_path(JSON_HPP nlohmann/json.hpp
PATHS ${nlohmann_json_orignal_DIR})
if(EXISTS ${JSON_HPP}/nlohmann/json.hpp)
message(STATUS "Found nlohmann/json.hpp in given path: ${JSON_HPP}")
target_include_directories(
nlohmann_json_schema_validator
PUBLIC $<BUILD_INTERFACE:${JSON_HPP}>)
else()
message(FATAL_ERROR "could not find nlohmann/json.hpp or any related cmake-target. Please set nlohmann_json_DIR.")
endif()
# nlohmann_json_DIR has to be reset (for later use in tests)
set(nlohmann_json_DIR ${JSON_HPP})
endif()
endif()
if("${CMAKE_CXX_COMPILER_ID}" STREQUAL "Clang" OR
"${CMAKE_CXX_COMPILER_ID}" STREQUAL "GNU")
target_compile_options(nlohmann_json_schema_validator
PRIVATE
-Wall -Wextra)
endif()
if(BUILD_SHARED_LIBS)
target_compile_definitions(nlohmann_json_schema_validator
PRIVATE
-DJSON_SCHEMA_VALIDATOR_EXPORTS)
endif()
# regex with boost if gcc < 4.9 - default is std::regex
if(CMAKE_CXX_COMPILER_ID STREQUAL "GNU")
if(CMAKE_CXX_COMPILER_VERSION VERSION_LESS "4.9.0")
find_package(Boost COMPONENTS regex)
if(NOT Boost_FOUND)
message(STATUS "GCC less then 4.9 and boost-regex NOT found - no regex used")
target_compile_definitions(nlohmann_json_schema_validator PRIVATE -DJSON_SCHEMA_NO_REGEX)
else()
message(STATUS "GCC less then 4.9 and boost-regex FOUND - using boost::regex")
target_compile_definitions(nlohmann_json_schema_validator PRIVATE -DJSON_SCHEMA_BOOST_REGEX)
target_include_directories(nlohmann_json_schema_validator PRIVATE ${Boost_INCLUDE_DIRS})
target_link_libraries(nlohmann_json_schema_validator PRIVATE ${Boost_LIBRARIES})
endif()
endif()
endif()
if(JSON_VALIDATOR_INSTALL)
install(TARGETS nlohmann_json_schema_validator
EXPORT ${PROJECT_NAME}Targets
LIBRARY DESTINATION lib
ARCHIVE DESTINATION lib
RUNTIME DESTINATION bin)
install(FILES src/nlohmann/json-schema.hpp
DESTINATION include/nlohmann)
endif()
if (BUILD_EXAMPLES)
# simple nlohmann_json_schema_validator-executable
add_executable(json-schema-validate app/json-schema-validate.cpp)
target_link_libraries(json-schema-validate nlohmann_json_schema_validator)
add_executable(readme-json-schema app/readme.cpp)
target_link_libraries(readme-json-schema nlohmann_json_schema_validator)
install(TARGETS json-schema-validate readme-json-schema
DESTINATION bin)
endif()
if (BUILD_TESTS)
# test-zone
enable_testing()
add_subdirectory(test)
endif ()
endif()
if (JSON_VALIDATOR_BUILD_EXAMPLES)
add_subdirectory(example)
endif ()
if(JSON_VALIDATOR_INSTALL)
# Set Up the Project Targets and Config Files for CMake
# Set the install path to the cmake config files
set(INSTALL_CMAKE_DIR ${CMAKE_INSTALL_PREFIX}/lib/cmake/${PROJECT_NAME})
# Create the ConfigVersion file
include(CMakePackageConfigHelpers) # write_basic_package_version_file
write_basic_package_version_file( ${PROJECT_NAME}ConfigVersion.cmake
VERSION ${PACKAGE_VERSION}
COMPATIBILITY SameMajorVersion)
# Get the relative path from the INSTALL_CMAKE_DIR to the include directory
file(RELATIVE_PATH REL_INCLUDE_DIR "${INSTALL_CMAKE_DIR}" "${CMAKE_INSTALL_PREFIX}/include")
#[==============================================================================================[
# Install or Export #
]==============================================================================================]
# Configure the Config.cmake file with the proper include directory
set(CONF_INCLUDE_DIRS "\${JSON_SCHEMA_VALIDATOR_CMAKE_DIR}/${REL_INCLUDE_DIR}")
configure_file(${PROJECT_NAME}Config.cmake.in
"${CMAKE_CURRENT_BINARY_DIR}/${PROJECT_NAME}Config.cmake" @ONLY)
if (JSON_VALIDATOR_INSTALL)
# Note other install targets found in subdirectories
# Here mostly the cmake boilerplate are set
write_basic_package_version_file(nlohmann_json_schema_validatorConfigVersion.cmake
VERSION ${PROJECT_VERSION}
COMPATIBILITY SameMajorVersion
)
configure_package_config_file(cmake/nlohmann_json_schema_validatorConfig.cmake.in
nlohmann_json_schema_validatorConfig.cmake
INSTALL_DESTINATION ${CMAKE_INSTALL_LIBDIR}/cmake/nlohmann_json_schema_validator
)
# Install Targets files
export(EXPORT nlohmann_json_schema_validatorTargets
NAMESPACE nlohmann_json_schema_validator::
FILE nlohmann_json_schema_validatorTargets.cmake
)
install(EXPORT nlohmann_json_schema_validatorTargets
FILE nlohmann_json_schema_validatorTargets.cmake
NAMESPACE nlohmann_json_schema_validator::
DESTINATION ${CMAKE_INSTALL_LIBDIR}/cmake/nlohmann_json_schema_validator
COMPONENT nlohmann_json_schema_validator_Development
)
# Install cmake export files
# Install the Config.cmake and ConfigVersion.cmake files
install(FILES
${CMAKE_CURRENT_BINARY_DIR}/nlohmann_json_schema_validatorConfig.cmake
${CMAKE_CURRENT_BINARY_DIR}/nlohmann_json_schema_validatorConfigVersion.cmake
DESTINATION ${CMAKE_INSTALL_LIBDIR}/cmake/nlohmann_json_schema_validator
COMPONENT nlohmann_json_schema_validator_Development
)
endif ()
"${CMAKE_CURRENT_BINARY_DIR}/${PROJECT_NAME}Config.cmake"
"${CMAKE_CURRENT_BINARY_DIR}/${PROJECT_NAME}ConfigVersion.cmake"
DESTINATION "${INSTALL_CMAKE_DIR}")
# Handle the project being included externally (e.g. FetchContent)
if (NOT PROJECT_IS_TOP_LEVEL)
# Export variables set in nlohmann_json_schema_validatorConfig.cmake
# TODO: Remove when bumping cmake >= 3.25
if (CMAKE_VERSION VERSION_GREATER_EQUAL 3.25)
return(PROPAGATE
nlohmann_json_schema_validator_VERSION
nlohmann_json_schema_validator_VERSION_MAJOR
nlohmann_json_schema_validator_VERSION_MINOR
nlohmann_json_schema_validator_VERSION_PATCH
nlohmann_json_schema_validator_VERSION_TWEAK
)
else ()
set(nlohmann_json_schema_validator_VERSION ${nlohmann_json_schema_validator_VERSION} PARENT_SCOPE)
set(nlohmann_json_schema_validator_VERSION_MAJOR ${nlohmann_json_schema_validator_VERSION_MAJOR} PARENT_SCOPE)
set(nlohmann_json_schema_validator_VERSION_MINOR ${nlohmann_json_schema_validator_VERSION_MINOR} PARENT_SCOPE)
set(nlohmann_json_schema_validator_VERSION_PATCH ${nlohmann_json_schema_validator_VERSION_PATCH} PARENT_SCOPE)
set(nlohmann_json_schema_validator_VERSION_TWEAK ${nlohmann_json_schema_validator_VERSION_TWEAK} PARENT_SCOPE)
endif ()
endif ()
# Install Targets
install(EXPORT ${PROJECT_NAME}Targets
FILE ${PROJECT_NAME}Targets.cmake
DESTINATION "${INSTALL_CMAKE_DIR}")
endif()

View File

@ -1,7 +0,0 @@
{
"version": 6,
"include": [
"cmake/CMakePresets-defaults.json",
"cmake/CMakePresets-CI.json"
]
}

README.md
View File

@ -1,3 +1,4 @@
[![Build Status](https://travis-ci.org/pboettch/json-schema-validator.svg?branch=master)](https://travis-ci.org/pboettch/json-schema-validator)
# JSON schema validator for JSON for Modern C++
@ -9,7 +10,7 @@ This is a C++ library for validating JSON documents based on a
[draft-7 of JSON Schema Validation](http://json-schema.org/schema).
First a disclaimer: *It is work in progress and
contributions or hints or discussions are welcome.*
contributions or hints or discussions are welcome.* Even though a 2.0.0 release is imminent.
Niels Lohmann et al develop a great JSON parser for C++ called [JSON for Modern
C++](https://github.com/nlohmann/json). This validator is based on this
@ -22,7 +23,7 @@ is rather simple.
Although significant changes have been done for the 2nd version
(a complete rewrite) the API is compatible with the 1.0.0 release. Except for
the namespace which is now `nlohmann::json_schema`.
the namespace which is now `nlohmann::json_schema.
Version **2** supports JSON schema draft 7, whereas 1 was supporting draft 4
only. Please update your schemas.
@ -48,21 +49,6 @@ a validation error occurs and decide what to do (throwing, counting, collecting)
Another goal was to use Niels Lohmann's JSON-library. This is why the validator
lives in his namespace.
# Thread-safety
Instance validation is thread-safe and the same validator-object can be used by
different threads:
The validate method is `const` which indicates the object is not modified when
being called:
```C++
json json_validator::validate(const json &) const;
```
Validator-object creation however is not thread-safe. A validator has to be
created in one (main?) thread once.
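As an editorial aside (not part of either branch), a minimal sketch of the usage described above, assuming the default constructor and that `validate()` throws on failure:

```C++
#include <thread>

#include <nlohmann/json-schema.hpp>

using nlohmann::json;
using nlohmann::json_schema::json_validator;

int main()
{
    // create and configure the validator once, in a single thread
    json_validator validator; // assumption: default constructor (no loader, no format checker)
    validator.set_root_schema(R"({"type": "object"})"_json);

    json a = R"({"x": 1})"_json;
    json b = R"({"y": 2})"_json;

    // the const validate() call can then be shared between threads
    std::thread t1([&] { validator.validate(a); });
    std::thread t2([&] { validator.validate(b); });
    t1.join();
    t2.join();
    return 0;
}
```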
# Weaknesses
Numerical validation uses nlohmann-json's integer, unsigned and floating point
@ -74,7 +60,7 @@ types, depending on if the schema type is "integer" or "number". Bignum
This library is based on Niels Lohmann's JSON-library and thus has
a build-dependency to it.
Currently at least version **3.8.0** of NLohmann's JSON library
Currently at least version **3.6.0** of NLohmann's JSON library
is required.
Various methods using CMake can be used to build this project.
@ -100,32 +86,39 @@ By default a static library is built. Shared libraries can be generated by using
the `BUILD_SHARED_LIBS`-cmake variable:
In your initial call to cmake simply add:
```bash
cmake [..] -DBUILD_SHARED_LIBS=ON [..]
```
## nlohmann-json integration
## Providing access to nlohmann-json
As nlohmann-json is a dependency, this library tries find it.
The CMake-file of this libraries tries several ways to ultimately include
`nlohmann/json.hpp`
The cmake-configuration first checks if nlohmann-json is available as a cmake-target. This may be the case, because it is used as a submodule in a super-project which already provides and uses nlohmann-json.
Otherwise, it calls `find_package` for nlohmann-json and requires nlohmann-json to be installed on the system.
During the cmake-configurate-step there are 3 tries done trying to
access nlohmann-json:
### Building with Hunter package manager
1. link with a nlohmann_json::nlohmann_json-target,
2. find the nlohmann_json-cmake-package and link with nlohmann_json::nlohmann_json-target or
3. find path to `nlohmann/json.hpp`.
To enable access to nlohmann json library, Hunter can be used. Just run with `JSON_VALIDATOR_HUNTER=ON` option. No further dependencies needed
```bash
cmake [..] -DJSON_VALIDATOR_HUNTER=ON [..]
```
1 is there to make it work when this project is added as
a sub-directory (via `add_subdirectory()`), 2 and 3 can be
assisted by setting the `nlohmann_json_DIR`-variable.
### Building as a CMake-subdirectory from within another project
Adding this library as a subdirectory to a parent project is one way of
building it.
If the parent project already used `find_package()` to find the CMake-package of nlohmann_json or includes it as a submodule likewise.
If the parent project
- already used `find_package()` to find the CMake-package of nlohmann_json, method 1 will work.
- uses the git-repo of nlohmann_json as a subdirectory, method 1 will work.
- sets nlohmann_json_DIR, method 2 or 3 will work.
Afterwards a target called `nlohmann_json_schema_validator`
is available in order to compile and link.
### Building directly, finding a CMake-package. (short)
@ -135,7 +128,22 @@ CMake's `find_package()` to be used.
This library is using this mechanism if `nlohmann_json::nlohmann_json`-target
does not exist.
### Install
The variable `nlohmann_json_DIR` can be used to help `find_package()` find this package.
### Building directly: provide a path to where to find json.hpp
The last method before fataling out is by providing a path where the file json.hpp can be found.
The variable `nlohmann_json_DIR` has to be used to point to the path
where `json.hpp` is found in a subdirectory called `nlohmann`, e.g.:
`json.hpp` is located in `/path/to/nlohmann/json.hpp`. The `cmake`-command has to be run as follows:
```bash
cmake -Dnlohmann_json_DIR=/path/to [..]
```
### Method 1 - long version
Since version 2.1.0 this library can be installed and CMake-package-files will be
created accordingly. If the installation of nlohmann-json and this library
@ -143,6 +151,7 @@ is done into default unix-system-paths CMake will be able to find this
library by simply doing:
```CMake
find_package(nlohmann_json REQUIRED)
find_package(nlohmann_json_schema_validator REQUIRED)
```
@ -151,9 +160,20 @@ and
```CMake
target_link_libraries(<your-target> [..] nlohmann_json_schema_validator)
```
to build and link.
If a custom path has been used to install this library (and nlohmann-json), `find_package()`
needs a hint for where to find the package-files, it can be provided by setting the following variables
```CMake
cmake .. \
-Dnlohmann_json_DIR=<path/to/>lib/cmake/nlohmann_json \
-Dnlohmann_json_schema_validator_DIR:PATH=<path/to/>/lib/cmake/nlohmann_json_schema_validator
```
Note that if the this library is used as cmake-package, nlohmann-json also has
to be used a cmake-package.
## Code
See also `app/json-schema-validate.cpp`.
@ -265,102 +285,24 @@ All required tests are **OK**.
# Format
Optionally JSON-schema-validator can validate predefined or user-defined formats.
Optionally JSON-schema-validator can validation predefined or user-defined formats.
Therefore a format-checker-function can be provided by the user which is called by
the validator when a format-check is required (ie. the schema contains a format-field).
This is how the prototype looks like and how it can be passed to the validation-instance:
```C++
static void my_format_checker(const std::string &format, const std::string &value)
{
if (format == "something") {
if (!check_value_for_something(value))
throw std::invalid_argument("value is not a good something");
} else
throw std::logic_error("Don't know how to validate " + format);
}
// when creating the validator
json_validator validator(nullptr, // or loader-callback
my_format_checker); // create validator
```
## Default Checker
the validator when a format-check is required.
The library contains a default-checker, which does some checks. It needs to be
provided manually to the constructor of the validator:
```C++
json_validator validator(loader, // or nullptr for no loader
json_validator validator(loader,
nlohmann::json_schema::default_string_format_check);
```
Supported formats: `date-time, date, time, email, hostname, ipv4, ipv6, uuid, regex`
More formats can be added in `src/string-format-check.cpp`. Please contribute implementions for missing json schema draft formats.
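A user-supplied checker can also fall back on the built-in checks for the formats listed above. A sketch of that pattern (the "product-code" format is made up for illustration):

```C++
#include <stdexcept>
#include <string>

#include <nlohmann/json-schema.hpp>

static void my_combined_checker(const std::string &format, const std::string &value)
{
    if (format == "product-code") { // hypothetical user-defined format
        if (value.size() != 8)
            throw std::invalid_argument("value is not a valid product-code");
    } else {
        // defer to the library's default checks (date-time, email, ipv4, ...)
        nlohmann::json_schema::default_string_format_check(format, value);
    }
}

// passed to the validator exactly like the checkers shown above:
// json_validator validator(nullptr, my_combined_checker);
```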
## Default value processing
As a result of the validation, the library returns a json patch including the default values of the specified schema.
```C++
#include <iostream>
#include <nlohmann/json-schema.hpp>
using nlohmann::json;
using nlohmann::json_schema::json_validator;
static const json rectangle_schema = R"(
{
"$schema": "http://json-schema.org/draft-07/schema#",
"title": "A rectangle",
"properties": {
"width": {
"$ref": "#/definitions/length",
"default": 20
},
"height": {
"$ref": "#/definitions/length"
}
},
"definitions": {
"length": {
"type": "integer",
"minimum": 1,
"default": 10
}
}
})"_json;
int main()
{
try {
json_validator validator{rectangle_schema};
/* validate empty json -> will be expanded by the default values defined in the schema */
json rectangle = "{}"_json;
const auto default_patch = validator.validate(rectangle);
rectangle = rectangle.patch(default_patch);
std::cout << rectangle.dump() << std::endl; // {"height":10,"width":20}
} catch (const std::exception &e) {
std::cerr << "Validation of schema failed: " << e.what() << "\n";
return EXIT_FAILURE;
}
return EXIT_SUCCESS;
}
```
The example above will output the specified default values `{"height":10,"width":20}` to stdout.
> Note that the default value specified in a `$ref` may be overridden by the current instance location. Also note that this behavior will break draft-7, but it is compliant to newer drafts (e.g. `2019-09` or `2020-12`).
# Contributing
This project uses [`pre-commit`](https://pre-commit.com/) to enforce style-checks. Please install and run it before
creating commits and making pull requests.
Before opening a pull request, please apply the coding style given in the
`.clang-format` by running clang-format from the git top-level for all touched
files:
```console
$ pip install pre-commit
$ pre-commit install
```shell
git diff master --name-only | grep '\.[ch]pp$' | xargs -P 3 -I{} clang-format -i {}
```

View File

@ -65,7 +65,7 @@ int main(int argc, char *argv[])
// 2) create the validator and
json_validator validator(loader,
nlohmann::json_schema::default_string_format_check);
nlohmann::json_schema::default_string_format_check);
try {
// insert this schema as the root to the validator

View File

@ -1,281 +0,0 @@
{
"version": 6,
"include": [
"CMakePresets-defaults.json"
],
"configurePresets": [
{
"name": "ci-base",
"hidden": true,
"generator": "Ninja",
"inherits": [
"default"
],
"cacheVariables": {
"CMAKE_BUILD_TYPE": {
"type": "STRING",
"value": "Debug"
},
"JSON_VALIDATOR_BUILD_TESTS": {
"type": "BOOL",
"value": true
},
"JSON_VALIDATOR_INSTALL": {
"type": "BOOL",
"value": false
},
"JSON_BuildTests": {
"type": "BOOL",
"value": false
}
},
"errors": {
"deprecated": true
}
},
{
"name": "gcc-ci",
"displayName": "Configure preset for GCC toolchain",
"inherits": [
"ci-base"
],
"binaryDir": "cmake-build-ci-gcc",
"cacheVariables": {
"CMAKE_CXX_COMPILER": {
"type": "FILEPATH",
"value": "g++"
},
"CMAKE_LINKER": {
"type": "FILEPATH",
"value": "ld"
}
}
},
{
"name": "intel-ci",
"displayName": "Configure preset for Intel toolchain",
"inherits": [
"ci-base"
],
"binaryDir": "cmake-build-ci-intel",
"cacheVariables": {
"CMAKE_CXX_COMPILER": {
"type": "FILEPATH",
"value": "icpx"
}
}
},
{
"name": "llvm-ci",
"displayName": "Configure preset for LLVM toolchain",
"inherits": [
"ci-base"
],
"binaryDir": "cmake-build-ci-llvm",
"cacheVariables": {
"CMAKE_CXX_COMPILER": {
"type": "FILEPATH",
"value": "clang++"
},
"CMAKE_LINKER": {
"type": "FILEPATH",
"value": "lld"
}
}
},
{
"name": "ci-coverage",
"displayName": "Configure preset for test coverage",
"inherits": [
"gcc-ci"
],
"binaryDir": "cmake-build-ci-coverage",
"errors": {
"deprecated": false
},
"cacheVariables": {
"JSON_VALIDATOR_TEST_COVERAGE": {
"type": "BOOL",
"value": true
}
}
},
{
"name": "pre-commit",
"displayName": "Configure preset for pre-commit checks",
"inherits": [
"default"
],
"binaryDir": "cmake-build-pre-commit",
"cacheVariables": {
"JSON_VALIDATOR_TEST_COVERAGE": {
"type": "BOOL",
"value": true
},
"JSON_VALIDATOR_INSTALL": {
"type": "BOOL",
"value": false
}
}
}
],
"buildPresets": [
{
"name": "ci-base",
"hidden": true,
"inherits": [
"default"
],
"cleanFirst": true
},
{
"name": "ci-coverage",
"displayName": "Build preset for test coverage",
"inherits": [
"ci-base"
],
"configurePreset": "ci-coverage"
},
{
"name": "gcc-ci",
"displayName": "Build preset for GCC toolchain",
"inherits": [
"ci-base"
],
"configurePreset": "gcc-ci"
},
{
"name": "intel-ci",
"displayName": "Build preset for Intel toolchain",
"inherits": [
"ci-base"
],
"configurePreset": "intel-ci"
},
{
"name": "llvm-ci",
"displayName": "Build preset for LLVM toolchain",
"inherits": [
"ci-base"
],
"configurePreset": "llvm-ci"
}
],
"testPresets": [
{
"name": "ci-base",
"hidden": true,
"inherits": [
"default"
],
"output": {
"outputOnFailure": true
}
},
{
"name": "ci-coverage",
"inherits": [
"default"
],
"configurePreset": "ci-coverage"
},
{
"name": "gcc-ci",
"displayName": "Test preset for GCC toolchain",
"inherits": [
"ci-base"
],
"configurePreset": "gcc-ci"
},
{
"name": "intel-ci",
"displayName": "Test preset for Intel toolchain",
"inherits": [
"ci-base"
],
"configurePreset": "intel-ci"
},
{
"name": "llvm-ci",
"displayName": "Test preset for LLVM toolchain",
"inherits": [
"ci-base"
],
"configurePreset": "llvm-ci"
}
],
"workflowPresets": [
{
"name": "gcc-ci",
"displayName": "CI test for GCC toolchain",
"steps": [
{
"type": "configure",
"name": "gcc-ci"
},
{
"type": "build",
"name": "gcc-ci"
},
{
"type": "test",
"name": "gcc-ci"
}
]
},
{
"name": "intel-ci",
"displayName": "CI test for Intel toolchain",
"steps": [
{
"type": "configure",
"name": "intel-ci"
},
{
"type": "build",
"name": "intel-ci"
},
{
"type": "test",
"name": "intel-ci"
}
]
},
{
"name": "llvm-ci",
"displayName": "CI test for LLVM toolchain",
"steps": [
{
"type": "configure",
"name": "llvm-ci"
},
{
"type": "build",
"name": "llvm-ci"
},
{
"type": "test",
"name": "llvm-ci"
}
]
},
{
"name": "ci-coverage",
"displayName": "Coverage tests",
"steps": [
{
"type": "configure",
"name": "ci-coverage"
},
{
"type": "build",
"name": "ci-coverage"
},
{
"type": "test",
"name": "ci-coverage"
}
]
}
]
}

View File

@ -1,50 +0,0 @@
{
"version": 6,
"configurePresets": [
{
"name": "default",
"displayName": "Default configuration preset",
"binaryDir": "cmake-build-release",
"cacheVariables": {
"CMAKE_BUILD_TYPE": {
"type": "STRING",
"value": "Release"
}
}
}
],
"buildPresets": [
{
"name": "default",
"displayName": "Default build preset",
"configurePreset": "default"
}
],
"testPresets": [
{
"name": "default",
"displayName": "Default test preset",
"configurePreset": "default"
}
],
"workflowPresets": [
{
"name": "default",
"displayName": "Default workflow",
"steps": [
{
"type": "configure",
"name": "default"
},
{
"type": "build",
"name": "default"
},
{
"type": "test",
"name": "default"
}
]
}
]
}

View File

@ -1,9 +0,0 @@
@PACKAGE_INIT@
include(CMakeFindDependencyMacro)
find_dependency(nlohmann_json)
include("${CMAKE_CURRENT_LIST_DIR}/nlohmann_json_schema_validatorTargets.cmake")
check_required_components(
"nlohmann_json_schema_validator"
)

View File

@ -1,10 +1,7 @@
import os
import re
from conans import load, tools, ConanFile, CMake
from conan import ConanFile
from conan.tools.cmake import cmake_layout, CMake, CMakeToolchain
from conans.tools import load
from conans import tools as ctools
def get_version():
try:
@ -18,60 +15,39 @@ def get_version():
except:
return None
class JsonSchemaValidatorConan(ConanFile):
name = 'JsonSchemaValidator'
version = get_version()
url = 'https://github.com/pboettch/json-schema-validator'
license = 'MIT'
settings = 'os', 'compiler', 'build_type', 'arch'
options = {
'shared': [True, False],
'fPIC': [True, False],
'build_examples': [True, False],
'build_tests': [True, False],
'test_coverage': [True, False],
'fPIC': [True, False]
}
default_options = {
'shared': False,
'fPIC': True,
'build_examples': True,
'build_tests': False,
'test_coverage': False,
'fPIC': True
}
generators = 'CMakeDeps', 'CMakeToolchain', 'VirtualBuildEnv', 'VirtualRunEnv'
generators = "cmake"
exports_sources = [
'CMakeLists.txt',
'conanfile.py',
'cmake/*',
'nlohmann_json_schema_validatorConfig.cmake.in',
'src/*',
'example/*',
'test/*',
'app/*',
]
requires = [
'nlohmann_json/3.11.2'
]
def generate(self):
tc = CMakeToolchain(self)
tc.variables['JSON_VALIDATOR_BUILD_EXAMPLES'] = self.options.build_examples
tc.variables['JSON_VALIDATOR_BUILD_TESTS'] = self.options.build_tests
tc.variables['JSON_VALIDATOR_SHARED_LIBS '] = self.options.shared
tc.variables['JSON_VALIDATOR_TEST_COVERAGE '] = self.options.test_coverage
tc.generate()
def layout(self):
cmake_layout(self)
requires = (
'nlohmann_json/3.7.3'
)
def build(self):
cmake = CMake(self)
cmake.definitions['nlohmann_json_DIR'] = os.path.join(self.deps_cpp_info['nlohmann_json'].rootpath, 'include')
cmake.definitions['BUILD_EXAMPLES'] = True
cmake.definitions['BUILD_TESTS'] = False
cmake.configure()
cmake.verbose = True
cmake.build()
def package(self):
@ -84,7 +60,7 @@ class JsonSchemaValidatorConan(ConanFile):
libdir = os.path.join(self.package_folder, "lib")
self.cpp_info.libdirs = [libdir]
self.cpp_info.libs += ctools.collect_libs(self, libdir)
self.cpp_info.libs += tools.collect_libs(self, libdir)
bindir = os.path.join(self.package_folder, "bin")
self.output.info("Appending PATH environment variable: {}".format(bindir))

View File

@ -1,14 +0,0 @@
# simple nlohmann_json_schema_validator-executable
add_executable(json-schema-validate json-schema-validate.cpp)
target_link_libraries(json-schema-validate nlohmann_json_schema_validator)
add_executable(readme-json-schema readme.cpp)
target_link_libraries(readme-json-schema nlohmann_json_schema_validator)
add_executable(format-json-schema format.cpp)
target_link_libraries(format-json-schema nlohmann_json_schema_validator)
if (JSON_VALIDATOR_INSTALL)
install(TARGETS json-schema-validate readme-json-schema format-json-schema
DESTINATION ${CMAKE_INSTALL_BINDIR})
endif ()

View File

@ -1,54 +0,0 @@
#include <iostream>
#include <nlohmann/json-schema.hpp>
using nlohmann::json;
using nlohmann::json_schema::json_validator;
// The schema is defined based upon a string literal
static json uri_schema = R"(
{
"$schema": "http://json-schema.org/draft-07/schema#",
"type": "object",
"properties": {
"myUri": {
"type":"string",
"format": "uri"
}
}
})"_json;
// The people are defined with brace initialization
static json good_uri = {{"myUri", "http://hostname.com/"}};
static json bad_uri = {{"myUri", "http:/hostname.com/"}};
static void uri_format_checker(const std::string &format, const std::string &value)
{
if (format == "uri") {
if (value.find("://") == std::string::npos)
throw std::invalid_argument("URI does not contain :// - invalid");
} else
throw std::logic_error("Don't know how to validate " + format);
}
int main()
{
json_validator validator(nullptr, uri_format_checker); // create validator
try {
validator.set_root_schema(uri_schema); // insert root-schema
} catch (const std::exception &e) {
std::cerr << "Validation of schema failed, here is why: " << e.what() << "\n";
return EXIT_FAILURE;
}
validator.validate(good_uri);
try {
validator.validate(bad_uri);
} catch (const std::exception &e) {
std::cerr << "Validation expectedly failed, here is why: " << e.what() << "\n";
}
return EXIT_SUCCESS;
}

View File

@ -0,0 +1,13 @@
# Config file for the json-schema-validator
# It defines the following variables
# NLOHMANN_JSON_SCHEMA_VALIDATOR_INCLUDE_DIRS - include directories for json-schema-validator
# nlohmann_json_schema_validator - json-schema-validator library to link against
# Compute paths
get_filename_component(NLOHMANN_JSON_SCHEMA_VALIDATOR_CMAKE_DIR "${CMAKE_CURRENT_LIST_FILE}" PATH)
set(NLOHMANN_JSON_SCHEMA_VALIDATOR_INCLUDE_DIRS @CONF_INCLUDE_DIRS@)
# Our library dependencies (contains definitions for IMPORTED targets)
if(NOT TARGET json-schema-validator)
include("${NLOHMANN_JSON_SCHEMA_VALIDATOR_CMAKE_DIR}/nlohmann_json_schema_validatorTargets.cmake")
endif()

View File

@ -1,64 +0,0 @@
target_sources(nlohmann_json_schema_validator PRIVATE
smtp-address-validator.cpp
json-schema-draft7.json.cpp
json-uri.cpp
json-validator.cpp
json-patch.cpp
string-format-check.cpp
)
target_include_directories(nlohmann_json_schema_validator PUBLIC
$<INSTALL_INTERFACE:${CMAKE_INSTALL_INCLUDEDIR}>
$<BUILD_INTERFACE:${CMAKE_CURRENT_SOURCE_DIR}>
)
set_target_properties(nlohmann_json_schema_validator PROPERTIES
PUBLIC_HEADER nlohmann/json-schema.hpp)
# TODO: Why would this need to be if guarded?
if (JSON_VALIDATOR_SHARED_LIBS)
target_compile_definitions(nlohmann_json_schema_validator PRIVATE
-DJSON_SCHEMA_VALIDATOR_EXPORTS)
endif ()
# TODO: Consider setting minimum cxx standard instead
target_compile_features(nlohmann_json_schema_validator PUBLIC
cxx_range_for) # for C++11 - flags
# TODO: This should be handled by the CI/presets, not the cmake
if ("${CMAKE_CXX_COMPILER_ID}" STREQUAL "Clang" OR
"${CMAKE_CXX_COMPILER_ID}" STREQUAL "GNU")
target_compile_options(nlohmann_json_schema_validator
PRIVATE
-Wall -Wextra -Wshadow)
endif ()
# TODO: gcc support for <4.9 should be removed
# regex with boost if gcc < 4.9 - default is std::regex
if (CMAKE_CXX_COMPILER_ID STREQUAL "GNU")
if (CMAKE_CXX_COMPILER_VERSION VERSION_LESS "4.9.0")
find_package(Boost COMPONENTS regex)
if (NOT Boost_FOUND)
message(STATUS "GCC less then 4.9 and boost-regex NOT found - no regex used")
target_compile_definitions(nlohmann_json_schema_validator PRIVATE -DJSON_SCHEMA_NO_REGEX)
else ()
message(STATUS "GCC less then 4.9 and boost-regex FOUND - using boost::regex")
target_compile_definitions(nlohmann_json_schema_validator PRIVATE -DJSON_SCHEMA_BOOST_REGEX)
target_include_directories(nlohmann_json_schema_validator PRIVATE ${Boost_INCLUDE_DIRS})
target_link_libraries(nlohmann_json_schema_validator PRIVATE ${Boost_LIBRARIES})
endif ()
endif ()
endif ()
target_link_libraries(nlohmann_json_schema_validator PUBLIC
nlohmann_json::nlohmann_json)
if (JSON_VALIDATOR_INSTALL)
# Normal installation target to system. When using scikit-build check python subdirectory
install(TARGETS nlohmann_json_schema_validator
EXPORT nlohmann_json_schema_validatorTargets
LIBRARY DESTINATION ${CMAKE_INSTALL_LIBDIR} COMPONENT nlohmann_json_schema_validator_Runtime
NAMELINK_COMPONENT nlohmann_json_schema_validator_Development
ARCHIVE DESTINATION ${CMAKE_INSTALL_LIBDIR} COMPONENT nlohmann_json_schema_validator_Development
PUBLIC_HEADER DESTINATION ${CMAKE_INSTALL_INCLUDEDIR}/nlohmann COMPONENT nlohmann_json_schema_validator_Development
RUNTIME DESTINATION ${CMAKE_INSTALL_BINDIR} COMPONENT nlohmann_json_schema_validator_Runtime)
endif ()

View File

@ -66,7 +66,7 @@ const nlohmann::json patch_schema = R"patch({
}
}
})patch"_json;
} // namespace
}; // namespace
namespace nlohmann
{
@ -85,19 +85,19 @@ json_patch::json_patch(const json &patch)
json_patch &json_patch::add(const json::json_pointer &ptr, json value)
{
j_.push_back(json{{"op", "add"}, {"path", ptr.to_string()}, {"value", std::move(value)}});
j_.push_back(json{{"op", "add"}, {"path", ptr}, {"value", std::move(value)}});
return *this;
}
json_patch &json_patch::replace(const json::json_pointer &ptr, json value)
{
j_.push_back(json{{"op", "replace"}, {"path", ptr.to_string()}, {"value", std::move(value)}});
j_.push_back(json{{"op", "replace"}, {"path", ptr}, {"value", std::move(value)}});
return *this;
}
json_patch &json_patch::remove(const json::json_pointer &ptr)
{
j_.push_back(json{{"op", "remove"}, {"path", ptr.to_string()}});
j_.push_back(json{{"op", "remove"}, {"path", ptr}});
return *this;
}

View File

@ -28,13 +28,10 @@ public:
json_patch &replace(const json::json_pointer &, json value);
json_patch &remove(const json::json_pointer &);
json &get_json() { return j_; }
const json &get_json() const { return j_; }
operator json() const { return j_; }
private:
json j_ = nlohmann::json::array();
json j_;
static void validateJsonPatch(json const &patch);
};
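
(Editorial note: the `json_patch` helper above accumulates RFC 6902 operations that nlohmann::json can apply. A rough usage sketch, assuming the internal `src/json-patch.hpp` header is reachable and that `json_patch` is default-constructible:)

```C++
#include <nlohmann/json.hpp>

#include "json-patch.hpp" // internal header from src/, assumed to be on the include path

using nlohmann::json;
using nlohmann::json_patch;

int main()
{
    json doc = R"({"size": 1, "obsolete": true})"_json;

    json_patch p; // assumption: default-constructible
    p.add(json::json_pointer("/name"), "example")
     .replace(json::json_pointer("/size"), 42)
     .remove(json::json_pointer("/obsolete"));

    // operator json() (main branch) yields the RFC 6902 array of operations
    const json ops = p;
    doc = doc.patch(ops); // -> {"name":"example","size":42}
    return 0;
}
```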

View File

@ -35,7 +35,7 @@ void json_uri::update(const std::string &uri)
}
std::string hex = pointer.substr(pos + 1, 2);
char ascii = static_cast<char>(std::strtoul(hex.c_str(), nullptr, 16));
char ascii = (char) std::strtoul(hex.c_str(), nullptr, 16);
pointer.replace(pos, 3, 1, ascii);
pos--;

View File

@ -10,11 +10,9 @@
#include "json-patch.hpp"
#include <deque>
#include <memory>
#include <set>
#include <sstream>
#include <string>
using nlohmann::json;
using nlohmann::json_patch;
@ -35,21 +33,12 @@ using namespace nlohmann::json_schema;
namespace
{
static const json EmptyDefault{};
class schema
{
protected:
root_schema *root_;
json default_value_ = nullptr;
protected:
virtual std::shared_ptr<schema> make_for_default_(
std::shared_ptr<::schema> & /* sch */,
root_schema * /* root */,
std::vector<nlohmann::json_uri> & /* uris */,
nlohmann::json & /* default_value */) const
{
return nullptr;
};
public:
virtual ~schema() = default;
@ -59,13 +48,11 @@ public:
virtual void validate(const json::json_pointer &ptr, const json &instance, json_patch &patch, error_handler &e) const = 0;
virtual const json &default_value(const json::json_pointer &, const json &, error_handler &) const
virtual const json &defaultValue(const json::json_pointer &, const json &, error_handler &) const
{
return default_value_;
return EmptyDefault;
}
void set_default_value(const json &v) { default_value_ = v; }
static std::shared_ptr<schema> make(json &schema,
root_schema *root,
const std::vector<std::string> &key,
@ -76,8 +63,6 @@ class schema_ref : public schema
{
const std::string id_;
std::weak_ptr<schema> target_;
std::shared_ptr<schema> target_strong_; // for references to references keep also the shared_ptr because
// no one else might use it after resolving
void validate(const json::json_pointer &ptr, const json &instance, json_patch &patch, error_handler &e) const final
{
@ -89,47 +74,24 @@ class schema_ref : public schema
e.error(ptr, instance, "unresolved or freed schema-reference " + id_);
}
const json &default_value(const json::json_pointer &ptr, const json &instance, error_handler &e) const override final
const json &defaultValue(const json::json_pointer &ptr, const json &instance, error_handler &e) const override
{
if (!default_value_.is_null())
return default_value_;
auto target = target_.lock();
if (target)
return target->default_value(ptr, instance, e);
return target->defaultValue(ptr, instance, e);
else
e.error(ptr, instance, "unresolved or freed schema-reference " + id_);
e.error(ptr, instance, "unresolved or freed schema-reference " + id_);
return default_value_;
return EmptyDefault;
}
protected:
virtual std::shared_ptr<schema> make_for_default_(
std::shared_ptr<::schema> &sch,
root_schema *root,
std::vector<nlohmann::json_uri> &uris,
nlohmann::json &default_value) const override
{
// create a new reference schema using the original reference (which will be resolved later)
// to store this overloaded default value #209
auto result = std::make_shared<schema_ref>(uris[0].to_string(), root);
result->set_target(sch, true);
result->set_default_value(default_value);
return result;
};
public:
schema_ref(const std::string &id, root_schema *root)
: schema(root), id_(id) {}
const std::string &id() const { return id_; }
void set_target(const std::shared_ptr<schema> &target, bool strong = false)
{
target_ = target;
if (strong)
target_strong_ = target;
}
void set_target(const std::shared_ptr<schema> &target) { target_ = target; }
};
} // namespace
@ -139,11 +101,10 @@ namespace nlohmann
namespace json_schema
{
class root_schema
class root_schema : public schema
{
schema_loader loader_;
format_checker format_check_;
content_checker content_check_;
std::shared_ptr<schema> root_;
@ -167,23 +128,16 @@ class root_schema
public:
root_schema(schema_loader &&loader,
format_checker &&format,
content_checker &&content)
: loader_(std::move(loader)),
format_check_(std::move(format)),
content_check_(std::move(content))
{
}
format_checker &&format)
: schema(this), loader_(std::move(loader)), format_check_(std::move(format)) {}
format_checker &format_check() { return format_check_; }
content_checker &content_check() { return content_check_; }
void insert(const json_uri &uri, const std::shared_ptr<schema> &s)
{
auto &file = get_or_create_file(uri.location());
auto sch = file.schemas.lower_bound(uri.fragment());
if (sch != file.schemas.end() && !(file.schemas.key_comp()(uri.fragment(), sch->first))) {
auto schema = file.schemas.lower_bound(uri.fragment());
if (schema != file.schemas.end() && !(file.schemas.key_comp()(uri.fragment(), schema->first))) {
throw std::invalid_argument("schema with " + uri.to_string() + " already inserted");
return;
}
@ -205,35 +159,11 @@ public:
auto fragment = new_uri.pointer();
// is there a reference looking for this unknown-keyword, which is thus no longer a unknown keyword but a schema
auto unresolved = file.unresolved.find(fragment.to_string());
auto unresolved = file.unresolved.find(fragment);
if (unresolved != file.unresolved.end())
schema::make(value, this, {}, {{new_uri}});
else { // no, nothing ref'd it, keep for later
// need to create an object for each reference-token in the
// JSON-Pointer When not existing, a stringified integer reference
// token (e.g. "123") in the middle of the pointer will be
// interpreted a an array-index and an array will be created.
// json_pointer's reference_tokens is private - get them
std::deque<std::string> ref_tokens;
auto uri_pointer = uri.pointer();
while (!uri_pointer.empty()) {
ref_tokens.push_front(uri_pointer.back());
uri_pointer.pop_back();
}
// for each token create an object, if not already existing
auto unk_kw = &file.unknown_keywords;
for (auto &rt : ref_tokens) {
// create a json_pointer from rt as rt can be an stringified integer doing find on an array won't work
json::json_pointer rt_ptr{"/" + rt};
if (unk_kw->contains(rt_ptr) == false)
(*unk_kw)[rt] = json::object();
unk_kw = &(*unk_kw)[rt_ptr];
}
(*unk_kw)[key] = value;
}
else // no, nothing ref'd it, keep for later
file.unknown_keywords[fragment] = value;
// recursively add possible subschemas of unknown keywords
if (value.type() == json::value_t::object)
@ -246,23 +176,23 @@ public:
auto &file = get_or_create_file(uri.location());
// existing schema
auto sch = file.schemas.find(uri.fragment());
if (sch != file.schemas.end())
return sch->second;
auto schema = file.schemas.find(uri.fragment());
if (schema != file.schemas.end())
return schema->second;
// referencing an unknown keyword, turn it into schema
//
// an unknown keyword can only be referenced by a json-pointer,
// not by a plain name fragment
if (!uri.pointer().to_string().empty()) {
bool contains_pointer = file.unknown_keywords.contains(uri.pointer());
if (contains_pointer) {
auto &subschema = file.unknown_keywords.at(uri.pointer());
auto s = schema::make(subschema, this, {}, {{uri}});
if (s) { // if schema is valid (non-null)
if (uri.pointer() != "") {
try {
auto &subschema = file.unknown_keywords.at(uri.pointer()); // null is returned if not existing
auto s = schema::make(subschema, this, {}, {{uri}}); // A JSON Schema MUST be an object or a boolean.
if (s) { // nullptr if invalid schema, e.g. null
file.unknown_keywords.erase(uri.fragment());
return s;
}
} catch (nlohmann::detail::out_of_range &) { // at() did not find it
}
}
@ -277,10 +207,10 @@ public:
}
}
void set_root_schema(json sch)
void set_root_schema(json schema)
{
files_.clear();
root_ = schema::make(sch, this, {}, {{"#"}});
root_ = schema::make(schema, this, {}, {{"#"}});
// load all files which have not yet been loaded
do {
@ -294,11 +224,11 @@ public:
for (auto &loc : locations) {
if (files_[loc].schemas.size() == 0) { // nothing has been loaded for this file
if (loader_) {
json loaded_schema;
json sch;
loader_(loc, loaded_schema);
loader_(loc, sch);
schema::make(loaded_schema, this, {}, {{loc}});
schema::make(sch, this, {}, {{loc}});
new_schema_loaded = true;
} else {
throw std::invalid_argument("external schema reference '" + loc + "' needs loading, but no loader callback given");
@ -310,58 +240,29 @@ public:
break;
} while (1);
for (const auto &file : files_) {
if (file.second.unresolved.size() != 0) {
// Build a representation of the undefined
// references as a list of comma-separated strings.
auto n_urefs = file.second.unresolved.size();
std::string urefs = "[";
decltype(n_urefs) counter = 0;
for (const auto &p : file.second.unresolved) {
urefs += p.first;
if (counter != n_urefs - 1u) {
urefs += ", ";
}
++counter;
}
urefs += "]";
for (const auto &file : files_)
if (file.second.unresolved.size() != 0)
throw std::invalid_argument("after all files have been parsed, '" +
(file.first == "" ? "<root>" : file.first) +
"' has still the following undefined references: " + urefs);
}
}
"' has still undefined references.");
}
void validate(const json::json_pointer &ptr,
const json &instance,
json_patch &patch,
error_handler &e,
const json_uri &initial) const
void validate(const json::json_pointer &ptr, const json &instance, json_patch &patch, error_handler &e) const final
{
if (!root_) {
if (root_)
root_->validate(ptr, instance, patch, e);
else
e.error(ptr, "", "no root schema has yet been set for validating an instance");
return;
}
}
auto file_entry = files_.find(initial.location());
if (file_entry == files_.end()) {
e.error(ptr, "", "no file found serving requested root-URI. " + initial.location());
return;
}
const json &defaultValue(const json::json_pointer &ptr, const json &instance, error_handler &e) const override
{
if (root_)
return root_->defaultValue(ptr, instance, e);
else
e.error(ptr, "", "no root schema has yet been set for validating an instance");
auto &file = file_entry->second;
auto sch = file.schemas.find(initial.fragment());
if (sch == file.schemas.end()) {
e.error(ptr, "", "no schema find for request initial URI: " + initial.to_string());
return;
}
sch->second->validate(ptr, instance, patch, e);
return EmptyDefault;
}
};
@ -405,9 +306,9 @@ class logical_not : public schema
e.error(ptr, instance, "the subschema has succeeded, but it is required to not validate");
}
const json &default_value(const json::json_pointer &ptr, const json &instance, error_handler &e) const override
const json &defaultValue(const json::json_pointer &ptr, const json &instance, error_handler &e) const override
{
return subschema_->default_value(ptr, instance, e);
return subschema_->defaultValue(ptr, instance, e);
}
public:
@ -426,31 +327,6 @@ enum logical_combination_types {
oneOf
};
class logical_combination_error_handler : public error_handler
{
public:
struct error_entry {
json::json_pointer ptr_;
json instance_;
std::string message_;
};
std::vector<error_entry> error_entry_list_;
void error(const json::json_pointer &ptr, const json &instance, const std::string &message) override
{
error_entry_list_.push_back(error_entry{ptr, instance, message});
}
void propagate(error_handler &e, const std::string &prefix) const
{
for (const error_entry &entry : error_entry_list_)
e.error(entry.ptr_, entry.instance_, prefix + entry.message_);
}
operator bool() const { return !error_entry_list_.empty(); }
};
template <enum logical_combination_types combine_logic>
class logical_combination : public schema
{
@ -459,33 +335,26 @@ class logical_combination : public schema
void validate(const json::json_pointer &ptr, const json &instance, json_patch &patch, error_handler &e) const final
{
size_t count = 0;
logical_combination_error_handler error_summary;
for (std::size_t index = 0; index < subschemata_.size(); ++index) {
const std::shared_ptr<schema> &s = subschemata_[index];
logical_combination_error_handler esub;
auto oldPatchSize = patch.get_json().size();
for (auto &s : subschemata_) {
first_error_handler esub;
s->validate(ptr, instance, patch, esub);
if (!esub)
count++;
else {
patch.get_json().get_ref<nlohmann::json::array_t &>().resize(oldPatchSize);
esub.propagate(error_summary, "case#" + std::to_string(index) + "] ");
}
if (is_validate_complete(instance, ptr, e, esub, count, index))
if (is_validate_complete(instance, ptr, e, esub, count))
return;
}
if (count == 0) {
e.error(ptr, instance, "no subschema has succeeded, but one of them is required to validate. Type: " + key + ", number of failed subschemas: " + std::to_string(subschemata_.size()));
error_summary.propagate(e, "[combination: " + key + " / ");
}
// could accumulate esub details for anyOf and oneOf, but not clear how to select which subschema failure to report
// or how to report multiple such failures
if (count == 0)
e.error(ptr, instance, "no subschema has succeeded, but one of them is required to validate");
}
// specialized for each of the logical_combination_types
static const std::string key;
static bool is_validate_complete(const json &, const json::json_pointer &, error_handler &, const logical_combination_error_handler &, size_t, size_t);
static bool is_validate_complete(const json &, const json::json_pointer &, error_handler &, const first_error_handler &, size_t);
public:
logical_combination(json &sch,
@ -510,23 +379,21 @@ template <>
const std::string logical_combination<oneOf>::key = "oneOf";
template <>
bool logical_combination<allOf>::is_validate_complete(const json &, const json::json_pointer &, error_handler &e, const logical_combination_error_handler &esub, size_t, size_t current_schema_index)
bool logical_combination<allOf>::is_validate_complete(const json &, const json::json_pointer &, error_handler &e, const first_error_handler &esub, size_t)
{
if (esub) {
e.error(esub.error_entry_list_.front().ptr_, esub.error_entry_list_.front().instance_, "at least one subschema has failed, but all of them are required to validate - " + esub.error_entry_list_.front().message_);
esub.propagate(e, "[combination: allOf / case#" + std::to_string(current_schema_index) + "] ");
}
if (esub)
e.error(esub.ptr_, esub.instance_, "at least one subschema has failed, but all of them are required to validate - " + esub.message_);
return esub;
}
template <>
bool logical_combination<anyOf>::is_validate_complete(const json &, const json::json_pointer &, error_handler &, const logical_combination_error_handler &, size_t count, size_t)
bool logical_combination<anyOf>::is_validate_complete(const json &, const json::json_pointer &, error_handler &, const first_error_handler &, size_t count)
{
return count == 1;
}
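// Editor's illustration of the count semantics (not library code): with
//   {"oneOf": [{"type": "integer", "multipleOf": 3},
//              {"type": "integer", "multipleOf": 5}]}
// 9 and 10 validate (exactly one branch matches), 15 fails because both
// branches match and 7 fails because none does; with "anyOf" instead of
// "oneOf", validation stops as soon as count reaches 1, so 15 is accepted.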
template <>
bool logical_combination<oneOf>::is_validate_complete(const json &instance, const json::json_pointer &ptr, error_handler &e, const logical_combination_error_handler &, size_t count, size_t)
bool logical_combination<oneOf>::is_validate_complete(const json &instance, const json::json_pointer &ptr, error_handler &e, const first_error_handler &, size_t count)
{
if (count > 1)
e.error(ptr, instance, "more than one subschema has succeeded, but exactly one of them is required to validate");
@ -535,6 +402,7 @@ bool logical_combination<oneOf>::is_validate_complete(const json &instance, cons
class type_schema : public schema
{
json defaultValue_{};
std::vector<std::shared_ptr<schema>> type_;
std::pair<bool, json> enum_, const_;
std::vector<std::shared_ptr<schema>> logic_;
@ -547,10 +415,15 @@ class type_schema : public schema
std::shared_ptr<schema> if_, then_, else_;
const json &defaultValue(const json::json_pointer &, const json &, error_handler &) const override
{
return defaultValue_;
}
void validate(const json::json_pointer &ptr, const json &instance, json_patch &patch, error_handler &e) const override final
{
// depending on the type of the instance, run the type-specific validator - if present
auto type = type_[static_cast<uint8_t>(instance.type())];
auto type = type_[(uint8_t) instance.type()];
if (type)
type->validate(ptr, instance, patch, e);
@ -588,28 +461,13 @@ class type_schema : public schema
else_->validate(ptr, instance, patch, e);
}
}
if (instance.is_null()) {
patch.add(nlohmann::json::json_pointer{}, default_value_);
}
}
protected:
virtual std::shared_ptr<schema> make_for_default_(
std::shared_ptr<::schema> & /* sch */,
root_schema * /* root */,
std::vector<nlohmann::json_uri> & /* uris */,
nlohmann::json &default_value) const override
{
auto result = std::make_shared<type_schema>(*this);
result->set_default_value(default_value);
return result;
};
public:
type_schema(json &sch,
root_schema *root,
const std::vector<nlohmann::json_uri> &uris)
: schema(root), type_(static_cast<uint8_t>(json::value_t::discarded) + 1)
: schema(root), type_((uint8_t) json::value_t::discarded + 1)
{
// association between JSON-schema-type and NLohmann-types
static const std::vector<std::pair<std::string, json::value_t>> schema_types = {
@ -627,7 +485,7 @@ public:
auto attr = sch.find("type");
if (attr == sch.end()) // no type field means all sub-types possible
for (auto &t : schema_types)
type_[static_cast<uint8_t>(t.second)] = type_schema::make(sch, t.second, root, uris, known_keywords);
type_[(uint8_t) t.second] = type_schema::make(sch, t.second, root, uris, known_keywords);
else {
switch (attr.value().type()) { // "type": "type"
@ -635,16 +493,14 @@ public:
auto schema_type = attr.value().get<std::string>();
for (auto &t : schema_types)
if (t.first == schema_type)
type_[static_cast<uint8_t>(t.second)] = type_schema::make(sch, t.second, root, uris, known_keywords);
type_[(uint8_t) t.second] = type_schema::make(sch, t.second, root, uris, known_keywords);
} break;
case json::value_t::array: // "type": ["type1", "type2"]
for (auto &array_value : attr.value()) {
auto schema_type = array_value.get<std::string>();
for (auto &schema_type : attr.value())
for (auto &t : schema_types)
if (t.first == schema_type)
type_[static_cast<uint8_t>(t.second)] = type_schema::make(sch, t.second, root, uris, known_keywords);
}
type_[(uint8_t) t.second] = type_schema::make(sch, t.second, root, uris, known_keywords);
break;
default:
@ -654,10 +510,9 @@ public:
sch.erase(attr);
}
attr = sch.find("default");
if (attr != sch.end()) {
set_default_value(attr.value());
sch.erase(attr);
const auto defaultAttr = sch.find("default");
if (defaultAttr != sch.end()) {
defaultValue_ = defaultAttr.value();
}
for (auto &key : known_keywords)
@ -665,17 +520,12 @@ public:
// with nlohmann::json a float instance (but "number" in the schema-definition) can be seen as unsigned or integer -
// reuse the number-validator for integer values as well, if they have not been specified explicitly
if (type_[static_cast<uint8_t>(json::value_t::number_float)] && !type_[static_cast<uint8_t>(json::value_t::number_integer)])
type_[static_cast<uint8_t>(json::value_t::number_integer)] = type_[static_cast<uint8_t>(json::value_t::number_float)];
if (type_[(uint8_t) json::value_t::number_float] && !type_[(uint8_t) json::value_t::number_integer])
type_[(uint8_t) json::value_t::number_integer] = type_[(uint8_t) json::value_t::number_float];
// #54: JSON-schema does not differentiate between unsigned and signed integer - nlohmann::json does
// we stick with JSON-schema: use the integer-validator if instance-value is unsigned
type_[static_cast<uint8_t>(json::value_t::number_unsigned)] = type_[static_cast<uint8_t>(json::value_t::number_integer)];
// special for binary types
if (type_[static_cast<uint8_t>(json::value_t::string)]) {
type_[static_cast<uint8_t>(json::value_t::binary)] = type_[static_cast<uint8_t>(json::value_t::string)];
}
type_[(uint8_t) json::value_t::number_unsigned] = type_[(uint8_t) json::value_t::number_integer];
attr = sch.find("enum");
if (attr != sch.end()) {
@ -747,12 +597,11 @@ class string : public schema
#endif
std::pair<bool, std::string> format_;
std::tuple<bool, std::string, std::string> content_{false, "", ""};
std::size_t utf8_length(const std::string &s) const
{
size_t len = 0;
for (auto c : s)
for (const unsigned char &c : s)
if ((c & 0xc0) != 0x80)
len++;
return len;
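// Editor's note: len counts Unicode code points rather than bytes by
// skipping UTF-8 continuation bytes (those matching 10xxxxxx); e.g. "naïve"
// is 6 bytes in UTF-8 but counts as 5 for minLength/maxLength.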
@ -761,7 +610,7 @@ class string : public schema
void validate(const json::json_pointer &ptr, const json &instance, json_patch &, error_handler &e) const override
{
if (minLength_.first) {
if (utf8_length(instance.get<std::string>()) < minLength_.second) {
if (utf8_length(instance) < minLength_.second) {
std::ostringstream s;
s << "instance is too short as per minLength:" << minLength_.second;
e.error(ptr, instance, s.str());
@ -769,31 +618,13 @@ class string : public schema
}
if (maxLength_.first) {
if (utf8_length(instance.get<std::string>()) > maxLength_.second) {
if (utf8_length(instance) > maxLength_.second) {
std::ostringstream s;
s << "instance is too long as per maxLength: " << maxLength_.second;
e.error(ptr, instance, s.str());
}
}
if (std::get<0>(content_)) {
if (root_->content_check() == nullptr)
e.error(ptr, instance, std::string("a content checker was not provided but a contentEncoding or contentMediaType for this string have been present: '") + std::get<1>(content_) + "' '" + std::get<2>(content_) + "'");
else {
try {
root_->content_check()(std::get<1>(content_), std::get<2>(content_), instance);
} catch (const std::exception &ex) {
e.error(ptr, instance, std::string("content-checking failed: ") + ex.what());
}
}
} else if (instance.type() == json::value_t::binary) {
e.error(ptr, instance, "expected string, but get binary data");
}
if (instance.type() != json::value_t::string) {
return; // next checks only for strings
}
#ifndef NO_STD_REGEX
if (pattern_.first &&
!REGEX_NAMESPACE::regex_search(instance.get<std::string>(), pattern_.second))
@ -805,7 +636,7 @@ class string : public schema
e.error(ptr, instance, std::string("a format checker was not provided but a format keyword for this string is present: ") + format_.second);
else {
try {
root_->format_check()(format_.second, instance.get<std::string>());
root_->format_check()(format_.second, instance);
} catch (const std::exception &ex) {
e.error(ptr, instance, std::string("format-checking failed: ") + ex.what());
}
@ -819,51 +650,20 @@ public:
{
auto attr = sch.find("maxLength");
if (attr != sch.end()) {
maxLength_ = {true, attr.value().get<size_t>()};
maxLength_ = {true, attr.value()};
sch.erase(attr);
}
attr = sch.find("minLength");
if (attr != sch.end()) {
minLength_ = {true, attr.value().get<size_t>()};
minLength_ = {true, attr.value()};
sch.erase(attr);
}
attr = sch.find("contentEncoding");
if (attr != sch.end()) {
std::get<0>(content_) = true;
std::get<1>(content_) = attr.value().get<std::string>();
// special case for nlohmann::json binary types
//
// https://github.com/pboettch/json-schema-validator/pull/114
//
// We cannot state {"type": "binary"} or "type": ["binary", "number"]
// explicitly in a schema, so the behavior has to be implicit: for a
// schema where "contentEncoding" is set to "binary", an instance of
// type json::value_t::binary is accepted. A contentEncoding-callback
// has to be provided and is called accordingly. For encoding=binary,
// no other type validations are done.
sch.erase(attr);
}
attr = sch.find("contentMediaType");
if (attr != sch.end()) {
std::get<0>(content_) = true;
std::get<2>(content_) = attr.value().get<std::string>();
sch.erase(attr);
}
if (std::get<0>(content_) == true && root_->content_check() == nullptr) {
throw std::invalid_argument{"schema contains contentEncoding/contentMediaType but content checker was not set"};
}
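// Editor's sketch (assumed names, not library code) of a content_checker
// matching the typedef in json-schema.hpp; it would be passed as the third
// constructor argument of the main-branch json_validator:
//
//   auto content_check = [](const std::string &encoding,
//                           const std::string &mediaType,
//                           const nlohmann::json &instance) {
//       if (encoding == "base64" && !instance.is_string())
//           throw std::invalid_argument("base64 content must be a string");
//       (void) mediaType; // e.g. "application/json" could be parsed and checked here
//   };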
#ifndef NO_STD_REGEX
attr = sch.find("pattern");
if (attr != sch.end()) {
patternString_ = attr.value().get<std::string>();
patternString_ = attr.value();
pattern_ = {true, REGEX_NAMESPACE::regex(attr.value().get<std::string>(),
REGEX_NAMESPACE::regex::ECMAScript)};
sch.erase(attr);
@ -872,10 +672,7 @@ public:
attr = sch.find("format");
if (attr != sch.end()) {
if (root_->format_check() == nullptr)
throw std::invalid_argument{"a format checker was not provided but a format keyword for this string is present: " + format_.second};
format_ = {true, attr.value().get<std::string>()};
format_ = {true, attr.value()};
sch.erase(attr);
}
}
@ -896,12 +693,7 @@ class numeric : public schema
bool violates_multiple_of(T x) const
{
double res = std::remainder(x, multipleOf_.second);
double multiple = std::fabs(x / multipleOf_.second);
if (multiple > 1) {
res = res / multiple;
}
double eps = std::nextafter(x, 0) - static_cast<double>(x);
double eps = std::nextafter(x, 0) - x;
return std::fabs(res) > std::fabs(eps);
}
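// Editor's sketch of the tolerance idea above as a standalone helper
// (assumed name, not part of the library): x counts as a multiple of m
// when the IEEE-754 remainder is within about one ULP of x.
//
//   static bool is_multiple_of(double x, double m)
//   {
//       double res = std::remainder(x, m);
//       double eps = std::nextafter(x, 0.0) - x; // about one ULP of x, towards zero
//       return std::fabs(res) <= std::fabs(eps); // holds e.g. for x = 0.03, m = 0.01
//   }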
@ -909,32 +701,19 @@ class numeric : public schema
{
T value = instance; // conversion of json to value_type
std::ostringstream oss;
if (multipleOf_.first && value != 0) // zero is a multiple of everything
if (violates_multiple_of(value))
oss << "instance is not a multiple of " << json(multipleOf_.second);
e.error(ptr, instance, "instance is not a multiple of " + std::to_string(multipleOf_.second));
if (maximum_.first) {
if (exclusiveMaximum_ && value >= maximum_.second)
oss << "instance exceeds or equals maximum of " << json(maximum_.second);
else if (value > maximum_.second)
oss << "instance exceeds maximum of " << json(maximum_.second);
}
if (maximum_.first)
if ((exclusiveMaximum_ && value >= maximum_.second) ||
value > maximum_.second)
e.error(ptr, instance, "instance exceeds maximum of " + std::to_string(maximum_.second));
if (minimum_.first) {
if (exclusiveMinimum_ && value <= minimum_.second)
oss << "instance is below or equals minimum of " << json(minimum_.second);
else if (value < minimum_.second)
oss << "instance is below minimum of " << json(minimum_.second);
}
oss.seekp(0, std::ios::end);
auto size = oss.tellp();
if (size != 0) {
oss.seekp(0, std::ios::beg);
e.error(ptr, instance, oss.str());
}
if (minimum_.first)
if ((exclusiveMinimum_ && value <= minimum_.second) ||
value < minimum_.second)
e.error(ptr, instance, "instance is below minimum of " + std::to_string(minimum_.second));
}
public:
@ -943,33 +722,33 @@ public:
{
auto attr = sch.find("maximum");
if (attr != sch.end()) {
maximum_ = {true, attr.value().get<T>()};
maximum_ = {true, attr.value()};
kw.insert("maximum");
}
attr = sch.find("minimum");
if (attr != sch.end()) {
minimum_ = {true, attr.value().get<T>()};
minimum_ = {true, attr.value()};
kw.insert("minimum");
}
attr = sch.find("exclusiveMaximum");
if (attr != sch.end()) {
exclusiveMaximum_ = true;
maximum_ = {true, attr.value().get<T>()};
maximum_ = {true, attr.value()};
kw.insert("exclusiveMaximum");
}
attr = sch.find("exclusiveMinimum");
if (attr != sch.end()) {
minimum_ = {true, attr.value()};
exclusiveMinimum_ = true;
minimum_ = {true, attr.value().get<T>()};
kw.insert("exclusiveMinimum");
}
attr = sch.find("multipleOf");
if (attr != sch.end()) {
multipleOf_ = {true, attr.value().get<json::number_float_t>()};
multipleOf_ = {true, attr.value()};
kw.insert("multipleOf");
}
}
@ -1004,8 +783,8 @@ class boolean : public schema
{
if (!true_) { // false schema
// empty array
// switch (instance.type()) {
// case json::value_t::array:
//switch (instance.type()) {
//case json::value_t::array:
// if (instance.size() != 0) // valid false-schema
// e.error(ptr, instance, "false-schema required empty array");
// return;
@ -1099,9 +878,9 @@ class object : public schema
for (auto const &prop : properties_) {
const auto finding = instance.find(prop.first);
if (instance.end() == finding) { // if the prop is not in the instance
const auto &default_value = prop.second->default_value(ptr, instance, e);
if (!default_value.is_null()) { // if default value is available
patch.add((ptr / prop.first), default_value);
const auto &defaultValue = prop.second->defaultValue(ptr, instance, e);
if (!defaultValue.empty()) { // if default value is available
patch.add((ptr / prop.first), defaultValue);
}
}
}
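// Editor's illustration: validating {} against
//   {"properties": {"x": {"type": "integer", "default": 42}}}
// yields a patch like [{"op": "add", "path": "/x", "value": 42}], which the
// caller can apply to the instance with nlohmann::json::patch().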
@ -1121,13 +900,13 @@ public:
{
auto attr = sch.find("maxProperties");
if (attr != sch.end()) {
maxProperties_ = {true, attr.value().get<size_t>()};
maxProperties_ = {true, attr.value()};
sch.erase(attr);
}
attr = sch.find("minProperties");
if (attr != sch.end()) {
minProperties_ = {true, attr.value().get<size_t>()};
minProperties_ = {true, attr.value()};
sch.erase(attr);
}
@ -1188,11 +967,6 @@ public:
propertyNames_ = schema::make(attr.value(), root, {"propertyNames"}, uris);
sch.erase(attr);
}
attr = sch.find("default");
if (attr != sch.end()) {
set_default_value(*attr);
}
}
};
@ -1270,19 +1044,19 @@ public:
{
auto attr = sch.find("maxItems");
if (attr != sch.end()) {
maxItems_ = {true, attr.value().get<size_t>()};
maxItems_ = {true, attr.value()};
sch.erase(attr);
}
attr = sch.find("minItems");
if (attr != sch.end()) {
minItems_ = {true, attr.value().get<size_t>()};
minItems_ = {true, attr.value()};
sch.erase(attr);
}
attr = sch.find("uniqueItems");
if (attr != sch.end()) {
uniqueItems_ = attr.value().get<bool>();
uniqueItems_ = attr.value();
sch.erase(attr);
}
@ -1341,9 +1115,6 @@ std::shared_ptr<schema> type_schema::make(json &schema,
case json::value_t::discarded: // not a real type - silence please
break;
case json::value_t::binary:
break;
}
return nullptr;
}
@ -1382,47 +1153,30 @@ std::shared_ptr<schema> schema::make(json &schema,
if (std::find(uris.begin(),
uris.end(),
attr.value().get<std::string>()) == uris.end())
uris.push_back(uris.back().derive(attr.value().get<std::string>())); // so add it to the list if it is not there already
uris.push_back(uris.back().derive(attr.value())); // so add it to the list if it is not there already
schema.erase(attr);
}
auto findDefinitions = [&](const std::string &defs) -> bool {
attr = schema.find(defs);
if (attr != schema.end()) {
for (auto &def : attr.value().items())
schema::make(def.value(), root, {defs, def.key()}, uris);
schema.erase(attr);
return true;
}
return false;
};
if (!findDefinitions("$defs")) {
findDefinitions("definitions");
attr = schema.find("definitions");
if (attr != schema.end()) {
for (auto &def : attr.value().items())
schema::make(def.value(), root, {"definitions", def.key()}, uris);
schema.erase(attr);
}
attr = schema.find("$ref");
if (attr != schema.end()) { // this schema is a reference
// the last one on the uri-stack is the last id seen before coming here,
// so this is the original URI for this reference; the $ref-value thus has to be resolved against it
auto id = uris.back().derive(attr.value().get<std::string>());
auto id = uris.back().derive(attr.value());
sch = root->get_or_create_ref(id);
schema.erase(attr);
// special case where we break draft-7 and allow overriding of properties when a $ref is used
attr = schema.find("default");
if (attr != schema.end()) {
// copy the referenced schema depending on the underlying type and modify the default value
if (auto new_sch = sch->make_for_default_(sch, root, uris, attr.value())) {
sch = new_sch;
}
schema.erase(attr);
}
} else {
sch = std::make_shared<type_schema>(schema, root, uris);
}
schema.erase("$schema");
schema.erase("default");
schema.erase("title");
schema.erase("description");
} else {
@ -1455,33 +1209,19 @@ namespace json_schema
{
json_validator::json_validator(schema_loader loader,
format_checker format,
content_checker content)
: root_(std::unique_ptr<root_schema>(new root_schema(std::move(loader),
std::move(format),
std::move(content))))
format_checker format)
: root_(std::unique_ptr<root_schema>(new root_schema(std::move(loader), std::move(format))))
{
}
json_validator::json_validator(const json &schema,
schema_loader loader,
format_checker format,
content_checker content)
: json_validator(std::move(loader),
std::move(format),
std::move(content))
json_validator::json_validator(const json &schema, schema_loader loader, format_checker format)
: json_validator(std::move(loader), std::move(format))
{
set_root_schema(schema);
}
json_validator::json_validator(json &&schema,
schema_loader loader,
format_checker format,
content_checker content)
: json_validator(std::move(loader),
std::move(format),
std::move(content))
json_validator::json_validator(json &&schema, schema_loader loader, format_checker format)
: json_validator(std::move(loader), std::move(format))
{
set_root_schema(std::move(schema));
}
@ -1508,11 +1248,11 @@ json json_validator::validate(const json &instance) const
return validate(instance, err);
}
json json_validator::validate(const json &instance, error_handler &err, const json_uri &initial_uri) const
json json_validator::validate(const json &instance, error_handler &err) const
{
json::json_pointer ptr;
json_patch patch;
root_->validate(ptr, instance, patch, err, initial_uri);
root_->validate(ptr, instance, patch, err);
return patch;
}
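// Editor's usage sketch for the validate() API above, assuming the
// main-branch signatures shown in this diff (optional content_checker and
// initial_uri); illustration only, not part of the diffed file.
#include <nlohmann/json-schema.hpp>
#include <iostream>

using nlohmann::json;
using nlohmann::json_schema::json_validator;

// reports errors instead of throwing
class printing_error_handler : public nlohmann::json_schema::error_handler
{
public:
    bool failed = false;
    void error(const json::json_pointer &ptr, const json &, const std::string &message) override
    {
        failed = true;
        std::cerr << "validation error at '" << ptr.to_string() << "': " << message << "\n";
    }
};

int main()
{
    json schema = json::parse(R"({"properties": {"n": {"type": "integer", "minimum": 1, "default": 1}}})");
    json_validator validator(schema); // loader, format and content checkers default to nullptr
    printing_error_handler err;
    json patch = validator.validate(json::parse(R"({"n": 0})"), err); // initial_uri defaults to "#"
    return err.failed ? 1 : 0;                                        // defaults, if any, land in 'patch'
}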

View File

@ -24,11 +24,11 @@
#include <nlohmann/json.hpp>
#ifdef NLOHMANN_JSON_VERSION_MAJOR
# if (NLOHMANN_JSON_VERSION_MAJOR * 10000 + NLOHMANN_JSON_VERSION_MINOR * 100 + NLOHMANN_JSON_VERSION_PATCH) < 30800
# error "Please use this library with NLohmann's JSON version 3.8.0 or higher"
# if (NLOHMANN_JSON_VERSION_MAJOR * 10000 + NLOHMANN_JSON_VERSION_MINOR * 100 + NLOHMANN_JSON_VERSION_PATCH) < 30600
# error "Please use this library with NLohmann's JSON version 3.6.0 or higher"
# endif
#else
# error "expected existing NLOHMANN_JSON_VERSION_MAJOR preproc variable, please update to NLohmann's JSON 3.8.0"
# error "expected existing NLOHMANN_JSON_VERSION_MAJOR preproc variable, please update to NLohmann's JSON 3.6.0"
#endif
// make yourself a home - welcome to nlohmann's namespace
@ -59,9 +59,10 @@ protected:
// decodes a JSON uri and replaces all or part of the currently stored values
void update(const std::string &uri);
std::tuple<std::string, std::string, std::string, std::string, std::string> as_tuple() const
std::tuple<std::string, std::string, std::string, std::string, std::string> tie() const
{
return std::make_tuple(urn_, scheme_, authority_, path_, identifier_ != "" ? identifier_ : pointer_.to_string());
return std::tie(urn_, scheme_, authority_, path_,
identifier_ != "" ? identifier_ : pointer_);
}
public:
@ -80,7 +81,7 @@ public:
std::string fragment() const
{
if (identifier_ == "")
return pointer_.to_string();
return pointer_;
else
return identifier_;
}
@ -114,12 +115,12 @@ public:
friend bool operator<(const json_uri &l, const json_uri &r)
{
return l.as_tuple() < r.as_tuple();
return l.tie() < r.tie();
}
friend bool operator==(const json_uri &l, const json_uri &r)
{
return l.as_tuple() == r.as_tuple();
return l.tie() == r.tie();
}
friend std::ostream &operator<<(std::ostream &os, const json_uri &u);
@ -132,7 +133,6 @@ extern json draft7_schema_builtin;
typedef std::function<void(const json_uri & /*id*/, json & /*value*/)> schema_loader;
typedef std::function<void(const std::string & /*format*/, const std::string & /*value*/)> format_checker;
typedef std::function<void(const std::string & /*contentEncoding*/, const std::string & /*contentMediaType*/, const json & /*instance*/)> content_checker;
// Interface for validation error handlers
class JSON_SCHEMA_VALIDATOR_API error_handler
@ -159,7 +159,7 @@ public:
/**
* Checks validity of JSON schema built-in string format specifiers like 'date-time', 'ipv4', ...
*/
void JSON_SCHEMA_VALIDATOR_API default_string_format_check(const std::string &format, const std::string &value);
void default_string_format_check(const std::string &format, const std::string &value);
class root_schema;
@ -168,10 +168,10 @@ class JSON_SCHEMA_VALIDATOR_API json_validator
std::unique_ptr<root_schema> root_;
public:
json_validator(schema_loader = nullptr, format_checker = nullptr, content_checker = nullptr);
json_validator(schema_loader = nullptr, format_checker = nullptr);
json_validator(const json &, schema_loader = nullptr, format_checker = nullptr, content_checker = nullptr);
json_validator(json &&, schema_loader = nullptr, format_checker = nullptr, content_checker = nullptr);
json_validator(const json &, schema_loader = nullptr, format_checker = nullptr);
json_validator(json &&, schema_loader = nullptr, format_checker = nullptr);
json_validator(json_validator &&);
json_validator &operator=(json_validator &&);
@ -189,7 +189,7 @@ public:
json validate(const json &) const;
// validate a json-document based on the root-schema with a custom error-handler
json validate(const json &, error_handler &, const json_uri &initial_uri = json_uri("#")) const;
json validate(const json &, error_handler &) const;
};
} // namespace json_schema

View File

@ -1,792 +0,0 @@
/*
Snarfed from <https://github.com/gene-hightower/smtp-address-validator>
<http://opensource.org/licenses/MIT>:
Copyright (c) 2021 Gene Hightower
Permission is hereby granted, free of charge, to any person obtaining a copy of
this software and associated documentation files (the "Software"), to deal in
the Software without restriction, including without limitation the rights to
use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
of the Software, and to permit persons to whom the Software is furnished to do
so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
*/
#include "smtp-address-validator.hpp"
static const signed char _address_actions[] = {
0, 1, 0, 1, 1, 0};
static const short _address_key_offsets[] = {
0, 0, 24, 26, 50, 52, 54, 56,
58, 60, 62, 86, 103, 105, 107, 109,
111, 113, 115, 117, 134, 150, 161, 168,
176, 180, 181, 190, 195, 196, 201, 202,
207, 210, 213, 219, 222, 225, 228, 234,
237, 240, 243, 249, 252, 261, 270, 282,
293, 302, 311, 320, 328, 345, 353, 360,
367, 368, 375, 382, 389, 396, 397, 404,
411, 418, 425, 426, 433, 440, 447, 454,
455, 462, 469, 476, 483, 484, 491, 498,
505, 512, 513, 523, 531, 538, 545, 546,
552, 559, 566, 573, 581, 589, 597, 608,
618, 626, 634, 641, 649, 657, 665, 667,
673, 681, 689, 697, 699, 705, 713, 721,
729, 731, 737, 745, 753, 761, 763, 769,
777, 785, 793, 795, 802, 812, 821, 829,
837, 839, 848, 857, 865, 873, 875, 884,
893, 901, 909, 911, 920, 929, 937, 945,
947, 956, 965, 974, 983, 992, 1004, 1015,
1024, 1033, 1042, 1051, 1060, 1072, 1083, 1092,
1101, 1109, 1118, 1127, 1136, 1148, 1159, 1168,
1177, 1185, 1194, 1203, 1212, 1224, 1235, 1244,
1253, 1261, 1270, 1279, 1288, 1300, 1311, 1320,
1329, 1337, 1339, 1353, 1355, 1357, 1359, 1361,
1363, 1365, 1367, 1368, 1370, 1388, 0};
static const signed char _address_trans_keys[] = {
-32, -19, -16, -12, 34, 45, 61, 63,
-62, -33, -31, -17, -15, -13, 33, 39,
42, 43, 47, 57, 65, 90, 94, 126,
-128, -65, -32, -19, -16, -12, 33, 46,
61, 64, -62, -33, -31, -17, -15, -13,
35, 39, 42, 43, 45, 57, 63, 90,
94, 126, -96, -65, -128, -65, -128, -97,
-112, -65, -128, -65, -128, -113, -32, -19,
-16, -12, 33, 45, 61, 63, -62, -33,
-31, -17, -15, -13, 35, 39, 42, 43,
47, 57, 65, 90, 94, 126, -32, -19,
-16, -12, 91, -62, -33, -31, -17, -15,
-13, 48, 57, 65, 90, 97, 122, -128,
-65, -96, -65, -128, -65, -128, -97, -112,
-65, -128, -65, -128, -113, -32, -19, -16,
-12, 45, -62, -33, -31, -17, -15, -13,
48, 57, 65, 90, 97, 122, -32, -19,
-16, -12, -62, -33, -31, -17, -15, -13,
48, 57, 65, 90, 97, 122, 45, 48,
49, 50, 73, 51, 57, 65, 90, 97,
122, 45, 48, 57, 65, 90, 97, 122,
45, 58, 48, 57, 65, 90, 97, 122,
33, 90, 94, 126, 93, 45, 46, 58,
48, 57, 65, 90, 97, 122, 48, 49,
50, 51, 57, 46, 48, 49, 50, 51,
57, 46, 48, 49, 50, 51, 57, 93,
48, 57, 93, 48, 57, 53, 93, 48,
52, 54, 57, 93, 48, 53, 46, 48,
57, 46, 48, 57, 46, 53, 48, 52,
54, 57, 46, 48, 53, 46, 48, 57,
46, 48, 57, 46, 53, 48, 52, 54,
57, 46, 48, 53, 45, 46, 58, 48,
57, 65, 90, 97, 122, 45, 46, 58,
48, 57, 65, 90, 97, 122, 45, 46,
53, 58, 48, 52, 54, 57, 65, 90,
97, 122, 45, 46, 58, 48, 53, 54,
57, 65, 90, 97, 122, 45, 58, 80,
48, 57, 65, 90, 97, 122, 45, 58,
118, 48, 57, 65, 90, 97, 122, 45,
54, 58, 48, 57, 65, 90, 97, 122,
45, 58, 48, 57, 65, 90, 97, 122,
58, 33, 47, 48, 57, 59, 64, 65,
70, 71, 90, 94, 96, 97, 102, 103,
126, 58, 93, 48, 57, 65, 70, 97,
102, 58, 48, 57, 65, 70, 97, 102,
58, 48, 57, 65, 70, 97, 102, 58,
58, 48, 57, 65, 70, 97, 102, 58,
48, 57, 65, 70, 97, 102, 58, 48,
57, 65, 70, 97, 102, 58, 48, 57,
65, 70, 97, 102, 58, 58, 48, 57,
65, 70, 97, 102, 58, 48, 57, 65,
70, 97, 102, 58, 48, 57, 65, 70,
97, 102, 58, 48, 57, 65, 70, 97,
102, 58, 58, 48, 57, 65, 70, 97,
102, 58, 48, 57, 65, 70, 97, 102,
58, 48, 57, 65, 70, 97, 102, 58,
48, 57, 65, 70, 97, 102, 58, 58,
48, 57, 65, 70, 97, 102, 58, 48,
57, 65, 70, 97, 102, 58, 48, 57,
65, 70, 97, 102, 58, 48, 57, 65,
70, 97, 102, 58, 58, 48, 57, 65,
70, 97, 102, 58, 48, 57, 65, 70,
97, 102, 58, 48, 57, 65, 70, 97,
102, 58, 48, 57, 65, 70, 97, 102,
58, 48, 49, 50, 58, 51, 57, 65,
70, 97, 102, 46, 58, 48, 57, 65,
70, 97, 102, 58, 48, 57, 65, 70,
97, 102, 58, 48, 57, 65, 70, 97,
102, 58, 48, 57, 65, 70, 97, 102,
93, 48, 57, 65, 70, 97, 102, 93,
48, 57, 65, 70, 97, 102, 93, 48,
57, 65, 70, 97, 102, 46, 58, 48,
57, 65, 70, 97, 102, 46, 58, 48,
57, 65, 70, 97, 102, 46, 58, 48,
57, 65, 70, 97, 102, 46, 53, 58,
48, 52, 54, 57, 65, 70, 97, 102,
46, 58, 48, 53, 54, 57, 65, 70,
97, 102, 46, 58, 48, 57, 65, 70,
97, 102, 46, 58, 48, 57, 65, 70,
97, 102, 93, 48, 57, 65, 70, 97,
102, 58, 93, 48, 57, 65, 70, 97,
102, 58, 93, 48, 57, 65, 70, 97,
102, 58, 93, 48, 57, 65, 70, 97,
102, 58, 93, 48, 57, 65, 70, 97,
102, 58, 93, 48, 57, 65, 70, 97,
102, 58, 93, 48, 57, 65, 70, 97,
102, 58, 93, 48, 57, 65, 70, 97,
102, 58, 93, 48, 57, 65, 70, 97,
102, 58, 93, 48, 57, 65, 70, 97,
102, 58, 93, 48, 57, 65, 70, 97,
102, 58, 93, 48, 57, 65, 70, 97,
102, 58, 93, 48, 57, 65, 70, 97,
102, 58, 93, 48, 57, 65, 70, 97,
102, 58, 93, 48, 57, 65, 70, 97,
102, 58, 93, 48, 57, 65, 70, 97,
102, 58, 93, 48, 57, 65, 70, 97,
102, 58, 93, 48, 57, 65, 70, 97,
102, 58, 93, 48, 57, 65, 70, 97,
102, 58, 93, 48, 57, 65, 70, 97,
102, 58, 93, 58, 48, 57, 65, 70,
97, 102, 48, 49, 50, 93, 51, 57,
65, 70, 97, 102, 46, 58, 93, 48,
57, 65, 70, 97, 102, 58, 93, 48,
57, 65, 70, 97, 102, 58, 93, 48,
57, 65, 70, 97, 102, 58, 93, 48,
49, 50, 51, 57, 65, 70, 97, 102,
46, 58, 93, 48, 57, 65, 70, 97,
102, 58, 93, 48, 57, 65, 70, 97,
102, 58, 93, 48, 57, 65, 70, 97,
102, 58, 93, 48, 49, 50, 51, 57,
65, 70, 97, 102, 46, 58, 93, 48,
57, 65, 70, 97, 102, 58, 93, 48,
57, 65, 70, 97, 102, 58, 93, 48,
57, 65, 70, 97, 102, 58, 93, 48,
49, 50, 51, 57, 65, 70, 97, 102,
46, 58, 93, 48, 57, 65, 70, 97,
102, 58, 93, 48, 57, 65, 70, 97,
102, 58, 93, 48, 57, 65, 70, 97,
102, 58, 93, 48, 49, 50, 51, 57,
65, 70, 97, 102, 46, 58, 93, 48,
57, 65, 70, 97, 102, 46, 58, 93,
48, 57, 65, 70, 97, 102, 46, 58,
93, 48, 57, 65, 70, 97, 102, 46,
58, 93, 48, 57, 65, 70, 97, 102,
46, 53, 58, 93, 48, 52, 54, 57,
65, 70, 97, 102, 46, 58, 93, 48,
53, 54, 57, 65, 70, 97, 102, 46,
58, 93, 48, 57, 65, 70, 97, 102,
46, 58, 93, 48, 57, 65, 70, 97,
102, 46, 58, 93, 48, 57, 65, 70,
97, 102, 46, 58, 93, 48, 57, 65,
70, 97, 102, 46, 58, 93, 48, 57,
65, 70, 97, 102, 46, 53, 58, 93,
48, 52, 54, 57, 65, 70, 97, 102,
46, 58, 93, 48, 53, 54, 57, 65,
70, 97, 102, 46, 58, 93, 48, 57,
65, 70, 97, 102, 46, 58, 93, 48,
57, 65, 70, 97, 102, 58, 93, 48,
57, 65, 70, 97, 102, 46, 58, 93,
48, 57, 65, 70, 97, 102, 46, 58,
93, 48, 57, 65, 70, 97, 102, 46,
58, 93, 48, 57, 65, 70, 97, 102,
46, 53, 58, 93, 48, 52, 54, 57,
65, 70, 97, 102, 46, 58, 93, 48,
53, 54, 57, 65, 70, 97, 102, 46,
58, 93, 48, 57, 65, 70, 97, 102,
46, 58, 93, 48, 57, 65, 70, 97,
102, 58, 93, 48, 57, 65, 70, 97,
102, 46, 58, 93, 48, 57, 65, 70,
97, 102, 46, 58, 93, 48, 57, 65,
70, 97, 102, 46, 58, 93, 48, 57,
65, 70, 97, 102, 46, 53, 58, 93,
48, 52, 54, 57, 65, 70, 97, 102,
46, 58, 93, 48, 53, 54, 57, 65,
70, 97, 102, 46, 58, 93, 48, 57,
65, 70, 97, 102, 46, 58, 93, 48,
57, 65, 70, 97, 102, 58, 93, 48,
57, 65, 70, 97, 102, 46, 58, 93,
48, 57, 65, 70, 97, 102, 46, 58,
93, 48, 57, 65, 70, 97, 102, 46,
58, 93, 48, 57, 65, 70, 97, 102,
46, 53, 58, 93, 48, 52, 54, 57,
65, 70, 97, 102, 46, 58, 93, 48,
53, 54, 57, 65, 70, 97, 102, 46,
58, 93, 48, 57, 65, 70, 97, 102,
46, 58, 93, 48, 57, 65, 70, 97,
102, 58, 93, 48, 57, 65, 70, 97,
102, 58, 93, -32, -19, -16, -12, 34,
92, -62, -33, -31, -17, -15, -13, 32,
126, -128, -65, -96, -65, -128, -65, -128,
-97, -112, -65, -128, -65, -128, -113, 64,
32, 126, -32, -19, -16, -12, 45, 46,
-62, -33, -31, -17, -15, -13, 48, 57,
65, 90, 97, 122, 0};
static const signed char _address_single_lengths[] = {
0, 8, 0, 8, 0, 0, 0, 0,
0, 0, 8, 5, 0, 0, 0, 0,
0, 0, 0, 5, 4, 5, 1, 2,
0, 1, 3, 3, 1, 3, 1, 3,
1, 1, 2, 1, 1, 1, 2, 1,
1, 1, 2, 1, 3, 3, 4, 3,
3, 3, 3, 2, 1, 2, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 1, 1, 1, 1, 1, 1,
1, 1, 4, 2, 1, 1, 1, 0,
1, 1, 1, 2, 2, 2, 3, 2,
2, 2, 1, 2, 2, 2, 2, 0,
2, 2, 2, 2, 0, 2, 2, 2,
2, 0, 2, 2, 2, 2, 0, 2,
2, 2, 2, 1, 4, 3, 2, 2,
2, 3, 3, 2, 2, 2, 3, 3,
2, 2, 2, 3, 3, 2, 2, 2,
3, 3, 3, 3, 3, 4, 3, 3,
3, 3, 3, 3, 4, 3, 3, 3,
2, 3, 3, 3, 4, 3, 3, 3,
2, 3, 3, 3, 4, 3, 3, 3,
2, 3, 3, 3, 4, 3, 3, 3,
2, 2, 6, 0, 0, 0, 0, 0,
0, 0, 1, 0, 6, 0, 0};
static const signed char _address_range_lengths[] = {
0, 8, 1, 8, 1, 1, 1, 1,
1, 1, 8, 6, 1, 1, 1, 1,
1, 1, 1, 6, 6, 3, 3, 3,
2, 0, 3, 1, 0, 1, 0, 1,
1, 1, 2, 1, 1, 1, 2, 1,
1, 1, 2, 1, 3, 3, 4, 4,
3, 3, 3, 3, 8, 3, 3, 3,
0, 3, 3, 3, 3, 0, 3, 3,
3, 3, 0, 3, 3, 3, 3, 0,
3, 3, 3, 3, 0, 3, 3, 3,
3, 0, 3, 3, 3, 3, 0, 3,
3, 3, 3, 3, 3, 3, 4, 4,
3, 3, 3, 3, 3, 3, 0, 3,
3, 3, 3, 0, 3, 3, 3, 3,
0, 3, 3, 3, 3, 0, 3, 3,
3, 3, 0, 3, 3, 3, 3, 3,
0, 3, 3, 3, 3, 0, 3, 3,
3, 3, 0, 3, 3, 3, 3, 0,
3, 3, 3, 3, 3, 4, 4, 3,
3, 3, 3, 3, 4, 4, 3, 3,
3, 3, 3, 3, 4, 4, 3, 3,
3, 3, 3, 3, 4, 4, 3, 3,
3, 3, 3, 3, 4, 4, 3, 3,
3, 0, 4, 1, 1, 1, 1, 1,
1, 1, 0, 1, 6, 0, 0};
static const short _address_index_offsets[] = {
0, 0, 17, 19, 36, 38, 40, 42,
44, 46, 48, 65, 77, 79, 81, 83,
85, 87, 89, 91, 103, 114, 123, 128,
134, 137, 139, 146, 151, 153, 158, 160,
165, 168, 171, 176, 179, 182, 185, 190,
193, 196, 199, 204, 207, 214, 221, 230,
238, 245, 252, 259, 265, 275, 281, 286,
291, 293, 298, 303, 308, 313, 315, 320,
325, 330, 335, 337, 342, 347, 352, 357,
359, 364, 369, 374, 379, 381, 386, 391,
396, 401, 403, 411, 417, 422, 427, 429,
433, 438, 443, 448, 454, 460, 466, 474,
481, 487, 493, 498, 504, 510, 516, 519,
523, 529, 535, 541, 544, 548, 554, 560,
566, 569, 573, 579, 585, 591, 594, 598,
604, 610, 616, 619, 624, 632, 639, 645,
651, 654, 661, 668, 674, 680, 683, 690,
697, 703, 709, 712, 719, 726, 732, 738,
741, 748, 755, 762, 769, 776, 785, 793,
800, 807, 814, 821, 828, 837, 845, 852,
859, 865, 872, 879, 886, 895, 903, 910,
917, 923, 930, 937, 944, 953, 961, 968,
975, 981, 988, 995, 1002, 1011, 1019, 1026,
1033, 1039, 1042, 1053, 1055, 1057, 1059, 1061,
1063, 1065, 1067, 1069, 1071, 1084, 0};
static const short _address_cond_targs[] = {
4, 6, 7, 9, 186, 3, 3, 3,
2, 5, 8, 3, 3, 3, 3, 3,
0, 3, 0, 4, 6, 7, 9, 3,
10, 3, 11, 2, 5, 8, 3, 3,
3, 3, 3, 0, 2, 0, 2, 0,
2, 0, 5, 0, 5, 0, 5, 0,
4, 6, 7, 9, 3, 3, 3, 3,
2, 5, 8, 3, 3, 3, 3, 3,
0, 13, 15, 16, 18, 21, 12, 14,
17, 196, 196, 196, 0, 196, 0, 12,
0, 12, 0, 12, 0, 14, 0, 14,
0, 14, 0, 13, 15, 16, 18, 19,
12, 14, 17, 196, 196, 196, 0, 13,
15, 16, 18, 12, 14, 17, 196, 196,
196, 0, 22, 26, 44, 46, 48, 45,
23, 23, 0, 22, 23, 23, 23, 0,
22, 24, 23, 23, 23, 0, 25, 25,
0, 197, 0, 22, 27, 24, 23, 23,
23, 0, 28, 40, 42, 41, 0, 29,
0, 30, 36, 38, 37, 0, 31, 0,
25, 32, 34, 33, 0, 197, 33, 0,
197, 25, 0, 35, 197, 33, 25, 0,
197, 25, 0, 31, 37, 0, 31, 30,
0, 31, 39, 37, 30, 0, 31, 30,
0, 29, 41, 0, 29, 28, 0, 29,
43, 41, 28, 0, 29, 28, 0, 22,
27, 24, 45, 23, 23, 0, 22, 27,
24, 26, 23, 23, 0, 22, 27, 47,
24, 45, 26, 23, 23, 0, 22, 27,
24, 26, 23, 23, 23, 0, 22, 24,
49, 23, 23, 23, 0, 22, 24, 50,
23, 23, 23, 0, 22, 51, 24, 23,
23, 23, 0, 22, 52, 23, 23, 23,
0, 185, 25, 53, 25, 53, 25, 25,
53, 25, 0, 57, 197, 54, 54, 54,
0, 57, 55, 55, 55, 0, 57, 56,
56, 56, 0, 57, 0, 124, 58, 58,
58, 0, 62, 59, 59, 59, 0, 62,
60, 60, 60, 0, 62, 61, 61, 61,
0, 62, 0, 124, 63, 63, 63, 0,
67, 64, 64, 64, 0, 67, 65, 65,
65, 0, 67, 66, 66, 66, 0, 67,
0, 124, 68, 68, 68, 0, 72, 69,
69, 69, 0, 72, 70, 70, 70, 0,
72, 71, 71, 71, 0, 72, 0, 124,
73, 73, 73, 0, 77, 74, 74, 74,
0, 77, 75, 75, 75, 0, 77, 76,
76, 76, 0, 77, 0, 98, 78, 78,
78, 0, 82, 79, 79, 79, 0, 82,
80, 80, 80, 0, 82, 81, 81, 81,
0, 82, 0, 83, 91, 94, 98, 97,
123, 123, 0, 27, 87, 84, 84, 84,
0, 87, 85, 85, 85, 0, 87, 86,
86, 86, 0, 87, 0, 88, 88, 88,
0, 197, 89, 89, 89, 0, 197, 90,
90, 90, 0, 197, 25, 25, 25, 0,
27, 87, 92, 84, 84, 0, 27, 87,
93, 85, 85, 0, 27, 87, 86, 86,
86, 0, 27, 95, 87, 92, 96, 84,
84, 0, 27, 87, 93, 85, 85, 85,
0, 27, 87, 85, 85, 85, 0, 27,
87, 96, 84, 84, 0, 197, 99, 99,
99, 0, 103, 197, 100, 100, 100, 0,
103, 197, 101, 101, 101, 0, 103, 197,
102, 102, 102, 0, 103, 197, 0, 104,
104, 104, 0, 108, 197, 105, 105, 105,
0, 108, 197, 106, 106, 106, 0, 108,
197, 107, 107, 107, 0, 108, 197, 0,
109, 109, 109, 0, 113, 197, 110, 110,
110, 0, 113, 197, 111, 111, 111, 0,
113, 197, 112, 112, 112, 0, 113, 197,
0, 114, 114, 114, 0, 118, 197, 115,
115, 115, 0, 118, 197, 116, 116, 116,
0, 118, 197, 117, 117, 117, 0, 118,
197, 0, 119, 119, 119, 0, 87, 197,
120, 120, 120, 0, 87, 197, 121, 121,
121, 0, 87, 197, 122, 122, 122, 0,
87, 197, 0, 87, 84, 84, 84, 0,
125, 177, 180, 197, 183, 184, 184, 0,
27, 129, 197, 126, 126, 126, 0, 129,
197, 127, 127, 127, 0, 129, 197, 128,
128, 128, 0, 129, 197, 0, 130, 169,
172, 175, 176, 176, 0, 27, 134, 197,
131, 131, 131, 0, 134, 197, 132, 132,
132, 0, 134, 197, 133, 133, 133, 0,
134, 197, 0, 135, 161, 164, 167, 168,
168, 0, 27, 139, 197, 136, 136, 136,
0, 139, 197, 137, 137, 137, 0, 139,
197, 138, 138, 138, 0, 139, 197, 0,
140, 153, 156, 159, 160, 160, 0, 27,
144, 197, 141, 141, 141, 0, 144, 197,
142, 142, 142, 0, 144, 197, 143, 143,
143, 0, 144, 197, 0, 145, 146, 149,
152, 119, 119, 0, 27, 87, 197, 120,
120, 120, 0, 27, 87, 197, 147, 120,
120, 0, 27, 87, 197, 148, 121, 121,
0, 27, 87, 197, 122, 122, 122, 0,
27, 150, 87, 197, 147, 151, 120, 120,
0, 27, 87, 197, 148, 121, 121, 121,
0, 27, 87, 197, 121, 121, 121, 0,
27, 87, 197, 151, 120, 120, 0, 27,
144, 197, 154, 141, 141, 0, 27, 144,
197, 155, 142, 142, 0, 27, 144, 197,
143, 143, 143, 0, 27, 157, 144, 197,
154, 158, 141, 141, 0, 27, 144, 197,
155, 142, 142, 142, 0, 27, 144, 197,
142, 142, 142, 0, 27, 144, 197, 158,
141, 141, 0, 144, 197, 141, 141, 141,
0, 27, 139, 197, 162, 136, 136, 0,
27, 139, 197, 163, 137, 137, 0, 27,
139, 197, 138, 138, 138, 0, 27, 165,
139, 197, 162, 166, 136, 136, 0, 27,
139, 197, 163, 137, 137, 137, 0, 27,
139, 197, 137, 137, 137, 0, 27, 139,
197, 166, 136, 136, 0, 139, 197, 136,
136, 136, 0, 27, 134, 197, 170, 131,
131, 0, 27, 134, 197, 171, 132, 132,
0, 27, 134, 197, 133, 133, 133, 0,
27, 173, 134, 197, 170, 174, 131, 131,
0, 27, 134, 197, 171, 132, 132, 132,
0, 27, 134, 197, 132, 132, 132, 0,
27, 134, 197, 174, 131, 131, 0, 134,
197, 131, 131, 131, 0, 27, 129, 197,
178, 126, 126, 0, 27, 129, 197, 179,
127, 127, 0, 27, 129, 197, 128, 128,
128, 0, 27, 181, 129, 197, 178, 182,
126, 126, 0, 27, 129, 197, 179, 127,
127, 127, 0, 27, 129, 197, 127, 127,
127, 0, 27, 129, 197, 182, 126, 126,
0, 129, 197, 126, 126, 126, 0, 124,
197, 0, 188, 190, 191, 193, 194, 195,
187, 189, 192, 186, 0, 186, 0, 187,
0, 187, 0, 187, 0, 189, 0, 189,
0, 189, 0, 11, 0, 186, 0, 13,
15, 16, 18, 19, 20, 12, 14, 17,
196, 196, 196, 0, 0, 0, 1, 2,
3, 4, 5, 6, 7, 8, 9, 10,
11, 12, 13, 14, 15, 16, 17, 18,
19, 20, 21, 22, 23, 24, 25, 26,
27, 28, 29, 30, 31, 32, 33, 34,
35, 36, 37, 38, 39, 40, 41, 42,
43, 44, 45, 46, 47, 48, 49, 50,
51, 52, 53, 54, 55, 56, 57, 58,
59, 60, 61, 62, 63, 64, 65, 66,
67, 68, 69, 70, 71, 72, 73, 74,
75, 76, 77, 78, 79, 80, 81, 82,
83, 84, 85, 86, 87, 88, 89, 90,
91, 92, 93, 94, 95, 96, 97, 98,
99, 100, 101, 102, 103, 104, 105, 106,
107, 108, 109, 110, 111, 112, 113, 114,
115, 116, 117, 118, 119, 120, 121, 122,
123, 124, 125, 126, 127, 128, 129, 130,
131, 132, 133, 134, 135, 136, 137, 138,
139, 140, 141, 142, 143, 144, 145, 146,
147, 148, 149, 150, 151, 152, 153, 154,
155, 156, 157, 158, 159, 160, 161, 162,
163, 164, 165, 166, 167, 168, 169, 170,
171, 172, 173, 174, 175, 176, 177, 178,
179, 180, 181, 182, 183, 184, 185, 186,
187, 188, 189, 190, 191, 192, 193, 194,
195, 196, 197, 0};
static const signed char _address_cond_actions[] = {
0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0,
3, 0, 3, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 3, 0, 3, 0, 3,
0, 3, 0, 3, 0, 3, 0, 3,
0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0,
3, 0, 0, 0, 0, 0, 0, 0,
0, 1, 1, 1, 3, 1, 3, 0,
3, 0, 3, 0, 3, 0, 3, 0,
3, 0, 3, 0, 0, 0, 0, 0,
0, 0, 0, 1, 1, 1, 3, 0,
0, 0, 0, 0, 0, 0, 1, 1,
1, 3, 0, 0, 0, 0, 0, 0,
0, 0, 3, 0, 0, 0, 0, 3,
0, 0, 0, 0, 0, 3, 0, 0,
3, 1, 3, 0, 0, 0, 0, 0,
0, 3, 0, 0, 0, 0, 3, 0,
3, 0, 0, 0, 0, 3, 0, 3,
0, 0, 0, 0, 3, 1, 0, 3,
1, 0, 3, 0, 1, 0, 0, 3,
1, 0, 3, 0, 0, 3, 0, 0,
3, 0, 0, 0, 0, 3, 0, 0,
3, 0, 0, 3, 0, 0, 3, 0,
0, 0, 0, 3, 0, 0, 3, 0,
0, 0, 0, 0, 0, 3, 0, 0,
0, 0, 0, 0, 3, 0, 0, 0,
0, 0, 0, 0, 0, 3, 0, 0,
0, 0, 0, 0, 0, 3, 0, 0,
0, 0, 0, 0, 3, 0, 0, 0,
0, 0, 0, 3, 0, 0, 0, 0,
0, 0, 3, 0, 0, 0, 0, 0,
3, 0, 0, 0, 0, 0, 0, 0,
0, 0, 3, 0, 1, 0, 0, 0,
3, 0, 0, 0, 0, 3, 0, 0,
0, 0, 3, 0, 3, 0, 0, 0,
0, 3, 0, 0, 0, 0, 3, 0,
0, 0, 0, 3, 0, 0, 0, 0,
3, 0, 3, 0, 0, 0, 0, 3,
0, 0, 0, 0, 3, 0, 0, 0,
0, 3, 0, 0, 0, 0, 3, 0,
3, 0, 0, 0, 0, 3, 0, 0,
0, 0, 3, 0, 0, 0, 0, 3,
0, 0, 0, 0, 3, 0, 3, 0,
0, 0, 0, 3, 0, 0, 0, 0,
3, 0, 0, 0, 0, 3, 0, 0,
0, 0, 3, 0, 3, 0, 0, 0,
0, 3, 0, 0, 0, 0, 3, 0,
0, 0, 0, 3, 0, 0, 0, 0,
3, 0, 3, 0, 0, 0, 0, 0,
0, 0, 3, 0, 0, 0, 0, 0,
3, 0, 0, 0, 0, 3, 0, 0,
0, 0, 3, 0, 3, 0, 0, 0,
3, 1, 0, 0, 0, 3, 1, 0,
0, 0, 3, 1, 0, 0, 0, 3,
0, 0, 0, 0, 0, 3, 0, 0,
0, 0, 0, 3, 0, 0, 0, 0,
0, 3, 0, 0, 0, 0, 0, 0,
0, 3, 0, 0, 0, 0, 0, 0,
3, 0, 0, 0, 0, 0, 3, 0,
0, 0, 0, 0, 3, 1, 0, 0,
0, 3, 0, 1, 0, 0, 0, 3,
0, 1, 0, 0, 0, 3, 0, 1,
0, 0, 0, 3, 0, 1, 3, 0,
0, 0, 3, 0, 1, 0, 0, 0,
3, 0, 1, 0, 0, 0, 3, 0,
1, 0, 0, 0, 3, 0, 1, 3,
0, 0, 0, 3, 0, 1, 0, 0,
0, 3, 0, 1, 0, 0, 0, 3,
0, 1, 0, 0, 0, 3, 0, 1,
3, 0, 0, 0, 3, 0, 1, 0,
0, 0, 3, 0, 1, 0, 0, 0,
3, 0, 1, 0, 0, 0, 3, 0,
1, 3, 0, 0, 0, 3, 0, 1,
0, 0, 0, 3, 0, 1, 0, 0,
0, 3, 0, 1, 0, 0, 0, 3,
0, 1, 3, 0, 0, 0, 0, 3,
0, 0, 0, 1, 0, 0, 0, 3,
0, 0, 1, 0, 0, 0, 3, 0,
1, 0, 0, 0, 3, 0, 1, 0,
0, 0, 3, 0, 1, 3, 0, 0,
0, 0, 0, 0, 3, 0, 0, 1,
0, 0, 0, 3, 0, 1, 0, 0,
0, 3, 0, 1, 0, 0, 0, 3,
0, 1, 3, 0, 0, 0, 0, 0,
0, 3, 0, 0, 1, 0, 0, 0,
3, 0, 1, 0, 0, 0, 3, 0,
1, 0, 0, 0, 3, 0, 1, 3,
0, 0, 0, 0, 0, 0, 3, 0,
0, 1, 0, 0, 0, 3, 0, 1,
0, 0, 0, 3, 0, 1, 0, 0,
0, 3, 0, 1, 3, 0, 0, 0,
0, 0, 0, 3, 0, 0, 1, 0,
0, 0, 3, 0, 0, 1, 0, 0,
0, 3, 0, 0, 1, 0, 0, 0,
3, 0, 0, 1, 0, 0, 0, 3,
0, 0, 0, 1, 0, 0, 0, 0,
3, 0, 0, 1, 0, 0, 0, 0,
3, 0, 0, 1, 0, 0, 0, 3,
0, 0, 1, 0, 0, 0, 3, 0,
0, 1, 0, 0, 0, 3, 0, 0,
1, 0, 0, 0, 3, 0, 0, 1,
0, 0, 0, 3, 0, 0, 0, 1,
0, 0, 0, 0, 3, 0, 0, 1,
0, 0, 0, 0, 3, 0, 0, 1,
0, 0, 0, 3, 0, 0, 1, 0,
0, 0, 3, 0, 1, 0, 0, 0,
3, 0, 0, 1, 0, 0, 0, 3,
0, 0, 1, 0, 0, 0, 3, 0,
0, 1, 0, 0, 0, 3, 0, 0,
0, 1, 0, 0, 0, 0, 3, 0,
0, 1, 0, 0, 0, 0, 3, 0,
0, 1, 0, 0, 0, 3, 0, 0,
1, 0, 0, 0, 3, 0, 1, 0,
0, 0, 3, 0, 0, 1, 0, 0,
0, 3, 0, 0, 1, 0, 0, 0,
3, 0, 0, 1, 0, 0, 0, 3,
0, 0, 0, 1, 0, 0, 0, 0,
3, 0, 0, 1, 0, 0, 0, 0,
3, 0, 0, 1, 0, 0, 0, 3,
0, 0, 1, 0, 0, 0, 3, 0,
1, 0, 0, 0, 3, 0, 0, 1,
0, 0, 0, 3, 0, 0, 1, 0,
0, 0, 3, 0, 0, 1, 0, 0,
0, 3, 0, 0, 0, 1, 0, 0,
0, 0, 3, 0, 0, 1, 0, 0,
0, 0, 3, 0, 0, 1, 0, 0,
0, 3, 0, 0, 1, 0, 0, 0,
3, 0, 1, 0, 0, 0, 3, 0,
1, 3, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 3, 0, 3, 0,
3, 0, 3, 0, 3, 0, 3, 0,
3, 0, 3, 0, 3, 0, 3, 0,
0, 0, 0, 0, 0, 0, 0, 0,
1, 1, 1, 3, 3, 0, 3, 3,
3, 3, 3, 3, 3, 3, 3, 3,
3, 3, 3, 3, 3, 3, 3, 3,
3, 3, 3, 3, 3, 3, 3, 3,
3, 3, 3, 3, 3, 3, 3, 3,
3, 3, 3, 3, 3, 3, 3, 3,
3, 3, 3, 3, 3, 3, 3, 3,
3, 3, 3, 3, 3, 3, 3, 3,
3, 3, 3, 3, 3, 3, 3, 3,
3, 3, 3, 3, 3, 3, 3, 3,
3, 3, 3, 3, 3, 3, 3, 3,
3, 3, 3, 3, 3, 3, 3, 3,
3, 3, 3, 3, 3, 3, 3, 3,
3, 3, 3, 3, 3, 3, 3, 3,
3, 3, 3, 3, 3, 3, 3, 3,
3, 3, 3, 3, 3, 3, 3, 3,
3, 3, 3, 3, 3, 3, 3, 3,
3, 3, 3, 3, 3, 3, 3, 3,
3, 3, 3, 3, 3, 3, 3, 3,
3, 3, 3, 3, 3, 3, 3, 3,
3, 3, 3, 3, 3, 3, 3, 3,
3, 3, 3, 3, 3, 3, 3, 3,
3, 3, 3, 3, 3, 3, 3, 3,
3, 3, 3, 3, 3, 3, 3, 3,
3, 3, 3, 3, 3, 3, 3, 3,
3, 0, 0, 0};
static const short _address_eof_trans[] = {
1086, 1087, 1088, 1089, 1090, 1091, 1092, 1093,
1094, 1095, 1096, 1097, 1098, 1099, 1100, 1101,
1102, 1103, 1104, 1105, 1106, 1107, 1108, 1109,
1110, 1111, 1112, 1113, 1114, 1115, 1116, 1117,
1118, 1119, 1120, 1121, 1122, 1123, 1124, 1125,
1126, 1127, 1128, 1129, 1130, 1131, 1132, 1133,
1134, 1135, 1136, 1137, 1138, 1139, 1140, 1141,
1142, 1143, 1144, 1145, 1146, 1147, 1148, 1149,
1150, 1151, 1152, 1153, 1154, 1155, 1156, 1157,
1158, 1159, 1160, 1161, 1162, 1163, 1164, 1165,
1166, 1167, 1168, 1169, 1170, 1171, 1172, 1173,
1174, 1175, 1176, 1177, 1178, 1179, 1180, 1181,
1182, 1183, 1184, 1185, 1186, 1187, 1188, 1189,
1190, 1191, 1192, 1193, 1194, 1195, 1196, 1197,
1198, 1199, 1200, 1201, 1202, 1203, 1204, 1205,
1206, 1207, 1208, 1209, 1210, 1211, 1212, 1213,
1214, 1215, 1216, 1217, 1218, 1219, 1220, 1221,
1222, 1223, 1224, 1225, 1226, 1227, 1228, 1229,
1230, 1231, 1232, 1233, 1234, 1235, 1236, 1237,
1238, 1239, 1240, 1241, 1242, 1243, 1244, 1245,
1246, 1247, 1248, 1249, 1250, 1251, 1252, 1253,
1254, 1255, 1256, 1257, 1258, 1259, 1260, 1261,
1262, 1263, 1264, 1265, 1266, 1267, 1268, 1269,
1270, 1271, 1272, 1273, 1274, 1275, 1276, 1277,
1278, 1279, 1280, 1281, 1282, 1283, 0};
static const int address_start = 1;
bool is_address(const char *p, const char *pe)
{
int cs = 0;
const char *eof = pe;
bool result = false;
{
cs = (int) address_start;
}
{
int _klen;
unsigned int _trans = 0;
const signed char *_keys;
const signed char *_acts;
unsigned int _nacts;
_resume : {
}
if (p == pe && p != eof)
goto _out;
if (p == eof) {
if (_address_eof_trans[cs] > 0) {
_trans = (unsigned int) _address_eof_trans[cs] - 1;
}
} else {
_keys = (_address_trans_keys + (_address_key_offsets[cs]));
_trans = (unsigned int) _address_index_offsets[cs];
_klen = (int) _address_single_lengths[cs];
if (_klen > 0) {
const signed char *_lower = _keys;
const signed char *_upper = _keys + _klen - 1;
const signed char *_mid;
while (1) {
if (_upper < _lower) {
_keys += _klen;
_trans += (unsigned int) _klen;
break;
}
_mid = _lower + ((_upper - _lower) >> 1);
if (((*(p))) < (*(_mid)))
_upper = _mid - 1;
else if (((*(p))) > (*(_mid)))
_lower = _mid + 1;
else {
_trans += (unsigned int) (_mid - _keys);
goto _match;
}
}
}
_klen = (int) _address_range_lengths[cs];
if (_klen > 0) {
const signed char *_lower = _keys;
const signed char *_upper = _keys + (_klen << 1) - 2;
const signed char *_mid;
while (1) {
if (_upper < _lower) {
_trans += (unsigned int) _klen;
break;
}
_mid = _lower + (((_upper - _lower) >> 1) & ~1);
if (((*(p))) < (*(_mid)))
_upper = _mid - 2;
else if (((*(p))) > (*(_mid + 1)))
_lower = _mid + 2;
else {
_trans += (unsigned int) ((_mid - _keys) >> 1);
break;
}
}
}
_match : {
}
}
cs = (int) _address_cond_targs[_trans];
if (_address_cond_actions[_trans] != 0) {
_acts = (_address_actions + (_address_cond_actions[_trans]));
_nacts = (unsigned int) (*(_acts));
_acts += 1;
while (_nacts > 0) {
switch ((*(_acts))) {
case 0: {
{
result = true;
}
break;
}
case 1: {
{
result = false;
}
break;
}
}
_nacts -= 1;
_acts += 1;
}
}
if (p == eof) {
if (cs >= 196)
goto _out;
} else {
if (cs != 0) {
p += 1;
goto _resume;
}
}
_out : {
}
}
return result;
}
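// Editor's usage sketch (helper name is illustrative): is_address() expects
// a [begin, end) character range over the candidate mailbox string.
#include <string>

static bool looks_like_email(const std::string &s)
{
    return is_address(s.data(), s.data() + s.size());
}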

View File

@ -1,34 +0,0 @@
#ifndef SMTP_ADDRESS_PARSER_HPP_INCLUDED
#define SMTP_ADDRESS_PARSER_HPP_INCLUDED
/*
Snarfed from <https://github.com/gene-hightower/smtp-address-validator>
<http://opensource.org/licenses/MIT>:
Copyright (c) 2021 Gene Hightower
Permission is hereby granted, free of charge, to any person obtaining a copy of
this software and associated documentation files (the "Software"), to deal in
the Software without restriction, including without limitation the rights to
use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies
of the Software, and to permit persons to whom the Software is furnished to do
so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
*/
bool is_address(const char *p, const char *pe);
#endif // SMTP_ADDRESS_PARSER_HPP_INCLUDED

View File

@ -1,7 +1,5 @@
#include <nlohmann/json-schema.hpp>
#include "smtp-address-validator.hpp"
#include <algorithm>
#include <exception>
#include <iostream>
@ -11,16 +9,6 @@
#include <utility>
#include <vector>
#ifdef JSON_SCHEMA_BOOST_REGEX
# include <boost/regex.hpp>
# define REGEX_NAMESPACE boost
#elif defined(JSON_SCHEMA_NO_REGEX)
# define NO_STD_REGEX
#else
# include <regex>
# define REGEX_NAMESPACE std
#endif
/**
* Many of the RegExes are from @see http://jmrware.com/articles/2009/uri_regexp/URI_regex.html
*/
@ -40,10 +28,10 @@ void range_check(const T value, const T min, const T max)
/** @see date_time_check */
void rfc3339_date_check(const std::string &value)
{
const static REGEX_NAMESPACE::regex dateRegex{R"(^([0-9]{4})\-([0-9]{2})\-([0-9]{2})$)"};
const static std::regex dateRegex{R"(^([0-9]{4})\-([0-9]{2})\-([0-9]{2})$)"};
REGEX_NAMESPACE::smatch matches;
if (!REGEX_NAMESPACE::regex_match(value, matches, dateRegex)) {
std::smatch matches;
if (!std::regex_match(value, matches, dateRegex)) {
throw std::invalid_argument(value + " is not a date string according to RFC 3339.");
}
@ -66,51 +54,35 @@ void rfc3339_date_check(const std::string &value)
/** @see date_time_check */
void rfc3339_time_check(const std::string &value)
{
const static REGEX_NAMESPACE::regex timeRegex{R"(^([0-9]{2})\:([0-9]{2})\:([0-9]{2})(\.[0-9]+)?(?:[Zz]|((?:\+|\-)[0-9]{2})\:([0-9]{2}))$)"};
const static std::regex timeRegex{R"(^([0-9]{2})\:([0-9]{2})\:([0-9]{2})(\.[0-9]+)?(?:[Zz]|((?:\+|\-)[0-9]{2})\:([0-9]{2}))$)"};
REGEX_NAMESPACE::smatch matches;
if (!REGEX_NAMESPACE::regex_match(value, matches, timeRegex)) {
std::smatch matches;
if (!std::regex_match(value, matches, timeRegex)) {
throw std::invalid_argument(value + " is not a time string according to RFC 3339.");
}
auto hour = std::stoi(matches[1].str());
auto minute = std::stoi(matches[2].str());
auto second = std::stoi(matches[3].str());
const auto hour = std::stoi(matches[1].str());
const auto minute = std::stoi(matches[2].str());
const auto second = std::stoi(matches[3].str());
// const auto secfrac = std::stof( matches[4].str() );
range_check(hour, 0, 23);
range_check(minute, 0, 59);
int offsetHour = 0,
offsetMinute = 0;
/**
* @todo Could be made more exact by querying a leap second database and choosing the
* correct maximum in {58,59,60}. This current solution might match some invalid dates
* but it won't lead to false negatives. This only works if we know the full date, however
*/
range_check(second, 0, 60);
/* don't check the numerical offset if time zone is specified as 'Z' */
if (!matches[5].str().empty()) {
offsetHour = std::stoi(matches[5].str());
offsetMinute = std::stoi(matches[6].str());
const auto offsetHour = std::stoi(matches[5].str());
const auto offsetMinute = std::stoi(matches[6].str());
range_check(offsetHour, -23, 23);
range_check(offsetMinute, 0, 59);
if (offsetHour < 0)
offsetMinute *= -1;
}
/**
* @todo Could be made more exact by querying a leap second database and choosing the
* correct maximum in {58,59,60}. This current solution might match some invalid dates
* but it won't lead to false negatives. This only works if we know the full date, however
*/
auto day_minutes = hour * 60 + minute - (offsetHour * 60 + offsetMinute);
if (day_minutes < 0)
day_minutes += 60 * 24;
hour = day_minutes % 24;
minute = day_minutes / 24;
if (hour == 23 && minute == 59)
range_check(second, 0, 60); // possible leap-second
else
range_check(second, 0, 59);
}
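
For illustration only, a minimal standalone sketch of the leap-second reasoning described in the @todo above: fold the numeric offset into a minutes-of-day value in UTC and treat second == 60 as plausible only at 23:59 UTC. Names and structure here are chosen for the example and are not the library code.

// Sketch only: normalize an RFC 3339 time to UTC minutes-of-day and decide
// whether a value of 60 in the seconds field (a leap second) is plausible.
#include <iostream>

static bool leap_second_plausible(int hour, int minute, int offsetHour, int offsetMinute)
{
	// apply the sign of the hour offset to the minute offset, as RFC 3339 offsets do
	if (offsetHour < 0)
		offsetMinute = -offsetMinute;
	int day_minutes = hour * 60 + minute - (offsetHour * 60 + offsetMinute);
	day_minutes = ((day_minutes % (24 * 60)) + 24 * 60) % (24 * 60); // wrap into [0, 1440)
	// leap seconds are only ever inserted at 23:59:60 UTC
	return day_minutes / 60 == 23 && day_minutes % 60 == 59;
}

int main()
{
	std::cout << leap_second_plausible(23, 59, 0, 0) << "\n";  // 1: 23:59:60Z could be a leap second
	std::cout << leap_second_plausible(18, 59, -5, 0) << "\n"; // 1: 18:59:60-05:00 is 23:59 UTC
	std::cout << leap_second_plausible(12, 0, 0, 0) << "\n";   // 0: 12:00:60Z can never be valid
}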
/**
@ -138,13 +110,13 @@ void rfc3339_time_check(const std::string &value)
* @endverbatim
* NOTE: Per [ABNF] and ISO8601, the "T" and "Z" characters in this
* syntax may alternatively be lower case "t" or "z" respectively.
*/
*/
void rfc3339_date_time_check(const std::string &value)
{
const static REGEX_NAMESPACE::regex dateTimeRegex{R"(^([0-9]{4}\-[0-9]{2}\-[0-9]{2})[Tt]([0-9]{2}\:[0-9]{2}\:[0-9]{2}(?:\.[0-9]+)?(?:[Zz]|(?:\+|\-)[0-9]{2}\:[0-9]{2}))$)"};
const static std::regex dateTimeRegex{R"(^([0-9]{4}\-[0-9]{2}\-[0-9]{2})[Tt]([0-9]{2}\:[0-9]{2}\:[0-9]{2}(?:\.[0-9]+)?(?:[Zz]|(?:\+|\-)[0-9]{2}\:[0-9]{2}))$)"};
REGEX_NAMESPACE::smatch matches;
if (!REGEX_NAMESPACE::regex_match(value, matches, dateTimeRegex)) {
std::smatch matches;
if (!std::regex_match(value, matches, dateTimeRegex)) {
throw std::invalid_argument(value + " is not a date-time string according to RFC 3339.");
}
@ -152,7 +124,7 @@ void rfc3339_date_time_check(const std::string &value)
rfc3339_time_check(matches[2].str());
}
const std::string decOctet{R"((?:25[0-5]|2[0-4][0-9]|1[0-9][0-9]|[1-9]?[0-9]))"}; // matches numbers 0-255
const std::string decOctet{R"((?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?))"}; // matches numbers 0-255
const std::string ipv4Address{"(?:" + decOctet + R"(\.){3})" + decOctet};
const std::string h16{R"([0-9A-Fa-f]{1,4})"};
const std::string h16Left{"(?:" + h16 + ":)"};
@ -187,156 +159,94 @@ const std::string host{
"|" + regName +
")"};
const std::string uuid{R"([0-9a-fA-F]{8}\-[0-9a-fA-F]{4}\-[0-9a-fA-F]{4}\-[0-9a-fA-F]{4}\-[0-9a-fA-F]{12})"};
// from http://stackoverflow.com/questions/106179/regular-expression-to-match-dns-hostname-or-ip-address
const std::string hostname{R"(^([a-zA-Z0-9]|[a-zA-Z0-9][a-zA-Z0-9\-]{0,61}[a-zA-Z0-9])(\.([a-zA-Z0-9]|[a-zA-Z0-9][a-zA-Z0-9\-]{0,61}[a-zA-Z0-9]))*$)"};
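
As a self-contained illustration of how these building blocks compose (the names dec_octet and ipv4 below are local to this sketch, not the library's constants):

// Sketch: composing a dec-octet sub-expression into a full IPv4 matcher,
// mirroring the string-concatenation style used above. Prints 1 for a match.
#include <iostream>
#include <regex>
#include <string>

int main()
{
	const std::string dec_octet{R"((?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?))"};
	const std::string ipv4{"(?:" + dec_octet + R"(\.){3})" + dec_octet};
	const std::regex ipv4_regex{"^" + ipv4 + "$"};

	for (const std::string s : {"192.168.0.1", "255.255.255.255", "256.1.1.1", "1.2.3"})
		std::cout << s << " -> " << std::regex_match(s, ipv4_regex) << "\n";
}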
bool is_ascii(std::string const &value)
{
for (auto ch : value) {
if (ch & 0x80) {
return false;
}
}
return true;
}
/**
* @see
* @see https://tools.ietf.org/html/rfc5322#section-4.1
*
* @verbatim
* URI = scheme ":" hier-part [ "?" query ] [ "#" fragment ]
*
* hier-part = "//" authority path-abempty
* / path-absolute
* / path-rootless
* / path-empty
*
* URI-reference = URI / relative-ref
*
* absolute-URI = scheme ":" hier-part [ "?" query ]
*
* relative-ref = relative-part [ "?" query ] [ "#" fragment ]
*
* relative-part = "//" authority path-abempty
* / path-absolute
* / path-noscheme
* / path-empty
*
* scheme = ALPHA *( ALPHA / DIGIT / "+" / "-" / "." )
*
* authority = [ userinfo "@" ] host [ ":" port ]
* userinfo = *( unreserved / pct-encoded / sub-delims / ":" )
* host = IP-literal / IPv4address / reg-name
* port = *DIGIT
*
* IP-literal = "[" ( IPv6address / IPvFuture ) "]"
*
* IPvFuture = "v" 1*HEXDIG "." 1*( unreserved / sub-delims / ":" )
*
* IPv6address = 6( h16 ":" ) ls32
* / "::" 5( h16 ":" ) ls32
* / [ h16 ] "::" 4( h16 ":" ) ls32
* / [ *1( h16 ":" ) h16 ] "::" 3( h16 ":" ) ls32
* / [ *2( h16 ":" ) h16 ] "::" 2( h16 ":" ) ls32
* / [ *3( h16 ":" ) h16 ] "::" h16 ":" ls32
* / [ *4( h16 ":" ) h16 ] "::" ls32
* / [ *5( h16 ":" ) h16 ] "::" h16
* / [ *6( h16 ":" ) h16 ] "::"
*
* h16 = 1*4HEXDIG
* ls32 = ( h16 ":" h16 ) / IPv4address
* IPv4address = dec-octet "." dec-octet "." dec-octet "." dec-octet
* dec-octet = DIGIT ; 0-9
* / %x31-39 DIGIT ; 10-99
* / "1" 2DIGIT ; 100-199
* / "2" %x30-34 DIGIT ; 200-249
* / "25" %x30-35 ; 250-255
*
* reg-name = *( unreserved / pct-encoded / sub-delims )
*
* path = path-abempty ; begins with "/" or is empty
* / path-absolute ; begins with "/" but not "//"
* / path-noscheme ; begins with a non-colon segment
* / path-rootless ; begins with a segment
* / path-empty ; zero characters
*
* path-abempty = *( "/" segment )
* path-absolute = "/" [ segment-nz *( "/" segment ) ]
* path-noscheme = segment-nz-nc *( "/" segment )
* path-rootless = segment-nz *( "/" segment )
* path-empty = 0<pchar>
*
* segment = *pchar
* segment-nz = 1*pchar
* segment-nz-nc = 1*( unreserved / pct-encoded / sub-delims / "@" )
* ; non-zero-length segment without any colon ":"
*
* pchar = unreserved / pct-encoded / sub-delims / ":" / "@"
*
* query = *( pchar / "/" / "?" )
*
* fragment = *( pchar / "/" / "?" )
*
* pct-encoded = "%" HEXDIG HEXDIG
*
* unreserved = ALPHA / DIGIT / "-" / "." / "_" / "~"
* reserved = gen-delims / sub-delims
* gen-delims = ":" / "/" / "?" / "#" / "[" / "]" / "@"
* sub-delims = "!" / "$" / "&" / "'" / "(" / ")"
* / "*" / "+" / "," / ";" / "="
*
* atom = [CFWS] 1*atext [CFWS]
* word = atom / quoted-string
* phrase = 1*word / obs-phrase
* obs-FWS = 1*WSP *(CRLF 1*WSP)
* FWS = ([*WSP CRLF] 1*WSP) / obs-FWS
* ; Folding white space
* ctext = %d33-39 / ; Printable US-ASCII
* %d42-91 / ; characters not including
* %d93-126 / ; "(", ")", or "\"
* obs-ctext
* ccontent = ctext / quoted-pair / comment
* comment = "(" *([FWS] ccontent) [FWS] ")"
* CFWS = (1*([FWS] comment) [FWS]) / FWS
* obs-local-part = word *("." word)
* obs-domain = atom *("." atom)
* obs-dtext = obs-NO-WS-CTL / quoted-pair
* quoted-pair = ("\" (VCHAR / WSP)) / obs-qp
* obs-NO-WS-CTL = %d1-8 / ; US-ASCII control
* %d11 / ; characters that do not
* %d12 / ; include the carriage
* %d14-31 / ; return, line feed, and
* %d127 ; white space characters
* obs-ctext = obs-NO-WS-CTL
* obs-qtext = obs-NO-WS-CTL
* obs-utext = %d0 / obs-NO-WS-CTL / VCHAR
* obs-qp = "\" (%d0 / obs-NO-WS-CTL / LF / CR)
* obs-body = *((*LF *CR *((%d0 / text) *LF *CR)) / CRLF)
* obs-unstruct = *((*LF *CR *(obs-utext *LF *CR)) / FWS)
* obs-phrase = word *(word / "." / CFWS)
* obs-phrase-list = [phrase / CFWS] *("," [phrase / CFWS])
* qtext = %d33 / ; Printable US-ASCII
* %d35-91 / ; characters not including
* %d93-126 / ; "\" or the quote character
* obs-qtext
* qcontent = qtext / quoted-pair
* quoted-string = [CFWS]
* DQUOTE *([FWS] qcontent) [FWS] DQUOTE
* [CFWS]
* atext = ALPHA / DIGIT / ; Printable US-ASCII
* "!" / "#" / ; characters not including
* "$" / "%" / ; specials. Used for atoms.
* "&" / "'" /
* "*" / "+" /
* "-" / "/" /
* "=" / "?" /
* "^" / "_" /
* "`" / "{" /
* "|" / "}" /
* "~"
* dot-atom-text = 1*atext *("." 1*atext)
* dot-atom = [CFWS] dot-atom-text [CFWS]
* addr-spec = local-part "@" domain
* local-part = dot-atom / quoted-string / obs-local-part
* domain = dot-atom / domain-literal / obs-domain
* domain-literal = [CFWS] "[" *([FWS] dtext) [FWS] "]" [CFWS]
* dtext = %d33-90 / ; Printable US-ASCII
* %d94-126 / ; characters not including
* obs-dtext ; "[", "]", or "\"
* @endverbatim
* @see adapted from: https://github.com/jhermsmeier/uri.regex/blob/master/uri.regex
* @todo We currently don't have a working tool to generate a regex from this larger ABNF.
* Other options:
* - https://github.com/ldthomas/apg-6.3
* - https://github.com/akr/abnf
*
* The problematic part is the allowed whitespace (even newlines) in the email address.
* Ignoring those and starting with
* @see https://stackoverflow.com/questions/13992403/regex-validation-of-email-addresses-according-to-rfc5321-rfc5322
* and trying to divide up the complicated regex into understandable ABNF definitions from rfc5322 yields:
*/
void rfc3986_uri_check(const std::string &value)
{
const static std::string scheme{R"(([A-Za-z][A-Za-z0-9+\-.]*):)"};
const static std::string hierPart{
R"((?:(\/\/)(?:((?:[A-Za-z0-9\-._~!$&'()*+,;=:]|)"
R"(%[0-9A-Fa-f]{2})*)@)?((?:\[(?:(?:(?:(?:[0-9A-Fa-f]{1,4}:){6}|)"
R"(::(?:[0-9A-Fa-f]{1,4}:){5}|)"
R"((?:[0-9A-Fa-f]{1,4})?::(?:[0-9A-Fa-f]{1,4}:){4}|)"
R"((?:(?:[0-9A-Fa-f]{1,4}:){0,1}[0-9A-Fa-f]{1,4})?::(?:[0-9A-Fa-f]{1,4}:){3}|)"
R"((?:(?:[0-9A-Fa-f]{1,4}:){0,2}[0-9A-Fa-f]{1,4})?::(?:[0-9A-Fa-f]{1,4}:){2}|)"
R"((?:(?:[0-9A-Fa-f]{1,4}:){0,3}[0-9A-Fa-f]{1,4})?::[0-9A-Fa-f]{1,4}:|)"
R"((?:(?:[0-9A-Fa-f]{1,4}:){0,4}[0-9A-Fa-f]{1,4})?::)(?:[0-9A-Fa-f]{1,4}:[0-9A-Fa-f]{1,4}|)"
R"((?:(?:25[0-5]|2[0-4][0-9]|)"
R"([01]?[0-9][0-9]?)\.){3}(?:25[0-5]|)"
R"(2[0-4][0-9]|)"
R"([01]?[0-9][0-9]?))|)"
R"((?:(?:[0-9A-Fa-f]{1,4}:){0,5}[0-9A-Fa-f]{1,4})?::[0-9A-Fa-f]{1,4}|)"
R"((?:(?:[0-9A-Fa-f]{1,4}:){0,6}[0-9A-Fa-f]{1,4})?::)|)"
R"([Vv][0-9A-Fa-f]+\.[A-Za-z0-9\-._~!$&'()*+,;=:]+)\]|)"
R"((?:(?:25[0-5]|)"
R"(2[0-4][0-9]|)"
R"([01]?[0-9][0-9]?)\.){3}(?:25[0-5]|)"
R"(2[0-4][0-9]|)"
R"([01]?[0-9][0-9]?)|)"
R"((?:[A-Za-z0-9\-._~!$&'()*+,;=]|)"
R"(%[0-9A-Fa-f]{2})*))(?::([0-9]*))?((?:\/(?:[A-Za-z0-9\-._~!$&'()*+,;=:@]|)"
R"(%[0-9A-Fa-f]{2})*)*)|)"
R"(\/((?:(?:[A-Za-z0-9\-._~!$&'()*+,;=:@]|)"
R"(%[0-9A-Fa-f]{2})+(?:\/(?:[A-Za-z0-9\-._~!$&'()*+,;=:@]|)"
R"(%[0-9A-Fa-f]{2})*)*)?)|)"
R"(((?:[A-Za-z0-9\-._~!$&'()*+,;=:@]|)"
R"(%[0-9A-Fa-f]{2})+(?:\/(?:[A-Za-z0-9\-._~!$&'()*+,;=:@]|)"
R"(%[0-9A-Fa-f]{2})*)*)|))"};
const static std::string query{R"((?:\?((?:[A-Za-z0-9\-._~!$&'()*+,;=:@\/?]|%[0-9A-Fa-f]{2})*))?)"};
const static std::string fragment{
R"((?:\#((?:[A-Za-z0-9\-._~!$&'()*+,;=:@\/?]|%[0-9A-Fa-f]{2})*))?)"};
const static std::string uriFormat{scheme + hierPart + query + fragment};
const static REGEX_NAMESPACE::regex uriRegex{uriFormat};
if (!REGEX_NAMESPACE::regex_match(value, uriRegex)) {
throw std::invalid_argument(value + " is not a URI string according to RFC 3986.");
}
}
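
A usage sketch of this style of throwing format check; validate_uri below is a deliberately simplified stand-in for rfc3986_uri_check (it only tests for a scheme followed by non-space characters) so the example stays self-contained:

// Sketch: how a throwing format check like the one above is typically used.
#include <iostream>
#include <regex>
#include <stdexcept>
#include <string>

static void validate_uri(const std::string &value)
{
	// deliberately simplified: scheme ":" followed by at least one non-space character
	static const std::regex uri_regex{R"(^[A-Za-z][A-Za-z0-9+\-.]*:\S+$)"};
	if (!std::regex_match(value, uri_regex))
		throw std::invalid_argument(value + " is not a URI string according to RFC 3986.");
}

int main()
{
	for (const std::string s : {"https://example.com/x?y=1#z", "not a uri"}) {
		try {
			validate_uri(s);
			std::cout << s << ": ok\n";
		} catch (const std::invalid_argument &e) {
			std::cout << e.what() << "\n";
		}
	}
}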
const std::string obsnowsctl{R"([\x01-\x08\x0b\x0c\x0e-\x1f\x7f])"};
const std::string obsqp{R"(\\[\x01-\x09\x0b\x0c\x0e-\x7f])"};
const std::string qtext{R"((?:[\x21\x23-\x5b\x5d-\x7e]|)" + obsnowsctl + ")"};
const std::string dtext{R"([\x01-\x08\x0b\x0c\x0e-\x1f\x21-\x5a\x53-\x7f])"};
const std::string quotedString{R"("(?:)" + qtext + "|" + obsqp + R"()*")"};
const std::string atext{R"([A-Za-z0-9!#$%&'*+/=?^_`{|}~-])"};
const std::string domainLiteral{R"(\[(?:(?:)" + decOctet + R"()\.){3}(?:)" + decOctet + R"(|[A-Za-z0-9-]*[A-Za-z0-9]:(?:)" + dtext + "|" + obsqp + R"()+)\])"};
const std::string dotAtom{"(?:" + atext + R"(+(?:\.)" + atext + "+)*)"};
const std::string stackoverflowMagicPart{R"((?:[[:alnum:]](?:[[:alnum:]-]*[[:alnum:]])?\.)+)"
R"([[:alnum:]](?:[[:alnum:]-]*[[:alnum:]])?)"};
const std::string email{"(?:" + dotAtom + "|" + quotedString + ")@(?:" + stackoverflowMagicPart + "|" + domainLiteral + ")"};
} // namespace
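
A compact sketch of the same composition idea for the email pieces above, assuming a simplified grammar (no quoted-string local part, no domain-literal form); all names are local to the example:

// Sketch: building an addr-spec style matcher from small named pieces,
// in the same concatenation style as the constants above. Prints 1 for a match.
#include <iostream>
#include <regex>
#include <string>

int main()
{
	const std::string atext{R"([A-Za-z0-9!#$%&'*+/=?^_`{|}~-])"};
	const std::string dot_atom{"(?:" + atext + R"(+(?:\.)" + atext + "+)*)"};
	const std::string label{R"([A-Za-z0-9](?:[A-Za-z0-9-]*[A-Za-z0-9])?)"};
	const std::string domain{"(?:" + label + R"(\.)+)" + label};
	const std::regex email{"^" + dot_atom + "@" + domain + "$"};

	for (const std::string s : {"joe.bloggs@example.com", "te..st@example.com", "2962"})
		std::cout << s << " -> " << std::regex_match(s, email) << "\n";
}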
namespace nlohmann
@ -358,42 +268,29 @@ void default_string_format_check(const std::string &format, const std::string &v
rfc3339_date_check(value);
} else if (format == "time") {
rfc3339_time_check(value);
} else if (format == "uri") {
rfc3986_uri_check(value);
} else if (format == "email") {
if (!is_ascii(value)) {
throw std::invalid_argument(value + " contains non-ASCII values, not RFC 5321 compliant.");
}
if (!is_address(&*value.begin(), &*value.end())) {
throw std::invalid_argument(value + " is not a valid email according to RFC 5321.");
}
} else if (format == "idn-email") {
if (!is_address(&*value.begin(), &*value.end())) {
throw std::invalid_argument(value + " is not a valid idn-email according to RFC 6531.");
static const std::regex emailRegex{email};
if (!std::regex_match(value, emailRegex)) {
throw std::invalid_argument(value + " is not a valid email according to RFC 5322.");
}
} else if (format == "hostname") {
static const REGEX_NAMESPACE::regex hostRegex{hostname};
if (!REGEX_NAMESPACE::regex_match(value, hostRegex)) {
static const std::regex hostRegex{hostname};
if (!std::regex_match(value, hostRegex)) {
throw std::invalid_argument(value + " is not a valid hostname according to RFC 3986 Appendix A.");
}
} else if (format == "ipv4") {
const static REGEX_NAMESPACE::regex ipv4Regex{"^" + ipv4Address + "$"};
if (!REGEX_NAMESPACE::regex_match(value, ipv4Regex)) {
const static std::regex ipv4Regex{"^" + ipv4Address + "$"};
if (!std::regex_match(value, ipv4Regex)) {
throw std::invalid_argument(value + " is not an IPv4 string according to RFC 2673.");
}
} else if (format == "ipv6") {
static const REGEX_NAMESPACE::regex ipv6Regex{ipv6Address};
if (!REGEX_NAMESPACE::regex_match(value, ipv6Regex)) {
static const std::regex ipv6Regex{ipv6Address};
if (!std::regex_match(value, ipv6Regex)) {
throw std::invalid_argument(value + " is not an IPv6 string according to RFC 5954.");
}
} else if (format == "uuid") {
static const REGEX_NAMESPACE::regex uuidRegex{uuid};
if (!REGEX_NAMESPACE::regex_match(value, uuidRegex)) {
throw std::invalid_argument(value + " is not an uuid string according to RFC 4122.");
}
} else if (format == "regex") {
try {
REGEX_NAMESPACE::regex re(value, std::regex::ECMAScript);
std::regex re(value, std::regex::ECMAScript);
} catch (std::exception &exception) {
throw exception;
}
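
A sketch of wiring default_string_format_check into json_validator, following the pattern of the test driver later in this diff; the constructor arguments and the exception type caught here are assumptions based on that driver and may differ between the two versions being compared:

// Sketch: plugging the default format callback into the validator.
#include <iostream>
#include <nlohmann/json-schema.hpp>

using nlohmann::json;
using nlohmann::json_schema::json_validator;

int main()
{
	const json schema = json::parse(R"({"type": "string", "format": "ipv4"})");

	// loader left empty; format checking delegated to the function shown above
	json_validator validator(nullptr, nlohmann::json_schema::default_string_format_check);
	validator.set_root_schema(schema);

	for (const json &instance : {json("192.168.0.1"), json("999.0.0.1")}) {
		try {
			validator.validate(instance);
			std::cout << instance << ": valid\n";
		} catch (const std::exception &e) {
			std::cout << instance << ": " << e.what() << "\n";
		}
	}
}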

View File

@ -43,10 +43,6 @@ add_executable(issue-98 issue-98.cpp)
target_link_libraries(issue-98 nlohmann_json_schema_validator)
add_test(NAME issue-98-erase-exception-unknown-keywords COMMAND issue-98)
add_executable(issue-293 issue-293.cpp)
target_link_libraries(issue-293 nlohmann_json_schema_validator)
add_test(NAME issue-293-float-point-error COMMAND issue-293)
# Unit test for string format checks
add_executable(string-format-check-test string-format-check-test.cpp)
target_include_directories(string-format-check-test PRIVATE ${PROJECT_SOURCE_DIR}/src/)
@ -59,37 +55,3 @@ add_executable(json-patch json-patch.cpp)
target_include_directories(json-patch PRIVATE ${PROJECT_SOURCE_DIR}/src)
target_link_libraries(json-patch nlohmann_json_schema_validator)
add_test(NAME json-patch COMMAND json-patch)
# Unit test for format checker fail at schema parsing time
add_executable(issue-117-format-error issue-117-format-error.cpp)
target_link_libraries(issue-117-format-error nlohmann_json_schema_validator)
add_test(NAME issue-117-format-error COMMAND issue-117-format-error)
add_executable(binary-validation binary-validation.cpp)
target_include_directories(binary-validation PRIVATE ${PROJECT_SOURCE_DIR}/src)
target_link_libraries(binary-validation PRIVATE nlohmann_json_schema_validator)
add_test(NAME binary-validation COMMAND binary-validation)
add_executable(issue-149-entry-selection issue-149-entry-selection.cpp)
target_link_libraries(issue-149-entry-selection PRIVATE nlohmann_json_schema_validator)
add_test(NAME issue-149-entry-selection COMMAND issue-149-entry-selection)
add_executable(issue-189-default-values issue-189-default-values.cpp)
target_link_libraries(issue-189-default-values nlohmann_json_schema_validator)
add_test(NAME issue-189-default-values COMMAND issue-189-default-values)
add_executable(issue-229-oneof-default-values issue-229-oneof-default-values.cpp)
target_link_libraries(issue-229-oneof-default-values nlohmann_json_schema_validator)
add_test(NAME issue-229-oneof-default-values COMMAND issue-229-oneof-default-values)
add_executable(issue-243-root-default-values issue-243-root-default-values.cpp)
target_link_libraries(issue-243-root-default-values nlohmann_json_schema_validator)
add_test(NAME issue-243-root-default-values COMMAND issue-243-root-default-values)
add_executable(issue-255-error-message-limit-precision issue-255-error-message-limit-precision.cpp)
target_link_libraries(issue-255-error-message-limit-precision nlohmann_json_schema_validator)
add_test(NAME issue-255-error-message-limit-precision COMMAND issue-255-error-message-limit-precision)
add_executable(issue-105-verbose-combination-errors issue-105-verbose-combination-errors.cpp)
target_link_libraries(issue-105-verbose-combination-errors nlohmann_json_schema_validator)
add_test(NAME issue-105-verbose-combination-errors COMMAND issue-105-verbose-combination-errors)

View File

@ -50,10 +50,12 @@ if(JSON_SCHEMA_TEST_SUITE_PATH)
# some optional tests will fail
set_tests_properties(
JSON-Suite::Optional::bignum
JSON-Suite::Optional::content
JSON-Suite::Optional::zeroTerminatedFloats
JSON-Suite::Optional::non-bmp-regex
JSON-Suite::Optional::float-overflow
JSON-Suite::Optional::ecmascript-regex
JSON-Suite::Optional::Format::ecmascript-regex
JSON-Suite::Optional::Format::idn-email
JSON-Suite::Optional::Format::idn-hostname
JSON-Suite::Optional::Format::iri-reference
JSON-Suite::Optional::Format::iri
@ -61,7 +63,7 @@ if(JSON_SCHEMA_TEST_SUITE_PATH)
JSON-Suite::Optional::Format::relative-json-pointer
JSON-Suite::Optional::Format::uri-reference
JSON-Suite::Optional::Format::uri-template
JSON-Suite::Optional::unicode
JSON-Suite::Optional::Format::uri
PROPERTIES
WILL_FAIL ON)

View File

@ -39,50 +39,6 @@ static void loader(const json_uri &uri, json &schema)
}
}
// from here
// https://stackoverflow.com/a/34571089/880584
static std::string base64_decode(const std::string &in)
{
std::string out;
std::vector<int> T(256, -1);
for (int i = 0; i < 64; i++)
T["ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/"[i]] = i;
unsigned val = 0;
int valb = -8;
for (uint8_t c : in) {
if (c == '=')
break;
if (T[c] == -1) {
throw std::invalid_argument("base64-decode: unexpected character in encode string: '" + std::string(1, c) + "'");
}
val = (val << 6) + T[c];
valb += 6;
if (valb >= 0) {
out.push_back(char((val >> valb) & 0xFF));
valb -= 8;
}
}
return out;
}
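
A standalone sketch exercising the same 6-bit accumulator idea on a known value ("Zm9vYg==" is the base64 encoding of "foob"); it is not the helper above and omits the error check for unknown characters:

// Sketch: table-driven base64 decode of a known-good input.
#include <iostream>
#include <string>
#include <vector>

int main()
{
	const std::string alphabet{"ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/"};
	std::vector<int> T(256, -1);
	for (int i = 0; i < 64; i++)
		T[static_cast<unsigned char>(alphabet[i])] = i;

	std::string out;
	unsigned val = 0;
	int valb = -8;
	for (unsigned char c : std::string{"Zm9vYg=="}) {
		if (c == '=')
			break; // '=' padding simply stops the loop
		val = (val << 6) + T[c]; // accumulate 6 bits per character
		valb += 6;
		if (valb >= 0) {
			out.push_back(char((val >> valb) & 0xFF)); // emit a full byte
			valb -= 8;
		}
	}
	std::cout << out << "\n"; // prints "foob"
}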
static void content(const std::string &contentEncoding, const std::string &contentMediaType, const json &instance)
{
std::string content = instance;
if (contentEncoding == "base64")
content = base64_decode(instance);
else if (contentEncoding != "")
throw std::invalid_argument("unable to check for contentEncoding '" + contentEncoding + "'");
if (contentMediaType == "application/json")
auto dummy = json::parse(content); // throws if conversion fails
else if (contentMediaType != "")
throw std::invalid_argument("unable to check for contentMediaType '" + contentMediaType + "'");
}
int main(void)
{
json validation; // a validation case following the JSON-test-suite-schema
@ -106,8 +62,7 @@ int main(void)
const auto &schema = test_group["schema"];
json_validator validator(loader,
nlohmann::json_schema::default_string_format_check,
content);
nlohmann::json_schema::default_string_format_check);
validator.set_root_schema(schema);

View File

@ -1,3 +0,0 @@
{
"type": "integer"
}

View File

@ -1,3 +0,0 @@
{
"type": "integer"
}

View File

@ -1,11 +0,0 @@
{
"$id": "http://localhost:1234/ref-and-definitions.json",
"definitions": {
"inner": {
"properties": {
"bar": { "type": "string" }
}
}
},
"allOf": [ { "$ref": "#/definitions/inner" } ]
}

View File

@ -1,11 +0,0 @@
{
"$id": "http://localhost:1234/ref-and-defs.json",
"$defs": {
"inner": {
"properties": {
"bar": { "type": "string" }
}
}
},
"$ref": "#/$defs/inner"
}

View File

@ -19,7 +19,7 @@
]
},
{
"description": "when items is schema, additionalItems does nothing",
"description": "items is schema, no additionalItems",
"schema": {
"items": {},
"additionalItems": false
@ -33,24 +33,14 @@
]
},
{
"description": "array of items with no additionalItems permitted",
"description": "array of items with no additionalItems",
"schema": {
"items": [{}, {}, {}],
"additionalItems": false
},
"tests": [
{
"description": "empty array",
"data": [ ],
"valid": true
},
{
"description": "fewer number of items present (1)",
"data": [ 1 ],
"valid": true
},
{
"description": "fewer number of items present (2)",
"description": "fewer number of items present",
"data": [ 1, 2 ],
"valid": true
},
@ -93,57 +83,5 @@
"valid": true
}
]
},
{
"description": "additionalItems should not look in applicators, valid case",
"schema": {
"allOf": [
{ "items": [ { "type": "integer" } ] }
],
"additionalItems": { "type": "boolean" }
},
"tests": [
{
"description": "items defined in allOf are not examined",
"data": [ 1, null ],
"valid": true
}
]
},
{
"description": "additionalItems should not look in applicators, invalid case",
"schema": {
"allOf": [
{ "items": [ { "type": "integer" }, { "type": "string" } ] }
],
"items": [ {"type": "integer" } ],
"additionalItems": { "type": "boolean" }
},
"tests": [
{
"description": "items defined in allOf are not examined",
"data": [ 1, "hello" ],
"valid": false
}
]
},
{
"description": "items validation adjusts the starting index for additionalItems",
"schema": {
"items": [ { "type": "string" } ],
"additionalItems": { "type": "integer" }
},
"tests": [
{
"description": "valid items",
"data": [ "x", 2, 3 ],
"valid": true
},
{
"description": "wrong type of second item",
"data": [ "x", "y" ],
"valid": false
}
]
}
]

View File

@ -124,7 +124,7 @@
},
"tests": [
{
"description": "properties defined in allOf are not examined",
"description": "properties defined in allOf are not allowed",
"data": {"foo": 1, "bar": true},
"valid": false
}

View File

@ -240,55 +240,5 @@
"valid": false
}
]
},
{
"description": "allOf combined with anyOf, oneOf",
"schema": {
"allOf": [ { "multipleOf": 2 } ],
"anyOf": [ { "multipleOf": 3 } ],
"oneOf": [ { "multipleOf": 5 } ]
},
"tests": [
{
"description": "allOf: false, anyOf: false, oneOf: false",
"data": 1,
"valid": false
},
{
"description": "allOf: false, anyOf: false, oneOf: true",
"data": 5,
"valid": false
},
{
"description": "allOf: false, anyOf: true, oneOf: false",
"data": 3,
"valid": false
},
{
"description": "allOf: false, anyOf: true, oneOf: true",
"data": 15,
"valid": false
},
{
"description": "allOf: true, anyOf: false, oneOf: false",
"data": 2,
"valid": false
},
{
"description": "allOf: true, anyOf: false, oneOf: true",
"data": 10,
"valid": false
},
{
"description": "allOf: true, anyOf: true, oneOf: false",
"data": 6,
"valid": false
},
{
"description": "allOf: true, anyOf: true, oneOf: true",
"data": 30,
"valid": true
}
]
}
]

View File

@ -125,90 +125,6 @@
}
]
},
{
"description": "const with [false] does not match [0]",
"schema": {"const": [false]},
"tests": [
{
"description": "[false] is valid",
"data": [false],
"valid": true
},
{
"description": "[0] is invalid",
"data": [0],
"valid": false
},
{
"description": "[0.0] is invalid",
"data": [0.0],
"valid": false
}
]
},
{
"description": "const with [true] does not match [1]",
"schema": {"const": [true]},
"tests": [
{
"description": "[true] is valid",
"data": [true],
"valid": true
},
{
"description": "[1] is invalid",
"data": [1],
"valid": false
},
{
"description": "[1.0] is invalid",
"data": [1.0],
"valid": false
}
]
},
{
"description": "const with {\"a\": false} does not match {\"a\": 0}",
"schema": {"const": {"a": false}},
"tests": [
{
"description": "{\"a\": false} is valid",
"data": {"a": false},
"valid": true
},
{
"description": "{\"a\": 0} is invalid",
"data": {"a": 0},
"valid": false
},
{
"description": "{\"a\": 0.0} is invalid",
"data": {"a": 0.0},
"valid": false
}
]
},
{
"description": "const with {\"a\": true} does not match {\"a\": 1}",
"schema": {"const": {"a": true}},
"tests": [
{
"description": "{\"a\": true} is valid",
"data": {"a": true},
"valid": true
},
{
"description": "{\"a\": 1} is invalid",
"data": {"a": 1},
"valid": false
},
{
"description": "{\"a\": 1.0} is invalid",
"data": {"a": 1.0},
"valid": false
}
]
},
{
"description": "const with 0 does not match other zero-like types",
"schema": {"const": 0},
@ -322,21 +238,5 @@
"valid": false
}
]
},
{
"description": "nul characters in strings",
"schema": { "const": "hello\u0000there" },
"tests": [
{
"description": "match string with nul",
"data": "hello\u0000there",
"valid": true
},
{
"description": "do not match string lacking nul",
"data": "hellothere",
"valid": false
}
]
}
]

View File

@ -96,55 +96,5 @@
"valid": true
}
]
},
{
"description": "items + contains",
"schema": {
"items": { "multipleOf": 2 },
"contains": { "multipleOf": 3 }
},
"tests": [
{
"description": "matches items, does not match contains",
"data": [ 2, 4, 8 ],
"valid": false
},
{
"description": "does not match items, matches contains",
"data": [ 3, 6, 9 ],
"valid": false
},
{
"description": "matches both items and contains",
"data": [ 6, 12 ],
"valid": true
},
{
"description": "matches neither items nor contains",
"data": [ 1, 5 ],
"valid": false
}
]
},
{
"description": "contains with false if subschema",
"schema": {
"contains": {
"if": false,
"else": true
}
},
"tests": [
{
"description": "any non-empty array is valid",
"data": ["foo"],
"valid": true
},
{
"description": "empty array is invalid",
"data": [],
"valid": false
}
]
}
]

View File

@ -45,35 +45,5 @@
"valid": true
}
]
},
{
"description": "the default keyword does not do anything if the property is missing",
"schema": {
"type": "object",
"properties": {
"alpha": {
"type": "number",
"maximum": 3,
"default": 5
}
}
},
"tests": [
{
"description": "an explicit property value is checked against maximum (passing)",
"data": { "alpha": 1 },
"valid": true
},
{
"description": "an explicit property value is checked against maximum (failing)",
"data": { "alpha": 5 },
"valid": false
},
{
"description": "missing properties are not filled in with the default",
"data": {},
"valid": true
}
]
}
]

View File

@ -1,6 +1,6 @@
[
{
"description": "validate definition against metaschema",
"description": "valid definition",
"schema": {"$ref": "http://json-schema.org/draft-07/schema#"},
"tests": [
{
@ -11,7 +11,13 @@
}
},
"valid": true
},
}
]
},
{
"description": "invalid definition",
"schema": {"$ref": "http://json-schema.org/draft-07/schema#"},
"tests": [
{
"description": "invalid definition schema",
"data": {

View File

@ -33,16 +33,6 @@
"description": "objects are deep compared",
"data": {"foo": false},
"valid": false
},
{
"description": "valid object matches",
"data": {"foo": 12},
"valid": true
},
{
"description": "extra properties in object is invalid",
"data": {"foo": 12, "boo": 42},
"valid": false
}
]
},
@ -216,21 +206,5 @@
"valid": true
}
]
},
{
"description": "nul characters in strings",
"schema": { "enum": [ "hello\u0000there" ] },
"tests": [
{
"description": "match string with nul",
"data": "hello\u0000there",
"valid": true
},
{
"description": "do not match string lacking nul",
"data": "hellothere",
"valid": false
}
]
}
]

View File

@ -1,611 +1,611 @@
[
{
"description": "email format",
"schema": { "format": "email" },
"description": "validation of e-mail addresses",
"schema": {"format": "email"},
"tests": [
{
"description": "all string formats ignore integers",
"description": "ignores integers",
"data": 12,
"valid": true
},
{
"description": "all string formats ignore floats",
"description": "ignores floats",
"data": 13.7,
"valid": true
},
{
"description": "all string formats ignore objects",
"description": "ignores objects",
"data": {},
"valid": true
},
{
"description": "all string formats ignore arrays",
"description": "ignores arrays",
"data": [],
"valid": true
},
{
"description": "all string formats ignore booleans",
"description": "ignores booleans",
"data": false,
"valid": true
},
{
"description": "all string formats ignore nulls",
"description": "ignores null",
"data": null,
"valid": true
}
]
},
{
"description": "idn-email format",
"schema": { "format": "idn-email" },
"description": "validation of IDN e-mail addresses",
"schema": {"format": "idn-email"},
"tests": [
{
"description": "all string formats ignore integers",
"description": "ignores integers",
"data": 12,
"valid": true
},
{
"description": "all string formats ignore floats",
"description": "ignores floats",
"data": 13.7,
"valid": true
},
{
"description": "all string formats ignore objects",
"description": "ignores objects",
"data": {},
"valid": true
},
{
"description": "all string formats ignore arrays",
"description": "ignores arrays",
"data": [],
"valid": true
},
{
"description": "all string formats ignore booleans",
"description": "ignores booleans",
"data": false,
"valid": true
},
{
"description": "all string formats ignore nulls",
"description": "ignores null",
"data": null,
"valid": true
}
]
},
{
"description": "regex format",
"schema": { "format": "regex" },
"description": "validation of regexes",
"schema": {"format": "regex"},
"tests": [
{
"description": "all string formats ignore integers",
"description": "ignores integers",
"data": 12,
"valid": true
},
{
"description": "all string formats ignore floats",
"description": "ignores floats",
"data": 13.7,
"valid": true
},
{
"description": "all string formats ignore objects",
"description": "ignores objects",
"data": {},
"valid": true
},
{
"description": "all string formats ignore arrays",
"description": "ignores arrays",
"data": [],
"valid": true
},
{
"description": "all string formats ignore booleans",
"description": "ignores booleans",
"data": false,
"valid": true
},
{
"description": "all string formats ignore nulls",
"description": "ignores null",
"data": null,
"valid": true
}
]
},
{
"description": "ipv4 format",
"schema": { "format": "ipv4" },
"description": "validation of IP addresses",
"schema": {"format": "ipv4"},
"tests": [
{
"description": "all string formats ignore integers",
"description": "ignores integers",
"data": 12,
"valid": true
},
{
"description": "all string formats ignore floats",
"description": "ignores floats",
"data": 13.7,
"valid": true
},
{
"description": "all string formats ignore objects",
"description": "ignores objects",
"data": {},
"valid": true
},
{
"description": "all string formats ignore arrays",
"description": "ignores arrays",
"data": [],
"valid": true
},
{
"description": "all string formats ignore booleans",
"description": "ignores booleans",
"data": false,
"valid": true
},
{
"description": "all string formats ignore nulls",
"description": "ignores null",
"data": null,
"valid": true
}
]
},
{
"description": "ipv6 format",
"schema": { "format": "ipv6" },
"description": "validation of IPv6 addresses",
"schema": {"format": "ipv6"},
"tests": [
{
"description": "all string formats ignore integers",
"description": "ignores integers",
"data": 12,
"valid": true
},
{
"description": "all string formats ignore floats",
"description": "ignores floats",
"data": 13.7,
"valid": true
},
{
"description": "all string formats ignore objects",
"description": "ignores objects",
"data": {},
"valid": true
},
{
"description": "all string formats ignore arrays",
"description": "ignores arrays",
"data": [],
"valid": true
},
{
"description": "all string formats ignore booleans",
"description": "ignores booleans",
"data": false,
"valid": true
},
{
"description": "all string formats ignore nulls",
"description": "ignores null",
"data": null,
"valid": true
}
]
},
{
"description": "idn-hostname format",
"schema": { "format": "idn-hostname" },
"description": "validation of IDN hostnames",
"schema": {"format": "idn-hostname"},
"tests": [
{
"description": "all string formats ignore integers",
"description": "ignores integers",
"data": 12,
"valid": true
},
{
"description": "all string formats ignore floats",
"description": "ignores floats",
"data": 13.7,
"valid": true
},
{
"description": "all string formats ignore objects",
"description": "ignores objects",
"data": {},
"valid": true
},
{
"description": "all string formats ignore arrays",
"description": "ignores arrays",
"data": [],
"valid": true
},
{
"description": "all string formats ignore booleans",
"description": "ignores booleans",
"data": false,
"valid": true
},
{
"description": "all string formats ignore nulls",
"description": "ignores null",
"data": null,
"valid": true
}
]
},
{
"description": "hostname format",
"schema": { "format": "hostname" },
"description": "validation of hostnames",
"schema": {"format": "hostname"},
"tests": [
{
"description": "all string formats ignore integers",
"description": "ignores integers",
"data": 12,
"valid": true
},
{
"description": "all string formats ignore floats",
"description": "ignores floats",
"data": 13.7,
"valid": true
},
{
"description": "all string formats ignore objects",
"description": "ignores objects",
"data": {},
"valid": true
},
{
"description": "all string formats ignore arrays",
"description": "ignores arrays",
"data": [],
"valid": true
},
{
"description": "all string formats ignore booleans",
"description": "ignores booleans",
"data": false,
"valid": true
},
{
"description": "all string formats ignore nulls",
"description": "ignores null",
"data": null,
"valid": true
}
]
},
{
"description": "date format",
"schema": { "format": "date" },
"description": "validation of date strings",
"schema": {"format": "date"},
"tests": [
{
"description": "all string formats ignore integers",
"description": "ignores integers",
"data": 12,
"valid": true
},
{
"description": "all string formats ignore floats",
"description": "ignores floats",
"data": 13.7,
"valid": true
},
{
"description": "all string formats ignore objects",
"description": "ignores objects",
"data": {},
"valid": true
},
{
"description": "all string formats ignore arrays",
"description": "ignores arrays",
"data": [],
"valid": true
},
{
"description": "all string formats ignore booleans",
"description": "ignores booleans",
"data": false,
"valid": true
},
{
"description": "all string formats ignore nulls",
"description": "ignores null",
"data": null,
"valid": true
}
]
},
{
"description": "date-time format",
"schema": { "format": "date-time" },
"description": "validation of date-time strings",
"schema": {"format": "date-time"},
"tests": [
{
"description": "all string formats ignore integers",
"description": "ignores integers",
"data": 12,
"valid": true
},
{
"description": "all string formats ignore floats",
"description": "ignores floats",
"data": 13.7,
"valid": true
},
{
"description": "all string formats ignore objects",
"description": "ignores objects",
"data": {},
"valid": true
},
{
"description": "all string formats ignore arrays",
"description": "ignores arrays",
"data": [],
"valid": true
},
{
"description": "all string formats ignore booleans",
"description": "ignores booleans",
"data": false,
"valid": true
},
{
"description": "all string formats ignore nulls",
"description": "ignores null",
"data": null,
"valid": true
}
]
},
{
"description": "time format",
"schema": { "format": "time" },
"description": "validation of time strings",
"schema": {"format": "time"},
"tests": [
{
"description": "all string formats ignore integers",
"description": "ignores integers",
"data": 12,
"valid": true
},
{
"description": "all string formats ignore floats",
"description": "ignores floats",
"data": 13.7,
"valid": true
},
{
"description": "all string formats ignore objects",
"description": "ignores objects",
"data": {},
"valid": true
},
{
"description": "all string formats ignore arrays",
"description": "ignores arrays",
"data": [],
"valid": true
},
{
"description": "all string formats ignore booleans",
"description": "ignores booleans",
"data": false,
"valid": true
},
{
"description": "all string formats ignore nulls",
"description": "ignores null",
"data": null,
"valid": true
}
]
},
{
"description": "json-pointer format",
"schema": { "format": "json-pointer" },
"description": "validation of JSON pointers",
"schema": {"format": "json-pointer"},
"tests": [
{
"description": "all string formats ignore integers",
"description": "ignores integers",
"data": 12,
"valid": true
},
{
"description": "all string formats ignore floats",
"description": "ignores floats",
"data": 13.7,
"valid": true
},
{
"description": "all string formats ignore objects",
"description": "ignores objects",
"data": {},
"valid": true
},
{
"description": "all string formats ignore arrays",
"description": "ignores arrays",
"data": [],
"valid": true
},
{
"description": "all string formats ignore booleans",
"description": "ignores booleans",
"data": false,
"valid": true
},
{
"description": "all string formats ignore nulls",
"description": "ignores null",
"data": null,
"valid": true
}
]
},
{
"description": "relative-json-pointer format",
"schema": { "format": "relative-json-pointer" },
"description": "validation of relative JSON pointers",
"schema": {"format": "relative-json-pointer"},
"tests": [
{
"description": "all string formats ignore integers",
"description": "ignores integers",
"data": 12,
"valid": true
},
{
"description": "all string formats ignore floats",
"description": "ignores floats",
"data": 13.7,
"valid": true
},
{
"description": "all string formats ignore objects",
"description": "ignores objects",
"data": {},
"valid": true
},
{
"description": "all string formats ignore arrays",
"description": "ignores arrays",
"data": [],
"valid": true
},
{
"description": "all string formats ignore booleans",
"description": "ignores booleans",
"data": false,
"valid": true
},
{
"description": "all string formats ignore nulls",
"description": "ignores null",
"data": null,
"valid": true
}
]
},
{
"description": "iri format",
"schema": { "format": "iri" },
"description": "validation of IRIs",
"schema": {"format": "iri"},
"tests": [
{
"description": "all string formats ignore integers",
"description": "ignores integers",
"data": 12,
"valid": true
},
{
"description": "all string formats ignore floats",
"description": "ignores floats",
"data": 13.7,
"valid": true
},
{
"description": "all string formats ignore objects",
"description": "ignores objects",
"data": {},
"valid": true
},
{
"description": "all string formats ignore arrays",
"description": "ignores arrays",
"data": [],
"valid": true
},
{
"description": "all string formats ignore booleans",
"description": "ignores booleans",
"data": false,
"valid": true
},
{
"description": "all string formats ignore nulls",
"description": "ignores null",
"data": null,
"valid": true
}
]
},
{
"description": "iri-reference format",
"schema": { "format": "iri-reference" },
"description": "validation of IRI references",
"schema": {"format": "iri-reference"},
"tests": [
{
"description": "all string formats ignore integers",
"description": "ignores integers",
"data": 12,
"valid": true
},
{
"description": "all string formats ignore floats",
"description": "ignores floats",
"data": 13.7,
"valid": true
},
{
"description": "all string formats ignore objects",
"description": "ignores objects",
"data": {},
"valid": true
},
{
"description": "all string formats ignore arrays",
"description": "ignores arrays",
"data": [],
"valid": true
},
{
"description": "all string formats ignore booleans",
"description": "ignores booleans",
"data": false,
"valid": true
},
{
"description": "all string formats ignore nulls",
"description": "ignores null",
"data": null,
"valid": true
}
]
},
{
"description": "uri format",
"schema": { "format": "uri" },
"description": "validation of URIs",
"schema": {"format": "uri"},
"tests": [
{
"description": "all string formats ignore integers",
"description": "ignores integers",
"data": 12,
"valid": true
},
{
"description": "all string formats ignore floats",
"description": "ignores floats",
"data": 13.7,
"valid": true
},
{
"description": "all string formats ignore objects",
"description": "ignores objects",
"data": {},
"valid": true
},
{
"description": "all string formats ignore arrays",
"description": "ignores arrays",
"data": [],
"valid": true
},
{
"description": "all string formats ignore booleans",
"description": "ignores booleans",
"data": false,
"valid": true
},
{
"description": "all string formats ignore nulls",
"description": "ignores null",
"data": null,
"valid": true
}
]
},
{
"description": "uri-reference format",
"schema": { "format": "uri-reference" },
"description": "validation of URI references",
"schema": {"format": "uri-reference"},
"tests": [
{
"description": "all string formats ignore integers",
"description": "ignores integers",
"data": 12,
"valid": true
},
{
"description": "all string formats ignore floats",
"description": "ignores floats",
"data": 13.7,
"valid": true
},
{
"description": "all string formats ignore objects",
"description": "ignores objects",
"data": {},
"valid": true
},
{
"description": "all string formats ignore arrays",
"description": "ignores arrays",
"data": [],
"valid": true
},
{
"description": "all string formats ignore booleans",
"description": "ignores booleans",
"data": false,
"valid": true
},
{
"description": "all string formats ignore nulls",
"description": "ignores null",
"data": null,
"valid": true
}
]
},
{
"description": "uri-template format",
"schema": { "format": "uri-template" },
"description": "validation of URI templates",
"schema": {"format": "uri-template"},
"tests": [
{
"description": "all string formats ignore integers",
"description": "ignores integers",
"data": 12,
"valid": true
},
{
"description": "all string formats ignore floats",
"description": "ignores floats",
"data": 13.7,
"valid": true
},
{
"description": "all string formats ignore objects",
"description": "ignores objects",
"data": {},
"valid": true
},
{
"description": "all string formats ignore arrays",
"description": "ignores arrays",
"data": [],
"valid": true
},
{
"description": "all string formats ignore booleans",
"description": "ignores booleans",
"data": false,
"valid": true
},
{
"description": "all string formats ignore nulls",
"description": "ignores null",
"data": null,
"valid": true
}

View File

@ -1,53 +0,0 @@
[
{
"description": "id inside an enum is not a real identifier",
"comment": "the implementation must not be confused by an id buried in the enum",
"schema": {
"definitions": {
"id_in_enum": {
"enum": [
{
"$id": "https://localhost:1234/id/my_identifier.json",
"type": "null"
}
]
},
"real_id_in_schema": {
"$id": "https://localhost:1234/id/my_identifier.json",
"type": "string"
},
"zzz_id_in_const": {
"const": {
"$id": "https://localhost:1234/id/my_identifier.json",
"type": "null"
}
}
},
"anyOf": [
{ "$ref": "#/definitions/id_in_enum" },
{ "$ref": "https://localhost:1234/id/my_identifier.json" }
]
},
"tests": [
{
"description": "exact match to enum, and type matches",
"data": {
"$id": "https://localhost:1234/id/my_identifier.json",
"type": "null"
},
"valid": true
},
{
"description": "match $ref to id",
"data": "a string to match #/definitions/id_in_enum",
"valid": true
},
{
"description": "no match on enum or $ref to id",
"data": 1,
"valid": false
}
]
}
]

View File

@ -184,75 +184,5 @@
"valid": true
}
]
},
{
"description": "if with boolean schema true",
"schema": {
"if": true,
"then": { "const": "then" },
"else": { "const": "else" }
},
"tests": [
{
"description": "boolean schema true in if always chooses the then path (valid)",
"data": "then",
"valid": true
},
{
"description": "boolean schema true in if always chooses the then path (invalid)",
"data": "else",
"valid": false
}
]
},
{
"description": "if with boolean schema false",
"schema": {
"if": false,
"then": { "const": "then" },
"else": { "const": "else" }
},
"tests": [
{
"description": "boolean schema false in if always chooses the else path (invalid)",
"data": "then",
"valid": false
},
{
"description": "boolean schema false in if always chooses the else path (valid)",
"data": "else",
"valid": true
}
]
},
{
"description": "if appears at the end when serialized (keyword processing sequence)",
"schema": {
"then": { "const": "yes" },
"else": { "const": "other" },
"if": { "maxLength": 4 }
},
"tests": [
{
"description": "yes redirects to then and passes",
"data": "yes",
"valid": true
},
{
"description": "other redirects to else and passes",
"data": "other",
"valid": true
},
{
"description": "no redirects to then and fails",
"data": "no",
"valid": false
},
{
"description": "invalid redirects to else and fails",
"data": "invalid",
"valid": false
}
]
}
]

View File

@ -1,36 +0,0 @@
[
{
"description": "evaluating the same schema location against the same data location twice is not a sign of an infinite loop",
"schema": {
"definitions": {
"int": { "type": "integer" }
},
"allOf": [
{
"properties": {
"foo": {
"$ref": "#/definitions/int"
}
}
},
{
"additionalProperties": {
"$ref": "#/definitions/int"
}
}
]
},
"tests": [
{
"description": "passing case",
"data": { "foo": 1 },
"valid": true
},
{
"description": "failing case",
"data": { "foo": "a string" },
"valid": false
}
]
}
]

View File

@ -34,21 +34,5 @@
"valid": true
}
]
},
{
"description": "maxProperties = 0 means the object is empty",
"schema": { "maxProperties": 0 },
"tests": [
{
"description": "no properties is valid",
"data": {},
"valid": true
},
{
"description": "one property is invalid",
"data": { "foo": 1 },
"valid": false
}
]
}
]

View File

@ -56,16 +56,5 @@
"valid": false
}
]
},
{
"description": "invalid instance should not raise error when float division = inf",
"schema": {"type": "integer", "multipleOf": 0.123456789},
"tests": [
{
"description": "always invalid, but naive implementations may raise an overflow error",
"data": 1e308,
"valid": false
}
]
}
]

View File

@ -74,7 +74,7 @@
"description": "forbidden property",
"schema": {
"properties": {
"foo": {
"foo": {
"not": {}
}
}

View File

@ -1,13 +1,30 @@
[
{
"description": "integer",
"schema": { "type": "integer" },
"schema": {"type": "integer"},
"tests": [
{
"description": "a bignum is an integer",
"data": 12345678910111213141516171819202122232425262728293031,
"valid": true
},
}
]
},
{
"description": "number",
"schema": {"type": "number"},
"tests": [
{
"description": "a bignum is a number",
"data": 98249283749234923498293171823948729348710298301928331,
"valid": true
}
]
},
{
"description": "integer",
"schema": {"type": "integer"},
"tests": [
{
"description": "a negative bignum is an integer",
"data": -12345678910111213141516171819202122232425262728293031,
@ -17,13 +34,8 @@
},
{
"description": "number",
"schema": { "type": "number" },
"schema": {"type": "number"},
"tests": [
{
"description": "a bignum is a number",
"data": 98249283749234923498293171823948729348710298301928331,
"valid": true
},
{
"description": "a negative bignum is a number",
"data": -98249283749234923498293171823948729348710298301928331,
@ -33,7 +45,7 @@
},
{
"description": "string",
"schema": { "type": "string" },
"schema": {"type": "string"},
"tests": [
{
"description": "a bignum is not a string",
@ -44,7 +56,7 @@
},
{
"description": "integer comparison",
"schema": { "maximum": 18446744073709551615 },
"schema": {"maximum": 18446744073709551615},
"tests": [
{
"description": "comparison works for high numbers",
@ -68,7 +80,7 @@
},
{
"description": "integer comparison",
"schema": { "minimum": -18446744073709551615 },
"schema": {"minimum": -18446744073709551615},
"tests": [
{
"description": "comparison works for very negative numbers",

View File

@ -1,13 +0,0 @@
[
{
"description": "all integers are multiples of 0.5, if overflow is handled",
"schema": {"type": "integer", "multipleOf": 0.5},
"tests": [
{
"description": "valid if optional overflow handling is implemented",
"data": 1e308,
"valid": true
}
]
}
]

View File

@ -47,26 +47,6 @@
"description": "only RFC3339 not all of ISO 8601 are valid",
"data": "2013-350T01:01:01",
"valid": false
},
{
"description": "invalid non-padded month dates",
"data": "1963-6-19T08:30:06.283185Z",
"valid": false
},
{
"description": "invalid non-padded day dates",
"data": "1963-06-1T08:30:06.283185Z",
"valid": false
},
{
"description": "non-ascii digits should be rejected in the date portion",
"data": "1963-06-1T00:00:00Z",
"valid": false
},
{
"description": "non-ascii digits should be rejected in the time portion",
"data": "1963-06-11T0:00:00Z",
"valid": false
}
]
}

View File

@ -9,142 +9,7 @@
"valid": true
},
{
"description": "a valid date string with 31 days in January",
"data": "2020-01-31",
"valid": true
},
{
"description": "a invalid date string with 32 days in January",
"data": "2020-01-32",
"valid": false
},
{
"description": "a valid date string with 28 days in February (normal)",
"data": "2021-02-28",
"valid": true
},
{
"description": "a invalid date string with 29 days in February (normal)",
"data": "2021-02-29",
"valid": false
},
{
"description": "a valid date string with 29 days in February (leap)",
"data": "2020-02-29",
"valid": true
},
{
"description": "a invalid date string with 30 days in February (leap)",
"data": "2020-02-30",
"valid": false
},
{
"description": "a valid date string with 31 days in March",
"data": "2020-03-31",
"valid": true
},
{
"description": "a invalid date string with 32 days in March",
"data": "2020-03-32",
"valid": false
},
{
"description": "a valid date string with 30 days in April",
"data": "2020-04-30",
"valid": true
},
{
"description": "a invalid date string with 31 days in April",
"data": "2020-04-31",
"valid": false
},
{
"description": "a valid date string with 31 days in May",
"data": "2020-05-31",
"valid": true
},
{
"description": "a invalid date string with 32 days in May",
"data": "2020-05-32",
"valid": false
},
{
"description": "a valid date string with 30 days in June",
"data": "2020-06-30",
"valid": true
},
{
"description": "a invalid date string with 31 days in June",
"data": "2020-06-31",
"valid": false
},
{
"description": "a valid date string with 31 days in July",
"data": "2020-07-31",
"valid": true
},
{
"description": "a invalid date string with 32 days in July",
"data": "2020-07-32",
"valid": false
},
{
"description": "a valid date string with 31 days in August",
"data": "2020-08-31",
"valid": true
},
{
"description": "a invalid date string with 32 days in August",
"data": "2020-08-32",
"valid": false
},
{
"description": "a valid date string with 30 days in September",
"data": "2020-09-30",
"valid": true
},
{
"description": "a invalid date string with 31 days in September",
"data": "2020-09-31",
"valid": false
},
{
"description": "a valid date string with 31 days in October",
"data": "2020-10-31",
"valid": true
},
{
"description": "a invalid date string with 32 days in October",
"data": "2020-10-32",
"valid": false
},
{
"description": "a valid date string with 30 days in November",
"data": "2020-11-30",
"valid": true
},
{
"description": "a invalid date string with 31 days in November",
"data": "2020-11-31",
"valid": false
},
{
"description": "a valid date string with 31 days in December",
"data": "2020-12-31",
"valid": true
},
{
"description": "a invalid date string with 32 days in December",
"data": "2020-12-32",
"valid": false
},
{
"description": "a invalid date string with invalid month",
"data": "2020-13-01",
"valid": false
},
{
"description": "an invalid date string",
"description": "an invalid date-time string",
"data": "06/19/1963",
"valid": false
},
@ -152,41 +17,6 @@
"description": "only RFC3339 not all of ISO 8601 are valid",
"data": "2013-350",
"valid": false
},
{
"description": "non-padded month dates are not valid",
"data": "1998-1-20",
"valid": false
},
{
"description": "non-padded day dates are not valid",
"data": "1998-01-1",
"valid": false
},
{
"description": "invalid month",
"data": "1998-13-01",
"valid": false
},
{
"description": "invalid month-day combination",
"data": "1998-04-31",
"valid": false
},
{
"description": "2021 is not a leap year",
"data": "2021-02-29",
"valid": false
},
{
"description": "2020 is a leap year",
"data": "2020-02-29",
"valid": true
},
{
"description": "non-ascii digits should be rejected",
"data": "1963-06-1",
"valid": false
}
]
}

View File

@ -1,4 +1,15 @@
[
{
"description": "ECMA 262 regex non-compliance",
"schema": { "format": "regex" },
"tests": [
{
"description": "ECMA 262 has no support for \\Z anchor from .NET",
"data": "^\\S(|(.|\\n)*\\S)\\Z",
"valid": false
}
]
},
{
"description": "ECMA 262 regex $ does not match trailing newline",
"schema": {
@ -8,7 +19,7 @@
"tests": [
{
"description": "matches in Python, but should not in jsonschema",
"data": "abc\\n",
"data": "abc\n",
"valid": false
},
{
@ -143,7 +154,7 @@
]
},
{
"description": "ECMA 262 \\W matches everything but ascii letters",
"description": "ECMA 262 \\w matches everything but ascii letters",
"schema": {
"type": "string",
"pattern": "^\\W$"
@ -162,7 +173,7 @@
]
},
{
"description": "ECMA 262 \\s matches whitespace",
"description": "ECMA 262 \\s matches ascii whitespace only",
"schema": {
"type": "string",
"pattern": "^\\s$"
@ -174,59 +185,14 @@
"valid": true
},
{
"description": "Character tabulation matches",
"data": "\t",
"valid": true
},
{
"description": "Line tabulation matches",
"data": "\u000b",
"valid": true
},
{
"description": "Form feed matches",
"data": "\u000c",
"valid": true
},
{
"description": "latin-1 non-breaking-space matches",
"description": "latin-1 non-breaking-space does not match (unlike e.g. Python)",
"data": "\u00a0",
"valid": true
},
{
"description": "zero-width whitespace matches",
"data": "\ufeff",
"valid": true
},
{
"description": "line feed matches (line terminator)",
"data": "\u000a",
"valid": true
},
{
"description": "paragraph separator matches (line terminator)",
"data": "\u2029",
"valid": true
},
{
"description": "EM SPACE matches (Space_Separator)",
"data": "\u2003",
"valid": true
},
{
"description": "Non-whitespace control does not match",
"data": "\u0001",
"valid": false
},
{
"description": "Non-whitespace does not match",
"data": "\u2013",
"valid": false
}
]
},
{
"description": "ECMA 262 \\S matches everything but whitespace",
"description": "ECMA 262 \\S matches everything but ascii whitespace",
"schema": {
"type": "string",
"pattern": "^\\S$"
@ -238,53 +204,8 @@
"valid": false
},
{
"description": "Character tabulation does not match",
"data": "\t",
"valid": false
},
{
"description": "Line tabulation does not match",
"data": "\u000b",
"valid": false
},
{
"description": "Form feed does not match",
"data": "\u000c",
"valid": false
},
{
"description": "latin-1 non-breaking-space does not match",
"description": "latin-1 non-breaking-space matches (unlike e.g. Python)",
"data": "\u00a0",
"valid": false
},
{
"description": "zero-width whitespace does not match",
"data": "\ufeff",
"valid": false
},
{
"description": "line feed does not match (line terminator)",
"data": "\u000a",
"valid": false
},
{
"description": "paragraph separator does not match (line terminator)",
"data": "\u2029",
"valid": false
},
{
"description": "EM SPACE does not match (Space_Separator)",
"data": "\u2003",
"valid": false
},
{
"description": "Non-whitespace control matches",
"data": "\u0001",
"valid": true
},
{
"description": "Non-whitespace matches",
"data": "\u2013",
"valid": true
}
]

View File

@ -12,41 +12,6 @@
"description": "an invalid e-mail address",
"data": "2962",
"valid": false
},
{
"description": "tilde in local part is valid",
"data": "te~st@example.com",
"valid": true
},
{
"description": "tilde before local part is valid",
"data": "~test@example.com",
"valid": true
},
{
"description": "tilde after local part is valid",
"data": "test~@example.com",
"valid": true
},
{
"description": "dot before local part is not valid",
"data": ".test@example.com",
"valid": false
},
{
"description": "dot after local part is not valid",
"data": "test.@example.com",
"valid": false
},
{
"description": "two separated dots inside local part are valid",
"data": "te.s.t@example.com",
"valid": true
},
{
"description": "two subsequent dots inside local part are not valid",
"data": "te..st@example.com",
"valid": false
}
]
}

View File

@ -27,41 +27,6 @@
"description": "a host name with a component too long",
"data": "a-vvvvvvvvvvvvvvvveeeeeeeeeeeeeeeerrrrrrrrrrrrrrrryyyyyyyyyyyyyyyy-long-host-name-component",
"valid": false
},
{
"description": "starts with hyphen",
"data": "-hostname",
"valid": false
},
{
"description": "ends with hyphen",
"data": "hostname-",
"valid": false
},
{
"description": "starts with underscore",
"data": "_hostname",
"valid": false
},
{
"description": "ends with underscore",
"data": "hostname_",
"valid": false
},
{
"description": "contains underscore",
"data": "host_name",
"valid": false
},
{
"description": "maximum label length",
"data": "abcdefghijklmnopqrstuvwxyzabcdefghijklmnopqrstuvwxyzabcdefghijk.com",
"valid": true
},
{
"description": "exceeds maximum label length",
"data": "abcdefghijklmnopqrstuvwxyzabcdefghijklmnopqrstuvwxyzabcdefghijkl.com",
"valid": false
}
]
}

View File

@ -12,16 +12,6 @@
"description": "an invalid idn e-mail address",
"data": "2962",
"valid": false
},
{
"description": "a valid e-mail address",
"data": "joe.bloggs@example.com",
"valid": true
},
{
"description": "an invalid e-mail address",
"data": "2962",
"valid": false
}
]
}

View File

@ -22,252 +22,6 @@
"description": "a host name with a component too long",
"data": "실실실실실실실실실실실실실실실실실실실실실실실실실실실실실실실실실실실실실실실실실실실실실실실실실실실실례례테스트례례례례례례례례례례례례례례례례례테스트례례례례례례례례례례례례례례례례례례례테스트례례례례례례례례례례례례테스트례례실례.테스트",
"valid": false
},
{
"description": "invalid label, correct Punycode",
"comment": "https://tools.ietf.org/html/rfc5890#section-2.3.2.1 https://tools.ietf.org/html/rfc5891#section-4.4 https://tools.ietf.org/html/rfc3492#section-7.1",
"data": "-> $1.00 <--",
"valid": false
},
{
"description": "valid Chinese Punycode",
"comment": "https://tools.ietf.org/html/rfc5890#section-2.3.2.1 https://tools.ietf.org/html/rfc5891#section-4.4",
"data": "xn--ihqwcrb4cv8a8dqg056pqjye",
"valid": true
},
{
"description": "invalid Punycode",
"comment": "https://tools.ietf.org/html/rfc5891#section-4.4 https://tools.ietf.org/html/rfc5890#section-2.3.2.1",
"data": "xn--X",
"valid": false
},
{
"description": "U-label contains \"--\" in the 3rd and 4th position",
"comment": "https://tools.ietf.org/html/rfc5891#section-4.2.3.1 https://tools.ietf.org/html/rfc5890#section-2.3.2.1",
"data": "XN--aa---o47jg78q",
"valid": false
},
{
"description": "U-label starts with a dash",
"comment": "https://tools.ietf.org/html/rfc5891#section-4.2.3.1",
"data": "-hello",
"valid": false
},
{
"description": "U-label ends with a dash",
"comment": "https://tools.ietf.org/html/rfc5891#section-4.2.3.1",
"data": "hello-",
"valid": false
},
{
"description": "U-label starts and ends with a dash",
"comment": "https://tools.ietf.org/html/rfc5891#section-4.2.3.1",
"data": "-hello-",
"valid": false
},
{
"description": "Begins with a Spacing Combining Mark",
"comment": "https://tools.ietf.org/html/rfc5891#section-4.2.3.2",
"data": "\u0903hello",
"valid": false
},
{
"description": "Begins with a Nonspacing Mark",
"comment": "https://tools.ietf.org/html/rfc5891#section-4.2.3.2",
"data": "\u0300hello",
"valid": false
},
{
"description": "Begins with an Enclosing Mark",
"comment": "https://tools.ietf.org/html/rfc5891#section-4.2.3.2",
"data": "\u0488hello",
"valid": false
},
{
"description": "Exceptions that are PVALID, left-to-right chars",
"comment": "https://tools.ietf.org/html/rfc5891#section-4.2.2 https://tools.ietf.org/html/rfc5892#section-2.6",
"data": "\u00df\u03c2\u0f0b\u3007",
"valid": true
},
{
"description": "Exceptions that are PVALID, right-to-left chars",
"comment": "https://tools.ietf.org/html/rfc/rfc5891#section-4.2.2 https://tools.ietf.org/html/rfc5892#section-2.6",
"data": "\u06fd\u06fe",
"valid": true
},
{
"description": "Exceptions that are DISALLOWED, right-to-left chars",
"comment": "https://tools.ietf.org/html/rfc5891#section-4.2.2 https://tools.ietf.org/html/rfc5892#section-2.6",
"data": "\u0640\u07fa",
"valid": false
},
{
"description": "Exceptions that are DISALLOWED, left-to-right chars",
"comment": "https://tools.ietf.org/html/rfc5891#section-4.2.2 https://tools.ietf.org/html/rfc5892#section-2.6 Note: The two combining marks (U+302E and U+302F) are in the middle and not at the start",
"data": "\u3031\u3032\u3033\u3034\u3035\u302e\u302f\u303b",
"valid": false
},
{
"description": "MIDDLE DOT with no preceding 'l'",
"comment": "https://tools.ietf.org/html/rfc5891#section-4.2.3.3 https://tools.ietf.org/html/rfc5892#appendix-A.3",
"data": "a\u00b7l",
"valid": false
},
{
"description": "MIDDLE DOT with nothing preceding",
"comment": "https://tools.ietf.org/html/rfc5891#section-4.2.3.3 https://tools.ietf.org/html/rfc5892#appendix-A.3",
"data": "\u00b7l",
"valid": false
},
{
"description": "MIDDLE DOT with no following 'l'",
"comment": "https://tools.ietf.org/html/rfc5891#section-4.2.3.3 https://tools.ietf.org/html/rfc5892#appendix-A.3",
"data": "l\u00b7a",
"valid": false
},
{
"description": "MIDDLE DOT with nothing following",
"comment": "https://tools.ietf.org/html/rfc5891#section-4.2.3.3 https://tools.ietf.org/html/rfc5892#appendix-A.3",
"data": "l\u00b7",
"valid": false
},
{
"description": "MIDDLE DOT with surrounding 'l's",
"comment": "https://tools.ietf.org/html/rfc5891#section-4.2.3.3 https://tools.ietf.org/html/rfc5892#appendix-A.3",
"data": "l\u00b7l",
"valid": true
},
{
"description": "Greek KERAIA not followed by Greek",
"comment": "https://tools.ietf.org/html/rfc5891#section-4.2.3.3 https://tools.ietf.org/html/rfc5892#appendix-A.4",
"data": "\u03b1\u0375S",
"valid": false
},
{
"description": "Greek KERAIA not followed by anything",
"comment": "https://tools.ietf.org/html/rfc5891#section-4.2.3.3 https://tools.ietf.org/html/rfc5892#appendix-A.4",
"data": "\u03b1\u0375",
"valid": false
},
{
"description": "Greek KERAIA followed by Greek",
"comment": "https://tools.ietf.org/html/rfc5891#section-4.2.3.3 https://tools.ietf.org/html/rfc5892#appendix-A.4",
"data": "\u03b1\u0375\u03b2",
"valid": true
},
{
"description": "Hebrew GERESH not preceded by Hebrew",
"comment": "https://tools.ietf.org/html/rfc5891#section-4.2.3.3 https://tools.ietf.org/html/rfc5892#appendix-A.5",
"data": "A\u05f3\u05d1",
"valid": false
},
{
"description": "Hebrew GERESH not preceded by anything",
"comment": "https://tools.ietf.org/html/rfc5891#section-4.2.3.3 https://tools.ietf.org/html/rfc5892#appendix-A.5",
"data": "\u05f3\u05d1",
"valid": false
},
{
"description": "Hebrew GERESH preceded by Hebrew",
"comment": "https://tools.ietf.org/html/rfc5891#section-4.2.3.3 https://tools.ietf.org/html/rfc5892#appendix-A.5",
"data": "\u05d0\u05f3\u05d1",
"valid": true
},
{
"description": "Hebrew GERSHAYIM not preceded by Hebrew",
"comment": "https://tools.ietf.org/html/rfc5891#section-4.2.3.3 https://tools.ietf.org/html/rfc5892#appendix-A.6",
"data": "A\u05f4\u05d1",
"valid": false
},
{
"description": "Hebrew GERSHAYIM not preceded by anything",
"comment": "https://tools.ietf.org/html/rfc5891#section-4.2.3.3 https://tools.ietf.org/html/rfc5892#appendix-A.6",
"data": "\u05f4\u05d1",
"valid": false
},
{
"description": "Hebrew GERSHAYIM preceded by Hebrew",
"comment": "https://tools.ietf.org/html/rfc5891#section-4.2.3.3 https://tools.ietf.org/html/rfc5892#appendix-A.6",
"data": "\u05d0\u05f4\u05d1",
"valid": true
},
{
"description": "KATAKANA MIDDLE DOT with no Hiragana, Katakana, or Han",
"comment": "https://tools.ietf.org/html/rfc5891#section-4.2.3.3 https://tools.ietf.org/html/rfc5892#appendix-A.7",
"data": "def\u30fbabc",
"valid": false
},
{
"description": "KATAKANA MIDDLE DOT with no other characters",
"comment": "https://tools.ietf.org/html/rfc5891#section-4.2.3.3 https://tools.ietf.org/html/rfc5892#appendix-A.7",
"data": "\u30fb",
"valid": false
},
{
"description": "KATAKANA MIDDLE DOT with Hiragana",
"comment": "https://tools.ietf.org/html/rfc5891#section-4.2.3.3 https://tools.ietf.org/html/rfc5892#appendix-A.7",
"data": "\u30fb\u3041",
"valid": true
},
{
"description": "KATAKANA MIDDLE DOT with Katakana",
"comment": "https://tools.ietf.org/html/rfc5891#section-4.2.3.3 https://tools.ietf.org/html/rfc5892#appendix-A.7",
"data": "\u30fb\u30a1",
"valid": true
},
{
"description": "KATAKANA MIDDLE DOT with Han",
"comment": "https://tools.ietf.org/html/rfc5891#section-4.2.3.3 https://tools.ietf.org/html/rfc5892#appendix-A.7",
"data": "\u30fb\u4e08",
"valid": true
},
{
"description": "Arabic-Indic digits mixed with Extended Arabic-Indic digits",
"comment": "https://tools.ietf.org/html/rfc5891#section-4.2.3.3 https://tools.ietf.org/html/rfc5892#appendix-A.8",
"data": "\u0660\u06f0",
"valid": false
},
{
"description": "Arabic-Indic digits not mixed with Extended Arabic-Indic digits",
"comment": "https://tools.ietf.org/html/rfc5891#section-4.2.3.3 https://tools.ietf.org/html/rfc5892#appendix-A.8",
"data": "\u0628\u0660\u0628",
"valid": true
},
{
"description": "Extended Arabic-Indic digits not mixed with Arabic-Indic digits",
"comment": "https://tools.ietf.org/html/rfc5891#section-4.2.3.3 https://tools.ietf.org/html/rfc5892#appendix-A.9",
"data": "\u06f00",
"valid": true
},
{
"description": "ZERO WIDTH JOINER not preceded by Virama",
"comment": "https://tools.ietf.org/html/rfc5891#section-4.2.3.3 https://tools.ietf.org/html/rfc5892#appendix-A.2 https://www.unicode.org/review/pr-37.pdf",
"data": "\u0915\u200d\u0937",
"valid": false
},
{
"description": "ZERO WIDTH JOINER not preceded by anything",
"comment": "https://tools.ietf.org/html/rfc5891#section-4.2.3.3 https://tools.ietf.org/html/rfc5892#appendix-A.2 https://www.unicode.org/review/pr-37.pdf",
"data": "\u200d\u0937",
"valid": false
},
{
"description": "ZERO WIDTH JOINER preceded by Virama",
"comment": "https://tools.ietf.org/html/rfc5891#section-4.2.3.3 https://tools.ietf.org/html/rfc5892#appendix-A.2 https://www.unicode.org/review/pr-37.pdf",
"data": "\u0915\u094d\u200d\u0937",
"valid": true
},
{
"description": "ZERO WIDTH NON-JOINER preceded by Virama",
"comment": "https://tools.ietf.org/html/rfc5891#section-4.2.3.3 https://tools.ietf.org/html/rfc5892#appendix-A.1",
"data": "\u0915\u094d\u200c\u0937",
"valid": true
},
{
"description": "ZERO WIDTH NON-JOINER not preceded by Virama but matches regexp",
"comment": "https://tools.ietf.org/html/rfc5891#section-4.2.3.3 https://tools.ietf.org/html/rfc5892#appendix-A.1 https://www.w3.org/TR/alreq/#h_disjoining_enforcement",
"data": "\u0628\u064a\u200c\u0628\u064a",
"valid": true
}
]
}

View File

@ -27,27 +27,6 @@
"description": "an IP address as an integer",
"data": "0x7f000001",
"valid": false
},
{
"description": "an IP address as an integer (decimal)",
"data": "2130706433",
"valid": false
},
{
"description": "leading zeroes should be rejected, as they are treated as octals",
"comment": "see https://sick.codes/universal-netmask-npm-package-used-by-270000-projects-vulnerable-to-octal-input-data-server-side-request-forgery-remote-file-inclusion-local-file-inclusion-and-more-cve-2021-28918/",
"data": "087.10.0.1",
"valid": false
},
{
"description": "value without leading zero is valid",
"data": "87.10.0.1",
"valid": true
},
{
"description": "non-ascii digits should be rejected",
"data": "1২7.0.0.1",
"valid": false
}
]
}
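
The IPv4 cases in this hunk are mostly about octet syntax: leading zeroes are rejected because many parsers read them as octal, and only ASCII digits count. A rough per-octet sketch (hypothetical helper in C++):

#include <string>

// Per-octet check matching the cases above: 1-3 ASCII digits,
// no leading zero except "0" itself, value at most 255.
bool ipv4_octet_ok(const std::string &s)
{
	if (s.empty() || s.size() > 3)
		return false;
	for (char c : s)
		if (c < '0' || c > '9')
			return false; // rejects non-ASCII digits
	if (s.size() > 1 && s[0] == '0')
		return false; // "087" would be read as octal elsewhere
	return std::stoi(s) <= 255;
}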

View File

@ -22,141 +22,6 @@
"description": "an IPv6 address containing illegal characters",
"data": "::laptop",
"valid": false
},
{
"description": "no digits is valid",
"data": "::",
"valid": true
},
{
"description": "leading colons is valid",
"data": "::42:ff:1",
"valid": true
},
{
"description": "trailing colons is valid",
"data": "d6::",
"valid": true
},
{
"description": "missing leading octet is invalid",
"data": ":2:3:4:5:6:7:8",
"valid": false
},
{
"description": "missing trailing octet is invalid",
"data": "1:2:3:4:5:6:7:",
"valid": false
},
{
"description": "missing leading octet with omitted octets later",
"data": ":2:3:4::8",
"valid": false
},
{
"description": "two sets of double colons is invalid",
"data": "1::d6::42",
"valid": false
},
{
"description": "mixed format with the ipv4 section as decimal octets",
"data": "1::d6:192.168.0.1",
"valid": true
},
{
"description": "mixed format with double colons between the sections",
"data": "1:2::192.168.0.1",
"valid": true
},
{
"description": "mixed format with ipv4 section with octet out of range",
"data": "1::2:192.168.256.1",
"valid": false
},
{
"description": "mixed format with ipv4 section with a hex octet",
"data": "1::2:192.168.ff.1",
"valid": false
},
{
"description": "mixed format with leading double colons (ipv4-mapped ipv6 address)",
"data": "::ffff:192.168.0.1",
"valid": true
},
{
"description": "triple colons is invalid",
"data": "1:2:3:4:5:::8",
"valid": false
},
{
"description": "8 octets",
"data": "1:2:3:4:5:6:7:8",
"valid": true
},
{
"description": "insufficient octets without double colons",
"data": "1:2:3:4:5:6:7",
"valid": false
},
{
"description": "no colons is invalid",
"data": "1",
"valid": false
},
{
"description": "ipv4 is not ipv6",
"data": "127.0.0.1",
"valid": false
},
{
"description": "ipv4 segment must have 4 octets",
"data": "1:2:3:4:1.2.3",
"valid": false
},
{
"description": "leading whitespace is invalid",
"data": " ::1",
"valid": false
},
{
"description": "trailing whitespace is invalid",
"data": "::1 ",
"valid": false
},
{
"description": "netmask is not a part of ipv6 address",
"data": "fe80::/64",
"valid": false
},
{
"description": "zone id is not a part of ipv6 address",
"data": "fe80::a%eth1",
"valid": false
},
{
"description": "a long valid ipv6",
"data": "1000:1000:1000:1000:1000:1000:255.255.255.255",
"valid": true
},
{
"description": "a long invalid ipv6, below length limit, first",
"data": "100:100:100:100:100:100:255.255.255.255.255",
"valid": false
},
{
"description": "a long invalid ipv6, below length limit, second",
"data": "100:100:100:100:100:100:100:255.255.255.255",
"valid": false
},
{
"description": "non-ascii digits should be rejected",
"data": "1:2:3:4:5:6:7:",
"valid": false
},
{
"description": "non-ascii digits should be rejected in the ipv4 portion also",
"data": "1:2::192.16.0.1",
"valid": false
}
]
}

View File

@ -9,7 +9,7 @@
"valid": true
},
{
"description": "a valid IRI with anchor tag and parentheses",
"description": "a valid IRI with anchor tag and parantheses",
"data": "http://ƒøø.com/blah_(wîkïpédiå)_blah#ßité-1",
"valid": true
},

View File

@ -32,21 +32,6 @@
"description": "negative prefix",
"data": "-1/foo/bar",
"valid": false
},
{
"description": "## is not a valid json-pointer",
"data": "0##",
"valid": false
},
{
"description": "zero cannot be followed by other digits, plus json-pointer",
"data": "01/a",
"valid": false
},
{
"description": "zero cannot be followed by other digits, plus octothorpe",
"data": "01#",
"valid": false
}
]
}
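
These cases follow the relative JSON pointer grammar: a non-negative integer prefix with no leading zeroes, followed by either a single '#' or a JSON pointer (empty or starting with '/'). A rough shape check (hypothetical, using C++ std::regex) that rejects "-1/foo/bar", "0##" and "01/a":

#include <regex>
#include <string>

// prefix: "0" or a digit string with no leading zero;
// suffix: a lone '#' or a JSON pointer (empty or starting with '/').
const std::regex relative_pointer_re("^(0|[1-9][0-9]*)(#|(/.*)?)$");

bool relative_pointer_ok(const std::string &s)
{
	return std::regex_match(s, relative_pointer_re);
}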

View File

@ -5,146 +5,11 @@
"tests": [
{
"description": "a valid time string",
"data": "08:30:06Z",
"valid": true
},
{
"description": "a valid time string with leap second, Zulu",
"data": "23:59:60Z",
"valid": true
},
{
"description": "invalid leap second, Zulu (wrong hour)",
"data": "22:59:60Z",
"valid": false
},
{
"description": "invalid leap second, Zulu (wrong minute)",
"data": "23:58:60Z",
"valid": false
},
{
"description": "valid leap second, zero time-offset",
"data": "23:59:60+00:00",
"valid": true
},
{
"description": "invalid leap second, zero time-offset (wrong hour)",
"data": "22:59:60+00:00",
"valid": false
},
{
"description": "invalid leap second, zero time-offset (wrong minute)",
"data": "23:58:60+00:00",
"valid": false
},
{
"description": "valid leap second, positive time-offset",
"data": "01:29:60+01:30",
"valid": true
},
{
"description": "valid leap second, large positive time-offset",
"data": "23:29:60+23:30",
"valid": true
},
{
"description": "invalid leap second, positive time-offset (wrong hour)",
"data": "23:59:60+01:00",
"valid": false
},
{
"description": "invalid leap second, positive time-offset (wrong minute)",
"data": "23:59:60+00:30",
"valid": false
},
{
"description": "valid leap second, negative time-offset",
"data": "15:59:60-08:00",
"valid": true
},
{
"description": "valid leap second, large negative time-offset",
"data": "00:29:60-23:30",
"valid": true
},
{
"description": "invalid leap second, negative time-offset (wrong hour)",
"data": "23:59:60-01:00",
"valid": false
},
{
"description": "invalid leap second, negative time-offset (wrong minute)",
"data": "23:59:60-00:30",
"valid": false
},
{
"description": "a valid time string with second fraction",
"data": "23:20:50.52Z",
"valid": true
},
{
"description": "a valid time string with precise second fraction",
"data": "08:30:06.283185Z",
"valid": true
},
{
"description": "a valid time string with plus offset",
"data": "08:30:06+00:20",
"valid": true
},
{
"description": "a valid time string with minus offset",
"data": "08:30:06-08:00",
"valid": true
},
{
"description": "a valid time string with case-insensitive Z",
"data": "08:30:06z",
"valid": true
},
{
"description": "an invalid time string with invalid hour",
"data": "24:00:00Z",
"valid": false
},
{
"description": "an invalid time string with invalid minute",
"data": "00:60:00Z",
"valid": false
},
{
"description": "an invalid time string with invalid second",
"data": "00:00:61Z",
"valid": false
},
{
"description": "an invalid time string with invalid leap second (wrong hour)",
"data": "22:59:60Z",
"valid": false
},
{
"description": "an invalid time string with invalid leap second (wrong minute)",
"data": "23:58:60Z",
"valid": false
},
{
"description": "an invalid time string with invalid time numoffset hour",
"data": "01:02:03+24:00",
"valid": false
},
{
"description": "an invalid time string with invalid time numoffset minute",
"data": "01:02:03+00:60",
"valid": false
},
{
"description": "an invalid time string with invalid time with both Z and numoffset",
"data": "01:02:03Z+00:30",
"valid": false
},
{
"description": "an invalid offset indicator",
"description": "an invalid time string",
"data": "08:30:06 PST",
"valid": false
},
@ -152,16 +17,6 @@
"description": "only RFC3339 not all of ISO 8601 are valid",
"data": "01:01:01,1111",
"valid": false
},
{
"description": "no time offset",
"data": "12:00:00",
"valid": false
},
{
"description": "non-ascii digits should be rejected",
"data": "1২:00:00Z",
"valid": false
}
]
}
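
Most of the cases in this hunk probe RFC 3339 leap-second handling: a seconds field of 60 is only plausible when the time, shifted to UTC by its numeric offset, lands on 23:59. A minimal sketch of that rule (hypothetical helper in C++), which accepts 23:59:60Z and 01:29:60+01:30 but rejects 22:59:60Z:

// hh/mm are the local hour and minute, offset_minutes the numeric offset
// (e.g. +01:30 -> 90). A seconds value of 60 is only allowed at 23:59 UTC.
bool leap_second_ok(int hh, int mm, int offset_minutes)
{
	int utc = (hh * 60 + mm - offset_minutes) % (24 * 60);
	if (utc < 0)
		utc += 24 * 60;
	return utc == 23 * 60 + 59;
}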

View File

@ -9,7 +9,7 @@
"valid": true
},
{
"description": "a valid URL with anchor tag and parentheses",
"description": "a valid URL with anchor tag and parantheses",
"data": "http://foo.com/blah_(wikipedia)_blah#cite-1",
"valid": true
},
@ -97,11 +97,6 @@
"description": "an invalid URI with spaces and missing scheme",
"data": ":// should fail",
"valid": false
},
{
"description": "an invalid URI with comma in scheme",
"data": "bar,baz:foo",
"valid": false
}
]
}

View File

@ -1,85 +0,0 @@
[
{
"description": "uuid format",
"schema": {
"format": "uuid"
},
"tests": [
{
"description": "all upper-case",
"data": "2EB8AA08-AA98-11EA-B4AA-73B441D16380",
"valid": true
},
{
"description": "all lower-case",
"data": "2eb8aa08-aa98-11ea-b4aa-73b441d16380",
"valid": true
},
{
"description": "mixed case",
"data": "2eb8aa08-AA98-11ea-B4Aa-73B441D16380",
"valid": true
},
{
"description": "all zeroes is valid",
"data": "00000000-0000-0000-0000-000000000000",
"valid": true
},
{
"description": "wrong length",
"data": "2eb8aa08-aa98-11ea-b4aa-73b441d1638",
"valid": false
},
{
"description": "missing section",
"data": "2eb8aa08-aa98-11ea-73b441d16380",
"valid": false
},
{
"description": "bad characters (not hex)",
"data": "2eb8aa08-aa98-11ea-b4ga-73b441d16380",
"valid": false
},
{
"description": "no dashes",
"data": "2eb8aa08aa9811eab4aa73b441d16380",
"valid": false
},
{
"description": "too few dashes",
"data": "2eb8aa08aa98-11ea-b4aa73b441d16380",
"valid": false
},
{
"description": "too many dashes",
"data": "2eb8-aa08-aa98-11ea-b4aa73b44-1d16380",
"valid": false
},
{
"description": "dashes in the wrong spot",
"data": "2eb8aa08aa9811eab4aa73b441d16380----",
"valid": false
},
{
"description": "valid version 4",
"data": "98d80576-482e-427f-8434-7f86890ab222",
"valid": true
},
{
"description": "valid version 5",
"data": "99c17cbb-656f-564a-940f-1a4568f03487",
"valid": true
},
{
"description": "hypothetical version 6",
"data": "99c17cbb-656f-664a-940f-1a4568f03487",
"valid": true
},
{
"description": "hypothetical version 15",
"data": "99c17cbb-656f-f64a-940f-1a4568f03487",
"valid": true
}
]
}
]
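
This file boils down to a shape check: 8-4-4-4-12 hexadecimal digits in any case, with no constraint on the version or variant nibbles (which is why "hypothetical version 15" is valid). A rough sketch (hypothetical helper, C++ std::regex):

#include <regex>
#include <string>

// 8-4-4-4-12 hex digits, case-insensitive, dashes in fixed positions;
// the version and variant nibbles are deliberately not constrained.
const std::regex uuid_re(
	"^[0-9a-fA-F]{8}-[0-9a-fA-F]{4}-[0-9a-fA-F]{4}-"
	"[0-9a-fA-F]{4}-[0-9a-fA-F]{12}$");

bool uuid_ok(const std::string &s)
{
	return std::regex_match(s, uuid_re);
}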

View File

@ -1,146 +0,0 @@
[
{
"description": "unicode semantics should be used for all pattern matching",
"schema": { "pattern": "\\wcole" },
"tests": [
{
"description": "literal unicode character in json string",
"data": "Les hivers de mon enfance étaient des saisons longues, longues. Nous vivions en trois lieux: l'école, l'église et la patinoire; mais la vraie vie était sur la patinoire.",
"valid": true
},
{
"description": "unicode character in hex format in string",
"data": "Les hivers de mon enfance étaient des saisons longues, longues. Nous vivions en trois lieux: l'\u00e9cole, l'église et la patinoire; mais la vraie vie était sur la patinoire.",
"valid": true
},
{
"description": "unicode matching is case-sensitive",
"data": "LES HIVERS DE MON ENFANCE ÉTAIENT DES SAISONS LONGUES, LONGUES. NOUS VIVIONS EN TROIS LIEUX: L'ÉCOLE, L'ÉGLISE ET LA PATINOIRE; MAIS LA VRAIE VIE ÉTAIT SUR LA PATINOIRE.",
"valid": false
}
]
},
{
"description": "unicode characters do not match ascii ranges",
"schema": { "pattern": "[a-z]cole" },
"tests": [
{
"description": "literal unicode character in json string",
"data": "Les hivers de mon enfance étaient des saisons longues, longues. Nous vivions en trois lieux: l'école, l'église et la patinoire; mais la vraie vie était sur la patinoire.",
"valid": false
},
{
"description": "unicode character in hex format in string",
"data": "Les hivers de mon enfance étaient des saisons longues, longues. Nous vivions en trois lieux: l'\u00e9cole, l'église et la patinoire; mais la vraie vie était sur la patinoire.",
"valid": false
},
{
"description": "ascii characters match",
"data": "Les hivers de mon enfance etaient des saisons longues, longues. Nous vivions en trois lieux: l'ecole, l'eglise et la patinoire; mais la vraie vie etait sur la patinoire.",
"valid": true
}
]
},
{
"description": "unicode digits are more than 0 through 9",
"schema": { "pattern": "^\\d+$" },
"tests": [
{
"description": "ascii digits",
"data": "42",
"valid": true
},
{
"description": "ascii non-digits",
"data": "-%#",
"valid": false
},
{
"description": "non-ascii digits (BENGALI DIGIT FOUR, BENGALI DIGIT TWO)",
"data": "৪২",
"valid": true
}
]
},
{
"description": "unicode semantics should be used for all patternProperties matching",
"schema": {
"type": "object",
"patternProperties": {
"\\wcole": true
},
"additionalProperties": false
},
"tests": [
{
"description": "literal unicode character in json string",
"data": { "l'école": "pas de vraie vie" },
"valid": true
},
{
"description": "unicode character in hex format in string",
"data": { "l'\u00e9cole": "pas de vraie vie" },
"valid": true
},
{
"description": "unicode matching is case-sensitive",
"data": { "L'ÉCOLE": "PAS DE VRAIE VIE" },
"valid": false
}
]
},
{
"description": "unicode characters do not match ascii ranges",
"schema": {
"type": "object",
"patternProperties": {
"[a-z]cole": true
},
"additionalProperties": false
},
"tests": [
{
"description": "literal unicode character in json string",
"data": { "l'école": "pas de vraie vie" },
"valid": false
},
{
"description": "unicode character in hex format in string",
"data": { "l'\u00e9cole": "pas de vraie vie" },
"valid": false
},
{
"description": "ascii characters match",
"data": { "l'ecole": "pas de vraie vie" },
"valid": true
}
]
},
{
"description": "unicode digits are more than 0 through 9",
"schema": {
"type": "object",
"patternProperties": {
"^\\d+$": true
},
"additionalProperties": false
},
"tests": [
{
"description": "ascii digits",
"data": { "42": "life, the universe, and everything" },
"valid": true
},
{
"description": "ascii non-digits",
"data": { "-%#": "spending the year dead for tax reasons" },
"valid": false
},
{
"description": "non-ascii digits (BENGALI DIGIT FOUR, BENGALI DIGIT TWO)",
"data": { "৪২": "khajit has wares if you have coin" },
"valid": true
}
]
}
]

View File

@ -0,0 +1,15 @@
[
{
"description": "some languages do not distinguish between different types of numeric value",
"schema": {
"type": "integer"
},
"tests": [
{
"description": "a float without fractional part is an integer",
"data": 1.0,
"valid": true
}
]
}
]
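
This short file documents the rule that a numeric value with a zero fractional part satisfies "type": "integer" even when it arrives as a float. A minimal sketch of such a check on top of nlohmann::json (hypothetical helper, not this library's implementation):

#include <cmath>
#include <nlohmann/json.hpp>

// 1 and 1.0 both pass; 1.5 does not.
bool integer_like(const nlohmann::json &v)
{
	if (v.is_number_integer())
		return true;
	return v.is_number_float() && std::trunc(v.get<double>()) == v.get<double>();
}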

View File

@ -141,11 +141,6 @@
"data": {"foo": 1, "bar": 2},
"valid": false
},
{
"description": "object with a property matching both true and false is invalid",
"data": {"foobar":1},
"valid": false
},
{
"description": "empty object is valid",
"data": {},

View File

@ -43,35 +43,6 @@
}
]
},
{
"description": "propertyNames validation with pattern",
"schema": {
"propertyNames": { "pattern": "^a+$" }
},
"tests": [
{
"description": "matching property names valid",
"data": {
"a": {},
"aa": {},
"aaa": {}
},
"valid": true
},
{
"description": "non-matching property name is invalid",
"data": {
"aaA": {}
},
"valid": false
},
{
"description": "object without properties is valid",
"data": {},
"valid": true
}
]
},
{
"description": "propertyNames with boolean schema true",
"schema": {"propertyNames": true},

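propertyNames applies its subschema to every key of the object, treated as a string, so combining it with "pattern" constrains the spelling of the keys themselves. A minimal sketch of that behaviour for the "^a+$" case above (hypothetical helper on top of nlohmann::json):

#include <regex>
#include <nlohmann/json.hpp>

// Every key must match the pattern; an object with no properties passes.
bool property_names_match(const nlohmann::json &obj, const std::regex &re)
{
	for (auto it = obj.cbegin(); it != obj.cend(); ++it)
		if (!std::regex_search(it.key(), re))
			return false;
	return true;
}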
View File

@ -75,15 +75,13 @@
{
"description": "escaped pointer ref",
"schema": {
"definitions": {
"tilde~field": {"type": "integer"},
"slash/field": {"type": "integer"},
"percent%field": {"type": "integer"}
},
"tilda~field": {"type": "integer"},
"slash/field": {"type": "integer"},
"percent%field": {"type": "integer"},
"properties": {
"tilde": {"$ref": "#/definitions/tilde~0field"},
"slash": {"$ref": "#/definitions/slash~1field"},
"percent": {"$ref": "#/definitions/percent%25field"}
"tilda": {"$ref": "#/tilda~0field"},
"slash": {"$ref": "#/slash~1field"},
"percent": {"$ref": "#/percent%25field"}
}
},
"tests": [
@ -93,8 +91,8 @@
"valid": false
},
{
"description": "tilde invalid",
"data": {"tilde": "aoeu"},
"description": "tilda invalid",
"data": {"tilda": "aoeu"},
"valid": false
},
{
@ -108,8 +106,8 @@
"valid": true
},
{
"description": "tilde valid",
"data": {"tilde": 123},
"description": "tilda valid",
"data": {"tilda": 123},
"valid": true
},
{
@ -127,7 +125,7 @@
"b": {"$ref": "#/definitions/a"},
"c": {"$ref": "#/definitions/b"}
},
"allOf": [{ "$ref": "#/definitions/c" }]
"$ref": "#/definitions/c"
},
"tests": [
{
@ -175,42 +173,6 @@
}
]
},
{
"description": "$ref prevents a sibling $id from changing the base uri",
"schema": {
"$id": "http://localhost:1234/sibling_id/base/",
"definitions": {
"foo": {
"$id": "http://localhost:1234/sibling_id/foo.json",
"minimum": 2
},
"base_foo": {
"$comment": "this canonical uri is http://localhost:1234/sibling_id/base/foo.json",
"$id": "foo.json",
"minimum": 5
}
},
"allOf": [
{
"$comment": "$ref resolves to http://localhost:1234/sibling_id/base/foo.json, not ttp://localhost:1234/sibling_id/foo.json",
"$id": "http://localhost:1234/sibling_id/",
"$ref": "foo.json"
}
]
},
"tests": [
{
"description": "$ref resolves to /definitions/foo, data validates",
"data": 10,
"valid": true
},
{
"description": "$ref resolves to /definitions/foo, data does not validate",
"data": 1,
"valid": false
}
]
},
{
"description": "remote ref, containing refs itself",
"schema": {"$ref": "http://json-schema.org/draft-07/schema#"},
@ -247,35 +209,10 @@
}
]
},
{
"description": "property named $ref, containing an actual $ref",
"schema": {
"properties": {
"$ref": {"$ref": "#/definitions/is-string"}
},
"definitions": {
"is-string": {
"type": "string"
}
}
},
"tests": [
{
"description": "property named $ref valid",
"data": {"$ref": "a"},
"valid": true
},
{
"description": "property named $ref invalid",
"data": {"$ref": 2},
"valid": false
}
]
},
{
"description": "$ref to boolean schema true",
"schema": {
"allOf": [{ "$ref": "#/definitions/bool" }],
"$ref": "#/definitions/bool",
"definitions": {
"bool": true
}
@ -291,7 +228,7 @@
{
"description": "$ref to boolean schema false",
"schema": {
"allOf": [{ "$ref": "#/definitions/bool" }],
"$ref": "#/definitions/bool",
"definitions": {
"bool": false
}
@ -334,7 +271,7 @@
"tests": [
{
"description": "valid tree",
"data": {
"data": {
"meta": "root",
"nodes": [
{
@ -363,7 +300,7 @@
},
{
"description": "invalid tree",
"data": {
"data": {
"meta": "root",
"nodes": [
{
@ -445,6 +382,32 @@
}
]
},
{
"description": "Location-independent identifier with absolute URI",
"schema": {
"allOf": [{
"$ref": "http://localhost:1234/bar#foo"
}],
"definitions": {
"A": {
"$id": "http://localhost:1234/bar#foo",
"type": "integer"
}
}
},
"tests": [
{
"data": 1,
"description": "match",
"valid": true
},
{
"data": "a",
"description": "mismatch",
"valid": false
}
]
},
{
"description": "Location-independent identifier with base URI change in subschema",
"schema": {
@ -476,137 +439,5 @@
"valid": false
}
]
},
{
"description": "naive replacement of $ref with its destination is not correct",
"schema": {
"definitions": {
"a_string": { "type": "string" }
},
"enum": [
{ "$ref": "#/definitions/a_string" }
]
},
"tests": [
{
"description": "do not evaluate the $ref inside the enum, matching any string",
"data": "this is a string",
"valid": false
},
{
"description": "do not evaluate the $ref inside the enum, definition exact match",
"data": { "type": "string" },
"valid": false
},
{
"description": "match the enum exactly",
"data": { "$ref": "#/definitions/a_string" },
"valid": true
}
]
},
{
"description": "refs with relative uris and defs",
"schema": {
"$id": "http://example.com/schema-relative-uri-defs1.json",
"properties": {
"foo": {
"$id": "schema-relative-uri-defs2.json",
"definitions": {
"inner": {
"properties": {
"bar": { "type": "string" }
}
}
},
"allOf": [ { "$ref": "#/definitions/inner" } ]
}
},
"allOf": [ { "$ref": "schema-relative-uri-defs2.json" } ]
},
"tests": [
{
"description": "invalid on inner field",
"data": {
"foo": {
"bar": 1
},
"bar": "a"
},
"valid": false
},
{
"description": "invalid on outer field",
"data": {
"foo": {
"bar": "a"
},
"bar": 1
},
"valid": false
},
{
"description": "valid on both fields",
"data": {
"foo": {
"bar": "a"
},
"bar": "a"
},
"valid": true
}
]
},
{
"description": "relative refs with absolute uris and defs",
"schema": {
"$id": "http://example.com/schema-refs-absolute-uris-defs1.json",
"properties": {
"foo": {
"$id": "http://example.com/schema-refs-absolute-uris-defs2.json",
"definitions": {
"inner": {
"properties": {
"bar": { "type": "string" }
}
}
},
"allOf": [ { "$ref": "#/definitions/inner" } ]
}
},
"allOf": [ { "$ref": "schema-refs-absolute-uris-defs2.json" } ]
},
"tests": [
{
"description": "invalid on inner field",
"data": {
"foo": {
"bar": 1
},
"bar": "a"
},
"valid": false
},
{
"description": "invalid on outer field",
"data": {
"foo": {
"bar": "a"
},
"bar": 1
},
"valid": false
},
{
"description": "valid on both fields",
"data": {
"foo": {
"bar": "a"
},
"bar": "a"
},
"valid": true
}
]
}
]
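
The escaped-pointer hunk above leans on RFC 6901 token escaping: "~1" decodes to "/" and "~0" to "~" (in that order), while "%25" is ordinary URI percent-encoding for "%" that is undone before the pointer is evaluated. A minimal sketch of the token unescaping step (hypothetical helper in C++):

#include <string>

// Decode one JSON Pointer reference token: "~1" -> "/" first, then "~0" -> "~".
std::string unescape_pointer_token(std::string token)
{
	for (std::size_t pos = 0; (pos = token.find("~1", pos)) != std::string::npos; ++pos)
		token.replace(pos, 2, "/");
	for (std::size_t pos = 0; (pos = token.find("~0", pos)) != std::string::npos; ++pos)
		token.replace(pos, 2, "~");
	return token;
}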

View File

@ -54,7 +54,7 @@
"schema": {
"$id": "http://localhost:1234/",
"items": {
"$id": "baseUriChange/",
"$id": "folder/",
"items": {"$ref": "folderInteger.json"}
}
},
@ -81,7 +81,7 @@
},
"definitions": {
"baz": {
"$id": "baseUriChangeFolder/",
"$id": "folder/",
"type": "array",
"items": {"$ref": "folderInteger.json"}
}
@ -110,7 +110,7 @@
},
"definitions": {
"baz": {
"$id": "baseUriChangeFolderInSubschema/",
"$id": "folder/",
"definitions": {
"bar": {
"type": "array",
@ -167,30 +167,5 @@
"valid": false
}
]
},
{
"description": "remote ref with ref to definitions",
"schema": {
"$id": "http://localhost:1234/schema-remote-ref-ref-defs1.json",
"allOf": [
{ "$ref": "ref-and-definitions.json" }
]
},
"tests": [
{
"description": "invalid",
"data": {
"bar": 1
},
"valid": false
},
{
"description": "valid",
"data": {
"bar": "a"
},
"valid": true
}
]
}
]

View File

@ -74,55 +74,15 @@
"data": [0, false],
"valid": true
},
{
"description": "[1] and [true] are unique",
"data": [[1], [true]],
"valid": true
},
{
"description": "[0] and [false] are unique",
"data": [[0], [false]],
"valid": true
},
{
"description": "nested [1] and [true] are unique",
"data": [[[1], "foo"], [[true], "foo"]],
"valid": true
},
{
"description": "nested [0] and [false] are unique",
"data": [[[0], "foo"], [[false], "foo"]],
"valid": true
},
{
"description": "unique heterogeneous types are valid",
"data": [{}, [1], true, null, 1, "{}"],
"data": [{}, [1], true, null, 1],
"valid": true
},
{
"description": "non-unique heterogeneous types are invalid",
"data": [{}, [1], true, null, {}, 1],
"valid": false
},
{
"description": "different objects are unique",
"data": [{"a": 1, "b": 2}, {"a": 2, "b": 1}],
"valid": true
},
{
"description": "objects are non-unique despite key order",
"data": [{"a": 1, "b": 2}, {"b": 2, "a": 1}],
"valid": false
},
{
"description": "{\"a\": false} and {\"a\": 0} are unique",
"data": [{"a": false}, {"a": 0}],
"valid": true
},
{
"description": "{\"a\": true} and {\"a\": 1} are unique",
"data": [{"a": true}, {"a": 1}],
"valid": true
}
]
},
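
The uniqueItems cases in this hunk hinge on type-aware, order-insensitive equality: 0 and false (or [0] and [false]) are distinct values, while two objects with the same members in a different key order are duplicates. nlohmann::json's operator== already behaves this way, so a naive pairwise comparison is enough as a sketch (hypothetical helper):

#include <iterator>
#include <nlohmann/json.hpp>

// Pairwise comparison using nlohmann::json equality, which distinguishes
// booleans from numbers and compares objects by content, not key order.
bool items_unique(const nlohmann::json &arr)
{
	for (auto i = arr.cbegin(); i != arr.cend(); ++i)
		for (auto j = std::next(i); j != arr.cend(); ++j)
			if (*i == *j)
				return false;
	return true;
}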

View File

@ -1,56 +0,0 @@
[
{
"description": "$id inside an unknown keyword is not a real identifier",
"comment": "the implementation must not be confused by an $id in locations we do not know how to parse",
"schema": {
"definitions": {
"id_in_unknown0": {
"not": {
"array_of_schemas": [
{
"$id": "https://localhost:1234/unknownKeyword/my_identifier.json",
"type": "null"
}
]
}
},
"real_id_in_schema": {
"$id": "https://localhost:1234/unknownKeyword/my_identifier.json",
"type": "string"
},
"id_in_unknown1": {
"not": {
"object_of_schemas": {
"foo": {
"$id": "https://localhost:1234/unknownKeyword/my_identifier.json",
"type": "integer"
}
}
}
}
},
"anyOf": [
{ "$ref": "#/definitions/id_in_unknown0" },
{ "$ref": "#/definitions/id_in_unknown1" },
{ "$ref": "https://localhost:1234/unknownKeyword/my_identifier.json" }
]
},
"tests": [
{
"description": "type matches second anyOf, which has a real schema in it",
"data": "a string",
"valid": true
},
{
"description": "type matches non-schema in first anyOf",
"data": null,
"valid": false
},
{
"description": "type matches non-schema in third anyOf",
"data": 1,
"valid": false
}
]
}
]

View File

@ -1,175 +0,0 @@
// bson-validate.cpp
#include <iostream>
#include <nlohmann/json-schema.hpp>
#include <nlohmann/json.hpp>
static int error_count = 0;
#define EXPECT_EQ(a, b) \
do { \
if (a != b) { \
std::cerr << "Failed: '" << a << "' != '" << b << "'\n"; \
error_count++; \
} \
} while (0)
#define EXPECT_THROW(foo) \
{ \
bool ok = false; \
try { \
foo; \
} catch (std::exception &) { \
ok = true; \
} \
if (ok == false) { \
error_count++; \
} \
}
using json = nlohmann::json;
using validator = nlohmann::json_schema::json_validator;
// check binary data validation
const json bson_schema = json::parse(R"(
{
"type": "object",
"properties": {
"standard_string": {
"type": "string"
},
"binary_data": {
"type": "string",
"contentEncoding": "binary"
}
},
"additionalProperties": false
}
)");
const json array_of_types = json::parse(R"(
{
"type": "object",
"properties": {
"something": {
"type": ["string", "number", "boolean"],
"contentEncoding": "binary"
}
}
}
)");
const json array_of_types_without_binary = json::parse(R"(
{
"type": "object",
"properties": {
"something": {
"type": ["string", "number", "boolean"]
}
}
}
)");
class store_ptr_err_handler : public nlohmann::json_schema::basic_error_handler
{
void error(const nlohmann::json::json_pointer &ptr, const json &, const std::string &message) override
{
nlohmann::json_schema::basic_error_handler::error(ptr, "", message);
std::cerr << "ERROR: '" << ptr << "' - '"
<< ""
<< "': " << message << "\n";
failed_pointers.push_back(ptr);
}
public:
std::vector<nlohmann::json::json_pointer> failed_pointers;
void reset() override
{
nlohmann::json_schema::basic_error_handler::reset();
failed_pointers.clear();
}
};
static void content(const std::string &contentEncoding, const std::string &contentMediaType, const json &instance)
{
std::cerr << "mediaType: '" << contentMediaType << "', encoding: '" << contentEncoding << "'\n";
if (contentEncoding == "binary") {
if (instance.type() != json::value_t::binary) {
throw std::invalid_argument{"expected binary data"};
}
} else {
if (instance.type() == json::value_t::binary) {
throw std::invalid_argument{"expected string, but get binary"};
}
}
}
int main()
{
validator val(nullptr, nullptr, content);
// create some bson doc
json::binary_t arr;
std::string as_binary = "hello world";
std::copy(as_binary.begin(), as_binary.end(), std::back_inserter(arr));
json binary = json::binary(arr);
store_ptr_err_handler err;
/////////////////////////////////////
val.set_root_schema(bson_schema);
// all right
val.validate({{"standard_string", "some string"}, {"binary_data", binary}}, err);
EXPECT_EQ(err.failed_pointers.size(), 0);
err.reset();
// invalid binary data
val.validate({{"binary_data", "string, but expect binary data"}}, err);
EXPECT_EQ(err.failed_pointers.size(), 1);
EXPECT_EQ(err.failed_pointers[0].to_string(), "/binary_data");
err.reset();
// also check that simple string not accept binary data
val.validate({{"standard_string", binary}, {"binary_data", binary}}, err);
EXPECT_EQ(err.failed_pointers.size(), 1);
EXPECT_EQ(err.failed_pointers[0].to_string(), "/standard_string");
err.reset();
/////////////////////////////////////
// check with array of types
// check simple types
val.set_root_schema(array_of_types);
val.validate({{"something", 1}}, err);
val.validate({{"something", false}}, err);
// TODO when we set `string` in array and set `contentEncoding` = "binary" - what it means? We expected string or binary?
// Or we expect only binary? Now if you set `contentEncoding` = "binary", then it means that you expect only binary data,
// not string
// val.validate({{"something", "string"}}, err); -> produce error about type
EXPECT_EQ(err.failed_pointers.size(), 0);
err.reset();
// check binary data
val.validate({{"something", binary}}, err);
EXPECT_EQ(err.failed_pointers.size(), 0);
err.reset();
/////////////////////////////////////
// and check that you can't set binary data if contentEncoding don't set
val.set_root_schema(array_of_types_without_binary);
val.validate({{"something", binary}}, err);
EXPECT_EQ(err.failed_pointers.size(), 1);
EXPECT_EQ(err.failed_pointers[0].to_string(), "/something");
err.reset();
// check that without content callback you get exception with schema with contentEncoding or contentMeditType
validator val_no_content;
EXPECT_THROW(val_no_content.set_root_schema(bson_schema));
return error_count;
}

View File

@ -0,0 +1,15 @@
# Configure install script
configure_file(test.sh.in
${CMAKE_CURRENT_BINARY_DIR}/test.sh @ONLY)
get_filename_component(TEST_NAME
${CMAKE_CURRENT_SOURCE_DIR}
NAME)
# this build test only works, if nlohmann-json was found via a cmake-package
if(TARGET nlohmann_json::nlohmann_json)
add_test(NAME Build::${TEST_NAME}
COMMAND ${CMAKE_CURRENT_BINARY_DIR}/test.sh
WORKING_DIRECTORY ${CMAKE_CURRENT_SOURCE_DIR})
endif()

View File

@ -0,0 +1,34 @@
# This is a simple project that tests using cmake to load the installed libraries
cmake_minimum_required(VERSION 3.2)
project(cmake_install_test LANGUAGES CXX)
set(PROJECT_VERSION 1.0.0)
# Find the nlohmann_json and the validator package
set(CMAKE_FIND_DEBUG_MODE ON)
find_package(nlohmann_json REQUIRED)
find_package(nlohmann_json_schema_validator REQUIRED)
# Add simple json-schema-validator-executable
add_executable(json-schema-validate ${CMAKE_CURRENT_SOURCE_DIR}/../../../app/json-schema-validate.cpp)
target_link_libraries(json-schema-validate nlohmann_json_schema_validator)
enable_testing()
# Add built-in tests function needed for issues
set(PIPE_IN_TEST_SCRIPT ${CMAKE_CURRENT_SOURCE_DIR}/../../test-pipe-in.sh)
function(add_test_simple_schema name schema instance)
add_test(
NAME ${name}
COMMAND ${PIPE_IN_TEST_SCRIPT}
$<TARGET_FILE:json-schema-validate>
${schema}
${instance}
WORKING_DIRECTORY ${CMAKE_CURRENT_SOURCE_DIR})
endfunction()
# Run tests for issues 9, 12, 27, 48, 54
foreach(NUMBER "9" "12" "27" "48" "54")
add_subdirectory("${CMAKE_CURRENT_SOURCE_DIR}/../../issue-${NUMBER}" "${CMAKE_CURRENT_BINARY_DIR}/issue-${NUMBER}" EXCLUDE_FROM_ALL)
endforeach()

55
test/cmake-install/test.sh.in Executable file
View File

@ -0,0 +1,55 @@
#!/bin/bash
## Configure, build, install, and test json-schema-validator with CMAKE
## This script is instantiated via configure_file() to run cmake the same the original build has been invoked.
set -xe
EXTRA_ARGS=$@
SRC_DIR=@PROJECT_SOURCE_DIR@
BUILD_DIR=@CMAKE_CURRENT_BINARY_DIR@/build-dir
INSTALL_DIR=@CMAKE_CURRENT_BINARY_DIR@/install-dir
NLOHMANN_JSON_DIR=@nlohmann_json_DIR@
TEST_SRC_DIR=@CMAKE_CURRENT_SOURCE_DIR@/project
cmake --version
# Clear out build directory
rm -rf ${BUILD_DIR}
# Create build-dir
mkdir -p ${BUILD_DIR}
cd ${BUILD_DIR}
# configure json-schema-validator
printf "\n-----------------------------------------------------------\n"
printf "Configuring, building, and installing json-schema-validator"
printf "\n-----------------------------------------------------------\n"
cmake \
-DCMAKE_INSTALL_PREFIX:PATH=${INSTALL_DIR} \
-Dnlohmann_json_DIR:PATH=${NLOHMANN_JSON_DIR} \
${EXTRA_ARGS} \
${SRC_DIR}
CPU_COUNT=$(nproc)
# Build and install json-schema-validator
cmake --build . -- -j${CPU_COUNT}
cmake --build . --target install -- -j${CPU_COUNT}
# Make sure build directory is empty
rm -rf ./*
# configure test project
printf "\n-----------------------------------------------------------\n"
printf "Configuring, building, and running test project"
printf "\n-----------------------------------------------------------\n"
cmake \
-Dnlohmann_json_DIR:PATH=${NLOHMANN_JSON_DIR} \
-Dnlohmann_json_schema_validator_DIR:PATH=${INSTALL_DIR}/lib/cmake/nlohmann_json_schema_validator \
-DVALIDATOR_INSTALL_DIR:PATH=${INSTALL_DIR} \
${EXTRA_ARGS} \
${TEST_SRC_DIR}
# Build test project and test
cmake --build .
ctest --output-on-failure

View File

@ -119,7 +119,7 @@ int main(void)
{"age", 42},
{"name", "John"},
{"phones", {0}},
{"post-code", 12345},
{"post-code", 12345},
},
err); // name must be a string
EXPECT_EQ(err.failed_pointers.size(), 1);

View File

@ -64,7 +64,7 @@ auto schema_draft = R"(
urn:uuid:ee564b8a-7a87-4125-8c96-e9f123d6766f
urn:uuid:ee564b8a-7a87-4125-8c96-e9f123d6766f#
http://example.com/root.json#/definitions/C
*/
*/
auto schema = R"(
{

View File

@ -1,321 +0,0 @@
#include "nlohmann/json-schema.hpp"
#include "nlohmann/json.hpp"
#include <iostream>
#include <regex>
#include <string>
#include <vector>
//==============================================================================
// Test macros
//==============================================================================
#define LOG_ERROR(LOG_ERROR__ARGS) \
std::cerr << __FILE__ << ":" << __LINE__ << ": " << LOG_ERROR__ARGS << std::endl
#define EXPECT_THROW_WITH_MESSAGE(EXPRESSION, MESSAGE) \
do { \
try { \
EXPRESSION; \
LOG_ERROR("Expected exception not thrown with matching regex: \"" << MESSAGE << "\""); \
++g_error_count; \
} catch (const std::exception &error) { \
const std::regex error_re{MESSAGE}; \
if (!std::regex_search(error.what(), error_re)) { \
LOG_ERROR("Expected exception with matching regex: \"" << MESSAGE << "\", but got this instead: " << error.what()); \
++g_error_count; \
} \
} \
} while (false)
#define ASSERT_OR_EXPECT_EQ(FIRST_THING, SECOND_THING, RETURN_IN_CASE_OF_ERROR) \
do { \
if ((FIRST_THING) != (SECOND_THING)) { \
LOG_ERROR("The two values of " << (FIRST_THING) << " (" #FIRST_THING << ") and " << (SECOND_THING) << " (" #SECOND_THING << ") should be equal"); \
if (RETURN_IN_CASE_OF_ERROR) { \
return; \
} \
} \
} while (false)
#define ASSERT_EQ(FIRST_THING, SECOND_THING) ASSERT_OR_EXPECT_EQ(FIRST_THING, SECOND_THING, true)
#define EXPECT_EQ(FIRST_THING, SECOND_THING) ASSERT_OR_EXPECT_EQ(FIRST_THING, SECOND_THING, true)
#define EXPECT_MATCH(STRING, REGEX) \
do { \
if (!std::regex_search((STRING), std::regex{(REGEX)})) { \
LOG_ERROR("String \"" << (STRING) << "\" doesn't match with regex: \"" << (REGEX) << "\""); \
++g_error_count; \
} \
} while (false)
namespace
{
//==============================================================================
// Test environment
//==============================================================================
int g_error_count = 0;
//==============================================================================
// The schema used for testing
//==============================================================================
const std::string g_schema_template = R"(
{
"properties": {
"first": {
"%COMBINATION_FIRST_LEVEL%": [
{
"properties": {
"second": {
"%COMBINATION_SECOND_LEVEL%": [
{
"minimum": 5,
"type": "integer"
},
{
"multipleOf": 2,
"type": "integer"
}
]
}
},
"type": "object"
},
{
"minimum": 20,
"type": "integer"
},
{
"minLength": 10,
"type": "string"
}
]
}
},
"type": "object"
}
)";
auto generateSchema(const std::string &first_combination, const std::string &second_combination) -> nlohmann::json
{
static const std::regex first_replace_re{"%COMBINATION_FIRST_LEVEL%"};
static const std::regex second_replace_re{"%COMBINATION_SECOND_LEVEL%"};
std::string intermediate = std::regex_replace(g_schema_template, first_replace_re, first_combination);
return nlohmann::json::parse(std::regex_replace(intermediate, second_replace_re, second_combination));
}
//==============================================================================
// Error handler to catch all the errors generated by the validator - also inside the combinations
//==============================================================================
class MyErrorHandler : public nlohmann::json_schema::error_handler
{
public:
struct ErrorEntry {
nlohmann::json::json_pointer ptr;
nlohmann::json intance;
std::string message;
};
using ErrorEntryList = std::vector<ErrorEntry>;
auto getErrors() const -> const ErrorEntryList &
{
return m_error_list;
}
private:
auto error(const nlohmann::json::json_pointer &ptr, const nlohmann::json &instance, const std::string &message) -> void override
{
m_error_list.push_back(ErrorEntry{ptr, instance, message});
}
ErrorEntryList m_error_list;
};
//==============================================================================
// Error string helpers
//==============================================================================
auto operator<<(std::string first, const std::string &second) -> std::string
{
first += ".*";
first += second;
return first;
}
auto rootError(const std::string &combination_type, std::size_t number_of_subschemas) -> std::string
{
return "no subschema has succeeded, but one of them is required to validate. Type: " + combination_type + ", number of failed subschemas: " + std::to_string(number_of_subschemas);
}
auto combinationError(const std::string &combination_type, std::size_t test_case_number) -> std::string
{
return "[combination: " + combination_type + " / case#" + std::to_string(test_case_number) + "]";
}
//==============================================================================
// Validator function - for simplicity
//==============================================================================
auto validate(const nlohmann::json &schema, const nlohmann::json &instance, nlohmann::json_schema::error_handler *error_handler = nullptr) -> void
{
nlohmann::json_schema::json_validator validator;
validator.set_root_schema(schema);
if (error_handler) {
validator.validate(instance, *error_handler);
} else {
validator.validate(instance);
}
}
//==============================================================================
// The test cases
//==============================================================================
auto simpleTest(const std::string &first_combination, const std::string &second_combination) -> void
{
const nlohmann::json schema = generateSchema(first_combination, second_combination);
EXPECT_THROW_WITH_MESSAGE(validate(schema, nlohmann::json{{"first", {{"second", 1}}}}), rootError(first_combination, 3));
if (second_combination == "oneOf") {
EXPECT_THROW_WITH_MESSAGE(validate(schema, nlohmann::json{{"first", {{"second", 8}}}}), rootError(first_combination, 3));
}
EXPECT_THROW_WITH_MESSAGE(validate(schema, nlohmann::json{{"first", 10}}), rootError(first_combination, 3));
EXPECT_THROW_WITH_MESSAGE(validate(schema, nlohmann::json{{"first", "short"}}), rootError(first_combination, 3));
}
auto verboseTest(const std::string &first_combination, const std::string &second_combination) -> void
{
const nlohmann::json schema = generateSchema(first_combination, second_combination);
{
MyErrorHandler error_handler;
validate(schema, nlohmann::json{{"first", {{"second", 1}}}}, &error_handler);
const MyErrorHandler::ErrorEntryList &error_list = error_handler.getErrors();
EXPECT_EQ(error_list.size(), 6);
EXPECT_EQ(error_list[0].ptr, nlohmann::json::json_pointer{"/first"});
EXPECT_MATCH(error_list[0].message, rootError(first_combination, 3));
EXPECT_EQ(error_list[1].ptr, nlohmann::json::json_pointer{"/first/second"});
EXPECT_MATCH(error_list[1].message, combinationError(first_combination, 0) << rootError(second_combination, 2));
EXPECT_EQ(error_list[2].ptr, nlohmann::json::json_pointer{"/first/second"});
EXPECT_MATCH(error_list[2].message, combinationError(first_combination, 0) << combinationError(second_combination, 0) << "instance is below minimum of 5");
EXPECT_EQ(error_list[3].ptr, nlohmann::json::json_pointer{"/first/second"});
EXPECT_MATCH(error_list[3].message, combinationError(first_combination, 0) << combinationError(second_combination, 1) << "instance is not a multiple of 2.0");
EXPECT_EQ(error_list[4].ptr, nlohmann::json::json_pointer{"/first"});
EXPECT_MATCH(error_list[4].message, combinationError(first_combination, 1) << "unexpected instance type");
EXPECT_EQ(error_list[5].ptr, nlohmann::json::json_pointer{"/first"});
EXPECT_MATCH(error_list[5].message, combinationError(first_combination, 2) << "unexpected instance type");
}
{
MyErrorHandler error_handler;
validate(schema, nlohmann::json{{"first", {{"second", "not-an-integer"}}}}, &error_handler);
const MyErrorHandler::ErrorEntryList &error_list = error_handler.getErrors();
EXPECT_EQ(error_list.size(), 6);
EXPECT_EQ(error_list[0].ptr, nlohmann::json::json_pointer{"/first"});
EXPECT_MATCH(error_list[0].message, rootError(first_combination, 3));
EXPECT_EQ(error_list[1].ptr, nlohmann::json::json_pointer{"/first/second"});
EXPECT_MATCH(error_list[1].message, combinationError(first_combination, 0) << rootError(second_combination, 2));
EXPECT_EQ(error_list[2].ptr, nlohmann::json::json_pointer{"/first/second"});
EXPECT_MATCH(error_list[2].message, combinationError(first_combination, 0) << combinationError(second_combination, 0) << "unexpected instance type");
EXPECT_EQ(error_list[3].ptr, nlohmann::json::json_pointer{"/first/second"});
EXPECT_MATCH(error_list[3].message, combinationError(first_combination, 0) << combinationError(second_combination, 1) << "unexpected instance type");
EXPECT_EQ(error_list[4].ptr, nlohmann::json::json_pointer{"/first"});
EXPECT_MATCH(error_list[4].message, combinationError(first_combination, 1) << "unexpected instance type");
EXPECT_EQ(error_list[5].ptr, nlohmann::json::json_pointer{"/first"});
EXPECT_MATCH(error_list[5].message, combinationError(first_combination, 2) << "unexpected instance type");
}
if (second_combination == "oneOf") {
MyErrorHandler error_handler;
validate(schema, nlohmann::json{{"first", {{"second", 8}}}}, &error_handler);
const MyErrorHandler::ErrorEntryList &error_list = error_handler.getErrors();
EXPECT_EQ(error_list.size(), 4);
EXPECT_EQ(error_list[0].ptr, nlohmann::json::json_pointer{"/first"});
EXPECT_MATCH(error_list[0].message, rootError(first_combination, 3));
EXPECT_EQ(error_list[1].ptr, nlohmann::json::json_pointer{"/first/second"});
EXPECT_MATCH(error_list[1].message, combinationError(first_combination, 0) << "more than one subschema has succeeded, but exactly one of them is required to validate");
EXPECT_EQ(error_list[2].ptr, nlohmann::json::json_pointer{"/first"});
EXPECT_MATCH(error_list[2].message, combinationError(first_combination, 1) << "unexpected instance type");
EXPECT_EQ(error_list[3].ptr, nlohmann::json::json_pointer{"/first"});
EXPECT_MATCH(error_list[3].message, combinationError(first_combination, 2) << "unexpected instance type");
}
{
MyErrorHandler error_handler;
validate(schema, nlohmann::json{{"first", 10}}, &error_handler);
const MyErrorHandler::ErrorEntryList &error_list = error_handler.getErrors();
EXPECT_EQ(error_list.size(), 4);
EXPECT_EQ(error_list[0].ptr, nlohmann::json::json_pointer{"/first"});
EXPECT_MATCH(error_list[0].message, rootError(first_combination, 3));
EXPECT_EQ(error_list[1].ptr, nlohmann::json::json_pointer{"/first"});
EXPECT_MATCH(error_list[1].message, combinationError(first_combination, 0) << "unexpected instance type");
EXPECT_EQ(error_list[2].ptr, nlohmann::json::json_pointer{"/first"});
EXPECT_MATCH(error_list[2].message, combinationError(first_combination, 1) << "instance is below minimum of 20");
EXPECT_EQ(error_list[3].ptr, nlohmann::json::json_pointer{"/first"});
EXPECT_MATCH(error_list[3].message, combinationError(first_combination, 2) << "unexpected instance type");
}
{
MyErrorHandler error_handler;
validate(schema, nlohmann::json{{"first", "short"}}, &error_handler);
const MyErrorHandler::ErrorEntryList &error_list = error_handler.getErrors();
EXPECT_EQ(error_list.size(), 4);
EXPECT_EQ(error_list[0].ptr, nlohmann::json::json_pointer{"/first"});
EXPECT_MATCH(error_list[0].message, rootError(first_combination, 3));
EXPECT_EQ(error_list[1].ptr, nlohmann::json::json_pointer{"/first"});
EXPECT_MATCH(error_list[1].message, combinationError(first_combination, 0) << "unexpected instance type");
EXPECT_EQ(error_list[2].ptr, nlohmann::json::json_pointer{"/first"});
EXPECT_MATCH(error_list[2].message, combinationError(first_combination, 1) << "unexpected instance type");
EXPECT_EQ(error_list[3].ptr, nlohmann::json::json_pointer{"/first"});
EXPECT_MATCH(error_list[3].message, combinationError(first_combination, 2) << "instance is too short as per minLength:10");
}
}
} // namespace
//==============================================================================
// MAIN - calling the test cases
//==============================================================================
auto main() -> int
{
simpleTest("anyOf", "anyOf");
simpleTest("anyOf", "oneOf");
simpleTest("oneOf", "anyOf");
simpleTest("oneOf", "oneOf");
verboseTest("anyOf", "anyOf");
verboseTest("anyOf", "oneOf");
verboseTest("oneOf", "anyOf");
verboseTest("oneOf", "oneOf");
return g_error_count;
}

View File

@ -1,69 +0,0 @@
// issue-00-format-error.cpp
#include "nlohmann/json-schema.hpp"
#include "nlohmann/json.hpp"
#include <iostream>
static int error_count = 0;
#define CHECK_THROW(x, msg) \
{ \
bool fail = false; \
try { \
x; \
} catch (std::exception &) { \
fail = true; \
} \
if (fail == false) { \
++error_count; \
std::cout << msg << std::endl; \
} \
}
#define CHECK_NO_THROW(x, msg) \
{ \
bool fail = false; \
std::string exception_error; \
try { \
x; \
} catch (std::exception & e) { \
fail = true; \
exception_error = e.what(); \
} \
if (fail == true) { \
++error_count; \
std::cout << msg << ": " << exception_error << std::endl; \
} \
}
using json = nlohmann::json;
using validator = nlohmann::json_schema::json_validator;
json schema_with_format = json::parse(R"(
{
"type": "object",
"properties": {
"str": {
"type": "string",
"format": "placeholder"
}
}
}
)");
int main()
{
// check that if we get validator without format checker we get error at schema loading
validator without_format_checker;
CHECK_THROW(without_format_checker.set_root_schema(schema_with_format), "validator without format checker must fail at schema loading");
// check that with format checker all works fine
validator with_format_checker{nullptr, [](const std::string &, const std::string &) {}};
CHECK_NO_THROW(with_format_checker.set_root_schema(schema_with_format), "schema must be succesed by validator with format checker");
CHECK_NO_THROW(with_format_checker.validate(json{{"str", "placeholder"}}), "validator must not throw while validation schema with format");
return error_count;
}

View File

@ -1,13 +0,0 @@
add_test_simple_schema(Issue::143-1
${CMAKE_CURRENT_SOURCE_DIR}/schema.json
${CMAKE_CURRENT_SOURCE_DIR}/instance-fail-1.json)
add_test_simple_schema(Issue::143-a
${CMAKE_CURRENT_SOURCE_DIR}/schema.json
${CMAKE_CURRENT_SOURCE_DIR}/instance-fail-a.json)
add_test_simple_schema(Issue::143-ok
${CMAKE_CURRENT_SOURCE_DIR}/schema.json
${CMAKE_CURRENT_SOURCE_DIR}/instance.json)
set_tests_properties(Issue::143-1 Issue::143-a
PROPERTIES
WILL_FAIL 1)

Some files were not shown because too many files have changed in this diff.