🐛 pull from upstream

This commit is contained in:
opsocket 2023-01-30 18:49:22 -05:00
parent 882e43d1a2
commit 823ab6db18
15 changed files with 380 additions and 174 deletions

View File

@ -6,12 +6,13 @@ on:
- develop
- master
- release/*
- main
pull_request:
jobs:
build_and_test:
runs-on: ubuntu-latest
container: ghcr.io/nlohmann/json-ci:v1.0.0
container: ghcr.io/nlohmann/json-ci:v2.4.0
steps:
- run: echo "🎉 The job was automatically triggered by a ${{ github.event_name }} event."
- run: echo "🐧 This job is now running on a ${{ runner.os }} server hosted by GitHub."
@ -21,7 +22,7 @@ jobs:
with:
repository: nlohmann/json
path: nlohmann-json
ref: release/3.10.2
ref: v3.11.2
- name: Build and install nlohmann json
run: |
cd nlohmann-json
@ -36,3 +37,47 @@ jobs:
run: cmake --build build --target all -j$(nproc)
- name: test
run: cd build && ctest
build_and_test_min_version:
runs-on: ubuntu-latest
container: ghcr.io/nlohmann/json-ci:v2.4.0
steps:
- run: echo "🎉 The job was automatically triggered by a ${{ github.event_name }} event."
- run: echo "🐧 This job is now running on a ${{ runner.os }} server hosted by GitHub."
- run: echo "🔎 Branch name is ${{ github.ref }} and repository is ${{ github.repository }}."
- name: Clone nlohmann json
uses: actions/checkout@master
with:
repository: nlohmann/json
path: nlohmann-json
ref: v3.8.0
- name: Build and install nlohmann json
run: |
cd nlohmann-json
cmake -S . -B build
cmake --build build --target install -j$(nproc)
cd ..
- name: Clone json-schema-validator
uses: actions/checkout@v2
- name: cmake
run: cmake -S . -B build
- name: build
run: cmake --build build --target all -j$(nproc)
- name: test
run: cd build && ctest
build_conan:
runs-on: ubuntu-latest
container: ghcr.io/nlohmann/json-ci:v2.4.0
steps:
- run: echo "🎉 The job was automatically triggered by a ${{ github.event_name }} event."
- run: echo "🐧 This job is now running on a ${{ runner.os }} server hosted by GitHub."
- run: echo "🔎 Branch name is ${{ github.ref }} and repository is ${{ github.repository }}."
- name: Clone json-schema-validator
uses: actions/checkout@v3
- uses: actions/setup-python@v4
with:
python-version: '3.10'
- run: python -m pip install --upgrade conan
- run: conan config init
- run: conan profile update settings.compiler.libcxx=libstdc++11 default
- name: conan create package
run: conan create .

View File

@ -1,7 +1,6 @@
cmake_minimum_required(VERSION 3.2)
option(JSON_VALIDATOR_BUILD_TESTS "Build tests" ON)
option(JSON_VALIDATOR_BUILD_EXAMPLES "Build examples" ON)
option(JSON_VALIDATOR_INSTALL "Install target" ON)
option(JSON_VALIDATOR_HUNTER "Enable Hunter package manager support" OFF)
if(JSON_VALIDATOR_HUNTER)
@ -16,7 +15,7 @@ endif()
project(nlohmann_json_schema_validator
LANGUAGES CXX)
set(PROJECT_VERSION 2.1.1)
set(PROJECT_VERSION 2.2.0)
if(JSON_VALIDATOR_HUNTER)
hunter_add_package(nlohmann_json)
@ -24,6 +23,7 @@ endif()
# the library
add_library(nlohmann_json_schema_validator
src/smtp-address-validator.cpp
src/json-schema-draft7.json.cpp
src/json-uri.cpp
src/json-validator.cpp
@ -42,21 +42,16 @@ target_compile_features(nlohmann_json_schema_validator
set_target_properties(nlohmann_json_schema_validator
PROPERTIES
VERSION ${PROJECT_VERSION}
SOVERSION 1)
SOVERSION 2)
# disable tests and examples if project is not super project
if(CMAKE_SOURCE_DIR STREQUAL PROJECT_SOURCE_DIR)
# I am top-level project.
set(JSON_VALIDATOR_IS_TOP_LEVEL TRUE)
set(JSON_VALIDATOR_IS_TOP_LEVEL ON)
endif()
if(JSON_VALIDATOR_IS_TOP_LEVEL)
set(JSON_VALIDATOR_BUILD_TESTS ON)
set(JSON_VALIDATOR_BUILD_EXAMPLES ON)
else()
set(JSON_VALIDATOR_BUILD_TESTS OFF)
set(JSON_VALIDATOR_BUILD_EXAMPLES OFF)
endif()
option(JSON_VALIDATOR_BUILD_TESTS "Build tests" ${JSON_VALIDATOR_IS_TOP_LEVEL})
option(JSON_VALIDATOR_BUILD_EXAMPLES "Build examples" ${JSON_VALIDATOR_IS_TOP_LEVEL})
if(NOT TARGET nlohmann_json::nlohmann_json)
find_package(nlohmann_json REQUIRED)
@ -95,14 +90,16 @@ if(CMAKE_CXX_COMPILER_ID STREQUAL "GNU")
endif()
endif()
install(TARGETS nlohmann_json_schema_validator
EXPORT ${PROJECT_NAME}Targets
LIBRARY DESTINATION lib
ARCHIVE DESTINATION lib
RUNTIME DESTINATION bin)
if(JSON_VALIDATOR_INSTALL)
install(TARGETS nlohmann_json_schema_validator
EXPORT ${PROJECT_NAME}Targets
LIBRARY DESTINATION lib
ARCHIVE DESTINATION lib
RUNTIME DESTINATION bin)
install(FILES src/nlohmann/json-schema.hpp
DESTINATION include/nlohmann)
install(FILES src/nlohmann/json-schema.hpp
DESTINATION include/nlohmann)
endif()
if (JSON_VALIDATOR_BUILD_EXAMPLES)
# simple nlohmann_json_schema_validator-executable
@ -127,32 +124,34 @@ endif()
# Set Up the Project Targets and Config Files for CMake
# Set the install path to the cmake config files (Relative, so install works correctly under Hunter as well)
set(INSTALL_CMAKE_DIR "lib/cmake/${PROJECT_NAME}")
set(INSTALL_CMAKEDIR_ROOT share/cmake)
if(JSON_VALIDATOR_INSTALL)
# Set the install path to the cmake config files (Relative, so install works correctly under Hunter as well)
set(INSTALL_CMAKE_DIR "lib/cmake/${PROJECT_NAME}")
set(INSTALL_CMAKEDIR_ROOT share/cmake)
# Install Targets
install(EXPORT ${PROJECT_NAME}Targets
FILE ${PROJECT_NAME}Targets.cmake
DESTINATION "${INSTALL_CMAKE_DIR}")
# Install Targets
install(EXPORT ${PROJECT_NAME}Targets
FILE ${PROJECT_NAME}Targets.cmake
DESTINATION "${INSTALL_CMAKE_DIR}")
include(CMakePackageConfigHelpers)
write_basic_package_version_file(
${CMAKE_CURRENT_BINARY_DIR}/${PROJECT_NAME}ConfigVersion.cmake
VERSION ${PROJECT_VERSION}
COMPATIBILITY SameMajorVersion
)
configure_package_config_file(
${PROJECT_SOURCE_DIR}/${PROJECT_NAME}Config.cmake.in
${CMAKE_CURRENT_BINARY_DIR}/${PROJECT_NAME}Config.cmake
INSTALL_DESTINATION ${INSTALL_CMAKEDIR_ROOT}/${PROJECT_NAME}
)
install(
FILES
${CMAKE_CURRENT_BINARY_DIR}/${PROJECT_NAME}Config.cmake
include(CMakePackageConfigHelpers)
write_basic_package_version_file(
${CMAKE_CURRENT_BINARY_DIR}/${PROJECT_NAME}ConfigVersion.cmake
DESTINATION
${INSTALL_CMAKE_DIR}
)
VERSION ${PROJECT_VERSION}
COMPATIBILITY SameMajorVersion
)
configure_package_config_file(
${PROJECT_SOURCE_DIR}/${PROJECT_NAME}Config.cmake.in
${CMAKE_CURRENT_BINARY_DIR}/${PROJECT_NAME}Config.cmake
INSTALL_DESTINATION ${INSTALL_CMAKEDIR_ROOT}/${PROJECT_NAME}
)
install(
FILES
${CMAKE_CURRENT_BINARY_DIR}/${PROJECT_NAME}Config.cmake
${CMAKE_CURRENT_BINARY_DIR}/${PROJECT_NAME}ConfigVersion.cmake
DESTINATION
${INSTALL_CMAKE_DIR}
)
endif()

View File

@ -10,7 +10,7 @@ This is a C++ library for validating JSON documents based on a
[draft-7 of JSON Schema Validation](http://json-schema.org/schema).
First a disclaimer: *It is work in progress and
contributions or hints or discussions are welcome.* Even though a 2.0.0 release is imminent.
contributions or hints or discussions are welcome.*
Niels Lohmann et al develop a great JSON parser for C++ called [JSON for Modern
C++](https://github.com/nlohmann/json). This validator is based on this

View File

@ -15,7 +15,6 @@ def get_version():
except:
return None
class JsonSchemaValidatorConan(ConanFile):
name = 'JsonSchemaValidator'
version = get_version()
@ -24,34 +23,49 @@ class JsonSchemaValidatorConan(ConanFile):
settings = 'os', 'compiler', 'build_type', 'arch'
options = {
'shared': [True, False],
'fPIC': [True, False]
'fPIC': [True, False],
'build_examples': [True, False],
'build_tests': [True, False]
}
default_options = {
'shared': False,
'fPIC': True
'fPIC': True,
'build_examples': True,
'build_tests': False
}
generators = "cmake"
generators = "CMakeDeps"
exports_sources = [
'CMakeLists.txt',
'nlohmann_json_schema_validatorConfig.cmake.in',
'src/*',
'app/*',
'test/*',
]
requires = (
'nlohmann_json/3.7.3'
'nlohmann_json/3.11.2'
)
_cmake = None
def _configure_cmake(self):
if self._cmake:
return self._cmake
self._cmake = CMake(self)
self._cmake.definitions['JSON_VALIDATOR_BUILD_EXAMPLES'] = self.options.build_examples
self._cmake.definitions['JSON_VALIDATOR_BUILD_TESTS'] = self.options.build_tests
self._cmake.configure()
return self._cmake
def layout(self):
build_type = str(self.settings.build_type).lower()
self.folders.build = "build-{}".format(build_type)
def build(self):
cmake = CMake(self)
cmake.definitions['nlohmann_json_DIR'] = os.path.join(self.deps_cpp_info['nlohmann_json'].rootpath, 'include')
cmake.definitions['JSON_VALIDATOR_BUILD_EXAMPLES'] = True
cmake.definitions['JSON_VALIDATOR_BUILD_TESTS'] = False
cmake = self._configure_cmake()
cmake.configure()
cmake.build()
def package(self):
cmake = CMake(self)
cmake = self._configure_cmake()
cmake.install()
def package_info(self):

View File

@ -1,6 +1,7 @@
@PACKAGE_INIT@
find_package(nlohmann_json 3.8.0 REQUIRED)
include(CMakeFindDependencyMacro)
find_dependency(nlohmann_json)
include("${CMAKE_CURRENT_LIST_DIR}/nlohmann_json_schema_validatorTargets.cmake")
check_required_components(

View File

@ -85,19 +85,19 @@ json_patch::json_patch(const json &patch)
json_patch &json_patch::add(const json::json_pointer &ptr, json value)
{
j_.push_back(json{{"op", "add"}, {"path", ptr}, {"value", std::move(value)}});
j_.push_back(json{{"op", "add"}, {"path", ptr.to_string()}, {"value", std::move(value)}});
return *this;
}
json_patch &json_patch::replace(const json::json_pointer &ptr, json value)
{
j_.push_back(json{{"op", "replace"}, {"path", ptr}, {"value", std::move(value)}});
j_.push_back(json{{"op", "replace"}, {"path", ptr.to_string()}, {"value", std::move(value)}});
return *this;
}
json_patch &json_patch::remove(const json::json_pointer &ptr)
{
j_.push_back(json{{"op", "remove"}, {"path", ptr}});
j_.push_back(json{{"op", "remove"}, {"path", ptr.to_string()}});
return *this;
}
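For context on the three hunks above: the json_pointer is now converted explicitly with to_string() before being stored in the patch operation, presumably because recent nlohmann::json releases deprecate the pointer's implicit string conversion. A minimal standalone sketch of the resulting patch entry (illustrative only, not part of this commit):

#include <nlohmann/json.hpp>

int main()
{
    using nlohmann::json;

    json::json_pointer ptr("/name/first");

    // to_string() yields "/name/first"; embedding the pointer object directly is
    // avoided since its implicit conversion to a string is deprecated upstream.
    json op = {{"op", "add"}, {"path", ptr.to_string()}, {"value", "John"}};

    json doc = {{"name", json::object()}};
    doc = doc.patch(json::array({op})); // doc becomes {"name":{"first":"John"}}
    return 0;
}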

View File

@ -14,6 +14,7 @@
#include <memory>
#include <set>
#include <sstream>
#include <string>
using nlohmann::json;
using nlohmann::json_patch;
@ -40,6 +41,16 @@ protected:
root_schema *root_;
json default_value_ = nullptr;
protected:
virtual std::shared_ptr<schema> make_for_default_(
std::shared_ptr<::schema> & /* sch */,
root_schema * /* root */,
std::vector<nlohmann::json_uri> & /* uris */,
nlohmann::json & /* default_value */) const
{
return nullptr;
};
public:
virtual ~schema() = default;
@ -92,6 +103,21 @@ class schema_ref : public schema
return default_value_;
}
protected:
virtual std::shared_ptr<schema> make_for_default_(
std::shared_ptr<::schema> &sch,
root_schema *root,
std::vector<nlohmann::json_uri> &uris,
nlohmann::json &default_value) const override
{
// create a new reference schema using the original reference (which will be resolved later)
// to store this overloaded default value #209
auto result = std::make_shared<schema_ref>(uris[0].to_string(), root);
result->set_target(sch, true);
result->set_default_value(default_value);
return result;
};
public:
schema_ref(const std::string &id, root_schema *root)
: schema(root), id_(id) {}
@ -179,7 +205,7 @@ public:
auto fragment = new_uri.pointer();
// is there a reference looking for this unknown keyword, which is thus no longer an unknown keyword but a schema
auto unresolved = file.unresolved.find(fragment);
auto unresolved = file.unresolved.find(fragment.to_string());
if (unresolved != file.unresolved.end())
schema::make(value, this, {}, {{new_uri}});
else { // no, nothing ref'd it, keep for later
@ -283,11 +309,31 @@ public:
break;
} while (1);
for (const auto &file : files_)
if (file.second.unresolved.size() != 0)
for (const auto &file : files_) {
if (file.second.unresolved.size() != 0) {
// Build a representation of the undefined
// references as a list of comma-separated strings.
auto n_urefs = file.second.unresolved.size();
std::string urefs = "[";
decltype(n_urefs) counter = 0;
for (const auto &p : file.second.unresolved) {
urefs += p.first;
if (counter != n_urefs - 1u) {
urefs += ", ";
}
++counter;
}
urefs += "]";
throw std::invalid_argument("after all files have been parsed, '" +
(file.first == "" ? "<root>" : file.first) +
"' has still undefined references.");
"' has still the following undefined references: " + urefs);
}
}
}
void validate(const json::json_pointer &ptr,
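The block above turns the formerly terse "undefined references" exception into one that lists every unresolved pointer, which the updated test further down compares against verbatim. A minimal sketch of triggering it (the schema is invented for illustration; the quoted message format comes from the code above):

#include <nlohmann/json-schema.hpp>
#include <iostream>

int main()
{
    // Nothing ever defines "#/unknown/keywords", so resolution fails after parsing.
    nlohmann::json schema = {{"$ref", "#/unknown/keywords"}};

    nlohmann::json_schema::json_validator validator;
    try {
        validator.set_root_schema(schema);
    } catch (const std::exception &e) {
        // e.g. "after all files have been parsed, '<root>' has still the
        // following undefined references: [/unknown/keywords]"
        std::cerr << e.what() << '\n';
    }
    return 0;
}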
@ -507,8 +553,23 @@ class type_schema : public schema
else_->validate(ptr, instance, patch, e);
}
}
if (instance.is_null()) {
patch.add(nlohmann::json::json_pointer{}, default_value_);
}
}
protected:
virtual std::shared_ptr<schema> make_for_default_(
std::shared_ptr<::schema> & /* sch */,
root_schema * /* root */,
std::vector<nlohmann::json_uri> & /* uris */,
nlohmann::json &default_value) const override
{
auto result = std::make_shared<type_schema>(*this);
result->set_default_value(default_value);
return result;
};
public:
type_schema(json &sch,
root_schema *root,
@ -892,8 +953,8 @@ class boolean : public schema
{
if (!true_) { // false schema
// empty array
//switch (instance.type()) {
//case json::value_t::array:
// switch (instance.type()) {
// case json::value_t::array:
// if (instance.size() != 0) // valid false-schema
// e.error(ptr, instance, "false-schema required empty array");
// return;
@ -1076,6 +1137,11 @@ public:
propertyNames_ = schema::make(attr.value(), root, {"propertyNames"}, uris);
sch.erase(attr);
}
attr = sch.find("default");
if (attr != sch.end()) {
set_default_value(*attr);
}
}
};
@ -1289,16 +1355,8 @@ std::shared_ptr<schema> schema::make(json &schema,
attr = schema.find("default");
if (attr != schema.end()) {
// copy the referenced schema depending on the underlying type and modify the default value
if (dynamic_cast<schema_ref *>(sch.get())) {
// create a new reference schema use the original reference (which will be resolved later)
// to store this overloaed default value #209
auto overloaded_ref_sch = std::make_shared<schema_ref>(uris[0].to_string(), root);
overloaded_ref_sch->set_target(sch, true);
overloaded_ref_sch->set_default_value(attr.value());
sch = overloaded_ref_sch;
} else if (auto *type_sch = dynamic_cast<type_schema *>(sch.get())) {
sch = std::make_shared<type_schema>(*type_sch);
sch->set_default_value(attr.value());
if (auto new_sch = sch->make_for_default_(sch, root, uris, attr.value())) {
sch = new_sch;
}
schema.erase(attr);
}
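Net effect of the hunk above: the dynamic_cast chain is replaced by a virtual make_for_default_() hook, so each schema kind (schema_ref, type_schema) clones itself when a sibling "default" keyword overrides it, and type_schema now also emits the default for a null root instance. A minimal usage sketch of the default-value mechanism this feeds into, assuming validate() returns a default-value patch as in the project README (the schema below is invented for illustration):

#include <nlohmann/json-schema.hpp>
#include <iostream>

int main()
{
    using nlohmann::json;

    json schema = json::parse(R"({
        "type": "object",
        "properties": {
            "enabled": { "type": "boolean", "default": true }
        }
    })");

    nlohmann::json_schema::json_validator validator;
    validator.set_root_schema(schema);

    json instance = json::object();
    const json default_patch = validator.validate(instance); // collects missing defaults
    instance = instance.patch(default_patch);

    std::cout << instance.dump() << '\n'; // {"enabled":true}
    return 0;
}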

View File

@ -61,7 +61,7 @@ protected:
std::tuple<std::string, std::string, std::string, std::string, std::string> as_tuple() const
{
return std::make_tuple(urn_, scheme_, authority_, path_, identifier_ != "" ? identifier_ : pointer_);
return std::make_tuple(urn_, scheme_, authority_, path_, identifier_ != "" ? identifier_ : pointer_.to_string());
}
public:
@ -80,7 +80,7 @@ public:
std::string fragment() const
{
if (identifier_ == "")
return pointer_;
return pointer_.to_string();
else
return identifier_;
}

View File

@ -1,5 +1,7 @@
#include <nlohmann/json-schema.hpp>
#include "smtp-address-validator.hpp"
#include <algorithm>
#include <exception>
#include <iostream>
@ -84,10 +86,10 @@ void rfc3339_time_check(const std::string &value)
}
/**
* @todo Could be made more exact by querying a leap second database and choosing the
* correct maximum in {58,59,60}. This current solution might match some invalid dates
* but it won't lead to false negatives. This only works if we know the full date, however
*/
* @todo Could be made more exact by querying a leap second database and choosing the
* correct maximum in {58,59,60}. This current solution might match some invalid dates
* but it won't lead to false negatives. This only works if we know the full date, however
*/
auto day_minutes = hour * 60 + minute - (offsetHour * 60 + offsetMinute);
if (day_minutes < 0)
@ -126,7 +128,7 @@ void rfc3339_time_check(const std::string &value)
* @endverbatim
* NOTE: Per [ABNF] and ISO8601, the "T" and "Z" characters in this
* syntax may alternatively be lower case "t" or "z" respectively.
*/
*/
void rfc3339_date_time_check(const std::string &value)
{
const static std::regex dateTimeRegex{R"(^([0-9]{4}\-[0-9]{2}\-[0-9]{2})[Tt]([0-9]{2}\:[0-9]{2}\:[0-9]{2}(?:\.[0-9]+)?(?:[Zz]|(?:\+|\-)[0-9]{2}\:[0-9]{2}))$)"};
@ -180,91 +182,151 @@ const std::string uuid{R"([0-9a-fA-F]{8}\-[0-9a-fA-F]{4}\-[0-9a-fA-F]{4}\-[0-9a-
// from http://stackoverflow.com/questions/106179/regular-expression-to-match-dns-hostname-or-ip-address
const std::string hostname{R"(^([a-zA-Z0-9]|[a-zA-Z0-9][a-zA-Z0-9\-]{0,61}[a-zA-Z0-9])(\.([a-zA-Z0-9]|[a-zA-Z0-9][a-zA-Z0-9\-]{0,61}[a-zA-Z0-9]))*$)"};
bool is_ascii(std::string const& value)
{
for (auto ch : value) {
if (ch & 0x80) {
return false;
}
}
return true;
}
/**
* @see https://tools.ietf.org/html/rfc5322#section-4.1
* @see
*
* @verbatim
* atom = [CFWS] 1*atext [CFWS]
* word = atom / quoted-string
* phrase = 1*word / obs-phrase
* obs-FWS = 1*WSP *(CRLF 1*WSP)
* FWS = ([*WSP CRLF] 1*WSP) / obs-FWS
* ; Folding white space
* ctext = %d33-39 / ; Printable US-ASCII
* %d42-91 / ; characters not including
* %d93-126 / ; "(", ")", or "\"
* obs-ctext
* ccontent = ctext / quoted-pair / comment
* comment = "(" *([FWS] ccontent) [FWS] ")"
* CFWS = (1*([FWS] comment) [FWS]) / FWS
* obs-local-part = word *("." word)
* obs-domain = atom *("." atom)
* obs-dtext = obs-NO-WS-CTL / quoted-pair
* quoted-pair = ("\" (VCHAR / WSP)) / obs-qp
* obs-NO-WS-CTL = %d1-8 / ; US-ASCII control
* %d11 / ; characters that do not
* %d12 / ; include the carriage
* %d14-31 / ; return, line feed, and
* %d127 ; white space characters
* obs-ctext = obs-NO-WS-CTL
* obs-qtext = obs-NO-WS-CTL
* obs-utext = %d0 / obs-NO-WS-CTL / VCHAR
* obs-qp = "\" (%d0 / obs-NO-WS-CTL / LF / CR)
* obs-body = *((*LF *CR *((%d0 / text) *LF *CR)) / CRLF)
* obs-unstruct = *((*LF *CR *(obs-utext *LF *CR)) / FWS)
* obs-phrase = word *(word / "." / CFWS)
* obs-phrase-list = [phrase / CFWS] *("," [phrase / CFWS])
* qtext = %d33 / ; Printable US-ASCII
* %d35-91 / ; characters not including
* %d93-126 / ; "\" or the quote character
* obs-qtext
* qcontent = qtext / quoted-pair
* quoted-string = [CFWS]
* DQUOTE *([FWS] qcontent) [FWS] DQUOTE
* [CFWS]
* atext = ALPHA / DIGIT / ; Printable US-ASCII
* "!" / "#" / ; characters not including
* "$" / "%" / ; specials. Used for atoms.
* "&" / "'" /
* "*" / "+" /
* "-" / "/" /
* "=" / "?" /
* "^" / "_" /
* "`" / "{" /
* "|" / "}" /
* "~"
* dot-atom-text = 1*atext *("." 1*atext)
* dot-atom = [CFWS] dot-atom-text [CFWS]
* addr-spec = local-part "@" domain
* local-part = dot-atom / quoted-string / obs-local-part
* domain = dot-atom / domain-literal / obs-domain
* domain-literal = [CFWS] "[" *([FWS] dtext) [FWS] "]" [CFWS]
* dtext = %d33-90 / ; Printable US-ASCII
* %d94-126 / ; characters not including
* obs-dtext ; "[", "]", or "\"
* @endverbatim
* @todo Currently don't have a working tool for this larger ABNF to generate a regex.
* Other options:
* - https://github.com/ldthomas/apg-6.3
* - https://github.com/akr/abnf
* URI = scheme ":" hier-part [ "?" query ] [ "#" fragment ]
*
* hier-part = "//" authority path-abempty
* / path-absolute
* / path-rootless
* / path-empty
*
* URI-reference = URI / relative-ref
*
* absolute-URI = scheme ":" hier-part [ "?" query ]
*
* relative-ref = relative-part [ "?" query ] [ "#" fragment ]
*
* relative-part = "//" authority path-abempty
* / path-absolute
* / path-noscheme
* / path-empty
*
* scheme = ALPHA *( ALPHA / DIGIT / "+" / "-" / "." )
*
* authority = [ userinfo "@" ] host [ ":" port ]
* userinfo = *( unreserved / pct-encoded / sub-delims / ":" )
* host = IP-literal / IPv4address / reg-name
* port = *DIGIT
*
* IP-literal = "[" ( IPv6address / IPvFuture ) "]"
*
* IPvFuture = "v" 1*HEXDIG "." 1*( unreserved / sub-delims / ":" )
*
* IPv6address = 6( h16 ":" ) ls32
* / "::" 5( h16 ":" ) ls32
* / [ h16 ] "::" 4( h16 ":" ) ls32
* / [ *1( h16 ":" ) h16 ] "::" 3( h16 ":" ) ls32
* / [ *2( h16 ":" ) h16 ] "::" 2( h16 ":" ) ls32
* / [ *3( h16 ":" ) h16 ] "::" h16 ":" ls32
* / [ *4( h16 ":" ) h16 ] "::" ls32
* / [ *5( h16 ":" ) h16 ] "::" h16
* / [ *6( h16 ":" ) h16 ] "::"
*
* h16 = 1*4HEXDIG
* ls32 = ( h16 ":" h16 ) / IPv4address
* IPv4address = dec-octet "." dec-octet "." dec-octet "." dec-octet
* dec-octet = DIGIT ; 0-9
* / %x31-39 DIGIT ; 10-99
* / "1" 2DIGIT ; 100-199
* / "2" %x30-34 DIGIT ; 200-249
* / "25" %x30-35 ; 250-255
*
* reg-name = *( unreserved / pct-encoded / sub-delims )
*
* path = path-abempty ; begins with "/" or is empty
* / path-absolute ; begins with "/" but not "//"
* / path-noscheme ; begins with a non-colon segment
* / path-rootless ; begins with a segment
* / path-empty ; zero characters
*
* path-abempty = *( "/" segment )
* path-absolute = "/" [ segment-nz *( "/" segment ) ]
* path-noscheme = segment-nz-nc *( "/" segment )
* path-rootless = segment-nz *( "/" segment )
* path-empty = 0<pchar>
*
* segment = *pchar
* segment-nz = 1*pchar
* segment-nz-nc = 1*( unreserved / pct-encoded / sub-delims / "@" )
* ; non-zero-length segment without any colon ":"
*
* pchar = unreserved / pct-encoded / sub-delims / ":" / "@"
*
* query = *( pchar / "/" / "?" )
*
* fragment = *( pchar / "/" / "?" )
*
* pct-encoded = "%" HEXDIG HEXDIG
*
* unreserved = ALPHA / DIGIT / "-" / "." / "_" / "~"
* reserved = gen-delims / sub-delims
* gen-delims = ":" / "/" / "?" / "#" / "[" / "]" / "@"
* sub-delims = "!" / "$" / "&" / "'" / "(" / ")"
* / "*" / "+" / "," / ";" / "="
*
* @endverbatim
* @see adapted from: https://github.com/jhermsmeier/uri.regex/blob/master/uri.regex
*
* The problematic thing is the allowed whitespace (even newlines) in the email.
* Ignoring those and starting with
* @see https://stackoverflow.com/questions/13992403/regex-validation-of-email-addresses-according-to-rfc5321-rfc5322
* and trying to divide up the complicated regex into understandable ABNF definitions from rfc5322 yields:
*/
const std::string obsnowsctl{R"([\x01-\x08\x0b\x0c\x0e-\x1f\x7f])"};
const std::string obsqp{R"(\\[\x01-\x09\x0b\x0c\x0e-\x7f])"};
const std::string qtext{R"((?:[\x21\x23-\x5b\x5d-\x7e]|)" + obsnowsctl + ")"};
const std::string dtext{R"([\x01-\x08\x0b\x0c\x0e-\x1f\x21-\x5a\x53-\x7f])"};
const std::string quotedString{R"("(?:)" + qtext + "|" + obsqp + R"()*")"};
const std::string atext{R"([A-Za-z0-9!#$%&'*+/=?^_`{|}~-])"};
const std::string domainLiteral{R"(\[(?:(?:)" + decOctet + R"()\.){3}(?:)" + decOctet + R"(|[A-Za-z0-9-]*[A-Za-z0-9]:(?:)" + dtext + "|" + obsqp + R"()+)\])"};
void rfc3986_uri_check(const std::string &value)
{
const static std::string scheme{R"(([A-Za-z][A-Za-z0-9+\-.]*):)"};
const static std::string hierPart{
R"((?:(\/\/)(?:((?:[A-Za-z0-9\-._~!$&'()*+,;=:]|)"
R"(%[0-9A-Fa-f]{2})*)@)?((?:\[(?:(?:(?:(?:[0-9A-Fa-f]{1,4}:){6}|)"
R"(::(?:[0-9A-Fa-f]{1,4}:){5}|)"
R"((?:[0-9A-Fa-f]{1,4})?::(?:[0-9A-Fa-f]{1,4}:){4}|)"
R"((?:(?:[0-9A-Fa-f]{1,4}:){0,1}[0-9A-Fa-f]{1,4})?::(?:[0-9A-Fa-f]{1,4}:){3}|)"
R"((?:(?:[0-9A-Fa-f]{1,4}:){0,2}[0-9A-Fa-f]{1,4})?::(?:[0-9A-Fa-f]{1,4}:){2}|)"
R"((?:(?:[0-9A-Fa-f]{1,4}:){0,3}[0-9A-Fa-f]{1,4})?::[0-9A-Fa-f]{1,4}:|)"
R"((?:(?:[0-9A-Fa-f]{1,4}:){0,4}[0-9A-Fa-f]{1,4})?::)(?:[0-9A-Fa-f]{1,4}:[0-9A-Fa-f]{1,4}|)"
R"((?:(?:25[0-5]|2[0-4][0-9]|)"
R"([01]?[0-9][0-9]?)\.){3}(?:25[0-5]|)"
R"(2[0-4][0-9]|)"
R"([01]?[0-9][0-9]?))|)"
R"((?:(?:[0-9A-Fa-f]{1,4}:){0,5}[0-9A-Fa-f]{1,4})?::[0-9A-Fa-f]{1,4}|)"
R"((?:(?:[0-9A-Fa-f]{1,4}:){0,6}[0-9A-Fa-f]{1,4})?::)|)"
R"([Vv][0-9A-Fa-f]+\.[A-Za-z0-9\-._~!$&'()*+,;=:]+)\]|)"
R"((?:(?:25[0-5]|)"
R"(2[0-4][0-9]|)"
R"([01]?[0-9][0-9]?)\.){3}(?:25[0-5]|)"
R"(2[0-4][0-9]|)"
R"([01]?[0-9][0-9]?)|)"
R"((?:[A-Za-z0-9\-._~!$&'()*+,;=]|)"
R"(%[0-9A-Fa-f]{2})*))(?::([0-9]*))?((?:\/(?:[A-Za-z0-9\-._~!$&'()*+,;=:@]|)"
R"(%[0-9A-Fa-f]{2})*)*)|)"
R"(\/((?:(?:[A-Za-z0-9\-._~!$&'()*+,;=:@]|)"
R"(%[0-9A-Fa-f]{2})+(?:\/(?:[A-Za-z0-9\-._~!$&'()*+,;=:@]|)"
R"(%[0-9A-Fa-f]{2})*)*)?)|)"
R"(((?:[A-Za-z0-9\-._~!$&'()*+,;=:@]|)"
R"(%[0-9A-Fa-f]{2})+(?:\/(?:[A-Za-z0-9\-._~!$&'()*+,;=:@]|)"
R"(%[0-9A-Fa-f]{2})*)*)|))"};
const static std::string query{R"((?:\?((?:[A-Za-z0-9\-._~!$&'()*+,;=:@\/?]|%[0-9A-Fa-f]{2})*))?)"};
const static std::string fragment{
R"((?:\#((?:[A-Za-z0-9\-._~!$&'()*+,;=:@\/?]|%[0-9A-Fa-f]{2})*))?)"};
const static std::string uriFormat{scheme + hierPart + query + fragment};
const static std::regex uriRegex{uriFormat};
if (!std::regex_match(value, uriRegex)) {
throw std::invalid_argument(value + " is not a URI string according to RFC 3986.");
}
}
const std::string dotAtom{"(?:" + atext + R"(+(?:\.)" + atext + "+)*)"};
const std::string stackoverflowMagicPart{R"((?:[[:alnum:]](?:[[:alnum:]-]*[[:alnum:]])?\.)+)"
R"([[:alnum:]](?:[[:alnum:]-]*[[:alnum:]])?)"};
const std::string email{"(?:" + dotAtom + "|" + quotedString + ")@(?:" + stackoverflowMagicPart + "|" + domainLiteral + ")"};
} // namespace
namespace nlohmann
@ -286,10 +348,18 @@ void default_string_format_check(const std::string &format, const std::string &v
rfc3339_date_check(value);
} else if (format == "time") {
rfc3339_time_check(value);
} else if (format == "uri") {
rfc3986_uri_check(value);
} else if (format == "email") {
static const std::regex emailRegex{email};
if (!std::regex_match(value, emailRegex)) {
throw std::invalid_argument(value + " is not a valid email according to RFC 5322.");
if (!is_ascii(value)) {
throw std::invalid_argument(value + " contains non-ASCII values, not RFC 5321 compliant.");
}
if (!is_address(&*value.begin(), &*value.end())) {
throw std::invalid_argument(value + " is not a valid email according to RFC 5321.");
}
} else if (format == "idn-email") {
if (!is_address(&*value.begin(), &*value.end())) {
throw std::invalid_argument(value + " is not a valid idn-email according to RFC 6531.");
}
} else if (format == "hostname") {
static const std::regex hostRegex{hostname};

View File

@ -73,3 +73,7 @@ add_test(NAME issue-149-entry-selection COMMAND issue-149-entry-selection)
add_executable(issue-189-default-values issue-189-default-values.cpp)
target_link_libraries(issue-189-default-values nlohmann_json_schema_validator)
add_test(NAME issue-189-default-values COMMAND issue-189-default-values)
add_executable(issue-243-root-default-values issue-243-root-default-values.cpp)
target_link_libraries(issue-243-root-default-values nlohmann_json_schema_validator)
add_test(NAME issue-243-root-default-values COMMAND issue-243-root-default-values)

View File

@ -54,7 +54,6 @@ if(JSON_SCHEMA_TEST_SUITE_PATH)
JSON-Suite::Optional::float-overflow
JSON-Suite::Optional::ecmascript-regex
JSON-Suite::Optional::Format::idn-email
JSON-Suite::Optional::Format::idn-hostname
JSON-Suite::Optional::Format::iri-reference
JSON-Suite::Optional::Format::iri
@ -62,7 +61,6 @@ if(JSON_SCHEMA_TEST_SUITE_PATH)
JSON-Suite::Optional::Format::relative-json-pointer
JSON-Suite::Optional::Format::uri-reference
JSON-Suite::Optional::Format::uri-template
JSON-Suite::Optional::Format::uri
JSON-Suite::Optional::unicode
PROPERTIES

View File

@ -163,7 +163,7 @@ int main()
val.set_root_schema(array_of_types_without_binary);
val.validate({{"something", binary}}, err);
EXPECT_EQ(err.failed_pointers.size(), 1);
EXPECT_EQ(err.failed_pointers[0], "/something");
EXPECT_EQ(err.failed_pointers[0].to_string(), "/something");
err.reset();
// check that without a content callback you get an exception for a schema with contentEncoding or contentMediaType
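The .to_string() in the expectation above reflects that the error handler now reports the failing location as a json::json_pointer rather than a plain string. A sketch of a collecting handler in the style of the project's README (the failed_pointers member is illustrative, not a library field), which can be passed as the second argument of validate() to gather all failures instead of throwing on the first:

#include <nlohmann/json-schema.hpp>
#include <vector>

class collecting_error_handler : public nlohmann::json_schema::basic_error_handler
{
public:
    std::vector<nlohmann::json::json_pointer> failed_pointers;

    void error(const nlohmann::json::json_pointer &ptr,
               const nlohmann::json &instance,
               const std::string &message) override
    {
        basic_error_handler::error(ptr, instance, message); // keep the "failed" flag behaviour
        failed_pointers.push_back(ptr);                      // later compared via to_string()
    }
};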

View File

@ -8,7 +8,8 @@ int main(void)
try {
validator.set_root_schema(nlBase); // this line will log the caught exception
} catch (const std::exception &e) {
if (std::string("after all files have been parsed, '<root>' has still undefined references.") == e.what())
if (std::string("after all files have been parsed, '<root>' has still the following undefined references: [/unknown/keywords]") == e.what())
return EXIT_SUCCESS;
}
return EXIT_FAILURE;

View File

@ -82,5 +82,21 @@ int main()
numberOfErrors += testStringFormat("ipv4", ipv4Checks);
const std::vector<std::pair<std::string, bool>> uriChecks{
{"http://www.google.com/search?q=regular%20expression", true},
{"http://www.google.com/", true},
{"http://www.google.com/search?q=regular%20expression", true},
{"www.google.com", false},
{"http://www.google.comj", true},
{"ldap://[2001:db8::7]/c=GB?objectClass?one", true},
{"mailto:John.Doe@example.com", true},
{"news:comp.infosystems.www.servers.unix", true},
{"https://john.doe@www.example.com:123/forum/questions/?tag=networking&order=newest#top", true},
{"tel:+1-816-555-1212", true},
{"telnet://192.0.2.16:80/", true},
{"urn:oasis:names:specification:docbook:dtd:xml:4.1.2", true}};
numberOfErrors += testStringFormat("uri", uriChecks);
return numberOfErrors;
}

View File

@ -75,11 +75,11 @@ static void pointer_plain_name(json_uri start,
a = a.derive("#foo/looks_like_json/poiner/but/isnt");
EXPECT_EQ(a, full + " # foo/looks_like_json/poiner/but/isnt");
EXPECT_EQ(a.identifier(), "foo/looks_like_json/poiner/but/isnt");
EXPECT_EQ(a.pointer(), "");
EXPECT_EQ(a.pointer().to_string(), "");
a = a.derive("#/looks_like_json/poiner/and/it/is");
EXPECT_EQ(a, full + " # /looks_like_json/poiner/and/it/is");
EXPECT_EQ(a.pointer(), "/looks_like_json/poiner/and/it/is");
EXPECT_EQ(a.pointer().to_string(), "/looks_like_json/poiner/and/it/is");
EXPECT_EQ(a.identifier(), "");
}