🐛 pull from upstream

opsocket 2023-01-30 18:49:22 -05:00
parent 882e43d1a2
commit 823ab6db18
15 changed files with 380 additions and 174 deletions

View File

@@ -6,12 +6,13 @@ on:
       - develop
       - master
       - release/*
+      - main
   pull_request:
 
 jobs:
   build_and_test:
     runs-on: ubuntu-latest
-    container: ghcr.io/nlohmann/json-ci:v1.0.0
+    container: ghcr.io/nlohmann/json-ci:v2.4.0
     steps:
       - run: echo "🎉 The job was automatically triggered by a ${{ github.event_name }} event."
       - run: echo "🐧 This job is now running on a ${{ runner.os }} server hosted by GitHub."
@@ -21,7 +22,7 @@ jobs:
         with:
           repository: nlohmann/json
           path: nlohmann-json
-          ref: release/3.10.2
+          ref: v3.11.2
       - name: Build and install nlohmann json
         run: |
           cd nlohmann-json
@@ -36,3 +37,47 @@ jobs:
         run: cmake --build build --target all -j$(nproc)
       - name: test
         run: cd build && ctest
+
+  build_and_test_min_version:
+    runs-on: ubuntu-latest
+    container: ghcr.io/nlohmann/json-ci:v2.4.0
+    steps:
+      - run: echo "🎉 The job was automatically triggered by a ${{ github.event_name }} event."
+      - run: echo "🐧 This job is now running on a ${{ runner.os }} server hosted by GitHub."
+      - run: echo "🔎 Branch name is ${{ github.ref }} and repository is ${{ github.repository }}."
+      - name: Clone nlohmann json
+        uses: actions/checkout@master
+        with:
+          repository: nlohmann/json
+          path: nlohmann-json
+          ref: v3.8.0
+      - name: Build and install nlohmann json
+        run: |
+          cd nlohmann-json
+          cmake -S . -B build
+          cmake --build build --target install -j$(nproc)
+          cd ..
+      - name: Clone json-schema-validator
+        uses: actions/checkout@v2
+      - name: cmake
+        run: cmake -S . -B build
+      - name: build
+        run: cmake --build build --target all -j$(nproc)
+      - name: test
+        run: cd build && ctest
+
+  build_conan:
+    runs-on: ubuntu-latest
+    container: ghcr.io/nlohmann/json-ci:v2.4.0
+    steps:
+      - run: echo "🎉 The job was automatically triggered by a ${{ github.event_name }} event."
+      - run: echo "🐧 This job is now running on a ${{ runner.os }} server hosted by GitHub."
+      - run: echo "🔎 Branch name is ${{ github.ref }} and repository is ${{ github.repository }}."
+      - name: Clone json-schema-validator
+        uses: actions/checkout@v3
+      - uses: actions/setup-python@v4
+        with:
+          python-version: '3.10'
+      - run: python -m pip install --upgrade conan
+      - run: conan config init
+      - run: conan profile update settings.compiler.libcxx=libstdc++11 default
+      - name: conan create package
+        run: conan create .

View File

@@ -1,7 +1,6 @@
 cmake_minimum_required(VERSION 3.2)
 
-option(JSON_VALIDATOR_BUILD_TESTS    "Build tests" ON)
-option(JSON_VALIDATOR_BUILD_EXAMPLES "Build examples" ON)
+option(JSON_VALIDATOR_INSTALL "Install target" ON)
 option(JSON_VALIDATOR_HUNTER "Enable Hunter package manager support" OFF)
 
 if(JSON_VALIDATOR_HUNTER)
@@ -16,7 +15,7 @@ endif()
 project(nlohmann_json_schema_validator
     LANGUAGES CXX)
 
-set(PROJECT_VERSION 2.1.1)
+set(PROJECT_VERSION 2.2.0)
 
 if(JSON_VALIDATOR_HUNTER)
     hunter_add_package(nlohmann_json)
@@ -24,6 +23,7 @@ endif()
 
 # the library
 add_library(nlohmann_json_schema_validator
+    src/smtp-address-validator.cpp
     src/json-schema-draft7.json.cpp
     src/json-uri.cpp
     src/json-validator.cpp
@@ -42,21 +42,16 @@ target_compile_features(nlohmann_json_schema_validator
 set_target_properties(nlohmann_json_schema_validator
     PROPERTIES
         VERSION ${PROJECT_VERSION}
-        SOVERSION 1)
+        SOVERSION 2)
 
 # disable tests and examples if project is not super project
 if(CMAKE_SOURCE_DIR STREQUAL PROJECT_SOURCE_DIR)
     # I am top-level project.
-    set(JSON_VALIDATOR_IS_TOP_LEVEL TRUE)
+    set(JSON_VALIDATOR_IS_TOP_LEVEL ON)
 endif()
 
-if(JSON_VALIDATOR_IS_TOP_LEVEL)
-    set(JSON_VALIDATOR_BUILD_TESTS ON)
-    set(JSON_VALIDATOR_BUILD_EXAMPLES ON)
-else()
-    set(JSON_VALIDATOR_BUILD_TESTS OFF)
-    set(JSON_VALIDATOR_BUILD_EXAMPLES OFF)
-endif()
+option(JSON_VALIDATOR_BUILD_TESTS    "Build tests"    ${JSON_VALIDATOR_IS_TOP_LEVEL})
+option(JSON_VALIDATOR_BUILD_EXAMPLES "Build examples" ${JSON_VALIDATOR_IS_TOP_LEVEL})
 
 if(NOT TARGET nlohmann_json::nlohmann_json)
     find_package(nlohmann_json REQUIRED)
@@ -95,6 +90,7 @@ if(CMAKE_CXX_COMPILER_ID STREQUAL "GNU")
     endif()
 endif()
 
+if(JSON_VALIDATOR_INSTALL)
 install(TARGETS nlohmann_json_schema_validator
         EXPORT ${PROJECT_NAME}Targets
         LIBRARY DESTINATION lib
@@ -103,6 +99,7 @@ install(TARGETS nlohmann_json_schema_validator
 
 install(FILES src/nlohmann/json-schema.hpp
     DESTINATION include/nlohmann)
+endif()
 
 if (JSON_VALIDATOR_BUILD_EXAMPLES)
     # simple nlohmann_json_schema_validator-executable
@@ -127,6 +124,7 @@ endif()
 
 # Set Up the Project Targets and Config Files for CMake
+if(JSON_VALIDATOR_INSTALL)
 
 # Set the install path to the cmake config files (Relative, so install works correctly under Hunter as well)
 set(INSTALL_CMAKE_DIR "lib/cmake/${PROJECT_NAME}")
 set(INSTALL_CMAKEDIR_ROOT share/cmake)
@@ -156,3 +154,4 @@ install(
     DESTINATION
         ${INSTALL_CMAKE_DIR}
 )
+endif()

View File

@@ -10,7 +10,7 @@ This is a C++ library for validating JSON documents based on a
 [draft-7 of JSON Schema Validation](http://json-schema.org/schema).
 
 First a disclaimer: *It is work in progress and
-contributions or hints or discussions are welcome.* Even though a 2.0.0 release is imminent.
+contributions or hints or discussions are welcome.*
 
 Niels Lohmann et al develop a great JSON parser for C++ called [JSON for Modern
 C++](https://github.com/nlohmann/json). This validator is based on this

View File

@@ -15,7 +15,6 @@ def get_version():
     except:
         return None
 
-
 class JsonSchemaValidatorConan(ConanFile):
     name = 'JsonSchemaValidator'
     version = get_version()
@@ -24,34 +23,49 @@ class JsonSchemaValidatorConan(ConanFile):
     settings = 'os', 'compiler', 'build_type', 'arch'
     options = {
         'shared': [True, False],
-        'fPIC': [True, False]
+        'fPIC': [True, False],
+        'build_examples': [True, False],
+        'build_tests': [True, False]
     }
     default_options = {
         'shared': False,
-        'fPIC': True
+        'fPIC': True,
+        'build_examples': True,
+        'build_tests': False
     }
-    generators = "cmake"
+    generators = "CMakeDeps"
     exports_sources = [
         'CMakeLists.txt',
         'nlohmann_json_schema_validatorConfig.cmake.in',
         'src/*',
         'app/*',
+        'test/*',
     ]
     requires = (
-        'nlohmann_json/3.7.3'
+        'nlohmann_json/3.11.2'
     )
 
+    _cmake = None
+
+    def _configure_cmake(self):
+        if self._cmake:
+            return self._cmake
+        self._cmake = CMake(self)
+        self._cmake.definitions['JSON_VALIDATOR_BUILD_EXAMPLES'] = self.options.build_examples
+        self._cmake.definitions['JSON_VALIDATOR_BUILD_TESTS'] = self.options.build_tests
+        self._cmake.configure()
+        return self._cmake
+
+    def layout(self):
+        build_type = str(self.settings.build_type).lower()
+        self.folders.build = "build-{}".format(build_type)
+
     def build(self):
-        cmake = CMake(self)
-        cmake.definitions['nlohmann_json_DIR'] = os.path.join(self.deps_cpp_info['nlohmann_json'].rootpath, 'include')
-        cmake.definitions['JSON_VALIDATOR_BUILD_EXAMPLES'] = True
-        cmake.definitions['JSON_VALIDATOR_BUILD_TESTS'] = False
+        cmake = self._configure_cmake()
         cmake.configure()
         cmake.build()
 
     def package(self):
-        cmake = CMake(self)
+        cmake = self._configure_cmake()
         cmake.install()
 
     def package_info(self):

View File

@@ -1,6 +1,7 @@
 @PACKAGE_INIT@
 
-find_package(nlohmann_json 3.8.0 REQUIRED)
+include(CMakeFindDependencyMacro)
+find_dependency(nlohmann_json)
 
 include("${CMAKE_CURRENT_LIST_DIR}/nlohmann_json_schema_validatorTargets.cmake")
 check_required_components(

View File

@@ -85,19 +85,19 @@ json_patch::json_patch(const json &patch)
 
 json_patch &json_patch::add(const json::json_pointer &ptr, json value)
 {
-	j_.push_back(json{{"op", "add"}, {"path", ptr}, {"value", std::move(value)}});
+	j_.push_back(json{{"op", "add"}, {"path", ptr.to_string()}, {"value", std::move(value)}});
 	return *this;
 }
 
 json_patch &json_patch::replace(const json::json_pointer &ptr, json value)
 {
-	j_.push_back(json{{"op", "replace"}, {"path", ptr}, {"value", std::move(value)}});
+	j_.push_back(json{{"op", "replace"}, {"path", ptr.to_string()}, {"value", std::move(value)}});
 	return *this;
 }
 
 json_patch &json_patch::remove(const json::json_pointer &ptr)
 {
-	j_.push_back(json{{"op", "remove"}, {"path", ptr}});
+	j_.push_back(json{{"op", "remove"}, {"path", ptr.to_string()}});
 	return *this;
 }
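The ptr.to_string() calls above go hand in hand with the move to nlohmann::json 3.11.x elsewhere in this commit: instead of relying on the implicit json_pointer-to-JSON conversion, each "path" member is now stored explicitly as a plain string. A minimal standalone sketch of the resulting patch operation, assuming only nlohmann::json (not part of this commit):

    #include <nlohmann/json.hpp>
    #include <iostream>

    int main()
    {
        using nlohmann::json;

        json::json_pointer ptr{"/name/first"};

        // Serialize the pointer explicitly, as the patched json_patch helpers do,
        // so the "path" member is an ordinary JSON string.
        json op = {{"op", "add"}, {"path", ptr.to_string()}, {"value", "John"}};

        std::cout << op.dump() << '\n'; // {"op":"add","path":"/name/first","value":"John"}
        return 0;
    }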

View File

@@ -14,6 +14,7 @@
 #include <memory>
 #include <set>
 #include <sstream>
+#include <string>
 
 using nlohmann::json;
 using nlohmann::json_patch;
@@ -40,6 +41,16 @@ protected:
 	root_schema *root_;
 	json default_value_ = nullptr;
 
+protected:
+	virtual std::shared_ptr<schema> make_for_default_(
+	    std::shared_ptr<::schema> & /* sch */,
+	    root_schema * /* root */,
+	    std::vector<nlohmann::json_uri> & /* uris */,
+	    nlohmann::json & /* default_value */) const
+	{
+		return nullptr;
+	};
+
 public:
 	virtual ~schema() = default;
@@ -92,6 +103,21 @@ class schema_ref : public schema
 		return default_value_;
 	}
 
+protected:
+	virtual std::shared_ptr<schema> make_for_default_(
+	    std::shared_ptr<::schema> &sch,
+	    root_schema *root,
+	    std::vector<nlohmann::json_uri> &uris,
+	    nlohmann::json &default_value) const override
+	{
+		// create a new reference schema using the original reference (which will be resolved later)
+		// to store this overloaded default value #209
+		auto result = std::make_shared<schema_ref>(uris[0].to_string(), root);
+		result->set_target(sch, true);
+		result->set_default_value(default_value);
+		return result;
+	};
+
 public:
 	schema_ref(const std::string &id, root_schema *root)
 	    : schema(root), id_(id) {}
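The new protected make_for_default_ hook is a virtual-factory refactor: instead of schema::make probing the concrete schema type with dynamic_cast (replaced in the last hunk of this file), each subclass decides for itself how to clone with an overridden default value. A rough, self-contained sketch of that pattern with hypothetical names, not the library's actual classes:

    #include <memory>
    #include <string>

    // Hypothetical illustration of the virtual-factory pattern used above.
    struct node {
        virtual ~node() = default;

        // The base class answers "I don't know how to clone myself with a default".
        virtual std::shared_ptr<node> make_for_default(const std::string & /*def*/) const
        {
            return nullptr;
        }
    };

    struct ref_node : node {
        std::string default_value;

        // Each subclass knows how to produce its own specialized copy.
        std::shared_ptr<node> make_for_default(const std::string &def) const override
        {
            auto copy = std::make_shared<ref_node>(*this);
            copy->default_value = def;
            return copy;
        }
    };

    void attach_default(std::shared_ptr<node> &n, const std::string &def)
    {
        // The caller no longer needs dynamic_cast: it simply asks the object itself.
        if (auto replacement = n->make_for_default(def))
            n = replacement;
    }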
@@ -179,7 +205,7 @@ public:
 		auto fragment = new_uri.pointer();
 
 		// is there a reference looking for this unknown-keyword, which is thus no longer a unknown keyword but a schema
-		auto unresolved = file.unresolved.find(fragment);
+		auto unresolved = file.unresolved.find(fragment.to_string());
 		if (unresolved != file.unresolved.end())
 			schema::make(value, this, {}, {{new_uri}});
 		else { // no, nothing ref'd it, keep for later
@@ -283,11 +309,31 @@ public:
 				break;
 			} while (1);
 
-		for (const auto &file : files_)
-			if (file.second.unresolved.size() != 0)
+		for (const auto &file : files_) {
+			if (file.second.unresolved.size() != 0) {
+				// Build a representation of the undefined
+				// references as a list of comma-separated strings.
+				auto n_urefs = file.second.unresolved.size();
+				std::string urefs = "[";
+
+				decltype(n_urefs) counter = 0;
+				for (const auto &p : file.second.unresolved) {
+					urefs += p.first;
+
+					if (counter != n_urefs - 1u) {
+						urefs += ", ";
+					}
+
+					++counter;
+				}
+
+				urefs += "]";
+
 				throw std::invalid_argument("after all files have been parsed, '" +
 				                            (file.first == "" ? "<root>" : file.first) +
-				                            "' has still undefined references.");
+				                            "' has still the following undefined references: " + urefs);
+			}
+		}
 	}
 
 	void validate(const json::json_pointer &ptr,
@@ -507,7 +553,22 @@ class type_schema : public schema
 					else_->validate(ptr, instance, patch, e);
 			}
 		}
+
+		if (instance.is_null()) {
+			patch.add(nlohmann::json::json_pointer{}, default_value_);
+		}
 	}
+
+protected:
+	virtual std::shared_ptr<schema> make_for_default_(
+	    std::shared_ptr<::schema> & /* sch */,
+	    root_schema * /* root */,
+	    std::vector<nlohmann::json_uri> & /* uris */,
+	    nlohmann::json &default_value) const override
+	{
+		auto result = std::make_shared<type_schema>(*this);
+		result->set_default_value(default_value);
+		return result;
+	};
 
 public:
 	type_schema(json &sch,
@@ -1076,6 +1137,11 @@ public:
 			propertyNames_ = schema::make(attr.value(), root, {"propertyNames"}, uris);
 			sch.erase(attr);
 		}
+
+		attr = sch.find("default");
+		if (attr != sch.end()) {
+			set_default_value(*attr);
+		}
 	}
 };
@@ -1289,16 +1355,8 @@ std::shared_ptr<schema> schema::make(json &schema,
 	attr = schema.find("default");
 	if (attr != schema.end()) {
 		// copy the referenced schema depending on the underlying type and modify the default value
-		if (dynamic_cast<schema_ref *>(sch.get())) {
-			// create a new reference schema use the original reference (which will be resolved later)
-			// to store this overloaed default value #209
-			auto overloaded_ref_sch = std::make_shared<schema_ref>(uris[0].to_string(), root);
-			overloaded_ref_sch->set_target(sch, true);
-			overloaded_ref_sch->set_default_value(attr.value());
-			sch = overloaded_ref_sch;
-		} else if (auto *type_sch = dynamic_cast<type_schema *>(sch.get())) {
-			sch = std::make_shared<type_schema>(*type_sch);
-			sch->set_default_value(attr.value());
+		if (auto new_sch = sch->make_for_default_(sch, root, uris, attr.value())) {
+			sch = new_sch;
 		}
 		schema.erase(attr);
 	}
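Together with the type_schema::validate change above, which emits default_value_ into the patch when the instance is null, a "default" declared at the root of a schema is now materialized as well (issue 243, covered by the new test further down). A hedged usage sketch of the library's default-value mechanism, following the pattern described in the project README; the schema and the expected output are illustrative assumptions:

    #include <nlohmann/json-schema.hpp>
    #include <iostream>

    using nlohmann::json;
    using nlohmann::json_schema::json_validator;

    int main()
    {
        // A schema carrying a default value at its root (no "type", so a null instance validates).
        const json schema = json::parse(R"({
            "properties": { "name": { "type": "string" } },
            "default": { "name": "unnamed" }
        })");

        json_validator validator;
        validator.set_root_schema(schema);

        json instance; // null instance, expected to pick up the root default

        // validate() returns a JSON Patch that fills in defaults; apply it to the instance.
        const json default_patch = validator.validate(instance);
        const json patched = instance.patch(default_patch);

        std::cout << patched.dump() << '\n'; // expected: {"name":"unnamed"}
        return 0;
    }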

View File

@@ -61,7 +61,7 @@ protected:
 	std::tuple<std::string, std::string, std::string, std::string, std::string> as_tuple() const
 	{
-		return std::make_tuple(urn_, scheme_, authority_, path_, identifier_ != "" ? identifier_ : pointer_);
+		return std::make_tuple(urn_, scheme_, authority_, path_, identifier_ != "" ? identifier_ : pointer_.to_string());
 	}
 
 public:
@@ -80,7 +80,7 @@ public:
 	std::string fragment() const
 	{
 		if (identifier_ == "")
-			return pointer_;
+			return pointer_.to_string();
 		else
 			return identifier_;
 	}

View File

@@ -1,5 +1,7 @@
 #include <nlohmann/json-schema.hpp>
 
+#include "smtp-address-validator.hpp"
+
 #include <algorithm>
 #include <exception>
 #include <iostream>
@@ -180,91 +182,151 @@ const std::string uuid{R"([0-9a-fA-F]{8}\-[0-9a-fA-F]{4}\-[0-9a-fA-F]{4}\-[0-9a-
 // from http://stackoverflow.com/questions/106179/regular-expression-to-match-dns-hostname-or-ip-address
 const std::string hostname{R"(^([a-zA-Z0-9]|[a-zA-Z0-9][a-zA-Z0-9\-]{0,61}[a-zA-Z0-9])(\.([a-zA-Z0-9]|[a-zA-Z0-9][a-zA-Z0-9\-]{0,61}[a-zA-Z0-9]))*$)"};
 
+bool is_ascii(std::string const &value)
+{
+	for (auto ch : value) {
+		if (ch & 0x80) {
+			return false;
+		}
+	}
+	return true;
+}
+
 /**
- * @see https://tools.ietf.org/html/rfc5322#section-4.1
- *
- * @verbatim
- * atom = [CFWS] 1*atext [CFWS]
- * word = atom / quoted-string
- * phrase = 1*word / obs-phrase
- * obs-FWS = 1*WSP *(CRLF 1*WSP)
- * FWS = ([*WSP CRLF] 1*WSP) / obs-FWS
- * ; Folding white space
- * ctext = %d33-39 / ; Printable US-ASCII
- *         %d42-91 / ; characters not including
- *         %d93-126 / ; "(", ")", or "\"
- *         obs-ctext
- * ccontent = ctext / quoted-pair / comment
- * comment = "(" *([FWS] ccontent) [FWS] ")"
- * CFWS = (1*([FWS] comment) [FWS]) / FWS
- * obs-local-part = word *("." word)
- * obs-domain = atom *("." atom)
- * obs-dtext = obs-NO-WS-CTL / quoted-pair
- * quoted-pair = ("\" (VCHAR / WSP)) / obs-qp
- * obs-NO-WS-CTL = %d1-8 / ; US-ASCII control
- *                 %d11 / ; characters that do not
- *                 %d12 / ; include the carriage
- *                 %d14-31 / ; return, line feed, and
- *                 %d127 ; white space characters
- * obs-ctext = obs-NO-WS-CTL
- * obs-qtext = obs-NO-WS-CTL
- * obs-utext = %d0 / obs-NO-WS-CTL / VCHAR
- * obs-qp = "\" (%d0 / obs-NO-WS-CTL / LF / CR)
- * obs-body = *((*LF *CR *((%d0 / text) *LF *CR)) / CRLF)
- * obs-unstruct = *((*LF *CR *(obs-utext *LF *CR)) / FWS)
- * obs-phrase = word *(word / "." / CFWS)
- * obs-phrase-list = [phrase / CFWS] *("," [phrase / CFWS])
- * qtext = %d33 / ; Printable US-ASCII
- *         %d35-91 / ; characters not including
- *         %d93-126 / ; "\" or the quote character
- *         obs-qtext
- * qcontent = qtext / quoted-pair
- * quoted-string = [CFWS]
- *                 DQUOTE *([FWS] qcontent) [FWS] DQUOTE
- *                 [CFWS]
- * atext = ALPHA / DIGIT / ; Printable US-ASCII
- *         "!" / "#" / ; characters not including
- *         "$" / "%" / ; specials. Used for atoms.
- *         "&" / "'" /
- *         "*" / "+" /
- *         "-" / "/" /
- *         "=" / "?" /
- *         "^" / "_" /
- *         "`" / "{" /
- *         "|" / "}" /
- *         "~"
- * dot-atom-text = 1*atext *("." 1*atext)
- * dot-atom = [CFWS] dot-atom-text [CFWS]
- * addr-spec = local-part "@" domain
- * local-part = dot-atom / quoted-string / obs-local-part
- * domain = dot-atom / domain-literal / obs-domain
- * domain-literal = [CFWS] "[" *([FWS] dtext) [FWS] "]" [CFWS]
- * dtext = %d33-90 / ; Printable US-ASCII
- *         %d94-126 / ; characters not including
- *         obs-dtext ; "[", "]", or "\"
- * @endverbatim
- * @todo Currently don't have a working tool for this larger ABNF to generate a regex.
- * Other options:
- *  - https://github.com/ldthomas/apg-6.3
- *  - https://github.com/akr/abnf
- *
- * The problematic thing are the allowed whitespaces (even newlines) in the email.
- * Ignoring those and starting with
- * @see https://stackoverflow.com/questions/13992403/regex-validation-of-email-addresses-according-to-rfc5321-rfc5322
- * and trying to divide up the complicated regex into understandable ABNF definitions from rfc5322 yields:
+ * @see
+ *
+ * @verbatim
+ * URI = scheme ":" hier-part [ "?" query ] [ "#" fragment ]
+ *
+ * hier-part = "//" authority path-abempty
+ *           / path-absolute
+ *           / path-rootless
+ *           / path-empty
+ *
+ * URI-reference = URI / relative-ref
+ *
+ * absolute-URI = scheme ":" hier-part [ "?" query ]
+ *
+ * relative-ref = relative-part [ "?" query ] [ "#" fragment ]
+ *
+ * relative-part = "//" authority path-abempty
+ *               / path-absolute
+ *               / path-noscheme
+ *               / path-empty
+ *
+ * scheme = ALPHA *( ALPHA / DIGIT / "+" / "-" / "." )
+ *
+ * authority = [ userinfo "@" ] host [ ":" port ]
+ * userinfo = *( unreserved / pct-encoded / sub-delims / ":" )
+ * host = IP-literal / IPv4address / reg-name
+ * port = *DIGIT
+ *
+ * IP-literal = "[" ( IPv6address / IPvFuture ) "]"
+ *
+ * IPvFuture = "v" 1*HEXDIG "." 1*( unreserved / sub-delims / ":" )
+ *
+ * IPv6address = 6( h16 ":" ) ls32
+ *             / "::" 5( h16 ":" ) ls32
+ *             / [ h16 ] "::" 4( h16 ":" ) ls32
+ *             / [ *1( h16 ":" ) h16 ] "::" 3( h16 ":" ) ls32
+ *             / [ *2( h16 ":" ) h16 ] "::" 2( h16 ":" ) ls32
+ *             / [ *3( h16 ":" ) h16 ] "::" h16 ":" ls32
+ *             / [ *4( h16 ":" ) h16 ] "::" ls32
+ *             / [ *5( h16 ":" ) h16 ] "::" h16
+ *             / [ *6( h16 ":" ) h16 ] "::"
+ *
+ * h16 = 1*4HEXDIG
+ * ls32 = ( h16 ":" h16 ) / IPv4address
+ * IPv4address = dec-octet "." dec-octet "." dec-octet "." dec-octet
+ * dec-octet = DIGIT ; 0-9
+ *           / %x31-39 DIGIT ; 10-99
+ *           / "1" 2DIGIT ; 100-199
+ *           / "2" %x30-34 DIGIT ; 200-249
+ *           / "25" %x30-35 ; 250-255
+ *
+ * reg-name = *( unreserved / pct-encoded / sub-delims )
+ *
+ * path = path-abempty ; begins with "/" or is empty
+ *      / path-absolute ; begins with "/" but not "//"
+ *      / path-noscheme ; begins with a non-colon segment
+ *      / path-rootless ; begins with a segment
+ *      / path-empty ; zero characters
+ *
+ * path-abempty = *( "/" segment )
+ * path-absolute = "/" [ segment-nz *( "/" segment ) ]
+ * path-noscheme = segment-nz-nc *( "/" segment )
+ * path-rootless = segment-nz *( "/" segment )
+ * path-empty = 0<pchar>
+ *
+ * segment = *pchar
+ * segment-nz = 1*pchar
+ * segment-nz-nc = 1*( unreserved / pct-encoded / sub-delims / "@" )
+ *               ; non-zero-length segment without any colon ":"
+ *
+ * pchar = unreserved / pct-encoded / sub-delims / ":" / "@"
+ *
+ * query = *( pchar / "/" / "?" )
+ *
+ * fragment = *( pchar / "/" / "?" )
+ *
+ * pct-encoded = "%" HEXDIG HEXDIG
+ *
+ * unreserved = ALPHA / DIGIT / "-" / "." / "_" / "~"
+ * reserved = gen-delims / sub-delims
+ * gen-delims = ":" / "/" / "?" / "#" / "[" / "]" / "@"
+ * sub-delims = "!" / "$" / "&" / "'" / "(" / ")"
+ *            / "*" / "+" / "," / ";" / "="
+ *
+ * @endverbatim
+ * @see adapted from: https://github.com/jhermsmeier/uri.regex/blob/master/uri.regex
+ *
 */
-const std::string obsnowsctl{R"([\x01-\x08\x0b\x0c\x0e-\x1f\x7f])"};
-const std::string obsqp{R"(\\[\x01-\x09\x0b\x0c\x0e-\x7f])"};
-const std::string qtext{R"((?:[\x21\x23-\x5b\x5d-\x7e]|)" + obsnowsctl + ")"};
-const std::string dtext{R"([\x01-\x08\x0b\x0c\x0e-\x1f\x21-\x5a\x53-\x7f])"};
-const std::string quotedString{R"("(?:)" + qtext + "|" + obsqp + R"()*")"};
-const std::string atext{R"([A-Za-z0-9!#$%&'*+/=?^_`{|}~-])"};
-const std::string domainLiteral{R"(\[(?:(?:)" + decOctet + R"()\.){3}(?:)" + decOctet + R"(|[A-Za-z0-9-]*[A-Za-z0-9]:(?:)" + dtext + "|" + obsqp + R"()+)\])"};
-const std::string dotAtom{"(?:" + atext + R"(+(?:\.)" + atext + "+)*)"};
-const std::string stackoverflowMagicPart{R"((?:[[:alnum:]](?:[[:alnum:]-]*[[:alnum:]])?\.)+)"
-                                         R"([[:alnum:]](?:[[:alnum:]-]*[[:alnum:]])?)"};
-const std::string email{"(?:" + dotAtom + "|" + quotedString + ")@(?:" + stackoverflowMagicPart + "|" + domainLiteral + ")"};
+void rfc3986_uri_check(const std::string &value)
+{
+	const static std::string scheme{R"(([A-Za-z][A-Za-z0-9+\-.]*):)"};
+	const static std::string hierPart{
+	    R"((?:(\/\/)(?:((?:[A-Za-z0-9\-._~!$&'()*+,;=:]|)"
+	    R"(%[0-9A-Fa-f]{2})*)@)?((?:\[(?:(?:(?:(?:[0-9A-Fa-f]{1,4}:){6}|)"
+	    R"(::(?:[0-9A-Fa-f]{1,4}:){5}|)"
+	    R"((?:[0-9A-Fa-f]{1,4})?::(?:[0-9A-Fa-f]{1,4}:){4}|)"
+	    R"((?:(?:[0-9A-Fa-f]{1,4}:){0,1}[0-9A-Fa-f]{1,4})?::(?:[0-9A-Fa-f]{1,4}:){3}|)"
+	    R"((?:(?:[0-9A-Fa-f]{1,4}:){0,2}[0-9A-Fa-f]{1,4})?::(?:[0-9A-Fa-f]{1,4}:){2}|)"
+	    R"((?:(?:[0-9A-Fa-f]{1,4}:){0,3}[0-9A-Fa-f]{1,4})?::[0-9A-Fa-f]{1,4}:|)"
+	    R"((?:(?:[0-9A-Fa-f]{1,4}:){0,4}[0-9A-Fa-f]{1,4})?::)(?:[0-9A-Fa-f]{1,4}:[0-9A-Fa-f]{1,4}|)"
+	    R"((?:(?:25[0-5]|2[0-4][0-9]|)"
+	    R"([01]?[0-9][0-9]?)\.){3}(?:25[0-5]|)"
+	    R"(2[0-4][0-9]|)"
+	    R"([01]?[0-9][0-9]?))|)"
+	    R"((?:(?:[0-9A-Fa-f]{1,4}:){0,5}[0-9A-Fa-f]{1,4})?::[0-9A-Fa-f]{1,4}|)"
+	    R"((?:(?:[0-9A-Fa-f]{1,4}:){0,6}[0-9A-Fa-f]{1,4})?::)|)"
+	    R"([Vv][0-9A-Fa-f]+\.[A-Za-z0-9\-._~!$&'()*+,;=:]+)\]|)"
+	    R"((?:(?:25[0-5]|)"
+	    R"(2[0-4][0-9]|)"
+	    R"([01]?[0-9][0-9]?)\.){3}(?:25[0-5]|)"
+	    R"(2[0-4][0-9]|)"
+	    R"([01]?[0-9][0-9]?)|)"
+	    R"((?:[A-Za-z0-9\-._~!$&'()*+,;=]|)"
+	    R"(%[0-9A-Fa-f]{2})*))(?::([0-9]*))?((?:\/(?:[A-Za-z0-9\-._~!$&'()*+,;=:@]|)"
+	    R"(%[0-9A-Fa-f]{2})*)*)|)"
+	    R"(\/((?:(?:[A-Za-z0-9\-._~!$&'()*+,;=:@]|)"
+	    R"(%[0-9A-Fa-f]{2})+(?:\/(?:[A-Za-z0-9\-._~!$&'()*+,;=:@]|)"
+	    R"(%[0-9A-Fa-f]{2})*)*)?)|)"
+	    R"(((?:[A-Za-z0-9\-._~!$&'()*+,;=:@]|)"
+	    R"(%[0-9A-Fa-f]{2})+(?:\/(?:[A-Za-z0-9\-._~!$&'()*+,;=:@]|)"
+	    R"(%[0-9A-Fa-f]{2})*)*)|))"};
+	const static std::string query{R"((?:\?((?:[A-Za-z0-9\-._~!$&'()*+,;=:@\/?]|%[0-9A-Fa-f]{2})*))?)"};
+	const static std::string fragment{
+	    R"((?:\#((?:[A-Za-z0-9\-._~!$&'()*+,;=:@\/?]|%[0-9A-Fa-f]{2})*))?)"};
+	const static std::string uriFormat{scheme + hierPart + query + fragment};
+	const static std::regex uriRegex{uriFormat};
+
+	if (!std::regex_match(value, uriRegex)) {
+		throw std::invalid_argument(value + " is not a URI string according to RFC 3986.");
+	}
+}
 
 } // namespace
 
 namespace nlohmann
@@ -286,10 +348,18 @@ void default_string_format_check(const std::string &format, const std::string &value)
 		rfc3339_date_check(value);
 	} else if (format == "time") {
 		rfc3339_time_check(value);
+	} else if (format == "uri") {
+		rfc3986_uri_check(value);
 	} else if (format == "email") {
-		static const std::regex emailRegex{email};
-		if (!std::regex_match(value, emailRegex)) {
-			throw std::invalid_argument(value + " is not a valid email according to RFC 5322.");
+		if (!is_ascii(value)) {
+			throw std::invalid_argument(value + " contains non-ASCII values, not RFC 5321 compliant.");
+		}
+		if (!is_address(&*value.begin(), &*value.end())) {
+			throw std::invalid_argument(value + " is not a valid email according to RFC 5321.");
+		}
+	} else if (format == "idn-email") {
+		if (!is_address(&*value.begin(), &*value.end())) {
+			throw std::invalid_argument(value + " is not a valid idn-email according to RFC 6531.");
 		}
 	} else if (format == "hostname") {
 		static const std::regex hostRegex{hostname};
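With "uri", "email", and "idn-email" now routed through rfc3986_uri_check and the bundled SMTP address validator, the dispatcher above can be exercised via the library's opt-in format checker. A hedged wiring sketch; the json_validator constructor and default_string_format_check are used here as documented in the project README, and the sample schema and document are assumptions:

    #include <nlohmann/json-schema.hpp>
    #include <iostream>

    using nlohmann::json;
    using nlohmann::json_schema::json_validator;

    int main()
    {
        const json schema = json::parse(R"({
            "type": "object",
            "properties": {
                "homepage": { "type": "string", "format": "uri" },
                "contact":  { "type": "string", "format": "email" }
            }
        })");

        // Format checking is opt-in: pass the bundled checker explicitly.
        json_validator validator(nullptr, nlohmann::json_schema::default_string_format_check);
        validator.set_root_schema(schema);

        try {
            validator.validate(json::parse(R"({"homepage": "https://example.com/", "contact": "jane@example.com"})"));
            std::cout << "document is valid\n";
        } catch (const std::exception &e) {
            std::cerr << "validation failed: " << e.what() << '\n';
        }
        return 0;
    }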

View File

@@ -73,3 +73,7 @@ add_test(NAME issue-149-entry-selection COMMAND issue-149-entry-selection)
 add_executable(issue-189-default-values issue-189-default-values.cpp)
 target_link_libraries(issue-189-default-values nlohmann_json_schema_validator)
 add_test(NAME issue-189-default-values COMMAND issue-189-default-values)
+
+add_executable(issue-243-root-default-values issue-243-root-default-values.cpp)
+target_link_libraries(issue-243-root-default-values nlohmann_json_schema_validator)
+add_test(NAME issue-243-root-default-values COMMAND issue-243-root-default-values)

View File

@@ -54,7 +54,6 @@ if(JSON_SCHEMA_TEST_SUITE_PATH)
         JSON-Suite::Optional::float-overflow
         JSON-Suite::Optional::ecmascript-regex
-        JSON-Suite::Optional::Format::idn-email
         JSON-Suite::Optional::Format::idn-hostname
         JSON-Suite::Optional::Format::iri-reference
         JSON-Suite::Optional::Format::iri
@@ -62,7 +61,6 @@ if(JSON_SCHEMA_TEST_SUITE_PATH)
         JSON-Suite::Optional::Format::relative-json-pointer
         JSON-Suite::Optional::Format::uri-reference
         JSON-Suite::Optional::Format::uri-template
-        JSON-Suite::Optional::Format::uri
         JSON-Suite::Optional::unicode
     PROPERTIES

View File

@@ -163,7 +163,7 @@ int main()
 	val.set_root_schema(array_of_types_without_binary);
 	val.validate({{"something", binary}}, err);
 	EXPECT_EQ(err.failed_pointers.size(), 1);
-	EXPECT_EQ(err.failed_pointers[0], "/something");
+	EXPECT_EQ(err.failed_pointers[0].to_string(), "/something");
 	err.reset();
 
 	// check that without content callback you get exception with schema with contentEncoding or contentMeditType

View File

@@ -8,7 +8,8 @@ int main(void)
 	try {
 		validator.set_root_schema(nlBase); // this line will log the caught exception
 	} catch (const std::exception &e) {
-		if (std::string("after all files have been parsed, '<root>' has still undefined references.") == e.what())
+		if (std::string("after all files have been parsed, '<root>' has still the following undefined references: [/unknown/keywords]") == e.what())
 			return EXIT_SUCCESS;
 	}
 	return EXIT_FAILURE;

View File

@@ -82,5 +82,21 @@ int main()
 	numberOfErrors += testStringFormat("ipv4", ipv4Checks);
 
+	const std::vector<std::pair<std::string, bool>> uriChecks{
+	    {"http://www.google.com/search?q=regular%20expression", true},
+	    {"http://www.google.com/", true},
+	    {"http://www.google.com/search?q=regular%20expression", true},
+	    {"www.google.com", false},
+	    {"http://www.google.comj", true},
+	    {"ldap://[2001:db8::7]/c=GB?objectClass?one", true},
+	    {"mailto:John.Doe@example.com", true},
+	    {"news:comp.infosystems.www.servers.unix", true},
+	    {"https://john.doe@www.example.com:123/forum/questions/?tag=networking&order=newest#top", true},
+	    {"tel:+1-816-555-1212", true},
+	    {"telnet://192.0.2.16:80/", true},
+	    {"urn:oasis:names:specification:docbook:dtd:xml:4.1.2", true}};
+	numberOfErrors += testStringFormat("uri", uriChecks);
+
 	return numberOfErrors;
 }

View File

@@ -75,11 +75,11 @@ static void pointer_plain_name(json_uri start,
 	a = a.derive("#foo/looks_like_json/poiner/but/isnt");
 	EXPECT_EQ(a, full + " # foo/looks_like_json/poiner/but/isnt");
 	EXPECT_EQ(a.identifier(), "foo/looks_like_json/poiner/but/isnt");
-	EXPECT_EQ(a.pointer(), "");
+	EXPECT_EQ(a.pointer().to_string(), "");
 
 	a = a.derive("#/looks_like_json/poiner/and/it/is");
 	EXPECT_EQ(a, full + " # /looks_like_json/poiner/and/it/is");
-	EXPECT_EQ(a.pointer(), "/looks_like_json/poiner/and/it/is");
+	EXPECT_EQ(a.pointer().to_string(), "/looks_like_json/poiner/and/it/is");
 	EXPECT_EQ(a.identifier(), "");
 }
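These assertions reflect that json_uri::pointer() now yields a json_pointer rather than a std::string, so string comparisons go through to_string(). A small hedged sketch of the fragment-versus-pointer distinction the test exercises; the json_uri constructor is assumed to accept a URI string, and derive()/pointer()/identifier() are used as in the test above:

    #include <nlohmann/json-schema.hpp>
    #include <iostream>

    int main()
    {
        // Assumed constructor, mirroring the usage in the test above.
        nlohmann::json_uri uri("http://example.com/schema.json");

        // A "#/..." fragment parses as a JSON pointer ...
        auto with_pointer = uri.derive("#/definitions/address");
        std::cout << with_pointer.pointer().to_string() << '\n'; // "/definitions/address"
        std::cout << with_pointer.identifier() << '\n';          // ""

        // ... while a plain-name fragment is kept as an identifier.
        auto with_name = uri.derive("#address");
        std::cout << with_name.identifier() << '\n';             // "address"
        std::cout << with_name.pointer().to_string() << '\n';    // ""
        return 0;
    }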