event-based parser policy pt2: tests

Author: Joao Paulo Magalhaes
Date: 2024-05-05 18:51:08 +02:00
parent 735ba65bba
commit ad7568f4d2
86 changed files with 31050 additions and 12422 deletions


@@ -27,24 +27,32 @@ jobs:
#----------------------------------------------------------------------------
coverage:
name: coverage/${{matrix.name}}
# if: github.ref == 'refs/heads/master'
name: coverage/c++${{matrix.std}}${{matrix.cmk}}
if: |
(!contains(github.event.head_commit.message, 'skip all')) ||
(!contains(github.event.head_commit.message, 'skip coverage')) ||
contains(github.event.head_commit.message, 'only coverage')
continue-on-error: true
if: always() # https://stackoverflow.com/questions/62045967/github-actions-is-there-a-way-to-continue-on-error-while-still-getting-correct
runs-on: ${{matrix.os}}
strategy:
fail-fast: false
matrix:
include:
- {name: c++11, std: 11, cxx: g++-9, cc: gcc-9, bt: Coverage, os: ubuntu-20.04}
- {name: c++17, std: 17, cxx: g++-9, cc: gcc-9, bt: Coverage, os: ubuntu-20.04}
#- {name: c++20, std: 20, cxx: g++-9, cc: gcc-9, bt: Coverage, os: ubuntu-20.04}
env: {
STD: "${{matrix.std}}", CXX_: "${{matrix.cxx}}", BT: "${{matrix.bt}}", BITLINKS: "${{matrix.bitlinks}}", VG: "${{matrix.vg}}", SAN: "${{matrix.san}}", LINT: "${{matrix.lint}}", OS: "${{matrix.os}}",
CODECOV_TOKEN: "${{secrets.CODECOV_TOKEN}}",
COVERALLS_REPO_TOKEN: "${{secrets.COVERALLS_REPO_TOKEN}}",
COVERALLS_PARALLEL: true,
}
- {std: 11, cxx: g++-9, bt: Coverage, os: ubuntu-20.04}
- {std: 17, cxx: g++-9, bt: Coverage, os: ubuntu-20.04}
# test also with the debug code enabled
- {std: 11, cxx: g++-9, bt: Coverage, os: ubuntu-20.04, cmk: "-DRYML_DBG=ON"}
- {std: 17, cxx: g++-9, bt: Coverage, os: ubuntu-20.04, cmk: "-DRYML_DBG=ON"}
env: {STD: "${{matrix.std}}", CXX_: "${{matrix.cxx}}", BT: "${{matrix.bt}}",
BITLINKS: "${{matrix.bitlinks}}", VG: "${{matrix.vg}}", SAN: "${{matrix.san}}",
LINT: "${{matrix.lint}}", OS: "${{matrix.os}}",
CMAKE_FLAGS: "${{matrix.cmk}}",
CODECOV_TOKEN: "${{secrets.CODECOV_TOKEN}}",
COVERALLS_REPO_TOKEN: "${{secrets.COVERALLS_REPO_TOKEN}}",
# coveralls disabled: https://github.com/lemurheavy/coveralls-public/issues/1665
# https://docs.coveralls.io/parallel-build-webhook
#COVERALLS_PARALLEL: true
}
steps:
- {name: checkout, uses: actions/checkout@v4, with: {submodules: recursive}}
- {name: install requirements, run: source .github/reqs.sh && c4_install_test_requirements $OS}
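The two new matrix rows build with -DRYML_DBG=ON (passed through CMAKE_FLAGS), so coverage also reaches the debug-only code paths. In the test sources those paths are guarded like this (pattern taken from the RYML_DBG blocks later in this diff):

    #ifdef RYML_DBG
    print_tree(t);   // extra diagnostics, compiled only when RYML_DBG is defined
    #endif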


@@ -59,7 +59,7 @@ jobs:
- {name: checkout, uses: actions/checkout@v4, with: {submodules: recursive}}
- name: setup emscripten cache
id: cache-system-libraries
uses: actions/cache@v3
uses: actions/cache@v4
with: {path: "${{env.EMSCRIPTEN_CACHE_FOLDER}}", key: "${{matrix.emver}}-${{runner.os}}"}
- name: setup emscripten
uses: mymindstorm/setup-emsdk@v11


@@ -128,6 +128,7 @@ jobs:
matrix:
config:
# name of the artifact | suffix (gen) | suffix (package) | cpack gen | mime type | os | cxx
- {name: Ubuntu 22.04 deb , sfxg: unix64.deb, sfxp: ubuntu-22.04.deb , gen: DEB , mime: vnd.debian.binary-package, os: ubuntu-22.04 }
- {name: Ubuntu 20.04 deb , sfxg: unix64.deb, sfxp: ubuntu-20.04.deb , gen: DEB , mime: vnd.debian.binary-package, os: ubuntu-20.04 }
#- {name: Ubuntu 18.04 deb , sfxg: unix64.deb, sfxp: ubuntu-18.04.deb , gen: DEB , mime: vnd.debian.binary-package, os: ubuntu-18.04 }
- {name: Windows VS2019 zip, sfxg: win64.zip , sfxp: windows-vs2019.zip , gen: ZIP , mime: zip , os: windows-2019, cxx: vs2019}


@@ -12,13 +12,15 @@ c4_target_compile_flags(ryml-test-quickstart PUBLIC GCC -Wno-useless-cast)
c4_add_test(ryml-test-quickstart)
if(CMAKE_CXX_COMPILER_ID STREQUAL GNU)
if(CMAKE_CXX_COMPILER_VERSION VERSION_GREATER_EQUAL 7.0.0)
set_property(SOURCE test_case.cpp APPEND PROPERTY COMPILE_OPTIONS -Wno-stringop-overflow)
endif()
endif()
c4_add_library(ryml-_testlib LIBRARY_TYPE STATIC
SOURCES test_case.hpp test_case.cpp callbacks_tester.hpp
SOURCES
test_lib/callbacks_tester.hpp
test_lib/test_case_node.hpp
test_lib/test_case_node.cpp
test_lib/test_case.hpp
test_lib/test_case.cpp
test_lib/test_engine.hpp
test_lib/test_engine.cpp
INC_DIRS ${CMAKE_CURRENT_LIST_DIR}
LIBS ryml c4fs gtest
FOLDER test)
@@ -30,38 +32,37 @@ endif()
function(ryml_add_test test_name)
set(t ryml-test-${test_name})
c4_add_executable(${t}
SOURCES test_${test_name}.cpp
LIBS ${ARGN} ryml-_testlib gtest_main
SOURCES test_${test_name}.cpp ${ARGN}
LIBS ryml-_testlib gtest_main
FOLDER test)
if(RYML_DBG)
target_compile_definitions(${t} PRIVATE RYML_DBG)
endif()
c4_add_test(${t})
endfunction()
c4_add_library(ryml-_testgroup LIBRARY_TYPE OBJECT
SOURCES test_group.cpp test_group.hpp
c4_add_library(ryml-_testgroup LIBRARY_TYPE STATIC
SOURCES
test_lib/test_group.hpp
test_lib/test_group.def.hpp
test_lib/test_group.cpp
LIBS ryml ryml-_testlib c4fs
FOLDER test)
function(ryml_add_test_case_group name)
ryml_add_test(${name} ryml-_testgroup)
endfunction()
function(ryml_get_target_exe target_name target_file)
if(CMAKE_CROSSCOMPILING)
set(tgt ${CMAKE_CROSSCOMPILING_EMULATOR} $<TARGET_FILE:${target_name}>)
else()
set(tgt $<TARGET_FILE:${target_name}>)
endif()
set(${target_file} ${tgt} PARENT_SCOPE)
ryml_add_test(${name})
target_link_libraries(ryml-test-${name} PUBLIC ryml-_testgroup)
endfunction()
ryml_add_test(parse_engine
test_suite/test_suite_event_handler.cpp
test_suite/test_suite_event_handler.hpp)
ryml_add_test(yaml_events
test_suite/test_suite_event_handler.cpp
test_suite/test_suite_event_handler.hpp)
ryml_add_test(callbacks)
ryml_add_test(stack)
ryml_add_test(filter)
ryml_add_test(parser)
ryml_add_test(node_type)
ryml_add_test(tree)
ryml_add_test(noderef)
ryml_add_test(emit)
@@ -72,46 +73,59 @@ ryml_add_test(json)
ryml_add_test(preprocess)
ryml_add_test(merge)
ryml_add_test(location)
ryml_add_test(yaml_events)
ryml_add_test_case_group(empty_file)
ryml_add_test_case_group(empty_map)
ryml_add_test_case_group(empty_seq)
ryml_add_test_case_group(empty_scalar)
ryml_add_test_case_group(simple_doc)
ryml_add_test_case_group(simple_map)
ryml_add_test_case_group(simple_seq)
ryml_add_test_case_group(simple_set)
ryml_add_test_case_group(single_quoted)
ryml_add_test_case_group(double_quoted)
ryml_add_test_case_group(plain_scalar)
ryml_add_test_case_group(block_literal)
ryml_add_test_case_group(block_folded)
ryml_add_test_case_group(doc)
ryml_add_test_case_group(seq)
ryml_add_test_case_group(seq_empty)
ryml_add_test_case_group(seq_generic)
ryml_add_test_case_group(map)
ryml_add_test_case_group(map_empty)
ryml_add_test_case_group(map_generic)
ryml_add_test_case_group(map_set)
ryml_add_test_case_group(seq_of_map)
ryml_add_test_case_group(map_of_seq)
ryml_add_test_case_group(scalar_empty)
ryml_add_test_case_group(scalar_null)
ryml_add_test_case_group(scalar_squoted)
ryml_add_test_case_group(scalar_dquoted)
ryml_add_test_case_group(scalar_literal)
ryml_add_test_case_group(scalar_folded)
ryml_add_test_case_group(scalar_plain)
ryml_add_test_case_group(tag_property)
ryml_add_test_case_group(explicit_key)
ryml_add_test_case_group(nested_mapx2)
ryml_add_test_case_group(nested_seqx2)
ryml_add_test_case_group(nested_mapx3)
ryml_add_test_case_group(nested_seqx3)
ryml_add_test_case_group(nested_mapx4)
ryml_add_test_case_group(nested_seqx4)
ryml_add_test_case_group(map_of_seq)
ryml_add_test_case_group(seq_of_map)
ryml_add_test_case_group(generic_map)
ryml_add_test_case_group(generic_seq)
ryml_add_test_case_group(map_nestedx2)
ryml_add_test_case_group(seq_nestedx2)
ryml_add_test_case_group(map_nestedx3)
ryml_add_test_case_group(seq_nestedx3)
ryml_add_test_case_group(map_nestedx4)
ryml_add_test_case_group(seq_nestedx4)
ryml_add_test_case_group(scalar_names)
ryml_add_test_case_group(simple_anchor)
ryml_add_test_case_group(anchor)
ryml_add_test_case_group(indentation)
ryml_add_test_case_group(number)
ryml_add_test_case_group(null_val)
ryml_add_test_case_group(github_issues)
if(WIN32 AND MSVC)
target_compile_options(ryml-test-parse_engine PRIVATE /bigobj)
endif()
#-------------------------------------------------------------------------
# test the tools as well
function(ryml_get_target_exe target_name target_file)
if(CMAKE_CROSSCOMPILING)
set(tgt ${CMAKE_CROSSCOMPILING_EMULATOR} $<TARGET_FILE:${target_name}>)
else()
set(tgt $<TARGET_FILE:${target_name}>)
endif()
set(${target_file} ${tgt} PARENT_SCOPE)
endfunction()
if(NOT EMSCRIPTEN)
option(RYML_TEST_TOOLS "Enable tests for the tools. Requires file system access." ON)
endif()
if(RYML_TEST_TOOLS)
if(NOT RYML_BUILD_TOOLS)
add_subdirectory(../tools tools)
@@ -133,19 +147,22 @@ if(RYML_TEST_TOOLS)
")
set("${fileout}" "${filename}" PARENT_SCOPE)
endfunction()
function(ryml_add_event_tool_test name expect_success contents)
function(ryml_add_event_tool_test name expect_success flags contents)
ryml_create_file(${name}.yml "${contents}" file)
add_test(NAME ryml-test-tool-events-${name} COMMAND ${RYML_TGT_EVENTS} ${file})
add_test(NAME ryml-test-tool-events-${name} COMMAND ${RYML_TGT_EVENTS} ${flags} ${file})
if(NOT expect_success)
set_tests_properties(ryml-test-tool-events-${name} PROPERTIES WILL_FAIL TRUE)
endif()
endfunction()
ryml_get_target_exe(ryml-yaml-events RYML_TGT_EVENTS)
ryml_add_event_tool_test(success TRUE "{foo: bar, baz: [exactly]}")
ryml_add_event_tool_test(fail_squo FALSE "foo: 'bar")
ryml_add_event_tool_test(fail_dquo FALSE "foo: \"bar")
ryml_add_event_tool_test(fail_seq1 FALSE "[ a, b, c ] ]")
ryml_add_event_tool_test(fail_seq2 FALSE "[ [a, b, c ]")
ryml_add_event_tool_test(success_tree TRUE "-t" "{foo: bar, baz: [exactly]}")
ryml_add_event_tool_test(success_evts TRUE "" "{{this: is, a: keymap}: [seq,val]}")
ryml_add_event_tool_test(fail_squo_tree FALSE "-t" "foo: 'bar")
ryml_add_event_tool_test(fail_squo_evts FALSE "" "foo: 'bar")
ryml_add_event_tool_test(fail_dquo_tree FALSE "-t" "foo: \"bar")
ryml_add_event_tool_test(fail_dquo_evts FALSE "" "foo: \"bar")
ryml_add_event_tool_test(fail_seq1_tree FALSE "-t" "[ a, b, c ] ]")
ryml_add_event_tool_test(fail_seq2_evts FALSE "" "[ [a, b, c ]")
endif()
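The event-tool tests now receive a flags argument, running the yaml-events tool either with -t or with no flag, which appear to select tree output versus event output; the fail_* inputs are expected to make the tool exit with an error (WILL_FAIL). The same malformed documents can also be checked in-process through the ExpectError helper declared further down in this diff; a minimal sketch, with a hypothetical test name and assuming the usual test_lib includes:

    TEST(events_tool_inputs, fail_squo_in_process)
    {
        std::string bad = "foo: 'bar";     // unterminated single-quoted scalar
        ExpectError::do_check([&]{
            Tree t = parse_in_arena(to_csubstr(bad));
            (void)t;                       // not reached: the parser must raise an error
        });
    }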
@@ -174,6 +191,8 @@ if(RYML_TEST_SUITE)
test_suite.cpp
test_suite/test_suite_common.hpp
test_suite/test_suite_events_emitter.cpp
test_suite/test_suite_event_handler.cpp
test_suite/test_suite_event_handler.hpp
test_suite/test_suite_events.cpp
test_suite/test_suite_events.hpp
test_suite/test_suite_parts.cpp
@@ -193,7 +212,8 @@ if(RYML_TEST_SUITE)
foreach(case_file ${case_files})
string(REPLACE "." "_" approach "${case_file}")
set(test_name ${case_name}-${approach})
#message("${test_name}: ${case_name}: ${case_dir} ${case_file}")
#message("${test_name}: ${case_name} ${case_dir} ${case_file}")
set(cmd_with_args ${tgt} "${test_name}" "${suite_dir}/${case_dir}" "${case_file}")
if("${case_file}" STREQUAL "===")
continue()
elseif("${case_file}" STREQUAL "test.event")
@@ -202,16 +222,24 @@ if(RYML_TEST_SUITE)
continue()
elseif("${case_file}" STREQUAL "error")
continue()
elseif("${case_file}" STREQUAL "in.yaml")
add_test(NAME ryml-test-suite-${test_name} COMMAND ${cmd_with_args} "--gtest_filter=-*events*:-*check_expected_error*")
add_test(NAME ryml-test-suite-${test_name}-events COMMAND ${cmd_with_args} "--gtest_filter=*events*")
elseif("${case_file}" STREQUAL "out.yaml")
add_test(NAME ryml-test-suite-${test_name} COMMAND ${cmd_with_args} "--gtest_filter=-*events*:-*check_expected_error*")
add_test(NAME ryml-test-suite-${test_name}-events COMMAND ${cmd_with_args} "--gtest_filter=-*ref_events*:*events*")
elseif("${case_file}" STREQUAL "emit.yaml")
add_test(NAME ryml-test-suite-${test_name} COMMAND ${cmd_with_args} "--gtest_filter=-*events*:-*check_expected_error*")
add_test(NAME ryml-test-suite-${test_name}-events COMMAND ${cmd_with_args} "--gtest_filter=-*ref_events*:*events*")
elseif("${case_file}" STREQUAL "in.json")
add_test(NAME ryml-test-suite-${test_name} COMMAND ${tgt} "${test_name}" "${suite_dir}/${case_dir}" in.json "--gtest_filter=-*events*:-*check_expected_error")
add_test(NAME ryml-test-suite-${test_name} COMMAND ${cmd_with_args} "--gtest_filter=-*events*:-*check_expected_error*")
else()
add_test(NAME ryml-test-suite-${test_name} COMMAND ${tgt} "${test_name}" "${suite_dir}/${case_dir}" "${case_file}" "--gtest_filter=-*events*:-*check_expected_error")
add_test(NAME ryml-test-suite-${test_name}-events COMMAND ${tgt} "${test_name}-events" "${suite_dir}/${case_dir}" "${case_file}" "--gtest_filter=*events*")
c4_err("unknown file: ${case_file}")
endif()
endforeach()
else()
set(test_name ${case_name}-error)
add_test(NAME ryml-test-suite-${test_name} COMMAND ${tgt} "${test_name}" "${suite_dir}/${case_dir}" in.yaml "--gtest_filter=*check_expected_error")
add_test(NAME ryml-test-suite-${test_name} COMMAND ${tgt} "${test_name}" "${suite_dir}/${case_dir}" in.yaml "--gtest_filter=*check_expected_error*:*check_expected_error*ref_events")
endif()
endfunction()

File diff suppressed because it is too large


@@ -7,7 +7,7 @@
#include <c4/yml/detail/print.hpp>
#endif
#include "./test_case.hpp"
#include "./test_lib/test_case.hpp"
#include <gtest/gtest.h>
@@ -180,7 +180,7 @@ TEST(general, numbers)
// github issue 29: https://github.com/biojppm/rapidyaml/issues/29
TEST(general, newlines_on_maps_nested_in_seqs)
{
const char yaml[] = R"(enemy:
std::string yaml = R"(enemy:
- actors:
- {name: Enemy_Bokoblin_Junior, value: 4.0}
- {name: Enemy_Bokoblin_Middle, value: 16.0}
@@ -188,19 +188,15 @@ TEST(general, newlines_on_maps_nested_in_seqs)
- {name: Enemy_Bokoblin_Dark, value: 48.0}
species: BokoblinSeries
)";
std::string expected = R"(enemy:
std::string expected = R"(enemy:
- actors:
- name: Enemy_Bokoblin_Junior
value: 4.0
- name: Enemy_Bokoblin_Middle
value: 16.0
- name: Enemy_Bokoblin_Senior
value: 32.0
- name: Enemy_Bokoblin_Dark
value: 48.0
- {name: Enemy_Bokoblin_Junior,value: 4.0}
- {name: Enemy_Bokoblin_Middle,value: 16.0}
- {name: Enemy_Bokoblin_Senior,value: 32.0}
- {name: Enemy_Bokoblin_Dark,value: 48.0}
species: BokoblinSeries
)";
Tree t = parse_in_arena(yaml);
Tree t = parse_in_arena(to_csubstr(yaml));
auto s = emitrs_yaml<std::string>(t);
EXPECT_EQ(expected, s);
}
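With the source now held in a std::string, the call site goes through to_csubstr. For reference, a minimal sketch of the two ryml parse entry points this distinguishes, written against the public API (parse_in_arena copies the immutable source into the tree's arena; parse_in_place parses a mutable buffer without copying); not part of this commit's changes:

    std::string src = "{foo: bar}";
    Tree a = parse_in_arena(to_csubstr(src)); // csubstr: read-only view, scalars copied into the arena
    Tree b = parse_in_place(to_substr(src));  // substr: mutable view, the tree references src directly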
@@ -283,6 +279,47 @@ TEST(general, github_issue_124)
}
}
TEST(general, _c4prc)
{
const char *ptr = "abcdefgh"; // as ptr!
csubstr buf = ptr;
EXPECT_EQ(buf.len, 8u);
for(const char c : buf)
{
SCOPED_TRACE(c);
EXPECT_EQ(_c4prc(c).len, 1);
EXPECT_EQ(_c4prc(c).str, &c);
}
ptr = "\n\t\0\r\f\b\v\a"; // as ptr!
buf = {ptr, 8u};
EXPECT_EQ(buf.len, 8u);
for(const char c : buf)
{
SCOPED_TRACE(c);
EXPECT_EQ(_c4prc(c).len, 2);
EXPECT_NE(_c4prc(c).str, &c);
}
}
#ifdef RYML_DBG
TEST(general, _c4presc)
{
const char buf_[] = {
'a','b','c','d','e','f','g','h','\n',
'\t','\0','\r','\f','\b','\v','\a','\x1b',
detail::_charconstant_t<-0x3e,0xc2u>::value, detail::_charconstant_t<-0x60,0xa0u>::value, // \_
detail::_charconstant_t<-0x3e,0xc2u>::value, detail::_charconstant_t<-0x7b,0x85u>::value, // \N
detail::_charconstant_t<-0x1e,0xe2u>::value, detail::_charconstant_t<-0x80,0x80u>::value, detail::_charconstant_t<-0x58,0xa8u>::value, // \L
detail::_charconstant_t<-0x1e,0xe2u>::value, detail::_charconstant_t<-0x80,0x80u>::value, detail::_charconstant_t<-0x57,0xa9u>::value, // \P
'a','b','c','d','e','f','g','h',
'a','b','c','d','e','f','g','h',
'a','b','c','d','e','f','g','h',
};
csubstr buf = buf_;
_c4presc(buf, false);
_c4presc(buf, true);
}
#endif
//-------------------------------------------

File diff suppressed because it is too large

File diff suppressed because it is too large


@@ -1,6 +1,7 @@
#include "./test_case.hpp"
#include "./test_lib/test_case.hpp"
#ifndef RYML_SINGLE_HEADER
#include "c4/yml/common.hpp"
#include "c4/dump.hpp"
#endif
#include <stdexcept>
#include <csetjmp>
@@ -235,6 +236,7 @@ TEST(Callbacks, cmp_user_data)
{
#ifndef C4_UBSAN
Callbacks before = get_callbacks();
before.m_user_data = (void*)1u;
Callbacks cp = before;
EXPECT_EQ(cp, before);
cp.m_user_data = (void*)(((char*)before.m_user_data) + 100u); // ubsan: runtime error: applying non-zero offset 100 to null pointer
@@ -391,6 +393,79 @@ TEST(RYML_ASSERT, basic)
}
//-----------------------------------------------------------------------------
struct Dumper
{
char errmsg[RYML_ERRMSG_SIZE] = {0};
detail::_SubstrWriter writer{errmsg};
void operator()(csubstr s)
{
writer.append(s);
}
};
TEST(_parse_dump, small_args)
{
const std::string str(/*count*/RYML_LOGBUF_SIZE-1, 's');
const csubstr fmt = "smaller={}";
const std::string expected = formatrs<std::string>(fmt, str);
{
Dumper dumper;
char writebuf[RYML_LOGBUF_SIZE];
c4::DumpResults results = c4::format_dump_resume(dumper, writebuf, fmt, str);
EXPECT_EQ(results.bufsize, str.size());
EXPECT_EQ(dumper.writer.curr(), to_csubstr(expected));
}
{
Dumper dumper;
detail::_dump(dumper, fmt, str);
EXPECT_EQ(dumper.writer.curr(), to_csubstr(expected));
}
}
TEST(_parse_dump, large_args)
{
const std::string str(/*count*/RYML_LOGBUF_SIZE+1, 'l');
const csubstr fmt = "larger={}";
{
Dumper dumper;
char writebuf[RYML_LOGBUF_SIZE];
c4::DumpResults results = c4::format_dump_resume(dumper, writebuf, fmt, str);
const csubstr expected = "larger=";
EXPECT_EQ(results.bufsize, str.size());
EXPECT_EQ(dumper.writer.curr(), expected);
}
{
Dumper dumper;
detail::_dump(dumper, fmt, str);
const std::string expected = formatrs<std::string>(fmt, str);
EXPECT_EQ(dumper.writer.curr(), to_csubstr(expected));
}
}
TEST(_parse_dump, unprintable_args)
{
const std::string str(/*count*/RYML_LOGBUF_SIZE_MAX+1, 'u');
const csubstr fmt = "unprintable={}";
const csubstr expected = "unprintable=";
{
Dumper dumper;
char writebuf[RYML_LOGBUF_SIZE];
c4::DumpResults results = c4::format_dump_resume(dumper, writebuf, fmt, str);
EXPECT_EQ(results.bufsize, str.size());
EXPECT_EQ(dumper.writer.curr(), expected);
}
{
Dumper dumper;
detail::_dump(dumper, fmt, str);
size_t unprintable_size = (size_t)(RYML_LOGBUF_SIZE_MAX+1);
const std::string zeros(/*count*/unprintable_size, '\0');
EXPECT_EQ(to_csubstr(zeros).len, unprintable_size);
EXPECT_EQ(dumper.writer.pos, expected.size());
EXPECT_EQ(dumper.writer.curr(), expected);
}
}
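// sketch (not from this commit): one way a caller could react to DumpResults
// reporting that the scratch buffer was too small for the largest argument.
// Assumption: calling format_dump_resume again from scratch with a buffer of
// at least results.bufsize bytes yields the complete output.
inline std::string dump_with_retry(csubstr fmt, csubstr arg)
{
    std::vector<char> buf(RYML_LOGBUF_SIZE);
    {
        Dumper dumper;
        c4::DumpResults results = c4::format_dump_resume(dumper, c4::substr(buf.data(), buf.size()), fmt, arg);
        if(results.bufsize <= buf.size())
            return std::string(dumper.writer.curr().str, dumper.writer.curr().len);
        buf.resize(results.bufsize); // grow to the reported size and retry
    }
    Dumper dumper; // fresh dumper: the first pass wrote only a partial result
    c4::format_dump_resume(dumper, c4::substr(buf.data(), buf.size()), fmt, arg);
    return std::string(dumper.writer.curr().str, dumper.writer.curr().len);
}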
// FIXME this is here merely to avoid a linker error
Case const* get_case(csubstr)
{


@@ -1,545 +0,0 @@
#ifndef _TEST_CASE_HPP_
#define _TEST_CASE_HPP_
#ifdef RYML_SINGLE_HEADER
#include <ryml_all.hpp>
#else
#include "c4/std/vector.hpp"
#include "c4/std/string.hpp"
#include "c4/format.hpp"
#include <c4/yml/yml.hpp>
#include <c4/yml/detail/parser_dbg.hpp>
#endif
#include <gtest/gtest.h>
#include <functional>
// no pragma push for these warnings! they will be suppressed in the
// files including this header (most test files)
#ifdef __clang__
# pragma clang diagnostic ignored "-Wold-style-cast"
#elif defined(__GNUC__)
# pragma GCC diagnostic ignored "-Wold-style-cast"
#endif
#ifdef __clang__
# pragma clang diagnostic push
#elif defined(__GNUC__)
# pragma GCC diagnostic push
# pragma GCC diagnostic ignored "-Wtype-limits"
#elif defined(_MSC_VER)
# pragma warning(push)
# pragma warning(disable: 4296/*expression is always 'boolean_value'*/)
# pragma warning(disable: 4389/*'==': signed/unsigned mismatch*/)
# if C4_MSVC_VERSION != C4_MSVC_VERSION_2017
# pragma warning(disable: 4800/*'int': forcing value to bool 'true' or 'false' (performance warning)*/)
# endif
#endif
#ifdef RYML_DBG
# include <c4/yml/detail/print.hpp>
#endif
namespace c4 {
inline void PrintTo(substr s, ::std::ostream* os) { os->write(s.str, (std::streamsize)s.len); }
inline void PrintTo(csubstr s, ::std::ostream* os) { os->write(s.str, (std::streamsize)s.len); }
namespace yml {
inline void PrintTo(NodeType ty, ::std::ostream* os)
{
*os << ty.type_str();
}
inline void PrintTo(NodeType_e ty, ::std::ostream* os)
{
*os << NodeType::type_str(ty);
}
inline void PrintTo(Callbacks const& cb, ::std::ostream* os)
{
#ifdef __GNUC__
#define RYML_GNUC_EXTENSION __extension__
#else
#define RYML_GNUC_EXTENSION
#endif
*os << '{'
<< "userdata." << (void*)cb.m_user_data << ','
<< "allocate." << RYML_GNUC_EXTENSION (void*)cb.m_allocate << ','
<< "free." << RYML_GNUC_EXTENSION (void*)cb.m_free << ','
<< "error." << RYML_GNUC_EXTENSION (void*)cb.m_error << '}';
#undef RYML_GNUC_EXTENSION
}
struct Case;
struct CaseNode;
struct CaseData;
Case const* get_case(csubstr name);
CaseData* get_data(csubstr name);
void test_compare(Tree const& actual, Tree const& expected);
void test_compare(Tree const& actual, size_t node_actual,
Tree const& expected, size_t node_expected,
size_t level=0);
void test_arena_not_shared(Tree const& a, Tree const& b);
void test_invariants(Tree const& t);
void test_invariants(ConstNodeRef const& n);
void print_node(CaseNode const& t, int level=0);
void print_tree(CaseNode const& p, int level=0);
void print_path(ConstNodeRef const& p);
template<class CheckFn>
void test_check_emit_check(Tree const& t, CheckFn check_fn)
{
#ifdef RYML_DBG
print_tree(t);
#endif
{
SCOPED_TRACE("original yaml");
test_invariants(t);
check_fn(t);
}
auto emit_and_parse = [&check_fn](Tree const& tp, const char* identifier){
SCOPED_TRACE(identifier);
std::string emitted = emitrs_yaml<std::string>(tp);
#ifdef RYML_DBG
printf("~~~%s~~~\n%.*s", identifier, (int)emitted.size(), emitted.data());
#endif
Tree cp = parse_in_arena(to_csubstr(emitted));
#ifdef RYML_DBG
print_tree(cp);
#endif
test_invariants(cp);
check_fn(cp);
return cp;
};
Tree cp = emit_and_parse(t, "emitted 1");
cp = emit_and_parse(cp, "emitted 2");
cp = emit_and_parse(cp, "emitted 3");
}
template<class CheckFn>
void test_check_emit_check(csubstr yaml, CheckFn check_fn)
{
Tree t = parse_in_arena(yaml);
test_check_emit_check(t, check_fn);
}
//-----------------------------------------------------------------------------
//-----------------------------------------------------------------------------
//-----------------------------------------------------------------------------
inline c4::substr replace_all(c4::csubstr pattern, c4::csubstr repl, c4::csubstr subject, std::string *dst)
{
RYML_CHECK(!subject.overlaps(to_csubstr(*dst)));
size_t ret = subject.replace_all(to_substr(*dst), pattern, repl);
if(ret != dst->size())
{
dst->resize(ret);
ret = subject.replace_all(to_substr(*dst), pattern, repl);
}
RYML_CHECK(ret == dst->size());
return c4::to_substr(*dst);
}
//-----------------------------------------------------------------------------
//-----------------------------------------------------------------------------
//-----------------------------------------------------------------------------
struct ExpectError
{
bool m_got_an_error;
Tree *m_tree;
c4::yml::Callbacks m_glob_prev;
c4::yml::Callbacks m_tree_prev;
Location expected_location;
ExpectError(Location loc={}) : ExpectError(nullptr, loc) {}
ExpectError(Tree *tree, Location loc={});
~ExpectError();
static void do_check( std::function<void()> fn, Location expected={}) { do_check(nullptr, fn, expected); }
static void do_check(Tree *tree, std::function<void()> fn, Location expected={});
static void check_success( std::function<void()> fn) { check_success(nullptr, fn); }
static void check_success(Tree *tree, std::function<void()> fn);
static void check_assertion( std::function<void()> fn, Location expected={}) { check_assertion(nullptr, fn, expected); }
static void check_assertion(Tree *tree, std::function<void()> fn, Location expected={});
};
//-----------------------------------------------------------------------------
//-----------------------------------------------------------------------------
//-----------------------------------------------------------------------------
struct TaggedScalar
{
csubstr tag;
csubstr scalar;
template<size_t N, size_t M>
TaggedScalar(const char (&t)[N], const char (&s)[M]) : tag(t), scalar(s) {}
template<size_t N>
TaggedScalar(const char (&t)[N], std::nullptr_t) : tag(t), scalar() {}
};
struct AnchorRef
{
NodeType_e type;
csubstr str;
AnchorRef() : type(NOTYPE), str() {}
AnchorRef(NodeType_e t) : type(t), str() {}
AnchorRef(NodeType_e t, csubstr v) : type(t), str(v) {}
};
//-----------------------------------------------------------------------------
//-----------------------------------------------------------------------------
//-----------------------------------------------------------------------------
/** a node class against which ryml structures are tested. Uses initializer
* lists to facilitate minimal specification. */
struct CaseNode
{
public:
using seqmap = std::vector<CaseNode>;
using iseqmap = std::initializer_list<CaseNode>;
struct TaggedList
{
csubstr tag;
iseqmap ilist;
template<size_t N> TaggedList(const char (&t)[N], iseqmap l) : tag(t), ilist(l) {}
};
public:
NodeType type;
csubstr key, key_tag; AnchorRef key_anchor;
csubstr val, val_tag; AnchorRef val_anchor;
seqmap children;
CaseNode * parent;
public:
CaseNode(CaseNode && that) noexcept { _move(std::move(that)); }
CaseNode(CaseNode const& that) noexcept { _copy(that); }
CaseNode& operator= (CaseNode && that) noexcept { _move(std::move(that)); return *this; }
CaseNode& operator= (CaseNode const& that) noexcept { _copy(that); return *this; }
~CaseNode() = default;
public:
// brace yourself: what you are about to see is ... crazy.
CaseNode() : CaseNode(NOTYPE) {}
CaseNode(NodeType_e t) : type(t), key(), key_tag(), key_anchor(), val(), val_tag(), val_anchor(), children(), parent(nullptr) { _set_parent(); }
// val
template<size_t N> explicit CaseNode(const char (&v)[N] ) : type((VAL )), key(), key_tag(), key_anchor(), val(v ), val_tag( ), val_anchor(), children(), parent(nullptr) { _set_parent(); }
explicit CaseNode(TaggedScalar const& v) : type((VAL|VALTAG)), key(), key_tag(), key_anchor(), val(v.scalar), val_tag(v.tag), val_anchor(), children(), parent(nullptr) { _set_parent(); }
explicit CaseNode(std::nullptr_t ) : type((VAL )), key(), key_tag(), key_anchor(), val( ), val_tag( ), val_anchor(), children(), parent(nullptr) { _set_parent(); }
// val, with anchor/ref
template<size_t N> explicit CaseNode(const char (&v)[N] , AnchorRef const& arv) : type((arv.type|VAL )), key(), key_tag(), key_anchor(), val(v ), val_tag( ), val_anchor(arv), children(), parent(nullptr) { _set_parent(); }
explicit CaseNode(TaggedScalar const& v, AnchorRef const& arv) : type((arv.type|VAL|VALTAG)), key(), key_tag(), key_anchor(), val(v.scalar), val_tag(v.tag), val_anchor(arv), children(), parent(nullptr) { _set_parent(); }
explicit CaseNode(std::nullptr_t , AnchorRef const& arv) : type((arv.type|VAL )), key(), key_tag(), key_anchor(), val( ), val_tag( ), val_anchor(arv), children(), parent(nullptr) { _set_parent(); }
explicit CaseNode( AnchorRef const& arv) : type((arv.type|VAL )), key(), key_tag(), key_anchor(), val(arv.str ), val_tag( ), val_anchor(arv), children(), parent(nullptr) { _set_parent(); RYML_ASSERT(arv.type == VALREF); }
// val, explicit type
template<size_t N> explicit CaseNode(NodeType t, const char (&v)[N] ) : type((VAL|t )), key(), key_tag(), key_anchor(), val(v ), val_tag( ), val_anchor(), children(), parent(nullptr) { _set_parent(); }
explicit CaseNode(NodeType t, TaggedScalar const& v) : type((VAL|VALTAG|t)), key(), key_tag(), key_anchor(), val(v.scalar), val_tag(v.tag), val_anchor(), children(), parent(nullptr) { _set_parent(); }
explicit CaseNode(NodeType t, std::nullptr_t ) : type((VAL |t)), key(), key_tag(), key_anchor(), val( ), val_tag( ), val_anchor(), children(), parent(nullptr) { _set_parent(); }
// val, explicit type, with val anchor/ref
template<size_t N> explicit CaseNode(NodeType t, const char (&v)[N] , AnchorRef const& arv) : type((arv.type|VAL|t )), key(), key_tag(), key_anchor(), val(v ), val_tag( ), val_anchor(arv), children(), parent(nullptr) { _set_parent(); }
explicit CaseNode(NodeType t, TaggedScalar const& v, AnchorRef const& arv) : type((arv.type|VAL|VALTAG|t)), key(), key_tag(), key_anchor(), val(v.scalar), val_tag(v.tag), val_anchor(arv), children(), parent(nullptr) { _set_parent(); }
explicit CaseNode(NodeType t, std::nullptr_t , AnchorRef const& arv) : type((arv.type|VAL |t)), key(), key_tag(), key_anchor(), val( ), val_tag( ), val_anchor(arv), children(), parent(nullptr) { _set_parent(); }
// keyval
template<size_t N, size_t M> explicit CaseNode(const char (&k)[N] , const char (&v)[M] ) : type((KEYVAL )), key(k ), key_tag( ), key_anchor( ), val(v ), val_tag( ), val_anchor( ), children(), parent(nullptr) { _set_parent(); }
template<size_t M> explicit CaseNode(std::nullptr_t , const char (&v)[M] ) : type((KEYVAL )), key( ), key_tag( ), key_anchor( ), val(v ), val_tag( ), val_anchor( ), children(), parent(nullptr) { _set_parent(); }
template<size_t N> explicit CaseNode(const char (&k)[N] , std::nullptr_t ) : type((KEYVAL )), key(k ), key_tag( ), key_anchor( ), val( ), val_tag( ), val_anchor( ), children(), parent(nullptr) { _set_parent(); }
template<size_t N> explicit CaseNode(const char (&k)[N] , TaggedScalar const& v) : type((KEYVAL|VALTAG )), key(k ), key_tag( ), key_anchor( ), val(v.scalar), val_tag(v.tag), val_anchor( ), children(), parent(nullptr) { _set_parent(); }
template<size_t M> explicit CaseNode(TaggedScalar const& k, const char (&v)[M] ) : type((KEYVAL|KEYTAG )), key(k.scalar), key_tag(k.tag), key_anchor( ), val(v ), val_tag( ), val_anchor( ), children(), parent(nullptr) { _set_parent(); }
explicit CaseNode(TaggedScalar const& k, TaggedScalar const& v) : type((KEYVAL|KEYTAG|VALTAG )), key(k.scalar), key_tag(k.tag), key_anchor( ), val(v.scalar), val_tag(v.tag), val_anchor( ), children(), parent(nullptr) { _set_parent(); }
explicit CaseNode(std::nullptr_t , TaggedScalar const& v) : type((KEYVAL |VALTAG )), key( ), key_tag( ), key_anchor( ), val(v.scalar), val_tag(v.tag), val_anchor( ), children(), parent(nullptr) { _set_parent(); }
explicit CaseNode(TaggedScalar const& k, std::nullptr_t ) : type((KEYVAL|KEYTAG )), key(k.scalar), key_tag(k.tag), key_anchor( ), val( ), val_tag( ), val_anchor( ), children(), parent(nullptr) { _set_parent(); }
explicit CaseNode(std::nullptr_t , std::nullptr_t ) : type((KEYVAL )), key( ), key_tag( ), key_anchor( ), val( ), val_tag( ), val_anchor( ), children(), parent(nullptr) { _set_parent(); }
explicit CaseNode(AnchorRef const& ark, AnchorRef const& arv) : type((KEYVAL|ark.type|arv.type)), key(ark.str ), key_tag( ), key_anchor(ark), val(arv.str ), val_tag( ), val_anchor(arv), children(), parent(nullptr) { _set_parent(); RYML_ASSERT(ark.type == KEYREF); RYML_ASSERT(arv.type == VALREF); }
// keyval, with val anchor/ref
template<size_t N, size_t M> explicit CaseNode(const char (&k)[N] , const char (&v)[M] , AnchorRef const& arv) : type((arv.type|KEYVAL )), key(k ), key_tag( ), key_anchor(), val(v ), val_tag( ), val_anchor(arv), children(), parent(nullptr) { _set_parent(); }
template<size_t N> explicit CaseNode(const char (&k)[N] , TaggedScalar const& v, AnchorRef const& arv) : type((arv.type|KEYVAL|VALTAG )), key(k ), key_tag( ), key_anchor(), val(v.scalar), val_tag(v.tag), val_anchor(arv), children(), parent(nullptr) { _set_parent(); }
template<size_t M> explicit CaseNode(TaggedScalar const& k, const char (&v)[M] , AnchorRef const& arv) : type((arv.type|KEYVAL|KEYTAG )), key(k.scalar), key_tag(k.tag), key_anchor(), val(v ), val_tag( ), val_anchor(arv), children(), parent(nullptr) { _set_parent(); }
explicit CaseNode(TaggedScalar const& k, TaggedScalar const& v, AnchorRef const& arv) : type((arv.type|KEYVAL|KEYTAG|VALTAG)), key(k.scalar), key_tag(k.tag), key_anchor(), val(v.scalar), val_tag(v.tag), val_anchor(arv), children(), parent(nullptr) { _set_parent(); }
// keyval, with key anchor/ref
template<size_t N, size_t M> explicit CaseNode(const char (&k)[N] , AnchorRef const& ark, const char (&v)[M] ) : type((ark.type|KEYVAL )), key(k ), key_tag( ), key_anchor(ark), val(v ), val_tag( ), val_anchor(), children(), parent(nullptr) { _set_parent(); }
template<size_t N> explicit CaseNode(const char (&k)[N] , AnchorRef const& ark, TaggedScalar const& v) : type((ark.type|KEYVAL|VALTAG )), key(k ), key_tag( ), key_anchor(ark), val(v.scalar), val_tag(v.tag), val_anchor(), children(), parent(nullptr) { _set_parent(); }
template<size_t M> explicit CaseNode(TaggedScalar const& k, AnchorRef const& ark, const char (&v)[M] ) : type((ark.type|KEYVAL|KEYTAG )), key(k.scalar), key_tag(k.tag), key_anchor(ark), val(v ), val_tag( ), val_anchor(), children(), parent(nullptr) { _set_parent(); }
explicit CaseNode(TaggedScalar const& k, AnchorRef const& ark, TaggedScalar const& v) : type((ark.type|KEYVAL|KEYTAG|VALTAG)), key(k.scalar), key_tag(k.tag), key_anchor(ark), val(v.scalar), val_tag(v.tag), val_anchor(), children(), parent(nullptr) { _set_parent(); }
// keyval, with key anchor/ref + val anchor/ref
template<size_t N, size_t M> explicit CaseNode(const char (&k)[N] , AnchorRef const& ark, const char (&v)[M] , AnchorRef const& arv) : type((ark.type|arv.type|KEYVAL )), key(k ), key_tag( ), key_anchor(ark), val(v ), val_tag( ), val_anchor(arv), children(), parent(nullptr) { _set_parent(); }
template<size_t N> explicit CaseNode(const char (&k)[N] , AnchorRef const& ark, TaggedScalar const& v, AnchorRef const& arv) : type((ark.type|arv.type|KEYVAL|VALTAG )), key(k ), key_tag( ), key_anchor(ark), val(v.scalar), val_tag(v.tag), val_anchor(arv), children(), parent(nullptr) { _set_parent(); }
template<size_t M> explicit CaseNode(TaggedScalar const& k, AnchorRef const& ark, const char (&v)[M] , AnchorRef const& arv) : type((ark.type|arv.type|KEYVAL|KEYTAG )), key(k.scalar), key_tag(k.tag), key_anchor(ark), val(v ), val_tag( ), val_anchor(arv), children(), parent(nullptr) { _set_parent(); }
explicit CaseNode(TaggedScalar const& k, AnchorRef const& ark, TaggedScalar const& v, AnchorRef const& arv) : type((ark.type|arv.type|KEYVAL|KEYTAG|VALTAG)), key(k.scalar), key_tag(k.tag), key_anchor(ark), val(v.scalar), val_tag(v.tag), val_anchor(arv), children(), parent(nullptr) { _set_parent(); }
// keyval, explicit type
template<size_t N, size_t M> explicit CaseNode(NodeType t, const char (&k)[N] , const char (&v)[M] ) : type((KEYVAL|t )), key(k ), key_tag( ), key_anchor(), val(v ), val_tag( ), val_anchor(), children(), parent(nullptr) { _set_parent(); }
template<size_t N> explicit CaseNode(NodeType t, const char (&k)[N] , std::nullptr_t ) : type((KEYVAL|t )), key(k ), key_tag( ), key_anchor(), val( ), val_tag( ), val_anchor(), children(), parent(nullptr) { _set_parent(); }
template<size_t M> explicit CaseNode(NodeType t, std::nullptr_t , const char (&v)[M] ) : type((KEYVAL|t )), key( ), key_tag( ), key_anchor(), val(v ), val_tag( ), val_anchor(), children(), parent(nullptr) { _set_parent(); }
template<size_t N> explicit CaseNode(NodeType t, const char (&k)[N] , TaggedScalar const& v) : type((KEYVAL|VALTAG|t )), key(k ), key_tag( ), key_anchor(), val(v.scalar), val_tag(v.tag), val_anchor(), children(), parent(nullptr) { _set_parent(); }
template<size_t M> explicit CaseNode(NodeType t, TaggedScalar const& k, const char (&v)[M] ) : type((KEYVAL|KEYTAG|t )), key(k.scalar), key_tag(k.tag), key_anchor(), val(v ), val_tag( ), val_anchor(), children(), parent(nullptr) { _set_parent(); }
explicit CaseNode(NodeType t, TaggedScalar const& k, TaggedScalar const& v) : type((KEYVAL|KEYTAG|VALTAG|t)), key(k.scalar), key_tag(k.tag), key_anchor(), val(v.scalar), val_tag(v.tag), val_anchor(), children(), parent(nullptr) { _set_parent(); }
explicit CaseNode(NodeType t, TaggedScalar const& k, std::nullptr_t ) : type((KEYVAL|KEYTAG |t)), key(k.scalar), key_tag(k.tag), key_anchor(), val( ), val_tag( ), val_anchor(), children(), parent(nullptr) { _set_parent(); }
explicit CaseNode(NodeType t, std::nullptr_t , TaggedScalar const& v) : type((KEYVAL |VALTAG|t)), key( ), key_tag( ), key_anchor(), val(v.scalar), val_tag(v.tag), val_anchor(), children(), parent(nullptr) { _set_parent(); }
explicit CaseNode(NodeType t, std::nullptr_t , std::nullptr_t ) : type((KEYVAL |t)), key( ), key_tag( ), key_anchor(), val( ), val_tag( ), val_anchor(), children(), parent(nullptr) { _set_parent(); }
// keyval, explicit type, with val anchor/ref
template<size_t N, size_t M> explicit CaseNode(NodeType t, const char (&k)[N] , const char (&v)[M] , AnchorRef const& arv) : type((arv.type|KEYVAL|t )), key(k ), key_tag( ), key_anchor(), val(v ), val_tag( ), val_anchor(arv), children(), parent(nullptr) { _set_parent(); }
template<size_t N> explicit CaseNode(NodeType t, const char (&k)[N] , TaggedScalar const& v, AnchorRef const& arv) : type((arv.type|KEYVAL|VALTAG|t )), key(k ), key_tag( ), key_anchor(), val(v.scalar), val_tag(v.tag), val_anchor(arv), children(), parent(nullptr) { _set_parent(); }
template<size_t M> explicit CaseNode(NodeType t, TaggedScalar const& k, const char (&v)[M] , AnchorRef const& arv) : type((arv.type|KEYVAL|KEYTAG|t )), key(k.scalar), key_tag(k.tag), key_anchor(), val(v ), val_tag( ), val_anchor(arv), children(), parent(nullptr) { _set_parent(); }
explicit CaseNode(NodeType t, TaggedScalar const& k, TaggedScalar const& v, AnchorRef const& arv) : type((arv.type|KEYVAL|KEYTAG|VALTAG|t)), key(k.scalar), key_tag(k.tag), key_anchor(), val(v.scalar), val_tag(v.tag), val_anchor(arv), children(), parent(nullptr) { _set_parent(); }
// keyval, explicit type, with key anchor/ref
template<size_t N, size_t M> explicit CaseNode(NodeType t, const char (&k)[N] , AnchorRef const& ark, const char (&v)[M] ) : type((ark.type|KEYVAL|t )), key(k ), key_tag( ), key_anchor(ark), val(v ), val_tag( ), val_anchor(), children(), parent(nullptr) { _set_parent(); }
template<size_t N> explicit CaseNode(NodeType t, const char (&k)[N] , AnchorRef const& ark, TaggedScalar const& v) : type((ark.type|KEYVAL|VALTAG|t )), key(k ), key_tag( ), key_anchor(ark), val(v.scalar), val_tag(v.tag), val_anchor(), children(), parent(nullptr) { _set_parent(); }
template<size_t M> explicit CaseNode(NodeType t, TaggedScalar const& k, AnchorRef const& ark, const char (&v)[M] ) : type((ark.type|KEYVAL|KEYTAG|t )), key(k.scalar), key_tag(k.tag), key_anchor(ark), val(v ), val_tag( ), val_anchor(), children(), parent(nullptr) { _set_parent(); }
explicit CaseNode(NodeType t, TaggedScalar const& k, AnchorRef const& ark, TaggedScalar const& v) : type((ark.type|KEYVAL|KEYTAG|VALTAG|t)), key(k.scalar), key_tag(k.tag), key_anchor(ark), val(v.scalar), val_tag(v.tag), val_anchor(), children(), parent(nullptr) { _set_parent(); }
// keyval, explicit type, with key anchor/ref + val anchor/ref
template<size_t N, size_t M> explicit CaseNode(NodeType t, const char (&k)[N] , AnchorRef const& ark, const char (&v)[M] , AnchorRef const& arv) : type((ark.type|arv.type|KEYVAL|t )), key(k ), key_tag( ), key_anchor(ark), val(v ), val_tag( ), val_anchor(arv), children(), parent(nullptr) { _set_parent(); }
template<size_t N> explicit CaseNode(NodeType t, const char (&k)[N] , AnchorRef const& ark, TaggedScalar const& v, AnchorRef const& arv) : type((ark.type|arv.type|KEYVAL|VALTAG|t )), key(k ), key_tag( ), key_anchor(ark), val(v.scalar), val_tag(v.tag), val_anchor(arv), children(), parent(nullptr) { _set_parent(); }
template<size_t M> explicit CaseNode(NodeType t, TaggedScalar const& k, AnchorRef const& ark, const char (&v)[M] , AnchorRef const& arv) : type((ark.type|arv.type|KEYVAL|KEYTAG|t )), key(k.scalar), key_tag(k.tag), key_anchor(ark), val(v ), val_tag( ), val_anchor(arv), children(), parent(nullptr) { _set_parent(); }
explicit CaseNode(NodeType t, TaggedScalar const& k, AnchorRef const& ark, TaggedScalar const& v, AnchorRef const& arv) : type((ark.type|arv.type|KEYVAL|KEYTAG|VALTAG|t)), key(k.scalar), key_tag(k.tag), key_anchor(ark), val(v.scalar), val_tag(v.tag), val_anchor(arv), children(), parent(nullptr) { _set_parent(); }
// container
template<size_t N> explicit CaseNode(const char (&k)[N] , iseqmap s) : type(), key(k ), key_tag( ), key_anchor(), val(), val_tag( ), val_anchor(), children(s ), parent(nullptr) { _set_parent(); type = _guess(); }
template<size_t N> explicit CaseNode(const char (&k)[N] , TaggedList s) : type(), key(k ), key_tag( ), key_anchor(), val(), val_tag(s.tag), val_anchor(), children(s.ilist), parent(nullptr) { _set_parent(); type = _guess(); }
explicit CaseNode(TaggedScalar const& k, iseqmap s) : type(), key(k.scalar), key_tag(k.tag), key_anchor(), val(), val_tag( ), val_anchor(), children(s ), parent(nullptr) { _set_parent(); type = _guess(); }
explicit CaseNode(TaggedScalar const& k, TaggedList s) : type(), key(k.scalar), key_tag(k.tag), key_anchor(), val(), val_tag(s.tag), val_anchor(), children(s.ilist), parent(nullptr) { _set_parent(); type = _guess(); }
explicit CaseNode( iseqmap m) : CaseNode("", m) {}
explicit CaseNode( TaggedList m) : CaseNode("", m) {}
// container, with val anchor/ref
template<size_t N> explicit CaseNode(const char (&k)[N] , iseqmap s, AnchorRef const& arv) : type(), key(k ), key_tag( ), key_anchor(), val(), val_tag( ), val_anchor(arv), children(s ), parent(nullptr) { _set_parent(); type = _guess(); }
template<size_t N> explicit CaseNode(const char (&k)[N] , TaggedList s, AnchorRef const& arv) : type(), key(k ), key_tag( ), key_anchor(), val(), val_tag(s.tag), val_anchor(arv), children(s.ilist), parent(nullptr) { _set_parent(); type = _guess(); }
explicit CaseNode(TaggedScalar const& k, iseqmap s, AnchorRef const& arv) : type(), key(k.scalar), key_tag(k.tag), key_anchor(), val(), val_tag( ), val_anchor(arv), children(s ), parent(nullptr) { _set_parent(); type = _guess(); }
explicit CaseNode(TaggedScalar const& k, TaggedList s, AnchorRef const& arv) : type(), key(k.scalar), key_tag(k.tag), key_anchor(), val(), val_tag(s.tag), val_anchor(arv), children(s.ilist), parent(nullptr) { _set_parent(); type = _guess(); }
explicit CaseNode( iseqmap m, AnchorRef const& arv) : CaseNode("", m, arv) {}
explicit CaseNode( TaggedList m, AnchorRef const& arv) : CaseNode("", m, arv) {}
// container, with key anchor/ref
template<size_t N> explicit CaseNode(const char (&k)[N] , AnchorRef const& ark, iseqmap s) : type(), key(k ), key_tag( ), key_anchor(ark), val(), val_tag( ), val_anchor(), children(s ), parent(nullptr) { _set_parent(); type = _guess(); }
template<size_t N> explicit CaseNode(const char (&k)[N] , AnchorRef const& ark, TaggedList s) : type(), key(k ), key_tag( ), key_anchor(ark), val(), val_tag(s.tag), val_anchor(), children(s.ilist), parent(nullptr) { _set_parent(); type = _guess(); }
explicit CaseNode(TaggedScalar const& k, AnchorRef const& ark, iseqmap s) : type(), key(k.scalar), key_tag(k.tag), key_anchor(ark), val(), val_tag( ), val_anchor(), children(s ), parent(nullptr) { _set_parent(); type = _guess(); }
explicit CaseNode(TaggedScalar const& k, AnchorRef const& ark, TaggedList s) : type(), key(k.scalar), key_tag(k.tag), key_anchor(ark), val(), val_tag(s.tag), val_anchor(), children(s.ilist), parent(nullptr) { _set_parent(); type = _guess(); }
// container, with key anchor/ref + val anchor/ref
template<size_t N> explicit CaseNode(const char (&k)[N] , AnchorRef const& ark, iseqmap s, AnchorRef const& arv) : type(), key(k ), key_tag( ), key_anchor(ark), val(), val_tag( ), val_anchor(arv), children(s ), parent(nullptr) { _set_parent(); type = _guess(); }
template<size_t N> explicit CaseNode(const char (&k)[N] , AnchorRef const& ark, TaggedList s, AnchorRef const& arv) : type(), key(k ), key_tag( ), key_anchor(ark), val(), val_tag(s.tag), val_anchor(arv), children(s.ilist), parent(nullptr) { _set_parent(); type = _guess(); }
explicit CaseNode(TaggedScalar const& k, AnchorRef const& ark, iseqmap s, AnchorRef const& arv) : type(), key(k.scalar), key_tag(k.tag), key_anchor(ark), val(), val_tag( ), val_anchor(arv), children(s ), parent(nullptr) { _set_parent(); type = _guess(); }
explicit CaseNode(TaggedScalar const& k, AnchorRef const& ark, TaggedList s, AnchorRef const& arv) : type(), key(k.scalar), key_tag(k.tag), key_anchor(ark), val(), val_tag(s.tag), val_anchor(arv), children(s.ilist), parent(nullptr) { _set_parent(); type = _guess(); }
// container, explicit type
template<size_t N> explicit CaseNode(NodeType t, const char (&k)[N] , iseqmap s) : type((t )), key(k ), key_tag( ), key_anchor(), val(), val_tag( ), val_anchor(), children(s ), parent(nullptr) { _set_parent(); }
template<size_t N> explicit CaseNode(NodeType t, const char (&k)[N] , TaggedList s) : type((t|VALTAG)), key(k ), key_tag( ), key_anchor(), val(), val_tag(s.tag), val_anchor(), children(s.ilist), parent(nullptr) { _set_parent(); }
explicit CaseNode(NodeType t, TaggedScalar const& k, iseqmap s) : type((t|KEYTAG)), key(k.scalar), key_tag(k.tag), key_anchor(), val(), val_tag( ), val_anchor(), children(s ), parent(nullptr) { _set_parent(); }
explicit CaseNode(NodeType t, iseqmap s) : type((t )), key( ), key_tag( ), key_anchor(), val(), val_tag( ), val_anchor(), children(s ), parent(nullptr) { _set_parent(); }
explicit CaseNode(NodeType t, TaggedList s) : type((t|VALTAG)), key( ), key_tag( ), key_anchor(), val(), val_tag(s.tag), val_anchor(), children(s.ilist), parent(nullptr) { _set_parent(); }
// container, explicit type, with val anchor/ref
template<size_t N> explicit CaseNode(NodeType t, const char (&k)[N] , iseqmap s, AnchorRef const& arv) : type((t |VALANCH)), key(k ), key_tag( ), key_anchor(), val(), val_tag( ), val_anchor(arv), children(s ), parent(nullptr) { _set_parent(); }
template<size_t N> explicit CaseNode(NodeType t, const char (&k)[N] , TaggedList s, AnchorRef const& arv) : type((t|VALTAG|VALANCH)), key(k ), key_tag( ), key_anchor(), val(), val_tag(s.tag), val_anchor(arv), children(s.ilist), parent(nullptr) { _set_parent(); }
explicit CaseNode(NodeType t, TaggedScalar const& k, iseqmap s, AnchorRef const& arv) : type((t|KEYTAG|VALANCH)), key(k.scalar), key_tag(k.tag), key_anchor(), val(), val_tag( ), val_anchor(arv), children(s ), parent(nullptr) { _set_parent(); }
explicit CaseNode(NodeType t, iseqmap s, AnchorRef const& arv) : type((t |VALANCH)), key( ), key_tag( ), key_anchor(), val(), val_tag( ), val_anchor(arv), children(s ), parent(nullptr) { _set_parent(); }
explicit CaseNode(NodeType t, TaggedList s, AnchorRef const& arv) : type((t|VALTAG|VALANCH)), key( ), key_tag( ), key_anchor(), val(), val_tag(s.tag), val_anchor(arv), children(s.ilist), parent(nullptr) { _set_parent(); }
// container, explicit type, with key anchor/ref
template<size_t N> explicit CaseNode(NodeType t, const char (&k)[N] , AnchorRef const& ark, iseqmap s) : type((t |KEYANCH)), key(k ), key_tag( ), key_anchor(ark), val(), val_tag( ), val_anchor(), children(s ), parent(nullptr) { _set_parent(); }
template<size_t N> explicit CaseNode(NodeType t, const char (&k)[N] , AnchorRef const& ark, TaggedList s) : type((t|VALTAG|KEYANCH)), key(k ), key_tag( ), key_anchor(ark), val(), val_tag(s.tag), val_anchor(), children(s.ilist), parent(nullptr) { _set_parent(); }
explicit CaseNode(NodeType t, TaggedScalar const& k, AnchorRef const& ark, iseqmap s) : type((t|KEYTAG|KEYANCH)), key(k.scalar), key_tag(k.tag), key_anchor(ark), val(), val_tag( ), val_anchor(), children(s ), parent(nullptr) { _set_parent(); }
// container, explicit type, with key anchor/ref + val anchor/ref
template<size_t N> explicit CaseNode(NodeType t, const char (&k)[N] , AnchorRef const& ark, iseqmap s, AnchorRef const& arv) : type((t |KEYANCH|VALANCH)), key(k ), key_tag( ), key_anchor(ark), val(), val_tag( ), val_anchor(arv), children(s ), parent(nullptr) { _set_parent(); }
template<size_t N> explicit CaseNode(NodeType t, const char (&k)[N] , AnchorRef const& ark, TaggedList s, AnchorRef const& arv) : type((t|VALTAG|KEYANCH|VALANCH)), key(k ), key_tag( ), key_anchor(ark), val(), val_tag(s.tag), val_anchor(arv), children(s.ilist), parent(nullptr) { _set_parent(); }
explicit CaseNode(NodeType t, TaggedScalar const& k, AnchorRef const& ark, iseqmap s, AnchorRef const& arv) : type((t|KEYTAG|KEYANCH|VALANCH)), key(k.scalar), key_tag(k.tag), key_anchor(ark), val(), val_tag( ), val_anchor(arv), children(s ), parent(nullptr) { _set_parent(); }
public:
void _move(CaseNode&& that)
{
type = that.type;
key = that.key;
key_tag = that.key_tag;
key_anchor = that.key_anchor;
val = that.val;
val_tag = that.val_tag;
val_anchor = that.val_anchor;
children = std::move(that.children);
parent = nullptr;
_set_parent();
}
void _copy(CaseNode const& that)
{
type = that.type;
key = that.key;
key_tag = that.key_tag;
key_anchor = that.key_anchor;
val = that.val;
val_tag = that.val_tag;
val_anchor = that.val_anchor;
children = that.children;
parent = nullptr;
_set_parent();
}
void _set_parent()
{
for(auto &ch : children)
{
ch.parent = this;
}
}
NodeType_e _guess() const;
bool is_root() const { return parent; }
bool is_doc() const { return type & DOC; }
bool is_map() const { return type & MAP; }
bool is_seq() const { return type & SEQ; }
bool has_val() const { return type & VAL; }
bool has_key() const { return type & KEY; }
bool is_container() const { return type & (SEQ|MAP); }
bool has_key_anchor() const { return type & KEYANCH; }
bool has_val_anchor() const { return type & VALANCH; }
public:
CaseNode const& operator[] (size_t i) const
{
C4_ASSERT(i >= 0 && i < children.size());
return children[i];
}
CaseNode const& operator[] (csubstr const& name) const
{
auto ch = lookup(name);
C4_ASSERT(ch != nullptr);
return *ch;
}
CaseNode const* lookup(csubstr const& name) const
{
C4_ASSERT( ! children.empty());
for(auto const& ch : children)
if(ch.key == name)
return &ch;
return nullptr;
}
public:
void compare(yml::ConstNodeRef const& n, bool ignore_quote=false) const;
void compare_child(yml::ConstNodeRef const& n, size_t pos) const;
size_t reccount() const
{
size_t c = 1;
for(auto const& ch : children)
c += ch.reccount();
return c;
}
void recreate(yml::NodeRef *n) const;
};
//-----------------------------------------------------------------------------
//-----------------------------------------------------------------------------
//-----------------------------------------------------------------------------
typedef enum {
EXPECT_PARSE_ERROR = (1<<0),
RESOLVE_REFS = (1<<1),
JSON_ALSO = (1<<2), // TODO: make it the opposite: opt-out instead of opt-in
} TestCaseFlags_e;
struct Case
{
std::string filelinebuf;
csubstr fileline;
csubstr name;
csubstr src;
CaseNode root;
TestCaseFlags_e flags;
Location expected_location;
//! create a standard test case: name, source and expected CaseNode structure
template<class... Args> Case(csubstr file, int line, const char *name_, const char *src_, Args&& ...args) : filelinebuf(catrs<std::string>(file, ':', line)), fileline(to_csubstr(filelinebuf)), name(to_csubstr(name_)), src(to_csubstr(src_)), root(std::forward<Args>(args)...), flags(), expected_location() {}
//! create a test case with explicit flags: name, source flags, and expected CaseNode structure
template<class... Args> Case(csubstr file, int line, const char *name_, int f_, const char *src_, Args&& ...args) : filelinebuf(catrs<std::string>(file, ':', line)), fileline(to_csubstr(filelinebuf)), name(to_csubstr(name_)), src(to_csubstr(src_)), root(std::forward<Args>(args)...), flags((TestCaseFlags_e)f_), expected_location() {}
//! create a test case with an error on an expected location
Case(csubstr file, int line, const char *name_, int f_, const char *src_, LineCol loc) : filelinebuf(catrs<std::string>(file, ':', line)), fileline(to_csubstr(filelinebuf)), name(to_csubstr(name_)), src(to_csubstr(src_)), root(), flags((TestCaseFlags_e)f_), expected_location(name, loc.line, loc.col) {}
};
//-----------------------------------------------------------------------------
// a persistent data store to avoid repeating operations on every test
struct CaseDataLineEndings
{
std::vector<char> src_buf;
substr src;
Tree parsed_tree;
size_t numbytes_stdout;
size_t numbytes_stdout_json;
std::string emit_buf;
csubstr emitted_yml;
std::string emitjson_buf;
csubstr emitted_json;
std::string parse_buf;
substr parsed_yml;
std::string parse_buf_json;
substr parsed_json;
Tree emitted_tree;
Tree emitted_tree_json;
Tree recreated;
};
struct CaseData
{
CaseDataLineEndings unix_style;
CaseDataLineEndings unix_style_json;
CaseDataLineEndings windows_style;
CaseDataLineEndings windows_style_json;
};
} // namespace yml
} // namespace c4
#ifdef __clang__
# pragma clang diagnostic pop
#elif defined(__GNUC__)
# pragma GCC diagnostic pop
#elif defined(_MSC_VER)
# pragma warning(pop)
#endif
#endif /* _TEST_CASE_HPP_ */
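The header above is not simply dropped: the test scaffolding it declared (CaseNode, Case, ExpectError, the comparison helpers) appears relocated under test_lib/, matching the new CMake source lists, and the include sites elsewhere in this diff are updated accordingly. A minimal before/after sketch of what that means for a test source:

    // before this commit
    #include "./test_case.hpp"
    #include "./test_group.hpp"

    // after this commit
    #include "./test_lib/test_case.hpp"
    #include "./test_lib/test_group.hpp"
    #include "./test_lib/test_group.def.hpp"  // new split holding the case definitions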


@@ -1,4 +1,5 @@
#include "./test_group.hpp"
#include "./test_lib/test_group.hpp"
#include "./test_lib/test_group.def.hpp"
namespace c4 {
namespace yml {
@@ -20,8 +21,7 @@ TEST(simple_doc, issue_251)
NodeRef root = tree.rootref();
root |= MAP;
root["test"] = "...";
root["test"] |= VALQUO;
root["test"] |= VAL_SQUO;
std::string s = emitrs_yaml<std::string>(tree);
test_check_emit_check(to_csubstr(s), [](Tree const &t){
EXPECT_EQ(t["test"].val(), "...");
@@ -40,9 +40,9 @@ scalar
test_check_emit_check(yaml, [](Tree const &t){
ASSERT_TRUE(t.rootref().is_stream());
ASSERT_EQ(t.rootref().num_children(), 1u);
ASSERT_TRUE(t.rootref().first_child().is_doc());
ASSERT_TRUE(t.rootref().first_child().is_val());
EXPECT_EQ(t.rootref().first_child().val(), csubstr("scalar %YAML 1.2"));
ASSERT_TRUE(t.docref(0).is_doc());
ASSERT_TRUE(t.docref(0).is_val());
EXPECT_EQ(t.docref(0).val(), csubstr("scalar %YAML 1.2"));
});
}
@@ -57,21 +57,104 @@ CASE_GROUP(SIMPLE_DOC)
ADD_CASE_TO_GROUP("one empty doc",
R"(---
)",
N(STREAM, L{DOCVAL})
N(STREAM, L{DOC|VP})
);
ADD_CASE_TO_GROUP("one empty doc, indented",
R"( ---
)",
N(VP, "---")
);
ADD_CASE_TO_GROUP("one termination",
R"(...
)",
NOTYPE
);
ADD_CASE_TO_GROUP("one termination, indented",
R"( ...
)",
N(VP, "...")
);
ADD_CASE_TO_GROUP("two terminations",
R"(...
...
)",
NOTYPE
);
ADD_CASE_TO_GROUP("two terminations, indented",
R"( ...
...
)",
N(VP, "... ...")
);
ADD_CASE_TO_GROUP("three terminations",
R"(...
...
...
)",
NOTYPE
);
ADD_CASE_TO_GROUP("three terminations and one explicit, v0",
R"(...
...
...
---
)",
N(STREAM, L{DOC|VP})
);
ADD_CASE_TO_GROUP("three terminations and one explicit, v1",
R"(...
...
---
...
)",
N(STREAM, L{DOC|VP})
);
ADD_CASE_TO_GROUP("three terminations and one explicit, v2",
R"(...
---
...
...
)",
N(STREAM, L{DOC|VP})
);
ADD_CASE_TO_GROUP("three terminations and one explicit, v3",
R"(---
...
...
...
)",
N(STREAM, L{DOC|VP})
);
ADD_CASE_TO_GROUP("one empty doc, explicit termination",
R"(---
...
)",
N(STREAM, L{DOCVAL})
N(STREAM, L{DOC|VP})
);
ADD_CASE_TO_GROUP("one empty doc, explicit termination, first indented",
R"( ---
...
)",
N(VP, "---")
);
ADD_CASE_TO_GROUP("two empty docs",
R"(---
---
)",
N(STREAM, L{DOCVAL, DOCVAL})
N(STREAM, L{DOC|VP, DOC|VP})
);
ADD_CASE_TO_GROUP("two empty docs, with termination",
@@ -79,19 +162,19 @@ R"(---
...
---
)",
N(STREAM, L{DOCVAL, DOCVAL})
N(STREAM, L{DOC|VP, DOC|VP})
);
ADD_CASE_TO_GROUP("doc with single scalar",
R"(a scalar
)",
N(DOCVAL, "a scalar")
N(VP, "a scalar")
);
ADD_CASE_TO_GROUP("doc with single scalar, explicit",
R"(--- a scalar
)",
N(STREAM, L{N(DOCVAL, "a scalar")})
N(STREAM, L{N(DOC|VP, "a scalar")})
);
ADD_CASE_TO_GROUP("simple doc, empty docs",
@@ -100,7 +183,7 @@ R"(---
---
---
)",
N(STREAM, L{DOCVAL, DOCVAL, DOCVAL, DOCVAL})
N(STREAM, L{DOC|VP, DOC|VP, DOC|VP, DOC|VP})
);
ADD_CASE_TO_GROUP("simple doc, empty docs, indented",
@@ -109,7 +192,7 @@ R"( ---
---
---
)",
N(STREAM, L{DOCVAL, DOCVAL, DOCVAL, DOCVAL})
N(VP, "--- --- --- ---")
);
ADD_CASE_TO_GROUP("simple doc, empty docs, term",
@@ -124,23 +207,23 @@ R"(---
---
...
)",
N(STREAM, L{DOCVAL, DOCVAL, DOCVAL, DOCVAL})
N(STREAM, L{DOC|VP, DOC|VP, DOC|VP, DOC|VP})
);
ADD_CASE_TO_GROUP("simple doc, empty docs, term, indented",
R"(
---
...
---
...
---
...
---
...
)",
N(STREAM, L{DOCVAL, DOCVAL, DOCVAL, DOCVAL})
N(VP, "---\n... --- ...\n--- ... --- ...")
);
ADD_CASE_TO_GROUP("simple doc, plain scalar, multiple docs, implicit 2nd doc",
@@ -152,20 +235,20 @@ R"(---
with several lines
)",
N(STREAM, L{
N(DOCSEQ, L{N("a plain scalar with several lines")}),
N(DOCSEQ, L{N("a second plain scalar with several lines")}),
N(DOC|SB, L{N(VP, "a plain scalar with several lines")}),
N(DOC|SB, L{N(VP, "a second plain scalar with several lines")}),
}));
ADD_CASE_TO_GROUP("simple doc, single scalar, implicit doc",
R"(a scalar with some spaces inside
)",
N(DOCVAL, "a scalar with some spaces inside")
N(VP, "a scalar with some spaces inside")
);
ADD_CASE_TO_GROUP("simple doc, single scalar, implicit doc, indented",
R"( a scalar with some spaces inside
)",
N(DOCVAL,"a scalar with some spaces inside")
N(VP,"a scalar with some spaces inside")
);
ADD_CASE_TO_GROUP("simple doc, multi scalar, implicit doc",
@@ -173,11 +256,7 @@ R"(a scalar with some spaces inside,
and yet another one with more spaces inside,
and it doesn't really stop
)",
N(L{
N("a scalar with some spaces inside"),
N("and yet another one with more spaces inside"),
N("and it doesn't really stop"),
})
N(VP, "a scalar with some spaces inside, and yet another one with more spaces inside, and it doesn't really stop")
);
ADD_CASE_TO_GROUP("simple doc, multi scalar, implicit doc, indented",
@@ -186,25 +265,21 @@ R"(
and yet another one with more spaces inside,
and it doesn't really stop
)",
N(L{
N("a scalar with some spaces inside"),
N("and yet another one with more spaces inside"),
N("and it doesn't really stop"),
})
N(VP, "a scalar with some spaces inside, and yet another one with more spaces inside, and it doesn't really stop")
);
ADD_CASE_TO_GROUP("simple doc, single scalar, explicit doc, implicit termination",
R"(---
a scalar with some spaces inside
)",
N(STREAM, L{N(DOCVAL, "a scalar with some spaces inside")})
N(STREAM, L{N(DOC|VP, "a scalar with some spaces inside")})
);
ADD_CASE_TO_GROUP("simple doc, single scalar, explicit doc, implicit termination, indented",
R"( ---
a scalar with some spaces inside
)",
N(STREAM, L{N(DOCVAL, "a scalar with some spaces inside")})
N(VP, "--- a scalar with some spaces inside")
);
ADD_CASE_TO_GROUP("simple doc, single scalar, explicit doc, explicit termination",
@@ -212,7 +287,7 @@ R"(---
a scalar with some spaces inside
...
)",
N(STREAM, L{N(DOCVAL, "a scalar with some spaces inside")})
N(STREAM, L{N(DOC|VP, "a scalar with some spaces inside")})
);
ADD_CASE_TO_GROUP("simple doc, single scalar, explicit doc, explicit termination, indented",
@@ -220,7 +295,7 @@ R"( ---
a scalar with some spaces inside
...
)",
N(STREAM, L{N(DOCVAL, "a scalar with some spaces inside")})
N(VP, "--- a scalar with some spaces inside ...")
);
ADD_CASE_TO_GROUP("simple doc, multi doc, seq-map",
@@ -235,26 +310,47 @@ b: 1
c: 2
)",
N(STREAM, L{
N(DOCSEQ, L{N("a"), N("b"), N("c")}),
N(DOCMAP, L{N("a", "0"), N("b", "1"), N("c", "2")})
N(DOC|SB, L{N(VP, "a"), N(VP, "b"), N(VP, "c")}),
N(DOC|MB, L{N(KP|VP, "a", "0"), N(KP|VP, "b", "1"), N(KP|VP, "c", "2")})
})
);
ADD_CASE_TO_GROUP("simple doc, multi doc, seq-map, indented",
R"( ---
R"(
---
- a
- b
- c
...
---
a: 0
b: 1
c: 2
)",
N(VP, "--- - a - b - c ... ---")
);
ADD_CASE_TO_GROUP("simple doc, 2XXW",
R"(
--- !!set
? Mark McGwire
? Sammy Sosa
? Ken Griff
)",
N(STREAM, L{
N(DOCSEQ, L{N("a"), N("b"), N("c")}),
N(DOCMAP, L{N("a", "0"), N("b", "1"), N("c", "2")})
})
N(DOC|MB, TL("!!set", L{
N(KP|VP, "Mark McGwire", {}),
N(KP|VP, "Sammy Sosa", {}),
N(KP|VP, "Ken Griff", {}),
}))
})
);
ADD_CASE_TO_GROUP("simple doc, 2XXW, indented",
R"(
--- !!set
? Mark McGwire
? Sammy Sosa
? Ken Griff
)",
N(VP, "--- !!set ? Mark McGwire ? Sammy Sosa ? Ken Griff")
);
ADD_CASE_TO_GROUP("simple doc, multi doc, seq-map, no term",
@@ -267,14 +363,14 @@ a: 0
b: 1
c: 2
)",
N(STREAM, L{
N(DOCSEQ, L{N("a"), N("b"), N("c")}),
N(DOCMAP, L{N("a", "0"), N("b", "1"), N("c", "2")})
})
N(STREAM, L{
N(DOC|SB, L{N(VP, "a"), N(VP, "b"), N(VP, "c")}),
N(DOC|MB, L{N(KP|VP, "a", "0"), N(KP|VP, "b", "1"), N(KP|VP, "c", "2")})
})
);
ADD_CASE_TO_GROUP("simple doc, multi doc, seq-map, no term, indented",
R"(
ADD_CASE_TO_GROUP("simple doc, multi doc, seq-map, no term, indented", EXPECT_PARSE_ERROR,
R"(# the first : should cause a parse error
---
- a
- b
@@ -284,10 +380,7 @@ R"(
b: 1
c: 2
)",
N(STREAM, L{
N(DOCSEQ, L{N("a"), N("b"), N("c")}),
N(DOCMAP, L{N("a", "0"), N("b", "1"), N("c", "2")})
})
LineCol(7, 6)
);
ADD_CASE_TO_GROUP("simple doc, multi doc, map-seq",
@@ -302,13 +395,13 @@ c: 2
- c
...
)",
N(STREAM, L{
N(DOCMAP, L{N("a", "0"), N("b", "1"), N("c", "2")}),
N(DOCSEQ, L{N("a"), N("b"), N("c")})
})
N(STREAM, L{
N(DOC|MB, L{N(KP|VP, "a", "0"), N(KP|VP, "b", "1"), N(KP|VP, "c", "2")}),
N(DOC|SB, L{N(VP, "a"), N(VP, "b"), N(VP, "c")}),
})
);
ADD_CASE_TO_GROUP("simple doc, multi doc, map-seq, indented",
ADD_CASE_TO_GROUP("simple doc, multi doc, map-seq, indented", EXPECT_PARSE_ERROR,
R"(
---
a: 0
@@ -321,10 +414,7 @@ R"(
- c
...
)",
N(STREAM, L{
N(DOCMAP, L{N("a", "0"), N("b", "1"), N("c", "2")}),
N(DOCSEQ, L{N("a"), N("b"), N("c")})
})
LineCol(3, 6)
);
ADD_CASE_TO_GROUP("simple doc, multi doc, map-seq, no term",
@@ -337,13 +427,13 @@ c: 2
- b
- c
)",
N(STREAM, L{
N(DOCMAP, L{N("a", "0"), N("b", "1"), N("c", "2")}),
N(DOCSEQ, L{N("a"), N("b"), N("c")})
})
N(STREAM, L{
N(DOC|MB, L{N(KP|VP, "a", "0"), N(KP|VP, "b", "1"), N(KP|VP, "c", "2")}),
N(DOC|SB, L{N(VP, "a"), N(VP, "b"), N(VP, "c")}),
})
);
ADD_CASE_TO_GROUP("simple doc, multi doc, map-seq, no term, indented",
ADD_CASE_TO_GROUP("simple doc, multi doc, map-seq, no term, indented", EXPECT_PARSE_ERROR,
R"(
---
a: 0
@@ -354,10 +444,7 @@ R"(
- b
- c
)",
N(STREAM, L{
N(DOCMAP, L{N("a", "0"), N("b", "1"), N("c", "2")}),
N(DOCSEQ, L{N("a"), N("b"), N("c")})
})
LineCol(3, 6)
);
ADD_CASE_TO_GROUP("simple doc, multi doc, impl seq-map",
@@ -368,13 +455,13 @@ R"(---
{a: 0, b: 1, c: 2}
...
)",
N(STREAM, L{
N(DOCSEQ, L{N("a"), N("b"), N("c")}),
N(DOCMAP, L{N("a", "0"), N("b", "1"), N("c", "2")})
})
N(STREAM, L{
N(DOC|SFS, L{N(VP, "a"), N(VP, "b"), N(VP, "c")}),
N(DOC|MFS, L{N(KP|VP, "a", "0"), N(KP|VP, "b", "1"), N(KP|VP, "c", "2")}),
})
);
ADD_CASE_TO_GROUP("simple doc, multi doc, impl seq-map, indented",
ADD_CASE_TO_GROUP("simple doc, multi doc, impl seq-map, indented", EXPECT_PARSE_ERROR,
R"(
---
[a, b, c]
@@ -383,10 +470,7 @@ R"(
{a: 0, b: 1, c: 2}
...
)",
N(STREAM, L{
N(DOCSEQ, L{N("a"), N("b"), N("c")}),
N(DOCMAP, L{N("a", "0"), N("b", "1"), N("c", "2")})
})
LineCol(6, 7)
);
ADD_CASE_TO_GROUP("simple doc, multi doc, impl seq-map, no term",
@@ -395,23 +479,20 @@ R"(---
---
{a: 0, b: 1, c: 2}
)",
N(STREAM, L{
N(DOCSEQ, L{N("a"), N("b"), N("c")}),
N(DOCMAP, L{N("a", "0"), N("b", "1"), N("c", "2")})
})
N(STREAM, L{
N(DOC|SFS, L{N(VP, "a"), N(VP, "b"), N(VP, "c")}),
N(DOC|MFS, L{N(KP|VP, "a", "0"), N(KP|VP, "b", "1"), N(KP|VP, "c", "2")}),
})
);
ADD_CASE_TO_GROUP("simple doc, multi doc, impl seq-map, no term, indented",
ADD_CASE_TO_GROUP("simple doc, multi doc, impl seq-map, no term, indented", EXPECT_PARSE_ERROR,
R"(
---
[a, b, c]
---
{a: 0, b: 1, c: 2}
)",
N(STREAM, L{
N(DOCSEQ, L{N("a"), N("b"), N("c")}),
N(DOCMAP, L{N("a", "0"), N("b", "1"), N("c", "2")})
})
LineCol(5, 7)
);
ADD_CASE_TO_GROUP("simple doc, multi doc, impl map-seq",
@@ -422,13 +503,13 @@ R"(---
[a, b, c]
...
)",
N(STREAM, L{
N(DOCMAP, L{N("a", "0"), N("b", "1"), N("c", "2")}),
N(DOCSEQ, L{N("a"), N("b"), N("c")})
})
N(STREAM, L{
N(DOC|MFS, L{N(KP|VP, "a", "0"), N(KP|VP, "b", "1"), N(KP|VP, "c", "2")}),
N(DOC|SFS, L{N(VP, "a"), N(VP, "b"), N(VP, "c")}),
})
);
ADD_CASE_TO_GROUP("simple doc, multi doc, impl map-seq, indented",
ADD_CASE_TO_GROUP("simple doc, multi doc, impl map-seq, indented", EXPECT_PARSE_ERROR,
R"(
---
{a: 0, b: 1, c: 2}
@@ -437,10 +518,7 @@ R"(
[a, b, c]
...
)",
N(STREAM, L{
N(DOCMAP, L{N("a", "0"), N("b", "1"), N("c", "2")}),
N(DOCSEQ, L{N("a"), N("b"), N("c")})
})
LineCol(3, 7)
);
ADD_CASE_TO_GROUP("simple doc, multi doc, impl map-seq, no term",
@@ -449,26 +527,23 @@ R"(---
---
[a, b, c]
)",
N(STREAM, L{
N(DOCMAP, L{N("a", "0"), N("b", "1"), N("c", "2")}),
N(DOCSEQ, L{N("a"), N("b"), N("c")})
})
N(STREAM, L{
N(DOC|MFS, L{N(KP|VP, "a", "0"), N(KP|VP, "b", "1"), N(KP|VP, "c", "2")}),
N(DOC|SFS, L{N(VP, "a"), N(VP, "b"), N(VP, "c")}),
})
);
ADD_CASE_TO_GROUP("simple doc, multi doc, impl map-seq, no term, indented",
ADD_CASE_TO_GROUP("simple doc, multi doc, impl map-seq, no term, indented", EXPECT_PARSE_ERROR,
R"(
---
{a: 0, b: 1, c: 2}
---
[a, b, c]
)",
N(STREAM, L{
N(DOCMAP, L{N("a", "0"), N("b", "1"), N("c", "2")}),
N(DOCSEQ, L{N("a"), N("b"), N("c")})
})
LineCol(3, 7)
);
ADD_CASE_TO_GROUP("simple doc, indented with empty lines",
ADD_CASE_TO_GROUP("simple doc, indented with empty lines", EXPECT_PARSE_ERROR,
R"(
---
{a: 0, b: 1, c: 2,
@@ -496,10 +571,7 @@ R"(
d:
some scalar
)",
N(STREAM, L{
N(DOCMAP, L{N("a", "0"), N("b", "1"), N("c", "2"), N("d", "some scalar")}),
N(DOCMAP, L{N("a", "0"), N("b", "1"), N("c", "2"), N("d", "some scalar")}),
})
LineCol(3, 7)
);
@@ -513,11 +585,11 @@ R"(# Private
!foo "bar"
)",
N(STREAM, L{
N(DOCVAL|VALQUO, TS("!foo", "bar")),
N(DOC|VD, TS("!foo", "bar")),
// strict YAML should result in this for the second doc:
//N(DOCVAL|VALQUO, TS("<tag:example.com,2000:app/foo>", "bar")),
//N(DOC|VD, TS("<tag:example.com,2000:app/foo>", "bar")),
// but since we don't do lookup, it should result in:
N(DOCVAL|VALQUO, TS("!foo", "bar")),
N(DOC|VD, TS("!foo", "bar")),
})
);
}

@@ -1,610 +0,0 @@
#include "./test_group.hpp"
namespace c4 {
namespace yml {
TEST(double_quoted, escaped_chars)
{
csubstr yaml = R"("\\\"\n\r\t\ \/\ \0\b\f\a\v\e\_\N\L\P")";
// build the string like this because some of the characters are
// filtered out under the double quotes
std::string expected;
expected += '\\';
expected += '"';
expected += '\n';
expected += '\r';
expected += '\t';
expected += '\t';
expected += '/';
expected += ' ';
expected += '\0';
expected += '\b';
expected += '\f';
expected += '\a';
expected += '\v';
expected += INT8_C(0x1b); // \e
//
// wrap explicitly to avoid overflow
expected += _RYML_CHCONST(-0x3e, 0xc2); // \_ (1)
expected += _RYML_CHCONST(-0x60, 0xa0); // \_ (2)
//
expected += _RYML_CHCONST(-0x3e, 0xc2); // \N (1)
expected += _RYML_CHCONST(-0x7b, 0x85); // \N (2)
//
expected += _RYML_CHCONST(-0x1e, 0xe2); // \L (1)
expected += _RYML_CHCONST(-0x80, 0x80); // \L (2)
expected += _RYML_CHCONST(-0x58, 0xa8); // \L (3)
//
expected += _RYML_CHCONST(-0x1e, 0xe2); // \P (1)
expected += _RYML_CHCONST(-0x80, 0x80); // \P (2)
expected += _RYML_CHCONST(-0x57, 0xa9); // \P (3)
//
Tree t = parse_in_arena(yaml);
csubstr v = t.rootref().val();
std::string actual = {v.str, v.len};
EXPECT_EQ(actual, expected);
}
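Note: the _RYML_CHCONST() byte values hard-coded above are simply the UTF-8 encodings of U+00A0 (\_), U+0085 (\N), U+2028 (\L) and U+2029 (\P). A minimal standalone sketch (not part of the test file; utf8() is an illustrative helper) that derives those same byte sequences:
#include <cassert>
#include <string>
// encode a code point below U+10000 as UTF-8 (sketch only)
static std::string utf8(unsigned cp)
{
    std::string s;
    if(cp < 0x80)
    {
        s += (char)cp;
    }
    else if(cp < 0x800)
    {
        s += (char)(0xc0 | (cp >> 6));
        s += (char)(0x80 | (cp & 0x3f));
    }
    else
    {
        s += (char)(0xe0 | (cp >> 12));
        s += (char)(0x80 | ((cp >> 6) & 0x3f));
        s += (char)(0x80 | (cp & 0x3f));
    }
    return s;
}
int main()
{
    assert(utf8(0x00a0) == "\xc2\xa0");     // \_  -> 0xc2 0xa0
    assert(utf8(0x0085) == "\xc2\x85");     // \N  -> 0xc2 0x85
    assert(utf8(0x2028) == "\xe2\x80\xa8"); // \L  -> 0xe2 0x80 0xa8
    assert(utf8(0x2029) == "\xe2\x80\xa9"); // \P  -> 0xe2 0x80 0xa9
    return 0;
}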
TEST(double_quoted, test_suite_3RLN)
{
csubstr yaml = R"(---
"1 leading
\ttab"
---
"2 leading
\ tab"
---
"3 leading
tab"
---
"4 leading
\t tab"
---
"5 leading
\ tab"
---
"6 leading
tab"
)";
test_check_emit_check(yaml, [](Tree const &t){
EXPECT_EQ(t.docref(0).val(), "1 leading \ttab");
EXPECT_EQ(t.docref(1).val(), "2 leading \ttab");
EXPECT_EQ(t.docref(2).val(), "3 leading tab");
EXPECT_EQ(t.docref(3).val(), "4 leading \t tab");
EXPECT_EQ(t.docref(4).val(), "5 leading \t tab");
EXPECT_EQ(t.docref(5).val(), "6 leading tab");
});
}
TEST(double_quoted, test_suite_5GBF)
{
csubstr yaml = R"(
Folding:
"Empty line
as a line feed"
Folding2:
"Empty line
as a line feed"
Folding3:
"Empty line
as a line feed"
)";
test_check_emit_check(yaml, [](Tree const &t){
ASSERT_TRUE(t.rootref().is_map());
EXPECT_EQ(t["Folding"].val(), csubstr("Empty line\nas a line feed"));
EXPECT_EQ(t["Folding2"].val(), csubstr("Empty line\nas a line feed"));
EXPECT_EQ(t["Folding3"].val(), csubstr("Empty line\nas a line feed"));
});
}
TEST(double_quoted, test_suite_6SLA)
{
csubstr yaml = R"(
"foo\nbar:baz\tx \\$%^&*()x": 23
'x\ny:z\tx $%^&*()x': 24
)";
test_check_emit_check(yaml, [](Tree const &t){
ASSERT_TRUE(t.rootref().is_map());
ASSERT_TRUE(t.rootref().has_child("foo\nbar:baz\tx \\$%^&*()x"));
ASSERT_TRUE(t.rootref().has_child("x\\ny:z\\tx $%^&*()x"));
ASSERT_EQ(t["foo\nbar:baz\tx \\$%^&*()x"].val(), csubstr("23"));
ASSERT_EQ(t["x\\ny:z\\tx $%^&*()x"].val(), csubstr("24"));
});
}
TEST(double_quoted, test_suite_6WPF)
{
csubstr yaml = R"(
"
foo
bar
baz
"
)";
test_check_emit_check(yaml, [](Tree const &t){
ASSERT_TRUE(t.rootref().is_val());
EXPECT_EQ(t.rootref().val(), csubstr(" foo\nbar\nbaz "));
});
}
TEST(double_quoted, test_suite_9TFX)
{
csubstr yaml = R"(
" 1st non-empty
2nd non-empty
3rd non-empty "
)";
test_check_emit_check(yaml, [](Tree const &t){
ASSERT_TRUE(t.rootref().is_val());
EXPECT_EQ(t.rootref().val(), csubstr(" 1st non-empty\n2nd non-empty 3rd non-empty "));
});
}
TEST(double_quoted, test_suite_G4RS)
{
csubstr yaml = R"(---
unicode: "\u263A\u2705\U0001D11E"
control: "\b1998\t1999\t2000\n"
#hex esc: "\x0d\x0a is \r\n"
#---
#- "\x0d\x0a is \r\n"
#---
#{hex esc: "\x0d\x0a is \r\n"}
#---
#["\x0d\x0a is \r\n"]
)";
test_check_emit_check(yaml, [](Tree const &t){
EXPECT_EQ(t.docref(0)["unicode"].val(), csubstr(R"(☺✅𝄞)"));
EXPECT_EQ(t.docref(0)["control"].val(), csubstr("\b1998\t1999\t2000\n"));
//EXPECT_EQ(t.docref(0)["hex esc"].val(), csubstr("\r\n is \r\n")); TODO
//EXPECT_EQ(t.docref(1)[0].val(), csubstr("\r\n is \r\n"));
//EXPECT_EQ(t.docref(2)[0].val(), csubstr("\r\n is \r\n"));
//EXPECT_EQ(t.docref(3)[0].val(), csubstr("\r\n is \r\n"));
});
}
TEST(double_quoted, test_suite_KSS4)
{
csubstr yaml = R"(
---
"quoted
string"
--- "quoted
string"
---
- "quoted
string"
---
- "quoted
string"
---
"quoted
string": "quoted
string"
---
"quoted
string": "quoted
string"
)";
test_check_emit_check(yaml, [](Tree const &t){
EXPECT_EQ(t.docref(0).val(), "quoted string");
EXPECT_EQ(t.docref(1).val(), "quoted string");
EXPECT_EQ(t.docref(2)[0].val(), "quoted string");
EXPECT_EQ(t.docref(3)[0].val(), "quoted string");
EXPECT_EQ(t.docref(4)["quoted string"].val(), "quoted string");
EXPECT_EQ(t.docref(5)["quoted string"].val(), "quoted string");
});
}
TEST(double_quoted, test_suite_NAT4)
{
csubstr yaml = R"(
a: '
'
b: '
'
c: "
"
d: "
"
e: '
'
f: "
"
g: '
'
h: "
"
)";
test_check_emit_check(yaml, [](Tree const &t){
EXPECT_EQ(t["a"].val(), csubstr(" "));
EXPECT_EQ(t["b"].val(), csubstr(" "));
EXPECT_EQ(t["c"].val(), csubstr(" "));
EXPECT_EQ(t["d"].val(), csubstr(" "));
EXPECT_EQ(t["e"].val(), csubstr("\n"));
EXPECT_EQ(t["f"].val(), csubstr("\n"));
EXPECT_EQ(t["g"].val(), csubstr("\n\n"));
EXPECT_EQ(t["h"].val(), csubstr("\n\n"));
});
}
TEST(double_quoted, test_suite_NP9H)
{
csubstr yaml = R"(
"folded
to a space,
to a line feed, or \
\ non-content"
)";
test_check_emit_check(yaml, [](Tree const &t){
ASSERT_TRUE(t.rootref().is_val());
EXPECT_EQ(t.rootref().val(), csubstr("folded to a space,\nto a line feed, or \t \tnon-content"));
});
}
TEST(double_quoted, test_suite_Q8AD)
{
csubstr yaml = R"(
"folded
to a space,
to a line feed, or \
\ non-content"
)";
test_check_emit_check(yaml, [](Tree const &t){
ASSERT_TRUE(t.rootref().is_val());
EXPECT_EQ(t.rootref().val(), csubstr("folded to a space,\nto a line feed, or \t \tnon-content"));
});
}
TEST(double_quoted, test_suite_R4YG)
{
csubstr yaml = R"(
- "
detected
"
)";
test_check_emit_check(yaml, [](Tree const &t){
EXPECT_EQ(t[0].val(), csubstr("\t\ndetected\n"));
});
}
//-----------------------------------------------------------------------------
void verify_error_is_reported(csubstr case_name, csubstr yaml, Location loc={})
{
SCOPED_TRACE(case_name);
SCOPED_TRACE(yaml);
Tree tree;
ExpectError::do_check(&tree, [&](){
parse_in_arena(yaml, &tree);
}, loc);
}
TEST(double_quoted, error_on_unmatched_quotes)
{
verify_error_is_reported("map block", R"(foo: "'
bar: "")");
verify_error_is_reported("seq block", R"(- "'
- "")");
verify_error_is_reported("map flow", R"({foo: "', bar: ""})");
verify_error_is_reported("seq flow", R"(["', ""])");
}
TEST(double_quoted, error_on_unmatched_quotes_with_escapes)
{
verify_error_is_reported("map block", R"(foo: "\"'
bar: "")");
verify_error_is_reported("seq block", R"(- "\"'
- "")");
verify_error_is_reported("map flow", R"({foo: "\"', bar: ""})");
verify_error_is_reported("seq flow", R"(["\"', ""])");
}
TEST(double_quoted, error_on_unmatched_quotes_at_end)
{
verify_error_is_reported("map block", R"(foo: ""
bar: "')");
verify_error_is_reported("seq block", R"(- ""
- "')");
verify_error_is_reported("map flow", R"({foo: "", bar: "'})");
verify_error_is_reported("seq flow", R"(["", "'])");
}
TEST(double_quoted, error_on_unmatched_quotes_at_end_with_escapes)
{
verify_error_is_reported("map block", R"(foo: ""
bar: "\"')");
verify_error_is_reported("seq block", R"(- ""
- "\"')");
verify_error_is_reported("map flow", R"({foo: "", bar: "\"'})");
verify_error_is_reported("seq flow", R"(["", "\"'])");
}
TEST(double_quoted, error_on_unclosed_quotes)
{
verify_error_is_reported("map block", R"(foo: ",
bar: what)");
verify_error_is_reported("seq block", R"(- "
- what)");
verify_error_is_reported("map flow", R"({foo: ", bar: what})");
verify_error_is_reported("seq flow", R"([", what])");
}
TEST(double_quoted, error_on_unclosed_quotes_with_escapes)
{
verify_error_is_reported("map block", R"(foo: "\",
bar: what)");
verify_error_is_reported("seq block", R"(- "\"
- what)");
verify_error_is_reported("map flow", R"({foo: "\", bar: what})");
verify_error_is_reported("seq flow", R"(["\", what])");
}
TEST(double_quoted, error_on_unclosed_quotes_at_end)
{
verify_error_is_reported("map block", R"(foo: what
bar: ")");
verify_error_is_reported("seq block", R"(- what
- ")");
verify_error_is_reported("map flow", R"({foo: what, bar: "})");
verify_error_is_reported("seq flow", R"([what, "])");
}
TEST(double_quoted, error_on_unclosed_quotes_at_end_with_escapes)
{
verify_error_is_reported("map block", R"(foo: what
bar: "\")");
verify_error_is_reported("seq block", R"(- what
- "\")");
verify_error_is_reported("map flow", R"({foo: what, bar: "\"})");
verify_error_is_reported("seq flow", R"([what, "\"])");
}
TEST(double_quoted, error_on_bad_utf_codepoints)
{
verify_error_is_reported("incomplete \\x 0", R"(foo: "\x")");
verify_error_is_reported("incomplete \\x 1", R"(foo: "\x1")");
verify_error_is_reported("bad value \\x" , R"(foo: "\xko")");
verify_error_is_reported("incomplete \\u 0", R"(foo: "\u")");
verify_error_is_reported("incomplete \\u 1", R"(foo: "\u1")");
verify_error_is_reported("incomplete \\u 2", R"(foo: "\u12")");
verify_error_is_reported("incomplete \\u 3", R"(foo: "\u123")");
verify_error_is_reported("bad value \\u" , R"(foo: "\ukoko")");
verify_error_is_reported("incomplete \\U 0", R"(foo: "\U")");
verify_error_is_reported("incomplete \\U 1", R"(foo: "\U1")");
verify_error_is_reported("incomplete \\U 2", R"(foo: "\U12")");
verify_error_is_reported("incomplete \\U 3", R"(foo: "\U123")");
verify_error_is_reported("incomplete \\U 4", R"(foo: "\U1234")");
verify_error_is_reported("incomplete \\U 5", R"(foo: "\U12345")");
verify_error_is_reported("incomplete \\U 6", R"(foo: "\U123456")");
verify_error_is_reported("incomplete \\U 7", R"(foo: "\U1234567")");
verify_error_is_reported("bad value \\U" , R"(foo: "\Ukokokoko")");
}
TEST(double_quoted, github253)
{
{
Tree tree;
NodeRef root = tree.rootref();
root |= MAP;
root["t"] = "t't\\nt";
root["t"] |= _WIP_VAL_DQUO;
std::string s = emitrs_yaml<std::string>(tree);
Tree tree2 = parse_in_arena(to_csubstr(s));
EXPECT_EQ(tree2["t"].val(), tree["t"].val());
}
{
Tree tree;
NodeRef root = tree.rootref();
root |= MAP;
root["t"] = "t't\\nt";
root["t"] |= _WIP_VAL_SQUO;
std::string s = emitrs_yaml<std::string>(tree);
Tree tree2 = parse_in_arena(to_csubstr(s));
EXPECT_EQ(tree2["t"].val(), tree["t"].val());
}
{
Tree tree;
NodeRef root = tree.rootref();
root |= MAP;
root["s"] = "t\rt";
root["s"] |= _WIP_VAL_DQUO;
std::string s = emitrs_yaml<std::string>(tree);
EXPECT_EQ(s, "s: \"t\\rt\"\n");
Tree tree2 = parse_in_arena(to_csubstr(s));
EXPECT_EQ(tree2["s"].val(), tree["s"].val());
}
}
//-----------------------------------------------------------------------------
//-----------------------------------------------------------------------------
//-----------------------------------------------------------------------------
CASE_GROUP(DOUBLE_QUOTED)
{
ADD_CASE_TO_GROUP("dquoted, only text",
R"("Some text without any quotes."
)",
N(DOCVAL | VALQUO, "Some text without any quotes.")
);
ADD_CASE_TO_GROUP("dquoted, with single quotes",
R"("Some text 'with single quotes'")",
N(DOCVAL|VALQUO, "Some text 'with single quotes'")
);
ADD_CASE_TO_GROUP("dquoted, with double quotes",
R"("Some \"text\" \"with double quotes\"")",
N(DOCVAL|VALQUO, "Some \"text\" \"with double quotes\"")
);
ADD_CASE_TO_GROUP("dquoted, with single and double quotes",
R"("Some text 'with single quotes' \"and double quotes\".")",
N(DOCVAL|VALQUO, "Some text 'with single quotes' \"and double quotes\".")
);
ADD_CASE_TO_GROUP("dquoted, with escapes",
R"("Some text with escapes \\n \\r \\t")",
N(DOCVAL|VALQUO, "Some text with escapes \\n \\r \\t")
);
ADD_CASE_TO_GROUP("dquoted, with newline",
R"("Some text with\nnewline")",
N(DOCVAL|VALQUO, "Some text with\nnewline")
);
ADD_CASE_TO_GROUP("dquoted, with tabs",
R"("\tSome\ttext\twith\ttabs\t")",
N(DOCVAL|VALQUO, "\tSome\ttext\twith\ttabs\t")
);
ADD_CASE_TO_GROUP("dquoted, with tabs 4ZYM",
R"(plain: text
lines
quoted: "text
lines"
block: |
text
lines
)",
L{N("plain", "text lines"),
N(KEYVAL|VALQUO, "quoted", "text lines"),
N(KEYVAL|VALQUO,"block", "text\n \tlines\n")}
);
ADD_CASE_TO_GROUP("dquoted, with tabs 7A4E",
R"(" 1st non-empty
2nd non-empty
3rd non-empty ")",
N(DOCVAL|VALQUO, " 1st non-empty\n2nd non-empty 3rd non-empty ")
);
ADD_CASE_TO_GROUP("dquoted, with tabs TL85",
R"("
foo
bar
baz
")", N(DOCVAL|VALQUO, " foo\nbar\nbaz "));
ADD_CASE_TO_GROUP("dquoted, all",
R"("Several lines of text,
containing 'single quotes' and \"double quotes\". \
Escapes (like \\n) work.\nIn addition,
newlines can be esc\
aped to prevent them from being converted to a space.
Newlines can also be added by leaving a blank line.
Leading whitespace on lines is ignored."
)",
N(DOCVAL|VALQUO, "Several lines of text, containing 'single quotes' and \"double quotes\". Escapes (like \\n) work.\nIn addition, newlines can be escaped to prevent them from being converted to a space.\nNewlines can also be added by leaving a blank line. Leading whitespace on lines is ignored.")
);
ADD_CASE_TO_GROUP("dquoted, empty",
R"("")",
N(DOCVAL|VALQUO, "")
);
ADD_CASE_TO_GROUP("dquoted, blank",
R"(
- ""
- " "
- " "
- " "
- " "
)",
L{N(QV, ""), N(QV, " "), N(QV, " "), N(QV, " "), N(QV, " ")}
);
ADD_CASE_TO_GROUP("dquoted, numbers", // these should not be quoted when emitting
R"(
- -1
- -1.0
- +1.0
- 1e-2
- 1e+2
)",
L{N("-1"), N("-1.0"), N("+1.0"), N("1e-2"), N("1e+2")}
);
ADD_CASE_TO_GROUP("dquoted, trailing space",
R"('a aaaa ')",
N(DOCVAL|VALQUO, "a aaaa ")
);
ADD_CASE_TO_GROUP("dquoted, leading space",
R"(' a aaaa')",
N(DOCVAL|VALQUO, " a aaaa")
);
ADD_CASE_TO_GROUP("dquoted, trailing and leading space",
R"(' 012345 ')",
N(DOCVAL|VALQUO, " 012345 ")
);
ADD_CASE_TO_GROUP("dquoted, 1 dquote",
R"("\"")",
N(DOCVAL|VALQUO, "\"")
);
ADD_CASE_TO_GROUP("dquoted, 2 dquotes",
R"("\"\"")",
N(DOCVAL|VALQUO, "\"\"")
);
ADD_CASE_TO_GROUP("dquoted, 3 dquotes",
R"("\"\"\"")",
N(DOCVAL|VALQUO, "\"\"\"")
);
ADD_CASE_TO_GROUP("dquoted, 4 dquotes",
R"("\"\"\"\"")",
N(DOCVAL|VALQUO, "\"\"\"\"")
);
ADD_CASE_TO_GROUP("dquoted, 5 dquotes",
R"("\"\"\"\"\"")",
N(DOCVAL|VALQUO, "\"\"\"\"\"")
);
ADD_CASE_TO_GROUP("dquoted, example 2",
R"("This is a key\nthat has multiple lines\n": and this is its value
)",
L{N(QK, "This is a key\nthat has multiple lines\n", "and this is its value")}
);
ADD_CASE_TO_GROUP("dquoted, example 2.1",
R"("This is a key
that has multiple lines
": and this is its value
)",
L{N(QK, "This is a key\nthat has multiple lines\n", "and this is its value")}
);
}
} // namespace yml
} // namespace c4

@@ -8,7 +8,7 @@
#endif
#include <c4/fs/fs.hpp>
#include "./test_case.hpp"
#include "./test_lib/test_case.hpp"
#include <gtest/gtest.h>
@@ -137,17 +137,26 @@ TEST(emit, empty_tree)
TEST(emit, existing_tree)
{
const Tree t = parse_in_arena("[foo, bar]");
std::string expected = "- foo\n- bar\n";
std::string expected = "[foo,bar]";
std::string expected_json = R"(["foo","bar"])";
test_emits(t, expected, expected_json);
}
TEST(emit, no_node)
{
const Tree t = parse_in_arena("[foo, bar]");
std::string expected = "[foo,bar]";
std::string expected_json = R"(["foo","bar"])";
test_emits(t, NONE, expected, expected_json);
}
TEST(emit, existing_seq_node)
{
Tree nct = parse_in_arena("[foo, bar, [nested, seq], {nested: map}]");
Tree const& t = nct;
{
std::string expected = "- foo\n- bar\n- - nested\n - seq\n- nested: map\n";
SCOPED_TRACE("full");
std::string expected = "[foo,bar,[nested,seq],{nested: map}]";
std::string expected_json = R"(["foo","bar",["nested","seq"],{"nested": "map"}])";
{
SCOPED_TRACE("rootref");
@@ -163,6 +172,7 @@ TEST(emit, existing_seq_node)
}
}
{
SCOPED_TRACE("t[0]");
ConstNodeRef n = t[0];
std::string expected = "foo\n";
std::string expected_json = "\"foo\"";
@@ -174,7 +184,7 @@ TEST(emit, existing_seq_node)
SCOPED_TRACE("t, id");
test_emits(t, n.id(), expected, expected_json);
}
nct._add_flags(n.id(), _WIP_STYLE_FLOW_SL);
nct._add_flags(n.id(), FLOW_SL);
expected = "foo";
{
SCOPED_TRACE("t, id");
@@ -186,6 +196,7 @@ TEST(emit, existing_seq_node)
}
}
{
SCOPED_TRACE("t[1]");
ConstNodeRef n = t[1];
std::string expected = "bar\n";
std::string expected_json = "\"bar\"";
@@ -197,7 +208,7 @@ TEST(emit, existing_seq_node)
SCOPED_TRACE("t, id");
test_emits(t, n.id(), expected, expected_json);
}
nct._add_flags(n.id(), _WIP_STYLE_FLOW_SL);
nct._add_flags(n.id(), FLOW_SL);
expected = "bar";
{
SCOPED_TRACE("t, id");
@@ -210,8 +221,9 @@ TEST(emit, existing_seq_node)
}
{
SCOPED_TRACE("t[2]");
ConstNodeRef n = t[2];
std::string expected = "- nested\n- seq\n";
std::string expected = "[nested,seq]";
std::string expected_json = "[\"nested\",\"seq\"]";
{
SCOPED_TRACE("noderef");
@@ -222,7 +234,7 @@ TEST(emit, existing_seq_node)
test_emits(t, n.id(), expected, expected_json);
}
expected = "[nested,seq]";
nct._add_flags(n.id(), _WIP_STYLE_FLOW_SL);
nct._add_flags(n.id(), FLOW_SL);
{
SCOPED_TRACE("t, id");
test_emits(n, expected, expected_json);
@@ -233,8 +245,9 @@ TEST(emit, existing_seq_node)
}
}
{
SCOPED_TRACE("t[3]");
ConstNodeRef n = t[3];
std::string expected = "nested: map\n";
std::string expected = "{nested: map}";
std::string expected_json = "{\"nested\": \"map\"}";
{
SCOPED_TRACE("noderef");
@@ -245,7 +258,7 @@ TEST(emit, existing_seq_node)
test_emits(t, n.id(), expected, expected_json);
}
expected = "{nested: map}";
nct._add_flags(n.id(), _WIP_STYLE_FLOW_SL);
nct._add_flags(n.id(), FLOW_SL);
{
SCOPED_TRACE("t, id");
test_emits(n, expected, expected_json);
@@ -262,7 +275,8 @@ TEST(emit, existing_map_node)
Tree nct = parse_in_arena("{0: foo, 1: bar, 2: [nested, seq], 3: {nested: map}}");
Tree const& t = nct;
{
std::string expected = "0: foo\n1: bar\n2:\n - nested\n - seq\n3:\n nested: map\n";
SCOPED_TRACE("root");
std::string expected = "{0: foo,1: bar,2: [nested,seq],3: {nested: map}}";
std::string expected_json = R"({"0": "foo","1": "bar","2": ["nested","seq"],"3": {"nested": "map"}})";
{
SCOPED_TRACE("rootref");
@@ -278,6 +292,7 @@ TEST(emit, existing_map_node)
}
}
{
SCOPED_TRACE("t[0]");
ConstNodeRef n = t[0];
std::string expected = "0: foo\n";
std::string expected_json = "\"0\": \"foo\"";
@@ -290,7 +305,7 @@ TEST(emit, existing_map_node)
test_emits(t, n.id(), expected, expected_json);
}
expected = "0: foo";
nct._add_flags(n.id(), _WIP_STYLE_FLOW_SL);
nct._add_flags(n.id(), FLOW_SL);
{
SCOPED_TRACE("t, id");
test_emits(n, expected, expected_json);
@@ -301,6 +316,7 @@ TEST(emit, existing_map_node)
}
}
{
SCOPED_TRACE("t[1]");
ConstNodeRef n = t[1];
std::string expected = "1: bar\n";
std::string expected_json = "\"1\": \"bar\"";
@@ -313,7 +329,7 @@ TEST(emit, existing_map_node)
test_emits(t, n.id(), expected, expected_json);
}
expected = "1: bar";
nct._add_flags(n.id(), _WIP_STYLE_FLOW_SL);
nct._add_flags(n.id(), FLOW_SL);
{
SCOPED_TRACE("t, id");
test_emits(n, expected, expected_json);
@@ -324,8 +340,9 @@ TEST(emit, existing_map_node)
}
}
{
SCOPED_TRACE("t[2]");
ConstNodeRef n = t[2];
std::string expected = "2:\n - nested\n - seq\n";
std::string expected = "2: [nested,seq]";
std::string expected_json = "\"2\": [\"nested\",\"seq\"]";
{
SCOPED_TRACE("noderef");
@@ -335,8 +352,9 @@ TEST(emit, existing_map_node)
SCOPED_TRACE("t, id");
test_emits(t, n.id(), expected, expected_json);
}
expected = "2: [nested,seq]";
nct._add_flags(n.id(), _WIP_STYLE_FLOW_SL);
expected = "2:\n - nested\n - seq\n";
nct._rem_flags(n.id(), CONTAINER_STYLE);
nct._add_flags(n.id(), BLOCK);
{
SCOPED_TRACE("t, id");
test_emits(n, expected, expected_json);
@@ -347,8 +365,9 @@ TEST(emit, existing_map_node)
}
}
{
SCOPED_TRACE("t[3]");
ConstNodeRef n = t[3];
std::string expected = "3:\n nested: map\n";
std::string expected = "3: {nested: map}";
std::string expected_json = "\"3\": {\"nested\": \"map\"}";
{
SCOPED_TRACE("noderef");
@@ -358,8 +377,9 @@ TEST(emit, existing_map_node)
SCOPED_TRACE("t, id");
test_emits(t, n.id(), expected, expected_json);
}
expected = "3: {nested: map}";
nct._add_flags(n.id(), _WIP_STYLE_FLOW_SL);
expected = "3:\n nested: map\n";
nct._rem_flags(n.id(), CONTAINER_STYLE);
nct._add_flags(n.id(), BLOCK);
{
SCOPED_TRACE("t, id");
test_emits(n, expected, expected_json);
@@ -395,7 +415,7 @@ TEST(emit, percent_is_quoted)
TEST(emit, at_is_quoted__issue_309)
{
Tree ti = parse_in_arena("{at: [], backtick: []");
Tree ti = parse_in_arena("{at: [], backtick: []}");
ti["at"][0] << "@test";
ti["at"][1] = "@test2";
ti["at"][2] << "@";
@@ -432,7 +452,7 @@ TEST(emit, at_is_quoted__issue_309)
TEST(emit, at_is_quoted_only_in_the_beggining__issue_320)
{
Tree ti = parse_in_arena("{at: [], backtick: []");
Tree ti = parse_in_arena("{at: [], backtick: []}");
ti["at"].append_child() << "@test";
ti["at"].append_child() << "t@est";
ti["at"].append_child() << "test@";
@@ -480,6 +500,71 @@ TEST(emit, at_is_quoted_only_in_the_beggining__issue_320)
}
TEST(emit, error_on_emit_yaml_to_short_buffer)
{
csubstr yaml = "this is: not empty\n";
const Tree tree = parse_in_arena(yaml);
{
char too_small[2];
ExpectError::do_check([&]{
emit_yaml(tree, too_small);
});
ExpectError::do_check([&]{
emit_yaml(tree, too_small, /*error_on_excess*/true);
});
substr required = emit_yaml(tree, too_small, /*error_on_excess*/false);
EXPECT_EQ(required.str, nullptr);
EXPECT_EQ(required.len, yaml.len);
}
{
substr nothing;
EXPECT_EQ(nothing.str, nullptr);
EXPECT_EQ(nothing.len, 0u);
ExpectError::do_check([&]{
emit_yaml(tree, nothing);
});
ExpectError::do_check([&]{
emit_yaml(tree, nothing, /*error_on_excess*/true);
});
substr required = emit_yaml(tree, nothing, /*error_on_excess*/false);
EXPECT_EQ(required.str, nullptr);
EXPECT_EQ(required.len, yaml.len);
}
}
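The two checks above pin down the contract of the buffer overloads: with error_on_excess=false, an undersized buffer yields a substr with a null pointer and the required length. A minimal sketch (not part of the test file; buf is an illustrative caller-owned buffer) of the grow-and-retry pattern this is meant to support:
// sketch only: emit into a caller-owned buffer, growing it once if needed
std::vector<char> buf(64);
substr result = emit_yaml(tree, substr(buf.data(), buf.size()), /*error_on_excess*/false);
if(result.str == nullptr) // buffer was too small; result.len holds the required size
{
    buf.resize(result.len);
    result = emit_yaml(tree, substr(buf.data(), buf.size()), /*error_on_excess*/false);
}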
TEST(emit, error_on_emit_json_to_short_buffer)
{
csubstr json = "{\"this is\": \"not empty\"}";
const Tree tree = parse_in_arena(json);
{
char too_small[2];
ExpectError::do_check([&]{
emit_json(tree, too_small);
});
ExpectError::do_check([&]{
emit_json(tree, too_small, /*error_on_excess*/true);
});
substr required = emit_json(tree, too_small, /*error_on_excess*/false);
EXPECT_EQ(required.str, nullptr);
EXPECT_EQ(required.len, json.len);
}
{
substr nothing;
EXPECT_EQ(nothing.str, nullptr);
EXPECT_EQ(nothing.len, 0u);
ExpectError::do_check([&]{
emit_json(tree, nothing);
});
ExpectError::do_check([&]{
emit_json(tree, nothing, /*error_on_excess*/true);
});
substr required = emit_json(tree, nothing, /*error_on_excess*/false);
EXPECT_EQ(required.str, nullptr);
EXPECT_EQ(required.len, json.len);
}
}
//-------------------------------------------
// this is needed to use the test case library
Case const* get_case(csubstr /*name*/)

@@ -1,4 +1,5 @@
#include "./test_group.hpp"
#include "./test_lib/test_group.hpp"
#include "./test_lib/test_group.def.hpp"
namespace c4 {
namespace yml {

@@ -1,9 +1,9 @@
#include "./test_group.hpp"
#include "./test_lib/test_group.hpp"
#include "./test_lib/test_group.def.hpp"
namespace c4 {
namespace yml {
TEST(explicit_key, test_suite_5WE3)
{
csubstr yaml = R"(
@@ -24,6 +24,26 @@ TEST(explicit_key, test_suite_5WE3)
});
}
TEST(explicit_key, test_suite_652Z)
{
csubstr yaml = R"(
?foo: bar # not an explicit key in RUNK
?bar: 42 # not an explicit key in RMAP|RKEY
?baz:
?bat: 24 # not an explicit key in RMAP|RVAL
)";
test_check_emit_check(yaml, [](Tree const &t){
ConstNodeRef r = t.rootref();
ASSERT_TRUE(r.has_child("?foo"));
ASSERT_TRUE(r.has_child("?bar"));
ASSERT_TRUE(r.has_child("?baz"));
EXPECT_EQ(r["?foo"].val(), "bar");
EXPECT_EQ(r["?bar"].val(), "42");
ASSERT_TRUE(r["?baz"].has_child("?bat"));
ASSERT_EQ(r["?baz"]["?bat"], "24");
});
}
TEST(explicit_key, test_suite_DFF7_v1)
{
@@ -91,29 +111,6 @@ TEST(explicit_key, test_suite_FRK4)
}
TEST(explicit_key, test_suite_M2N8)
{
csubstr yaml = R"(
- ? : x
- ? :
- ? :
)";
test_check_emit_check(yaml, [](Tree const &t){
ASSERT_TRUE(t.rootref().is_seq());
ASSERT_EQ(t.rootref().num_children(), 3u);
ASSERT_EQ(t[0].num_children(), 1u);
EXPECT_EQ(t[0][0].key(), csubstr{});
EXPECT_EQ(t[0][0].val(), "x");
ASSERT_EQ(t[1].num_children(), 1u);
EXPECT_EQ(t[1][0].key(), csubstr{});
EXPECT_EQ(t[1][0].val(), csubstr{});
ASSERT_EQ(t[2].num_children(), 1u);
EXPECT_EQ(t[2][0].key(), csubstr{});
EXPECT_EQ(t[2][0].val(), csubstr{});
});
}
//-----------------------------------------------------------------------------
//-----------------------------------------------------------------------------
//-----------------------------------------------------------------------------
@@ -121,7 +118,7 @@ TEST(explicit_key, test_suite_M2N8)
CASE_GROUP(EXPLICIT_KEY)
{
//
ADD_CASE_TO_GROUP("explicit key, last value missing",
R"(
? a
@@ -134,22 +131,68 @@ R"(
? a
? b
?
)",
N(STREAM, L{
N(DOC|MB, L{
N(KP|VP, "a", {}),
N(KP|VP, "b", {}),
N(KP|VP, "", {})
}),
N(DOC|MB, TL("!!set", L{
N(KP|VP, "a", {}),
N(KP|VP, "b", {}),
})),
N(DOC|MB, TL("!!set", L{
N(KP|VP, "a", {}),
N(KP|VP, "b", {}),
N(KP|VP, "", {})
})),
})
);
ADD_CASE_TO_GROUP("explicit key, all values missing",
R"(
?
?
?
)",
N(MB, L{
N(KP|VP, "", {}),
N(KP|VP, "", {}),
N(KP|VP, "", {}),
})
);
ADD_CASE_TO_GROUP("explicit key, last value missing, end doc",
R"(
? a
? b
?
...
--- !!set # test that we do not add any last item
? a
? b
...
--- !!set # test that we do add the last item
? a
? b
?
...
)",
N(STREAM, L{
N(DOCMAP, L{
N(KEYVAL, "a", {}),
N(KEYVAL, "b", {}),
N(KEYVAL, "", {})
N(DOC|MB, L{
N(KP|VP, "a", {}),
N(KP|VP, "b", {}),
N(KP|VP, "", {})
}),
N(DOCMAP, TL("!!set", L{
N(KEYVAL, "a", {}),
N(KEYVAL, "b", {}),
N(DOC|MB, TL("!!set", L{
N(KP|VP, "a", {}),
N(KP|VP, "b", {}),
})),
N(DOCMAP, TL("!!set", L{
N(KEYVAL, "a", {}),
N(KEYVAL, "b", {}),
N(KEYVAL, "", {})
N(DOC|MB, TL("!!set", L{
N(KP|VP, "a", {}),
N(KP|VP, "b", {}),
N(KP|VP, "", {})
})),
})
);
@@ -162,31 +205,16 @@ a!"#$%&'()*+,-./09:;<=>?@AZ[\]^_`az{|}~: safe
-foo: safe dash
this is#not: a comment
)",
L{
N("a!\"#$%&'()*+,-./09:;<=>?@AZ[\\]^_`az{|}~", "safe"),
N("?foo", "safe question mark"),
N(":foo", "safe colon"),
N("-foo", "safe dash"),
N("this is#not", "a comment"),
});
N(MB, L{
N(KP|VP, "a!\"#$%&'()*+,-./09:;<=>?@AZ[\\]^_`az{|}~", "safe"),
N(KP|VP, "?foo", "safe question mark"),
N(KP|VP, ":foo", "safe colon"),
N(KP|VP, "-foo", "safe dash"),
N(KP|VP, "this is#not", "a comment"),
})
);
ADD_CASE_TO_GROUP("explicit key, ambiguity 2EBW, expl",
R"({
a!"#$%&'()*+-./09:;<=>?@AZ[\]^_`az{|~: safe,
?foo: safe question mark,
:foo: safe colon,
-foo: safe dash,
this is#not: a comment,
})",
L{
N("a!\"#$%&'()*+-./09:;<=>?@AZ[\\]^_`az{|~", "safe"),
N("?foo", "safe question mark"),
N(":foo", "safe colon"),
N("-foo", "safe dash"),
N("this is#not", "a comment"),
});
ADD_CASE_TO_GROUP("explicit key, ambiguity 2EBW, impl seq",
ADD_CASE_TO_GROUP("explicit key, ambiguity 2EBW, block seq",
R"(
- a!"#$%&'()*+,-./09:;<=>?@AZ[\]^_`az{|}~
- ?foo
@@ -194,36 +222,21 @@ R"(
- -foo
- this is#not:a comment
)",
L{
N("a!\"#$%&'()*+,-./09:;<=>?@AZ[\\]^_`az{|}~"),
N("?foo"),
N(":foo"),
N("-foo"),
N("this is#not:a comment"),
});
ADD_CASE_TO_GROUP("explicit key, ambiguity 2EBW, expl seq",
R"([
a!"#$%&'()*+-./09:;<=>?@AZ[\^_`az{|}~,
?foo,
:foo,
-foo,
this is#not:a comment,
])",
L{
N("a!\"#$%&'()*+-./09:;<=>?@AZ[\\^_`az{|}~"),
N("?foo"),
N(":foo"),
N("-foo"),
N("this is#not:a comment"),
});
N(SB, L{
N(VP, "a!\"#$%&'()*+,-./09:;<=>?@AZ[\\]^_`az{|}~"),
N(VP, "?foo"),
N(VP, ":foo"),
N(VP, "-foo"),
N(VP, "this is#not:a comment"),
})
);
ADD_CASE_TO_GROUP("explicit key with line break in between",
R"(
? an explicit key
: its value
)",
L{N("an explicit key", "its value")}
N(MB, L{N(KP|VP, "an explicit key", "its value")})
);
ADD_CASE_TO_GROUP("explicit key 2nd, inside explicit map",
@@ -233,10 +246,10 @@ R"(
? an explicit key: another value,
}
)",
L{
N("a simple key", "a value"),
N("an explicit key", "another value"),
}
N(MFS, L{
N(KP|VP, "a simple key", "a value"),
N(KP|VP, "an explicit key", "another value"),
})
);
ADD_CASE_TO_GROUP("explicit key 1st, inside explicit map",
@@ -246,63 +259,54 @@ R"(
a simple key: a value,
}
)",
L{
N("an explicit key", "another value"),
N("a simple key", "a value"),
}
N(MFS, L{
N(KP|VP, "an explicit key", "another value"),
N(KP|VP, "a simple key", "a value"),
})
);
ADD_CASE_TO_GROUP("explicit key 2nd",
ADD_CASE_TO_GROUP("M2N8", EXPECT_PARSE_ERROR,
R"(
- ? : x
- ? :
- ? :
)",
LineCol(2, 5)
);
ADD_CASE_TO_GROUP("explicit key 2nd", EXPECT_PARSE_ERROR,
R"(
a simple key: a value
? an explicit key: another value
)",
L{
N("a simple key", "a value"),
N("an explicit key", "another value"),
}
LineCol(3, 19)
);
ADD_CASE_TO_GROUP("explicit key 1st",
ADD_CASE_TO_GROUP("explicit key 1st", EXPECT_PARSE_ERROR,
R"(
? an explicit key: another value
a simple key: a value
)",
L{
N("an explicit key", "another value"),
N("a simple key", "a value"),
}
LineCol(2, 19)
);
ADD_CASE_TO_GROUP("explicit key nested in a map, 1st",
ADD_CASE_TO_GROUP("explicit key nested in a map, 1st", EXPECT_PARSE_ERROR,
R"(
map:
? an explicit key: another value
a simple key: a value
? an explicit key deindented: its value
)",
L{
N("map", L{
N("an explicit key", "another value"),
N("a simple key", "a value"),
}),
N("an explicit key deindented", "its value")
}
LineCol(3, 21)
);
ADD_CASE_TO_GROUP("explicit key nested in a seq, 1st",
ADD_CASE_TO_GROUP("explicit key nested in a seq, 1st", EXPECT_PARSE_ERROR,
R"(
- ? an explicit key: another value
a simple key: a value
- ? another explicit key: its value
)",
L{
N(L{
N("an explicit key", "another value"),
N("a simple key", "a value"),
}),
N(L{N("another explicit key", "its value")})
}
LineCol(2, 21)
);
ADD_CASE_TO_GROUP("explicit block key, literal, clip",
@@ -312,9 +316,9 @@ R"(? |
: and this is its value
)",
L{
N(QK, "This is a key\nthat has multiple lines\n", "and this is its value")
}
N(MB, L{
N(KL|VP, "This is a key\nthat has multiple lines\n", "and this is its value")
})
);
ADD_CASE_TO_GROUP("explicit block key, literal, keep",
@@ -324,9 +328,9 @@ R"(? |+
: and this is its value
)",
L{
N(QK, "This is a key\nthat has multiple lines\n\n", "and this is its value")
}
N(MB, L{
N(KL|VP, "This is a key\nthat has multiple lines\n\n", "and this is its value")
})
);
ADD_CASE_TO_GROUP("explicit block key, literal, strip",
@@ -336,9 +340,9 @@ R"(? |-
: and this is its value
)",
L{
N(QK, "This is a key\nthat has multiple lines", "and this is its value")
}
N(MB, L{
N(KL|VP, "This is a key\nthat has multiple lines", "and this is its value")
})
);
ADD_CASE_TO_GROUP("explicit block key, folded, clip",
@@ -348,9 +352,9 @@ R"(? >
: and this is its value
)",
L{
N(QK, "This is a key that has multiple lines\n", "and this is its value")
}
N(MB, L{
N(KF|VP, "This is a key that has multiple lines\n", "and this is its value")
})
);
ADD_CASE_TO_GROUP("explicit block key, folded, keep",
@@ -360,9 +364,9 @@ R"(? >+
: and this is its value
)",
L{
N(QK, "This is a key that has multiple lines\n\n", "and this is its value")
}
N(MB, L{
N(KF|VP, "This is a key that has multiple lines\n\n", "and this is its value")
})
);
ADD_CASE_TO_GROUP("explicit block key, folded, strip",
@@ -372,9 +376,9 @@ R"(? >-
: and this is its value
)",
L{
N(QK, "This is a key that has multiple lines", "and this is its value")
}
N(MB, L{
N(KF|VP, "This is a key that has multiple lines", "and this is its value")
})
);
ADD_CASE_TO_GROUP("explicit key, missing val 7W2P",
@@ -385,12 +389,12 @@ c:
? d
e:
)",
N(MAP, L{
N(KEYVAL, "a", {}),
N(KEYVAL, "b", {}),
N(KEYVAL, "c", {}),
N(KEYVAL, "d", {}),
N(KEYVAL, "e", {}),
N(MB, L{
N(KP|VP, "a", {}),
N(KP|VP, "b", {}),
N(KP|VP, "c", {}),
N(KP|VP, "d", {}),
N(KP|VP, "e", {}),
})
);
@@ -403,13 +407,13 @@ a: 1
!!str e: 4
? f
)",
N(MAP, L{
N("a", "1"),
N(KEYVAL, "b", {}),
N("c", AR(KEYANCH, "anchor"), "3"),
N(KEYVAL, "d", {}),
N(TS("!!str", "e"), "4"),
N(KEYVAL, "f", {}),
N(MB, L{
N(KP|VP, "a", "1"),
N(KP|VP, "b", {}),
N(KP|VP, "c", AR(KEYANCH, "anchor"), "3"),
N(KP|VP, "d", {}),
N(KP|VP, TS("!!str", "e"), "4"),
N(KP|VP, "f", {}),
})
);

test/test_filter.cpp: new file, 1563 lines (diff suppressed because it is too large)

@@ -1,89 +0,0 @@
#include "./test_group.hpp"
namespace c4 {
namespace yml {
CASE_GROUP(GENERIC_MAP)
{
ADD_CASE_TO_GROUP("generic map",
R"(
a simple key: a value # The KEY token is produced here.
? a complex key
: another value
a mapping:
key 1: value 1
key 2: value 2
a sequence:
- item 1
- item 2
)",
L{
N("a simple key", "a value"),
N("a complex key", "another value"),
N("a mapping", L{N("key 1", "value 1"), N("key 2", "value 2")}),
N("a sequence", L{N("item 1"), N("item 2")}),
}
);
ADD_CASE_TO_GROUP("seq nested in map",
R"(
items:
- part_no: A4786
descrip: Water Bucket (Filled)
price: 1.47
quantity: 4
- part_no: E1628
descrip: High Heeled "Ruby" Slippers
size: 8
price: 133.7
quantity: 1
)",
L{
N{"items", L{
N{L{N{"part_no", "A4786"},
N{"descrip", "Water Bucket (Filled)"},
N{"price", "1.47"},
N{"quantity", "4"},}},
N{L{N{"part_no", "E1628"},
N{"descrip", "High Heeled \"Ruby\" Slippers"},
N{"size", "8"},
N{"price", "133.7"},
N{"quantity", "1"},}}}},
}
);
ADD_CASE_TO_GROUP("seq nested in map, v2",
R"(
items:
-
part_no: A4786
descrip: Water Bucket (Filled)
price: 1.47
quantity: 4
-
part_no: E1628
descrip: High Heeled "Ruby" Slippers
size: 8
price: 133.7
quantity: 1
)",
L{
N{"items", L{
N{L{N{"part_no", "A4786"},
N{"descrip", "Water Bucket (Filled)"},
N{"price", "1.47"},
N{"quantity", "4"},}},
N{L{N{"part_no", "E1628"},
N{"descrip", "High Heeled \"Ruby\" Slippers"},
N{"size", "8"},
N{"price", "133.7"},
N{"quantity", "1"},}}}},
}
);
}
} // namespace yml
} // namespace c4

@@ -1,47 +0,0 @@
#include "./test_group.hpp"
namespace c4 {
namespace yml {
CASE_GROUP(GENERIC_SEQ)
{
ADD_CASE_TO_GROUP("generic seq v0",
R"(
- item 1
- item 2
- - item 3.1
- item 3.2
- key 1: value 1
key 2: value 2
)",
L{
N("item 1"),
N("item 2"),
N(L{N("item 3.1"), N("item 3.2")}),
N(L{N("key 1", "value 1"), N("key 2", "value 2")})
}
);
ADD_CASE_TO_GROUP("generic seq v1",
R"(
- item 1
- item 2
-
- item 3.1
- item 3.2
-
key 1: value 1
key 2: value 2
)",
L{
N("item 1"),
N("item 2"),
N(L{N("item 3.1"), N("item 3.2")}),
N(L{N("key 1", "value 1"), N("key 2", "value 2")})
}
);
}
} // namespace yml
} // namespace c4

@@ -1,4 +1,5 @@
#include "./test_group.hpp"
#include "./test_lib/test_group.hpp"
#include "./test_lib/test_group.def.hpp"
namespace c4 {
namespace yml {
@@ -12,7 +13,7 @@ TEST(github, 268)
node: *bar
)");
tree.resolve();
auto root = tree.rootref();
ConstNodeRef root = tree.crootref();
ASSERT_TRUE(root["map"].is_map());
ASSERT_TRUE(root["map"].has_child("node"));
ASSERT_EQ(root["map"]["node"], "bar");
@@ -31,8 +32,15 @@ TEST(github, 277)
)");
const char *keys[] = {"V", "W", "X"};
const char *vals[] = {"5", "4", "6"};
#ifdef RYML_DBG
print_tree("parsed", tree);
#endif
test_invariants(tree);
tree.resolve();
auto root = tree.rootref();
#ifdef RYML_DBG
print_tree("resolved", tree);
#endif
ConstNodeRef root = tree.crootref();
ASSERT_TRUE(root["B"].is_map());
size_t num_childs = root["B"].num_children();
size_t child = 0;
@@ -175,13 +183,13 @@ TEST(github, 31)
plist |= SEQ;
{
auto lumi = plist.append_child();
NodeRef lumi = plist.append_child();
lumi << "Lumi";
EXPECT_TRUE(lumi.is_val());
}
{
auto lumi = plist.append_child();
NodeRef lumi = plist.append_child();
lumi |= MAP;
lumi["value"] << 1;
lumi["relErr"] << 0.1;
@@ -190,7 +198,7 @@ TEST(github, 31)
{
ExpectError::check_assertion(&tree, [&](){
auto lumi = plist.append_child();
NodeRef lumi = plist.append_child();
lumi << "Lumi";
lumi |= MAP;
});
@@ -198,7 +206,7 @@ TEST(github, 31)
{
ExpectError::check_assertion(&tree, [&](){
auto lumi = plist.append_child();
NodeRef lumi = plist.append_child();
lumi << "Lumi";
lumi |= SEQ;
});
@@ -206,7 +214,7 @@ TEST(github, 31)
{
ExpectError::check_assertion(&tree, [&](){
auto lumi = plist.append_child();
NodeRef lumi = plist.append_child();
lumi |= MAP;
lumi << "Lumi";
});
@@ -224,7 +232,7 @@ CASE_GROUP(GITHUB_ISSUES)
ADD_CASE_TO_GROUP("github3-problem1",
R"(
translation: [-2, -2, 5])",
L{N("translation", L{N("-2"), N("-2"), N("5")})}
N(MB, L{N(KP|SFS, "translation", L{N(VP, "-2"), N(VP, "-2"), N(VP, "5")})})
);
// these must work without quotes
@@ -232,15 +240,16 @@ ADD_CASE_TO_GROUP("github3-problem2-ex1",
R"(
audio resource:
)",
L{N(KEYVAL, "audio resource", /*"~"*/{})}
N(MB, L{N(KP|VP, "audio resource", /*"~"*/{})})
);
ADD_CASE_TO_GROUP("github3-problem2-ex2",
R"(
audio resource:
more:
example: y
)",
L{N(KEYVAL, "audio resource", /*"~"*/{}), N("more", L{N("example", "y")})}
N(MB, L{N(KP|VP, "audio resource", /*"~"*/{}), N(KP|MB, "more", L{N(KP|VP, "example", "y")})})
);
ADD_CASE_TO_GROUP("github3-problem3",
@@ -250,12 +259,13 @@ R"(component:
data:
{} # but this was not working
)",
L{N("component", L{
N("type", "perspective camera component"),
N(KEYMAP, "some_data", L{}),
N(KEYMAP, "data", L{})
N(MB, L{
N(KP|MB, "component", L{
N(KP|VP, "type", "perspective camera component"),
N(KP|MFS, "some_data", L{}),
N(KP|MFS, "data", L{})
}
)}
)})
);
/* THIS IS CAUSING VS TO CRASH OUT OF HEAP SPACE
@@ -378,11 +388,11 @@ R"(
- 2.mp4
- 3.mp4
)",
L{
N(L{N("UQxRibHKEDI", L{N("0.mp4"), N("1.mp4"), N("2.mp4"), N("3.mp4")})}),
N(L{N("DcYsg8VFdC0", L{N("0.mp4"), N("1.mp4"), N("2.mp4"), N("3.mp4")})}),
N(L{N("Yt3ymqZXzLY", L{N("0.mp4"), N("1.mp4"), N("2.mp4"), N("3.mp4")})}),
}
N(SB, L{
N(MB, L{N(KP|SB, "UQxRibHKEDI", L{N(VP,"0.mp4"), N(VP,"1.mp4"), N(VP,"2.mp4"), N(VP,"3.mp4")})}),
N(MB, L{N(KP|SB, "DcYsg8VFdC0", L{N(VP,"0.mp4"), N(VP,"1.mp4"), N(VP,"2.mp4"), N(VP,"3.mp4")})}),
N(MB, L{N(KP|SB, "Yt3ymqZXzLY", L{N(VP,"0.mp4"), N(VP,"1.mp4"), N(VP,"2.mp4"), N(VP,"3.mp4")})}),
})
);
ADD_CASE_TO_GROUP("github6",
@@ -403,11 +413,13 @@ R"(videos:
- 2.mp4
- 3.mp4
)",
L{N("videos", L{
N(L{N("UQxRibHKEDI", L{N("0.mp4"), N("1.mp4"), N("2.mp4"), N("3.mp4")})}),
N(L{N("DcYsg8VFdC0", L{N("0.mp4"), N("1.mp4"), N("2.mp4"), N("3.mp4")})}),
N(L{N("Yt3ymqZXzLY", L{N("0.mp4"), N("1.mp4"), N("2.mp4"), N("3.mp4")})}),
})}
N(MB, L{
N(KP|SB, "videos", L{
N(MB, L{N(KP|SB, "UQxRibHKEDI", L{N(VP,"0.mp4"), N(VP,"1.mp4"), N(VP,"2.mp4"), N(VP,"3.mp4")})}),
N(MB, L{N(KP|SB, "DcYsg8VFdC0", L{N(VP,"0.mp4"), N(VP,"1.mp4"), N(VP,"2.mp4"), N(VP,"3.mp4")})}),
N(MB, L{N(KP|SB, "Yt3ymqZXzLY", L{N(VP,"0.mp4"), N(VP,"1.mp4"), N(VP,"2.mp4"), N(VP,"3.mp4")})}),
})
})
);
ADD_CASE_TO_GROUP("github34/ex1",
@@ -425,18 +437,18 @@ MessageID8: "MapRegion_HyrulePrairie"
MessageID9: 'MapRegion_HyrulePrairie'
MessageID0: "MapRegion_HyrulePrairie"
)",
L{
N(QV, "MessageID1", "MapRegion_HyrulePrairie"),
N(QV, "MessageID2", "MapRegion_HyrulePrairie"),
N(QV, "MessageID3", "MapRegion_HyrulePrairie"),
N(QV, "MessageID4", "MapRegion_HyrulePrairie"),
N(QV, "MessageID5", "MapRegion_HyrulePrairie"),
N(QV, "MessageID6", "MapRegion_HyrulePrairie"),
N(QV, "MessageID7", "MapRegion_HyrulePrairie"),
N(QV, "MessageID8", "MapRegion_HyrulePrairie"),
N(QV, "MessageID9", "MapRegion_HyrulePrairie"),
N(QV, "MessageID0", "MapRegion_HyrulePrairie"),
}
N(MB, L{
N(KP|VS, "MessageID1", "MapRegion_HyrulePrairie"),
N(KP|VD, "MessageID2", "MapRegion_HyrulePrairie"),
N(KP|VS, "MessageID3", "MapRegion_HyrulePrairie"),
N(KP|VD, "MessageID4", "MapRegion_HyrulePrairie"),
N(KP|VS, "MessageID5", "MapRegion_HyrulePrairie"),
N(KP|VD, "MessageID6", "MapRegion_HyrulePrairie"),
N(KP|VS, "MessageID7", "MapRegion_HyrulePrairie"),
N(KP|VD, "MessageID8", "MapRegion_HyrulePrairie"),
N(KP|VS, "MessageID9", "MapRegion_HyrulePrairie"),
N(KP|VD, "MessageID0", "MapRegion_HyrulePrairie"),
})
);
ADD_CASE_TO_GROUP("github34/ex2",
@@ -454,18 +466,18 @@ R"(
- MessageID9: 'MapRegion_HyrulePrairie'
- MessageID0: "MapRegion_HyrulePrairie"
)",
L{
N(L{N(QV, "MessageID1", "MapRegion_HyrulePrairie")}),
N(L{N(QV, "MessageID2", "MapRegion_HyrulePrairie")}),
N(L{N(QV, "MessageID3", "MapRegion_HyrulePrairie")}),
N(L{N(QV, "MessageID4", "MapRegion_HyrulePrairie")}),
N(L{N(QV, "MessageID5", "MapRegion_HyrulePrairie")}),
N(L{N(QV, "MessageID6", "MapRegion_HyrulePrairie")}),
N(L{N(QV, "MessageID7", "MapRegion_HyrulePrairie")}),
N(L{N(QV, "MessageID8", "MapRegion_HyrulePrairie")}),
N(L{N(QV, "MessageID9", "MapRegion_HyrulePrairie")}),
N(L{N(QV, "MessageID0", "MapRegion_HyrulePrairie")}),
}
N(SB, L{
N(MB, L{N(KP|VS, "MessageID1", "MapRegion_HyrulePrairie")}),
N(MB, L{N(KP|VD, "MessageID2", "MapRegion_HyrulePrairie")}),
N(MB, L{N(KP|VS, "MessageID3", "MapRegion_HyrulePrairie")}),
N(MB, L{N(KP|VD, "MessageID4", "MapRegion_HyrulePrairie")}),
N(MB, L{N(KP|VS, "MessageID5", "MapRegion_HyrulePrairie")}),
N(MB, L{N(KP|VD, "MessageID6", "MapRegion_HyrulePrairie")}),
N(MB, L{N(KP|VS, "MessageID7", "MapRegion_HyrulePrairie")}),
N(MB, L{N(KP|VD, "MessageID8", "MapRegion_HyrulePrairie")}),
N(MB, L{N(KP|VS, "MessageID9", "MapRegion_HyrulePrairie")}),
N(MB, L{N(KP|VD, "MessageID0", "MapRegion_HyrulePrairie")}),
})
);
ADD_CASE_TO_GROUP("github34",
@@ -498,22 +510,22 @@ R"(
- key2: true2
MessageID2: "MapRegion_HyrulePrairie2 "
)",
L{
N(L{N(QV, "MessageID1", "MapRegion_HyrulePrairie")}),
N(L{N(QV, "MessageID2", "MapRegion_HyrulePrairie")}),
N(L{N(QV, "MessageID3", "MapRegion_HyrulePrairie ")}),
N(L{N(QV, "MessageID4", "MapRegion_HyrulePrairie ")}),
N(L{N(QV, "MessageID5", "MapRegion_HyrulePrairie ")}),
N(L{N(QV, "MessageID6", "MapRegion_HyrulePrairie ")}),
N(L{N(QV, "MessageID7", "MapRegion_HyrulePrairie ")}),
N(L{N(QV, "MessageID8", "MapRegion_HyrulePrairie ")}),
N(L{N(QV, "MessageID9", "MapRegion_HyrulePrairie ")}),
N(L{N(QV, "MessageID10", "MapRegion_HyrulePrairie ")}),
N(L{N(QV, "MessageID11", "MapRegion_HyrulePrairie")}),
N(L{N(QV, "MessageID12", "MapRegion_HyrulePrairie")}),
N(L{N("key1", "true1"), N(QV, "MessageID1", "MapRegion_HyrulePrairie1 ")}),
N(L{N("key2", "true2"), N(QV, "MessageID2", "MapRegion_HyrulePrairie2 ")}),
}
N(SB, L{
N(MB, L{N(KP|VS, "MessageID1", "MapRegion_HyrulePrairie")}),
N(MB, L{N(KP|VD, "MessageID2", "MapRegion_HyrulePrairie")}),
N(MB, L{N(KP|VS, "MessageID3", "MapRegion_HyrulePrairie ")}),
N(MB, L{N(KP|VD, "MessageID4", "MapRegion_HyrulePrairie ")}),
N(MB, L{N(KP|VS, "MessageID5", "MapRegion_HyrulePrairie ")}),
N(MB, L{N(KP|VS, "MessageID6", "MapRegion_HyrulePrairie ")}),
N(MB, L{N(KP|VD, "MessageID7", "MapRegion_HyrulePrairie ")}),
N(MB, L{N(KP|VD, "MessageID8", "MapRegion_HyrulePrairie ")}),
N(MB, L{N(KP|VS, "MessageID9", "MapRegion_HyrulePrairie ")}),
N(MB, L{N(KP|VD, "MessageID10", "MapRegion_HyrulePrairie ")}),
N(MB, L{N(KP|VS, "MessageID11", "MapRegion_HyrulePrairie")}),
N(MB, L{N(KP|VD, "MessageID12", "MapRegion_HyrulePrairie")}),
N(MB, L{N(KP|VP, "key1", "true1"), N(KP|VS, "MessageID1", "MapRegion_HyrulePrairie1 ")}),
N(MB, L{N(KP|VP, "key2", "true2"), N(KP|VD, "MessageID2", "MapRegion_HyrulePrairie2 ")}),
})
);
ADD_CASE_TO_GROUP("github35/expected_error11", EXPECT_PARSE_ERROR,
@@ -553,7 +565,9 @@ R"(
);
ADD_CASE_TO_GROUP("github128/1", RESOLVE_REFS | EXPECT_PARSE_ERROR, "a: *invalid");
ADD_CASE_TO_GROUP("github128/2", RESOLVE_REFS/* | HAS_PARSE_ERROR*/, "*", N(DOCVAL, "*"));
ADD_CASE_TO_GROUP("github128/2", RESOLVE_REFS | EXPECT_PARSE_ERROR, "*");
ADD_CASE_TO_GROUP("github128/3", RESOLVE_REFS | EXPECT_PARSE_ERROR, "*abc");
ADD_CASE_TO_GROUP("github128/4", "*abc", N(VAL, "*abc", AR(VALREF, "*abc")));
ADD_CASE_TO_GROUP("github129", RESOLVE_REFS, R"(
ref: &ref ref_val
@@ -572,19 +586,21 @@ h: |- # don't resolve, it's just a string
*ref
i: |+ # don't resolve, it's just a string
*ref
)", L{
N("ref", "ref_val"),
N("a", "ref_val"), // this should be resolved
N(QV, "b", "*ref"), // this should not be resolved (just a string)
N(QV, "c", "*ref"), // this should not be resolved (just a string)
N(QV, "d", "*ref\n"), // this should not be resolved (just a string)
N(QV, "e", "*ref"), // this should not be resolved (just a string)
N(QV, "f", "*ref\n"), // this should not be resolved (just a string)
N(QV, "g", "*ref\n"), // this should not be resolved (just a string)
N(QV, "h", "*ref"), // this should not be resolved (just a string)
N(QV, "i", "*ref\n"), // this should not be resolved (just a string)
}
)",
N(MB, L{
N(KP|VP, "ref", "ref_val"),
N(KP|VP, "a", "ref_val"), // this should be resolved
N(KP|VS, "b", "*ref"), // this should not be resolved (just a string)
N(KP|VD, "c", "*ref"), // this should not be resolved (just a string)
N(KP|VF, "d", "*ref\n"), // this should not be resolved (just a string)
N(KP|VF, "e", "*ref"), // this should not be resolved (just a string)
N(KP|VF, "f", "*ref\n"), // this should not be resolved (just a string)
N(KP|VL, "g", "*ref\n"), // this should not be resolved (just a string)
N(KP|VL, "h", "*ref"), // this should not be resolved (just a string)
N(KP|VL, "i", "*ref\n"), // this should not be resolved (just a string)
})
);
}
} // namespace yml

@@ -1,4 +1,5 @@
#include "./test_group.hpp"
#include "./test_lib/test_group.hpp"
#include "./test_lib/test_group.def.hpp"
namespace c4 {
namespace yml {
@@ -6,14 +7,75 @@ namespace yml {
CASE_GROUP(INDENTATION)
{
ADD_CASE_TO_GROUP("indented doc", R"(
# this is an indented doc
---
- foo
- bar
- baz
ADD_CASE_TO_GROUP("plain scalar indentation, 0", R"(
plain
scalar
follows
)",
N(STREAM, L{N(DOCSEQ, L{N("foo"), N("bar"), N("baz")})})
N(VP, "plain scalar follows")
);
ADD_CASE_TO_GROUP("plain scalar indentation, 0.1", R"(
plain
scalar
follows
)",
N(VP, "plain scalar follows")
);
ADD_CASE_TO_GROUP("plain scalar indentation, 1",
R"(a: plain
scalar
follows
)",
N(MB, L{N(KP|VP, "a", "plain scalar follows")})
);
ADD_CASE_TO_GROUP("plain scalar indentation, 1, err", EXPECT_PARSE_ERROR,
R"(a: plain
scalar
follows
)",
LineCol(2, 7)
);
ADD_CASE_TO_GROUP("plain scalar indentation, 2",
R"(- plain
scalar
follows
)",
N(SB, L{N(VP, "plain scalar follows")})
);
ADD_CASE_TO_GROUP("plain scalar indentation, 2, err", EXPECT_PARSE_ERROR,
R"(- plain
scalar
follows
)",
LineCol(2, 1)
);
ADD_CASE_TO_GROUP("plain scalar indentation, 3",
R"(a: [plain
scalar
follows
]
)",
N(MB, L{N(KP|SFS, "a", L{N(VP, "plain scalar follows")})})
);
// FIXME: this should be a parse error
ADD_CASE_TO_GROUP("plain scalar indentation, 3.1",
R"(a: [plain
scalar
follows
]
b: c
)",
N(MB, L{
N(KP|SFS, "a", L{N(VP, "plain scalar follows")}),
N(KP|VP, "b", "c"),
})
);
ADD_CASE_TO_GROUP("4 chars",
@@ -31,14 +93,15 @@ another_key:
- val4
- val5
)",
L{
N("key", "value"),
N("another_key", L{
N("sub_key0", L{N("val0"), N("val1")}),
N("sub_key1", L{N("val2"), N("val3")}),
N("sub_key2", L{N("val4"), N("val5")}),
N(MB, L{
N(KP|VP, "key", "value"),
N(KP|MB, "another_key", L{
N(KP|SB, "sub_key0", L{N(VP, "val0"), N(VP, "val1")}),
N(KP|SB, "sub_key1", L{N(VP, "val2"), N(VP, "val3")}),
N(KP|SB, "sub_key2", L{N(VP, "val4"), N(VP, "val5")}),
})
});
})
);
ADD_CASE_TO_GROUP("2 chars + 4 chars, ex0",
R"(
@@ -55,14 +118,15 @@ another_key:
- val4
- val5
)",
L{
N("key", "value"),
N("another_key", L{
N("sub_key0", L{N("val0"), N("val1")}),
N("sub_key1", L{N("val2"), N("val3")}),
N("sub_key2", L{N("val4"), N("val5")}),
N(MB, L{
N(KP|VP, "key", "value"),
N(KP|MB, "another_key", L{
N(KP|SB, "sub_key0", L{N(VP, "val0"), N(VP, "val1")}),
N(KP|SB, "sub_key1", L{N(VP, "val2"), N(VP, "val3")}),
N(KP|SB, "sub_key2", L{N(VP, "val4"), N(VP, "val5")}),
})
});
})
);
ADD_CASE_TO_GROUP("2 chars + 4 chars, ex1",
R"(
@@ -79,14 +143,15 @@ another_key:
- val4
- val5
)",
L{
N("key", "value"),
N("another_key", L{
N("sub_key0", L{N("val0"), N("val1")}),
N("sub_key1", L{N("val2"), N("val3")}),
N("sub_key2", L{N("val4"), N("val5")}),
N(MB, L{
N(KP|VP, "key", "value"),
N(KP|MB, "another_key", L{
N(KP|SB, "sub_key0", L{N(VP, "val0"), N(VP, "val1")}),
N(KP|SB, "sub_key1", L{N(VP, "val2"), N(VP, "val3")}),
N(KP|SB, "sub_key2", L{N(VP, "val4"), N(VP, "val5")}),
})
});
})
);
ADD_CASE_TO_GROUP("2 chars + 4 chars, ex2",
R"(
@@ -103,18 +168,18 @@ another_key:
- val4
- val5
)",
L{
N("key", "value"),
N("another_key", L{
N("sub_key0", L{N("val0"), N("val1")}),
N("sub_key1", L{N("val2"), N("val3")}),
N("sub_key2", L{N("val4"), N("val5")}),
N(MB, L{
N(KP|VP, "key", "value"),
N(KP|MB, "another_key", L{
N(KP|SB, "sub_key0", L{N(VP, "val0"), N(VP, "val1")}),
N(KP|SB, "sub_key1", L{N(VP, "val2"), N(VP, "val3")}),
N(KP|SB, "sub_key2", L{N(VP, "val4"), N(VP, "val5")}),
})
});
})
);
ADD_CASE_TO_GROUP("non-indented blank lines",
R"(
matrix:
R"(matrix:
include: # next line is blank
@@ -153,18 +218,19 @@ matrix:
- env63
- env64 # next line has five spaces
)",
L{N("matrix", L{
N("include", L{
N("env01"), N("env02"), N("env03"), N("env04"),
N("env11"), N("env12"), N("env13"), N("env14"),
N("env21"), N("env22"), N("env23"), N("env24"),
N("env31"), N("env32"), N("env33"), N("env34"),
N("env41"), N("env42"), N("env43"), N("env44"),
N("env51"), N("env52"), N("env53"), N("env54"),
N("env61"), N("env62"), N("env63"), N("env64"),
}
)})
});
N(MB, L{N(KP|MB, "matrix", L{
N(KP|SB, "include", L{
N(VP, "env01"), N(VP, "env02"), N(VP, "env03"), N(VP, "env04"),
N(VP, "env11"), N(VP, "env12"), N(VP, "env13"), N(VP, "env14"),
N(VP, "env21"), N(VP, "env22"), N(VP, "env23"), N(VP, "env24"),
N(VP, "env31"), N(VP, "env32"), N(VP, "env33"), N(VP, "env34"),
N(VP, "env41"), N(VP, "env42"), N(VP, "env43"), N(VP, "env44"),
N(VP, "env51"), N(VP, "env52"), N(VP, "env53"), N(VP, "env54"),
N(VP, "env61"), N(VP, "env62"), N(VP, "env63"), N(VP, "env64"),
})
})
})
);
ADD_CASE_TO_GROUP("unnecessary indentation",
R"(
@@ -194,17 +260,18 @@ more_skip:
- a
- b
)",
L{
N("skip_commits", L{
N("files", L{N("a"), N("b"), N("c"), N("d"), N("e"), N("f"),}),
N("more_files", L{N("a"), N("b"),}),
N("even_more_files", L{N("a"), N("b"),}),
N(MB, L{
N(KP|MB, "skip_commits", L{
N(KP|SB, "files", L{N(VP, "a"), N(VP, "b"), N(VP, "c"), N(VP, "d"), N(VP, "e"), N(VP, "f"),}),
N(KP|SB, "more_files", L{N(VP, "a"), N(VP, "b"),}),
N(KP|SB, "even_more_files", L{N(VP, "a"), N(VP, "b"),}),
}),
N("more_skip", L{
N("files", L{N("a"), N("b"), N("c"), N("d"), N("e"), N("f"),}),
N("more_files", L{N("a"), N("b"),}),
N(KP|MB, "more_skip", L{
N(KP|SB, "files", L{N(VP, "a"), N(VP, "b"), N(VP, "c"), N(VP, "d"), N(VP, "e"), N(VP, "f"),}),
N(KP|SB, "more_files", L{N(VP, "a"), N(VP, "b"),}),
})
});
})
);
ADD_CASE_TO_GROUP("blank lines indented, 1 - at same scope",
@@ -219,11 +286,12 @@ skip_commits:
- d
)",
L{
N("skip_commits", L{
N("files", L{N("a"), N("b"), N("c"), N("d"),}),
N(MB, L{
N(KP|MB, "skip_commits", L{
N(KP|SB, "files", L{N(VP, "a"), N(VP, "b"), N(VP, "c"), N(VP, "d"),}),
}),
});
})
);
ADD_CASE_TO_GROUP("indentation at start",
R"(
@@ -234,11 +302,12 @@ R"(
- c
- d
)",
L{
N("foo", L{N("a"), N("b"),}),
N("bar", L{N("c"), N("d"),}),
});
N(MB, L{
N(KP|SB, "foo", L{N(VP, "a"), N(VP, "b"),}),
N(KP|SB, "bar", L{N(VP, "c"), N(VP, "d"),}),
})
);
ADD_CASE_TO_GROUP("unaligned comments",
R"(
stand2sit:
@@ -311,12 +380,12 @@ R"(
- f
- g
)",
L{
N("stand2sit", L{
N("map", "mirror"),
N("dat", L{N("a"), N("b"), N("b1"), N("b2"), N("b3"), N("b4"), N("b5"), N("b6"), N("b61"), N("b62"), N("b63"), N("b64"), N("b65"), N("b66"), N("b7"), N("b8"), N("b9"), N("b10"), N("e"), N("f"), N("g")}),
N(MB, L{
N(KP|MB, "stand2sit", L{
N(KP|VP, "map", "mirror"),
N(KP|SB, "dat", L{N(VP, "a"), N(VP, "b"), N(VP, "b1"), N(VP, "b2"), N(VP, "b3"), N(VP, "b4"), N(VP, "b5"), N(VP, "b6"), N(VP, "b61"), N(VP, "b62"), N(VP, "b63"), N(VP, "b64"), N(VP, "b65"), N(VP, "b66"), N(VP, "b7"), N(VP, "b8"), N(VP, "b9"), N(VP, "b10"), N(VP, "e"), N(VP, "f"), N(VP, "g")}),
}),
});
}));
ADD_CASE_TO_GROUP("issue83",
R"(
@@ -328,12 +397,14 @@ a:
c: d
)",
L{
N("e", L{N("f")}),
N("g", "h"),
N("a", L{N("b")}),
N("c", "d"),
});
N(MB, L{
N(KP|SB, "e", L{N(VP, "f")}),
N(KP|VP, "g", "h"),
N(KP|SB, "a", L{N(VP, "b")}),
N(KP|VP, "c", "d"),
})
);
}
} // namespace yml

View File

@@ -7,7 +7,7 @@
#include <c4/yml/detail/print.hpp>
#endif
#include "./test_case.hpp"
#include "./test_lib/test_case.hpp"
#include <gtest/gtest.h>
@@ -165,7 +165,7 @@ TEST(general, json_stream_operator)
ss << as_json(t);
str = ss.str();
}
Tree res = c4::yml::parse_in_place(to_substr(str));
Tree res = c4::yml::parse_json_in_place(to_substr(str));
EXPECT_EQ(res["foo"].val(), "1");
EXPECT_EQ(res["bar"].val(), "2");
EXPECT_EQ(res["foobar_barfoo:barfoo_foobar"].val(), "1001");
@@ -201,28 +201,22 @@ TEST(emit_json, issue72)
TEST(emit_json, issue121)
{
Tree t = parse_in_arena(R"(
string_value: "string"
number_value: "9001"
broken_value: "0.30.2"
)");
EXPECT_TRUE(t["string_value"].get()->m_type.type & VALQUO);
EXPECT_TRUE(t["number_value"].get()->m_type.type & VALQUO);
EXPECT_TRUE(t["broken_value"].get()->m_type.type & VALQUO);
csubstr json = R"({"string_value": "string","number_value": "9001","broken_value": "0.30.2"})";
const Tree t = parse_json_in_arena(json);
EXPECT_TRUE(t["string_value"].get()->m_type.type & VAL_DQUO);
EXPECT_TRUE(t["number_value"].get()->m_type.type & VAL_DQUO);
EXPECT_TRUE(t["broken_value"].get()->m_type.type & VAL_DQUO);
std::string out;
emitrs_json(t, &out);
EXPECT_EQ(out, R"({"string_value": "string","number_value": "9001","broken_value": "0.30.2"})");
EXPECT_EQ(out, json);
out.clear();
emitrs_yaml(t, &out);
EXPECT_EQ(out, R"(string_value: 'string'
number_value: '9001'
broken_value: '0.30.2'
)");
EXPECT_EQ(out, json);
}
TEST(emit_json, issue291)
{
Tree t = parse_in_arena("{}");
Tree t = parse_json_in_arena("{}");
t["james"] = "045";
auto s = emitrs_json<std::string>(t);
EXPECT_EQ(s, "{\"james\": \"045\"}");
@@ -245,7 +239,7 @@ TEST(emit_json, issue292)
EXPECT_FALSE(csubstr("1.2.3").is_number());
EXPECT_FALSE(csubstr("1.2.3").is_integer());
EXPECT_FALSE(csubstr("1.2.3").is_real());
Tree t = parse_in_arena("{}");
Tree t = parse_json_in_arena("{}");
t["james"] = "0.0.0";
EXPECT_EQ(emitrs_json<std::string>(t), "{\"james\": \"0.0.0\"}");
t["james"] = "0.1.0";
@@ -272,7 +266,7 @@ comment: |
TEST(emit_json, issue297_escaped_chars)
{
Tree t = parse_in_arena("{}");
Tree t = parse_json_in_arena("{}");
t["quote"] = "abc\"def";
t["newline"] = "abc\ndef";
t["tab"] = "abc\tdef";
@@ -309,7 +303,7 @@ TEST(emit_json, issue313_quoted_numbers__1)
EXPECT_TRUE(csubstr("0.13215841352939606").is_number()); // [REALLY_WEIRD5][9][0]
EXPECT_TRUE(csubstr("0.13215841352939606").is_real()); // [REALLY_WEIRD5][9][0]
EXPECT_FALSE(csubstr("0.13215841352939606").is_integer()); // [REALLY_WEIRD5][9][0]
const Tree t0 = parse_in_arena(R"([
const Tree t0 = parse_json_in_arena(R"([
0.99356698989868164,
0.0064908224157989025,
0.0064917667768895626,
@@ -330,13 +324,13 @@ TEST(emit_json, issue313_quoted_numbers__1)
TEST(emit_json, issue313_quoted_numbers__2)
{
Tree ti = parse_in_arena(R"({
WEIRD0: [0.99356698989868164, 1.0605627298355103],
OK1: [0, 0, 0],
WEIRD2: [0.0064908224157989025, 0.0064917667768895626, 0.0064947893843054771],
OK3: [6.6227097511291504, 6.8674740791320801, 7.0403199195861816, 7.5792555809020996, 7.9916787147521973, 8.136042594909668, 8.5505847930908203, 8.701807975769043, 8.926518440246582, 8.9484291076660156, 9.0740194320678711, 9.3788108825683594, 9.406926155090332],
WEIRD4: [0.91054189205169678, 0.98725020885467529, 1.070807933807373],
REALLY_WEIRD5: [
Tree ti = parse_json_in_arena(R"({
"WEIRD0": [0.99356698989868164, 1.0605627298355103],
"OK1": [0, 0, 0],
"WEIRD2": [0.0064908224157989025, 0.0064917667768895626, 0.0064947893843054771],
"OK3": [6.6227097511291504, 6.8674740791320801, 7.0403199195861816, 7.5792555809020996, 7.9916787147521973, 8.136042594909668, 8.5505847930908203, 8.701807975769043, 8.926518440246582, 8.9484291076660156, 9.0740194320678711, 9.3788108825683594, 9.406926155090332],
"WEIRD4": [0.91054189205169678, 0.98725020885467529, 1.070807933807373],
"REALLY_WEIRD5": [
[1.5158847570419312, 1.6361792087554932], # 0
[1.0741721391677856, 1.1791903972625732], # 1
[1.4423576593399048, 1.7063977718353271], # 2
@@ -390,8 +384,8 @@ REALLY_WEIRD5: [
{ \
SCOPED_TRACE(__LINE__); \
csubstr file = __FILE__ ":" C4_XQUOTE(__LINE__); \
const Tree actual = parse_in_arena(file, actual_src); \
const Tree expected = parse_in_arena(file, expected_src); \
const Tree actual = parse_json_in_arena(file, actual_src); \
const Tree expected = parse_json_in_arena(file, expected_src); \
print_tree(actual); \
test_compare(actual, expected); \
}
@@ -402,26 +396,12 @@ TEST(json, compact_map)
_test("", "");
_test("{}", "{}");
_test(R"("a":"b")", R"("a": "b")");
_test(R"('a':'b')", R"('a': 'b')");
_test(R"({'a':'b'})", R"({'a': 'b'})");
_test(R"({"a":"b"})", R"({"a": "b"})");
_test(R"("a":{"a":"b"})", R"("a": {"a": "b"})");
_test(R"('a':{'a':'b'})", R"('a': {'a': 'b'})");
_test(R"({"a":{"a":"b"}})", R"({"a": {"a": "b"}})");
_test(R"({'a':{'a':'b'}})", R"({'a': {'a': 'b'}})");
}
TEST(json, compact_seq)
{
_test(R"("a",["a","b"])", R"("a", ["a", "b"])");
_test(R"('a',['a','b'])", R"('a', ['a', 'b'])");
_test(R"(["a",["a","b"]])", R"(["a", ["a", "b"]])");
_test(R"(['a',['a','b']])", R"(['a', ['a', 'b']])");
}
TEST(json, github142)
@@ -438,19 +418,6 @@ TEST(json, github142)
R"({"A": ["B]","[C","[D]"]})");
//_test(R"({"A":["B\"]","[\"C","\"[D]\""]})", // VS2019 chokes on this.
// R"({"A": ["B\"]","[\"C","\"[D]\""]})");
_test(R"({'A':'B}'})",
R"({'A': 'B}'})");
_test(R"({'A':'{B'})",
R"({'A': '{B'})");
_test(R"({'A':'{B}'})",
R"({'A': '{B}'})");
_test(R"({ 'A':'B}' })",
R"({ 'A': 'B}' })");
_test(R"({'A':['B]','[C','[D]']})",
R"({'A': ['B]','[C','[D]']})");
_test(R"({'A':['B'']','[''C','''[D]''']})",
R"({'A': ['B'']','[''C','''[D]''']})");
}
TEST(json, github52)
@@ -511,6 +478,8 @@ TEST(json, nested_end)
R"({"a": "b","d": "e","c": {"a": "b","d": "e","c": {"a": "b","d": "e","c": {"a": "b","d": "e","c": {"a": "b","d": "e","c": {}}}}}})");
}
#undef _test
TEST(json, issue390)
{
const Tree tree = parse_in_arena(R"(quntity: 9.5e7
@@ -519,7 +488,242 @@ quntity2: 95000000)");
EXPECT_EQ(emitrs_json<std::string>(tree), R"({"quntity": 9.5e7,"quntity2": 95000000})");
}
#undef _test
TEST(parse_json, error_on_missing_seq_val)
{
ExpectError::do_check([]{
Tree tree = parse_json_in_arena("[foo, , ]");
});
}
TEST(parse_json, error_on_double_seq_val)
{
ExpectError::do_check([]{
Tree tree = parse_json_in_arena("[0 1, ]");
});
}
TEST(parse_json, error_on_double_seq_val_quoted)
{
ExpectError::do_check([]{
Tree tree = parse_json_in_arena("[\"0\" 1, ]");
});
}
TEST(parse_json, error_on_double_seq_val_quoted_2)
{
ExpectError::do_check([]{
Tree tree = parse_json_in_arena("[\"0\" \"1\", ]");
});
}
TEST(parse_json, error_on_double_seq_val_quoted_3)
{
ExpectError::do_check([]{
Tree tree = parse_json_in_arena("[0 \"1\", ]");
});
}
TEST(parse_json, error_on_double_map_val)
{
ExpectError::do_check([]{
Tree tree = parse_json_in_arena("{\"key\": 0 1}");
});
}
TEST(parse_json, error_on_double_map_val_quoted)
{
ExpectError::do_check([]{
Tree tree = parse_json_in_arena("{\"key\": 0 \"1\"}");
});
}
TEST(parse_json, error_on_double_map_val_quoted_2)
{
ExpectError::do_check([]{
Tree tree = parse_json_in_arena("{\"key\": \"0\" 1}");
});
}
TEST(parse_json, error_on_double_map_val_quoted_3)
{
ExpectError::do_check([]{
Tree tree = parse_json_in_arena("{\"key\": \"0\" \"1\"}");
});
}
TEST(parse_json, error_on_missing_seq_term)
{
ExpectError::do_check([]{
Tree tree = parse_json_in_arena("[foo, ");
});
}
TEST(parse_json, error_on_missing_map_val)
{
ExpectError::do_check([]{
Tree tree = parse_json_in_arena("{\"foo\": }");
});
}
TEST(parse_json, error_on_missing_map_term)
{
ExpectError::do_check([]{
Tree tree = parse_json_in_arena("{\"foo\": 0");
});
}
TEST(parse_json, error_on_missing_map_colon)
{
ExpectError::do_check([]{
Tree tree = parse_json_in_arena("{\"foo\" }");
});
}
TEST(parse_json, error_on_bad_map_val)
{
ExpectError::do_check([]{
Tree tree = parse_json_in_arena("{\"foo\": , }");
});
}
TEST(parse_json, error_on_wrong_key_character)
{
ExpectError::do_check([]{
Tree tree = parse_json_in_arena("{'foo': 1}");
});
}
TEST(parse_json, error_on_unquoted_key_character)
{
ExpectError::do_check([]{
Tree tree = parse_json_in_arena("{foo: 1}");
});
}
TEST(parse_json, error_on_bare_keyval)
{
ExpectError::do_check([]{
Tree tree = parse_json_in_arena("\"fails\": true");
});
}
TEST(parse_json, scalar_src_dquoted)
{
Tree expected;
expected.rootref() = "dquoted";
Tree actual = parse_json_in_arena("\"dquoted\"");
test_compare(expected, actual);
}
TEST(parse_json, scalar_src_plain)
{
Tree expected;
expected.rootref() = "true";
Tree actual = parse_json_in_arena("true");
test_compare(expected, actual);
}
TEST(parse_json, leading_whitespace_scalar_dquoted)
{
csubstr json = " \n \"dquoted\"";
Tree expected;
expected.rootref() = "dquoted";
Tree actual = parse_json_in_arena(json);
test_compare(expected, actual);
}
TEST(parse_json, leading_whitespace_scalar_src_plain)
{
csubstr json = " \n true";
Tree expected;
expected.rootref() = "true";
Tree actual = parse_json_in_arena(json);
test_compare(expected, actual);
}
TEST(parse_json, empty_lines_on_seq)
{
csubstr json = R"([
0,
1
,
2
,3
])";
Tree expected;
NodeRef root = expected.rootref();
root |= SEQ|FLOW_SL;
root.append_child() = "0";
root.append_child() = "1";
root.append_child() = "2";
root.append_child() = "3";
Tree actual = parse_json_in_arena(json);
test_compare(expected, actual);
}
TEST(parse_json, empty_lines_on_map)
{
csubstr json = R"({"0"
:0,
"1"
:
1
,
"2":
2
,"3":3
})";
Tree expected;
NodeRef root = expected.rootref();
root |= MAP|FLOW_SL;
root.append_child({"0", "0"});
root.append_child({"1", "1"});
root.append_child({"2", "2"});
root.append_child({"3", "3"});
Tree actual = parse_json_in_arena(json);
test_compare(expected, actual);
}
TEST(parse_json, seq_nested_on_map)
{
csubstr json = R"({"seq":[0,1],"key":val})";
Tree expected;
NodeRef root = expected.rootref();
root |= MAP|FLOW_SL;
NodeRef seq = root.append_child({KEYSEQ, "seq"});
seq.append_child() = "0";
seq.append_child() = "1";
root.append_child({"key", "val"});
Tree actual = parse_json_in_arena(json);
test_compare(expected, actual);
}
TEST(parse_json, seq_nested_on_seq_with_trailing_comma)
{
csubstr json = R"([[0,1,],2,3,])";
Tree expected;
NodeRef root = expected.rootref();
root |= SEQ|FLOW_SL;
NodeRef seq = root.append_child(SEQ);
seq.append_child() = "0";
seq.append_child() = "1";
root.append_child() = "2";
root.append_child() = "3";
Tree actual = parse_json_in_arena(json);
test_compare(expected, actual);
}
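// a minimal sketch of the same expected-vs-actual pattern used by the tests
// above, here with a nested map; it reuses only calls already exercised in
// this file (parse_json_in_arena, append_child, test_compare), and the test
// name and json content are illustrative
TEST(parse_json, sketch_nested_map_on_map)
{
    csubstr json = R"({"outer":{"inner":"1"},"key":"val"})";
    Tree expected;
    NodeRef root = expected.rootref();
    root |= MAP|FLOW_SL;
    NodeRef outer = root.append_child({KEYMAP, "outer"});
    outer.append_child({"inner", "1"});
    root.append_child({"key", "val"});
    Tree actual = parse_json_in_arena(json);
    test_compare(expected, actual);
}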
//-------------------------------------------

View File

@@ -3,6 +3,9 @@
#ifndef RYML_SINGLE_HEADER
#include "c4/yml/common.hpp"
#include <c4/charconv.hpp>
#include "c4/yml/detail/parser_dbg.hpp"
#else
#endif
#include <vector>
#include <iostream>
@@ -39,8 +42,8 @@ struct CallbacksTester
void check()
{
std::cout << "size: alloc=" << alloc_size << " dealloc=" << dealloc_size << std::endl;
std::cout << "count: #allocs=" << num_allocs << " #deallocs=" << num_deallocs << std::endl;
_c4dbgpf("size: alloc={} dealloc={}", alloc_size, dealloc_size);
_c4dbgpf("count: #allocs={} #deallocs={}", num_allocs, num_deallocs);
RYML_CHECK(num_allocs == num_deallocs);
RYML_CHECK(alloc_size == dealloc_size);
}
@@ -56,7 +59,7 @@ struct CallbacksTester
void *allocate(size_t len)
{
std::cout << "alloc[" << num_allocs << "]=" << len << "B\n";
_c4dbgpf("alloc[{}]={}B", num_allocs, len);
void *ptr = &memory_pool[alloc_size];
alloc_size += len;
++num_allocs;
@@ -68,7 +71,7 @@ struct CallbacksTester
{
RYML_CHECK((char*)mem >= &memory_pool.front() && (char*)mem < &memory_pool.back());
RYML_CHECK((char*)mem+len >= &memory_pool.front() && (char*)mem+len <= &memory_pool.back());
std::cout << "free[" << num_deallocs << "]=" << len << "B\n";
_c4dbgpf("free[{}]={}B", num_deallocs, len);
dealloc_size += len;
++num_deallocs;
// no need to free here

View File

@@ -1,4 +1,4 @@
#include "./test_case.hpp"
#include "./test_lib/test_case.hpp"
#ifndef RYML_SINGLE_HEADER
#include "c4/yml/common.hpp"
#include "c4/format.hpp"
@@ -9,7 +9,9 @@
#endif
#include <gtest/gtest.h>
#ifndef C4_EXCEPTIONS
#ifdef C4_EXCEPTIONS
#include <exception>
#else
#include <csetjmp>
#endif
@@ -25,16 +27,20 @@
# if __GNUC__ >= 6
# pragma GCC diagnostic ignored "-Wnull-dereference"
# endif
# if __GNUC__ >= 7
# pragma GCC diagnostic ignored "-Wstringop-overflow"
# endif
#endif
namespace c4 {
namespace yml {
size_t _num_leaves(Tree const& t, size_t node)
id_type _num_leaves(Tree const& t, id_type node)
{
size_t count = 0;
for(size_t ch = t.first_child(node); ch != NONE; ch = t.next_sibling(ch))
id_type count = 0;
for(id_type ch = t.first_child(node); ch != NONE; ch = t.next_sibling(ch))
count += _num_leaves(t, ch);
return count;
}
@@ -51,38 +57,41 @@ void test_compare(Tree const& actual, Tree const& expected)
}
void test_compare(Tree const& actual, size_t node_actual,
Tree const& expected, size_t node_expected,
size_t level)
void test_compare(Tree const& actual, id_type node_actual,
Tree const& expected, id_type node_expected,
id_type level)
{
#define _MORE_INFO "actual=" << node_actual << " vs expected=" << node_expected
RYML_TRACE_FMT("actual={} vs expected={}", node_actual, node_expected);
ASSERT_NE(node_actual, (size_t)NONE);
ASSERT_NE(node_expected, (size_t)NONE);
ASSERT_NE(node_actual, (id_type)NONE);
ASSERT_NE(node_expected, (id_type)NONE);
ASSERT_LT(node_actual, actual.capacity());
ASSERT_LT(node_expected, expected.capacity());
EXPECT_EQ((type_bits)(actual.type(node_actual)&_TYMASK), (type_bits)(expected.type(node_expected)&_TYMASK)) << _MORE_INFO;
NodeType type_actual = actual.type(node_actual)&_TYMASK;
NodeType type_expected = expected.type(node_expected)&_TYMASK;
RYML_COMPARE_NODE_TYPE(type_actual, type_expected, ==, EQ);
//EXPECT_EQ((type_bits)(actual.type(node_actual)&_TYMASK), (type_bits)(expected.type(node_expected)&_TYMASK));
EXPECT_EQ(actual.has_key(node_actual), expected.has_key(node_expected)) << _MORE_INFO;
EXPECT_EQ(actual.has_key(node_actual), expected.has_key(node_expected));
if(actual.has_key(node_actual) && expected.has_key(node_expected))
{
EXPECT_EQ(actual.key(node_actual), expected.key(node_expected)) << _MORE_INFO;
EXPECT_EQ(actual.key(node_actual), expected.key(node_expected));
}
EXPECT_EQ(actual.has_val(node_actual), expected.has_val(node_expected)) << _MORE_INFO;
EXPECT_EQ(actual.has_val(node_actual), expected.has_val(node_expected));
if(actual.has_val(node_actual) && expected.has_val(node_expected))
{
EXPECT_EQ(actual.val(node_actual), expected.val(node_expected)) << _MORE_INFO;
EXPECT_EQ(actual.val(node_actual), expected.val(node_expected));
}
EXPECT_EQ(actual.has_key_tag(node_actual), expected.has_key_tag(node_expected)) << _MORE_INFO;
EXPECT_EQ(actual.has_key_tag(node_actual), expected.has_key_tag(node_expected));
if(actual.has_key_tag(node_actual) && expected.has_key_tag(node_expected))
{
EXPECT_EQ(actual.key_tag(node_actual), expected.key_tag(node_expected)) << _MORE_INFO;
EXPECT_EQ(actual.key_tag(node_actual), expected.key_tag(node_expected));
}
EXPECT_EQ(actual.has_val_tag(node_actual), expected.has_val_tag(node_expected)) << _MORE_INFO;
EXPECT_EQ(actual.has_val_tag(node_actual), expected.has_val_tag(node_expected));
if(actual.has_val_tag(node_actual) && expected.has_val_tag(node_expected))
{
auto filtered = [](csubstr tag) {
@@ -92,30 +101,28 @@ void test_compare(Tree const& actual, size_t node_actual,
};
csubstr actual_tag = filtered(actual.val_tag(node_actual));
csubstr expected_tag = filtered(expected.val_tag(node_expected));
EXPECT_EQ(actual_tag, expected_tag) << _MORE_INFO;
EXPECT_EQ(actual_tag, expected_tag);
}
EXPECT_EQ(actual.has_key_anchor(node_actual), expected.has_key_anchor(node_expected)) << _MORE_INFO;
EXPECT_EQ(actual.has_key_anchor(node_actual), expected.has_key_anchor(node_expected));
if(actual.has_key_anchor(node_actual) && expected.has_key_anchor(node_expected))
{
EXPECT_EQ(actual.key_anchor(node_actual), expected.key_anchor(node_expected)) << _MORE_INFO;
EXPECT_EQ(actual.key_anchor(node_actual), expected.key_anchor(node_expected));
}
EXPECT_EQ(actual.has_val_anchor(node_actual), expected.has_val_anchor(node_expected)) << _MORE_INFO;
EXPECT_EQ(actual.has_val_anchor(node_actual), expected.has_val_anchor(node_expected));
if(actual.has_val_anchor(node_actual) && expected.has_val_anchor(node_expected))
{
EXPECT_EQ(actual.val_anchor(node_actual), expected.val_anchor(node_expected)) << _MORE_INFO;
EXPECT_EQ(actual.val_anchor(node_actual), expected.val_anchor(node_expected));
}
EXPECT_EQ(actual.num_children(node_actual), expected.num_children(node_expected)) << _MORE_INFO;
for(size_t ia = actual.first_child(node_actual), ib = expected.first_child(node_expected);
EXPECT_EQ(actual.num_children(node_actual), expected.num_children(node_expected));
for(id_type ia = actual.first_child(node_actual), ib = expected.first_child(node_expected);
ia != NONE && ib != NONE;
ia = actual.next_sibling(ia), ib = expected.next_sibling(ib))
{
test_compare(actual, ia, expected, ib, level+1);
}
#undef _MORE_INFO
}
void test_arena_not_shared(Tree const& a, Tree const& b)
@@ -166,11 +173,14 @@ std::string format_error(const char* msg, size_t len, Location loc)
#ifndef RYML_NO_DEFAULT_CALLBACKS
report_error_impl(msg, len, loc, nullptr);
#endif
if(!loc) return msg;
if(!loc)
return msg;
std::string out;
if(!loc.name.empty()) c4::formatrs_append(&out, "{}:", loc.name);
if(!loc.name.empty())
c4::formatrs_append(&out, "{}:", loc.name);
c4::formatrs_append(&out, "{}:{}:", loc.line, loc.col);
if(loc.offset) c4::formatrs_append(&out, " (@{}B):", loc.offset);
if(loc.offset)
c4::formatrs_append(&out, " (@{}B):", loc.offset);
c4::formatrs_append(&out, "{}:", csubstr(msg, len));
return out;
}
@@ -207,14 +217,15 @@ ExpectError::ExpectError(Tree *tree, Location loc)
, expected_location(loc)
{
auto err = [](const char* msg, size_t len, Location errloc, void *this_) {
((ExpectError*)this_)->m_got_an_error = true;
_c4dbgpf("called error callback! (withlocation={})", bool(errloc));
((ExpectError*)this_)->m_got_an_error = true; // assign in here to ensure the exception was thrown here
C4_IF_EXCEPTIONS(
throw ExpectedError(msg, len, errloc);
C4_UNREACHABLE_AFTER_ERR();
,
s_jmp_err = ExpectedError(msg, len, errloc);
std::longjmp(s_jmp_env_expect_error, 1);
);
C4_UNREACHABLE_AFTER_ERR();
};
#ifdef RYML_NO_DEFAULT_CALLBACKS
c4::yml::Callbacks tcb((void*)this, nullptr, nullptr, err);
@@ -223,6 +234,7 @@ ExpectError::ExpectError(Tree *tree, Location loc)
c4::yml::Callbacks tcb((void*)this, tree ? m_tree_prev.m_allocate : nullptr, tree ? m_tree_prev.m_free : nullptr, err);
c4::yml::Callbacks gcb((void*)this, m_glob_prev.m_allocate, m_glob_prev.m_free, err);
#endif
_c4dbgp("setting error callback");
if(tree)
tree->callbacks(tcb);
set_callbacks(gcb);
@@ -233,6 +245,7 @@ ExpectError::~ExpectError()
if(m_tree)
m_tree->callbacks(m_tree_prev);
set_callbacks(m_tree_prev);
_c4dbgp("resetting error callback");
}
void ExpectError::check_success(Tree *tree, std::function<void()> fn)
@@ -254,7 +267,9 @@ void ExpectError::do_check(Tree *tree, std::function<void()> fn, Location expect
auto context = ExpectError(tree, expected_location);
C4_IF_EXCEPTIONS_(try, if(setjmp(s_jmp_env_expect_error) == 0))
{
_c4dbgp("check expected error");
fn();
_c4dbgp("check expected error: failed!");
}
C4_IF_EXCEPTIONS_(catch(ExpectedError const& e), else)
{
@@ -266,8 +281,8 @@ void ExpectError::do_check(Tree *tree, std::function<void()> fn, Location expect
#endif
if(context.expected_location)
{
EXPECT_EQ(static_cast<bool>(context.expected_location),
static_cast<bool>(e.error_location));
_c4dbgp("checking expected location...");
EXPECT_EQ(static_cast<bool>(e.error_location), static_cast<bool>(context.expected_location));
EXPECT_EQ(e.error_location.line, context.expected_location.line);
EXPECT_EQ(e.error_location.col, context.expected_location.col);
if(context.expected_location.offset)
@@ -275,7 +290,13 @@ void ExpectError::do_check(Tree *tree, std::function<void()> fn, Location expect
EXPECT_EQ(e.error_location.offset, context.expected_location.offset);
}
}
};
}
C4_IF_EXCEPTIONS_(catch(...)
{
_c4dbgp("---------------\n"
"got an unexpected exception!\n"
"---------------\n");
}, )
EXPECT_TRUE(context.m_got_an_error);
}
@@ -291,251 +312,6 @@ void ExpectError::check_assertion(Tree *tree, std::function<void()> fn, Location
}
//-----------------------------------------------------------------------------
//-----------------------------------------------------------------------------
//-----------------------------------------------------------------------------
using N = CaseNode;
using L = CaseNode::iseqmap;
TEST(CaseNode, setting_up)
{
L tl1 = {DOC, DOC};
L tl2 = {(DOC), (DOC)};
ASSERT_EQ(tl1.size(), tl2.size());
N const& d1 = *tl1.begin();
N const& d2 = *(tl1.begin() + 1);
ASSERT_EQ(d1.reccount(), d2.reccount());
ASSERT_EQ((type_bits)d1.type, (type_bits)DOC);
ASSERT_EQ((type_bits)d2.type, (type_bits)DOC);
N n1(tl1);
N n2(tl2);
ASSERT_EQ(n1.reccount(), n2.reccount());
}
//-----------------------------------------------------------------------------
//-----------------------------------------------------------------------------
//-----------------------------------------------------------------------------
NodeType_e CaseNode::_guess() const
{
NodeType t;
C4_ASSERT(!val.empty() != !children.empty() || (val.empty() && children.empty()));
if(children.empty())
{
C4_ASSERT(parent);
if(key.empty())
{
t = VAL;
}
else
{
t = KEYVAL;
}
}
else
{
NodeType_e has_key = key.empty() ? NOTYPE : KEY;
auto const& ch = children.front();
if(ch.key.empty())
{
t = (has_key|SEQ);
}
else
{
t = (has_key|MAP);
}
}
if( ! key_tag.empty())
{
C4_ASSERT( ! key.empty());
t.add(KEYTAG);
}
if( ! val_tag.empty())
{
C4_ASSERT( ! val.empty() || ! children.empty());
t.add(VALTAG);
}
if( ! key_anchor.str.empty())
{
t.add(key_anchor.type);
}
if( ! val_anchor.str.empty())
{
t.add(val_anchor.type);
}
return t;
}
//-----------------------------------------------------------------------------
void CaseNode::compare_child(yml::ConstNodeRef const& n, size_t pos) const
{
EXPECT_TRUE(pos < n.num_children());
EXPECT_TRUE(pos < children.size());
if(pos >= n.num_children() || pos >= children.size()) return;
ASSERT_GT(n.num_children(), pos);
auto const& expectedch = children[pos];
if(type & MAP)
{
ConstNodeRef actualch = n.find_child(expectedch.key);
if(!actualch.invalid())
{
// there may be duplicate keys.
if(actualch.id() != n[pos].id())
actualch = n[pos];
//EXPECT_EQ(fch, n[ch.key]);
EXPECT_EQ(actualch.get(), n[pos].get());
//EXPECT_EQ(n[pos], n[ch.key]);
EXPECT_EQ(n[expectedch.key].key(), expectedch.key);
}
else
{
printf("error: node should have child %.*s: ", (int)expectedch.key.len, expectedch.key.str);
print_path(n);
printf("\n");
print_node(n);
GTEST_FAIL();
}
}
if(type & SEQ)
{
EXPECT_FALSE(n[pos].has_key());
EXPECT_EQ(n[pos].get()->m_key.scalar, children[pos].key);
auto actualch = n.child(pos);
EXPECT_EQ(actualch.get(), n[pos].get());
}
if(expectedch.type & KEY)
{
auto actualfch = n[pos];
EXPECT_TRUE(actualfch.has_key()) << "id=" << actualfch.id();
if(actualfch.has_key())
{
EXPECT_EQ(actualfch.key(), expectedch.key) << "id=" << actualfch.id();
}
if( ! expectedch.key_tag.empty())
{
EXPECT_TRUE(actualfch.has_key_tag()) << "id=" << actualfch.id();
if(actualfch.has_key_tag())
{
EXPECT_EQ(actualfch.key_tag(), expectedch.key_tag) << "id=" << actualfch.id();
}
}
}
if(expectedch.type & VAL)
{
auto actualch = n[pos];
EXPECT_TRUE(actualch.has_val()) << "id=" << actualch.id();
if(actualch.has_val())
{
EXPECT_EQ(actualch.val(), expectedch.val) << "id=" << actualch.id();
}
if( ! expectedch.val_tag.empty())
{
EXPECT_TRUE(actualch.has_val_tag()) << "id=" << actualch.id();
if(actualch.has_val_tag())
{
EXPECT_EQ(actualch.val_tag(), expectedch.val_tag) << "id=" << actualch.id();
}
}
}
}
void CaseNode::compare(yml::ConstNodeRef const& actual, bool ignore_quote) const
{
if(ignore_quote)
{
const auto actual_type = actual.get()->m_type & ~(VALQUO | KEYQUO);
const auto expected_type = type & ~(VALQUO | KEYQUO);
EXPECT_EQ(expected_type, actual_type) << "id=" << actual.id();
}
else
{
EXPECT_EQ((int)actual.get()->m_type, (int)type) << "id=" << actual.id(); // the type() method masks the type, and thus tag flags are omitted on its return value
}
EXPECT_EQ(actual.num_children(), children.size()) << "id=" << actual.id();
if(actual.has_key())
{
EXPECT_EQ(actual.key(), key) << "id=" << actual.id();
}
if(actual.has_val())
{
EXPECT_EQ(actual.val(), val) << "id=" << actual.id();
}
// check that the children are in the same order
{
EXPECT_EQ(children.size(), actual.num_children()) << "id=" << actual.id();
size_t ic = 0;
for(auto const &expectedch : children)
{
SCOPED_TRACE("comparing: iteration based on the ref children");
(void)expectedch; // unused
compare_child(actual, ic++);
}
ic = 0;
for(auto const actualch : actual.children())
{
SCOPED_TRACE("comparing: iteration based on the yml::Node children");
(void)actualch; // unused
compare_child(actual, ic++);
}
if(!actual.first_child().invalid())
{
ic = 0;
for(auto const ch : actual.first_child().siblings())
{
SCOPED_TRACE("comparing: iteration based on the yml::Node siblings");
(void)ch; // unused
compare_child(actual, ic++);
}
}
}
for(size_t i = 0, ei = actual.num_children(), j = 0, ej = children.size(); i < ei && j < ej; ++i, ++j)
{
children[j].compare(actual[i], ignore_quote);
}
}
void CaseNode::recreate(yml::NodeRef *n) const
{
C4_ASSERT( ! n->has_children());
auto *nd = n->get();
nd->m_type = type|key_anchor.type|val_anchor.type;
nd->m_key.scalar = key;
nd->m_key.tag = (key_tag);
nd->m_key.anchor = key_anchor.str;
nd->m_val.scalar = val;
nd->m_val.tag = (val_tag);
nd->m_val.anchor = val_anchor.str;
auto &tree = *n->tree();
size_t nid = n->id(); // don't use node from now on
for(auto const& ch : children)
{
size_t id = tree.append_child(nid);
NodeRef chn(n->tree(), id);
ch.recreate(&chn);
}
}
//-----------------------------------------------------------------------------
void print_path(ConstNodeRef const& n)
@@ -551,7 +327,7 @@ void print_path(ConstNodeRef const& n)
}
else
{
int ret = snprintf(buf, sizeof(buf), "/%zd", p.has_parent() ? p.parent().child_pos(p) : 0);
int ret = snprintf(buf, sizeof(buf), "/%zu", p.has_parent() ? (size_t)p.parent().child_pos(p) : (size_t)0);
RYML_ASSERT(ret >= 0);
len += static_cast<size_t>(ret);
}
@@ -571,12 +347,12 @@ void print_path(ConstNodeRef const& n)
}
else if(p.has_parent())
{
pos = p.parent().child_pos(p);
int ret = snprintf(buf, 0, "/%zd", pos);
pos = (size_t)p.parent().child_pos(p);
int ret = snprintf(buf, 0, "/%zu", pos);
RYML_ASSERT(ret >= 0);
size_t tl = static_cast<size_t>(ret);
RYML_ASSERT(pos >= tl);
ret = snprintf(buf + static_cast<size_t>(pos - tl), tl, "/%zd", pos);
ret = snprintf(buf + static_cast<size_t>(pos - tl), tl, "/%zu", pos);
RYML_ASSERT(ret >= 0);
pos -= static_cast<size_t>(ret);
}
@@ -587,7 +363,7 @@ void print_path(ConstNodeRef const& n)
void print_node(CaseNode const& p, int level)
void print_test_node(TestCaseNode const& p, int level)
{
printf("%*s%p", (2*level), "", (void const*)&p);
if( ! p.parent)
@@ -597,6 +373,7 @@ void print_node(CaseNode const& p, int level)
printf(" %s:", NodeType::type_str(p.type));
if(p.has_key())
{
const char code = _scalar_code_key(p.type);
if(p.has_key_anchor())
{
csubstr ka = p.key_anchor.str;
@@ -605,27 +382,28 @@ void print_node(CaseNode const& p, int level)
if(p.key_tag.empty())
{
csubstr v = p.key;
printf(" '%.*s'", (int)v.len, v.str);
printf(" %c%.*s%c", code, (int)v.len, v.str, code);
}
else
{
csubstr vt = p.key_tag;
csubstr v = p.key;
printf(" '%.*s %.*s'", (int)vt.len, vt.str, (int)v.len, v.str);
printf(" %.*s %c%.*s%c'", (int)vt.len, vt.str, code, (int)v.len, v.str, code);
}
}
if(p.has_val())
{
const char code = _scalar_code_val(p.type);
if(p.val_tag.empty())
{
csubstr v = p.val;
printf(" '%.*s'", (int)v.len, v.str);
printf(" %c%.*s%c", code, (int)v.len, v.str, code);
}
else
{
csubstr vt = p.val_tag;
csubstr v = p.val;
printf(" '%.*s %.*s'", (int)vt.len, vt.str, (int)v.len, v.str);
printf(" %.*s%c%.*s%c", (int)vt.len, vt.str, code, (int)v.len, v.str, code);
}
}
else
@@ -650,114 +428,107 @@ void print_node(CaseNode const& p, int level)
}
void print_tree(ConstNodeRef const& p, int level)
void print_test_tree(TestCaseNode const& p, int level)
{
print_node(p, level);
for(ConstNodeRef ch : p.children())
{
print_tree(ch, level+1);
}
}
void print_tree(CaseNode const& p, int level)
{
print_node(p, level);
print_test_node(p, level);
for(auto const& ch : p.children)
print_tree(ch, level+1);
print_test_tree(ch, level+1);
}
void print_tree(CaseNode const& t)
void print_test_tree(const char *message, TestCaseNode const& t)
{
printf("--------------------------------------\n");
print_tree(t, 0);
printf("#nodes: %zd\n", t.reccount());
if(message != nullptr)
printf("%s:\n", message);
print_test_tree(t, 0);
printf("#nodes: %zu\n", (size_t)t.reccount());
printf("--------------------------------------\n");
}
void test_invariants(ConstNodeRef const& n)
{
#define _MORE_INFO << "id=" << n.id()
SCOPED_TRACE(n.id());
if(n.is_root())
{
EXPECT_FALSE(n.has_other_siblings()) _MORE_INFO;
}
// keys or vals cannot be root
if(n.has_key() || n.is_val() || n.is_keyval())
{
EXPECT_TRUE(!n.is_root() || (n.is_doc() && !n.has_key())) _MORE_INFO;
EXPECT_FALSE(n.has_other_siblings());
}
// vals cannot be containers
if( ! n.empty() && ! n.is_doc())
{
EXPECT_NE(n.has_val(), n.is_container()) _MORE_INFO;
EXPECT_NE(n.has_val(), n.is_container());
}
if(n.has_children())
{
EXPECT_TRUE(n.is_container()) _MORE_INFO;
EXPECT_FALSE(n.is_val()) _MORE_INFO;
EXPECT_TRUE(n.is_container());
EXPECT_FALSE(n.is_val());
}
// check parent & sibling reciprocity
for(ConstNodeRef s : n.siblings())
{
EXPECT_TRUE(n.has_sibling(s)) _MORE_INFO;
EXPECT_TRUE(s.has_sibling(n)) _MORE_INFO;
EXPECT_EQ(s.parent().get(), n.parent().get()) _MORE_INFO;
EXPECT_TRUE(n.has_sibling(s));
EXPECT_TRUE(s.has_sibling(n));
if(n.has_key())
{
EXPECT_TRUE(n.has_sibling(s.key()));
EXPECT_TRUE(s.has_sibling(n.key()));
}
EXPECT_EQ(s.parent().get(), n.parent().get());
}
if(n.parent().readable())
{
EXPECT_EQ(n.parent().num_children() > 1, n.has_other_siblings()) _MORE_INFO;
EXPECT_TRUE(n.parent().has_child(n)) _MORE_INFO;
EXPECT_EQ(n.parent().num_children(), 1 + n.num_other_siblings()) _MORE_INFO;
EXPECT_EQ(n.parent().num_children() > 1, n.has_other_siblings());
EXPECT_TRUE(n.parent().has_child(n));
EXPECT_EQ(n.parent().num_children(), n.num_siblings());
EXPECT_EQ(n.parent().num_children(), n.num_other_siblings()+1u);
// doc parent must be a seq and a stream
if(n.is_doc())
{
EXPECT_TRUE(n.parent().is_seq()) _MORE_INFO;
EXPECT_TRUE(n.parent().is_stream()) _MORE_INFO;
EXPECT_TRUE(n.parent().is_seq());
EXPECT_TRUE(n.parent().is_stream());
}
}
else
{
EXPECT_TRUE(n.is_root()) _MORE_INFO;
EXPECT_TRUE(n.is_root());
}
if(n.is_seq())
{
EXPECT_TRUE(n.is_container()) _MORE_INFO;
EXPECT_FALSE(n.is_map()) _MORE_INFO;
EXPECT_TRUE(n.is_container());
EXPECT_FALSE(n.is_map());
for(ConstNodeRef ch : n.children())
{
EXPECT_FALSE(ch.is_keyval()) _MORE_INFO;
EXPECT_FALSE(ch.has_key()) _MORE_INFO;
EXPECT_FALSE(ch.is_keyval());
EXPECT_FALSE(ch.has_key());
}
}
if(n.is_map())
{
EXPECT_TRUE(n.is_container()) _MORE_INFO;
EXPECT_FALSE(n.is_seq()) _MORE_INFO;
EXPECT_TRUE(n.is_container());
EXPECT_FALSE(n.is_seq());
for(ConstNodeRef ch : n.children())
{
EXPECT_TRUE(ch.has_key()) _MORE_INFO;
EXPECT_TRUE(ch.has_key());
}
}
if(n.has_key_anchor())
{
EXPECT_FALSE(n.key_anchor().empty()) _MORE_INFO;
EXPECT_FALSE(n.is_key_ref()) _MORE_INFO;
EXPECT_FALSE(n.key_anchor().empty());
EXPECT_FALSE(n.is_key_ref());
}
if(n.has_val_anchor())
{
EXPECT_FALSE(n.val_anchor().empty()) _MORE_INFO;
EXPECT_FALSE(n.is_val_ref()) _MORE_INFO;
EXPECT_FALSE(n.val_anchor().empty());
EXPECT_FALSE(n.is_val_ref());
}
if(n.is_key_ref())
{
EXPECT_FALSE(n.key_ref().empty()) _MORE_INFO;
EXPECT_FALSE(n.has_key_anchor()) _MORE_INFO;
EXPECT_FALSE(n.key_ref().empty());
EXPECT_FALSE(n.has_key_anchor());
}
if(n.is_val_ref())
{
EXPECT_FALSE(n.val_ref().empty()) _MORE_INFO;
EXPECT_FALSE(n.has_val_anchor()) _MORE_INFO;
EXPECT_FALSE(n.val_ref().empty());
EXPECT_FALSE(n.has_val_anchor());
}
// ... add more tests here

test/test_lib/test_case.hpp Normal file
View File

@@ -0,0 +1,298 @@
#ifndef _TEST_CASE_HPP_
#define _TEST_CASE_HPP_
#ifdef RYML_SINGLE_HEADER
#include <ryml_all.hpp>
#else
#include "c4/std/vector.hpp"
#include "c4/std/string.hpp"
#include "c4/format.hpp"
#include <c4/yml/yml.hpp>
#include <c4/yml/detail/parser_dbg.hpp>
#include <c4/yml/detail/print.hpp>
#endif
#include <gtest/gtest.h>
#include <functional>
// no pragma push for these warnings! they will be suppressed in the
// files including this header (most test files)
#ifdef __clang__
# pragma clang diagnostic ignored "-Wold-style-cast"
#elif defined(__GNUC__)
# pragma GCC diagnostic ignored "-Wold-style-cast"
#endif
#ifdef __clang__
# pragma clang diagnostic push
#elif defined(__GNUC__)
# pragma GCC diagnostic push
# pragma GCC diagnostic ignored "-Wtype-limits"
#elif defined(_MSC_VER)
# pragma warning(push)
# pragma warning(disable: 4296/*expression is always 'boolean_value'*/)
# pragma warning(disable: 4389/*'==': signed/unsigned mismatch*/)
# if C4_MSVC_VERSION != C4_MSVC_VERSION_2017
# pragma warning(disable: 4800/*'int': forcing value to bool 'true' or 'false' (performance warning)*/)
# endif
#endif
#ifdef RYML_DBG
# include <c4/yml/detail/print.hpp>
#endif
#include "test_lib/test_case_node.hpp"
/** @todo use a matcher and EXPECT_THAT():
* see http://google.github.io/googletest/reference/assertions.html#EXPECT_THAT
* see http://google.github.io/googletest/gmock_cook_book.html#NewMatchers
*/
#define RYML_COMPARE_NODE_TYPE(lhs, rhs, op, testop) \
do \
{ \
if(!((lhs) op (rhs))) \
{ \
char ltypebuf[256]; \
char rtypebuf[256]; \
csubstr ltype = NodeType::type_str(ltypebuf, (NodeType_e)lhs); \
csubstr rtype = NodeType::type_str(rtypebuf, (NodeType_e)rhs); \
if(ltype.str && rtype.str) \
{ \
EXPECT_##testop(lhs, rhs) \
<< " " << ltype.str << " (" << (lhs) << ")" << "=" << #lhs \
<< "\n" \
<< " " << rtype.str << " (" << (rhs) << ")" << "=" << #rhs; \
} \
else \
{ \
EXPECT_##testop(lhs, rhs) \
<< "(type too large to fit print buffer)"; \
} \
} \
} while(0)
namespace c4 {
inline void PrintTo(substr s, ::std::ostream* os) { *os << "'"; os->write(s.str, (std::streamsize)s.len); *os << "'"; }
inline void PrintTo(csubstr s, ::std::ostream* os) { *os << "'"; os->write(s.str, (std::streamsize)s.len); *os << "'"; }
namespace yml {
#define RYML_TRACE_FMT(fmt, ...) SCOPED_TRACE([&]{ return formatrs<std::string>(fmt, __VA_ARGS__); }())
inline void PrintTo(NodeType ty, ::std::ostream* os)
{
*os << ty.type_str();
}
inline void PrintTo(NodeType_e ty, ::std::ostream* os)
{
*os << NodeType::type_str(ty);
}
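// a minimal usage sketch for the RYML_COMPARE_NODE_TYPE macro above: on a
// mismatch, gtest prints the readable type strings next to the raw bit values.
// the function and parameter names are illustrative only.
inline void sketch_compare_node_type(NodeType actual, NodeType expected)
{
    RYML_COMPARE_NODE_TYPE(actual.type, expected.type, ==, EQ);
}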
inline void PrintTo(Callbacks const& cb, ::std::ostream* os)
{
#ifdef __GNUC__
#define RYML_GNUC_EXTENSION __extension__
#else
#define RYML_GNUC_EXTENSION
#endif
*os << '{'
<< "userdata." << (void*)cb.m_user_data << ','
<< "allocate." << RYML_GNUC_EXTENSION (void*)cb.m_allocate << ','
<< "free." << RYML_GNUC_EXTENSION (void*)cb.m_free << ','
<< "error." << RYML_GNUC_EXTENSION (void*)cb.m_error << '}';
#undef RYML_GNUC_EXTENSION
}
struct Case;
struct TestCaseNode;
struct CaseData;
Case const* get_case(csubstr name);
CaseData* get_data(csubstr name);
void test_compare(Tree const& actual, Tree const& expected);
void test_compare(Tree const& actual, id_type node_actual,
Tree const& expected, id_type node_expected,
id_type level=0);
void test_arena_not_shared(Tree const& a, Tree const& b);
void test_invariants(Tree const& t);
void test_invariants(ConstNodeRef const& n);
void print_test_node(TestCaseNode const& t, int level=0);
void print_test_tree(TestCaseNode const& p, int level=0);
void print_test_tree(const char *message, TestCaseNode const& t);
void print_path(ConstNodeRef const& p);
template<class CheckFn>
void test_check_emit_check(Tree const& t, CheckFn check_fn)
{
#ifdef RYML_DBG
print_tree(t);
#endif
{
SCOPED_TRACE("original yaml");
test_invariants(t);
check_fn(t);
}
auto emit_and_parse = [&check_fn](Tree const& tp, const char* identifier){
SCOPED_TRACE(identifier);
std::string emitted = emitrs_yaml<std::string>(tp);
#ifdef RYML_DBG
printf("~~~%s~~~\n%.*s", identifier, (int)emitted.size(), emitted.data());
#endif
Tree cp = parse_in_arena(to_csubstr(emitted));
#ifdef RYML_DBG
print_tree(cp);
#endif
test_invariants(cp);
check_fn(cp);
return cp;
};
Tree cp = emit_and_parse(t, "emitted 1");
cp = emit_and_parse(cp, "emitted 2");
cp = emit_and_parse(cp, "emitted 3");
}
template<class CheckFn>
void test_check_emit_check(csubstr yaml, CheckFn check_fn)
{
Tree t = parse_in_arena(yaml);
test_check_emit_check(t, check_fn);
}
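// a minimal usage sketch for the emit/parse/re-check helper above: the lambda
// is run on the original tree and then on every re-parsed emission; the yaml
// snippet and expectations are illustrative only.
inline void sketch_check_emit_check()
{
    test_check_emit_check(csubstr("{a: b, seq: [0, 1]}"), [](Tree const& t){
        EXPECT_EQ(t["a"].val(), "b");
        EXPECT_EQ(t["seq"].num_children(), (id_type)2);
    });
}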
//-----------------------------------------------------------------------------
//-----------------------------------------------------------------------------
//-----------------------------------------------------------------------------
inline c4::substr replace_all(c4::csubstr pattern, c4::csubstr repl, c4::csubstr subject, std::string *dst)
{
RYML_CHECK(!subject.overlaps(to_csubstr(*dst)));
size_t ret = subject.replace_all(to_substr(*dst), pattern, repl);
if(ret != dst->size())
{
dst->resize(ret);
ret = subject.replace_all(to_substr(*dst), pattern, repl);
}
RYML_CHECK(ret == dst->size());
return c4::to_substr(*dst);
}
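// a minimal usage sketch for replace_all() above, e.g. to prepare a CRLF
// variant of a source buffer (as presumably done for the windows_style test
// data below); the names are illustrative only.
inline csubstr sketch_to_crlf(csubstr unix_src, std::string *dst)
{
    return replace_all("\n", "\r\n", unix_src, dst);
}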
//-----------------------------------------------------------------------------
//-----------------------------------------------------------------------------
//-----------------------------------------------------------------------------
struct ExpectError
{
bool m_got_an_error;
Tree *m_tree;
c4::yml::Callbacks m_glob_prev;
c4::yml::Callbacks m_tree_prev;
Location expected_location;
ExpectError(Location loc={}) : ExpectError(nullptr, loc) {}
ExpectError(Tree *tree, Location loc={});
~ExpectError();
static void do_check( std::function<void()> fn, Location expected={}) { do_check(nullptr, fn, expected); }
static void do_check(Tree *tree, std::function<void()> fn, Location expected={});
static void check_assertion( std::function<void()> fn, Location expected={}) { check_assertion(nullptr, fn, expected); }
static void check_assertion(Tree *tree, std::function<void()> fn, Location expected={});
static void check_success( std::function<void()> fn) { check_success(nullptr, fn); };
static void check_success(Tree *tree, std::function<void()> fn);
};
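// a minimal usage sketch for ExpectError above, following the same pattern as
// the parse_json error tests earlier in this commit; the input is illustrative.
inline void sketch_expect_parse_error()
{
    ExpectError::do_check([]{
        Tree tree = parse_json_in_arena("{"); // missing map terminator
    });
}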
//-----------------------------------------------------------------------------
//-----------------------------------------------------------------------------
//-----------------------------------------------------------------------------
typedef enum {
EXPECT_PARSE_ERROR = (1<<0),
RESOLVE_REFS = (1<<1),
JSON_WRITE = (1<<2), // TODO: make it the opposite: opt-out instead of opt-in
JSON_READ = (1<<3),
HAS_CONTAINER_KEYS = (1<<4),
HAS_MULTILINE_SCALAR = (1<<5),
} TestCaseFlags_e;
struct Case
{
std::string filelinebuf;
csubstr fileline;
csubstr name;
csubstr src;
TestCaseNode root;
TestCaseFlags_e flags;
Location expected_location;
//! create a standard test case: name, source and expected CaseNode structure
template<class... Args> Case(csubstr file, int line, const char *name_, const char *src_, Args&& ...args) : filelinebuf(catrs<std::string>(file, ':', line)), fileline(to_csubstr(filelinebuf)), name(to_csubstr(name_)), src(to_csubstr(src_)), root(std::forward<Args>(args)...), flags(), expected_location() {}
//! create a test case with explicit flags: name, source flags, and expected CaseNode structure
template<class... Args> Case(csubstr file, int line, const char *name_, int f_, const char *src_, Args&& ...args) : filelinebuf(catrs<std::string>(file, ':', line)), fileline(to_csubstr(filelinebuf)), name(to_csubstr(name_)), src(to_csubstr(src_)), root(std::forward<Args>(args)...), flags((TestCaseFlags_e)f_), expected_location() {}
//! create a test case with an error on an expected location
Case(csubstr file, int line, const char *name_, int f_, const char *src_, LineCol loc) : filelinebuf(catrs<std::string>(file, ':', line)), fileline(to_csubstr(filelinebuf)), name(to_csubstr(name_)), src(to_csubstr(src_)), root(), flags((TestCaseFlags_e)f_), expected_location(name, loc.line, loc.col) {}
};
//-----------------------------------------------------------------------------
// a persistent data store to avoid repeating operations on every test
struct CaseDataLineEndings
{
std::vector<char> src_buf;
substr src;
Tree parsed_tree;
size_t numbytes_stdout;
size_t numbytes_stdout_json;
std::string emit_buf;
csubstr emitted_yml;
std::string emitjson_buf;
csubstr emitted_json;
std::string parse_buf;
substr parsed_yml;
std::string parse_buf_json;
substr parsed_json;
Tree emitted_tree;
Tree emitted_tree_json;
Tree recreated;
};
struct CaseData
{
CaseDataLineEndings unix_style;
CaseDataLineEndings unix_style_json;
CaseDataLineEndings windows_style;
CaseDataLineEndings windows_style_json;
};
} // namespace yml
} // namespace c4
#ifdef __clang__
# pragma clang diagnostic pop
#elif defined(__GNUC__)
# pragma GCC diagnostic pop
#elif defined(_MSC_VER)
# pragma warning(pop)
#endif
#endif /* _TEST_CASE_HPP_ */

View File

@@ -0,0 +1,267 @@
#include "test_lib/test_case_node.hpp"
#include "test_lib/test_case.hpp"
#include <gtest/gtest.h>
C4_SUPPRESS_WARNING_GCC_CLANG_PUSH
C4_SUPPRESS_WARNING_GCC("-Wuseless-cast")
namespace c4 {
namespace yml {
using N = TestCaseNode;
using L = TestCaseNode::iseqmap;
TEST(CaseNode, setting_up)
{
L tl1 = {DOC, DOC};
L tl2 = {(DOC), (DOC)};
ASSERT_EQ(tl1.size(), tl2.size());
N const& d1 = *tl1.begin();
N const& d2 = *(tl1.begin() + 1);
ASSERT_EQ(d1.reccount(), d2.reccount());
ASSERT_EQ((type_bits)d1.type, (type_bits)DOC);
ASSERT_EQ((type_bits)d2.type, (type_bits)DOC);
N n1(tl1);
N n2(tl2);
ASSERT_EQ(n1.reccount(), n2.reccount());
}
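// a minimal sketch of the initializer-list style used by the parser test
// groups to build their expected trees, relying only on constructors
// exercised above (keyval from two strings, node from an initializer list);
// the contents are illustrative.
TEST(CaseNode, sketch_initializer_list_children)
{
    L children = {N("key", "value"), N("other_key", "other_value")};
    N map(children);
    ASSERT_EQ(map.children.size(), size_t(2));
    EXPECT_EQ((type_bits)map.children[0].type, (type_bits)KEYVAL);
    EXPECT_EQ(map.children[0].key, "key");
    EXPECT_EQ(map.children[1].val, "other_value");
}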
//-----------------------------------------------------------------------------
//-----------------------------------------------------------------------------
//-----------------------------------------------------------------------------
NodeType_e TestCaseNode::_guess() const
{
NodeType t;
C4_ASSERT(!val.empty() != !children.empty() || (val.empty() && children.empty()));
if(children.empty())
{
C4_ASSERT(parent);
if(key.empty())
{
t = VAL;
}
else
{
t = KEYVAL;
}
}
else
{
NodeType_e has_key = key.empty() ? NOTYPE : KEY;
auto const& ch = children.front();
if(ch.key.empty())
{
t = (has_key|SEQ);
}
else
{
t = (has_key|MAP);
}
}
if( ! key_tag.empty())
{
C4_ASSERT( ! key.empty());
t.add(KEYTAG);
}
if( ! val_tag.empty())
{
C4_ASSERT( ! val.empty() || ! children.empty());
t.add(VALTAG);
}
if( ! key_anchor.str.empty())
{
t.add(key_anchor.type);
}
if( ! val_anchor.str.empty())
{
t.add(val_anchor.type);
}
return t;
}
//-----------------------------------------------------------------------------
void TestCaseNode::compare_child(yml::ConstNodeRef const& n, id_type pos) const
{
SCOPED_TRACE(pos);
SCOPED_TRACE(n.id());
EXPECT_TRUE(pos < n.num_children());
EXPECT_TRUE((size_t)pos < children.size());
if(pos >= n.num_children() || (size_t)pos >= children.size()) return;
ASSERT_GT(n.num_children(), pos);
auto const& expectedch = children[(size_t)pos];
if(type & MAP)
{
ConstNodeRef actualch = n.find_child(expectedch.key);
SCOPED_TRACE(actualch.id());
if(!actualch.invalid())
{
// there may be duplicate keys.
if(actualch.id() != n[pos].id())
actualch = n[pos];
SCOPED_TRACE(actualch.id());
//EXPECT_EQ(fch, n[ch.key]);
EXPECT_EQ(actualch.get(), n[pos].get());
//EXPECT_EQ(n[pos], n[ch.key]);
EXPECT_EQ(n[expectedch.key].key(), expectedch.key);
}
else
{
printf("error: node should have child %.*s: ", (int)expectedch.key.len, expectedch.key.str);
fflush(stdout);
print_path(n);
fflush(stdout);
printf("\n");
print_node(n);
GTEST_FAIL();
}
}
if(type & SEQ)
{
EXPECT_FALSE(n[pos].has_key());
EXPECT_EQ(n[pos].get()->m_key.scalar, children[(size_t)pos].key);
ConstNodeRef actualch = n.child(pos);
SCOPED_TRACE(actualch.id());
EXPECT_EQ(actualch.get(), n[pos].get());
}
if(expectedch.type & KEY)
{
ConstNodeRef actualfch = n[pos];
SCOPED_TRACE(actualfch.id());
EXPECT_TRUE(actualfch.has_key());
if(actualfch.has_key())
{
EXPECT_EQ(actualfch.key(), expectedch.key);
}
if( ! expectedch.key_tag.empty())
{
EXPECT_TRUE(actualfch.has_key_tag());
if(actualfch.has_key_tag())
{
EXPECT_EQ(actualfch.key_tag(), expectedch.key_tag);
}
}
}
if(expectedch.type & VAL)
{
ConstNodeRef actualch = n[pos];
SCOPED_TRACE(actualch.id());
EXPECT_TRUE(actualch.has_val());
if(actualch.has_val())
{
EXPECT_EQ(actualch.val(), expectedch.val);
}
if( ! expectedch.val_tag.empty())
{
EXPECT_TRUE(actualch.has_val_tag());
if(actualch.has_val_tag())
{
EXPECT_EQ(actualch.val_tag(), expectedch.val_tag);
}
}
}
}
void TestCaseNode::compare(yml::ConstNodeRef const& actual, bool ignore_quote) const
{
SCOPED_TRACE(actual.id());
if(ignore_quote)
{
const type_bits mask = VALQUO | KEYQUO | KEY_STYLE | VAL_STYLE | CONTAINER_STYLE;
const type_bits actual_type = actual.get()->m_type & ~mask;
const type_bits expected_type = type & ~mask;
EXPECT_EQ(expected_type, actual_type);
RYML_COMPARE_NODE_TYPE(expected_type, actual_type, ==, EQ);
}
else
{
// the type() method masks the type, and thus tag flags are omitted on its return value
RYML_COMPARE_NODE_TYPE(actual.get()->m_type, this->type, ==, EQ);
}
EXPECT_EQ(actual.num_children(), children.size());
if(actual.has_key())
{
EXPECT_EQ(actual.key(), key);
}
if(actual.has_val())
{
EXPECT_EQ(actual.val(), val);
}
// check that the children are in the same order
{
EXPECT_EQ(children.size(), actual.num_children());
id_type ic = 0;
for(auto const &expectedch : children)
{
SCOPED_TRACE("comparing: iteration based on the ref children");
(void)expectedch; // unused
compare_child(actual, ic++);
}
ic = 0;
for(auto const actualch : actual.children())
{
SCOPED_TRACE("comparing: iteration based on the yml::Node children");
(void)actualch; // unused
compare_child(actual, ic++);
}
if(!actual.first_child().invalid())
{
ic = 0;
for(auto const ch : actual.first_child().siblings())
{
SCOPED_TRACE("comparing: iteration based on the yml::Node siblings");
(void)ch; // unused
compare_child(actual, ic++);
}
}
}
C4_SUPPRESS_WARNING_GCC_WITH_PUSH("-Wuseless-cast")
for(id_type i = 0, ei = actual.num_children(), j = 0, ej = (id_type)children.size(); i < ei && j < ej; ++i, ++j)
{
children[(size_t)j].compare(actual[i], ignore_quote);
}
C4_SUPPRESS_WARNING_GCC_POP
}
void TestCaseNode::recreate(yml::NodeRef *n) const
{
C4_ASSERT( ! n->has_children());
NodeData *nd = n->get();
nd->m_type = type|key_anchor.type|val_anchor.type;
nd->m_key.scalar = key;
nd->m_key.tag = (key_tag);
nd->m_key.anchor = key_anchor.str;
nd->m_val.scalar = val;
nd->m_val.tag = (val_tag);
nd->m_val.anchor = val_anchor.str;
Tree &tree = *n->tree();
id_type nid = n->id(); // don't use node from now on
for(TestCaseNode const& ch : children)
{
id_type id = tree.append_child(nid);
NodeRef chn(n->tree(), id);
ch.recreate(&chn);
}
}
C4_SUPPRESS_WARNING_GCC_CLANG_POP
} // namespace yml
} // namespace c4

View File

@@ -0,0 +1,299 @@
#ifndef _C4_YML_TEST_TEST_CASE_NODE_HPP_
#define _C4_YML_TEST_TEST_CASE_NODE_HPP_
#ifdef RYML_SINGLE_HEADER
#include <ryml_all.hpp>
#else
#include "c4/std/vector.hpp"
#include "c4/std/string.hpp"
#include "c4/format.hpp"
#include <c4/yml/yml.hpp>
#endif
namespace c4 {
namespace yml {
struct TaggedScalar
{
csubstr tag;
csubstr scalar;
template<size_t N, size_t M>
TaggedScalar(const char (&t)[N], const char (&s)[M]) : tag(t), scalar(s) {}
template<size_t N>
TaggedScalar(const char (&t)[N], std::nullptr_t) : tag(t), scalar() {}
};
struct AnchorRef
{
NodeType_e type;
csubstr str;
AnchorRef() : type(NOTYPE), str() {}
AnchorRef(NodeType_e t) : type(t), str() {}
AnchorRef(NodeType_e t, csubstr v) : type(t), str(v) {}
};
/** a node class against which ryml structures are tested. Uses initializer
* lists to facilitate minimal specification. */
struct TestCaseNode
{
public:
using seqmap = std::vector<TestCaseNode>;
using iseqmap = std::initializer_list<TestCaseNode>;
struct TaggedList
{
csubstr tag;
iseqmap ilist;
template<size_t N> TaggedList(const char (&t)[N], iseqmap l) : tag(t), ilist(l) {}
};
public:
NodeType type;
csubstr key, key_tag; AnchorRef key_anchor;
csubstr val, val_tag; AnchorRef val_anchor;
seqmap children;
TestCaseNode * parent;
public:
TestCaseNode(TestCaseNode && that) noexcept { _move(std::move(that)); }
TestCaseNode(TestCaseNode const& that) noexcept { _copy(that); }
TestCaseNode& operator= (TestCaseNode && that) noexcept { _move(std::move(that)); return *this; }
TestCaseNode& operator= (TestCaseNode const& that) noexcept { _copy(that); return *this; }
~TestCaseNode() = default;
public:
// brace yourself: what you are about to see is ... crazy.
TestCaseNode() : TestCaseNode(NOTYPE) {}
TestCaseNode(NodeType_e t) : type(t), key(), key_tag(), key_anchor(), val(), val_tag(), val_anchor(), children(), parent(nullptr) { _set_parent(); }
// val
template<size_t N> explicit TestCaseNode(const char (&v)[N] ) : type((VAL )), key(), key_tag(), key_anchor(), val(v ), val_tag( ), val_anchor(), children(), parent(nullptr) { _set_parent(); }
explicit TestCaseNode(TaggedScalar const& v) : type((VAL|VALTAG)), key(), key_tag(), key_anchor(), val(v.scalar), val_tag(v.tag), val_anchor(), children(), parent(nullptr) { _set_parent(); }
explicit TestCaseNode(std::nullptr_t ) : type((VAL )), key(), key_tag(), key_anchor(), val( ), val_tag( ), val_anchor(), children(), parent(nullptr) { _set_parent(); }
// val, with anchor/ref
template<size_t N> explicit TestCaseNode(const char (&v)[N] , AnchorRef const& arv) : type((arv.type|VAL )), key(), key_tag(), key_anchor(), val(v ), val_tag( ), val_anchor(arv), children(), parent(nullptr) { _set_parent(); }
explicit TestCaseNode(TaggedScalar const& v, AnchorRef const& arv) : type((arv.type|VAL|VALTAG)), key(), key_tag(), key_anchor(), val(v.scalar), val_tag(v.tag), val_anchor(arv), children(), parent(nullptr) { _set_parent(); }
explicit TestCaseNode(std::nullptr_t , AnchorRef const& arv) : type((arv.type|VAL )), key(), key_tag(), key_anchor(), val( ), val_tag( ), val_anchor(arv), children(), parent(nullptr) { _set_parent(); }
explicit TestCaseNode( AnchorRef const& arv) : type((arv.type|VAL )), key(), key_tag(), key_anchor(), val(arv.str ), val_tag( ), val_anchor(arv), children(), parent(nullptr) { _set_parent(); RYML_ASSERT(arv.type == VALREF); }
// val, explicit type
template<size_t N> explicit TestCaseNode(NodeType t, const char (&v)[N] ) : type((VAL|t )), key(), key_tag(), key_anchor(), val(v ), val_tag( ), val_anchor(), children(), parent(nullptr) { _set_parent(); }
explicit TestCaseNode(NodeType t, TaggedScalar const& v) : type((VAL|VALTAG|t)), key(), key_tag(), key_anchor(), val(v.scalar), val_tag(v.tag), val_anchor(), children(), parent(nullptr) { _set_parent(); }
explicit TestCaseNode(NodeType t, std::nullptr_t ) : type((VAL |t)), key(), key_tag(), key_anchor(), val( ), val_tag( ), val_anchor(), children(), parent(nullptr) { _set_parent(); }
// val, explicit type, with val anchor/ref
template<size_t N> explicit TestCaseNode(NodeType t, const char (&v)[N] , AnchorRef const& arv) : type((arv.type|VAL|t )), key(), key_tag(), key_anchor(), val(v ), val_tag( ), val_anchor(arv), children(), parent(nullptr) { _set_parent(); }
explicit TestCaseNode(NodeType t, TaggedScalar const& v, AnchorRef const& arv) : type((arv.type|VAL|VALTAG|t)), key(), key_tag(), key_anchor(), val(v.scalar), val_tag(v.tag), val_anchor(arv), children(), parent(nullptr) { _set_parent(); }
explicit TestCaseNode(NodeType t, std::nullptr_t , AnchorRef const& arv) : type((arv.type|VAL |t)), key(), key_tag(), key_anchor(), val( ), val_tag( ), val_anchor(arv), children(), parent(nullptr) { _set_parent(); }
// keyval
template<size_t N, size_t M> explicit TestCaseNode(const char (&k)[N] , const char (&v)[M] ) : type((KEYVAL )), key(k ), key_tag( ), key_anchor( ), val(v ), val_tag( ), val_anchor( ), children(), parent(nullptr) { _set_parent(); }
template<size_t M> explicit TestCaseNode(std::nullptr_t , const char (&v)[M] ) : type((KEYVAL )), key( ), key_tag( ), key_anchor( ), val(v ), val_tag( ), val_anchor( ), children(), parent(nullptr) { _set_parent(); }
template<size_t N> explicit TestCaseNode(const char (&k)[N] , std::nullptr_t ) : type((KEYVAL )), key(k ), key_tag( ), key_anchor( ), val( ), val_tag( ), val_anchor( ), children(), parent(nullptr) { _set_parent(); }
template<size_t N> explicit TestCaseNode(const char (&k)[N] , TaggedScalar const& v) : type((KEYVAL|VALTAG )), key(k ), key_tag( ), key_anchor( ), val(v.scalar), val_tag(v.tag), val_anchor( ), children(), parent(nullptr) { _set_parent(); }
template<size_t M> explicit TestCaseNode(TaggedScalar const& k, const char (&v)[M] ) : type((KEYVAL|KEYTAG )), key(k.scalar), key_tag(k.tag), key_anchor( ), val(v ), val_tag( ), val_anchor( ), children(), parent(nullptr) { _set_parent(); }
explicit TestCaseNode(TaggedScalar const& k, TaggedScalar const& v) : type((KEYVAL|KEYTAG|VALTAG )), key(k.scalar), key_tag(k.tag), key_anchor( ), val(v.scalar), val_tag(v.tag), val_anchor( ), children(), parent(nullptr) { _set_parent(); }
explicit TestCaseNode(std::nullptr_t , TaggedScalar const& v) : type((KEYVAL |VALTAG )), key( ), key_tag( ), key_anchor( ), val(v.scalar), val_tag(v.tag), val_anchor( ), children(), parent(nullptr) { _set_parent(); }
explicit TestCaseNode(TaggedScalar const& k, std::nullptr_t ) : type((KEYVAL|KEYTAG )), key(k.scalar), key_tag(k.tag), key_anchor( ), val( ), val_tag( ), val_anchor( ), children(), parent(nullptr) { _set_parent(); }
explicit TestCaseNode(std::nullptr_t , std::nullptr_t ) : type((KEYVAL )), key( ), key_tag( ), key_anchor( ), val( ), val_tag( ), val_anchor( ), children(), parent(nullptr) { _set_parent(); }
explicit TestCaseNode(AnchorRef const& ark, AnchorRef const& arv) : type((KEYVAL|ark.type|arv.type)), key(ark.str ), key_tag( ), key_anchor(ark), val(arv.str ), val_tag( ), val_anchor(arv), children(), parent(nullptr) { _set_parent(); RYML_ASSERT(ark.type == KEYREF); RYML_ASSERT(arv.type == VALREF); }
// keyval, with val anchor/ref
template<size_t N, size_t M> explicit TestCaseNode(const char (&k)[N] , const char (&v)[M] , AnchorRef const& arv) : type((arv.type|KEYVAL )), key(k ), key_tag( ), key_anchor(), val(v ), val_tag( ), val_anchor(arv), children(), parent(nullptr) { _set_parent(); }
template<size_t N> explicit TestCaseNode(const char (&k)[N] , TaggedScalar const& v, AnchorRef const& arv) : type((arv.type|KEYVAL|VALTAG )), key(k ), key_tag( ), key_anchor(), val(v.scalar), val_tag(v.tag), val_anchor(arv), children(), parent(nullptr) { _set_parent(); }
template<size_t M> explicit TestCaseNode(TaggedScalar const& k, const char (&v)[M] , AnchorRef const& arv) : type((arv.type|KEYVAL|KEYTAG )), key(k.scalar), key_tag(k.tag), key_anchor(), val(v ), val_tag( ), val_anchor(arv), children(), parent(nullptr) { _set_parent(); }
explicit TestCaseNode(TaggedScalar const& k, TaggedScalar const& v, AnchorRef const& arv) : type((arv.type|KEYVAL|KEYTAG|VALTAG)), key(k.scalar), key_tag(k.tag), key_anchor(), val(v.scalar), val_tag(v.tag), val_anchor(arv), children(), parent(nullptr) { _set_parent(); }
// keyval, with key anchor/ref
template<size_t N, size_t M> explicit TestCaseNode(const char (&k)[N] , AnchorRef const& ark, const char (&v)[M] ) : type((ark.type|KEYVAL )), key(k ), key_tag( ), key_anchor(ark), val(v ), val_tag( ), val_anchor(), children(), parent(nullptr) { _set_parent(); }
template<size_t N> explicit TestCaseNode(const char (&k)[N] , AnchorRef const& ark, TaggedScalar const& v) : type((ark.type|KEYVAL|VALTAG )), key(k ), key_tag( ), key_anchor(ark), val(v.scalar), val_tag(v.tag), val_anchor(), children(), parent(nullptr) { _set_parent(); }
template<size_t M> explicit TestCaseNode(TaggedScalar const& k, AnchorRef const& ark, const char (&v)[M] ) : type((ark.type|KEYVAL|KEYTAG )), key(k.scalar), key_tag(k.tag), key_anchor(ark), val(v ), val_tag( ), val_anchor(), children(), parent(nullptr) { _set_parent(); }
explicit TestCaseNode(TaggedScalar const& k, AnchorRef const& ark, TaggedScalar const& v) : type((ark.type|KEYVAL|KEYTAG|VALTAG)), key(k.scalar), key_tag(k.tag), key_anchor(ark), val(v.scalar), val_tag(v.tag), val_anchor(), children(), parent(nullptr) { _set_parent(); }
// keyval, with key anchor/ref + val anchor/ref
template<size_t N, size_t M> explicit TestCaseNode(const char (&k)[N] , AnchorRef const& ark, const char (&v)[M] , AnchorRef const& arv) : type((ark.type|arv.type|KEYVAL )), key(k ), key_tag( ), key_anchor(ark), val(v ), val_tag( ), val_anchor(arv), children(), parent(nullptr) { _set_parent(); }
template<size_t N> explicit TestCaseNode(const char (&k)[N] , AnchorRef const& ark, TaggedScalar const& v, AnchorRef const& arv) : type((ark.type|arv.type|KEYVAL|VALTAG )), key(k ), key_tag( ), key_anchor(ark), val(v.scalar), val_tag(v.tag), val_anchor(arv), children(), parent(nullptr) { _set_parent(); }
template<size_t M> explicit TestCaseNode(TaggedScalar const& k, AnchorRef const& ark, const char (&v)[M] , AnchorRef const& arv) : type((ark.type|arv.type|KEYVAL|KEYTAG )), key(k.scalar), key_tag(k.tag), key_anchor(ark), val(v ), val_tag( ), val_anchor(arv), children(), parent(nullptr) { _set_parent(); }
explicit TestCaseNode(TaggedScalar const& k, AnchorRef const& ark, TaggedScalar const& v, AnchorRef const& arv) : type((ark.type|arv.type|KEYVAL|KEYTAG|VALTAG)), key(k.scalar), key_tag(k.tag), key_anchor(ark), val(v.scalar), val_tag(v.tag), val_anchor(arv), children(), parent(nullptr) { _set_parent(); }
// keyval, explicit type
template<size_t N, size_t M> explicit TestCaseNode(NodeType t, const char (&k)[N] , const char (&v)[M] ) : type((KEYVAL|t )), key(k ), key_tag( ), key_anchor(), val(v ), val_tag( ), val_anchor(), children(), parent(nullptr) { _set_parent(); }
template<size_t N> explicit TestCaseNode(NodeType t, const char (&k)[N] , std::nullptr_t ) : type((KEYVAL|t )), key(k ), key_tag( ), key_anchor(), val( ), val_tag( ), val_anchor(), children(), parent(nullptr) { _set_parent(); }
template<size_t M> explicit TestCaseNode(NodeType t, std::nullptr_t , const char (&v)[M] ) : type((KEYVAL|t )), key( ), key_tag( ), key_anchor(), val(v ), val_tag( ), val_anchor(), children(), parent(nullptr) { _set_parent(); }
template<size_t N> explicit TestCaseNode(NodeType t, const char (&k)[N] , TaggedScalar const& v) : type((KEYVAL|VALTAG|t )), key(k ), key_tag( ), key_anchor(), val(v.scalar), val_tag(v.tag), val_anchor(), children(), parent(nullptr) { _set_parent(); }
template<size_t M> explicit TestCaseNode(NodeType t, TaggedScalar const& k, const char (&v)[M] ) : type((KEYVAL|KEYTAG|t )), key(k.scalar), key_tag(k.tag), key_anchor(), val(v ), val_tag( ), val_anchor(), children(), parent(nullptr) { _set_parent(); }
explicit TestCaseNode(NodeType t, TaggedScalar const& k, TaggedScalar const& v) : type((KEYVAL|KEYTAG|VALTAG|t)), key(k.scalar), key_tag(k.tag), key_anchor(), val(v.scalar), val_tag(v.tag), val_anchor(), children(), parent(nullptr) { _set_parent(); }
explicit TestCaseNode(NodeType t, TaggedScalar const& k, std::nullptr_t ) : type((KEYVAL|KEYTAG |t)), key(k.scalar), key_tag(k.tag), key_anchor(), val( ), val_tag( ), val_anchor(), children(), parent(nullptr) { _set_parent(); }
explicit TestCaseNode(NodeType t, std::nullptr_t , TaggedScalar const& v) : type((KEYVAL |VALTAG|t)), key( ), key_tag( ), key_anchor(), val(v.scalar), val_tag(v.tag), val_anchor(), children(), parent(nullptr) { _set_parent(); }
explicit TestCaseNode(NodeType t, std::nullptr_t , std::nullptr_t ) : type((KEYVAL |t)), key( ), key_tag( ), key_anchor(), val( ), val_tag( ), val_anchor(), children(), parent(nullptr) { _set_parent(); }
// keyval, explicit type, with val anchor/ref
template<size_t N, size_t M> explicit TestCaseNode(NodeType t, const char (&k)[N] , const char (&v)[M] , AnchorRef const& arv) : type((arv.type|KEYVAL|t )), key(k ), key_tag( ), key_anchor(), val(v ), val_tag( ), val_anchor(arv), children(), parent(nullptr) { _set_parent(); }
template<size_t N> explicit TestCaseNode(NodeType t, const char (&k)[N] , TaggedScalar const& v, AnchorRef const& arv) : type((arv.type|KEYVAL|VALTAG|t )), key(k ), key_tag( ), key_anchor(), val(v.scalar), val_tag(v.tag), val_anchor(arv), children(), parent(nullptr) { _set_parent(); }
template<size_t M> explicit TestCaseNode(NodeType t, TaggedScalar const& k, const char (&v)[M] , AnchorRef const& arv) : type((arv.type|KEYVAL|KEYTAG|t )), key(k.scalar), key_tag(k.tag), key_anchor(), val(v ), val_tag( ), val_anchor(arv), children(), parent(nullptr) { _set_parent(); }
explicit TestCaseNode(NodeType t, TaggedScalar const& k, TaggedScalar const& v, AnchorRef const& arv) : type((arv.type|KEYVAL|KEYTAG|VALTAG|t)), key(k.scalar), key_tag(k.tag), key_anchor(), val(v.scalar), val_tag(v.tag), val_anchor(arv), children(), parent(nullptr) { _set_parent(); }
// keyval, explicit type, with key anchor/ref
template<size_t N, size_t M> explicit TestCaseNode(NodeType t, const char (&k)[N] , AnchorRef const& ark, const char (&v)[M] ) : type((ark.type|KEYVAL|t )), key(k ), key_tag( ), key_anchor(ark), val(v ), val_tag( ), val_anchor(), children(), parent(nullptr) { _set_parent(); }
template<size_t N> explicit TestCaseNode(NodeType t, const char (&k)[N] , AnchorRef const& ark, TaggedScalar const& v) : type((ark.type|KEYVAL|VALTAG|t )), key(k ), key_tag( ), key_anchor(ark), val(v.scalar), val_tag(v.tag), val_anchor(), children(), parent(nullptr) { _set_parent(); }
template<size_t M> explicit TestCaseNode(NodeType t, TaggedScalar const& k, AnchorRef const& ark, const char (&v)[M] ) : type((ark.type|KEYVAL|KEYTAG|t )), key(k.scalar), key_tag(k.tag), key_anchor(ark), val(v ), val_tag( ), val_anchor(), children(), parent(nullptr) { _set_parent(); }
explicit TestCaseNode(NodeType t, TaggedScalar const& k, AnchorRef const& ark, TaggedScalar const& v) : type((ark.type|KEYVAL|KEYTAG|VALTAG|t)), key(k.scalar), key_tag(k.tag), key_anchor(ark), val(v.scalar), val_tag(v.tag), val_anchor(), children(), parent(nullptr) { _set_parent(); }
// keyval, explicit type, with key anchor/ref + val anchor/ref
template<size_t N, size_t M> explicit TestCaseNode(NodeType t, const char (&k)[N] , AnchorRef const& ark, const char (&v)[M] , AnchorRef const& arv) : type((ark.type|arv.type|KEYVAL|t )), key(k ), key_tag( ), key_anchor(ark), val(v ), val_tag( ), val_anchor(arv), children(), parent(nullptr) { _set_parent(); }
template<size_t N> explicit TestCaseNode(NodeType t, const char (&k)[N] , AnchorRef const& ark, TaggedScalar const& v, AnchorRef const& arv) : type((ark.type|arv.type|KEYVAL|VALTAG|t )), key(k ), key_tag( ), key_anchor(ark), val(v.scalar), val_tag(v.tag), val_anchor(arv), children(), parent(nullptr) { _set_parent(); }
template<size_t M> explicit TestCaseNode(NodeType t, TaggedScalar const& k, AnchorRef const& ark, const char (&v)[M] , AnchorRef const& arv) : type((ark.type|arv.type|KEYVAL|KEYTAG|t )), key(k.scalar), key_tag(k.tag), key_anchor(ark), val(v ), val_tag( ), val_anchor(arv), children(), parent(nullptr) { _set_parent(); }
explicit TestCaseNode(NodeType t, TaggedScalar const& k, AnchorRef const& ark, TaggedScalar const& v, AnchorRef const& arv) : type((ark.type|arv.type|KEYVAL|KEYTAG|VALTAG|t)), key(k.scalar), key_tag(k.tag), key_anchor(ark), val(v.scalar), val_tag(v.tag), val_anchor(arv), children(), parent(nullptr) { _set_parent(); }
// container
template<size_t N> explicit TestCaseNode(const char (&k)[N] , iseqmap s) : type(), key(k ), key_tag( ), key_anchor(), val(), val_tag( ), val_anchor(), children(s ), parent(nullptr) { _set_parent(); type = _guess(); }
template<size_t N> explicit TestCaseNode(const char (&k)[N] , TaggedList s) : type(), key(k ), key_tag( ), key_anchor(), val(), val_tag(s.tag), val_anchor(), children(s.ilist), parent(nullptr) { _set_parent(); type = _guess(); }
explicit TestCaseNode(TaggedScalar const& k, iseqmap s) : type(), key(k.scalar), key_tag(k.tag), key_anchor(), val(), val_tag( ), val_anchor(), children(s ), parent(nullptr) { _set_parent(); type = _guess(); }
explicit TestCaseNode(TaggedScalar const& k, TaggedList s) : type(), key(k.scalar), key_tag(k.tag), key_anchor(), val(), val_tag(s.tag), val_anchor(), children(s.ilist), parent(nullptr) { _set_parent(); type = _guess(); }
explicit TestCaseNode( iseqmap m) : TestCaseNode("", m) {}
explicit TestCaseNode( TaggedList m) : TestCaseNode("", m) {}
// container, with val anchor/ref
template<size_t N> explicit TestCaseNode(const char (&k)[N] , iseqmap s, AnchorRef const& arv) : type(), key(k ), key_tag( ), key_anchor(), val(), val_tag( ), val_anchor(arv), children(s ), parent(nullptr) { _set_parent(); type = _guess(); }
template<size_t N> explicit TestCaseNode(const char (&k)[N] , TaggedList s, AnchorRef const& arv) : type(), key(k ), key_tag( ), key_anchor(), val(), val_tag(s.tag), val_anchor(arv), children(s.ilist), parent(nullptr) { _set_parent(); type = _guess(); }
explicit TestCaseNode(TaggedScalar const& k, iseqmap s, AnchorRef const& arv) : type(), key(k.scalar), key_tag(k.tag), key_anchor(), val(), val_tag( ), val_anchor(arv), children(s ), parent(nullptr) { _set_parent(); type = _guess(); }
explicit TestCaseNode(TaggedScalar const& k, TaggedList s, AnchorRef const& arv) : type(), key(k.scalar), key_tag(k.tag), key_anchor(), val(), val_tag(s.tag), val_anchor(arv), children(s.ilist), parent(nullptr) { _set_parent(); type = _guess(); }
explicit TestCaseNode( iseqmap m, AnchorRef const& arv) : TestCaseNode("", m, arv) {}
explicit TestCaseNode( TaggedList m, AnchorRef const& arv) : TestCaseNode("", m, arv) {}
// container, with key anchor/ref
template<size_t N> explicit TestCaseNode(const char (&k)[N] , AnchorRef const& ark, iseqmap s) : type(), key(k ), key_tag( ), key_anchor(ark), val(), val_tag( ), val_anchor(), children(s ), parent(nullptr) { _set_parent(); type = _guess(); }
template<size_t N> explicit TestCaseNode(const char (&k)[N] , AnchorRef const& ark, TaggedList s) : type(), key(k ), key_tag( ), key_anchor(ark), val(), val_tag(s.tag), val_anchor(), children(s.ilist), parent(nullptr) { _set_parent(); type = _guess(); }
explicit TestCaseNode(TaggedScalar const& k, AnchorRef const& ark, iseqmap s) : type(), key(k.scalar), key_tag(k.tag), key_anchor(ark), val(), val_tag( ), val_anchor(), children(s ), parent(nullptr) { _set_parent(); type = _guess(); }
explicit TestCaseNode(TaggedScalar const& k, AnchorRef const& ark, TaggedList s) : type(), key(k.scalar), key_tag(k.tag), key_anchor(ark), val(), val_tag(s.tag), val_anchor(), children(s.ilist), parent(nullptr) { _set_parent(); type = _guess(); }
// container, with key anchor/ref + val anchor/ref
template<size_t N> explicit TestCaseNode(const char (&k)[N] , AnchorRef const& ark, iseqmap s, AnchorRef const& arv) : type(), key(k ), key_tag( ), key_anchor(ark), val(), val_tag( ), val_anchor(arv), children(s ), parent(nullptr) { _set_parent(); type = _guess(); }
template<size_t N> explicit TestCaseNode(const char (&k)[N] , AnchorRef const& ark, TaggedList s, AnchorRef const& arv) : type(), key(k ), key_tag( ), key_anchor(ark), val(), val_tag(s.tag), val_anchor(arv), children(s.ilist), parent(nullptr) { _set_parent(); type = _guess(); }
explicit TestCaseNode(TaggedScalar const& k, AnchorRef const& ark, iseqmap s, AnchorRef const& arv) : type(), key(k.scalar), key_tag(k.tag), key_anchor(ark), val(), val_tag( ), val_anchor(arv), children(s ), parent(nullptr) { _set_parent(); type = _guess(); }
explicit TestCaseNode(TaggedScalar const& k, AnchorRef const& ark, TaggedList s, AnchorRef const& arv) : type(), key(k.scalar), key_tag(k.tag), key_anchor(ark), val(), val_tag(s.tag), val_anchor(arv), children(s.ilist), parent(nullptr) { _set_parent(); type = _guess(); }
// container, explicit type
template<size_t N> explicit TestCaseNode(NodeType t, const char (&k)[N] , iseqmap s) : type((t )), key(k ), key_tag( ), key_anchor(), val(), val_tag( ), val_anchor(), children(s ), parent(nullptr) { _set_parent(); }
template<size_t N> explicit TestCaseNode(NodeType t, const char (&k)[N] , TaggedList s) : type((t |VALTAG)), key(k ), key_tag( ), key_anchor(), val(), val_tag(s.tag), val_anchor(), children(s.ilist), parent(nullptr) { _set_parent(); }
explicit TestCaseNode(NodeType t, TaggedScalar const& k, iseqmap s) : type((t |KEYTAG)), key(k.scalar), key_tag(k.tag), key_anchor(), val(), val_tag( ), val_anchor(), children(s ), parent(nullptr) { _set_parent(); }
explicit TestCaseNode(NodeType t, iseqmap s) : type((t )), key( ), key_tag( ), key_anchor(), val(), val_tag( ), val_anchor(), children(s ), parent(nullptr) { _set_parent(); }
explicit TestCaseNode(NodeType t, TaggedList s) : type((t |VALTAG)), key( ), key_tag( ), key_anchor(), val(), val_tag(s.tag), val_anchor(), children(s.ilist), parent(nullptr) { _set_parent(); }
explicit TestCaseNode(NodeType t, TaggedScalar const& k, TaggedList s) : type((t|KEYTAG|VALTAG)), key(k.scalar), key_tag(k.tag), key_anchor(), val(), val_tag(s.tag), val_anchor(), children(s.ilist), parent(nullptr) { _set_parent(); }
// container, explicit type, with val anchor/ref
template<size_t N> explicit TestCaseNode(NodeType t, const char (&k)[N] , iseqmap s, AnchorRef const& arv) : type((t |VALANCH)), key(k ), key_tag( ), key_anchor(), val(), val_tag( ), val_anchor(arv), children(s ), parent(nullptr) { _set_parent(); }
template<size_t N> explicit TestCaseNode(NodeType t, const char (&k)[N] , TaggedList s, AnchorRef const& arv) : type((t|VALTAG|VALANCH)), key(k ), key_tag( ), key_anchor(), val(), val_tag(s.tag), val_anchor(arv), children(s.ilist), parent(nullptr) { _set_parent(); }
explicit TestCaseNode(NodeType t, TaggedScalar const& k, iseqmap s, AnchorRef const& arv) : type((t|KEYTAG|VALANCH)), key(k.scalar), key_tag(k.tag), key_anchor(), val(), val_tag( ), val_anchor(arv), children(s ), parent(nullptr) { _set_parent(); }
explicit TestCaseNode(NodeType t, iseqmap s, AnchorRef const& arv) : type((t |VALANCH)), key( ), key_tag( ), key_anchor(), val(), val_tag( ), val_anchor(arv), children(s ), parent(nullptr) { _set_parent(); }
explicit TestCaseNode(NodeType t, TaggedList s, AnchorRef const& arv) : type((t|VALTAG|VALANCH)), key( ), key_tag( ), key_anchor(), val(), val_tag(s.tag), val_anchor(arv), children(s.ilist), parent(nullptr) { _set_parent(); }
// container, explicit type, with key anchor/ref
template<size_t N> explicit TestCaseNode(NodeType t, const char (&k)[N] , AnchorRef const& ark, iseqmap s) : type((t |KEYANCH)), key(k ), key_tag( ), key_anchor(ark), val(), val_tag( ), val_anchor(), children(s ), parent(nullptr) { _set_parent(); }
template<size_t N> explicit TestCaseNode(NodeType t, const char (&k)[N] , AnchorRef const& ark, TaggedList s) : type((t|VALTAG|KEYANCH)), key(k ), key_tag( ), key_anchor(ark), val(), val_tag(s.tag), val_anchor(), children(s.ilist), parent(nullptr) { _set_parent(); }
explicit TestCaseNode(NodeType t, TaggedScalar const& k, AnchorRef const& ark, iseqmap s) : type((t|KEYTAG|KEYANCH)), key(k.scalar), key_tag(k.tag), key_anchor(ark), val(), val_tag( ), val_anchor(), children(s ), parent(nullptr) { _set_parent(); }
// container, explicit type, with key anchor/ref + val anchor/ref
template<size_t N> explicit TestCaseNode(NodeType t, const char (&k)[N] , AnchorRef const& ark, iseqmap s, AnchorRef const& arv) : type((t |KEYANCH|VALANCH)), key(k ), key_tag( ), key_anchor(ark), val(), val_tag( ), val_anchor(arv), children(s ), parent(nullptr) { _set_parent(); }
template<size_t N> explicit TestCaseNode(NodeType t, const char (&k)[N] , AnchorRef const& ark, TaggedList s, AnchorRef const& arv) : type((t|VALTAG|KEYANCH|VALANCH)), key(k ), key_tag( ), key_anchor(ark), val(), val_tag(s.tag), val_anchor(arv), children(s.ilist), parent(nullptr) { _set_parent(); }
explicit TestCaseNode(NodeType t, TaggedScalar const& k, AnchorRef const& ark, iseqmap s, AnchorRef const& arv) : type((t|KEYTAG|KEYANCH|VALANCH)), key(k.scalar), key_tag(k.tag), key_anchor(ark), val(), val_tag( ), val_anchor(arv), children(s ), parent(nullptr) { _set_parent(); }
public:
void _move(TestCaseNode&& that)
{
type = that.type;
key = that.key;
key_tag = that.key_tag;
key_anchor = that.key_anchor;
val = that.val;
val_tag = that.val_tag;
val_anchor = that.val_anchor;
children = std::move(that.children);
parent = nullptr;
_set_parent();
}
void _copy(TestCaseNode const& that)
{
type = that.type;
key = that.key;
key_tag = that.key_tag;
key_anchor = that.key_anchor;
val = that.val;
val_tag = that.val_tag;
val_anchor = that.val_anchor;
children = that.children;
parent = nullptr;
_set_parent();
}
void _set_parent()
{
C4_SUPPRESS_WARNING_GCC_PUSH
#if defined(__GNUC__) && __GNUC__ > 9
C4_SUPPRESS_WARNING_GCC("-Wanalyzer-possible-null-dereference")
#endif
for(auto &ch : children)
{
ch.parent = this;
}
C4_SUPPRESS_WARNING_GCC_POP
}
NodeType_e _guess() const;
    bool is_root() const { return parent == nullptr; }
bool is_doc() const { return type & DOC; }
bool is_map() const { return type & MAP; }
bool is_seq() const { return type & SEQ; }
bool has_val() const { return type & VAL; }
bool has_key() const { return type & KEY; }
bool is_container() const { return type & (SEQ|MAP); }
bool has_key_anchor() const { return type & KEYANCH; }
bool has_val_anchor() const { return type & VALANCH; }
public:
TestCaseNode const& operator[] (size_t i) const
{
C4_ASSERT(i < children.size());
return children[i];
}
TestCaseNode const& operator[] (csubstr const& name) const
{
auto ch = lookup(name);
C4_ASSERT(ch != nullptr);
return *ch;
}
TestCaseNode const* lookup(csubstr const& name) const
{
C4_ASSERT( ! children.empty());
for(auto const& ch : children)
if(ch.key == name)
return &ch;
return nullptr;
}
public:
void compare(yml::ConstNodeRef const& n, bool ignore_quote=false) const;
void compare_child(yml::ConstNodeRef const& n, id_type pos) const;
id_type reccount() const
{
id_type c = 1;
for(auto const& ch : children)
c += ch.reccount();
return c;
}
void recreate(yml::NodeRef *n) const;
};
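// Usage sketch (illustrative only; it relies on the N/L aliases and the style
// flags such as MB, KP, VP, SFS declared for this class in test_group.hpp):
//
//   N(MB, L{
//       N(KP|VP, "foo", "bar"),                      // "foo: bar"
//       N(KP|SFS, "seq", L{N(VP, "a"), N(VP, "b")}), // "seq: [a, b]"
//   })
//
// would describe the reference tree for "foo: bar\nseq: [a, b]\n".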
} // namespace yml
} // namespace c4
#endif // _C4_YML_TEST_TEST_CASE_NODE_HPP_

View File

@@ -0,0 +1,143 @@
#include "./test_engine.hpp"
namespace c4 {
namespace yml {
namespace {
// inject comments on every line
std::vector<std::string> inject_comments(std::string const& src_)
{
std::vector<std::string> result;
csubstr src = to_csubstr(src_);
csubstr comment = " # this is a comment\n";
// inject a comment before the contents
{
std::string curr;
curr.append(comment.str, comment.len);
curr.append(src.str, src.len);
result.emplace_back(std::move(curr));
}
// inject a comment after each newline
size_t pos = src.find('\n');
do
{
csubstr before = src.first(pos);
csubstr after = pos != npos ? src.sub(pos) : src.last(0);
std::string curr;
curr.append(before.str, before.len);
curr += '\n';
curr.append(comment.str, comment.len);
curr.append(after.str, after.len);
result.emplace_back(std::move(curr));
pos = src.find('\n', pos+1);
} while(pos != npos);
return result;
}
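// Example of the transformation (a minimal sketch): for the input "a: 1\nb: 2\n"
// the returned vector holds one variant with " # this is a comment\n" prepended
// to the whole input, plus one variant per newline where a standalone comment
// line is injected at that newline (the original newline is kept, so a blank
// line may follow the injected comment).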
} // anon
void test_expected_error_events_from_yaml(std::string const& parsed_yaml, Location const& expected_error_location)
{
ExpectError::do_check([&]{
EventHandlerYamlStd::EventSink sink;
EventHandlerYamlStd handler(&sink);
handler.reset();
ParseEngine<EventHandlerYamlStd> parser(&handler);
std::string copy = parsed_yaml;
parser.parse_in_place_ev("(testyaml)", to_substr(copy));
}, expected_error_location);
}
void test_expected_error_tree_from_yaml(std::string const& parsed_yaml, Location const& expected_error_location)
{
Tree tree = {};
ExpectError::do_check(&tree, [&]{
EventHandlerTree handler(&tree, tree.root_id());
ASSERT_EQ(&tree, handler.m_tree);
ParseEngine<EventHandlerTree> parser(&handler);
ASSERT_EQ(&handler, parser.m_evt_handler);
ASSERT_EQ(&tree, parser.m_evt_handler->m_tree);
std::string copy = parsed_yaml;
parser.parse_in_place_ev("(testyaml)", to_substr(copy));
}, expected_error_location);
}
void test_new_parser_events_from_yaml(ReferenceYaml const& yaml, std::string const& expected_events)
{
EventHandlerYamlStd::EventSink sink;
EventHandlerYamlStd handler(&sink);
handler.reset();
ParseEngine<EventHandlerYamlStd> parser(&handler);
std::string copy = yaml.parsed;
parser.parse_in_place_ev("(testyaml)", to_substr(copy));
_c4dbgpf("~~~\n{}~~~\n", sink.result);
EXPECT_EQ(sink.result, expected_events);
}
void test_new_parser_tree_from_yaml(ReferenceYaml const& yaml)
{
if(yaml.test_case_flags & HAS_CONTAINER_KEYS)
{
test_expected_error_tree_from_yaml(yaml.parsed, yaml.expected_error_location);
}
else
{
Tree tree = {};
EventHandlerTree handler(&tree, tree.root_id());
ASSERT_EQ(&tree, handler.m_tree);
ParseEngine<EventHandlerTree> parser(&handler);
ASSERT_EQ(&handler, parser.m_evt_handler);
ASSERT_EQ(&tree, parser.m_evt_handler->m_tree);
std::string copy = yaml.parsed;
parser.parse_in_place_ev("(testyaml)", to_substr(copy));
#ifdef RYML_DBG
print_tree(tree);
#endif
std::string actual = emitrs_yaml<std::string>(tree);
_c4dbgpf("~~~\n{}~~~\n", actual);
EXPECT_EQ(actual, yaml.emitted);
}
}
void test_new_parser_events_from_yaml_with_comments(ReferenceYaml const& yaml, std::string const& expected_events)
{
if(yaml.test_case_flags & HAS_CONTAINER_KEYS)
return;
if(yaml.test_case_flags & HAS_MULTILINE_SCALAR)
return;
ReferenceYaml transformed = yaml;
const auto injected_comments = inject_comments(yaml.parsed);
for(size_t i = 0; i < injected_comments.size(); ++i)
{
const auto & transformed_str = injected_comments[i];
_c4dbgpf("transformed[{}/{}]=~~~[{}]\n{}\n~~~", i, injected_comments.size(), transformed_str.size(), to_csubstr(transformed_str));
SCOPED_TRACE(transformed_str);
SCOPED_TRACE("commented");
transformed.parsed = transformed_str;
test_new_parser_events_from_yaml(transformed, expected_events);
}
}
void test_new_parser_tree_from_yaml_with_comments(ReferenceYaml const& yaml)
{
if(yaml.test_case_flags & HAS_CONTAINER_KEYS)
return;
if(yaml.test_case_flags & HAS_MULTILINE_SCALAR)
return;
ReferenceYaml transformed = yaml;
const auto injected_comments = inject_comments(yaml.parsed);
for(size_t i = 0; i < injected_comments.size(); ++i)
{
const auto & transformed_str = injected_comments[i];
_c4dbgpf("transformed[{}/{}]=~~~[{}]\n{}\n~~~", i, injected_comments.size(), transformed_str.size(), to_csubstr(transformed_str));
SCOPED_TRACE(transformed_str);
SCOPED_TRACE("commented");
transformed.parsed = transformed_str;
test_new_parser_tree_from_yaml(transformed);
}
}
} // namespace yml
} // namespace c4

View File

@@ -0,0 +1,249 @@
#ifndef _C4_YML_TEST_TEST_ENGINE_HPP_
#define _C4_YML_TEST_TEST_ENGINE_HPP_
#ifdef RYML_SINGLE_HEADER
#include "ryml_all.hpp"
#else
#include "c4/yml/event_handler_tree.hpp"
#include "c4/yml/parse_engine.hpp"
#include "c4/yml/emit.hpp"
#include "c4/yml/detail/print.hpp"
#endif
#include <gtest/gtest.h>
#include "./test_lib/test_case.hpp"
#include "./test_suite/test_suite_event_handler.hpp"
namespace c4 {
namespace yml {
struct ReferenceYaml
{
ReferenceYaml( std::string const& s ) : test_case_flags(), expected_error_location(), parsed(s), emitted(s) {}
ReferenceYaml( std::string const& p, std::string const& e) : test_case_flags(), expected_error_location(), parsed(p), emitted(e) {}
ReferenceYaml(TestCaseFlags_e tf, std::string const& p, std::string const& e) : test_case_flags(tf), expected_error_location(), parsed(p), emitted(e) {}
ReferenceYaml(TestCaseFlags_e tf, std::string const& p ) : test_case_flags(tf), expected_error_location(), parsed(p), emitted(p) {}
ReferenceYaml( Location linecol_, std::string const& p ) : test_case_flags(), expected_error_location(linecol_), parsed(p), emitted(p) { RYML_ASSERT(linecol_); }
ReferenceYaml(TestCaseFlags_e tf, Location linecol_, std::string const& p, std::string const& e) : test_case_flags(tf), expected_error_location(linecol_), parsed(p), emitted(e) { RYML_ASSERT(linecol_); }
ReferenceYaml(TestCaseFlags_e tf, Location linecol_, std::string const& p ) : test_case_flags(tf), expected_error_location(linecol_), parsed(p), emitted(p) { RYML_ASSERT(linecol_); }
TestCaseFlags_e test_case_flags;
Location expected_error_location;
std::string parsed;
std::string emitted;
};
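// Construction sketch (inputs are illustrative):
//   ReferenceYaml same("foo: bar\n");                            // emitted == parsed
//   ReferenceYaml normalized("{foo: bar}", "foo: bar\n");        // emission is expected to normalize
//   ReferenceYaml ckey(HAS_CONTAINER_KEYS, "? [a, b]\n: val\n"); // flags steer the helpers below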
template<template<class> class EventProducerFn>
C4_NO_INLINE void test_new_parser_str_from_events(std::string const& expected_events)
{
EventHandlerYamlStd::EventSink sink;
EventHandlerYamlStd handler(&sink);
handler.reset();
EventProducerFn<EventHandlerYamlStd> event_producer;
event_producer(handler);
_c4dbgpf("~~~\n{}~~~\n", sink.result);
EXPECT_EQ(sink.result, expected_events);
}
template<template<class> class EventProducerFn>
C4_NO_INLINE void test_new_parser_tree_from_events(ReferenceYaml const& yaml)
{
if(yaml.test_case_flags & HAS_CONTAINER_KEYS)
{
ExpectError::do_check([&]{
Tree tree = {};
EventHandlerTree handler(&tree, tree.root_id());
EventProducerFn<EventHandlerTree> event_producer;
event_producer(handler);
});
}
else
{
Tree tree = {};
EventHandlerTree handler(&tree, tree.root_id());
EventProducerFn<EventHandlerTree> event_producer;
event_producer(handler);
#ifdef RYML_DBG
print_tree(tree);
#endif
std::string actual = emitrs_yaml<std::string>(tree);
_c4dbgpf("~~~\n{}~~~\n", actual);
EXPECT_EQ(actual, yaml.emitted);
}
}
void test_new_parser_events_from_yaml(ReferenceYaml const& yaml, std::string const& expected_events);
void test_new_parser_tree_from_yaml(ReferenceYaml const& yaml);
void test_new_parser_events_from_yaml_with_comments(ReferenceYaml const& yaml, std::string const& expected_events);
void test_new_parser_tree_from_yaml_with_comments(ReferenceYaml const& yaml);
void test_expected_error_events_from_yaml(std::string const& parsed_yaml, Location const& expected_error_location);
void test_expected_error_tree_from_yaml(std::string const& parsed_yaml, Location const& expected_error_location);
//-----------------------------------------------------------------------------
#ifdef RYML_DBG
#define _RYML_SHOWFILELINE(name) printf("%s:%d: " #name "\n", __FILE__, __LINE__)
#else
#define _RYML_SHOWFILELINE(name)
#endif
//-----------------------------------------------------------------------------
#define ENGINE_TEST_ERRLOC(name, location, refyaml) \
\
\
TEST(EngineTest, name##_err_events_from_yaml) \
{ \
_RYML_SHOWFILELINE(name); \
SCOPED_TRACE(#name "_err_events_from_yaml"); \
test_expected_error_events_from_yaml(refyaml, location); \
_RYML_SHOWFILELINE(name); \
} \
\
\
TEST(EngineTest, name##_err_tree_from_yaml) \
{ \
_RYML_SHOWFILELINE(name); \
SCOPED_TRACE(#name "_err_tree_from_yaml"); \
test_expected_error_tree_from_yaml(refyaml, location); \
_RYML_SHOWFILELINE(name); \
}
//-----------------------------------------------------------------------------
/* declare a common parser test for the existing event handlers */
#define ENGINE_TEST(name, refyaml, events) \
\
\
/* declare a function that will produce a \
sequence of events */ \
template<class Ps> \
void name##_impl(Ps &ps); \
\
\
/* package the function into a class */ \
template<class Ps> \
struct name \
{ \
void operator() (Ps &ps) \
{ \
name##_impl(ps); \
} \
}; \
\
\
TEST(EngineTest, name##_str_from_events) \
{ \
_RYML_SHOWFILELINE(name); \
SCOPED_TRACE(#name "_str_from_events"); \
test_new_parser_str_from_events<name>(events); \
_RYML_SHOWFILELINE(name); \
} \
\
TEST(EngineTest, name##_tree_from_events) \
{ \
_RYML_SHOWFILELINE(name); \
SCOPED_TRACE(#name "_wtree_from_events"); \
ReferenceYaml yaml refyaml; \
test_new_parser_tree_from_events<name>(yaml); \
_RYML_SHOWFILELINE(name); \
} \
\
TEST(EngineTest, name##_events_from_yaml) \
{ \
_RYML_SHOWFILELINE(name); \
SCOPED_TRACE(#name "_events_from_yaml"); \
ReferenceYaml yaml refyaml; \
test_new_parser_events_from_yaml(yaml, events); \
_RYML_SHOWFILELINE(name); \
} \
\
TEST(EngineTest, name##_tree_from_yaml) \
{ \
_RYML_SHOWFILELINE(name); \
SCOPED_TRACE(#name "_wtree_from_yaml"); \
ReferenceYaml yaml refyaml; \
test_new_parser_tree_from_yaml(yaml); \
_RYML_SHOWFILELINE(name); \
} \
\
TEST(EngineTest, name##_events_from_yaml_with_comments) \
{ \
_RYML_SHOWFILELINE(name); \
SCOPED_TRACE(#name "_events_from_yaml_with_comments"); \
ReferenceYaml yaml refyaml; \
test_new_parser_events_from_yaml_with_comments(yaml, events); \
} \
\
TEST(EngineTest, name##_tree_from_yaml_with_comments) \
{ \
_RYML_SHOWFILELINE(name); \
SCOPED_TRACE(#name "_wtree_from_yaml"); \
ReferenceYaml yaml refyaml; \
test_new_parser_tree_from_yaml_with_comments(yaml); \
_RYML_SHOWFILELINE(name); \
} \
\
\
/* define the function that will produce the \
* sequence of events */ \
template<class Ps> \
void name##_impl(Ps &ps)
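//
// Hedged usage sketch: the macro is invoked with the test name, the
// (parenthesized) ReferenceYaml constructor arguments, and the expected event
// string, followed by the event-producing body. The handler calls below are
// assumptions about the event-handler interface, and the event string is
// indicative only:
//
//   ENGINE_TEST(PlainScalarFoo,
//               ("foo\n"),
//               "+STR\n+DOC\n=VAL :foo\n-DOC\n-STR\n")
//   {
//       ___(ps.begin_stream());
//       ___(ps.begin_doc());
//       ___(ps.set_val_scalar_plain("foo"));
//       ___(ps.end_doc());
//       ___(ps.end_stream());
//   }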
//-----------------------------------------------------------------------------
#if !defined(RYML_DBG)
#define ___(stmt) stmt
#else
inline void _print_handler_info(EventHandlerYamlStd const& ps, csubstr stmt)
{
_c4dbgpf("{}", stmt);
auto indent = [](id_type n){
for(id_type level = 0; level < n; ++level)
{
_dbg_printf(" ");
}
};
for(id_type i = 0; i < ps.m_stack.size(); ++i)
{
auto const& str = ps._ev_buf_(i).get();
indent(i);
_dbg_printf("[{}]\n", i);
for(csubstr line : str.split('\n'))
{
indent(i);
_dbg_printf("{}\n", line);
}
}
}
inline void _print_handler_info(EventHandlerTree const& ps, csubstr stmt)
{
if(ps.m_parent)
_c4dbgpf("parent.id={} curr.id={} {}\n",
ps.m_parent->node_id, ps.m_curr->node_id, stmt);
else
_c4dbgpf("parent.id=-- curr.id={} {}\n",
ps.m_curr->node_id, stmt);
print_tree(*ps.m_tree);
}
#define ___(stmt) \
do \
{ \
stmt; \
_print_handler_info(ps, #stmt); \
} while(0)
#endif
} // namespace yml
} // namespace c4
#endif // _C4_YML_TEST_TEST_ENGINE_HPP_

View File

@@ -1,13 +1,12 @@
#ifndef RYML_SINGLE_HEADER
#include "c4/yml/detail/print.hpp"
#endif
#include "test_group.hpp"
#include "test_case.hpp"
#include "test_lib/test_group.hpp"
#include "test_lib/test_case.hpp"
#include <c4/fs/fs.hpp>
#include <fstream>
#include <stdexcept>
#define RYML_NFO (RYML_DBG || 0)
//-----------------------------------------------------------------------------
namespace c4 {
@@ -15,10 +14,9 @@ namespace yml {
void YmlTestCase::_test_parse_using_ryml(CaseDataLineEndings *cd)
{
#ifdef RYML_NFO
std::cout << "---------------\n";
std::cout << c->src;
std::cout << "---------------\n";
#ifdef RYML_DBG
if(_dbg_enabled())
printf("---------------\n%.*s\n---------------\n", (int)c->src.len, c->src.str);
#endif
if(c->flags & EXPECT_PARSE_ERROR)
@@ -28,12 +26,9 @@ void YmlTestCase::_test_parse_using_ryml(CaseDataLineEndings *cd)
parse_in_place(c->fileline, cd->src, &cd->parsed_tree);
if(flags & RESOLVE_REFS)
cd->parsed_tree.resolve();
#ifdef RYML_DBG
// if this point was reached, then it means that the expected
// error failed to occur. So print debugging info.
std::cout << "failed to catch expected error while parsing.\nPARSED TREE:\n";
print_tree(cd->parsed_tree);
#endif
_c4dbg_tree("UNEXPECTED PARSED TREE", cd->parsed_tree);
}, c->expected_location);
return;
}
@@ -41,11 +36,12 @@ void YmlTestCase::_test_parse_using_ryml(CaseDataLineEndings *cd)
cd->parsed_tree.clear();
parse_in_place(c->fileline, cd->src, &cd->parsed_tree);
#ifdef RYML_NFO
std::cout << "REF TREE:\n";
print_tree(c->root);
std::cout << "PARSED TREE:\n";
print_tree(cd->parsed_tree);
#ifdef RYML_DBG
if(_dbg_enabled())
{
print_test_tree("REF TREE", c->root);
_c4dbg_tree("PARSED TREE", cd->parsed_tree);
}
#endif
{
@@ -60,10 +56,7 @@ void YmlTestCase::_test_parse_using_ryml(CaseDataLineEndings *cd)
if(c->flags & RESOLVE_REFS)
{
cd->parsed_tree.resolve();
#ifdef RYML_NFO
std::cout << "resolved tree!!!\n";
print_tree(cd->parsed_tree);
#endif
_c4dbg_tree("resolved tree!!!", cd->parsed_tree);
{
SCOPED_TRACE("checking tree invariants of resolved parsed tree");
test_invariants(cd->parsed_tree);
@@ -84,10 +77,7 @@ void YmlTestCase::_test_parse_using_ryml(CaseDataLineEndings *cd)
if(c->flags & RESOLVE_REFS)
{
cd->parsed_tree.reorder();
#ifdef RYML_NFO
std::cout << "reordered tree!!!\n";
print_tree(cd->parsed_tree);
#endif
_c4dbg_tree("reordered tree!!!", cd->parsed_tree);
{
SCOPED_TRACE("checking tree invariants of reordered parsed tree after resolving");
test_invariants(cd->parsed_tree);
@@ -96,7 +86,6 @@ void YmlTestCase::_test_parse_using_ryml(CaseDataLineEndings *cd)
SCOPED_TRACE("checking node invariants of reordered parsed tree after resolving");
test_invariants(cd->parsed_tree.rootref());
}
{
SCOPED_TRACE("comparing parsed tree to ref tree");
EXPECT_GE(cd->parsed_tree.capacity(), c->root.reccount());
@@ -121,7 +110,7 @@ void YmlTestCase::_test_emit_yml_stdout(CaseDataLineEndings *cd)
//-----------------------------------------------------------------------------
void YmlTestCase::_test_emit_json_stdout(CaseDataLineEndings *cd)
{
if(!(c->flags & JSON_ALSO))
if(!(c->flags & JSON_WRITE))
return;
if(c->flags & EXPECT_PARSE_ERROR)
return;
@@ -145,7 +134,7 @@ void YmlTestCase::_test_emit_yml_cout(CaseDataLineEndings *cd)
//-----------------------------------------------------------------------------
void YmlTestCase::_test_emit_json_cout(CaseDataLineEndings *cd)
{
if(!(c->flags & JSON_ALSO))
if(!(c->flags & JSON_WRITE))
return;
if(c->flags & EXPECT_PARSE_ERROR)
return;
@@ -180,7 +169,7 @@ void YmlTestCase::_test_emit_yml_stringstream(CaseDataLineEndings *cd)
//-----------------------------------------------------------------------------
void YmlTestCase::_test_emit_json_stringstream(CaseDataLineEndings *cd)
{
if(!(c->flags & JSON_ALSO))
if(!(c->flags & JSON_WRITE))
return;
if(c->flags & EXPECT_PARSE_ERROR)
return;
@@ -232,7 +221,7 @@ void YmlTestCase::_test_emit_yml_ofstream(CaseDataLineEndings *cd)
//-----------------------------------------------------------------------------
void YmlTestCase::_test_emit_json_ofstream(CaseDataLineEndings *cd)
{
if(!(c->flags & JSON_ALSO))
if(!(c->flags & JSON_WRITE))
return;
if(c->flags & EXPECT_PARSE_ERROR)
return;
@@ -267,28 +256,28 @@ void YmlTestCase::_test_emit_yml_string(CaseDataLineEndings *cd)
return;
_ensure_parse(cd);
_ensure_emit(cd);
auto em = emitrs_yaml(cd->parsed_tree, &cd->emit_buf);
EXPECT_EQ(em.len, cd->emit_buf.size());
EXPECT_EQ(em.len, cd->numbytes_stdout);
#ifdef RYML_NFO
std::cout << em;
csubstr emitted = emitrs_yaml(cd->parsed_tree, &cd->emit_buf);
EXPECT_EQ(emitted.len, cd->emit_buf.size());
EXPECT_EQ(emitted.len, cd->numbytes_stdout);
#ifdef RYML_DBG
printf("%.*s", (int)emitted.len, emitted.str);
#endif
}
//-----------------------------------------------------------------------------
void YmlTestCase::_test_emit_json_string(CaseDataLineEndings *cd)
{
if(!(c->flags & JSON_ALSO))
if(!(c->flags & JSON_WRITE))
return;
if(c->flags & EXPECT_PARSE_ERROR)
return;
_ensure_parse(cd);
_ensure_emit_json(cd);
auto em = emitrs_json(cd->parsed_tree, &cd->emit_buf);
EXPECT_EQ(em.len, cd->emitjson_buf.size());
EXPECT_EQ(em.len, cd->numbytes_stdout_json);
#ifdef RYML_NFO
std::cout << em;
auto emitted = emitrs_json(cd->parsed_tree, &cd->emit_buf);
EXPECT_EQ(emitted.len, cd->emitjson_buf.size());
EXPECT_EQ(emitted.len, cd->numbytes_stdout_json);
#ifdef RYML_DBG
printf("%.*s", (int)emitted.len, emitted.str);
#endif
}
@@ -311,7 +300,7 @@ void YmlTestCase::_test_emitrs(CaseDataLineEndings *cd)
//-----------------------------------------------------------------------------
void YmlTestCase::_test_emitrs_json(CaseDataLineEndings *cd)
{
if(!(c->flags & JSON_ALSO))
if(!(c->flags & JSON_WRITE))
return;
if(c->flags & EXPECT_PARSE_ERROR)
return;
@@ -346,7 +335,7 @@ void YmlTestCase::_test_emitrs_cfile(CaseDataLineEndings *cd)
//-----------------------------------------------------------------------------
void YmlTestCase::_test_emitrs_json_cfile(CaseDataLineEndings *cd)
{
if(!(c->flags & JSON_ALSO))
if(!(c->flags & JSON_WRITE))
return;
if(c->flags & EXPECT_PARSE_ERROR)
return;
@@ -370,22 +359,27 @@ void YmlTestCase::_test_complete_round_trip(CaseDataLineEndings *cd)
return;
_ensure_parse(cd);
_ensure_emit(cd);
#ifdef RYML_DBG
printf("~~~~~~~~~~~~~~ emitted yml:\n");
_c4presc(cd->emitted_yml, /*keep_newlines*/true);
printf("~~~~~~~~~~~~~~\n");
#endif
{
SCOPED_TRACE("parsing emitted yml");
cd->parse_buf = cd->emit_buf;
cd->parsed_yml = to_substr(cd->parse_buf);
parse_in_place(c->fileline, cd->parsed_yml, &cd->emitted_tree);
}
#ifdef RYML_NFO
std::cout << "~~~~~~~~~~~~~~ src yml:\n";
_c4presc(cd->src);
std::cout << "~~~~~~~~~~~~~~ parsed tree:\n";
#ifdef RYML_DBG
printf("~~~~~~~~~~~~~~ src yml:\n");
_c4presc(cd->src, /*keep_newlines*/true);
printf("~~~~~~~~~~~~~~ parsed tree:\n");
print_tree(cd->parsed_tree);
std::cout << "~~~~~~~~~~~~~~ emitted yml:\n";
_c4presc(cd->emitted_yml);
std::cout << "~~~~~~~~~~~~~~ emitted tree:\n";
printf("~~~~~~~~~~~~~~ emitted yml:\n");
_c4presc(cd->emitted_yml, /*keep_newlines*/true);
printf("~~~~~~~~~~~~~~ emitted tree:\n");
print_tree(cd->emitted_tree);
std::cout << "~~~~~~~~~~~~~~\n";
printf("~~~~~~~~~~~~~~\n");
#endif
{
SCOPED_TRACE("checking node invariants of emitted tree");
@@ -416,7 +410,7 @@ void YmlTestCase::_test_complete_round_trip(CaseDataLineEndings *cd)
// in this case, we can ignore whether scalars are quoted.
// Because it can happen that a scalar was quoted in the
// original file, but the re-emitted data does not quote the
// scalars.
// scalars. FIXME!
c->root.compare(cd->emitted_tree.rootref(), true);
}
}
@@ -425,28 +419,33 @@ void YmlTestCase::_test_complete_round_trip(CaseDataLineEndings *cd)
//-----------------------------------------------------------------------------
void YmlTestCase::_test_complete_round_trip_json(CaseDataLineEndings *cd)
{
if(!(c->flags & JSON_ALSO))
if(!(c->flags & JSON_WRITE))
return;
if(c->flags & EXPECT_PARSE_ERROR)
return;
_ensure_parse(cd);
_ensure_emit_json(cd);
#ifdef RYML_DBG
printf("~~~~~~~~~~~~~~ emitted json:\n");
_c4presc(cd->emitted_json);
printf("~~~~~~~~~~~~~~\n");
#endif
{
SCOPED_TRACE("parsing emitted json");
cd->parse_buf_json = cd->emitjson_buf;
cd->parsed_json = to_substr(cd->parse_buf_json);
parse_in_place(c->fileline, cd->parsed_json, &cd->emitted_tree_json);
parse_json_in_place(c->fileline, cd->parsed_json, &cd->emitted_tree_json);
}
#ifdef RYML_NFO
std::cout << "~~~~~~~~~~~~~~ src yml:\n";
#ifdef RYML_DBG
printf("~~~~~~~~~~~~~~ src yml:\n");
_c4presc(cd->src);
std::cout << "~~~~~~~~~~~~~~ parsed tree:\n";
printf("~~~~~~~~~~~~~~ parsed tree:\n");
print_tree(cd->parsed_tree);
std::cout << "~~~~~~~~~~~~~~ emitted json:\n";
printf("~~~~~~~~~~~~~~ emitted json:\n");
_c4presc(cd->emitted_json);
std::cout << "~~~~~~~~~~~~~~ emitted json tree:\n";
printf("~~~~~~~~~~~~~~ emitted json tree:\n");
print_tree(cd->emitted_tree_json);
std::cout << "~~~~~~~~~~~~~~\n";
printf("~~~~~~~~~~~~~~\n");
#endif
{
SCOPED_TRACE("checking node invariants of emitted tree");
@@ -497,10 +496,10 @@ void YmlTestCase::_test_recreate_from_ref(CaseDataLineEndings *cd)
NodeRef r = cd->recreated.rootref();
c->root.recreate(&r);
}
#ifdef RYML_NFO
std::cout << "REF TREE:\n";
print_tree(c->root);
std::cout << "RECREATED TREE:\n";
#ifdef RYML_DBG
printf("REF TREE:\n");
print_test_tree(c->root);
printf("RECREATED TREE:\n");
print_tree(cd->recreated);
#endif
{
@@ -517,216 +516,5 @@ void YmlTestCase::_test_recreate_from_ref(CaseDataLineEndings *cd)
}
}
//-----------------------------------------------------------------------------
TEST_P(YmlTestCase, parse_unix)
{
SCOPED_TRACE("unix style");
_test_parse_using_ryml(&d->unix_style);
}
TEST_P(YmlTestCase, parse_windows)
{
SCOPED_TRACE("windows style");
_test_parse_using_ryml(&d->windows_style);
}
//-----------------------------------------------------------------------------
TEST_P(YmlTestCase, emit_yml_unix_stdout)
{
SCOPED_TRACE("unix style");
_test_emit_yml_stdout(&d->unix_style);
}
TEST_P(YmlTestCase, emit_json_unix_stdout)
{
SCOPED_TRACE("unix style json");
_test_emit_json_stdout(&d->unix_style_json);
}
TEST_P(YmlTestCase, emit_yml_windows_stdout)
{
SCOPED_TRACE("windows style");
_test_emit_yml_stdout(&d->windows_style);
}
TEST_P(YmlTestCase, emit_json_windows_stdout)
{
SCOPED_TRACE("windows style json");
_test_emit_json_stdout(&d->windows_style_json);
}
//-----------------------------------------------------------------------------
TEST_P(YmlTestCase, emit_yml_unix_cout)
{
SCOPED_TRACE("unix style");
_test_emit_yml_cout(&d->unix_style);
}
TEST_P(YmlTestCase, emit_json_unix_cout)
{
SCOPED_TRACE("unix style json");
_test_emit_json_cout(&d->unix_style_json);
}
TEST_P(YmlTestCase, emit_yml_windows_cout)
{
SCOPED_TRACE("windows style");
_test_emit_yml_cout(&d->windows_style);
}
TEST_P(YmlTestCase, emit_json_windows_cout)
{
SCOPED_TRACE("windows style json");
_test_emit_json_cout(&d->windows_style_json);
}
//-----------------------------------------------------------------------------
TEST_P(YmlTestCase, emit_yml_unix_stringstream)
{
SCOPED_TRACE("unix style");
_test_emit_yml_stringstream(&d->unix_style);
}
TEST_P(YmlTestCase, emit_json_unix_stringstream)
{
SCOPED_TRACE("unix style json");
_test_emit_json_stringstream(&d->unix_style_json);
}
TEST_P(YmlTestCase, emit_yml_windows_stringstream)
{
SCOPED_TRACE("windows style");
_test_emit_yml_stringstream(&d->windows_style);
}
TEST_P(YmlTestCase, emit_json_windows_stringstream)
{
SCOPED_TRACE("windows style json");
_test_emit_json_stringstream(&d->windows_style_json);
}
//-----------------------------------------------------------------------------
TEST_P(YmlTestCase, emit_yml_unix_ofstream)
{
SCOPED_TRACE("unix style");
_test_emit_yml_ofstream(&d->unix_style);
}
TEST_P(YmlTestCase, emit_json_unix_ofstream)
{
SCOPED_TRACE("unix style json");
_test_emit_json_ofstream(&d->unix_style_json);
}
TEST_P(YmlTestCase, emit_yml_windows_ofstream)
{
SCOPED_TRACE("windows style");
_test_emit_yml_ofstream(&d->windows_style);
}
TEST_P(YmlTestCase, emit_json_windows_ofstream)
{
SCOPED_TRACE("windows style json");
_test_emit_json_ofstream(&d->windows_style_json);
}
//-----------------------------------------------------------------------------
TEST_P(YmlTestCase, emit_yml_unix_string)
{
SCOPED_TRACE("unix style");
_test_emit_yml_string(&d->unix_style);
}
TEST_P(YmlTestCase, emit_json_unix_string)
{
SCOPED_TRACE("unix style json");
_test_emit_json_string(&d->unix_style_json);
}
TEST_P(YmlTestCase, emit_yml_windows_string)
{
SCOPED_TRACE("windows style");
_test_emit_yml_string(&d->windows_style);
}
TEST_P(YmlTestCase, emit_json_windows_string)
{
SCOPED_TRACE("windows style json");
_test_emit_json_string(&d->windows_style_json);
}
//-----------------------------------------------------------------------------
TEST_P(YmlTestCase, unix_emitrs)
{
SCOPED_TRACE("unix style");
_test_emitrs(&d->unix_style);
}
TEST_P(YmlTestCase, unix_emitrs_json)
{
SCOPED_TRACE("unix style json");
_test_emitrs_json(&d->unix_style_json);
}
TEST_P(YmlTestCase, windows_emitrs)
{
SCOPED_TRACE("windows style");
_test_emitrs(&d->windows_style);
}
TEST_P(YmlTestCase, windows_emitrs_json)
{
SCOPED_TRACE("windows style json");
_test_emitrs_json(&d->windows_style_json);
}
//-----------------------------------------------------------------------------
TEST_P(YmlTestCase, unix_emitrs_cfile)
{
SCOPED_TRACE("unix style");
_test_emitrs_cfile(&d->unix_style);
}
TEST_P(YmlTestCase, unix_emitrs_json_cfile)
{
SCOPED_TRACE("unix style json");
_test_emitrs_json_cfile(&d->unix_style_json);
}
TEST_P(YmlTestCase, windows_emitrs_cfile)
{
SCOPED_TRACE("windows style");
_test_emitrs_cfile(&d->windows_style);
}
TEST_P(YmlTestCase, windows_emitrs_json_cfile)
{
SCOPED_TRACE("windows style json");
_test_emitrs_json_cfile(&d->windows_style_json);
}
//-----------------------------------------------------------------------------
TEST_P(YmlTestCase, complete_unix_round_trip)
{
SCOPED_TRACE("unix style");
_test_complete_round_trip(&d->unix_style);
}
TEST_P(YmlTestCase, complete_unix_round_trip_json)
{
SCOPED_TRACE("unix style json");
_test_complete_round_trip_json(&d->unix_style_json);
}
TEST_P(YmlTestCase, complete_windows_round_trip)
{
SCOPED_TRACE("windows style");
_test_complete_round_trip(&d->windows_style);
}
TEST_P(YmlTestCase, complete_windows_round_trip_json)
{
SCOPED_TRACE("windows style json");
_test_complete_round_trip_json(&d->windows_style_json);
}
//-----------------------------------------------------------------------------
TEST_P(YmlTestCase, unix_recreate_from_ref)
{
SCOPED_TRACE("unix style");
_test_recreate_from_ref(&d->unix_style);
}
TEST_P(YmlTestCase, windows_recreate_from_ref)
{
SCOPED_TRACE("windows style");
_test_recreate_from_ref(&d->windows_style);
}
} // namespace yml
} // namespace c4

View File

@@ -0,0 +1,223 @@
#ifndef _C4_YML_TEST_TEST_GROUP_TEST_GROUP_DEF_HPP_
#define _C4_YML_TEST_TEST_GROUP_TEST_GROUP_DEF_HPP_
namespace c4 {
namespace yml {
int YmlTestCaseDefsWereIncluded() { return 42; }
//-----------------------------------------------------------------------------
TEST_P(YmlTestCase, parse_unix)
{
SCOPED_TRACE("unix style\n" + c->filelinebuf + ": case");
_test_parse_using_ryml(&d->unix_style);
}
TEST_P(YmlTestCase, parse_windows)
{
SCOPED_TRACE("windows style\n" + c->filelinebuf + ": case");
_test_parse_using_ryml(&d->windows_style);
}
//-----------------------------------------------------------------------------
TEST_P(YmlTestCase, emit_yml_unix_stdout)
{
SCOPED_TRACE("unix style\n" + c->filelinebuf + ": case");
_test_emit_yml_stdout(&d->unix_style);
}
TEST_P(YmlTestCase, emit_json_unix_stdout)
{
SCOPED_TRACE("unix style json\n" + c->filelinebuf + ": case");
_test_emit_json_stdout(&d->unix_style_json);
}
TEST_P(YmlTestCase, emit_yml_windows_stdout)
{
SCOPED_TRACE("windows style\n" + c->filelinebuf + ": case");
_test_emit_yml_stdout(&d->windows_style);
}
TEST_P(YmlTestCase, emit_json_windows_stdout)
{
SCOPED_TRACE("windows style json\n" + c->filelinebuf + ": case");
_test_emit_json_stdout(&d->windows_style_json);
}
//-----------------------------------------------------------------------------
TEST_P(YmlTestCase, emit_yml_unix_cout)
{
SCOPED_TRACE("unix style\n" + c->filelinebuf + ": case");
_test_emit_yml_cout(&d->unix_style);
}
TEST_P(YmlTestCase, emit_json_unix_cout)
{
SCOPED_TRACE("unix style json\n" + c->filelinebuf + ": case");
_test_emit_json_cout(&d->unix_style_json);
}
TEST_P(YmlTestCase, emit_yml_windows_cout)
{
SCOPED_TRACE("windows style\n" + c->filelinebuf + ": case");
_test_emit_yml_cout(&d->windows_style);
}
TEST_P(YmlTestCase, emit_json_windows_cout)
{
SCOPED_TRACE("windows style json\n" + c->filelinebuf + ": case");
_test_emit_json_cout(&d->windows_style_json);
}
//-----------------------------------------------------------------------------
TEST_P(YmlTestCase, emit_yml_unix_stringstream)
{
SCOPED_TRACE("unix style\n" + c->filelinebuf + ": case");
_test_emit_yml_stringstream(&d->unix_style);
}
TEST_P(YmlTestCase, emit_json_unix_stringstream)
{
SCOPED_TRACE("unix style json\n" + c->filelinebuf + ": case");
_test_emit_json_stringstream(&d->unix_style_json);
}
TEST_P(YmlTestCase, emit_yml_windows_stringstream)
{
SCOPED_TRACE("windows style\n" + c->filelinebuf + ": case");
_test_emit_yml_stringstream(&d->windows_style);
}
TEST_P(YmlTestCase, emit_json_windows_stringstream)
{
SCOPED_TRACE("windows style json\n" + c->filelinebuf + ": case");
_test_emit_json_stringstream(&d->windows_style_json);
}
//-----------------------------------------------------------------------------
TEST_P(YmlTestCase, emit_yml_unix_ofstream)
{
SCOPED_TRACE("unix style\n" + c->filelinebuf + ": case");
_test_emit_yml_ofstream(&d->unix_style);
}
TEST_P(YmlTestCase, emit_json_unix_ofstream)
{
SCOPED_TRACE("unix style json\n" + c->filelinebuf + ": case");
_test_emit_json_ofstream(&d->unix_style_json);
}
TEST_P(YmlTestCase, emit_yml_windows_ofstream)
{
SCOPED_TRACE("windows style\n" + c->filelinebuf + ": case");
_test_emit_yml_ofstream(&d->windows_style);
}
TEST_P(YmlTestCase, emit_json_windows_ofstream)
{
SCOPED_TRACE("windows style json\n" + c->filelinebuf + ": case");
_test_emit_json_ofstream(&d->windows_style_json);
}
//-----------------------------------------------------------------------------
TEST_P(YmlTestCase, emit_yml_unix_string)
{
SCOPED_TRACE("unix style\n" + c->filelinebuf + ": case");
_test_emit_yml_string(&d->unix_style);
}
TEST_P(YmlTestCase, emit_json_unix_string)
{
SCOPED_TRACE("unix style json\n" + c->filelinebuf + ": case");
_test_emit_json_string(&d->unix_style_json);
}
TEST_P(YmlTestCase, emit_yml_windows_string)
{
SCOPED_TRACE("windows style\n" + c->filelinebuf + ": case");
_test_emit_yml_string(&d->windows_style);
}
TEST_P(YmlTestCase, emit_json_windows_string)
{
SCOPED_TRACE("windows style json\n" + c->filelinebuf + ": case");
_test_emit_json_string(&d->windows_style_json);
}
//-----------------------------------------------------------------------------
TEST_P(YmlTestCase, unix_emitrs)
{
SCOPED_TRACE("unix style\n" + c->filelinebuf + ": case");
_test_emitrs(&d->unix_style);
}
TEST_P(YmlTestCase, unix_emitrs_json)
{
SCOPED_TRACE("unix style json\n" + c->filelinebuf + ": case");
_test_emitrs_json(&d->unix_style_json);
}
TEST_P(YmlTestCase, windows_emitrs)
{
SCOPED_TRACE("windows style\n" + c->filelinebuf + ": case");
_test_emitrs(&d->windows_style);
}
TEST_P(YmlTestCase, windows_emitrs_json)
{
SCOPED_TRACE("windows style json\n" + c->filelinebuf + ": case");
_test_emitrs_json(&d->windows_style_json);
}
//-----------------------------------------------------------------------------
TEST_P(YmlTestCase, unix_emitrs_cfile)
{
SCOPED_TRACE("unix style\n" + c->filelinebuf + ": case");
_test_emitrs_cfile(&d->unix_style);
}
TEST_P(YmlTestCase, unix_emitrs_json_cfile)
{
SCOPED_TRACE("unix style json\n" + c->filelinebuf + ": case");
_test_emitrs_json_cfile(&d->unix_style_json);
}
TEST_P(YmlTestCase, windows_emitrs_cfile)
{
SCOPED_TRACE("windows style\n" + c->filelinebuf + ": case");
_test_emitrs_cfile(&d->windows_style);
}
TEST_P(YmlTestCase, windows_emitrs_json_cfile)
{
SCOPED_TRACE("windows style json\n" + c->filelinebuf + ": case");
_test_emitrs_json_cfile(&d->windows_style_json);
}
//-----------------------------------------------------------------------------
TEST_P(YmlTestCase, complete_unix_round_trip)
{
SCOPED_TRACE("unix style:\n" + c->filelinebuf + ": case");
_test_complete_round_trip(&d->unix_style);
}
TEST_P(YmlTestCase, complete_unix_round_trip_json)
{
SCOPED_TRACE("unix style json\n" + c->filelinebuf + ": case");
_test_complete_round_trip_json(&d->unix_style_json);
}
TEST_P(YmlTestCase, complete_windows_round_trip)
{
SCOPED_TRACE("windows style\n" + c->filelinebuf + ": case");
_test_complete_round_trip(&d->windows_style);
}
TEST_P(YmlTestCase, complete_windows_round_trip_json)
{
SCOPED_TRACE("windows style json\n" + c->filelinebuf + ": case");
_test_complete_round_trip_json(&d->windows_style_json);
}
//-----------------------------------------------------------------------------
TEST_P(YmlTestCase, unix_recreate_from_ref)
{
SCOPED_TRACE("unix style\n" + c->filelinebuf + ": case");
_test_recreate_from_ref(&d->unix_style);
}
TEST_P(YmlTestCase, windows_recreate_from_ref)
{
SCOPED_TRACE("windows style\n" + c->filelinebuf + ": case");
_test_recreate_from_ref(&d->windows_style);
}
} // namespace yml
} // namespace c4
#endif // _C4_YML_TEST_TEST_GROUP_TEST_GROUP_DEF_HPP_

View File

@@ -2,7 +2,7 @@
#ifndef C4_RYML_TEST_GROUP_HPP_
#define C4_RYML_TEST_GROUP_HPP_
#include "./test_case.hpp"
#include "./test_lib/test_case.hpp"
#include "c4/span.hpp"
#include <algorithm>
@@ -21,6 +21,15 @@
//# pragma GCC diagnostic ignored "-Wpragma-system-header-outside-header"
#endif
#if defined(RYML_WITH_TAB_TOKENS)
#define _RYML_WITH_TAB_TOKENS(...) __VA_ARGS__
#define _RYML_WITHOUT_TAB_TOKENS(...)
#define _RYML_WITH_OR_WITHOUT_TAB_TOKENS(with, without) with
#else
#define _RYML_WITH_TAB_TOKENS(...)
#define _RYML_WITHOUT_TAB_TOKENS(...) __VA_ARGS__
#define _RYML_WITH_OR_WITHOUT_TAB_TOKENS(with, without) without
#endif
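// e.g. _RYML_WITH_OR_WITHOUT_TAB_TOKENS("a:\tb", "a: b") expands to the first
// argument when RYML_WITH_TAB_TOKENS is defined, and to the second otherwise;
// the other two macros keep or drop their arguments accordingly.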
namespace c4 {
namespace yml {
@@ -43,10 +52,20 @@ struct YmlTestCase : public ::testing::TestWithParam<csubstr>
void SetUp() override
{
// Code here will be called immediately after the constructor (right
// before each test).
_show_origin();
}
void TearDown() override
{
#ifdef RYML_DBG
_show_origin();
#endif
}
void _show_origin()
{
std::cout << "-------------------------------------------\n";
std::cout << "running test case '" << name << "'\n";
std::cout << c->filelinebuf << ": " << name << "\n";
std::cout << "-------------------------------------------\n";
}
@@ -117,19 +136,56 @@ struct YmlTestCase : public ::testing::TestWithParam<csubstr>
//-----------------------------------------------------------------------------
// facilities for declaring test data
using N = CaseNode;
using L = CaseNode::iseqmap;
using N = TestCaseNode;
using L = TestCaseNode::iseqmap;
using TS = TaggedScalar;
using TL = CaseNode::TaggedList;
using TL = TestCaseNode::TaggedList;
using AR = AnchorRef;
constexpr const NodeType_e QK = (NodeType_e)(VAL | KEYQUO);
constexpr const NodeType_e QV = (NodeType_e)(VAL | VALQUO);
constexpr const NodeType_e QKV = (NodeType_e)(VAL | KEYQUO | VALQUO);
C4_SUPPRESS_WARNING_GCC_PUSH
#ifdef __GNUC__
#if __GNUC__ == 4 && __GNUC_MINOR__ >= 8
struct CaseAdder {
#if defined(__GNUC__) && (__GNUC__ > 5)
C4_SUPPRESS_WARNING_GCC("-Wunused-const-variable")
#endif
constexpr const NodeType_e KP = (KEY|KEY_PLAIN); ///< key, plain scalar
constexpr const NodeType_e KS = (KEY|KEY_SQUO); ///< key, single-quoted scalar
constexpr const NodeType_e KD = (KEY|KEY_DQUO); ///< key, double-quoted scalar
constexpr const NodeType_e KL = (KEY|KEY_LITERAL); ///< key, block-literal scalar
constexpr const NodeType_e KF = (KEY|KEY_FOLDED); ///< key, block-folded scalar
constexpr const NodeType_e VP = (VAL|VAL_PLAIN); ///< val, plain scalar
constexpr const NodeType_e VS = (VAL|VAL_SQUO); ///< val, single-quoted scalar
constexpr const NodeType_e VD = (VAL|VAL_DQUO); ///< val, double-quoted scalar
constexpr const NodeType_e VL = (VAL|VAL_LITERAL); ///< val, block-literal scalar
constexpr const NodeType_e VF = (VAL|VAL_FOLDED); ///< val, block-folded scalar
constexpr const NodeType_e SB = (SEQ|BLOCK); ///< sequence, block-style
constexpr const NodeType_e SFS = (SEQ|FLOW_SL); ///< sequence, flow-style, single-line
constexpr const NodeType_e SFM = (SEQ|FLOW_ML); ///< sequence, flow-style, multi-line
constexpr const NodeType_e MB  = (MAP|BLOCK);      ///< map, block-style
constexpr const NodeType_e MFS = (MAP|FLOW_SL); ///< map, flow-style, single-line
constexpr const NodeType_e MFM = (MAP|FLOW_ML); ///< map, flow-style, multi-line
C4_SUPPRESS_WARNING_GCC_POP
//-----------------------------------------------------------------------------
//-----------------------------------------------------------------------------
//-----------------------------------------------------------------------------
// utilities to create the parameterized cases for each test group
#if !(defined(__GNUC__) && (__GNUC__ == 4) && (__GNUC_MINOR__ >= 8))
/** use this macro to add a case to the test group. */
#define ADD_CASE_TO_GROUP(...) \
group_cases__->emplace_back(csubstr(__FILE__), __LINE__+1, __VA_ARGS__)
#else
struct CaseAdderGcc4_8
{
std::vector<Case> *group_cases;
const csubstr file;
const int line;
@@ -141,21 +197,20 @@ struct CaseAdder {
};
/* all arguments are to the constructor of Case */
#define ADD_CASE_TO_GROUP CaseAdder{group_cases__, csubstr(__FILE__), __LINE__+1}
#endif
#endif
#ifndef ADD_CASE_TO_GROUP
#define ADD_CASE_TO_GROUP(...) \
group_cases__->emplace_back(csubstr(__FILE__), __LINE__+1, __VA_ARGS__)
#define ADD_CASE_TO_GROUP CaseAdderGcc4_8{group_cases__, csubstr(__FILE__), __LINE__+1}
#endif
/** declares a function where we can call ADD_CASE_TO_GROUP()
* to populate the group */
#define CASE_GROUP(group_name) \
\
/* fwd declaration to fill the container with cases */ \
\
/* fwd-declare a function to fill a container of case data */ \
void add_cases_##group_name(std::vector<Case> *group_cases); \
\
/* container with the cases */ \
\
/* container with cases data. not the parameterized container */ \
std::vector<Case> const& get_cases_##group_name() \
{ \
static std::vector<Case> cases_##group_name; \
@@ -164,7 +219,8 @@ std::vector<Case> const& get_cases_##group_name() \
return cases_##group_name; \
} \
\
/* container with the case names */ \
\
/* container with case names. this is the parameterized container. */ \
std::vector<csubstr> const& get_case_names_##group_name() \
{ \
static std::vector<csubstr> case_names_##group_name; \
@@ -176,14 +232,20 @@ std::vector<csubstr> const& get_case_names_##group_name() \
std::vector<csubstr> cp = case_names_##group_name; \
std::sort(cp.begin(), cp.end()); \
for(size_t i = 0; i+1 < cp.size(); ++i) \
{ \
if(cp[i] == cp[i+1]) \
C4_ERROR("duplicate case name: '%.*s'", _c4prsp(cp[i])); \
{ \
printf("duplicate case name: '%.*s'", (int)cp[i].len, cp[i].str); \
C4_ERROR("duplicate case name: '%.*s'", (int)cp[i].len, cp[i].str); \
} \
} \
} \
return case_names_##group_name; \
} \
\
\
INSTANTIATE_TEST_SUITE_P(group_name, YmlTestCase, ::testing::ValuesIn(get_case_names_##group_name())); \
GTEST_ALLOW_UNINSTANTIATED_PARAMETERIZED_TEST(YmlTestCase); \
\
\
/* used by the fixture to obtain a case by name */ \
Case const* get_case(csubstr name) \
@@ -191,10 +253,15 @@ Case const* get_case(csubstr name) \
for(Case const& c : get_cases_##group_name()) \
if(c.name == name) \
return &c; \
C4_ERROR("case not found: '%.*s'", _c4prsp(name)); \
printf("case not found: '%.*s' defs_included=%d\n", \
(int)name.len, name.str, \
/*call this function to ensure the tests were included*/ \
YmlTestCaseDefsWereIncluded()); \
C4_ERROR("case not found: '%.*s'", (int)name.len, name.str); \
return nullptr; \
} \
\
\
/* finally, define the cases by calling ADD_CASE_TO_GROUP() */ \
void add_cases_##group_name(std::vector<Case> *group_cases__)
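Put together, a case file is expected to look like the new test_map_*.cpp files further down in this diff. A trimmed sketch, where EXAMPLE_GROUP is a hypothetical group name used only for illustration, and N, L and the flag shorthands come from the test_lib headers above:
#include "./test_lib/test_group.hpp"
#include "./test_lib/test_group.def.hpp"
namespace c4 {
namespace yml {
CASE_GROUP(EXAMPLE_GROUP)  // hypothetical group, for illustration only
{
    // each call registers one parameterized case: (name, yaml source, expected tree)
    ADD_CASE_TO_GROUP("plain block map",
R"(foo: bar
)",
    N(MB, L{N(KP|VP, "foo", "bar")})
    );
}
} // namespace yml
} // namespace c4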


@@ -2,7 +2,7 @@
#include <c4/yml/std/std.hpp>
#include <c4/yml/yml.hpp>
#endif
#include "./test_case.hpp"
#include "./test_lib/test_case.hpp"
#include <gtest/gtest.h>
@@ -16,19 +16,23 @@ TEST(locations, default_is_no_location)
EXPECT_EQ(opts.locations(), false);
}
{
Parser parser;
Parser::handler_type evt_handler = {};
Parser parser(&evt_handler);
EXPECT_EQ(parser.options().locations(), false);
}
{
Parser parser(ParserOptions{});
Parser::handler_type evt_handler = {};
Parser parser(&evt_handler, ParserOptions{});
EXPECT_EQ(parser.options().locations(), false);
}
{
Parser parser(ParserOptions().locations(false));
Parser::handler_type evt_handler = {};
Parser parser(&evt_handler, ParserOptions().locations(false));
EXPECT_EQ(parser.options().locations(), false);
}
{
Parser parser(ParserOptions().locations(true));
Parser::handler_type evt_handler = {};
Parser parser(&evt_handler, ParserOptions().locations(true));
EXPECT_EQ(parser.options().locations(), true);
}
}
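The recurring change in this file is the construction pattern required by the event-based parser: the Parser now takes a pointer to an externally owned event handler, and parsing goes through the free-function parse_in_arena() overload that takes the parser instead of a member call. Stripped of the test scaffolding, the pattern is (file name and YAML text are placeholders):
Parser::handler_type evt_handler = {};                         // tree-building event handler
Parser parser(&evt_handler, ParserOptions().locations(true));  // parser borrows the handler
Tree t = parse_in_arena(&parser, "myfile.yml", "foo: bar");    // free function, not a member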
@@ -38,8 +42,9 @@ TEST(locations, error_is_triggered_querying_with_locations_disabled)
{
bool parsed_ok = false;
ExpectError::do_check([&]{
Parser parser(ParserOptions().locations(false));
Tree t = parser.parse_in_arena("test", "foo: bar");
Parser::handler_type evt_handler = {};
Parser parser(&evt_handler, ParserOptions().locations(false));
Tree t = parse_in_arena(&parser, "test", "foo: bar");
parsed_ok = true;
(void)parser.location(t["foo"]);
});
@@ -59,9 +64,10 @@ TEST(locations, error_is_triggered_querying_with_locations_disabled)
TEST(locations, no_error_is_triggered_querying_with_locations)
{
Parser parser(ParserOptions().locations(true));
Parser::handler_type evt_handler = {};
Parser parser(&evt_handler, ParserOptions().locations(true));
EXPECT_EQ(parser.options().locations(), true);
Tree t = parser.parse_in_arena("myfile.yml", "foo: bar");
Tree t = parse_in_arena(&parser, "myfile.yml", "foo: bar");
_checkloc(t["foo"], 0, 0, "foo");
}
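_checkloc verifies the position information recorded while parsing: with locations enabled, the parser can be asked afterwards where any node of the resulting tree came from. A minimal sketch, assuming the Location fields (line, col, offset, name) that _checkloc compares against:
Parser::handler_type evt_handler = {};
Parser parser(&evt_handler, ParserOptions().locations(true));
Tree t = parse_in_arena(&parser, "myfile.yml", "foo: bar");
Location loc = parser.location(t["foo"]);
EXPECT_EQ(loc.line, 0u);   // first line of the buffer
EXPECT_EQ(loc.col, 0u);    // "foo" starts at column 0
// loc.name should refer to the buffer name given above, "myfile.yml"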
@@ -69,24 +75,26 @@ TEST(locations, no_error_is_triggered_querying_with_locations)
TEST(locations, docval)
{
ParserOptions opts = ParserOptions().locations(true);
Parser parser(opts);
Tree t = parser.parse_in_arena("myfile.yml", "docval");
Parser::handler_type evt_handler = {};
Parser parser(&evt_handler, opts);
Tree t = parse_in_arena(&parser, "myfile.yml", "docval");
_checkloc(t.rootref(), 0u, 0u, "docval");
t = parser.parse_in_arena("myfile.yml", "\n docval");
t = parse_in_arena(&parser, "myfile.yml", "\n docval");
_checkloc(t.rootref(), 1u, 1u, "docval");
t = parser.parse_in_arena("myfile.yml", "\n\n docval");
t = parse_in_arena(&parser, "myfile.yml", "\n\n docval");
_checkloc(t.rootref(), 2u, 1u, "docval");
}
TEST(locations, docval_null)
{
ParserOptions opts = ParserOptions().locations(true);
Parser parser(opts);
Tree t = parser.parse_in_arena("myfile.yml", "~");
Parser::handler_type evt_handler = {};
Parser parser(&evt_handler, opts);
Tree t = parse_in_arena(&parser, "myfile.yml", "~");
_checkloc(t.rootref(), 0u, 0u, "~");
t = parser.parse_in_arena("myfile.yml", "");
t = parse_in_arena(&parser, "myfile.yml", "");
_checkloc(t.rootref(), 0u, 0u, "");
t = parser.parse_in_arena("myfile.yml", R"(#
t = parse_in_arena(&parser, "myfile.yml", R"(#
#
#
#
@@ -98,7 +106,8 @@ TEST(locations, docval_null)
TEST(locations, seq_block)
{
ParserOptions opts = ParserOptions().locations(true);
Parser parser(opts);
Parser::handler_type evt_handler = {};
Parser parser(&evt_handler, opts);
csubstr yaml = R"(
- this
- is
@@ -116,7 +125,7 @@ TEST(locations, seq_block)
- another val
- yet another val
)";
Tree t = parser.parse_in_arena("myfile.yml", yaml);
Tree t = parse_in_arena(&parser, "myfile.yml", yaml);
ConstNodeRef seq = t.rootref();
ASSERT_TRUE(seq.is_seq());
_checkloc(seq , 1u, 0u, "- ");
@@ -141,7 +150,8 @@ TEST(locations, seq_block)
TEST(locations, map_block)
{
ParserOptions opts = ParserOptions().locations(true);
Parser parser(opts);
Parser::handler_type evt_handler = {};
Parser parser(&evt_handler, opts);
csubstr yaml = R"(
this: ~
is: ~
@@ -156,7 +166,7 @@ and:
val: here
hah: here
)";
Tree t = parser.parse_in_arena("myfile.yml", yaml);
Tree t = parse_in_arena(&parser, "myfile.yml", yaml);
ConstNodeRef map = t.rootref();
ASSERT_TRUE(map.is_map());
_checkloc(map , 1u, 0u, "this:");
@@ -176,8 +186,9 @@ and:
TEST(locations, seq_block_null)
{
ParserOptions opts = ParserOptions().locations(true);
Parser parser(opts);
const Tree t = parser.parse_in_arena("myfile.yml", R"(---
Parser::handler_type evt_handler = {};
Parser parser(&evt_handler, opts);
const Tree t = parse_in_arena(&parser, "myfile.yml", R"(---
- ~
- ~
- notnull
@@ -266,8 +277,9 @@ TEST(locations, seq_block_null)
TEST(locations, map_block_null)
{
ParserOptions opts = ParserOptions().locations(true);
Parser parser(opts);
Tree t = parser.parse_in_arena("myfile.yml", R"(---
Parser::handler_type evt_handler = {};
Parser parser(&evt_handler, opts);
Tree t = parse_in_arena(&parser, "myfile.yml", R"(---
~: v
---
null: v
@@ -286,8 +298,9 @@ null: v
TEST(locations, empty_seq)
{
ParserOptions opts = ParserOptions().locations(true);
Parser parser(opts);
Tree t = parser.parse_in_arena("myfile.yml", R"(---
Parser::handler_type evt_handler = {};
Parser parser(&evt_handler, opts);
Tree t = parse_in_arena(&parser, "myfile.yml", R"(---
- []
- []
- notnull
@@ -327,8 +340,9 @@ key: []
TEST(locations, empty_map)
{
ParserOptions opts = ParserOptions().locations(true);
Parser parser(opts);
Tree t = parser.parse_in_arena("myfile.yml", R"(---
Parser::handler_type evt_handler = {};
Parser parser(&evt_handler, opts);
Tree t = parse_in_arena(&parser, "myfile.yml", R"(---
- {}
- {}
- notnull
@@ -370,9 +384,10 @@ TEST(locations, seq_flow)
{
Tree t;
ParserOptions opts = ParserOptions().locations(true);
Parser parser(opts);
Parser::handler_type evt_handler = {};
Parser parser(&evt_handler, opts);
csubstr yaml = R"([one,two,three,four,items])";
parser.parse_in_arena("myfile.yml", yaml, &t);
parse_in_arena(&parser, "myfile.yml", yaml, &t);
ConstNodeRef seq = t.rootref();
ASSERT_TRUE(seq.is_seq());
_checkloc(seq , 0u, 0u, "[");
@@ -387,9 +402,10 @@ TEST(locations, map_flow)
{
Tree t;
ParserOptions opts = ParserOptions().locations(true);
Parser parser(opts);
Parser::handler_type evt_handler = {};
Parser parser(&evt_handler, opts);
csubstr yaml = R"({one: item,two: items,three: items,four: items})";
parser.parse_in_arena("myfile.yml", yaml, &t);
parse_in_arena(&parser, "myfile.yml", yaml, &t);
ConstNodeRef map = t.rootref();
ASSERT_TRUE(map.is_map());
_checkloc(map , 0u, 0u, "{");
@@ -403,7 +419,8 @@ TEST(locations, seq_flow_nested)
{
Tree t;
ParserOptions opts = ParserOptions().locations(true);
Parser parser(opts);
Parser::handler_type evt_handler = {};
Parser parser(&evt_handler, opts);
csubstr yaml = R"([
one,
two,
@@ -414,7 +431,7 @@ TEST(locations, seq_flow_nested)
it,
was
])";
parser.parse_in_arena("myfile.yml", yaml, &t);
parse_in_arena(&parser, "myfile.yml", yaml, &t);
ConstNodeRef seq = t.rootref();
ASSERT_TRUE(seq.is_seq());
_checkloc(seq , 0u, 0u, "[");
@@ -444,10 +461,11 @@ TEST(locations, seq_flow_nested)
TEST(locations, grow_array)
{
ParserOptions opts = ParserOptions().locations(true);
Parser parser(opts);
Tree t = parser.parse_in_arena("myfile.yml", "docval");
Parser::handler_type evt_handler = {};
Parser parser(&evt_handler, opts);
Tree t = parse_in_arena(&parser, "myfile.yml", "docval");
_checkloc(t.rootref(), 0u, 0u, "docval");
t = parser.parse_in_arena("myfile.yml", "\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\ndocval");
t = parse_in_arena(&parser, "myfile.yml", "\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\n\ndocval");
_checkloc(t.rootref(), 47u, 0u, "docval");
}
@@ -457,7 +475,8 @@ TEST(locations, small_array)
{
Tree t;
ParserOptions opts = ParserOptions().locations(true);
Parser parser(opts);
Parser::handler_type evt_handler = {};
Parser parser(&evt_handler, opts);
csubstr yaml = R"(---
foo: yes
bar:
@@ -468,7 +487,7 @@ baz:
- 2_
- 3_
)";
parser.parse_in_arena("myfile.yml", yaml, &t);
parse_in_arena(&parser, "myfile.yml", yaml, &t);
ConstNodeRef stream = t.rootref();
ConstNodeRef map = t.docref(0);
ASSERT_TRUE(map.is_map());
@@ -491,7 +510,8 @@ TEST(locations, large_array)
{
Tree t;
ParserOptions opts = ParserOptions().locations(true);
Parser parser(opts);
Parser::handler_type evt_handler = {};
Parser parser(&evt_handler, opts);
csubstr yaml = R"(---
foo1: definitely # 1
bar1:
@@ -557,7 +577,7 @@ baz6:
- 2_
- 3_
)";
parser.parse_in_arena("myfile.yml", yaml, &t);
parse_in_arena(&parser, "myfile.yml", yaml, &t);
ConstNodeRef map = t.docref(0);
ASSERT_TRUE(map.is_map());
ASSERT_TRUE(map.is_doc());
@@ -622,8 +642,9 @@ baz6:
TEST(locations, issue260_0)
{
ParserOptions opts = ParserOptions().locations(true);
Parser parser(opts);
Tree tree = parser.parse_in_arena("source.yml", R"(Body:
Parser::handler_type evt_handler = {};
Parser parser(&evt_handler, opts);
Tree tree = parse_in_arena(&parser, "source.yml", R"(Body:
- Id: 1
Name: Apple
Script: |
@@ -646,8 +667,9 @@ TEST(locations, issue260_0)
TEST(locations, issue260_1)
{
ParserOptions opts = ParserOptions().locations(true);
Parser parser(opts);
Tree tree = parser.parse_in_arena("source.yml", R"(Body: # 0
Parser::handler_type evt_handler = {};
Parser parser(&evt_handler, opts);
Tree tree = parse_in_arena(&parser, "source.yml", R"(Body: # 0
- Id: 1 # line 1
Name: Apple
- Id: 2 # line 3

test/test_map.cpp Normal file

File diff suppressed because it is too large


@@ -1,4 +1,5 @@
#include "./test_group.hpp"
#include "./test_lib/test_group.hpp"
#include "test_lib/test_group.def.hpp"
namespace c4 {
namespace yml {
@@ -9,13 +10,13 @@ CASE_GROUP(EMPTY_MAP)
ADD_CASE_TO_GROUP("empty map, explicit",
"{}",
MAP
MFS
);
ADD_CASE_TO_GROUP("empty map, explicit, whitespace",
" {}",
MAP
MFS
);
@@ -24,7 +25,7 @@ R"({
}
)",
MAP
MFS
);
@@ -35,7 +36,7 @@ R"({
}
)",
MAP
MFS
);
}

test/test_map_generic.cpp Normal file

@@ -0,0 +1,95 @@
#include "./test_lib/test_group.hpp"
#include "./test_lib/test_group.def.hpp"
namespace c4 {
namespace yml {
CASE_GROUP(GENERIC_MAP)
{
ADD_CASE_TO_GROUP("generic map",
R"(
a simple key: a value # The KEY token is produced here.
? a complex key
: another value
a mapping:
key 1: value 1
key 2: value 2
a sequence:
- item 1
- item 2
)",
N(MB, L{
N(KP|VP, "a simple key", "a value"),
N(KP|VP, "a complex key", "another value"),
N(KP|MB, "a mapping", L{N(KP|VP, "key 1", "value 1"), N(KP|VP, "key 2", "value 2")}),
N(KP|SB, "a sequence", L{N(VP, "item 1"), N(VP, "item 2")}),
})
);
ADD_CASE_TO_GROUP("seq nested in map",
R"(
items:
- part_no: A4786
descrip: Water Bucket (Filled)
price: 1.47
quantity: 4
- part_no: E1628
descrip: High Heeled "Ruby" Slippers
size: 8
price: 133.7
quantity: 1
)",
N(MB, L{
N{KP|SB, "items", L{
N{MB, L{
N{KP|VP, "part_no", "A4786"},
N{KP|VP, "descrip", "Water Bucket (Filled)"},
N{KP|VP, "price", "1.47"},
N{KP|VP, "quantity", "4"},}},
N{MB, L{
N{KP|VP, "part_no", "E1628"},
N{KP|VP, "descrip", "High Heeled \"Ruby\" Slippers"},
N{KP|VP, "size", "8"},
N{KP|VP, "price", "133.7"},
N{KP|VP, "quantity", "1"},}}}},
})
);
ADD_CASE_TO_GROUP("seq nested in map, v2",
R"(
items:
-
part_no: A4786
descrip: Water Bucket (Filled)
price: 1.47
quantity: 4
-
part_no: E1628
descrip: High Heeled "Ruby" Slippers
size: 8
price: 133.7
quantity: 1
)",
N(MB, L{
N{KP|SB, "items", L{
N{MB, L{
N{KP|VP, "part_no", "A4786"},
N{KP|VP, "descrip", "Water Bucket (Filled)"},
N{KP|VP, "price", "1.47"},
N{KP|VP, "quantity", "4"},}},
N{MB, L{
N{KP|VP, "part_no", "E1628"},
N{KP|VP, "descrip", "High Heeled \"Ruby\" Slippers"},
N{KP|VP, "size", "8"},
N{KP|VP, "price", "133.7"},
N{KP|VP, "quantity", "1"},}}}},
})
);
}
} // namespace yml
} // namespace c4


@@ -0,0 +1,75 @@
#include "./test_lib/test_group.hpp"
#include "./test_lib/test_group.def.hpp"
namespace c4 {
namespace yml {
CASE_GROUP(NESTED_MAPX2)
{
ADD_CASE_TO_GROUP("nested map x2, explicit, same line",
R"({foo: {foo0: 00, bar0: 01, baz0: 02}, bar: {foo1: 10, bar1: 11, baz1: 12}, baz: {foo2: 20, bar2: 21, baz2: 22}})",
N(MFS,L{
N{KP|MFS, "foo", L{N{KP|VP, "foo0", "00"}, N{KP|VP, "bar0", "01"}, N{KP|VP, "baz0", "02"}}},
N{KP|MFS, "bar", L{N{KP|VP, "foo1", "10"}, N{KP|VP, "bar1", "11"}, N{KP|VP, "baz1", "12"}}},
N{KP|MFS, "baz", L{N{KP|VP, "foo2", "20"}, N{KP|VP, "bar2", "21"}, N{KP|VP, "baz2", "22"}}},
})
);
ADD_CASE_TO_GROUP("nested map x2, explicit",
R"({
foo: {foo0: 00, bar0: 01, baz0: 02},
bar: {foo1: 10, bar1: 11, baz1: 12},
baz: {foo2: 20, bar2: 21, baz2: 22}
})",
N(MFS, L{
N{KP|MFS, "foo", L{N{KP|VP, "foo0", "00"}, N{KP|VP, "bar0", "01"}, N{KP|VP, "baz0", "02"}}},
N{KP|MFS, "bar", L{N{KP|VP, "foo1", "10"}, N{KP|VP, "bar1", "11"}, N{KP|VP, "baz1", "12"}}},
N{KP|MFS, "baz", L{N{KP|VP, "foo2", "20"}, N{KP|VP, "bar2", "21"}, N{KP|VP, "baz2", "22"}}},
})
);
ADD_CASE_TO_GROUP("nested map x2",
R"(
foo:
foo0: 00
bar0: 01
baz0: 02
bar:
foo1: 10
bar1: 11
baz1: 12
baz:
foo2: 20
bar2: 21
baz2: 22
)",
N(MB, L{
N{KP|MB, "foo", L{N{KP|VP, "foo0", "00"}, N{KP|VP, "bar0", "01"}, N{KP|VP, "baz0", "02"}}},
N{KP|MB, "bar", L{N{KP|VP, "foo1", "10"}, N{KP|VP, "bar1", "11"}, N{KP|VP, "baz1", "12"}}},
N{KP|MB, "baz", L{N{KP|VP, "foo2", "20"}, N{KP|VP, "bar2", "21"}, N{KP|VP, "baz2", "22"}}},
})
);
ADD_CASE_TO_GROUP("nested map x2, commented",
R"(
send_to:
#host: 192.168.1.100
#port: 7000
host: 192.168.1.101
port: 7001
#host: 192.168.1.102
#port: 7002
)",
N(MB, L{
N(KP|MB, "send_to", L{
N(KP|VP, "host", "192.168.1.101"),
N(KP|VP, "port", "7001") })
})
);
} // end CASE_GROUP()
} // namespace yml
} // namespace c4

test/test_map_nestedx3.cpp Normal file

@@ -0,0 +1,104 @@
#include "./test_lib/test_group.hpp"
#include "./test_lib/test_group.def.hpp"
namespace c4 {
namespace yml {
CASE_GROUP(NESTED_MAPX3)
{
ADD_CASE_TO_GROUP("nested map x3, explicit",
R"({
foo0: {
foo1: {foo2: 000, bar2: 001, baz2: 002},
bar1: {foo2: 010, bar2: 011, baz2: 012},
baz1: {foo2: 020, bar2: 021, baz2: 022}
},
bar0: {
foo1: {foo2: 100, bar2: 101, baz2: 102},
bar1: {foo2: 110, bar2: 111, baz2: 112},
baz1: {foo2: 120, bar2: 121, baz2: 122}
},
baz0: {
foo1: {foo2: 200, bar2: 201, baz2: 202},
bar1: {foo2: 210, bar2: 211, baz2: 212},
baz1: {foo2: 220, bar2: 221, baz2: 222}
}
})",
N(MFS, L{
N{KP|MFS, "foo0", L{
N{KP|MFS, "foo1", L{N{KP|VP, "foo2", "000"}, N{KP|VP, "bar2", "001"}, N{KP|VP, "baz2", "002"}}},
N{KP|MFS, "bar1", L{N{KP|VP, "foo2", "010"}, N{KP|VP, "bar2", "011"}, N{KP|VP, "baz2", "012"}}},
N{KP|MFS, "baz1", L{N{KP|VP, "foo2", "020"}, N{KP|VP, "bar2", "021"}, N{KP|VP, "baz2", "022"}}} }},
N{KP|MFS, "bar0", L{
N{KP|MFS, "foo1", L{N{KP|VP, "foo2", "100"}, N{KP|VP, "bar2", "101"}, N{KP|VP, "baz2", "102"}}},
N{KP|MFS, "bar1", L{N{KP|VP, "foo2", "110"}, N{KP|VP, "bar2", "111"}, N{KP|VP, "baz2", "112"}}},
N{KP|MFS, "baz1", L{N{KP|VP, "foo2", "120"}, N{KP|VP, "bar2", "121"}, N{KP|VP, "baz2", "122"}}} }},
N{KP|MFS, "baz0", L{
N{KP|MFS, "foo1", L{N{KP|VP, "foo2", "200"}, N{KP|VP, "bar2", "201"}, N{KP|VP, "baz2", "202"}}},
N{KP|MFS, "bar1", L{N{KP|VP, "foo2", "210"}, N{KP|VP, "bar2", "211"}, N{KP|VP, "baz2", "212"}}},
N{KP|MFS, "baz1", L{N{KP|VP, "foo2", "220"}, N{KP|VP, "bar2", "221"}, N{KP|VP, "baz2", "222"}}} }},
})
);
ADD_CASE_TO_GROUP("nested map x3",
R"(
foo0:
foo1:
foo2: 000
bar2: 001
baz2: 002
bar1:
foo2: 010
bar2: 011
baz2: 012
baz1:
foo2: 020
bar2: 021
baz2: 022
bar0:
foo1:
foo2: 100
bar2: 101
baz2: 102
bar1:
foo2: 110
bar2: 111
baz2: 112
baz1:
foo2: 120
bar2: 121
baz2: 122
baz0:
foo1:
foo2: 200
bar2: 201
baz2: 202
bar1:
foo2: 210
bar2: 211
baz2: 212
baz1:
foo2: 220
bar2: 221
baz2: 222
)",
N(MB, L{
N{KP|MB, "foo0", L{
N{KP|MB, "foo1", L{N{KP|VP, "foo2", "000"}, N{KP|VP, "bar2", "001"}, N{KP|VP, "baz2", "002"}}},
N{KP|MB, "bar1", L{N{KP|VP, "foo2", "010"}, N{KP|VP, "bar2", "011"}, N{KP|VP, "baz2", "012"}}},
N{KP|MB, "baz1", L{N{KP|VP, "foo2", "020"}, N{KP|VP, "bar2", "021"}, N{KP|VP, "baz2", "022"}}} }},
N{KP|MB, "bar0", L{
N{KP|MB, "foo1", L{N{KP|VP, "foo2", "100"}, N{KP|VP, "bar2", "101"}, N{KP|VP, "baz2", "102"}}},
N{KP|MB, "bar1", L{N{KP|VP, "foo2", "110"}, N{KP|VP, "bar2", "111"}, N{KP|VP, "baz2", "112"}}},
N{KP|MB, "baz1", L{N{KP|VP, "foo2", "120"}, N{KP|VP, "bar2", "121"}, N{KP|VP, "baz2", "122"}}} }},
N{KP|MB, "baz0", L{
N{KP|MB, "foo1", L{N{KP|VP, "foo2", "200"}, N{KP|VP, "bar2", "201"}, N{KP|VP, "baz2", "202"}}},
N{KP|MB, "bar1", L{N{KP|VP, "foo2", "210"}, N{KP|VP, "bar2", "211"}, N{KP|VP, "baz2", "212"}}},
N{KP|MB, "baz1", L{N{KP|VP, "foo2", "220"}, N{KP|VP, "bar2", "221"}, N{KP|VP, "baz2", "222"}}} }},
})
);
}
} // namespace yml
} // namespace c4

test/test_map_nestedx4.cpp Normal file

@@ -0,0 +1,191 @@
#include "./test_lib/test_group.hpp"
#include "./test_lib/test_group.def.hpp"
namespace c4 {
namespace yml {
CASE_GROUP(NESTED_MAPX4)
{
ADD_CASE_TO_GROUP("nested map x4, explicit",
R"({
foo0: {
foo1: { foo2: {foo3: 0000, bar3: 0001, baz3: 0002}, bar2: {foo3: 0010, bar3: 0011, baz3: 0012}, baz2: {foo3: 0020, bar3: 0021, baz3: 0022} },
bar1: { foo2: {foo3: 0100, bar3: 0101, baz3: 0102}, bar2: {foo3: 0110, bar3: 0111, baz3: 0112}, baz2: {foo3: 0120, bar3: 0121, baz3: 0122} },
baz1: { foo2: {foo3: 0200, bar3: 0201, baz3: 0202}, bar2: {foo3: 0210, bar3: 0211, baz3: 0212}, baz2: {foo3: 0220, bar3: 0221, baz3: 0222} },
},
bar0: {
foo1: { foo2: {foo3: 1000, bar3: 1001, baz3: 1002}, bar2: {foo3: 1010, bar3: 1011, baz3: 1012}, baz2: {foo3: 1020, bar3: 1021, baz3: 1022} },
bar1: { foo2: {foo3: 1100, bar3: 1101, baz3: 1102}, bar2: {foo3: 1110, bar3: 1111, baz3: 1112}, baz2: {foo3: 1120, bar3: 1121, baz3: 1122} },
baz1: { foo2: {foo3: 1200, bar3: 1201, baz3: 1202}, bar2: {foo3: 1210, bar3: 1211, baz3: 1212}, baz2: {foo3: 1220, bar3: 1221, baz3: 1222} },
},
baz0: {
foo1: { foo2: {foo3: 2000, bar3: 2001, baz3: 2002}, bar2: {foo3: 2010, bar3: 2011, baz3: 2012}, baz2: {foo3: 2020, bar3: 2021, baz3: 2022} },
bar1: { foo2: {foo3: 2100, bar3: 2101, baz3: 2102}, bar2: {foo3: 2110, bar3: 2111, baz3: 2112}, baz2: {foo3: 2120, bar3: 2121, baz3: 2122} },
baz1: { foo2: {foo3: 2200, bar3: 2201, baz3: 2202}, bar2: {foo3: 2210, bar3: 2211, baz3: 2212}, baz2: {foo3: 2220, bar3: 2221, baz3: 2222} },
},
})",
N(MFS, L{
N(KP|MFS, "foo0", L{
N(KP|MFS, "foo1", L{N(KP|MFS, "foo2", L{N(KP|VP, "foo3", "0000"), N(KP|VP, "bar3", "0001"), N(KP|VP, "baz3", "0002")}), N(KP|MFS, "bar2", L{N(KP|VP, "foo3", "0010"), N(KP|VP, "bar3", "0011"), N(KP|VP, "baz3", "0012")}), N(KP|MFS, "baz2", L{N(KP|VP, "foo3", "0020"), N(KP|VP, "bar3", "0021"), N(KP|VP, "baz3", "0022")})}),
N(KP|MFS, "bar1", L{N(KP|MFS, "foo2", L{N(KP|VP, "foo3", "0100"), N(KP|VP, "bar3", "0101"), N(KP|VP, "baz3", "0102")}), N(KP|MFS, "bar2", L{N(KP|VP, "foo3", "0110"), N(KP|VP, "bar3", "0111"), N(KP|VP, "baz3", "0112")}), N(KP|MFS, "baz2", L{N(KP|VP, "foo3", "0120"), N(KP|VP, "bar3", "0121"), N(KP|VP, "baz3", "0122")})}),
N(KP|MFS, "baz1", L{N(KP|MFS, "foo2", L{N(KP|VP, "foo3", "0200"), N(KP|VP, "bar3", "0201"), N(KP|VP, "baz3", "0202")}), N(KP|MFS, "bar2", L{N(KP|VP, "foo3", "0210"), N(KP|VP, "bar3", "0211"), N(KP|VP, "baz3", "0212")}), N(KP|MFS, "baz2", L{N(KP|VP, "foo3", "0220"), N(KP|VP, "bar3", "0221"), N(KP|VP, "baz3", "0222")})}),
}),
N(KP|MFS, "bar0", L{
N(KP|MFS, "foo1", L{N(KP|MFS, "foo2", L{N(KP|VP, "foo3", "1000"), N(KP|VP, "bar3", "1001"), N(KP|VP, "baz3", "1002")}), N(KP|MFS, "bar2", L{N(KP|VP, "foo3", "1010"), N(KP|VP, "bar3", "1011"), N(KP|VP, "baz3", "1012")}), N(KP|MFS, "baz2", L{N(KP|VP, "foo3", "1020"), N(KP|VP, "bar3", "1021"), N(KP|VP, "baz3", "1022")})}),
N(KP|MFS, "bar1", L{N(KP|MFS, "foo2", L{N(KP|VP, "foo3", "1100"), N(KP|VP, "bar3", "1101"), N(KP|VP, "baz3", "1102")}), N(KP|MFS, "bar2", L{N(KP|VP, "foo3", "1110"), N(KP|VP, "bar3", "1111"), N(KP|VP, "baz3", "1112")}), N(KP|MFS, "baz2", L{N(KP|VP, "foo3", "1120"), N(KP|VP, "bar3", "1121"), N(KP|VP, "baz3", "1122")})}),
N(KP|MFS, "baz1", L{N(KP|MFS, "foo2", L{N(KP|VP, "foo3", "1200"), N(KP|VP, "bar3", "1201"), N(KP|VP, "baz3", "1202")}), N(KP|MFS, "bar2", L{N(KP|VP, "foo3", "1210"), N(KP|VP, "bar3", "1211"), N(KP|VP, "baz3", "1212")}), N(KP|MFS, "baz2", L{N(KP|VP, "foo3", "1220"), N(KP|VP, "bar3", "1221"), N(KP|VP, "baz3", "1222")})}),
}),
N(KP|MFS, "baz0", L{
N(KP|MFS, "foo1", L{N(KP|MFS, "foo2", L{N(KP|VP, "foo3", "2000"), N(KP|VP, "bar3", "2001"), N(KP|VP, "baz3", "2002")}), N(KP|MFS, "bar2", L{N(KP|VP, "foo3", "2010"), N(KP|VP, "bar3", "2011"), N(KP|VP, "baz3", "2012")}), N(KP|MFS, "baz2", L{N(KP|VP, "foo3", "2020"), N(KP|VP, "bar3", "2021"), N(KP|VP, "baz3", "2022")})}),
N(KP|MFS, "bar1", L{N(KP|MFS, "foo2", L{N(KP|VP, "foo3", "2100"), N(KP|VP, "bar3", "2101"), N(KP|VP, "baz3", "2102")}), N(KP|MFS, "bar2", L{N(KP|VP, "foo3", "2110"), N(KP|VP, "bar3", "2111"), N(KP|VP, "baz3", "2112")}), N(KP|MFS, "baz2", L{N(KP|VP, "foo3", "2120"), N(KP|VP, "bar3", "2121"), N(KP|VP, "baz3", "2122")})}),
N(KP|MFS, "baz1", L{N(KP|MFS, "foo2", L{N(KP|VP, "foo3", "2200"), N(KP|VP, "bar3", "2201"), N(KP|VP, "baz3", "2202")}), N(KP|MFS, "bar2", L{N(KP|VP, "foo3", "2210"), N(KP|VP, "bar3", "2211"), N(KP|VP, "baz3", "2212")}), N(KP|MFS, "baz2", L{N(KP|VP, "foo3", "2220"), N(KP|VP, "bar3", "2221"), N(KP|VP, "baz3", "2222")})}),
})
})
);
ADD_CASE_TO_GROUP("nested map x4",
R"(
foo0:
foo1:
foo2:
foo3: 0000
bar3: 0001
baz3: 0002
bar2:
foo3: 0010
bar3: 0011
baz3: 0012
baz2:
foo3: 0020
bar3: 0021
baz3: 0022
bar1:
foo2:
foo3: 0100
bar3: 0101
baz3: 0102
bar2:
foo3: 0110
bar3: 0111
baz3: 0112
baz2:
foo3: 0120
bar3: 0121
baz3: 0122
baz1:
foo2:
foo3: 0200
bar3: 0201
baz3: 0202
bar2:
foo3: 0210
bar3: 0211
baz3: 0212
baz2:
foo3: 0220
bar3: 0221
baz3: 0222
bar0:
foo1:
foo2:
foo3: 1000
bar3: 1001
baz3: 1002
bar2:
foo3: 1010
bar3: 1011
baz3: 1012
baz2:
foo3: 1020
bar3: 1021
baz3: 1022
bar1:
foo2:
foo3: 1100
bar3: 1101
baz3: 1102
bar2:
foo3: 1110
bar3: 1111
baz3: 1112
baz2:
foo3: 1120
bar3: 1121
baz3: 1122
baz1:
foo2:
foo3: 1200
bar3: 1201
baz3: 1202
bar2:
foo3: 1210
bar3: 1211
baz3: 1212
baz2:
foo3: 1220
bar3: 1221
baz3: 1222
baz0:
foo1:
foo2:
foo3: 2000
bar3: 2001
baz3: 2002
bar2:
foo3: 2010
bar3: 2011
baz3: 2012
baz2:
foo3: 2020
bar3: 2021
baz3: 2022
bar1:
foo2:
foo3: 2100
bar3: 2101
baz3: 2102
bar2:
foo3: 2110
bar3: 2111
baz3: 2112
baz2:
foo3: 2120
bar3: 2121
baz3: 2122
baz1:
foo2:
foo3: 2200
bar3: 2201
baz3: 2202
bar2:
foo3: 2210
bar3: 2211
baz3: 2212
baz2:
foo3: 2220
bar3: 2221
baz3: 2222
)",
N(MB, L{
N(KP|MB, "foo0", L{
N(KP|MB, "foo1", L{N(KP|MB, "foo2", L{N(KP|VP, "foo3", "0000"), N(KP|VP, "bar3", "0001"), N(KP|VP, "baz3", "0002")}), N(KP|MB, "bar2", L{N(KP|VP, "foo3", "0010"), N(KP|VP, "bar3", "0011"), N(KP|VP, "baz3", "0012")}), N(KP|MB, "baz2", L{N(KP|VP, "foo3", "0020"), N(KP|VP, "bar3", "0021"), N(KP|VP, "baz3", "0022")})}),
N(KP|MB, "bar1", L{N(KP|MB, "foo2", L{N(KP|VP, "foo3", "0100"), N(KP|VP, "bar3", "0101"), N(KP|VP, "baz3", "0102")}), N(KP|MB, "bar2", L{N(KP|VP, "foo3", "0110"), N(KP|VP, "bar3", "0111"), N(KP|VP, "baz3", "0112")}), N(KP|MB, "baz2", L{N(KP|VP, "foo3", "0120"), N(KP|VP, "bar3", "0121"), N(KP|VP, "baz3", "0122")})}),
N(KP|MB, "baz1", L{N(KP|MB, "foo2", L{N(KP|VP, "foo3", "0200"), N(KP|VP, "bar3", "0201"), N(KP|VP, "baz3", "0202")}), N(KP|MB, "bar2", L{N(KP|VP, "foo3", "0210"), N(KP|VP, "bar3", "0211"), N(KP|VP, "baz3", "0212")}), N(KP|MB, "baz2", L{N(KP|VP, "foo3", "0220"), N(KP|VP, "bar3", "0221"), N(KP|VP, "baz3", "0222")})}),
}),
N(KP|MB, "bar0", L{
N(KP|MB, "foo1", L{N(KP|MB, "foo2", L{N(KP|VP, "foo3", "1000"), N(KP|VP, "bar3", "1001"), N(KP|VP, "baz3", "1002")}), N(KP|MB, "bar2", L{N(KP|VP, "foo3", "1010"), N(KP|VP, "bar3", "1011"), N(KP|VP, "baz3", "1012")}), N(KP|MB, "baz2", L{N(KP|VP, "foo3", "1020"), N(KP|VP, "bar3", "1021"), N(KP|VP, "baz3", "1022")})}),
N(KP|MB, "bar1", L{N(KP|MB, "foo2", L{N(KP|VP, "foo3", "1100"), N(KP|VP, "bar3", "1101"), N(KP|VP, "baz3", "1102")}), N(KP|MB, "bar2", L{N(KP|VP, "foo3", "1110"), N(KP|VP, "bar3", "1111"), N(KP|VP, "baz3", "1112")}), N(KP|MB, "baz2", L{N(KP|VP, "foo3", "1120"), N(KP|VP, "bar3", "1121"), N(KP|VP, "baz3", "1122")})}),
N(KP|MB, "baz1", L{N(KP|MB, "foo2", L{N(KP|VP, "foo3", "1200"), N(KP|VP, "bar3", "1201"), N(KP|VP, "baz3", "1202")}), N(KP|MB, "bar2", L{N(KP|VP, "foo3", "1210"), N(KP|VP, "bar3", "1211"), N(KP|VP, "baz3", "1212")}), N(KP|MB, "baz2", L{N(KP|VP, "foo3", "1220"), N(KP|VP, "bar3", "1221"), N(KP|VP, "baz3", "1222")})}),
}),
N(KP|MB, "baz0", L{
N(KP|MB, "foo1", L{N(KP|MB, "foo2", L{N(KP|VP, "foo3", "2000"), N(KP|VP, "bar3", "2001"), N(KP|VP, "baz3", "2002")}), N(KP|MB, "bar2", L{N(KP|VP, "foo3", "2010"), N(KP|VP, "bar3", "2011"), N(KP|VP, "baz3", "2012")}), N(KP|MB, "baz2", L{N(KP|VP, "foo3", "2020"), N(KP|VP, "bar3", "2021"), N(KP|VP, "baz3", "2022")})}),
N(KP|MB, "bar1", L{N(KP|MB, "foo2", L{N(KP|VP, "foo3", "2100"), N(KP|VP, "bar3", "2101"), N(KP|VP, "baz3", "2102")}), N(KP|MB, "bar2", L{N(KP|VP, "foo3", "2110"), N(KP|VP, "bar3", "2111"), N(KP|VP, "baz3", "2112")}), N(KP|MB, "baz2", L{N(KP|VP, "foo3", "2120"), N(KP|VP, "bar3", "2121"), N(KP|VP, "baz3", "2122")})}),
N(KP|MB, "baz1", L{N(KP|MB, "foo2", L{N(KP|VP, "foo3", "2200"), N(KP|VP, "bar3", "2201"), N(KP|VP, "baz3", "2202")}), N(KP|MB, "bar2", L{N(KP|VP, "foo3", "2210"), N(KP|VP, "bar3", "2211"), N(KP|VP, "baz3", "2212")}), N(KP|MB, "baz2", L{N(KP|VP, "foo3", "2220"), N(KP|VP, "bar3", "2221"), N(KP|VP, "baz3", "2222")})}),
})
})
);
}
} // namespace yml
} // namespace c4


@@ -1,4 +1,5 @@
#include "./test_group.hpp"
#include "./test_lib/test_group.hpp"
#include "./test_lib/test_group.def.hpp"
namespace c4 {
namespace yml {
@@ -9,19 +10,19 @@ CASE_GROUP(MAP_OF_SEQ)
ADD_CASE_TO_GROUP("map of empty seqs",
R"({foo: [], bar: [], baz: []})",
L{
N(KEYSEQ, "foo", L()),
N(KEYSEQ, "bar", L()),
N(KEYSEQ, "baz", L()),
}
N(MFS, L{
N(KP|SFS, "foo", L()),
N(KP|SFS, "bar", L()),
N(KP|SFS, "baz", L()),
})
);
ADD_CASE_TO_GROUP("map of seqs, one line",
R"({men: [John Smith, Bill Jones], women: [Mary Smith, Susan Williams]})",
L{
N("men", L{N{"John Smith"}, N{"Bill Jones"}}),
N("women", L{N{"Mary Smith"}, N{"Susan Williams"}})
}
N(MFS, L{
N(KP|SFS, "men", L{N{VP, "John Smith"}, N{VP, "Bill Jones"}}),
N(KP|SFS, "women", L{N{VP, "Mary Smith"}, N{VP, "Susan Williams"}})
})
);
ADD_CASE_TO_GROUP("map of seqs",
@@ -33,10 +34,10 @@ women:
- Mary Smith
- Susan Williams
)",
L{
N("men", L{N{"John Smith"}, N{"Bill Jones"}}),
N("women", L{N{"Mary Smith"}, N{"Susan Williams"}})
}
N(MB, L{
N(KP|SB, "men", L{N{VP, "John Smith"}, N{VP, "Bill Jones"}}),
N(KP|SB, "women", L{N{VP, "Mary Smith"}, N{VP, "Susan Williams"}})
})
);
ADD_CASE_TO_GROUP("map of seqs, not indented",
@@ -48,10 +49,10 @@ women:
- Mary Smith
- Susan Williams
)",
L{
N("men", L{N{"John Smith"}, N{"Bill Jones"}}),
N("women", L{N{"Mary Smith"}, N{"Susan Williams"}})
}
N(MB, L{
N(KP|SB, "men", L{N{VP, "John Smith"}, N{VP, "Bill Jones"}}),
N(KP|SB, "women", L{N{VP, "Mary Smith"}, N{VP, "Susan Williams"}})
})
);
ADD_CASE_TO_GROUP("map of seqs, not indented, more",
@@ -104,42 +105,43 @@ product4:
tax2: 789.10 # we must jump two levels
tax5: 1234.5
)",
L{
N("product", L{
N(L{N("sku", "BL4438H"), N("quantity", "1"), N("description", "Super Hoop"), N("price", "2392.00")}),
N(MB, L{
N(KP|SB, "product", L{
N(MB, L{N(KP|VP, "sku", "BL4438H"), N(KP|VP, "quantity", "1"), N(KP|VP, "description", "Super Hoop"), N(KP|VP, "price", "2392.00")}),
}),
N("tax", "1234.5"),
N("product2", L{
N("subproduct1", L{
N(L{N("sku", "BL4438H"), N("quantity", "1"), N("description", "Super Hoop"), N("price", "2392.00")}),
N(KP|VP, "tax", "1234.5"),
N(KP|MB, "product2", L{
N(KP|SB, "subproduct1", L{
N(MB, L{N(KP|VP, "sku", "BL4438H"), N(KP|VP, "quantity", "1"), N(KP|VP, "description", "Super Hoop"), N(KP|VP, "price", "2392.00")}),
}),
N("subproduct2", L{
N(L{N("sku", "BL4438H"), N("quantity", "1"), N("description", "Super Hoop"), N("price", "2392.00")}),
N(KP|SB, "subproduct2", L{
N(MB, L{N(KP|VP, "sku", "BL4438H"), N(KP|VP, "quantity", "1"), N(KP|VP, "description", "Super Hoop"), N(KP|VP, "price", "2392.00")}),
}),
N("tax2", "789.10"),
N(KP|VP, "tax2", "789.10"),
}),
N("tax3", "1234.5"),
N("product3", L{
N("subproduct1", L{
N(L{N("sku", "BL4438H"), N("quantity", "1"), N("description", "Super Hoop"), N("price", "2392.00")}),
N(KP|VP, "tax3", "1234.5"),
N(KP|MB, "product3", L{
N(KP|SB, "subproduct1", L{
N(MB, L{N(KP|VP, "sku", "BL4438H"), N(KP|VP, "quantity", "1"), N(KP|VP, "description", "Super Hoop"), N(KP|VP, "price", "2392.00")}),
}),
N("subproduct2", L{
N(L{N("sku", "BL4438H"), N("quantity", "1"), N("description", "Super Hoop"), N("price", "2392.00")}),
N(KP|SB, "subproduct2", L{
N(MB, L{N(KP|VP, "sku", "BL4438H"), N(KP|VP, "quantity", "1"), N(KP|VP, "description", "Super Hoop"), N(KP|VP, "price", "2392.00")}),
}),
N("tax2", "789.10"),
N(KP|VP, "tax2", "789.10"),
}),
N("tax4", "1234.5"),
N("product4", L{
N("subproduct1", L{
N(L{N("sku", "BL4438H"), N("quantity", "1"), N("description", "Super Hoop"), N("price", "2392.00")}),
N(KP|VP, "tax4", "1234.5"),
N(KP|MB, "product4", L{
N(KP|SB, "subproduct1", L{
N(MB, L{N(KP|VP, "sku", "BL4438H"), N(KP|VP, "quantity", "1"), N(KP|VP, "description", "Super Hoop"), N(KP|VP, "price", "2392.00")}),
}),
N("subproduct2", L{
N(L{N("sku", "BL4438H"), N("quantity", "1"), N("description", "Super Hoop"), N("price", "2392.00")}),
N(KP|SB, "subproduct2", L{
N(MB, L{N(KP|VP, "sku", "BL4438H"), N(KP|VP, "quantity", "1"), N(KP|VP, "description", "Super Hoop"), N(KP|VP, "price", "2392.00")}),
}),
N("tax2", "789.10"),
N(KP|VP, "tax2", "789.10"),
}),
N("tax5", "1234.5"),
});
N(KP|VP, "tax5", "1234.5"),
})
);
ADD_CASE_TO_GROUP("map of seqs, next line",
R"(
@@ -154,10 +156,10 @@ women:
-
Susan Williams
)",
L{
N("men", L{N{"John Smith"}, N{"Bill Jones"}}),
N("women", L{N{"Mary Smith"}, N{"Susan Williams"}})
}
N(MB, L{
N(KP|SB, "men", L{N{VP, "John Smith"}, N{VP, "Bill Jones"}}),
N(KP|SB, "women", L{N{VP, "Mary Smith"}, N{VP, "Susan Williams"}})
})
);
ADD_CASE_TO_GROUP("map of seqs, next line without space",
@@ -173,10 +175,10 @@ women:
-
Susan Williams
)",
L{
N("men", L{N{"John Smith"}, N{"Bill Jones"}}),
N("women", L{N{"Mary Smith"}, N{"Susan Williams"}})
}
N(MB, L{
N(KP|SB, "men", L{N{VP, "John Smith"}, N{VP, "Bill Jones"}}),
N(KP|SB, "women", L{N{VP, "Mary Smith"}, N{VP, "Susan Williams"}})
})
);
ADD_CASE_TO_GROUP("map of seqs, deal with unk",
@@ -189,11 +191,13 @@ skip_commits:
- d
- e
)",
L{
N("skip_commits", L{N("files",
L{N("a"), N("b"), N("c"), N("d"), N("e")}
)}),
}
N(MB, L{
N(KP|MB, "skip_commits", L{
N(KP|SB, "files", L{
N(VP, "a"), N(VP, "b"), N(VP, "c"), N(VP, "d"), N(VP, "e")
})
})
})
);
}


@@ -1,4 +1,5 @@
#include "./test_group.hpp"
#include "./test_lib/test_group.hpp"
#include "./test_lib/test_group.def.hpp"
namespace c4 {
namespace yml {
@@ -53,7 +54,7 @@ R"(!!set
? b
?
)",
N(TL("!!set", L{N(KEYVAL, "a", {}), N(KEYVAL, "b", {}), N(KEYVAL, {}, "")}))
N(MB, TL("!!set", L{N(KP|VP, "a", {}), N(KP|VP, "b", {}), N(KP|VP, {}, "")}))
);
ADD_CASE_TO_GROUP("doc as set, implicit",
@@ -61,7 +62,7 @@ R"(!!set
? a
? b
)",
N(TL("!!set", L{N(KEYVAL, "a", {}), N(KEYVAL, "b", {})}))
N(MB, TL("!!set", L{N(KP|VP, "a", {}), N(KP|VP, "b", {})}))
);
ADD_CASE_TO_GROUP("doc as set",
@@ -70,11 +71,14 @@ R"(--- !!set
? bb
? cc
)",
N(STREAM, L{N(DOCMAP, TL("!!set", L{
N(KEYVAL, "aa", /*"~"*/{}),
N(KEYVAL, "bb", /*"~"*/{}),
N(KEYVAL, "cc", /*"~"*/{})})
)}));
N(STREAM, L{
N(DOC|MB, TL("!!set", L{
N(KP|VP, "aa", /*"~"*/{}),
N(KP|VP, "bb", /*"~"*/{}),
N(KP|VP, "cc", /*"~"*/{})
}))
})
);
ADD_CASE_TO_GROUP("sets 2XXW",
R"(
@@ -83,24 +87,14 @@ R"(
? Sammy Sosa
? Ken Griff
)",
N(STREAM, L{N(DOCMAP, TL("!!set", L{
N(KEYVAL, "Mark McGwire", /*"~"*/{}),
N(KEYVAL, "Sammy Sosa", /*"~"*/{}),
N(KEYVAL, "Ken Griff", /*"~"*/{}),})
)}));
ADD_CASE_TO_GROUP("sets 2XXW, indented",
R"(
--- !!set
? Mark McGwire
? Sammy Sosa
? Ken Griff
)",
N(STREAM, L{N(DOCMAP, TL("!!set", L{
N(KEYVAL, "Mark McGwire", /*"~"*/{}),
N(KEYVAL, "Sammy Sosa", /*"~"*/{}),
N(KEYVAL, "Ken Griff", /*"~"*/{}),})
)}));
N(STREAM, L{
N(DOC|MB, TL("!!set", L{
N(KP|VP, "Mark McGwire", /*"~"*/{}),
N(KP|VP, "Sammy Sosa", /*"~"*/{}),
N(KP|VP, "Ken Griff", /*"~"*/{}),
}))
})
);
ADD_CASE_TO_GROUP("sets 2XXW, no set",
R"(
@@ -109,11 +103,14 @@ R"(
? Sammy Sosa
? Ken Griff
)",
N(STREAM, L{N(DOCMAP, L{
N(KEYVAL, "Mark McGwire", /*"~"*/{}),
N(KEYVAL, "Sammy Sosa", /*"~"*/{}),
N(KEYVAL, "Ken Griff", /*"~"*/{}),}
)}));
N(STREAM, L{
N(DOC|MB, L{
N(KP|VP, "Mark McGwire", /*"~"*/{}),
N(KP|VP, "Sammy Sosa", /*"~"*/{}),
N(KP|VP, "Ken Griff", /*"~"*/{}),
})
})
);
ADD_CASE_TO_GROUP("sets 2XXW, no doc",
R"(!!set
@@ -121,11 +118,12 @@ R"(!!set
? Sammy Sosa
? Ken Griff
)",
TL("!!set", L{
N(KEYVAL, "Mark McGwire", /*"~"*/{}),
N(KEYVAL, "Sammy Sosa", /*"~"*/{}),
N(KEYVAL, "Ken Griff", /*"~"*/{}),
}));
N(MB, TL("!!set", L{
N(KP|VP, "Mark McGwire", /*"~"*/{}),
N(KP|VP, "Sammy Sosa", /*"~"*/{}),
N(KP|VP, "Ken Griff", /*"~"*/{}),
}))
);
ADD_CASE_TO_GROUP("sets 2XXW, no doc, no set",
R"(
@@ -133,11 +131,13 @@ R"(
? Sammy Sosa
? Ken Griff
)",
L{
N(KEYVAL, "Mark McGwire", /*"~"*/{}),
N(KEYVAL, "Sammy Sosa", /*"~"*/{}),
N(KEYVAL, "Ken Griff", /*"~"*/{}),
});
N(MB, L{
N(KP|VP, "Mark McGwire", /*"~"*/{}),
N(KP|VP, "Sammy Sosa", /*"~"*/{}),
N(KP|VP, "Ken Griff", /*"~"*/{}),
})
);
}
} // namespace yml


@@ -4,7 +4,7 @@
#endif
#include <gtest/gtest.h>
#include "./test_case.hpp"
#include "./test_lib/test_case.hpp"
namespace c4 {
namespace yml {
@@ -39,11 +39,16 @@ void test_merge(std::initializer_list<csubstr> li, csubstr expected)
{
loaded.clear(); // do not clear the arena of the loaded tree
parse_in_arena(src, &loaded);
_c4dbg_tree("loaded", loaded);
merged.merge_with(&loaded);
_c4dbg_tree("merged", merged);
}
auto buf_result = emitrs_yaml<std::string>(merged);
auto buf_expected = emitrs_yaml<std::string>(ref);
_c4dbg_tree("ref", ref);
test_compare(merged, ref);
std::string buf_result = emitrs_yaml<std::string>(merged);
std::string buf_expected = emitrs_yaml<std::string>(ref);
EXPECT_EQ(buf_result, buf_expected);
}
@@ -64,6 +69,103 @@ TEST(merge, basic)
);
}
TEST(merge, dst_scalar_keeps_style)
{
Tree dst = parse_in_arena("a: b");
const Tree src = parse_in_arena("'a': 'c'");
EXPECT_EQ(dst["a"], "b");
EXPECT_EQ(src["a"], "c");
EXPECT_TRUE(dst["a"].type().is_key_plain());
EXPECT_TRUE(dst["a"].type().is_val_plain());
EXPECT_TRUE(src["a"].type().is_key_squo());
EXPECT_TRUE(src["a"].type().is_val_squo());
_c4dbg_tree("src", src);
_c4dbg_tree("dst", dst);
dst.merge_with(&src);
_c4dbg_tree("merged", dst);
EXPECT_EQ(dst["a"], "c");
EXPECT_EQ(src["a"], "c");
EXPECT_TRUE(dst["a"].type().is_key_plain());
EXPECT_TRUE(dst["a"].type().is_val_plain());
}
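The remaining cases below go through the test_merge helper above; the call under test is Tree::merge_with, which copies or overrides nodes from a source tree into the destination. A standalone sketch of the behavior being exercised (the expected result is inferred from these tests, not quoted from documentation):
Tree dst = parse_in_arena("{a: 0, b: 1}");
const Tree src = parse_in_arena("{b: 2, c: 3}");
dst.merge_with(&src);  // matching keys are overridden, new keys are appended
// emitting dst now yields {a: 0, b: 2, c: 3}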
TEST(merge, src_scalar_assigns_style_0)
{
test_merge(
{
"{}",
"{a: 'b'}",
},
"{a: 'b'}"
);
}
TEST(merge, src_scalar_assigns_style_1)
{
test_merge(
{
"{}",
"{a: 'b'}",
"{a: \"c\"}",
},
"{a: 'c'}"
);
}
TEST(merge, src_scalar_assigns_style_2)
{
test_merge(
{
"{}",
"{a: 'b'}",
"{a: \"c\"}",
"{a: d}",
},
"{a: 'd'}"
);
}
TEST(merge, src_map_assigns_style_0)
{
test_merge(
{
"{}",
"a: 'b'",
"{aa: \"bb\"}",
},
"{a: 'b', aa: \"bb\"}"
);
}
TEST(merge, src_map_assigns_style_1)
{
test_merge(
{
"foo: bar",
"{a: 'b'}",
"{foo: \"bar\"}",
},
"foo: bar\na: 'b'\n"
);
}
TEST(merge, src_seq_assigns_style_0)
{
test_merge(
{
"[]",
"- 0\n- 1\n",
"- 2\n- 3\n",
},
"[0,1,2,3]"
);
}
TEST(merge, src_seq_assigns_style_1)
{
test_merge(
{
"- 0\n- 1\n",
"[a,b]",
"[c,d]",
},
"- 0\n- 1\n- a\n- b\n- c\n- d\n"
);
}
TEST(merge, val_to_seq)
{
test_merge(
@@ -108,7 +210,7 @@ TEST(merge, map_to_val)
);
}
TEST(merge, seq_no_overlap_explicit)
TEST(merge, seq_no_overlap)
{
test_merge(
{"[0, 1, 2]", "[3, 4, 5]", "[6, 7, 8]"},
@@ -117,16 +219,8 @@ TEST(merge, seq_no_overlap_explicit)
}
TEST(merge, seq_no_overlap_implicit)
{
test_merge(
{"0, 1, 2", "3, 4, 5", "6, 7, 8"},
"0, 1, 2, 3, 4, 5, 6, 7, 8"
);
}
TEST(merge, seq_overlap_explicit)
TEST(merge, seq_overlap)
{
test_merge(
{"[0, 1, 2]", "[1, 2, 3]", "[2, 3, 4]"},
@@ -136,21 +230,15 @@ TEST(merge, seq_overlap_explicit)
}
TEST(merge, seq_overlap_implicit)
{
// now a bit more difficult
test_merge(
{"0, 1, 2", "1, 2, 3", "2, 3, 4"},
"0, 1, 2, 1, 2, 3, 2, 3, 4"
// or this? "0, 1, 2, 3, 4"
);
}
TEST(merge, map_orthogonal)
{
test_merge(
{"a: 0", "b: 1", "c: 2"},
{
"{a: 0}",
"{b: 1}",
"{c: 2}"
},
"{a: 0, b: 1, c: 2}"
);
}
@@ -160,9 +248,9 @@ TEST(merge, map_overriding)
{
test_merge(
{
"a: 0",
"{a: 0}",
"{a: 1, b: 1}",
"c: 2"
"{c: 2}"
},
"{a: 1, b: 1, c: 2}"
);
@@ -172,14 +260,14 @@ TEST(merge, map_overriding_multiple)
{
test_merge(
{
"a: 0",
"{a: 0}",
"{a: 1, b: 1}",
"c: 2",
"a: 2",
"a: 3",
"c: 4",
"c: 5",
"a: 4",
"{c: 2}",
"{a: 2}",
"{a: 3}",
"{c: 4}",
"{c: 5}",
"{a: 4}",
},
"{a: 4, b: 1, c: 5}"
);


@@ -1,73 +0,0 @@
#include "./test_group.hpp"
namespace c4 {
namespace yml {
CASE_GROUP(NESTED_MAPX2)
{
ADD_CASE_TO_GROUP("nested map x2, explicit, same line",
R"({foo: {foo0: 00, bar0: 01, baz0: 02}, bar: {foo1: 10, bar1: 11, baz1: 12}, baz: {foo2: 20, bar2: 21, baz2: 22}})",
L{
N{"foo", L{N{"foo0", "00"}, N{"bar0", "01"}, N{"baz0", "02"}}},
N{"bar", L{N{"foo1", "10"}, N{"bar1", "11"}, N{"baz1", "12"}}},
N{"baz", L{N{"foo2", "20"}, N{"bar2", "21"}, N{"baz2", "22"}}},
}
);
ADD_CASE_TO_GROUP("nested map x2, explicit",
R"({
foo: {foo0: 00, bar0: 01, baz0: 02},
bar: {foo1: 10, bar1: 11, baz1: 12},
baz: {foo2: 20, bar2: 21, baz2: 22}
})",
L{
N{"foo", L{N{"foo0", "00"}, N{"bar0", "01"}, N{"baz0", "02"}}},
N{"bar", L{N{"foo1", "10"}, N{"bar1", "11"}, N{"baz1", "12"}}},
N{"baz", L{N{"foo2", "20"}, N{"bar2", "21"}, N{"baz2", "22"}}},
}
);
ADD_CASE_TO_GROUP("nested map x2",
R"(
foo:
foo0: 00
bar0: 01
baz0: 02
bar:
foo1: 10
bar1: 11
baz1: 12
baz:
foo2: 20
bar2: 21
baz2: 22
)",
L{
N{"foo", L{N{"foo0", "00"}, N{"bar0", "01"}, N{"baz0", "02"}}},
N{"bar", L{N{"foo1", "10"}, N{"bar1", "11"}, N{"baz1", "12"}}},
N{"baz", L{N{"foo2", "20"}, N{"bar2", "21"}, N{"baz2", "22"}}},
}
);
ADD_CASE_TO_GROUP("nested map x2, commented",
R"(
send_to:
#host: 192.168.1.100
#port: 7000
host: 192.168.1.101
port: 7001
#host: 192.168.1.102
#port: 7002
)",
L{
N("send_to", L{
N("host", "192.168.1.101"),
N("port", "7001") })
}
);
}
} // namespace yml
} // namespace c4


@@ -1,103 +0,0 @@
#include "./test_group.hpp"
namespace c4 {
namespace yml {
CASE_GROUP(NESTED_MAPX3)
{
ADD_CASE_TO_GROUP("nested map x3, explicit",
R"({
foo0: {
foo1: {foo2: 000, bar2: 001, baz2: 002},
bar1: {foo2: 010, bar2: 011, baz2: 012},
baz1: {foo2: 020, bar2: 021, baz2: 022}
},
bar0: {
foo1: {foo2: 100, bar2: 101, baz2: 102},
bar1: {foo2: 110, bar2: 111, baz2: 112},
baz1: {foo2: 120, bar2: 121, baz2: 122}
},
baz0: {
foo1: {foo2: 200, bar2: 201, baz2: 202},
bar1: {foo2: 210, bar2: 211, baz2: 212},
baz1: {foo2: 220, bar2: 221, baz2: 222}
}
})",
L{
N{"foo0", L{
N{"foo1", L{N{"foo2", "000"}, N{"bar2", "001"}, N{"baz2", "002"}}},
N{"bar1", L{N{"foo2", "010"}, N{"bar2", "011"}, N{"baz2", "012"}}},
N{"baz1", L{N{"foo2", "020"}, N{"bar2", "021"}, N{"baz2", "022"}}} }},
N{"bar0", L{
N{"foo1", L{N{"foo2", "100"}, N{"bar2", "101"}, N{"baz2", "102"}}},
N{"bar1", L{N{"foo2", "110"}, N{"bar2", "111"}, N{"baz2", "112"}}},
N{"baz1", L{N{"foo2", "120"}, N{"bar2", "121"}, N{"baz2", "122"}}} }},
N{"baz0", L{
N{"foo1", L{N{"foo2", "200"}, N{"bar2", "201"}, N{"baz2", "202"}}},
N{"bar1", L{N{"foo2", "210"}, N{"bar2", "211"}, N{"baz2", "212"}}},
N{"baz1", L{N{"foo2", "220"}, N{"bar2", "221"}, N{"baz2", "222"}}} }},
}
);
ADD_CASE_TO_GROUP("nested map x3",
R"(
foo0:
foo1:
foo2: 000
bar2: 001
baz2: 002
bar1:
foo2: 010
bar2: 011
baz2: 012
baz1:
foo2: 020
bar2: 021
baz2: 022
bar0:
foo1:
foo2: 100
bar2: 101
baz2: 102
bar1:
foo2: 110
bar2: 111
baz2: 112
baz1:
foo2: 120
bar2: 121
baz2: 122
baz0:
foo1:
foo2: 200
bar2: 201
baz2: 202
bar1:
foo2: 210
bar2: 211
baz2: 212
baz1:
foo2: 220
bar2: 221
baz2: 222
)",
L{
N{"foo0", L{
N{"foo1", L{N{"foo2", "000"}, N{"bar2", "001"}, N{"baz2", "002"}}},
N{"bar1", L{N{"foo2", "010"}, N{"bar2", "011"}, N{"baz2", "012"}}},
N{"baz1", L{N{"foo2", "020"}, N{"bar2", "021"}, N{"baz2", "022"}}} }},
N{"bar0", L{
N{"foo1", L{N{"foo2", "100"}, N{"bar2", "101"}, N{"baz2", "102"}}},
N{"bar1", L{N{"foo2", "110"}, N{"bar2", "111"}, N{"baz2", "112"}}},
N{"baz1", L{N{"foo2", "120"}, N{"bar2", "121"}, N{"baz2", "122"}}} }},
N{"baz0", L{
N{"foo1", L{N{"foo2", "200"}, N{"bar2", "201"}, N{"baz2", "202"}}},
N{"bar1", L{N{"foo2", "210"}, N{"bar2", "211"}, N{"baz2", "212"}}},
N{"baz1", L{N{"foo2", "220"}, N{"bar2", "221"}, N{"baz2", "222"}}} }},
}
);
}
} // namespace yml
} // namespace c4


@@ -1,190 +0,0 @@
#include "./test_group.hpp"
namespace c4 {
namespace yml {
CASE_GROUP(NESTED_MAPX4)
{
ADD_CASE_TO_GROUP("nested map x4, explicit",
R"({
foo0: {
foo1: { foo2: {foo3: 0000, bar3: 0001, baz3: 0002}, bar2: {foo3: 0010, bar3: 0011, baz3: 0012}, baz2: {foo3: 0020, bar3: 0021, baz3: 0022} },
bar1: { foo2: {foo3: 0100, bar3: 0101, baz3: 0102}, bar2: {foo3: 0110, bar3: 0111, baz3: 0112}, baz2: {foo3: 0120, bar3: 0121, baz3: 0122} },
baz1: { foo2: {foo3: 0200, bar3: 0201, baz3: 0202}, bar2: {foo3: 0210, bar3: 0211, baz3: 0212}, baz2: {foo3: 0220, bar3: 0221, baz3: 0222} },
},
bar0: {
foo1: { foo2: {foo3: 1000, bar3: 1001, baz3: 1002}, bar2: {foo3: 1010, bar3: 1011, baz3: 1012}, baz2: {foo3: 1020, bar3: 1021, baz3: 1022} },
bar1: { foo2: {foo3: 1100, bar3: 1101, baz3: 1102}, bar2: {foo3: 1110, bar3: 1111, baz3: 1112}, baz2: {foo3: 1120, bar3: 1121, baz3: 1122} },
baz1: { foo2: {foo3: 1200, bar3: 1201, baz3: 1202}, bar2: {foo3: 1210, bar3: 1211, baz3: 1212}, baz2: {foo3: 1220, bar3: 1221, baz3: 1222} },
},
baz0: {
foo1: { foo2: {foo3: 2000, bar3: 2001, baz3: 2002}, bar2: {foo3: 2010, bar3: 2011, baz3: 2012}, baz2: {foo3: 2020, bar3: 2021, baz3: 2022} },
bar1: { foo2: {foo3: 2100, bar3: 2101, baz3: 2102}, bar2: {foo3: 2110, bar3: 2111, baz3: 2112}, baz2: {foo3: 2120, bar3: 2121, baz3: 2122} },
baz1: { foo2: {foo3: 2200, bar3: 2201, baz3: 2202}, bar2: {foo3: 2210, bar3: 2211, baz3: 2212}, baz2: {foo3: 2220, bar3: 2221, baz3: 2222} },
},
})",
L{
N("foo0", L{
N("foo1", L{N("foo2", L{N("foo3", "0000"), N("bar3", "0001"), N("baz3", "0002")}), N("bar2", L{N("foo3", "0010"), N("bar3", "0011"), N("baz3", "0012")}), N("baz2", L{N("foo3", "0020"), N("bar3", "0021"), N("baz3", "0022")})}),
N("bar1", L{N("foo2", L{N("foo3", "0100"), N("bar3", "0101"), N("baz3", "0102")}), N("bar2", L{N("foo3", "0110"), N("bar3", "0111"), N("baz3", "0112")}), N("baz2", L{N("foo3", "0120"), N("bar3", "0121"), N("baz3", "0122")})}),
N("baz1", L{N("foo2", L{N("foo3", "0200"), N("bar3", "0201"), N("baz3", "0202")}), N("bar2", L{N("foo3", "0210"), N("bar3", "0211"), N("baz3", "0212")}), N("baz2", L{N("foo3", "0220"), N("bar3", "0221"), N("baz3", "0222")})}),
}),
N("bar0", L{
N("foo1", L{N("foo2", L{N("foo3", "1000"), N("bar3", "1001"), N("baz3", "1002")}), N("bar2", L{N("foo3", "1010"), N("bar3", "1011"), N("baz3", "1012")}), N("baz2", L{N("foo3", "1020"), N("bar3", "1021"), N("baz3", "1022")})}),
N("bar1", L{N("foo2", L{N("foo3", "1100"), N("bar3", "1101"), N("baz3", "1102")}), N("bar2", L{N("foo3", "1110"), N("bar3", "1111"), N("baz3", "1112")}), N("baz2", L{N("foo3", "1120"), N("bar3", "1121"), N("baz3", "1122")})}),
N("baz1", L{N("foo2", L{N("foo3", "1200"), N("bar3", "1201"), N("baz3", "1202")}), N("bar2", L{N("foo3", "1210"), N("bar3", "1211"), N("baz3", "1212")}), N("baz2", L{N("foo3", "1220"), N("bar3", "1221"), N("baz3", "1222")})}),
}),
N("baz0", L{
N("foo1", L{N("foo2", L{N("foo3", "2000"), N("bar3", "2001"), N("baz3", "2002")}), N("bar2", L{N("foo3", "2010"), N("bar3", "2011"), N("baz3", "2012")}), N("baz2", L{N("foo3", "2020"), N("bar3", "2021"), N("baz3", "2022")})}),
N("bar1", L{N("foo2", L{N("foo3", "2100"), N("bar3", "2101"), N("baz3", "2102")}), N("bar2", L{N("foo3", "2110"), N("bar3", "2111"), N("baz3", "2112")}), N("baz2", L{N("foo3", "2120"), N("bar3", "2121"), N("baz3", "2122")})}),
N("baz1", L{N("foo2", L{N("foo3", "2200"), N("bar3", "2201"), N("baz3", "2202")}), N("bar2", L{N("foo3", "2210"), N("bar3", "2211"), N("baz3", "2212")}), N("baz2", L{N("foo3", "2220"), N("bar3", "2221"), N("baz3", "2222")})}),
})
}
);
ADD_CASE_TO_GROUP("nested map x4",
R"(
foo0:
foo1:
foo2:
foo3: 0000
bar3: 0001
baz3: 0002
bar2:
foo3: 0010
bar3: 0011
baz3: 0012
baz2:
foo3: 0020
bar3: 0021
baz3: 0022
bar1:
foo2:
foo3: 0100
bar3: 0101
baz3: 0102
bar2:
foo3: 0110
bar3: 0111
baz3: 0112
baz2:
foo3: 0120
bar3: 0121
baz3: 0122
baz1:
foo2:
foo3: 0200
bar3: 0201
baz3: 0202
bar2:
foo3: 0210
bar3: 0211
baz3: 0212
baz2:
foo3: 0220
bar3: 0221
baz3: 0222
bar0:
foo1:
foo2:
foo3: 1000
bar3: 1001
baz3: 1002
bar2:
foo3: 1010
bar3: 1011
baz3: 1012
baz2:
foo3: 1020
bar3: 1021
baz3: 1022
bar1:
foo2:
foo3: 1100
bar3: 1101
baz3: 1102
bar2:
foo3: 1110
bar3: 1111
baz3: 1112
baz2:
foo3: 1120
bar3: 1121
baz3: 1122
baz1:
foo2:
foo3: 1200
bar3: 1201
baz3: 1202
bar2:
foo3: 1210
bar3: 1211
baz3: 1212
baz2:
foo3: 1220
bar3: 1221
baz3: 1222
baz0:
foo1:
foo2:
foo3: 2000
bar3: 2001
baz3: 2002
bar2:
foo3: 2010
bar3: 2011
baz3: 2012
baz2:
foo3: 2020
bar3: 2021
baz3: 2022
bar1:
foo2:
foo3: 2100
bar3: 2101
baz3: 2102
bar2:
foo3: 2110
bar3: 2111
baz3: 2112
baz2:
foo3: 2120
bar3: 2121
baz3: 2122
baz1:
foo2:
foo3: 2200
bar3: 2201
baz3: 2202
bar2:
foo3: 2210
bar3: 2211
baz3: 2212
baz2:
foo3: 2220
bar3: 2221
baz3: 2222
)",
L{
N("foo0", L{
N("foo1", L{N("foo2", L{N("foo3", "0000"), N("bar3", "0001"), N("baz3", "0002")}), N("bar2", L{N("foo3", "0010"), N("bar3", "0011"), N("baz3", "0012")}), N("baz2", L{N("foo3", "0020"), N("bar3", "0021"), N("baz3", "0022")})}),
N("bar1", L{N("foo2", L{N("foo3", "0100"), N("bar3", "0101"), N("baz3", "0102")}), N("bar2", L{N("foo3", "0110"), N("bar3", "0111"), N("baz3", "0112")}), N("baz2", L{N("foo3", "0120"), N("bar3", "0121"), N("baz3", "0122")})}),
N("baz1", L{N("foo2", L{N("foo3", "0200"), N("bar3", "0201"), N("baz3", "0202")}), N("bar2", L{N("foo3", "0210"), N("bar3", "0211"), N("baz3", "0212")}), N("baz2", L{N("foo3", "0220"), N("bar3", "0221"), N("baz3", "0222")})}),
}),
N("bar0", L{
N("foo1", L{N("foo2", L{N("foo3", "1000"), N("bar3", "1001"), N("baz3", "1002")}), N("bar2", L{N("foo3", "1010"), N("bar3", "1011"), N("baz3", "1012")}), N("baz2", L{N("foo3", "1020"), N("bar3", "1021"), N("baz3", "1022")})}),
N("bar1", L{N("foo2", L{N("foo3", "1100"), N("bar3", "1101"), N("baz3", "1102")}), N("bar2", L{N("foo3", "1110"), N("bar3", "1111"), N("baz3", "1112")}), N("baz2", L{N("foo3", "1120"), N("bar3", "1121"), N("baz3", "1122")})}),
N("baz1", L{N("foo2", L{N("foo3", "1200"), N("bar3", "1201"), N("baz3", "1202")}), N("bar2", L{N("foo3", "1210"), N("bar3", "1211"), N("baz3", "1212")}), N("baz2", L{N("foo3", "1220"), N("bar3", "1221"), N("baz3", "1222")})}),
}),
N("baz0", L{
N("foo1", L{N("foo2", L{N("foo3", "2000"), N("bar3", "2001"), N("baz3", "2002")}), N("bar2", L{N("foo3", "2010"), N("bar3", "2011"), N("baz3", "2012")}), N("baz2", L{N("foo3", "2020"), N("bar3", "2021"), N("baz3", "2022")})}),
N("bar1", L{N("foo2", L{N("foo3", "2100"), N("bar3", "2101"), N("baz3", "2102")}), N("bar2", L{N("foo3", "2110"), N("bar3", "2111"), N("baz3", "2112")}), N("baz2", L{N("foo3", "2120"), N("bar3", "2121"), N("baz3", "2122")})}),
N("baz1", L{N("foo2", L{N("foo3", "2200"), N("bar3", "2201"), N("baz3", "2202")}), N("bar2", L{N("foo3", "2210"), N("bar3", "2211"), N("baz3", "2212")}), N("baz2", L{N("foo3", "2220"), N("bar3", "2221"), N("baz3", "2222")})}),
})
}
);
}
} // namespace yml
} // namespace c4


@@ -1,133 +0,0 @@
#include "./test_group.hpp"
namespace c4 {
namespace yml {
CASE_GROUP(NESTED_SEQX2)
{
ADD_CASE_TO_GROUP("nested seq x2, empty, oneline",
R"([[], [], []])",
L{SEQ, SEQ, SEQ}
);
ADD_CASE_TO_GROUP("nested seq x2, explicit, same line",
R"([[00, 01, 02], [10, 11, 12], [20, 21, 22]])",
L{
N{L{N{"00"}, N{"01"}, N{"02"}}},
N{L{N{"10"}, N{"11"}, N{"12"}}},
N{L{N{"20"}, N{"21"}, N{"22"}}},
}
);
ADD_CASE_TO_GROUP("nested seq x2, explicit first+last level, same line, no spaces",
R"([[00,01,02],[10,11,12],[20,21,22]])",
L{
N{L{N{"00"}, N{"01"}, N{"02"}}},
N{L{N{"10"}, N{"11"}, N{"12"}}},
N{L{N{"20"}, N{"21"}, N{"22"}}},
}
);
ADD_CASE_TO_GROUP("nested seq x2, explicit",
R"([
[00, 01, 02],
[10, 11, 12],
[20, 21, 22],
])",
L{
N{L{N{"00"}, N{"01"}, N{"02"}}},
N{L{N{"10"}, N{"11"}, N{"12"}}},
N{L{N{"20"}, N{"21"}, N{"22"}}},
}
);
ADD_CASE_TO_GROUP("nested seq x2",
R"(
- - 00
- 01
- 02
- - 10
- 11
- 12
- - 20
- 21
- 22
)",
L{
N{L{N{"00"}, N{"01"}, N{"02"}}},
N{L{N{"10"}, N{"11"}, N{"12"}}},
N{L{N{"20"}, N{"21"}, N{"22"}}},
}
);
ADD_CASE_TO_GROUP("nested seq x2, next line",
R"(
-
- 00
- 01
- 02
-
- 10
- 11
- 12
-
- 20
- 21
- 22
)",
L{
N{L{N{"00"}, N{"01"}, N{"02"}}},
N{L{N{"10"}, N{"11"}, N{"12"}}},
N{L{N{"20"}, N{"21"}, N{"22"}}},
}
);
ADD_CASE_TO_GROUP("nested seq x2, all next line",
R"(
-
-
00
-
01
-
02
-
-
10
-
11
-
12
-
-
20
-
21
-
22
)",
L{
N{L{N{"00"}, N{"01"}, N{"02"}}},
N{L{N{"10"}, N{"11"}, N{"12"}}},
N{L{N{"20"}, N{"21"}, N{"22"}}},
}
);
ADD_CASE_TO_GROUP("nested seq x2, implicit first, explicit last level",
R"(
- [00, 01, 02]
- [10, 11, 12]
- [20, 21, 22]
)",
L{
N{L{N{"00"}, N{"01"}, N{"02"}}},
N{L{N{"10"}, N{"11"}, N{"12"}}},
N{L{N{"20"}, N{"21"}, N{"22"}}},
}
);
}
} // namespace yml
} // namespace c4


@@ -1,187 +0,0 @@
#include "./test_group.hpp"
namespace c4 {
namespace yml {
CASE_GROUP(NESTED_SEQX3)
{
ADD_CASE_TO_GROUP("nested seq x3, explicit",
R"([
[[000, 001, 002], [010, 011, 012], [020, 021, 022]],
[[100, 101, 102], [110, 111, 112], [120, 121, 122]],
[[200, 201, 202], [210, 211, 212], [220, 221, 222]],
])",
L{
N{L{N{L{N{"000"}, N{"001"}, N{"002"}}}, N{L{N{"010"}, N{"011"}, N{"012"}}}, N{L{N{"020"}, N{"021"}, N{"022"}}}}},
N{L{N{L{N{"100"}, N{"101"}, N{"102"}}}, N{L{N{"110"}, N{"111"}, N{"112"}}}, N{L{N{"120"}, N{"121"}, N{"122"}}}}},
N{L{N{L{N{"200"}, N{"201"}, N{"202"}}}, N{L{N{"210"}, N{"211"}, N{"212"}}}, N{L{N{"220"}, N{"221"}, N{"222"}}}}},
}
);
ADD_CASE_TO_GROUP("nested seq x3",
R"(
- - - 000
- 001
- 002
- - 010
- 011
- 012
- - 020
- 021
- 022
- - - 100
- 101
- 102
- - 110
- 111
- 112
- - 120
- 121
- 122
- - - 200
- 201
- 202
- - 210
- 211
- 212
- - 220
- 221
- 222
)",
L{
N{L{N{L{N{"000"}, N{"001"}, N{"002"}}}, N{L{N{"010"}, N{"011"}, N{"012"}}}, N{L{N{"020"}, N{"021"}, N{"022"}}}}},
N{L{N{L{N{"100"}, N{"101"}, N{"102"}}}, N{L{N{"110"}, N{"111"}, N{"112"}}}, N{L{N{"120"}, N{"121"}, N{"122"}}}}},
N{L{N{L{N{"200"}, N{"201"}, N{"202"}}}, N{L{N{"210"}, N{"211"}, N{"212"}}}, N{L{N{"220"}, N{"221"}, N{"222"}}}}},
}
);
ADD_CASE_TO_GROUP("nested seq x3, continued on next line",
R"(
-
-
- 000
- 001
- 002
-
- 010
- 011
- 012
-
- 020
- 021
- 022
-
-
- 100
- 101
- 102
-
- 110
- 111
- 112
-
- 120
- 121
- 122
-
-
- 200
- 201
- 202
-
- 210
- 211
- 212
-
- 220
- 221
- 222
)",
L{
N{L{N{L{N{"000"}, N{"001"}, N{"002"}}}, N{L{N{"010"}, N{"011"}, N{"012"}}}, N{L{N{"020"}, N{"021"}, N{"022"}}}}},
N{L{N{L{N{"100"}, N{"101"}, N{"102"}}}, N{L{N{"110"}, N{"111"}, N{"112"}}}, N{L{N{"120"}, N{"121"}, N{"122"}}}}},
N{L{N{L{N{"200"}, N{"201"}, N{"202"}}}, N{L{N{"210"}, N{"211"}, N{"212"}}}, N{L{N{"220"}, N{"221"}, N{"222"}}}}},
}
);
ADD_CASE_TO_GROUP("nested seq x3, all continued on next line",
R"(
-
-
-
000
-
001
-
002
-
-
010
-
011
-
012
-
-
020
-
021
-
022
-
-
-
100
-
101
-
102
-
-
110
-
111
-
112
-
-
120
-
121
-
122
-
-
-
200
-
201
-
202
-
-
210
-
211
-
212
-
-
220
-
221
-
222
)",
L{
N{L{N{L{N{"000"}, N{"001"}, N{"002"}}}, N{L{N{"010"}, N{"011"}, N{"012"}}}, N{L{N{"020"}, N{"021"}, N{"022"}}}}},
N{L{N{L{N{"100"}, N{"101"}, N{"102"}}}, N{L{N{"110"}, N{"111"}, N{"112"}}}, N{L{N{"120"}, N{"121"}, N{"122"}}}}},
N{L{N{L{N{"200"}, N{"201"}, N{"202"}}}, N{L{N{"210"}, N{"211"}, N{"212"}}}, N{L{N{"220"}, N{"221"}, N{"222"}}}}},
}
);
}
} // namespace yml
} // namespace c4


@@ -1,124 +0,0 @@
#include "./test_group.hpp"
namespace c4 {
namespace yml {
CASE_GROUP(NESTED_SEQX4)
{
ADD_CASE_TO_GROUP("nested seq x4, explicit",
R"([
[[[0000, 0001, 0002], [0010, 0011, 0012], [0020, 0021, 0022]],
[[0100, 0101, 0102], [0110, 0111, 0112], [0120, 0121, 0122]],
[[0200, 0201, 0202], [0210, 0211, 0212], [0220, 0221, 0222]]],
[[[1000, 1001, 1002], [1010, 1011, 1012], [1020, 1021, 1022]],
[[1100, 1101, 1102], [1110, 1111, 1112], [1120, 1121, 1122]],
[[1200, 1201, 1202], [1210, 1211, 1212], [1220, 1221, 1222]]],
[[[2000, 2001, 2002], [2010, 2011, 2012], [2020, 2021, 2022]],
[[2100, 2101, 2102], [2110, 2111, 2112], [2120, 2121, 2122]],
[[2200, 2201, 2202], [2210, 2211, 2212], [2220, 2221, 2222]]],
])",
L{
N{L{N{L{N{L{N{"0000"}, N{"0001"}, N{"0002"}}}, N{L{N{"0010"}, N{"0011"}, N{"0012"}}}, N{L{N{"0020"}, N{"0021"}, N{"0022"}}}}}, N{L{N{L{N{"0100"}, N{"0101"}, N{"0102"}}}, N{L{N{"0110"}, N{"0111"}, N{"0112"}}}, N{L{N{"0120"}, N{"0121"}, N{"0122"}}}}}, N{L{N{L{N{"0200"}, N{"0201"}, N{"0202"}}}, N{L{N{"0210"}, N{"0211"}, N{"0212"}}}, N{L{N{"0220"}, N{"0221"}, N{"0222"}}}}}}},
N{L{N{L{N{L{N{"1000"}, N{"1001"}, N{"1002"}}}, N{L{N{"1010"}, N{"1011"}, N{"1012"}}}, N{L{N{"1020"}, N{"1021"}, N{"1022"}}}}}, N{L{N{L{N{"1100"}, N{"1101"}, N{"1102"}}}, N{L{N{"1110"}, N{"1111"}, N{"1112"}}}, N{L{N{"1120"}, N{"1121"}, N{"1122"}}}}}, N{L{N{L{N{"1200"}, N{"1201"}, N{"1202"}}}, N{L{N{"1210"}, N{"1211"}, N{"1212"}}}, N{L{N{"1220"}, N{"1221"}, N{"1222"}}}}}}},
N{L{N{L{N{L{N{"2000"}, N{"2001"}, N{"2002"}}}, N{L{N{"2010"}, N{"2011"}, N{"2012"}}}, N{L{N{"2020"}, N{"2021"}, N{"2022"}}}}}, N{L{N{L{N{"2100"}, N{"2101"}, N{"2102"}}}, N{L{N{"2110"}, N{"2111"}, N{"2112"}}}, N{L{N{"2120"}, N{"2121"}, N{"2122"}}}}}, N{L{N{L{N{"2200"}, N{"2201"}, N{"2202"}}}, N{L{N{"2210"}, N{"2211"}, N{"2212"}}}, N{L{N{"2220"}, N{"2221"}, N{"2222"}}}}}}},
}
);
ADD_CASE_TO_GROUP("nested seq x4",
R"(
- - - - 0000
- 0001
- 0002
- - 0010
- 0011
- 0012
- - 0020
- 0021
- 0022
- - - 0100
- 0101
- 0102
- - 0110
- 0111
- 0112
- - 0120
- 0121
- 0122
- - - 0200
- 0201
- 0202
- - 0210
- 0211
- 0212
- - 0220
- 0221
- 0222
- - - - 1000
- 1001
- 1002
- - 1010
- 1011
- 1012
- - 1020
- 1021
- 1022
- - - 1100
- 1101
- 1102
- - 1110
- 1111
- 1112
- - 1120
- 1121
- 1122
- - - 1200
- 1201
- 1202
- - 1210
- 1211
- 1212
- - 1220
- 1221
- 1222
- - - - 2000
- 2001
- 2002
- - 2010
- 2011
- 2012
- - 2020
- 2021
- 2022
- - - 2100
- 2101
- 2102
- - 2110
- 2111
- 2112
- - 2120
- 2121
- 2122
- - - 2200
- 2201
- 2202
- - 2210
- 2211
- 2212
- - 2220
- 2221
- 2222
)",
L{
N{L{N{L{N{L{N{"0000"}, N{"0001"}, N{"0002"}}}, N{L{N{"0010"}, N{"0011"}, N{"0012"}}}, N{L{N{"0020"}, N{"0021"}, N{"0022"}}}}}, N{L{N{L{N{"0100"}, N{"0101"}, N{"0102"}}}, N{L{N{"0110"}, N{"0111"}, N{"0112"}}}, N{L{N{"0120"}, N{"0121"}, N{"0122"}}}}}, N{L{N{L{N{"0200"}, N{"0201"}, N{"0202"}}}, N{L{N{"0210"}, N{"0211"}, N{"0212"}}}, N{L{N{"0220"}, N{"0221"}, N{"0222"}}}}}}},
N{L{N{L{N{L{N{"1000"}, N{"1001"}, N{"1002"}}}, N{L{N{"1010"}, N{"1011"}, N{"1012"}}}, N{L{N{"1020"}, N{"1021"}, N{"1022"}}}}}, N{L{N{L{N{"1100"}, N{"1101"}, N{"1102"}}}, N{L{N{"1110"}, N{"1111"}, N{"1112"}}}, N{L{N{"1120"}, N{"1121"}, N{"1122"}}}}}, N{L{N{L{N{"1200"}, N{"1201"}, N{"1202"}}}, N{L{N{"1210"}, N{"1211"}, N{"1212"}}}, N{L{N{"1220"}, N{"1221"}, N{"1222"}}}}}}},
N{L{N{L{N{L{N{"2000"}, N{"2001"}, N{"2002"}}}, N{L{N{"2010"}, N{"2011"}, N{"2012"}}}, N{L{N{"2020"}, N{"2021"}, N{"2022"}}}}}, N{L{N{L{N{"2100"}, N{"2101"}, N{"2102"}}}, N{L{N{"2110"}, N{"2111"}, N{"2112"}}}, N{L{N{"2120"}, N{"2121"}, N{"2122"}}}}}, N{L{N{L{N{"2200"}, N{"2201"}, N{"2202"}}}, N{L{N{"2210"}, N{"2211"}, N{"2212"}}}, N{L{N{"2220"}, N{"2221"}, N{"2222"}}}}}}},
}
);
}
} // namespace yml
} // namespace c4

test/test_node_type.cpp Normal file

@@ -0,0 +1,341 @@
#ifndef RYML_SINGLE_HEADER
#include "c4/yml/std/std.hpp"
#include "c4/yml/parse.hpp"
#include "c4/yml/emit.hpp"
#include <c4/format.hpp>
#include <c4/yml/detail/checks.hpp>
#include <c4/yml/detail/print.hpp>
#endif
#include "./test_lib/test_case.hpp"
#include "./test_lib/callbacks_tester.hpp"
#include <gtest/gtest.h>
namespace c4 {
namespace yml {
TEST(NodeType, type_str_preset)
{
// avoid coverage misses
EXPECT_EQ(to_csubstr(NodeType(KEYVAL).type_str()), "KEYVAL");
EXPECT_EQ(to_csubstr(NodeType(KEY).type_str()), "KEY");
EXPECT_EQ(to_csubstr(NodeType(VAL).type_str()), "VAL");
EXPECT_EQ(to_csubstr(NodeType(MAP).type_str()), "MAP");
EXPECT_EQ(to_csubstr(NodeType(SEQ).type_str()), "SEQ");
EXPECT_EQ(to_csubstr(NodeType(KEYMAP).type_str()), "KEYMAP");
EXPECT_EQ(to_csubstr(NodeType(KEYSEQ).type_str()), "KEYSEQ");
EXPECT_EQ(to_csubstr(NodeType(DOCSEQ).type_str()), "DOCSEQ");
EXPECT_EQ(to_csubstr(NodeType(DOCMAP).type_str()), "DOCMAP");
EXPECT_EQ(to_csubstr(NodeType(DOCVAL).type_str()), "DOCVAL");
EXPECT_EQ(to_csubstr(NodeType(DOC).type_str()), "DOC");
EXPECT_EQ(to_csubstr(NodeType(STREAM).type_str()), "STREAM");
EXPECT_EQ(to_csubstr(NodeType(NOTYPE).type_str()), "NOTYPE");
EXPECT_EQ(to_csubstr(NodeType(KEYVAL|KEYREF).type_str()), "KEYVAL***");
EXPECT_EQ(to_csubstr(NodeType(KEYVAL|VALREF).type_str()), "KEYVAL***");
EXPECT_EQ(to_csubstr(NodeType(KEYVAL|KEYANCH).type_str()), "KEYVAL***");
EXPECT_EQ(to_csubstr(NodeType(KEYVAL|VALANCH).type_str()), "KEYVAL***");
EXPECT_EQ(to_csubstr(NodeType(KEYVAL|KEYREF|VALANCH).type_str()), "KEYVAL***");
EXPECT_EQ(to_csubstr(NodeType(KEYVAL|KEYANCH|VALREF).type_str()), "KEYVAL***");
EXPECT_EQ(to_csubstr(NodeType(KEYMAP|KEYREF).type_str()), "KEYMAP***");
EXPECT_EQ(to_csubstr(NodeType(KEYMAP|VALREF).type_str()), "KEYMAP***");
EXPECT_EQ(to_csubstr(NodeType(KEYMAP|KEYANCH).type_str()), "KEYMAP***");
EXPECT_EQ(to_csubstr(NodeType(KEYMAP|VALANCH).type_str()), "KEYMAP***");
EXPECT_EQ(to_csubstr(NodeType(KEYMAP|KEYREF|VALANCH).type_str()), "KEYMAP***");
EXPECT_EQ(to_csubstr(NodeType(KEYMAP|KEYANCH|VALREF).type_str()), "KEYMAP***");
EXPECT_EQ(to_csubstr(NodeType(KEYSEQ|KEYREF).type_str()), "KEYSEQ***");
EXPECT_EQ(to_csubstr(NodeType(KEYSEQ|VALREF).type_str()), "KEYSEQ***");
EXPECT_EQ(to_csubstr(NodeType(KEYSEQ|KEYANCH).type_str()), "KEYSEQ***");
EXPECT_EQ(to_csubstr(NodeType(KEYSEQ|VALANCH).type_str()), "KEYSEQ***");
EXPECT_EQ(to_csubstr(NodeType(KEYSEQ|KEYREF|VALANCH).type_str()), "KEYSEQ***");
EXPECT_EQ(to_csubstr(NodeType(KEYSEQ|KEYANCH|VALREF).type_str()), "KEYSEQ***");
EXPECT_EQ(to_csubstr(NodeType(DOCSEQ|VALANCH).type_str()), "DOCSEQ***");
EXPECT_EQ(to_csubstr(NodeType(DOCSEQ|VALREF).type_str()), "DOCSEQ***");
EXPECT_EQ(to_csubstr(NodeType(DOCMAP|VALANCH).type_str()), "DOCMAP***");
EXPECT_EQ(to_csubstr(NodeType(DOCMAP|VALREF).type_str()), "DOCMAP***");
EXPECT_EQ(to_csubstr(NodeType(DOCVAL|VALANCH).type_str()), "DOCVAL***");
EXPECT_EQ(to_csubstr(NodeType(DOCVAL|VALREF).type_str()), "DOCVAL***");
EXPECT_EQ(to_csubstr(NodeType(KEY|KEYREF).type_str()), "KEY***");
EXPECT_EQ(to_csubstr(NodeType(KEY|KEYANCH).type_str()), "KEY***");
EXPECT_EQ(to_csubstr(NodeType(VAL|VALREF).type_str()), "VAL***");
EXPECT_EQ(to_csubstr(NodeType(VAL|VALANCH).type_str()), "VAL***");
EXPECT_EQ(to_csubstr(NodeType(MAP|VALREF).type_str()), "MAP***");
EXPECT_EQ(to_csubstr(NodeType(MAP|VALANCH).type_str()), "MAP***");
EXPECT_EQ(to_csubstr(NodeType(SEQ|VALREF).type_str()), "SEQ***");
EXPECT_EQ(to_csubstr(NodeType(SEQ|VALANCH).type_str()), "SEQ***");
EXPECT_EQ(to_csubstr(NodeType(DOC|VALREF).type_str()), "DOC***");
EXPECT_EQ(to_csubstr(NodeType(DOC|VALANCH).type_str()), "DOC***");
EXPECT_EQ(to_csubstr(NodeType(KEYREF).type_str()), "(unk)");
EXPECT_EQ(to_csubstr(NodeType(VALREF).type_str()), "(unk)");
EXPECT_EQ(to_csubstr(NodeType(KEYANCH).type_str()), "(unk)");
EXPECT_EQ(to_csubstr(NodeType(VALANCH).type_str()), "(unk)");
EXPECT_EQ(to_csubstr(NodeType(NOTYPE).type_str()), "NOTYPE");
}
TEST(NodeType, type_str)
{
{
char bufsmall[2];
EXPECT_EQ(NodeType().type_str(bufsmall).len, 1 + 6); // NOTYPE
EXPECT_EQ(NodeType().type_str(bufsmall).str, nullptr); // NOTYPE
EXPECT_EQ(NodeType(VAL).type_str(bufsmall).len, 1 + 3);
EXPECT_EQ(NodeType(VAL).type_str(bufsmall).str, nullptr);
EXPECT_EQ(NodeType(KEYVAL).type_str(bufsmall).len, 1 + 7);
EXPECT_EQ(NodeType(KEYVAL|KEYANCH|VALANCH).type_str(bufsmall).len, 1 + 19);
EXPECT_EQ(NodeType(KEYVAL|KEYANCH|VALANCH).type_str(bufsmall).str, nullptr);
}
#define teststr(bits, str) \
{ \
char buf[128] = {0}; \
memset(buf, 1, sizeof(buf)); \
csubstr expected = str; \
csubstr actual = NodeType(bits).type_str(buf); \
ASSERT_LT(actual.len + 1, C4_COUNTOF(buf)); \
EXPECT_EQ(actual, expected); \
EXPECT_EQ(NodeType(bits).type_str(buf), expected); \
EXPECT_EQ(buf[expected.len], '\0'); \
}
teststr(0, "NOTYPE")
teststr(NOTYPE, "NOTYPE")
teststr(STREAM, "STREAM")
teststr(DOC, "DOC")
teststr(KEY, "KEY")
teststr(KEYTAG, "KTAG")
teststr(KEYANCH, "KANCH")
teststr(KEYREF, "KREF")
teststr(KEY_LITERAL, "KLITERAL")
teststr(KEY_FOLDED, "KFOLDED")
teststr(KEY_SQUO, "KSQUO")
teststr(KEY_DQUO, "KDQUO")
teststr(KEY_PLAIN, "KPLAIN")
teststr(KEY_UNFILT, "KUNFILT")
teststr(VAL, "VAL")
teststr(VALTAG, "VTAG")
teststr(VALANCH, "VANCH")
teststr(VALREF, "VREF")
teststr(VAL_LITERAL, "VLITERAL")
teststr(VAL_FOLDED, "VFOLDED")
teststr(VAL_SQUO, "VSQUO")
teststr(VAL_DQUO, "VDQUO")
teststr(VAL_PLAIN, "VPLAIN")
teststr(VAL_UNFILT, "VUNFILT")
teststr(MAP, "MAP")
teststr(SEQ, "SEQ")
teststr(FLOW_SL, "FLOWSL")
teststr(FLOW_ML, "FLOWML")
teststr(BLOCK, "BLCK")
teststr(KEYVAL, "KEY|VAL")
teststr(KEYMAP, "KEY|MAP")
teststr(KEYVAL|KEYANCH|VALANCH, "KEY|KANCH|VAL|VANCH")
#undef teststr
}
TEST(NodeType, scalar_style_choose)
{
EXPECT_EQ(scalar_style_choose(" \n\t"), SCALAR_DQUO);
EXPECT_EQ(scalar_style_choose("01"), SCALAR_PLAIN);
}
TEST(NodeType, scalar_style_choose_json)
{
EXPECT_EQ(scalar_style_json_choose("true"), SCALAR_PLAIN);
EXPECT_EQ(scalar_style_json_choose("false"), SCALAR_PLAIN);
EXPECT_EQ(scalar_style_json_choose("null"), SCALAR_PLAIN);
EXPECT_EQ(scalar_style_json_choose("0.1"), SCALAR_PLAIN);
EXPECT_EQ(scalar_style_json_choose("01"), SCALAR_DQUO);
EXPECT_EQ(scalar_style_json_choose("foo"), SCALAR_DQUO);
EXPECT_EQ(scalar_style_json_choose("bar"), SCALAR_DQUO);
}
TEST(NodeType, scalar_style_query_plain)
{
EXPECT_TRUE(scalar_style_query_plain("-.inf"));
EXPECT_TRUE(scalar_style_query_plain("-.INF"));
EXPECT_TRUE(scalar_style_query_plain("-.034"));
EXPECT_FALSE(scalar_style_query_plain("-.034x"));
}
TEST(NodeType, is_stream)
{
EXPECT_FALSE(NodeType(NOTYPE).is_stream());
EXPECT_TRUE(NodeType(STREAM).is_stream());
}
TEST(NodeType, is_doc)
{
EXPECT_FALSE(NodeType(NOTYPE).is_doc());
EXPECT_TRUE(NodeType(DOC).is_doc());
}
TEST(NodeType, is_container)
{
EXPECT_FALSE(NodeType(NOTYPE).is_container());
EXPECT_FALSE(NodeType(VAL).is_container());
EXPECT_FALSE(NodeType(KEY).is_container());
EXPECT_FALSE(NodeType(KEYVAL).is_container());
EXPECT_TRUE(NodeType(MAP).is_container());
EXPECT_TRUE(NodeType(SEQ).is_container());
EXPECT_TRUE(NodeType(KEYMAP).is_container());
EXPECT_TRUE(NodeType(KEYSEQ).is_container());
EXPECT_TRUE(NodeType(DOCMAP).is_container());
EXPECT_TRUE(NodeType(DOCSEQ).is_container());
}
TEST(NodeType, is_map)
{
EXPECT_FALSE(NodeType(NOTYPE).is_map());
EXPECT_FALSE(NodeType(VAL).is_map());
EXPECT_FALSE(NodeType(KEY).is_map());
EXPECT_TRUE(NodeType(MAP).is_map());
EXPECT_TRUE(NodeType(KEYMAP).is_map());
EXPECT_FALSE(NodeType(SEQ).is_map());
EXPECT_FALSE(NodeType(KEYSEQ).is_map());
}
TEST(NodeType, is_seq)
{
EXPECT_FALSE(NodeType(NOTYPE).is_seq());
EXPECT_FALSE(NodeType(VAL).is_seq());
EXPECT_FALSE(NodeType(KEY).is_seq());
EXPECT_FALSE(NodeType(MAP).is_seq());
EXPECT_FALSE(NodeType(KEYMAP).is_seq());
EXPECT_TRUE(NodeType(SEQ).is_seq());
EXPECT_TRUE(NodeType(KEYSEQ).is_seq());
}
TEST(NodeType, has_val)
{
EXPECT_FALSE(NodeType(NOTYPE).has_val());
EXPECT_FALSE(NodeType(KEY).has_val());
EXPECT_TRUE(NodeType(VAL).has_val());
EXPECT_TRUE(NodeType(DOCVAL).has_val());
EXPECT_TRUE(NodeType(KEYVAL).has_val());
EXPECT_FALSE(NodeType(KEYMAP).has_val());
EXPECT_FALSE(NodeType(KEYSEQ).has_val());
}
TEST(NodeType, is_val)
{
EXPECT_FALSE(NodeType(NOTYPE).is_val());
EXPECT_FALSE(NodeType(KEY).is_val());
EXPECT_TRUE(NodeType(VAL).is_val());
EXPECT_TRUE(NodeType(DOCVAL).is_val());
EXPECT_FALSE(NodeType(KEYVAL).is_val());
EXPECT_FALSE(NodeType(KEYMAP).is_val());
EXPECT_FALSE(NodeType(KEYSEQ).is_val());
}
TEST(NodeType, has_key)
{
EXPECT_FALSE(NodeType(NOTYPE).has_key());
EXPECT_TRUE(NodeType(KEY).has_key());
EXPECT_FALSE(NodeType(VAL).has_key());
EXPECT_TRUE(NodeType(KEYVAL).has_key());
EXPECT_TRUE(NodeType(KEYMAP).has_key());
EXPECT_TRUE(NodeType(KEYSEQ).has_key());
}
TEST(NodeType, is_keyval)
{
EXPECT_FALSE(NodeType(NOTYPE).is_keyval());
EXPECT_FALSE(NodeType(KEY).is_keyval());
EXPECT_FALSE(NodeType(VAL).is_keyval());
EXPECT_TRUE(NodeType(KEYVAL).is_keyval());
EXPECT_FALSE(NodeType(DOCVAL).is_keyval());
EXPECT_FALSE(NodeType(KEYMAP).is_keyval());
EXPECT_FALSE(NodeType(KEYSEQ).is_keyval());
}
TEST(NodeType, has_key_tag)
{
EXPECT_FALSE(NodeType().has_key_tag());
EXPECT_TRUE(NodeType(KEYTAG).has_key_tag());
EXPECT_TRUE(NodeType(KEY|KEYTAG).has_key_tag());
}
TEST(NodeType, has_val_tag)
{
EXPECT_FALSE(NodeType().has_val_tag());
EXPECT_TRUE(NodeType(VALTAG).has_val_tag());
EXPECT_TRUE(NodeType(VAL|VALTAG).has_val_tag());
}
TEST(NodeType, has_key_anchor)
{
EXPECT_FALSE(NodeType().has_key_anchor());
EXPECT_TRUE(NodeType(KEYANCH).has_key_anchor());
EXPECT_TRUE(NodeType(KEY|KEYANCH).has_key_anchor());
}
TEST(NodeType, has_val_anchor)
{
EXPECT_FALSE(NodeType().has_val_anchor());
EXPECT_TRUE(NodeType(VALANCH).has_val_anchor());
EXPECT_TRUE(NodeType(VAL|VALANCH).has_val_anchor());
}
TEST(NodeType, has_anchor)
{
EXPECT_FALSE(NodeType().has_anchor());
EXPECT_TRUE(NodeType(VALANCH).has_anchor());
EXPECT_TRUE(NodeType(KEYANCH).has_anchor());
EXPECT_TRUE(NodeType(KEYANCH|VALANCH).has_anchor());
EXPECT_TRUE(NodeType(KEY|VALANCH).has_anchor());
EXPECT_TRUE(NodeType(VAL|KEYANCH).has_anchor());
EXPECT_TRUE(NodeType(KEY|KEYANCH).has_anchor());
EXPECT_TRUE(NodeType(VAL|VALANCH).has_anchor());
}
TEST(NodeType, is_key_ref)
{
EXPECT_FALSE(NodeType().is_key_ref());
EXPECT_TRUE(NodeType(KEYREF).is_key_ref());
EXPECT_TRUE(NodeType(KEY|KEYREF).is_key_ref());
}
TEST(NodeType, is_val_ref)
{
EXPECT_FALSE(NodeType().is_val_ref());
EXPECT_TRUE(NodeType(VALREF).is_val_ref());
EXPECT_TRUE(NodeType(VAL|VALREF).is_val_ref());
}
TEST(NodeType, is_ref)
{
EXPECT_FALSE(NodeType().is_ref());
EXPECT_FALSE(NodeType(KEYVAL).is_ref());
EXPECT_TRUE(NodeType(KEYREF).is_ref());
EXPECT_TRUE(NodeType(VALREF).is_ref());
EXPECT_TRUE(NodeType(KEY|VALREF).is_ref());
EXPECT_TRUE(NodeType(VAL|KEYREF).is_ref());
EXPECT_TRUE(NodeType(KEYREF|VALREF).is_ref());
}
TEST(NodeType, is_key_quoted)
{
EXPECT_FALSE(NodeType().is_key_quoted());
EXPECT_TRUE(NodeType(KEYQUO).is_key_quoted());
EXPECT_TRUE(NodeType(KEY|KEYQUO).is_key_quoted());
}
TEST(NodeType, is_val_quoted)
{
EXPECT_FALSE(NodeType().is_val_quoted());
EXPECT_TRUE(NodeType(VALQUO).is_val_quoted());
EXPECT_TRUE(NodeType(VAL|VALQUO).is_val_quoted());
}
TEST(NodeType, is_quoted)
{
EXPECT_FALSE(NodeType().is_quoted());
EXPECT_TRUE(NodeType(KEYQUO).is_quoted());
EXPECT_TRUE(NodeType(VALQUO).is_quoted());
EXPECT_TRUE(NodeType(KEYQUO|VALQUO).is_quoted());
EXPECT_TRUE(NodeType(KEY|KEYQUO).is_quoted());
EXPECT_TRUE(NodeType(VAL|VALQUO).is_quoted());
EXPECT_TRUE(NodeType(KEY|VALQUO).is_quoted());
EXPECT_TRUE(NodeType(VAL|KEYQUO).is_quoted());
}
} // namespace yml
} // namespace c4
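A side note, not part of the commit: the assertions above document the two type_str() overloads -- a fixed name for preset flag combinations, and a composed flag string written into a caller-provided buffer (with a null str returned when the buffer is too small). A minimal usage sketch under those same assumptions:

#include <ryml.hpp>
#include <cstdio>

int main()
{
    // preset combinations map to a fixed name
    std::printf("%s\n", ryml::NodeType(ryml::KEYVAL).type_str()); // "KEYVAL"
    // arbitrary flag combinations are spelled out into the given buffer
    char buf[64];
    ryml::csubstr s = ryml::NodeType(ryml::KEY|ryml::KEYANCH|ryml::VAL).type_str(buf);
    std::printf("%.*s\n", (int)s.len, s.str); // "KEY|KANCH|VAL", per the tests above
    return 0;
}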

View File

@@ -6,8 +6,8 @@
#include <c4/yml/detail/checks.hpp>
#include <c4/yml/detail/print.hpp>
#endif
#include "./test_case.hpp"
#include "./callbacks_tester.hpp"
#include "./test_lib/test_case.hpp"
#include "./test_lib/callbacks_tester.hpp"
#include <gtest/gtest.h>
@@ -222,128 +222,146 @@ TEST(NodeRef, valid_vs_seed_vs_readable)
EXPECT_FALSE(none.readable());
}
#define _TEST_FAIL(method_expr) \
{ \
SCOPED_TRACE(#method_expr); \
if(tree) \
ExpectError::check_assertion(tree, [&]{ return method_expr; }); \
else \
ExpectError::check_assertion([&]{ return method_expr; }); \
#define _TEST_FAIL(tree, method_expr) \
{ \
SCOPED_TRACE(#method_expr); \
ExpectError::check_assertion(tree, [&]{ \
auto ret = (method_expr); \
C4_DONT_OPTIMIZE(ret); \
}); \
}
#define _TEST_SUCCEED(method_expr) \
{ \
SCOPED_TRACE(#method_expr); \
if(tree) \
ExpectError::check_success(tree, [&]{ return method_expr; }); \
else \
ExpectError::check_success([&]{ return method_expr; }); \
#define _TEST_SUCCEED(tree, method_expr) \
{ \
SCOPED_TRACE(#method_expr); \
ExpectError::check_success(tree, [&]{ \
auto ret = (method_expr); \
C4_DONT_OPTIMIZE(ret); \
}); \
}
template<class NodeT>
void test_fail_read(Tree *tree, NodeT node)
{
_TEST_SUCCEED(node.get())
_TEST_FAIL(node.type())
_TEST_FAIL(node.type_str())
_TEST_FAIL(node.key())
_TEST_FAIL(node.key_tag())
_TEST_FAIL(node.key_anchor())
_TEST_FAIL(node.key_ref())
_TEST_FAIL(node.key_is_null())
_TEST_FAIL(node.keysc())
_TEST_FAIL(node.val())
_TEST_FAIL(node.val_tag())
_TEST_FAIL(node.val_anchor())
_TEST_FAIL(node.val_ref())
_TEST_FAIL(node.val_is_null())
_TEST_FAIL(node.valsc())
_TEST_FAIL(node.is_map())
_TEST_FAIL(node.empty())
_TEST_FAIL(node.is_stream())
_TEST_FAIL(node.is_doc())
_TEST_FAIL(node.is_container())
_TEST_FAIL(node.is_map())
_TEST_FAIL(node.is_seq())
_TEST_FAIL(node.has_val())
_TEST_FAIL(node.has_key())
_TEST_FAIL(node.is_keyval())
_TEST_FAIL(node.has_key_tag())
_TEST_FAIL(node.has_val_tag())
_TEST_FAIL(node.has_key_anchor())
_TEST_FAIL(node.has_val_anchor())
_TEST_FAIL(node.is_val_anchor())
_TEST_FAIL(node.has_anchor())
_TEST_FAIL(node.is_anchor())
_TEST_FAIL(node.is_key_ref())
_TEST_FAIL(node.is_val_ref())
_TEST_FAIL(node.is_ref())
_TEST_FAIL(node.is_anchor_or_ref())
_TEST_FAIL(node.is_key_quoted())
_TEST_FAIL(node.is_val_quoted())
_TEST_FAIL(node.parent_is_seq())
_TEST_FAIL(node.parent_is_map())
_TEST_FAIL(node.is_root())
_TEST_FAIL(node.has_parent())
_TEST_FAIL(node.has_child(0))
_TEST_FAIL(node.has_child("key"))
_TEST_FAIL(node.has_children())
_TEST_FAIL(node.has_sibling("key"))
_TEST_FAIL(node.has_other_siblings())
_TEST_FAIL(node.doc(0))
_TEST_FAIL(node.parent())
_TEST_FAIL(node.num_children())
_TEST_FAIL(node.first_child())
_TEST_FAIL(node.last_child())
_TEST_FAIL(node.child(0))
_TEST_FAIL(node.find_child("key"))
_TEST_FAIL(node.prev_sibling())
_TEST_FAIL(node.next_sibling())
_TEST_FAIL(node.first_sibling())
_TEST_FAIL(node.last_sibling())
_TEST_FAIL(node.sibling(0))
_TEST_FAIL(node.find_sibling("key"))
_TEST_FAIL(node.num_children())
_TEST_FAIL(node.num_siblings())
_TEST_FAIL(node.num_other_siblings())
_TEST_FAIL(node["key"])
_TEST_FAIL(node[0])
_TEST_FAIL(node.at("key"))
_TEST_FAIL(node.at(0))
_TEST_SUCCEED(tree, node.get())
_TEST_FAIL(tree, node.type())
_TEST_FAIL(tree, node.type_str())
_TEST_FAIL(tree, node.key())
_TEST_FAIL(tree, node.key_tag())
_TEST_FAIL(tree, node.key_anchor())
_TEST_FAIL(tree, node.key_ref())
_TEST_FAIL(tree, node.key_is_null())
_TEST_FAIL(tree, node.keysc())
_TEST_FAIL(tree, node.val())
_TEST_FAIL(tree, node.val_tag())
_TEST_FAIL(tree, node.val_anchor())
_TEST_FAIL(tree, node.val_ref())
_TEST_FAIL(tree, node.val_is_null())
_TEST_FAIL(tree, node.valsc())
_TEST_FAIL(tree, node.is_map())
_TEST_FAIL(tree, node.empty())
_TEST_FAIL(tree, node.is_stream())
_TEST_FAIL(tree, node.is_doc())
_TEST_FAIL(tree, node.is_container())
_TEST_FAIL(tree, node.is_map())
_TEST_FAIL(tree, node.is_seq())
_TEST_FAIL(tree, node.has_val())
_TEST_FAIL(tree, node.has_key())
_TEST_FAIL(tree, node.is_keyval())
_TEST_FAIL(tree, node.has_key_tag())
_TEST_FAIL(tree, node.has_val_tag())
_TEST_FAIL(tree, node.has_key_anchor())
_TEST_FAIL(tree, node.has_val_anchor())
_TEST_FAIL(tree, node.has_anchor())
_TEST_FAIL(tree, node.is_key_ref())
_TEST_FAIL(tree, node.is_val_ref())
_TEST_FAIL(tree, node.is_ref())
_TEST_FAIL(tree, node.parent_is_seq())
_TEST_FAIL(tree, node.parent_is_map())
_TEST_FAIL(tree, node.type_has_any(MAP|SEQ))
_TEST_FAIL(tree, node.type_has_all(MAP|SEQ))
_TEST_FAIL(tree, node.type_has_none(MAP|SEQ))
_TEST_FAIL(tree, node.is_container_styled())
_TEST_FAIL(tree, node.is_block())
_TEST_FAIL(tree, node.is_flow())
_TEST_FAIL(tree, node.is_flow_sl())
_TEST_FAIL(tree, node.is_flow_ml())
_TEST_FAIL(tree, node.is_key_styled())
_TEST_FAIL(tree, node.is_val_styled())
_TEST_FAIL(tree, node.is_key_literal())
_TEST_FAIL(tree, node.is_val_literal())
_TEST_FAIL(tree, node.is_key_folded())
_TEST_FAIL(tree, node.is_val_folded())
_TEST_FAIL(tree, node.is_key_squo())
_TEST_FAIL(tree, node.is_val_squo())
_TEST_FAIL(tree, node.is_key_dquo())
_TEST_FAIL(tree, node.is_val_dquo())
_TEST_FAIL(tree, node.is_key_plain())
_TEST_FAIL(tree, node.is_val_plain())
_TEST_FAIL(tree, node.is_key_quoted())
_TEST_FAIL(tree, node.is_val_quoted())
_TEST_FAIL(tree, node.is_quoted())
_TEST_FAIL(tree, node.is_root())
_TEST_FAIL(tree, node.has_parent())
_TEST_FAIL(tree, node.has_child(0))
_TEST_FAIL(tree, node.has_child("key"))
_TEST_FAIL(tree, node.has_children())
_TEST_FAIL(tree, node.has_sibling("key"))
_TEST_FAIL(tree, node.has_other_siblings())
_TEST_FAIL(tree, node.doc(0))
_TEST_FAIL(tree, node.parent())
_TEST_FAIL(tree, node.num_children())
_TEST_FAIL(tree, node.first_child())
_TEST_FAIL(tree, node.last_child())
_TEST_FAIL(tree, node.child(0))
_TEST_FAIL(tree, node.find_child("key"))
_TEST_FAIL(tree, node.prev_sibling())
_TEST_FAIL(tree, node.next_sibling())
_TEST_FAIL(tree, node.first_sibling())
_TEST_FAIL(tree, node.last_sibling())
_TEST_FAIL(tree, node.sibling(0))
_TEST_FAIL(tree, node.find_sibling("key"))
_TEST_FAIL(tree, node.num_children())
_TEST_FAIL(tree, node.num_siblings())
_TEST_FAIL(tree, node.num_other_siblings())
_TEST_FAIL(tree, node["key"])
_TEST_FAIL(tree, node[0])
_TEST_FAIL(tree, node.at("key"))
_TEST_FAIL(tree, node.at(0))
int val;
_TEST_FAIL(node >> val)
_TEST_FAIL(node >> key(val))
_TEST_FAIL(node >> fmt::base64(val))
_TEST_FAIL(node >> key(fmt::base64(val)))
_TEST_FAIL(node.deserialize_key(fmt::base64(val)))
_TEST_FAIL(node.deserialize_val(fmt::base64(val)))
_TEST_FAIL(node.get_if("key", &val));
_TEST_FAIL(node.get_if("key", &val, 0));
_TEST_FAIL(tree, node >> val)
_TEST_FAIL(tree, node >> key(val))
_TEST_FAIL(tree, node >> fmt::base64(val))
_TEST_FAIL(tree, node >> key(fmt::base64(val)))
_TEST_FAIL(tree, node.deserialize_key(fmt::base64(val)))
_TEST_FAIL(tree, node.deserialize_val(fmt::base64(val)))
_TEST_FAIL(tree, node.get_if("key", &val));
_TEST_FAIL(tree, node.get_if("key", &val, 0));
const NodeT const_node = node;
_TEST_FAIL(node.begin());
_TEST_FAIL(node.cbegin());
_TEST_FAIL(const_node.begin());
_TEST_FAIL(const_node.cbegin());
_TEST_FAIL(node.end());
_TEST_FAIL(node.end());
_TEST_FAIL(const_node.end());
_TEST_FAIL(const_node.end());
_TEST_FAIL(node.children());
_TEST_FAIL(node.children());
_TEST_FAIL(const_node.children());
_TEST_FAIL(const_node.children());
_TEST_FAIL(node.siblings());
_TEST_FAIL(node.siblings());
_TEST_FAIL(const_node.siblings());
_TEST_FAIL(const_node.siblings());
//_TEST_FAIL(node.visit([](NodeT &n, size_t level){ (void)n; (void)level; return false; }));
//_TEST_FAIL(const_node.visit([](const NodeT &n, size_t level){ (void)n; (void)level; return false; }));
_TEST_SUCCEED(const_node == node);
_TEST_SUCCEED(const_node != node);
_TEST_FAIL(tree, node.begin());
_TEST_FAIL(tree, node.cbegin());
_TEST_FAIL(tree, const_node.begin());
_TEST_FAIL(tree, const_node.cbegin());
_TEST_FAIL(tree, node.end());
_TEST_FAIL(tree, node.end());
_TEST_FAIL(tree, const_node.end());
_TEST_FAIL(tree, const_node.end());
_TEST_FAIL(tree, node.children());
_TEST_FAIL(tree, node.children());
_TEST_FAIL(tree, const_node.children());
_TEST_FAIL(tree, const_node.children());
_TEST_FAIL(tree, node.siblings());
_TEST_FAIL(tree, node.siblings());
_TEST_FAIL(tree, const_node.siblings());
_TEST_FAIL(tree, const_node.siblings());
//_TEST_FAIL(tree, node.visit([](NodeT &n, size_t level){ (void)n; (void)level; return false; }));
//_TEST_FAIL(tree, const_node.visit([](const NodeT &n, size_t level){ (void)n; (void)level; return false; }));
_TEST_SUCCEED(tree, const_node == node);
_TEST_SUCCEED(tree, const_node != node);
if(std::is_same<NodeT, NodeRef>::value)
{
ConstNodeRef other;
_TEST_SUCCEED(node == other);
_TEST_SUCCEED(node != node);
_TEST_SUCCEED(tree, node == other);
_TEST_SUCCEED(tree, node != node);
}
}
template<class NodeT>
@@ -351,24 +369,24 @@ void test_fail_read_subject(Tree *tree, NodeT node, NodeT subject)
{
if(node.readable())
{
_TEST_SUCCEED(node.has_child(subject))
_TEST_SUCCEED(node.has_sibling(subject))
_TEST_SUCCEED(tree, node.has_child(subject))
_TEST_SUCCEED(tree, node.has_sibling(subject))
}
else
{
_TEST_FAIL(node.has_child(subject))
_TEST_FAIL(node.has_sibling(subject))
_TEST_FAIL(tree, node.has_child(subject))
_TEST_FAIL(tree, node.has_sibling(subject))
}
_TEST_FAIL(node.child_pos(subject))
_TEST_FAIL(node.sibling_pos(subject))
_TEST_FAIL(tree, node.child_pos(subject))
_TEST_FAIL(tree, node.sibling_pos(subject))
}
#undef _TEST_FAIL_READ
#undef _TEST_SUCCEED_READ
TEST(NodeRef, cannot_read_from_invalid)
{
SCOPED_TRACE("here");
NodeRef none;
SCOPED_TRACE("here");
ASSERT_EQ(none.tree(), nullptr);
ASSERT_EQ(none.id(), NONE);
EXPECT_TRUE(none.invalid());
@@ -724,10 +742,9 @@ formats:
const std::string expected = R"(formats:
rtt: json
)";
auto test_formats = [&](const char *id){
SCOPED_TRACE(id);
auto test_formats = [&](const char *desc){
NodeRef formats = root["formats"];
std::cout << id << " id=" << formats.id() << "\n";
RYML_TRACE_FMT("desc={} formats.id={} ", desc, formats.id());
EXPECT_TRUE(formats.readable());
print_tree(tree);
check_invariants(tree);
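Not part of the commit, but for context on the macro change earlier in this file's diff: the new _TEST_FAIL(tree, expr) / _TEST_SUCCEED(tree, expr) forms take the tree explicitly and evaluate the expression inside a lambda, so the error-checking helper controls when the expression runs and the result cannot be optimized away. A simplified, self-contained sketch of that shape (expect_error_sketch below is hypothetical, standing in for the test library's ExpectError::check_assertion):

#include <cassert>
#include <stdexcept>

// hypothetical stand-in for ExpectError::check_assertion(tree, fn):
// run fn and require that it reports an error
template<class Fn>
void expect_error_sketch(Fn &&fn)
{
    bool errored = false;
    try { fn(); } catch(std::exception const&) { errored = true; }
    assert(errored && "expression was expected to fail");
}

// like the new _TEST_FAIL(tree, expr): evaluate inside a lambda and sink
// the result (the real macro uses C4_DONT_OPTIMIZE and also forwards the
// tree so the checker can install its callbacks on it)
#define TEST_FAIL_SKETCH(expr) \
    expect_error_sketch([&]{ auto ret = (expr); (void)ret; })

int failing_call() { throw std::runtime_error("boom"); }

int main()
{
    TEST_FAIL_SKETCH(failing_call());
    return 0;
}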

View File

@@ -1,4 +1,10 @@
#include "./test_group.hpp"
#include "./test_lib/test_group.hpp"
#include "./test_lib/test_group.def.hpp"
#ifdef RYML_SINGLE_HEADER
#include <ryml_all.hpp>
#else
#include <c4/charconv.hpp>
#endif
namespace c4 {
namespace yml {
@@ -14,6 +20,7 @@ auto mkvals() -> typename std::enable_if<!std::is_signed<I>::value, std::vector<
return std::vector<I>({0, 1, 5, 10, std::numeric_limits<I>::max(),});
}
template<class I>
C4_NO_UBSAN_IOVRFLW
void test_ints()
{
C4_SUPPRESS_WARNING_GCC_WITH_PUSH("-Wuseless-cast")
@@ -117,37 +124,39 @@ TEST(number, idec)
}
CASE_GROUP(NUMBER)
{
ADD_CASE_TO_GROUP("integer numbers, flow", JSON_ALSO,
ADD_CASE_TO_GROUP("integer numbers, flow", JSON_WRITE,
R"(translation: [-2, -2, 5, 0xa, -0xb, 0XA, -0XA, 0b10, -0b10, 0B10, -0B10, 0o17, -0o17, 0O17, -0O17])",
L{N("translation", L{
N("-2"), N("-2"), N("5"),
N("0xa"), N("-0xb"),
N("0XA"), N("-0XA"),
N("0b10"), N("-0b10"),
N("0B10"), N("-0B10"),
N("0o17"), N("-0o17"),
N("0O17"), N("-0O17"),
})});
N(MB, L{
N(KP|SFS, "translation", L{
N(VP, "-2"), N(VP, "-2"), N(VP, "5"),
N(VP, "0xa"), N(VP, "-0xb"),
N(VP, "0XA"), N(VP, "-0XA"),
N(VP, "0b10"), N(VP, "-0b10"),
N(VP, "0B10"), N(VP, "-0B10"),
N(VP, "0o17"), N(VP, "-0o17"),
N(VP, "0O17"), N(VP, "-0O17"),
})
})
);
ADD_CASE_TO_GROUP("integer numbers, block", JSON_ALSO,
ADD_CASE_TO_GROUP("integer numbers, block", JSON_WRITE,
R"(translation:
- -2
- -2
- -5
)",
L{N("translation", L{N("-2"), N("-2"), N("-5")})}
N(MB, L{N(KP|SB, "translation", L{N(VP, "-2"), N(VP, "-2"), N(VP, "-5")})})
);
ADD_CASE_TO_GROUP("floating point numbers, flow", JSON_ALSO,
ADD_CASE_TO_GROUP("floating point numbers, flow", JSON_WRITE,
R"([-2.0, -2.1, 0.1, .1, -.2, -2.e+6, -3e-6, 1.12345e+011])",
L{N("-2.0"), N("-2.1"), N("0.1"), N(".1"), N("-.2"), N("-2.e+6"), N("-3e-6"), N("1.12345e+011")}
N(SFS, L{N(VP, "-2.0"), N(VP, "-2.1"), N(VP, "0.1"), N(VP, ".1"), N(VP, "-.2"), N(VP, "-2.e+6"), N(VP, "-3e-6"), N(VP, "1.12345e+011")})
);
ADD_CASE_TO_GROUP("floating point numbers, block", JSON_ALSO,
ADD_CASE_TO_GROUP("floating point numbers, block", JSON_WRITE,
R"(
- -2.0
- -2.1
@@ -158,10 +167,10 @@ R"(
- -3e-6
- 1.12345e+011
)",
L{N("-2.0"), N("-2.1"), N("0.1"), N(".1"), N("-.2"), N("-2.e+6"), N("-3e-6"), N("1.12345e+011")}
N(SB, L{N(VP, "-2.0"), N(VP, "-2.1"), N(VP, "0.1"), N(VP, ".1"), N(VP, "-.2"), N(VP, "-2.e+6"), N(VP, "-3e-6"), N(VP, "1.12345e+011")})
);
ADD_CASE_TO_GROUP("hex floating point numbers, block", JSON_ALSO,
ADD_CASE_TO_GROUP("hex floating point numbers, block", JSON_WRITE,
R"(
- -2.0
- -2.1
@@ -172,10 +181,10 @@ R"(
- -3e-6
- 1.12345e+011
)",
L{N("-2.0"), N("-2.1"), N("0.1"), N(".1"), N("-.2"), N("-2.e+6"), N("-3e-6"), N("1.12345e+011")}
N(SB, L{N(VP, "-2.0"), N(VP, "-2.1"), N(VP, "0.1"), N(VP, ".1"), N(VP, "-.2"), N(VP, "-2.e+6"), N(VP, "-3e-6"), N(VP, "1.12345e+011")})
);
ADD_CASE_TO_GROUP("version numbers", JSON_ALSO,
ADD_CASE_TO_GROUP("version numbers", JSON_WRITE,
R"(
- 1.2.3
- 1.2.3.4
@@ -196,22 +205,22 @@ R"(
- {a: 1.2.3, b: 4.5.6}
- {a: 1.2.3.4, b: 4.5.6.7}
)",
L{
N("1.2.3"),
N("1.2.3.4"),
N(L{N("1.2.3"), N("4.5.6")}),
N(L{N("1.2.3.4"), N("4.5.6.7")}),
N(L{N("1.2.3"), N("4.5.6")}),
N(L{N("1.2.3.4"), N("4.5.6.7")}),
N(L{N("a", "1.2.3")}),
N(L{N("a", "1.2.3.4")}),
N(L{N("a", "1.2.3")}),
N(L{N("a", "1.2.3.4")}),
N(L{N("a", "1.2.3"), N("b", "4.5.6")}),
N(L{N("a", "1.2.3.4"), N("b", "4.5.6.7")}),
N(L{N("a", "1.2.3"), N("b", "4.5.6")}),
N(L{N("a", "1.2.3.4"), N("b", "4.5.6.7")}),
});
N(SB, L{
N(VP, "1.2.3"),
N(VP, "1.2.3.4"),
N(SFS, L{N(VP, "1.2.3"), N(VP, "4.5.6")}),
N(SFS, L{N(VP, "1.2.3.4"), N(VP, "4.5.6.7")}),
N(SB, L{N(VP, "1.2.3"), N(VP, "4.5.6")}),
N(SB, L{N(VP, "1.2.3.4"), N(VP, "4.5.6.7")}),
N(MB, L{N(KP|VP, "a", "1.2.3")}),
N(MB, L{N(KP|VP, "a", "1.2.3.4")}),
N(MFS, L{N(KP|VP, "a", "1.2.3")}),
N(MFS, L{N(KP|VP, "a", "1.2.3.4")}),
N(MB, L{N(KP|VP, "a", "1.2.3"), N(KP|VP, "b", "4.5.6")}),
N(MB, L{N(KP|VP, "a", "1.2.3.4"), N(KP|VP, "b", "4.5.6.7")}),
N(MFS, L{N(KP|VP, "a", "1.2.3"), N(KP|VP, "b", "4.5.6")}),
N(MFS, L{N(KP|VP, "a", "1.2.3.4"), N(KP|VP, "b", "4.5.6.7")}),
}));
}
} // namespace yml
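Side note on the expected-tree notation change visible in this hunk (an inference from the diff, not an authoritative legend): the old builders left node type and style implicit, while the new ones spell them out -- MB/SB appear to denote block map/seq, MFS/SFS single-line flow map/seq, and KP/VP/KS/VS/VD/VF plain, single-quoted, double-quoted and folded key/val scalars. For example, taken directly from the lines above:

// before: type and style deduced by the test builder
L{N("translation", L{N("-2"), N("-2"), N("-5")})}
// after: container and scalar styles stated explicitly
N(MB, L{N(KP|SB, "translation", L{N(VP, "-2"), N(VP, "-2"), N(VP, "-5")})})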

6796
test/test_parse_engine.cpp Normal file

File diff suppressed because it is too large

File diff suppressed because it is too large

View File

@@ -1,815 +0,0 @@
#include "./test_group.hpp"
namespace c4 {
namespace yml {
TEST(plain_scalar, issue153_seq)
{
Tree t = parse_in_arena("- A\n \n");
EXPECT_EQ(t[0].val(), "A");
}
TEST(plain_scalar, issue153_map)
{
Tree t = parse_in_arena("foo: A\n \n");
EXPECT_EQ(t["foo"].val(), "A");
}
TEST(plain_scalar, test_suite_BS4K)
{
Tree t;
ExpectError::do_check(&t, [&]{
t = parse_in_arena(R"(word1 # comment
word2
word3
)");
});
ExpectError::do_check(&t, [&]{
t = parse_in_arena(R"(word1 # comment
word2
)");
});
}
TEST(plain_scalar, test_suite_7TMG)
{
csubstr yaml = R"(---
word1
# comment
---
# first value is NOT a multiline plain scalar
[ word1
# comment
, word2]
)";
test_check_emit_check(yaml, [](Tree const &t){
EXPECT_TRUE(t.rootref().is_stream());
ConstNodeRef doc = t.rootref().first_child();
ASSERT_TRUE(doc.is_doc());
ASSERT_TRUE(doc.is_val());
EXPECT_EQ(doc.val(), "word1");
doc = t.rootref().child(1);
ASSERT_TRUE(doc.is_doc());
ASSERT_TRUE(doc.is_seq());
EXPECT_EQ(doc[0].val(), "word1");
EXPECT_EQ(doc[1].val(), "word2");
});
}
TEST(plain_scalar, test_suite_82AN)
{
csubstr yaml = R"(
---word1
word2
)";
test_check_emit_check(yaml, [](Tree const &t){
ASSERT_TRUE(t.rootref().is_doc());
ASSERT_TRUE(t.rootref().is_val());
EXPECT_EQ(t.rootref().val(), csubstr("---word1 word2"));
});
}
TEST(plain_scalar, test_suite_EXG3)
{
csubstr yaml = R"(
---
---word1
word2
)";
test_check_emit_check(yaml, [](Tree const &t){
ASSERT_TRUE(t.rootref().is_stream());
ASSERT_TRUE(t.rootref().first_child().is_doc());
ASSERT_TRUE(t.rootref().first_child().is_val());
EXPECT_EQ(t.rootref().first_child().val(), csubstr("---word1 word2"));
});
}
TEST(plain_scalar, test_suite_9YRD)
{
csubstr yaml = R"(
a
b
c
d
e
)";
test_check_emit_check(yaml, [](Tree const &t){
ASSERT_TRUE(t.rootref().is_doc());
ASSERT_TRUE(t.rootref().is_val());
EXPECT_EQ(t.rootref().val(), csubstr("a b c d\ne"));
});
}
TEST(plain_scalar, test_suite_EX5H)
{
csubstr yaml = R"(
---
a
b
c
d
e
)";
test_check_emit_check(yaml, [](Tree const &t){
ASSERT_TRUE(t.rootref().is_stream());
ASSERT_TRUE(t.rootref().child(0).is_doc());
ASSERT_TRUE(t.rootref().child(0).is_val());
EXPECT_EQ(t.rootref().child(0).val(), csubstr("a b c d\ne"));
});
}
TEST(plain_scalar, test_suite_M7A3)
{
csubstr yaml = R"(
Bare
document
...
# No document
...
|
%!PS-Adobe-2.0 # Not the first line
)";
test_check_emit_check(yaml, [](Tree const &t){
ASSERT_TRUE(t.rootref().is_stream());
ASSERT_EQ(t.rootref().num_children(), 2u);
EXPECT_EQ(t.rootref().child(0).val(), csubstr("Bare document"));
EXPECT_EQ(t.rootref().child(1).val(), csubstr("%!PS-Adobe-2.0 # Not the first line\n"));
});
}
TEST(plain_scalar, test_suite_HS5T)
{
csubstr yaml = R"(
1st non-empty
2nd non-empty
3rd non-empty
)";
test_check_emit_check(yaml, [](Tree const &t){
ASSERT_TRUE(t.rootref().is_doc());
ASSERT_TRUE(t.rootref().is_val());
EXPECT_EQ(t.rootref().val(), csubstr("1st non-empty\n2nd non-empty 3rd non-empty"));
});
}
TEST(plain_scalar, test_suite_NB6Z)
{
csubstr yaml = R"(
key:
value
with
tabs
tabs
foo
bar
baz
key1:
value
with
tabs
tabs
foo
bar
baz
key2: something
else
key3: something
else
)";
test_check_emit_check(yaml, [](Tree const &t){
ASSERT_TRUE(t.rootref().is_map());
ASSERT_TRUE(t.rootref().has_child("key"));
ASSERT_TRUE(t.rootref().has_child("key1"));
ASSERT_TRUE(t.rootref().has_child("key2"));
ASSERT_TRUE(t.rootref().has_child("key3"));
EXPECT_EQ(t["key"].val(), csubstr("value with\ntabs tabs\nfoo\nbar baz"));
EXPECT_EQ(t["key1"].val(), csubstr("value with\ntabs tabs\nfoo\nbar baz"));
EXPECT_EQ(t["key2"].val(), csubstr("something else"));
EXPECT_EQ(t["key3"].val(), csubstr("something else"));
});
}
TEST(plain_scalar, test_suite_NB6Z_seq)
{
csubstr yaml = R"(
- value
with
tabs
tabs
foo
bar
baz
- value
with
tabs
tabs
foo
bar
baz
- more
value
)";
test_check_emit_check(yaml, [](Tree const &t){
ASSERT_TRUE(t.rootref().is_seq());
ASSERT_EQ(t.rootref().num_children(), 3u);
EXPECT_EQ(t[0].val(), csubstr("value with\ntabs tabs\nfoo\nbar baz"));
EXPECT_EQ(t[1].val(), csubstr("value with\ntabs tabs\nfoo\nbar baz"));
EXPECT_EQ(t[2].val(), csubstr("more value"));
});
}
TEST(plain_scalar, test_suite_NB6Z_docval)
{
csubstr yaml = R"(
value
with
tabs
tabs
foo
bar
baz
)";
test_check_emit_check(yaml, [](Tree const &t){
ASSERT_TRUE(t.rootref().is_doc());
ASSERT_TRUE(t.rootref().is_val());
EXPECT_EQ(t.rootref().val(), csubstr("value with\ntabs tabs\nfoo\nbar baz"));
});
}
//-----------------------------------------------------------------------------
//-----------------------------------------------------------------------------
//-----------------------------------------------------------------------------
CASE_GROUP(PLAIN_SCALAR)
{
//
ADD_CASE_TO_GROUP("plain scalar, 1 word only",
R"(a_single_word_scalar_to_test)",
N(DOCVAL, "a_single_word_scalar_to_test")
);
ADD_CASE_TO_GROUP("plain scalar, 1 line with spaces",
R"(a scalar with spaces in it all in one line)",
N(DOCVAL, "a scalar with spaces in it all in one line")
);
ADD_CASE_TO_GROUP("plain scalar, multiline",
R"(
a scalar with several lines in it
of course also with spaces but for now there are no quotes
and also no blank lines to speak of)",
N(DOCVAL, "a scalar with several lines in it of course also with spaces but for now there are no quotes and also no blank lines to speak of")
);
ADD_CASE_TO_GROUP("plain scalar, multiline, unindented",
R"(
a scalar with several lines in it
of course also with spaces but for now there are no quotes
and also no blank lines to speak of)",
N(DOCVAL, "a scalar with several lines in it of course also with spaces but for now there are no quotes and also no blank lines to speak of")
);
ADD_CASE_TO_GROUP("plain scalar, multiline, quotes, escapes",
R"(
a scalar with several lines in it and also 'single quotes'
and "double quotes" and assorted escapes such as \r or \n)",
N(DOCVAL, "a scalar with several lines in it and also 'single quotes' and \"double quotes\" and assorted escapes such as \\r or \\n")
);
ADD_CASE_TO_GROUP("plain scalar, multiline, quotes, escapes, blank lines middle",
R"(
A scalar with several lines in it and also 'single quotes'.
A blank line follows after this one.
And "double quotes" and assorted escapes such as \r or \n)",
N(DOCVAL, "A scalar with several lines in it and also 'single quotes'. A blank line follows after this one.\nAnd \"double quotes\" and assorted escapes such as \\r or \\n")
);
ADD_CASE_TO_GROUP("plain scalar, multiline, quotes, escapes, blank lines first",
R"(
A scalar with several lines in it and also 'single quotes'.
A blank line precedes this one.
And "double quotes" and assorted escapes such as \r or \n)",
N(DOCVAL, "A scalar with several lines in it and also 'single quotes'.\nA blank line precedes this one. And \"double quotes\" and assorted escapes such as \\r or \\n")
);
ADD_CASE_TO_GROUP("plain scalar, multiline, quotes, escapes, blank lines last",
R"(
A scalar with several lines in it and also 'single quotes'.
And "double quotes" and assorted escapes such as \r or \n.
A blank line follows after this one.
)",
N(DOCVAL, "A scalar with several lines in it and also 'single quotes'. And \"double quotes\" and assorted escapes such as \\r or \\n. A blank line follows after this one.")
);
ADD_CASE_TO_GROUP("plain scalar, example",
R"(
Several lines of text
with some "quotes" of various 'types'.
Escapes (like \n) don't do anything.
Newlines can be added by leaving a blank line.
Additional leading whitespace is ignored.)",
N(DOCVAL, "Several lines of text with some \"quotes\" of various 'types'. Escapes (like \\n) don't do anything.\nNewlines can be added by leaving a blank line. Additional leading whitespace is ignored.")
);
ADD_CASE_TO_GROUP("plain scalar, map example 1",
R"(
example: Several lines of text,
with some "quotes" of various 'types'.
Escapes (like \n) don't do anything.
Newlines can be added by leaving a blank line.
Additional leading whitespace is ignored.
another example: Several lines of text,
but the second line is empty, and _indented_.
There are more lines that follow.
yet another example: Several lines of text,
but the second line is empty, and _unindented_.
There are more lines that follow.
final example: Several lines of text,
but the second line is empty, and _unindented_.
There are more lines that follow. And the last line
terminates at the end of the file.)",
L{
N("example", "Several lines of text, with some \"quotes\" of various 'types'. "
"Escapes (like \\n) don't do anything.\n"
"Newlines can be added by leaving a blank line. "
"Additional leading whitespace is ignored."),
N("another example", "Several lines of text,\n"
"but the second line is empty, and _indented_. "
"There are more lines that follow."),
N("yet another example", "Several lines of text,\n"
"but the second line is empty, and _unindented_. "
"There are more lines that follow."),
N("final example", "Several lines of text,\n\n"
"but the second line is empty, and _unindented_. "
"There are more lines that follow. "
"And the last line terminates at the end of the file."),
}
);
/*
ADD_CASE_TO_GROUP("plain scalar, map example 2", IGNORE_LIBYAML_PARSE_FAIL|IGNORE_YAMLCPP_PARSE_FAIL,
R"(
example:
Several lines of text,
with some "quotes" of various 'types'.
Escapes (like \n) don't do anything.
Newlines can be added by leaving a blank line.
Additional leading whitespace is ignored.
)",
L{N("example", "Several lines of text, with some \"quotes\" of various 'types'. Escapes (like \\n) don't do anything.\nNewlines can be added by leaving a blank line. Additional leading whitespace is ignored.")}
);
*/
ADD_CASE_TO_GROUP("plain scalar, seq example 1",
R"(
- Several lines of text,
with some "quotes" of various 'types'.
Escapes (like \n) don't do anything.
Newlines can be added by leaving a blank line.
Additional leading whitespace is ignored.)",
L{N("Several lines of text, with some \"quotes\" of various 'types'. "
"Escapes (like \\n) don't do anything.\n"
"Newlines can be added by leaving a blank line. "
"Additional leading whitespace is ignored.")}
);
/*
ADD_CASE_TO_GROUP("plain scalar, seq example 2", IGNORE_LIBYAML_PARSE_FAIL|IGNORE_YAMLCPP_PARSE_FAIL,
R"(
-
Several lines of text,
with some "quotes" of various 'types'.
Escapes (like \n) don't do anything.
Newlines can be added by leaving a blank line.
Additional leading whitespace is ignored.
)",
L{N("Several lines of text, with some \"quotes\" of various 'types'. Escapes (like \\n) don't do anything.\nNewlines can be added by leaving a blank line. Additional leading whitespace is ignored.")}
);
*/
ADD_CASE_TO_GROUP("plain scalar, special characters 1",
R"(
- Several lines of text,
with special:characters, like:this-or-this -
- and some "quotes" of various 'types'.
How about empty lines?
Can we also have [] or {} inside?
Guess we can.
And how about at the beginning?
{ - for example }
[ - for example ]
- - for example
::- for example
and now two empty lines -
and now three empty lines -
and an empty line, unindented -
followed by more text
and another four at the end -
)",
L{N("Several lines of text, with special:characters, like:this-or-this - - and some \"quotes\" of various 'types'. "
"How about empty lines?\n"
"Can we also have [] or {} inside? Guess we can. "
"And how about at the beginning? { - for example } [ - for example ] - - for example ::- for example\n"
"and now two empty lines -\n\n"
"and now three empty lines -\n\n\n"
"and an empty line, unindented -\n"
"followed by more text "
"and another four at the end -"
)}
);
ADD_CASE_TO_GROUP("plain scalar, special characters 3MYT",
R"(--- # ZWK4
a: 1
? b
&anchor c: 3 # the anchor is for the scalar 'c'
? d
!!str e: 4
? f
---
k:#foo &a !t s
---
"k:#foo &a !t s"
---
'k:#foo &a !t s'
--- # 3MYT
k:#foo
&a !t s
---
k:#foo
&a !t s
---
k:#foo
&a !t s
---
k:#foo
&a !t s
--- # 3MYT
k:#foo
!t s
---
k:#foo
!t s
---
k:#foo
!t s
---
k:#foo
!t s
)",
N(STREAM, L{
N(DOCMAP, L{
N("a", "1"),
N(KEYVAL, "b", {}),
N("c", AR(KEYANCH, "anchor"), "3"),
N(KEYVAL, "d", {}),
N(TS("!!str", "e"), "4"),
N(KEYVAL, "f", {}),
}),
N(DOCVAL, "k:#foo &a !t s"),
N(DOCVAL|VALQUO, "k:#foo &a !t s"),
N(DOCVAL|VALQUO, "k:#foo &a !t s"),
N(DOCVAL, "k:#foo &a !t s"),
N(DOCVAL, "k:#foo &a !t s"),
N(DOCVAL, "k:#foo &a !t s"),
N(DOCVAL, "k:#foo &a !t s"),
N(DOCVAL, "k:#foo !t s"),
N(DOCVAL, "k:#foo !t s"),
N(DOCVAL, "k:#foo !t s"),
N(DOCVAL, "k:#foo !t s"),
})
);
// make sure there is no ambiguity with this case
ADD_CASE_TO_GROUP("plain scalar, sequence ambiguity",
R"(
- - some text
- and this is a sequence
- some text
- and this is /not/ a sequence
- - some text
- and this is a sequence
- some text
- and this is /not/ a sequence
)",
L{
N(L{N("some text"), N("and this is a sequence")}),
N("some text - and this is /not/ a sequence"),
N(L{N("some text"), N("and this is a sequence")}),
N("some text - and this is /not/ a sequence"),
}
);
ADD_CASE_TO_GROUP("plain scalar, empty lines at the beginning",
R"(
-
Several lines of text,
with special:characters, like:this-or-this -
- and some "quotes" of various 'types'.
-
Several lines of text,
with special:characters, like:this-or-this -
- and some "quotes" of various 'types'.
-
Several lines of text,
with special:characters, like:this-or-this -
- and some "quotes" of various 'types'.
)",
L{
N("Several lines of text, with special:characters, like:this-or-this - - and some \"quotes\" of various 'types'."),
N("Several lines of text, with special:characters, like:this-or-this - - and some \"quotes\" of various 'types'."),
N("Several lines of text, with special:characters, like:this-or-this - - and some \"quotes\" of various 'types'."),
}
);
ADD_CASE_TO_GROUP("plain scalar, empty continuation lines",
R"(
- the next lines have 2cols, 0cols, 2cols,
and this line has some text in it. -> 0
now 0, 0, 2, 2, 0, 1, 1, 0, 4, 4, 0, 0
and finally some more text
)",
L{
N("the next lines have 2cols, 0cols, 2cols,"
"\n\n\n"
"and this line has some text in it. -> 0"
"\n"
"now 0, 0, 2, 2, 0, 1, 1, 0, 4, 4, 0, 0"
"\n\n\n\n\n\n\n\n\n\n\n\n"
"and finally some more text"),
}
);
ADD_CASE_TO_GROUP("plain scalar, indented first line",
R"(
- Several lines of text,
with special:characters, like:this-or-this -
- and some "quotes" of various 'types'.
-
Several lines of text,
with special:characters, like:this-or-this -
- and some "quotes" of various 'types'.
-
Several lines of text,
with special:characters, like:this-or-this -
- and some "quotes" of various 'types'.
)",
L{
N("Several lines of text,\nwith special:characters, like:this-or-this - - and some \"quotes\" of various 'types'."),
N("Several lines of text, with special:characters, like:this-or-this - - and some \"quotes\" of various 'types'."),
N("Several lines of text, with special:characters, like:this-or-this - - and some \"quotes\" of various 'types'."),
}
);
ADD_CASE_TO_GROUP("plain scalar, do not accept ': ' mid line", EXPECT_PARSE_ERROR,
R"(- Several lines of text,
with special:characters, like:this-or-this -
- and some "quotes" of various 'types'.
But this: must cause a parse error.
)",
LineCol(4, 11)
);
ADD_CASE_TO_GROUP("plain scalar, do not accept ': ' start line", EXPECT_PARSE_ERROR,
R"(
- Several lines of text,
with special:characters, like:this-or-this -
- and some "quotes" of various 'types'.
But this must cause a parse error -
: foo bar
)",
LineCol(6, 3)
);
ADD_CASE_TO_GROUP("plain scalar, do not accept ': ' at line end", EXPECT_PARSE_ERROR,
R"(- Several lines of text,
with special:characters, like:this-or-this -
- and some "quotes" of various 'types'.
But this must cause a parse error:
)",
LineCol(4, 36)
);
ADD_CASE_TO_GROUP("plain scalar, do not accept ':' at line end", EXPECT_PARSE_ERROR,
R"(- Several lines of text,
with special:characters, like:this-or-this -
- and some "quotes" of various 'types'.
But this must cause a parse error:
- well, did it?
)",
LineCol(4, 36)
);
ADD_CASE_TO_GROUP("plain scalar, accept ' #' at line start",
R"(- Several lines of text,
and this is valid -
#with special:characters, like:this-or-this -
)",
L{N("Several lines of text, and this is valid -"),}
);
ADD_CASE_TO_GROUP("plain scalar, accept ' #' on first line",
R"(- Several lines of text, and this is valid -
#with special:characters, like:this-or-this -
)",
L{N("Several lines of text, and this is valid -")}
);
ADD_CASE_TO_GROUP("plain scalar, accept ' #' at line end",
R"(- Several lines of text,
with special:characters, #comment at the end
)",
L{N("Several lines of text, with special:characters,")}
);
ADD_CASE_TO_GROUP("plain scalar, accept '#'",
R"(
- Several lines of text, # with a comment
- Several lines of text,
with special#characters, like#this_#_-or-#-:this -
- and some "quotes" of various 'types'.
)",
L{
N("Several lines of text,"),
N("Several lines of text, "
"with special#characters, like#this_#_-or-#-:this - "
"- and some \"quotes\" of various 'types'."),
}
);
ADD_CASE_TO_GROUP("plain scalar, explicit",
R"(
[
a plain scalar
with several lines
and blank lines
as well
,
and another plain scalar
,
and yet another one
with many lines
and yet more, deindented
]
)",
L{
N("a plain scalar with several lines\nand blank lines\nas well"),
N("and another plain scalar"),
N("and yet another one\n\n\nwith many lines\nand yet more"),
N("deindented"),
}
);
ADD_CASE_TO_GROUP("plain scalar, explicit, early end, seq", EXPECT_PARSE_ERROR,
R"([
a plain scalar
with several lines
)",
LineCol(4, 1)
);
ADD_CASE_TO_GROUP("plain scalar, explicit, early end, map", EXPECT_PARSE_ERROR,
R"({foo:
a plain scalar
with several lines
)",
LineCol(4, 1)
);
ADD_CASE_TO_GROUP("plain scalar, multiple docs",
R"(---
- a plain scalar
with several lines
---
- a second plain scalar
with several lines
)",
N(STREAM, L{
N(DOCSEQ, L{N("a plain scalar with several lines")}),
N(DOCSEQ, L{N("a second plain scalar with several lines")}),
})
);
ADD_CASE_TO_GROUP("plain scalar, multiple docs, termination",
R"(---
- a plain scalar
with several lines
...
---
- a second plain scalar
with several lines
)",
N(STREAM, L{
N(DOCSEQ, L{N("a plain scalar with several lines")}),
N(DOCSEQ, L{N("a second plain scalar with several lines")}),
})
);
ADD_CASE_TO_GROUP("plain scalar, trailing whitespace",
R"(---
foo
---
foo
---
foo
)",
N(STREAM, L{
N(DOCVAL, "foo"),
N(DOCVAL, "foo"),
N(DOCVAL, "foo"),
})
);
}
} // namespace yml
} // namespace c4

View File

@@ -2,7 +2,7 @@
#include <c4/yml/std/string.hpp>
#include <c4/yml/preprocess.hpp>
#endif
#include "./test_case.hpp"
#include "./test_lib/test_case.hpp"
#include <gtest/gtest.h>
namespace c4 {

1027
test/test_scalar_dquoted.cpp Normal file

File diff suppressed because it is too large

View File

@@ -1,4 +1,5 @@
#include "./test_group.hpp"
#include "./test_lib/test_group.hpp"
#include "./test_lib/test_group.def.hpp"
#include <string>
namespace c4 {
@@ -10,8 +11,6 @@ namespace yml {
C4_SUPPRESS_WARNING_GCC_WITH_PUSH("-Wuseless-cast")
constexpr const NodeType_e DQV = (NodeType_e)(DOC | QV);
TEST(empty_scalar, parse_zero_length_strings)
{
char inp[] = R"(
@@ -47,37 +46,43 @@ map:
}
}
TEST(empty_scalar, flow_seq)
void test_empty_squo(ConstNodeRef ch)
{
SCOPED_TRACE(ch.id());
EXPECT_NE((ch.type() & VALQUO), 0u);
EXPECT_NE((ch.type() & VAL_SQUO), 0u);
EXPECT_TRUE((ch.type() & VAL_SQUO) == VAL_SQUO);
EXPECT_TRUE(ch.tree()->is_val_quoted(ch.id()));
EXPECT_TRUE(ch.is_val_quoted());
EXPECT_FALSE(ch.val_is_null());
EXPECT_EQ(ch.val().len, 0);
EXPECT_NE(ch.val().str, nullptr);
EXPECT_NE(ch.val(), nullptr);
}
TEST(empty_scalar, flow_seq0)
{
test_check_emit_check("['', '']", [&](Tree const &t){
ASSERT_TRUE(t.rootref().has_children());
EXPECT_TRUE(t.rootref().has_children());
EXPECT_EQ(t.rootref().num_children(), 2);
for(ConstNodeRef ch : t.rootref().children())
{
EXPECT_TRUE(ch.is_val_quoted());
EXPECT_FALSE(ch.val_is_null());
EXPECT_EQ(ch.val().len, 0);
EXPECT_NE(ch.val().str, nullptr);
EXPECT_NE(ch.val(), nullptr);
}
test_empty_squo(ch);
});
test_check_emit_check("[ , ]", [&](Tree const &t){
ASSERT_TRUE(t.rootref().has_children());
}
TEST(empty_scalar, flow_seq1)
{
test_check_emit_check("['', ]", [&](Tree const &t){
EXPECT_TRUE(t.rootref().has_children());
EXPECT_EQ(t.rootref().num_children(), 1);
for(ConstNodeRef ch : t.rootref().children())
{
EXPECT_FALSE(ch.is_val_quoted());
EXPECT_TRUE(ch.val_is_null());
EXPECT_EQ(ch.val().len, 0);
EXPECT_EQ(ch.val().str, nullptr);
EXPECT_EQ(ch.val(), nullptr);
}
test_empty_squo(ch);
});
}
TEST(empty_scalar, parse_empty_strings)
{
// use multiple empty entries to ensure the parser
// correctly deals with the several cases
char inp[] = R"(
# use multiple empty entries to ensure the parser
# correctly deals with the several cases
seq:
-
-
@@ -90,18 +95,25 @@ map:
d:
)";
const Tree tr = parse_in_place(inp);
#ifdef RYML_DBG
print_tree(tr);
#endif
for(const char *name : {"seq", "map"})
{
SCOPED_TRACE(name);
ConstNodeRef node = tr[to_csubstr(name)];
ASSERT_EQ(node.num_children(), 4);
size_t pos = 0;
for(const auto &child : node.children())
{
SCOPED_TRACE(pos);
EXPECT_FALSE(child.type().is_val_quoted());
EXPECT_EQ(child.val(), "");
EXPECT_EQ(child.val(), nullptr);
EXPECT_EQ(child.val().str, nullptr);
EXPECT_EQ(child.val().len, 0u);
EXPECT_TRUE(child.val_is_null());
++pos;
}
}
}
@@ -123,13 +135,7 @@ TEST(empty_scalar, std_string)
tree["eq"]["stdstr"] = stdss;
tree["eq"]["nullss"] = nullss;
EXPECT_EQ(emitrs_yaml<std::string>(tree),
"ser:\n"
" stdstr: ''\n"
" nullss: \n"
"eq:\n"
" stdstr: ''\n"
" nullss: \n"
);
"{ser: {stdstr: '',nullss: },eq: {stdstr: '',nullss: }}");
}
TEST(empty_scalar, to_arena)
@@ -184,14 +190,18 @@ TEST(empty_scalar, gcc_error)
csubstr nullstr = {};
ASSERT_EQ(nullstr.str, nullptr);
ASSERT_EQ(nullstr.len, 0);
std::cout << "\nserializing with empty arena...\n";
csubstr result = tr.to_arena(nullstr);
EXPECT_EQ(result.str, nullptr); // fails!
EXPECT_EQ(result.len, 0);
std::cout << "\nserializing with nonempty arena...\n";
result = tr.to_arena(nullstr);
EXPECT_EQ(result.str, nullptr); // fails!
EXPECT_EQ(result.len, 0);
{
SCOPED_TRACE("serializing with empty arena");
csubstr result = tr.to_arena(nullstr);
EXPECT_EQ(result.str, nullptr); // fails!
EXPECT_EQ(result.len, 0);
}
{
SCOPED_TRACE("serializing with nonempty arena");
csubstr result = tr.to_arena(nullstr);
EXPECT_EQ(result.str, nullptr); // fails!
EXPECT_EQ(result.len, 0);
}
}
TEST(empty_scalar, build_zero_length_string)
@@ -219,20 +229,29 @@ TEST(empty_scalar, build_zero_length_string)
// regarded
{
NodeRef quoted = addseq("quoted");
{NodeRef r = quoted.append_child(); r = "" ; r.set_type(r.type() | VALQUO);}
{NodeRef r = quoted.append_child(); r << "" ; r.set_type(r.type() | VALQUO);}
{NodeRef r = quoted.append_child(); r = empty ; r.set_type(r.type() | VALQUO);}
{NodeRef r = quoted.append_child(); r << empty ; r.set_type(r.type() | VALQUO);}
{NodeRef r = quoted.append_child(); r = stdss ; r.set_type(r.type() | VALQUO);}
{NodeRef r = quoted.append_child(); r << stdss ; r.set_type(r.type() | VALQUO);}
NodeRef quoted = addseq("s-quoted");
{NodeRef r = quoted.append_child(); r = "" ; r.set_type(r.type() | VAL_SQUO);}
{NodeRef r = quoted.append_child(); r << "" ; r.set_type(r.type() | VAL_SQUO);}
{NodeRef r = quoted.append_child(); r = empty ; r.set_type(r.type() | VAL_SQUO);}
{NodeRef r = quoted.append_child(); r << empty ; r.set_type(r.type() | VAL_SQUO);}
{NodeRef r = quoted.append_child(); r = stdss ; r.set_type(r.type() | VAL_SQUO);}
{NodeRef r = quoted.append_child(); r << stdss ; r.set_type(r.type() | VAL_SQUO);}
}
{
NodeRef quoted = addseq("d-quoted");
{NodeRef r = quoted.append_child(); r = "" ; r.set_type(r.type() | VAL_DQUO);}
{NodeRef r = quoted.append_child(); r << "" ; r.set_type(r.type() | VAL_DQUO);}
{NodeRef r = quoted.append_child(); r = empty ; r.set_type(r.type() | VAL_DQUO);}
{NodeRef r = quoted.append_child(); r << empty ; r.set_type(r.type() | VAL_DQUO);}
{NodeRef r = quoted.append_child(); r = stdss ; r.set_type(r.type() | VAL_DQUO);}
{NodeRef r = quoted.append_child(); r << stdss ; r.set_type(r.type() | VAL_DQUO);}
}
{
NodeRef quoted_null = addseq("quoted_null");
{NodeRef r = quoted_null.append_child(); r = nullss ; r.set_type(r.type() | VALQUO);}
{NodeRef r = quoted_null.append_child(); r << nullss ; r.set_type(r.type() | VALQUO);}
{NodeRef r = quoted_null.append_child(); r = nullptr ; r.set_type(r.type() | VALQUO);}
{NodeRef r = quoted_null.append_child(); r << nullptr; r.set_type(r.type() | VALQUO);}
{NodeRef r = quoted_null.append_child(); r = nullss ; r.set_type(r.type() | VAL_SQUO);}
{NodeRef r = quoted_null.append_child(); r << nullss ; r.set_type(r.type() | VAL_SQUO);}
{NodeRef r = quoted_null.append_child(); r = nullptr ; r.set_type(r.type() | VAL_SQUO);}
{NodeRef r = quoted_null.append_child(); r << nullptr; r.set_type(r.type() | VAL_SQUO);}
}
{
NodeRef non_quoted = addseq("nonquoted");
@@ -260,12 +279,13 @@ TEST(empty_scalar, build_zero_length_string)
size_t pos = 0;
for(ConstNodeRef child : node.cchildren())
{
EXPECT_TRUE(child.is_val_quoted()) << "pos=" << pos;
EXPECT_EQ(child.val().len, 0u) << "pos=" << pos;
EXPECT_NE(child.val().str, nullptr) << "pos=" << pos;
EXPECT_NE(child.val(), nullptr) << "pos=" << pos;
EXPECT_EQ(child.val(), "") << "pos=" << pos;
EXPECT_FALSE(child.val_is_null()) << "pos=" << pos;
SCOPED_TRACE(pos);
EXPECT_TRUE(child.is_val_quoted());
EXPECT_EQ(child.val().len, 0u);
EXPECT_NE(child.val().str, nullptr);
EXPECT_NE(child.val(), nullptr);
EXPECT_EQ(child.val(), "");
EXPECT_FALSE(child.val_is_null());
pos++;
}
}
@@ -276,12 +296,13 @@ TEST(empty_scalar, build_zero_length_string)
size_t pos = 0;
for(ConstNodeRef child : node.cchildren())
{
EXPECT_TRUE(child.is_val_quoted()) << "pos=" << pos;
EXPECT_FALSE(child.val_is_null()) << "pos=" << pos; // because it's quoted
EXPECT_EQ(child.val().len, 0u) << "pos=" << pos;
EXPECT_EQ(child.val().str, nullptr) << "pos=" << pos;
EXPECT_EQ(child.val(), nullptr) << "pos=" << pos;
EXPECT_EQ(child.val(), "") << "pos=" << pos;
SCOPED_TRACE(pos);
EXPECT_TRUE(child.is_val_quoted());
EXPECT_FALSE(child.val_is_null()); // because it's quoted
EXPECT_EQ(child.val().len, 0u);
EXPECT_EQ(child.val().str, nullptr);
EXPECT_EQ(child.val(), nullptr);
EXPECT_EQ(child.val(), "");
pos++;
}
};
@@ -293,12 +314,13 @@ TEST(empty_scalar, build_zero_length_string)
size_t pos = 0;
for(ConstNodeRef child : node.cchildren())
{
EXPECT_TRUE(child.is_val()) << "pos=" << pos;
EXPECT_FALSE(child.val_is_null()) << "pos=" << pos; // because it's quoted
EXPECT_EQ(child.val(), "") << "pos=" << pos;
EXPECT_NE(child.val(), nullptr) << "pos=" << pos;
EXPECT_EQ(child.val().len, 0u) << "pos=" << pos;
EXPECT_NE(child.val().str, nullptr) << "pos=" << pos;
SCOPED_TRACE(pos);
EXPECT_TRUE(child.is_val());
EXPECT_FALSE(child.val_is_null()); // because it's quoted
EXPECT_EQ(child.val(), "");
EXPECT_NE(child.val(), nullptr);
EXPECT_EQ(child.val().len, 0u);
EXPECT_NE(child.val().str, nullptr);
++pos;
}
};
@@ -308,30 +330,37 @@ TEST(empty_scalar, build_zero_length_string)
size_t pos = 0;
for(ConstNodeRef child : node.cchildren())
{
EXPECT_TRUE(child.is_val()) << "pos=" << pos;
EXPECT_EQ(child.val(), "") << "pos=" << pos;
EXPECT_EQ(child.val(), nullptr) << "pos=" << pos;
EXPECT_EQ(child.val().len, 0u) << "pos=" << pos;
EXPECT_EQ(child.val().str, nullptr) << "pos=" << pos;
EXPECT_TRUE(child.val_is_null()) << "pos=" << pos;
SCOPED_TRACE(pos);
EXPECT_TRUE(child.is_val());
EXPECT_EQ(child.val(), "");
EXPECT_EQ(child.val(), nullptr);
EXPECT_EQ(child.val().len, 0u);
EXPECT_EQ(child.val().str, nullptr);
EXPECT_TRUE(child.val_is_null());
++pos;
}
};
std::string yaml = emitrs_yaml<std::string>(tr);
#ifdef RYML_DBG
print_tree(tr);
printf("~~~~~\n%.*s~~~~\n", (int)yaml.size(), yaml.c_str());
#endif
{
SCOPED_TRACE("input tree");
test_quoted_empty(tr["quoted"]);
test_quoted_empty(tr["s-quoted"]);
test_quoted_empty(tr["d-quoted"]);
// in the built tree, the values will be quoted and null
test_quoted_null(tr["quoted_null"]);
test_non_quoted_empty(tr["nonquoted"]);
test_non_quoted_null(tr["nonquoted_null"]);
}
std::string yaml = emitrs_yaml<std::string>(tr);
std::cout << yaml;
test_check_emit_check(to_csubstr(yaml), [&](Tree const &t){
SCOPED_TRACE("output tree");
test_quoted_empty(t["quoted"]);
test_quoted_empty(t["s-quoted"]);
test_quoted_empty(t["d-quoted"]);
// after a roundtrip, they will be nonnull, because the quotes win.
test_quoted_empty(t["quoted_null"]);
test_non_quoted_empty(t["nonquoted"]);
@@ -343,7 +372,7 @@ CASE_GROUP(EMPTY_SCALAR)
{
ADD_CASE_TO_GROUP("empty scalar, single quoted",
"''",
N(DQV, "")
N(VS, "")
);
}
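A standalone sketch (not part of the commit) of the empty-vs-null distinction that the tests above keep asserting: a quoted empty scalar is present -- zero length, non-null pointer, not null -- whereas an omitted value is null. Assuming the usual ryml setup:

#include <ryml_std.hpp>
#include <ryml.hpp>
#include <cassert>

int main()
{
    // illustrative only; behaviour as asserted by the tests above
    ryml::Tree t = ryml::parse_in_arena("{empty: '', missing: }");
    // quoted empty scalar: present, zero-length, not null
    assert(t["empty"].is_val_quoted());
    assert(!t["empty"].val_is_null());
    assert(t["empty"].val().len == 0 && t["empty"].val().str != nullptr);
    // omitted value: null
    assert(t["missing"].val_is_null());
    assert(t["missing"].val().len == 0);
    return 0;
}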

2635
test/test_scalar_folded.cpp Normal file

File diff suppressed because it is too large

2380
test/test_scalar_literal.cpp Normal file

File diff suppressed because it is too large

View File

@@ -1,4 +1,5 @@
#include "./test_group.hpp"
#include "./test_lib/test_group.hpp"
#include "./test_lib/test_group.def.hpp"
#if defined(_MSC_VER)
# pragma warning(push)
@@ -14,13 +15,13 @@
namespace c4 {
namespace yml {
#define _(name) N(#name) // makes it simpler
#define __(name) N(#name, #name) // makes it simpler
#define _(name) N(VP, #name) // makes it simpler
#define __(name) N(KP|VP, #name, #name) // makes it simpler
CASE_GROUP(SCALAR_NAMES)
{
ADD_CASE_TO_GROUP("funny names, seq",
ADD_CASE_TO_GROUP("funny names, seq block",
R"(
- a
- b:b
@@ -46,13 +47,14 @@ R"(
- >-
*a
)",
L{_(a), _(b:b), _(c{c), _(cc{), _(c}c), _(cc}), _(c!c), _(cc!), _(.foo), _(.), _(-a), _(+b), _(/b), _(:c), _($g),
N(QV, "*"), N(QV, "*"), N(QV, "*"), N(QV, "*a"), N(QV, "*a"), N(QV, "*a")}
N(SB, L{_(a), _(b:b), _(c{c), _(cc{), _(c}c), _(cc}), _(c!c), _(cc!), _(.foo), _(.), _(-a), _(+b), _(/b), _(:c), _($g),
N(VD, "*"), N(VS, "*"), N(VF, "*"),
N(VD, "*a"), N(VS, "*a"), N(VF, "*a")})
);
ADD_CASE_TO_GROUP("funny names, seq expl",
ADD_CASE_TO_GROUP("funny names, seq flow",
R"([a, b, c, .foo, ., -a, +b, /b, :c, $g])",
L{_(a), _(b), _(c), _(.foo), _(.), _(-a), _(+b), _(/b), _(:c), _($g)}
N(SFS, L{_(a), _(b), _(c), _(.foo), _(.), _(-a), _(+b), _(/b), _(:c), _($g)})
);
ADD_CASE_TO_GROUP("funny names, map",
@@ -70,15 +72,15 @@ $g: $g
'*': '*'
'*a': '*a'
)",
L{__(a), __(b), __(c), __(.foo), __(.), __(-a), __(+b), __(/b), __(:c), __($g),
N(QKV, "*", "*"), N(QKV, "*a", "*a")}
N(MB, L{__(a), __(b), __(c), __(.foo), __(.), __(-a), __(+b), __(/b), __(:c), __($g),
N(KS|VS, "*", "*"), N(KS|VS, "*a", "*a")})
);
ADD_CASE_TO_GROUP("funny names, map expl",
R"({a: a, b: b, c: c, .foo: .foo, .: ., -a: -a, +b: +b, /b: /b, :c: :c, $g: $g,
'*': '*', '*a':'*a'})",
L{__(a), __(b), __(c), __(.foo), __(.), __(-a), __(+b), __(/b), __(:c), __($g),
N(QKV, "*", "*"), N(QKV, "*a", "*a")}
N(MFS, L{__(a), __(b), __(c), __(.foo), __(.), __(-a), __(+b), __(/b), __(:c), __($g),
N(KS|VS, "*", "*"), N(KS|VS, "*a", "*a")})
);
}

View File

@@ -1,4 +1,5 @@
#include "./test_group.hpp"
#include "./test_lib/test_group.hpp"
#include "./test_lib/test_group.def.hpp"
#include "c4/error.hpp"
namespace c4 {
@@ -28,26 +29,6 @@ csubstr getafter(csubstr yaml, csubstr pattern)
} while(0)
TEST(null_val, simple)
{
Tree tree = parse_in_arena("{foo: , bar: '', baz: [,,,], bat: [ , , , ], two: [,,], one: [,], empty: []}");
_check_null_pointing_at(tree["foo"], val, " ,", tree.arena());
ASSERT_EQ(tree["baz"].num_children(), 3u);
_check_null_pointing_at(tree["baz"][0], val, "[,,,]", tree.arena());
_check_null_pointing_at(tree["baz"][1], val, ",,,]", tree.arena());
_check_null_pointing_at(tree["baz"][2], val, ",,]", tree.arena());
ASSERT_EQ(tree["bat"].num_children(), 3u);
_check_null_pointing_at(tree["bat"][0], val, " , , , ]", tree.arena());
_check_null_pointing_at(tree["bat"][1], val, " , , ]", tree.arena());
_check_null_pointing_at(tree["bat"][2], val, " , ]", tree.arena());
ASSERT_EQ(tree["two"].num_children(), 2u);
_check_null_pointing_at(tree["two"][0], val, "[,,]", tree.arena());
_check_null_pointing_at(tree["two"][1], val, ",,]", tree.arena());
ASSERT_EQ(tree["one"].num_children(), 1u);
_check_null_pointing_at(tree["one"][0], val, "[,]", tree.arena());
ASSERT_EQ(tree["empty"].num_children(), 0u);
}
TEST(null_val, block_seq)
{
csubstr yaml = R"(
@@ -252,8 +233,7 @@ TEST(null_val, issue103)
TEST(null_val, null_key)
{
auto tree = parse_in_arena(R"({null: null})");
Tree tree = parse_in_arena(R"({null: null})");
ASSERT_EQ(tree.size(), 2u);
_check_null_pointing_at(tree[0], key, "null: ", tree.arena());
_check_null_pointing_at(tree[0], val, "null}", tree.arena());
@@ -278,8 +258,7 @@ map:
# a comment
val4:
)";
Parser p;
Tree t = p.parse_in_arena("file.yml", yaml);
Tree t = parse_in_arena("file.yml", yaml);
// as expected: (len is null, str is pointing at the value where the node starts)
EXPECT_EQ(t["seq"][0].val(), "~");
EXPECT_EQ(t["seq"][1].val(), "null");
@@ -321,25 +300,26 @@ R"(
- ~: null
- null: ~
)",
L{
N(VAL, nullptr),
N(VAL, nullptr),
N(VAL, "null"),
N(VAL, "Null"),
N(VAL, "NULL"),
N(VAL, "~"),
N(MAP, L{N(KEYVAL, "null", "null")}),
N(MAP, L{N(KEYVAL, "Null", "Null")}),
N(MAP, L{N(KEYVAL, "NULL", "NULL")}),
N(MAP, L{N(KEYVAL, "~", "~")}),
N(MAP, L{N(KEYVAL, "~", "null")}),
N(MAP, L{N(KEYVAL, "null", "~")}),
});
N(SB, L{
N(VP, nullptr),
N(VP, nullptr),
N(VP, "null"),
N(VP, "Null"),
N(VP, "NULL"),
N(VP, "~"),
N(MB, L{N(KP|VP, "null", "null")}),
N(MB, L{N(KP|VP, "Null", "Null")}),
N(MB, L{N(KP|VP, "NULL", "NULL")}),
N(MB, L{N(KP|VP, "~", "~")}),
N(MB, L{N(KP|VP, "~", "null")}),
N(MB, L{N(KP|VP, "null", "~")}),
})
);
ADD_CASE_TO_GROUP("null map vals, expl",
R"({foo: , bar: , baz: }
)",
L{N(KEYVAL, "foo", nullptr), N(KEYVAL, "bar", nullptr), N(KEYVAL, "baz", nullptr)}
N(MFS, L{N(KP|VP, "foo", nullptr), N(KP|VP, "bar", nullptr), N(KP|VP, "baz", nullptr)})
);
ADD_CASE_TO_GROUP("null map vals, impl",
@@ -348,7 +328,7 @@ foo:
bar:
baz:
)",
L{N(KEYVAL, "foo", nullptr), N(KEYVAL, "bar", nullptr), N(KEYVAL, "baz", nullptr)}
N(MB, L{N(KP|VP, "foo", nullptr), N(KP|VP, "bar", nullptr), N(KP|VP, "baz", nullptr)})
);
ADD_CASE_TO_GROUP("null seq vals, impl",
@@ -356,7 +336,7 @@ R"(-
-
-
)",
L{N(VAL, nullptr), N(VAL, nullptr), N(VAL, nullptr)}
N(SB, L{N(VP, nullptr), N(VP, nullptr), N(VP, nullptr)})
);
ADD_CASE_TO_GROUP("null seq vals in map, impl, mixed 1",
@@ -368,7 +348,7 @@ foo:
bar:
baz:
)",
L{N("foo", L{N(VAL, nullptr), N(VAL, nullptr), N(VAL, nullptr)}), N(KEYVAL, "bar", nullptr), N(KEYVAL, "baz", nullptr)}
N(MB, L{N(KP|SB, "foo", L{N(VP, nullptr), N(VP, nullptr), N(VP, nullptr)}), N(KP|VP, "bar", nullptr), N(KP|VP, "baz", nullptr)})
);
ADD_CASE_TO_GROUP("null seq vals in map, impl, mixed 2",
@@ -380,7 +360,7 @@ bar:
-
baz:
)",
L{N(KEYVAL, "foo", nullptr), N("bar", L{N(VAL, nullptr), N(VAL, nullptr), N(VAL, nullptr)}), N(KEYVAL, "baz", nullptr)}
N(MB, L{N(KP|VP, "foo", nullptr), N(KP|SB, "bar", L{N(VP, nullptr), N(VP, nullptr), N(VP, nullptr)}), N(KP|VP, "baz", nullptr)})
);
ADD_CASE_TO_GROUP("null seq vals in map, impl, mixed 3",
@@ -392,7 +372,7 @@ baz:
-
-
)",
L{N(KEYVAL, "foo", nullptr), N(KEYVAL, "bar", nullptr), N("baz", L{N(VAL, nullptr), N(VAL, nullptr), N(VAL, nullptr)})}
N(MB, L{N(KP|VP, "foo", nullptr), N(KP|VP, "bar", nullptr), N(KP|SB, "baz", L{N(VP, nullptr), N(VP, nullptr), N(VP, nullptr)})})
);
ADD_CASE_TO_GROUP("null map vals in seq, impl, mixed 1",
@@ -403,7 +383,15 @@ R"(
-
-
)",
L{N(L{N(KEYVAL, "foo", nullptr), N(KEYVAL, "bar", nullptr), N(KEYVAL, "baz", nullptr)}), N(VAL, nullptr), N(VAL, nullptr)}
N(SB, L{
N(MB, L{
N(KP|VP, "foo", nullptr),
N(KP|VP, "bar", nullptr),
N(KP|VP, "baz", nullptr)
}),
N(VP, nullptr),
N(VP, nullptr)
})
);
ADD_CASE_TO_GROUP("null map vals in seq, impl, mixed 2",
@@ -414,7 +402,15 @@ R"(
baz:
-
)",
L{N(VAL, nullptr), N(L{N(KEYVAL, "foo", nullptr), N(KEYVAL, "bar", nullptr), N(KEYVAL, "baz", nullptr)}), N(VAL, nullptr)}
N(SB, L{
N(VP, nullptr),
N(MB, L{
N(KP|VP, "foo", nullptr),
N(KP|VP, "bar", nullptr),
N(KP|VP, "baz", nullptr)
}),
N(VP, nullptr)
})
);
ADD_CASE_TO_GROUP("null map vals in seq, impl, mixed 3",
@@ -425,7 +421,15 @@ R"(
bar:
baz:
)",
L{N(VAL, nullptr), N(VAL, nullptr), N(L{N(KEYVAL, "foo", nullptr), N(KEYVAL, "bar", nullptr), N(KEYVAL, "baz", nullptr)})}
N(SB, L{
N(VP, nullptr),
N(VP, nullptr),
N(MB, L{
N(KP|VP, "foo", nullptr),
N(KP|VP, "bar", nullptr),
N(KP|VP, "baz", nullptr)
}),
})
);
ADD_CASE_TO_GROUP("issue84.1",
@@ -438,11 +442,12 @@ your case:
bar: ''
whatever: baz
)",
L{
N("fixed case", L{N("foo", "a"), N(KEYVAL, "bar", nullptr)}),
N("your case", L{N("foo", "a"), N(QV, "bar", "")}),
N("whatever", "baz"),
});
N(MB, L{
N(KP|MB, "fixed case", L{N(KP|VP, "foo", "a"), N(KP|VP, "bar", nullptr)}),
N(KP|MB, "your case", L{N(KP|VP, "foo", "a"), N(KP|VS, "bar", "")}),
N(KP|VP, "whatever", "baz"),
})
);
ADD_CASE_TO_GROUP("issue84.2",
R"(
@@ -461,28 +466,29 @@ param_root:
IsBurnOutBornIdent: false
ChangeDropTableName: ''
)",
L{
N("version", "0"),
N("type", "xml"),
N("param_root", L{
N("objects", L{
N("System", L{
N(QV, "SameGroupActorName", ""),
N("IsGetItemSelf", "false")
}),
N("General", L{
N("Speed", "1.0"),
N("Life", "100"),
N("IsLifeInfinite", "false"),
N("ElectricalDischarge", "1.0"),
N("IsBurnOutBorn", "false"),
N(KEYVAL, "BurnOutBornName", nullptr),
N("IsBurnOutBornIdent", "false"),
N(QV, "ChangeDropTableName", ""),
}),
})
}),
});
N(MB, L{
N(KP|VP, "version", "0"),
N(KP|VP, "type", "xml"),
N(KP|MB, "param_root", L{
N(KP|MB, "objects", L{
N(KP|MFS, "System", L{
N(KP|VS, "SameGroupActorName", ""),
N(KP|VP, "IsGetItemSelf", "false")
}),
N(KP|MB, "General", L{
N(KP|VP, "Speed", "1.0"),
N(KP|VP, "Life", "100"),
N(KP|VP, "IsLifeInfinite", "false"),
N(KP|VP, "ElectricalDischarge", "1.0"),
N(KP|VP, "IsBurnOutBorn", "false"),
N(KP|VP, "BurnOutBornName", nullptr),
N(KP|VP, "IsBurnOutBornIdent", "false"),
N(KP|VS, "ChangeDropTableName", ""),
}),
})
}),
})
);
ADD_CASE_TO_GROUP("issue84.3",
R"(
@@ -496,20 +502,21 @@ param_root:
Str64_empty3: ''
lists: {}
)",
L{
N("version", "10"),
N("type", "test"),
N("param_root", L{
N("objects", L{
N("TestContent", L{
N(QV, "Str64_empty", ""),
N(KEYVAL, "Str64_empty2", nullptr),
N(QV, "Str64_empty3", ""),
}),
}),
N(KEYMAP, "lists", L{})
}),
});
N(MB, L{
N(KP|VP, "version", "10"),
N(KP|VP, "type", "test"),
N(KP|MB, "param_root", L{
N(KP|MB, "objects", L{
N(KP|MB, "TestContent", L{
N(KP|VS, "Str64_empty", ""),
N(KP|VP, "Str64_empty2", nullptr),
N(KP|VS, "Str64_empty3", ""),
}),
}),
N(KP|MFS, "lists", L{})
}),
})
);
}
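The issue84.* cases above hinge on the distinction between a missing (null) value and an explicitly empty quoted one. A minimal sketch of how that distinction surfaces through the public API, written as an illustration only (not code from the commit), assuming gtest and the ryml headers:
#include <ryml_std.hpp>
#include <ryml.hpp>
#include <gtest/gtest.h>
TEST(sketch, null_val_vs_quoted_empty)
{
    ryml::Tree t = ryml::parse_in_arena("{missing: , empty: ''}");
    EXPECT_EQ(t["missing"].val().len, 0u);       // nothing after the colon
    EXPECT_FALSE(t["missing"].is_val_quoted());  // and no quotes: a null value
    EXPECT_EQ(t["empty"].val().len, 0u);         // also zero length...
    EXPECT_TRUE(t["empty"].is_val_quoted());     // ...but quoted, hence an empty string
}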

test/test_scalar_plain.cpp: new file, 1244 lines (diff suppressed: too large)

View File

@@ -1,8 +1,141 @@
#include "./test_group.hpp"
#include "./test_lib/test_case.hpp"
#include "./test_lib/test_group.hpp"
#include "./test_lib/test_group.def.hpp"
namespace c4 {
namespace yml {
struct squoted_case
{
csubstr input, output;
};
void test_filter(csubstr input, csubstr expected)
{
RYML_TRACE_FMT("\nstr=[{}]~~~{}~~~\nexp=[{}]~~~{}~~~", input.len, input, expected.len, expected);
ASSERT_LE(expected.len, input.len);
std::string subject_;
subject_.resize(2 * input.size());
c4::substr dst = to_substr(subject_);
Parser::handler_type event_handler = {};
Parser proc(&event_handler);
FilterResult result = proc.filter_scalar_squoted(input, dst);
ASSERT_TRUE(result.valid());
csubstr out = result.get();
if(input != expected)
{
EXPECT_TRUE(out.is_sub(dst));// << "\ninput=" << input << "\nexpected=" << expected;
}
EXPECT_EQ(out, expected);
std::cout << "OK! ~~~" << input << "~~~ ---> ~~~" << out << "~~~\n";
}
void test_filter_inplace(csubstr input, csubstr expected)
{
RYML_TRACE_FMT("\nstr=[{}]~~~{}~~~\nexp=[{}]~~~{}~~~", input.len, input, expected.len, expected);
ASSERT_LE(expected.len, input.len);
std::string subject_(input.str, input.len);
std::string subject_2 = subject_;
c4::substr dst = to_substr(subject_);
Parser::handler_type event_handler1 = {};
Parser parser1(&event_handler1);
FilterResult result = parser1.filter_scalar_squoted_in_place(dst, subject_.size());
Parser::handler_type event_handler2 = {};
Parser parser2(&event_handler2);
Tree tree = parse_in_arena(&parser2, "file", "# set the tree in the parser");
csubstr sresult = parser2._filter_scalar_squot(to_substr(subject_2));
EXPECT_GE(result.required_len(), expected.len);
EXPECT_EQ(sresult.len, result.str.len);
ASSERT_TRUE(result.valid());
csubstr out = result.get();
ASSERT_TRUE(out.str);
EXPECT_TRUE(out.is_sub(dst));// << "\ninput=" << input << "\nexpected=" << expected;
EXPECT_EQ(out, expected);
std::cout << "OK! ~~~" << input << "~~~ ---> ~~~" << out << "~~~\n";
}
struct SQuotedFilterTest : public ::testing::TestWithParam<squoted_case>
{
};
TEST_P(SQuotedFilterTest, filter)
{
squoted_case sqc = GetParam();
test_filter(sqc.input, sqc.output);
}
TEST_P(SQuotedFilterTest, filter_inplace)
{
squoted_case sqc = GetParam();
test_filter_inplace(sqc.input, sqc.output);
}
squoted_case test_cases_filter[] = {
#define sqc(input, output) squoted_case{csubstr(input), csubstr(output)}
// 0
sqc("", ""),
sqc(" ", " "),
sqc(" ", " "),
sqc(" ", " "),
sqc(" ", " "),
// 5
sqc("foo", "foo"),
sqc("quoted\nstring", "quoted string"),
sqc("quoted\n\nstring", "quoted\nstring"),
sqc("quoted\n\n\nstring", "quoted\n\nstring"),
sqc("quoted\n\n\n\nstring", "quoted\n\n\nstring"),
// 10
sqc("quoted\n string", "quoted string"),
sqc("\"Howdy!\" he cried.", "\"Howdy!\" he cried."),
sqc(" # Not a ''comment''.", " # Not a 'comment'."),
sqc("|\\-*-/|", "|\\-*-/|"),
sqc("\t\n\ndetected\n\n", "\t\ndetected\n"),
// 15
sqc(" 1st non-empty\n\n 2nd non-empty \n 3rd non-empty ", " 1st non-empty\n2nd non-empty 3rd non-empty "),
sqc(" 1st non-empty\n\n 2nd non-empty \t\n \t3rd non-empty ", " 1st non-empty\n2nd non-empty 3rd non-empty "),
sqc(" 1st non-empty\n\n 2nd non-empty\t \n\t 3rd non-empty ", " 1st non-empty\n2nd non-empty 3rd non-empty "),
sqc("Several lines of text,\ncontaining ''single quotes'' and \"double quotes\". Escapes (like \\n) don''t do anything.\n\nNewlines can be added by leaving a blank line.\n Leading whitespace on lines is ignored.",
"Several lines of text, containing 'single quotes' and \"double quotes\". Escapes (like \\n) don't do anything.\nNewlines can be added by leaving a blank line. Leading whitespace on lines is ignored."),
sqc(R"(Some text ''with single quotes'' "and double quotes".)", "Some text 'with single quotes' \"and double quotes\"."),
// 20
sqc(R"(Some text with escapes \n \r \t)", "Some text with escapes \\n \\r \\t"),
sqc("''", "'"),
sqc("''''", "''"),
sqc("''''''", "'''"),
sqc("''''''''", "''''"),
// 25
sqc("''''''''''", "'''''"),
sqc("''''''''''''", "''''''"),
sqc(R"(a aaaa )", "a aaaa "),
sqc(R"(a aaaa )", "a aaaa "),
sqc(R"(a aaaa )", "a aaaa "),
// 30
sqc(R"(a aaaa )", "a aaaa "),
sqc(R"(a aaaa )", "a aaaa "),
sqc(R"( a aaaa)", " a aaaa"),
sqc(R"( a aaaa)", " a aaaa"),
sqc(R"( a aaaa)", " a aaaa"),
// 35
sqc(R"( a aaaa)", " a aaaa"),
sqc(R"( a aaaa)", " a aaaa"),
sqc(R"( a aaaa )", " a aaaa "),
sqc(R"( a aaaa )", " a aaaa "),
sqc(R"( a aaaa )", " a aaaa "),
// 40
sqc(R"( a aaaa )", " a aaaa "),
sqc(R"( a aaaa )", " a aaaa "),
sqc(R"(x\ny:z\tx $%^&*()x)", "x\\ny:z\\tx $%^&*()x"),
#undef sqc
};
INSTANTIATE_TEST_SUITE_P(single_quoted_filter,
SQuotedFilterTest,
testing::ValuesIn(test_cases_filter));
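The helpers above exercise the new scalar filtering entry points directly. A caller outside the test harness would drive them the same way; this is a sketch reusing the Parser::handler_type, FilterResult and filter_scalar_squoted() names that appear in this diff, not code from the commit:
#include <ryml_std.hpp>
#include <ryml.hpp>
#include <string>
// filter a single-quoted scalar into a freshly allocated string
inline std::string filter_squoted_copy(c4::csubstr input)
{
    std::string buf;
    buf.resize(2 * input.len);                  // headroom for the filtered scalar
    c4::substr dst = c4::to_substr(buf);
    ryml::Parser::handler_type event_handler = {};
    ryml::Parser parser(&event_handler);
    ryml::FilterResult result = parser.filter_scalar_squoted(input, dst);
    if(!result.valid())                         // dst was too small
        return std::string();
    c4::csubstr out = result.get();             // eg "a\n\nb''c" -> "a\nb'c"
    return out.len ? std::string(out.str, out.len) : std::string();
}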
//-----------------------------------------------------------------------------
//-----------------------------------------------------------------------------
//-----------------------------------------------------------------------------
TEST(single_quoted, test_suite_KSS4)
{
csubstr yaml = R"(
@@ -55,24 +188,26 @@ detected
TEST(single_quoted, test_suite_PRH3)
{
csubstr yaml = R"(
- ' 1st non-empty
csubstr yaml = R"(---
' 1st non-empty
2nd non-empty
3rd non-empty '
- ' 1st non-empty
---
' 1st non-empty
2nd non-empty
3rd non-empty '
- ' 1st non-empty
---
' 1st non-empty
2nd non-empty
3rd non-empty '
)";
test_check_emit_check(yaml, [](Tree const &t){
EXPECT_EQ(t[0].val(), csubstr(" 1st non-empty\n2nd non-empty 3rd non-empty "));
EXPECT_EQ(t[1].val(), csubstr(" 1st non-empty\n2nd non-empty 3rd non-empty "));
EXPECT_EQ(t[2].val(), csubstr(" 1st non-empty\n2nd non-empty 3rd non-empty "));
EXPECT_EQ(t.docref(0).val(), csubstr(" 1st non-empty\n2nd non-empty 3rd non-empty "));
EXPECT_EQ(t.docref(1).val(), csubstr(" 1st non-empty\n2nd non-empty 3rd non-empty "));
EXPECT_EQ(t.docref(2).val(), csubstr(" 1st non-empty\n2nd non-empty 3rd non-empty "));
});
}
@@ -239,27 +374,27 @@ CASE_GROUP(SINGLE_QUOTED)
ADD_CASE_TO_GROUP("squoted, only text",
R"('Some text without any quotes.'
)",
N(DOCVAL | VALQUO, "Some text without any quotes.")
N(VS, "Some text without any quotes.")
);
ADD_CASE_TO_GROUP("squoted, with double quotes",
R"('Some "text" "with double quotes"')",
N(DOCVAL | VALQUO, "Some \"text\" \"with double quotes\"")
N(VS, "Some \"text\" \"with double quotes\"")
);
ADD_CASE_TO_GROUP("squoted, with single quotes",
R"('Some text ''with single quotes''')",
N(DOCVAL | VALQUO, "Some text 'with single quotes'")
N(VS, "Some text 'with single quotes'")
);
ADD_CASE_TO_GROUP("squoted, with single and double quotes",
R"('Some text ''with single quotes'' "and double quotes".')",
N(DOCVAL | VALQUO, "Some text 'with single quotes' \"and double quotes\".")
N(VS, "Some text 'with single quotes' \"and double quotes\".")
);
ADD_CASE_TO_GROUP("squoted, with escapes",
R"('Some text with escapes \n \r \t')",
N(DOCVAL | VALQUO, "Some text with escapes \\n \\r \\t")
N(VS, "Some text with escapes \\n \\r \\t")
);
ADD_CASE_TO_GROUP("squoted, all",
@@ -269,12 +404,12 @@ containing ''single quotes'' and "double quotes". Escapes (like \n) don''t do an
Newlines can be added by leaving a blank line.
Leading whitespace on lines is ignored.'
)",
N(DOCVAL | VALQUO, "Several lines of text, containing 'single quotes' and \"double quotes\". Escapes (like \\n) don't do anything.\nNewlines can be added by leaving a blank line. Leading whitespace on lines is ignored.")
N(VS, "Several lines of text, containing 'single quotes' and \"double quotes\". Escapes (like \\n) don't do anything.\nNewlines can be added by leaving a blank line. Leading whitespace on lines is ignored.")
);
ADD_CASE_TO_GROUP("squoted, empty",
R"('')",
N(DOCVAL | VALQUO, "")
N(VS, "")
);
ADD_CASE_TO_GROUP("squoted, blank",
@@ -285,7 +420,7 @@ R"(
- ' '
- ' '
)",
L{N(QV, ""), N(QV, " "), N(QV, " "), N(QV, " "), N(QV, " ")}
N(SB, L{N(VS, ""), N(VS, " "), N(VS, " "), N(VS, " "), N(VS, " ")})
);
ADD_CASE_TO_GROUP("squoted, numbers", // these should not be quoted when emitting
@@ -296,50 +431,49 @@ R"(
- 1e-2
- 1e+2
)",
L{N("-1"), N("-1.0"), N("+1.0"), N("1e-2"), N("1e+2")}
N(SB, L{N(VP,"-1"), N(VP,"-1.0"), N(VP,"+1.0"), N(VP,"1e-2"), N(VP,"1e+2")})
);
ADD_CASE_TO_GROUP("squoted, trailing space",
R"('a aaaa ')",
N(DOCVAL | VALQUO, "a aaaa ")
N(VS, "a aaaa ")
);
ADD_CASE_TO_GROUP("squoted, leading space",
R"(' a aaaa')",
N(DOCVAL | VALQUO, " a aaaa")
N(VS, " a aaaa")
);
ADD_CASE_TO_GROUP("squoted, trailing and leading space",
R"(' 012345 ')",
N(DOCVAL | VALQUO, " 012345 ")
N(VS, " 012345 ")
);
ADD_CASE_TO_GROUP("squoted, 1 squote",
R"('''')",
N(DOCVAL | VALQUO, "'")
N(VS, "'")
);
ADD_CASE_TO_GROUP("squoted, 2 squotes",
R"('''''')",
N(DOCVAL | VALQUO, "''")
N(VS, "''")
);
ADD_CASE_TO_GROUP("squoted, 3 squotes",
R"('''''''')",
N(DOCVAL | VALQUO, "'''")
N(VS, "'''")
);
ADD_CASE_TO_GROUP("squoted, 4 squotes",
R"('''''''''')",
N(DOCVAL | VALQUO, "''''")
N(VS, "''''")
);
ADD_CASE_TO_GROUP("squoted, 5 squotes",
R"('''''''''''')",
N(DOCVAL | VALQUO, "'''''")
N(VS, "'''''")
);
/*
ADD_CASE_TO_GROUP("squoted, example 2",
R"('This is a key
@@ -347,9 +481,38 @@ that has multiple lines
': and this is its value
)",
L{N("This is a key\nthat has multiple lines\n", "and this is its value")}
N(MB, L{N(KS|VP, "This is a key\nthat has multiple lines\n", "and this is its value")})
);
ADD_CASE_TO_GROUP("squoted indentation, 0",
R"(' 1st non-empty
2nd non-empty
3rd non-empty '
)",
N(VS, " 1st non-empty\n2nd non-empty 3rd non-empty ")
);
/* FIXME - tab is invalid indentation on line 4 (before 3rd non-empty)
ADD_CASE_TO_GROUP("squoted indentation, 1", EXPECT_PARSE_ERROR,
R"(- ' 1st non-empty
2nd non-empty
3rd non-empty '
)",
LineCol(1,1)
);
*/
ADD_CASE_TO_GROUP("squoted indentation, 2",
R"(- ' 1st non-empty
2nd non-empty
3rd non-empty '
)",
N(SB, L{N(VS, " 1st non-empty\n2nd non-empty 3rd non-empty ")})
);
}
} // namespace yml

test/test_seq.cpp: new file, 1087 lines (diff suppressed: too large)

View File

@@ -1,4 +1,5 @@
#include "./test_group.hpp"
#include "./test_lib/test_group.hpp"
#include "./test_lib/test_group.def.hpp"
namespace c4 {
namespace yml {
@@ -8,13 +9,13 @@ CASE_GROUP(EMPTY_SEQ)
ADD_CASE_TO_GROUP("empty seq, explicit",
"[]",
SEQ
SFS
);
ADD_CASE_TO_GROUP("empty seq, explicit, whitespace",
" []",
SEQ
SFS
);
@@ -22,7 +23,7 @@ ADD_CASE_TO_GROUP("empty seq, multiline",
R"([
]
)",
SEQ
SFS
);
ADD_CASE_TO_GROUP("empty seq, multilines",
@@ -32,7 +33,7 @@ R"([
]
)",
SEQ
SFS
);
}

test/test_seq_generic.cpp: new file, 49 lines

@@ -0,0 +1,49 @@
#include "./test_lib/test_group.hpp"
#include "test_lib/test_group.def.hpp"
namespace c4 {
namespace yml {
CASE_GROUP(GENERIC_SEQ)
{
ADD_CASE_TO_GROUP("generic seq v0",
R"(
- item 1
- item 2
- - item 3.1
- item 3.2
- key 1: value 1
key 2: value 2
)",
N(SB, L{
N(VP, "item 1"),
N(VP, "item 2"),
N(SB, L{N(VP, "item 3.1"), N(VP, "item 3.2")}),
N(MB, L{N(KP|VP, "key 1", "value 1"), N(KP|VP, "key 2", "value 2")})
})
);
ADD_CASE_TO_GROUP("generic seq v1",
R"(
- item 1
- item 2
-
- item 3.1
- item 3.2
-
key 1: value 1
key 2: value 2
)",
N(SB, L{
N(VP, "item 1"),
N(VP, "item 2"),
N(SB, L{N(VP, "item 3.1"), N(VP, "item 3.2")}),
N(MB, L{N(KP|VP, "key 1", "value 1"), N(KP|VP, "key 2", "value 2")})
})
);
}
} // namespace yml
} // namespace c4

test/test_seq_nestedx2.cpp: new file, 173 lines

@@ -0,0 +1,173 @@
#include "./test_lib/test_group.hpp"
#include "./test_lib/test_group.def.hpp"
namespace c4 {
namespace yml {
CASE_GROUP(NESTED_SEQX2)
{
ADD_CASE_TO_GROUP("nested seq x2, empty, oneline",
R"([[], [], []])",
N(SFS, L{SFS, SFS, SFS})
);
ADD_CASE_TO_GROUP("nested seq x2, explicit, same line",
R"([[00, 01, 02], [10, 11, 12], [20, 21, 22]])",
N(SFS, L{
N(SFS, L{N(VP, "00"), N(VP, "01"), N(VP, "02")}),
N(SFS, L{N(VP, "10"), N(VP, "11"), N(VP, "12")}),
N(SFS, L{N(VP, "20"), N(VP, "21"), N(VP, "22")}),
})
);
ADD_CASE_TO_GROUP("nested seq x2, explicit first+last level, same line, no spaces",
R"([[00,01,02],[10,11,12],[20,21,22]])",
N(SFS, L{
N(SFS, L{N(VP,"00"), N(VP,"01"), N(VP,"02")}),
N(SFS, L{N(VP,"10"), N(VP,"11"), N(VP,"12")}),
N(SFS, L{N(VP,"20"), N(VP,"21"), N(VP,"22")}),
})
);
ADD_CASE_TO_GROUP("nested seq x2, explicit",
R"([
[00, 01, 02],
[10, 11, 12],
[20, 21, 22],
])",
N(SFS, L{
N(SFS, L{N(VP,"00"), N(VP,"01"), N(VP,"02")}),
N(SFS, L{N(VP,"10"), N(VP,"11"), N(VP,"12")}),
N(SFS, L{N(VP,"20"), N(VP,"21"), N(VP,"22")}),
})
);
ADD_CASE_TO_GROUP("nested seq x2",
R"(
- - 00
- 01
- 02
- - 10
- 11
- 12
- - 20
- 21
- 22
)",
N(SB, L{
N(SB, L{N(VP,"00"), N(VP,"01"), N(VP,"02")}),
N(SB, L{N(VP,"10"), N(VP,"11"), N(VP,"12")}),
N(SB, L{N(VP,"20"), N(VP,"21"), N(VP,"22")}),
})
);
ADD_CASE_TO_GROUP("nested seq x2, next line",
R"(
-
- 00
- 01
- 02
-
- 10
- 11
- 12
-
- 20
- 21
- 22
)",
N(SB, L{
N(SB, L{N(VP,"00"), N(VP,"01"), N(VP,"02")}),
N(SB, L{N(VP,"10"), N(VP,"11"), N(VP,"12")}),
N(SB, L{N(VP,"20"), N(VP,"21"), N(VP,"22")}),
})
);
ADD_CASE_TO_GROUP("nested seq x2, all next line",
R"(
-
-
00
-
01
-
02
-
-
10
-
11
-
12
-
-
20
-
21
-
22
)",
N(SB, L{
N(SB, L{N(VP,"00"), N(VP,"01"), N(VP,"02")}),
N(SB, L{N(VP,"10"), N(VP,"11"), N(VP,"12")}),
N(SB, L{N(VP,"20"), N(VP,"21"), N(VP,"22")}),
})
);
ADD_CASE_TO_GROUP("nested seq x2, all next line, with whitespace",
R"(
-
-
00
-
01
-
02
-
-
10
-
11
-
12
-
-
20
-
21
-
22
)",
N(SB, L{
N(SB, L{N(VP,"00"), N(VP,"01"), N(VP,"02")}),
N(SB, L{N(VP,"10"), N(VP,"11"), N(VP,"12")}),
N(SB, L{N(VP,"20"), N(VP,"21"), N(VP,"22")}),
})
);
ADD_CASE_TO_GROUP("nested seq x2, implicit first, explicit last level",
R"(
- [00, 01, 02]
- [10, 11, 12]
- [20, 21, 22]
)",
N(SB, L{
N(SFS, L{N(VP,"00"), N(VP,"01"), N(VP,"02")}),
N(SFS, L{N(VP,"10"), N(VP,"11"), N(VP,"12")}),
N(SFS, L{N(VP,"20"), N(VP,"21"), N(VP,"22")}),
})
);
}
} // namespace yml
} // namespace c4

test/test_seq_nestedx3.cpp: new file, 188 lines

@@ -0,0 +1,188 @@
#include "./test_lib/test_group.hpp"
#include "./test_lib/test_group.def.hpp"
namespace c4 {
namespace yml {
CASE_GROUP(NESTED_SEQX3)
{
ADD_CASE_TO_GROUP("nested seq x3, explicit",
R"([
[[000, 001, 002], [010, 011, 012], [020, 021, 022]],
[[100, 101, 102], [110, 111, 112], [120, 121, 122]],
[[200, 201, 202], [210, 211, 212], [220, 221, 222]],
])",
N(SFS, L{
N{SFS, L{N{SFS, L{N(VP, "000"), N(VP, "001"), N(VP, "002")}}, N{SFS, L{N(VP, "010"), N(VP, "011"), N(VP, "012")}}, N{SFS, L{N(VP, "020"), N(VP, "021"), N(VP, "022")}}}},
N{SFS, L{N{SFS, L{N(VP, "100"), N(VP, "101"), N(VP, "102")}}, N{SFS, L{N(VP, "110"), N(VP, "111"), N(VP, "112")}}, N{SFS, L{N(VP, "120"), N(VP, "121"), N(VP, "122")}}}},
N{SFS, L{N{SFS, L{N(VP, "200"), N(VP, "201"), N(VP, "202")}}, N{SFS, L{N(VP, "210"), N(VP, "211"), N(VP, "212")}}, N{SFS, L{N(VP, "220"), N(VP, "221"), N(VP, "222")}}}},
})
);
ADD_CASE_TO_GROUP("nested seq x3",
R"(
- - - 000
- 001
- 002
- - 010
- 011
- 012
- - 020
- 021
- 022
- - - 100
- 101
- 102
- - 110
- 111
- 112
- - 120
- 121
- 122
- - - 200
- 201
- 202
- - 210
- 211
- 212
- - 220
- 221
- 222
)",
N(SB, L{
N{SB, L{N{SB, L{N(VP, "000"), N(VP, "001"), N(VP, "002")}}, N{SB, L{N(VP, "010"), N(VP, "011"), N(VP, "012")}}, N{SB, L{N(VP, "020"), N(VP, "021"), N(VP, "022")}}}},
N{SB, L{N{SB, L{N(VP, "100"), N(VP, "101"), N(VP, "102")}}, N{SB, L{N(VP, "110"), N(VP, "111"), N(VP, "112")}}, N{SB, L{N(VP, "120"), N(VP, "121"), N(VP, "122")}}}},
N{SB, L{N{SB, L{N(VP, "200"), N(VP, "201"), N(VP, "202")}}, N{SB, L{N(VP, "210"), N(VP, "211"), N(VP, "212")}}, N{SB, L{N(VP, "220"), N(VP, "221"), N(VP, "222")}}}},
})
);
ADD_CASE_TO_GROUP("nested seq x3, continued on next line",
R"(
-
-
- 000
- 001
- 002
-
- 010
- 011
- 012
-
- 020
- 021
- 022
-
-
- 100
- 101
- 102
-
- 110
- 111
- 112
-
- 120
- 121
- 122
-
-
- 200
- 201
- 202
-
- 210
- 211
- 212
-
- 220
- 221
- 222
)",
N(SB, L{
N{SB, L{N{SB, L{N(VP, "000"), N(VP, "001"), N(VP, "002")}}, N{SB, L{N(VP, "010"), N(VP, "011"), N(VP, "012")}}, N{SB, L{N(VP, "020"), N(VP, "021"), N(VP, "022")}}}},
N{SB, L{N{SB, L{N(VP, "100"), N(VP, "101"), N(VP, "102")}}, N{SB, L{N(VP, "110"), N(VP, "111"), N(VP, "112")}}, N{SB, L{N(VP, "120"), N(VP, "121"), N(VP, "122")}}}},
N{SB, L{N{SB, L{N(VP, "200"), N(VP, "201"), N(VP, "202")}}, N{SB, L{N(VP, "210"), N(VP, "211"), N(VP, "212")}}, N{SB, L{N(VP, "220"), N(VP, "221"), N(VP, "222")}}}},
})
);
ADD_CASE_TO_GROUP("nested seq x3, all continued on next line",
R"(
-
-
-
000
-
001
-
002
-
-
010
-
011
-
012
-
-
020
-
021
-
022
-
-
-
100
-
101
-
102
-
-
110
-
111
-
112
-
-
120
-
121
-
122
-
-
-
200
-
201
-
202
-
-
210
-
211
-
212
-
-
220
-
221
-
222
)",
N(SB, L{
N{SB, L{N{SB, L{N(VP, "000"), N(VP, "001"), N(VP, "002")}}, N{SB, L{N(VP, "010"), N(VP, "011"), N(VP, "012")}}, N{SB, L{N(VP, "020"), N(VP, "021"), N(VP, "022")}}}},
N{SB, L{N{SB, L{N(VP, "100"), N(VP, "101"), N(VP, "102")}}, N{SB, L{N(VP, "110"), N(VP, "111"), N(VP, "112")}}, N{SB, L{N(VP, "120"), N(VP, "121"), N(VP, "122")}}}},
N{SB, L{N{SB, L{N(VP, "200"), N(VP, "201"), N(VP, "202")}}, N{SB, L{N(VP, "210"), N(VP, "211"), N(VP, "212")}}, N{SB, L{N(VP, "220"), N(VP, "221"), N(VP, "222")}}}},
})
);
}
} // namespace yml
} // namespace c4

test/test_seq_nestedx4.cpp: new file, 125 lines

@@ -0,0 +1,125 @@
#include "./test_lib/test_group.hpp"
#include "./test_lib/test_group.def.hpp"
namespace c4 {
namespace yml {
CASE_GROUP(NESTED_SEQX4)
{
ADD_CASE_TO_GROUP("nested seq x4, explicit",
R"([
[[[0000, 0001, 0002], [0010, 0011, 0012], [0020, 0021, 0022]],
[[0100, 0101, 0102], [0110, 0111, 0112], [0120, 0121, 0122]],
[[0200, 0201, 0202], [0210, 0211, 0212], [0220, 0221, 0222]]],
[[[1000, 1001, 1002], [1010, 1011, 1012], [1020, 1021, 1022]],
[[1100, 1101, 1102], [1110, 1111, 1112], [1120, 1121, 1122]],
[[1200, 1201, 1202], [1210, 1211, 1212], [1220, 1221, 1222]]],
[[[2000, 2001, 2002], [2010, 2011, 2012], [2020, 2021, 2022]],
[[2100, 2101, 2102], [2110, 2111, 2112], [2120, 2121, 2122]],
[[2200, 2201, 2202], [2210, 2211, 2212], [2220, 2221, 2222]]],
])",
N(SFS, L{
N{SFS, L{N{SFS, L{N{SFS, L{N(VP, "0000"), N(VP, "0001"), N(VP, "0002")}}, N{SFS, L{N(VP, "0010"), N(VP, "0011"), N(VP, "0012")}}, N{SFS, L{N(VP, "0020"), N(VP, "0021"), N(VP, "0022")}}}}, N{SFS, L{N{SFS, L{N(VP, "0100"), N(VP, "0101"), N(VP, "0102")}}, N{SFS, L{N(VP, "0110"), N(VP, "0111"), N(VP, "0112")}}, N{SFS, L{N(VP, "0120"), N(VP, "0121"), N(VP, "0122")}}}}, N{SFS, L{N{SFS, L{N(VP, "0200"), N(VP, "0201"), N(VP, "0202")}}, N{SFS, L{N(VP, "0210"), N(VP, "0211"), N(VP, "0212")}}, N{SFS, L{N(VP, "0220"), N(VP, "0221"), N(VP, "0222")}}}}}},
N{SFS, L{N{SFS, L{N{SFS, L{N(VP, "1000"), N(VP, "1001"), N(VP, "1002")}}, N{SFS, L{N(VP, "1010"), N(VP, "1011"), N(VP, "1012")}}, N{SFS, L{N(VP, "1020"), N(VP, "1021"), N(VP, "1022")}}}}, N{SFS, L{N{SFS, L{N(VP, "1100"), N(VP, "1101"), N(VP, "1102")}}, N{SFS, L{N(VP, "1110"), N(VP, "1111"), N(VP, "1112")}}, N{SFS, L{N(VP, "1120"), N(VP, "1121"), N(VP, "1122")}}}}, N{SFS, L{N{SFS, L{N(VP, "1200"), N(VP, "1201"), N(VP, "1202")}}, N{SFS, L{N(VP, "1210"), N(VP, "1211"), N(VP, "1212")}}, N{SFS, L{N(VP, "1220"), N(VP, "1221"), N(VP, "1222")}}}}}},
N{SFS, L{N{SFS, L{N{SFS, L{N(VP, "2000"), N(VP, "2001"), N(VP, "2002")}}, N{SFS, L{N(VP, "2010"), N(VP, "2011"), N(VP, "2012")}}, N{SFS, L{N(VP, "2020"), N(VP, "2021"), N(VP, "2022")}}}}, N{SFS, L{N{SFS, L{N(VP, "2100"), N(VP, "2101"), N(VP, "2102")}}, N{SFS, L{N(VP, "2110"), N(VP, "2111"), N(VP, "2112")}}, N{SFS, L{N(VP, "2120"), N(VP, "2121"), N(VP, "2122")}}}}, N{SFS, L{N{SFS, L{N(VP, "2200"), N(VP, "2201"), N(VP, "2202")}}, N{SFS, L{N(VP, "2210"), N(VP, "2211"), N(VP, "2212")}}, N{SFS, L{N(VP, "2220"), N(VP, "2221"), N(VP, "2222")}}}}}},
})
);
ADD_CASE_TO_GROUP("nested seq x4",
R"(
- - - - 0000
- 0001
- 0002
- - 0010
- 0011
- 0012
- - 0020
- 0021
- 0022
- - - 0100
- 0101
- 0102
- - 0110
- 0111
- 0112
- - 0120
- 0121
- 0122
- - - 0200
- 0201
- 0202
- - 0210
- 0211
- 0212
- - 0220
- 0221
- 0222
- - - - 1000
- 1001
- 1002
- - 1010
- 1011
- 1012
- - 1020
- 1021
- 1022
- - - 1100
- 1101
- 1102
- - 1110
- 1111
- 1112
- - 1120
- 1121
- 1122
- - - 1200
- 1201
- 1202
- - 1210
- 1211
- 1212
- - 1220
- 1221
- 1222
- - - - 2000
- 2001
- 2002
- - 2010
- 2011
- 2012
- - 2020
- 2021
- 2022
- - - 2100
- 2101
- 2102
- - 2110
- 2111
- 2112
- - 2120
- 2121
- 2122
- - - 2200
- 2201
- 2202
- - 2210
- 2211
- 2212
- - 2220
- 2221
- 2222
)",
N(SB, L{
N{SB, L{N{SB, L{N{SB, L{N(VP, "0000"), N(VP, "0001"), N(VP, "0002")}}, N{SB, L{N(VP, "0010"), N(VP, "0011"), N(VP, "0012")}}, N{SB, L{N(VP, "0020"), N(VP, "0021"), N(VP, "0022")}}}}, N{SB, L{N{SB, L{N(VP, "0100"), N(VP, "0101"), N(VP, "0102")}}, N{SB, L{N(VP, "0110"), N(VP, "0111"), N(VP, "0112")}}, N{SB, L{N(VP, "0120"), N(VP, "0121"), N(VP, "0122")}}}}, N{SB, L{N{SB, L{N(VP, "0200"), N(VP, "0201"), N(VP, "0202")}}, N{SB, L{N(VP, "0210"), N(VP, "0211"), N(VP, "0212")}}, N{SB, L{N(VP, "0220"), N(VP, "0221"), N(VP, "0222")}}}}}},
N{SB, L{N{SB, L{N{SB, L{N(VP, "1000"), N(VP, "1001"), N(VP, "1002")}}, N{SB, L{N(VP, "1010"), N(VP, "1011"), N(VP, "1012")}}, N{SB, L{N(VP, "1020"), N(VP, "1021"), N(VP, "1022")}}}}, N{SB, L{N{SB, L{N(VP, "1100"), N(VP, "1101"), N(VP, "1102")}}, N{SB, L{N(VP, "1110"), N(VP, "1111"), N(VP, "1112")}}, N{SB, L{N(VP, "1120"), N(VP, "1121"), N(VP, "1122")}}}}, N{SB, L{N{SB, L{N(VP, "1200"), N(VP, "1201"), N(VP, "1202")}}, N{SB, L{N(VP, "1210"), N(VP, "1211"), N(VP, "1212")}}, N{SB, L{N(VP, "1220"), N(VP, "1221"), N(VP, "1222")}}}}}},
N{SB, L{N{SB, L{N{SB, L{N(VP, "2000"), N(VP, "2001"), N(VP, "2002")}}, N{SB, L{N(VP, "2010"), N(VP, "2011"), N(VP, "2012")}}, N{SB, L{N(VP, "2020"), N(VP, "2021"), N(VP, "2022")}}}}, N{SB, L{N{SB, L{N(VP, "2100"), N(VP, "2101"), N(VP, "2102")}}, N{SB, L{N(VP, "2110"), N(VP, "2111"), N(VP, "2112")}}, N{SB, L{N(VP, "2120"), N(VP, "2121"), N(VP, "2122")}}}}, N{SB, L{N{SB, L{N(VP, "2200"), N(VP, "2201"), N(VP, "2202")}}, N{SB, L{N(VP, "2210"), N(VP, "2211"), N(VP, "2212")}}, N{SB, L{N(VP, "2220"), N(VP, "2221"), N(VP, "2222")}}}}}},
})
);
}
} // namespace yml
} // namespace c4
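Reading the nested structures from these cases back out of a parsed tree is plain chained indexing; a short sketch, illustration only, assuming gtest and the ryml headers:
#include <ryml_std.hpp>
#include <ryml.hpp>
#include <gtest/gtest.h>
TEST(sketch, nested_seq_indexing)
{
    ryml::Tree t = ryml::parse_in_arena("[[00, 01, 02], [10, 11, 12]]");
    EXPECT_EQ(t[0][2].val(), "02");
    EXPECT_EQ(t[1][0].val(), "10");
}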

View File

@@ -1,5 +1,6 @@
#include "./test_group.hpp"
#include "test_case.hpp"
#include "./test_lib/test_group.hpp"
#include "test_lib/test_group.def.hpp"
#include "test_lib/test_case.hpp"
namespace c4 {
namespace yml {
@@ -13,9 +14,9 @@ TEST(seq_of_map, with_anchors)
&a2 a2: v2
&a3 a3: v3
- a0: w0
*a1: w1
*a2: w2
*a3: w3
*a1 : w1
*a2 : w2
*a3 : w3
- &seq
a4: v4
)";
@@ -31,9 +32,9 @@ TEST(seq_of_map, with_anchors)
csubstr yaml = R"(- &a1 a1: v1
&a2 a2: v2
&a3 a3: v3
- *a1: w1
*a2: w2
*a3: w3
- *a1 : w1
*a2 : w2
*a3 : w3
)";
Tree t = parse_in_arena(yaml);
EXPECT_EQ(emitrs_yaml<std::string>(t), yaml);
@@ -107,11 +108,15 @@ TEST(seq_of_map, missing_scalars_v2)
#endif
ASSERT_EQ(t["a"].num_children(), 2u);
ASSERT_EQ(t["a"][0].num_children(), 1u);
EXPECT_EQ(t["a"][0].first_child().key(), nullptr);
EXPECT_EQ(t["a"][0].first_child().val(), nullptr);
EXPECT_EQ(t["a"][0].first_child().key().len, 0);
EXPECT_EQ(t["a"][0].first_child().val().len, 0);
EXPECT_EQ(t["a"][0].first_child().key().str, nullptr) << (const void*)t["a"][0].first_child().key().str;
EXPECT_EQ(t["a"][0].first_child().val().str, nullptr) << (const void*)t["a"][0].first_child().val().str;
ASSERT_EQ(t["a"][1].num_children(), 1u);
EXPECT_EQ(t["a"][1].first_child().key(), nullptr);
EXPECT_EQ(t["a"][1].first_child().val(), nullptr);
EXPECT_EQ(t["a"][1].first_child().key().len, 0);
EXPECT_EQ(t["a"][1].first_child().val().len, 0);
EXPECT_EQ(t["a"][1].first_child().key().str, nullptr) << (const void*)t["a"][1].first_child().key().str;
EXPECT_EQ(t["a"][1].first_child().val().str, nullptr) << (const void*)t["a"][1].first_child().val().str;
}
TEST(seq_of_map, missing_scalars_v3)
@@ -123,13 +128,16 @@ TEST(seq_of_map, missing_scalars_v3)
#ifdef RYML_DBG
print_tree(t);
#endif
ASSERT_EQ(t["a"].num_children(), 2u);
ASSERT_EQ(t["a"][0].num_children(), 1u);
EXPECT_EQ(t["a"][0].first_child().key(), nullptr);
EXPECT_EQ(t["a"][0].first_child().val(), nullptr);
EXPECT_EQ(t["a"][0].first_child().key().len, 0);
EXPECT_EQ(t["a"][0].first_child().val().len, 0);
EXPECT_EQ(t["a"][0].first_child().key().str, nullptr) << (const void*)t["a"][0].first_child().key().str;
EXPECT_EQ(t["a"][0].first_child().val().str, nullptr) << (const void*)t["a"][0].first_child().val().str;
ASSERT_EQ(t["a"][1].num_children(), 1u);
EXPECT_EQ(t["a"][1].first_child().key(), nullptr);
EXPECT_EQ(t["a"][1].first_child().val(), nullptr);
EXPECT_EQ(t["a"][1].first_child().key().len, 0);
EXPECT_EQ(t["a"][1].first_child().val().len, 0);
EXPECT_EQ(t["a"][1].first_child().key().str, nullptr) << (const void*)t["a"][1].first_child().key().str;
EXPECT_EQ(t["a"][1].first_child().val().str, nullptr) << (const void*)t["a"][1].first_child().val().str;
}
#ifdef RYML_WITH_TAB_TOKENS
@@ -163,15 +171,15 @@ CASE_GROUP(SEQ_OF_MAP)
ADD_CASE_TO_GROUP("seq of empty maps, one line",
R"([{}, {}, {}])",
L{MAP, MAP, MAP}
N(SFS, L{MFS, MFS, MFS})
);
ADD_CASE_TO_GROUP("seq of maps, one line",
R"([{name: John Smith, age: 33}, {name: Mary Smith, age: 27}])",
L{
N{L{N("name", "John Smith"), N("age", "33")}},
N{L{N("name", "Mary Smith"), N("age", "27")}}
}
N(SFS, L{
N{MFS, L{N(KP|VP, "name", "John Smith"), N(KP|VP, "age", "33")}},
N{MFS, L{N(KP|VP, "name", "Mary Smith"), N(KP|VP, "age", "27")}}
})
);
ADD_CASE_TO_GROUP("seq of maps, implicit seq, explicit maps",
@@ -179,10 +187,10 @@ R"(
- {name: John Smith, age: 33}
- {name: Mary Smith, age: 27}
)",
L{
N{L{N("name", "John Smith"), N("age", "33")}},
N{L{N("name", "Mary Smith"), N("age", "27")}}
}
N(SB, L{
N{MFS, L{N(KP|VP, "name", "John Smith"), N(KP|VP, "age", "33")}},
N{MFS, L{N(KP|VP, "name", "Mary Smith"), N(KP|VP, "age", "27")}}
})
);
ADD_CASE_TO_GROUP("seq of maps",
@@ -192,10 +200,10 @@ R"(
- name: Mary Smith
age: 27
)",
L{
N{L{N("name", "John Smith"), N("age", "33")}},
N{L{N("name", "Mary Smith"), N("age", "27")}}
}
N(SB, L{
N{MB, L{N(KP|VP, "name", "John Smith"), N(KP|VP, "age", "33")}},
N{MB, L{N(KP|VP, "name", "Mary Smith"), N(KP|VP, "age", "27")}}
})
);
ADD_CASE_TO_GROUP("seq of maps, next line",
@@ -211,10 +219,10 @@ R"(
age:
27
)",
L{
N{L{N("name", "John Smith"), N("age", "33")}},
N{L{N("name", "Mary Smith"), N("age", "27")}}
}
N(SB, L{
N{MB, L{N(KP|VP, "name", "John Smith"), N(KP|VP, "age", "33")}},
N{MB, L{N(KP|VP, "name", "Mary Smith"), N(KP|VP, "age", "27")}}
})
);
ADD_CASE_TO_GROUP("seq of maps, bug #32 ex1",
@@ -222,9 +230,9 @@ R"(
- 'a': 1
b: 2
)",
L{
N{L{N(QK, "a", "1"), N("b", "2")}}
}
N(SB, L{
N{MB, L{N(KS|VP, "a", "1"), N(KP|VP, "b", "2")}}
})
);
ADD_CASE_TO_GROUP("seq of maps, bug #32 ex2",
@@ -238,12 +246,12 @@ R"(
c: 3
- {'a': 1, b: 2}
)",
L{
N{L{N("a", "1"), N("b", "2")}},
N{L{N("b", "2"), N(QK, "a", "1")}},
N{L{N("b", "2"), N(QK, "a", "1"), N("c", "3")}},
N{L{N(QK, "a", "1"), N("b", "2")}},
}
N(SB, L{
N{MB, L{N(KP|VP, "a", "1"), N(KP|VP, "b", "2")}},
N{MB, L{N(KP|VP, "b", "2"), N(KS|VP, "a", "1")}},
N{MB, L{N(KP|VP, "b", "2"), N(KS|VP, "a", "1"), N(KP|VP, "c", "3")}},
N{MFS, L{N(KS|VP, "a", "1"), N(KP|VP, "b", "2")}},
})
);
ADD_CASE_TO_GROUP("seq of maps, bug #32 ex3",
@@ -253,9 +261,10 @@ b: 2
b: 2
'a': 1
)",
L{
N(QK, "a", "1"), N("b", "2"), N("b", "2"), N(QK, "a", "1"),
});
N(MB, L{
N(KS|VP, "a", "1"), N(KP|VP, "b", "2"), N(KP|VP, "b", "2"), N(KS|VP, "a", "1"),
})
);
ADD_CASE_TO_GROUP("seq of maps, implicit map in seq",
@@ -266,13 +275,16 @@ R"('implicit block key' : [
'implicit flow key m' : {key1: val1, key2: val2},
'implicit flow key s' : [val1, val2],
])",
L{N(KEYSEQ|KEYQUO, "implicit block key", L{
N(L{N(KEYVAL|KEYQUO, "implicit flow key 1", "value1")}),
N(L{N(KEYVAL|KEYQUO, "implicit flow key 2", "value2")}),
N(L{N(KEYVAL|KEYQUO, "implicit flow key 3", "value3")}),
N(L{N(KEYMAP|KEYQUO, "implicit flow key m", L{N("key1", "val1"), N("key2", "val2")})}),
N(L{N(KEYSEQ|KEYQUO, "implicit flow key s", L{N("val1"), N("val2")})}),
})});
N(MB, L{
N(KS|SFS, "implicit block key", L{
N(MFS, L{N(KS|VP, "implicit flow key 1", "value1")}),
N(MFS, L{N(KS|VP, "implicit flow key 2", "value2")}),
N(MFS, L{N(KS|VP, "implicit flow key 3", "value3")}),
N(MFS, L{N(KS|MFS, "implicit flow key m", L{N(KP|VP, "key1", "val1"), N(KP|VP, "key2", "val2")})}),
N(MFS, L{N(KS|SFS, "implicit flow key s", L{N(VP, "val1"), N(VP, "val2")})}),
})
})
);
ADD_CASE_TO_GROUP("seq of maps, implicit map in seq, missing scalar",
@@ -288,11 +300,12 @@ c : [
,
:
]})",
L{
N("a", L{N(MAP, L{N("", "foo")}),}),
N("b", L{N(MAP, L{N("", "foo")}),}),
N("c", L{N(MAP, L{N(KEYVAL, "", {})}), N(MAP, L{N(KEYVAL, "", {})}),}),
});
N(MFS, L{
N(KP|SFS, "a", L{N(MFS, L{N(KP|VP, "", "foo")}),}),
N(KP|SFS, "b", L{N(MFS, L{N(KP|VP, "", "foo")}),}),
N(KP|SFS, "c", L{N(MFS, L{N(KP|VP, "", {})}), N(MFS, L{N(KP|VP, "", {})}),}),
})
);
ADD_CASE_TO_GROUP("seq of maps, implicit with anchors, unresolved",
@@ -300,14 +313,15 @@ R"(
- &a1 a1: v1
&a2 a2: v2
&a3 a3: v3
- *a1: w1
*a2: w2
*a3: w3
- *a1 : w1
*a2 : w2
*a3 : w3
)",
L{
N(L{N( "a1", AR(KEYANCH, "a1"), "v1"), N( "a2", AR(KEYANCH, "a2"), "v2"), N( "a3", AR(KEYANCH, "a3"), "v3")}),
N(L{N("*a1", AR(KEYREF, "*a1"), "w1"), N("*a2", AR(KEYREF, "*a2"), "w2"), N("*a3", AR(KEYREF, "*a3"), "w3")}),
});
N(SB, L{
N(MB, L{N(KP|VP, "a1", AR(KEYANCH, "a1"), "v1"), N(KP|VP, "a2", AR(KEYANCH, "a2"), "v2"), N(KP|VP, "a3", AR(KEYANCH, "a3"), "v3")}),
N(MB, L{N(KEY|VP, "*a1", AR(KEYREF, "*a1"), "w1"), N(KEY|VP, "*a2", AR(KEYREF, "*a2"), "w2"), N(KEY|VP, "*a3", AR(KEYREF, "*a3"), "w3")}),
})
);
ADD_CASE_TO_GROUP("seq of maps, implicit with anchors, resolved", RESOLVE_REFS,
@@ -315,14 +329,15 @@ R"(
- &a1 a1: v1
&a2 a2: v2
&a3 a3: v3
- *a1: w1
*a2: w2
*a3: w3
- *a1 : w1
*a2 : w2
*a3 : w3
)",
L{
N(L{N("a1", "v1"), N("a2", "v2"), N("a3", "v3")}),
N(L{N("a1", "w1"), N("a2", "w2"), N("a3", "w3")}),
});
N(SB, L{
N(MB, L{N(KP|VP, "a1", "v1"), N(KP|VP, "a2", "v2"), N(KP|VP, "a3", "v3")}),
N(MB, L{N(KP|VP, "a1", "w1"), N(KP|VP, "a2", "w2"), N(KP|VP, "a3", "w3")}),
})
);
ADD_CASE_TO_GROUP("seq of maps, implicit with tags",
@@ -337,11 +352,13 @@ R"(
!foo a2: v2
!foo a3: v3
)",
L{
N(L{N(TS("!!str", "a1"), "v1"), N(TS("!!str", "a2"), "v2"), N(TS("!!str", "a3"), "v3")}),
N(L{N("a1", TS("!!str", "w1")), N("a2", TS("!!str", "w2")), N("a3", TS("!!str", "w3"))}),
N(L{N(TS("!foo", "a1"), "v1"), N(TS("!foo", "a2"), "v2"), N(TS("!foo", "a3"), "v3")}),
});
N(SB, L{
N(MB, L{N(KP|VP, TS("!!str", "a1"), "v1"), N(KP|VP, TS("!!str", "a2"), "v2"), N(KP|VP, TS("!!str", "a3"), "v3")}),
N(MB, L{N(KP|VP, "a1", TS("!!str", "w1")), N(KP|VP, "a2", TS("!!str", "w2")), N(KP|VP, "a3", TS("!!str", "w3"))}),
N(MB, L{N(KP|VP, TS("!foo", "a1"), "v1"), N(KP|VP, TS("!foo", "a2"), "v2"), N(KP|VP, TS("!foo", "a3"), "v3")}),
})
);
}
} // namespace yml

View File

@@ -7,7 +7,7 @@
#include <c4/yml/detail/print.hpp>
#endif
#include "./test_case.hpp"
#include "./test_lib/test_case.hpp"
#include <gtest/gtest.h>
@@ -203,9 +203,23 @@ TEST(serialize, bool)
EXPECT_EQ(w, false);
}
TEST(serialize, nan)
TEST(serialize, nan_0)
{
Tree t = parse_in_arena(R"(
Tree t;
t.rootref() |= SEQ;
t[0] << std::numeric_limits<float>::quiet_NaN();
t[1] << std::numeric_limits<double>::quiet_NaN();
EXPECT_EQ(t[0].val(), ".nan");
EXPECT_EQ(t[1].val(), ".nan");
EXPECT_EQ(emitrs_yaml<std::string>(t),
R"(- .nan
- .nan
)");
}
TEST(serialize, nan_1)
{
csubstr yaml = R"(
good:
- .nan
- .nan
@@ -217,61 +231,82 @@ good:
set:
- nothing
- nothing
})");
t["set"][0] << std::numeric_limits<float>::quiet_NaN();
t["set"][1] << std::numeric_limits<double>::quiet_NaN();
EXPECT_EQ(t["set"][0].val(), ".nan");
EXPECT_EQ(t["set"][1].val(), ".nan");
EXPECT_EQ(t["good"][0].val(), ".nan");
EXPECT_EQ(t["good"][1].val(), ".nan");
EXPECT_EQ(t["good"][2].val(), ".NaN");
EXPECT_EQ(t["good"][3].val(), ".NAN");
EXPECT_EQ(t["good"][4].val(), "nan");
EXPECT_EQ(t["good"][5].val(), ".nan");
float f;
double d;
f = 0.f;
d = 0.;
t["good"][0] >> f;
t["good"][0] >> d;
EXPECT_TRUE(std::isnan(f));
EXPECT_TRUE(std::isnan(d));
f = 0.f;
d = 0.;
t["good"][1] >> f;
t["good"][1] >> d;
EXPECT_TRUE(std::isnan(f));
EXPECT_TRUE(std::isnan(d));
f = 0.f;
d = 0.;
t["good"][2] >> f;
t["good"][2] >> d;
EXPECT_TRUE(std::isnan(f));
EXPECT_TRUE(std::isnan(d));
f = 0.f;
d = 0.;
t["good"][3] >> f;
t["good"][3] >> d;
EXPECT_TRUE(std::isnan(f));
EXPECT_TRUE(std::isnan(d));
f = 0.f;
d = 0.;
t["good"][4] >> f;
t["good"][4] >> d;
EXPECT_TRUE(std::isnan(f));
EXPECT_TRUE(std::isnan(d));
f = 0.f;
d = 0.;
t["good"][5] >> f;
t["good"][5] >> d;
EXPECT_TRUE(std::isnan(f));
EXPECT_TRUE(std::isnan(d));
)";
test_check_emit_check(yaml, [](Tree const& t){
EXPECT_EQ(t["good"][0].val(), ".nan");
EXPECT_EQ(t["good"][1].val(), ".nan");
EXPECT_EQ(t["good"][2].val(), ".NaN");
EXPECT_EQ(t["good"][3].val(), ".NAN");
EXPECT_EQ(t["good"][4].val(), "nan");
EXPECT_EQ(t["good"][5].val(), ".nan");
float f;
double d;
f = 0.f;
d = 0.;
t["good"][0] >> f;
t["good"][0] >> d;
EXPECT_TRUE(std::isnan(f));
EXPECT_TRUE(std::isnan(d));
f = 0.f;
d = 0.;
t["good"][1] >> f;
t["good"][1] >> d;
EXPECT_TRUE(std::isnan(f));
EXPECT_TRUE(std::isnan(d));
f = 0.f;
d = 0.;
t["good"][2] >> f;
t["good"][2] >> d;
EXPECT_TRUE(std::isnan(f));
EXPECT_TRUE(std::isnan(d));
f = 0.f;
d = 0.;
t["good"][3] >> f;
t["good"][3] >> d;
EXPECT_TRUE(std::isnan(f));
EXPECT_TRUE(std::isnan(d));
f = 0.f;
d = 0.;
t["good"][4] >> f;
t["good"][4] >> d;
EXPECT_TRUE(std::isnan(f));
EXPECT_TRUE(std::isnan(d));
f = 0.f;
d = 0.;
t["good"][5] >> f;
t["good"][5] >> d;
EXPECT_TRUE(std::isnan(f));
EXPECT_TRUE(std::isnan(d));
});
}
TEST(serialize, inf)
TEST(serialize, inf_0)
{
Tree t;
t.rootref() |= SEQ;
const float finf = std::numeric_limits<float>::infinity();
const double dinf = std::numeric_limits<double>::infinity();
t[0] << finf;
t[1] << dinf;
t[2] << -finf;
t[3] << -dinf;
EXPECT_EQ(t[0].val(), ".inf");
EXPECT_EQ(t[1].val(), ".inf");
EXPECT_EQ(t[2].val(), "-.inf");
EXPECT_EQ(t[3].val(), "-.inf");
EXPECT_EQ(scalar_style_choose("-.inf"), SCALAR_PLAIN);
EXPECT_EQ(emitrs_yaml<std::string>(t),
R"(- .inf
- .inf
- -.inf
- -.inf
)");
}
TEST(serialize, inf_1)
{
C4_SUPPRESS_WARNING_GCC_CLANG_WITH_PUSH("-Wfloat-equal");
Tree t = parse_in_arena(R"(
csubstr yaml = R"(
good:
- .inf
- .inf
@@ -284,66 +319,69 @@ good:
set:
- nothing
- nothing
})");
float finf = std::numeric_limits<float>::infinity();
double dinf = std::numeric_limits<double>::infinity();
t["set"][0] << finf;
t["set"][1] << dinf;
EXPECT_EQ(t["set"][0].val(), ".inf");
EXPECT_EQ(t["set"][1].val(), ".inf");
EXPECT_EQ(t["good"][0].val(), ".inf");
EXPECT_EQ(t["good"][1].val(), ".inf");
EXPECT_EQ(t["good"][2].val(), ".Inf");
EXPECT_EQ(t["good"][3].val(), ".INF");
EXPECT_EQ(t["good"][4].val(), "inf");
EXPECT_EQ(t["good"][5].val(), "infinity");
EXPECT_EQ(t["good"][6].val(), ".inf");
float f;
double d;
f = 0.f;
d = 0.;
t["good"][0] >> f;
t["good"][0] >> d;
EXPECT_TRUE(f == finf);
EXPECT_TRUE(d == dinf);
f = 0.f;
d = 0.;
t["good"][1] >> f;
t["good"][1] >> d;
EXPECT_TRUE(f == finf);
EXPECT_TRUE(d == dinf);
f = 0.f;
d = 0.;
t["good"][2] >> f;
t["good"][2] >> d;
EXPECT_TRUE(f == finf);
EXPECT_TRUE(d == dinf);
f = 0.f;
d = 0.;
t["good"][3] >> f;
t["good"][3] >> d;
EXPECT_TRUE(f == finf);
EXPECT_TRUE(d == dinf);
f = 0.f;
d = 0.;
t["good"][4] >> f;
t["good"][4] >> d;
EXPECT_TRUE(f == finf);
EXPECT_TRUE(d == dinf);
f = 0.f;
d = 0.;
t["good"][5] >> f;
t["good"][5] >> d;
EXPECT_TRUE(f == finf);
EXPECT_TRUE(d == dinf);
f = 0.f;
d = 0.;
t["good"][6] >> f;
t["good"][6] >> d;
EXPECT_TRUE(f == finf);
EXPECT_TRUE(d == dinf);
)";
test_check_emit_check(yaml, [](Tree const& t){
float finf = std::numeric_limits<float>::infinity();
double dinf = std::numeric_limits<double>::infinity();
EXPECT_EQ(t["good"][0].val(), ".inf");
EXPECT_EQ(t["good"][1].val(), ".inf");
EXPECT_EQ(t["good"][2].val(), ".Inf");
EXPECT_EQ(t["good"][3].val(), ".INF");
EXPECT_EQ(t["good"][4].val(), "inf");
EXPECT_EQ(t["good"][5].val(), "infinity");
EXPECT_EQ(t["good"][6].val(), ".inf");
float f;
double d;
f = 0.f;
d = 0.;
t["good"][0] >> f;
t["good"][0] >> d;
EXPECT_TRUE(f == finf);
EXPECT_TRUE(d == dinf);
f = 0.f;
d = 0.;
t["good"][1] >> f;
t["good"][1] >> d;
EXPECT_TRUE(f == finf);
EXPECT_TRUE(d == dinf);
f = 0.f;
d = 0.;
t["good"][2] >> f;
t["good"][2] >> d;
EXPECT_TRUE(f == finf);
EXPECT_TRUE(d == dinf);
f = 0.f;
d = 0.;
t["good"][3] >> f;
t["good"][3] >> d;
EXPECT_TRUE(f == finf);
EXPECT_TRUE(d == dinf);
f = 0.f;
d = 0.;
t["good"][4] >> f;
t["good"][4] >> d;
EXPECT_TRUE(f == finf);
EXPECT_TRUE(d == dinf);
f = 0.f;
d = 0.;
t["good"][5] >> f;
t["good"][5] >> d;
EXPECT_TRUE(f == finf);
EXPECT_TRUE(d == dinf);
f = 0.f;
d = 0.;
t["good"][6] >> f;
t["good"][6] >> d;
EXPECT_TRUE(f == finf);
EXPECT_TRUE(d == dinf);
});
C4_SUPPRESS_WARNING_GCC_CLANG_POP
}
t = parse_in_arena(R"(
TEST(serialize, inf_2)
{
C4_SUPPRESS_WARNING_GCC_CLANG_WITH_PUSH("-Wfloat-equal");
csubstr yaml = R"(
good:
- -.inf
- -.inf
@@ -356,60 +394,62 @@ good:
set:
- nothing
- nothing
})");
t["set"][0] << -finf;
t["set"][1] << -dinf;
EXPECT_EQ(t["set"][0].val(), "-.inf");
EXPECT_EQ(t["set"][1].val(), "-.inf");
EXPECT_EQ(t["good"][0].val(), "-.inf");
EXPECT_EQ(t["good"][1].val(), "-.inf");
EXPECT_EQ(t["good"][2].val(), "-.Inf");
EXPECT_EQ(t["good"][3].val(), "-.INF");
EXPECT_EQ(t["good"][4].val(), "-inf");
EXPECT_EQ(t["good"][5].val(), "-infinity");
EXPECT_EQ(t["good"][6].val(), "-.inf");
f = 0.f;
d = 0.;
t["good"][0] >> f;
t["good"][0] >> d;
EXPECT_TRUE(f == -finf);
EXPECT_TRUE(d == -dinf);
f = 0.f;
d = 0.;
t["good"][1] >> f;
t["good"][1] >> d;
EXPECT_TRUE(f == -finf);
EXPECT_TRUE(d == -dinf);
f = 0.f;
d = 0.;
t["good"][2] >> f;
t["good"][2] >> d;
EXPECT_TRUE(f == -finf);
EXPECT_TRUE(d == -dinf);
f = 0.f;
d = 0.;
t["good"][3] >> f;
t["good"][3] >> d;
EXPECT_TRUE(f == -finf);
EXPECT_TRUE(d == -dinf);
f = 0.f;
d = 0.;
t["good"][4] >> f;
t["good"][4] >> d;
EXPECT_TRUE(f == -finf);
EXPECT_TRUE(d == -dinf);
f = 0.f;
d = 0.;
t["good"][5] >> f;
t["good"][5] >> d;
EXPECT_TRUE(f == -finf);
EXPECT_TRUE(d == -dinf);
f = 0.f;
d = 0.;
t["good"][6] >> f;
t["good"][6] >> d;
EXPECT_TRUE(f == -finf);
EXPECT_TRUE(d == -dinf);
)";
test_check_emit_check(yaml, [](Tree const& t){
float finf = std::numeric_limits<float>::infinity();
double dinf = std::numeric_limits<double>::infinity();
EXPECT_EQ(t["good"][0].val(), "-.inf");
EXPECT_EQ(t["good"][1].val(), "-.inf");
EXPECT_EQ(t["good"][2].val(), "-.Inf");
EXPECT_EQ(t["good"][3].val(), "-.INF");
EXPECT_EQ(t["good"][4].val(), "-inf");
EXPECT_EQ(t["good"][5].val(), "-infinity");
EXPECT_EQ(t["good"][6].val(), "-.inf");
float f;
double d;
f = 0.f;
d = 0.;
t["good"][0] >> f;
t["good"][0] >> d;
EXPECT_TRUE(f == -finf);
EXPECT_TRUE(d == -dinf);
f = 0.f;
d = 0.;
t["good"][1] >> f;
t["good"][1] >> d;
EXPECT_TRUE(f == -finf);
EXPECT_TRUE(d == -dinf);
f = 0.f;
d = 0.;
t["good"][2] >> f;
t["good"][2] >> d;
EXPECT_TRUE(f == -finf);
EXPECT_TRUE(d == -dinf);
f = 0.f;
d = 0.;
t["good"][3] >> f;
t["good"][3] >> d;
EXPECT_TRUE(f == -finf);
EXPECT_TRUE(d == -dinf);
f = 0.f;
d = 0.;
t["good"][4] >> f;
t["good"][4] >> d;
EXPECT_TRUE(f == -finf);
EXPECT_TRUE(d == -dinf);
f = 0.f;
d = 0.;
t["good"][5] >> f;
t["good"][5] >> d;
EXPECT_TRUE(f == -finf);
EXPECT_TRUE(d == -dinf);
f = 0.f;
d = 0.;
t["good"][6] >> f;
t["good"][6] >> d;
EXPECT_TRUE(f == -finf);
EXPECT_TRUE(d == -dinf);
});
C4_SUPPRESS_WARNING_GCC_CLANG_POP
}
@@ -455,28 +495,26 @@ reference_list:
)";
Tree tree;
auto root_id = tree.root_id();
const size_t root_id = tree.root_id();
tree.to_map(root_id);
auto anchor_list_id = tree.append_child(root_id);
const size_t anchor_list_id = tree.append_child(root_id);
tree.to_seq(anchor_list_id, "anchor_objects");
auto anchor_map0 = tree.append_child(anchor_list_id);
const size_t anchor_map0 = tree.append_child(anchor_list_id);
tree.to_map(anchor_map0);
tree.set_val_anchor(anchor_map0, "id001");
auto anchor_elem0 = tree.append_child(anchor_map0);
const size_t anchor_elem0 = tree.append_child(anchor_map0);
tree.to_keyval(anchor_elem0, "name", "a_name");
auto ref_list_id = tree.append_child(root_id);
const size_t ref_list_id = tree.append_child(root_id);
tree.to_seq(ref_list_id, "reference_list");
auto elem0_id = tree.append_child(ref_list_id);
const size_t elem0_id = tree.append_child(ref_list_id);
tree.set_val_ref(elem0_id, "id001");
std::string cmpbuf;
emitrs_yaml(tree, &cmpbuf);
EXPECT_EQ(cmpbuf, expected_yaml);
EXPECT_EQ(emitrs_yaml<std::string>(tree), expected_yaml);
}

File diff suppressed because it is too large

View File

@@ -1,695 +0,0 @@
#include "./test_group.hpp"
namespace c4 {
namespace yml {
TEST(simple_seq, bad_seq1)
{
Tree tree;
ExpectError::do_check(&tree, [&]{
parse_in_arena(R"(
---
[ a, b, c ] ]
)", &tree);
});
}
TEST(simple_seq, bad_seq2)
{
Tree tree;
ExpectError::do_check(&tree, [&]{
parse_in_arena(R"(
---
[ [ a, b, c ]
)", &tree);
});
}
TEST(simple_seq, two_nested_flow_seqs)
{
Tree tree = parse_in_arena("[[]]");
EXPECT_TRUE(tree.rootref().is_seq());
ASSERT_TRUE(tree.rootref().has_children());
EXPECT_TRUE(tree.rootref().first_child().is_seq());
ASSERT_FALSE(tree.rootref().first_child().has_children());
}
TEST(simple_seq, many_unmatched_brackets)
{
std::string src;
src.reserve(10000000u);
for(size_t num_brackets : {4u, 8u, 32u})
{
SCOPED_TRACE(num_brackets);
for(size_t i = src.size(); i < num_brackets; ++i)
src += '[';
Tree tree;
Location loc = {};
loc.line = 1;
loc.col = num_brackets + 1u;
ExpectError::do_check(&tree, [&]{
parse_in_place(to_substr(src), &tree);
}, loc);
}
}
TEST(simple_seq, missing_quoted_key)
{
csubstr yaml = R"(
"top1" :
["0", "1", ]
'top2' :
["0", "1", ]
---
"top1" :
- "0"
- "1"
'top2' :
- "0"
- "1"
)";
test_check_emit_check(yaml, [](Tree const &t){
size_t doc = 0;
EXPECT_TRUE(t.docref(doc)["top1"].is_key_quoted());
EXPECT_TRUE(t.docref(doc)["top2"].is_key_quoted());
EXPECT_TRUE(t.docref(doc)["top1"][0].is_val_quoted());
EXPECT_TRUE(t.docref(doc)["top1"][1].is_val_quoted());
EXPECT_TRUE(t.docref(doc)["top2"][0].is_val_quoted());
EXPECT_TRUE(t.docref(doc)["top2"][1].is_val_quoted());
++doc;
EXPECT_TRUE(t.docref(doc)["top1"].is_key_quoted());
EXPECT_TRUE(t.docref(doc)["top2"].is_key_quoted());
EXPECT_TRUE(t.docref(doc)["top1"][0].is_val_quoted());
EXPECT_TRUE(t.docref(doc)["top1"][1].is_val_quoted());
EXPECT_TRUE(t.docref(doc)["top2"][0].is_val_quoted());
EXPECT_TRUE(t.docref(doc)["top2"][1].is_val_quoted());
});
}
TEST(simple_seq, deeply_nested_to_cover_parse_stack_resizes)
{
csubstr yaml = R"(
[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[[0, 1, 2, 3, 4, 5, 6, 7]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]]
)";
Tree t = parse_in_arena(yaml);
size_t id = t.root_id();
while(t.has_children(id))
id = t.first_child(id);
ASSERT_TRUE(t.ref(id).has_parent());
NodeRef seq = t.ref(id).parent();
ASSERT_TRUE(seq.is_seq());
EXPECT_EQ(seq[0].val(), csubstr("0"));
EXPECT_EQ(seq[1].val(), csubstr("1"));
EXPECT_EQ(seq[2].val(), csubstr("2"));
EXPECT_EQ(seq[3].val(), csubstr("3"));
EXPECT_EQ(seq[4].val(), csubstr("4"));
EXPECT_EQ(seq[5].val(), csubstr("5"));
EXPECT_EQ(seq[6].val(), csubstr("6"));
EXPECT_EQ(seq[7].val(), csubstr("7"));
}
#ifdef RYML_WITH_TAB_TOKENS
TEST(simple_seq, block_tab_tokens)
{
Tree tree = parse_in_arena(R"(
--- # block, spaces only
- 0
- 1
- 2
--- # block, tabs after
- 0
- 1
- 2
--- # block, tabs after token, and after val
- 0
- 1
- 2
)");
EXPECT_EQ(tree.docref(0)[0].val(), csubstr("0"));
EXPECT_EQ(tree.docref(0)[1].val(), csubstr("1"));
EXPECT_EQ(tree.docref(0)[2].val(), csubstr("2"));
EXPECT_EQ(tree.docref(1)[0].val(), csubstr("0"));
EXPECT_EQ(tree.docref(1)[1].val(), csubstr("1"));
EXPECT_EQ(tree.docref(1)[2].val(), csubstr("2"));
}
TEST(simple_seq, flow_tab_tokens)
{
Tree tree = parse_in_arena(R"(
--- # flow, no tabs
[0, 1, 2]
--- # flow, tabs after
[0, 1, 2]
--- # flow, tabs before and after
[0 , 1 , 2]
--- # flow, tabs everywhere
[
0 ,
1 ,
2 ,
]
)");
EXPECT_EQ(tree.docref(0)[0].val(), csubstr("0"));
EXPECT_EQ(tree.docref(0)[1].val(), csubstr("1"));
EXPECT_EQ(tree.docref(0)[2].val(), csubstr("2"));
EXPECT_EQ(tree.docref(1)[0].val(), csubstr("0"));
EXPECT_EQ(tree.docref(1)[1].val(), csubstr("1"));
EXPECT_EQ(tree.docref(1)[2].val(), csubstr("2"));
EXPECT_EQ(tree.docref(2)[0].val(), csubstr("0"));
EXPECT_EQ(tree.docref(2)[1].val(), csubstr("1"));
EXPECT_EQ(tree.docref(2)[2].val(), csubstr("2"));
EXPECT_EQ(tree.docref(3)[0].val(), csubstr("0"));
EXPECT_EQ(tree.docref(3)[1].val(), csubstr("1"));
EXPECT_EQ(tree.docref(3)[2].val(), csubstr("2"));
}
#endif // RYML_WITH_TAB_TOKENS
//-----------------------------------------------------------------------------
//-----------------------------------------------------------------------------
//-----------------------------------------------------------------------------
CASE_GROUP(SIMPLE_SEQ)
{
ADD_CASE_TO_GROUP("simple seq",
R"(- 0
- 1
- 2
- 3
)",
L{N{"0"}, N{"1"}, N{"2"}, N{"3"}}
);
ADD_CASE_TO_GROUP("simple seq, explicit, single line",
"[0, 1, 2, 3]",
L{N{"0"}, N{"1"}, N{"2"}, N{"3"}}
);
ADD_CASE_TO_GROUP("simple seq, explicit, single line, trailcomma",
"[0, 1, 2, 3,]",
L{N{"0"}, N{"1"}, N{"2"}, N{"3"},}
);
ADD_CASE_TO_GROUP("simple seq, explicit, multiline, unindented",
R"([
0,
1,
2,
3
])",
L{N{"0"}, N{"1"}, N{"2"}, N{"3"}}
);
ADD_CASE_TO_GROUP("simple seq, explicit, multiline, unindented, trailcomma",
R"([
0,
1,
2,
3,
])",
L{N{"0"}, N{"1"}, N{"2"}, N{"3"}}
);
ADD_CASE_TO_GROUP("simple seq, explicit, multiline, comments inline",
R"([
0, # bla0
1, # bla1
2, # bla2
3 # bla3
])",
L{N{"0"}, N{"1"}, N{"2"}, N{"3"}}
);
ADD_CASE_TO_GROUP("simple seq, explicit, multiline, comments prev line",
R"([
# bla0
0,
# bla1
1,
# bla2
2,
# bla3
3
])",
L{N{"0"}, N{"1"}, N{"2"}, N{"3"}}
);
ADD_CASE_TO_GROUP("simple seq, explicit, multiline, indented",
R"([
0,
1,
2,
3
])",
L{N{"0"}, N{"1"}, N{"2"}, N{"3"}}
);
ADD_CASE_TO_GROUP("simple seq, comments inline",
R"(
- 0 # this is a foo
- 1 # this is a bar
- 2 # this is a bar
- 3 # this is a bar
)",
L{N{"0"}, N{"1"}, N{"2"}, N{"3"}}
);
ADD_CASE_TO_GROUP("simple seq, comments prev line",
R"(
# this is a foo
- 0
# this is a bar
- 1
# this is a baz
- 2
# this is a bat
- 3
)",
L{N{"0"}, N{"1"}, N{"2"}, N{"3"}}
);
ADD_CASE_TO_GROUP("simple seq, scalars with special chars, comma",
R"(
- a,b
- c,d
- e,f
- a, b
- c, d
- e, f
- a , b
- c , d
- e , f
- a ,b
- c ,d
- e ,f
)",
L{N{"a,b"}, N{"c,d"}, N{"e,f"},
N{"a, b"}, N{"c, d"}, N{"e, f"},
N{"a , b"}, N{"c , d"}, N{"e , f"},
N{"a ,b"}, N{"c ,d"}, N{"e ,f"},
}
);
ADD_CASE_TO_GROUP("simple seq, scalars with special chars, colon",
R"(
- a:b
- "c:d"
- 'e:f'
- a :b
- "c :d"
- 'e :f'
- a : b # THIS IS A KEY-VAL!!!
- "c : d"
- 'e : f'
- a: b # THIS IS A KEY-VAL!!!
- "c: d"
- 'e: f'
)",
L{
N("a:b"), N(QV, "c:d"), N(QV, "e:f"),
N("a :b"), N(QV, "c :d"), N(QV, "e :f"),
N(L{N("a", "b")}), N(QV, "c : d"), N(QV, "e : f"),
N(L{N("a", "b")}), N(QV, "c: d"), N(QV, "e: f"),
}
);
ADD_CASE_TO_GROUP("simple seq, scalars with special chars, cardinal",
R"(
- a#b
- "a#b"
- 'a#b'
- a# b
- "a# b"
- 'a# b'
- a # b
- "a # b"
- 'a # b'
- a #b
- "a #b"
- 'a #b'
)",
L{
N{"a#b"}, N{QV, "a#b"}, N{QV, "a#b"},
N{"a# b"}, N{QV, "a# b"}, N{QV, "a# b"},
N{"a"}, N{QV, "a # b"}, N{QV, "a # b"},
N{"a"}, N{QV, "a #b"}, N{QV, "a #b"},
}
);
ADD_CASE_TO_GROUP("simple seq, scalars with special chars, dash",
R"(
- a-b
- "a-b"
- 'a-b'
- a- b
- "a- b"
- 'a- b'
- a - b
- "a - b"
- 'a - b'
- a -b
- "a -b"
- 'a -b'
)",
L{
N{"a-b"}, N{QV, "a-b"}, N{QV, "a-b"},
N{"a- b"}, N{QV, "a- b"}, N{QV, "a- b"},
N{"a - b"}, N{QV, "a - b"}, N{QV, "a - b"},
N{"a -b"}, N{QV, "a -b"}, N{QV, "a -b"},
}
);
ADD_CASE_TO_GROUP("simple seq, scalars with special chars, left-curly",
R"(
- a{b
- "a{b"
- 'a{b'
- a{ b
- "a{ b"
- 'a{ b'
- a { b
- "a { b"
- 'a { b'
- a {b
- "a {b"
- 'a {b'
)",
L{
N{"a{b"}, N{QV, "a{b"}, N{QV, "a{b"},
N{"a{ b"}, N{QV, "a{ b"}, N{QV, "a{ b"},
N{"a { b"}, N{QV, "a { b"}, N{QV, "a { b"},
N{"a {b"}, N{QV, "a {b"}, N{QV, "a {b"},
}
);
ADD_CASE_TO_GROUP("simple seq, scalars with special chars, right-curly",
R"(
- a}b
- "a}b"
- 'a}b'
- a} b
- "a} b"
- 'a} b'
- a } b
- "a } b"
- 'a } b'
- a }b
- "a }b"
- 'a }b'
)",
L{
N{"a}b"}, N{QV, "a}b"}, N{QV, "a}b"},
N{"a} b"}, N{QV, "a} b"}, N{QV, "a} b"},
N{"a } b"}, N{QV, "a } b"}, N{QV, "a } b"},
N{"a }b"}, N{QV, "a }b"}, N{QV, "a }b"},
}
);
ADD_CASE_TO_GROUP("simple seq, scalars with special chars, left-bracket",
R"(
- a[b
- "a[b"
- 'a[b'
- a[ b
- "a[ b"
- 'a[ b'
- a [ b
- "a [ b"
- 'a [ b'
- a [b
- "a [b"
- 'a [b'
)",
L{
N{"a[b"}, N{QV, "a[b"}, N{QV, "a[b"},
N{"a[ b"}, N{QV, "a[ b"}, N{QV, "a[ b"},
N{"a [ b"}, N{QV, "a [ b"}, N{QV, "a [ b"},
N{"a [b"}, N{QV, "a [b"}, N{QV, "a [b"},
}
);
ADD_CASE_TO_GROUP("simple seq, scalars with special chars, right-bracket",
R"(
- a]b
- "a]b"
- 'a]b'
- a] b
- "a] b"
- 'a] b'
- a ] b
- "a ] b"
- 'a ] b'
- a ]b
- "a ]b"
- 'a ]b'
)",
L{
N{"a]b"}, N{QV, "a]b"}, N{QV, "a]b"},
N{"a] b"}, N{QV, "a] b"}, N{QV, "a] b"},
N{"a ] b"}, N{QV, "a ] b"}, N{QV, "a ] b"},
N{"a ]b"}, N{QV, "a ]b"}, N{QV, "a ]b"},
}
);
ADD_CASE_TO_GROUP("simple seq expl, scalars with special chars, comma",
R"([
a,b, "c,d", 'e,f',
a, b, "c, d", 'e, f',
a , b, "c , d", 'e , f',
a ,b, "c ,d", 'e ,f',
])",
L{
N{"a"}, N("b"), N(QV, "c,d"), N(QV, "e,f"),
N{"a"}, N("b"), N(QV, "c, d"), N(QV, "e, f"),
N{"a"}, N("b"), N(QV, "c , d"), N(QV, "e , f"),
N{"a"}, N("b"), N(QV, "c ,d"), N(QV, "e ,f"),
}
);
#ifdef RYML_WITH_TAB_TOKENS
#define _ryml_with_or_without_tabs(with, without) with
#else
#define _ryml_with_or_without_tabs(with, without) without
#endif
ADD_CASE_TO_GROUP("simple seq expl, scalars with special chars, colon",
R"(
- [[], :@]
- [[], :%]
- [[], :^]
- [[], :$]
#- [[], ::]
- [[], : ]
- [[], :`]
)",
L{
N(L{N(SEQ), N(":@")}),
N(L{N(SEQ), N(":%")}),
N(L{N(SEQ), N(":^")}),
N(L{N(SEQ), N(":$")}),
//N(L{N(SEQ), N("::")}), TODO: yaml playground
N(L{N(SEQ), _ryml_with_or_without_tabs(N(MAP, L{N("", "")}), N(": "))}),
N(L{N(SEQ), N(":`")}),
}
);
ADD_CASE_TO_GROUP("simple seq expl, scalars with special chars, colon 2",
R"([
# a:b, # not legal
"c:d", 'e:f',
# a: b, # not legal
"c: d", 'e: f',
# a : b, # not legal
"c : d", 'e : f',
# a :b, # not legal
"c :d", 'e :f',
])",
L{/*...not legal...*/
/*N{"a"}, N("b"),*/ N(QV, "c:d"), N(QV, "e:f"),
/*N{"a"}, N("b"),*/ N(QV, "c: d"), N(QV, "e: f"),
/*N{"a"}, N("b"),*/ N(QV, "c : d"), N(QV, "e : f"),
/*N{"a"}, N("b"),*/ N(QV, "c :d"), N(QV, "e :f"),
}
);
ADD_CASE_TO_GROUP("simple seq expl, scalars with special chars, cardinal",
R"([
a#b, "c#d", 'e#f',
a# b, "c# d", 'e# f',
a # b, "c # d", 'e # f',
, # this is needed because of the comment above
a #b, "c #d", 'e #f',
])",
L{
N{"a#b"}, N(QV, "c#d"), N(QV, "e#f"),
N{"a# b"}, N(QV, "c# d"), N(QV, "e# f"),
N{"a"},
N{"a"},
}
);
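// (In the third and fifth flow entries above, the ' #' opens a comment that
// swallows the rest of the line, so only the leading "a" survives; the lone
// ',' line is what terminates that dangling entry.)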
ADD_CASE_TO_GROUP("simple seq expl, scalars with special chars, dash",
R"([
a-b, "c-d", 'e-f',
a- b, "c- d", 'e- f',
a - b, "c - d", 'e - f',
a -b, "c -d", 'e -f',
])",
L{
N{"a-b"}, N(QV, "c-d"), N(QV, "e-f"),
N{"a- b"}, N(QV, "c- d"), N(QV, "e- f"),
N{"a - b"}, N(QV, "c - d"), N(QV, "e - f"),
N{"a -b"}, N(QV, "c -d"), N(QV, "e -f"),
}
);
ADD_CASE_TO_GROUP("simple seq expl, scalars with special chars, left-bracket",
R"([
# a[b,
"c[d", 'e[f',
# a[ b,
"c[ d", 'e[ f',
# a [ b,
"c [ d", 'e [ f',
# a [b,
"c [d", 'e [f',
])",
L{
/*N{"a[b"}, */ N(QV, "c[d"), N(QV, "e[f"),
/*N{"a[ b"}, */ N(QV, "c[ d"), N(QV, "e[ f"),
/*N{"a [ b"},*/ N(QV, "c [ d"), N(QV, "e [ f"),
/*N{"a [b"}, */ N(QV, "c [d"), N(QV, "e [f"),
}
);
ADD_CASE_TO_GROUP("simple seq expl, scalars with special chars, right-bracket",
R"([
# a]b,
"c]d", 'e]f',
# a] b,
"c] d", 'e] f',
# a ] b,
"c ] d", 'e ] f',
# a ]b,
"c ]d", 'e ]f',
])",
L{
/*N{"a]b"}, */ N(QV, "c]d"), N(QV, "e]f"),
/*N{"a] b"}, */ N(QV, "c] d"), N(QV, "e] f"),
/*N{"a ] b"},*/ N(QV, "c ] d"), N(QV, "e ] f"),
/*N{"a ]b"}, */ N(QV, "c ]d"), N(QV, "e ]f"),
}
);
ADD_CASE_TO_GROUP("simple seq expl, scalars with special chars, left-curly",
R"([
# a{b,
"c{d", 'e{f',
# a{ b,
"c{ d", 'e{ f',
# a { b,
"c { d", 'e { f',
# a {b,
"c {d", 'e {f',
])",
L{
/*N{"a{b"}, */ N(QV, "c{d"), N(QV, "e{f"),
/*N{"a{ b"}, */ N(QV, "c{ d"), N(QV, "e{ f"),
/*N{"a { b"},*/ N(QV, "c { d"), N(QV, "e { f"),
/*N{"a {b"}, */ N(QV, "c {d"), N(QV, "e {f"),
}
);
ADD_CASE_TO_GROUP("simple seq expl, scalars with special chars, right-curly",
R"([
# a}b,
"c}d", 'e}f',
# a} b,
"c} d", 'e} f',
# a } b,
"c } d", 'e } f',
# a }b,
"c }d", 'e }f',
])",
L{
/*N{"a}b"}, */ N(QV, "c}d"), N(QV, "e}f"),
/*N{"a} b"}, */ N(QV, "c} d"), N(QV, "e} f"),
/*N{"a } b"},*/ N(QV, "c } d"), N(QV, "e } f"),
/*N{"a }b"}, */ N(QV, "c }d"), N(QV, "e }f"),
}
);
ADD_CASE_TO_GROUP("simple seq, issue 28",
R"(# was failing on https://github.com/biojppm/rapidyaml/issues/28
enemy:
- actors:
- {name: Enemy_Bokoblin_Junior, value: 4.0}
- {name: Enemy_Bokoblin_Middle, value: 16.0}
- {name: Enemy_Bokoblin_Senior, value: 32.0}
- {name: Enemy_Bokoblin_Dark, value: 48.0}
species: BokoblinSeries
enemy2:
- actors:
- {name: Enemy_Bokoblin_Junior, value: 4.0}
- {name: Enemy_Bokoblin_Middle, value: 16.0}
- {name: Enemy_Bokoblin_Senior, value: 32.0}
- {name: Enemy_Bokoblin_Dark, value: 48.0}
species: BokoblinSeries
)",
L{
N("enemy", L{N(L{
N("actors", L{
N(L{N("name", "Enemy_Bokoblin_Junior"), N("value", "4.0"),}),
N(L{N("name", "Enemy_Bokoblin_Middle"), N("value", "16.0"),}),
N(L{N("name", "Enemy_Bokoblin_Senior"), N("value", "32.0"),}),
N(L{N("name", "Enemy_Bokoblin_Dark"), N("value", "48.0"),}),
}),
N("species", "BokoblinSeries"),
})
}),
N("enemy2", L{N(L{
N("actors", L{
N(L{N("name", "Enemy_Bokoblin_Junior"), N("value", "4.0"),}),
N(L{N("name", "Enemy_Bokoblin_Middle"), N("value", "16.0"),}),
N(L{N("name", "Enemy_Bokoblin_Senior"), N("value", "32.0"),}),
N(L{N("name", "Enemy_Bokoblin_Dark"), N("value", "48.0"),}),
}),
N("species", "BokoblinSeries"),
})
}),
});
ADD_CASE_TO_GROUP("simple seq, invalid character 1", EXPECT_PARSE_ERROR,
R"(- 0 # this is a foo
}
)",
LineCol(2, 1)
);
ADD_CASE_TO_GROUP("simple seq, invalid character 2", EXPECT_PARSE_ERROR,
R"(- 0 # this is a foo
]
)",
LineCol(2, 1)
);
ADD_CASE_TO_GROUP("simple seq, invalid character 3", EXPECT_PARSE_ERROR,
R"(- 0 # this is a foo
:
)",
LineCol(2, 1)
);
ADD_CASE_TO_GROUP("simple seq, invalid character 4", EXPECT_PARSE_ERROR,
R"(- 0 # this is a foo
abcdef!
)",
LineCol(2, 1)
);
}
} // namespace yml
} // namespace c4

View File

@@ -4,7 +4,7 @@
#include "c4/yml/detail/stack.hpp"
#endif
#include <gtest/gtest.h>
#include "./callbacks_tester.hpp"
#include "./test_lib/callbacks_tester.hpp"
//-------------------------------------------

View File

@@ -7,7 +7,7 @@
#include <c4/yml/detail/print.hpp>
#endif
#include "./test_case.hpp"
#include "./test_lib/test_case.hpp"
#include <gtest/gtest.h>
@@ -19,25 +19,171 @@ std::string emit2str(Tree const& t)
return emitrs_yaml<std::string>(t);
}
inline void test_container_nostyle(ConstNodeRef n)
{
EXPECT_FALSE(n.type().is_container_styled());
EXPECT_FALSE(n.type().is_block());
EXPECT_FALSE(n.type().is_flow());
EXPECT_FALSE(n.type().is_flow_sl());
EXPECT_FALSE(n.type().is_flow_ml());
}
inline void test_container_block(ConstNodeRef n)
{
EXPECT_TRUE(n.type().is_container_styled());
EXPECT_TRUE(n.type().is_block());
EXPECT_FALSE(n.type().is_flow());
EXPECT_FALSE(n.type().is_flow_sl());
EXPECT_FALSE(n.type().is_flow_ml());
}
inline void test_container_flow_sl(ConstNodeRef n)
{
EXPECT_TRUE(n.type().is_container_styled());
EXPECT_FALSE(n.type().is_block());
EXPECT_TRUE(n.type().is_flow());
EXPECT_TRUE(n.type().is_flow_sl());
EXPECT_FALSE(n.type().is_flow_ml());
}
inline void test_container_flow_ml(ConstNodeRef n)
{
EXPECT_TRUE(n.type().is_container_styled());
EXPECT_FALSE(n.type().is_block());
EXPECT_TRUE(n.type().is_flow());
EXPECT_FALSE(n.type().is_flow_sl());
EXPECT_TRUE(n.type().is_flow_ml());
}
inline void test_key_plain(ConstNodeRef n)
{
EXPECT_TRUE(n.type().is_key_styled());
EXPECT_TRUE(n.type().is_key_plain());
EXPECT_FALSE(n.type().is_key_squo());
EXPECT_FALSE(n.type().is_key_dquo());
EXPECT_FALSE(n.type().is_key_literal());
EXPECT_FALSE(n.type().is_key_folded());
}
inline void test_val_plain(ConstNodeRef n)
{
EXPECT_TRUE(n.type().is_val_styled());
EXPECT_TRUE(n.type().is_val_plain());
EXPECT_FALSE(n.type().is_val_squo());
EXPECT_FALSE(n.type().is_val_dquo());
EXPECT_FALSE(n.type().is_val_literal());
EXPECT_FALSE(n.type().is_val_folded());
}
inline void test_key_squo(ConstNodeRef n)
{
EXPECT_TRUE(n.type().is_key_styled());
EXPECT_FALSE(n.type().is_key_plain());
EXPECT_TRUE(n.type().is_key_squo());
EXPECT_FALSE(n.type().is_key_dquo());
EXPECT_FALSE(n.type().is_key_literal());
EXPECT_FALSE(n.type().is_key_folded());
}
inline void test_val_squo(ConstNodeRef n)
{
EXPECT_TRUE(n.type().is_val_styled());
EXPECT_FALSE(n.type().is_val_plain());
EXPECT_TRUE(n.type().is_val_squo());
EXPECT_FALSE(n.type().is_val_dquo());
EXPECT_FALSE(n.type().is_val_literal());
EXPECT_FALSE(n.type().is_val_folded());
}
inline void test_key_dquo(ConstNodeRef n)
{
EXPECT_TRUE(n.type().is_key_styled());
EXPECT_FALSE(n.type().is_key_plain());
EXPECT_FALSE(n.type().is_key_squo());
EXPECT_TRUE(n.type().is_key_dquo());
EXPECT_FALSE(n.type().is_key_literal());
EXPECT_FALSE(n.type().is_key_folded());
}
inline void test_val_dquo(ConstNodeRef n)
{
EXPECT_TRUE(n.type().is_val_styled());
EXPECT_FALSE(n.type().is_val_plain());
EXPECT_FALSE(n.type().is_val_squo());
EXPECT_TRUE(n.type().is_val_dquo());
EXPECT_FALSE(n.type().is_val_literal());
EXPECT_FALSE(n.type().is_val_folded());
}
inline void test_key_literal(ConstNodeRef n)
{
EXPECT_TRUE(n.type().is_key_styled());
EXPECT_FALSE(n.type().is_key_plain());
EXPECT_FALSE(n.type().is_key_squo());
EXPECT_FALSE(n.type().is_key_dquo());
EXPECT_TRUE(n.type().is_key_literal());
EXPECT_FALSE(n.type().is_key_folded());
}
inline void test_val_literal(ConstNodeRef n)
{
EXPECT_TRUE(n.type().is_val_styled());
EXPECT_FALSE(n.type().is_val_plain());
EXPECT_FALSE(n.type().is_val_squo());
EXPECT_FALSE(n.type().is_val_dquo());
EXPECT_TRUE(n.type().is_val_literal());
EXPECT_FALSE(n.type().is_val_folded());
}
inline void test_key_folded(ConstNodeRef n)
{
EXPECT_TRUE(n.type().is_key_styled());
EXPECT_FALSE(n.type().is_key_plain());
EXPECT_FALSE(n.type().is_key_squo());
EXPECT_FALSE(n.type().is_key_dquo());
EXPECT_FALSE(n.type().is_key_literal());
EXPECT_TRUE(n.type().is_key_folded());
}
inline void test_val_folded(ConstNodeRef n)
{
EXPECT_TRUE(n.type().is_val_styled());
EXPECT_FALSE(n.type().is_val_plain());
EXPECT_FALSE(n.type().is_val_squo());
EXPECT_FALSE(n.type().is_val_dquo());
EXPECT_FALSE(n.type().is_val_literal());
EXPECT_TRUE(n.type().is_val_folded());
}
inline void test_key_nostyle(ConstNodeRef n)
{
EXPECT_FALSE(n.type().is_key_styled());
EXPECT_FALSE(n.type().is_key_plain());
EXPECT_FALSE(n.type().is_key_squo());
EXPECT_FALSE(n.type().is_key_dquo());
EXPECT_FALSE(n.type().is_key_literal());
EXPECT_FALSE(n.type().is_key_folded());
}
inline void test_val_nostyle(ConstNodeRef n)
{
EXPECT_FALSE(n.type().is_val_styled());
EXPECT_FALSE(n.type().is_val_plain());
EXPECT_FALSE(n.type().is_val_squo());
EXPECT_FALSE(n.type().is_val_dquo());
EXPECT_FALSE(n.type().is_val_literal());
EXPECT_FALSE(n.type().is_val_folded());
}
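// These helpers are used by the tests below to assert both the "styled" bit
// and the specific style bit, e.g. (mirroring the squo test further down):
//   Tree t = parse_in_arena("'foo'");
//   test_val_squo(t.rootref()); // single-quoted style is retained after parsing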
//-----------------------------------------------------------------------------
//-----------------------------------------------------------------------------
//-----------------------------------------------------------------------------
TEST(style, flags)
{
Tree tree = parse_in_arena("foo: bar");
EXPECT_TRUE(tree.rootref().type().default_block());
EXPECT_FALSE(tree.rootref().type().marked_flow());
EXPECT_FALSE(tree.rootref().type().marked_flow_sl());
EXPECT_FALSE(tree.rootref().type().marked_flow_ml());
tree._add_flags(tree.root_id(), _WIP_STYLE_FLOW_SL);
EXPECT_FALSE(tree.rootref().type().default_block());
EXPECT_TRUE(tree.rootref().type().marked_flow());
EXPECT_TRUE(tree.rootref().type().marked_flow_sl());
EXPECT_FALSE(tree.rootref().type().marked_flow_ml());
tree._rem_flags(tree.root_id(), _WIP_STYLE_FLOW_SL);
tree._add_flags(tree.root_id(), _WIP_STYLE_FLOW_ML);
EXPECT_FALSE(tree.rootref().type().default_block());
EXPECT_TRUE(tree.rootref().type().marked_flow());
EXPECT_FALSE(tree.rootref().type().marked_flow_sl());
EXPECT_TRUE(tree.rootref().type().marked_flow_ml());
test_container_block(tree.rootref());
tree._rem_flags(tree.root_id(), CONTAINER_STYLE);
tree._add_flags(tree.root_id(), FLOW_SL);
test_container_flow_sl(tree.rootref());
tree._rem_flags(tree.root_id(), CONTAINER_STYLE);
tree._add_flags(tree.root_id(), FLOW_ML);
test_container_flow_ml(tree.rootref());
}
@@ -54,16 +200,18 @@ this is the key: >-
void check_same_emit(Tree const& expected)
{
#if 0
#ifndef RYML_DBG
#define _showtrees(num)
#else
#define _showtrees(num) \
{ \
std::cout << "--------\nEMITTED" #num "\n--------\n"; \
std::cout << ws ## num; \
std::cout << "--------\nACTUAL" #num "\n--------\n"; \
print_tree(actual ## num); \
std::cout << "--------\nEXPECTED" #num "\n--------\n"; \
print_tree(expected)
#else
#define _showtrees(num)
print_tree(expected); \
}
#endif
std::string ws1, ws2, ws3, ws4;
@@ -104,39 +252,63 @@ void check_same_emit(Tree const& expected)
TEST(style, noflags)
{
Tree expected = parse_in_arena("{}");
NodeRef r = expected.rootref();
r["normal"] |= MAP;
r["normal"]["singleline"] = "foo";
r["normal"]["multiline"] |= MAP;
r["normal"]["multiline"]["____________"] = "foo";
r["normal"]["multiline"]["____mid_____"] = "foo\nbar";
r["normal"]["multiline"]["____mid_end1"] = "foo\nbar\n";
r["normal"]["multiline"]["____mid_end2"] = "foo\nbar\n\n";
r["normal"]["multiline"]["____mid_end3"] = "foo\nbar\n\n\n";
r["normal"]["multiline"]["____________"] = "foo";
r["normal"]["multiline"]["____________"] = "foo bar";
r["normal"]["multiline"]["________end1"] = "foo bar\n";
r["normal"]["multiline"]["________end2"] = "foo bar\n\n";
r["normal"]["multiline"]["________end3"] = "foo bar\n\n\n";
r["normal"]["multiline"]["beg_________"] = "\nfoo";
r["normal"]["multiline"]["beg_mid_____"] = "\nfoo\nbar";
r["normal"]["multiline"]["beg_mid_end1"] = "\nfoo\nbar\n";
r["normal"]["multiline"]["beg_mid_end2"] = "\nfoo\nbar\n\n";
r["normal"]["multiline"]["beg_mid_end3"] = "\nfoo\nbar\n\n\n";
r["leading_ws"] |= MAP;
r["leading_ws"]["singleline"] |= MAP;
r["leading_ws"]["singleline"]["space"] = " foo";
r["leading_ws"]["singleline"]["tab"] = "\tfoo";
r["leading_ws"]["singleline"]["space_and_tab0"] = " \tfoo";
r["leading_ws"]["singleline"]["space_and_tab1"] = "\t foo";
r["leading_ws"]["multiline"] |= MAP;
r["leading_ws"]["multiline"]["beg_________"] = "\n \tfoo";
r["leading_ws"]["multiline"]["beg_mid_____"] = "\n \tfoo\nbar";
r["leading_ws"]["multiline"]["beg_mid_end1"] = "\n \tfoo\nbar\n";
r["leading_ws"]["multiline"]["beg_mid_end2"] = "\n \tfoo\nbar\n\n";
r["leading_ws"]["multiline"]["beg_mid_end3"] = "\n \tfoo\nbar\n\n\n";
check_same_emit(expected);
auto setcont = [](NodeRef n, NodeType t){
n |= t;
test_container_nostyle(n);
return n;
};
auto setval = [](NodeRef n, csubstr key, csubstr val){
NodeRef ch = n[key];
ch = val;
test_key_nostyle(ch);
test_val_nostyle(ch);
};
Tree orig = parse_in_arena("{}");
NodeRef r = orig.rootref();
test_container_flow_sl(r);
{
NodeRef n = setcont(r["normal"], MAP);
setval(n, "singleline", "foo");
NodeRef ml = setcont(n["multiline"], MAP);
setval(ml, "____________", "foo");
setval(ml, "____mid_____", "foo\nbar");
setval(ml, "____mid_end1", "foo\nbar\n");
setval(ml, "____mid_end2", "foo\nbar\n\n");
setval(ml, "____mid_end3", "foo\nbar\n\n\n");
setval(ml, "____________", "foo");
setval(ml, "____________", "foo bar");
setval(ml, "________end1", "foo bar\n");
setval(ml, "________end2", "foo bar\n\n");
setval(ml, "________end3", "foo bar\n\n\n");
setval(ml, "beg_________", "\nfoo");
setval(ml, "beg_mid_____", "\nfoo\nbar");
setval(ml, "beg_mid_end1", "\nfoo\nbar\n");
setval(ml, "beg_mid_end2", "\nfoo\nbar\n\n");
setval(ml, "beg_mid_end3", "\nfoo\nbar\n\n\n");
}
{
NodeRef n = setcont(r["leading_ws"], MAP);
{
NodeRef sl = setcont(n["singleline"], MAP);
sl["space"] = " foo";
sl["tab"] = "\tfoo";
sl["space_and_tab0"] = " \tfoo";
sl["space_and_tab1"] = "\t foo";
}
{
NodeRef ml = setcont(n["multiline"], MAP);
ml["beg_________"] = "\n \tfoo";
ml["beg_mid_____"] = "\n \tfoo\nbar";
ml["beg_mid_end1"] = "\n \tfoo\nbar\n";
ml["beg_mid_end2"] = "\n \tfoo\nbar\n\n";
ml["beg_mid_end3"] = "\n \tfoo\nbar\n\n\n";
}
}
std::string emitted = emit2str(orig);
const Tree parsed = parse_in_place(to_substr(emitted));
_c4dbg_tree("original", orig);
_c4dbg_tree("parsed", parsed);
test_compare(parsed, orig);
}
@@ -144,58 +316,53 @@ TEST(style, noflags)
//-----------------------------------------------------------------------------
//-----------------------------------------------------------------------------
#ifdef WIP
TEST(style, scalar_retains_style_after_parse)
TEST(style, scalar_retains_style_after_parse__plain)
{
{
Tree t = parse_in_arena("foo");
EXPECT_TRUE(t.rootref().type().val_marked_plain());
EXPECT_FALSE(t.rootref().type().val_marked_squo());
EXPECT_FALSE(t.rootref().type().val_marked_dquo());
EXPECT_FALSE(t.rootref().type().val_marked_literal());
EXPECT_FALSE(t.rootref().type().val_marked_folded());
EXPECT_EQ(emitrs<std::string>(t), std::string("foo\n"));
}
{
Tree t = parse_in_arena("'foo'");
EXPECT_FALSE(t.rootref().type().val_marked_plain());
EXPECT_TRUE(t.rootref().type().val_marked_squo());
EXPECT_FALSE(t.rootref().type().val_marked_dquo());
EXPECT_FALSE(t.rootref().type().val_marked_literal());
EXPECT_FALSE(t.rootref().type().val_marked_folded());
EXPECT_EQ(emitrs<std::string>(t), std::string("'foo'\n"));
}
{
Tree t = parse_in_arena("'foo'");
EXPECT_FALSE(t.rootref().type().val_marked_plain());
EXPECT_FALSE(t.rootref().type().val_marked_squo());
EXPECT_TRUE(t.rootref().type().val_marked_dquo());
EXPECT_FALSE(t.rootref().type().val_marked_literal());
EXPECT_FALSE(t.rootref().type().val_marked_folded());
EXPECT_EQ(emitrs<std::string>(t), std::string("'foo'\n"));
}
{
Tree t = parse_in_arena("[foo, 'baz', \"bat\"]");
EXPECT_TRUE(t.rootref().type().marked_flow());
EXPECT_TRUE(t[0].type().val_marked_plain());
EXPECT_FALSE(t[0].type().val_marked_squo());
EXPECT_FALSE(t[0].type().val_marked_dquo());
EXPECT_FALSE(t[0].type().val_marked_literal());
EXPECT_FALSE(t[0].type().val_marked_folded());
EXPECT_FALSE(t[1].type().val_marked_plain());
EXPECT_TRUE(t[1].type().val_marked_squo());
EXPECT_FALSE(t[1].type().val_marked_dquo());
EXPECT_FALSE(t[1].type().val_marked_literal());
EXPECT_FALSE(t[1].type().val_marked_folded());
EXPECT_FALSE(t[2].type().val_marked_plain());
EXPECT_FALSE(t[2].type().val_marked_squo());
EXPECT_TRUE(t[2].type().val_marked_dquo());
EXPECT_FALSE(t[2].type().val_marked_literal());
EXPECT_FALSE(t[2].type().val_marked_folded());
EXPECT_EQ(emitrs<std::string>(t), std::string("foo"));
}
const Tree t = parse_in_arena("foo");
test_val_plain(t.rootref());
EXPECT_EQ(emitrs_yaml<std::string>(t), std::string("foo\n"));
}
TEST(style, scalar_retains_style_after_parse__squo)
{
const Tree t = parse_in_arena("'foo'");
test_val_squo(t.rootref());
EXPECT_EQ(emitrs_yaml<std::string>(t), std::string("'foo'\n"));
}
TEST(style, scalar_retains_style_after_parse__dquo)
{
const Tree t = parse_in_arena("\"foo\"");
test_val_dquo(t.rootref());
EXPECT_EQ(emitrs_yaml<std::string>(t), std::string("\"foo\"\n"));
}
TEST(style, scalar_retains_style_after_parse__literal)
{
const Tree t = parse_in_arena("|\n foo");
test_val_literal(t.rootref());
EXPECT_EQ(emitrs_yaml<std::string>(t), std::string("|\n foo\n"));
}
TEST(style, scalar_retains_style_after_parse__folded)
{
const Tree t = parse_in_arena(">\n foo");
test_val_folded(t.rootref());
EXPECT_EQ(emitrs_yaml<std::string>(t), std::string(">\n foo\n"));
}
TEST(style, scalar_retains_style_after_parse__mixed)
{
std::string yaml = "- foo\n- 'baz'\n- \"bat\"\n- |\n baq\n- >\n bax\n";
const Tree t = parse_in_arena(to_csubstr(yaml));
test_container_block(t.rootref());
test_val_plain(t[0]);
test_val_squo(t[1]);
test_val_dquo(t[2]);
test_val_literal(t[3]);
test_val_folded(t[4]);
EXPECT_EQ(emitrs_yaml<std::string>(t), yaml);
}
#endif
//-----------------------------------------------------------------------------
@@ -204,9 +371,50 @@ TEST(style, scalar_retains_style_after_parse)
TEST(scalar, base)
{
Tree tree = parse_in_arena(scalar_yaml);
const Tree tree = parse_in_arena(scalar_yaml);
test_key_plain(tree[0]);
test_val_folded(tree[0]);
EXPECT_EQ(tree[0].key(), csubstr("this is the key"));
EXPECT_EQ(tree[0].val(), csubstr("this is the multiline \"val\" with\n'empty' lines"));
EXPECT_EQ(emit2str(tree), R"(this is the key: >-
this is the multiline "val" with
'empty' lines
)");
check_same_emit(tree);
}
//-----------------------------------------------------------------------------
TEST(scalar, block_literal__key)
{
Tree tree = parse_in_arena(scalar_yaml);
test_key_plain(tree[0]);
test_val_folded(tree[0]);
tree._rem_flags(tree[0].id(), KEY_STYLE);
tree._add_flags(tree[0].id(), KEY_LITERAL);
test_key_literal(tree[0]);
test_val_folded(tree[0]);
EXPECT_EQ(emit2str(tree), R"(? |-
this is the key
: >-
this is the multiline "val" with
'empty' lines
)");
check_same_emit(tree);
}
TEST(scalar, block_literal__val)
{
Tree tree = parse_in_arena(scalar_yaml);
test_key_plain(tree[0]);
test_val_folded(tree[0]);
tree._rem_flags(tree[0].id(), VAL_STYLE);
tree._add_flags(tree[0].id(), VAL_LITERAL);
test_key_plain(tree[0]);
test_val_literal(tree[0]);
EXPECT_EQ(emit2str(tree), R"(this is the key: |-
this is the multiline "val" with
'empty' lines
@@ -214,222 +422,132 @@ TEST(scalar, base)
check_same_emit(tree);
}
TEST(scalar, block_literal)
TEST(scalar, block_literal__key_val)
{
Tree tree = parse_in_arena(scalar_yaml);
{
SCOPED_TRACE("val only");
EXPECT_FALSE(tree[0].type().key_marked_literal());
EXPECT_FALSE(tree[0].type().val_marked_literal());
tree._add_flags(tree[0].id(), _WIP_VAL_LITERAL);
EXPECT_FALSE(tree[0].type().key_marked_literal());
EXPECT_TRUE(tree[0].type().val_marked_literal());
EXPECT_EQ(emit2str(tree), R"(this is the key: |-
this is the multiline "val" with
'empty' lines
)");
check_same_emit(tree);
}
{
SCOPED_TRACE("key+val");
tree._add_flags(tree[0].id(), _WIP_KEY_LITERAL);
EXPECT_TRUE(tree[0].type().key_marked_literal());
EXPECT_TRUE(tree[0].type().val_marked_literal());
EXPECT_EQ(emit2str(tree), R"(? |-
test_key_plain(tree[0]);
test_val_folded(tree[0]);
tree._rem_flags(tree[0].id(), KEY_STYLE|VAL_STYLE);
tree._add_flags(tree[0].id(), KEY_LITERAL|VAL_LITERAL);
test_key_literal(tree[0]);
test_val_literal(tree[0]);
EXPECT_EQ(emit2str(tree), R"(? |-
this is the key
: |-
this is the multiline "val" with
'empty' lines
)");
check_same_emit(tree);
}
{
SCOPED_TRACE("key only");
tree._rem_flags(tree[0].id(), _WIP_VAL_LITERAL);
EXPECT_TRUE(tree[0].type().key_marked_literal());
EXPECT_FALSE(tree[0].type().val_marked_literal());
EXPECT_EQ(emit2str(tree), R"(? |-
this is the key
: |-
this is the multiline "val" with
'empty' lines
)");
check_same_emit(tree);
}
check_same_emit(tree);
}
TEST(scalar, block_folded)
//-----------------------------------------------------------------------------
TEST(scalar, block_folded__key)
{
Tree tree = parse_in_arena(scalar_yaml);
{
SCOPED_TRACE("val only");
EXPECT_FALSE(tree[0].type().key_marked_folded());
EXPECT_FALSE(tree[0].type().val_marked_folded());
tree._add_flags(tree[0].id(), _WIP_VAL_FOLDED);
EXPECT_FALSE(tree[0].type().key_marked_folded());
EXPECT_TRUE(tree[0].type().val_marked_folded());
EXPECT_EQ(emit2str(tree), R"(this is the key: >-
this is the multiline "val" with
'empty' lines
)");
check_same_emit(tree);
}
{
SCOPED_TRACE("key+val");
tree._add_flags(tree[0].id(), _WIP_KEY_FOLDED);
EXPECT_TRUE(tree[0].type().key_marked_folded());
EXPECT_TRUE(tree[0].type().val_marked_folded());
EXPECT_EQ(emit2str(tree), R"(? >-
test_key_plain(tree[0]);
test_val_folded(tree[0]);
tree._rem_flags(tree[0].id(), KEY_STYLE);
tree._add_flags(tree[0].id(), KEY_FOLDED);
test_key_folded(tree[0]);
test_val_folded(tree[0]);
EXPECT_EQ(emit2str(tree), R"(? >-
this is the key
: >-
this is the multiline "val" with
'empty' lines
)");
check_same_emit(tree);
}
{
SCOPED_TRACE("val only");
tree._rem_flags(tree[0].id(), _WIP_VAL_FOLDED);
EXPECT_TRUE(tree[0].type().key_marked_folded());
EXPECT_FALSE(tree[0].type().val_marked_folded());
EXPECT_EQ(emit2str(tree), R"(? >-
check_same_emit(tree);
}
TEST(scalar, block_folded__val)
{
Tree tree = parse_in_arena(scalar_yaml);
test_key_plain(tree[0]);
test_val_folded(tree[0]);
tree._rem_flags(tree[0].id(), VAL_STYLE);
tree._add_flags(tree[0].id(), VAL_FOLDED);
test_key_plain(tree[0]);
test_val_folded(tree[0]);
EXPECT_EQ(emit2str(tree), R"(this is the key: >-
this is the multiline "val" with
'empty' lines
)");
check_same_emit(tree);
}
TEST(scalar, block_folded__key_val)
{
Tree tree = parse_in_arena(scalar_yaml);
test_key_plain(tree[0]);
test_val_folded(tree[0]);
tree._rem_flags(tree[0].id(), KEY_STYLE|VAL_STYLE);
tree._add_flags(tree[0].id(), KEY_FOLDED|VAL_FOLDED);
test_key_folded(tree[0]);
test_val_folded(tree[0]);
EXPECT_EQ(emit2str(tree), R"(? >-
this is the key
: |-
: >-
this is the multiline "val" with
'empty' lines
)");
check_same_emit(tree);
}
check_same_emit(tree);
}
TEST(scalar, squot)
//-----------------------------------------------------------------------------
TEST(scalar, squo__key)
{
Tree tree = parse_in_arena(scalar_yaml);
EXPECT_FALSE(tree[0].type().key_marked_squo());
EXPECT_FALSE(tree[0].type().val_marked_squo());
{
SCOPED_TRACE("val only");
tree._add_flags(tree[0].id(), _WIP_VAL_SQUO);
EXPECT_FALSE(tree[0].type().key_marked_squo());
EXPECT_TRUE(tree[0].type().val_marked_squo());
EXPECT_EQ(emit2str(tree), R"(this is the key: 'this is the multiline "val" with
test_key_plain(tree[0]);
test_val_folded(tree[0]);
tree._rem_flags(tree[0].id(), KEY_STYLE);
tree._add_flags(tree[0].id(), KEY_SQUO);
test_key_squo(tree[0]);
test_val_folded(tree[0]);
EXPECT_EQ(emit2str(tree), R"('this is the key': >-
this is the multiline "val" with
'empty' lines
)");
check_same_emit(tree);
}
TEST(scalar, squo__val)
{
Tree tree = parse_in_arena(scalar_yaml);
test_key_plain(tree[0]);
test_val_folded(tree[0]);
tree._rem_flags(tree[0].id(), VAL_STYLE);
tree._add_flags(tree[0].id(), VAL_SQUO);
test_key_plain(tree[0]);
test_val_squo(tree[0]);
EXPECT_EQ(emit2str(tree), R"(this is the key: 'this is the multiline "val" with
''empty'' lines'
)");
check_same_emit(tree);
}
{
SCOPED_TRACE("key+val");
tree._add_flags(tree[0].id(), _WIP_KEY_SQUO);
EXPECT_TRUE(tree[0].type().key_marked_squo());
EXPECT_TRUE(tree[0].type().val_marked_squo());
EXPECT_EQ(emit2str(tree), R"('this is the key': 'this is the multiline "val" with
check_same_emit(tree);
}
TEST(scalar, squo__key_val)
{
Tree tree = parse_in_arena(scalar_yaml);
test_key_plain(tree[0]);
test_val_folded(tree[0]);
tree._rem_flags(tree[0].id(), KEY_STYLE|VAL_STYLE);
tree._add_flags(tree[0].id(), KEY_SQUO|VAL_SQUO);
test_key_squo(tree[0]);
test_val_squo(tree[0]);
EXPECT_EQ(emit2str(tree), R"('this is the key': 'this is the multiline "val" with
''empty'' lines'
)");
check_same_emit(tree);
}
{
SCOPED_TRACE("key only");
tree._rem_flags(tree[0].id(), _WIP_VAL_SQUO);
EXPECT_TRUE(tree[0].type().key_marked_squo());
EXPECT_FALSE(tree[0].type().val_marked_squo());
EXPECT_EQ(emit2str(tree), R"('this is the key': |-
this is the multiline "val" with
'empty' lines
)");
check_same_emit(tree);
}
}
TEST(scalar, dquot)
{
Tree tree = parse_in_arena(scalar_yaml);
EXPECT_FALSE(tree[0].type().key_marked_dquo());
EXPECT_FALSE(tree[0].type().val_marked_dquo());
{
SCOPED_TRACE("val only");
tree._add_flags(tree[0].id(), _WIP_VAL_DQUO);
EXPECT_FALSE(tree[0].type().key_marked_dquo());
EXPECT_TRUE(tree[0].type().val_marked_dquo());
// visual studio fails to compile this string when used inside
// the EXPECT_EQ() macro below. So we declare it separately
// instead:
csubstr yaml = R"(this is the key: "this is the multiline \"val\" with
'empty' lines"
)";
EXPECT_EQ(emit2str(tree), yaml);
check_same_emit(tree);
}
{
SCOPED_TRACE("key+val");
tree._add_flags(tree[0].id(), _WIP_KEY_DQUO);
EXPECT_TRUE(tree[0].type().key_marked_dquo());
EXPECT_TRUE(tree[0].type().val_marked_dquo());
// visual studio fails to compile this string when used inside
// the EXPECT_EQ() macro below. So we declare it separately
// instead:
csubstr yaml = R"("this is the key": "this is the multiline \"val\" with
'empty' lines"
)";
EXPECT_EQ(emit2str(tree), yaml);
check_same_emit(tree);
}
{
SCOPED_TRACE("key only");
tree._rem_flags(tree[0].id(), _WIP_VAL_DQUO);
EXPECT_TRUE(tree[0].type().key_marked_dquo());
EXPECT_FALSE(tree[0].type().val_marked_dquo());
EXPECT_EQ(emit2str(tree), R"("this is the key": |-
this is the multiline "val" with
'empty' lines
)");
check_same_emit(tree);
}
}
TEST(scalar, plain)
{
Tree tree = parse_in_arena(scalar_yaml);
EXPECT_FALSE(tree[0].type().key_marked_plain());
EXPECT_FALSE(tree[0].type().val_marked_plain());
{
SCOPED_TRACE("val only");
tree._add_flags(tree[0].id(), _WIP_VAL_PLAIN);
EXPECT_FALSE(tree[0].type().key_marked_plain());
EXPECT_TRUE(tree[0].type().val_marked_plain());
EXPECT_EQ(emit2str(tree), R"(this is the key: this is the multiline "val" with
'empty' lines
)");
check_same_emit(tree);
}
{
SCOPED_TRACE("key+val");
tree._add_flags(tree[0].id(), _WIP_KEY_PLAIN);
EXPECT_TRUE(tree[0].type().key_marked_plain());
EXPECT_TRUE(tree[0].type().val_marked_plain());
EXPECT_EQ(emit2str(tree), R"(this is the key: this is the multiline "val" with
'empty' lines
)");
check_same_emit(tree);
}
{
SCOPED_TRACE("key only");
tree._rem_flags(tree[0].id(), _WIP_VAL_PLAIN);
EXPECT_TRUE(tree[0].type().key_marked_plain());
EXPECT_FALSE(tree[0].type().val_marked_plain());
EXPECT_EQ(emit2str(tree), R"(this is the key: |-
this is the multiline "val" with
'empty' lines
)");
check_same_emit(tree);
}
check_same_emit(tree);
}
@@ -452,7 +570,7 @@ bar
EXPECT_TRUE(tree.docref(0).is_doc());
EXPECT_TRUE(tree.docref(0).is_val());
EXPECT_EQ(emit2str(tree), "--- scalar %YAML 1.2\n--- foo\n--- bar\n");
tree._add_flags(tree.root_id(), _WIP_STYLE_FLOW_SL);
tree._add_flags(tree.root_id(), FLOW_SL);
EXPECT_EQ(emit2str(tree), "--- scalar %YAML 1.2\n--- foo\n--- bar\n");
}
@@ -464,6 +582,8 @@ bar
TEST(seq, block)
{
Tree tree = parse_in_arena("[1, 2, 3, 4, 5, 6]");
tree._rem_flags(tree.root_id(), CONTAINER_STYLE);
tree._add_flags(tree.root_id(), BLOCK);
EXPECT_EQ(emit2str(tree), R"(- 1
- 2
- 3
@@ -476,7 +596,8 @@ TEST(seq, block)
TEST(seq, flow_sl)
{
Tree tree = parse_in_arena("[1, 2, 3, 4, 5, 6]");
tree._add_flags(tree.root_id(), _WIP_STYLE_FLOW_SL);
tree._rem_flags(tree.root_id(), CONTAINER_STYLE);
tree._add_flags(tree.root_id(), FLOW_SL);
EXPECT_EQ(emit2str(tree), R"([1,2,3,4,5,6])");
}
@@ -488,7 +609,12 @@ TEST(seq, flow_sl)
TEST(keyseq, block)
{
Tree tree = parse_in_arena("{foo: [1, 2, 3, 4, 5, 6]}");
EXPECT_TRUE(tree.rootref().type().default_block());
tree._rem_flags(tree.root_id(), CONTAINER_STYLE);
tree._add_flags(tree.root_id(), BLOCK);
EXPECT_EQ(emit2str(tree), R"(foo: [1,2,3,4,5,6]
)");
tree._rem_flags(tree["foo"].id(), CONTAINER_STYLE);
tree._add_flags(tree["foo"].id(), BLOCK);
EXPECT_EQ(emit2str(tree), R"(foo:
- 1
- 2
@@ -498,31 +624,38 @@ TEST(keyseq, block)
- 6
)");
tree = parse_in_arena("{foo: [1, [2, 3], 4, [5, 6]]}");
tree._rem_flags(tree.root_id(), CONTAINER_STYLE);
tree._add_flags(tree.root_id(), BLOCK);
EXPECT_EQ(emit2str(tree), R"(foo: [1,[2,3],4,[5,6]]
)");
tree._rem_flags(tree["foo"].id(), CONTAINER_STYLE);
tree._add_flags(tree["foo"].id(), BLOCK);
EXPECT_EQ(emit2str(tree), R"(foo:
- 1
- - 2
- 3
- [2,3]
- 4
- - 5
- 6
- [5,6]
)");
}
TEST(keyseq, flow_sl)
{
Tree tree = parse_in_arena("{foo: [1, 2, 3, 4, 5, 6]}");
EXPECT_TRUE(tree.rootref().type().default_block());
tree._add_flags(tree.root_id(), _WIP_STYLE_FLOW_SL);
EXPECT_FALSE(tree.rootref().type().default_block());
Tree tree = parse_in_arena("foo: [1, 2, 3, 4, 5, 6]");
tree._rem_flags(tree.root_id(), CONTAINER_STYLE);
tree._add_flags(tree.root_id(), FLOW_SL);
EXPECT_EQ(emit2str(tree), R"({foo: [1,2,3,4,5,6]})");
//
tree = parse_in_arena("{foo: [1, [2, 3], 4, [5, 6]]}");
tree._add_flags(tree.root_id(), _WIP_STYLE_FLOW_SL);
tree = parse_in_arena("foo: [1, [2, 3], 4, [5, 6]]");
tree._rem_flags(tree.root_id(), CONTAINER_STYLE);
tree._add_flags(tree.root_id(), FLOW_SL);
EXPECT_EQ(emit2str(tree), R"({foo: [1,[2,3],4,[5,6]]})");
//
tree._rem_flags(tree.root_id(), _WIP_STYLE_FLOW_SL);
tree._add_flags(tree["foo"][1].id(), _WIP_STYLE_FLOW_SL);
tree._add_flags(tree["foo"][3].id(), _WIP_STYLE_FLOW_SL);
tree._rem_flags(tree.root_id(), CONTAINER_STYLE);
tree._add_flags(tree.root_id(), BLOCK);
tree._rem_flags(tree["foo"].id(), CONTAINER_STYLE);
tree._add_flags(tree["foo"].id(), BLOCK);
tree._add_flags(tree["foo"][1].id(), FLOW_SL);
tree._add_flags(tree["foo"][3].id(), FLOW_SL);
EXPECT_EQ(emit2str(tree), R"(foo:
- 1
- [2,3]
@@ -539,6 +672,8 @@ TEST(keyseq, flow_sl)
TEST(map, block)
{
Tree tree = parse_in_arena("{1: 10, 2: 10, 3: 10, 4: 10, 5: 10, 6: 10}");
tree._rem_flags(tree.root_id(), CONTAINER_STYLE);
tree._add_flags(tree.root_id(), BLOCK);
EXPECT_EQ(emit2str(tree), R"(1: 10
2: 10
3: 10
@@ -550,8 +685,15 @@ TEST(map, block)
TEST(map, flow_sl)
{
Tree tree = parse_in_arena("{1: 10, 2: 10, 3: 10, 4: 10, 5: 10, 6: 10}");
tree._add_flags(tree.root_id(), _WIP_STYLE_FLOW_SL);
Tree tree = parse_in_arena(R"(1: 10
2: 10
3: 10
4: 10
5: 10
6: 10
)");
tree._rem_flags(tree.root_id(), CONTAINER_STYLE);
tree._add_flags(tree.root_id(), FLOW_SL);
EXPECT_EQ(emit2str(tree), R"({1: 10,2: 10,3: 10,4: 10,5: 10,6: 10})");
}
@@ -563,6 +705,10 @@ TEST(map, flow_sl)
TEST(keymap, block)
{
Tree tree = parse_in_arena("{foo: {1: 10, 2: 10, 3: 10, 4: 10, 5: 10, 6: 10}}");
tree._rem_flags(tree.root_id(), CONTAINER_STYLE);
tree._add_flags(tree.root_id(), BLOCK);
tree._rem_flags(tree["foo"].id(), CONTAINER_STYLE);
tree._add_flags(tree["foo"].id(), BLOCK);
EXPECT_EQ(emit2str(tree), R"(foo:
1: 10
2: 10
@@ -576,12 +722,19 @@ TEST(keymap, block)
TEST(keymap, flow_sl)
{
Tree tree = parse_in_arena("{foo: {1: 10, 2: 10, 3: 10, 4: 10, 5: 10, 6: 10}}");
tree._add_flags(tree.root_id(), _WIP_STYLE_FLOW_SL);
Tree tree = parse_in_arena(R"(foo:
1: 10
2: 10
3: 10
4: 10
5: 10
6: 10
)");
tree._rem_flags(tree.root_id(), CONTAINER_STYLE);
tree._add_flags(tree.root_id(), FLOW_SL);
EXPECT_EQ(emit2str(tree), R"({foo: {1: 10,2: 10,3: 10,4: 10,5: 10,6: 10}})");
//
tree = parse_in_arena("{foo: {1: 10, 2: {2: 10, 3: 10}, 4: 10, 5: {5: 10, 6: 10}}}");
EXPECT_EQ(emit2str(tree), R"(foo:
tree = parse_in_arena(R"(foo:
1: 10
2:
2: 10
@@ -591,11 +744,13 @@ TEST(keymap, flow_sl)
5: 10
6: 10
)");
tree._add_flags(tree.root_id(), _WIP_STYLE_FLOW_SL);
tree._rem_flags(tree.root_id(), CONTAINER_STYLE);
tree._add_flags(tree.root_id(), FLOW_SL);
EXPECT_EQ(emit2str(tree), R"({foo: {1: 10,2: {2: 10,3: 10},4: 10,5: {5: 10,6: 10}}})");
tree._rem_flags(tree.root_id(), _WIP_STYLE_FLOW_SL);
tree._add_flags(tree["foo"][1].id(), _WIP_STYLE_FLOW_SL);
tree._add_flags(tree["foo"][3].id(), _WIP_STYLE_FLOW_SL);
tree._rem_flags(tree.root_id(), CONTAINER_STYLE);
tree._add_flags(tree.root_id(), BLOCK);
tree._add_flags(tree["foo"][1].id(), FLOW_SL);
tree._add_flags(tree["foo"][3].id(), FLOW_SL);
EXPECT_EQ(emit2str(tree), R"(foo:
1: 10
2: {2: 10,3: 10}

File diff suppressed because it is too large

View File

@@ -13,7 +13,7 @@
#include <c4/fs/fs.hpp>
#include <c4/log/log.hpp>
#include "test_case.hpp"
#include "test_lib/test_case.hpp"
#include <gtest/gtest.h>
#define RYML_NFO (0 || RYML_DBG)

View File

@@ -0,0 +1,81 @@
#ifndef RYML_SINGLE_HEADER
#include <c4/yml/node.hpp>
#include <c4/yml/std/string.hpp>
#include <c4/yml/parse_engine.def.hpp>
#endif
#include "./test_suite_event_handler.hpp"
namespace c4 {
namespace yml {
// instantiate the template
template class ParseEngine<EventHandlerYamlStd>;
inline void EventHandlerYamlStd::EventSink::append_escaped(csubstr val)
{
#define _c4flush_use_instead(repl, skip) \
do { \
this->append(val.range(prev, i)); \
this->append(repl); \
prev = i + skip; \
} \
while(0)
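// The macro above flushes the unescaped run [prev, i) to the sink, appends the
// replacement escape, and advances `prev` past the `skip` source bytes that the
// escape replaces.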
uint8_t const* C4_RESTRICT s = reinterpret_cast<uint8_t const*>(val.str);
size_t prev = 0;
for(size_t i = 0; i < val.len; ++i)
{
switch(s[i])
{
case UINT8_C(0x0a): // \n
_c4flush_use_instead("\\n", 1); break;
case UINT8_C(0x5c): // '\\'
_c4flush_use_instead("\\\\", 1); break;
case UINT8_C(0x09): // \t
_c4flush_use_instead("\\t", 1); break;
case UINT8_C(0x0d): // \r
_c4flush_use_instead("\\r", 1); break;
case UINT8_C(0x00): // \0
_c4flush_use_instead("\\0", 1); break;
case UINT8_C(0x0c): // \f (form feed)
_c4flush_use_instead("\\f", 1); break;
case UINT8_C(0x08): // \b (backspace)
_c4flush_use_instead("\\b", 1); break;
case UINT8_C(0x07): // \a (bell)
_c4flush_use_instead("\\a", 1); break;
case UINT8_C(0x0b): // \v (vertical tab)
_c4flush_use_instead("\\v", 1); break;
case UINT8_C(0x1b): // \e (escape)
_c4flush_use_instead("\\e", 1); break;
case UINT8_C(0xc2):
if(i+1 < val.len)
{
const uint8_t np1 = s[i+1];
if(np1 == UINT8_C(0xa0))
_c4flush_use_instead("\\_", 2);
else if(np1 == UINT8_C(0x85))
_c4flush_use_instead("\\N", 2);
}
break;
case UINT8_C(0xe2):
if(i+2 < val.len)
{
if(s[i+1] == UINT8_C(0x80))
{
if(s[i+2] == UINT8_C(0xa8))
_c4flush_use_instead("\\L", 3);
else if(s[i+2] == UINT8_C(0xa9))
_c4flush_use_instead("\\P", 3);
}
}
break;
}
}
// flush the rest
this->append(val.sub(prev));
#undef _c4flush_use_instead
}
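// Illustration of the mapping above (a sketch, not part of the build): feeding
// the scalar "a\tb\nc" through append_escaped() appends the seven characters
// a \ t b \ n c -- control characters become their two-character escapes, the
// NEL/NBSP/LS/PS multi-byte sequences become \N, \_, \L, \P, and everything
// else is copied verbatim.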
} // namespace yml
} // namespace c4

View File

@@ -0,0 +1,782 @@
#ifndef _C4_YML_EVENT_HANDLER_YAMLSTD_HPP_
#define _C4_YML_EVENT_HANDLER_YAMLSTD_HPP_
#ifdef RYML_SINGLE_HEADER
#include <ryml_all.hpp>
#else
#ifndef _C4_YML_EVENT_HANDLER_STACK_HPP_
#include "c4/yml/event_handler_stack.hpp"
#endif
#ifndef _C4_YML_STD_STRING_HPP_
#include "c4/yml/std/string.hpp"
#endif
#ifndef _C4_YML_DETAIL_PRINT_HPP_
#include "c4/yml/detail/print.hpp"
#endif
#endif
#include <vector>
C4_SUPPRESS_WARNING_GCC_CLANG_PUSH
C4_SUPPRESS_WARNING_GCC_CLANG("-Wold-style-cast")
C4_SUPPRESS_WARNING_GCC("-Wuseless-cast")
namespace c4 {
namespace yml {
/** @addtogroup doc_event_handlers
* @{ */
struct EventHandlerYamlStdState : public ParserState
{
NodeData ev_data;
};
/** The event handler producing standard YAML events as used in the
* [YAML test suite](https://github.com/yaml/yaml-test-suite).
* See the documentation for @ref doc_event_handlers, which has
* important notes about the event model used by rapidyaml.
*
* This class is used only in the CI of this project, and in the
* application used as part of the [standard YAML
* playground](https://play.yaml.io/main/parser). This is not part of
* the library and is not installed. *
*/
struct EventHandlerYamlStd : public EventHandlerStack<EventHandlerYamlStd, EventHandlerYamlStdState>
{
/** @name types
* @{ */
// our internal state must inherit from parser state
using state = EventHandlerYamlStdState;
struct EventSink
{
std::string result;
void reset() noexcept { result.clear(); }
void append(csubstr s) noexcept { result.append(s.str, s.len); }
void append(char c) noexcept { result += c; }
void insert(csubstr s, size_t pos) noexcept { result.insert(pos, s.str, s.len); }
void insert(char c, size_t pos) noexcept { result.insert(pos, 1, c); }
csubstr get() const { return csubstr(&result[0], result.size()); }
substr get() { return substr(&result[0], result.size()); }
size_t find_last(csubstr s) const { return result.rfind(s.str, std::string::npos, s.len); }
void append_escaped(csubstr val);
};
/** @} */
public:
/** @cond dev */
static constexpr const bool is_events = true; // remove
static constexpr const bool is_wtree = false; // remove
EventSink *C4_RESTRICT m_ev_sink;
std::vector<EventSink> m_ev_val_buffers; // FIXME: don't use std::vector
// TODO: use this for both tree and events (ie remove the tree directives)
char m_ev_key_tag_buf[256];
char m_ev_val_tag_buf[256];
TagDirective m_ev_tag_directives[RYML_MAX_TAG_DIRECTIVES];
std::string m_ev_arena;
// undefined at the end
#define _enable_(bits) _enable__<bits>()
#define _disable_(bits) _disable__<bits>()
#define _has_any_(bits) _has_any__<bits>()
/** @endcond */
public:
/** @name construction and resetting
* @{ */
EventHandlerYamlStd() : EventHandlerStack(), m_ev_sink(), m_ev_val_buffers() {}
EventHandlerYamlStd(Callbacks const& cb) : EventHandlerStack(cb), m_ev_sink(), m_ev_val_buffers() {}
EventHandlerYamlStd(EventSink *sink, Callbacks const& cb) : EventHandlerStack(cb), m_ev_sink(sink), m_ev_val_buffers()
{
reset();
}
EventHandlerYamlStd(EventSink *sink) : EventHandlerYamlStd(sink, get_callbacks()) {}
void reset()
{
_stack_reset_root();
m_curr->flags |= RUNK|RTOP;
for(auto &td : m_ev_tag_directives)
td = {};
m_ev_val_buffers.resize((size_t)m_stack.size());
m_ev_arena.clear();
}
/** @} */
public:
/** @name parse events
* @{ */
void start_parse(const char* filename)
{
m_curr->start_parse(filename, m_curr->node_id);
}
void finish_parse()
{
}
void cancel_parse()
{
while(m_stack.size() > 1)
_pop();
_ev_buf_flush_();
}
/** @} */
public:
/** @name YAML stream events */
/** @{ */
void begin_stream()
{
_ev_send_("+STR\n");
}
void end_stream()
{
_ev_send_("-STR\n");
_ev_buf_flush_();
}
/** @} */
public:
/** @name YAML document events */
/** @{ */
/** implicit doc start (without ---) */
void begin_doc()
{
_c4dbgp("begin_doc");
if(_stack_should_push_on_begin_doc())
{
_c4dbgp("push!");
_push();
_enable_(DOC);
}
_ev_send_("+DOC\n");
}
/** implicit doc end (without ...) */
void end_doc()
{
_c4dbgp("end_doc");
_ev_send_("-DOC\n");
if(_stack_should_pop_on_end_doc())
{
_c4dbgp("pop!");
_pop();
}
}
/** explicit doc start, with --- */
void begin_doc_expl()
{
_c4dbgp("begin_doc_expl");
if(_stack_should_push_on_begin_doc())
{
_c4dbgp("push!");
_push();
}
_ev_send_("+DOC ---\n");
_enable_(DOC);
}
/** explicit doc end, with ... */
void end_doc_expl()
{
_c4dbgp("end_doc_expl");
_ev_send_("-DOC ...\n");
if(_stack_should_pop_on_end_doc())
{
_c4dbgp("pop!");
_pop();
}
}
/** @} */
public:
/** @name YAML map functions */
/** @{ */
void begin_map_key_flow()
{
_ev_send_("+MAP {}");
_ev_send_key_props_();
_ev_send_('\n');
_ev_mark_parent_with_children_();
_enable_(MAP|FLOW_SL);
_push();
}
void begin_map_key_block()
{
_ev_send_("+MAP");
_ev_send_key_props_();
_ev_send_('\n');
_ev_mark_parent_with_children_();
_enable_(MAP|BLOCK);
_push();
}
void begin_map_val_flow()
{
_ev_send_("+MAP {}");
_ev_send_val_props_();
_ev_send_('\n');
_ev_mark_parent_with_children_();
_enable_(MAP|FLOW_SL);
_push();
}
void begin_map_val_block()
{
_ev_send_("+MAP");
_ev_send_val_props_();
_ev_send_('\n');
_ev_mark_parent_with_children_();
_enable_(MAP|BLOCK);
_push();
}
void end_map()
{
_pop();
_ev_send_("-MAP\n");
}
/** @} */
public:
/** @name YAML seq events */
/** @{ */
void begin_seq_key_flow()
{
_ev_send_("+SEQ []");
_ev_send_key_props_();
_ev_send_('\n');
_ev_mark_parent_with_children_();
_enable_(SEQ|FLOW_SL);
_push();
}
void begin_seq_key_block()
{
_ev_send_("+SEQ");
_ev_send_key_props_();
_ev_send_('\n');
_ev_mark_parent_with_children_();
_enable_(SEQ|BLOCK);
_push();
}
void begin_seq_val_flow()
{
_ev_send_("+SEQ []");
_ev_send_val_props_();
_ev_send_('\n');
_ev_mark_parent_with_children_();
_enable_(SEQ|FLOW_SL);
_push();
}
void begin_seq_val_block()
{
_ev_send_("+SEQ");
_ev_send_val_props_();
_ev_send_('\n');
_ev_mark_parent_with_children_();
_enable_(SEQ|BLOCK);
_push();
}
void end_seq()
{
_pop();
_ev_send_("-SEQ\n"); // before popping
}
/** @} */
public:
/** @name YAML structure events */
/** @{ */
void add_sibling()
{
_RYML_CB_ASSERT(m_stack.m_callbacks, m_parent);
_ev_buf_flush_to_(m_curr->level, m_parent->level);
m_curr->ev_data = {};
}
/** set the previous val as the first key of a new map, with flow style.
*
* See the documentation for @ref doc_event_handlers, which has
* important notes about this event.
*/
void actually_val_is_first_key_of_new_map_flow()
{
// ensure we have a temporary buffer to save the current val
const id_type tmp = m_curr->level + id_type(2);
_ev_buf_ensure_(tmp + id_type(2));
// save the current val to the temporary buffer
_ev_buf_flush_to_(m_curr->level, tmp);
// create the map.
// this will push a new level, and tmp is one further
begin_map_val_flow();
_RYML_CB_ASSERT(m_stack.m_callbacks, tmp != m_curr->level);
// now move the saved val as the first key
_ev_buf_flush_to_(tmp, m_curr->level);
}
/** like its flow counterpart, but this function can only be
* called after the end of a flow-val at root or doc level.
*
* See the documentation for @ref doc_event_handlers, which has
* important notes about this event.
*/
void actually_val_is_first_key_of_new_map_block()
{
EventSink &sink = _ev_buf_();
substr full = sink.get();(void)full;
// interpolate +MAP\n after the last +DOC\n
_RYML_CB_ASSERT(m_stack.m_callbacks, full.len);
_RYML_CB_ASSERT(m_stack.m_callbacks, !full.count('\r'));
size_t docpos = sink.find_last("+DOC\n");
if(docpos != npos)
{
_RYML_CB_ASSERT(m_stack.m_callbacks, (m_stack.size() == 1u) ? (docpos >= 5u) : (docpos == 0u));
_RYML_CB_ASSERT(m_stack.m_callbacks, docpos + 5u < full.len);
sink.insert("+MAP\n", docpos + 5u);
}
else
{
// ... or interpolate +MAP\n after the last +DOC ---\n
docpos = sink.find_last("+DOC ---\n");
_RYML_CB_ASSERT(m_stack.m_callbacks, docpos != npos);
_RYML_CB_ASSERT(m_stack.m_callbacks, (m_stack.size() == 1u) ? (docpos >= 5u) : (docpos == 0u));
_RYML_CB_ASSERT(m_stack.m_callbacks, docpos + 9u < full.len);
sink.insert("+MAP\n", docpos + 9u);
}
_push();
}
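// Illustration (an assumed input, for reading the code above): for a document
// like `[a, b]: c`, the level buffer already holds
//   ... +DOC\n +SEQ []\n =VAL :a\n =VAL :b\n -SEQ\n
// when the ':' reveals that the seq was actually a key. Inserting "+MAP\n"
// right after the last "+DOC\n" (or "+DOC ---\n") turns that buffered seq into
// the first key of the new map; the value and the closing -MAP follow normally.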
/** @} */
public:
/** @name YAML scalar events */
/** @{ */
C4_ALWAYS_INLINE void set_key_scalar_plain(csubstr scalar)
{
_c4dbgpf("node[{}]: set key scalar plain: [{}]~~~{}~~~ ({})", m_curr->node_id, scalar.len, scalar, reinterpret_cast<void const*>(scalar.str));
_ev_send_key_scalar_(scalar, ':');
_enable_(KEY|KEY_PLAIN);
}
C4_ALWAYS_INLINE void set_val_scalar_plain(csubstr scalar)
{
_c4dbgpf("node[{}]: set val scalar plain: [{}]~~~{}~~~ ({})", m_curr->node_id, scalar.len, scalar, reinterpret_cast<void const*>(scalar.str));
_ev_send_val_scalar_(scalar, ':');
_enable_(VAL|VAL_PLAIN);
}
C4_ALWAYS_INLINE void set_key_scalar_dquoted(csubstr scalar)
{
_c4dbgpf("node[{}]: set key scalar dquot: [{}]~~~{}~~~ ({})", m_curr->node_id, scalar.len, scalar, reinterpret_cast<void const*>(scalar.str));
_ev_send_key_scalar_(scalar, '"');
_enable_(KEY|KEY_DQUO);
}
C4_ALWAYS_INLINE void set_val_scalar_dquoted(csubstr scalar)
{
_c4dbgpf("node[{}]: set val scalar dquot: [{}]~~~{}~~~ ({})", m_curr->node_id, scalar.len, scalar, reinterpret_cast<void const*>(scalar.str));
_ev_send_val_scalar_(scalar, '"');
_enable_(VAL|VAL_DQUO);
}
C4_ALWAYS_INLINE void set_key_scalar_squoted(csubstr scalar)
{
_c4dbgpf("node[{}]: set key scalar squot: [{}]~~~{}~~~ ({})", m_curr->node_id, scalar.len, scalar, reinterpret_cast<void const*>(scalar.str));
_ev_send_key_scalar_(scalar, '\'');
_enable_(KEY|KEY_SQUO);
}
C4_ALWAYS_INLINE void set_val_scalar_squoted(csubstr scalar)
{
_c4dbgpf("node[{}]: set val scalar squot: [{}]~~~{}~~~ ({})", m_curr->node_id, scalar.len, scalar, reinterpret_cast<void const*>(scalar.str));
_ev_send_val_scalar_(scalar, '\'');
_enable_(VAL|VAL_SQUO);
}
C4_ALWAYS_INLINE void set_key_scalar_literal(csubstr scalar)
{
_c4dbgpf("node[{}]: set key scalar literal: [{}]~~~{}~~~ ({})", m_curr->node_id, scalar.len, scalar, reinterpret_cast<void const*>(scalar.str));
_ev_send_key_scalar_(scalar, '|');
_enable_(KEY|KEY_LITERAL);
}
C4_ALWAYS_INLINE void set_val_scalar_literal(csubstr scalar)
{
_c4dbgpf("node[{}]: set val scalar literal: [{}]~~~{}~~~ ({})", m_curr->node_id, scalar.len, scalar, reinterpret_cast<void const*>(scalar.str));
_ev_send_val_scalar_(scalar, '|');
_enable_(VAL|VAL_LITERAL);
}
C4_ALWAYS_INLINE void set_key_scalar_folded(csubstr scalar)
{
_c4dbgpf("node[{}]: set key scalar folded: [{}]~~~{}~~~ ({})", m_curr->node_id, scalar.len, scalar, reinterpret_cast<void const*>(scalar.str));
_ev_send_key_scalar_(scalar, '>');
_enable_(KEY|KEY_FOLDED);
}
C4_ALWAYS_INLINE void set_val_scalar_folded(csubstr scalar)
{
_c4dbgpf("node[{}]: set val scalar folded: [{}]~~~{}~~~ ({})", m_curr->node_id, scalar.len, scalar, reinterpret_cast<void const*>(scalar.str));
_ev_send_val_scalar_(scalar, '>');
_enable_(VAL|VAL_FOLDED);
}
C4_ALWAYS_INLINE void mark_key_scalar_unfiltered()
{
C4_NOT_IMPLEMENTED();
}
C4_ALWAYS_INLINE void mark_val_scalar_unfiltered()
{
C4_NOT_IMPLEMENTED();
}
/** @} */
public:
/** @name YAML anchor/reference events */
/** @{ */
void set_key_anchor(csubstr anchor)
{
_c4dbgpf("node[{}]: set key anchor: [{}]~~~{}~~~", m_curr->node_id, anchor.len, anchor);
RYML_ASSERT(!anchor.begins_with('&'));
_enable_(KEYANCH);
m_curr->ev_data.m_key.anchor = anchor;
}
void set_val_anchor(csubstr anchor)
{
_c4dbgpf("node[{}]: set val anchor: [{}]~~~{}~~~", m_curr->node_id, anchor.len, anchor);
RYML_ASSERT(!anchor.begins_with('&'));
_enable_(VALANCH);
m_curr->ev_data.m_val.anchor = anchor;
}
C4_ALWAYS_INLINE bool has_key_anchor() const
{
return _has_any_(KEYANCH);
}
C4_ALWAYS_INLINE bool has_val_anchor() const
{
return _has_any_(VALANCH);
}
void set_key_ref(csubstr ref)
{
_c4dbgpf("node[{}]: set key ref: [{}]~~~{}~~~", m_curr->node_id, ref.len, ref);
RYML_ASSERT(ref.begins_with('*'));
_enable_(KEY|KEYREF);
_ev_send_("=ALI ");
_ev_send_(ref);
_ev_send_('\n');
}
void set_val_ref(csubstr ref)
{
_c4dbgpf("node[{}]: set val ref: [{}]~~~{}~~~", m_curr->node_id, ref.len, ref);
RYML_ASSERT(ref.begins_with('*'));
_enable_(VAL|VALREF);
_ev_send_("=ALI ");
_ev_send_(ref);
_ev_send_('\n');
}
/** @} */
public:
/** @name YAML tag events */
/** @{ */
void set_key_tag(csubstr tag)
{
_c4dbgpf("node[{}]: set key tag: [{}]~~~{}~~~", m_curr->node_id, tag.len, tag);
_enable_(KEYTAG);
m_curr->ev_data.m_key.tag = _ev_transform_directive(tag, m_ev_key_tag_buf);
}
void set_val_tag(csubstr tag)
{
_c4dbgpf("node[{}]: set val tag: [{}]~~~{}~~~", m_curr->node_id, tag.len, tag);
_enable_(VALTAG);
m_curr->ev_data.m_val.tag = _ev_transform_directive(tag, m_ev_val_tag_buf);
}
/** @} */
public:
/** @name YAML directive events */
/** @{ */
void add_directive(csubstr directive)
{
_RYML_CB_ASSERT(m_stack.m_callbacks, directive.begins_with('%'));
if(directive.begins_with("%TAG"))
{
const id_type pos = _ev_num_tag_directives();
_RYML_CB_CHECK(m_stack.m_callbacks, pos < RYML_MAX_TAG_DIRECTIVES);
_RYML_CB_CHECK(m_stack.m_callbacks, m_ev_tag_directives[pos].create_from_str(directive));
}
}
/** @} */
public:
/** @name YAML arena events */
/** @{ */
substr alloc_arena(size_t len)
{
const size_t curr = m_ev_arena.size();
m_ev_arena.resize(curr + len);
return to_substr(m_ev_arena).sub(curr);
}
/** @} */
public:
/** @cond dev */
/** push a new parent, add a child to the new parent, and set the
* child as the current node */
void _push()
{
_stack_push();
_ev_buf_ensure_(m_stack.size() + id_type(1));
_ev_buf_().reset();
m_curr->ev_data = {};
_c4dbgpf("pushed! level={}", m_curr->level);
}
/** end the current scope */
void _pop()
{
_ev_buf_flush_to_(m_curr->level, m_parent->level);
_stack_pop();
}
template<type_bits bits> C4_ALWAYS_INLINE void _enable__() noexcept
{
m_curr->ev_data.m_type.type = static_cast<NodeType_e>(m_curr->ev_data.m_type.type | bits);
}
template<type_bits bits> C4_ALWAYS_INLINE void _disable__() noexcept
{
m_curr->ev_data.m_type.type = static_cast<NodeType_e>(m_curr->ev_data.m_type.type & (~bits));
}
template<type_bits bits> C4_ALWAYS_INLINE bool _has_any__() const noexcept
{
return (m_curr->ev_data.m_type.type & bits) != 0;
}
void _ev_mark_parent_with_children_()
{
if(m_parent)
m_parent->has_children = true;
}
EventSink& _ev_buf_() noexcept
{
_RYML_CB_ASSERT(m_stack.m_callbacks, (size_t)m_curr->level < m_ev_val_buffers.size());
return m_ev_val_buffers[(size_t)m_curr->level];
}
EventSink& _ev_buf_(id_type level) noexcept
{
_RYML_CB_ASSERT(m_stack.m_callbacks, (size_t)level < m_ev_val_buffers.size());
return m_ev_val_buffers[(size_t)level];
}
EventSink const& _ev_buf_(id_type level) const noexcept
{
_RYML_CB_ASSERT(m_stack.m_callbacks, (size_t)level < m_ev_val_buffers.size());
return m_ev_val_buffers[(size_t)level];
}
static void _ev_buf_flush_to_(EventSink &C4_RESTRICT src, EventSink &C4_RESTRICT dst) noexcept
{
dst.append(src.get());
src.reset();
}
void _ev_buf_flush_to_(id_type level_src, id_type level_dst) noexcept
{
auto &src = _ev_buf_(level_src);
auto &dst = _ev_buf_(level_dst);
_ev_buf_flush_to_(src, dst);
}
void _ev_buf_flush_() noexcept
{
_ev_buf_flush_to_(_ev_buf_(), *m_ev_sink);
}
void _ev_buf_ensure_(id_type size_needed) noexcept
{
if((size_t)size_needed > m_ev_val_buffers.size())
m_ev_val_buffers.resize((size_t)size_needed);
}
C4_ALWAYS_INLINE void _ev_send_(csubstr s) noexcept { _ev_buf_().append(s); }
C4_ALWAYS_INLINE void _ev_send_(char c) noexcept { _ev_buf_().append(c); }
void _ev_send_key_scalar_(csubstr scalar, char scalar_type_code)
{
_ev_send_("=VAL");
_ev_send_key_props_();
_ev_send_(' ');
_ev_send_(scalar_type_code);
_ev_buf_().append_escaped(scalar);
_ev_send_('\n');
}
void _ev_send_val_scalar_(csubstr scalar, char scalar_type_code)
{
_ev_send_("=VAL");
_ev_send_val_props_();
_ev_send_(' ');
_ev_send_(scalar_type_code);
_ev_buf_().append_escaped(scalar);
_ev_send_('\n');
}
void _ev_send_key_props_()
{
if(_has_any_(KEYANCH|KEYREF))
{
_ev_send_(" &");
_ev_send_(m_curr->ev_data.m_key.anchor);
}
if(_has_any_(KEYTAG))
{
_ev_send_tag_(m_curr->ev_data.m_key.tag);
}
m_curr->ev_data.m_key = {};
_disable_(KEYANCH|KEYREF|KEYTAG);
}
void _ev_send_val_props_()
{
if(_has_any_(VALANCH|VALREF))
{
_ev_send_(" &");
_ev_send_(m_curr->ev_data.m_val.anchor);
}
if(m_curr->ev_data.m_type.type & VALTAG)
{
_ev_send_tag_(m_curr->ev_data.m_val.tag);
}
m_curr->ev_data.m_val = {};
_disable_(VALANCH|VALREF|VALTAG);
}
void _ev_send_tag_(csubstr tag)
{
_RYML_CB_ASSERT(m_stack.m_callbacks, !tag.empty());
if(tag.str[0] == '<')
{
_ev_send_(' ');
_ev_send_(tag);
}
else
{
_ev_send_(" <");
_ev_send_(tag);
_ev_send_('>');
}
}
void _ev_clear_tag_directives_()
{
for(TagDirective &td : m_ev_tag_directives)
td = {};
}
id_type _ev_num_tag_directives() const
{
// this assumes we have a very small number of tag directives
for(id_type i = 0; i < RYML_MAX_TAG_DIRECTIVES; ++i)
if(m_ev_tag_directives[i].handle.empty())
return i;
return RYML_MAX_TAG_DIRECTIVES;
}
csubstr _ev_transform_directive(csubstr tag, substr output)
{
// look up from the end: directives are added in order of appearance, so this
// finds the most recently added directive whose handle matches the tag.
for(id_type i = RYML_MAX_TAG_DIRECTIVES-1; i != NONE; --i)
{
TagDirective const& td = m_ev_tag_directives[i];
if(td.handle.empty())
continue;
if(tag.begins_with(td.handle))
{
size_t len = td.transform(tag, output, m_stack.m_callbacks);
if(len == 0)
{
if(tag.begins_with("!<"))
return tag.sub(1);
return tag;
}
_RYML_CB_CHECK(m_stack.m_callbacks, len <= output.len);
return output.first(len);
}
}
if(tag.begins_with('!'))
{
if(is_custom_tag(tag))
{
_RYML_CB_ERR_(m_stack.m_callbacks, "tag not found", m_curr->pos);
}
}
csubstr result = normalize_tag_long(tag, output);
_RYML_CB_CHECK(m_stack.m_callbacks, result.len > 0);
_RYML_CB_CHECK(m_stack.m_callbacks, result.str);
return result;
}
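// Example of the intent (handle resolution is done by the helpers, so treat
// the exact output as an assumption): with a `%TAG !e! tag:example.com,2000:app/`
// directive in effect, a scalar tagged `!e!foo` is expanded to the long form
// `tag:example.com,2000:app/foo`, which _ev_send_tag_() then emits in the
// `<...>` form used by the test-suite events.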
#undef _enable_
#undef _disable_
#undef _has_any_
/** @endcond */
};
/** @} */
} // namespace yml
} // namespace c4
C4_SUPPRESS_WARNING_GCC_POP
#endif /* _C4_YML_EVENT_HANDLER_YAMLSTD_HPP_ */

View File

@@ -1,4 +1,5 @@
#include "test_suite_events.hpp"
#include "test_suite_event_handler.hpp"
#include "test_suite_common.hpp"
#ifndef RYML_SINGLE_HEADER
#include <c4/yml/detail/stack.hpp>
@@ -7,65 +8,21 @@
namespace c4 {
namespace yml {
std::string emit_events_from_source(substr src)
{
EventHandlerYamlStd::EventSink sink;
EventHandlerYamlStd handler(&sink);
ParseEngine<EventHandlerYamlStd> parser(&handler);
parser.parse_in_place_ev("(testyaml)", src);
return sink.result;
}
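// For reference, a document like "foo: bar" is expected to produce the
// standard test-suite event stream (sketch, assuming an implicit document):
//   +STR
//   +DOC
//   +MAP
//   =VAL :foo
//   =VAL :bar
//   -MAP
//   -DOC
//   -STR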
namespace /*anon*/ {
struct ScalarType
csubstr filtered_scalar(csubstr str, Tree *tree)
{
typedef enum {
PLAIN = 0,
SQUOTED,
DQUOTED,
LITERAL,
FOLDED
} ScalarType_e;
ScalarType_e val = PLAIN;
bool operator== (ScalarType_e v) const { return val == v; }
bool operator!= (ScalarType_e v) const { return val != v; }
ScalarType& operator= (ScalarType_e v) { val = v; return *this; }
csubstr to_str() const
{
switch(val)
{
case ScalarType::PLAIN: return csubstr("PLAIN");
case ScalarType::SQUOTED: return csubstr("SQUOTED");
case ScalarType::DQUOTED: return csubstr("DQUOTED");
case ScalarType::LITERAL: return csubstr("LITERAL");
case ScalarType::FOLDED: return csubstr("FOLDED");
}
C4_ERROR("");
return csubstr("");
}
bool is_quoted() const { return val == ScalarType::SQUOTED || val == ScalarType::DQUOTED; }
};
struct OptionalScalar
{
csubstr val = {};
bool was_set = false;
inline operator csubstr() const { return get(); }
inline operator bool() const { return was_set; }
void operator= (csubstr v) { val = v; was_set = true; }
csubstr get() const { RYML_ASSERT(was_set); return val; }
};
#if RYML_NFO
size_t to_chars(c4::substr buf, OptionalScalar const& s)
{
if(!s)
return 0u;
if(s.val.len <= buf.len)
memcpy(buf.str, s.val.str, s.val.len);
return s.val.len;
}
#endif
csubstr filtered_scalar(csubstr str, ScalarType scalar_type, Tree *tree)
{
(void)scalar_type;
csubstr tokens[] = {R"(\n)", R"(\t)", R"(\\)"};
if(!str.first_of_any_iter(std::begin(tokens), std::end(tokens)))
return str;
@@ -120,6 +77,16 @@ csubstr filtered_scalar(csubstr str, ScalarType scalar_type, Tree *tree)
append_str(i);
append_chars("\t", 2u);
}
else if(next1 == 'b')
{
append_str(i);
append_chars("\b", 2u);
}
else if(next1 == 'r')
{
append_str(i);
append_chars("\r", 2u);
}
}
}
append_str(str.len);
@@ -134,62 +101,74 @@ struct Scalar
OptionalScalar anchor = {};
OptionalScalar ref = {};
OptionalScalar tag = {};
ScalarType type = {};
NodeType flags = {};
inline operator bool() const { if(anchor || tag) { RYML_ASSERT(scalar); } return scalar.was_set; }
void add_key_props(Tree *tree, size_t node) const
{
if(ref)
{
_nfo_logf("node[{}]: set key ref: '{}'", node, ref);
_nfo_logf("node[{}]: set key ref: '{}'", node, ref.get());
tree->set_key_ref(node, ref);
}
if(anchor)
{
_nfo_logf("node[{}]: set key anchor: '{}'", node, anchor);
_nfo_logf("node[{}]: set key anchor: '{}'", node, anchor.get());
tree->set_key_anchor(node, anchor);
}
if(tag)
{
csubstr ntag = normalize_tag(tag);
_nfo_logf("node[{}]: set key tag: '{}' -> '{}'", node, tag, ntag);
csubstr ntag = normalize_tag_long(tag);
_nfo_logf("node[{}]: set key tag: '{}' -> '{}'", node, tag.get(), ntag);
tree->set_key_tag(node, ntag);
}
if(type.is_quoted())
if(flags != NOTYPE)
{
_nfo_logf("node[{}]: set key as quoted", node);
tree->_add_flags(node, KEYQUO);
#ifdef RYML_DBG
char buf1[128];
char buf2[128];
char buf3[128];
#endif
_nfo_logf("node[{}]: set key flags: {}: {}->{}", node, flags.type_str(buf1), flags.type_str(buf2), flags.type_str(buf3));
tree->_add_flags(node, flags & KEY_STYLE);
}
}
void add_val_props(Tree *tree, size_t node) const
{
if(ref)
{
_nfo_logf("node[{}]: set val ref: '{}'", node, ref);
_nfo_logf("node[{}]: set val ref: '{}'", node, ref.get());
tree->set_val_ref(node, ref);
}
if(anchor)
{
_nfo_logf("node[{}]: set val anchor: '{}'", node, anchor);
_nfo_logf("node[{}]: set val anchor: '{}'", node, anchor.get());
tree->set_val_anchor(node, anchor);
}
if(tag)
{
csubstr ntag = normalize_tag(tag);
_nfo_logf("node[{}]: set val tag: '{}' -> '{}'", node, tag, ntag);
csubstr ntag = normalize_tag_long(tag);
_nfo_logf("node[{}]: set val tag: '{}' -> '{}'", node, tag.get(), ntag);
tree->set_val_tag(node, ntag);
}
if(type.is_quoted())
if(flags != NOTYPE)
{
_nfo_logf("node[{}]: set val as quoted", node);
tree->_add_flags(node, VALQUO);
#ifdef RYML_DBG
char buf1[128];
char buf2[128];
char buf3[128];
#endif
_nfo_logf("node[{}]: set val flags: {}: {}->{}", node, flags.type_str(buf1), flags.type_str(buf2), flags.type_str(buf3));
tree->_add_flags(node, flags & VAL_STYLE);
}
}
csubstr filtered_scalar(Tree *tree) const
{
return ::c4::yml::filtered_scalar(scalar, type, tree);
return ::c4::yml::filtered_scalar(scalar, tree);
}
};
} // namespace /*anon*/
csubstr parse_anchor_and_tag(csubstr tokens, OptionalScalar *anchor, OptionalScalar *tag)
{
*anchor = OptionalScalar{};
@@ -207,7 +186,7 @@ csubstr parse_anchor_and_tag(csubstr tokens, OptionalScalar *anchor, OptionalSca
*anchor = tokens.first(pos).sub(1);
tokens = tokens.right_of(pos);
}
_nfo_logf("anchor: {}", *anchor);
_nfo_logf("anchor: {}", anchor->get());
}
if(tokens.begins_with('<'))
{
@@ -215,14 +194,114 @@ csubstr parse_anchor_and_tag(csubstr tokens, OptionalScalar *anchor, OptionalSca
RYML_ASSERT(pos != (size_t)csubstr::npos);
*tag = tokens.first(pos + 1);
tokens = tokens.right_of(pos).triml(' ');
_nfo_logf("tag: {}", *tag);
_nfo_logf("tag: {}", tag->maybe_get());
}
return tokens;
}
} // namespace /*anon*/
bool compare_events(csubstr ref_evts, csubstr emt_evts, bool ignore_container_style=false, bool ignore_scalar_style=false)
{
auto diff_line_with_optional_ending = [](csubstr ref, csubstr emt, csubstr optional_ending){
RYML_ASSERT(ref != emt);
ref = ref.stripr(optional_ending).trimr(' ');
emt = emt.stripr(optional_ending).trimr(' ');
bool diff = ref != emt;
return diff;
};
auto diff_val_with_scalar_wildcard = [](csubstr ref, csubstr emt){
RYML_ASSERT(ref.begins_with("=VAL "));
RYML_ASSERT(emt.begins_with("=VAL "));
ref = ref.sub(5);
emt = emt.sub(5);
OptionalScalar reftag = {}, refanchor = {};
OptionalScalar emttag = {}, emtanchor = {};
ref = parse_anchor_and_tag(ref, &refanchor, &reftag).triml(' ');
emt = parse_anchor_and_tag(emt, &emtanchor, &emttag).triml(' ');
if((bool(reftag) != bool(emttag)) || (reftag && (reftag.get() != emttag.get())))
return true;
if((bool(refanchor) != bool(emtanchor)) || (refanchor && (refanchor.get() != emtanchor.get())))
return true;
RYML_ASSERT(ref.len > 0);
RYML_ASSERT(emt.len > 0);
RYML_ASSERT(ref[0] == ':' || ref[0] == '\'' || ref[0] == '"' || ref[0] == '|' || ref[0] == '>');
RYML_ASSERT(emt[0] == ':' || emt[0] == '\'' || emt[0] == '"' || emt[0] == '|' || emt[0] == '>');
ref = ref.sub(1);
emt = emt.sub(1);
if(ref != emt)
return true;
return false;
};
if(bool(ref_evts.len) != bool(emt_evts.len))
return true;
size_t posref = 0;
size_t posemt = 0;
bool fail = false;
while(posref < ref_evts.len && posemt < emt_evts.len)
{
const size_t endref = ref_evts.find('\n', posref);
const size_t endemt = emt_evts.find('\n', posemt);
if((endref == npos || endemt == npos) && (endref != endemt))
{
fail = true;
break;
}
csubstr ref = ref_evts.range(posref, endref);
csubstr emt = emt_evts.range(posemt, endemt);
if(ref != emt)
{
if(ref.begins_with("+DOC"))
{
if(diff_line_with_optional_ending(ref, emt, "---"))
{
fail = true;
break;
}
}
else if(ref.begins_with("-DOC"))
{
if(diff_line_with_optional_ending(ref, emt, "..."))
{
fail = true;
break;
}
}
else if(ignore_container_style && ref.begins_with("+MAP"))
{
if(diff_line_with_optional_ending(ref, emt, "{}"))
{
fail = true;
break;
}
}
else if(ignore_container_style && ref.begins_with("+SEQ"))
{
if(diff_line_with_optional_ending(ref, emt, "[]"))
{
fail = true;
break;
}
}
else if(ignore_scalar_style && ref.begins_with("=VAL"))
{
if(diff_val_with_scalar_wildcard(ref, emt))
{
fail = true;
break;
}
}
else
{
fail = true;
break;
}
}
posref = endref + 1u;
posemt = endemt + 1u;
}
return fail;
}
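// A minimal usage sketch of compare_events() (illustrative only, not part of
// the test suite; the event strings below are made up): the reference events
// use block style and a plain scalar, while the emitted events use flow style
// and a double-quoted scalar, so the comparison reports a difference unless
// both style differences are ignored.
inline void compare_events_usage_sketch()
{
    csubstr ref = "+STR\n+DOC\n+MAP\n=VAL :foo\n=VAL :bar\n-MAP\n-DOC\n-STR\n";
    csubstr emt = "+STR\n+DOC\n+MAP {}\n=VAL \"foo\n=VAL :bar\n-MAP\n-DOC\n-STR\n";
    RYML_ASSERT(compare_events(ref, emt) == true); // true means the events differ
    RYML_ASSERT(compare_events(ref, emt, /*ignore_container_style*/true, /*ignore_scalar_style*/true) == false);
}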
void EventsParser::parse(csubstr src, Tree *C4_RESTRICT tree_)
void parse_events_to_tree(csubstr src, Tree *C4_RESTRICT tree_)
{
struct ParseLevel { size_t tree_node; };
detail::stack<ParseLevel> m_stack = {};
@@ -247,33 +326,34 @@ void EventsParser::parse(csubstr src, Tree *C4_RESTRICT tree_)
{
_nfo_llog("double-quoted scalar!");
curr.scalar = line.sub(1);
curr.type = ScalarType::DQUOTED;
curr.flags = SCALAR_DQUO;
}
else if(line.begins_with('\''))
{
_nfo_llog("single-quoted scalar!");
curr.scalar = line.sub(1);
curr.type = ScalarType::SQUOTED;
curr.flags = SCALAR_SQUO;
}
else if(line.begins_with('|'))
{
_nfo_llog("block literal scalar!");
curr.scalar = line.sub(1);
curr.type = ScalarType::LITERAL;
curr.flags = SCALAR_LITERAL;
}
else if(line.begins_with('>'))
{
_nfo_llog("block folded scalar!");
curr.scalar = line.sub(1);
curr.type = ScalarType::FOLDED;
curr.flags = SCALAR_FOLDED;
}
else
{
_nfo_llog("plain scalar");
ASSERT_TRUE(line.begins_with(':'));
curr.scalar = line.sub(1);
curr.flags = SCALAR_PLAIN;
}
_nfo_logf("parsed scalar: '{}'", curr.scalar);
_nfo_logf("parsed scalar: '{}'", curr.scalar.maybe_get());
if(m_stack.empty())
{
_nfo_log("stack was empty, pushing root as DOC...");
@@ -288,8 +368,8 @@ void EventsParser::parse(csubstr src, Tree *C4_RESTRICT tree_)
ASSERT_TRUE(curr);
_nfo_logf("seq[{}]: adding child", top.tree_node);
size_t node = tree.append_child(top.tree_node);
NodeType_e as_doc = tree.is_stream(top.tree_node) ? DOC : NOTYPE;
_nfo_logf("seq[{}]: child={} val='{}' as_doc=", top.tree_node, node, curr.scalar, NodeType::type_str(as_doc));
NodeType as_doc = tree.is_stream(top.tree_node) ? DOC : NOTYPE;
_nfo_logf("seq[{}]: child={} val='{}' as_doc=", top.tree_node, node, curr.scalar.maybe_get(), as_doc.type_str());
tree.to_val(node, curr.filtered_scalar(&tree), as_doc);
curr.add_val_props(&tree, node);
}
@@ -298,26 +378,27 @@ void EventsParser::parse(csubstr src, Tree *C4_RESTRICT tree_)
_nfo_logf("is map! map_id={}", top.tree_node);
if(!key)
{
_nfo_logf("store key='{}' anchor='{}' tag='{}' type={}", curr.scalar, curr.anchor, curr.tag, curr.type.to_str());
char buf[128];(void)buf;
_nfo_logf("store key='{}' anchor='{}' tag='{}' type={}", curr.scalar.maybe_get(), curr.anchor.maybe_get(), curr.tag.maybe_get(), curr.flags.type_str(buf));
key = curr;
}
else
{
_nfo_logf("map[{}]: adding child", top.tree_node);
size_t node = tree.append_child(top.tree_node);
NodeType_e as_doc = tree.is_stream(top.tree_node) ? DOC : NOTYPE;
_nfo_logf("map[{}]: child={} key='{}' val='{}' as_doc={}", top.tree_node, node, key.scalar, curr.scalar, NodeType::type_str(as_doc));
NodeType as_doc = tree.is_stream(top.tree_node) ? DOC : NOTYPE;
_nfo_logf("map[{}]: child={} key='{}' val='{}' as_doc={}", top.tree_node, node, key.scalar.maybe_get(), curr.scalar.maybe_get(), as_doc.type_str());
tree.to_keyval(node, key.filtered_scalar(&tree), curr.filtered_scalar(&tree), as_doc);
key.add_key_props(&tree, node);
curr.add_val_props(&tree, node);
_nfo_logf("clear key='{}'", key.scalar);
_nfo_logf("clear key='{}'", key.scalar.maybe_get());
key = {};
}
}
else
{
_nfo_logf("setting tree_node={} to DOCVAL...", top.tree_node);
tree.to_val(top.tree_node, curr.filtered_scalar(&tree), DOC);
tree.to_val(top.tree_node, curr.filtered_scalar(&tree), tree.type(top.tree_node));
curr.add_val_props(&tree, top.tree_node);
}
}
@@ -338,7 +419,7 @@ void EventsParser::parse(csubstr src, Tree *C4_RESTRICT tree_)
{
if(key)
{
_nfo_logf("node[{}] is map and key '{}' is pending: set {} as val ref", top.tree_node, key.scalar, alias);
_nfo_logf("node[{}] is map and key '{}' is pending: set {} as val ref", top.tree_node, key.scalar.maybe_get(), alias);
size_t node = tree.append_child(top.tree_node);
tree.to_keyval(node, key.filtered_scalar(&tree), alias);
key.add_key_props(&tree, node);
@@ -364,17 +445,25 @@ void EventsParser::parse(csubstr src, Tree *C4_RESTRICT tree_)
OptionalScalar anchor = {};
OptionalScalar tag = {};
csubstr more_tokens = line.stripl("+SEQ").triml(' ');
NodeType more_flags = NOTYPE;
if(more_tokens.begins_with('['))
{
ASSERT_TRUE(more_tokens.begins_with("[]"));
more_tokens = more_tokens.offs(2, 0).triml(' ');
more_flags.add(FLOW_SL);
_nfo_log("seq is flow");
}
else
{
more_flags.add(BLOCK);
_nfo_log("seq is block");
}
parse_anchor_and_tag(more_tokens, &anchor, &tag);
size_t node = tree.root_id();
if(m_stack.empty())
{
_nfo_log("stack was empty, set root to SEQ");
tree._add_flags(node, SEQ);
tree._add_flags(node, SEQ|more_flags);
m_stack.push({node});
ASSERT_FALSE(key); // for the key to exist, the parent must exist and be a map
}
@@ -383,7 +472,6 @@ void EventsParser::parse(csubstr src, Tree *C4_RESTRICT tree_)
size_t parent = m_stack.top().tree_node;
_nfo_logf("stack was not empty. parent={}", parent);
ASSERT_NE(parent, (size_t)NONE);
NodeType more_flags = NOTYPE;
if(tree.is_doc(parent) && !(tree.is_seq(parent) || tree.is_map(parent)))
{
_nfo_logf("set node to parent={}, add DOC", parent);
@@ -403,7 +491,7 @@ void EventsParser::parse(csubstr src, Tree *C4_RESTRICT tree_)
ASSERT_EQ(tree.is_map(parent) || node == parent, true);
tree.to_seq(node, key.filtered_scalar(&tree), more_flags);
key.add_key_props(&tree, node);
_nfo_logf("clear key='{}'", key.scalar);
_nfo_logf("clear key='{}'", key.scalar.maybe_get());
key = {};
}
else
@@ -432,17 +520,25 @@ void EventsParser::parse(csubstr src, Tree *C4_RESTRICT tree_)
OptionalScalar anchor = {};
OptionalScalar tag = {};
csubstr more_tokens = line.stripl("+MAP").triml(' ');
NodeType more_flags = NOTYPE;
if(more_tokens.begins_with('{'))
{
ASSERT_TRUE(more_tokens.begins_with("{}"));
more_tokens = more_tokens.offs(2, 0).triml(' ');
more_flags.add(FLOW_SL);
_nfo_log("map is flow");
}
else
{
more_flags.add(BLOCK);
_nfo_log("map is block");
}
parse_anchor_and_tag(more_tokens, &anchor, &tag);
size_t node = tree.root_id();
if(m_stack.empty())
{
_nfo_log("stack was empty, set root to MAP");
tree._add_flags(node, MAP);
tree._add_flags(node, MAP|more_flags);
m_stack.push({node});
ASSERT_FALSE(key); // for the key to exist, the parent must exist and be a map
}
@@ -451,7 +547,6 @@ void EventsParser::parse(csubstr src, Tree *C4_RESTRICT tree_)
size_t parent = m_stack.top().tree_node;
_nfo_logf("stack was not empty. parent={}", parent);
ASSERT_NE(parent, (size_t)NONE);
NodeType more_flags = NOTYPE;
if(tree.is_doc(parent) && !(tree.is_seq(parent) || tree.is_map(parent)))
{
_nfo_logf("set node to parent={}, add DOC", parent);
@@ -471,7 +566,7 @@ void EventsParser::parse(csubstr src, Tree *C4_RESTRICT tree_)
ASSERT_EQ(tree.is_map(parent) || node == parent, true);
tree.to_map(node, key.filtered_scalar(&tree), more_flags);
key.add_key_props(&tree, node);
_nfo_logf("clear key='{}'", key.scalar);
_nfo_logf("clear key='{}'", key.scalar.maybe_get());
key = {};
}
else
@@ -535,7 +630,7 @@ void EventsParser::parse(csubstr src, Tree *C4_RESTRICT tree_)
else if(is_sep)
{
_nfo_logf("separator was specified: {}", rem);
if((!tree.is_container(node)) && (!tree.is_doc(node)))
if((!tree.is_container(node)) && (!tree.is_val(node)))
{
tree._add_flags(node, STREAM);
node = tree.append_child(node);
@@ -555,7 +650,7 @@ void EventsParser::parse(csubstr src, Tree *C4_RESTRICT tree_)
}
else
{
if(tree.is_doc(node))
if(tree.is_container(node) || tree.is_val(node))
{
_nfo_log("rearrange root as STREAM");
tree.set_root_as_stream();

View File

@@ -6,38 +6,52 @@
#else
#include <c4/yml/tree.hpp>
#endif
#include <string>
namespace c4 {
namespace yml {
struct EventsParser
{
void parse(csubstr src, Tree *C4_RESTRICT tree);
};
void parse_events_to_tree(csubstr events, Tree *C4_RESTRICT tree);
size_t emit_events(substr buf, Tree const& C4_RESTRICT tree);
size_t emit_events_from_tree(substr buf, Tree const& C4_RESTRICT tree);
std::string emit_events_from_source(substr src);
template<class CharContainer>
void emit_events(CharContainer *container, Tree const& C4_RESTRICT tree)
void emit_events_from_tree(CharContainer *container, Tree const& C4_RESTRICT tree)
{
size_t ret = emit_events(to_substr(*container), tree);
size_t ret = emit_events_from_tree(to_substr(*container), tree);
if(ret > container->size())
{
container->resize(ret);
ret = emit_events(to_substr(*container), tree);
ret = emit_events_from_tree(to_substr(*container), tree);
C4_CHECK(ret == container->size());
}
container->resize(ret);
}
template<class CharContainer>
CharContainer emit_events(Tree const& C4_RESTRICT tree)
CharContainer emit_events_from_tree(Tree const& C4_RESTRICT tree)
{
CharContainer result;
emit_events(&result, tree);
emit_events_from_tree(&result, tree);
return result;
}
struct OptionalScalar
{
csubstr val = {};
bool was_set = false;
inline operator csubstr() const { return get(); }
inline operator bool() const { return was_set; }
void operator= (csubstr v) { val = v; was_set = true; }
csubstr get() const { RYML_ASSERT(was_set); return val; }
csubstr maybe_get() const { return was_set ? val : csubstr(""); }
};
csubstr parse_anchor_and_tag(csubstr tokens, OptionalScalar *anchor, OptionalScalar *tag);
bool compare_events(csubstr ref_evts, csubstr emt_evts, bool ignore_container_style, bool ignore_scalar_style);
} // namespace yml
} // namespace c4

View File

@@ -16,7 +16,7 @@ struct EventsEmitter
Tree const* C4_RESTRICT m_tree;
EventsEmitter(Tree const& tree, substr buf_) : buf(buf_), pos(), m_tree(&tree) {}
void emit_tag(csubstr tag, size_t node);
void emit_scalar(csubstr val, bool quoted);
void emit_scalar(csubstr val, char openchar);
void emit_key_anchor_tag(size_t node);
void emit_val_anchor_tag(size_t node);
void emit_events(size_t node);
@@ -25,7 +25,7 @@ struct EventsEmitter
template<size_t N>
C4_ALWAYS_INLINE void pr(const char (&s)[N])
{
if(pos + N-1 <= buf.len)
if(N > 1 && pos + N-1 <= buf.len)
memcpy(buf.str + pos, s, N-1);
pos += N-1;
}
@@ -59,10 +59,40 @@ struct EventsEmitter
}
};
void EventsEmitter::emit_scalar(csubstr val, bool quoted)
inline char _ev_scalar_code(NodeType masked)
{
constexpr const char openchar[] = {':', '\''};
pr(openchar[quoted]);
if(masked & SCALAR_LITERAL)
return '|';
if(masked & SCALAR_FOLDED)
return '>';
if(masked & SCALAR_SQUO)
return '\'';
if(masked & SCALAR_DQUO)
return '"';
if(masked & SCALAR_PLAIN)
return ':';
return ':';
}
inline char _ev_scalar_code_key(NodeType t)
{
return _ev_scalar_code(t & KEY_STYLE);
}
inline char _ev_scalar_code_val(NodeType t)
{
return _ev_scalar_code(t & VAL_STYLE);
}
inline char _ev_scalar_code_key(Tree const* p, size_t node)
{
return _ev_scalar_code(p->_p(node)->m_type & KEY_STYLE);
}
inline char _ev_scalar_code_val(Tree const* p, size_t node)
{
return _ev_scalar_code(p->_p(node)->m_type & VAL_STYLE);
}
void EventsEmitter::emit_scalar(csubstr val, char openchar)
{
pr(openchar);
size_t prev = 0;
uint8_t const* C4_RESTRICT s = (uint8_t const* C4_RESTRICT) val.str;
for(size_t i = 0; i < val.len; ++i)
@@ -136,7 +166,12 @@ void EventsEmitter::emit_tag(csubstr tag, size_t node)
}
else
{
csubstr ntag = normalize_tag_long(tag);
csubstr ntag = normalize_tag_long(tag, to_substr(tagbuf));
if(!ntag.str)
{
tagbuf.resize(2 * ntag.len);
ntag = normalize_tag_long(tag, to_substr(tagbuf));
}
if(ntag.begins_with('<'))
{
pr(ntag);
@@ -203,7 +238,7 @@ void EventsEmitter::emit_events(size_t node)
pr("=VAL");
emit_key_anchor_tag(node);
pr(' ');
emit_scalar(m_tree->key(node), m_tree->is_key_quoted(node));
emit_scalar(m_tree->key(node), _ev_scalar_code_key(m_tree, node));
pr('\n');
}
}
@@ -220,13 +255,13 @@ void EventsEmitter::emit_events(size_t node)
pr("=VAL");
emit_val_anchor_tag(node);
pr(' ');
emit_scalar(m_tree->val(node), m_tree->is_val_quoted(node));
emit_scalar(m_tree->val(node), _ev_scalar_code_val(m_tree, node));
pr('\n');
}
}
else if(m_tree->is_map(node))
{
pr("+MAP");
pr((m_tree->type(node) & CONTAINER_STYLE_FLOW) ? csubstr("+MAP {}") : csubstr("+MAP"));
emit_val_anchor_tag(node);
pr('\n');
for(size_t child = m_tree->first_child(node); child != NONE; child = m_tree->next_sibling(child))
@@ -235,7 +270,7 @@ void EventsEmitter::emit_events(size_t node)
}
else if(m_tree->is_seq(node))
{
pr("+SEQ");
pr((m_tree->type(node) & CONTAINER_STYLE_FLOW) ? csubstr("+SEQ []") : csubstr("+SEQ"));
emit_val_anchor_tag(node);
pr('\n');
for(size_t child = m_tree->first_child(node); child != NONE; child = m_tree->next_sibling(child))
@@ -257,7 +292,7 @@ void EventsEmitter::emit_doc(size_t node)
pr("\n=VAL");
emit_val_anchor_tag(node);
pr(' ');
emit_scalar(m_tree->val(node), m_tree->is_val_quoted(node));
emit_scalar(m_tree->val(node), _ev_scalar_code_val(m_tree, node));
pr('\n');
}
else
@@ -283,7 +318,7 @@ void EventsEmitter::emit_events()
pr("-STR\n");
}
size_t emit_events(substr buf, Tree const& C4_RESTRICT tree)
size_t emit_events_from_tree(substr buf, Tree const& C4_RESTRICT tree)
{
EventsEmitter e(tree, buf);
e.emit_events();

View File

@@ -4,174 +4,119 @@ namespace c4 {
namespace yml {
// g++-5 does not like creating a csubstr directly from the literal.
// so we use this macro (undefined at the end) to make the
// declarations less verbose:
#define _(testcase, reason) AllowedFailure{csubstr(testcase), csubstr(reason)}
// To see the test case contents, refer to this URL:
// https://github.com/yaml/yaml-test-suite/tree/master/src
constexpr const AllowedFailure allowed_failures[] = {
// g++-5 does not like creating a csubstr directly from the literal.
// so we use this macro to remove cruft from the code:
#define _(testcase, reason) AllowedFailure{csubstr(testcase), csubstr(reason)}
//-------------------------------------------------------------------------
// SECTION 1. Known issues, TODO
//
// These tests are temporarily skipped, and cover issues that must be fixed.
// double quoted scalars
_("G4RS-in_json" , "special characters must be emitted in double quoted style"),
_("G4RS-in_yaml" , "special characters must be emitted in double quoted style"),
_("G4RS-out_yaml", "special characters must be emitted in double quoted style"),
// other
_("UKK6_01-in_yaml", "fails to parse double :: in UNK state"),
// errors that fail to materialize
_("3HFZ-error" , "should not accept scalar after ..."),
_("4EJS-error" , "should not accept double anchor for scalar"),
_("5TRB-error" , "should not accept document-end marker in double quoted string"),
_("5U3A-error" , "should not accept opening a sequence on same line as map key"),
_("7LBH-error" , "should not accept multiline double quoted implicit keys"),
_("9C9N-error" , "should not accept non-indented flow sequence"),
_("9HCY-error" , "should not accept tag directive in non-doc scope"),
_("9JBA-error" , "should not accept comment after flow seq terminating ]"),
_("9MMA-error" , "should not accept empty doc after %YAML directive"),
_("9MQT_01-error" , "should not accept scalars after ..."),
_("B63P-error" , "should not accept directive without doc"),
_("C2SP-error" , "should not accept flow sequence with terminating ] on the next line"),
_("CVW2-error" , "should not accept flow sequence with comment after ,"),
_("CXX2-error" , "should not accept mapping with anchor on document start line"),
_("D49Q-error" , "should not accept multiline single quoted implicit keys"),
_("DK4H-error" , "should not accept implicit key followed by newline"),
_("DK95_01-error" , "should not accept leading tabs in double quoted multiline scalar"),
_("DK95_06-error" , "should not accept tab indentation"),
_("EB22-error" , "should not accept missing document-end marker before directive"),
_("G5U8-error" , "should not accept [-, -]"),
_("H7TQ-error" , "should not accept extra words after directive"),
_("JKF3-error" , "should not accept multiline unindented double quoted scalar"),
_("LHL4-error" , "should not accept tag"),
_("MUS6_00-error" , "should not accept #... at the end of %YAML directive"),
_("MUS6_01-error" , "should not accept #... at the end of %YAML directive"),
_("N782-error" , "TBD"),
_("P2EQ-error" , "should not accept sequence item on same line as previous item"),
_("QB6E-error" , "should not accept indented multiline quoted scalar"),
_("QLJ7-error" , "tag directives should apply only to the next doc (?)"),
_("RXY3-error" , "should not accept document-end marker in single quoted string"),
_("S4GJ-error" , "should not accept text after block scalar indicator"),
_("S98Z-error" , "should not accept block scalar with more spaces than first content line"),
_("SF5V-error" , "should not accept duplicate YAML directive"),
_("SR86-error" , "TBD"),
_("SU5Z-error" , "should not accept comment without whitespace after double quoted scalar"),
_("SU74-error" , "should not accept anchor and alias as mapping key"),
_("SY6V-error" , "TBD"),
_("U99R-error" , "should not accept comma in a tag"),
_("VJP3_00-error" , "should not accept flow collections over many lines"),
_("X4QW-error" , "should not accept comment without whitespace after block scalar indicator"),
_("Y79Y_003-error" , "should not accept leading tabs in seq elmt"),
_("Y79Y_004-error" , "should not accept tab after -"),
_("Y79Y_005-error" , "TBD"),
_("Y79Y_006-error" , "should not accept tab after ?"),
_("Y79Y_007-error" , "tabs tokens"),
_("Y79Y_008-error" , "TBD"),
_("Y79Y_009-error" , "should not accept tab after ?"),
_("YJV2-error" , "should not accept [-]"),
_("ZCZ6-error" , "should not accept invalid mapping in plain single line value"),
_("ZL4Z-error" , "TBD"),
_("ZXT5-error" , "TBD"),
//-------------------------------------------------------------------------
// SECTION 2. Expected errors that fail to materialize.
// maps
_("236B-error" , "should not accept final scalar in a map"),
_("7MNF-error" , "should not accept final scalar in a map"),
_("62EZ-error" , "should not accept invalid block mapping key on same line as previous key"),
_("9CWY-error" , "should not accept final scalar in a map"),
_("CXX2-error" , "should not accept mapping with anchor on document start line"),
_("DK95_06-error" , "should not accept tab indentation"),
_("GDY7-error" , "should not accept comment that looks like a mapping key"),
_("D49Q-error" , "should not accept multiline single quoted implicit keys"),
_("DK4H-error" , "should not accept implicit key followed by newline"),
_("JY7Z-error" , "should not accept trailing content that looks like a mapping"),
_("SU74-error" , "should not accept anchor and alias as mapping key"),
_("T833-error" , "should not accept flow mapping missing a separating comma"),
_("VJP3_00-error" , "should not accept flow collections over many lines"),
_("Y79Y_006-error", "should not accept tab after ?"),
_("Y79Y_007-error", "should not accept tab after :"),
_("Y79Y_008-error", "should not accept tab after ?"),
_("Y79Y_009-error", "should not accept tab after ?"),
_("ZCZ6-error" , "should not accept invalid mapping in plain single line value"),
// seqs
_("5U3A-error" , "should not accept opening a sequence on same line as map key"),
_("6JTT-error" , "should not accept flow sequence without terminating ]"),
_("9C9N-error" , "should not accept non-indented flow sequence"),
_("9JBA-error" , "should not accept comment after flow seq terminating ]"),
_("9MAG-error" , "should not accept flow sequence with invalid comma at the beginning"),
_("CTN5-error" , "should not accept flow sequence with missing elements"),
_("CVW2-error" , "should not accept flow sequence with comment after ,"),
_("G5U8-error" , "should not accept [-, -]"),
_("KS4U-error" , "should not accept item after end of flow sequence"),
_("P2EQ-error" , "should not accept sequence item on same line as previous item"),
_("YJV2-error" , "should not accept [-]"),
_("Y79Y_003-error", "should not accept leading tabs in seq elmt"),
_("Y79Y_004-error", "should not accept tab after -"),
// block scalars
_("2G84_00-error" , "should not accept the block literal spec"),
_("2G84_01-error" , "should not accept the block literal spec"),
_("5LLU-error" , "should not accept folded scalar with wrong indented line after spaces only"),
_("S4GJ-error" , "should not accept text after block scalar indicator"),
_("S98Z-error" , "should not accept block scalar with more spaces than first content line"),
_("X4QW-error" , "should not accept comment without whitespace after block scalar indicator"),
_("Y79Y_000-error", "should not accept leading tabs in the block scalar"),
// quoted scalars
_("55WF-error" , "should not accept invalid escape in double quoted scalar"),
_("7LBH-error" , "should not accept multiline double quoted implicit keys"),
_("DK95_01-error", "should not accept leading tabs in double quoted multiline scalar"),
_("HRE5-error" , "should not accept double quoted scalar with escaped single quote"),
_("JKF3-error" , "should not accept multiline unindented double quoted scalar"),
_("QB6E-error" , "should not accept indented multiline quoted scalar"),
_("RXY3-error" , "should not accept document-end marker in single quoted string"),
_("SU5Z-error" , "should not accept comment without whitespace after double quoted scalar"),
// plain scalars
_("8XDJ-error" , "should not accept comment in multiline scalar"),
_("CML9-error" , "should not accept comment inside flow scalar"),
// documents/streams
_("3HFZ-error" , "should not accept scalar after ..."),
_("5TRB-error" , "should not accept document-end marker in double quoted string"),
_("9MMA-error" , "should not accept empty doc after %YAML directive"),
_("9MQT_01-error", "should not accept scalars after ..."),
_("B63P-error" , "should not accept directive without doc"),
_("EB22-error" , "should not accept missing document-end marker before directive"),
_("H7TQ-error" , "should not accept extra words after directive"),
_("MUS6_00-error", "should not accept #... at the end of %YAML directive"),
_("MUS6_01-error", "should not accept #... at the end of %YAML directive"),
_("N782-error" , "should not accept document markers in flow style"),
_("RHX7-error" , "should not accept directive without document end marker"),
_("SF5V-error" , "should not accept duplicate YAML directive"),
// anchors
_("4EJS-error" , "should not accept double anchor for scalar"),
_("4JVG-error" , "should not accept double anchor for scalar"),
_("SY6V-error" , "should not accept anchor before sequence entry on same line"),
// tags
_("9HCY-error" , "should not accept tag directive in non-doc scope"),
_("BU8L-error" , "should not accept node properties spread over multiple lines"),
_("LHL4-error" , "should not accept tag"),
_("U99R-error" , "should not accept comma in a tag"),
_("QLJ7-error" , "tag directives should apply only to the next doc (?)"),
//-------------------------------------------------------------------------
// SECTION 3. Deliberate ryml limitations.
// SECTION 2. Deliberate ryml limitations.
//
// These tests are skipped because they cover parts of YAML that
// are deliberately not implemented by ryml.
#ifndef RYML_WITH_TAB_TOKENS
// -<tab> or :<tab> are supported only when the above macro is defined
_("A2M4-in_yaml-events" , "tabs tokens"),
_("6BCT-in_yaml" , "tabs tokens"),
_("J3BT-in_yaml-events" , "tabs tokens"),
_("Y79Y_010-in_yaml-events", "tabs tokens"),
_("6BCT-in_yaml" , "tabs tokens"),
_("A2M4-in_yaml" , "tabs tokens"),
_("DC7X-in_yaml" , "tabs tokens"),
_("DK95_00-in_yaml" , "tabs tokens"),
_("DK95_04-in_yaml" , "tabs tokens"),
_("J3BT-in_yaml" , "tabs tokens"),
_("K54U-in_yaml" , "tabs tokens"),
_("Y79Y_010-in_yaml" , "tabs tokens"),
#endif
// container keys are not supported
_("4FJ6-in_yaml" , "only scalar keys allowed (keys cannot be maps or seqs)"),
_("4FJ6-out_yaml" , "only scalar keys allowed (keys cannot be maps or seqs)"),
_("6BFJ-in_yaml" , "only scalar keys allowed (keys cannot be maps or seqs)"),
_("6BFJ-out_yaml" , "only scalar keys allowed (keys cannot be maps or seqs)"),
_("6PBE-in_yaml" , "only scalar keys allowed (keys cannot be maps or seqs)"),
_("6PBE-out_yaml" , "only scalar keys allowed (keys cannot be maps or seqs)"),
_("6PBE-emit_yaml" , "only scalar keys allowed (keys cannot be maps or seqs)"),
_("9MMW-in_yaml" , "only scalar keys allowed (keys cannot be maps or seqs)"),
_("9MMW-out_yaml" , "only scalar keys allowed (keys cannot be maps or seqs)"),
_("KK5P-in_yaml" , "only scalar keys allowed (keys cannot be maps or seqs)"),
_("KK5P-out_yaml" , "only scalar keys allowed (keys cannot be maps or seqs)"),
_("KZN9-in_yaml" , "only scalar keys allowed (keys cannot be maps or seqs)"),
_("KZN9-out_yaml" , "only scalar keys allowed (keys cannot be maps or seqs)"),
_("LX3P-in_yaml" , "only scalar keys allowed (keys cannot be maps or seqs)"),
_("LX3P-out_yaml" , "only scalar keys allowed (keys cannot be maps or seqs)"),
_("M2N8_00-in_yaml" , "only scalar keys allowed (keys cannot be maps or seqs)"),
_("M2N8_00-out_yaml" , "only scalar keys allowed (keys cannot be maps or seqs)"),
_("M2N8_01-in_yaml-events" , "only scalar keys allowed (keys cannot be maps or seqs)"),
_("M2N8_01-out_yaml-events", "only scalar keys allowed (keys cannot be maps or seqs)"),
_("M5DY-in_yaml" , "only scalar keys allowed (keys cannot be maps or seqs)"),
_("M5DY-out_yaml" , "only scalar keys allowed (keys cannot be maps or seqs)"),
_("Q9WF-in_yaml" , "only scalar keys allowed (keys cannot be maps or seqs)"),
_("Q9WF-out_yaml" , "only scalar keys allowed (keys cannot be maps or seqs)"),
_("RZP5-in_yaml" , "only scalar keys allowed (keys cannot be maps or seqs)"),
_("RZP5-out_yaml" , "only scalar keys allowed (keys cannot be maps or seqs)"),
_("SBG9-in_yaml" , "only scalar keys allowed (keys cannot be maps or seqs)"),
_("SBG9-out_yaml" , "only scalar keys allowed (keys cannot be maps or seqs)"),
_("V9D5-in_yaml" , "only scalar keys allowed (keys cannot be maps or seqs)"),
_("V9D5-out_yaml" , "only scalar keys allowed (keys cannot be maps or seqs)"),
_("X38W-in_yaml" , "only scalar keys allowed (keys cannot be maps or seqs)"),
_("X38W-out_yaml" , "only scalar keys allowed (keys cannot be maps or seqs)"),
_("XW4D-in_yaml" , "only scalar keys allowed (keys cannot be maps or seqs)"),
_("XW4D-out_yaml" , "only scalar keys allowed (keys cannot be maps or seqs)"),
// anchors with : are not supported
_("2SXE-in_yaml-events" , "weird characters in anchors, anchors must not end with :"),
// container keys are deliberately not supported by the ryml tree.
// But they ARE supported by the parse engine. So they are dealt
// with separately using the event parser. See below.
//-------------------------------------------------------------------------
// SECTION 3. Problems with the test suite spec. (or our
// understanding of it?)
// malformed json in the test spec
_("35KP-in_json" , "malformed JSON from multiple documents"),
_("5TYM-in_json" , "malformed JSON from multiple documents"),
_("6XDY-in_json" , "malformed JSON from multiple documents"),
_("6WLZ-in_json" , "malformed JSON from multiple documents"),
_("6ZKB-in_json" , "malformed JSON from multiple documents"),
_("7Z25-in_json" , "malformed JSON from multiple documents"),
_("9DXL-in_json" , "malformed JSON from multiple documents"),
_("9KAX-in_json" , "malformed JSON from multiple documents"),
_("9WXW-in_json" , "malformed JSON from multiple documents"),
_("JHB9-in_json" , "malformed JSON from multiple documents"),
_("KSS4-in_json" , "malformed JSON from multiple documents"),
_("L383-in_json" , "malformed JSON from multiple documents"),
_("M7A3-in_json" , "malformed JSON from multiple documents"),
_("RZT7-in_json" , "malformed JSON from multiple documents"),
_("U9NS-in_json" , "malformed JSON from multiple documents"),
_("W4TN-in_json" , "malformed JSON from multiple documents"),
_("35KP-in_json" , "malformed JSON from multiple documents"),
_("5TYM-in_json" , "malformed JSON from multiple documents"),
_("6XDY-in_json" , "malformed JSON from multiple documents"),
_("6WLZ-in_json" , "malformed JSON from multiple documents"),
_("6ZKB-in_json" , "malformed JSON from multiple documents"),
_("7Z25-in_json" , "malformed JSON from multiple documents"),
_("9DXL-in_json" , "malformed JSON from multiple documents"),
_("9KAX-in_json" , "malformed JSON from multiple documents"),
_("9WXW-in_json" , "malformed JSON from multiple documents"),
_("JHB9-in_json" , "malformed JSON from multiple documents"),
_("KSS4-in_json" , "malformed JSON from multiple documents"),
_("L383-in_json" , "malformed JSON from multiple documents"),
_("M7A3-in_json" , "malformed JSON from multiple documents"),
_("PUW8-in_json" , "malformed JSON from multiple documents"),
_("RZT7-in_json" , "malformed JSON from multiple documents"),
_("U9NS-in_json" , "malformed JSON from multiple documents"),
_("UT92-in_json" , "malformed JSON from multiple documents"),
_("W4TN-in_json" , "malformed JSON from multiple documents"),
// malformed test spec?
_("4ABK-out_yaml-events" , "out-yaml contains null, while in-yaml and events contain empty scalars"),
_("4WA9-out_yaml-events" , "out-yaml test spec is missing a --- document token, which is required in the events"),
@@ -199,12 +144,54 @@ constexpr const AllowedFailure allowed_failures[] = {
_("T4YY-out_yaml-events" , "out-yaml test spec is missing a --- document token, which is required in the events"),
_("T5N4-out_yaml-events" , "out-yaml test spec is missing a --- document token, which is required in the events"),
_("VJP3_01-out_yaml-events" , "out-yaml test spec is missing a --- document token, which is required in the events"),
};
constexpr const AllowedFailure container_key_cases[] = {
// these cases have container keys, and cannot be parsed into the
// ryml tree. However, they CAN be parsed by the ryml parse engine.
// Therefore these tests are enabled only when they go through the
// YAML event emitter based on the parse engine (see the sketch
// after this list).
_("4FJ6-in_yaml" , "only scalar keys allowed (keys cannot be containers)"),
_("4FJ6-out_yaml" , "only scalar keys allowed (keys cannot be containers)"),
_("6BFJ-in_yaml" , "only scalar keys allowed (keys cannot be containers)"),
_("6BFJ-out_yaml" , "only scalar keys allowed (keys cannot be containers)"),
_("6PBE-in_yaml" , "only scalar keys allowed (keys cannot be containers)"),
_("6PBE-out_yaml" , "only scalar keys allowed (keys cannot be containers)"),
_("6PBE-emit_yaml" , "only scalar keys allowed (keys cannot be containers)"),
_("9MMW-in_yaml" , "only scalar keys allowed (keys cannot be containers)"),
_("9MMW-out_yaml" , "only scalar keys allowed (keys cannot be containers)"),
_("KK5P-in_yaml" , "only scalar keys allowed (keys cannot be containers)"),
_("KK5P-out_yaml" , "only scalar keys allowed (keys cannot be containers)"),
_("KZN9-in_yaml" , "only scalar keys allowed (keys cannot be containers)"),
_("KZN9-out_yaml" , "only scalar keys allowed (keys cannot be containers)"),
_("LX3P-in_yaml" , "only scalar keys allowed (keys cannot be containers)"),
_("LX3P-out_yaml" , "only scalar keys allowed (keys cannot be containers)"),
_("M2N8_00-in_yaml" , "only scalar keys allowed (keys cannot be containers)"),
_("M2N8_00-out_yaml" , "only scalar keys allowed (keys cannot be containers)"),
_("M2N8_01-in_yaml" , "only scalar keys allowed (keys cannot be containers)"),
_("M2N8_01-out_yaml" , "only scalar keys allowed (keys cannot be containers)"),
_("M5DY-in_yaml" , "only scalar keys allowed (keys cannot be containers)"),
_("M5DY-out_yaml" , "only scalar keys allowed (keys cannot be containers)"),
_("Q9WF-in_yaml" , "only scalar keys allowed (keys cannot be containers)"),
_("Q9WF-out_yaml" , "only scalar keys allowed (keys cannot be containers)"),
_("RZP5-in_yaml" , "only scalar keys allowed (keys cannot be containers)"),
_("RZP5-out_yaml" , "only scalar keys allowed (keys cannot be containers)"),
_("SBG9-in_yaml" , "only scalar keys allowed (keys cannot be containers)"),
_("SBG9-out_yaml" , "only scalar keys allowed (keys cannot be containers)"),
_("V9D5-in_yaml" , "only scalar keys allowed (keys cannot be containers)"),
_("V9D5-out_yaml" , "only scalar keys allowed (keys cannot be containers)"),
_("X38W-in_yaml" , "only scalar keys allowed (keys cannot be containers)"),
_("X38W-out_yaml" , "only scalar keys allowed (keys cannot be containers)"),
_("XW4D-in_yaml" , "only scalar keys allowed (keys cannot be containers)"),
_("XW4D-out_yaml" , "only scalar keys allowed (keys cannot be containers)"),
#undef _
};
cspan<AllowedFailure> g_allowed_failures = allowed_failures;
cspan<AllowedFailure> g_container_key_cases = container_key_cases;
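// A minimal sketch of what "dealt with by the event parser" means for the
// container-key cases above (hypothetical helper, not part of the suite; it
// assumes the EventHandlerYamlStd/ParseEngine declarations from the event
// handler header used by the event tests in this commit are visible here):
// instead of materializing a Tree, run the parse engine with the std-events
// handler and work on the emitted event string.
inline void sketch_parse_container_key_to_events()
{
    EventHandlerYamlStd::EventSink sink;
    EventHandlerYamlStd handler(&sink);
    ParseEngine<EventHandlerYamlStd> parser(&handler);
    // a sequence used as a mapping key: not representable in the ryml tree,
    // but accepted by the parse engine (the YAML below is illustrative)
    char src[] = "? [a, b]\n: val\n";
    parser.parse_in_place_ev("(container key sketch)", substr(src, sizeof(src) - 1));
    // sink.result now holds the standard YAML-test-suite events for this
    // document; it can be checked with compare_events() against a reference.
}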
AllowedFailure is_failure_expected(csubstr casename)
{
@@ -215,6 +202,15 @@ AllowedFailure is_failure_expected(csubstr casename)
return {};
}
AllowedFailure case_has_container_keys(csubstr casename)
{
RYML_CHECK(casename.not_empty());
for(AllowedFailure const& af : g_container_key_cases)
if(af.test_name == casename || casename.begins_with(af.test_name))
return af;
return {};
}
} // namespace yml
} // namespace c4

View File

@@ -20,6 +20,7 @@ struct AllowedFailure
};
AllowedFailure is_failure_expected(csubstr casename);
AllowedFailure case_has_container_keys(csubstr casename);
} // namespace yml
} // namespace c4

File diff suppressed because it is too large

File diff suppressed because it is too large

View File

@@ -4,449 +4,469 @@
#endif
#include <gtest/gtest.h>
#include "./test_case.hpp"
#include "./test_lib/test_case.hpp"
#include "./test_suite/test_suite_events.hpp"
#include "./test_suite/test_suite_event_handler.hpp"
#include "./test_suite/test_suite_events_emitter.cpp" // HACK
namespace c4 {
namespace yml {
void test_evts(csubstr src, std::string expected)
struct EventsCase
{
Tree tree = parse_in_arena(src);
#if RYML_DBG
print_tree(tree);
#endif
auto actual = emit_events<std::string>(tree);
EXPECT_EQ(actual, expected);
const char* file;
int line;
// previously, the strings below were of type std::string, but
// valgrind was complaining about a problem during initialization of
// the parameterized test cases, probably some SIOF (static
// initialization order fiasco).
//
// So we use csubstr instead (see the sketch right after this struct):
csubstr name;
csubstr src;
csubstr expected_events_from_parser;
csubstr expected_events_from_tree;
EventsCase(const char *file_, int line_, csubstr name_, csubstr src_, csubstr from_parser, csubstr from_tree)
: file(file_)
, line(line_)
, name(name_)
, src(src_)
, expected_events_from_parser(from_parser)
, expected_events_from_tree(from_tree)
{
}
EventsCase(const char *file_, int line_, csubstr name_, csubstr src_, csubstr evts)
: file(file_)
, line(line_)
, name(name_)
, src(src_)
, expected_events_from_parser(evts)
, expected_events_from_tree(evts)
{
}
};
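// A minimal sketch of the reasoning above (hypothetical types, not used by the
// tests): an array whose elements contain only trivial members is
// constant-initialized, so no constructors run at program startup, whereas
// std::string members would force dynamic initialization of every element,
// which is where static-initialization-order problems (and the valgrind
// complaint mentioned above) can originate.
struct TrivialCaseView { const char *name; const char *src; }; // no ctor -> constant init
constexpr TrivialCaseView k_sketch_cases[] = {
    {"empty", ""},
    {"docval", "'quoted val'\n"},
};
// by contrast, `struct DynamicCase { std::string name; };` used as the element
// type of a namespace-scope array would run a constructor per element during
// static initialization of this translation unit.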
class EventsTest : public testing::TestWithParam<EventsCase> {};
TEST_P(EventsTest, from_parser)
{
EventsCase const& ec = GetParam();
printf("%s:%d: %s", ec.file, ec.line, ec.name.str);
RYML_TRACE_FMT("defined in:\n{}:{}: {}", ec.file, ec.line, ec.name);
EventHandlerYamlStd::EventSink sink;
EventHandlerYamlStd handler(&sink);
ParseEngine<EventHandlerYamlStd> parser(&handler);
std::string src_copy(ec.src.str, ec.src.len);
parser.parse_in_place_ev("(testyaml)", to_substr(src_copy));
_c4dbgpf("~~~\n{}~~~\n", sink.result);
std::string exp_copy(ec.expected_events_from_parser.str, ec.expected_events_from_parser.len);
EXPECT_EQ(sink.result, exp_copy); // use the diff from std::string which is nice
}
TEST(events, empty)
TEST_P(EventsTest, from_tree)
{
test_evts(
R"()",
R"(+STR
-STR
)"
);
EventsCase const& ec = GetParam();
printf("%s:%d: %s", ec.file, ec.line, ec.name.str);
RYML_TRACE_FMT("defined in:\n{}:{}: {}", ec.file, ec.line, ec.name);
const Tree tree = parse_in_arena(to_csubstr(ec.src));
_c4dbg_tree("parsed tree", tree);
std::string exp_copy(ec.expected_events_from_tree.str, ec.expected_events_from_tree.len);
EXPECT_EQ(emit_events_from_tree<std::string>(tree), exp_copy);
}
TEST(events, empty_whitespace)
{
test_evts(
R"( )",
R"(+STR
-STR
)"
);
}
TEST(events, empty_whitespace_newlines)
{
test_evts(
R"(
)",
R"(+STR
-STR
)"
);
}
//-----------------------------------------------------------------------------
//-----------------------------------------------------------------------------
//-----------------------------------------------------------------------------
TEST(events, empty_whitespace_newlines_comments)
{
test_evts(
R"(
# a comment
)",
R"(+STR
-STR
)"
);
}
TEST(events, docval)
{
test_evts(
R"('quoted val'
)",
R"(+STR
+DOC
=VAL 'quoted val
-DOC
-STR
)"
);
}
TEST(events, docsep)
{
test_evts(
R"(--- 'quoted val'
--- another
...
--- and yet another
...
---
...
)",
R"(+STR
+DOC ---
=VAL 'quoted val
-DOC
+DOC ---
=VAL :another
-DOC
+DOC ---
=VAL :and yet another
-DOC
+DOC ---
=VAL :
-DOC
-STR
)"
);
}
TEST(events, docsep_v2)
{
test_evts(
R"(
doc1
---
doc2
...
doc3
)",
R"(+STR
+DOC ---
=VAL :doc1
-DOC
+DOC ---
=VAL :doc2
-DOC
+DOC ---
=VAL :doc3
-DOC
-STR
)"
);
}
TEST(events, basic_map)
{
test_evts(
"{foo: bar}",
R"(+STR
+DOC
+MAP
=VAL :foo
=VAL :bar
-MAP
-DOC
-STR
)"
);
}
TEST(events, basic_seq)
{
test_evts(
#define _ec(name, src, ...) EventsCase{__FILE__, __LINE__, name, src, __VA_ARGS__}
const EventsCase events_cases[] = {
//=======================================================
_ec("empty",
"",
"+STR\n"
"-STR\n"),
//=======================================================
_ec("empty_whitespace",
" ",
"+STR\n"
"-STR\n"),
//=======================================================
_ec("empty_whitespace_newlines",
"\n ",
"+STR\n"
"-STR\n"),
//=======================================================
_ec("empty_whitespace_newlines_comments",
"\n"
"# a comment\n"
" ",
"+STR\n"
"-STR\n"),
//=======================================================
_ec("docval",
"'quoted val'\n"
,
"+STR\n"
"+DOC\n"
"=VAL 'quoted val\n"
"-DOC\n"
"-STR\n"),
//=======================================================
_ec("docsep",
"--- 'quoted val'\n"
"--- another\n"
"...\n"
"--- and yet another\n"
"...\n"
"---\n"
"...\n"
,
"+STR\n"
"+DOC ---\n"
"=VAL 'quoted val\n"
"-DOC\n"
"+DOC ---\n"
"=VAL :another\n"
"-DOC ...\n"
"+DOC ---\n"
"=VAL :and yet another\n"
"-DOC ...\n"
"+DOC ---\n"
"=VAL :\n"
"-DOC ...\n"
"-STR\n"
,
"+STR\n"
"+DOC ---\n"
"=VAL 'quoted val\n"
"-DOC\n"
"+DOC ---\n"
"=VAL :another\n"
"-DOC\n"
"+DOC ---\n"
"=VAL :and yet another\n"
"-DOC\n"
"+DOC ---\n"
"=VAL :\n"
"-DOC\n"
"-STR\n"),
//=======================================================
_ec("docsep_v2"
,
"\n"
"doc1\n"
"---\n"
"doc2\n"
"...\n"
"doc3\n"
,
"+STR\n"
"+DOC\n"
"=VAL :doc1\n"
"-DOC\n"
"+DOC ---\n"
"=VAL :doc2\n"
"-DOC ...\n"
"+DOC\n"
"=VAL :doc3\n"
"-DOC\n"
"-STR\n"
,
"+STR\n"
"+DOC ---\n"
"=VAL :doc1\n"
"-DOC\n"
"+DOC ---\n"
"=VAL :doc2\n"
"-DOC\n"
"+DOC ---\n"
"=VAL :doc3\n"
"-DOC\n"
"-STR\n"),
//=======================================================
_ec("basic_map",
"{foo: bar}"
,
"+STR\n"
"+DOC\n"
"+MAP {}\n"
"=VAL :foo\n"
"=VAL :bar\n"
"-MAP\n"
"-DOC\n"
"-STR\n"),
//=======================================================
_ec("basic_seq",
"[foo, bar]",
R"(+STR
+DOC
+SEQ
=VAL :foo
=VAL :bar
-SEQ
-DOC
-STR
)"
);
}
TEST(events, escapes)
{
test_evts(
R"("\t\ \ \r\n\0\f\/\a\v\e\N\_\L\P \b")",
"+STR\n"
"+DOC\n"
"=VAL '\\t\\t \\r\\n\\0\\f/\\a\\v\\e\\N\\_\\L\\P \\b" "\n"
"+SEQ []\n"
"=VAL :foo\n"
"=VAL :bar\n"
"-SEQ\n"
"-DOC\n"
"-STR\n"
);
}
TEST(events, dquo_bytes)
{
test_evts(
R"("\x0a\x0a\u263A\x0a\x55\x56\x57\x0a\u2705\U0001D11E")",
"-STR\n"),
//=======================================================
_ec("escapes",
"\"\\t\\ \\ \\r\\n\\0\\f\\/\\a\\v\\e\\N\\_\\L\\P \\b\"",
"+STR\n"
"+DOC\n"
"=VAL '\\n\\n☺\\nUVW\\n✅𝄞" "\n"
"=VAL \"\\t\\t \\r\\n\\0\\f/\\a\\v\\e\\N\\_\\L\\P \\b" "\n"
"-DOC\n"
"-STR\n"),
//=======================================================
_ec("dquo_bytes",
"\"\\x0a\\x0a\\u263A\\x0a\\x55\\x56\\x57\\x0a\\u2705\\U0001D11E\"",
"+STR\n"
"+DOC\n"
"=VAL \"\\n\\n☺\\nUVW\\n✅𝄞" "\n"
"-DOC\n"
"-STR\n"),
//=======================================================
_ec("sets",
"--- !!set\n"
"? Mark McGwire\n"
"? Sammy Sosa\n"
"? Ken Griff\n",
"+STR\n"
"+DOC ---\n"
"+MAP <tag:yaml.org,2002:set>\n"
"=VAL :Mark McGwire\n"
"=VAL :\n"
"=VAL :Sammy Sosa\n"
"=VAL :\n"
"=VAL :Ken Griff\n"
"=VAL :\n"
"-MAP\n"
"-DOC\n"
"-STR\n"),
//=======================================================
_ec("binary"
,
"canonical: !!binary \"\\\n"
" R0lGODlhDAAMAIQAAP//9/X17unp5WZmZgAAAOfn515eXvPz7Y6OjuDg4J+fn5\\\n"
" OTk6enp56enmlpaWNjY6Ojo4SEhP/++f/++f/++f/++f/++f/++f/++f/++f/+\\\n"
" +f/++f/++f/++f/++f/++SH+Dk1hZGUgd2l0aCBHSU1QACwAAAAADAAMAAAFLC\\\n"
" AgjoEwnuNAFOhpEMTRiggcz4BNJHrv/zCFcLiwMWYNG84BwwEeECcgggoBADs=\"\n"
"generic: !!binary |\n"
" R0lGODlhDAAMAIQAAP//9/X17unp5WZmZgAAAOfn515eXvPz7Y6OjuDg4J+fn5\n"
" OTk6enp56enmlpaWNjY6Ojo4SEhP/++f/++f/++f/++f/++f/++f/++f/++f/+\n"
" +f/++f/++f/++f/++f/++SH+Dk1hZGUgd2l0aCBHSU1QACwAAAAADAAMAAAFLC\n"
" AgjoEwnuNAFOhpEMTRiggcz4BNJHrv/zCFcLiwMWYNG84BwwEeECcgggoBADs=\n"
"description:\n"
" The binary value above is a tiny arrow encoded as a gif image.\n"
,
"+STR\n"
"+DOC\n"
"+MAP\n"
"=VAL :canonical\n"
"=VAL <tag:yaml.org,2002:binary> \"R0lGODlhDAAMAIQAAP//9/X17unp5WZmZgAAAOfn515eXvPz7Y6OjuDg4J+fn5OTk6enp56enmlpaWNjY6Ojo4SEhP/++f/++f/++f/++f/++f/++f/++f/++f/++f/++f/++f/++f/++f/++SH+Dk1hZGUgd2l0aCBHSU1QACwAAAAADAAMAAAFLCAgjoEwnuNAFOhpEMTRiggcz4BNJHrv/zCFcLiwMWYNG84BwwEeECcgggoBADs=\n"
"=VAL :generic\n"
"=VAL <tag:yaml.org,2002:binary> |R0lGODlhDAAMAIQAAP//9/X17unp5WZmZgAAAOfn515eXvPz7Y6OjuDg4J+fn5\\nOTk6enp56enmlpaWNjY6Ojo4SEhP/++f/++f/++f/++f/++f/++f/++f/++f/+\\n+f/++f/++f/++f/++f/++SH+Dk1hZGUgd2l0aCBHSU1QACwAAAAADAAMAAAFLC\\nAgjoEwnuNAFOhpEMTRiggcz4BNJHrv/zCFcLiwMWYNG84BwwEeECcgggoBADs=\\n\n"
"=VAL :description\n"
"=VAL :The binary value above is a tiny arrow encoded as a gif image.\n"
"-MAP\n"
"-DOC\n"
"-STR\n"
);
}
TEST(events, sets)
{
test_evts(
R"(--- !!set
? Mark McGwire
? Sammy Sosa
? Ken Griff
)",
R"(+STR
+DOC ---
+MAP <tag:yaml.org,2002:set>
=VAL :Mark McGwire
=VAL :
=VAL :Sammy Sosa
=VAL :
=VAL :Ken Griff
=VAL :
-MAP
-DOC
-STR
)");
}
TEST(events, binary)
{
test_evts(
R"(canonical: !!binary "\
R0lGODlhDAAMAIQAAP//9/X17unp5WZmZgAAAOfn515eXvPz7Y6OjuDg4J+fn5\
OTk6enp56enmlpaWNjY6Ojo4SEhP/++f/++f/++f/++f/++f/++f/++f/++f/+\
+f/++f/++f/++f/++f/++SH+Dk1hZGUgd2l0aCBHSU1QACwAAAAADAAMAAAFLC\
AgjoEwnuNAFOhpEMTRiggcz4BNJHrv/zCFcLiwMWYNG84BwwEeECcgggoBADs="
generic: !!binary |
R0lGODlhDAAMAIQAAP//9/X17unp5WZmZgAAAOfn515eXvPz7Y6OjuDg4J+fn5
OTk6enp56enmlpaWNjY6Ojo4SEhP/++f/++f/++f/++f/++f/++f/++f/++f/+
+f/++f/++f/++f/++f/++SH+Dk1hZGUgd2l0aCBHSU1QACwAAAAADAAMAAAFLC
AgjoEwnuNAFOhpEMTRiggcz4BNJHrv/zCFcLiwMWYNG84BwwEeECcgggoBADs=
description:
The binary value above is a tiny arrow encoded as a gif image.
)",
R"(+STR
+DOC
+MAP
=VAL :canonical
=VAL <tag:yaml.org,2002:binary> 'R0lGODlhDAAMAIQAAP//9/X17unp5WZmZgAAAOfn515eXvPz7Y6OjuDg4J+fn5OTk6enp56enmlpaWNjY6Ojo4SEhP/++f/++f/++f/++f/++f/++f/++f/++f/++f/++f/++f/++f/++f/++SH+Dk1hZGUgd2l0aCBHSU1QACwAAAAADAAMAAAFLCAgjoEwnuNAFOhpEMTRiggcz4BNJHrv/zCFcLiwMWYNG84BwwEeECcgggoBADs=
=VAL :generic
=VAL <tag:yaml.org,2002:binary> 'R0lGODlhDAAMAIQAAP//9/X17unp5WZmZgAAAOfn515eXvPz7Y6OjuDg4J+fn5\nOTk6enp56enmlpaWNjY6Ojo4SEhP/++f/++f/++f/++f/++f/++f/++f/++f/+\n+f/++f/++f/++f/++f/++SH+Dk1hZGUgd2l0aCBHSU1QACwAAAAADAAMAAAFLC\nAgjoEwnuNAFOhpEMTRiggcz4BNJHrv/zCFcLiwMWYNG84BwwEeECcgggoBADs=\n
=VAL :description
=VAL :The binary value above is a tiny arrow encoded as a gif image.
-MAP
-DOC
-STR
)");
}
),
//=======================================================
_ec("tag_directives_wtf",
"!!foo fluorescent",
"+STR\n"
"+DOC\n"
"=VAL <tag:yaml.org,2002:foo> :fluorescent\n"
"-DOC\n"
"-STR\n"),
//=======================================================
_ec("tag_directives_6CK3",
"\n"
"%TAG !e! tag:example.com,2000:app/\n"
"---\n"
"- !local foo\n"
"- !!str bar\n"
"- !e!tag%21 baz\n",
"+STR\n"
"+DOC ---\n"
"+SEQ\n"
"=VAL <!local> :foo\n"
"=VAL <tag:yaml.org,2002:str> :bar\n"
"=VAL <tag:example.com,2000:app/tag!> :baz\n"
"-SEQ\n"
"-DOC\n"
"-STR\n"),
//=======================================================
_ec("tag_directives_6VLF",
"\n"
"%FOO bar baz # Should be ignored\n"
" # with a warning.\n"
"--- \"foo\"\n",
"+STR\n"
"+DOC ---\n"
"=VAL \"foo\n"
"-DOC\n"
"-STR\n"),
//=======================================================
_ec("tag_directives_6WLZ",
"\n"
"# Private\n"
"---\n"
"!foo \"bar\"\n"
"...\n"
"# Global\n"
"%TAG ! tag:example.com,2000:app/\n"
"---\n"
"!foo \"bar\"\n",
"+STR\n"
"+DOC ---\n"
"=VAL <!foo> \"bar\n"
"-DOC ...\n"
"+DOC ---\n"
"=VAL <tag:example.com,2000:app/foo> \"bar\n"
"-DOC\n"
"-STR\n",
"+STR\n"
"+DOC ---\n"
"=VAL <!foo> \"bar\n"
"-DOC\n"
"+DOC ---\n"
"=VAL <tag:example.com,2000:app/foo> \"bar\n"
"-DOC\n"
"-STR\n"),
//=======================================================
_ec("tag_directives_9WXW",
"\n"
"# Private\n"
"#--- # note this is commented out\n"
"!foo \"bar\"\n"
"...\n"
"# Global\n"
"%TAG ! tag:example.com,2000:app/\n"
"---\n"
"!foo \"bar\"\n",
"+STR\n"
"+DOC\n"
"=VAL <!foo> \"bar\n"
"-DOC ...\n"
"+DOC ---\n"
"=VAL <tag:example.com,2000:app/foo> \"bar\n"
"-DOC\n"
"-STR\n",
"+STR\n"
"+DOC ---\n"
"=VAL <!foo> \"bar\n"
"-DOC\n"
"+DOC ---\n"
"=VAL <tag:example.com,2000:app/foo> \"bar\n"
"-DOC\n"
"-STR\n"),
//=======================================================
_ec("tag_directives_7FWL",
"!<tag:yaml.org,2002:str> foo :\n"
" !<!bar> baz\n",
"+STR\n"
"+DOC\n"
"+MAP\n"
"=VAL <tag:yaml.org,2002:str> :foo\n"
"=VAL <!bar> :baz\n"
"-MAP\n"
"-DOC\n"
"-STR\n"),
//=======================================================
_ec("tag_directives_P76L",
"\n"
"%TAG !! tag:example.com,2000:app/\n"
"---\n"
"!!int 1 - 3 # Interval, not integer\n",
"+STR\n"
"+DOC ---\n"
"=VAL <tag:example.com,2000:app/int> :1 - 3\n"
"-DOC\n"
"-STR\n"),
//=======================================================
_ec("tag_directives_S4JQ",
"\n"
"- \"12\"\n"
"- 12\n"
"- ! 12\n"
,
"+STR\n"
"+DOC\n"
"+SEQ\n"
"=VAL \"12\n"
"=VAL :12\n"
"=VAL <!> :12\n"
"-SEQ\n"
"-DOC\n"
"-STR\n"),
//=======================================================
_ec("tag_directives_lookup",
"\n"
"%TAG !m! !my-\n"
"--- # Bulb here\n"
"!m!light fluorescent\n"
"...\n"
"%TAG !m! !meta-\n"
"--- # Color here\n"
"!m!light green\n"
,
"+STR\n"
"+DOC ---\n"
"=VAL <!my-light> :fluorescent\n"
"-DOC ...\n"
"+DOC ---\n"
"=VAL <!meta-light> :green\n"
"-DOC\n"
"-STR\n"
,
"+STR\n"
"+DOC ---\n"
"=VAL <!my-light> :fluorescent\n"
"-DOC\n"
"+DOC ---\n"
"=VAL <!meta-light> :green\n"
"-DOC\n"
"-STR\n"),
//=======================================================
_ec("anchors_refs",
"\n"
"A: &A\n"
" V: 3\n"
" L:\n"
" - 1\n"
"B:\n"
" <<: *A\n"
" V: 4\n"
" L:\n"
" -5\n",
"+STR\n"
"+DOC\n"
"+MAP\n"
"=VAL :A\n"
"+MAP &A\n"
"=VAL :V\n"
"=VAL :3\n"
"=VAL :L\n"
"+SEQ\n"
"=VAL :1\n"
"-SEQ\n"
"-MAP\n"
"=VAL :B\n"
"+MAP\n"
"=VAL :<<\n"
"=ALI *A\n"
"=VAL :V\n"
"=VAL :4\n"
"=VAL :L\n"
"=VAL :-5\n"
"-MAP\n"
"-MAP\n"
"-DOC\n"
"-STR\n"),
};
TEST(events, tag_directives_6CK3)
{
test_evts(
R"(
%TAG !e! tag:example.com,2000:app/
---
- !local foo
- !!str bar
- !e!tag%21 baz
)",
R"(+STR
+DOC ---
+SEQ
=VAL <!local> :foo
=VAL <tag:yaml.org,2002:str> :bar
=VAL <tag:example.com,2000:app/tag!> :baz
-SEQ
-DOC
-STR
)");
}
TEST(events, tag_directives_6VLF)
{
test_evts(
R"(
%FOO bar baz # Should be ignored
# with a warning.
--- "foo"
)",
R"(+STR
+DOC ---
=VAL 'foo
-DOC
-STR
)");
}
TEST(events, tag_directives_6WLZ)
{
test_evts(
R"(
# Private
---
!foo "bar"
...
# Global
%TAG ! tag:example.com,2000:app/
---
!foo "bar"
)",
R"(+STR
+DOC ---
=VAL <!foo> 'bar
-DOC
+DOC ---
=VAL <tag:example.com,2000:app/foo> 'bar
-DOC
-STR
)");
}
TEST(events, tag_directives_9WXW)
{
test_evts(
R"(
# Private
#--- # note this is commented out
!foo "bar"
...
# Global
%TAG ! tag:example.com,2000:app/
---
!foo "bar"
)",
R"(+STR
+DOC ---
=VAL <!foo> 'bar
-DOC
+DOC ---
=VAL <tag:example.com,2000:app/foo> 'bar
-DOC
-STR
)");
}
TEST(events, tag_directives_7FWL)
{
test_evts(
R"(!<tag:yaml.org,2002:str> foo :
!<!bar> baz
)",
R"(+STR
+DOC
+MAP
=VAL <tag:yaml.org,2002:str> :foo
=VAL <!bar> :baz
-MAP
-DOC
-STR
)");
}
TEST(events, tag_directives_P76L)
{
test_evts(
R"(
%TAG !! tag:example.com,2000:app/
---
!!int 1 - 3 # Interval, not integer
)",
R"(+STR
+DOC ---
=VAL <tag:example.com,2000:app/int> :1 - 3
-DOC
-STR
)");
}
TEST(events, tag_directives_S4JQ)
{
test_evts(
R"(
- "12"
- 12
- ! 12
)",
R"(+STR
+DOC
+SEQ
=VAL '12
=VAL :12
=VAL <!> :12
-SEQ
-DOC
-STR
)");
}
TEST(events, tag_directives_lookup)
{
test_evts(
R"(
%TAG !m! !my-
--- # Bulb here
!m!light fluorescent
...
%TAG !m! !meta-
--- # Color here
!m!light green
)",
R"(+STR
+DOC ---
=VAL <!my-light> :fluorescent
-DOC
+DOC ---
=VAL <!meta-light> :green
-DOC
-STR
)");
}
TEST(events, anchors_refs)
{
test_evts(
R"(
A: &A
V: 3
L:
- 1
B:
<<: *A
V: 4
L:
-5
)",
R"(+STR
+DOC
+MAP
=VAL :A
+MAP &A
=VAL :V
=VAL :3
=VAL :L
+SEQ
=VAL :1
-SEQ
-MAP
=VAL :B
+MAP
=VAL :<<
=ALI *A
=VAL :V
=VAL :4
=VAL :L
=VAL :-5
-MAP
-MAP
-DOC
-STR
)");
}
INSTANTIATE_TEST_SUITE_P(Events, EventsTest, testing::ValuesIn(events_cases));
//-----------------------------------------------------------------------------
//-----------------------------------------------------------------------------