Make enums into enum classes

This makes all enums into enum classes except for:

* The internal enums in src/stream.cpp:
    UtfIntroState
    UtfIntroCharType
* EMITTER_MANIP (covered separately in #989)
* Pseudo enums (covered separately in #990)

Signed-off-by: Ted Lyngmo <ted@lyncon.se>
This commit is contained in:
Ted Lyngmo 2021-04-23 15:07:53 +02:00
parent 07c4846d04
commit 95d661d989
23 changed files with 219 additions and 217 deletions

View File

@ -327,7 +327,7 @@ inline const Node Node::operator[](const Key& key) const {
detail::node* value = detail::node* value =
static_cast<const detail::node&>(*m_pNode).get(key, m_pMemory); static_cast<const detail::node&>(*m_pNode).get(key, m_pMemory);
if (!value) { if (!value) {
return Node(ZombieNode, key_to_string(key)); return Node(Zombie::ZombieNode, key_to_string(key));
} }
return Node(*value, m_pMemory); return Node(*value, m_pMemory);
} }
@ -352,7 +352,7 @@ inline const Node Node::operator[](const Node& key) const {
detail::node* value = detail::node* value =
static_cast<const detail::node&>(*m_pNode).get(*key.m_pNode, m_pMemory); static_cast<const detail::node&>(*m_pNode).get(*key.m_pNode, m_pMemory);
if (!value) { if (!value) {
return Node(ZombieNode, key_to_string(key)); return Node(Zombie::ZombieNode, key_to_string(key));
} }
return Node(*value, m_pMemory); return Node(*value, m_pMemory);
} }

View File

@ -21,9 +21,9 @@ struct iterator_value : public Node, std::pair<Node, Node> {
iterator_value() = default; iterator_value() = default;
explicit iterator_value(const Node& rhs) explicit iterator_value(const Node& rhs)
: Node(rhs), : Node(rhs),
std::pair<Node, Node>(Node(Node::ZombieNode), Node(Node::ZombieNode)) {} std::pair<Node, Node>(Node(Node::Zombie::ZombieNode), Node(Node::Zombie::ZombieNode)) {}
explicit iterator_value(const Node& key, const Node& value) explicit iterator_value(const Node& key, const Node& value)
: Node(Node::ZombieNode), std::pair<Node, Node>(key, value) {} : Node(Node::Zombie::ZombieNode), std::pair<Node, Node>(key, value) {}
}; };
} }
} }

View File

@ -114,7 +114,7 @@ class YAML_CPP_API Node {
void force_insert(const Key& key, const Value& value); void force_insert(const Key& key, const Value& value);
private: private:
enum Zombie { ZombieNode }; enum class Zombie { ZombieNode };
explicit Node(Zombie); explicit Node(Zombie);
explicit Node(Zombie, const std::string&); explicit Node(Zombie, const std::string&);
explicit Node(detail::node& node, detail::shared_memory_holder pMemory); explicit Node(detail::node& node, detail::shared_memory_holder pMemory);

View File

@ -15,71 +15,71 @@
namespace YAML { namespace YAML {
template <typename> template <typename>
struct is_numeric { struct is_numeric {
enum { value = false }; static constexpr bool value = false;
}; };
template <> template <>
struct is_numeric<char> { struct is_numeric<char> {
enum { value = true }; static constexpr bool value = true;
}; };
template <> template <>
struct is_numeric<unsigned char> { struct is_numeric<unsigned char> {
enum { value = true }; static constexpr bool value = true;
}; };
template <> template <>
struct is_numeric<int> { struct is_numeric<int> {
enum { value = true }; static constexpr bool value = true;
}; };
template <> template <>
struct is_numeric<unsigned int> { struct is_numeric<unsigned int> {
enum { value = true }; static constexpr bool value = true;
}; };
template <> template <>
struct is_numeric<long int> { struct is_numeric<long int> {
enum { value = true }; static constexpr bool value = true;
}; };
template <> template <>
struct is_numeric<unsigned long int> { struct is_numeric<unsigned long int> {
enum { value = true }; static constexpr bool value = true;
}; };
template <> template <>
struct is_numeric<short int> { struct is_numeric<short int> {
enum { value = true }; static constexpr bool value = true;
}; };
template <> template <>
struct is_numeric<unsigned short int> { struct is_numeric<unsigned short int> {
enum { value = true }; static constexpr bool value = true;
}; };
#if defined(_MSC_VER) && (_MSC_VER < 1310) #if defined(_MSC_VER) && (_MSC_VER < 1310)
template <> template <>
struct is_numeric<__int64> { struct is_numeric<__int64> {
enum { value = true }; static constexpr bool value = true;
}; };
template <> template <>
struct is_numeric<unsigned __int64> { struct is_numeric<unsigned __int64> {
enum { value = true }; static constexpr bool value = true;
}; };
#else #else
template <> template <>
struct is_numeric<long long> { struct is_numeric<long long> {
enum { value = true }; static constexpr bool value = true;
}; };
template <> template <>
struct is_numeric<unsigned long long> { struct is_numeric<unsigned long long> {
enum { value = true }; static constexpr bool value = true;
}; };
#endif #endif
template <> template <>
struct is_numeric<float> { struct is_numeric<float> {
enum { value = true }; static constexpr bool value = true;
}; };
template <> template <>
struct is_numeric<double> { struct is_numeric<double> {
enum { value = true }; static constexpr bool value = true;
}; };
template <> template <>
struct is_numeric<long double> { struct is_numeric<long double> {
enum { value = true }; static constexpr bool value = true;
}; };
template <bool, class T = void> template <bool, class T = void>

View File

@ -15,7 +15,7 @@
namespace YAML { namespace YAML {
namespace Utils { namespace Utils {
namespace { namespace {
enum { REPLACEMENT_CHARACTER = 0xFFFD }; constexpr int REPLACEMENT_CHARACTER = 0xFFFD;
bool IsAnchorChar(int ch) { // test for ns-anchor-char bool IsAnchorChar(int ch) { // test for ns-anchor-char
switch (ch) { switch (ch) {

View File

@ -69,7 +69,7 @@ inline const RegEx& Hex() {
inline const RegEx& NotPrintable() { inline const RegEx& NotPrintable() {
static const RegEx e = static const RegEx e =
RegEx(0) | RegEx(0) |
RegEx("\x01\x02\x03\x04\x05\x06\x07\x08\x0B\x0C\x7F", REGEX_OR) | RegEx("\x01\x02\x03\x04\x05\x06\x07\x08\x0B\x0C\x7F", REGEX_OP::REGEX_OR) |
RegEx(0x0E, 0x1F) | RegEx(0x0E, 0x1F) |
(RegEx('\xC2') + (RegEx('\x80', '\x84') | RegEx('\x86', '\x9F'))); (RegEx('\xC2') + (RegEx('\x80', '\x84') | RegEx('\x86', '\x9F')));
return e; return e;
@ -110,7 +110,7 @@ inline const RegEx& Value() {
return e; return e;
} }
inline const RegEx& ValueInFlow() { inline const RegEx& ValueInFlow() {
static const RegEx e = RegEx(':') + (BlankOrBreak() | RegEx(",]}", REGEX_OR)); static const RegEx e = RegEx(':') + (BlankOrBreak() | RegEx(",]}", REGEX_OP::REGEX_OR));
return e; return e;
} }
inline const RegEx& ValueInJSONFlow() { inline const RegEx& ValueInJSONFlow() {
@ -122,20 +122,20 @@ inline const RegEx Comment() {
return e; return e;
} }
inline const RegEx& Anchor() { inline const RegEx& Anchor() {
static const RegEx e = !(RegEx("[]{},", REGEX_OR) | BlankOrBreak()); static const RegEx e = !(RegEx("[]{},", REGEX_OP::REGEX_OR) | BlankOrBreak());
return e; return e;
} }
inline const RegEx& AnchorEnd() { inline const RegEx& AnchorEnd() {
static const RegEx e = RegEx("?:,]}%@`", REGEX_OR) | BlankOrBreak(); static const RegEx e = RegEx("?:,]}%@`", REGEX_OP::REGEX_OR) | BlankOrBreak();
return e; return e;
} }
inline const RegEx& URI() { inline const RegEx& URI() {
static const RegEx e = Word() | RegEx("#;/?:@&=+$,_.!~*'()[]", REGEX_OR) | static const RegEx e = Word() | RegEx("#;/?:@&=+$,_.!~*'()[]", REGEX_OP::REGEX_OR) |
(RegEx('%') + Hex() + Hex()); (RegEx('%') + Hex() + Hex());
return e; return e;
} }
inline const RegEx& Tag() { inline const RegEx& Tag() {
static const RegEx e = Word() | RegEx("#;/?:@&=+$_.~*'()", REGEX_OR) | static const RegEx e = Word() | RegEx("#;/?:@&=+$_.~*'()", REGEX_OP::REGEX_OR) |
(RegEx('%') + Hex() + Hex()); (RegEx('%') + Hex() + Hex());
return e; return e;
} }
@ -148,14 +148,14 @@ inline const RegEx& Tag() {
// space. // space.
inline const RegEx& PlainScalar() { inline const RegEx& PlainScalar() {
static const RegEx e = static const RegEx e =
!(BlankOrBreak() | RegEx(",[]{}#&*!|>\'\"%@`", REGEX_OR) | !(BlankOrBreak() | RegEx(",[]{}#&*!|>\'\"%@`", REGEX_OP::REGEX_OR) |
(RegEx("-?:", REGEX_OR) + (BlankOrBreak() | RegEx()))); (RegEx("-?:", REGEX_OP::REGEX_OR) + (BlankOrBreak() | RegEx())));
return e; return e;
} }
inline const RegEx& PlainScalarInFlow() { inline const RegEx& PlainScalarInFlow() {
static const RegEx e = static const RegEx e =
!(BlankOrBreak() | RegEx("?,[]{}#&*!|>\'\"%@`", REGEX_OR) | !(BlankOrBreak() | RegEx("?,[]{}#&*!|>\'\"%@`", REGEX_OP::REGEX_OR) |
(RegEx("-:", REGEX_OR) + (Blank() | RegEx()))); (RegEx("-:", REGEX_OP::REGEX_OR) + (Blank() | RegEx())));
return e; return e;
} }
inline const RegEx& EndScalar() { inline const RegEx& EndScalar() {
@ -164,8 +164,8 @@ inline const RegEx& EndScalar() {
} }
inline const RegEx& EndScalarInFlow() { inline const RegEx& EndScalarInFlow() {
static const RegEx e = static const RegEx e =
(RegEx(':') + (BlankOrBreak() | RegEx() | RegEx(",]}", REGEX_OR))) | (RegEx(':') + (BlankOrBreak() | RegEx() | RegEx(",]}", REGEX_OP::REGEX_OR))) |
RegEx(",?[]{}", REGEX_OR); RegEx(",?[]{}", REGEX_OP::REGEX_OR);
return e; return e;
} }
@ -188,7 +188,7 @@ inline const RegEx& EscBreak() {
} }
inline const RegEx& ChompIndicator() { inline const RegEx& ChompIndicator() {
static const RegEx e = RegEx("+-", REGEX_OR); static const RegEx e = RegEx("+-", REGEX_OP::REGEX_OR);
return e; return e;
} }
inline const RegEx& Chomp() { inline const RegEx& Chomp() {

View File

@ -43,7 +43,7 @@ void Parser::ParseDirectives() {
while (!m_pScanner->empty()) { while (!m_pScanner->empty()) {
Token& token = m_pScanner->peek(); Token& token = m_pScanner->peek();
if (token.type != Token::DIRECTIVE) { if (token.type != Token::TYPE::DIRECTIVE) {
break; break;
} }

View File

@ -4,38 +4,38 @@ namespace YAML {
// constructors // constructors
RegEx::RegEx(REGEX_OP op) : m_op(op), m_a(0), m_z(0), m_params{} {} RegEx::RegEx(REGEX_OP op) : m_op(op), m_a(0), m_z(0), m_params{} {}
RegEx::RegEx() : RegEx(REGEX_EMPTY) {} RegEx::RegEx() : RegEx(REGEX_OP::REGEX_EMPTY) {}
RegEx::RegEx(char ch) : m_op(REGEX_MATCH), m_a(ch), m_z(0), m_params{} {} RegEx::RegEx(char ch) : m_op(REGEX_OP::REGEX_MATCH), m_a(ch), m_z(0), m_params{} {}
RegEx::RegEx(char a, char z) : m_op(REGEX_RANGE), m_a(a), m_z(z), m_params{} {} RegEx::RegEx(char a, char z) : m_op(REGEX_OP::REGEX_RANGE), m_a(a), m_z(z), m_params{} {}
RegEx::RegEx(const std::string& str, REGEX_OP op) RegEx::RegEx(const std::string& str, REGEX_OP op)
: m_op(op), m_a(0), m_z(0), m_params(str.begin(), str.end()) {} : m_op(op), m_a(0), m_z(0), m_params(str.begin(), str.end()) {}
// combination constructors // combination constructors
RegEx operator!(const RegEx& ex) { RegEx operator!(const RegEx& ex) {
RegEx ret(REGEX_NOT); RegEx ret(REGEX_OP::REGEX_NOT);
ret.m_params.push_back(ex); ret.m_params.push_back(ex);
return ret; return ret;
} }
RegEx operator|(const RegEx& ex1, const RegEx& ex2) { RegEx operator|(const RegEx& ex1, const RegEx& ex2) {
RegEx ret(REGEX_OR); RegEx ret(REGEX_OP::REGEX_OR);
ret.m_params.push_back(ex1); ret.m_params.push_back(ex1);
ret.m_params.push_back(ex2); ret.m_params.push_back(ex2);
return ret; return ret;
} }
RegEx operator&(const RegEx& ex1, const RegEx& ex2) { RegEx operator&(const RegEx& ex1, const RegEx& ex2) {
RegEx ret(REGEX_AND); RegEx ret(REGEX_OP::REGEX_AND);
ret.m_params.push_back(ex1); ret.m_params.push_back(ex1);
ret.m_params.push_back(ex2); ret.m_params.push_back(ex2);
return ret; return ret;
} }
RegEx operator+(const RegEx& ex1, const RegEx& ex2) { RegEx operator+(const RegEx& ex1, const RegEx& ex2) {
RegEx ret(REGEX_SEQ); RegEx ret(REGEX_OP::REGEX_SEQ);
ret.m_params.push_back(ex1); ret.m_params.push_back(ex1);
ret.m_params.push_back(ex2); ret.m_params.push_back(ex2);
return ret; return ret;

View File

@ -15,7 +15,7 @@
namespace YAML { namespace YAML {
class Stream; class Stream;
enum REGEX_OP { enum class REGEX_OP {
REGEX_EMPTY, REGEX_EMPTY,
REGEX_MATCH, REGEX_MATCH,
REGEX_RANGE, REGEX_RANGE,
@ -33,7 +33,7 @@ class YAML_CPP_API RegEx {
RegEx(); RegEx();
explicit RegEx(char ch); explicit RegEx(char ch);
RegEx(char a, char z); RegEx(char a, char z);
RegEx(const std::string& str, REGEX_OP op = REGEX_SEQ); RegEx(const std::string& str, REGEX_OP op = REGEX_OP::REGEX_SEQ);
~RegEx() = default; ~RegEx() = default;
friend YAML_CPP_API RegEx operator!(const RegEx& ex); friend YAML_CPP_API RegEx operator!(const RegEx& ex);

View File

@ -56,8 +56,8 @@ template <>
inline bool RegEx::IsValidSource<StringCharSource>( inline bool RegEx::IsValidSource<StringCharSource>(
const StringCharSource& source) const { const StringCharSource& source) const {
switch (m_op) { switch (m_op) {
case REGEX_MATCH: case REGEX_OP::REGEX_MATCH:
case REGEX_RANGE: case REGEX_OP::REGEX_RANGE:
return source; return source;
default: default:
return true; return true;
@ -72,19 +72,19 @@ inline int RegEx::Match(const Source& source) const {
template <typename Source> template <typename Source>
inline int RegEx::MatchUnchecked(const Source& source) const { inline int RegEx::MatchUnchecked(const Source& source) const {
switch (m_op) { switch (m_op) {
case REGEX_EMPTY: case REGEX_OP::REGEX_EMPTY:
return MatchOpEmpty(source); return MatchOpEmpty(source);
case REGEX_MATCH: case REGEX_OP::REGEX_MATCH:
return MatchOpMatch(source); return MatchOpMatch(source);
case REGEX_RANGE: case REGEX_OP::REGEX_RANGE:
return MatchOpRange(source); return MatchOpRange(source);
case REGEX_OR: case REGEX_OP::REGEX_OR:
return MatchOpOr(source); return MatchOpOr(source);
case REGEX_AND: case REGEX_OP::REGEX_AND:
return MatchOpAnd(source); return MatchOpAnd(source);
case REGEX_NOT: case REGEX_OP::REGEX_NOT:
return MatchOpNot(source); return MatchOpNot(source);
case REGEX_SEQ: case REGEX_OP::REGEX_SEQ:
return MatchOpSeq(source); return MatchOpSeq(source);
} }

View File

@ -56,12 +56,12 @@ void Scanner::EnsureTokensInQueue() {
Token& token = m_tokens.front(); Token& token = m_tokens.front();
// if this guy's valid, then we're done // if this guy's valid, then we're done
if (token.status == Token::VALID) { if (token.status == Token::STATUS::VALID) {
return; return;
} }
// here's where we clean up the impossible tokens // here's where we clean up the impossible tokens
if (token.status == Token::INVALID) { if (token.status == Token::STATUS::INVALID) {
m_tokens.pop(); m_tokens.pop();
continue; continue;
} }
@ -246,7 +246,7 @@ void Scanner::StartStream() {
m_startedStream = true; m_startedStream = true;
m_simpleKeyAllowed = true; m_simpleKeyAllowed = true;
std::unique_ptr<IndentMarker> pIndent( std::unique_ptr<IndentMarker> pIndent(
new IndentMarker(-1, IndentMarker::NONE)); new IndentMarker(-1, IndentMarker::INDENT_TYPE::NONE));
m_indentRefs.push_back(std::move(pIndent)); m_indentRefs.push_back(std::move(pIndent));
m_indents.push(&m_indentRefs.back()); m_indents.push(&m_indentRefs.back());
} }
@ -271,11 +271,11 @@ Token* Scanner::PushToken(Token::TYPE type) {
Token::TYPE Scanner::GetStartTokenFor(IndentMarker::INDENT_TYPE type) const { Token::TYPE Scanner::GetStartTokenFor(IndentMarker::INDENT_TYPE type) const {
switch (type) { switch (type) {
case IndentMarker::SEQ: case IndentMarker::INDENT_TYPE::SEQ:
return Token::BLOCK_SEQ_START; return Token::TYPE::BLOCK_SEQ_START;
case IndentMarker::MAP: case IndentMarker::INDENT_TYPE::MAP:
return Token::BLOCK_MAP_START; return Token::TYPE::BLOCK_MAP_START;
case IndentMarker::NONE: case IndentMarker::INDENT_TYPE::NONE:
assert(false); assert(false);
break; break;
} }
@ -299,8 +299,8 @@ Scanner::IndentMarker* Scanner::PushIndentTo(int column,
return nullptr; return nullptr;
} }
if (indent.column == lastIndent.column && if (indent.column == lastIndent.column &&
!(indent.type == IndentMarker::SEQ && !(indent.type == IndentMarker::INDENT_TYPE::SEQ &&
lastIndent.type == IndentMarker::MAP)) { lastIndent.type == IndentMarker::INDENT_TYPE::MAP)) {
return nullptr; return nullptr;
} }
@ -326,7 +326,7 @@ void Scanner::PopIndentToHere() {
break; break;
} }
if (indent.column == INPUT.column() && if (indent.column == INPUT.column() &&
!(indent.type == IndentMarker::SEQ && !(indent.type == IndentMarker::INDENT_TYPE::SEQ &&
!Exp::BlockEntry().Matches(INPUT))) { !Exp::BlockEntry().Matches(INPUT))) {
break; break;
} }
@ -335,7 +335,7 @@ void Scanner::PopIndentToHere() {
} }
while (!m_indents.empty() && while (!m_indents.empty() &&
m_indents.top()->status == IndentMarker::INVALID) { m_indents.top()->status == IndentMarker::STATUS::INVALID) {
PopIndent(); PopIndent();
} }
} }
@ -349,7 +349,7 @@ void Scanner::PopAllIndents() {
// now pop away // now pop away
while (!m_indents.empty()) { while (!m_indents.empty()) {
const IndentMarker& indent = *m_indents.top(); const IndentMarker& indent = *m_indents.top();
if (indent.type == IndentMarker::NONE) { if (indent.type == IndentMarker::INDENT_TYPE::NONE) {
break; break;
} }
@ -361,15 +361,15 @@ void Scanner::PopIndent() {
const IndentMarker& indent = *m_indents.top(); const IndentMarker& indent = *m_indents.top();
m_indents.pop(); m_indents.pop();
if (indent.status != IndentMarker::VALID) { if (indent.status != IndentMarker::STATUS::VALID) {
InvalidateSimpleKey(); InvalidateSimpleKey();
return; return;
} }
if (indent.type == IndentMarker::SEQ) { if (indent.type == IndentMarker::INDENT_TYPE::SEQ) {
m_tokens.push(Token(Token::BLOCK_SEQ_END, INPUT.mark())); m_tokens.push(Token(Token::TYPE::BLOCK_SEQ_END, INPUT.mark()));
} else if (indent.type == IndentMarker::MAP) { } else if (indent.type == IndentMarker::INDENT_TYPE::MAP) {
m_tokens.push(Token(Token::BLOCK_MAP_END, INPUT.mark())); m_tokens.push(Token(Token::TYPE::BLOCK_MAP_END, INPUT.mark()));
} }
} }

View File

@ -46,10 +46,10 @@ class Scanner {
private: private:
struct IndentMarker { struct IndentMarker {
enum INDENT_TYPE { MAP, SEQ, NONE }; enum class INDENT_TYPE { MAP, SEQ, NONE };
enum STATUS { VALID, INVALID, UNKNOWN }; enum class STATUS { VALID, INVALID, UNKNOWN };
IndentMarker(int column_, INDENT_TYPE type_) IndentMarker(int column_, INDENT_TYPE type_)
: column(column_), type(type_), status(VALID), pStartToken(nullptr) {} : column(column_), type(type_), status(STATUS::VALID), pStartToken(nullptr) {}
int column; int column;
INDENT_TYPE type; INDENT_TYPE type;
@ -57,7 +57,7 @@ class Scanner {
Token *pStartToken; Token *pStartToken;
}; };
enum FLOW_MARKER { FLOW_MAP, FLOW_SEQ }; enum class FLOW_MARKER { FLOW_MAP, FLOW_SEQ };
private: private:
// scanning // scanning

View File

@ -20,7 +20,7 @@ namespace YAML {
// and different places in the above flow. // and different places in the above flow.
std::string ScanScalar(Stream& INPUT, ScanScalarParams& params) { std::string ScanScalar(Stream& INPUT, ScanScalarParams& params) {
bool foundNonEmptyLine = false; bool foundNonEmptyLine = false;
bool pastOpeningBreak = (params.fold == FOLD_FLOW); bool pastOpeningBreak = (params.fold == FOLD::FOLD_FLOW);
bool emptyLine = false, moreIndented = false; bool emptyLine = false, moreIndented = false;
int foldedNewlineCount = 0; int foldedNewlineCount = 0;
bool foldedNewlineStartedMoreIndented = false; bool foldedNewlineStartedMoreIndented = false;
@ -45,10 +45,10 @@ std::string ScanScalar(Stream& INPUT, ScanScalarParams& params) {
// document indicator? // document indicator?
if (INPUT.column() == 0 && Exp::DocIndicator().Matches(INPUT)) { if (INPUT.column() == 0 && Exp::DocIndicator().Matches(INPUT)) {
if (params.onDocIndicator == BREAK) { if (params.onDocIndicator == ACTION::BREAK) {
break; break;
} }
if (params.onDocIndicator == THROW) { if (params.onDocIndicator == ACTION::THROW) {
throw ParserException(INPUT.mark(), ErrorMsg::DOC_IN_SCALAR); throw ParserException(INPUT.mark(), ErrorMsg::DOC_IN_SCALAR);
} }
} }
@ -91,7 +91,7 @@ std::string ScanScalar(Stream& INPUT, ScanScalarParams& params) {
} }
// doc indicator? // doc indicator?
if (params.onDocIndicator == BREAK && INPUT.column() == 0 && if (params.onDocIndicator == ACTION::BREAK && INPUT.column() == 0 &&
Exp::DocIndicator().Matches(INPUT)) { Exp::DocIndicator().Matches(INPUT)) {
break; break;
} }
@ -106,7 +106,7 @@ std::string ScanScalar(Stream& INPUT, ScanScalarParams& params) {
} }
// do we remove trailing whitespace? // do we remove trailing whitespace?
if (params.fold == FOLD_FLOW) if (params.fold == FOLD::FOLD_FLOW)
scalar.erase(lastNonWhitespaceChar); scalar.erase(lastNonWhitespaceChar);
// ******************************** // ********************************
@ -134,7 +134,7 @@ std::string ScanScalar(Stream& INPUT, ScanScalarParams& params) {
while (Exp::Blank().Matches(INPUT)) { while (Exp::Blank().Matches(INPUT)) {
// we check for tabs that masquerade as indentation // we check for tabs that masquerade as indentation
if (INPUT.peek() == '\t' && INPUT.column() < params.indent && if (INPUT.peek() == '\t' && INPUT.column() < params.indent &&
params.onTabInIndentation == THROW) { params.onTabInIndentation == ACTION::THROW) {
throw ParserException(INPUT.mark(), ErrorMsg::TAB_IN_INDENTATION); throw ParserException(INPUT.mark(), ErrorMsg::TAB_IN_INDENTATION);
} }
@ -152,17 +152,17 @@ std::string ScanScalar(Stream& INPUT, ScanScalarParams& params) {
// was this an empty line? // was this an empty line?
bool nextEmptyLine = Exp::Break().Matches(INPUT); bool nextEmptyLine = Exp::Break().Matches(INPUT);
bool nextMoreIndented = Exp::Blank().Matches(INPUT); bool nextMoreIndented = Exp::Blank().Matches(INPUT);
if (params.fold == FOLD_BLOCK && foldedNewlineCount == 0 && nextEmptyLine) if (params.fold == FOLD::FOLD_BLOCK && foldedNewlineCount == 0 && nextEmptyLine)
foldedNewlineStartedMoreIndented = moreIndented; foldedNewlineStartedMoreIndented = moreIndented;
// for block scalars, we always start with a newline, so we should ignore it // for block scalars, we always start with a newline, so we should ignore it
// (not fold or keep) // (not fold or keep)
if (pastOpeningBreak) { if (pastOpeningBreak) {
switch (params.fold) { switch (params.fold) {
case DONT_FOLD: case FOLD::DONT_FOLD:
scalar += "\n"; scalar += "\n";
break; break;
case FOLD_BLOCK: case FOLD::FOLD_BLOCK:
if (!emptyLine && !nextEmptyLine && !moreIndented && if (!emptyLine && !nextEmptyLine && !moreIndented &&
!nextMoreIndented && INPUT.column() >= params.indent) { !nextMoreIndented && INPUT.column() >= params.indent) {
scalar += " "; scalar += " ";
@ -181,7 +181,7 @@ std::string ScanScalar(Stream& INPUT, ScanScalarParams& params) {
foldedNewlineCount = 0; foldedNewlineCount = 0;
} }
break; break;
case FOLD_FLOW: case FOLD::FOLD_FLOW:
if (nextEmptyLine) { if (nextEmptyLine) {
scalar += "\n"; scalar += "\n";
} else if (!emptyLine && !escapedNewline) { } else if (!emptyLine && !escapedNewline) {
@ -216,7 +216,7 @@ std::string ScanScalar(Stream& INPUT, ScanScalarParams& params) {
} }
switch (params.chomp) { switch (params.chomp) {
case CLIP: { case CHOMP::CLIP: {
std::size_t pos = scalar.find_last_not_of('\n'); std::size_t pos = scalar.find_last_not_of('\n');
if (lastEscapedChar != std::string::npos) { if (lastEscapedChar != std::string::npos) {
if (pos < lastEscapedChar || pos == std::string::npos) { if (pos < lastEscapedChar || pos == std::string::npos) {
@ -229,7 +229,7 @@ std::string ScanScalar(Stream& INPUT, ScanScalarParams& params) {
scalar.erase(pos + 2); scalar.erase(pos + 2);
} }
} break; } break;
case STRIP: { case CHOMP::STRIP: {
std::size_t pos = scalar.find_last_not_of('\n'); std::size_t pos = scalar.find_last_not_of('\n');
if (lastEscapedChar != std::string::npos) { if (lastEscapedChar != std::string::npos) {
if (pos < lastEscapedChar || pos == std::string::npos) { if (pos < lastEscapedChar || pos == std::string::npos) {

View File

@ -13,9 +13,9 @@
#include "stream.h" #include "stream.h"
namespace YAML { namespace YAML {
enum CHOMP { STRIP = -1, CLIP, KEEP }; enum class CHOMP { STRIP = -1, CLIP, KEEP };
enum ACTION { NONE, BREAK, THROW }; enum class ACTION { NONE, BREAK, THROW };
enum FOLD { DONT_FOLD, FOLD_BLOCK, FOLD_FLOW }; enum class FOLD { DONT_FOLD, FOLD_BLOCK, FOLD_FLOW };
struct ScanScalarParams { struct ScanScalarParams {
ScanScalarParams() ScanScalarParams()
@ -25,11 +25,11 @@ struct ScanScalarParams {
detectIndent(false), detectIndent(false),
eatLeadingWhitespace(0), eatLeadingWhitespace(0),
escape(0), escape(0),
fold(DONT_FOLD), fold(FOLD::DONT_FOLD),
trimTrailingSpaces(0), trimTrailingSpaces(0),
chomp(CLIP), chomp(CHOMP::CLIP),
onDocIndicator(NONE), onDocIndicator(ACTION::NONE),
onTabInIndentation(NONE), onTabInIndentation(ACTION::NONE),
leadingSpaces(false) {} leadingSpaces(false) {}
// input: // input:

View File

@ -29,7 +29,7 @@ void Scanner::ScanDirective() {
m_canBeJSONFlow = false; m_canBeJSONFlow = false;
// store pos and eat indicator // store pos and eat indicator
Token token(Token::DIRECTIVE, INPUT.mark()); Token token(Token::TYPE::DIRECTIVE, INPUT.mark());
INPUT.eat(1); INPUT.eat(1);
// read name // read name
@ -67,7 +67,7 @@ void Scanner::ScanDocStart() {
// eat // eat
Mark mark = INPUT.mark(); Mark mark = INPUT.mark();
INPUT.eat(3); INPUT.eat(3);
m_tokens.push(Token(Token::DOC_START, mark)); m_tokens.push(Token(Token::TYPE::DOC_START, mark));
} }
// DocEnd // DocEnd
@ -80,7 +80,7 @@ void Scanner::ScanDocEnd() {
// eat // eat
Mark mark = INPUT.mark(); Mark mark = INPUT.mark();
INPUT.eat(3); INPUT.eat(3);
m_tokens.push(Token(Token::DOC_END, mark)); m_tokens.push(Token(Token::TYPE::DOC_END, mark));
} }
// FlowStart // FlowStart
@ -93,10 +93,10 @@ void Scanner::ScanFlowStart() {
// eat // eat
Mark mark = INPUT.mark(); Mark mark = INPUT.mark();
char ch = INPUT.get(); char ch = INPUT.get();
FLOW_MARKER flowType = (ch == Keys::FlowSeqStart ? FLOW_SEQ : FLOW_MAP); FLOW_MARKER flowType = (ch == Keys::FlowSeqStart ? FLOW_MARKER::FLOW_SEQ : FLOW_MARKER::FLOW_MAP);
m_flows.push(flowType); m_flows.push(flowType);
Token::TYPE type = Token::TYPE type =
(flowType == FLOW_SEQ ? Token::FLOW_SEQ_START : Token::FLOW_MAP_START); (flowType == FLOW_MARKER::FLOW_SEQ ? Token::TYPE::FLOW_SEQ_START : Token::TYPE::FLOW_MAP_START);
m_tokens.push(Token(type, mark)); m_tokens.push(Token(type, mark));
} }
@ -107,9 +107,9 @@ void Scanner::ScanFlowEnd() {
// we might have a solo entry in the flow context // we might have a solo entry in the flow context
if (InFlowContext()) { if (InFlowContext()) {
if (m_flows.top() == FLOW_MAP && VerifySimpleKey()) if (m_flows.top() == FLOW_MARKER::FLOW_MAP && VerifySimpleKey())
m_tokens.push(Token(Token::VALUE, INPUT.mark())); m_tokens.push(Token(Token::TYPE::VALUE, INPUT.mark()));
else if (m_flows.top() == FLOW_SEQ) else if (m_flows.top() == FLOW_MARKER::FLOW_SEQ)
InvalidateSimpleKey(); InvalidateSimpleKey();
} }
@ -121,12 +121,12 @@ void Scanner::ScanFlowEnd() {
char ch = INPUT.get(); char ch = INPUT.get();
// check that it matches the start // check that it matches the start
FLOW_MARKER flowType = (ch == Keys::FlowSeqEnd ? FLOW_SEQ : FLOW_MAP); FLOW_MARKER flowType = (ch == Keys::FlowSeqEnd ? FLOW_MARKER::FLOW_SEQ : FLOW_MARKER::FLOW_MAP);
if (m_flows.top() != flowType) if (m_flows.top() != flowType)
throw ParserException(mark, ErrorMsg::FLOW_END); throw ParserException(mark, ErrorMsg::FLOW_END);
m_flows.pop(); m_flows.pop();
Token::TYPE type = (flowType ? Token::FLOW_SEQ_END : Token::FLOW_MAP_END); Token::TYPE type = (flowType == FLOW_MARKER::FLOW_SEQ ? Token::TYPE::FLOW_SEQ_END : Token::TYPE::FLOW_MAP_END);
m_tokens.push(Token(type, mark)); m_tokens.push(Token(type, mark));
} }
@ -134,9 +134,9 @@ void Scanner::ScanFlowEnd() {
void Scanner::ScanFlowEntry() { void Scanner::ScanFlowEntry() {
// we might have a solo entry in the flow context // we might have a solo entry in the flow context
if (InFlowContext()) { if (InFlowContext()) {
if (m_flows.top() == FLOW_MAP && VerifySimpleKey()) if (m_flows.top() == FLOW_MARKER::FLOW_MAP && VerifySimpleKey())
m_tokens.push(Token(Token::VALUE, INPUT.mark())); m_tokens.push(Token(Token::TYPE::VALUE, INPUT.mark()));
else if (m_flows.top() == FLOW_SEQ) else if (m_flows.top() == FLOW_MARKER::FLOW_SEQ)
InvalidateSimpleKey(); InvalidateSimpleKey();
} }
@ -146,7 +146,7 @@ void Scanner::ScanFlowEntry() {
// eat // eat
Mark mark = INPUT.mark(); Mark mark = INPUT.mark();
INPUT.eat(1); INPUT.eat(1);
m_tokens.push(Token(Token::FLOW_ENTRY, mark)); m_tokens.push(Token(Token::TYPE::FLOW_ENTRY, mark));
} }
// BlockEntry // BlockEntry
@ -159,14 +159,14 @@ void Scanner::ScanBlockEntry() {
if (!m_simpleKeyAllowed) if (!m_simpleKeyAllowed)
throw ParserException(INPUT.mark(), ErrorMsg::BLOCK_ENTRY); throw ParserException(INPUT.mark(), ErrorMsg::BLOCK_ENTRY);
PushIndentTo(INPUT.column(), IndentMarker::SEQ); PushIndentTo(INPUT.column(), IndentMarker::INDENT_TYPE::SEQ);
m_simpleKeyAllowed = true; m_simpleKeyAllowed = true;
m_canBeJSONFlow = false; m_canBeJSONFlow = false;
// eat // eat
Mark mark = INPUT.mark(); Mark mark = INPUT.mark();
INPUT.eat(1); INPUT.eat(1);
m_tokens.push(Token(Token::BLOCK_ENTRY, mark)); m_tokens.push(Token(Token::TYPE::BLOCK_ENTRY, mark));
} }
// Key // Key
@ -176,7 +176,7 @@ void Scanner::ScanKey() {
if (!m_simpleKeyAllowed) if (!m_simpleKeyAllowed)
throw ParserException(INPUT.mark(), ErrorMsg::MAP_KEY); throw ParserException(INPUT.mark(), ErrorMsg::MAP_KEY);
PushIndentTo(INPUT.column(), IndentMarker::MAP); PushIndentTo(INPUT.column(), IndentMarker::INDENT_TYPE::MAP);
} }
// can only put a simple key here if we're in block context // can only put a simple key here if we're in block context
@ -185,7 +185,7 @@ void Scanner::ScanKey() {
// eat // eat
Mark mark = INPUT.mark(); Mark mark = INPUT.mark();
INPUT.eat(1); INPUT.eat(1);
m_tokens.push(Token(Token::KEY, mark)); m_tokens.push(Token(Token::TYPE::KEY, mark));
} }
// Value // Value
@ -204,7 +204,7 @@ void Scanner::ScanValue() {
if (!m_simpleKeyAllowed) if (!m_simpleKeyAllowed)
throw ParserException(INPUT.mark(), ErrorMsg::MAP_VALUE); throw ParserException(INPUT.mark(), ErrorMsg::MAP_VALUE);
PushIndentTo(INPUT.column(), IndentMarker::MAP); PushIndentTo(INPUT.column(), IndentMarker::INDENT_TYPE::MAP);
} }
// can only put a simple key here if we're in block context // can only put a simple key here if we're in block context
@ -214,7 +214,7 @@ void Scanner::ScanValue() {
// eat // eat
Mark mark = INPUT.mark(); Mark mark = INPUT.mark();
INPUT.eat(1); INPUT.eat(1);
m_tokens.push(Token(Token::VALUE, mark)); m_tokens.push(Token(Token::TYPE::VALUE, mark));
} }
// AnchorOrAlias // AnchorOrAlias
@ -247,7 +247,7 @@ void Scanner::ScanAnchorOrAlias() {
: ErrorMsg::CHAR_IN_ANCHOR); : ErrorMsg::CHAR_IN_ANCHOR);
// and we're done // and we're done
Token token(alias ? Token::ALIAS : Token::ANCHOR, mark); Token token(alias ? Token::TYPE::ALIAS : Token::TYPE::ANCHOR, mark);
token.value = name; token.value = name;
m_tokens.push(token); m_tokens.push(token);
} }
@ -259,32 +259,34 @@ void Scanner::ScanTag() {
m_simpleKeyAllowed = false; m_simpleKeyAllowed = false;
m_canBeJSONFlow = false; m_canBeJSONFlow = false;
Token token(Token::TAG, INPUT.mark()); Token token(Token::TYPE::TAG, INPUT.mark());
// eat the indicator // eat the indicator
INPUT.get(); INPUT.get();
using token_data_t = decltype(token.data);
if (INPUT && INPUT.peek() == Keys::VerbatimTagStart) { if (INPUT && INPUT.peek() == Keys::VerbatimTagStart) {
std::string tag = ScanVerbatimTag(INPUT); std::string tag = ScanVerbatimTag(INPUT);
token.value = tag; token.value = tag;
token.data = Tag::VERBATIM; token.data = static_cast<token_data_t>(Tag::TYPE::VERBATIM);
} else { } else {
bool canBeHandle; bool canBeHandle;
token.value = ScanTagHandle(INPUT, canBeHandle); token.value = ScanTagHandle(INPUT, canBeHandle);
if (!canBeHandle && token.value.empty()) if (!canBeHandle && token.value.empty())
token.data = Tag::NON_SPECIFIC; token.data = static_cast<token_data_t>(Tag::TYPE::NON_SPECIFIC);
else if (token.value.empty()) else if (token.value.empty())
token.data = Tag::SECONDARY_HANDLE; token.data = static_cast<token_data_t>(Tag::TYPE::SECONDARY_HANDLE);
else else
token.data = Tag::PRIMARY_HANDLE; token.data = static_cast<token_data_t>(Tag::TYPE::PRIMARY_HANDLE);
// is there a suffix? // is there a suffix?
if (canBeHandle && INPUT.peek() == Keys::Tag) { if (canBeHandle && INPUT.peek() == Keys::Tag) {
// eat the indicator // eat the indicator
INPUT.get(); INPUT.get();
token.params.push_back(ScanTagSuffix(INPUT)); token.params.push_back(ScanTagSuffix(INPUT));
token.data = Tag::NAMED_HANDLE; token.data = static_cast<token_data_t>(Tag::TYPE::NAMED_HANDLE);
} }
} }
@ -301,12 +303,12 @@ void Scanner::ScanPlainScalar() {
(InFlowContext() ? &Exp::ScanScalarEndInFlow() : &Exp::ScanScalarEnd()); (InFlowContext() ? &Exp::ScanScalarEndInFlow() : &Exp::ScanScalarEnd());
params.eatEnd = false; params.eatEnd = false;
params.indent = (InFlowContext() ? 0 : GetTopIndent() + 1); params.indent = (InFlowContext() ? 0 : GetTopIndent() + 1);
params.fold = FOLD_FLOW; params.fold = FOLD::FOLD_FLOW;
params.eatLeadingWhitespace = true; params.eatLeadingWhitespace = true;
params.trimTrailingSpaces = true; params.trimTrailingSpaces = true;
params.chomp = STRIP; params.chomp = CHOMP::STRIP;
params.onDocIndicator = BREAK; params.onDocIndicator = ACTION::BREAK;
params.onTabInIndentation = THROW; params.onTabInIndentation = ACTION::THROW;
// insert a potential simple key // insert a potential simple key
InsertPotentialSimpleKey(); InsertPotentialSimpleKey();
@ -322,7 +324,7 @@ void Scanner::ScanPlainScalar() {
// if(Exp::IllegalCharInScalar.Matches(INPUT)) // if(Exp::IllegalCharInScalar.Matches(INPUT))
// throw ParserException(INPUT.mark(), ErrorMsg::CHAR_IN_SCALAR); // throw ParserException(INPUT.mark(), ErrorMsg::CHAR_IN_SCALAR);
Token token(Token::PLAIN_SCALAR, mark); Token token(Token::TYPE::PLAIN_SCALAR, mark);
token.value = scalar; token.value = scalar;
m_tokens.push(token); m_tokens.push(token);
} }
@ -343,11 +345,11 @@ void Scanner::ScanQuotedScalar() {
params.eatEnd = true; params.eatEnd = true;
params.escape = (single ? '\'' : '\\'); params.escape = (single ? '\'' : '\\');
params.indent = 0; params.indent = 0;
params.fold = FOLD_FLOW; params.fold = FOLD::FOLD_FLOW;
params.eatLeadingWhitespace = true; params.eatLeadingWhitespace = true;
params.trimTrailingSpaces = false; params.trimTrailingSpaces = false;
params.chomp = CLIP; params.chomp = CHOMP::CLIP;
params.onDocIndicator = THROW; params.onDocIndicator = ACTION::THROW;
// insert a potential simple key // insert a potential simple key
InsertPotentialSimpleKey(); InsertPotentialSimpleKey();
@ -362,7 +364,7 @@ void Scanner::ScanQuotedScalar() {
m_simpleKeyAllowed = false; m_simpleKeyAllowed = false;
m_canBeJSONFlow = true; m_canBeJSONFlow = true;
Token token(Token::NON_PLAIN_SCALAR, mark); Token token(Token::TYPE::NON_PLAIN_SCALAR, mark);
token.value = scalar; token.value = scalar;
m_tokens.push(token); m_tokens.push(token);
} }
@ -382,17 +384,17 @@ void Scanner::ScanBlockScalar() {
// eat block indicator ('|' or '>') // eat block indicator ('|' or '>')
Mark mark = INPUT.mark(); Mark mark = INPUT.mark();
char indicator = INPUT.get(); char indicator = INPUT.get();
params.fold = (indicator == Keys::FoldedScalar ? FOLD_BLOCK : DONT_FOLD); params.fold = (indicator == Keys::FoldedScalar ? FOLD::FOLD_BLOCK : FOLD::DONT_FOLD);
// eat chomping/indentation indicators // eat chomping/indentation indicators
params.chomp = CLIP; params.chomp = CHOMP::CLIP;
int n = Exp::Chomp().Match(INPUT); int n = Exp::Chomp().Match(INPUT);
for (int i = 0; i < n; i++) { for (int i = 0; i < n; i++) {
char ch = INPUT.get(); char ch = INPUT.get();
if (ch == '+') if (ch == '+')
params.chomp = KEEP; params.chomp = CHOMP::KEEP;
else if (ch == '-') else if (ch == '-')
params.chomp = STRIP; params.chomp = CHOMP::STRIP;
else if (Exp::Digit().Matches(ch)) { else if (Exp::Digit().Matches(ch)) {
if (ch == '0') if (ch == '0')
throw ParserException(INPUT.mark(), ErrorMsg::ZERO_INDENT_IN_BLOCK); throw ParserException(INPUT.mark(), ErrorMsg::ZERO_INDENT_IN_BLOCK);
@ -421,7 +423,7 @@ void Scanner::ScanBlockScalar() {
params.eatLeadingWhitespace = false; params.eatLeadingWhitespace = false;
params.trimTrailingSpaces = false; params.trimTrailingSpaces = false;
params.onTabInIndentation = THROW; params.onTabInIndentation = ACTION::THROW;
scalar = ScanScalar(INPUT, params); scalar = ScanScalar(INPUT, params);
@ -430,7 +432,7 @@ void Scanner::ScanBlockScalar() {
m_simpleKeyAllowed = true; m_simpleKeyAllowed = true;
m_canBeJSONFlow = false; m_canBeJSONFlow = false;
Token token(Token::NON_PLAIN_SCALAR, mark); Token token(Token::TYPE::NON_PLAIN_SCALAR, mark);
token.value = scalar; token.value = scalar;
m_tokens.push(token); m_tokens.push(token);
} }

View File

@ -16,20 +16,20 @@ void Scanner::SimpleKey::Validate() {
// we "garbage collect" them so we can // we "garbage collect" them so we can
// always refer to them // always refer to them
if (pIndent) if (pIndent)
pIndent->status = IndentMarker::VALID; pIndent->status = IndentMarker::STATUS::VALID;
if (pMapStart) if (pMapStart)
pMapStart->status = Token::VALID; pMapStart->status = Token::STATUS::VALID;
if (pKey) if (pKey)
pKey->status = Token::VALID; pKey->status = Token::STATUS::VALID;
} }
void Scanner::SimpleKey::Invalidate() { void Scanner::SimpleKey::Invalidate() {
if (pIndent) if (pIndent)
pIndent->status = IndentMarker::INVALID; pIndent->status = IndentMarker::STATUS::INVALID;
if (pMapStart) if (pMapStart)
pMapStart->status = Token::INVALID; pMapStart->status = Token::STATUS::INVALID;
if (pKey) if (pKey)
pKey->status = Token::INVALID; pKey->status = Token::STATUS::INVALID;
} }
// CanInsertPotentialSimpleKey // CanInsertPotentialSimpleKey
@ -63,18 +63,18 @@ void Scanner::InsertPotentialSimpleKey() {
// first add a map start, if necessary // first add a map start, if necessary
if (InBlockContext()) { if (InBlockContext()) {
key.pIndent = PushIndentTo(INPUT.column(), IndentMarker::MAP); key.pIndent = PushIndentTo(INPUT.column(), IndentMarker::INDENT_TYPE::MAP);
if (key.pIndent) { if (key.pIndent) {
key.pIndent->status = IndentMarker::UNKNOWN; key.pIndent->status = IndentMarker::STATUS::UNKNOWN;
key.pMapStart = key.pIndent->pStartToken; key.pMapStart = key.pIndent->pStartToken;
key.pMapStart->status = Token::UNVERIFIED; key.pMapStart->status = Token::STATUS::UNVERIFIED;
} }
} }
// then add the (now unverified) key // then add the (now unverified) key
m_tokens.push(Token(Token::KEY, INPUT.mark())); m_tokens.push(Token(Token::TYPE::KEY, INPUT.mark()));
key.pKey = &m_tokens.back(); key.pKey = &m_tokens.back();
key.pKey->status = Token::UNVERIFIED; key.pKey->status = Token::STATUS::UNVERIFIED;
m_simpleKeys.push(key); m_simpleKeys.push(key);
} }

View File

@ -34,7 +34,7 @@ void SingleDocParser::HandleDocument(EventHandler& eventHandler) {
eventHandler.OnDocumentStart(m_scanner.peek().mark); eventHandler.OnDocumentStart(m_scanner.peek().mark);
// eat doc start // eat doc start
if (m_scanner.peek().type == Token::DOC_START) if (m_scanner.peek().type == Token::TYPE::DOC_START)
m_scanner.pop(); m_scanner.pop();
// recurse! // recurse!
@ -43,7 +43,7 @@ void SingleDocParser::HandleDocument(EventHandler& eventHandler) {
eventHandler.OnDocumentEnd(); eventHandler.OnDocumentEnd();
// and finally eat any doc ends we see // and finally eat any doc ends we see
while (!m_scanner.empty() && m_scanner.peek().type == Token::DOC_END) while (!m_scanner.empty() && m_scanner.peek().type == Token::TYPE::DOC_END)
m_scanner.pop(); m_scanner.pop();
} }
@ -60,7 +60,7 @@ void SingleDocParser::HandleNode(EventHandler& eventHandler) {
Mark mark = m_scanner.peek().mark; Mark mark = m_scanner.peek().mark;
// special case: a value node by itself must be a map, with no header // special case: a value node by itself must be a map, with no header
if (m_scanner.peek().type == Token::VALUE) { if (m_scanner.peek().type == Token::TYPE::VALUE) {
eventHandler.OnMapStart(mark, "?", NullAnchor, EmitterStyle::Default); eventHandler.OnMapStart(mark, "?", NullAnchor, EmitterStyle::Default);
HandleMap(eventHandler); HandleMap(eventHandler);
eventHandler.OnMapEnd(); eventHandler.OnMapEnd();
@ -68,7 +68,7 @@ void SingleDocParser::HandleNode(EventHandler& eventHandler) {
} }
// special case: an alias node // special case: an alias node
if (m_scanner.peek().type == Token::ALIAS) { if (m_scanner.peek().type == Token::TYPE::ALIAS) {
eventHandler.OnAlias(mark, LookupAnchor(mark, m_scanner.peek().value)); eventHandler.OnAlias(mark, LookupAnchor(mark, m_scanner.peek().value));
m_scanner.pop(); m_scanner.pop();
return; return;
@ -92,9 +92,9 @@ void SingleDocParser::HandleNode(EventHandler& eventHandler) {
// add non-specific tags // add non-specific tags
if (tag.empty()) if (tag.empty())
tag = (token.type == Token::NON_PLAIN_SCALAR ? "!" : "?"); tag = (token.type == Token::TYPE::NON_PLAIN_SCALAR ? "!" : "?");
if (token.type == Token::PLAIN_SCALAR if (token.type == Token::TYPE::PLAIN_SCALAR
&& tag.compare("?") == 0 && IsNullString(token.value)) { && tag.compare("?") == 0 && IsNullString(token.value)) {
eventHandler.OnNull(mark, anchor); eventHandler.OnNull(mark, anchor);
m_scanner.pop(); m_scanner.pop();
@ -103,32 +103,32 @@ void SingleDocParser::HandleNode(EventHandler& eventHandler) {
// now split based on what kind of node we should be // now split based on what kind of node we should be
switch (token.type) { switch (token.type) {
case Token::PLAIN_SCALAR: case Token::TYPE::PLAIN_SCALAR:
case Token::NON_PLAIN_SCALAR: case Token::TYPE::NON_PLAIN_SCALAR:
eventHandler.OnScalar(mark, tag, anchor, token.value); eventHandler.OnScalar(mark, tag, anchor, token.value);
m_scanner.pop(); m_scanner.pop();
return; return;
case Token::FLOW_SEQ_START: case Token::TYPE::FLOW_SEQ_START:
eventHandler.OnSequenceStart(mark, tag, anchor, EmitterStyle::Flow); eventHandler.OnSequenceStart(mark, tag, anchor, EmitterStyle::Flow);
HandleSequence(eventHandler); HandleSequence(eventHandler);
eventHandler.OnSequenceEnd(); eventHandler.OnSequenceEnd();
return; return;
case Token::BLOCK_SEQ_START: case Token::TYPE::BLOCK_SEQ_START:
eventHandler.OnSequenceStart(mark, tag, anchor, EmitterStyle::Block); eventHandler.OnSequenceStart(mark, tag, anchor, EmitterStyle::Block);
HandleSequence(eventHandler); HandleSequence(eventHandler);
eventHandler.OnSequenceEnd(); eventHandler.OnSequenceEnd();
return; return;
case Token::FLOW_MAP_START: case Token::TYPE::FLOW_MAP_START:
eventHandler.OnMapStart(mark, tag, anchor, EmitterStyle::Flow); eventHandler.OnMapStart(mark, tag, anchor, EmitterStyle::Flow);
HandleMap(eventHandler); HandleMap(eventHandler);
eventHandler.OnMapEnd(); eventHandler.OnMapEnd();
return; return;
case Token::BLOCK_MAP_START: case Token::TYPE::BLOCK_MAP_START:
eventHandler.OnMapStart(mark, tag, anchor, EmitterStyle::Block); eventHandler.OnMapStart(mark, tag, anchor, EmitterStyle::Block);
HandleMap(eventHandler); HandleMap(eventHandler);
eventHandler.OnMapEnd(); eventHandler.OnMapEnd();
return; return;
case Token::KEY: case Token::TYPE::KEY:
// compact maps can only go in a flow sequence // compact maps can only go in a flow sequence
if (m_pCollectionStack->GetCurCollectionType() == if (m_pCollectionStack->GetCurCollectionType() ==
CollectionType::FlowSeq) { CollectionType::FlowSeq) {
@ -151,10 +151,10 @@ void SingleDocParser::HandleNode(EventHandler& eventHandler) {
void SingleDocParser::HandleSequence(EventHandler& eventHandler) { void SingleDocParser::HandleSequence(EventHandler& eventHandler) {
// split based on start token // split based on start token
switch (m_scanner.peek().type) { switch (m_scanner.peek().type) {
case Token::BLOCK_SEQ_START: case Token::TYPE::BLOCK_SEQ_START:
HandleBlockSequence(eventHandler); HandleBlockSequence(eventHandler);
break; break;
case Token::FLOW_SEQ_START: case Token::TYPE::FLOW_SEQ_START:
HandleFlowSequence(eventHandler); HandleFlowSequence(eventHandler);
break; break;
default: default:
@ -172,18 +172,18 @@ void SingleDocParser::HandleBlockSequence(EventHandler& eventHandler) {
throw ParserException(m_scanner.mark(), ErrorMsg::END_OF_SEQ); throw ParserException(m_scanner.mark(), ErrorMsg::END_OF_SEQ);
Token token = m_scanner.peek(); Token token = m_scanner.peek();
if (token.type != Token::BLOCK_ENTRY && token.type != Token::BLOCK_SEQ_END) if (token.type != Token::TYPE::BLOCK_ENTRY && token.type != Token::TYPE::BLOCK_SEQ_END)
throw ParserException(token.mark, ErrorMsg::END_OF_SEQ); throw ParserException(token.mark, ErrorMsg::END_OF_SEQ);
m_scanner.pop(); m_scanner.pop();
if (token.type == Token::BLOCK_SEQ_END) if (token.type == Token::TYPE::BLOCK_SEQ_END)
break; break;
// check for null // check for null
if (!m_scanner.empty()) { if (!m_scanner.empty()) {
const Token& nextToken = m_scanner.peek(); const Token& nextToken = m_scanner.peek();
if (nextToken.type == Token::BLOCK_ENTRY || if (nextToken.type == Token::TYPE::BLOCK_ENTRY ||
nextToken.type == Token::BLOCK_SEQ_END) { nextToken.type == Token::TYPE::BLOCK_SEQ_END) {
eventHandler.OnNull(nextToken.mark, NullAnchor); eventHandler.OnNull(nextToken.mark, NullAnchor);
continue; continue;
} }
@ -205,7 +205,7 @@ void SingleDocParser::HandleFlowSequence(EventHandler& eventHandler) {
throw ParserException(m_scanner.mark(), ErrorMsg::END_OF_SEQ_FLOW); throw ParserException(m_scanner.mark(), ErrorMsg::END_OF_SEQ_FLOW);
// first check for end // first check for end
if (m_scanner.peek().type == Token::FLOW_SEQ_END) { if (m_scanner.peek().type == Token::TYPE::FLOW_SEQ_END) {
m_scanner.pop(); m_scanner.pop();
break; break;
} }
@ -219,9 +219,9 @@ void SingleDocParser::HandleFlowSequence(EventHandler& eventHandler) {
// now eat the separator (or could be a sequence end, which we ignore - but // now eat the separator (or could be a sequence end, which we ignore - but
// if it's neither, then it's a bad node) // if it's neither, then it's a bad node)
Token& token = m_scanner.peek(); Token& token = m_scanner.peek();
if (token.type == Token::FLOW_ENTRY) if (token.type == Token::TYPE::FLOW_ENTRY)
m_scanner.pop(); m_scanner.pop();
else if (token.type != Token::FLOW_SEQ_END) else if (token.type != Token::TYPE::FLOW_SEQ_END)
throw ParserException(token.mark, ErrorMsg::END_OF_SEQ_FLOW); throw ParserException(token.mark, ErrorMsg::END_OF_SEQ_FLOW);
} }
@ -231,16 +231,16 @@ void SingleDocParser::HandleFlowSequence(EventHandler& eventHandler) {
void SingleDocParser::HandleMap(EventHandler& eventHandler) { void SingleDocParser::HandleMap(EventHandler& eventHandler) {
// split based on start token // split based on start token
switch (m_scanner.peek().type) { switch (m_scanner.peek().type) {
case Token::BLOCK_MAP_START: case Token::TYPE::BLOCK_MAP_START:
HandleBlockMap(eventHandler); HandleBlockMap(eventHandler);
break; break;
case Token::FLOW_MAP_START: case Token::TYPE::FLOW_MAP_START:
HandleFlowMap(eventHandler); HandleFlowMap(eventHandler);
break; break;
case Token::KEY: case Token::TYPE::KEY:
HandleCompactMap(eventHandler); HandleCompactMap(eventHandler);
break; break;
case Token::VALUE: case Token::TYPE::VALUE:
HandleCompactMapWithNoKey(eventHandler); HandleCompactMapWithNoKey(eventHandler);
break; break;
default: default:
@ -258,17 +258,17 @@ void SingleDocParser::HandleBlockMap(EventHandler& eventHandler) {
throw ParserException(m_scanner.mark(), ErrorMsg::END_OF_MAP); throw ParserException(m_scanner.mark(), ErrorMsg::END_OF_MAP);
Token token = m_scanner.peek(); Token token = m_scanner.peek();
if (token.type != Token::KEY && token.type != Token::VALUE && if (token.type != Token::TYPE::KEY && token.type != Token::TYPE::VALUE &&
token.type != Token::BLOCK_MAP_END) token.type != Token::TYPE::BLOCK_MAP_END)
throw ParserException(token.mark, ErrorMsg::END_OF_MAP); throw ParserException(token.mark, ErrorMsg::END_OF_MAP);
if (token.type == Token::BLOCK_MAP_END) { if (token.type == Token::TYPE::BLOCK_MAP_END) {
m_scanner.pop(); m_scanner.pop();
break; break;
} }
// grab key (if non-null) // grab key (if non-null)
if (token.type == Token::KEY) { if (token.type == Token::TYPE::KEY) {
m_scanner.pop(); m_scanner.pop();
HandleNode(eventHandler); HandleNode(eventHandler);
} else { } else {
@ -276,7 +276,7 @@ void SingleDocParser::HandleBlockMap(EventHandler& eventHandler) {
} }
// now grab value (optional) // now grab value (optional)
if (!m_scanner.empty() && m_scanner.peek().type == Token::VALUE) { if (!m_scanner.empty() && m_scanner.peek().type == Token::TYPE::VALUE) {
m_scanner.pop(); m_scanner.pop();
HandleNode(eventHandler); HandleNode(eventHandler);
} else { } else {
@ -299,13 +299,13 @@ void SingleDocParser::HandleFlowMap(EventHandler& eventHandler) {
Token& token = m_scanner.peek(); Token& token = m_scanner.peek();
const Mark mark = token.mark; const Mark mark = token.mark;
// first check for end // first check for end
if (token.type == Token::FLOW_MAP_END) { if (token.type == Token::TYPE::FLOW_MAP_END) {
m_scanner.pop(); m_scanner.pop();
break; break;
} }
// grab key (if non-null) // grab key (if non-null)
if (token.type == Token::KEY) { if (token.type == Token::TYPE::KEY) {
m_scanner.pop(); m_scanner.pop();
HandleNode(eventHandler); HandleNode(eventHandler);
} else { } else {
@ -313,7 +313,7 @@ void SingleDocParser::HandleFlowMap(EventHandler& eventHandler) {
} }
// now grab value (optional) // now grab value (optional)
if (!m_scanner.empty() && m_scanner.peek().type == Token::VALUE) { if (!m_scanner.empty() && m_scanner.peek().type == Token::TYPE::VALUE) {
m_scanner.pop(); m_scanner.pop();
HandleNode(eventHandler); HandleNode(eventHandler);
} else { } else {
@ -326,9 +326,9 @@ void SingleDocParser::HandleFlowMap(EventHandler& eventHandler) {
// now eat the separator (or could be a map end, which we ignore - but if // now eat the separator (or could be a map end, which we ignore - but if
// it's neither, then it's a bad node) // it's neither, then it's a bad node)
Token& nextToken = m_scanner.peek(); Token& nextToken = m_scanner.peek();
if (nextToken.type == Token::FLOW_ENTRY) if (nextToken.type == Token::TYPE::FLOW_ENTRY)
m_scanner.pop(); m_scanner.pop();
else if (nextToken.type != Token::FLOW_MAP_END) else if (nextToken.type != Token::TYPE::FLOW_MAP_END)
throw ParserException(nextToken.mark, ErrorMsg::END_OF_MAP_FLOW); throw ParserException(nextToken.mark, ErrorMsg::END_OF_MAP_FLOW);
} }
@ -345,7 +345,7 @@ void SingleDocParser::HandleCompactMap(EventHandler& eventHandler) {
HandleNode(eventHandler); HandleNode(eventHandler);
// now grab value (optional) // now grab value (optional)
if (!m_scanner.empty() && m_scanner.peek().type == Token::VALUE) { if (!m_scanner.empty() && m_scanner.peek().type == Token::TYPE::VALUE) {
m_scanner.pop(); m_scanner.pop();
HandleNode(eventHandler); HandleNode(eventHandler);
} else { } else {
@ -382,10 +382,10 @@ void SingleDocParser::ParseProperties(std::string& tag, anchor_t& anchor,
return; return;
switch (m_scanner.peek().type) { switch (m_scanner.peek().type) {
case Token::TAG: case Token::TYPE::TAG:
ParseTag(tag); ParseTag(tag);
break; break;
case Token::ANCHOR: case Token::TYPE::ANCHOR:
ParseAnchor(anchor, anchor_name); ParseAnchor(anchor, anchor_name);
break; break;
default: default:

View File

@ -218,22 +218,22 @@ Stream::Stream(std::istream& input)
switch (state) { switch (state) {
case uis_utf8: case uis_utf8:
m_charSet = utf8; m_charSet = CharacterSet::utf8;
break; break;
case uis_utf16le: case uis_utf16le:
m_charSet = utf16le; m_charSet = CharacterSet::utf16le;
break; break;
case uis_utf16be: case uis_utf16be:
m_charSet = utf16be; m_charSet = CharacterSet::utf16be;
break; break;
case uis_utf32le: case uis_utf32le:
m_charSet = utf32le; m_charSet = CharacterSet::utf32le;
break; break;
case uis_utf32be: case uis_utf32be:
m_charSet = utf32be; m_charSet = CharacterSet::utf32be;
break; break;
default: default:
m_charSet = utf8; m_charSet = CharacterSet::utf8;
break; break;
} }
@ -301,19 +301,19 @@ void Stream::AdvanceCurrent() {
bool Stream::_ReadAheadTo(size_t i) const { bool Stream::_ReadAheadTo(size_t i) const {
while (m_input.good() && (m_readahead.size() <= i)) { while (m_input.good() && (m_readahead.size() <= i)) {
switch (m_charSet) { switch (m_charSet) {
case utf8: case CharacterSet::utf8:
StreamInUtf8(); StreamInUtf8();
break; break;
case utf16le: case CharacterSet::utf16le:
StreamInUtf16(); StreamInUtf16();
break; break;
case utf16be: case CharacterSet::utf16be:
StreamInUtf16(); StreamInUtf16();
break; break;
case utf32le: case CharacterSet::utf32le:
StreamInUtf32(); StreamInUtf32();
break; break;
case utf32be: case CharacterSet::utf32be:
StreamInUtf32(); StreamInUtf32();
break; break;
} }
@ -336,7 +336,7 @@ void Stream::StreamInUtf8() const {
void Stream::StreamInUtf16() const { void Stream::StreamInUtf16() const {
unsigned long ch = 0; unsigned long ch = 0;
unsigned char bytes[2]; unsigned char bytes[2];
int nBigEnd = (m_charSet == utf16be) ? 0 : 1; int nBigEnd = (m_charSet == CharacterSet::utf16be) ? 0 : 1;
bytes[0] = GetNextByte(); bytes[0] = GetNextByte();
bytes[1] = GetNextByte(); bytes[1] = GetNextByte();
@ -426,7 +426,7 @@ void Stream::StreamInUtf32() const {
unsigned long ch = 0; unsigned long ch = 0;
unsigned char bytes[4]; unsigned char bytes[4];
int* pIndexes = (m_charSet == utf32be) ? indexes[1] : indexes[0]; int* pIndexes = (m_charSet == CharacterSet::utf32be) ? indexes[1] : indexes[0];
bytes[0] = GetNextByte(); bytes[0] = GetNextByte();
bytes[1] = GetNextByte(); bytes[1] = GetNextByte();

View File

@ -47,7 +47,7 @@ class Stream {
void ResetColumn() { m_mark.column = 0; } void ResetColumn() { m_mark.column = 0; }
private: private:
enum CharacterSet { utf8, utf16le, utf16be, utf32le, utf32be }; enum class CharacterSet { utf8, utf16le, utf16be, utf32le, utf32be };
std::istream& m_input; std::istream& m_input;
Mark m_mark; Mark m_mark;

View File

@ -9,20 +9,20 @@ namespace YAML {
Tag::Tag(const Token& token) Tag::Tag(const Token& token)
: type(static_cast<TYPE>(token.data)), handle{}, value{} { : type(static_cast<TYPE>(token.data)), handle{}, value{} {
switch (type) { switch (type) {
case VERBATIM: case TYPE::VERBATIM:
value = token.value; value = token.value;
break; break;
case PRIMARY_HANDLE: case TYPE::PRIMARY_HANDLE:
value = token.value; value = token.value;
break; break;
case SECONDARY_HANDLE: case TYPE::SECONDARY_HANDLE:
value = token.value; value = token.value;
break; break;
case NAMED_HANDLE: case TYPE::NAMED_HANDLE:
handle = token.value; handle = token.value;
value = token.params[0]; value = token.params[0];
break; break;
case NON_SPECIFIC: case TYPE::NON_SPECIFIC:
break; break;
default: default:
assert(false); assert(false);
@ -31,15 +31,15 @@ Tag::Tag(const Token& token)
const std::string Tag::Translate(const Directives& directives) { const std::string Tag::Translate(const Directives& directives) {
switch (type) { switch (type) {
case VERBATIM: case TYPE::VERBATIM:
return value; return value;
case PRIMARY_HANDLE: case TYPE::PRIMARY_HANDLE:
return directives.TranslateTagHandle("!") + value; return directives.TranslateTagHandle("!") + value;
case SECONDARY_HANDLE: case TYPE::SECONDARY_HANDLE:
return directives.TranslateTagHandle("!!") + value; return directives.TranslateTagHandle("!!") + value;
case NAMED_HANDLE: case TYPE::NAMED_HANDLE:
return directives.TranslateTagHandle("!" + handle + "!") + value; return directives.TranslateTagHandle("!" + handle + "!") + value;
case NON_SPECIFIC: case TYPE::NON_SPECIFIC:
// TODO: // TODO:
return "!"; return "!";
default: default:

View File

@ -14,7 +14,7 @@ struct Directives;
struct Token; struct Token;
struct Tag { struct Tag {
enum TYPE { enum class TYPE {
VERBATIM, VERBATIM,
PRIMARY_HANDLE, PRIMARY_HANDLE,
SECONDARY_HANDLE, SECONDARY_HANDLE,

View File

@ -22,8 +22,8 @@ const std::string TokenNames[] = {
struct Token { struct Token {
// enums // enums
enum STATUS { VALID, INVALID, UNVERIFIED }; enum class STATUS { VALID, INVALID, UNVERIFIED };
enum TYPE { enum class TYPE {
DIRECTIVE, DIRECTIVE,
DOC_START, DOC_START,
DOC_END, DOC_END,
@ -49,10 +49,10 @@ struct Token {
// data // data
Token(TYPE type_, const Mark& mark_) Token(TYPE type_, const Mark& mark_)
: status(VALID), type(type_), mark(mark_), value{}, params{}, data(0) {} : status(STATUS::VALID), type(type_), mark(mark_), value{}, params{}, data(0) {}
friend std::ostream& operator<<(std::ostream& out, const Token& token) { friend std::ostream& operator<<(std::ostream& out, const Token& token) {
out << TokenNames[token.type] << std::string(": ") << token.value; out << TokenNames[static_cast<int>(token.type)] << std::string(": ") << token.value;
for (const std::string& param : token.params) for (const std::string& param : token.params)
out << std::string(" ") << param; out << std::string(" ") << param;
return out; return out;

View File

@ -165,7 +165,7 @@ TEST(RegExTest, OperatorPlus) {
TEST(RegExTest, StringOr) { TEST(RegExTest, StringOr) {
std::string str = "abcde"; std::string str = "abcde";
RegEx ex = RegEx(str, YAML::REGEX_OR); RegEx ex = RegEx(str, YAML::REGEX_OP::REGEX_OR);
for (size_t i = 0; i < str.size(); ++i) { for (size_t i = 0; i < str.size(); ++i) {
EXPECT_TRUE(ex.Matches(str.substr(i, 1))); EXPECT_TRUE(ex.Matches(str.substr(i, 1)));