diff --git a/spicy/lib/spicy_rt.hlt b/spicy/lib/spicy_rt.hlt
index 5a4fa3307f..8e6fad50b2 100644
--- a/spicy/lib/spicy_rt.hlt
+++ b/spicy/lib/spicy_rt.hlt
@@ -94,5 +94,6 @@ declare public void backtrack() &cxxname="spicy::rt::detail::backtrack" &have_pr
 declare public void initializeParsedUnit(inout ParsedUnit punit, any unit, TypeInfo ti) &cxxname="spicy::rt::ParsedUnit::initialize" &have_prototype;
 declare public bytes extractBytes(inout value_ref<stream> data, view<stream> cur, uint<64> n, bool eod_ok, string location, inout strong_ref<Filters> filters) &cxxname="spicy::rt::detail::extractBytes" &have_prototype;
+declare public void expectBytesLiteral(inout value_ref<stream> data, view<stream> cur, bytes literal, string location, inout strong_ref<Filters> filters) &cxxname="spicy::rt::detail::expectBytesLiteral" &have_prototype;

 }
diff --git a/spicy/runtime/include/parser.h b/spicy/runtime/include/parser.h
index ad1a4ea4c4..f2aa22a09b 100644
--- a/spicy/runtime/include/parser.h
+++ b/spicy/runtime/include/parser.h
@@ -534,12 +534,29 @@ std::optional unitFind(
 * @param data stream view to extract from
 * @param cur view of *data* that's being parsed
 * @param size number of bytes to extract
- * @param size eod_ok if true, will be ok if end-of-data is reached without *size* behind reached
+ * @param eod_ok if true, it is ok if end-of-data is reached before *size* bytes are available
+ * @param location location associated with the situation
+ * @param filters filter state associated with current unit instance (which may be null)
 * @returns extracted bytes
+ * @throws ParseError if not enough data is available
 */
 hilti::rt::Bytes extractBytes(hilti::rt::ValueReference<hilti::rt::Stream>& data, const hilti::rt::stream::View& cur,
                               uint64_t size, bool eod_ok, std::string_view location,
                               const hilti::rt::StrongReference<spicy::rt::filter::detail::Filters>& filters);

+/**
+ * Confirms that a stream view begins with a given bytes literal.
+ *
+ * @param data stream view to extract from
+ * @param cur view of *data* that's being parsed
+ * @param literal raw bytes representation of the literal to extract
+ * @param location location associated with the situation
+ * @param filters filter state associated with current unit instance (which may be null)
+ * @throws ParseError if the literal isn't found at the beginning of *cur*
+ */
+void expectBytesLiteral(hilti::rt::ValueReference<hilti::rt::Stream>& data, const hilti::rt::stream::View& cur,
+                        const hilti::rt::Bytes& literal, std::string_view location,
+                        const hilti::rt::StrongReference<spicy::rt::filter::detail::Filters>& filters);
+
 } // namespace detail

 } // namespace spicy::rt
diff --git a/spicy/runtime/src/parser.cc b/spicy/runtime/src/parser.cc
index 2b8d3199c0..4ed3b8d802 100644
--- a/spicy/runtime/src/parser.cc
+++ b/spicy/runtime/src/parser.cc
@@ -199,3 +199,17 @@ hilti::rt::Bytes detail::extractBytes(hilti::rt::ValueReference<hilti::rt::Stream>& data,
+
+void detail::expectBytesLiteral(hilti::rt::ValueReference<hilti::rt::Stream>& data, const hilti::rt::stream::View& cur,
+                                const hilti::rt::Bytes& literal, std::string_view location,
+                                const hilti::rt::StrongReference<spicy::rt::filter::detail::Filters>& filters) {
+    detail::waitForInput(data, cur, literal.size(),
+                         hilti::rt::fmt("expected %" PRIu64 R"( bytes for bytes literal "%s")", literal.size(),
+                                        literal),
+                         location, filters);
+
+    if ( ! cur.startsWith(literal) ) {
+        auto content = cur.sub(cur.begin() + literal.size()).data();
+        throw ParseError(hilti::rt::fmt(R"(expected bytes literal "%s" but input starts with "%s")", literal, content),
+                         location);
+    }
+}
diff --git a/spicy/runtime/src/tests/parser.cc b/spicy/runtime/src/tests/parser.cc
index e176805cf6..4aa3ac5c80 100644
--- a/spicy/runtime/src/tests/parser.cc
+++ b/spicy/runtime/src/tests/parser.cc
@@ -445,6 +445,21 @@ TEST_CASE("extractBytes") {
     }
 }

+TEST_CASE("expectBytesLiteral") {
+    // Most of the work in expectBytesLiteral() is done through the waitFor...()
+    // function, which we test separately.
+
+    auto data = hilti::rt::ValueReference<hilti::rt::Stream>();
+    data->append("12345");
+    data->freeze();
+    auto view = data->view();
+
+    CHECK_NOTHROW(detail::expectBytesLiteral(data, data->view(), "123", "", {}));
+    CHECK_THROWS_WITH_AS(detail::expectBytesLiteral(data, data->view(), "abc", "", {}),
+                         "expected bytes literal \"abc\" but input starts with \"123\" ()",
+                         const spicy::rt::ParseError&);
+}
+
 TEST_CASE("unitFind") {
     // We just tests the argument forwarding here, the matching itself is
     // covered by hilti::rt::stream::View::find().
diff --git a/spicy/toolchain/include/ast/types/unit.h b/spicy/toolchain/include/ast/types/unit.h
index 2107ba9f88..871633c796 100644
--- a/spicy/toolchain/include/ast/types/unit.h
+++ b/spicy/toolchain/include/ast/types/unit.h
@@ -113,7 +113,7 @@ class Unit : public UnqualifiedType {
     void setAttributes(ASTContext* ctx, AttributeSet* attrs) { setChild(ctx, 1, attrs); }
     void setContextType(ASTContext* ctx, UnqualifiedType* type) { setChild(ctx, 2, type); }
-    void setGrammar(std::shared_ptr<spicy::detail::codegen::Grammar> g) { _grammar = std::move(g); }
+    void setGrammar(spicy::detail::codegen::Grammar* g) { _grammar = g; }
     void setPublic(bool p) { _public = p; }

     std::string_view typeClass() const final { return "unit"; }
@@ -170,7 +170,7 @@ class Unit : public UnqualifiedType {
     bool _public = false;
     bool _may_have_filter = false;

-    std::shared_ptr<spicy::detail::codegen::Grammar> _grammar;
+    spicy::detail::codegen::Grammar* _grammar = nullptr;
 };

 } // namespace type
diff --git a/spicy/toolchain/include/compiler/detail/codegen/codegen.h b/spicy/toolchain/include/compiler/detail/codegen/codegen.h
index 3a4aaed190..1153d275e5 100644
--- a/spicy/toolchain/include/compiler/detail/codegen/codegen.h
+++ b/spicy/toolchain/include/compiler/detail/codegen/codegen.h
@@ -4,6 +4,7 @@
 #include
 #include
+#include <set>
 #include
 #include
 #include
@@ -26,7 +27,13 @@ namespace spicy::detail {
 namespace codegen {
 class GrammarBuilder;
 class ParserBuilder;
-struct ASTInfo;
+
+// Information collected from the AST in an initial pass for any code generation.
+struct ASTInfo {
+    std::set<ID> uses_sync_advance; // type ID of units implementing %sync_advance
+    std::set<uint64_t> look_aheads_in_use;
+};
+
 } // namespace codegen

 /**
@@ -44,6 +51,7 @@ class CodeGen {
     auto driver() const { return context()->driver(); }
     const auto& compilerContext() const { return driver()->context(); }
     const auto& options() const { return compilerContext()->options(); }
+    const auto& astInfo() const { return _ast_info; }

     /** Entry point for transformation from a Spicy AST to a HILTI AST.
      */
     bool compileAST(hilti::ASTRoot* root);
@@ -102,6 +110,7 @@ class CodeGen {
     Builder* _builder;
     codegen::GrammarBuilder _gb;
     codegen::ParserBuilder _pb;
+    codegen::ASTInfo _ast_info;

     std::vector _properties;
     std::map _type_mappings;
diff --git a/spicy/toolchain/include/compiler/detail/codegen/grammar-builder.h b/spicy/toolchain/include/compiler/detail/codegen/grammar-builder.h
index cab1d731bb..ad232b414f 100644
--- a/spicy/toolchain/include/compiler/detail/codegen/grammar-builder.h
+++ b/spicy/toolchain/include/compiler/detail/codegen/grammar-builder.h
@@ -41,11 +41,12 @@ class GrammarBuilder {
     hilti::Result run(type::Unit* unit);

     /**
-     * Returns the grammar for a unit type. The type must have been computed
-     * through `run()` already, otherwise this will abort That's generally
-     * done for all AST unit types at the beginning of code generation.
+     * Returns the grammar for a unit type. The type's grammar must have been
+     * computed through `run()` already, which is generally done for all AST
+     * unit types at the beginning of code generation. If the grammar hasn't
+     * been computed yet, this will return null.
      */
-    const Grammar& grammar(const type::Unit& unit);
+    const Grammar* grammar(const type::Unit& unit);

 private:
     CodeGen* _cg;
diff --git a/spicy/toolchain/include/compiler/detail/codegen/grammar.h b/spicy/toolchain/include/compiler/detail/codegen/grammar.h
index e5287a6642..4c79fb3776 100644
--- a/spicy/toolchain/include/compiler/detail/codegen/grammar.h
+++ b/spicy/toolchain/include/compiler/detail/codegen/grammar.h
@@ -109,7 +109,9 @@ class Grammar {
      *
      * @note will always return false until the root production gets set.
      */
-    bool needsLookAhead() const { return _needs_look_ahead; }
+    bool needsLookAhead() const { return _needs_look_ahead; } // XXX I believe we can remove this
+
+    const auto& lookAheadsInUse() const { return _look_aheads_in_use; }

     /**
      * Prints the grammar in a (somewhat) human readable form. This is for
@@ -147,6 +149,7 @@ class Grammar {
     std::map _nullable;
     std::map> _first;
     std::map> _follow;
+    std::set<uint64_t> _look_aheads_in_use;
 };

 } // namespace spicy::detail::codegen
diff --git a/spicy/toolchain/include/compiler/detail/codegen/parser-builder.h b/spicy/toolchain/include/compiler/detail/codegen/parser-builder.h
index fc4612e849..0d116759d5 100644
--- a/spicy/toolchain/include/compiler/detail/codegen/parser-builder.h
+++ b/spicy/toolchain/include/compiler/detail/codegen/parser-builder.h
@@ -129,7 +129,7 @@ struct ParserState {
     ID unit_id;

     /** True if the current grammar needs look-ahead tracking. */
-    bool needs_look_ahead;
+    bool needs_look_ahead; // XXX I believe we can remove this

     /**< Expression* referencing the current parse object. */
     Expression* self = nullptr;
diff --git a/spicy/toolchain/src/compiler/codegen/codegen.cc b/spicy/toolchain/src/compiler/codegen/codegen.cc
index 0d15f42fef..c2996760cc 100644
--- a/spicy/toolchain/src/compiler/codegen/codegen.cc
+++ b/spicy/toolchain/src/compiler/codegen/codegen.cc
@@ -1,7 +1,5 @@
 // Copyright (c) 2020-2023 by the Zeek Project. See LICENSE for details.
-#include - #include #include #include @@ -27,6 +25,8 @@ #include #include +#include "compiler/detail/codegen/productions/ctor.h" + using namespace spicy; using namespace spicy::detail; using namespace spicy::detail::codegen; @@ -37,38 +37,59 @@ namespace spicy::logging::debug { inline const hilti::logging::DebugStream CodeGen("spicy-codegen"); } // namespace spicy::logging::debug -namespace spicy::detail::codegen { - -// Information collected from the AST in an initial pass for any code generation. -struct ASTInfo { - std::set uses_sync_advance; // type ID of units implementing %sync_advance -}; - -} // namespace spicy::detail::codegen - namespace { // Read-only visitor collecting information from the AST that's needed for // subsequent code generation. struct VisitorASTInfo : public visitor::PreOrder { - VisitorASTInfo(ASTContext* ctx, ASTInfo* info) : context(ctx), info(info) {} + VisitorASTInfo(CodeGen* cg, ASTInfo* info) : cg(cg), info(info) {} - ASTContext* context; + CodeGen* cg; ASTInfo* info; void operator()(declaration::UnitHook* n) final { if ( n->id().local() == ID("0x25_sync_advance") ) { - const auto& unit = context->lookup(n->hook()->unitTypeIndex()); + const auto& unit = cg->context()->lookup(n->hook()->unitTypeIndex()); info->uses_sync_advance.insert(unit->typeID()); } } void operator()(type::unit::item::UnitHook* n) final { if ( n->id() == ID("0x25_sync_advance") ) { - const auto& unit = context->lookup(n->hook()->unitTypeIndex()); + const auto& unit = cg->context()->lookup(n->hook()->unitTypeIndex()); info->uses_sync_advance.insert(unit->typeID()); } } + + void operator()(hilti::declaration::Type* n) final { + if ( auto unit = n->type()->type()->tryAs() ) { + if ( n->type()->alias() ) + return; + + if ( auto r = cg->grammarBuilder()->run(unit); ! r ) { + hilti::logger().error(r.error().description(), n->location()); + return; + } + + auto lahs = unit->grammar().lookAheadsInUse(); + info->look_aheads_in_use.insert(lahs.begin(), lahs.end()); + + for ( const auto& [id, p] : unit->grammar().productions() ) { + auto field = p->meta().field(); + if ( ! field || ! field->attributes()->has("&synchronize") ) + continue; + + auto lahs = unit->grammar().lookAheadsForProduction(p); + if ( ! lahs ) + continue; + + for ( const auto* lah_prod : *lahs ) { + if ( const auto* ctor = lah_prod->tryAs() ) + info->look_aheads_in_use.insert(ctor->tokenID()); + } + } + } + } }; // Visitor that runs over each module's AST at the beginning of their @@ -109,12 +130,6 @@ struct VisitorPass1 : public visitor::MutatingPostOrder { } // Replace unit type with compiled struct type. - - if ( auto r = cg->grammarBuilder()->run(u); ! 
r ) { - hilti::logger().error(r.error().description(), n->location()); - return; - } - bool declare_only = false; if ( auto m = n->parent(); m && m->skipImplementation() ) declare_only = true; @@ -567,13 +582,14 @@ bool CodeGen::compileAST(hilti::ASTRoot* root) { } }; - ASTInfo info; - visitor::visit(VisitorASTInfo(context(), &info), root, ".spicy"); + visitor::visit(VisitorASTInfo(this, &_ast_info), root, ".spicy"); auto modified = - visitor::visit(VisitorModule(this, 1, &info), root, ".spicy", [](const auto& v) { return v.modified; }); - modified |= visitor::visit(VisitorModule(this, 2, &info), root, ".spicy", [](const auto& v) { return v.modified; }); - modified |= visitor::visit(VisitorModule(this, 3, &info), root, ".spicy", [](const auto& v) { return v.modified; }); + visitor::visit(VisitorModule(this, 1, &_ast_info), root, ".spicy", [](const auto& v) { return v.modified; }); + modified |= + visitor::visit(VisitorModule(this, 2, &_ast_info), root, ".spicy", [](const auto& v) { return v.modified; }); + modified |= + visitor::visit(VisitorModule(this, 3, &_ast_info), root, ".spicy", [](const auto& v) { return v.modified; }); // Update the context with type changes record by any of the passes. for ( auto [old, new_] : _type_mappings ) diff --git a/spicy/toolchain/src/compiler/codegen/grammar-builder.cc b/spicy/toolchain/src/compiler/codegen/grammar-builder.cc index 4586956392..c91101b7b9 100644 --- a/spicy/toolchain/src/compiler/codegen/grammar-builder.cc +++ b/spicy/toolchain/src/compiler/codegen/grammar-builder.cc @@ -352,15 +352,15 @@ hilti::Result GrammarBuilder::run(type::Unit* unit) { return r.error(); _grammars[id] = std::move(g); + unit->setGrammar(&_grammars[id]); return hilti::Nothing(); } -const Grammar& GrammarBuilder::grammar(const type::Unit& unit) { +const Grammar* GrammarBuilder::grammar(const type::Unit& unit) { assert(unit.canonicalID()); auto id = unit.canonicalID(); - if ( _grammars.find(id) == _grammars.end() ) - hilti::logger().internalError(fmt("grammar for unit %s accessed before it's been computed", id), - unit.meta().location()); - - return _grammars[id]; + if ( _grammars.find(id) != _grammars.end() ) + return &_grammars[id]; + else + return nullptr; } diff --git a/spicy/toolchain/src/compiler/codegen/grammar.cc b/spicy/toolchain/src/compiler/codegen/grammar.cc index adaf146122..0801595933 100644 --- a/spicy/toolchain/src/compiler/codegen/grammar.cc +++ b/spicy/toolchain/src/compiler/codegen/grammar.cc @@ -302,6 +302,12 @@ hilti::Result Grammar::_computeTables() { continue; lap->setLookAheads(std::make_pair(*v0, *v1)); + + // Add v0 and v1 to the set of look-ahead tokens in use. 
+ for ( const auto& v : {v0, v1} ) { + for ( const auto& x : *v ) + _look_aheads_in_use.insert(x->tokenID()); + } } return _check(); diff --git a/spicy/toolchain/src/compiler/codegen/parser-builder.cc b/spicy/toolchain/src/compiler/codegen/parser-builder.cc index 4e3f92008f..2737e9633b 100644 --- a/spicy/toolchain/src/compiler/codegen/parser-builder.cc +++ b/spicy/toolchain/src/compiler/codegen/parser-builder.cc @@ -1754,10 +1754,7 @@ struct ProductionVisitor : public production::Visitor { popState(); } - void operator()(const production::Ctor* p) final { - pb->parseLiteral(*p, destination()); - pb->trimInput(); - } + void operator()(const production::Ctor* p) final { pb->parseLiteral(*p, destination()); } auto parseLookAhead(const production::LookAhead& p) { assert(state().needs_look_ahead); @@ -2141,6 +2138,16 @@ void ParserBuilder::addParserMethods(hilti::type::Struct* s, type::Unit* t, bool } if ( ! declare_only ) { + const auto* grammar = cg()->grammarBuilder()->grammar(*t); + if ( ! grammar ) { + // not computed, presumably due to an earlier error + HILTI_DEBUG(spicy::logging::debug::ParserBuilder, + fmt("no grammar available for %s, skipping parser generation", t->canonicalID())); + return; + } + + auto visitor = ProductionVisitor(this, *grammar); + // Helper to initialize a unit's __context attribute. We use // a parse functions "context" argument if that was provided, // and otherwise create a default instanc of the unit's context type. @@ -2161,15 +2168,12 @@ void ParserBuilder::addParserMethods(hilti::type::Struct* s, type::Unit* t, bool HILTI_DEBUG(spicy::logging::debug::ParserBuilder, fmt("creating parser for %s", t->canonicalID())); hilti::logging::DebugPushIndent _(spicy::logging::debug::ParserBuilder); - const auto& grammar = cg()->grammarBuilder()->grammar(*t); - auto visitor = ProductionVisitor(this, grammar); - const auto& parameters = t->parameters(); // Only create `parse1` and `parse3` body if the unit can be default constructed. if ( std::all_of(parameters.begin(), parameters.end(), [](const auto& p) { return p->default_(); }) ) { // Create parse1() body. pushBuilder(); - builder()->setLocation(grammar.root()->location()); + builder()->setLocation(grammar->root()->location()); builder()->addLocal("__unit", builder()->valueReference( builder()->default_(builder()->typeName(t->typeID()), @@ -2193,7 +2197,7 @@ void ParserBuilder::addParserMethods(hilti::type::Struct* s, type::Unit* t, bool init_context(); - auto pstate = ParserState(builder(), t, grammar, builder()->id("__data"), builder()->id("__cur")); + auto pstate = ParserState(builder(), t, *grammar, builder()->id("__data"), builder()->id("__cur")); pstate.self = builder()->id("__unit"); pstate.begin = builder()->begin(builder()->id("__ncur")); pstate.cur = builder()->id("__ncur"); @@ -2203,7 +2207,7 @@ void ParserBuilder::addParserMethods(hilti::type::Struct* s, type::Unit* t, bool pstate.error = builder()->id("__error"); pushState(pstate); visitor.pushDestination(pstate.self); - visitor.parseProduction(*grammar.root(), true); + visitor.parseProduction(*grammar->root(), true); // Check if the unit never left trial mode. pushBuilder(builder()->addIf(state().error), [&]() { @@ -2221,7 +2225,7 @@ void ParserBuilder::addParserMethods(hilti::type::Struct* s, type::Unit* t, bool // Create parse3() body. 
pushBuilder(); - builder()->setLocation(grammar.root()->location()); + builder()->setLocation(grammar->root()->location()); builder()->addLocal("__unit", builder()->valueReference( builder()->default_(builder()->typeName(t->typeID()), @@ -2248,7 +2252,7 @@ void ParserBuilder::addParserMethods(hilti::type::Struct* s, type::Unit* t, bool init_context(); - pstate = ParserState(builder(), t, grammar, builder()->id("__data"), builder()->id("__cur")); + pstate = ParserState(builder(), t, *grammar, builder()->id("__data"), builder()->id("__cur")); pstate.self = builder()->id("__unit"); pstate.begin = builder()->begin(builder()->id("__ncur")); pstate.cur = builder()->id("__ncur"); @@ -2258,7 +2262,7 @@ void ParserBuilder::addParserMethods(hilti::type::Struct* s, type::Unit* t, bool pstate.error = builder()->id("__error"); pushState(pstate); visitor.pushDestination(pstate.self); - visitor.parseProduction(*grammar.root(), true); + visitor.parseProduction(*grammar->root(), true); // Check if the unit never left trial mode. pushBuilder(builder()->addIf(state().error), [&]() { @@ -2278,7 +2282,7 @@ void ParserBuilder::addParserMethods(hilti::type::Struct* s, type::Unit* t, bool // Create parse2() body. pushBuilder(); - builder()->setLocation(grammar.root()->location()); + builder()->setLocation(grammar->root()->location()); builder()->addLocal("__ncur", builder()->qualifiedType(builder()->typeStreamView(), hilti::Constness::Mutable), builder()->ternary(builder()->id("__cur"), builder()->deref(builder()->id("__cur")), builder()->cast(builder()->deref(builder()->id("__data")), @@ -2293,7 +2297,7 @@ void ParserBuilder::addParserMethods(hilti::type::Struct* s, type::Unit* t, bool init_context(); - auto pstate = ParserState(builder(), t, grammar, builder()->id("__data"), builder()->id("__cur")); + auto pstate = ParserState(builder(), t, *grammar, builder()->id("__data"), builder()->id("__cur")); pstate.self = builder()->id("__unit"); pstate.begin = builder()->begin(builder()->id("__ncur")); pstate.cur = builder()->id("__ncur"); @@ -2303,7 +2307,7 @@ void ParserBuilder::addParserMethods(hilti::type::Struct* s, type::Unit* t, bool pstate.error = builder()->id("__error"); pushState(pstate); visitor.pushDestination(pstate.self); - visitor.parseProduction(*grammar.root(), true); + visitor.parseProduction(*grammar->root(), true); // Check if the unit never left trial mode. pushBuilder(builder()->addIf(state().error), [&]() { diff --git a/spicy/toolchain/src/compiler/codegen/parsers/literals.cc b/spicy/toolchain/src/compiler/codegen/parsers/literals.cc index 523a96f139..2c8ba3cc6f 100644 --- a/spicy/toolchain/src/compiler/codegen/parsers/literals.cc +++ b/spicy/toolchain/src/compiler/codegen/parsers/literals.cc @@ -56,42 +56,62 @@ struct Visitor : public visitor::PreOrder { } auto popBuilder() { return pb()->popBuilder(); } + auto needToCheckForLookAhead(const Meta& meta) { + bool needs = false; + + if ( auto field = lp->production->meta().field(); field && field->attributes()->find("&synchronize") ) + needs = true; + else { + auto tokens = pb()->cg()->astInfo().look_aheads_in_use; + needs = tokens.find(lp->production->tokenID()) != tokens.end(); + } + + if ( pb()->options().debug && ! 
needs ) + builder()->addAssert(builder()->not_(state().lahead), "unexpected look-ahead token pending", meta); + + return needs; + } + void operator()(hilti::ctor::Bytes* n) final { auto error_msg = fmt("expecting '%s'", n->value()); auto len = builder()->integer(static_cast(n->value().size())); - auto cond = builder()->memberCall(state().cur, "starts_with", {builder()->expression(n)}); switch ( state().literal_mode ) { case LiteralMode::Default: case LiteralMode::Skip: { - auto [have_lah, no_lah] = builder()->addIfElse(state().lahead); + bool check_for_look_ahead = needToCheckForLookAhead(n->meta()); + if ( check_for_look_ahead ) { + auto [have_lah, no_lah] = builder()->addIfElse(state().lahead); - pushBuilder(have_lah); + pushBuilder(have_lah); - pushBuilder(builder()->addIf( - builder()->unequal(state().lahead, builder()->integer(lp->production->tokenID())))); - pb()->parseError("unexpected token to consume", n->meta()); - popBuilder(); + pushBuilder(builder()->addIf( + builder()->unequal(state().lahead, builder()->integer(lp->production->tokenID())))); + pb()->parseError("unexpected token to consume", n->meta()); + popBuilder(); - pushBuilder(builder()->addIf( - builder()->unequal(builder()->expression(n), - builder()->memberCall(state().cur, "sub", - {builder()->begin(state().cur), state().lahead_end})))); - pb()->parseError("unexpected data when consuming token", n->meta()); - popBuilder(); + pushBuilder( + builder()->addIf(builder()->unequal(builder()->expression(n), + builder()->memberCall(state().cur, "sub", + {builder()->begin(state().cur), + state().lahead_end})))); + pb()->parseError("unexpected data when consuming token", n->meta()); + popBuilder(); - pb()->consumeLookAhead(); - popBuilder(); + pb()->consumeLookAhead(); + popBuilder(); - pushBuilder(no_lah); - pb()->waitForInput(len, error_msg, n->meta()); - auto no_match = builder()->addIf(builder()->not_(cond)); - pushBuilder(no_match); - pb()->parseError(error_msg, n->meta()); - popBuilder(); + pushBuilder(no_lah); + } + + builder()->addCall("spicy_rt::expectBytesLiteral", + {state().data, state().cur, builder()->expression(n), + builder()->expression(n->meta()), pb()->currentFilters(state())}); pb()->advanceInput(len); - popBuilder(); + + if ( check_for_look_ahead ) + popBuilder(); if ( state().literal_mode != LiteralMode::Skip ) builder()->addAssign(lp->destination(n->type()->type()), builder()->expression(n)); @@ -102,6 +122,7 @@ struct Visitor : public visitor::PreOrder { case LiteralMode::Search: // Handled in `parseLiteral`. case LiteralMode::Try: + auto cond = builder()->memberCall(state().cur, "starts_with", {builder()->expression(n)}); result = builder()->ternary(builder()->and_(pb()->waitForInputOrEod(len), cond), builder()->sum(builder()->begin(state().cur), len), builder()->begin(state().cur)); @@ -125,45 +146,48 @@ struct Visitor : public visitor::PreOrder { } auto parse = [&](Expression* result) -> Expression* { - auto [have_lah, no_lah] = builder()->addIfElse(state().lahead); if ( ! 
result && state().literal_mode != LiteralMode::Skip ) result = lp->destination(builder()->typeBytes()); - pushBuilder(have_lah); + bool check_for_look_ahead = needToCheckForLookAhead(n->meta()); + if ( check_for_look_ahead ) { + auto [have_lah, no_lah] = builder()->addIfElse(state().lahead); - pushBuilder( - builder()->addIf(builder()->unequal(state().lahead, builder()->integer(lp->production->tokenID())))); - pb()->parseError("unexpected token to consume", n->meta()); - popBuilder(); + pushBuilder(have_lah); - pb()->consumeLookAhead(result); - popBuilder(); + pushBuilder(builder()->addIf( + builder()->unequal(state().lahead, builder()->integer(lp->production->tokenID())))); + pb()->parseError("unexpected token to consume", n->meta()); + popBuilder(); + + pb()->consumeLookAhead(result); + popBuilder(); - pushBuilder(no_lah); + pushBuilder(no_lah); + } - builder()->addLocal(ID("ncur"), state().cur); + auto ncur = builder()->addTmp(ID("ncur"), state().cur); auto ms = builder()->local("ms", builder()->memberCall(builder()->id(re), "token_matcher")); auto body = builder()->addWhile(ms, builder()->bool_(true)); pushBuilder(body); - builder()->addLocal(ID("rc"), - builder()->qualifiedType(builder()->typeSignedInteger(32), hilti::Constness::Mutable)); + auto rc = builder()->addTmp(ID("rc"), builder()->qualifiedType(builder()->typeSignedInteger(32), + hilti::Constness::Mutable)); - builder()->addAssign(builder()->tuple({builder()->id("rc"), builder()->id("ncur")}), - builder()->memberCall(builder()->id("ms"), "advance", {builder()->id("ncur")}), - n->meta()); + builder()->addAssign(builder()->tuple({rc, ncur}), + builder()->memberCall(builder()->id("ms"), "advance", {ncur}), n->meta()); - auto switch_ = builder()->addSwitch(builder()->id("rc"), n->meta()); + auto switch_ = builder()->addSwitch(rc, n->meta()); auto no_match_try_again = switch_.addCase(builder()->integer(-1)); pushBuilder(no_match_try_again); auto pstate = pb()->state(); pstate.self = builder()->expressionName(ID("self")); - pstate.cur = builder()->id("ncur"); + pstate.cur = ncur; pb()->pushState(std::move(pstate)); builder()->addComment("NOLINTNEXTLINE(clang-analyzer-deadcode.DeadStores)"); - builder()->addLocal(ID("more_data"), pb()->waitForInputOrEod()); + builder()->addExpression(pb()->waitForInputOrEod()); pb()->popState(); builder()->addContinue(); @@ -182,17 +206,17 @@ struct Visitor : public visitor::PreOrder { builder()->addAssign(*state().captures, builder()->memberCall(builder()->id("ms"), "captures", {state().data})); - builder()->addAssign(result, builder()->memberCall(state().cur, "sub", - {builder()->begin(builder()->id("ncur"))})); + builder()->addAssign(result, builder()->memberCall(state().cur, "sub", {builder()->begin(ncur)})); } - pb()->setInput(builder()->id("ncur")); + pb()->setInput(ncur); builder()->addBreak(); popBuilder(); popBuilder(); - popBuilder(); + if ( check_for_look_ahead ) + popBuilder(); return result; }; @@ -223,19 +247,23 @@ struct Visitor : public visitor::PreOrder { switch ( state().literal_mode ) { case LiteralMode::Default: case LiteralMode::Skip: { - auto [have_lah, no_lah] = builder()->addIfElse(state().lahead); + bool check_for_look_ahead = needToCheckForLookAhead(meta); + if ( check_for_look_ahead ) { + auto [have_lah, no_lah] = builder()->addIfElse(state().lahead); - pushBuilder(have_lah); + pushBuilder(have_lah); - pushBuilder(builder()->addIf( - builder()->unequal(state().lahead, builder()->integer(lp->production->tokenID())))); - pb()->parseError("unexpected token to consume", meta); 
- popBuilder(); + pushBuilder(builder()->addIf( + builder()->unequal(state().lahead, builder()->integer(lp->production->tokenID())))); + pb()->parseError("unexpected token to consume", meta); + popBuilder(); - pb()->consumeLookAhead(); - popBuilder(); + pb()->consumeLookAhead(); + popBuilder(); + + pushBuilder(no_lah); + } - pushBuilder(no_lah); auto old_cur = builder()->addTmp("ocur", state().cur); // Parse value as an instance of the corresponding type. @@ -251,7 +279,8 @@ struct Visitor : public visitor::PreOrder { pb()->parseError(fmt("expecting %u", *expected), meta); popBuilder(); - popBuilder(); + if ( check_for_look_ahead ) + popBuilder(); if ( state().literal_mode != LiteralMode::Skip ) builder()->addAssign(lp->destination(type), expected); @@ -290,26 +319,30 @@ struct Visitor : public visitor::PreOrder { switch ( state().literal_mode ) { case LiteralMode::Default: case LiteralMode::Skip: { - auto [have_lah, no_lah] = builder()->addIfElse(state().lahead); + bool check_for_look_ahead = needToCheckForLookAhead(n->meta()); + if ( check_for_look_ahead ) { + auto [have_lah, no_lah] = builder()->addIfElse(state().lahead); - pushBuilder(have_lah); + pushBuilder(have_lah); - pushBuilder(builder()->addIf( - builder()->unequal(state().lahead, builder()->integer(lp->production->tokenID())))); - pb()->parseError("unexpected token to consume", n->meta()); - popBuilder(); + pushBuilder(builder()->addIf( + builder()->unequal(state().lahead, builder()->integer(lp->production->tokenID())))); + pb()->parseError("unexpected token to consume", n->meta()); + popBuilder(); - // Need to reparse the value to assign it to our destination. - auto value = pb()->parseType(n->btype(), lp->production->meta(), {}, TypesMode::Default); - builder()->addAssign(lp->destination(n->btype()), value); + // Need to reparse the value to assign it to our destination. + auto value = pb()->parseType(n->btype(), lp->production->meta(), {}, TypesMode::Default); + builder()->addAssign(lp->destination(n->btype()), value); - pb()->consumeLookAhead(); - popBuilder(); + pb()->consumeLookAhead(); + popBuilder(); + + pushBuilder(no_lah); + } - pushBuilder(no_lah); auto old_cur = builder()->addTmp("ocur", state().cur); - value = pb()->parseType(n->btype(), lp->production->meta(), {}, TypesMode::Default); + auto value = pb()->parseType(n->btype(), lp->production->meta(), {}, TypesMode::Default); // Check that the bit values match what we expect. for ( const auto& b : n->bits() ) { @@ -324,7 +357,8 @@ struct Visitor : public visitor::PreOrder { if ( state().literal_mode != LiteralMode::Skip ) builder()->addAssign(lp->destination(n->btype()), value); - popBuilder(); + if ( check_for_look_ahead ) + popBuilder(); result = value; return; diff --git a/tests/Baseline/spicy.types.bytes.parse-ctor-error/output b/tests/Baseline/spicy.types.bytes.parse-ctor-error/output index 23e555d957..8921a55a2e 100644 --- a/tests/Baseline/spicy.types.bytes.parse-ctor-error/output +++ b/tests/Baseline/spicy.types.bytes.parse-ctor-error/output @@ -1,3 +1,3 @@ ### BTest baseline data generated by btest-diff. Do not edit. Use "btest -U/-u" to update. Requires BTest >= 0.63. 
-[error] terminating with uncaught exception of type spicy::rt::ParseError: expecting 'def' (2 bytes available) (<...>/parse-ctor-error.spicy:10:10-10:15) -[error] terminating with uncaught exception of type spicy::rt::ParseError: expecting 'def' (<...>/parse-ctor-error.spicy:10:10-10:15) +[error] terminating with uncaught exception of type spicy::rt::ParseError: expected 3 bytes for bytes literal "def" (2 bytes available) (<...>/parse-ctor-error.spicy:10:10-10:15) +[error] terminating with uncaught exception of type spicy::rt::ParseError: expected bytes literal "def" but input starts with "ded" (<...>/parse-ctor-error.spicy:10:10-10:15) diff --git a/tests/Baseline/spicy.types.function.cxxname-normalization/output b/tests/Baseline/spicy.types.function.cxxname-normalization/output index 499ddfccb3..c16489a77d 100644 --- a/tests/Baseline/spicy.types.function.cxxname-normalization/output +++ b/tests/Baseline/spicy.types.function.cxxname-normalization/output @@ -59,6 +59,7 @@ [debug/resolver] [spicy_rt.hlt:92:33-92:71] Attribute "&cxxname="spicy::rt::detail::backtrack"" -> Attribute "&cxxname="::spicy::rt::detail::backtrack"" [debug/resolver] [spicy_rt.hlt:94:89-94:132] Attribute "&cxxname="spicy::rt::ParsedUnit::initialize"" -> Attribute "&cxxname="::spicy::rt::ParsedUnit::initialize"" [debug/resolver] [spicy_rt.hlt:96:160-96:201] Attribute "&cxxname="spicy::rt::detail::extractBytes"" -> Attribute "&cxxname="::spicy::rt::detail::extractBytes"" +[debug/resolver] [spicy_rt.hlt:97:155-97:202] Attribute "&cxxname="spicy::rt::detail::expectBytesLiteral"" -> Attribute "&cxxname="::spicy::rt::detail::expectBytesLiteral"" [debug/resolver] [spicy.spicy:14:3-14:37] Attribute "&cxxname="hilti::rt::AddressFamily"" -> Attribute "&cxxname="::hilti::rt::AddressFamily"" [debug/resolver] [spicy.spicy:23:3-23:41] Attribute "&cxxname="hilti::rt::integer::BitOrder"" -> Attribute "&cxxname="::hilti::rt::integer::BitOrder"" [debug/resolver] [spicy.spicy:31:3-31:33] Attribute "&cxxname="hilti::rt::ByteOrder"" -> Attribute "&cxxname="::hilti::rt::ByteOrder"" diff --git a/tests/Baseline/spicy.types.unit.canonical-ids-with-import/output b/tests/Baseline/spicy.types.unit.canonical-ids-with-import/output index 6af8d8b0a1..00234494af 100644 --- a/tests/Baseline/spicy.types.unit.canonical-ids-with-import/output +++ b/tests/Baseline/spicy.types.unit.canonical-ids-with-import/output @@ -263,6 +263,12 @@ [debug/ast-declarations] - Parameter "eod_ok" (spicy_rt::eod_ok) [debug/ast-declarations] - Parameter "location" (spicy_rt::location_3) [debug/ast-declarations] - Parameter "filters" (spicy_rt::filters_7) +[debug/ast-declarations] - Function "expectBytesLiteral" (spicy_rt::expectBytesLiteral) +[debug/ast-declarations] - Parameter "data" (spicy_rt::data_11) +[debug/ast-declarations] - Parameter "cur" (spicy_rt::cur_10) +[debug/ast-declarations] - Parameter "literal" (spicy_rt::literal) +[debug/ast-declarations] - Parameter "location" (spicy_rt::location_4) +[debug/ast-declarations] - Parameter "filters" (spicy_rt::filters_8) [debug/ast-declarations] - Module "spicy" (spicy) [debug/ast-declarations] - Property "%skip-implementation" (spicy::%skip-implementation) [debug/ast-declarations] - Type "AddressFamily" (spicy::AddressFamily) @@ -385,10 +391,11 @@ [debug/ast-declarations] - [function] spicy::zlib_decompress -> spicy::ZlibStream [debug/ast-declarations] - [function] spicy::zlib_finish -> spicy::ZlibStream [debug/ast-declarations] - [function] spicy::zlib_init -> spicy::ZlibStream -[debug/ast-declarations] - [module] 
spicy_rt -> hilti::Exception, hilti::RecoverableFailure, spicy_rt::Backtrack, spicy_rt::BitOrder, spicy_rt::Direction, spicy_rt::Filters, spicy_rt::FindDirection, spicy_rt::Forward, spicy_rt::HiltiResumable, spicy_rt::MIMEType, spicy_rt::MissingData, spicy_rt::ParseError, spicy_rt::ParsedUnit, spicy_rt::Parser, spicy_rt::ParserPort, spicy_rt::Sink, spicy_rt::SinkState, spicy_rt::TypeInfo, spicy_rt::UnitAlreadyConnected, spicy_rt::UnitContext, spicy_rt::atEod, spicy_rt::backtrack, spicy_rt::confirm, spicy_rt::createContext, spicy_rt::extractBytes, spicy_rt::filter_connect, spicy_rt::filter_disconnect, spicy_rt::filter_forward, spicy_rt::filter_forward_eod, spicy_rt::filter_init, spicy_rt::initializeParsedUnit, spicy_rt::printParserState, spicy_rt::registerParser, spicy_rt::reject, spicy_rt::setContext, spicy_rt::unit_find, spicy_rt::waitForEod, spicy_rt::waitForInput, spicy_rt::waitForInputOrEod, spicy_rt::waitForInputOrEod_2, spicy_rt::waitForInput_2 +[debug/ast-declarations] - [module] spicy_rt -> hilti::Exception, hilti::RecoverableFailure, spicy_rt::Backtrack, spicy_rt::BitOrder, spicy_rt::Direction, spicy_rt::Filters, spicy_rt::FindDirection, spicy_rt::Forward, spicy_rt::HiltiResumable, spicy_rt::MIMEType, spicy_rt::MissingData, spicy_rt::ParseError, spicy_rt::ParsedUnit, spicy_rt::Parser, spicy_rt::ParserPort, spicy_rt::Sink, spicy_rt::SinkState, spicy_rt::TypeInfo, spicy_rt::UnitAlreadyConnected, spicy_rt::UnitContext, spicy_rt::atEod, spicy_rt::backtrack, spicy_rt::confirm, spicy_rt::createContext, spicy_rt::expectBytesLiteral, spicy_rt::extractBytes, spicy_rt::filter_connect, spicy_rt::filter_disconnect, spicy_rt::filter_forward, spicy_rt::filter_forward_eod, spicy_rt::filter_init, spicy_rt::initializeParsedUnit, spicy_rt::printParserState, spicy_rt::registerParser, spicy_rt::reject, spicy_rt::setContext, spicy_rt::unit_find, spicy_rt::waitForEod, spicy_rt::waitForInput, spicy_rt::waitForInputOrEod, spicy_rt::waitForInputOrEod_2, spicy_rt::waitForInput_2 [debug/ast-declarations] - [type] spicy_rt::Parser -> spicy_rt::MIMEType, spicy_rt::ParserPort, spicy_rt::TypeInfo [debug/ast-declarations] - [function] spicy_rt::atEod -> spicy_rt::Filters [debug/ast-declarations] - [function] spicy_rt::createContext -> spicy_rt::TypeInfo, spicy_rt::UnitContext +[debug/ast-declarations] - [function] spicy_rt::expectBytesLiteral -> spicy_rt::Filters [debug/ast-declarations] - [function] spicy_rt::extractBytes -> spicy_rt::Filters [debug/ast-declarations] - [function] spicy_rt::initializeParsedUnit -> spicy_rt::ParsedUnit, spicy_rt::TypeInfo [debug/ast-declarations] - [function] spicy_rt::printParserState -> hilti::Exception, hilti::RecoverableFailure @@ -812,6 +819,12 @@ [debug/ast-declarations] - Parameter "eod_ok" (spicy_rt::eod_ok) [debug/ast-declarations] - Parameter "location" (spicy_rt::location_3) [debug/ast-declarations] - Parameter "filters" (spicy_rt::filters_7) +[debug/ast-declarations] - Function "expectBytesLiteral" (spicy_rt::expectBytesLiteral) +[debug/ast-declarations] - Parameter "data" (spicy_rt::data_11) +[debug/ast-declarations] - Parameter "cur" (spicy_rt::cur_10) +[debug/ast-declarations] - Parameter "literal" (spicy_rt::literal) +[debug/ast-declarations] - Parameter "location" (spicy_rt::location_4) +[debug/ast-declarations] - Parameter "filters" (spicy_rt::filters_8) [debug/ast-declarations] - Module "spicy" (spicy) [debug/ast-declarations] - Property "%skip-implementation" (spicy::%skip-implementation) [debug/ast-declarations] - Type "AddressFamily" (spicy::AddressFamily) 
@@ -945,10 +958,11 @@ [debug/ast-declarations] - [function] spicy::zlib_decompress -> spicy::ZlibStream [debug/ast-declarations] - [function] spicy::zlib_finish -> spicy::ZlibStream [debug/ast-declarations] - [function] spicy::zlib_init -> spicy::ZlibStream -[debug/ast-declarations] - [module] spicy_rt -> hilti::Exception, hilti::RecoverableFailure, spicy_rt::Backtrack, spicy_rt::BitOrder, spicy_rt::Direction, spicy_rt::Filters, spicy_rt::FindDirection, spicy_rt::Forward, spicy_rt::HiltiResumable, spicy_rt::MIMEType, spicy_rt::MissingData, spicy_rt::ParseError, spicy_rt::ParsedUnit, spicy_rt::Parser, spicy_rt::ParserPort, spicy_rt::Sink, spicy_rt::SinkState, spicy_rt::TypeInfo, spicy_rt::UnitAlreadyConnected, spicy_rt::UnitContext, spicy_rt::atEod, spicy_rt::backtrack, spicy_rt::confirm, spicy_rt::createContext, spicy_rt::extractBytes, spicy_rt::filter_connect, spicy_rt::filter_disconnect, spicy_rt::filter_forward, spicy_rt::filter_forward_eod, spicy_rt::filter_init, spicy_rt::initializeParsedUnit, spicy_rt::printParserState, spicy_rt::registerParser, spicy_rt::reject, spicy_rt::setContext, spicy_rt::unit_find, spicy_rt::waitForEod, spicy_rt::waitForInput, spicy_rt::waitForInputOrEod, spicy_rt::waitForInputOrEod_2, spicy_rt::waitForInput_2 +[debug/ast-declarations] - [module] spicy_rt -> hilti::Exception, hilti::RecoverableFailure, spicy_rt::Backtrack, spicy_rt::BitOrder, spicy_rt::Direction, spicy_rt::Filters, spicy_rt::FindDirection, spicy_rt::Forward, spicy_rt::HiltiResumable, spicy_rt::MIMEType, spicy_rt::MissingData, spicy_rt::ParseError, spicy_rt::ParsedUnit, spicy_rt::Parser, spicy_rt::ParserPort, spicy_rt::Sink, spicy_rt::SinkState, spicy_rt::TypeInfo, spicy_rt::UnitAlreadyConnected, spicy_rt::UnitContext, spicy_rt::atEod, spicy_rt::backtrack, spicy_rt::confirm, spicy_rt::createContext, spicy_rt::expectBytesLiteral, spicy_rt::extractBytes, spicy_rt::filter_connect, spicy_rt::filter_disconnect, spicy_rt::filter_forward, spicy_rt::filter_forward_eod, spicy_rt::filter_init, spicy_rt::initializeParsedUnit, spicy_rt::printParserState, spicy_rt::registerParser, spicy_rt::reject, spicy_rt::setContext, spicy_rt::unit_find, spicy_rt::waitForEod, spicy_rt::waitForInput, spicy_rt::waitForInputOrEod, spicy_rt::waitForInputOrEod_2, spicy_rt::waitForInput_2 [debug/ast-declarations] - [type] spicy_rt::Parser -> spicy_rt::MIMEType, spicy_rt::ParserPort, spicy_rt::TypeInfo [debug/ast-declarations] - [function] spicy_rt::atEod -> spicy_rt::Filters [debug/ast-declarations] - [function] spicy_rt::createContext -> spicy_rt::TypeInfo, spicy_rt::UnitContext +[debug/ast-declarations] - [function] spicy_rt::expectBytesLiteral -> spicy_rt::Filters [debug/ast-declarations] - [function] spicy_rt::extractBytes -> spicy_rt::Filters [debug/ast-declarations] - [function] spicy_rt::initializeParsedUnit -> spicy_rt::ParsedUnit, spicy_rt::TypeInfo [debug/ast-declarations] - [function] spicy_rt::printParserState -> hilti::Exception, hilti::RecoverableFailure diff --git a/tests/Baseline/spicy.types.unit.canonical-ids/output b/tests/Baseline/spicy.types.unit.canonical-ids/output index 517989fea3..07adc814bc 100644 --- a/tests/Baseline/spicy.types.unit.canonical-ids/output +++ b/tests/Baseline/spicy.types.unit.canonical-ids/output @@ -262,6 +262,12 @@ [debug/ast-declarations] - Parameter "eod_ok" (spicy_rt::eod_ok) [debug/ast-declarations] - Parameter "location" (spicy_rt::location_3) [debug/ast-declarations] - Parameter "filters" (spicy_rt::filters_7) +[debug/ast-declarations] - Function "expectBytesLiteral" 
(spicy_rt::expectBytesLiteral) +[debug/ast-declarations] - Parameter "data" (spicy_rt::data_11) +[debug/ast-declarations] - Parameter "cur" (spicy_rt::cur_10) +[debug/ast-declarations] - Parameter "literal" (spicy_rt::literal) +[debug/ast-declarations] - Parameter "location" (spicy_rt::location_4) +[debug/ast-declarations] - Parameter "filters" (spicy_rt::filters_8) [debug/ast-declarations] - Module "spicy" (spicy) [debug/ast-declarations] - Property "%skip-implementation" (spicy::%skip-implementation) [debug/ast-declarations] - Type "AddressFamily" (spicy::AddressFamily) @@ -386,10 +392,11 @@ [debug/ast-declarations] - [function] spicy::zlib_decompress -> spicy::ZlibStream [debug/ast-declarations] - [function] spicy::zlib_finish -> spicy::ZlibStream [debug/ast-declarations] - [function] spicy::zlib_init -> spicy::ZlibStream -[debug/ast-declarations] - [module] spicy_rt -> hilti::Exception, hilti::RecoverableFailure, spicy_rt::Backtrack, spicy_rt::BitOrder, spicy_rt::Direction, spicy_rt::Filters, spicy_rt::FindDirection, spicy_rt::Forward, spicy_rt::HiltiResumable, spicy_rt::MIMEType, spicy_rt::MissingData, spicy_rt::ParseError, spicy_rt::ParsedUnit, spicy_rt::Parser, spicy_rt::ParserPort, spicy_rt::Sink, spicy_rt::SinkState, spicy_rt::TypeInfo, spicy_rt::UnitAlreadyConnected, spicy_rt::UnitContext, spicy_rt::atEod, spicy_rt::backtrack, spicy_rt::confirm, spicy_rt::createContext, spicy_rt::extractBytes, spicy_rt::filter_connect, spicy_rt::filter_disconnect, spicy_rt::filter_forward, spicy_rt::filter_forward_eod, spicy_rt::filter_init, spicy_rt::initializeParsedUnit, spicy_rt::printParserState, spicy_rt::registerParser, spicy_rt::reject, spicy_rt::setContext, spicy_rt::unit_find, spicy_rt::waitForEod, spicy_rt::waitForInput, spicy_rt::waitForInputOrEod, spicy_rt::waitForInputOrEod_2, spicy_rt::waitForInput_2 +[debug/ast-declarations] - [module] spicy_rt -> hilti::Exception, hilti::RecoverableFailure, spicy_rt::Backtrack, spicy_rt::BitOrder, spicy_rt::Direction, spicy_rt::Filters, spicy_rt::FindDirection, spicy_rt::Forward, spicy_rt::HiltiResumable, spicy_rt::MIMEType, spicy_rt::MissingData, spicy_rt::ParseError, spicy_rt::ParsedUnit, spicy_rt::Parser, spicy_rt::ParserPort, spicy_rt::Sink, spicy_rt::SinkState, spicy_rt::TypeInfo, spicy_rt::UnitAlreadyConnected, spicy_rt::UnitContext, spicy_rt::atEod, spicy_rt::backtrack, spicy_rt::confirm, spicy_rt::createContext, spicy_rt::expectBytesLiteral, spicy_rt::extractBytes, spicy_rt::filter_connect, spicy_rt::filter_disconnect, spicy_rt::filter_forward, spicy_rt::filter_forward_eod, spicy_rt::filter_init, spicy_rt::initializeParsedUnit, spicy_rt::printParserState, spicy_rt::registerParser, spicy_rt::reject, spicy_rt::setContext, spicy_rt::unit_find, spicy_rt::waitForEod, spicy_rt::waitForInput, spicy_rt::waitForInputOrEod, spicy_rt::waitForInputOrEod_2, spicy_rt::waitForInput_2 [debug/ast-declarations] - [type] spicy_rt::Parser -> spicy_rt::MIMEType, spicy_rt::ParserPort, spicy_rt::TypeInfo [debug/ast-declarations] - [function] spicy_rt::atEod -> spicy_rt::Filters [debug/ast-declarations] - [function] spicy_rt::createContext -> spicy_rt::TypeInfo, spicy_rt::UnitContext +[debug/ast-declarations] - [function] spicy_rt::expectBytesLiteral -> spicy_rt::Filters [debug/ast-declarations] - [function] spicy_rt::extractBytes -> spicy_rt::Filters [debug/ast-declarations] - [function] spicy_rt::initializeParsedUnit -> spicy_rt::ParsedUnit, spicy_rt::TypeInfo [debug/ast-declarations] - [function] spicy_rt::printParserState -> hilti::Exception, 
hilti::RecoverableFailure @@ -956,6 +963,12 @@ [debug/ast-declarations] - Parameter "eod_ok" (spicy_rt::eod_ok) [debug/ast-declarations] - Parameter "location" (spicy_rt::location_3) [debug/ast-declarations] - Parameter "filters" (spicy_rt::filters_7) +[debug/ast-declarations] - Function "expectBytesLiteral" (spicy_rt::expectBytesLiteral) +[debug/ast-declarations] - Parameter "data" (spicy_rt::data_11) +[debug/ast-declarations] - Parameter "cur" (spicy_rt::cur_10) +[debug/ast-declarations] - Parameter "literal" (spicy_rt::literal) +[debug/ast-declarations] - Parameter "location" (spicy_rt::location_4) +[debug/ast-declarations] - Parameter "filters" (spicy_rt::filters_8) [debug/ast-declarations] - Module "spicy" (spicy) [debug/ast-declarations] - Property "%skip-implementation" (spicy::%skip-implementation) [debug/ast-declarations] - Type "AddressFamily" (spicy::AddressFamily) @@ -1086,10 +1099,11 @@ [debug/ast-declarations] - [function] spicy::zlib_decompress -> spicy::ZlibStream [debug/ast-declarations] - [function] spicy::zlib_finish -> spicy::ZlibStream [debug/ast-declarations] - [function] spicy::zlib_init -> spicy::ZlibStream -[debug/ast-declarations] - [module] spicy_rt -> hilti::Exception, hilti::RecoverableFailure, spicy_rt::Backtrack, spicy_rt::BitOrder, spicy_rt::Direction, spicy_rt::Filters, spicy_rt::FindDirection, spicy_rt::Forward, spicy_rt::HiltiResumable, spicy_rt::MIMEType, spicy_rt::MissingData, spicy_rt::ParseError, spicy_rt::ParsedUnit, spicy_rt::Parser, spicy_rt::ParserPort, spicy_rt::Sink, spicy_rt::SinkState, spicy_rt::TypeInfo, spicy_rt::UnitAlreadyConnected, spicy_rt::UnitContext, spicy_rt::atEod, spicy_rt::backtrack, spicy_rt::confirm, spicy_rt::createContext, spicy_rt::extractBytes, spicy_rt::filter_connect, spicy_rt::filter_disconnect, spicy_rt::filter_forward, spicy_rt::filter_forward_eod, spicy_rt::filter_init, spicy_rt::initializeParsedUnit, spicy_rt::printParserState, spicy_rt::registerParser, spicy_rt::reject, spicy_rt::setContext, spicy_rt::unit_find, spicy_rt::waitForEod, spicy_rt::waitForInput, spicy_rt::waitForInputOrEod, spicy_rt::waitForInputOrEod_2, spicy_rt::waitForInput_2 +[debug/ast-declarations] - [module] spicy_rt -> hilti::Exception, hilti::RecoverableFailure, spicy_rt::Backtrack, spicy_rt::BitOrder, spicy_rt::Direction, spicy_rt::Filters, spicy_rt::FindDirection, spicy_rt::Forward, spicy_rt::HiltiResumable, spicy_rt::MIMEType, spicy_rt::MissingData, spicy_rt::ParseError, spicy_rt::ParsedUnit, spicy_rt::Parser, spicy_rt::ParserPort, spicy_rt::Sink, spicy_rt::SinkState, spicy_rt::TypeInfo, spicy_rt::UnitAlreadyConnected, spicy_rt::UnitContext, spicy_rt::atEod, spicy_rt::backtrack, spicy_rt::confirm, spicy_rt::createContext, spicy_rt::expectBytesLiteral, spicy_rt::extractBytes, spicy_rt::filter_connect, spicy_rt::filter_disconnect, spicy_rt::filter_forward, spicy_rt::filter_forward_eod, spicy_rt::filter_init, spicy_rt::initializeParsedUnit, spicy_rt::printParserState, spicy_rt::registerParser, spicy_rt::reject, spicy_rt::setContext, spicy_rt::unit_find, spicy_rt::waitForEod, spicy_rt::waitForInput, spicy_rt::waitForInputOrEod, spicy_rt::waitForInputOrEod_2, spicy_rt::waitForInput_2 [debug/ast-declarations] - [type] spicy_rt::Parser -> spicy_rt::MIMEType, spicy_rt::ParserPort, spicy_rt::TypeInfo [debug/ast-declarations] - [function] spicy_rt::atEod -> spicy_rt::Filters [debug/ast-declarations] - [function] spicy_rt::createContext -> spicy_rt::TypeInfo, spicy_rt::UnitContext +[debug/ast-declarations] - [function] spicy_rt::expectBytesLiteral -> 
spicy_rt::Filters [debug/ast-declarations] - [function] spicy_rt::extractBytes -> spicy_rt::Filters [debug/ast-declarations] - [function] spicy_rt::initializeParsedUnit -> spicy_rt::ParsedUnit, spicy_rt::TypeInfo [debug/ast-declarations] - [function] spicy_rt::printParserState -> hilti::Exception, hilti::RecoverableFailure diff --git a/tests/Baseline/spicy.types.unit.finally/output b/tests/Baseline/spicy.types.unit.finally/output index f23d9ee6ed..12bf32c0b6 100644 --- a/tests/Baseline/spicy.types.unit.finally/output +++ b/tests/Baseline/spicy.types.unit.finally/output @@ -16,9 +16,9 @@ Foo error, [$y=(not set)] Foo finally, [$y=(not set)] Bar error, [$x=b"abc", $y=[$y=(not set)]] Bar finally, [$x=b"abc", $y=[$y=(not set)]] -[error] terminating with uncaught exception of type spicy::rt::ParseError: expecting 'def' (<...>/finally.spicy:28:8-28:13) +[error] terminating with uncaught exception of type spicy::rt::ParseError: expected bytes literal "def" but input starts with "XYZ" (<...>/finally.spicy:28:8-28:13) Bar init, [$x=(not set), $y=(not set)] Bar error, [$x=(not set), $y=(not set)] Bar finally, [$x=(not set), $y=(not set)] -[error] terminating with uncaught exception of type spicy::rt::ParseError: expecting 'abc' (<...>/finally.spicy:18:8-18:13) +[error] terminating with uncaught exception of type spicy::rt::ParseError: expected bytes literal "abc" but input starts with "XYZ" (<...>/finally.spicy:18:8-18:13) diff --git a/tests/Baseline/spicy.types.unit.hooks-across-imports/.stderr b/tests/Baseline/spicy.types.unit.hooks-across-imports/.stderr index f482ffe76b..18f4f68c73 100644 --- a/tests/Baseline/spicy.types.unit.hooks-across-imports/.stderr +++ b/tests/Baseline/spicy.types.unit.hooks-across-imports/.stderr @@ -1,70 +1,70 @@ ### BTest baseline data generated by btest-diff. Do not edit. Use "btest -U/-u" to update. Requires BTest >= 0.63. 
-[debug/ast-stats] garbage collected 1085 nodes in 12 rounds, 8108 left retained -[debug/ast-stats] garbage collected 479 nodes in 11 rounds, 9309 left retained -[debug/ast-stats] garbage collected 170 nodes in 6 rounds, 9712 left retained -[debug/ast-stats] garbage collected 78 nodes in 3 rounds, 9712 left retained -[debug/ast-stats] garbage collected 78 nodes in 3 rounds, 9712 left retained +[debug/ast-stats] garbage collected 1098 nodes in 12 rounds, 8159 left retained +[debug/ast-stats] garbage collected 479 nodes in 11 rounds, 9404 left retained +[debug/ast-stats] garbage collected 170 nodes in 6 rounds, 9807 left retained +[debug/ast-stats] garbage collected 78 nodes in 3 rounds, 9807 left retained +[debug/ast-stats] garbage collected 78 nodes in 3 rounds, 9807 left retained [debug/ast-stats] # [Spicy] AST statistics: [debug/ast-stats] - # AST rounds 5 [debug/ast-stats] - max tree depth: 16 [debug/ast-stats] - # context declarations: 67 [debug/ast-stats] - # context types: 47 [debug/ast-stats] - # context modules: 5 -[debug/ast-stats] - # nodes reachable in AST: 3150 -[debug/ast-stats] - # nodes live: 9712 -[debug/ast-stats] - # nodes retained: 9712 +[debug/ast-stats] - # nodes reachable in AST: 3201 +[debug/ast-stats] - # nodes live: 9807 +[debug/ast-stats] - # nodes retained: 9807 [debug/ast-stats] - # nodes live > 1%: -[debug/ast-stats] - Attribute: 169 -[debug/ast-stats] - AttributeSet: 335 -[debug/ast-stats] - QualifiedType: 3283 -[debug/ast-stats] - ctor::String: 119 -[debug/ast-stats] - declaration::Parameter: 133 -[debug/ast-stats] - expression::Ctor: 208 +[debug/ast-stats] - Attribute: 171 +[debug/ast-stats] - AttributeSet: 341 +[debug/ast-stats] - QualifiedType: 3318 +[debug/ast-stats] - ctor::String: 120 +[debug/ast-stats] - declaration::Parameter: 138 +[debug/ast-stats] - expression::Ctor: 209 [debug/ast-stats] - type::Bool: 157 -[debug/ast-stats] - type::Bytes: 149 -[debug/ast-stats] - type::Function: 132 +[debug/ast-stats] - type::Bytes: 151 +[debug/ast-stats] - type::Function: 134 [debug/ast-stats] - type::Member: 175 -[debug/ast-stats] - type::OperandList: 668 -[debug/ast-stats] - type::String: 212 -[debug/ast-stats] - type::Unknown: 156 -[debug/ast-stats] - type::UnsignedInteger: 527 -[debug/ast-stats] - type::Void: 114 -[debug/ast-stats] - type::bytes::Iterator: 177 -[debug/ast-stats] - type::operand_list::Operand: 1250 -[debug/ast-stats] - type::stream::Iterator: 137 -[debug/ast-stats] garbage collected 8966 nodes in 14 rounds, 21000 left retained -[debug/ast-stats] garbage collected 2310 nodes in 16 rounds, 24816 left retained -[debug/ast-stats] garbage collected 2346 nodes in 8 rounds, 26234 left retained -[debug/ast-stats] garbage collected 4594 nodes in 11 rounds, 27452 left retained -[debug/ast-stats] garbage collected 412 nodes in 3 rounds, 27452 left retained +[debug/ast-stats] - type::OperandList: 670 +[debug/ast-stats] - type::String: 215 +[debug/ast-stats] - type::Unknown: 157 +[debug/ast-stats] - type::UnsignedInteger: 533 +[debug/ast-stats] - type::Void: 116 +[debug/ast-stats] - type::bytes::Iterator: 179 +[debug/ast-stats] - type::operand_list::Operand: 1257 +[debug/ast-stats] - type::stream::Iterator: 141 +[debug/ast-stats] garbage collected 8966 nodes in 14 rounds, 21095 left retained +[debug/ast-stats] garbage collected 2310 nodes in 16 rounds, 24911 left retained +[debug/ast-stats] garbage collected 2346 nodes in 8 rounds, 26329 left retained +[debug/ast-stats] garbage collected 4594 nodes in 11 rounds, 27547 left retained +[debug/ast-stats] 
garbage collected 412 nodes in 3 rounds, 27547 left retained [debug/ast-stats] # [HILTI] AST statistics: [debug/ast-stats] - # AST rounds 5 [debug/ast-stats] - max tree depth: 27 [debug/ast-stats] - # context declarations: 254 [debug/ast-stats] - # context types: 55 [debug/ast-stats] - # context modules: 8 -[debug/ast-stats] - # nodes reachable in AST: 18536 -[debug/ast-stats] - # nodes live: 27452 -[debug/ast-stats] - # nodes retained: 27452 +[debug/ast-stats] - # nodes reachable in AST: 18587 +[debug/ast-stats] - # nodes live: 27547 +[debug/ast-stats] - # nodes retained: 27547 [debug/ast-stats] - # nodes live > 1%: -[debug/ast-stats] - Attribute: 299 -[debug/ast-stats] - AttributeSet: 759 -[debug/ast-stats] - QualifiedType: 9081 -[debug/ast-stats] - declaration::Parameter: 394 -[debug/ast-stats] - expression::Ctor: 769 +[debug/ast-stats] - Attribute: 301 +[debug/ast-stats] - AttributeSet: 765 +[debug/ast-stats] - QualifiedType: 9116 +[debug/ast-stats] - declaration::Parameter: 399 +[debug/ast-stats] - expression::Ctor: 770 [debug/ast-stats] - expression::Member: 314 [debug/ast-stats] - expression::Name: 992 [debug/ast-stats] - type::Bool: 338 [debug/ast-stats] - type::Member: 542 -[debug/ast-stats] - type::Name: 520 -[debug/ast-stats] - type::OperandList: 796 +[debug/ast-stats] - type::Name: 522 +[debug/ast-stats] - type::OperandList: 798 [debug/ast-stats] - type::Optional: 465 -[debug/ast-stats] - type::String: 438 +[debug/ast-stats] - type::String: 441 [debug/ast-stats] - type::Tuple: 324 -[debug/ast-stats] - type::UnsignedInteger: 1689 -[debug/ast-stats] - type::operand_list::Operand: 1550 -[debug/ast-stats] - type::stream::Iterator: 1012 -[debug/ast-stats] - type::stream::View: 546 +[debug/ast-stats] - type::UnsignedInteger: 1695 +[debug/ast-stats] - type::operand_list::Operand: 1557 +[debug/ast-stats] - type::stream::Iterator: 1016 +[debug/ast-stats] - type::stream::View: 550 [debug/ast-stats] - type::tuple::Element: 673 -[debug/ast-stats] garbage collected 32096 nodes in 18 rounds, 0 left retained +[debug/ast-stats] garbage collected 32191 nodes in 18 rounds, 0 left retained [debug/ast-stats] garbage collected 0 nodes in 1 round, 0 left retained diff --git a/tests/Baseline/spicy.types.unit.sub-unit/.stderr b/tests/Baseline/spicy.types.unit.sub-unit/.stderr index 95ef6fefee..064693d479 100644 --- a/tests/Baseline/spicy.types.unit.sub-unit/.stderr +++ b/tests/Baseline/spicy.types.unit.sub-unit/.stderr @@ -1,70 +1,70 @@ ### BTest baseline data generated by btest-diff. Do not edit. Use "btest -U/-u" to update. Requires BTest >= 0.63. 
-[debug/ast-stats] garbage collected 1073 nodes in 12 rounds, 7852 left retained -[debug/ast-stats] garbage collected 282 nodes in 11 rounds, 8962 left retained -[debug/ast-stats] garbage collected 70 nodes in 2 rounds, 9318 left retained -[debug/ast-stats] garbage collected 70 nodes in 2 rounds, 9318 left retained +[debug/ast-stats] garbage collected 1086 nodes in 12 rounds, 7903 left retained +[debug/ast-stats] garbage collected 282 nodes in 11 rounds, 9057 left retained +[debug/ast-stats] garbage collected 70 nodes in 2 rounds, 9413 left retained +[debug/ast-stats] garbage collected 70 nodes in 2 rounds, 9413 left retained [debug/ast-stats] # [Spicy] AST statistics: [debug/ast-stats] - # AST rounds 4 [debug/ast-stats] - max tree depth: 16 [debug/ast-stats] - # context declarations: 60 [debug/ast-stats] - # context types: 43 [debug/ast-stats] - # context modules: 4 -[debug/ast-stats] - # nodes reachable in AST: 2754 -[debug/ast-stats] - # nodes live: 9318 -[debug/ast-stats] - # nodes retained: 9318 +[debug/ast-stats] - # nodes reachable in AST: 2805 +[debug/ast-stats] - # nodes live: 9413 +[debug/ast-stats] - # nodes retained: 9413 [debug/ast-stats] - # nodes live > 1%: -[debug/ast-stats] - Attribute: 164 -[debug/ast-stats] - AttributeSet: 311 -[debug/ast-stats] - QualifiedType: 3153 -[debug/ast-stats] - ctor::String: 111 -[debug/ast-stats] - declaration::Parameter: 133 -[debug/ast-stats] - expression::Ctor: 195 +[debug/ast-stats] - Attribute: 166 +[debug/ast-stats] - AttributeSet: 317 +[debug/ast-stats] - QualifiedType: 3188 +[debug/ast-stats] - ctor::String: 112 +[debug/ast-stats] - declaration::Parameter: 138 +[debug/ast-stats] - expression::Ctor: 196 [debug/ast-stats] - type::Bool: 157 -[debug/ast-stats] - type::Bytes: 118 -[debug/ast-stats] - type::Function: 125 +[debug/ast-stats] - type::Bytes: 120 +[debug/ast-stats] - type::Function: 127 [debug/ast-stats] - type::Member: 167 -[debug/ast-stats] - type::OperandList: 668 -[debug/ast-stats] - type::String: 204 -[debug/ast-stats] - type::Unknown: 156 -[debug/ast-stats] - type::UnsignedInteger: 502 -[debug/ast-stats] - type::Void: 107 -[debug/ast-stats] - type::bytes::Iterator: 146 -[debug/ast-stats] - type::operand_list::Operand: 1250 -[debug/ast-stats] - type::stream::Iterator: 137 -[debug/ast-stats] garbage collected 5080 nodes in 12 rounds, 17693 left retained -[debug/ast-stats] garbage collected 2884 nodes in 16 rounds, 20519 left retained -[debug/ast-stats] garbage collected 1582 nodes in 8 rounds, 21416 left retained -[debug/ast-stats] garbage collected 3397 nodes in 11 rounds, 22209 left retained -[debug/ast-stats] garbage collected 286 nodes in 3 rounds, 22209 left retained +[debug/ast-stats] - type::OperandList: 670 +[debug/ast-stats] - type::String: 207 +[debug/ast-stats] - type::Unknown: 157 +[debug/ast-stats] - type::UnsignedInteger: 508 +[debug/ast-stats] - type::Void: 109 +[debug/ast-stats] - type::bytes::Iterator: 148 +[debug/ast-stats] - type::operand_list::Operand: 1257 +[debug/ast-stats] - type::stream::Iterator: 141 +[debug/ast-stats] garbage collected 5080 nodes in 12 rounds, 17788 left retained +[debug/ast-stats] garbage collected 2884 nodes in 16 rounds, 20614 left retained +[debug/ast-stats] garbage collected 1582 nodes in 8 rounds, 21511 left retained +[debug/ast-stats] garbage collected 3397 nodes in 11 rounds, 22304 left retained +[debug/ast-stats] garbage collected 286 nodes in 3 rounds, 22304 left retained [debug/ast-stats] # [HILTI] AST statistics: [debug/ast-stats] - # AST rounds 5 [debug/ast-stats] - max 
tree depth: 27 [debug/ast-stats] - # context declarations: 189 [debug/ast-stats] - # context types: 49 [debug/ast-stats] - # context modules: 6 -[debug/ast-stats] - # nodes reachable in AST: 14203 -[debug/ast-stats] - # nodes live: 22209 -[debug/ast-stats] - # nodes retained: 22209 +[debug/ast-stats] - # nodes reachable in AST: 14254 +[debug/ast-stats] - # nodes live: 22304 +[debug/ast-stats] - # nodes retained: 22304 [debug/ast-stats] - # nodes live > 1%: -[debug/ast-stats] - Attribute: 249 -[debug/ast-stats] - AttributeSet: 587 -[debug/ast-stats] - QualifiedType: 7312 -[debug/ast-stats] - ctor::String: 289 -[debug/ast-stats] - declaration::Parameter: 302 -[debug/ast-stats] - expression::Ctor: 694 +[debug/ast-stats] - Attribute: 251 +[debug/ast-stats] - AttributeSet: 593 +[debug/ast-stats] - QualifiedType: 7347 +[debug/ast-stats] - ctor::String: 290 +[debug/ast-stats] - declaration::Parameter: 307 +[debug/ast-stats] - expression::Ctor: 695 [debug/ast-stats] - expression::Name: 736 [debug/ast-stats] - type::Bool: 279 [debug/ast-stats] - type::Member: 394 -[debug/ast-stats] - type::Name: 380 -[debug/ast-stats] - type::OperandList: 750 +[debug/ast-stats] - type::Name: 382 +[debug/ast-stats] - type::OperandList: 752 [debug/ast-stats] - type::Optional: 339 -[debug/ast-stats] - type::String: 533 +[debug/ast-stats] - type::String: 536 [debug/ast-stats] - type::Tuple: 269 -[debug/ast-stats] - type::UnsignedInteger: 1284 -[debug/ast-stats] - type::Void: 251 -[debug/ast-stats] - type::operand_list::Operand: 1444 -[debug/ast-stats] - type::stream::Iterator: 759 -[debug/ast-stats] - type::stream::View: 413 +[debug/ast-stats] - type::UnsignedInteger: 1290 +[debug/ast-stats] - type::Void: 253 +[debug/ast-stats] - type::operand_list::Operand: 1451 +[debug/ast-stats] - type::stream::Iterator: 763 +[debug/ast-stats] - type::stream::View: 417 [debug/ast-stats] - type::tuple::Element: 600 -[debug/ast-stats] garbage collected 23535 nodes in 18 rounds, 0 left retained +[debug/ast-stats] garbage collected 23630 nodes in 18 rounds, 0 left retained [debug/ast-stats] garbage collected 0 nodes in 1 round, 0 left retained diff --git a/tests/Baseline/spicy.types.unit.synchronize-confirm-elsewhere/output b/tests/Baseline/spicy.types.unit.synchronize-confirm-elsewhere/output index 105c4a45e8..568ff97286 100644 --- a/tests/Baseline/spicy.types.unit.synchronize-confirm-elsewhere/output +++ b/tests/Baseline/spicy.types.unit.synchronize-confirm-elsewhere/output @@ -25,4 +25,4 @@ test::Z { c: C } } -[fatal error] terminating with uncaught exception of type spicy::rt::ParseError: successful synchronization never confirmed: expecting 'A' (<...>/synchronize-confirm-elsewhere.spicy:28:8-28:11) +[fatal error] terminating with uncaught exception of type spicy::rt::ParseError: successful synchronization never confirmed: expected bytes literal "A" but input starts with "1" (<...>/synchronize-confirm-elsewhere.spicy:28:8-28:11) diff --git a/tests/Baseline/spicy.types.unit.synchronize-literals/output b/tests/Baseline/spicy.types.unit.synchronize-literals/output index 81de81a1ad..cd70030711 100644 --- a/tests/Baseline/spicy.types.unit.synchronize-literals/output +++ b/tests/Baseline/spicy.types.unit.synchronize-literals/output @@ -5,5 +5,5 @@ [$a=b"Axy", $b=6, $c=(not set), $d=b"Dxy", $e=(1)] [$a=b"Axy", $b=6, $c=(not set), $d=b"Dxy", $e=(1)] [$a=b"Axy", $b=6, $c=b"Cxy", $d=(not set), $e=(1)] -[error] terminating with uncaught exception of type spicy::rt::ParseError: failed to synchronize: expecting 'Cxy' 
(<...>/synchronize-literals.spicy:24:8-24:13) +[error] terminating with uncaught exception of type spicy::rt::ParseError: failed to synchronize: expected bytes literal "Cxy" but input starts with "CxD" (<...>/synchronize-literals.spicy:24:8-24:13) [error] terminating with uncaught exception of type spicy::rt::ParseError: failed to synchronize: failed to match regular expression (<...>/synchronize-literals.spicy:25:8-25:12) diff --git a/tests/Baseline/spicy.types.unit.synchronize-on-gap/gap_between_matches.log b/tests/Baseline/spicy.types.unit.synchronize-on-gap/gap_between_matches.log index a168e6f546..2009fdcbc1 100644 --- a/tests/Baseline/spicy.types.unit.synchronize-on-gap/gap_between_matches.log +++ b/tests/Baseline/spicy.types.unit.synchronize-on-gap/gap_between_matches.log @@ -8,7 +8,6 @@ [spicy-verbose] - parsing production: Ctor: _anon -> /(A|B|C)/ (regexp) (container 'xs') [spicy-verbose] - consuming look-ahead token [spicy-verbose] - trimming input -[spicy-verbose] - trimming input [spicy-verbose] - got container item [spicy-verbose] suspending to wait for more input for stream 0xXXXXXXXX, currently have 0 [spicy-verbose] resuming after insufficient input, now have 1024 for stream 0xXXXXXXXX @@ -26,14 +25,12 @@ [spicy-verbose] - parsing production: Ctor: _anon -> /(A|B|C)/ (regexp) (container 'xs') [spicy-verbose] - consuming look-ahead token [spicy-verbose] - trimming input -[spicy-verbose] - trimming input [spicy-verbose] - got container item [spicy-verbose] - state: type=sync::X1 input="C" stream=0xXXXXXXXX offsets=1027/0/1027/1028/1028 chunks=1 frozen=no mode=default trim=yes lah=1 lah_token="C" recovering=no [spicy-verbose] - state: type=sync::X1 input="C" stream=0xXXXXXXXX offsets=1027/0/1027/1028/1028 chunks=1 frozen=no mode=default trim=yes lah=1 lah_token="C" recovering=no [spicy-verbose] - parsing production: Ctor: _anon -> /(A|B|C)/ (regexp) (container 'xs') [spicy-verbose] - consuming look-ahead token [spicy-verbose] - trimming input -[spicy-verbose] - trimming input [spicy-verbose] - got container item [spicy-verbose] suspending to wait for more input for stream 0xXXXXXXXX, currently have 0 [spicy-verbose] resuming after insufficient input, now have 0 for stream 0xXXXXXXXX diff --git a/tests/Baseline/spicy.types.unit.synchronize-on-gap/gap_while_matching.log b/tests/Baseline/spicy.types.unit.synchronize-on-gap/gap_while_matching.log index 351188225d..016168840d 100644 --- a/tests/Baseline/spicy.types.unit.synchronize-on-gap/gap_while_matching.log +++ b/tests/Baseline/spicy.types.unit.synchronize-on-gap/gap_while_matching.log @@ -16,7 +16,6 @@ [spicy-verbose] - parsing production: Ctor: _anon_2 -> /AB/ (regexp) (container 'xs') [spicy-verbose] - consuming look-ahead token [spicy-verbose] - trimming input -[spicy-verbose] - trimming input [spicy-verbose] - got container item [spicy-verbose] suspending to wait for more input for stream 0xXXXXXXXX, currently have 0 [spicy-verbose] resuming after insufficient input, now have 2 for stream 0xXXXXXXXX @@ -25,7 +24,6 @@ [spicy-verbose] - parsing production: Ctor: _anon_2 -> /AB/ (regexp) (container 'xs') [spicy-verbose] - consuming look-ahead token [spicy-verbose] - trimming input -[spicy-verbose] - trimming input [spicy-verbose] - got container item [spicy-verbose] suspending to wait for more input for stream 0xXXXXXXXX, currently have 0 [spicy-verbose] resuming after insufficient input, now have 0 for stream 0xXXXXXXXX diff --git a/tests/Baseline/spicy.types.unit.synchronize/output 
b/tests/Baseline/spicy.types.unit.synchronize/output index 02016c90cf..bf31f7cc2e 100644 --- a/tests/Baseline/spicy.types.unit.synchronize/output +++ b/tests/Baseline/spicy.types.unit.synchronize/output @@ -3,32 +3,27 @@ [spicy-verbose] - parsing production: Unit: test__A -> a b c d e [spicy-verbose] - state: type=test::A input="A" stream=0xXXXXXXXX offsets=0/0/0/1/1 chunks=1 frozen=no mode=default trim=yes lah=n/a lah_token="n/a" recovering=no [spicy-verbose] - parsing production: Ctor: a -> /A/ (regexp) -[spicy-verbose] - trimming input [spicy-verbose] - setting field 'a' to 'A' -[spicy-verbose] - state: type=test::A input="" stream=0xXXXXXXXX offsets=1/0/1/1/1 chunks=0 frozen=no mode=default trim=yes lah=n/a lah_token="n/a" recovering=no +[spicy-verbose] - state: type=test::A input="" stream=0xXXXXXXXX offsets=0/0/1/1/1 chunks=1 frozen=no mode=default trim=yes lah=n/a lah_token="n/a" recovering=no [spicy-verbose] - parsing production: Ctor: b -> b"B" (const bytes) [spicy-verbose] suspending to wait for more input for stream 0xXXXXXXXX, currently have 0 [spicy-verbose] resuming after insufficient input, now have 1 for stream 0xXXXXXXXX [spicy-verbose] - trimming input -[spicy-verbose] - trimming input [spicy-verbose] - setting field 'b' to 'B' [spicy-verbose] - state: type=test::A input="" stream=0xXXXXXXXX offsets=2/0/2/2/2 chunks=0 frozen=no mode=default trim=yes lah=n/a lah_token="n/a" recovering=no [spicy-verbose] - parsing production: Ctor: c -> /C/ (regexp) [spicy-verbose] suspending to wait for more input for stream 0xXXXXXXXX, currently have 0 [spicy-verbose] resuming after insufficient input, now have 1 for stream 0xXXXXXXXX -[spicy-verbose] - trimming input [spicy-verbose] - setting field 'c' to 'C' -[spicy-verbose] - state: type=test::A input="" stream=0xXXXXXXXX offsets=3/0/3/3/3 chunks=0 frozen=no mode=default trim=yes lah=n/a lah_token="n/a" recovering=no +[spicy-verbose] - state: type=test::A input="" stream=0xXXXXXXXX offsets=2/0/3/3/3 chunks=1 frozen=no mode=default trim=yes lah=n/a lah_token="n/a" recovering=no [spicy-verbose] - parsing production: Ctor: d -> /D/ (regexp) [spicy-verbose] suspending to wait for more input for stream 0xXXXXXXXX, currently have 0 [spicy-verbose] resuming after insufficient input, now have 1 for stream 0xXXXXXXXX -[spicy-verbose] - trimming input [spicy-verbose] - setting field 'd' to 'D' -[spicy-verbose] - state: type=test::A input="" stream=0xXXXXXXXX offsets=4/0/4/4/4 chunks=0 frozen=no mode=default trim=yes lah=n/a lah_token="n/a" recovering=no +[spicy-verbose] - state: type=test::A input="" stream=0xXXXXXXXX offsets=2/0/4/4/4 chunks=2 frozen=no mode=default trim=yes lah=n/a lah_token="n/a" recovering=no [spicy-verbose] - parsing production: Ctor: e -> /E/ (regexp) [spicy-verbose] suspending to wait for more input for stream 0xXXXXXXXX, currently have 0 [spicy-verbose] resuming after insufficient input, now have 1 for stream 0xXXXXXXXX -[spicy-verbose] - trimming input [spicy-verbose] - setting field 'e' to 'E' Done: [$a=b"A", $b=b"B", $c=b"C", $d=b"D", $e=b"E"] [spicy-verbose] - state: type=test::A input="1" stream=0xXXXXXXXX offsets=0/0/0/1/1 chunks=1 frozen=no mode=default trim=yes lah=n/a lah_token="n/a" recovering=no @@ -53,67 +48,60 @@ Done: [$a=b"A", $b=b"B", $c=b"C", $d=b"D", $e=b"E"] [spicy-verbose] - trimming input [spicy-verbose] suspending to wait for more input for stream 0xXXXXXXXX, currently have 0 [spicy-verbose] resuming after insufficient input, now have 1 for stream 0xXXXXXXXX -[spicy-verbose] - state: type=test::A 
input="B" stream=0xXXXXXXXX offsets=4/0/4/5/5 chunks=1 frozen=no mode=default trim=yes lah=2 lah_token="B" recovering=yes +[spicy-verbose] - state: type=test::A input="B" stream=0xXXXXXXXX offsets=4/0/4/5/5 chunks=1 frozen=no mode=default trim=yes lah=1 lah_token="B" recovering=yes [spicy-verbose] successfully synchronized Synced: [$a=(not set), $b=(not set), $c=(not set), $d=(not set), $e=(not set)] Confirmed: [$a=(not set), $b=(not set), $c=(not set), $d=(not set), $e=(not set)] -[spicy-verbose] - state: type=test::A input="B" stream=0xXXXXXXXX offsets=4/0/4/5/5 chunks=1 frozen=no mode=default trim=yes lah=2 lah_token="B" recovering=no +[spicy-verbose] - state: type=test::A input="B" stream=0xXXXXXXXX offsets=4/0/4/5/5 chunks=1 frozen=no mode=default trim=yes lah=1 lah_token="B" recovering=no [spicy-verbose] - parsing production: Ctor: b -> b"B" (const bytes) [spicy-verbose] - consuming look-ahead token [spicy-verbose] - trimming input -[spicy-verbose] - trimming input [spicy-verbose] - setting field 'b' to 'B' [spicy-verbose] - state: type=test::A input="" stream=0xXXXXXXXX offsets=5/0/5/5/5 chunks=0 frozen=no mode=default trim=yes lah=n/a lah_token="n/a" recovering=no [spicy-verbose] - parsing production: Ctor: c -> /C/ (regexp) [spicy-verbose] suspending to wait for more input for stream 0xXXXXXXXX, currently have 0 [spicy-verbose] resuming after insufficient input, now have 1 for stream 0xXXXXXXXX -[spicy-verbose] - trimming input [spicy-verbose] - setting field 'c' to 'C' -[spicy-verbose] - state: type=test::A input="" stream=0xXXXXXXXX offsets=6/0/6/6/6 chunks=0 frozen=no mode=default trim=yes lah=n/a lah_token="n/a" recovering=no +[spicy-verbose] - state: type=test::A input="" stream=0xXXXXXXXX offsets=5/0/6/6/6 chunks=1 frozen=no mode=default trim=yes lah=n/a lah_token="n/a" recovering=no [spicy-verbose] - parsing production: Ctor: d -> /D/ (regexp) [spicy-verbose] suspending to wait for more input for stream 0xXXXXXXXX, currently have 0 [spicy-verbose] resuming after insufficient input, now have 1 for stream 0xXXXXXXXX -[spicy-verbose] - trimming input [spicy-verbose] - setting field 'd' to 'D' -[spicy-verbose] - state: type=test::A input="" stream=0xXXXXXXXX offsets=7/0/7/7/7 chunks=0 frozen=no mode=default trim=yes lah=n/a lah_token="n/a" recovering=no +[spicy-verbose] - state: type=test::A input="" stream=0xXXXXXXXX offsets=5/0/7/7/7 chunks=2 frozen=no mode=default trim=yes lah=n/a lah_token="n/a" recovering=no [spicy-verbose] - parsing production: Ctor: e -> /E/ (regexp) [spicy-verbose] suspending to wait for more input for stream 0xXXXXXXXX, currently have 0 [spicy-verbose] resuming after insufficient input, now have 1 for stream 0xXXXXXXXX -[spicy-verbose] - trimming input [spicy-verbose] - setting field 'e' to 'E' Done: [$a=(not set), $b=b"B", $c=b"C", $d=b"D", $e=b"E"] [spicy-verbose] - state: type=test::A input="A" stream=0xXXXXXXXX offsets=0/0/0/1/1 chunks=1 frozen=no mode=default trim=yes lah=n/a lah_token="n/a" recovering=no [spicy-verbose] - parsing production: Unit: test__A -> a b c d e [spicy-verbose] - state: type=test::A input="A" stream=0xXXXXXXXX offsets=0/0/0/1/1 chunks=1 frozen=no mode=default trim=yes lah=n/a lah_token="n/a" recovering=no [spicy-verbose] - parsing production: Ctor: a -> /A/ (regexp) -[spicy-verbose] - trimming input [spicy-verbose] - setting field 'a' to 'A' -[spicy-verbose] - state: type=test::A input="" stream=0xXXXXXXXX offsets=1/0/1/1/1 chunks=0 frozen=no mode=default trim=yes lah=n/a lah_token="n/a" recovering=no +[spicy-verbose] - 
state: type=test::A input="" stream=0xXXXXXXXX offsets=0/0/1/1/1 chunks=1 frozen=no mode=default trim=yes lah=n/a lah_token="n/a" recovering=no [spicy-verbose] - parsing production: Ctor: b -> b"B" (const bytes) [spicy-verbose] suspending to wait for more input for stream 0xXXXXXXXX, currently have 0 [spicy-verbose] resuming after insufficient input, now have 1 for stream 0xXXXXXXXX [spicy-verbose] failed to parse, will try to synchronize at 'd' -[spicy-verbose] - state: type=test::A input="C" stream=0xXXXXXXXX offsets=1/0/1/2/2 chunks=1 frozen=no mode=default trim=yes lah=n/a lah_token="n/a" recovering=yes -[spicy-verbose] - state: type=test::A input="C" stream=0xXXXXXXXX offsets=1/0/1/2/2 chunks=1 frozen=no mode=default trim=yes lah=n/a lah_token="n/a" recovering=yes -[spicy-verbose] - state: type=test::A input="C" stream=0xXXXXXXXX offsets=1/0/1/2/2 chunks=1 frozen=no mode=default trim=yes lah=n/a lah_token="n/a" recovering=yes +[spicy-verbose] - state: type=test::A input="C" stream=0xXXXXXXXX offsets=0/0/1/2/2 chunks=2 frozen=no mode=default trim=yes lah=n/a lah_token="n/a" recovering=yes +[spicy-verbose] - state: type=test::A input="C" stream=0xXXXXXXXX offsets=0/0/1/2/2 chunks=2 frozen=no mode=default trim=yes lah=n/a lah_token="n/a" recovering=yes +[spicy-verbose] - state: type=test::A input="C" stream=0xXXXXXXXX offsets=0/0/1/2/2 chunks=2 frozen=no mode=default trim=yes lah=n/a lah_token="n/a" recovering=yes [spicy-verbose] - trimming input [spicy-verbose] suspending to wait for more input for stream 0xXXXXXXXX, currently have 0 [spicy-verbose] resuming after insufficient input, now have 1 for stream 0xXXXXXXXX -[spicy-verbose] - state: type=test::A input="D" stream=0xXXXXXXXX offsets=2/0/2/3/3 chunks=1 frozen=no mode=default trim=yes lah=4 lah_token="D" recovering=yes +[spicy-verbose] - state: type=test::A input="D" stream=0xXXXXXXXX offsets=2/0/2/3/3 chunks=1 frozen=no mode=default trim=yes lah=2 lah_token="D" recovering=yes [spicy-verbose] successfully synchronized Synced: [$a=b"A", $b=(not set), $c=(not set), $d=(not set), $e=(not set)] Confirmed: [$a=b"A", $b=(not set), $c=(not set), $d=(not set), $e=(not set)] -[spicy-verbose] - state: type=test::A input="D" stream=0xXXXXXXXX offsets=2/0/2/3/3 chunks=1 frozen=no mode=default trim=yes lah=4 lah_token="D" recovering=no +[spicy-verbose] - state: type=test::A input="D" stream=0xXXXXXXXX offsets=2/0/2/3/3 chunks=1 frozen=no mode=default trim=yes lah=2 lah_token="D" recovering=no [spicy-verbose] - parsing production: Ctor: d -> /D/ (regexp) [spicy-verbose] - consuming look-ahead token [spicy-verbose] - trimming input -[spicy-verbose] - trimming input [spicy-verbose] - setting field 'd' to 'D' [spicy-verbose] - state: type=test::A input="" stream=0xXXXXXXXX offsets=3/0/3/3/3 chunks=0 frozen=no mode=default trim=yes lah=n/a lah_token="n/a" recovering=no [spicy-verbose] - parsing production: Ctor: e -> /E/ (regexp) [spicy-verbose] suspending to wait for more input for stream 0xXXXXXXXX, currently have 0 [spicy-verbose] resuming after insufficient input, now have 1 for stream 0xXXXXXXXX -[spicy-verbose] - trimming input [spicy-verbose] - setting field 'e' to 'E' Done: [$a=b"A", $b=(not set), $c=(not set), $d=b"D", $e=b"E"] [spicy-verbose] - state: type=test::A input="B" stream=0xXXXXXXXX offsets=0/0/0/1/1 chunks=1 frozen=no mode=default trim=yes lah=n/a lah_token="n/a" recovering=no @@ -122,96 +110,87 @@ Done: [$a=b"A", $b=(not set), $c=(not set), $d=b"D", $e=b"E"] [spicy-verbose] - parsing production: Ctor: a -> /A/ (regexp) 
[spicy-verbose] failed to parse, will try to synchronize at 'b' [spicy-verbose] - state: type=test::A input="B" stream=0xXXXXXXXX offsets=0/0/0/1/1 chunks=1 frozen=no mode=default trim=yes lah=n/a lah_token="n/a" recovering=yes -[spicy-verbose] - state: type=test::A input="B" stream=0xXXXXXXXX offsets=0/0/0/1/1 chunks=1 frozen=no mode=default trim=yes lah=2 lah_token="B" recovering=yes +[spicy-verbose] - state: type=test::A input="B" stream=0xXXXXXXXX offsets=0/0/0/1/1 chunks=1 frozen=no mode=default trim=yes lah=1 lah_token="B" recovering=yes [spicy-verbose] successfully synchronized Synced: [$a=(not set), $b=(not set), $c=(not set), $d=(not set), $e=(not set)] Confirmed: [$a=(not set), $b=(not set), $c=(not set), $d=(not set), $e=(not set)] -[spicy-verbose] - state: type=test::A input="B" stream=0xXXXXXXXX offsets=0/0/0/1/1 chunks=1 frozen=no mode=default trim=yes lah=2 lah_token="B" recovering=no +[spicy-verbose] - state: type=test::A input="B" stream=0xXXXXXXXX offsets=0/0/0/1/1 chunks=1 frozen=no mode=default trim=yes lah=1 lah_token="B" recovering=no [spicy-verbose] - parsing production: Ctor: b -> b"B" (const bytes) [spicy-verbose] - consuming look-ahead token [spicy-verbose] - trimming input -[spicy-verbose] - trimming input [spicy-verbose] - setting field 'b' to 'B' [spicy-verbose] - state: type=test::A input="" stream=0xXXXXXXXX offsets=1/0/1/1/1 chunks=0 frozen=no mode=default trim=yes lah=n/a lah_token="n/a" recovering=no [spicy-verbose] - parsing production: Ctor: c -> /C/ (regexp) [spicy-verbose] suspending to wait for more input for stream 0xXXXXXXXX, currently have 0 [spicy-verbose] resuming after insufficient input, now have 1 for stream 0xXXXXXXXX -[spicy-verbose] - trimming input [spicy-verbose] - setting field 'c' to 'C' -[spicy-verbose] - state: type=test::A input="" stream=0xXXXXXXXX offsets=2/0/2/2/2 chunks=0 frozen=no mode=default trim=yes lah=n/a lah_token="n/a" recovering=no +[spicy-verbose] - state: type=test::A input="" stream=0xXXXXXXXX offsets=1/0/2/2/2 chunks=1 frozen=no mode=default trim=yes lah=n/a lah_token="n/a" recovering=no [spicy-verbose] - parsing production: Ctor: d -> /D/ (regexp) [spicy-verbose] suspending to wait for more input for stream 0xXXXXXXXX, currently have 0 [spicy-verbose] resuming after insufficient input, now have 1 for stream 0xXXXXXXXX -[spicy-verbose] - trimming input [spicy-verbose] - setting field 'd' to 'D' -[spicy-verbose] - state: type=test::A input="" stream=0xXXXXXXXX offsets=3/0/3/3/3 chunks=0 frozen=no mode=default trim=yes lah=n/a lah_token="n/a" recovering=no +[spicy-verbose] - state: type=test::A input="" stream=0xXXXXXXXX offsets=1/0/3/3/3 chunks=2 frozen=no mode=default trim=yes lah=n/a lah_token="n/a" recovering=no [spicy-verbose] - parsing production: Ctor: e -> /E/ (regexp) [spicy-verbose] suspending to wait for more input for stream 0xXXXXXXXX, currently have 0 [spicy-verbose] resuming after insufficient input, now have 1 for stream 0xXXXXXXXX -[spicy-verbose] - trimming input [spicy-verbose] - setting field 'e' to 'E' Done: [$a=(not set), $b=b"B", $c=b"C", $d=b"D", $e=b"E"] [spicy-verbose] - state: type=test::A input="A" stream=0xXXXXXXXX offsets=0/0/0/1/1 chunks=1 frozen=no mode=default trim=yes lah=n/a lah_token="n/a" recovering=no [spicy-verbose] - parsing production: Unit: test__A -> a b c d e [spicy-verbose] - state: type=test::A input="A" stream=0xXXXXXXXX offsets=0/0/0/1/1 chunks=1 frozen=no mode=default trim=yes lah=n/a lah_token="n/a" recovering=no [spicy-verbose] - parsing production: Ctor: a -> /A/ 
(regexp) -[spicy-verbose] - trimming input [spicy-verbose] - setting field 'a' to 'A' -[spicy-verbose] - state: type=test::A input="" stream=0xXXXXXXXX offsets=1/0/1/1/1 chunks=0 frozen=no mode=default trim=yes lah=n/a lah_token="n/a" recovering=no +[spicy-verbose] - state: type=test::A input="" stream=0xXXXXXXXX offsets=0/0/1/1/1 chunks=1 frozen=no mode=default trim=yes lah=n/a lah_token="n/a" recovering=no [spicy-verbose] - parsing production: Ctor: b -> b"B" (const bytes) [spicy-verbose] suspending to wait for more input for stream 0xXXXXXXXX, currently have 0 [spicy-verbose] resuming after insufficient input, now have 1 for stream 0xXXXXXXXX [spicy-verbose] failed to parse, will try to synchronize at 'd' -[spicy-verbose] - state: type=test::A input="D" stream=0xXXXXXXXX offsets=1/0/1/2/2 chunks=1 frozen=no mode=default trim=yes lah=n/a lah_token="n/a" recovering=yes -[spicy-verbose] - state: type=test::A input="D" stream=0xXXXXXXXX offsets=1/0/1/2/2 chunks=1 frozen=no mode=default trim=yes lah=4 lah_token="D" recovering=yes +[spicy-verbose] - state: type=test::A input="D" stream=0xXXXXXXXX offsets=0/0/1/2/2 chunks=2 frozen=no mode=default trim=yes lah=n/a lah_token="n/a" recovering=yes +[spicy-verbose] - state: type=test::A input="D" stream=0xXXXXXXXX offsets=0/0/1/2/2 chunks=2 frozen=no mode=default trim=yes lah=2 lah_token="D" recovering=yes [spicy-verbose] successfully synchronized Synced: [$a=b"A", $b=(not set), $c=(not set), $d=(not set), $e=(not set)] Confirmed: [$a=b"A", $b=(not set), $c=(not set), $d=(not set), $e=(not set)] -[spicy-verbose] - state: type=test::A input="D" stream=0xXXXXXXXX offsets=1/0/1/2/2 chunks=1 frozen=no mode=default trim=yes lah=4 lah_token="D" recovering=no +[spicy-verbose] - state: type=test::A input="D" stream=0xXXXXXXXX offsets=0/0/1/2/2 chunks=2 frozen=no mode=default trim=yes lah=2 lah_token="D" recovering=no [spicy-verbose] - parsing production: Ctor: d -> /D/ (regexp) [spicy-verbose] - consuming look-ahead token [spicy-verbose] - trimming input -[spicy-verbose] - trimming input [spicy-verbose] - setting field 'd' to 'D' [spicy-verbose] - state: type=test::A input="" stream=0xXXXXXXXX offsets=2/0/2/2/2 chunks=0 frozen=no mode=default trim=yes lah=n/a lah_token="n/a" recovering=no [spicy-verbose] - parsing production: Ctor: e -> /E/ (regexp) [spicy-verbose] suspending to wait for more input for stream 0xXXXXXXXX, currently have 0 [spicy-verbose] resuming after insufficient input, now have 1 for stream 0xXXXXXXXX -[spicy-verbose] - trimming input [spicy-verbose] - setting field 'e' to 'E' Done: [$a=b"A", $b=(not set), $c=(not set), $d=b"D", $e=b"E"] [spicy-verbose] - state: type=test::A input="A" stream=0xXXXXXXXX offsets=0/0/0/1/1 chunks=1 frozen=no mode=default trim=yes lah=n/a lah_token="n/a" recovering=no [spicy-verbose] - parsing production: Unit: test__A -> a b c d e [spicy-verbose] - state: type=test::A input="A" stream=0xXXXXXXXX offsets=0/0/0/1/1 chunks=1 frozen=no mode=default trim=yes lah=n/a lah_token="n/a" recovering=no [spicy-verbose] - parsing production: Ctor: a -> /A/ (regexp) -[spicy-verbose] - trimming input [spicy-verbose] - setting field 'a' to 'A' -[spicy-verbose] - state: type=test::A input="" stream=0xXXXXXXXX offsets=1/0/1/1/1 chunks=0 frozen=no mode=default trim=yes lah=n/a lah_token="n/a" recovering=no +[spicy-verbose] - state: type=test::A input="" stream=0xXXXXXXXX offsets=0/0/1/1/1 chunks=1 frozen=no mode=default trim=yes lah=n/a lah_token="n/a" recovering=no [spicy-verbose] - parsing production: Ctor: b -> b"B" (const 
bytes) [spicy-verbose] suspending to wait for more input for stream 0xXXXXXXXX, currently have 0 [spicy-verbose] resuming after insufficient input, now have 1 for stream 0xXXXXXXXX [spicy-verbose] failed to parse, will try to synchronize at 'd' -[spicy-verbose] - state: type=test::A input="E" stream=0xXXXXXXXX offsets=1/0/1/2/2 chunks=1 frozen=yes mode=default trim=yes lah=n/a lah_token="n/a" recovering=yes -[spicy-verbose] - state: type=test::A input="E" stream=0xXXXXXXXX offsets=1/0/1/2/2 chunks=1 frozen=yes mode=default trim=yes lah=n/a lah_token="n/a" recovering=yes -[spicy-verbose] - state: type=test::A input="E" stream=0xXXXXXXXX offsets=1/0/1/2/2 chunks=1 frozen=yes mode=default trim=yes lah=n/a lah_token="n/a" recovering=yes +[spicy-verbose] - state: type=test::A input="E" stream=0xXXXXXXXX offsets=0/0/1/2/2 chunks=2 frozen=yes mode=default trim=yes lah=n/a lah_token="n/a" recovering=yes +[spicy-verbose] - state: type=test::A input="E" stream=0xXXXXXXXX offsets=0/0/1/2/2 chunks=2 frozen=yes mode=default trim=yes lah=n/a lah_token="n/a" recovering=yes +[spicy-verbose] - state: type=test::A input="E" stream=0xXXXXXXXX offsets=0/0/1/2/2 chunks=2 frozen=yes mode=default trim=yes lah=n/a lah_token="n/a" recovering=yes [spicy-verbose] - trimming input [spicy-verbose] - state: type=test::A input="" stream=0xXXXXXXXX offsets=2/0/2/2/2 chunks=0 frozen=yes mode=default trim=yes lah=n/a lah_token="n/a" recovering=yes [spicy-verbose] - state: type=test::A input="" stream=0xXXXXXXXX offsets=2/0/2/2/2 chunks=0 frozen=yes mode=default trim=yes lah=n/a lah_token="n/a" recovering=yes Error: [$a=b"A", $b=(not set), $c=(not set), $d=(not set), $e=(not set)] -[error] terminating with uncaught exception of type spicy::rt::ParseError: failed to synchronize: expecting 'B' (<...>/synchronize.spicy:39:8-39:11) +[error] terminating with uncaught exception of type spicy::rt::ParseError: failed to synchronize: expected bytes literal "B" but input starts with "E" (<...>/synchronize.spicy:39:8-39:11) [spicy-verbose] - state: type=test::A input="A" stream=0xXXXXXXXX offsets=0/0/0/1/1 chunks=1 frozen=yes mode=default trim=yes lah=n/a lah_token="n/a" recovering=no [spicy-verbose] - parsing production: Unit: test__A -> a b c d e [spicy-verbose] - state: type=test::A input="A" stream=0xXXXXXXXX offsets=0/0/0/1/1 chunks=1 frozen=yes mode=default trim=yes lah=n/a lah_token="n/a" recovering=no [spicy-verbose] - parsing production: Ctor: a -> /A/ (regexp) -[spicy-verbose] - trimming input [spicy-verbose] - setting field 'a' to 'A' -[spicy-verbose] - state: type=test::A input="" stream=0xXXXXXXXX offsets=1/0/1/1/1 chunks=0 frozen=yes mode=default trim=yes lah=n/a lah_token="n/a" recovering=no +[spicy-verbose] - state: type=test::A input="" stream=0xXXXXXXXX offsets=0/0/1/1/1 chunks=1 frozen=yes mode=default trim=yes lah=n/a lah_token="n/a" recovering=no [spicy-verbose] - parsing production: Ctor: b -> b"B" (const bytes) [spicy-verbose] insufficient input at end of data for stream 0xXXXXXXXX (which is not ok here) [spicy-verbose] failed to parse, will try to synchronize at 'd' -[spicy-verbose] - state: type=test::A input="" stream=0xXXXXXXXX offsets=1/0/1/1/1 chunks=0 frozen=yes mode=default trim=yes lah=n/a lah_token="n/a" recovering=yes +[spicy-verbose] - state: type=test::A input="" stream=0xXXXXXXXX offsets=0/0/1/1/1 chunks=1 frozen=yes mode=default trim=yes lah=n/a lah_token="n/a" recovering=yes Error: [$a=b"A", $b=(not set), $c=(not set), $d=(not set), $e=(not set)] -[error] terminating with uncaught exception of 
type spicy::rt::ParseError: failed to synchronize: expecting 'B' (0 bytes available) (<...>/synchronize.spicy:39:8-39:11) +[error] terminating with uncaught exception of type spicy::rt::ParseError: failed to synchronize: expected 1 bytes for bytes literal "B" (0 bytes available) (<...>/synchronize.spicy:39:8-39:11) [spicy-verbose] - state: type=test::A input="1" stream=0xXXXXXXXX offsets=0/0/0/1/1 chunks=1 frozen=no mode=default trim=yes lah=n/a lah_token="n/a" recovering=no [spicy-verbose] - parsing production: Unit: test__A -> a b c d e [spicy-verbose] - state: type=test::A input="1" stream=0xXXXXXXXX offsets=0/0/0/1/1 chunks=1 frozen=no mode=default trim=yes lah=n/a lah_token="n/a" recovering=no @@ -244,28 +223,26 @@ Error: [$a=(not set), $b=(not set), $c=(not set), $d=(not set), $e=(not set)] [spicy-verbose] suspending to wait for more input for stream 0xXXXXXXXX, currently have 4 [spicy-verbose] resuming after insufficient input, now have 5 for stream 0xXXXXXXXX [spicy-verbose] - trimming input -[spicy-verbose] - trimming input [spicy-verbose] - setting field '_anon' to 'BEGIN' [spicy-verbose] - state: type=test::E input="" stream=0xXXXXXXXX offsets=5/0/5/5/5 chunks=0 frozen=no mode=default trim=yes lah=n/a lah_token="n/a" recovering=no [spicy-verbose] - parsing production: Unit: test__E_A -> switch_lha_2 [spicy-verbose] - state: type=test::E_A input="" stream=0xXXXXXXXX offsets=5/5/5/5/5 chunks=0 frozen=no mode=default trim=yes lah=n/a lah_token="n/a" recovering=no -[spicy-verbose] - parsing production: LookAhead: switch_2_lha_2 -> {Ctor: x_5 -> /123/ (regexp) (id 7)}: switch_2_case_1 | {Ctor: y_2 -> /abc/ (regexp) (id 8)}: switch_2_case_2 +[spicy-verbose] - parsing production: LookAhead: switch_2_lha_2 -> {Ctor: x_5 -> /123/ (regexp) (id 3)}: switch_2_case_1 | {Ctor: y_2 -> /abc/ (regexp) (id 4)}: switch_2_case_2 [spicy-verbose] suspending to wait for more input for stream 0xXXXXXXXX, currently have 0 [spicy-verbose] resuming after insufficient input, now have 1 for stream 0xXXXXXXXX [spicy-verbose] suspending to wait for more input for stream 0xXXXXXXXX, currently have 1 [spicy-verbose] resuming after insufficient input, now have 2 for stream 0xXXXXXXXX [spicy-verbose] suspending to wait for more input for stream 0xXXXXXXXX, currently have 2 [spicy-verbose] resuming after insufficient input, now have 3 for stream 0xXXXXXXXX -[spicy-verbose] - state: type=test::E_A input="123" stream=0xXXXXXXXX offsets=5/5/5/8/8 chunks=3 frozen=no mode=default trim=yes lah=7 lah_token="123" recovering=no -[spicy-verbose] - state: type=test::E_A input="123" stream=0xXXXXXXXX offsets=5/5/5/8/8 chunks=3 frozen=no mode=default trim=yes lah=7 lah_token="123" recovering=no +[spicy-verbose] - state: type=test::E_A input="123" stream=0xXXXXXXXX offsets=5/5/5/8/8 chunks=3 frozen=no mode=default trim=yes lah=3 lah_token="123" recovering=no +[spicy-verbose] - state: type=test::E_A input="123" stream=0xXXXXXXXX offsets=5/5/5/8/8 chunks=3 frozen=no mode=default trim=yes lah=3 lah_token="123" recovering=no [spicy-verbose] - parsing production: Sequence: switch_2_case_1 -> Resolved_10 -[spicy-verbose] - state: type=test::E_A input="123" stream=0xXXXXXXXX offsets=5/5/5/8/8 chunks=3 frozen=no mode=default trim=yes lah=7 lah_token="123" recovering=no +[spicy-verbose] - state: type=test::E_A input="123" stream=0xXXXXXXXX offsets=5/5/5/8/8 chunks=3 frozen=no mode=default trim=yes lah=3 lah_token="123" recovering=no [spicy-verbose] - parsing production: Unit: test__E_X_2 -> x_5 -[spicy-verbose] - state: type=test::E_X 
input="123" stream=0xXXXXXXXX offsets=5/5/5/8/8 chunks=3 frozen=no mode=default trim=yes lah=7 lah_token="123" recovering=no +[spicy-verbose] - state: type=test::E_X input="123" stream=0xXXXXXXXX offsets=5/5/5/8/8 chunks=3 frozen=no mode=default trim=yes lah=3 lah_token="123" recovering=no [spicy-verbose] - parsing production: Ctor: x_6 -> /123/ (regexp) [spicy-verbose] - consuming look-ahead token [spicy-verbose] - trimming input -[spicy-verbose] - trimming input [spicy-verbose] - setting field 'x' to '123' [spicy-verbose] - setting field 'x' to '[$x=b"123"]' [spicy-verbose] - setting field 'a' to '[$x=[$x=b"123"], $y=(not set)]' @@ -278,7 +255,6 @@ Error: [$a=(not set), $b=(not set), $c=(not set), $d=(not set), $e=(not set)] [spicy-verbose] suspending to wait for more input for stream 0xXXXXXXXX, currently have 2 [spicy-verbose] resuming after insufficient input, now have 3 for stream 0xXXXXXXXX [spicy-verbose] - trimming input -[spicy-verbose] - trimming input [spicy-verbose] - setting field '_anon_2' to 'END' Done: [$a=[$x=[$x=b"123"], $y=(not set)]] [spicy-verbose] - state: type=test::E input="B" stream=0xXXXXXXXX offsets=0/0/0/1/1 chunks=1 frozen=no mode=default trim=yes lah=n/a lah_token="n/a" recovering=no @@ -294,28 +270,26 @@ Done: [$a=[$x=[$x=b"123"], $y=(not set)]] [spicy-verbose] suspending to wait for more input for stream 0xXXXXXXXX, currently have 4 [spicy-verbose] resuming after insufficient input, now have 5 for stream 0xXXXXXXXX [spicy-verbose] - trimming input -[spicy-verbose] - trimming input [spicy-verbose] - setting field '_anon' to 'BEGIN' [spicy-verbose] - state: type=test::E input="" stream=0xXXXXXXXX offsets=5/0/5/5/5 chunks=0 frozen=no mode=default trim=yes lah=n/a lah_token="n/a" recovering=no [spicy-verbose] - parsing production: Unit: test__E_A -> switch_lha_2 [spicy-verbose] - state: type=test::E_A input="" stream=0xXXXXXXXX offsets=5/5/5/5/5 chunks=0 frozen=no mode=default trim=yes lah=n/a lah_token="n/a" recovering=no -[spicy-verbose] - parsing production: LookAhead: switch_2_lha_2 -> {Ctor: x_5 -> /123/ (regexp) (id 7)}: switch_2_case_1 | {Ctor: y_2 -> /abc/ (regexp) (id 8)}: switch_2_case_2 +[spicy-verbose] - parsing production: LookAhead: switch_2_lha_2 -> {Ctor: x_5 -> /123/ (regexp) (id 3)}: switch_2_case_1 | {Ctor: y_2 -> /abc/ (regexp) (id 4)}: switch_2_case_2 [spicy-verbose] suspending to wait for more input for stream 0xXXXXXXXX, currently have 0 [spicy-verbose] resuming after insufficient input, now have 1 for stream 0xXXXXXXXX [spicy-verbose] suspending to wait for more input for stream 0xXXXXXXXX, currently have 1 [spicy-verbose] resuming after insufficient input, now have 2 for stream 0xXXXXXXXX [spicy-verbose] suspending to wait for more input for stream 0xXXXXXXXX, currently have 2 [spicy-verbose] resuming after insufficient input, now have 3 for stream 0xXXXXXXXX -[spicy-verbose] - state: type=test::E_A input="abc" stream=0xXXXXXXXX offsets=5/5/5/8/8 chunks=3 frozen=no mode=default trim=yes lah=8 lah_token="abc" recovering=no -[spicy-verbose] - state: type=test::E_A input="abc" stream=0xXXXXXXXX offsets=5/5/5/8/8 chunks=3 frozen=no mode=default trim=yes lah=8 lah_token="abc" recovering=no +[spicy-verbose] - state: type=test::E_A input="abc" stream=0xXXXXXXXX offsets=5/5/5/8/8 chunks=3 frozen=no mode=default trim=yes lah=4 lah_token="abc" recovering=no +[spicy-verbose] - state: type=test::E_A input="abc" stream=0xXXXXXXXX offsets=5/5/5/8/8 chunks=3 frozen=no mode=default trim=yes lah=4 lah_token="abc" recovering=no [spicy-verbose] - parsing 
production: Sequence: switch_2_case_2 -> Resolved_11 -[spicy-verbose] - state: type=test::E_A input="abc" stream=0xXXXXXXXX offsets=5/5/5/8/8 chunks=3 frozen=no mode=default trim=yes lah=8 lah_token="abc" recovering=no +[spicy-verbose] - state: type=test::E_A input="abc" stream=0xXXXXXXXX offsets=5/5/5/8/8 chunks=3 frozen=no mode=default trim=yes lah=4 lah_token="abc" recovering=no [spicy-verbose] - parsing production: Unit: test__E_Y_2 -> y_2 -[spicy-verbose] - state: type=test::E_Y input="abc" stream=0xXXXXXXXX offsets=5/5/5/8/8 chunks=3 frozen=no mode=default trim=yes lah=8 lah_token="abc" recovering=no +[spicy-verbose] - state: type=test::E_Y input="abc" stream=0xXXXXXXXX offsets=5/5/5/8/8 chunks=3 frozen=no mode=default trim=yes lah=4 lah_token="abc" recovering=no [spicy-verbose] - parsing production: Ctor: y_3 -> /abc/ (regexp) [spicy-verbose] - consuming look-ahead token [spicy-verbose] - trimming input -[spicy-verbose] - trimming input [spicy-verbose] - setting field 'y' to 'abc' [spicy-verbose] - setting field 'y' to '[$y=b"abc"]' [spicy-verbose] - setting field 'a' to '[$x=(not set), $y=[$y=b"abc"]]' @@ -328,7 +302,6 @@ Done: [$a=[$x=[$x=b"123"], $y=(not set)]] [spicy-verbose] suspending to wait for more input for stream 0xXXXXXXXX, currently have 2 [spicy-verbose] resuming after insufficient input, now have 3 for stream 0xXXXXXXXX [spicy-verbose] - trimming input -[spicy-verbose] - trimming input [spicy-verbose] - setting field '_anon_2' to 'END' Done: [$a=[$x=(not set), $y=[$y=b"abc"]]] [spicy-verbose] - state: type=test::E input="a" stream=0xXXXXXXXX offsets=0/0/0/1/1 chunks=1 frozen=no mode=default trim=yes lah=n/a lah_token="n/a" recovering=no @@ -345,23 +318,22 @@ Done: [$a=[$x=(not set), $y=[$y=b"abc"]]] [spicy-verbose] resuming after insufficient input, now have 5 for stream 0xXXXXXXXX [spicy-verbose] failed to parse, will try to synchronize at 'a' [spicy-verbose] - state: type=test::E input="abcEN" stream=0xXXXXXXXX offsets=0/0/0/5/5 chunks=5 frozen=no mode=default trim=yes lah=n/a lah_token="n/a" recovering=yes -[spicy-verbose] - state: type=test::E input="abcEN" stream=0xXXXXXXXX offsets=0/0/0/5/5 chunks=5 frozen=no mode=default trim=yes lah=8 lah_token="abc" recovering=yes +[spicy-verbose] - state: type=test::E input="abcEN" stream=0xXXXXXXXX offsets=0/0/0/5/5 chunks=5 frozen=no mode=default trim=yes lah=4 lah_token="abc" recovering=yes [spicy-verbose] successfully synchronized Synced: [$a=(not set)] Confirmed: [$a=(not set)] -[spicy-verbose] - state: type=test::E input="abcEN" stream=0xXXXXXXXX offsets=0/0/0/5/5 chunks=5 frozen=no mode=default trim=yes lah=8 lah_token="abc" recovering=no +[spicy-verbose] - state: type=test::E input="abcEN" stream=0xXXXXXXXX offsets=0/0/0/5/5 chunks=5 frozen=no mode=default trim=yes lah=4 lah_token="abc" recovering=no [spicy-verbose] - parsing production: Unit: test__E_A -> switch_lha_2 -[spicy-verbose] - state: type=test::E_A input="abcEN" stream=0xXXXXXXXX offsets=0/0/0/5/5 chunks=5 frozen=no mode=default trim=yes lah=8 lah_token="abc" recovering=no -[spicy-verbose] - parsing production: LookAhead: switch_2_lha_2 -> {Ctor: x_5 -> /123/ (regexp) (id 7)}: switch_2_case_1 | {Ctor: y_2 -> /abc/ (regexp) (id 8)}: switch_2_case_2 -[spicy-verbose] - state: type=test::E_A input="abcEN" stream=0xXXXXXXXX offsets=0/0/0/5/5 chunks=5 frozen=no mode=default trim=yes lah=8 lah_token="abc" recovering=no +[spicy-verbose] - state: type=test::E_A input="abcEN" stream=0xXXXXXXXX offsets=0/0/0/5/5 chunks=5 frozen=no mode=default trim=yes lah=4 
lah_token="abc" recovering=no +[spicy-verbose] - parsing production: LookAhead: switch_2_lha_2 -> {Ctor: x_5 -> /123/ (regexp) (id 3)}: switch_2_case_1 | {Ctor: y_2 -> /abc/ (regexp) (id 4)}: switch_2_case_2 +[spicy-verbose] - state: type=test::E_A input="abcEN" stream=0xXXXXXXXX offsets=0/0/0/5/5 chunks=5 frozen=no mode=default trim=yes lah=4 lah_token="abc" recovering=no [spicy-verbose] - parsing production: Sequence: switch_2_case_2 -> Resolved_11 -[spicy-verbose] - state: type=test::E_A input="abcEN" stream=0xXXXXXXXX offsets=0/0/0/5/5 chunks=5 frozen=no mode=default trim=yes lah=8 lah_token="abc" recovering=no +[spicy-verbose] - state: type=test::E_A input="abcEN" stream=0xXXXXXXXX offsets=0/0/0/5/5 chunks=5 frozen=no mode=default trim=yes lah=4 lah_token="abc" recovering=no [spicy-verbose] - parsing production: Unit: test__E_Y_2 -> y_2 -[spicy-verbose] - state: type=test::E_Y input="abcEN" stream=0xXXXXXXXX offsets=0/0/0/5/5 chunks=5 frozen=no mode=default trim=yes lah=8 lah_token="abc" recovering=no +[spicy-verbose] - state: type=test::E_Y input="abcEN" stream=0xXXXXXXXX offsets=0/0/0/5/5 chunks=5 frozen=no mode=default trim=yes lah=4 lah_token="abc" recovering=no [spicy-verbose] - parsing production: Ctor: y_3 -> /abc/ (regexp) [spicy-verbose] - consuming look-ahead token [spicy-verbose] - trimming input -[spicy-verbose] - trimming input [spicy-verbose] - setting field 'y' to 'abc' [spicy-verbose] - setting field 'y' to '[$y=b"abc"]' [spicy-verbose] - setting field 'a' to '[$x=(not set), $y=[$y=b"abc"]]' @@ -370,7 +342,6 @@ Confirmed: [$a=(not set)] [spicy-verbose] suspending to wait for more input for stream 0xXXXXXXXX, currently have 2 [spicy-verbose] resuming after insufficient input, now have 3 for stream 0xXXXXXXXX [spicy-verbose] - trimming input -[spicy-verbose] - trimming input [spicy-verbose] - setting field '_anon_2' to 'END' Done: [$a=[$x=(not set), $y=[$y=b"abc"]]] [spicy-verbose] - state: type=test::E input="1" stream=0xXXXXXXXX offsets=0/0/0/1/1 chunks=1 frozen=no mode=default trim=yes lah=n/a lah_token="n/a" recovering=no @@ -387,23 +358,22 @@ Done: [$a=[$x=(not set), $y=[$y=b"abc"]]] [spicy-verbose] resuming after insufficient input, now have 5 for stream 0xXXXXXXXX [spicy-verbose] failed to parse, will try to synchronize at 'a' [spicy-verbose] - state: type=test::E input="123EN" stream=0xXXXXXXXX offsets=0/0/0/5/5 chunks=5 frozen=no mode=default trim=yes lah=n/a lah_token="n/a" recovering=yes -[spicy-verbose] - state: type=test::E input="123EN" stream=0xXXXXXXXX offsets=0/0/0/5/5 chunks=5 frozen=no mode=default trim=yes lah=7 lah_token="123" recovering=yes +[spicy-verbose] - state: type=test::E input="123EN" stream=0xXXXXXXXX offsets=0/0/0/5/5 chunks=5 frozen=no mode=default trim=yes lah=3 lah_token="123" recovering=yes [spicy-verbose] successfully synchronized Synced: [$a=(not set)] Confirmed: [$a=(not set)] -[spicy-verbose] - state: type=test::E input="123EN" stream=0xXXXXXXXX offsets=0/0/0/5/5 chunks=5 frozen=no mode=default trim=yes lah=7 lah_token="123" recovering=no +[spicy-verbose] - state: type=test::E input="123EN" stream=0xXXXXXXXX offsets=0/0/0/5/5 chunks=5 frozen=no mode=default trim=yes lah=3 lah_token="123" recovering=no [spicy-verbose] - parsing production: Unit: test__E_A -> switch_lha_2 -[spicy-verbose] - state: type=test::E_A input="123EN" stream=0xXXXXXXXX offsets=0/0/0/5/5 chunks=5 frozen=no mode=default trim=yes lah=7 lah_token="123" recovering=no -[spicy-verbose] - parsing production: LookAhead: switch_2_lha_2 -> {Ctor: x_5 -> /123/ (regexp) 
(id 7)}: switch_2_case_1 | {Ctor: y_2 -> /abc/ (regexp) (id 8)}: switch_2_case_2 -[spicy-verbose] - state: type=test::E_A input="123EN" stream=0xXXXXXXXX offsets=0/0/0/5/5 chunks=5 frozen=no mode=default trim=yes lah=7 lah_token="123" recovering=no +[spicy-verbose] - state: type=test::E_A input="123EN" stream=0xXXXXXXXX offsets=0/0/0/5/5 chunks=5 frozen=no mode=default trim=yes lah=3 lah_token="123" recovering=no +[spicy-verbose] - parsing production: LookAhead: switch_2_lha_2 -> {Ctor: x_5 -> /123/ (regexp) (id 3)}: switch_2_case_1 | {Ctor: y_2 -> /abc/ (regexp) (id 4)}: switch_2_case_2 +[spicy-verbose] - state: type=test::E_A input="123EN" stream=0xXXXXXXXX offsets=0/0/0/5/5 chunks=5 frozen=no mode=default trim=yes lah=3 lah_token="123" recovering=no [spicy-verbose] - parsing production: Sequence: switch_2_case_1 -> Resolved_10 -[spicy-verbose] - state: type=test::E_A input="123EN" stream=0xXXXXXXXX offsets=0/0/0/5/5 chunks=5 frozen=no mode=default trim=yes lah=7 lah_token="123" recovering=no +[spicy-verbose] - state: type=test::E_A input="123EN" stream=0xXXXXXXXX offsets=0/0/0/5/5 chunks=5 frozen=no mode=default trim=yes lah=3 lah_token="123" recovering=no [spicy-verbose] - parsing production: Unit: test__E_X_2 -> x_5 -[spicy-verbose] - state: type=test::E_X input="123EN" stream=0xXXXXXXXX offsets=0/0/0/5/5 chunks=5 frozen=no mode=default trim=yes lah=7 lah_token="123" recovering=no +[spicy-verbose] - state: type=test::E_X input="123EN" stream=0xXXXXXXXX offsets=0/0/0/5/5 chunks=5 frozen=no mode=default trim=yes lah=3 lah_token="123" recovering=no [spicy-verbose] - parsing production: Ctor: x_6 -> /123/ (regexp) [spicy-verbose] - consuming look-ahead token [spicy-verbose] - trimming input -[spicy-verbose] - trimming input [spicy-verbose] - setting field 'x' to '123' [spicy-verbose] - setting field 'x' to '[$x=b"123"]' [spicy-verbose] - setting field 'a' to '[$x=[$x=b"123"], $y=(not set)]' @@ -412,7 +382,6 @@ Confirmed: [$a=(not set)] [spicy-verbose] suspending to wait for more input for stream 0xXXXXXXXX, currently have 2 [spicy-verbose] resuming after insufficient input, now have 3 for stream 0xXXXXXXXX [spicy-verbose] - trimming input -[spicy-verbose] - trimming input [spicy-verbose] - setting field '_anon_2' to 'END' Done: [$a=[$x=[$x=b"123"], $y=(not set)]] [spicy-verbose] - state: type=test::B input="1" stream=0xXXXXXXXX offsets=0/0/0/1/1 chunks=1 frozen=yes mode=default trim=yes lah=n/a lah_token="n/a" recovering=no diff --git a/tests/Baseline/spicy.types.unit.trimming/.stderr b/tests/Baseline/spicy.types.unit.trimming/.stderr index 3d2c3cb2d6..32ea950d19 100644 --- a/tests/Baseline/spicy.types.unit.trimming/.stderr +++ b/tests/Baseline/spicy.types.unit.trimming/.stderr @@ -4,29 +4,24 @@ [spicy-verbose] - state: type=Mini::Test input="1234567890..." stream=0xXXXXXXXX offsets=0/0/0/14/14 chunks=1 frozen=yes mode=default trim=yes lah=n/a lah_token="n/a" recovering=no [spicy-verbose] - parsing production: Ctor: f1 -> b"12" (const bytes) [spicy-verbose] - trimming input -[spicy-verbose] - trimming input [spicy-verbose] - setting field 'f1' to '12' [spicy-verbose] - state: type=Mini::Test input="34567890ab..." 
stream=0xXXXXXXXX offsets=2/0/2/14/14 chunks=1 frozen=yes mode=default trim=yes lah=n/a lah_token="n/a" recovering=no [spicy-verbose] - parsing production: Ctor: f2 -> b"34" (const bytes) [spicy-verbose] - trimming input -[spicy-verbose] - trimming input [spicy-verbose] - setting field 'f2' to '34' [spicy-verbose] - state: type=Mini::Test input="567890abcd" stream=0xXXXXXXXX offsets=4/0/4/14/14 chunks=1 frozen=yes mode=default trim=yes lah=n/a lah_token="n/a" recovering=no [spicy-verbose] - parsing production: Ctor: f3 -> b"5678" (const bytes) [spicy-verbose] - trimming input -[spicy-verbose] - trimming input [spicy-verbose] - setting field 'f3' to '5678' [spicy-verbose] - state: type=Mini::Test input="90abcd" stream=0xXXXXXXXX offsets=8/0/8/14/14 chunks=1 frozen=yes mode=default trim=yes lah=n/a lah_token="n/a" recovering=no [spicy-verbose] - parsing production: Unit: Mini__Sub_2 -> x_2 y_2 [spicy-verbose] - state: type=Mini::Sub input="90abcd" stream=0xXXXXXXXX offsets=8/8/8/14/14 chunks=1 frozen=yes mode=default trim=yes lah=n/a lah_token="n/a" recovering=no [spicy-verbose] - parsing production: Ctor: x -> b"90a" (const bytes) [spicy-verbose] - trimming input -[spicy-verbose] - trimming input [spicy-verbose] - setting field 'x' to '90a' [spicy-verbose] - state: type=Mini::Sub input="bcd" stream=0xXXXXXXXX offsets=11/8/11/14/14 chunks=1 frozen=yes mode=default trim=yes lah=n/a lah_token="n/a" recovering=no [spicy-verbose] - parsing production: Ctor: y -> b"bcd" (const bytes) [spicy-verbose] - trimming input -[spicy-verbose] - trimming input [spicy-verbose] - setting field 'y' to 'bcd' [spicy-verbose] - setting field 'f4' to '[$x=b"90a", $y=b"bcd"]' [spicy-verbose] - state: type=Mini::Test input="1" stream=0xXXXXXXXX offsets=0/0/0/1/1 chunks=1 frozen=no mode=default trim=yes lah=n/a lah_token="n/a" recovering=no @@ -36,7 +31,6 @@ [spicy-verbose] suspending to wait for more input for stream 0xXXXXXXXX, currently have 1 [spicy-verbose] resuming after insufficient input, now have 2 for stream 0xXXXXXXXX [spicy-verbose] - trimming input -[spicy-verbose] - trimming input [spicy-verbose] - setting field 'f1' to '12' [spicy-verbose] - state: type=Mini::Test input="" stream=0xXXXXXXXX offsets=2/0/2/2/2 chunks=0 frozen=no mode=default trim=yes lah=n/a lah_token="n/a" recovering=no [spicy-verbose] - parsing production: Ctor: f2 -> b"34" (const bytes) @@ -45,7 +39,6 @@ [spicy-verbose] suspending to wait for more input for stream 0xXXXXXXXX, currently have 1 [spicy-verbose] resuming after insufficient input, now have 2 for stream 0xXXXXXXXX [spicy-verbose] - trimming input -[spicy-verbose] - trimming input [spicy-verbose] - setting field 'f2' to '34' [spicy-verbose] - state: type=Mini::Test input="" stream=0xXXXXXXXX offsets=4/0/4/4/4 chunks=0 frozen=no mode=default trim=yes lah=n/a lah_token="n/a" recovering=no [spicy-verbose] - parsing production: Ctor: f3 -> b"5678" (const bytes) @@ -58,7 +51,6 @@ [spicy-verbose] suspending to wait for more input for stream 0xXXXXXXXX, currently have 3 [spicy-verbose] resuming after insufficient input, now have 4 for stream 0xXXXXXXXX [spicy-verbose] - trimming input -[spicy-verbose] - trimming input [spicy-verbose] - setting field 'f3' to '5678' [spicy-verbose] - state: type=Mini::Test input="" stream=0xXXXXXXXX offsets=8/0/8/8/8 chunks=0 frozen=no mode=default trim=yes lah=n/a lah_token="n/a" recovering=no [spicy-verbose] - parsing production: Unit: Mini__Sub_2 -> x_2 y_2 @@ -71,7 +63,6 @@ [spicy-verbose] suspending to wait for more input for stream 0xXXXXXXXX, 
currently have 2 [spicy-verbose] resuming after insufficient input, now have 3 for stream 0xXXXXXXXX [spicy-verbose] - trimming input -[spicy-verbose] - trimming input [spicy-verbose] - setting field 'x' to '90a' [spicy-verbose] - state: type=Mini::Sub input="" stream=0xXXXXXXXX offsets=11/8/11/11/11 chunks=0 frozen=no mode=default trim=yes lah=n/a lah_token="n/a" recovering=no [spicy-verbose] - parsing production: Ctor: y -> b"bcd" (const bytes) @@ -82,7 +73,6 @@ [spicy-verbose] suspending to wait for more input for stream 0xXXXXXXXX, currently have 2 [spicy-verbose] resuming after insufficient input, now have 3 for stream 0xXXXXXXXX [spicy-verbose] - trimming input -[spicy-verbose] - trimming input [spicy-verbose] - setting field 'y' to 'bcd' [spicy-verbose] - setting field 'f4' to '[$x=b"90a", $y=b"bcd"]' [spicy-verbose] - state: type=Mini::Test input="12" stream=0xXXXXXXXX offsets=0/0/0/2/2 chunks=1 frozen=no mode=default trim=yes lah=n/a lah_token="n/a" recovering=no @@ -90,14 +80,12 @@ [spicy-verbose] - state: type=Mini::Test input="12" stream=0xXXXXXXXX offsets=0/0/0/2/2 chunks=1 frozen=no mode=default trim=yes lah=n/a lah_token="n/a" recovering=no [spicy-verbose] - parsing production: Ctor: f1 -> b"12" (const bytes) [spicy-verbose] - trimming input -[spicy-verbose] - trimming input [spicy-verbose] - setting field 'f1' to '12' [spicy-verbose] - state: type=Mini::Test input="" stream=0xXXXXXXXX offsets=2/0/2/2/2 chunks=0 frozen=no mode=default trim=yes lah=n/a lah_token="n/a" recovering=no [spicy-verbose] - parsing production: Ctor: f2 -> b"34" (const bytes) [spicy-verbose] suspending to wait for more input for stream 0xXXXXXXXX, currently have 0 [spicy-verbose] resuming after insufficient input, now have 2 for stream 0xXXXXXXXX [spicy-verbose] - trimming input -[spicy-verbose] - trimming input [spicy-verbose] - setting field 'f2' to '34' [spicy-verbose] - state: type=Mini::Test input="" stream=0xXXXXXXXX offsets=4/0/4/4/4 chunks=0 frozen=no mode=default trim=yes lah=n/a lah_token="n/a" recovering=no [spicy-verbose] - parsing production: Ctor: f3 -> b"5678" (const bytes) @@ -106,7 +94,6 @@ [spicy-verbose] suspending to wait for more input for stream 0xXXXXXXXX, currently have 2 [spicy-verbose] resuming after insufficient input, now have 4 for stream 0xXXXXXXXX [spicy-verbose] - trimming input -[spicy-verbose] - trimming input [spicy-verbose] - setting field 'f3' to '5678' [spicy-verbose] - state: type=Mini::Test input="" stream=0xXXXXXXXX offsets=8/0/8/8/8 chunks=0 frozen=no mode=default trim=yes lah=n/a lah_token="n/a" recovering=no [spicy-verbose] - parsing production: Unit: Mini__Sub_2 -> x_2 y_2 @@ -117,14 +104,12 @@ [spicy-verbose] suspending to wait for more input for stream 0xXXXXXXXX, currently have 2 [spicy-verbose] resuming after insufficient input, now have 4 for stream 0xXXXXXXXX [spicy-verbose] - trimming input -[spicy-verbose] - trimming input [spicy-verbose] - setting field 'x' to '90a' [spicy-verbose] - state: type=Mini::Sub input="b" stream=0xXXXXXXXX offsets=11/8/11/12/12 chunks=1 frozen=no mode=default trim=yes lah=n/a lah_token="n/a" recovering=no [spicy-verbose] - parsing production: Ctor: y -> b"bcd" (const bytes) [spicy-verbose] suspending to wait for more input for stream 0xXXXXXXXX, currently have 1 [spicy-verbose] resuming after insufficient input, now have 3 for stream 0xXXXXXXXX [spicy-verbose] - trimming input -[spicy-verbose] - trimming input [spicy-verbose] - setting field 'y' to 'bcd' [spicy-verbose] - setting field 'f4' to '[$x=b"90a", $y=b"bcd"]' 
[spicy-verbose] - state: type=Mini::Test input="123" stream=0xXXXXXXXX offsets=0/0/0/3/3 chunks=1 frozen=no mode=default trim=yes lah=n/a lah_token="n/a" recovering=no @@ -132,21 +117,18 @@ [spicy-verbose] - state: type=Mini::Test input="123" stream=0xXXXXXXXX offsets=0/0/0/3/3 chunks=1 frozen=no mode=default trim=yes lah=n/a lah_token="n/a" recovering=no [spicy-verbose] - parsing production: Ctor: f1 -> b"12" (const bytes) [spicy-verbose] - trimming input -[spicy-verbose] - trimming input [spicy-verbose] - setting field 'f1' to '12' [spicy-verbose] - state: type=Mini::Test input="3" stream=0xXXXXXXXX offsets=2/0/2/3/3 chunks=1 frozen=no mode=default trim=yes lah=n/a lah_token="n/a" recovering=no [spicy-verbose] - parsing production: Ctor: f2 -> b"34" (const bytes) [spicy-verbose] suspending to wait for more input for stream 0xXXXXXXXX, currently have 1 [spicy-verbose] resuming after insufficient input, now have 4 for stream 0xXXXXXXXX [spicy-verbose] - trimming input -[spicy-verbose] - trimming input [spicy-verbose] - setting field 'f2' to '34' [spicy-verbose] - state: type=Mini::Test input="56" stream=0xXXXXXXXX offsets=4/0/4/6/6 chunks=1 frozen=no mode=default trim=yes lah=n/a lah_token="n/a" recovering=no [spicy-verbose] - parsing production: Ctor: f3 -> b"5678" (const bytes) [spicy-verbose] suspending to wait for more input for stream 0xXXXXXXXX, currently have 2 [spicy-verbose] resuming after insufficient input, now have 5 for stream 0xXXXXXXXX [spicy-verbose] - trimming input -[spicy-verbose] - trimming input [spicy-verbose] - setting field 'f3' to '5678' [spicy-verbose] - state: type=Mini::Test input="9" stream=0xXXXXXXXX offsets=8/0/8/9/9 chunks=1 frozen=no mode=default trim=yes lah=n/a lah_token="n/a" recovering=no [spicy-verbose] - parsing production: Unit: Mini__Sub_2 -> x_2 y_2 @@ -155,14 +137,12 @@ [spicy-verbose] suspending to wait for more input for stream 0xXXXXXXXX, currently have 1 [spicy-verbose] resuming after insufficient input, now have 4 for stream 0xXXXXXXXX [spicy-verbose] - trimming input -[spicy-verbose] - trimming input [spicy-verbose] - setting field 'x' to '90a' [spicy-verbose] - state: type=Mini::Sub input="b" stream=0xXXXXXXXX offsets=11/8/11/12/12 chunks=1 frozen=no mode=default trim=yes lah=n/a lah_token="n/a" recovering=no [spicy-verbose] - parsing production: Ctor: y -> b"bcd" (const bytes) [spicy-verbose] suspending to wait for more input for stream 0xXXXXXXXX, currently have 1 [spicy-verbose] resuming after insufficient input, now have 3 for stream 0xXXXXXXXX [spicy-verbose] - trimming input -[spicy-verbose] - trimming input [spicy-verbose] - setting field 'y' to 'bcd' [spicy-verbose] - setting field 'f4' to '[$x=b"90a", $y=b"bcd"]' [spicy-verbose] - state: type=Mini::Test input="1234" stream=0xXXXXXXXX offsets=0/0/0/4/4 chunks=1 frozen=no mode=default trim=yes lah=n/a lah_token="n/a" recovering=no @@ -170,19 +150,16 @@ [spicy-verbose] - state: type=Mini::Test input="1234" stream=0xXXXXXXXX offsets=0/0/0/4/4 chunks=1 frozen=no mode=default trim=yes lah=n/a lah_token="n/a" recovering=no [spicy-verbose] - parsing production: Ctor: f1 -> b"12" (const bytes) [spicy-verbose] - trimming input -[spicy-verbose] - trimming input [spicy-verbose] - setting field 'f1' to '12' [spicy-verbose] - state: type=Mini::Test input="34" stream=0xXXXXXXXX offsets=2/0/2/4/4 chunks=1 frozen=no mode=default trim=yes lah=n/a lah_token="n/a" recovering=no [spicy-verbose] - parsing production: Ctor: f2 -> b"34" (const bytes) [spicy-verbose] - trimming input -[spicy-verbose] - 
trimming input [spicy-verbose] - setting field 'f2' to '34' [spicy-verbose] - state: type=Mini::Test input="" stream=0xXXXXXXXX offsets=4/0/4/4/4 chunks=0 frozen=no mode=default trim=yes lah=n/a lah_token="n/a" recovering=no [spicy-verbose] - parsing production: Ctor: f3 -> b"5678" (const bytes) [spicy-verbose] suspending to wait for more input for stream 0xXXXXXXXX, currently have 0 [spicy-verbose] resuming after insufficient input, now have 4 for stream 0xXXXXXXXX [spicy-verbose] - trimming input -[spicy-verbose] - trimming input [spicy-verbose] - setting field 'f3' to '5678' [spicy-verbose] - state: type=Mini::Test input="" stream=0xXXXXXXXX offsets=8/0/8/8/8 chunks=0 frozen=no mode=default trim=yes lah=n/a lah_token="n/a" recovering=no [spicy-verbose] - parsing production: Unit: Mini__Sub_2 -> x_2 y_2 @@ -191,14 +168,12 @@ [spicy-verbose] suspending to wait for more input for stream 0xXXXXXXXX, currently have 0 [spicy-verbose] resuming after insufficient input, now have 4 for stream 0xXXXXXXXX [spicy-verbose] - trimming input -[spicy-verbose] - trimming input [spicy-verbose] - setting field 'x' to '90a' [spicy-verbose] - state: type=Mini::Sub input="b" stream=0xXXXXXXXX offsets=11/8/11/12/12 chunks=1 frozen=no mode=default trim=yes lah=n/a lah_token="n/a" recovering=no [spicy-verbose] - parsing production: Ctor: y -> b"bcd" (const bytes) [spicy-verbose] suspending to wait for more input for stream 0xXXXXXXXX, currently have 1 [spicy-verbose] resuming after insufficient input, now have 3 for stream 0xXXXXXXXX [spicy-verbose] - trimming input -[spicy-verbose] - trimming input [spicy-verbose] - setting field 'y' to 'bcd' [spicy-verbose] - setting field 'f4' to '[$x=b"90a", $y=b"bcd"]' [spicy-verbose] - state: type=Mini::Test input="12345" stream=0xXXXXXXXX offsets=0/0/0/5/5 chunks=1 frozen=no mode=default trim=yes lah=n/a lah_token="n/a" recovering=no @@ -206,19 +181,16 @@ [spicy-verbose] - state: type=Mini::Test input="12345" stream=0xXXXXXXXX offsets=0/0/0/5/5 chunks=1 frozen=no mode=default trim=yes lah=n/a lah_token="n/a" recovering=no [spicy-verbose] - parsing production: Ctor: f1 -> b"12" (const bytes) [spicy-verbose] - trimming input -[spicy-verbose] - trimming input [spicy-verbose] - setting field 'f1' to '12' [spicy-verbose] - state: type=Mini::Test input="345" stream=0xXXXXXXXX offsets=2/0/2/5/5 chunks=1 frozen=no mode=default trim=yes lah=n/a lah_token="n/a" recovering=no [spicy-verbose] - parsing production: Ctor: f2 -> b"34" (const bytes) [spicy-verbose] - trimming input -[spicy-verbose] - trimming input [spicy-verbose] - setting field 'f2' to '34' [spicy-verbose] - state: type=Mini::Test input="5" stream=0xXXXXXXXX offsets=4/0/4/5/5 chunks=1 frozen=no mode=default trim=yes lah=n/a lah_token="n/a" recovering=no [spicy-verbose] - parsing production: Ctor: f3 -> b"5678" (const bytes) [spicy-verbose] suspending to wait for more input for stream 0xXXXXXXXX, currently have 1 [spicy-verbose] resuming after insufficient input, now have 6 for stream 0xXXXXXXXX [spicy-verbose] - trimming input -[spicy-verbose] - trimming input [spicy-verbose] - setting field 'f3' to '5678' [spicy-verbose] - state: type=Mini::Test input="90" stream=0xXXXXXXXX offsets=8/0/8/10/10 chunks=1 frozen=no mode=default trim=yes lah=n/a lah_token="n/a" recovering=no [spicy-verbose] - parsing production: Unit: Mini__Sub_2 -> x_2 y_2 @@ -227,11 +199,9 @@ [spicy-verbose] suspending to wait for more input for stream 0xXXXXXXXX, currently have 2 [spicy-verbose] resuming after insufficient input, now have 6 for stream 
0xXXXXXXXX [spicy-verbose] - trimming input -[spicy-verbose] - trimming input [spicy-verbose] - setting field 'x' to '90a' [spicy-verbose] - state: type=Mini::Sub input="bcd" stream=0xXXXXXXXX offsets=11/8/11/14/14 chunks=1 frozen=yes mode=default trim=yes lah=n/a lah_token="n/a" recovering=no [spicy-verbose] - parsing production: Ctor: y -> b"bcd" (const bytes) [spicy-verbose] - trimming input -[spicy-verbose] - trimming input [spicy-verbose] - setting field 'y' to 'bcd' [spicy-verbose] - setting field 'f4' to '[$x=b"90a", $y=b"bcd"]' diff --git a/tests/spicy/types/bytes/parse-length.spicy b/tests/spicy/types/bytes/parse-length.spicy index 65dd52677d..f2ff438959 100644 --- a/tests/spicy/types/bytes/parse-length.spicy +++ b/tests/spicy/types/bytes/parse-length.spicy @@ -3,6 +3,12 @@ # # Ensure type-specific optimization kicks in. # @TEST-EXEC: spicyc -p %INPUT | grep -q 'b2 = spicy_rt::extractBytes' +# +# Ensure literal-specific optimization kicks in. +# @TEST-EXEC: spicyc -p %INPUT | grep -q '^ *spicy_rt::expectBytesLiteral' +# +# Ensure we don't get any look-ahead checks when parsing the literals; we don't need them here. +# @TEST-EXEC: spicyc -p %INPUT | grep -vq 'if.*lah' module Test;
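For illustration, a minimal Spicy unit of the kind these TEST-EXEC checks target might look like the sketch below. This is only a hedged sketch; the module, unit, and field names here are hypothetical and are not taken from parse-length.spicy. It combines a constant bytes literal, which the generated parser is expected to check through spicy_rt::expectBytesLiteral, with a &size-driven bytes field, which goes through spicy_rt::extractBytes, and neither needs look-ahead handling.

    module Demo;

    public type Sketch = unit {
        magic: b"HI";            # constant bytes literal; checked via expectBytesLiteral
        payload: bytes &size=4;  # length-driven field; extracted via extractBytes
    };

Running spicyc -p on a unit like this should show the corresponding spicy_rt calls in the emitted code, analogous to what the greps above verify for the actual test input.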