Rename scanner-common::capture_token to capture and remove old `capture`

Fixes #200.
foonathan committed May 21, 2024
1 parent e8eb4d6 commit 1e5d99f
Showing 4 changed files with 42 additions and 20 deletions.
4 changes: 3 additions & 1 deletion CHANGELOG.adoc
@@ -4,12 +4,14 @@

=== Potential breaking changes

* `scanner-common::capture_token` was renamed to `scanner-common::capture`, and the old `scanner-common::capture` was removed.
Previously, calling `capture_token` resulted in a linker error anyway, but if you're calling `scanner-common::capture`, it no longer works for arbitrary rules and instead only behaves like `dsl::capture`.
* `lexy::parse_as_tree` will add a position token to production nodes that would otherwise be empty.
That way, no production node will be empty, unless the builder API is used directly.
* Change `lexy::dsl::try_()` error recovery behavior:
It will now skip whitespace after the (optional) error recovery rule.
* Deprecate the `lexy::parse_tree::builder::finish()` overload that does not take a `remaining_input`.
* The typo `lexy::code_point::spaing_mark` was fixed to `lexy::code_point::spacing_mark`.
* The typo `lexy::code_point::spaing_mark` was fixed to `spacing_mark`.

=== New Features

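To make the first breaking change above concrete, here is a minimal migration sketch. It is not part of this commit: the input, the non-token sequence rule, and the use of `lexy::noop` as the error callback are illustrative assumptions. `capture_token` calls simply become `capture`; captures of arbitrary (non-token) rules now have to record positions manually or go through a production and `dsl::p`.

[source,cpp]
----
#include <lexy/action/scan.hpp>        // lexy::scan
#include <lexy/callback.hpp>           // lexy::noop (assumed as error callback here)
#include <lexy/dsl.hpp>                // LEXY_LIT, dsl::digits, dsl::eof
#include <lexy/input/string_input.hpp> // lexy::zstring_input
#include <lexy/lexeme.hpp>             // lexy::lexeme_for

void migrate()
{
    auto input   = lexy::zstring_input("abc123");
    auto scanner = lexy::scan(input, lexy::noop);

    // Before: scanner.capture_token(LEXY_LIT("abc"))
    // After:  the renamed overload, which (like dsl::capture) takes a token rule.
    auto abc = scanner.capture(LEXY_LIT("abc"));

    // Before: scanner.capture() also accepted arbitrary rules such as this sequence.
    // After:  record the positions around parse() yourself.
    auto begin  = scanner.position();
    scanner.parse(lexy::dsl::digits<> + lexy::dsl::eof);
    auto end    = scanner.position();
    auto digits = lexy::lexeme_for<decltype(input)>(begin, end);

    (void)abc, (void)digits;
}
----

Alternatively, wrapping the rule in its own production and capturing `dsl::p<that_production>` keeps a single `capture()` call; that is the second new overload exercised by the test below.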
4 changes: 1 addition & 3 deletions docs/content/reference/action/scan.adoc
@@ -136,10 +136,8 @@ namespace lexy
        template <typename T>
        constexpr scan_result<T> integer(_ndigits-dsl_ digits);
        // Forwards to `parse(rule)` (without value!) and then returns a lexeme.
        // Forwards to `parse(result, dsl::capture(rule))`.
        constexpr scan_result<lexeme<Reader>> capture(_rule_ auto rule);
        // Forwards to `parse(result, dsl::capture_token(rule))`.
        constexpr scan_result<lexeme<Reader>> capture_token(_token-rule_ auto rule);
    };
}
----
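The documentation change above says the remaining `capture()` overload forwards to `parse(result, dsl::capture(rule))`. The following sketch (again not part of the commit; it assumes `lexy::noop` as the error callback and that `lexy::lexeme_for<decltype(input)>` matches the scanner's lexeme type) spells out that equivalence from the caller's side:

[source,cpp]
----
#include <lexy/action/scan.hpp>
#include <lexy/callback.hpp>
#include <lexy/dsl.hpp>
#include <lexy/input/string_input.hpp>
#include <lexy/lexeme.hpp>

void capture_equivalence()
{
    auto input   = lexy::zstring_input("abcabc");
    auto scanner = lexy::scan(input, lexy::noop);

    // The renamed member function: captures the first "abc" as a lexeme.
    auto direct = scanner.capture(LEXY_LIT("abc"));

    // What the documentation describes it as forwarding to: captures the second "abc".
    lexy::scan_result<lexy::lexeme_for<decltype(input)>> forwarded;
    scanner.parse(forwarded, lexy::dsl::capture(LEXY_LIT("abc")));

    (void)direct, (void)forwarded;
}
----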
27 changes: 11 additions & 16 deletions include/lexy/dsl/scan.hpp
@@ -18,7 +18,9 @@ struct _prd;
template <typename Rule, typename Tag>
struct _peek;
template <typename Token>
struct _capt;
struct _cap;
template <typename Rule>
struct _capr;
template <typename T, typename Base>
struct _int_dsl;

@@ -379,25 +381,18 @@ class scanner
        return result;
    }

    template <typename Rule>
    constexpr auto capture(Rule rule) -> scan_result<lexeme<Reader>>
    template <typename Token>
    constexpr auto capture(Token)
    {
        static_assert(lexy::is_rule<Rule>);

        auto begin = _reader.position();
        parse(rule);
        auto end = _reader.position();

        if (*this)
            return lexeme<Reader>(begin, end);
        else
            return scan_failed;
        scan_result<lexeme<Reader>> result;
        parse(result, lexyd::_cap<Token>{});
        return result;
    }
    template <typename Token>
    constexpr auto capture_token(Token)
    template <typename Production>
    constexpr auto capture(lexyd::_prd<Production>)
    {
        scan_result<lexeme<Reader>> result;
        parse(result, lexyd::_capt<Token>{});
        parse(result, lexyd::_capr<lexyd::_prd<Production>>{});
        return result;
    }

27 changes: 27 additions & 0 deletions tests/lexy/action/scan.cpp
@@ -26,6 +26,11 @@ struct production
        = lexy::callback<int>([](auto lex) { return static_cast<int>(lex.size()); });
};

struct token_production : lexy::token_production
{
    static constexpr auto rule = LEXY_LIT("abc");
};

struct control_production
{
    static constexpr auto whitespace = LEXY_LIT(" ");
@@ -388,5 +393,27 @@ TEST_CASE("lexy::scan")
        CHECK(scanner);
        check_position(scanner, true, input.data() + 7);
    }

    SUBCASE("capture")
    {
        auto input = lexy::zstring_input("abcabc");
        auto scanner = lexy::scan(input, errors);
        CHECK(scanner);
        check_position(scanner, false, input.data());

        auto lexeme = scanner.capture(LEXY_LIT("abc"));
        CHECK(scanner);
        check_position(scanner, false, input.data() + 3);
        CHECK(lexeme);
        CHECK(lexeme.value().begin() == input.data());
        CHECK(lexeme.value().end() == input.data() + 3);

        lexeme = scanner.capture(lexy::dsl::p<token_production>);
        CHECK(scanner);
        check_position(scanner, true, input.data() + 6);
        CHECK(lexeme);
        CHECK(lexeme.value().begin() == input.data() + 3);
        CHECK(lexeme.value().end() == input.data() + 6);
    }
}
