[chttp2] Fix a bug in hpack error handling
PiperOrigin-RevId: 657234128
PiperOrigin-RevId: 658458047
ctiller committed Aug 1, 2024
1 parent 4af7859 commit de60c42
Showing 5 changed files with 181 additions and 41 deletions.
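Reviewer's note (not part of the commit): the core of this change is that the parser previously tracked a single HpackParseResult shared by the whole frame, so an error in one header field (for example a bad base64 binary value) stayed set and tainted the parsing and table mementos of every later field in the frame. The diff splits that into a sticky frame_error plus a per-field field_error that is cleared at the top of each field. A minimal standalone sketch of that split, with simplified stand-in types (ParseResult, ParseErrors, BeginField, Record, Merge are hypothetical names, not the gRPC classes):

struct ParseResult {
  bool ok() const { return code == 0; }
  bool connection_error() const { return code == 2; }
  int code = 0;  // 0 = ok, 1 = stream error, 2 = connection error
};

struct ParseErrors {
  ParseResult frame_error;  // sticky for the whole HEADERS frame
  ParseResult field_error;  // reset before each header field

  void BeginField() { field_error = ParseResult(); }

  void Record(const ParseResult& err) {
    Merge(frame_error, err);
    Merge(field_error, err);
  }

 private:
  // First error wins, except that connection errors dominate stream errors.
  static void Merge(ParseResult& slot, const ParseResult& err) {
    if (slot.ok() || (err.connection_error() && !slot.connection_error())) {
      slot = err;
    }
  }
};

int main() {
  ParseErrors e;
  ParseResult stream_err;
  stream_err.code = 1;
  e.BeginField();
  e.Record(stream_err);  // a stream-level error while parsing one field
  e.BeginField();        // the next field starts clean again
  // Expected: frame_error still records the error, field_error is clean.
  return (e.frame_error.ok() || !e.field_error.ok()) ? 1 : 0;
}

The actual precedence logic in the diff (SetErrorFor) also refuses to overwrite an error while an EOF retry is pending; the sketch omits that detail.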
67 changes: 39 additions & 28 deletions src/core/ext/transport/chttp2/transport/hpack_parser.cc
@@ -91,12 +91,14 @@ constexpr Base64InverseTable kBase64InverseTable;
class HPackParser::Input {
public:
Input(grpc_slice_refcount* current_slice_refcount, const uint8_t* begin,
const uint8_t* end, HpackParseResult& error)
const uint8_t* end, HpackParseResult& frame_error,
HpackParseResult& field_error)
: current_slice_refcount_(current_slice_refcount),
begin_(begin),
end_(end),
frontier_(begin),
error_(error) {}
frame_error_(frame_error),
field_error_(field_error) {}

// If input is backed by a slice, retrieve its refcount. If not, return
// nullptr.
@@ -214,14 +216,18 @@ class HPackParser::Input {

// Check if we saw an EOF
bool eof_error() const {
return min_progress_size_ != 0 || error_.connection_error();
return min_progress_size_ != 0 || frame_error_.connection_error();
}

// Reset the field error to be ok
void ClearFieldError() {
if (field_error_.ok()) return;
field_error_ = HpackParseResult();
}

// Minimum number of bytes to unstuck the current parse
size_t min_progress_size() const { return min_progress_size_; }

bool has_error() const { return !error_.ok(); }

// Set the current error - tweaks the error to include a stream id so that
// chttp2 does not close the connection.
// Intended for errors that are specific to a stream and recoverable.
@@ -245,10 +251,7 @@
// read prior to being able to get further in this parse.
void UnexpectedEOF(size_t min_progress_size) {
GPR_ASSERT(min_progress_size > 0);
if (min_progress_size_ != 0 || error_.connection_error()) {
GPR_DEBUG_ASSERT(eof_error());
return;
}
if (eof_error()) return;
// Set min progress size, taking into account bytes parsed already but not
// consumed.
min_progress_size_ = min_progress_size + (begin_ - frontier_);
@@ -298,13 +301,18 @@ class HPackParser::Input {
// Do not use this directly, instead use SetErrorAndContinueParsing or
// SetErrorAndStopParsing.
void SetError(HpackParseResult error) {
if (!error_.ok() || min_progress_size_ > 0) {
if (error.connection_error() && !error_.connection_error()) {
error_ = std::move(error); // connection errors dominate
SetErrorFor(frame_error_, error);
SetErrorFor(field_error_, std::move(error));
}

void SetErrorFor(HpackParseResult& error, HpackParseResult new_error) {
if (!error.ok() || min_progress_size_ > 0) {
if (new_error.connection_error() && !error.connection_error()) {
error = std::move(new_error); // connection errors dominate
}
return;
}
error_ = std::move(error);
error = std::move(new_error);
}

// Refcount if we are backed by a slice
@@ -316,7 +324,8 @@
// Frontier denotes the first byte past successfully processed input
const uint8_t* frontier_;
// Current error
HpackParseResult& error_;
HpackParseResult& frame_error_;
HpackParseResult& field_error_;
// If the error was EOF, we flag it here by noting how many more bytes would
// be needed to make progress
size_t min_progress_size_ = 0;
@@ -591,6 +600,7 @@ class HPackParser::Parser {
bool ParseTop() {
GPR_DEBUG_ASSERT(state_.parse_state == ParseState::kTop);
auto cur = *input_->Next();
input_->ClearFieldError();
switch (cur >> 4) {
// Literal header not indexed - First byte format: 0000xxxx
// Literal header never indexed - First byte format: 0001xxxx
@@ -696,7 +706,7 @@
break;
}
gpr_log(
GPR_DEBUG, "HTTP:%d:%s:%s: %s%s", log_info_.stream_id, type,
GPR_INFO, "HTTP:%d:%s:%s: %s%s", log_info_.stream_id, type,
log_info_.is_client ? "CLI" : "SVR", memento.md.DebugString().c_str(),
memento.parse_status == nullptr
? ""
@@ -945,11 +955,10 @@ class HPackParser::Parser {
state_.string_length)
: String::Parse(input_, state_.is_string_huff_compressed,
state_.string_length);
HpackParseResult& status = state_.frame_error;
absl::string_view key_string;
if (auto* s = absl::get_if<Slice>(&state_.key)) {
key_string = s->as_string_view();
if (status.ok()) {
if (state_.field_error.ok()) {
auto r = ValidateKey(key_string);
if (r != ValidateMetadataResult::kOk) {
input_->SetErrorAndContinueParsing(
@@ -959,7 +968,7 @@
} else {
const auto* memento = absl::get<const HPackTable::Memento*>(state_.key);
key_string = memento->md.key();
if (status.ok() && memento->parse_status != nullptr) {
if (state_.field_error.ok() && memento->parse_status != nullptr) {
input_->SetErrorAndContinueParsing(*memento->parse_status);
}
}
@@ -986,16 +995,16 @@
key_string.size() + value.wire_size + hpack_constants::kEntryOverhead;
auto md = grpc_metadata_batch::Parse(
key_string, std::move(value_slice), state_.add_to_table, transport_size,
[key_string, &status, this](absl::string_view message, const Slice&) {
if (!status.ok()) return;
[key_string, this](absl::string_view message, const Slice&) {
if (!state_.field_error.ok()) return;
input_->SetErrorAndContinueParsing(
HpackParseResult::MetadataParseError(key_string));
gpr_log(GPR_ERROR, "Error parsing '%s' metadata: %s",
std::string(key_string).c_str(),
std::string(message).c_str());
});
HPackTable::Memento memento{std::move(md),
status.PersistentStreamErrorOrNullptr()};
HPackTable::Memento memento{
std::move(md), state_.field_error.PersistentStreamErrorOrNullptr()};
input_->UpdateFrontier();
state_.parse_state = ParseState::kTop;
if (state_.add_to_table) {
@@ -1155,13 +1164,15 @@ grpc_error_handle HPackParser::Parse(
return absl::OkStatus();
}
std::vector<uint8_t> buffer = std::move(unparsed_bytes_);
return ParseInput(Input(nullptr, buffer.data(),
buffer.data() + buffer.size(), state_.frame_error),
is_last, call_tracer);
return ParseInput(
Input(nullptr, buffer.data(), buffer.data() + buffer.size(),
state_.frame_error, state_.field_error),
is_last, call_tracer);
}
return ParseInput(Input(slice.refcount, GRPC_SLICE_START_PTR(slice),
GRPC_SLICE_END_PTR(slice), state_.frame_error),
is_last, call_tracer);
return ParseInput(
Input(slice.refcount, GRPC_SLICE_START_PTR(slice),
GRPC_SLICE_END_PTR(slice), state_.frame_error, state_.field_error),
is_last, call_tracer);
}

grpc_error_handle HPackParser::ParseInput(
2 changes: 2 additions & 0 deletions src/core/ext/transport/chttp2/transport/hpack_parser.h
@@ -234,6 +234,8 @@ class HPackParser {
HPackTable hpack_table;
// Error so far for this frame (set by class Input)
HpackParseResult frame_error;
// Error so far for this field (set by class Input)
HpackParseResult field_error;
// Length of frame so far.
uint32_t frame_length = 0;
// Length of the string being parsed
89 changes: 76 additions & 13 deletions test/core/transport/chttp2/hpack_parser_test.cc
@@ -437,19 +437,82 @@ INSTANTIATE_TEST_SUITE_P(
Test{"Base64LegalEncoding",
{},
{},
{// Binary metadata: created using:
// tools/codegen/core/gen_header_frame.py
// --compression inc --no_framing --output hexstr
// < test/core/transport/chttp2/bad-base64.headers
{"4009612e622e632d62696e1c6c75636b696c7920666f722075732c206974"
"27732074756573646179",
absl::InternalError("Error parsing 'a.b.c-bin' metadata: "
"illegal base64 encoding"),
0},
{"be",
absl::InternalError("Error parsing 'a.b.c-bin' metadata: "
"illegal base64 encoding"),
0}}},
{
// Binary metadata: created using:
// tools/codegen/core/gen_header_frame.py
// --compression inc --no_framing --output hexstr
// < test/core/transport/chttp2/bad-base64.headers
{"4009612e622e632d62696e1c6c75636b696c7920666f722075732c206974"
"27732074756573646179",
absl::InternalError("Error parsing 'a.b.c-bin' metadata: "
"illegal base64 encoding"),
0},
{"be",
absl::InternalError("Error parsing 'a.b.c-bin' metadata: "
"illegal base64 encoding"),
kEndOfHeaders},
{"82", ":method: GET\n", 0},
}},
Test{"Base64LegalEncodingWorksAfterFailure",
{},
{},
{
// Binary metadata: created using:
// tools/codegen/core/gen_header_frame.py
// --compression inc --no_framing --output hexstr
// < test/core/transport/chttp2/bad-base64.headers
{"4009612e622e632d62696e1c6c75636b696c7920666f722075732c206974"
"27732074756573646179",
absl::InternalError("Error parsing 'a.b.c-bin' metadata: "
"illegal base64 encoding"),
0},
{"be",
absl::InternalError("Error parsing 'a.b.c-bin' metadata: "
"illegal base64 encoding"),
0},
{"400e636f6e74656e742d6c656e6774680135",
absl::InternalError("Error parsing 'a.b.c-bin' metadata: "
"illegal base64 encoding"),
kEndOfHeaders},
{"be", "content-length: 5\n", 0},
}},
Test{"Base64LegalEncodingWorksAfterFailure2",
{},
{},
{
{// Generated with: tools/codegen/core/gen_header_frame.py
// --compression inc --output hexstr --no_framing <
// test/core/transport/chttp2/MiXeD-CaSe.headers
"400a4d695865442d436153651073686f756c64206e6f74207061727365",
absl::InternalError("Illegal header key: MiXeD-CaSe"), 0},
// Binary metadata: created using:
// tools/codegen/core/gen_header_frame.py
// --compression inc --no_framing --output hexstr
// < test/core/transport/chttp2/bad-base64.headers
{"4009612e622e632d62696e1c6c75636b696c7920666f722075732c206974"
"27732074756573646179",
absl::InternalError("Illegal header key: MiXeD-CaSe"), 0},
{"be", absl::InternalError("Illegal header key: MiXeD-CaSe"),
0},
{"400e636f6e74656e742d6c656e6774680135",
absl::InternalError("Illegal header key: MiXeD-CaSe"),
kEndOfHeaders},
{"be", "content-length: 5\n", 0},
{"bf",
absl::InternalError("Error parsing 'a.b.c-bin' metadata: "
"illegal base64 encoding"),
0},
// Only the first error in each frame is reported, so we should
// still see the same error here...
{"c0",
absl::InternalError("Error parsing 'a.b.c-bin' metadata: "
"illegal base64 encoding"),
kEndOfHeaders},
// ... but if we look at the next frame we should see the
// stored error
{"c0", absl::InternalError("Illegal header key: MiXeD-CaSe"),
kEndOfHeaders},
}},
Test{"TeIsTrailers",
{},
{},
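Reviewer's note (not part of the commit): the single-byte test inputs above ("82", "be", "bf", "c0") are HPACK indexed header fields per RFC 7541 §6.1. The static table has 61 entries, so index 62 is the most recently added dynamic-table entry, which is why "be" re-resolves to whichever header was last added with incremental indexing. A minimal standalone decoder sketch for these one-byte cases:

#include <cstdint>
#include <cstdio>

// Decode one-byte HPACK "indexed header field" representations
// (RFC 7541 §6.1): 1xxxxxxx, where the low 7 bits are the table index.
// Indices 1-61 hit the static table; 62 and up hit the dynamic table,
// newest entry first.
int main() {
  const uint8_t inputs[] = {0x82, 0xbe, 0xbf, 0xc0};
  for (uint8_t b : inputs) {
    // A 7-bit prefix value of 127 would need continuation bytes; these don't.
    if ((b & 0x80) != 0 && (b & 0x7f) != 0x7f) {
      printf("0x%02x -> indexed header field, index %u\n", b, b & 0x7fu);
    }
  }
  // 0x82 -> index 2 (static table, ":method: GET");
  // 0xbe / 0xbf / 0xc0 -> dynamic table indices 62 / 63 / 64.
  return 0;
}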
61 changes: 61 additions & 0 deletions test/core/transport/chttp2/hpack_sync_fuzzer.cc
@@ -82,6 +82,10 @@ void FuzzOneInput(const hpack_sync_fuzzer::Msg& msg) {
// Not an interesting case to fuzz
continue;
}
if (msg.check_ab_preservation() &&
header.literal_inc_idx().key() == "a") {
continue;
}
if (absl::EndsWith(header.literal_inc_idx().value(), "-bin")) {
std::ignore = encoder.EmitLitHdrWithBinaryStringKeyIncIdx(
Slice::FromCopiedString(header.literal_inc_idx().key()),
@@ -93,6 +97,10 @@
}
break;
case hpack_sync_fuzzer::Header::kLiteralNotIdx:
if (msg.check_ab_preservation() &&
header.literal_not_idx().key() == "a") {
continue;
}
if (absl::EndsWith(header.literal_not_idx().value(), "-bin")) {
encoder.EmitLitHdrWithBinaryStringKeyNotIdx(
Slice::FromCopiedString(header.literal_not_idx().key()),
@@ -111,6 +119,10 @@
break;
}
}
if (msg.check_ab_preservation()) {
std::ignore = encoder.EmitLitHdrWithNonBinaryStringKeyIncIdx(
Slice::FromCopiedString("a"), Slice::FromCopiedString("b"));
}

// STAGE 2: Decode the buffer (encode_output) into a list of headers
HPackParser parser;
@@ -137,6 +149,21 @@
}
}

if (seen_errors.empty() && msg.check_ab_preservation()) {
std::string backing;
auto a_value = read_metadata.GetStringValue("a", &backing);
if (!a_value.has_value()) {
fprintf(stderr, "Expected 'a' header to be present: %s\n",
read_metadata.DebugString().c_str());
abort();
}
if (a_value != "b") {
fprintf(stderr, "Expected 'a' header to be 'b', got '%s'\n",
std::string(*a_value).c_str());
abort();
}
}

// STAGE 3: If we reached here we either had a stream error or no error
// parsing.
// Either way, the hpack tables should be of the same size between client and
@@ -165,6 +192,40 @@ void FuzzOneInput(const hpack_sync_fuzzer::Msg& msg) {
}
abort();
}

if (msg.check_ab_preservation()) {
SliceBuffer encode_output_2;
hpack_encoder_detail::Encoder encoder_2(
&compressor, msg.use_true_binary_metadata(), encode_output_2);
encoder_2.EmitIndexed(62);
CHECK_EQ(encode_output_2.Count(), 1);
grpc_metadata_batch read_metadata_2;
parser.BeginFrame(
&read_metadata_2, 1024, 1024, HPackParser::Boundary::EndOfHeaders,
HPackParser::Priority::None,
HPackParser::LogInfo{3, HPackParser::LogInfo::kHeaders, false});
auto err = parser.Parse(encode_output_2.c_slice_at(0), true,
/*call_tracer=*/nullptr);
if (!err.ok()) {
fprintf(stderr, "Error parsing preservation encoded data: %s\n",
err.ToString().c_str());
abort();
}
std::string backing;
auto a_value = read_metadata_2.GetStringValue("a", &backing);
if (!a_value.has_value()) {
fprintf(stderr,
"Expected 'a' header to be present: %s\nfirst metadata: %s\n",
read_metadata_2.DebugString().c_str(),
read_metadata.DebugString().c_str());
abort();
}
if (a_value != "b") {
fprintf(stderr, "Expected 'a' header to be 'b', got '%s'\n",
std::string(*a_value).c_str());
abort();
}
}
}

} // namespace
3 changes: 3 additions & 0 deletions test/core/transport/chttp2/hpack_sync_fuzzer.proto
@@ -43,4 +43,7 @@ message Msg {
bool use_true_binary_metadata = 1;
repeated Header headers = 2;
grpc.testing.FuzzConfigVars config_vars = 3;
// Ensure that a header "a: b" appended to headers with hpack incremental
// indexing is correctly added to the hpack table.
bool check_ab_preservation = 5;
}
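Reviewer's note (not part of the commit): the "a: b" header the fuzzer appends when check_ab_preservation is set is emitted as a literal header field with incremental indexing (RFC 7541 §6.2.1), which is what places it in the dynamic table at index 62 for the later EmitIndexed(62) round-trip. A small standalone sketch of that wire form, assuming short, non-Huffman literals (EncodeLiteralIncIdx is a hypothetical helper, not the gRPC encoder):

#include <cstdio>
#include <string>

// Literal header field with incremental indexing and a new name
// (RFC 7541 §6.2.1), for keys/values short enough to use one-byte lengths
// with the Huffman bit clear.
std::string EncodeLiteralIncIdx(const std::string& key, const std::string& value) {
  std::string out;
  out.push_back(static_cast<char>(0x40));        // 01000000: inc. indexing, new name
  out.push_back(static_cast<char>(key.size()));  // 7-bit length, no Huffman
  out += key;
  out.push_back(static_cast<char>(value.size()));
  out += value;
  return out;
}

int main() {
  for (unsigned char c : EncodeLiteralIncIdx("a", "b")) {
    printf("%02x", static_cast<unsigned>(c));
  }
  printf("\n");  // prints 4001610162; the entry then occupies dynamic index 62
  return 0;
}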
