Omit the outer has-bit guard for chunks of likely-present fields.

Adds an IsLikelyPresent() helper next to IsRarelyPresent() (stubbed to
return false here) and uses it in the chunk emission logic in
message.cc:

- Fields with different IsLikelyPresent() values no longer share a
  chunk.
- The outer `if (cached_has_bits & chunk_mask)` guard around a chunk is
  only emitted when the chunk's fields are not likely present; for
  likely-present chunks the guard would rarely skip anything.
- `have_outer_if` is renamed to `check_has_byte` to match its new
  meaning.

Because the stub always returns false, open-source code generation is
unchanged by this change.
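For illustration only (not part of this change): a simplified sketch of
the intended effect on the shape of the generated clearing code for a
two-field chunk. The Impl struct, field names, and masks below are
hypothetical.

  #include <cstdint>
  #include <string>

  struct Impl {
    std::string bar_;
    std::int32_t foo_ = 0;
  };

  // Before: the whole chunk sits behind one test of its has bits, so a
  // fully-unset chunk is skipped with a single branch.
  void ClearChunkGuarded(Impl& impl, std::uint32_t cached_has_bits) {
    if (cached_has_bits & 0x00000003u) {
      if (cached_has_bits & 0x00000001u) impl.foo_ = 0;
      if (cached_has_bits & 0x00000002u) impl.bar_.clear();
    }
  }

  // After, for a chunk whose fields the profile marks as likely
  // present: the outer guard would rarely skip anything, so it is not
  // emitted.
  void ClearChunkUnguarded(Impl& impl, std::uint32_t cached_has_bits) {
    if (cached_has_bits & 0x00000001u) impl.foo_ = 0;
    if (cached_has_bits & 0x00000002u) impl.bar_.clear();
  }

Checking only fields.back() in the new conditions is enough because the
updated chunking predicates keep likely-present fields and other fields
in separate chunks, so every field in a chunk has the same
IsLikelyPresent() value.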
PiperOrigin-RevId: 559856819
diff --git a/src/google/protobuf/compiler/cpp/helpers.cc b/src/google/protobuf/compiler/cpp/helpers.cc
index 8d49e30..eb22167 100644
--- a/src/google/protobuf/compiler/cpp/helpers.cc
+++ b/src/google/protobuf/compiler/cpp/helpers.cc
@@ -931,6 +931,10 @@
return false;
}
+bool IsLikelyPresent(const FieldDescriptor* field, const Options& options) {
+ return false;
+}
+
float GetPresenceProbability(const FieldDescriptor* field,
const Options& options) {
return 1.f;
diff --git a/src/google/protobuf/compiler/cpp/helpers.h b/src/google/protobuf/compiler/cpp/helpers.h
index 64ac79c..58283f6 100644
--- a/src/google/protobuf/compiler/cpp/helpers.h
+++ b/src/google/protobuf/compiler/cpp/helpers.h
@@ -370,6 +370,9 @@
// Returns true if `field` is unlikely to be present based on PDProto profile.
bool IsRarelyPresent(const FieldDescriptor* field, const Options& options);
+// Returns true if `field` is likely to be present based on PDProto profile.
+bool IsLikelyPresent(const FieldDescriptor* field, const Options& options);
+
float GetPresenceProbability(const FieldDescriptor* field,
const Options& options);
diff --git a/src/google/protobuf/compiler/cpp/message.cc b/src/google/protobuf/compiler/cpp/message.cc
index eeace61..95bed91 100644
--- a/src/google/protobuf/compiler/cpp/message.cc
+++ b/src/google/protobuf/compiler/cpp/message.cc
@@ -3600,6 +3600,7 @@
bool same =
HasByteIndex(a) == HasByteIndex(b) &&
a->is_repeated() == b->is_repeated() &&
+ IsLikelyPresent(a, options_) == IsLikelyPresent(b, options_) &&
ShouldSplit(a, options_) == ShouldSplit(b, options_) &&
(CanClearByZeroing(a) == CanClearByZeroing(b) ||
(CanClearByZeroing(a) && (chunk_count == 1 || merge_zero_init)));
@@ -3646,11 +3647,12 @@
// We can omit the if() for chunk size 1, or if our fields do not have
// hasbits. I don't understand the rationale for the last part of the
// condition, but it matches the old logic.
- const bool have_outer_if =
+ const bool check_has_byte =
HasBitIndex(fields.front()) != kNoHasbit && fields.size() > 1 &&
+ !IsLikelyPresent(fields.back(), options_) &&
(memset_end != fields.back() || merge_zero_init);
- if (have_outer_if) {
+ if (check_has_byte) {
// Emit an if() that will let us skip the whole chunk if none are set.
uint32_t chunk_mask = GenChunkMask(fields, has_bit_indices_);
std::string chunk_mask_str =
@@ -3712,7 +3714,7 @@
}
}
- if (have_outer_if) {
+ if (check_has_byte) {
format.Outdent();
format("}\n");
}
@@ -3996,6 +3998,7 @@
optimized_order_, options_,
[&](const FieldDescriptor* a, const FieldDescriptor* b) -> bool {
return HasByteIndex(a) == HasByteIndex(b) &&
+ IsLikelyPresent(a, options_) == IsLikelyPresent(b, options_) &&
ShouldSplit(a, options_) == ShouldSplit(b, options_);
});
@@ -4013,10 +4016,11 @@
while (it != next) {
const std::vector<const FieldDescriptor*>& fields = it->fields;
- const bool have_outer_if =
- fields.size() > 1 && HasByteIndex(fields.front()) != kNoHasbit;
+ const bool check_has_byte = fields.size() > 1 &&
+ HasByteIndex(fields.front()) != kNoHasbit &&
+ !IsLikelyPresent(fields.back(), options_);
- if (have_outer_if) {
+ if (check_has_byte) {
// Emit an if() that will let us skip the whole chunk if none are set.
uint32_t chunk_mask = GenChunkMask(fields, has_bit_indices_);
std::string chunk_mask_str =
@@ -4076,7 +4080,7 @@
format("if (cached_has_bits & 0x$1$u) {\n", mask);
format.Indent();
- if (have_outer_if && IsPOD(field)) {
+ if (check_has_byte && IsPOD(field)) {
// Defer hasbit modification until the end of chunk.
// This can reduce the number of loads/stores by up to 7 per 8
// fields.
@@ -4091,7 +4095,7 @@
}
}
- if (have_outer_if) {
+ if (check_has_byte) {
if (deferred_has_bit_changes) {
// Flush the has bits for the primitives we deferred.
ABSL_CHECK_LE(0, cached_has_word_index);
@@ -4743,6 +4747,7 @@
optimized_order_, options_,
[&](const FieldDescriptor* a, const FieldDescriptor* b) -> bool {
return a->label() == b->label() && HasByteIndex(a) == HasByteIndex(b) &&
+ IsLikelyPresent(a, options_) == IsLikelyPresent(b, options_) &&
ShouldSplit(a, options_) == ShouldSplit(b, options_);
});
@@ -4762,10 +4767,11 @@
while (it != next) {
const std::vector<const FieldDescriptor*>& fields = it->fields;
- const bool have_outer_if =
- fields.size() > 1 && HasWordIndex(fields[0]) != kNoHasbit;
+ const bool check_has_byte = fields.size() > 1 &&
+ HasWordIndex(fields[0]) != kNoHasbit &&
+ !IsLikelyPresent(fields.back(), options_);
- if (have_outer_if) {
+ if (check_has_byte) {
// Emit an if() that will let us skip the whole chunk if none are set.
uint32_t chunk_mask = GenChunkMask(fields, has_bit_indices_);
std::string chunk_mask_str =
@@ -4815,7 +4821,7 @@
}
}
- if (have_outer_if) {
+ if (check_has_byte) {
format.Outdent();
format("}\n");
}