Refactor `MapFieldBase::ReflectionPayload` to be a class.
This abstracts away the representation of the class's members and provides access to them via accessors/mutators. In a future CL we will have to wrap the `RepeatedPtrField` member of this class; having an accessor for the field, rather than exposing it directly, contains the blast radius of that change to just this class.
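
As a rough before/after sketch of how a typical call site changes under the new accessors (illustrative only, pieced together from the hunks below, with `p` standing for a `ReflectionPayload*`):

  // Before: callers reach into the payload's members directly.
  absl::MutexLock lock(&p->mutex);
  if (p->state.load(std::memory_order_relaxed) == STATE_MODIFIED_MAP) {
    p->repeated_field.Clear();
    p->state.store(CLEAN, std::memory_order_release);
  }

  // After: the same logic goes through accessors/mutators, so the
  // representation of repeated_field_ and state_ can change later
  // without touching code outside ReflectionPayload.
  absl::MutexLock lock(&p->mutex());
  if (p->load_state_relaxed() == STATE_MODIFIED_MAP) {
    p->repeated_field().Clear();
    p->set_state_release(CLEAN);
  }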
PiperOrigin-RevId: 810699795
diff --git a/src/google/protobuf/map_field.cc b/src/google/protobuf/map_field.cc
index 179a6b2..12ef805 100644
--- a/src/google/protobuf/map_field.cc
+++ b/src/google/protobuf/map_field.cc
@@ -238,8 +238,7 @@
if (p1 == nullptr) p1 = &lhs.payload();
if (p2 == nullptr) p2 = &rhs.payload();
- p1->repeated_field.Swap(&p2->repeated_field);
- SwapRelaxed(p1->state, p2->state);
+ p1->Swap(*p2);
}
void MapFieldBase::InternalSwap(MapFieldBase* other) {
@@ -251,11 +250,11 @@
ConstAccess();
size_t size = 0;
if (auto* p = maybe_payload()) {
- absl::MutexLock lock(&p->mutex);
+ absl::MutexLock lock(&p->mutex());
// Measure the map under the lock, because there could be some repeated
// field data that might be sync'd back into the map.
size = GetMapRaw().SpaceUsedExcludingSelfLong();
- size += p->repeated_field.SpaceUsedExcludingSelfLong();
+ size += p->repeated_field().SpaceUsedExcludingSelfLong();
ConstAccess();
} else {
// Only measure the map without the repeated field, because it is not there.
@@ -281,7 +280,7 @@
MutableAccess();
// These are called by (non-const) mutator functions. So by our API it's the
// callers responsibility to have these calls properly ordered.
- payload().state.store(STATE_MODIFIED_REPEATED, std::memory_order_relaxed);
+ payload().set_state_relaxed(STATE_MODIFIED_REPEATED);
}
const RepeatedPtrFieldBase& MapFieldBase::SyncRepeatedFieldWithMap(
@@ -301,19 +300,18 @@
}
{
- absl::MutexLock lock(&p->mutex);
+ absl::MutexLock lock(&p->mutex());
// Double check state, because another thread may have seen the same
// state and done the synchronization before the current thread.
- if (p->state.load(std::memory_order_relaxed) == STATE_MODIFIED_MAP) {
+ if (p->load_state_relaxed() == STATE_MODIFIED_MAP) {
const_cast<MapFieldBase*>(this)->SyncRepeatedFieldWithMapNoLock();
- p->state.store(CLEAN, std::memory_order_release);
+ p->set_state_release(CLEAN);
}
}
ConstAccess();
- return reinterpret_cast<const RepeatedPtrFieldBase&>(p->repeated_field);
+ return static_cast<const RepeatedPtrFieldBase&>(p->repeated_field());
}
- return reinterpret_cast<const RepeatedPtrFieldBase&>(
- payload().repeated_field);
+ return static_cast<const RepeatedPtrFieldBase&>(payload().repeated_field());
}
void MapFieldBase::SyncRepeatedFieldWithMapNoLock() {
@@ -323,7 +321,7 @@
const FieldDescriptor* key_des = descriptor->map_key();
const FieldDescriptor* val_des = descriptor->map_value();
- RepeatedPtrField<Message>& rep = payload().repeated_field;
+ RepeatedPtrField<Message>& rep = payload().repeated_field();
rep.Clear();
ConstMapIterator it(this, descriptor);
@@ -407,12 +405,12 @@
if (state() == STATE_MODIFIED_REPEATED) {
auto& p = payload();
{
- absl::MutexLock lock(&p.mutex);
+ absl::MutexLock lock(&p.mutex());
// Double check state, because another thread may have seen the same state
// and done the synchronization before the current thread.
- if (p.state.load(std::memory_order_relaxed) == STATE_MODIFIED_REPEATED) {
+ if (p.load_state_relaxed() == STATE_MODIFIED_REPEATED) {
const_cast<MapFieldBase*>(this)->SyncMapWithRepeatedFieldNoLock();
- p.state.store(CLEAN, std::memory_order_release);
+ p.set_state_release(CLEAN);
}
}
ConstAccess();
@@ -422,7 +420,7 @@
void MapFieldBase::SyncMapWithRepeatedFieldNoLock() {
ClearMapNoSync();
- RepeatedPtrField<Message>& rep = payload().repeated_field;
+ RepeatedPtrField<Message>& rep = payload().repeated_field();
if (rep.empty()) return;
@@ -492,7 +490,7 @@
void MapFieldBase::Clear() {
if (ReflectionPayload* p = maybe_payload()) {
- p->repeated_field.Clear();
+ p->repeated_field().Clear();
}
ClearMapNoSync();
@@ -511,6 +509,11 @@
return InsertOrLookupMapValueNoSync(map_key, val);
}
+void MapFieldBase::ReflectionPayload::Swap(ReflectionPayload& other) {
+ repeated_field().Swap(&other.repeated_field());
+ SwapRelaxed(state_, other.state_);
+}
+
} // namespace internal
template <bool kIsMutable>
diff --git a/src/google/protobuf/map_field.h b/src/google/protobuf/map_field.h
index c976cfb..edb83dc 100644
--- a/src/google/protobuf/map_field.h
+++ b/src/google/protobuf/map_field.h
@@ -389,7 +389,7 @@
// callers responsibility to have these calls properly ordered.
if (auto* p = maybe_payload()) {
// If we don't have a payload, it is already assumed `STATE_MODIFIED_MAP`.
- p->state.store(STATE_MODIFIED_MAP, std::memory_order_relaxed);
+ p->set_state_relaxed(STATE_MODIFIED_MAP);
}
}
@@ -429,18 +429,39 @@
CLEAN = 2, // data in map and repeated field are same
};
- struct ReflectionPayload {
- explicit ReflectionPayload(Arena* arena) : repeated_field(arena) {}
- RepeatedPtrField<Message> repeated_field;
+ class ReflectionPayload {
+ public:
+ explicit ReflectionPayload(Arena* arena) : repeated_field_(arena) {}
- absl::Mutex mutex; // The thread to synchronize map and repeated
- // field needs to get lock first;
- std::atomic<State> state{STATE_MODIFIED_MAP};
+ RepeatedPtrField<Message>& repeated_field() { return repeated_field_; }
+
+ absl::Mutex& mutex() { return mutex_; }
+
+ State load_state_relaxed() const {
+ return state_.load(std::memory_order_relaxed);
+ }
+ State load_state_acquire() const {
+ return state_.load(std::memory_order_acquire);
+ }
+ void set_state_relaxed(State state) {
+ state_.store(state, std::memory_order_relaxed);
+ }
+ void set_state_release(State state) {
+ state_.store(state, std::memory_order_release);
+ }
+
+ void Swap(ReflectionPayload& other);
+
+ private:
+ RepeatedPtrField<Message> repeated_field_;
+ absl::Mutex mutex_; // A thread synchronizing the map and the repeated
+ // field must acquire this lock first.
+ std::atomic<State> state_{STATE_MODIFIED_MAP};
};
Arena* arena() const {
auto p = payload_.load(std::memory_order_acquire);
- if (IsPayload(p)) return ToPayload(p)->repeated_field.GetArena();
+ if (IsPayload(p)) return ToPayload(p)->repeated_field().GetArena();
return ToArena(p);
}
@@ -458,7 +479,7 @@
State state() const {
auto* p = maybe_payload();
- return p != nullptr ? p->state.load(std::memory_order_acquire)
+ return p != nullptr ? p->load_state_acquire()
// The default
: STATE_MODIFIED_MAP;
}
diff --git a/src/google/protobuf/map_field_test.cc b/src/google/protobuf/map_field_test.cc
index 553afe5..076a562 100644
--- a/src/google/protobuf/map_field_test.cc
+++ b/src/google/protobuf/map_field_test.cc
@@ -248,7 +248,7 @@
EXPECT_EQ(repeated_size,
map_field->maybe_payload() == nullptr
? 0
- : map_field->maybe_payload()->repeated_field.size());
+ : map_field->maybe_payload()->repeated_field().size());
}
std::unique_ptr<Arena> arena_;
diff --git a/src/google/protobuf/repeated_ptr_field.h b/src/google/protobuf/repeated_ptr_field.h
index bc2ddaf..f7a4c0e 100644
--- a/src/google/protobuf/repeated_ptr_field.h
+++ b/src/google/protobuf/repeated_ptr_field.h
@@ -64,18 +64,14 @@
class MergePartialFromCodedStreamHelper;
class SwapFieldHelper;
-} // namespace internal
+class MapFieldBase;
-namespace internal {
template <typename It>
class RepeatedPtrIterator;
template <typename It, typename VoidPtr>
class RepeatedPtrOverPtrsIterator;
template <typename T>
class AllocatedRepeatedPtrFieldBackInsertIterator;
-} // namespace internal
-
-namespace internal {
// Swaps two non-overlapping blocks of memory of size `N`
template <size_t N>
@@ -1388,6 +1384,10 @@
template <typename T>
friend struct WeakRepeatedPtrField;
+ // The MapFieldBase implementation needs to be able to static_cast down to
+ // `RepeatedPtrFieldBase`.
+ friend internal::MapFieldBase;
+
// Note: RepeatedPtrField SHOULD NOT be subclassed by users.
using TypeHandler = internal::GenericTypeHandler<Element>;
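
A note on the change from `reinterpret_cast` to `static_cast` in map_field.cc: `RepeatedPtrField` derives non-publicly from `internal::RepeatedPtrFieldBase`, so only a friend can perform the derived-to-base conversion, which is why the friend declaration above is needed. A minimal standalone sketch of that pattern, using hypothetical stand-in names rather than the real protobuf types:

  #include <iostream>

  class Base {
   public:
    int size() const { return 0; }
  };

  // `Field` stands in for RepeatedPtrField, `Base` for RepeatedPtrFieldBase,
  // and `MapLike` for MapFieldBase; the real types live in google::protobuf.
  class Field : Base {      // non-public base class
    friend class MapLike;   // grants MapLike access to the private base
  };

  class MapLike {
   public:
    static const Base& AsBase(const Field& f) {
      // As a friend, MapLike can name the private base, so the well-defined
      // derived-to-base static_cast is allowed here; non-friends would have
      // to fall back on reinterpret_cast, as the old code did.
      return static_cast<const Base&>(f);
    }
  };

  int main() {
    Field f;
    std::cout << MapLike::AsBase(f).size() << '\n';
  }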