path: root/src/google/protobuf/repeated_field.h
Diffstat (limited to 'src/google/protobuf/repeated_field.h')
-rw-r--r--  src/google/protobuf/repeated_field.h  151
1 file changed, 116 insertions(+), 35 deletions(-)
diff --git a/src/google/protobuf/repeated_field.h b/src/google/protobuf/repeated_field.h
index 5530fefe..1961bc48 100644
--- a/src/google/protobuf/repeated_field.h
+++ b/src/google/protobuf/repeated_field.h
@@ -208,10 +208,19 @@ class RepeatedField {
// sizeof(*this)
int SpaceUsedExcludingSelf() const;
- // Remove the element referenced by position.
+ // Removes the element referenced by position.
+ //
+ // Returns an iterator to the element immediately following the removed
+ // element.
+ //
+ // Invalidates all iterators at or after the removed element, including end().
iterator erase(const_iterator position);
- // Remove the elements in the range [first, last).
+ // Removes the elements in the range [first, last).
+ //
+ // Returns an iterator to the element immediately following the removed range.
+ //
+ // Invalidates all iterators at or after the removed range, including end().
iterator erase(const_iterator first, const_iterator last);
// Get the Arena on which this RepeatedField stores its elements.
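
The new comments above pin down the usual erase-while-iterating contract. A minimal caller-side sketch (not part of this header; RepeatedField<int> is just an example element type):

    #include <google/protobuf/repeated_field.h>

    // Removes every zero from the field. Because erase() invalidates all
    // iterators at or after the removed element, including end(), the loop
    // must continue from the iterator that erase() returns.
    void RemoveZeros(google::protobuf::RepeatedField<int>* field) {
      for (google::protobuf::RepeatedField<int>::iterator it = field->begin();
           it != field->end(); ) {
        if (*it == 0) {
          it = field->erase(it);  // now points just past the removed element
        } else {
          ++it;
        }
      }
    }
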
@@ -235,7 +244,7 @@ class RepeatedField {
int total_size_;
struct Rep {
Arena* arena;
- Element elements[1];
+ Element elements[1];
};
// We can not use sizeof(Rep) - sizeof(Element) due to the trailing padding on
// the struct. We can not use sizeof(Arena*) as well because there might be
@@ -263,6 +272,22 @@ class RepeatedField {
inline Arena* GetArenaNoVirtual() const {
return (rep_ == NULL) ? NULL : rep_->arena;
}
+
+ // Internal helper to delete all elements and deallocate the storage.
+ // If Element has a trivial destructor (for example, if it's a fundamental
+ // type, like int32), the loop will be removed by the optimizer.
+ void InternalDeallocate(Rep* rep, int size) {
+ if (rep != NULL) {
+ Element* e = &rep->elements[0];
+ Element* limit = &rep->elements[size];
+ for (; e < limit; e++) {
+ e->Element::~Element();
+ }
+ if (rep->arena == NULL) {
+ delete[] reinterpret_cast<char*>(rep);
+ }
+ }
+ }
};
template<typename Element>
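
InternalDeallocate reflects how this header stores elements: Rep is allocated as raw char storage and elements are placement-constructed into it, so destructors must be invoked explicitly before the raw buffer is freed. A standalone sketch of that pattern, assuming nothing from this header and relying (as the header does) on new char[] providing suitable alignment:

    #include <new>

    template <typename Element>
    Element* AllocateArray(int n) {
      // Raw storage obtained as char[], mirroring how Reserve() allocates Rep.
      char* raw = new char[sizeof(Element) * n];
      Element* data = reinterpret_cast<Element*>(raw);
      for (int i = 0; i < n; ++i) new (data + i) Element();  // placement-new
      return data;
    }

    template <typename Element>
    void DeallocateArray(Element* data, int n) {
      // Explicit destructor calls; when Element has a trivial destructor this
      // loop compiles away, exactly as the comment on InternalDeallocate notes.
      for (int i = 0; i < n; ++i) data[i].~Element();
      delete[] reinterpret_cast<char*>(data);  // free the raw char storage
    }
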
@@ -565,7 +590,7 @@ class GenericTypeHandler {
template <typename GenericType>
GenericType* GenericTypeHandler<GenericType>::NewFromPrototype(
- const GenericType* prototype, ::google::protobuf::Arena* arena) {
+ const GenericType* /* prototype */, ::google::protobuf::Arena* arena) {
return New(arena);
}
template <typename GenericType>
@@ -601,6 +626,13 @@ inline void* GenericTypeHandler<MessageLite>::GetMaybeArenaPointer(
template <>
void GenericTypeHandler<MessageLite>::Merge(const MessageLite& from,
MessageLite* to);
+template<>
+inline void GenericTypeHandler<string>::Clear(string* value) {
+ value->clear();
+}
+template<>
+void GenericTypeHandler<string>::Merge(const string& from,
+ string* to);
// Declarations of the specialization as we cannot define them here, as the
// header that defines ProtocolMessage depends on types defined in this header.
@@ -618,7 +650,7 @@ void GenericTypeHandler<MessageLite>::Merge(const MessageLite& from,
// Message specialization bodies defined in message.cc. This split is necessary
// to allow proto2-lite (which includes this header) to be independent of
// Message.
-DECLARE_SPECIALIZATIONS_FOR_BASE_PROTO_TYPES(Message);
+DECLARE_SPECIALIZATIONS_FOR_BASE_PROTO_TYPES(Message)
#undef DECLARE_SPECIALIZATIONS_FOR_BASE_PROTO_TYPES
@@ -665,7 +697,7 @@ class LIBPROTOBUF_EXPORT StringTypeHandlerBase {
static inline ::google::protobuf::Arena* GetArena(string*) {
return NULL;
}
- static inline void* GetMaybeArenaPointer(string* value) {
+ static inline void* GetMaybeArenaPointer(string* /* value */) {
return NULL;
}
static inline void Delete(string* value, Arena* arena) {
@@ -683,7 +715,7 @@ class LIBPROTOBUF_EXPORT StringTypeHandlerBase {
class StringTypeHandler : public StringTypeHandlerBase {
public:
static int SpaceUsed(const string& value) {
- return sizeof(value) + StringSpaceUsedExcludingSelf(value);
+ return static_cast<int>(sizeof(value)) + StringSpaceUsedExcludingSelf(value);
}
};
@@ -827,6 +859,15 @@ class RepeatedPtrField : public internal::RepeatedPtrFieldBase {
// Add an already-allocated object, skipping arena-ownership checks. The user
// must guarantee that the given object is in the same arena as this
// RepeatedPtrField.
+ // It is also useful in legacy code that uses temporary ownership to avoid
+ // copies. Example:
+ // RepeatedPtrField<T> temp_field;
+ // temp_field.AddAllocated(new T);
+ // ... // Do something with temp_field
+ // temp_field.ExtractSubrange(0, temp_field.size(), NULL);
+ // If you put temp_field on an arena, this pattern fails: ownership
+ // transfers to the arena at the "AddAllocated" call and is never
+ // released, causing a double delete. UnsafeArenaAddAllocated prevents this.
void UnsafeArenaAddAllocated(Element* value);
// Remove the last element and return it. Works only when operating on an
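
The temporary-ownership example in the comment above, rendered as a compilable sketch (T stands for any element type; the function name is illustrative and not part of this header):

    #include <google/protobuf/repeated_field.h>

    template <typename T>
    void BorrowTemporarily(T* caller_owned) {
      google::protobuf::RepeatedPtrField<T> temp_field;
      // UnsafeArenaAddAllocated never hands ownership to an arena, so the
      // pointer stays owned by the caller.
      temp_field.UnsafeArenaAddAllocated(caller_owned);
      // ... do something with temp_field ...
      // Detach the borrowed pointer without deleting it.
      temp_field.ExtractSubrange(0, temp_field.size(), NULL);
    }
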
@@ -885,10 +926,19 @@ class RepeatedPtrField : public internal::RepeatedPtrFieldBase {
// so will trigger a GOOGLE_DCHECK-failure.
Element* ReleaseCleared();
- // Remove the element referenced by position.
+ // Removes the element referenced by position.
+ //
+ // Returns an iterator to the element immediately following the removed
+ // element.
+ //
+ // Invalidates all iterators at or after the removed element, including end().
iterator erase(const_iterator position);
// Removes the elements in the range [first, last).
+ //
+ // Returns an iterator to the element immediately following the removed range.
+ //
+ // Invalidates all iterators at or after the removed range, including end().
iterator erase(const_iterator first, const_iterator last);
// Gets the arena on which this RepeatedPtrField stores its elements.
@@ -974,19 +1024,8 @@ RepeatedField<Element>::RepeatedField(Iter begin, const Iter& end)
template <typename Element>
RepeatedField<Element>::~RepeatedField() {
// See explanation in Reserve(): we need to invoke destructors here for the
- // case that Element has a non-trivial destructor. If Element has a trivial
- // destructor (for example, if it's a primitive type, like int32), this entire
- // loop will be removed by the optimizer.
- if (rep_ != NULL) {
- Element* e = &rep_->elements[0];
- Element* limit = &rep_->elements[total_size_];
- for (; e < limit; e++) {
- e->Element::~Element();
- }
- if (rep_->arena == NULL) {
- delete[] reinterpret_cast<char*>(rep_);
- }
- }
+ // case that Element has a non-trivial destructor.
+ InternalDeallocate(rep_, total_size_);
}
template <typename Element>
@@ -1222,8 +1261,8 @@ void RepeatedField<Element>::Reserve(int new_size) {
if (total_size_ >= new_size) return;
Rep* old_rep = rep_;
Arena* arena = GetArenaNoVirtual();
- new_size = max(google::protobuf::internal::kMinRepeatedFieldAllocationSize,
- max(total_size_ * 2, new_size));
+ new_size = std::max(google::protobuf::internal::kMinRepeatedFieldAllocationSize,
+ std::max(total_size_ * 2, new_size));
GOOGLE_CHECK_LE(static_cast<size_t>(new_size),
(std::numeric_limits<size_t>::max() - kRepHeaderSize) /
sizeof(Element))
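
The growth rule above doubles the capacity while respecting both the requested size and the library minimum. A small isolated sketch of the same computation (kMinRepeatedFieldAllocationSize is assumed to be 4 here, purely for illustration):

    #include <algorithm>

    int NextCapacity(int total_size, int requested) {
      const int kMinRepeatedFieldAllocationSize = 4;  // assumed value
      // Starting from 0 and repeatedly requesting one more slot yields
      // capacities 4, 8, 16, 32, ...
      return std::max(kMinRepeatedFieldAllocationSize,
                      std::max(total_size * 2, requested));
    }
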
@@ -1256,18 +1295,10 @@ void RepeatedField<Element>::Reserve(int new_size) {
if (current_size_ > 0) {
MoveArray(rep_->elements, old_rep->elements, current_size_);
}
- if (old_rep) {
- // Likewise, we need to invoke destructors on the old array. If Element has
- // no destructor, this loop will disappear.
- e = &old_rep->elements[0];
- limit = &old_rep->elements[old_total_size];
- for (; e < limit; e++) {
- e->Element::~Element();
- }
- if (arena == NULL) {
- delete[] reinterpret_cast<char*>(old_rep);
- }
- }
+
+ // Likewise, we need to invoke destructors on the old array.
+ InternalDeallocate(old_rep, old_total_size);
+
}
template <typename Element>
@@ -2362,6 +2393,37 @@ template<typename T> class AllocatedRepeatedPtrFieldBackInsertIterator
private:
RepeatedPtrField<T>* field_;
};
+
+// Almost identical to AllocatedRepeatedPtrFieldBackInsertIterator. This one
+// uses UnsafeArenaAddAllocated instead of AddAllocated.
+template<typename T>
+class UnsafeArenaAllocatedRepeatedPtrFieldBackInsertIterator
+ : public std::iterator<std::output_iterator_tag, T> {
+ public:
+ explicit UnsafeArenaAllocatedRepeatedPtrFieldBackInsertIterator(
+ ::google::protobuf::RepeatedPtrField<T>* const mutable_field)
+ : field_(mutable_field) {
+ }
+ UnsafeArenaAllocatedRepeatedPtrFieldBackInsertIterator<T>& operator=(
+ T const* const ptr_to_value) {
+ field_->UnsafeArenaAddAllocated(const_cast<T*>(ptr_to_value));
+ return *this;
+ }
+ UnsafeArenaAllocatedRepeatedPtrFieldBackInsertIterator<T>& operator*() {
+ return *this;
+ }
+ UnsafeArenaAllocatedRepeatedPtrFieldBackInsertIterator<T>& operator++() {
+ return *this;
+ }
+ UnsafeArenaAllocatedRepeatedPtrFieldBackInsertIterator<T>& operator++(
+ int /* unused */) {
+ return *this;
+ }
+
+ private:
+ ::google::protobuf::RepeatedPtrField<T>* field_;
+};
+
} // namespace internal
// Provides a back insert iterator for RepeatedField instances,
@@ -2396,6 +2458,25 @@ AllocatedRepeatedPtrFieldBackInserter(
mutable_field);
}
+// Similar to AllocatedRepeatedPtrFieldBackInserter, using
+// UnsafeArenaAddAllocated instead of AddAllocated.
+// This is slightly faster if that matters. It is also useful in legacy code
+// that uses temporary ownership to avoid copies. Example:
+// RepeatedPtrField<T> temp_field;
+// temp_field.AddAllocated(new T);
+// ... // Do something with temp_field
+// temp_field.ExtractSubrange(0, temp_field.size(), NULL);
+// If you put temp_field on an arena this fails: ownership transfers to the
+// arena at the "AddAllocated" call and is never released, causing a double
+// delete. Using UnsafeArenaAddAllocated prevents this.
+template<typename T>
+internal::UnsafeArenaAllocatedRepeatedPtrFieldBackInsertIterator<T>
+UnsafeArenaAllocatedRepeatedPtrFieldBackInserter(
+ ::google::protobuf::RepeatedPtrField<T>* const mutable_field) {
+ return internal::UnsafeArenaAllocatedRepeatedPtrFieldBackInsertIterator<T>(
+ mutable_field);
+}
+
} // namespace protobuf
} // namespace google
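
A usage sketch for the new back inserter (not part of the header; the vector of caller-owned pointers is hypothetical):

    #include <algorithm>
    #include <vector>
    #include <google/protobuf/repeated_field.h>

    template <typename T>
    void AppendBorrowed(const std::vector<T*>& caller_owned,
                        google::protobuf::RepeatedPtrField<T>* field) {
      // Each assignment through the inserter calls UnsafeArenaAddAllocated,
      // so the field only borrows the pointers; ownership stays with the caller.
      std::copy(caller_owned.begin(), caller_owned.end(),
                google::protobuf::UnsafeArenaAllocatedRepeatedPtrFieldBackInserter(
                    field));
    }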