author     Adam Cozzette <acozzette@google.com>  2018-03-13 16:37:29 -0700
committer  Adam Cozzette <acozzette@google.com>  2018-03-13 16:37:29 -0700
commit     0400cca3236de1ca303af38bf81eab332d042b7c (patch)
tree       a8a9b19853f64567c96750a1c7d253926471daa5 /python
parent     96b535cc2f4f7b7e22a1b8622149f7c26a5a3f63 (diff)
download   protobuf-0400cca3236de1ca303af38bf81eab332d042b7c.tar.gz
           protobuf-0400cca3236de1ca303af38bf81eab332d042b7c.tar.bz2
           protobuf-0400cca3236de1ca303af38bf81eab332d042b7c.zip
Integrated internal changes from Google
Diffstat (limited to 'python')
-rwxr-xr-x  python/google/protobuf/descriptor.py                          | 109
-rwxr-xr-x  python/google/protobuf/internal/_parameterized.py             |  50
-rwxr-xr-x  python/google/protobuf/internal/api_implementation.py         |  11
-rwxr-xr-x  python/google/protobuf/internal/encoder.py                    |  16
-rw-r--r--  python/google/protobuf/internal/json_format_test.py           |  12
-rwxr-xr-x  python/google/protobuf/internal/message_test.py               |  29
-rw-r--r--  python/google/protobuf/internal/no_package.proto              |  12
-rwxr-xr-x  python/google/protobuf/internal/text_format_test.py           |  93
-rw-r--r--  python/google/protobuf/internal/well_known_types.py           |  10
-rw-r--r--  python/google/protobuf/internal/well_known_types_test.py      |  16
-rw-r--r--  python/google/protobuf/json_format.py                         |  33
-rw-r--r--  python/google/protobuf/pyext/descriptor.cc                    |  80
-rw-r--r--  python/google/protobuf/pyext/descriptor_pool.cc               | 122
-rw-r--r--  python/google/protobuf/pyext/extension_dict.cc                |   3
-rw-r--r--  python/google/protobuf/pyext/extension_dict.h                 |  15
-rw-r--r--  python/google/protobuf/pyext/map_container.cc                 |  29
-rw-r--r--  python/google/protobuf/pyext/map_container.h                  |  17
-rw-r--r--  python/google/protobuf/pyext/message.cc                       |  29
-rw-r--r--  python/google/protobuf/pyext/message.h                        |  24
-rw-r--r--  python/google/protobuf/pyext/message_factory.cc               |   8
-rw-r--r--  python/google/protobuf/pyext/repeated_composite_container.cc  | 129
-rw-r--r--  python/google/protobuf/pyext/repeated_composite_container.h   |  21
-rw-r--r--  python/google/protobuf/pyext/repeated_scalar_container.cc     | 174
-rw-r--r--  python/google/protobuf/pyext/repeated_scalar_container.h      |  19
-rwxr-xr-x  python/google/protobuf/text_format.py                         |  88
25 files changed, 746 insertions(+), 403 deletions(-)
diff --git a/python/google/protobuf/descriptor.py b/python/google/protobuf/descriptor.py
index 0d35425f..8a9ba3da 100755
--- a/python/google/protobuf/descriptor.py
+++ b/python/google/protobuf/descriptor.py
@@ -34,6 +34,7 @@ file, in types that make this information accessible in Python.
__author__ = 'robinson@google.com (Will Robinson)'
+import threading
import six
from google.protobuf.internal import api_implementation
@@ -72,6 +73,24 @@ else:
DescriptorMetaclass = type
+class _Lock(object):
+ """Wrapper class of threading.Lock(), which is allowed by 'with'."""
+
+ def __new__(cls):
+ self = object.__new__(cls)
+ self._lock = threading.Lock() # pylint: disable=protected-access
+ return self
+
+ def __enter__(self):
+ self._lock.acquire()
+
+ def __exit__(self, exc_type, exc_value, exc_tb):
+ self._lock.release()
+
+
+_lock = threading.Lock()
+
+
class DescriptorBase(six.with_metaclass(DescriptorMetaclass)):
"""Descriptors base class.
@@ -92,16 +111,17 @@ class DescriptorBase(six.with_metaclass(DescriptorMetaclass)):
# subclasses" of this descriptor class.
_C_DESCRIPTOR_CLASS = ()
- def __init__(self, options, options_class_name):
+ def __init__(self, options, serialized_options, options_class_name):
"""Initialize the descriptor given its options message and the name of the
class of the options message. The name of the class is required in case
the options message is None and has to be created.
"""
self._options = options
self._options_class_name = options_class_name
+ self._serialized_options = serialized_options
# Does this descriptor have non-default options?
- self.has_options = options is not None
+ self.has_options = (options is not None) or (serialized_options is not None)
def _SetOptions(self, options, options_class_name):
"""Sets the descriptor's options
@@ -123,14 +143,23 @@ class DescriptorBase(six.with_metaclass(DescriptorMetaclass)):
"""
if self._options:
return self._options
+
from google.protobuf import descriptor_pb2
try:
- options_class = getattr(descriptor_pb2, self._options_class_name)
+ options_class = getattr(descriptor_pb2,
+ self._options_class_name)
except AttributeError:
raise RuntimeError('Unknown options class name %s!' %
(self._options_class_name))
- self._options = options_class()
- return self._options
+
+ with _lock:
+ if self._serialized_options is None:
+ self._options = options_class()
+ else:
+ self._options = _ParseOptions(options_class(),
+ self._serialized_options)
+
+ return self._options
class _NestedDescriptorBase(DescriptorBase):
@@ -138,7 +167,7 @@ class _NestedDescriptorBase(DescriptorBase):
def __init__(self, options, options_class_name, name, full_name,
file, containing_type, serialized_start=None,
- serialized_end=None):
+ serialized_end=None, serialized_options=None):
"""Constructor.
Args:
@@ -157,9 +186,10 @@ class _NestedDescriptorBase(DescriptorBase):
file.serialized_pb that describes this descriptor.
serialized_end: The end index (exclusive) in block in the
file.serialized_pb that describes this descriptor.
+ serialized_options: Protocol message serialized options or None.
"""
super(_NestedDescriptorBase, self).__init__(
- options, options_class_name)
+ options, serialized_options, options_class_name)
self.name = name
# TODO(falk): Add function to calculate full_name instead of having it in
@@ -250,6 +280,7 @@ class Descriptor(_NestedDescriptorBase):
def __new__(cls, name, full_name, filename, containing_type, fields,
nested_types, enum_types, extensions, options=None,
+ serialized_options=None,
is_extendable=True, extension_ranges=None, oneofs=None,
file=None, serialized_start=None, serialized_end=None, # pylint: disable=redefined-builtin
syntax=None):
@@ -261,6 +292,7 @@ class Descriptor(_NestedDescriptorBase):
# name of the argument.
def __init__(self, name, full_name, filename, containing_type, fields,
nested_types, enum_types, extensions, options=None,
+ serialized_options=None,
is_extendable=True, extension_ranges=None, oneofs=None,
file=None, serialized_start=None, serialized_end=None, # pylint: disable=redefined-builtin
syntax=None):
@@ -273,7 +305,7 @@ class Descriptor(_NestedDescriptorBase):
super(Descriptor, self).__init__(
options, 'MessageOptions', name, full_name, file,
containing_type, serialized_start=serialized_start,
- serialized_end=serialized_end)
+ serialized_end=serialized_end, serialized_options=serialized_options)
# We have fields in addition to fields_by_name and fields_by_number,
# so that:
@@ -492,8 +524,9 @@ class FieldDescriptor(DescriptorBase):
def __new__(cls, name, full_name, index, number, type, cpp_type, label,
default_value, message_type, enum_type, containing_type,
is_extension, extension_scope, options=None,
+ serialized_options=None,
has_default_value=True, containing_oneof=None, json_name=None,
- file=None):
+ file=None): # pylint: disable=redefined-builtin
_message.Message._CheckCalledFromGeneratedFile()
if is_extension:
return _message.default_pool.FindExtensionByName(full_name)
@@ -503,8 +536,9 @@ class FieldDescriptor(DescriptorBase):
def __init__(self, name, full_name, index, number, type, cpp_type, label,
default_value, message_type, enum_type, containing_type,
is_extension, extension_scope, options=None,
+ serialized_options=None,
has_default_value=True, containing_oneof=None, json_name=None,
- file=None):
+ file=None): # pylint: disable=redefined-builtin
"""The arguments are as described in the description of FieldDescriptor
attributes above.
@@ -512,7 +546,8 @@ class FieldDescriptor(DescriptorBase):
(to deal with circular references between message types, for example).
Likewise for extension_scope.
"""
- super(FieldDescriptor, self).__init__(options, 'FieldOptions')
+ super(FieldDescriptor, self).__init__(
+ options, serialized_options, 'FieldOptions')
self.name = name
self.full_name = full_name
self.file = file
@@ -598,13 +633,15 @@ class EnumDescriptor(_NestedDescriptorBase):
_C_DESCRIPTOR_CLASS = _message.EnumDescriptor
def __new__(cls, name, full_name, filename, values,
- containing_type=None, options=None, file=None,
+ containing_type=None, options=None,
+ serialized_options=None, file=None, # pylint: disable=redefined-builtin
serialized_start=None, serialized_end=None):
_message.Message._CheckCalledFromGeneratedFile()
return _message.default_pool.FindEnumTypeByName(full_name)
def __init__(self, name, full_name, filename, values,
- containing_type=None, options=None, file=None,
+ containing_type=None, options=None,
+ serialized_options=None, file=None, # pylint: disable=redefined-builtin
serialized_start=None, serialized_end=None):
"""Arguments are as described in the attribute description above.
@@ -614,7 +651,7 @@ class EnumDescriptor(_NestedDescriptorBase):
super(EnumDescriptor, self).__init__(
options, 'EnumOptions', name, full_name, file,
containing_type, serialized_start=serialized_start,
- serialized_end=serialized_end)
+ serialized_end=serialized_end, serialized_options=serialized_options)
self.values = values
for value in self.values:
@@ -650,7 +687,9 @@ class EnumValueDescriptor(DescriptorBase):
if _USE_C_DESCRIPTORS:
_C_DESCRIPTOR_CLASS = _message.EnumValueDescriptor
- def __new__(cls, name, index, number, type=None, options=None):
+ def __new__(cls, name, index, number,
+ type=None, # pylint: disable=redefined-builtin
+ options=None, serialized_options=None):
_message.Message._CheckCalledFromGeneratedFile()
# There is no way we can build a complete EnumValueDescriptor with the
# given parameters (the name of the Enum is not known, for example).
@@ -658,9 +697,12 @@ class EnumValueDescriptor(DescriptorBase):
# constructor, which will ignore it, so returning None is good enough.
return None
- def __init__(self, name, index, number, type=None, options=None):
+ def __init__(self, name, index, number,
+ type=None, # pylint: disable=redefined-builtin
+ options=None, serialized_options=None):
"""Arguments are as described in the attribute description above."""
- super(EnumValueDescriptor, self).__init__(options, 'EnumValueOptions')
+ super(EnumValueDescriptor, self).__init__(
+ options, serialized_options, 'EnumValueOptions')
self.name = name
self.index = index
self.number = number
@@ -685,14 +727,17 @@ class OneofDescriptor(DescriptorBase):
_C_DESCRIPTOR_CLASS = _message.OneofDescriptor
def __new__(
- cls, name, full_name, index, containing_type, fields, options=None):
+ cls, name, full_name, index, containing_type, fields, options=None,
+ serialized_options=None):
_message.Message._CheckCalledFromGeneratedFile()
return _message.default_pool.FindOneofByName(full_name)
def __init__(
- self, name, full_name, index, containing_type, fields, options=None):
+ self, name, full_name, index, containing_type, fields, options=None,
+ serialized_options=None):
"""Arguments are as described in the attribute description above."""
- super(OneofDescriptor, self).__init__(options, 'OneofOptions')
+ super(OneofDescriptor, self).__init__(
+ options, serialized_options, 'OneofOptions')
self.name = name
self.full_name = full_name
self.index = index
@@ -721,17 +766,19 @@ class ServiceDescriptor(_NestedDescriptorBase):
if _USE_C_DESCRIPTORS:
_C_DESCRIPTOR_CLASS = _message.ServiceDescriptor
- def __new__(cls, name, full_name, index, methods, options=None, file=None, # pylint: disable=redefined-builtin
+ def __new__(cls, name, full_name, index, methods, options=None,
+ serialized_options=None, file=None, # pylint: disable=redefined-builtin
serialized_start=None, serialized_end=None):
_message.Message._CheckCalledFromGeneratedFile() # pylint: disable=protected-access
return _message.default_pool.FindServiceByName(full_name)
- def __init__(self, name, full_name, index, methods, options=None, file=None,
+ def __init__(self, name, full_name, index, methods, options=None,
+ serialized_options=None, file=None, # pylint: disable=redefined-builtin
serialized_start=None, serialized_end=None):
super(ServiceDescriptor, self).__init__(
options, 'ServiceOptions', name, full_name, file,
None, serialized_start=serialized_start,
- serialized_end=serialized_end)
+ serialized_end=serialized_end, serialized_options=serialized_options)
self.index = index
self.methods = methods
self.methods_by_name = dict((m.name, m) for m in methods)
@@ -772,18 +819,19 @@ class MethodDescriptor(DescriptorBase):
_C_DESCRIPTOR_CLASS = _message.MethodDescriptor
def __new__(cls, name, full_name, index, containing_service,
- input_type, output_type, options=None):
+ input_type, output_type, options=None, serialized_options=None):
_message.Message._CheckCalledFromGeneratedFile() # pylint: disable=protected-access
return _message.default_pool.FindMethodByName(full_name)
def __init__(self, name, full_name, index, containing_service,
- input_type, output_type, options=None):
+ input_type, output_type, options=None, serialized_options=None):
"""The arguments are as described in the description of MethodDescriptor
attributes above.
Note that containing_service may be None, and may be set later if necessary.
"""
- super(MethodDescriptor, self).__init__(options, 'MethodOptions')
+ super(MethodDescriptor, self).__init__(
+ options, serialized_options, 'MethodOptions')
self.name = name
self.full_name = full_name
self.index = index
@@ -818,7 +866,8 @@ class FileDescriptor(DescriptorBase):
if _USE_C_DESCRIPTORS:
_C_DESCRIPTOR_CLASS = _message.FileDescriptor
- def __new__(cls, name, package, options=None, serialized_pb=None,
+ def __new__(cls, name, package, options=None,
+ serialized_options=None, serialized_pb=None,
dependencies=None, public_dependencies=None,
syntax=None, pool=None):
# FileDescriptor() is called from various places, not only from generated
@@ -830,11 +879,13 @@ class FileDescriptor(DescriptorBase):
else:
return super(FileDescriptor, cls).__new__(cls)
- def __init__(self, name, package, options=None, serialized_pb=None,
+ def __init__(self, name, package, options=None,
+ serialized_options=None, serialized_pb=None,
dependencies=None, public_dependencies=None,
syntax=None, pool=None):
"""Constructor."""
- super(FileDescriptor, self).__init__(options, 'FileOptions')
+ super(FileDescriptor, self).__init__(
+ options, serialized_options, 'FileOptions')
if pool is None:
from google.protobuf import descriptor_pool
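The descriptor.py changes above make options parsing lazy: constructors now accept a serialized_options byte string, and GetOptions() only parses it on first access, under a module-level lock. Below is a minimal standalone sketch of that pattern with toy classes; it is not the real descriptor API, just an illustration of the lazy-parse-under-lock idea.

import threading

_options_lock = threading.Lock()


class FakeOptions(object):
  """Toy stand-in for a descriptor_pb2 options message (hypothetical)."""

  def __init__(self):
    self.payload = b''

  def ParseFromString(self, serialized):
    self.payload = serialized
    return self


class LazyOptionsHolder(object):
  """Keeps options serialized until GetOptions() is first called."""

  def __init__(self, options=None, serialized_options=None):
    self._options = options
    self._serialized_options = serialized_options
    self.has_options = (options is not None) or (serialized_options is not None)

  def GetOptions(self):
    if self._options:
      return self._options
    with _options_lock:
      if self._serialized_options is None:
        self._options = FakeOptions()
      else:
        self._options = FakeOptions().ParseFromString(self._serialized_options)
    return self._options


holder = LazyOptionsHolder(serialized_options=b'\x08\x01')
print(holder.GetOptions().payload)  # b'\x08\x01'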
diff --git a/python/google/protobuf/internal/_parameterized.py b/python/google/protobuf/internal/_parameterized.py
index 23a78f03..f2c0b305 100755
--- a/python/google/protobuf/internal/_parameterized.py
+++ b/python/google/protobuf/internal/_parameterized.py
@@ -37,8 +37,8 @@ argument tuples.
A simple example:
- class AdditionExample(parameterized.ParameterizedTestCase):
- @parameterized.Parameters(
+ class AdditionExample(parameterized.TestCase):
+ @parameterized.parameters(
(1, 2, 3),
(4, 5, 9),
(1, 1, 3))
@@ -54,8 +54,8 @@ fail due to an assertion error (1 + 1 != 3).
Parameters for individual test cases can be tuples (with positional parameters)
or dictionaries (with named parameters):
- class AdditionExample(parameterized.ParameterizedTestCase):
- @parameterized.Parameters(
+ class AdditionExample(parameterized.TestCase):
+ @parameterized.parameters(
{'op1': 1, 'op2': 2, 'result': 3},
{'op1': 4, 'op2': 5, 'result': 9},
)
@@ -77,13 +77,13 @@ stay the same across several invocations, object representations like
'<__main__.Foo object at 0x23d8610>'
are turned into '<__main__.Foo>'. For even more descriptive names,
-especially in test logs, you can use the NamedParameters decorator. In
+especially in test logs, you can use the named_parameters decorator. In
this case, only tuples are supported, and the first parameter has to
be a string (or an object that returns an apt name when converted via
str()):
- class NamedExample(parameterized.ParameterizedTestCase):
- @parameterized.NamedParameters(
+ class NamedExample(parameterized.TestCase):
+ @parameterized.named_parameters(
('Normal', 'aa', 'aaa', True),
('EmptyPrefix', '', 'abc', True),
('BothEmpty', '', '', True))
@@ -103,13 +103,13 @@ from the command line:
Parameterized Classes
=====================
If invocation arguments are shared across test methods in a single
-ParameterizedTestCase class, instead of decorating all test methods
+TestCase class, instead of decorating all test methods
individually, the class itself can be decorated:
- @parameterized.Parameters(
+ @parameterized.parameters(
(1, 2, 3),
(4, 5, 9))
- class ArithmeticTest(parameterized.ParameterizedTestCase):
+ class ArithmeticTest(parameterized.TestCase):
def testAdd(self, arg1, arg2, result):
self.assertEqual(arg1 + arg2, result)
@@ -122,8 +122,8 @@ If parameters should be shared across several test cases, or are dynamically
created from other sources, a single non-tuple iterable can be passed into
the decorator. This iterable will be used to obtain the test cases:
- class AdditionExample(parameterized.ParameterizedTestCase):
- @parameterized.Parameters(
+ class AdditionExample(parameterized.TestCase):
+ @parameterized.parameters(
(c.op1, c.op2, c.result) for c in testcases
)
def testAddition(self, op1, op2, result):
@@ -135,8 +135,8 @@ Single-Argument Test Methods
If a test method takes only one argument, the single argument does not need to
be wrapped into a tuple:
- class NegativeNumberExample(parameterized.ParameterizedTestCase):
- @parameterized.Parameters(
+ class NegativeNumberExample(parameterized.TestCase):
+ @parameterized.parameters(
-1, -3, -4, -5
)
def testIsNegative(self, arg):
@@ -212,7 +212,7 @@ class _ParameterizedTestIter(object):
def __call__(self, *args, **kwargs):
raise RuntimeError('You appear to be running a parameterized test case '
'without having inherited from parameterized.'
- 'ParameterizedTestCase. This is bad because none of '
+ 'TestCase. This is bad because none of '
'your test cases are actually being run.')
def __iter__(self):
@@ -306,7 +306,7 @@ def _ParameterDecorator(naming_type, testcases):
return _Apply
-def Parameters(*testcases):
+def parameters(*testcases): # pylint: disable=invalid-name
"""A decorator for creating parameterized tests.
See the module docstring for a usage example.
@@ -321,7 +321,7 @@ def Parameters(*testcases):
return _ParameterDecorator(_ARGUMENT_REPR, testcases)
-def NamedParameters(*testcases):
+def named_parameters(*testcases): # pylint: disable=invalid-name
"""A decorator for creating parameterized tests.
See the module docstring for a usage example. The first element of
@@ -348,7 +348,7 @@ class TestGeneratorMetaclass(type):
up as tests by the unittest framework.
In general, it is supposed to be used in conjunction with the
- Parameters decorator.
+ parameters decorator.
"""
def __new__(mcs, class_name, bases, dct):
@@ -385,8 +385,8 @@ def _UpdateClassDictForParamTestCase(dct, id_suffix, name, iterator):
id_suffix[new_name] = getattr(func, '__x_extra_id__', '')
-class ParameterizedTestCase(unittest.TestCase):
- """Base class for test cases using the Parameters decorator."""
+class TestCase(unittest.TestCase):
+ """Base class for test cases using the parameters decorator."""
__metaclass__ = TestGeneratorMetaclass
def _OriginalName(self):
@@ -409,10 +409,10 @@ class ParameterizedTestCase(unittest.TestCase):
self._id_suffix.get(self._testMethodName, ''))
-def CoopParameterizedTestCase(other_base_class):
+def CoopTestCase(other_base_class):
"""Returns a new base class with a cooperative metaclass base.
- This enables the ParameterizedTestCase to be used in combination
+ This enables the TestCase to be used in combination
with other base classes that have custom metaclasses, such as
mox.MoxTestBase.
@@ -425,7 +425,7 @@ def CoopParameterizedTestCase(other_base_class):
from google3.testing.pybase import parameterized
- class ExampleTest(parameterized.CoopParameterizedTestCase(mox.MoxTestBase)):
+ class ExampleTest(parameterized.CoopTestCase(mox.MoxTestBase)):
...
Args:
@@ -439,5 +439,5 @@ def CoopParameterizedTestCase(other_base_class):
(other_base_class.__metaclass__,
TestGeneratorMetaclass), {})
return metaclass(
- 'CoopParameterizedTestCase',
- (other_base_class, ParameterizedTestCase), {})
+ 'CoopTestCase',
+ (other_base_class, TestCase), {})
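For reference, a minimal usage sketch of the renamed API, assuming the internal _parameterized module is importable the same way the test modules in this commit import it:

import unittest

from google.protobuf.internal import _parameterized


class AdditionExample(_parameterized.TestCase):

  @_parameterized.parameters(
      (1, 2, 3),
      (4, 5, 9))
  def testAddition(self, op1, op2, result):
    self.assertEqual(result, op1 + op2)


if __name__ == '__main__':
  unittest.main()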
diff --git a/python/google/protobuf/internal/api_implementation.py b/python/google/protobuf/internal/api_implementation.py
index 553fcdb6..ab9e7812 100755
--- a/python/google/protobuf/internal/api_implementation.py
+++ b/python/google/protobuf/internal/api_implementation.py
@@ -66,10 +66,13 @@ if _api_version < 0: # Still unspecified?
from google.protobuf.internal import use_pure_python
del use_pure_python # Avoids a pylint error and namespace pollution.
except ImportError:
- if _proto_extension_modules_exist_in_build:
- if sys.version_info[0] >= 3: # Python 3 defaults to C++ impl v2.
- _api_version = 2
- # TODO(b/17427486): Make Python 2 default to C++ impl v2.
+ # TODO(b/74017912): It's unsafe to enable :use_fast_cpp_protos by default;
+ # it can cause data loss if you have any Python-only extensions to any
+ # message passed back and forth with C++ code.
+ #
+ # TODO(b/17427486): Once that bug is fixed, we want to make both Python 2
+ # and Python 3 default to `_api_version = 2` (C++ implementation V2).
+ pass
_default_implementation_type = (
'python' if _api_version <= 0 else 'cpp')
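To see which implementation the selection logic above ends up choosing, the module's existing Type() accessor can be used; this is a small hedged sketch, and the PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION environment variable override is unchanged by this patch.

from google.protobuf.internal import api_implementation

# Prints 'python' or 'cpp', depending on what the logic above selected.
print(api_implementation.Type())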
diff --git a/python/google/protobuf/internal/encoder.py b/python/google/protobuf/internal/encoder.py
index dc7a8ce8..0d1f49dd 100755
--- a/python/google/protobuf/internal/encoder.py
+++ b/python/google/protobuf/internal/encoder.py
@@ -372,7 +372,7 @@ def MapSizer(field_descriptor, is_message_map):
def _VarintEncoder():
"""Return an encoder for a basic varint value (does not include tag)."""
- def EncodeVarint(write, value, unused_deterministic):
+ def EncodeVarint(write, value, unused_deterministic=None):
bits = value & 0x7f
value >>= 7
while value:
@@ -388,7 +388,7 @@ def _SignedVarintEncoder():
"""Return an encoder for a basic signed varint value (does not include
tag)."""
- def EncodeSignedVarint(write, value, unused_deterministic):
+ def EncodeSignedVarint(write, value, unused_deterministic=None):
if value < 0:
value += (1 << 64)
bits = value & 0x7f
@@ -524,14 +524,14 @@ def _StructPackEncoder(wire_type, format):
return EncodePackedField
elif is_repeated:
tag_bytes = TagBytes(field_number, wire_type)
- def EncodeRepeatedField(write, value, unused_deterministic):
+ def EncodeRepeatedField(write, value, unused_deterministic=None):
for element in value:
write(tag_bytes)
write(local_struct_pack(format, element))
return EncodeRepeatedField
else:
tag_bytes = TagBytes(field_number, wire_type)
- def EncodeField(write, value, unused_deterministic):
+ def EncodeField(write, value, unused_deterministic=None):
write(tag_bytes)
return write(local_struct_pack(format, value))
return EncodeField
@@ -595,7 +595,7 @@ def _FloatingPointEncoder(wire_type, format):
return EncodePackedField
elif is_repeated:
tag_bytes = TagBytes(field_number, wire_type)
- def EncodeRepeatedField(write, value, unused_deterministic):
+ def EncodeRepeatedField(write, value, unused_deterministic=None):
for element in value:
write(tag_bytes)
try:
@@ -605,7 +605,7 @@ def _FloatingPointEncoder(wire_type, format):
return EncodeRepeatedField
else:
tag_bytes = TagBytes(field_number, wire_type)
- def EncodeField(write, value, unused_deterministic):
+ def EncodeField(write, value, unused_deterministic=None):
write(tag_bytes)
try:
write(local_struct_pack(format, value))
@@ -662,7 +662,7 @@ def BoolEncoder(field_number, is_repeated, is_packed):
return EncodePackedField
elif is_repeated:
tag_bytes = TagBytes(field_number, wire_format.WIRETYPE_VARINT)
- def EncodeRepeatedField(write, value, unused_deterministic):
+ def EncodeRepeatedField(write, value, unused_deterministic=None):
for element in value:
write(tag_bytes)
if element:
@@ -672,7 +672,7 @@ def BoolEncoder(field_number, is_repeated, is_packed):
return EncodeRepeatedField
else:
tag_bytes = TagBytes(field_number, wire_format.WIRETYPE_VARINT)
- def EncodeField(write, value, unused_deterministic):
+ def EncodeField(write, value, unused_deterministic=None):
write(tag_bytes)
if value:
return write(true_byte)
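All of the scalar encoders touched above wrap the same base-128 varint scheme, and none of them use the deterministic flag, which is why the diff gives it a default of None. The following is a standalone sketch of the underlying encoding, not the library's internal API:

def encode_varint(value):
  """Encodes a non-negative integer as a base-128 varint (no field tag)."""
  out = bytearray()
  bits = value & 0x7F
  value >>= 7
  while value:
    out.append(0x80 | bits)  # set the continuation bit
    bits = value & 0x7F
    value >>= 7
  out.append(bits)
  return bytes(out)


assert encode_varint(1) == b'\x01'
assert encode_varint(300) == b'\xac\x02'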
diff --git a/python/google/protobuf/internal/json_format_test.py b/python/google/protobuf/internal/json_format_test.py
index 19182b7f..d891dce1 100644
--- a/python/google/protobuf/internal/json_format_test.py
+++ b/python/google/protobuf/internal/json_format_test.py
@@ -983,6 +983,18 @@ class JsonFormatTest(JsonFormatBase):
self.assertEqual('{\n"int32Value": 12345\n}',
json_format.MessageToJson(message, indent=0))
+ def testFormatEnumsAsInts(self):
+ message = json_format_proto3_pb2.TestMessage()
+ message.enum_value = json_format_proto3_pb2.BAR
+ message.repeated_enum_value.append(json_format_proto3_pb2.FOO)
+ message.repeated_enum_value.append(json_format_proto3_pb2.BAR)
+ self.assertEqual(json.loads('{\n'
+ ' "enumValue": 1,\n'
+ ' "repeatedEnumValue": [0, 1]\n'
+ '}\n'),
+ json.loads(json_format.MessageToJson(
+ message, use_integers_for_enums=True)))
+
def testParseDict(self):
expected = 12345
js_dict = {'int32Value': expected}
diff --git a/python/google/protobuf/internal/message_test.py b/python/google/protobuf/internal/message_test.py
index 8dae6377..61a56a67 100755
--- a/python/google/protobuf/internal/message_test.py
+++ b/python/google/protobuf/internal/message_test.py
@@ -99,7 +99,7 @@ def IsNegInf(val):
BaseTestCase = testing_refleaks.BaseTestCase
-@_parameterized.NamedParameters(
+@_parameterized.named_parameters(
('_proto2', unittest_pb2),
('_proto3', unittest_proto3_arena_pb2))
class MessageTest(BaseTestCase):
@@ -1694,6 +1694,33 @@ class Proto3Test(BaseTestCase):
with self.assertRaises(TypeError):
del msg2.map_int32_foreign_message['']
+ def testMapMergeFrom(self):
+ msg = map_unittest_pb2.TestMap()
+ msg.map_int32_int32[12] = 34
+ msg.map_int32_int32[56] = 78
+ msg.map_int64_int64[22] = 33
+ msg.map_int32_foreign_message[111].c = 5
+ msg.map_int32_foreign_message[222].c = 10
+
+ msg2 = map_unittest_pb2.TestMap()
+ msg2.map_int32_int32[12] = 55
+ msg2.map_int64_int64[88] = 99
+ msg2.map_int32_foreign_message[222].c = 15
+ msg2.map_int32_foreign_message[222].d = 20
+
+ msg2.map_int32_int32.MergeFrom(msg.map_int32_int32)
+ self.assertEqual(34, msg2.map_int32_int32[12])
+ self.assertEqual(78, msg2.map_int32_int32[56])
+
+ msg2.map_int64_int64.MergeFrom(msg.map_int64_int64)
+ self.assertEqual(33, msg2.map_int64_int64[22])
+ self.assertEqual(99, msg2.map_int64_int64[88])
+
+ msg2.map_int32_foreign_message.MergeFrom(msg.map_int32_foreign_message)
+ self.assertEqual(5, msg2.map_int32_foreign_message[111].c)
+ self.assertEqual(10, msg2.map_int32_foreign_message[222].c)
+ self.assertFalse(msg2.map_int32_foreign_message[222].HasField('d'))
+
def testMergeFromBadType(self):
msg = map_unittest_pb2.TestMap()
with self.assertRaisesRegexp(
diff --git a/python/google/protobuf/internal/no_package.proto b/python/google/protobuf/internal/no_package.proto
new file mode 100644
index 00000000..f6d26735
--- /dev/null
+++ b/python/google/protobuf/internal/no_package.proto
@@ -0,0 +1,12 @@
+syntax = "proto2";
+
+option py_api_version = 2;
+
+enum NoPackageEnum {
+ NO_PACKAGE_VALUE_0 = 0;
+ NO_PACKAGE_VALUE_1 = 1;
+}
+
+message NoPackageMessage {
+ optional NoPackageEnum no_package_enum = 1;
+}
\ No newline at end of file
diff --git a/python/google/protobuf/internal/text_format_test.py b/python/google/protobuf/internal/text_format_test.py
index ed3445f2..237a2d50 100755
--- a/python/google/protobuf/internal/text_format_test.py
+++ b/python/google/protobuf/internal/text_format_test.py
@@ -48,6 +48,7 @@ except ImportError:
from google.protobuf.internal import _parameterized
+from google.protobuf import any_pb2
from google.protobuf import any_test_pb2
from google.protobuf import map_unittest_pb2
from google.protobuf import unittest_mset_pb2
@@ -99,7 +100,7 @@ class TextFormatBase(unittest.TestCase):
return text
-@_parameterized.Parameters((unittest_pb2), (unittest_proto3_arena_pb2))
+@_parameterized.parameters((unittest_pb2), (unittest_proto3_arena_pb2))
class TextFormatTest(TextFormatBase):
def testPrintExotic(self, message_module):
@@ -369,6 +370,7 @@ class TextFormatTest(TextFormatBase):
def testParseRepeatedScalarShortFormat(self, message_module):
message = message_module.TestAllTypes()
text = ('repeated_int64: [100, 200];\n'
+ 'repeated_int64: []\n'
'repeated_int64: 300,\n'
'repeated_string: ["one", "two"];\n')
text_format.Parse(text, message)
@@ -524,20 +526,68 @@ class OnlyWorksWithProto2RightNowTests(TextFormatBase):
def testPrintInIndexOrder(self):
message = unittest_pb2.TestFieldOrderings()
- message.my_string = '115'
+ # Fields are listed in index order instead of field number.
+ message.my_string = 'str'
message.my_int = 101
message.my_float = 111
message.optional_nested_message.oo = 0
message.optional_nested_message.bb = 1
+ message.Extensions[unittest_pb2.my_extension_string] = 'ext_str0'
+ # Extensions are listed based on the order of extension number.
+ # Extension number 12.
+ message.Extensions[unittest_pb2.TestExtensionOrderings2.
+ test_ext_orderings2].my_string = 'ext_str2'
+ # Extension number 13.
+ message.Extensions[unittest_pb2.TestExtensionOrderings1.
+ test_ext_orderings1].my_string = 'ext_str1'
+ # Extension number 14.
+ message.Extensions[
+ unittest_pb2.TestExtensionOrderings2.TestExtensionOrderings3.
+ test_ext_orderings3].my_string = 'ext_str3'
+
+ # Print in index order.
self.CompareToGoldenText(
- self.RemoveRedundantZeros(text_format.MessageToString(
- message, use_index_order=True)),
- 'my_string: \"115\"\nmy_int: 101\nmy_float: 111\n'
- 'optional_nested_message {\n oo: 0\n bb: 1\n}\n')
+ self.RemoveRedundantZeros(
+ text_format.MessageToString(message, use_index_order=True)),
+ 'my_string: "str"\n'
+ 'my_int: 101\n'
+ 'my_float: 111\n'
+ 'optional_nested_message {\n'
+ ' oo: 0\n'
+ ' bb: 1\n'
+ '}\n'
+ '[protobuf_unittest.TestExtensionOrderings2.test_ext_orderings2] {\n'
+ ' my_string: "ext_str2"\n'
+ '}\n'
+ '[protobuf_unittest.TestExtensionOrderings1.test_ext_orderings1] {\n'
+ ' my_string: "ext_str1"\n'
+ '}\n'
+ '[protobuf_unittest.TestExtensionOrderings2.TestExtensionOrderings3'
+ '.test_ext_orderings3] {\n'
+ ' my_string: "ext_str3"\n'
+ '}\n'
+ '[protobuf_unittest.my_extension_string]: "ext_str0"\n')
+ # By default, print in field number order.
self.CompareToGoldenText(
self.RemoveRedundantZeros(text_format.MessageToString(message)),
- 'my_int: 101\nmy_string: \"115\"\nmy_float: 111\n'
- 'optional_nested_message {\n bb: 1\n oo: 0\n}\n')
+ 'my_int: 101\n'
+ 'my_string: "str"\n'
+ '[protobuf_unittest.TestExtensionOrderings2.test_ext_orderings2] {\n'
+ ' my_string: "ext_str2"\n'
+ '}\n'
+ '[protobuf_unittest.TestExtensionOrderings1.test_ext_orderings1] {\n'
+ ' my_string: "ext_str1"\n'
+ '}\n'
+ '[protobuf_unittest.TestExtensionOrderings2.TestExtensionOrderings3'
+ '.test_ext_orderings3] {\n'
+ ' my_string: "ext_str3"\n'
+ '}\n'
+ '[protobuf_unittest.my_extension_string]: "ext_str0"\n'
+ 'my_float: 111\n'
+ 'optional_nested_message {\n'
+ ' bb: 1\n'
+ ' oo: 0\n'
+ '}\n')
def testMergeLinesGolden(self):
opened = self.ReadGolden('text_format_unittest_data_oneof_implemented.txt')
@@ -970,15 +1020,26 @@ class Proto2Tests(TextFormatBase):
'"protobuf_unittest.optional_int32_extension" extensions.'),
text_format.Parse, text, message)
- def testParseDuplicateNestedMessageScalars(self):
+ def testParseDuplicateMessages(self):
message = unittest_pb2.TestAllTypes()
text = ('optional_nested_message { bb: 1 } '
'optional_nested_message { bb: 2 }')
six.assertRaisesRegex(self, text_format.ParseError, (
- '1:65 : Message type "protobuf_unittest.TestAllTypes.NestedMessage" '
- 'should not have multiple "bb" fields.'), text_format.Parse, text,
+ '1:59 : Message type "protobuf_unittest.TestAllTypes" '
+ 'should not have multiple "optional_nested_message" fields.'),
+ text_format.Parse, text,
message)
+ def testParseDuplicateExtensionMessages(self):
+ message = unittest_pb2.TestAllExtensions()
+ text = ('[protobuf_unittest.optional_nested_message_extension]: {} '
+ '[protobuf_unittest.optional_nested_message_extension]: {}')
+ six.assertRaisesRegex(self, text_format.ParseError, (
+ '1:114 : Message type "protobuf_unittest.TestAllExtensions" '
+ 'should not have multiple '
+ '"protobuf_unittest.optional_nested_message_extension" extensions.'),
+ text_format.Parse, text, message)
+
def testParseDuplicateScalars(self):
message = unittest_pb2.TestAllTypes()
text = ('optional_int32: 42 ' 'optional_int32: 67')
@@ -1065,6 +1126,14 @@ class Proto3Tests(unittest.TestCase):
' }\n'
'}\n')
+ def testTopAnyMessage(self):
+ packed_msg = unittest_pb2.OneString()
+ msg = any_pb2.Any()
+ msg.Pack(packed_msg)
+ text = text_format.MessageToString(msg)
+ other_msg = text_format.Parse(text, any_pb2.Any())
+ self.assertEqual(msg, other_msg)
+
def testPrintMessageExpandAnyRepeated(self):
packed_message = unittest_pb2.OneString()
message = any_test_pb2.TestAny()
@@ -1489,7 +1558,7 @@ class TokenizerTest(unittest.TestCase):
# Tests for pretty printer functionality.
-@_parameterized.Parameters((unittest_pb2), (unittest_proto3_arena_pb2))
+@_parameterized.parameters((unittest_pb2), (unittest_proto3_arena_pb2))
class PrettyPrinterTest(TextFormatBase):
def testPrettyPrintNoMatch(self, message_module):
diff --git a/python/google/protobuf/internal/well_known_types.py b/python/google/protobuf/internal/well_known_types.py
index 3573770b..37a65cfa 100644
--- a/python/google/protobuf/internal/well_known_types.py
+++ b/python/google/protobuf/internal/well_known_types.py
@@ -375,6 +375,9 @@ def _CheckDurationValid(seconds, nanos):
raise Error(
'Duration is not valid: Nanos {0} must be in range '
'[-999999999, 999999999].'.format(nanos))
+ if (nanos < 0 and seconds > 0) or (nanos > 0 and seconds < 0):
+ raise Error(
+ 'Duration is not valid: Sign mismatch.')
def _RoundTowardZero(value, divider):
@@ -649,9 +652,10 @@ def _MergeMessage(
raise ValueError('Error: Field {0} in message {1} is not a singular '
'message field and cannot have sub-fields.'.format(
name, source_descriptor.full_name))
- _MergeMessage(
- child, getattr(source, name), getattr(destination, name),
- replace_message, replace_repeated)
+ if source.HasField(name):
+ _MergeMessage(
+ child, getattr(source, name), getattr(destination, name),
+ replace_message, replace_repeated)
continue
if field.label == FieldDescriptor.LABEL_REPEATED:
if replace_repeated:
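A hedged usage sketch of the new Duration check above: seconds and nanos with opposite signs are now rejected when converting to JSON.

from google.protobuf import duration_pb2
from google.protobuf.internal import well_known_types

duration = duration_pb2.Duration(seconds=-1, nanos=1)
try:
  duration.ToJsonString()
except well_known_types.Error as e:
  print(e)  # Duration is not valid: Sign mismatch.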
diff --git a/python/google/protobuf/internal/well_known_types_test.py b/python/google/protobuf/internal/well_known_types_test.py
index 573bc37d..965940b2 100644
--- a/python/google/protobuf/internal/well_known_types_test.py
+++ b/python/google/protobuf/internal/well_known_types_test.py
@@ -345,6 +345,12 @@ class TimeUtilTest(TimeUtilTestBase):
r'Duration is not valid\: Nanos 1000000000 must be in range'
r' \[-999999999\, 999999999\].',
message.ToJsonString)
+ message.seconds = -1
+ message.nanos = 1
+ self.assertRaisesRegexp(
+ well_known_types.Error,
+ r'Duration is not valid\: Sign mismatch.',
+ message.ToJsonString)
class FieldMaskTest(unittest.TestCase):
@@ -599,6 +605,16 @@ class FieldMaskTest(unittest.TestCase):
self.assertEqual(1, len(nested_dst.payload.repeated_int32))
self.assertEqual(1234, nested_dst.payload.repeated_int32[0])
+ # Test Merge oneof field.
+ new_msg = unittest_pb2.TestOneof2()
+ dst = unittest_pb2.TestOneof2()
+ dst.foo_message.qux_int = 1
+ mask = field_mask_pb2.FieldMask()
+ mask.FromJsonString('fooMessage,fooLazyMessage.quxInt')
+ mask.MergeMessage(new_msg, dst)
+ self.assertTrue(dst.HasField('foo_message'))
+ self.assertFalse(dst.HasField('foo_lazy_message'))
+
def testMergeErrors(self):
src = unittest_pb2.TestAllTypes()
dst = unittest_pb2.TestAllTypes()
diff --git a/python/google/protobuf/json_format.py b/python/google/protobuf/json_format.py
index 878291db..8d338d3e 100644
--- a/python/google/protobuf/json_format.py
+++ b/python/google/protobuf/json_format.py
@@ -42,21 +42,28 @@ Simple usage example:
__author__ = 'jieluo@google.com (Jie Luo)'
+# pylint: disable=g-statement-before-imports,g-import-not-at-top
try:
from collections import OrderedDict
except ImportError:
- from ordereddict import OrderedDict #PY26
+ from ordereddict import OrderedDict # PY26
+# pylint: enable=g-statement-before-imports,g-import-not-at-top
+
import base64
import json
import math
+
+from operator import methodcaller
+
import re
-import six
import sys
-from operator import methodcaller
+import six
+
from google.protobuf import descriptor
from google.protobuf import symbol_database
+
_TIMESTAMPFOMAT = '%Y-%m-%dT%H:%M:%S'
_INT_TYPES = frozenset([descriptor.FieldDescriptor.CPPTYPE_INT32,
descriptor.FieldDescriptor.CPPTYPE_UINT32,
@@ -93,7 +100,8 @@ def MessageToJson(message,
including_default_value_fields=False,
preserving_proto_field_name=False,
indent=2,
- sort_keys=False):
+ sort_keys=False,
+ use_integers_for_enums=False):
"""Converts protobuf message to JSON format.
Args:
@@ -108,18 +116,21 @@ def MessageToJson(message,
indent: The JSON object will be pretty-printed with this indent level.
An indent level of 0 or negative will only insert newlines.
sort_keys: If True, then the output will be sorted by field names.
+ use_integers_for_enums: If true, print integers instead of enum names.
Returns:
A string containing the JSON formatted protocol buffer message.
"""
printer = _Printer(including_default_value_fields,
- preserving_proto_field_name)
+ preserving_proto_field_name,
+ use_integers_for_enums)
return printer.ToJsonString(message, indent, sort_keys)
def MessageToDict(message,
including_default_value_fields=False,
- preserving_proto_field_name=False):
+ preserving_proto_field_name=False,
+ use_integers_for_enums=False):
"""Converts protobuf message to a dictionary.
When the dictionary is encoded to JSON, it conforms to proto3 JSON spec.
@@ -133,12 +144,14 @@ def MessageToDict(message,
preserving_proto_field_name: If True, use the original proto field
names as defined in the .proto file. If False, convert the field
names to lowerCamelCase.
+ use_integers_for_enums: If true, print integers instead of enum names.
Returns:
A dict representation of the protocol buffer message.
"""
printer = _Printer(including_default_value_fields,
- preserving_proto_field_name)
+ preserving_proto_field_name,
+ use_integers_for_enums)
# pylint: disable=protected-access
return printer._MessageToJsonObject(message)
@@ -154,9 +167,11 @@ class _Printer(object):
def __init__(self,
including_default_value_fields=False,
- preserving_proto_field_name=False):
+ preserving_proto_field_name=False,
+ use_integers_for_enums=False):
self.including_default_value_fields = including_default_value_fields
self.preserving_proto_field_name = preserving_proto_field_name
+ self.use_integers_for_enums = use_integers_for_enums
def ToJsonString(self, message, indent, sort_keys):
js = self._MessageToJsonObject(message)
@@ -247,6 +262,8 @@ class _Printer(object):
if field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_MESSAGE:
return self._MessageToJsonObject(value)
elif field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_ENUM:
+ if self.use_integers_for_enums:
+ return value
enum_value = field.enum_type.values_by_number.get(value, None)
if enum_value is not None:
return enum_value.name
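A hedged usage sketch of the new use_integers_for_enums flag, using google.protobuf.type_pb2 as an example message with an enum-typed field; any message with an enum field behaves the same way.

from google.protobuf import json_format
from google.protobuf import type_pb2

field = type_pb2.Field(kind=type_pb2.Field.TYPE_STRING)
print(json_format.MessageToJson(field))                               # "kind": "TYPE_STRING"
print(json_format.MessageToJson(field, use_integers_for_enums=True))  # "kind": 9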
diff --git a/python/google/protobuf/pyext/descriptor.cc b/python/google/protobuf/pyext/descriptor.cc
index 9634ea05..bacaaf31 100644
--- a/python/google/protobuf/pyext/descriptor.cc
+++ b/python/google/protobuf/pyext/descriptor.cc
@@ -188,39 +188,36 @@ const FileDescriptor* GetFileDescriptor(const MethodDescriptor* descriptor) {
// Always returns a new reference.
template<class DescriptorClass>
static PyObject* GetOrBuildOptions(const DescriptorClass *descriptor) {
- // Options (and their extensions) are completely resolved in the proto file
- // containing the descriptor.
- PyDescriptorPool* pool = GetDescriptorPool_FromPool(
+ // Options are cached in the pool that owns the descriptor.
+ // First search in the cache.
+ PyDescriptorPool* caching_pool = GetDescriptorPool_FromPool(
GetFileDescriptor(descriptor)->pool());
-
hash_map<const void*, PyObject*>* descriptor_options =
- pool->descriptor_options;
- // First search in the cache.
+ caching_pool->descriptor_options;
if (descriptor_options->find(descriptor) != descriptor_options->end()) {
PyObject *value = (*descriptor_options)[descriptor];
Py_INCREF(value);
return value;
}
+ // Similar to the C++ implementation, we return an Options object from the
+ // default (generated) factory, so that client code knows that it can use
+ // extensions from generated files:
+ // d.GetOptions().Extensions[some_pb2.extension]
+ //
+ // The consequence is that extensions not defined in the default pool won't
+ // be available. If needed, we could add an optional 'message_factory'
+ // parameter to the GetOptions() function.
+ PyMessageFactory* message_factory =
+ GetDefaultDescriptorPool()->py_message_factory;
+
// Build the Options object: get its Python class, and make a copy of the C++
// read-only instance.
const Message& options(descriptor->options());
const Descriptor *message_type = options.GetDescriptor();
- PyMessageFactory* message_factory = pool->py_message_factory;
- CMessageClass* message_class = message_factory::GetMessageClass(
+ CMessageClass* message_class = message_factory::GetOrCreateMessageClass(
message_factory, message_type);
if (message_class == NULL) {
- // The Options message was not found in the current DescriptorPool.
- // This means that the pool cannot contain any extensions to the Options
- // message either, so falling back to the basic pool we can only increase
- // the chances of successfully parsing the options.
- PyErr_Clear();
- pool = GetDefaultDescriptorPool();
- message_factory = pool->py_message_factory;
- message_class = message_factory::GetMessageClass(
- message_factory, message_type);
- }
- if (message_class == NULL) {
PyErr_Format(PyExc_TypeError, "Could not retrieve class for Options: %s",
message_type->full_name().c_str());
return NULL;
@@ -248,7 +245,8 @@ static PyObject* GetOrBuildOptions(const DescriptorClass *descriptor) {
options.SerializeToString(&serialized);
io::CodedInputStream input(
reinterpret_cast<const uint8*>(serialized.c_str()), serialized.size());
- input.SetExtensionRegistry(pool->pool, message_factory->message_factory);
+ input.SetExtensionRegistry(message_factory->pool->pool,
+ message_factory->message_factory);
bool success = cmsg->message->MergePartialFromCodedStream(&input);
if (!success) {
PyErr_Format(PyExc_ValueError, "Error parsing Options message");
@@ -564,6 +562,11 @@ static int SetOptions(PyBaseDescriptor *self, PyObject *value,
return CheckCalledFromGeneratedFile("_options");
}
+static int SetSerializedOptions(PyBaseDescriptor *self, PyObject *value,
+ void *closure) {
+ return CheckCalledFromGeneratedFile("_serialized_options");
+}
+
static PyObject* CopyToProto(PyBaseDescriptor *self, PyObject *target) {
return CopyToPythonProto<DescriptorProto>(_GetDescriptor(self), target);
}
@@ -623,6 +626,8 @@ static PyGetSetDef Getters[] = {
{ "is_extendable", (getter)IsExtendable, (setter)NULL},
{ "has_options", (getter)GetHasOptions, (setter)SetHasOptions, "Has Options"},
{ "_options", (getter)NULL, (setter)SetOptions, "Options"},
+ { "_serialized_options", (getter)NULL, (setter)SetSerializedOptions,
+ "Serialized Options"},
{ "syntax", (getter)GetSyntax, (setter)NULL, "Syntax"},
{NULL}
};
@@ -785,7 +790,7 @@ static PyObject* GetDefaultValue(PyBaseDescriptor *self, void *closure) {
break;
}
case FieldDescriptor::CPPTYPE_STRING: {
- string value = _GetDescriptor(self)->default_value_string();
+ const string& value = _GetDescriptor(self)->default_value_string();
result = ToStringObject(_GetDescriptor(self), value);
break;
}
@@ -897,6 +902,10 @@ static int SetOptions(PyBaseDescriptor *self, PyObject *value,
return CheckCalledFromGeneratedFile("_options");
}
+static int SetSerializedOptions(PyBaseDescriptor *self, PyObject *value,
+ void *closure) {
+ return CheckCalledFromGeneratedFile("_serialized_options");
+}
static PyGetSetDef Getters[] = {
{ "full_name", (getter)GetFullName, NULL, "Full name"},
@@ -926,6 +935,8 @@ static PyGetSetDef Getters[] = {
"Containing oneof"},
{ "has_options", (getter)GetHasOptions, (setter)SetHasOptions, "Has Options"},
{ "_options", (getter)NULL, (setter)SetOptions, "Options"},
+ { "_serialized_options", (getter)NULL, (setter)SetSerializedOptions,
+ "Serialized Options"},
{NULL}
};
@@ -1055,6 +1066,11 @@ static int SetOptions(PyBaseDescriptor *self, PyObject *value,
return CheckCalledFromGeneratedFile("_options");
}
+static int SetSerializedOptions(PyBaseDescriptor *self, PyObject *value,
+ void *closure) {
+ return CheckCalledFromGeneratedFile("_serialized_options");
+}
+
static PyObject* CopyToProto(PyBaseDescriptor *self, PyObject *target) {
return CopyToPythonProto<EnumDescriptorProto>(_GetDescriptor(self), target);
}
@@ -1079,6 +1095,8 @@ static PyGetSetDef Getters[] = {
"Containing type"},
{ "has_options", (getter)GetHasOptions, (setter)SetHasOptions, "Has Options"},
{ "_options", (getter)NULL, (setter)SetOptions, "Options"},
+ { "_serialized_options", (getter)NULL, (setter)SetSerializedOptions,
+ "Serialized Options"},
{NULL}
};
@@ -1179,6 +1197,10 @@ static int SetOptions(PyBaseDescriptor *self, PyObject *value,
return CheckCalledFromGeneratedFile("_options");
}
+static int SetSerializedOptions(PyBaseDescriptor *self, PyObject *value,
+ void *closure) {
+ return CheckCalledFromGeneratedFile("_serialized_options");
+}
static PyGetSetDef Getters[] = {
{ "name", (getter)GetName, NULL, "name"},
@@ -1188,6 +1210,8 @@ static PyGetSetDef Getters[] = {
{ "has_options", (getter)GetHasOptions, (setter)SetHasOptions, "Has Options"},
{ "_options", (getter)NULL, (setter)SetOptions, "Options"},
+ { "_serialized_options", (getter)NULL, (setter)SetSerializedOptions,
+ "Serialized Options"},
{NULL}
};
@@ -1330,6 +1354,11 @@ static int SetOptions(PyFileDescriptor *self, PyObject *value,
return CheckCalledFromGeneratedFile("_options");
}
+static int SetSerializedOptions(PyFileDescriptor *self, PyObject *value,
+ void *closure) {
+ return CheckCalledFromGeneratedFile("_serialized_options");
+}
+
static PyObject* GetSyntax(PyFileDescriptor *self, void *closure) {
return PyString_InternFromString(
FileDescriptor::SyntaxName(_GetDescriptor(self)->syntax()));
@@ -1355,6 +1384,8 @@ static PyGetSetDef Getters[] = {
{ "has_options", (getter)GetHasOptions, (setter)SetHasOptions, "Has Options"},
{ "_options", (getter)NULL, (setter)SetOptions, "Options"},
+ { "_serialized_options", (getter)NULL, (setter)SetSerializedOptions,
+ "Serialized Options"},
{ "syntax", (getter)GetSyntax, (setter)NULL, "Syntax"},
{NULL}
};
@@ -1500,6 +1531,11 @@ static int SetOptions(PyBaseDescriptor *self, PyObject *value,
return CheckCalledFromGeneratedFile("_options");
}
+static int SetSerializedOptions(PyBaseDescriptor *self, PyObject *value,
+ void *closure) {
+ return CheckCalledFromGeneratedFile("_serialized_options");
+}
+
static PyGetSetDef Getters[] = {
{ "name", (getter)GetName, NULL, "Name"},
{ "full_name", (getter)GetFullName, NULL, "Full name"},
@@ -1508,6 +1544,8 @@ static PyGetSetDef Getters[] = {
{ "containing_type", (getter)GetContainingType, NULL, "Containing type"},
{ "has_options", (getter)GetHasOptions, (setter)SetHasOptions, "Has Options"},
{ "_options", (getter)NULL, (setter)SetOptions, "Options"},
+ { "_serialized_options", (getter)NULL, (setter)SetSerializedOptions,
+ "Serialized Options"},
{ "fields", (getter)GetFields, NULL, "Fields"},
{NULL}
};
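From the Python side, the behavior described in the comment above can be observed roughly like this (a hedged sketch; reading a custom option through options.Extensions[...] additionally requires a generated module that defines the extension, shown only as a comment with hypothetical names):

from google.protobuf import descriptor_pb2
from google.protobuf import timestamp_pb2

options = timestamp_pb2.Timestamp.DESCRIPTOR.GetOptions()
print(isinstance(options, descriptor_pb2.MessageOptions))  # True
# With a custom option defined in a generated file, the pattern from the
# comment above would be: options.Extensions[some_pb2.my_option]  (hypothetical)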
diff --git a/python/google/protobuf/pyext/descriptor_pool.cc b/python/google/protobuf/pyext/descriptor_pool.cc
index 16f4d49d..95882aeb 100644
--- a/python/google/protobuf/pyext/descriptor_pool.cc
+++ b/python/google/protobuf/pyext/descriptor_pool.cc
@@ -149,7 +149,8 @@ static PyObject* New(PyTypeObject* type,
PyDescriptorPool_NewWithDatabase(database));
}
-static void Dealloc(PyDescriptorPool* self) {
+static void Dealloc(PyObject* pself) {
+ PyDescriptorPool* self = reinterpret_cast<PyDescriptorPool*>(pself);
descriptor_pool_map.erase(self->pool);
Py_CLEAR(self->py_message_factory);
for (hash_map<const void*, PyObject*>::iterator it =
@@ -163,7 +164,7 @@ static void Dealloc(PyDescriptorPool* self) {
Py_TYPE(self)->tp_free(reinterpret_cast<PyObject*>(self));
}
-PyObject* FindMessageByName(PyDescriptorPool* self, PyObject* arg) {
+static PyObject* FindMessageByName(PyObject* self, PyObject* arg) {
Py_ssize_t name_size;
char* name;
if (PyString_AsStringAndSize(arg, &name, &name_size) < 0) {
@@ -171,7 +172,8 @@ PyObject* FindMessageByName(PyDescriptorPool* self, PyObject* arg) {
}
const Descriptor* message_descriptor =
- self->pool->FindMessageTypeByName(string(name, name_size));
+ reinterpret_cast<PyDescriptorPool*>(self)->pool->FindMessageTypeByName(
+ string(name, name_size));
if (message_descriptor == NULL) {
PyErr_Format(PyExc_KeyError, "Couldn't find message %.200s", name);
@@ -184,7 +186,7 @@ PyObject* FindMessageByName(PyDescriptorPool* self, PyObject* arg) {
-PyObject* FindFileByName(PyDescriptorPool* self, PyObject* arg) {
+static PyObject* FindFileByName(PyObject* self, PyObject* arg) {
Py_ssize_t name_size;
char* name;
if (PyString_AsStringAndSize(arg, &name, &name_size) < 0) {
@@ -192,7 +194,8 @@ PyObject* FindFileByName(PyDescriptorPool* self, PyObject* arg) {
}
const FileDescriptor* file_descriptor =
- self->pool->FindFileByName(string(name, name_size));
+ reinterpret_cast<PyDescriptorPool*>(self)->pool->FindFileByName(
+ string(name, name_size));
if (file_descriptor == NULL) {
PyErr_Format(PyExc_KeyError, "Couldn't find file %.200s", name);
return NULL;
@@ -218,6 +221,10 @@ PyObject* FindFieldByName(PyDescriptorPool* self, PyObject* arg) {
return PyFieldDescriptor_FromDescriptor(field_descriptor);
}
+static PyObject* FindFieldByNameMethod(PyObject* self, PyObject* arg) {
+ return FindFieldByName(reinterpret_cast<PyDescriptorPool*>(self), arg);
+}
+
PyObject* FindExtensionByName(PyDescriptorPool* self, PyObject* arg) {
Py_ssize_t name_size;
char* name;
@@ -235,6 +242,10 @@ PyObject* FindExtensionByName(PyDescriptorPool* self, PyObject* arg) {
return PyFieldDescriptor_FromDescriptor(field_descriptor);
}
+static PyObject* FindExtensionByNameMethod(PyObject* self, PyObject* arg) {
+ return FindExtensionByName(reinterpret_cast<PyDescriptorPool*>(self), arg);
+}
+
PyObject* FindEnumTypeByName(PyDescriptorPool* self, PyObject* arg) {
Py_ssize_t name_size;
char* name;
@@ -252,6 +263,10 @@ PyObject* FindEnumTypeByName(PyDescriptorPool* self, PyObject* arg) {
return PyEnumDescriptor_FromDescriptor(enum_descriptor);
}
+static PyObject* FindEnumTypeByNameMethod(PyObject* self, PyObject* arg) {
+ return FindEnumTypeByName(reinterpret_cast<PyDescriptorPool*>(self), arg);
+}
+
PyObject* FindOneofByName(PyDescriptorPool* self, PyObject* arg) {
Py_ssize_t name_size;
char* name;
@@ -269,7 +284,11 @@ PyObject* FindOneofByName(PyDescriptorPool* self, PyObject* arg) {
return PyOneofDescriptor_FromDescriptor(oneof_descriptor);
}
-PyObject* FindServiceByName(PyDescriptorPool* self, PyObject* arg) {
+static PyObject* FindOneofByNameMethod(PyObject* self, PyObject* arg) {
+ return FindOneofByName(reinterpret_cast<PyDescriptorPool*>(self), arg);
+}
+
+static PyObject* FindServiceByName(PyObject* self, PyObject* arg) {
Py_ssize_t name_size;
char* name;
if (PyString_AsStringAndSize(arg, &name, &name_size) < 0) {
@@ -277,7 +296,8 @@ PyObject* FindServiceByName(PyDescriptorPool* self, PyObject* arg) {
}
const ServiceDescriptor* service_descriptor =
- self->pool->FindServiceByName(string(name, name_size));
+ reinterpret_cast<PyDescriptorPool*>(self)->pool->FindServiceByName(
+ string(name, name_size));
if (service_descriptor == NULL) {
PyErr_Format(PyExc_KeyError, "Couldn't find service %.200s", name);
return NULL;
@@ -286,7 +306,7 @@ PyObject* FindServiceByName(PyDescriptorPool* self, PyObject* arg) {
return PyServiceDescriptor_FromDescriptor(service_descriptor);
}
-PyObject* FindMethodByName(PyDescriptorPool* self, PyObject* arg) {
+static PyObject* FindMethodByName(PyObject* self, PyObject* arg) {
Py_ssize_t name_size;
char* name;
if (PyString_AsStringAndSize(arg, &name, &name_size) < 0) {
@@ -294,7 +314,8 @@ PyObject* FindMethodByName(PyDescriptorPool* self, PyObject* arg) {
}
const MethodDescriptor* method_descriptor =
- self->pool->FindMethodByName(string(name, name_size));
+ reinterpret_cast<PyDescriptorPool*>(self)->pool->FindMethodByName(
+ string(name, name_size));
if (method_descriptor == NULL) {
PyErr_Format(PyExc_KeyError, "Couldn't find method %.200s", name);
return NULL;
@@ -303,7 +324,7 @@ PyObject* FindMethodByName(PyDescriptorPool* self, PyObject* arg) {
return PyMethodDescriptor_FromDescriptor(method_descriptor);
}
-PyObject* FindFileContainingSymbol(PyDescriptorPool* self, PyObject* arg) {
+static PyObject* FindFileContainingSymbol(PyObject* self, PyObject* arg) {
Py_ssize_t name_size;
char* name;
if (PyString_AsStringAndSize(arg, &name, &name_size) < 0) {
@@ -311,7 +332,8 @@ PyObject* FindFileContainingSymbol(PyDescriptorPool* self, PyObject* arg) {
}
const FileDescriptor* file_descriptor =
- self->pool->FindFileContainingSymbol(string(name, name_size));
+ reinterpret_cast<PyDescriptorPool*>(self)->pool->FindFileContainingSymbol(
+ string(name, name_size));
if (file_descriptor == NULL) {
PyErr_Format(PyExc_KeyError, "Couldn't find symbol %.200s", name);
return NULL;
@@ -320,7 +342,7 @@ PyObject* FindFileContainingSymbol(PyDescriptorPool* self, PyObject* arg) {
return PyFileDescriptor_FromDescriptor(file_descriptor);
}
-PyObject* FindExtensionByNumber(PyDescriptorPool* self, PyObject* args) {
+static PyObject* FindExtensionByNumber(PyObject* self, PyObject* args) {
PyObject* message_descriptor;
int number;
if (!PyArg_ParseTuple(args, "Oi", &message_descriptor, &number)) {
@@ -333,7 +355,8 @@ PyObject* FindExtensionByNumber(PyDescriptorPool* self, PyObject* args) {
}
const FieldDescriptor* extension_descriptor =
- self->pool->FindExtensionByNumber(descriptor, number);
+ reinterpret_cast<PyDescriptorPool*>(self)->pool->FindExtensionByNumber(
+ descriptor, number);
if (extension_descriptor == NULL) {
PyErr_Format(PyExc_KeyError, "Couldn't find extension %d", number);
return NULL;
@@ -342,14 +365,15 @@ PyObject* FindExtensionByNumber(PyDescriptorPool* self, PyObject* args) {
return PyFieldDescriptor_FromDescriptor(extension_descriptor);
}
-PyObject* FindAllExtensions(PyDescriptorPool* self, PyObject* arg) {
+static PyObject* FindAllExtensions(PyObject* self, PyObject* arg) {
const Descriptor* descriptor = PyMessageDescriptor_AsDescriptor(arg);
if (descriptor == NULL) {
return NULL;
}
std::vector<const FieldDescriptor*> extensions;
- self->pool->FindAllExtensions(descriptor, &extensions);
+ reinterpret_cast<PyDescriptorPool*>(self)->pool->FindAllExtensions(
+ descriptor, &extensions);
ScopedPyObjectPtr result(PyList_New(extensions.size()));
if (result == NULL) {
@@ -374,14 +398,15 @@ PyObject* FindAllExtensions(PyDescriptorPool* self, PyObject* arg) {
// call a function that will just be a no-op?
// TODO(amauryfa): Need to investigate further.
-PyObject* AddFileDescriptor(PyDescriptorPool* self, PyObject* descriptor) {
+static PyObject* AddFileDescriptor(PyObject* self, PyObject* descriptor) {
const FileDescriptor* file_descriptor =
PyFileDescriptor_AsDescriptor(descriptor);
if (!file_descriptor) {
return NULL;
}
if (file_descriptor !=
- self->pool->FindFileByName(file_descriptor->name())) {
+ reinterpret_cast<PyDescriptorPool*>(self)->pool->FindFileByName(
+ file_descriptor->name())) {
PyErr_Format(PyExc_ValueError,
"The file descriptor %s does not belong to this pool",
file_descriptor->name().c_str());
@@ -390,14 +415,15 @@ PyObject* AddFileDescriptor(PyDescriptorPool* self, PyObject* descriptor) {
Py_RETURN_NONE;
}
-PyObject* AddDescriptor(PyDescriptorPool* self, PyObject* descriptor) {
+static PyObject* AddDescriptor(PyObject* self, PyObject* descriptor) {
const Descriptor* message_descriptor =
PyMessageDescriptor_AsDescriptor(descriptor);
if (!message_descriptor) {
return NULL;
}
if (message_descriptor !=
- self->pool->FindMessageTypeByName(message_descriptor->full_name())) {
+ reinterpret_cast<PyDescriptorPool*>(self)->pool->FindMessageTypeByName(
+ message_descriptor->full_name())) {
PyErr_Format(PyExc_ValueError,
"The message descriptor %s does not belong to this pool",
message_descriptor->full_name().c_str());
@@ -406,14 +432,15 @@ PyObject* AddDescriptor(PyDescriptorPool* self, PyObject* descriptor) {
Py_RETURN_NONE;
}
-PyObject* AddEnumDescriptor(PyDescriptorPool* self, PyObject* descriptor) {
+static PyObject* AddEnumDescriptor(PyObject* self, PyObject* descriptor) {
const EnumDescriptor* enum_descriptor =
PyEnumDescriptor_AsDescriptor(descriptor);
if (!enum_descriptor) {
return NULL;
}
if (enum_descriptor !=
- self->pool->FindEnumTypeByName(enum_descriptor->full_name())) {
+ reinterpret_cast<PyDescriptorPool*>(self)->pool->FindEnumTypeByName(
+ enum_descriptor->full_name())) {
PyErr_Format(PyExc_ValueError,
"The enum descriptor %s does not belong to this pool",
enum_descriptor->full_name().c_str());
@@ -422,14 +449,15 @@ PyObject* AddEnumDescriptor(PyDescriptorPool* self, PyObject* descriptor) {
Py_RETURN_NONE;
}
-PyObject* AddExtensionDescriptor(PyDescriptorPool* self, PyObject* descriptor) {
+static PyObject* AddExtensionDescriptor(PyObject* self, PyObject* descriptor) {
const FieldDescriptor* extension_descriptor =
PyFieldDescriptor_AsDescriptor(descriptor);
if (!extension_descriptor) {
return NULL;
}
if (extension_descriptor !=
- self->pool->FindExtensionByName(extension_descriptor->full_name())) {
+ reinterpret_cast<PyDescriptorPool*>(self)->pool->FindExtensionByName(
+ extension_descriptor->full_name())) {
PyErr_Format(PyExc_ValueError,
"The extension descriptor %s does not belong to this pool",
extension_descriptor->full_name().c_str());
@@ -438,14 +466,15 @@ PyObject* AddExtensionDescriptor(PyDescriptorPool* self, PyObject* descriptor) {
Py_RETURN_NONE;
}
-PyObject* AddServiceDescriptor(PyDescriptorPool* self, PyObject* descriptor) {
+static PyObject* AddServiceDescriptor(PyObject* self, PyObject* descriptor) {
const ServiceDescriptor* service_descriptor =
PyServiceDescriptor_AsDescriptor(descriptor);
if (!service_descriptor) {
return NULL;
}
if (service_descriptor !=
- self->pool->FindServiceByName(service_descriptor->full_name())) {
+ reinterpret_cast<PyDescriptorPool*>(self)->pool->FindServiceByName(
+ service_descriptor->full_name())) {
PyErr_Format(PyExc_ValueError,
"The service descriptor %s does not belong to this pool",
service_descriptor->full_name().c_str());
@@ -481,7 +510,8 @@ class BuildFileErrorCollector : public DescriptorPool::ErrorCollector {
bool had_errors;
};
-PyObject* AddSerializedFile(PyDescriptorPool* self, PyObject* serialized_pb) {
+static PyObject* AddSerializedFile(PyObject* pself, PyObject* serialized_pb) {
+ PyDescriptorPool* self = reinterpret_cast<PyDescriptorPool*>(pself);
char* message_type;
Py_ssize_t message_len;
@@ -529,7 +559,7 @@ PyObject* AddSerializedFile(PyDescriptorPool* self, PyObject* serialized_pb) {
descriptor, serialized_pb);
}
-PyObject* Add(PyDescriptorPool* self, PyObject* file_descriptor_proto) {
+static PyObject* Add(PyObject* self, PyObject* file_descriptor_proto) {
ScopedPyObjectPtr serialized_pb(
PyObject_CallMethod(file_descriptor_proto, "SerializeToString", NULL));
if (serialized_pb == NULL) {
@@ -539,46 +569,46 @@ PyObject* Add(PyDescriptorPool* self, PyObject* file_descriptor_proto) {
}
static PyMethodDef Methods[] = {
- { "Add", (PyCFunction)Add, METH_O,
+ { "Add", Add, METH_O,
"Adds the FileDescriptorProto and its types to this pool." },
- { "AddSerializedFile", (PyCFunction)AddSerializedFile, METH_O,
+ { "AddSerializedFile", AddSerializedFile, METH_O,
"Adds a serialized FileDescriptorProto to this pool." },
// TODO(amauryfa): Understand why the Python implementation differs from
// this one, ask users to use another API and deprecate these functions.
- { "AddFileDescriptor", (PyCFunction)AddFileDescriptor, METH_O,
+ { "AddFileDescriptor", AddFileDescriptor, METH_O,
"No-op. Add() must have been called before." },
- { "AddDescriptor", (PyCFunction)AddDescriptor, METH_O,
+ { "AddDescriptor", AddDescriptor, METH_O,
"No-op. Add() must have been called before." },
- { "AddEnumDescriptor", (PyCFunction)AddEnumDescriptor, METH_O,
+ { "AddEnumDescriptor", AddEnumDescriptor, METH_O,
"No-op. Add() must have been called before." },
- { "AddExtensionDescriptor", (PyCFunction)AddExtensionDescriptor, METH_O,
+ { "AddExtensionDescriptor", AddExtensionDescriptor, METH_O,
"No-op. Add() must have been called before." },
- { "AddServiceDescriptor", (PyCFunction)AddServiceDescriptor, METH_O,
+ { "AddServiceDescriptor", AddServiceDescriptor, METH_O,
"No-op. Add() must have been called before." },
- { "FindFileByName", (PyCFunction)FindFileByName, METH_O,
+ { "FindFileByName", FindFileByName, METH_O,
"Searches for a file descriptor by its .proto name." },
- { "FindMessageTypeByName", (PyCFunction)FindMessageByName, METH_O,
+ { "FindMessageTypeByName", FindMessageByName, METH_O,
"Searches for a message descriptor by full name." },
- { "FindFieldByName", (PyCFunction)FindFieldByName, METH_O,
+ { "FindFieldByName", FindFieldByNameMethod, METH_O,
"Searches for a field descriptor by full name." },
- { "FindExtensionByName", (PyCFunction)FindExtensionByName, METH_O,
+ { "FindExtensionByName", FindExtensionByNameMethod, METH_O,
"Searches for extension descriptor by full name." },
- { "FindEnumTypeByName", (PyCFunction)FindEnumTypeByName, METH_O,
+ { "FindEnumTypeByName", FindEnumTypeByNameMethod, METH_O,
"Searches for enum type descriptor by full name." },
- { "FindOneofByName", (PyCFunction)FindOneofByName, METH_O,
+ { "FindOneofByName", FindOneofByNameMethod, METH_O,
"Searches for oneof descriptor by full name." },
- { "FindServiceByName", (PyCFunction)FindServiceByName, METH_O,
+ { "FindServiceByName", FindServiceByName, METH_O,
"Searches for service descriptor by full name." },
- { "FindMethodByName", (PyCFunction)FindMethodByName, METH_O,
+ { "FindMethodByName", FindMethodByName, METH_O,
"Searches for method descriptor by full name." },
- { "FindFileContainingSymbol", (PyCFunction)FindFileContainingSymbol, METH_O,
+ { "FindFileContainingSymbol", FindFileContainingSymbol, METH_O,
"Gets the FileDescriptor containing the specified symbol." },
- { "FindExtensionByNumber", (PyCFunction)FindExtensionByNumber, METH_VARARGS,
+ { "FindExtensionByNumber", FindExtensionByNumber, METH_VARARGS,
"Gets the extension descriptor for the given number." },
- { "FindAllExtensions", (PyCFunction)FindAllExtensions, METH_O,
+ { "FindAllExtensions", FindAllExtensions, METH_O,
"Gets all known extensions of the given message descriptor." },
{NULL}
};
@@ -590,7 +620,7 @@ PyTypeObject PyDescriptorPool_Type = {
FULL_MODULE_NAME ".DescriptorPool", // tp_name
sizeof(PyDescriptorPool), // tp_basicsize
0, // tp_itemsize
- (destructor)cdescriptor_pool::Dealloc, // tp_dealloc
+ cdescriptor_pool::Dealloc, // tp_dealloc
0, // tp_print
0, // tp_getattr
0, // tp_setattr
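The Find*/Add* entry points above only change their C-level signatures to the standard PyCFunction form; their Python-level behaviour is unchanged. A minimal sketch of how the lookup methods are reached from Python, assuming a hypothetical generated module my_pb2 that registers a message my.pkg.MyMessage with extensions (the names and the field number 100 are placeholders):

    from google.protobuf import descriptor_pool
    import my_pb2  # hypothetical generated module registering my.pkg.MyMessage

    pool = descriptor_pool.Default()
    file_desc = pool.FindFileContainingSymbol('my.pkg.MyMessage')
    msg_desc = pool.FindMessageTypeByName('my.pkg.MyMessage')
    ext_desc = pool.FindExtensionByNumber(msg_desc, 100)   # KeyError if unknown
    all_exts = pool.FindAllExtensions(msg_desc)            # list of FieldDescriptor
    print(file_desc.name, ext_desc.full_name, len(all_exts))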
diff --git a/python/google/protobuf/pyext/extension_dict.cc b/python/google/protobuf/pyext/extension_dict.cc
index 6830b10d..018b5c2c 100644
--- a/python/google/protobuf/pyext/extension_dict.cc
+++ b/python/google/protobuf/pyext/extension_dict.cc
@@ -33,9 +33,6 @@
#include <google/protobuf/pyext/extension_dict.h>
#include <memory>
-#ifndef _SHARED_PTR_H
-#include <google/protobuf/stubs/shared_ptr.h>
-#endif
#include <google/protobuf/stubs/logging.h>
#include <google/protobuf/stubs/common.h>
diff --git a/python/google/protobuf/pyext/extension_dict.h b/python/google/protobuf/pyext/extension_dict.h
index 65b87862..0de2c4ee 100644
--- a/python/google/protobuf/pyext/extension_dict.h
+++ b/python/google/protobuf/pyext/extension_dict.h
@@ -37,9 +37,8 @@
#include <Python.h>
#include <memory>
-#ifndef _SHARED_PTR_H
-#include <google/protobuf/stubs/shared_ptr.h>
-#endif
+
+#include <google/protobuf/pyext/message.h>
namespace google {
namespace protobuf {
@@ -47,16 +46,8 @@ namespace protobuf {
class Message;
class FieldDescriptor;
-#ifdef _SHARED_PTR_H
-using std::shared_ptr;
-#else
-using internal::shared_ptr;
-#endif
-
namespace python {
-struct CMessage;
-
typedef struct ExtensionDict {
PyObject_HEAD;
@@ -64,7 +55,7 @@ typedef struct ExtensionDict {
// proto tree. Every Python container class holds a
// reference to it in order to keep it alive as long as there's a
// Python object that references any part of the tree.
- shared_ptr<Message> owner;
+ CMessage::OwnerRef owner;
// Weak reference to parent message. Used to make sure
// the parent is writable when an extension field is modified.
diff --git a/python/google/protobuf/pyext/map_container.cc b/python/google/protobuf/pyext/map_container.cc
index abd15b77..6d7ee285 100644
--- a/python/google/protobuf/pyext/map_container.cc
+++ b/python/google/protobuf/pyext/map_container.cc
@@ -33,9 +33,6 @@
#include <google/protobuf/pyext/map_container.h>
#include <memory>
-#ifndef _SHARED_PTR_H
-#include <google/protobuf/stubs/shared_ptr.h>
-#endif
#include <google/protobuf/stubs/logging.h>
#include <google/protobuf/stubs/common.h>
@@ -76,7 +73,7 @@ class MapReflectionFriend {
struct MapIterator {
PyObject_HEAD;
- google::protobuf::scoped_ptr< ::google::protobuf::MapIterator> iter;
+ std::unique_ptr<::google::protobuf::MapIterator> iter;
// A pointer back to the container, so we can notice changes to the version.
// We own a ref on this.
@@ -94,7 +91,7 @@ struct MapIterator {
// as this iterator does. This is solely for the benefit of the MapIterator
// destructor -- we should never actually access the iterator in this state
// except to delete it.
- shared_ptr<Message> owner;
+ CMessage::OwnerRef owner;
// The version of the map when we took the iterator to it.
//
@@ -339,6 +336,24 @@ PyObject* GetEntryClass(PyObject* _self) {
return reinterpret_cast<PyObject*>(message_class);
}
+PyObject* MergeFrom(PyObject* _self, PyObject* arg) {
+ MapContainer* self = GetMap(_self);
+ MapContainer* other_map = GetMap(arg);
+ Message* message = self->GetMutableMessage();
+ const Message* other_message = other_map->message;
+ const Reflection* reflection = message->GetReflection();
+ const Reflection* other_reflection = other_message->GetReflection();
+ int count = other_reflection->FieldSize(
+ *other_message, other_map->parent_field_descriptor);
+  for (int i = 0; i < count; i++) {
+ reflection->AddMessage(message, self->parent_field_descriptor)->MergeFrom(
+ other_reflection->GetRepeatedMessage(
+ *other_message, other_map->parent_field_descriptor, i));
+ }
+ self->version++;
+ Py_RETURN_NONE;
+}
+
PyObject* MapReflectionFriend::Contains(PyObject* _self, PyObject* key) {
MapContainer* self = GetMap(_self);
@@ -535,6 +550,8 @@ static PyMethodDef ScalarMapMethods[] = {
"Gets the value for the given key if present, or otherwise a default" },
{ "GetEntryClass", (PyCFunction)GetEntryClass, METH_NOARGS,
"Return the class used to build Entries of (key, value) pairs." },
+ { "MergeFrom", (PyCFunction)MergeFrom, METH_O,
+ "Merges a map into the current map." },
/*
{ "__deepcopy__", (PyCFunction)DeepCopy, METH_VARARGS,
"Makes a deep copy of the class." },
@@ -810,6 +827,8 @@ static PyMethodDef MessageMapMethods[] = {
"Alias for getitem, useful to make explicit that the map is mutated." },
{ "GetEntryClass", (PyCFunction)GetEntryClass, METH_NOARGS,
"Return the class used to build Entries of (key, value) pairs." },
+ { "MergeFrom", (PyCFunction)MergeFrom, METH_O,
+ "Merges a map into the current map." },
/*
{ "__deepcopy__", (PyCFunction)DeepCopy, METH_VARARGS,
"Makes a deep copy of the class." },
diff --git a/python/google/protobuf/pyext/map_container.h b/python/google/protobuf/pyext/map_container.h
index 615657b0..111fafbf 100644
--- a/python/google/protobuf/pyext/map_container.h
+++ b/python/google/protobuf/pyext/map_container.h
@@ -34,27 +34,18 @@
#include <Python.h>
#include <memory>
-#ifndef _SHARED_PTR_H
-#include <google/protobuf/stubs/shared_ptr.h>
-#endif
#include <google/protobuf/descriptor.h>
#include <google/protobuf/message.h>
+#include <google/protobuf/pyext/message.h>
namespace google {
namespace protobuf {
class Message;
-#ifdef _SHARED_PTR_H
-using std::shared_ptr;
-#else
-using internal::shared_ptr;
-#endif
-
namespace python {
-struct CMessage;
struct CMessageClass;
// This struct is used directly for ScalarMap, and is the base class of
@@ -66,7 +57,7 @@ struct MapContainer {
// proto tree. Every Python MapContainer holds a
// reference to it in order to keep it alive as long as there's a
// Python object that references any part of the tree.
- shared_ptr<Message> owner;
+ CMessage::OwnerRef owner;
// Pointer to the C++ Message that contains this container. The
// MapContainer does not own this pointer.
@@ -99,9 +90,7 @@ struct MapContainer {
int Release();
// Set the owner field of self and any children of self.
- void SetOwner(const shared_ptr<Message>& new_owner) {
- owner = new_owner;
- }
+ void SetOwner(const CMessage::OwnerRef& new_owner) { owner = new_owner; }
};
struct MessageMapContainer : public MapContainer {
diff --git a/python/google/protobuf/pyext/message.cc b/python/google/protobuf/pyext/message.cc
index f515b560..5893533a 100644
--- a/python/google/protobuf/pyext/message.cc
+++ b/python/google/protobuf/pyext/message.cc
@@ -35,9 +35,6 @@
#include <map>
#include <memory>
-#ifndef _SHARED_PTR_H
-#include <google/protobuf/stubs/shared_ptr.h>
-#endif
#include <string>
#include <vector>
#include <structmember.h> // A Python header file.
@@ -658,7 +655,7 @@ bool CheckAndGetInteger(PyObject* arg, T* value) {
// Unlike PyLong_AsLongLong, PyLong_AsUnsignedLongLong is very
// picky about the exact type.
PyObject* casted = PyNumber_Long(arg);
- if (GOOGLE_PREDICT_FALSE(casted == NULL)) {
+ if (GOOGLE_PREDICT_FALSE(casted == nullptr)) {
// Propagate existing error.
return false;
}
@@ -683,7 +680,7 @@ bool CheckAndGetInteger(PyObject* arg, T* value) {
// Valid subclasses of numbers.Integral should have a __long__() method
// so fall back to that.
PyObject* casted = PyNumber_Long(arg);
- if (GOOGLE_PREDICT_FALSE(casted == NULL)) {
+ if (GOOGLE_PREDICT_FALSE(casted == nullptr)) {
// Propagate existing error.
return false;
}
@@ -830,7 +827,8 @@ bool CheckAndSetString(
return true;
}
-PyObject* ToStringObject(const FieldDescriptor* descriptor, string value) {
+PyObject* ToStringObject(const FieldDescriptor* descriptor,
+ const string& value) {
if (descriptor->type() != FieldDescriptor::TYPE_STRING) {
return PyBytes_FromStringAndSize(value.c_str(), value.length());
}
@@ -1318,6 +1316,8 @@ CMessage* NewEmptyMessage(CMessageClass* type) {
return NULL;
}
+ // Use "placement new" syntax to initialize the C++ object.
+ new (&self->owner) CMessage::OwnerRef(NULL);
self->message = NULL;
self->parent = NULL;
self->parent_field_descriptor = NULL;
@@ -1414,7 +1414,7 @@ static void Dealloc(CMessage* self) {
Py_CLEAR(self->extensions);
Py_CLEAR(self->composite_fields);
- self->owner.reset();
+ self->owner.~ThreadUnsafeSharedPtr<Message>();
Py_TYPE(self)->tp_free(reinterpret_cast<PyObject*>(self));
}
@@ -1616,9 +1616,10 @@ PyObject* HasExtension(CMessage* self, PyObject* extension) {
// * Clear the weak references from the released container to the
// parent.
-struct SetOwnerVisitor : public ChildVisitor {
+class SetOwnerVisitor : public ChildVisitor {
+ public:
// new_owner must outlive this object.
- explicit SetOwnerVisitor(const shared_ptr<Message>& new_owner)
+ explicit SetOwnerVisitor(const CMessage::OwnerRef& new_owner)
: new_owner_(new_owner) {}
int VisitRepeatedCompositeContainer(RepeatedCompositeContainer* container) {
@@ -1642,11 +1643,11 @@ struct SetOwnerVisitor : public ChildVisitor {
}
private:
- const shared_ptr<Message>& new_owner_;
+ const CMessage::OwnerRef& new_owner_;
};
// Change the owner of this CMessage and all its children, recursively.
-int SetOwner(CMessage* self, const shared_ptr<Message>& new_owner) {
+int SetOwner(CMessage* self, const CMessage::OwnerRef& new_owner) {
self->owner = new_owner;
if (ForEachCompositeField(self, SetOwnerVisitor(new_owner)) == -1)
return -1;
@@ -1679,7 +1680,7 @@ int ReleaseSubMessage(CMessage* self,
const FieldDescriptor* field_descriptor,
CMessage* child_cmessage) {
// Release the Message
- shared_ptr<Message> released_message(ReleaseMessage(
+ CMessage::OwnerRef released_message(ReleaseMessage(
self, child_cmessage->message->GetDescriptor(), field_descriptor));
child_cmessage->message = released_message.get();
child_cmessage->owner.swap(released_message);
@@ -2329,7 +2330,9 @@ PyObject* InternalGetScalar(const Message* message,
break;
}
case FieldDescriptor::CPPTYPE_STRING: {
- string value = reflection->GetString(*message, field_descriptor);
+ string scratch;
+ const string& value =
+ reflection->GetStringReference(*message, field_descriptor, &scratch);
result = ToStringObject(field_descriptor, value);
break;
}
diff --git a/python/google/protobuf/pyext/message.h b/python/google/protobuf/pyext/message.h
index 576d098c..7050f876 100644
--- a/python/google/protobuf/pyext/message.h
+++ b/python/google/protobuf/pyext/message.h
@@ -37,11 +37,11 @@
#include <Python.h>
#include <memory>
-#ifndef _SHARED_PTR_H
-#include <google/protobuf/stubs/shared_ptr.h>
-#endif
#include <string>
+#include <google/protobuf/stubs/common.h>
+#include <google/protobuf/pyext/thread_unsafe_shared_ptr.h>
+
namespace google {
namespace protobuf {
@@ -71,7 +71,9 @@ typedef struct CMessage {
// proto tree. Every Python CMessage holds a reference to it in
// order to keep it alive as long as there's a Python object that
// references any part of the tree.
- shared_ptr<Message> owner;
+
+ typedef ThreadUnsafeSharedPtr<Message> OwnerRef;
+ OwnerRef owner;
// Weak reference to a parent CMessage object. This is NULL for any top-level
// message and is set for any child message (i.e. a child submessage or a
@@ -255,7 +257,7 @@ PyObject* FindInitializationErrors(CMessage* self);
// Set the owner field of self and any children of self, recursively.
// Used when self is being released and thus has a new owner (the
// released Message.)
-int SetOwner(CMessage* self, const shared_ptr<Message>& new_owner);
+int SetOwner(CMessage* self, const CMessage::OwnerRef& new_owner);
int AssureWritable(CMessage* self);
@@ -336,7 +338,8 @@ bool CheckAndSetString(
const Reflection* reflection,
bool append,
int index);
-PyObject* ToStringObject(const FieldDescriptor* descriptor, string value);
+PyObject* ToStringObject(const FieldDescriptor* descriptor,
+ const string& value);
// Check if the passed field descriptor belongs to the given message.
// If not, return false and set a Python exception (a KeyError)
@@ -347,6 +350,15 @@ extern PyObject* PickleError_class;
bool InitProto2MessageModule(PyObject *m);
+#if LANG_CXX11
+// These are referenced by repeated_scalar_container, and must
+// be explicitly instantiated.
+extern template bool CheckAndGetInteger<int32>(PyObject*, int32*);
+extern template bool CheckAndGetInteger<int64>(PyObject*, int64*);
+extern template bool CheckAndGetInteger<uint32>(PyObject*, uint32*);
+extern template bool CheckAndGetInteger<uint64>(PyObject*, uint64*);
+#endif
+
} // namespace python
} // namespace protobuf
diff --git a/python/google/protobuf/pyext/message_factory.cc b/python/google/protobuf/pyext/message_factory.cc
index 571bae2b..bacc76a6 100644
--- a/python/google/protobuf/pyext/message_factory.cc
+++ b/python/google/protobuf/pyext/message_factory.cc
@@ -100,7 +100,9 @@ PyObject* New(PyTypeObject* type, PyObject* args, PyObject* kwargs) {
NewMessageFactory(type, reinterpret_cast<PyDescriptorPool*>(pool)));
}
-static void Dealloc(PyMessageFactory* self) {
+static void Dealloc(PyObject* pself) {
+ PyMessageFactory* self = reinterpret_cast<PyMessageFactory*>(pself);
+
// TODO(amauryfa): When the MessageFactory is not created from the
// DescriptorPool this reference should be owned, not borrowed.
// Py_CLEAR(self->pool);
@@ -111,7 +113,7 @@ static void Dealloc(PyMessageFactory* self) {
}
delete self->classes_by_descriptor;
delete self->message_factory;
- Py_TYPE(self)->tp_free(reinterpret_cast<PyObject*>(self));
+ Py_TYPE(self)->tp_free(pself);
}
// Add a message class to our database.
@@ -231,7 +233,7 @@ PyTypeObject PyMessageFactory_Type = {
".MessageFactory", // tp_name
sizeof(PyMessageFactory), // tp_basicsize
0, // tp_itemsize
- (destructor)message_factory::Dealloc, // tp_dealloc
+ message_factory::Dealloc, // tp_dealloc
0, // tp_print
0, // tp_getattr
0, // tp_setattr
diff --git a/python/google/protobuf/pyext/repeated_composite_container.cc b/python/google/protobuf/pyext/repeated_composite_container.cc
index 5ad71db5..5874d5de 100644
--- a/python/google/protobuf/pyext/repeated_composite_container.cc
+++ b/python/google/protobuf/pyext/repeated_composite_container.cc
@@ -34,9 +34,6 @@
#include <google/protobuf/pyext/repeated_composite_container.h>
#include <memory>
-#ifndef _SHARED_PTR_H
-#include <google/protobuf/stubs/shared_ptr.h>
-#endif
#include <google/protobuf/stubs/logging.h>
#include <google/protobuf/stubs/common.h>
@@ -81,7 +78,10 @@ namespace repeated_composite_container {
// ---------------------------------------------------------------------
// len()
-static Py_ssize_t Length(RepeatedCompositeContainer* self) {
+static Py_ssize_t Length(PyObject* pself) {
+ RepeatedCompositeContainer* self =
+ reinterpret_cast<RepeatedCompositeContainer*>(pself);
+
Message* message = self->message;
if (message != NULL) {
return message->GetReflection()->FieldSize(*message,
@@ -102,7 +102,7 @@ static int UpdateChildMessages(RepeatedCompositeContainer* self) {
// A MergeFrom on a parent message could have caused extra messages to be
// added in the underlying protobuf so add them to our list. They can never
// be removed in such a way so there's no need to worry about that.
- Py_ssize_t message_length = Length(self);
+ Py_ssize_t message_length = Length(reinterpret_cast<PyObject*>(self));
Py_ssize_t child_length = PyList_GET_SIZE(self->child_messages);
Message* message = self->message;
const Reflection* reflection = message->GetReflection();
@@ -191,6 +191,10 @@ PyObject* Add(RepeatedCompositeContainer* self,
return AddToAttached(self, args, kwargs);
}
+static PyObject* AddMethod(PyObject* self, PyObject* args, PyObject* kwargs) {
+ return Add(reinterpret_cast<RepeatedCompositeContainer*>(self), args, kwargs);
+}
+
// ---------------------------------------------------------------------
// extend()
@@ -226,6 +230,10 @@ PyObject* Extend(RepeatedCompositeContainer* self, PyObject* value) {
Py_RETURN_NONE;
}
+static PyObject* ExtendMethod(PyObject* self, PyObject* value) {
+ return Extend(reinterpret_cast<RepeatedCompositeContainer*>(self), value);
+}
+
PyObject* MergeFrom(RepeatedCompositeContainer* self, PyObject* other) {
if (UpdateChildMessages(self) < 0) {
return NULL;
@@ -233,6 +241,10 @@ PyObject* MergeFrom(RepeatedCompositeContainer* self, PyObject* other) {
return Extend(self, other);
}
+static PyObject* MergeFromMethod(PyObject* self, PyObject* other) {
+ return MergeFrom(reinterpret_cast<RepeatedCompositeContainer*>(self), other);
+}
+
PyObject* Subscript(RepeatedCompositeContainer* self, PyObject* slice) {
if (UpdateChildMessages(self) < 0) {
return NULL;
@@ -242,6 +254,10 @@ PyObject* Subscript(RepeatedCompositeContainer* self, PyObject* slice) {
return PyObject_GetItem(self->child_messages, slice);
}
+static PyObject* SubscriptMethod(PyObject* self, PyObject* slice) {
+ return Subscript(reinterpret_cast<RepeatedCompositeContainer*>(self), slice);
+}
+
int AssignSubscript(RepeatedCompositeContainer* self,
PyObject* slice,
PyObject* value) {
@@ -265,7 +281,7 @@ int AssignSubscript(RepeatedCompositeContainer* self,
Py_ssize_t from;
Py_ssize_t to;
Py_ssize_t step;
- Py_ssize_t length = Length(self);
+ Py_ssize_t length = Length(reinterpret_cast<PyObject*>(self));
Py_ssize_t slicelength;
if (PySlice_Check(slice)) {
#if PY_MAJOR_VERSION >= 3
@@ -290,7 +306,16 @@ int AssignSubscript(RepeatedCompositeContainer* self,
return 0;
}
-static PyObject* Remove(RepeatedCompositeContainer* self, PyObject* value) {
+static int AssignSubscriptMethod(PyObject* self, PyObject* slice,
+ PyObject* value) {
+ return AssignSubscript(reinterpret_cast<RepeatedCompositeContainer*>(self),
+ slice, value);
+}
+
+static PyObject* Remove(PyObject* pself, PyObject* value) {
+ RepeatedCompositeContainer* self =
+ reinterpret_cast<RepeatedCompositeContainer*>(pself);
+
if (UpdateChildMessages(self) < 0) {
return NULL;
}
@@ -305,9 +330,10 @@ static PyObject* Remove(RepeatedCompositeContainer* self, PyObject* value) {
Py_RETURN_NONE;
}
-static PyObject* RichCompare(RepeatedCompositeContainer* self,
- PyObject* other,
- int opid) {
+static PyObject* RichCompare(PyObject* pself, PyObject* other, int opid) {
+ RepeatedCompositeContainer* self =
+ reinterpret_cast<RepeatedCompositeContainer*>(pself);
+
if (UpdateChildMessages(self) < 0) {
return NULL;
}
@@ -340,12 +366,13 @@ static PyObject* RichCompare(RepeatedCompositeContainer* self,
}
}
-static PyObject* ToStr(RepeatedCompositeContainer* self) {
+static PyObject* ToStr(PyObject* pself) {
ScopedPyObjectPtr full_slice(PySlice_New(NULL, NULL, NULL));
if (full_slice == NULL) {
return NULL;
}
- ScopedPyObjectPtr list(Subscript(self, full_slice.get()));
+ ScopedPyObjectPtr list(Subscript(
+ reinterpret_cast<RepeatedCompositeContainer*>(pself), full_slice.get()));
if (list == NULL) {
return NULL;
}
@@ -359,7 +386,7 @@ static void ReorderAttached(RepeatedCompositeContainer* self) {
Message* message = self->message;
const Reflection* reflection = message->GetReflection();
const FieldDescriptor* descriptor = self->parent_field_descriptor;
- const Py_ssize_t length = Length(self);
+ const Py_ssize_t length = Length(reinterpret_cast<PyObject*>(self));
// Since Python protobuf objects are never arena-allocated, adding and
// removing message pointers to the underlying array is just updating
@@ -390,9 +417,10 @@ static int SortPythonMessages(RepeatedCompositeContainer* self,
return 0;
}
-static PyObject* Sort(RepeatedCompositeContainer* self,
- PyObject* args,
- PyObject* kwds) {
+static PyObject* Sort(PyObject* pself, PyObject* args, PyObject* kwds) {
+ RepeatedCompositeContainer* self =
+ reinterpret_cast<RepeatedCompositeContainer*>(pself);
+
// Support the old sort_function argument for backwards
// compatibility.
if (kwds != NULL) {
@@ -416,11 +444,14 @@ static PyObject* Sort(RepeatedCompositeContainer* self,
// ---------------------------------------------------------------------
-static PyObject* Item(RepeatedCompositeContainer* self, Py_ssize_t index) {
+static PyObject* Item(PyObject* pself, Py_ssize_t index) {
+ RepeatedCompositeContainer* self =
+ reinterpret_cast<RepeatedCompositeContainer*>(pself);
+
if (UpdateChildMessages(self) < 0) {
return NULL;
}
- Py_ssize_t length = Length(self);
+ Py_ssize_t length = Length(pself);
if (index < 0) {
index = length + index;
}
@@ -432,17 +463,17 @@ static PyObject* Item(RepeatedCompositeContainer* self, Py_ssize_t index) {
return item;
}
-static PyObject* Pop(RepeatedCompositeContainer* self,
- PyObject* args) {
+static PyObject* Pop(PyObject* pself, PyObject* args) {
+ RepeatedCompositeContainer* self =
+ reinterpret_cast<RepeatedCompositeContainer*>(pself);
+
Py_ssize_t index = -1;
if (!PyArg_ParseTuple(args, "|n", &index)) {
return NULL;
}
- PyObject* item = Item(self, index);
+ PyObject* item = Item(pself, index);
if (item == NULL) {
- PyErr_Format(PyExc_IndexError,
- "list index (%zd) out of range",
- index);
+ PyErr_Format(PyExc_IndexError, "list index (%zd) out of range", index);
return NULL;
}
ScopedPyObjectPtr py_index(PyLong_FromSsize_t(index));
@@ -460,7 +491,7 @@ void ReleaseLastTo(CMessage* parent,
GOOGLE_CHECK_NOTNULL(field);
GOOGLE_CHECK_NOTNULL(target);
- shared_ptr<Message> released_message(
+ CMessage::OwnerRef released_message(
parent->message->GetReflection()->ReleaseLast(parent->message, field));
// TODO(tibell): Deal with proto1.
@@ -503,7 +534,10 @@ int Release(RepeatedCompositeContainer* self) {
return 0;
}
-PyObject* DeepCopy(RepeatedCompositeContainer* self, PyObject* arg) {
+PyObject* DeepCopy(PyObject* pself, PyObject* arg) {
+ RepeatedCompositeContainer* self =
+ reinterpret_cast<RepeatedCompositeContainer*>(pself);
+
ScopedPyObjectPtr cloneObj(
PyType_GenericAlloc(&RepeatedCompositeContainer_Type, 0));
if (cloneObj == NULL) {
@@ -530,7 +564,7 @@ PyObject* DeepCopy(RepeatedCompositeContainer* self, PyObject* arg) {
}
int SetOwner(RepeatedCompositeContainer* self,
- const shared_ptr<Message>& new_owner) {
+ const CMessage::OwnerRef& new_owner) {
GOOGLE_CHECK_ATTACHED(self);
self->owner = new_owner;
@@ -571,43 +605,46 @@ PyObject *NewContainer(
return reinterpret_cast<PyObject*>(self);
}
-static void Dealloc(RepeatedCompositeContainer* self) {
+static void Dealloc(PyObject* pself) {
+ RepeatedCompositeContainer* self =
+ reinterpret_cast<RepeatedCompositeContainer*>(pself);
+
Py_CLEAR(self->child_messages);
Py_CLEAR(self->child_message_class);
// TODO(tibell): Do we need to call delete on these objects to make
// sure their destructors are called?
self->owner.reset();
- Py_TYPE(self)->tp_free(reinterpret_cast<PyObject*>(self));
+ Py_TYPE(self)->tp_free(pself);
}
static PySequenceMethods SqMethods = {
- (lenfunc)Length, /* sq_length */
- 0, /* sq_concat */
- 0, /* sq_repeat */
- (ssizeargfunc)Item /* sq_item */
+ Length, /* sq_length */
+ 0, /* sq_concat */
+ 0, /* sq_repeat */
+ Item /* sq_item */
};
static PyMappingMethods MpMethods = {
- (lenfunc)Length, /* mp_length */
- (binaryfunc)Subscript, /* mp_subscript */
- (objobjargproc)AssignSubscript,/* mp_ass_subscript */
+ Length, /* mp_length */
+ SubscriptMethod, /* mp_subscript */
+ AssignSubscriptMethod, /* mp_ass_subscript */
};
static PyMethodDef Methods[] = {
- { "__deepcopy__", (PyCFunction)DeepCopy, METH_VARARGS,
+ { "__deepcopy__", DeepCopy, METH_VARARGS,
"Makes a deep copy of the class." },
- { "add", (PyCFunction) Add, METH_VARARGS | METH_KEYWORDS,
+ { "add", (PyCFunction)AddMethod, METH_VARARGS | METH_KEYWORDS,
"Adds an object to the repeated container." },
- { "extend", (PyCFunction) Extend, METH_O,
+ { "extend", ExtendMethod, METH_O,
"Adds objects to the repeated container." },
- { "pop", (PyCFunction)Pop, METH_VARARGS,
+ { "pop", Pop, METH_VARARGS,
"Removes an object from the repeated container and returns it." },
- { "remove", (PyCFunction) Remove, METH_O,
+ { "remove", Remove, METH_O,
"Removes an object from the repeated container." },
- { "sort", (PyCFunction) Sort, METH_VARARGS | METH_KEYWORDS,
+ { "sort", (PyCFunction)Sort, METH_VARARGS | METH_KEYWORDS,
"Sorts the repeated container." },
- { "MergeFrom", (PyCFunction) MergeFrom, METH_O,
+ { "MergeFrom", MergeFromMethod, METH_O,
"Adds objects to the repeated container." },
{ NULL, NULL }
};
@@ -619,12 +656,12 @@ PyTypeObject RepeatedCompositeContainer_Type = {
FULL_MODULE_NAME ".RepeatedCompositeContainer", // tp_name
sizeof(RepeatedCompositeContainer), // tp_basicsize
0, // tp_itemsize
- (destructor)repeated_composite_container::Dealloc, // tp_dealloc
+ repeated_composite_container::Dealloc, // tp_dealloc
0, // tp_print
0, // tp_getattr
0, // tp_setattr
0, // tp_compare
- (reprfunc)repeated_composite_container::ToStr, // tp_repr
+ repeated_composite_container::ToStr, // tp_repr
0, // tp_as_number
&repeated_composite_container::SqMethods, // tp_as_sequence
&repeated_composite_container::MpMethods, // tp_as_mapping
@@ -638,7 +675,7 @@ PyTypeObject RepeatedCompositeContainer_Type = {
"A Repeated scalar container", // tp_doc
0, // tp_traverse
0, // tp_clear
- (richcmpfunc)repeated_composite_container::RichCompare, // tp_richcompare
+ repeated_composite_container::RichCompare, // tp_richcompare
0, // tp_weaklistoffset
0, // tp_iter
0, // tp_iternext
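The *Method wrappers above exist only to give the method-table entries the exact PyCFunction signatures; from Python the repeated message container behaves as before. A usage sketch, assuming a hypothetical my_pb2.MyMessage with a repeated submessage field items whose entries have a string field name:

    import my_pb2  # hypothetical generated module

    msg = my_pb2.MyMessage()
    first = msg.items.add(name='b')            # add() routes through AddMethod above
    msg.items.extend([my_pb2.Item(name='a')])
    msg.items.sort(key=lambda item: item.name)
    last = msg.items.pop()                     # removes and returns the last element
    assert last.name == 'b' and len(msg.items) == 1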
diff --git a/python/google/protobuf/pyext/repeated_composite_container.h b/python/google/protobuf/pyext/repeated_composite_container.h
index a7b56b61..e5e946aa 100644
--- a/python/google/protobuf/pyext/repeated_composite_container.h
+++ b/python/google/protobuf/pyext/repeated_composite_container.h
@@ -37,27 +37,19 @@
#include <Python.h>
#include <memory>
-#ifndef _SHARED_PTR_H
-#include <google/protobuf/stubs/shared_ptr.h>
-#endif
#include <string>
#include <vector>
+#include <google/protobuf/pyext/message.h>
+
namespace google {
namespace protobuf {
class FieldDescriptor;
class Message;
-#ifdef _SHARED_PTR_H
-using std::shared_ptr;
-#else
-using internal::shared_ptr;
-#endif
-
namespace python {
-struct CMessage;
struct CMessageClass;
// A RepeatedCompositeContainer can be in one of two states: attached
@@ -77,7 +69,7 @@ typedef struct RepeatedCompositeContainer {
// proto tree. Every Python RepeatedCompositeContainer holds a
// reference to it in order to keep it alive as long as there's a
// Python object that references any part of the tree.
- shared_ptr<Message> owner;
+ CMessage::OwnerRef owner;
// Weak reference to parent object. May be NULL. Used to make sure
// the parent is writable before modifying the
@@ -148,11 +140,6 @@ int AssignSubscript(RepeatedCompositeContainer* self,
PyObject* slice,
PyObject* value);
-// Releases the messages in the container to the given message.
-//
-// Returns 0 on success, -1 on failure.
-int ReleaseToMessage(RepeatedCompositeContainer* self, Message* new_message);
-
// Releases the messages in the container to a new message.
//
// Returns 0 on success, -1 on failure.
@@ -160,7 +147,7 @@ int Release(RepeatedCompositeContainer* self);
// Returns 0 on success, -1 on failure.
int SetOwner(RepeatedCompositeContainer* self,
- const shared_ptr<Message>& new_owner);
+ const CMessage::OwnerRef& new_owner);
// Removes the last element of the repeated message field 'field' on
// the Message 'parent', and transfers the ownership of the released
diff --git a/python/google/protobuf/pyext/repeated_scalar_container.cc b/python/google/protobuf/pyext/repeated_scalar_container.cc
index 5a7832cd..1517e654 100644
--- a/python/google/protobuf/pyext/repeated_scalar_container.cc
+++ b/python/google/protobuf/pyext/repeated_scalar_container.cc
@@ -34,9 +34,6 @@
#include <google/protobuf/pyext/repeated_scalar_container.h>
#include <memory>
-#ifndef _SHARED_PTR_H
-#include <google/protobuf/stubs/shared_ptr.h>
-#endif
#include <google/protobuf/stubs/common.h>
#include <google/protobuf/stubs/logging.h>
@@ -77,15 +74,18 @@ static int InternalAssignRepeatedField(
return 0;
}
-static Py_ssize_t Len(RepeatedScalarContainer* self) {
+static Py_ssize_t Len(PyObject* pself) {
+ RepeatedScalarContainer* self =
+ reinterpret_cast<RepeatedScalarContainer*>(pself);
Message* message = self->message;
return message->GetReflection()->FieldSize(*message,
self->parent_field_descriptor);
}
-static int AssignItem(RepeatedScalarContainer* self,
- Py_ssize_t index,
- PyObject* arg) {
+static int AssignItem(PyObject* pself, Py_ssize_t index, PyObject* arg) {
+ RepeatedScalarContainer* self =
+ reinterpret_cast<RepeatedScalarContainer*>(pself);
+
cmessage::AssureWritable(self->parent);
Message* message = self->message;
const FieldDescriptor* field_descriptor = self->parent_field_descriptor;
@@ -188,7 +188,10 @@ static int AssignItem(RepeatedScalarContainer* self,
return 0;
}
-static PyObject* Item(RepeatedScalarContainer* self, Py_ssize_t index) {
+static PyObject* Item(PyObject* pself, Py_ssize_t index) {
+ RepeatedScalarContainer* self =
+ reinterpret_cast<RepeatedScalarContainer*>(pself);
+
Message* message = self->message;
const FieldDescriptor* field_descriptor = self->parent_field_descriptor;
const Reflection* reflection = message->GetReflection();
@@ -256,8 +259,9 @@ static PyObject* Item(RepeatedScalarContainer* self, Py_ssize_t index) {
break;
}
case FieldDescriptor::CPPTYPE_STRING: {
- string value = reflection->GetRepeatedString(
- *message, field_descriptor, index);
+ string scratch;
+ const string& value = reflection->GetRepeatedStringReference(
+ *message, field_descriptor, index, &scratch);
result = ToStringObject(field_descriptor, value);
break;
}
@@ -271,7 +275,10 @@ static PyObject* Item(RepeatedScalarContainer* self, Py_ssize_t index) {
return result;
}
-static PyObject* Subscript(RepeatedScalarContainer* self, PyObject* slice) {
+static PyObject* Subscript(PyObject* pself, PyObject* slice) {
+ RepeatedScalarContainer* self =
+ reinterpret_cast<RepeatedScalarContainer*>(pself);
+
Py_ssize_t from;
Py_ssize_t to;
Py_ssize_t step;
@@ -286,14 +293,13 @@ static PyObject* Subscript(RepeatedScalarContainer* self, PyObject* slice) {
if (PyLong_Check(slice)) {
from = to = PyLong_AsLong(slice);
} else if (PySlice_Check(slice)) {
- length = Len(self);
+ length = Len(pself);
#if PY_MAJOR_VERSION >= 3
if (PySlice_GetIndicesEx(slice,
length, &from, &to, &step, &slicelength) == -1) {
#else
if (PySlice_GetIndicesEx(reinterpret_cast<PySliceObject*>(slice),
length, &from, &to, &step, &slicelength) == -1) {
-
#endif
return NULL;
}
@@ -304,7 +310,7 @@ static PyObject* Subscript(RepeatedScalarContainer* self, PyObject* slice) {
}
if (!return_list) {
- return Item(self, from);
+ return Item(pself, from);
}
PyObject* list = PyList_New(0);
@@ -319,7 +325,7 @@ static PyObject* Subscript(RepeatedScalarContainer* self, PyObject* slice) {
if (index < 0 || index >= length) {
break;
}
- ScopedPyObjectPtr s(Item(self, index));
+ ScopedPyObjectPtr s(Item(pself, index));
PyList_Append(list, s.get());
}
} else {
@@ -330,7 +336,7 @@ static PyObject* Subscript(RepeatedScalarContainer* self, PyObject* slice) {
if (index < 0 || index >= length) {
break;
}
- ScopedPyObjectPtr s(Item(self, index));
+ ScopedPyObjectPtr s(Item(pself, index));
PyList_Append(list, s.get());
}
}
@@ -417,9 +423,14 @@ PyObject* Append(RepeatedScalarContainer* self, PyObject* item) {
Py_RETURN_NONE;
}
-static int AssSubscript(RepeatedScalarContainer* self,
- PyObject* slice,
- PyObject* value) {
+static PyObject* AppendMethod(PyObject* self, PyObject* item) {
+ return Append(reinterpret_cast<RepeatedScalarContainer*>(self), item);
+}
+
+static int AssSubscript(PyObject* pself, PyObject* slice, PyObject* value) {
+ RepeatedScalarContainer* self =
+ reinterpret_cast<RepeatedScalarContainer*>(pself);
+
Py_ssize_t from;
Py_ssize_t to;
Py_ssize_t step;
@@ -435,7 +446,7 @@ static int AssSubscript(RepeatedScalarContainer* self,
#if PY_MAJOR_VERSION < 3
if (PyInt_Check(slice)) {
from = to = PyInt_AsLong(slice);
- } else
+ } else // NOLINT
#endif
if (PyLong_Check(slice)) {
from = to = PyLong_AsLong(slice);
@@ -463,14 +474,14 @@ static int AssSubscript(RepeatedScalarContainer* self,
}
if (!create_list) {
- return AssignItem(self, from, value);
+ return AssignItem(pself, from, value);
}
ScopedPyObjectPtr full_slice(PySlice_New(NULL, NULL, NULL));
if (full_slice == NULL) {
return -1;
}
- ScopedPyObjectPtr new_list(Subscript(self, full_slice.get()));
+ ScopedPyObjectPtr new_list(Subscript(pself, full_slice.get()));
if (new_list == NULL) {
return -1;
}
@@ -509,14 +520,17 @@ PyObject* Extend(RepeatedScalarContainer* self, PyObject* value) {
Py_RETURN_NONE;
}
-static PyObject* Insert(RepeatedScalarContainer* self, PyObject* args) {
+static PyObject* Insert(PyObject* pself, PyObject* args) {
+ RepeatedScalarContainer* self =
+ reinterpret_cast<RepeatedScalarContainer*>(pself);
+
Py_ssize_t index;
PyObject* value;
if (!PyArg_ParseTuple(args, "lO", &index, &value)) {
return NULL;
}
ScopedPyObjectPtr full_slice(PySlice_New(NULL, NULL, NULL));
- ScopedPyObjectPtr new_list(Subscript(self, full_slice.get()));
+ ScopedPyObjectPtr new_list(Subscript(pself, full_slice.get()));
if (PyList_Insert(new_list.get(), index, value) < 0) {
return NULL;
}
@@ -527,10 +541,13 @@ static PyObject* Insert(RepeatedScalarContainer* self, PyObject* args) {
Py_RETURN_NONE;
}
-static PyObject* Remove(RepeatedScalarContainer* self, PyObject* value) {
+static PyObject* Remove(PyObject* pself, PyObject* value) {
+ RepeatedScalarContainer* self =
+ reinterpret_cast<RepeatedScalarContainer*>(pself);
+
Py_ssize_t match_index = -1;
- for (Py_ssize_t i = 0; i < Len(self); ++i) {
- ScopedPyObjectPtr elem(Item(self, i));
+ for (Py_ssize_t i = 0; i < Len(pself); ++i) {
+ ScopedPyObjectPtr elem(Item(pself, i));
if (PyObject_RichCompareBool(elem.get(), value, Py_EQ)) {
match_index = i;
break;
@@ -540,15 +557,20 @@ static PyObject* Remove(RepeatedScalarContainer* self, PyObject* value) {
PyErr_SetString(PyExc_ValueError, "remove(x): x not in container");
return NULL;
}
- if (AssignItem(self, match_index, NULL) < 0) {
+ if (AssignItem(pself, match_index, NULL) < 0) {
return NULL;
}
Py_RETURN_NONE;
}
-static PyObject* RichCompare(RepeatedScalarContainer* self,
- PyObject* other,
- int opid) {
+static PyObject* ExtendMethod(PyObject* self, PyObject* value) {
+ return Extend(reinterpret_cast<RepeatedScalarContainer*>(self), value);
+}
+
+static PyObject* RichCompare(PyObject* pself, PyObject* other, int opid) {
+ RepeatedScalarContainer* self =
+ reinterpret_cast<RepeatedScalarContainer*>(pself);
+
if (opid != Py_EQ && opid != Py_NE) {
Py_INCREF(Py_NotImplemented);
return Py_NotImplemented;
@@ -565,28 +587,25 @@ static PyObject* RichCompare(RepeatedScalarContainer* self,
ScopedPyObjectPtr other_list_deleter;
if (PyObject_TypeCheck(other, &RepeatedScalarContainer_Type)) {
- other_list_deleter.reset(Subscript(
- reinterpret_cast<RepeatedScalarContainer*>(other), full_slice.get()));
+ other_list_deleter.reset(Subscript(other, full_slice.get()));
other = other_list_deleter.get();
}
- ScopedPyObjectPtr list(Subscript(self, full_slice.get()));
+ ScopedPyObjectPtr list(Subscript(pself, full_slice.get()));
if (list == NULL) {
return NULL;
}
return PyObject_RichCompare(list.get(), other, opid);
}
-PyObject* Reduce(RepeatedScalarContainer* unused_self) {
+PyObject* Reduce(PyObject* unused_self, PyObject* unused_other) {
PyErr_Format(
PickleError_class,
"can't pickle repeated message fields, convert to list first");
return NULL;
}
-static PyObject* Sort(RepeatedScalarContainer* self,
- PyObject* args,
- PyObject* kwds) {
+static PyObject* Sort(PyObject* pself, PyObject* args, PyObject* kwds) {
// Support the old sort_function argument for backwards
// compatibility.
if (kwds != NULL) {
@@ -605,7 +624,7 @@ static PyObject* Sort(RepeatedScalarContainer* self,
if (full_slice == NULL) {
return NULL;
}
- ScopedPyObjectPtr list(Subscript(self, full_slice.get()));
+ ScopedPyObjectPtr list(Subscript(pself, full_slice.get()));
if (list == NULL) {
return NULL;
}
@@ -617,38 +636,39 @@ static PyObject* Sort(RepeatedScalarContainer* self,
if (res == NULL) {
return NULL;
}
- int ret = InternalAssignRepeatedField(self, list.get());
+ int ret = InternalAssignRepeatedField(
+ reinterpret_cast<RepeatedScalarContainer*>(pself), list.get());
if (ret < 0) {
return NULL;
}
Py_RETURN_NONE;
}
-static PyObject* Pop(RepeatedScalarContainer* self,
- PyObject* args) {
+static PyObject* Pop(PyObject* pself, PyObject* args) {
Py_ssize_t index = -1;
if (!PyArg_ParseTuple(args, "|n", &index)) {
return NULL;
}
- PyObject* item = Item(self, index);
+ PyObject* item = Item(pself, index);
if (item == NULL) {
- PyErr_Format(PyExc_IndexError,
- "list index (%zd) out of range",
- index);
+ PyErr_Format(PyExc_IndexError, "list index (%zd) out of range", index);
return NULL;
}
- if (AssignItem(self, index, NULL) < 0) {
+ if (AssignItem(pself, index, NULL) < 0) {
return NULL;
}
return item;
}
-static PyObject* ToStr(RepeatedScalarContainer* self) {
+static PyObject* ToStr(PyObject* pself) {
+ RepeatedScalarContainer* self =
+ reinterpret_cast<RepeatedScalarContainer*>(pself);
+
ScopedPyObjectPtr full_slice(PySlice_New(NULL, NULL, NULL));
if (full_slice == NULL) {
return NULL;
}
- ScopedPyObjectPtr list(Subscript(self, full_slice.get()));
+ ScopedPyObjectPtr list(Subscript(pself, full_slice.get()));
if (list == NULL) {
return NULL;
}
@@ -687,7 +707,8 @@ static int InitializeAndCopyToParentContainer(
if (full_slice == NULL) {
return -1;
}
- ScopedPyObjectPtr values(Subscript(from, full_slice.get()));
+ ScopedPyObjectPtr values(
+ Subscript(reinterpret_cast<PyObject*>(from), full_slice.get()));
if (values == NULL) {
return -1;
}
@@ -706,7 +727,10 @@ int Release(RepeatedScalarContainer* self) {
return InitializeAndCopyToParentContainer(self, self);
}
-PyObject* DeepCopy(RepeatedScalarContainer* self, PyObject* arg) {
+PyObject* DeepCopy(PyObject* pself, PyObject* arg) {
+ RepeatedScalarContainer* self =
+ reinterpret_cast<RepeatedScalarContainer*>(pself);
+
RepeatedScalarContainer* clone = reinterpret_cast<RepeatedScalarContainer*>(
PyType_GenericAlloc(&RepeatedScalarContainer_Type, 0));
if (clone == NULL) {
@@ -720,45 +744,47 @@ PyObject* DeepCopy(RepeatedScalarContainer* self, PyObject* arg) {
return reinterpret_cast<PyObject*>(clone);
}
-static void Dealloc(RepeatedScalarContainer* self) {
+static void Dealloc(PyObject* pself) {
+ RepeatedScalarContainer* self =
+ reinterpret_cast<RepeatedScalarContainer*>(pself);
self->owner.reset();
- Py_TYPE(self)->tp_free(reinterpret_cast<PyObject*>(self));
+ Py_TYPE(self)->tp_free(pself);
}
void SetOwner(RepeatedScalarContainer* self,
- const shared_ptr<Message>& new_owner) {
+ const CMessage::OwnerRef& new_owner) {
self->owner = new_owner;
}
static PySequenceMethods SqMethods = {
- (lenfunc)Len, /* sq_length */
- 0, /* sq_concat */
- 0, /* sq_repeat */
- (ssizeargfunc)Item, /* sq_item */
- 0, /* sq_slice */
- (ssizeobjargproc)AssignItem /* sq_ass_item */
+ Len, /* sq_length */
+ 0, /* sq_concat */
+ 0, /* sq_repeat */
+ Item, /* sq_item */
+ 0, /* sq_slice */
+ AssignItem /* sq_ass_item */
};
static PyMappingMethods MpMethods = {
- (lenfunc)Len, /* mp_length */
- (binaryfunc)Subscript, /* mp_subscript */
- (objobjargproc)AssSubscript, /* mp_ass_subscript */
+ Len, /* mp_length */
+ Subscript, /* mp_subscript */
+ AssSubscript, /* mp_ass_subscript */
};
static PyMethodDef Methods[] = {
- { "__deepcopy__", (PyCFunction)DeepCopy, METH_VARARGS,
+ { "__deepcopy__", DeepCopy, METH_VARARGS,
"Makes a deep copy of the class." },
- { "__reduce__", (PyCFunction)Reduce, METH_NOARGS,
+ { "__reduce__", Reduce, METH_NOARGS,
"Outputs picklable representation of the repeated field." },
- { "append", (PyCFunction)Append, METH_O,
+ { "append", AppendMethod, METH_O,
"Appends an object to the repeated container." },
- { "extend", (PyCFunction)Extend, METH_O,
- "Appends objects to the repeated container." },
- { "insert", (PyCFunction)Insert, METH_VARARGS,
+ { "extend", ExtendMethod, METH_O,
"Appends objects to the repeated container." },
- { "pop", (PyCFunction)Pop, METH_VARARGS,
+ { "insert", Insert, METH_VARARGS,
+ "Inserts an object at the specified position in the container." },
+ { "pop", Pop, METH_VARARGS,
"Removes an object from the repeated container and returns it." },
- { "remove", (PyCFunction)Remove, METH_O,
+ { "remove", Remove, METH_O,
"Removes an object from the repeated container." },
{ "sort", (PyCFunction)Sort, METH_VARARGS | METH_KEYWORDS,
"Sorts the repeated container."},
@@ -772,12 +798,12 @@ PyTypeObject RepeatedScalarContainer_Type = {
FULL_MODULE_NAME ".RepeatedScalarContainer", // tp_name
sizeof(RepeatedScalarContainer), // tp_basicsize
0, // tp_itemsize
- (destructor)repeated_scalar_container::Dealloc, // tp_dealloc
+ repeated_scalar_container::Dealloc, // tp_dealloc
0, // tp_print
0, // tp_getattr
0, // tp_setattr
0, // tp_compare
- (reprfunc)repeated_scalar_container::ToStr, // tp_repr
+ repeated_scalar_container::ToStr, // tp_repr
0, // tp_as_number
&repeated_scalar_container::SqMethods, // tp_as_sequence
&repeated_scalar_container::MpMethods, // tp_as_mapping
@@ -791,7 +817,7 @@ PyTypeObject RepeatedScalarContainer_Type = {
"A Repeated scalar container", // tp_doc
0, // tp_traverse
0, // tp_clear
- (richcmpfunc)repeated_scalar_container::RichCompare, // tp_richcompare
+ repeated_scalar_container::RichCompare, // tp_richcompare
0, // tp_weaklistoffset
0, // tp_iter
0, // tp_iternext
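As with the composite container, the scalar container's Python behaviour is unchanged by the signature cleanups; the only user-visible tweak is the corrected docstring for insert(). A usage sketch, assuming a hypothetical my_pb2.MyMessage with a repeated int32 field named values:

    import my_pb2  # hypothetical generated module

    msg = my_pb2.MyMessage()
    msg.values.append(3)
    msg.values.extend([1, 2])
    msg.values.insert(0, 7)     # inserts at the given position
    msg.values.sort()
    msg.values.remove(7)
    assert list(msg.values) == [1, 2, 3]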
diff --git a/python/google/protobuf/pyext/repeated_scalar_container.h b/python/google/protobuf/pyext/repeated_scalar_container.h
index 555e621c..559dec98 100644
--- a/python/google/protobuf/pyext/repeated_scalar_container.h
+++ b/python/google/protobuf/pyext/repeated_scalar_container.h
@@ -37,27 +37,14 @@
#include <Python.h>
#include <memory>
-#ifndef _SHARED_PTR_H
-#include <google/protobuf/stubs/shared_ptr.h>
-#endif
#include <google/protobuf/descriptor.h>
+#include <google/protobuf/pyext/message.h>
namespace google {
namespace protobuf {
-
-class Message;
-
-#ifdef _SHARED_PTR_H
-using std::shared_ptr;
-#else
-using internal::shared_ptr;
-#endif
-
namespace python {
-struct CMessage;
-
typedef struct RepeatedScalarContainer {
PyObject_HEAD;
@@ -65,7 +52,7 @@ typedef struct RepeatedScalarContainer {
// proto tree. Every Python RepeatedScalarContainer holds a
// reference to it in order to keep it alive as long as there's a
// Python object that references any part of the tree.
- shared_ptr<Message> owner;
+ CMessage::OwnerRef owner;
// Pointer to the C++ Message that contains this container. The
// RepeatedScalarContainer does not own this pointer.
@@ -112,7 +99,7 @@ PyObject* Extend(RepeatedScalarContainer* self, PyObject* value);
// Set the owner field of self and any children of self.
void SetOwner(RepeatedScalarContainer* self,
- const shared_ptr<Message>& new_owner);
+ const CMessage::OwnerRef& new_owner);
} // namespace repeated_scalar_container
} // namespace python
diff --git a/python/google/protobuf/text_format.py b/python/google/protobuf/text_format.py
index 36ddd1b7..2cbd21bc 100755
--- a/python/google/protobuf/text_format.py
+++ b/python/google/protobuf/text_format.py
@@ -141,9 +141,11 @@ def MessageToString(message,
as_one_line: Don't introduce newlines between fields.
pointy_brackets: If True, use angle brackets instead of curly braces for
nesting.
- use_index_order: If True, print fields of a proto message using the order
- defined in source code instead of the field number. By default, use the
- field number order.
+ use_index_order: If True, fields of a proto message will be printed using
+    the order defined in source code instead of the field number; extensions
+ will be printed at the end of the message and their relative order is
+ determined by the extension number. By default, use the field number
+ order.
float_format: If set, use this to specify floating point number formatting
(per the "Format Specification Mini-Language"); otherwise, str() is used.
use_field_number: If True, print field numbers instead of names.
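A minimal illustration of the documented ordering, assuming a hypothetical my_pb2 module where MyMessage declares field b before a and also has registered extensions:

    from google.protobuf import text_format
    import my_pb2  # hypothetical generated module

    msg = my_pb2.MyMessage(a=1, b=2)
    # Regular fields come out in declaration order (b before a); any set
    # extensions are printed last, ordered by extension number.
    print(text_format.MessageToString(msg, use_index_order=True))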
@@ -336,11 +338,12 @@ class _Printer(object):
return
fields = message.ListFields()
if self.use_index_order:
- fields.sort(key=lambda x: x[0].index)
+ fields.sort(
+ key=lambda x: x[0].number if x[0].is_extension else x[0].index)
for field, value in fields:
if _IsMapEntry(field):
for key in sorted(value):
- # This is slow for maps with submessage entires because it copies the
+ # This is slow for maps with submessage entries because it copies the
# entire tree. Unfortunately this would take significant refactoring
# of this file to work around.
#
@@ -645,6 +648,30 @@ class _Parser(object):
ParseError: In case of text parsing problems.
"""
message_descriptor = message.DESCRIPTOR
+ if (message_descriptor.full_name == _ANY_FULL_TYPE_NAME and
+ tokenizer.TryConsume('[')):
+ type_url_prefix, packed_type_name = self._ConsumeAnyTypeUrl(tokenizer)
+ tokenizer.Consume(']')
+ tokenizer.TryConsume(':')
+ if tokenizer.TryConsume('<'):
+ expanded_any_end_token = '>'
+ else:
+ tokenizer.Consume('{')
+ expanded_any_end_token = '}'
+ expanded_any_sub_message = _BuildMessageFromTypeName(packed_type_name,
+ self.descriptor_pool)
+ if not expanded_any_sub_message:
+ raise ParseError('Type %s not found in descriptor pool' %
+ packed_type_name)
+ while not tokenizer.TryConsume(expanded_any_end_token):
+ if tokenizer.AtEnd():
+ raise tokenizer.ParseErrorPreviousToken('Expected "%s".' %
+ (expanded_any_end_token,))
+ self._MergeField(tokenizer, expanded_any_sub_message)
+ message.Pack(expanded_any_sub_message,
+ type_url_prefix=type_url_prefix)
+ return
+
if tokenizer.TryConsume('['):
name = [tokenizer.ConsumeIdentifier()]
while tokenizer.TryConsume('.'):
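Hoisting the expanded-Any handling into _MergeField means a message that is itself a google.protobuf.Any can now be parsed in its expanded text form at the top level, not only when it appears as a sub-field. A sketch using google.protobuf.Duration as the packed payload (any registered message type would do):

    from google.protobuf import any_pb2, descriptor_pool, duration_pb2, text_format

    any_msg = any_pb2.Any()
    text_format.Parse(
        '[type.googleapis.com/google.protobuf.Duration] { seconds: 3 }',
        any_msg,
        descriptor_pool=descriptor_pool.Default())

    payload = duration_pb2.Duration()
    assert any_msg.Unpack(payload) and payload.seconds == 3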
@@ -725,11 +752,12 @@ class _Parser(object):
if (field.label == descriptor.FieldDescriptor.LABEL_REPEATED and
tokenizer.TryConsume('[')):
# Short repeated format, e.g. "foo: [1, 2, 3]"
- while True:
- merger(tokenizer, message, field)
- if tokenizer.TryConsume(']'):
- break
- tokenizer.Consume(',')
+ if not tokenizer.TryConsume(']'):
+ while True:
+ merger(tokenizer, message, field)
+ if tokenizer.TryConsume(']'):
+ break
+ tokenizer.Consume(',')
else:
merger(tokenizer, message, field)
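With the TryConsume(']') guard above, the short repeated form now accepts an empty list instead of failing while looking for a value. A sketch, again assuming a hypothetical repeated int32 field named values:

    from google.protobuf import text_format
    import my_pb2  # hypothetical generated module

    msg = my_pb2.MyMessage()
    text_format.Parse('values: []', msg)        # previously a ParseError
    text_format.Merge('values: [1, 2, 3]', msg)
    assert list(msg.values) == [1, 2, 3]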
@@ -777,33 +805,7 @@ class _Parser(object):
tokenizer.Consume('{')
end_token = '}'
- if (field.message_type.full_name == _ANY_FULL_TYPE_NAME and
- tokenizer.TryConsume('[')):
- type_url_prefix, packed_type_name = self._ConsumeAnyTypeUrl(tokenizer)
- tokenizer.Consume(']')
- tokenizer.TryConsume(':')
- if tokenizer.TryConsume('<'):
- expanded_any_end_token = '>'
- else:
- tokenizer.Consume('{')
- expanded_any_end_token = '}'
- expanded_any_sub_message = _BuildMessageFromTypeName(packed_type_name,
- self.descriptor_pool)
- if not expanded_any_sub_message:
- raise ParseError('Type %s not found in descriptor pool' %
- packed_type_name)
- while not tokenizer.TryConsume(expanded_any_end_token):
- if tokenizer.AtEnd():
- raise tokenizer.ParseErrorPreviousToken('Expected "%s".' %
- (expanded_any_end_token,))
- self._MergeField(tokenizer, expanded_any_sub_message)
- if field.label == descriptor.FieldDescriptor.LABEL_REPEATED:
- any_message = getattr(message, field.name).add()
- else:
- any_message = getattr(message, field.name)
- any_message.Pack(expanded_any_sub_message,
- type_url_prefix=type_url_prefix)
- elif field.label == descriptor.FieldDescriptor.LABEL_REPEATED:
+ if field.label == descriptor.FieldDescriptor.LABEL_REPEATED:
if field.is_extension:
sub_message = message.Extensions[field].add()
elif is_map_entry:
@@ -812,8 +814,20 @@ class _Parser(object):
sub_message = getattr(message, field.name).add()
else:
if field.is_extension:
+ if (not self._allow_multiple_scalars and
+ message.HasExtension(field)):
+ raise tokenizer.ParseErrorPreviousToken(
+ 'Message type "%s" should not have multiple "%s" extensions.' %
+ (message.DESCRIPTOR.full_name, field.full_name))
sub_message = message.Extensions[field]
else:
+ # Also apply _allow_multiple_scalars to message field.
+ # TODO(jieluo): Change to _allow_singular_overwrites.
+ if (not self._allow_multiple_scalars and
+ message.HasField(field.name)):
+ raise tokenizer.ParseErrorPreviousToken(
+ 'Message type "%s" should not have multiple "%s" fields.' %
+ (message.DESCRIPTOR.full_name, field.name))
sub_message = getattr(message, field.name)
sub_message.SetInParent()
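The new checks extend the existing duplicate-scalar protection to singular message fields and message-typed extensions: without allow_multiple_scalars, a second occurrence of such a field is rejected rather than silently merged. A sketch with a hypothetical singular submessage field child:

    from google.protobuf import text_format
    import my_pb2  # hypothetical generated module

    msg = my_pb2.MyMessage()
    try:
        text_format.Parse('child { name: "a" } child { name: "b" }', msg)
    except text_format.ParseError as err:
        print('duplicate singular message field rejected:', err)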