From 99a3e30bd761878631937aaf67f6206bf4d4afff Mon Sep 17 00:00:00 2001
From: Manjunath Kudlur
Date: Tue, 16 Feb 2016 15:17:10 -0800
Subject: Added PROTOBUF_PYTHON_ALLOW_OVERSIZE_PROTOS macro and set it when
 --define=allow_oversize_protos=true is passed to bazel build.

When this macro is set, SetTotalBytesLimit is called to remove the 64MB
limit on binary protos during ParseFromString.
---
 BUILD                                   | 18 ++++++++++++++----
 python/google/protobuf/pyext/message.cc |  9 +++++++++
 2 files changed, 23 insertions(+), 4 deletions(-)

diff --git a/BUILD b/BUILD
index 3cac4a86..c4de1c4c 100644
--- a/BUILD
+++ b/BUILD
@@ -462,11 +462,11 @@ genrule(
     name = "gen_well_known_protos_java",
     srcs = WELL_KNOWN_PROTOS,
     outs = [
-        "wellknown.srcjar"
+        "wellknown.srcjar",
     ],
     cmd = "$(location :protoc) --java_out=$(@D)/wellknown.jar" +
-          " -Isrc $(SRCS) " +
-          " && mv $(@D)/wellknown.jar $(@D)/wellknown.srcjar",
+        " -Isrc $(SRCS) " +
+        " && mv $(@D)/wellknown.jar $(@D)/wellknown.srcjar",
     tools = [":protoc"],
 )
 
@@ -539,7 +539,10 @@ cc_binary(
     ]),
     copts = COPTS + [
         "-DGOOGLE_PROTOBUF_HAS_ONEOF=1",
-    ],
+    ] + select({
+        "//conditions:default": [],
+        ":allow_oversize_protos": ["-DPROTOBUF_PYTHON_ALLOW_OVERSIZE_PROTOS=1"],
+    }),
     includes = [
         "python/",
         "src/",
@@ -561,6 +564,13 @@ config_setting(
     },
 )
 
+config_setting(
+    name = "allow_oversize_protos",
+    values = {
+        "define": "allow_oversize_protos=true",
+    },
+)
+
 py_proto_library(
     name = "protobuf_python",
     srcs = WELL_KNOWN_PROTOS,
diff --git a/python/google/protobuf/pyext/message.cc b/python/google/protobuf/pyext/message.cc
index 863cde01..60ec9c1b 100644
--- a/python/google/protobuf/pyext/message.cc
+++ b/python/google/protobuf/pyext/message.cc
@@ -1921,6 +1921,15 @@ static PyObject* MergeFromString(CMessage* self, PyObject* arg) {
   AssureWritable(self);
   io::CodedInputStream input(
       reinterpret_cast<const uint8*>(data), data_length);
+#if PROTOBUF_PYTHON_ALLOW_OVERSIZE_PROTOS
+  // Protobuf has a 64MB limit built in, this code will override this. Please do
+  // not enable this unless you fully understand the implications: protobufs
+  // must all be kept in memory at the same time, so if they grow too big you
+  // may get OOM errors. The protobuf APIs do not provide any tools for
+  // processing protobufs in chunks. If you have protos this big you should
+  // break them up if it is at all convenient to do so.
+  input.SetTotalBytesLimit(INT_MAX, INT_MAX);
+#endif  // PROTOBUF_PYTHON_ALLOW_OVERSIZE_PROTOS
   PyDescriptorPool* pool = GetDescriptorPoolForMessage(self);
   input.SetExtensionRegistry(pool->pool, pool->message_factory);
   bool success = self->message->MergePartialFromCodedStream(&input);
-- 
cgit v1.2.3
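
For reference, here is a minimal C++ sketch of what the guarded block enables: raising the
CodedInputStream byte limit so a serialized proto larger than the default 64MB cap can still be
merged. The message type MyMessage and the header my_message.pb.h are hypothetical placeholders
for any generated proto class; the protobuf calls themselves (CodedInputStream, SetTotalBytesLimit,
MergePartialFromCodedStream) are the same ones the patched MergeFromString uses.

#include <climits>
#include <string>

#include <google/protobuf/io/coded_stream.h>

#include "my_message.pb.h"  // hypothetical generated header for MyMessage

// Parses a serialized MyMessage that may exceed the default 64MB limit.
bool ParseOversize(const std::string& serialized, MyMessage* msg) {
  google::protobuf::io::CodedInputStream input(
      reinterpret_cast<const google::protobuf::uint8*>(serialized.data()),
      static_cast<int>(serialized.size()));
  // Same call the patch guards with PROTOBUF_PYTHON_ALLOW_OVERSIZE_PROTOS:
  // INT_MAX effectively removes the total-bytes cap. The whole message must
  // still fit in memory at once.
  input.SetTotalBytesLimit(INT_MAX, INT_MAX);
  return msg->MergePartialFromCodedStream(&input);
}

When the Python extension is built with bazel build --define=allow_oversize_protos=true, the
select() in the BUILD change turns the same behavior on inside MergeFromString, so
message.ParseFromString should then accept payloads past 64MB, at the memory cost described in
the code comment.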