增加玩家管理(没做完,需要steamid绑定名字啥的)
This commit is contained in:
1
csgo2/sdk/protobuf-2.6.1/python/google/__init__.py
Normal file
1
csgo2/sdk/protobuf-2.6.1/python/google/__init__.py
Normal file
@@ -0,0 +1 @@
|
||||
__import__('pkg_resources').declare_namespace(__name__)
|
||||
849
csgo2/sdk/protobuf-2.6.1/python/google/protobuf/descriptor.py
Normal file
849
csgo2/sdk/protobuf-2.6.1/python/google/protobuf/descriptor.py
Normal file
@@ -0,0 +1,849 @@
|
||||
# Protocol Buffers - Google's data interchange format
|
||||
# Copyright 2008 Google Inc. All rights reserved.
|
||||
# https://developers.google.com/protocol-buffers/
|
||||
#
|
||||
# Redistribution and use in source and binary forms, with or without
|
||||
# modification, are permitted provided that the following conditions are
|
||||
# met:
|
||||
#
|
||||
# * Redistributions of source code must retain the above copyright
|
||||
# notice, this list of conditions and the following disclaimer.
|
||||
# * Redistributions in binary form must reproduce the above
|
||||
# copyright notice, this list of conditions and the following disclaimer
|
||||
# in the documentation and/or other materials provided with the
|
||||
# distribution.
|
||||
# * Neither the name of Google Inc. nor the names of its
|
||||
# contributors may be used to endorse or promote products derived from
|
||||
# this software without specific prior written permission.
|
||||
#
|
||||
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
||||
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
||||
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
||||
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
||||
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
||||
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
||||
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
||||
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
||||
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
||||
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
# Needs to stay compatible with Python 2.5 due to GAE.
|
||||
#
|
||||
# Copyright 2007 Google Inc. All Rights Reserved.
|
||||
|
||||
"""Descriptors essentially contain exactly the information found in a .proto
|
||||
file, in types that make this information accessible in Python.
|
||||
"""
|
||||
|
||||
__author__ = 'robinson@google.com (Will Robinson)'
|
||||
|
||||
from google.protobuf.internal import api_implementation
|
||||
|
||||
|
||||
if api_implementation.Type() == 'cpp':
|
||||
# Used by MakeDescriptor in cpp mode
|
||||
import os
|
||||
import uuid
|
||||
|
||||
if api_implementation.Version() == 2:
|
||||
from google.protobuf.pyext import _message
|
||||
else:
|
||||
from google.protobuf.internal import cpp_message
|
||||
|
||||
|
||||
class Error(Exception):
|
||||
"""Base error for this module."""
|
||||
|
||||
|
||||
class TypeTransformationError(Error):
|
||||
"""Error transforming between python proto type and corresponding C++ type."""
|
||||
|
||||
|
||||
class DescriptorBase(object):
|
||||
|
||||
"""Descriptors base class.
|
||||
|
||||
This class is the base of all descriptor classes. It provides common options
|
||||
related functionaility.
|
||||
|
||||
Attributes:
|
||||
has_options: True if the descriptor has non-default options. Usually it
|
||||
is not necessary to read this -- just call GetOptions() which will
|
||||
happily return the default instance. However, it's sometimes useful
|
||||
for efficiency, and also useful inside the protobuf implementation to
|
||||
avoid some bootstrapping issues.
|
||||
"""
|
||||
|
||||
def __init__(self, options, options_class_name):
|
||||
"""Initialize the descriptor given its options message and the name of the
|
||||
class of the options message. The name of the class is required in case
|
||||
the options message is None and has to be created.
|
||||
"""
|
||||
self._options = options
|
||||
self._options_class_name = options_class_name
|
||||
|
||||
# Does this descriptor have non-default options?
|
||||
self.has_options = options is not None
|
||||
|
||||
def _SetOptions(self, options, options_class_name):
|
||||
"""Sets the descriptor's options
|
||||
|
||||
This function is used in generated proto2 files to update descriptor
|
||||
options. It must not be used outside proto2.
|
||||
"""
|
||||
self._options = options
|
||||
self._options_class_name = options_class_name
|
||||
|
||||
# Does this descriptor have non-default options?
|
||||
self.has_options = options is not None
|
||||
|
||||
def GetOptions(self):
|
||||
"""Retrieves descriptor options.
|
||||
|
||||
This method returns the options set or creates the default options for the
|
||||
descriptor.
|
||||
"""
|
||||
if self._options:
|
||||
return self._options
|
||||
from google.protobuf import descriptor_pb2
|
||||
try:
|
||||
options_class = getattr(descriptor_pb2, self._options_class_name)
|
||||
except AttributeError:
|
||||
raise RuntimeError('Unknown options class name %s!' %
|
||||
(self._options_class_name))
|
||||
self._options = options_class()
|
||||
return self._options
|
||||
|
||||
|
||||
class _NestedDescriptorBase(DescriptorBase):
|
||||
"""Common class for descriptors that can be nested."""
|
||||
|
||||
def __init__(self, options, options_class_name, name, full_name,
|
||||
file, containing_type, serialized_start=None,
|
||||
serialized_end=None):
|
||||
"""Constructor.
|
||||
|
||||
Args:
|
||||
options: Protocol message options or None
|
||||
to use default message options.
|
||||
options_class_name: (str) The class name of the above options.
|
||||
|
||||
name: (str) Name of this protocol message type.
|
||||
full_name: (str) Fully-qualified name of this protocol message type,
|
||||
which will include protocol "package" name and the name of any
|
||||
enclosing types.
|
||||
file: (FileDescriptor) Reference to file info.
|
||||
containing_type: if provided, this is a nested descriptor, with this
|
||||
descriptor as parent, otherwise None.
|
||||
serialized_start: The start index (inclusive) in block in the
|
||||
file.serialized_pb that describes this descriptor.
|
||||
serialized_end: The end index (exclusive) in block in the
|
||||
file.serialized_pb that describes this descriptor.
|
||||
"""
|
||||
super(_NestedDescriptorBase, self).__init__(
|
||||
options, options_class_name)
|
||||
|
||||
self.name = name
|
||||
# TODO(falk): Add function to calculate full_name instead of having it in
|
||||
# memory?
|
||||
self.full_name = full_name
|
||||
self.file = file
|
||||
self.containing_type = containing_type
|
||||
|
||||
self._serialized_start = serialized_start
|
||||
self._serialized_end = serialized_end
|
||||
|
||||
def GetTopLevelContainingType(self):
|
||||
"""Returns the root if this is a nested type, or itself if its the root."""
|
||||
desc = self
|
||||
while desc.containing_type is not None:
|
||||
desc = desc.containing_type
|
||||
return desc
|
||||
|
||||
def CopyToProto(self, proto):
|
||||
"""Copies this to the matching proto in descriptor_pb2.
|
||||
|
||||
Args:
|
||||
proto: An empty proto instance from descriptor_pb2.
|
||||
|
||||
Raises:
|
||||
Error: If self couldnt be serialized, due to to few constructor arguments.
|
||||
"""
|
||||
if (self.file is not None and
|
||||
self._serialized_start is not None and
|
||||
self._serialized_end is not None):
|
||||
proto.ParseFromString(self.file.serialized_pb[
|
||||
self._serialized_start:self._serialized_end])
|
||||
else:
|
||||
raise Error('Descriptor does not contain serialization.')
|
||||
|
||||
|
||||
class Descriptor(_NestedDescriptorBase):
|
||||
|
||||
"""Descriptor for a protocol message type.
|
||||
|
||||
A Descriptor instance has the following attributes:
|
||||
|
||||
name: (str) Name of this protocol message type.
|
||||
full_name: (str) Fully-qualified name of this protocol message type,
|
||||
which will include protocol "package" name and the name of any
|
||||
enclosing types.
|
||||
|
||||
containing_type: (Descriptor) Reference to the descriptor of the
|
||||
type containing us, or None if this is top-level.
|
||||
|
||||
fields: (list of FieldDescriptors) Field descriptors for all
|
||||
fields in this type.
|
||||
fields_by_number: (dict int -> FieldDescriptor) Same FieldDescriptor
|
||||
objects as in |fields|, but indexed by "number" attribute in each
|
||||
FieldDescriptor.
|
||||
fields_by_name: (dict str -> FieldDescriptor) Same FieldDescriptor
|
||||
objects as in |fields|, but indexed by "name" attribute in each
|
||||
FieldDescriptor.
|
||||
|
||||
nested_types: (list of Descriptors) Descriptor references
|
||||
for all protocol message types nested within this one.
|
||||
nested_types_by_name: (dict str -> Descriptor) Same Descriptor
|
||||
objects as in |nested_types|, but indexed by "name" attribute
|
||||
in each Descriptor.
|
||||
|
||||
enum_types: (list of EnumDescriptors) EnumDescriptor references
|
||||
for all enums contained within this type.
|
||||
enum_types_by_name: (dict str ->EnumDescriptor) Same EnumDescriptor
|
||||
objects as in |enum_types|, but indexed by "name" attribute
|
||||
in each EnumDescriptor.
|
||||
enum_values_by_name: (dict str -> EnumValueDescriptor) Dict mapping
|
||||
from enum value name to EnumValueDescriptor for that value.
|
||||
|
||||
extensions: (list of FieldDescriptor) All extensions defined directly
|
||||
within this message type (NOT within a nested type).
|
||||
extensions_by_name: (dict, string -> FieldDescriptor) Same FieldDescriptor
|
||||
objects as |extensions|, but indexed by "name" attribute of each
|
||||
FieldDescriptor.
|
||||
|
||||
is_extendable: Does this type define any extension ranges?
|
||||
|
||||
options: (descriptor_pb2.MessageOptions) Protocol message options or None
|
||||
to use default message options.
|
||||
|
||||
oneofs: (list of OneofDescriptor) The list of descriptors for oneof fields
|
||||
in this message.
|
||||
oneofs_by_name: (dict str -> OneofDescriptor) Same objects as in |oneofs|,
|
||||
but indexed by "name" attribute.
|
||||
|
||||
file: (FileDescriptor) Reference to file descriptor.
|
||||
"""
|
||||
|
||||
# NOTE(tmarek): The file argument redefining a builtin is nothing we can
|
||||
# fix right now since we don't know how many clients already rely on the
|
||||
# name of the argument.
|
||||
def __init__(self, name, full_name, filename, containing_type, fields,
|
||||
nested_types, enum_types, extensions, options=None,
|
||||
is_extendable=True, extension_ranges=None, oneofs=None,
|
||||
file=None, serialized_start=None, serialized_end=None): # pylint:disable=redefined-builtin
|
||||
"""Arguments to __init__() are as described in the description
|
||||
of Descriptor fields above.
|
||||
|
||||
Note that filename is an obsolete argument, that is not used anymore.
|
||||
Please use file.name to access this as an attribute.
|
||||
"""
|
||||
super(Descriptor, self).__init__(
|
||||
options, 'MessageOptions', name, full_name, file,
|
||||
containing_type, serialized_start=serialized_start,
|
||||
serialized_end=serialized_end)
|
||||
|
||||
# We have fields in addition to fields_by_name and fields_by_number,
|
||||
# so that:
|
||||
# 1. Clients can index fields by "order in which they're listed."
|
||||
# 2. Clients can easily iterate over all fields with the terse
|
||||
# syntax: for f in descriptor.fields: ...
|
||||
self.fields = fields
|
||||
for field in self.fields:
|
||||
field.containing_type = self
|
||||
self.fields_by_number = dict((f.number, f) for f in fields)
|
||||
self.fields_by_name = dict((f.name, f) for f in fields)
|
||||
|
||||
self.nested_types = nested_types
|
||||
for nested_type in nested_types:
|
||||
nested_type.containing_type = self
|
||||
self.nested_types_by_name = dict((t.name, t) for t in nested_types)
|
||||
|
||||
self.enum_types = enum_types
|
||||
for enum_type in self.enum_types:
|
||||
enum_type.containing_type = self
|
||||
self.enum_types_by_name = dict((t.name, t) for t in enum_types)
|
||||
self.enum_values_by_name = dict(
|
||||
(v.name, v) for t in enum_types for v in t.values)
|
||||
|
||||
self.extensions = extensions
|
||||
for extension in self.extensions:
|
||||
extension.extension_scope = self
|
||||
self.extensions_by_name = dict((f.name, f) for f in extensions)
|
||||
self.is_extendable = is_extendable
|
||||
self.extension_ranges = extension_ranges
|
||||
self.oneofs = oneofs if oneofs is not None else []
|
||||
self.oneofs_by_name = dict((o.name, o) for o in self.oneofs)
|
||||
for oneof in self.oneofs:
|
||||
oneof.containing_type = self
|
||||
|
||||
def EnumValueName(self, enum, value):
|
||||
"""Returns the string name of an enum value.
|
||||
|
||||
This is just a small helper method to simplify a common operation.
|
||||
|
||||
Args:
|
||||
enum: string name of the Enum.
|
||||
value: int, value of the enum.
|
||||
|
||||
Returns:
|
||||
string name of the enum value.
|
||||
|
||||
Raises:
|
||||
KeyError if either the Enum doesn't exist or the value is not a valid
|
||||
value for the enum.
|
||||
"""
|
||||
return self.enum_types_by_name[enum].values_by_number[value].name
|
||||
|
||||
def CopyToProto(self, proto):
|
||||
"""Copies this to a descriptor_pb2.DescriptorProto.
|
||||
|
||||
Args:
|
||||
proto: An empty descriptor_pb2.DescriptorProto.
|
||||
"""
|
||||
# This function is overriden to give a better doc comment.
|
||||
super(Descriptor, self).CopyToProto(proto)
|
||||
|
||||
|
||||
# TODO(robinson): We should have aggressive checking here,
|
||||
# for example:
|
||||
# * If you specify a repeated field, you should not be allowed
|
||||
# to specify a default value.
|
||||
# * [Other examples here as needed].
|
||||
#
|
||||
# TODO(robinson): for this and other *Descriptor classes, we
|
||||
# might also want to lock things down aggressively (e.g.,
|
||||
# prevent clients from setting the attributes). Having
|
||||
# stronger invariants here in general will reduce the number
|
||||
# of runtime checks we must do in reflection.py...
|
||||
class FieldDescriptor(DescriptorBase):
|
||||
|
||||
"""Descriptor for a single field in a .proto file.
|
||||
|
||||
A FieldDescriptor instance has the following attributes:
|
||||
|
||||
name: (str) Name of this field, exactly as it appears in .proto.
|
||||
full_name: (str) Name of this field, including containing scope. This is
|
||||
particularly relevant for extensions.
|
||||
index: (int) Dense, 0-indexed index giving the order that this
|
||||
field textually appears within its message in the .proto file.
|
||||
number: (int) Tag number declared for this field in the .proto file.
|
||||
|
||||
type: (One of the TYPE_* constants below) Declared type.
|
||||
cpp_type: (One of the CPPTYPE_* constants below) C++ type used to
|
||||
represent this field.
|
||||
|
||||
label: (One of the LABEL_* constants below) Tells whether this
|
||||
field is optional, required, or repeated.
|
||||
has_default_value: (bool) True if this field has a default value defined,
|
||||
otherwise false.
|
||||
default_value: (Varies) Default value of this field. Only
|
||||
meaningful for non-repeated scalar fields. Repeated fields
|
||||
should always set this to [], and non-repeated composite
|
||||
fields should always set this to None.
|
||||
|
||||
containing_type: (Descriptor) Descriptor of the protocol message
|
||||
type that contains this field. Set by the Descriptor constructor
|
||||
if we're passed into one.
|
||||
Somewhat confusingly, for extension fields, this is the
|
||||
descriptor of the EXTENDED message, not the descriptor
|
||||
of the message containing this field. (See is_extension and
|
||||
extension_scope below).
|
||||
message_type: (Descriptor) If a composite field, a descriptor
|
||||
of the message type contained in this field. Otherwise, this is None.
|
||||
enum_type: (EnumDescriptor) If this field contains an enum, a
|
||||
descriptor of that enum. Otherwise, this is None.
|
||||
|
||||
is_extension: True iff this describes an extension field.
|
||||
extension_scope: (Descriptor) Only meaningful if is_extension is True.
|
||||
Gives the message that immediately contains this extension field.
|
||||
Will be None iff we're a top-level (file-level) extension field.
|
||||
|
||||
options: (descriptor_pb2.FieldOptions) Protocol message field options or
|
||||
None to use default field options.
|
||||
|
||||
containing_oneof: (OneofDescriptor) If the field is a member of a oneof
|
||||
union, contains its descriptor. Otherwise, None.
|
||||
"""
|
||||
|
||||
# Must be consistent with C++ FieldDescriptor::Type enum in
|
||||
# descriptor.h.
|
||||
#
|
||||
# TODO(robinson): Find a way to eliminate this repetition.
|
||||
TYPE_DOUBLE = 1
|
||||
TYPE_FLOAT = 2
|
||||
TYPE_INT64 = 3
|
||||
TYPE_UINT64 = 4
|
||||
TYPE_INT32 = 5
|
||||
TYPE_FIXED64 = 6
|
||||
TYPE_FIXED32 = 7
|
||||
TYPE_BOOL = 8
|
||||
TYPE_STRING = 9
|
||||
TYPE_GROUP = 10
|
||||
TYPE_MESSAGE = 11
|
||||
TYPE_BYTES = 12
|
||||
TYPE_UINT32 = 13
|
||||
TYPE_ENUM = 14
|
||||
TYPE_SFIXED32 = 15
|
||||
TYPE_SFIXED64 = 16
|
||||
TYPE_SINT32 = 17
|
||||
TYPE_SINT64 = 18
|
||||
MAX_TYPE = 18
|
||||
|
||||
# Must be consistent with C++ FieldDescriptor::CppType enum in
|
||||
# descriptor.h.
|
||||
#
|
||||
# TODO(robinson): Find a way to eliminate this repetition.
|
||||
CPPTYPE_INT32 = 1
|
||||
CPPTYPE_INT64 = 2
|
||||
CPPTYPE_UINT32 = 3
|
||||
CPPTYPE_UINT64 = 4
|
||||
CPPTYPE_DOUBLE = 5
|
||||
CPPTYPE_FLOAT = 6
|
||||
CPPTYPE_BOOL = 7
|
||||
CPPTYPE_ENUM = 8
|
||||
CPPTYPE_STRING = 9
|
||||
CPPTYPE_MESSAGE = 10
|
||||
MAX_CPPTYPE = 10
|
||||
|
||||
_PYTHON_TO_CPP_PROTO_TYPE_MAP = {
|
||||
TYPE_DOUBLE: CPPTYPE_DOUBLE,
|
||||
TYPE_FLOAT: CPPTYPE_FLOAT,
|
||||
TYPE_ENUM: CPPTYPE_ENUM,
|
||||
TYPE_INT64: CPPTYPE_INT64,
|
||||
TYPE_SINT64: CPPTYPE_INT64,
|
||||
TYPE_SFIXED64: CPPTYPE_INT64,
|
||||
TYPE_UINT64: CPPTYPE_UINT64,
|
||||
TYPE_FIXED64: CPPTYPE_UINT64,
|
||||
TYPE_INT32: CPPTYPE_INT32,
|
||||
TYPE_SFIXED32: CPPTYPE_INT32,
|
||||
TYPE_SINT32: CPPTYPE_INT32,
|
||||
TYPE_UINT32: CPPTYPE_UINT32,
|
||||
TYPE_FIXED32: CPPTYPE_UINT32,
|
||||
TYPE_BYTES: CPPTYPE_STRING,
|
||||
TYPE_STRING: CPPTYPE_STRING,
|
||||
TYPE_BOOL: CPPTYPE_BOOL,
|
||||
TYPE_MESSAGE: CPPTYPE_MESSAGE,
|
||||
TYPE_GROUP: CPPTYPE_MESSAGE
|
||||
}
|
||||
|
||||
# Must be consistent with C++ FieldDescriptor::Label enum in
|
||||
# descriptor.h.
|
||||
#
|
||||
# TODO(robinson): Find a way to eliminate this repetition.
|
||||
LABEL_OPTIONAL = 1
|
||||
LABEL_REQUIRED = 2
|
||||
LABEL_REPEATED = 3
|
||||
MAX_LABEL = 3
|
||||
|
||||
# Must be consistent with C++ constants kMaxNumber, kFirstReservedNumber,
|
||||
# and kLastReservedNumber in descriptor.h
|
||||
MAX_FIELD_NUMBER = (1 << 29) - 1
|
||||
FIRST_RESERVED_FIELD_NUMBER = 19000
|
||||
LAST_RESERVED_FIELD_NUMBER = 19999
|
||||
|
||||
def __init__(self, name, full_name, index, number, type, cpp_type, label,
|
||||
default_value, message_type, enum_type, containing_type,
|
||||
is_extension, extension_scope, options=None,
|
||||
has_default_value=True, containing_oneof=None):
|
||||
"""The arguments are as described in the description of FieldDescriptor
|
||||
attributes above.
|
||||
|
||||
Note that containing_type may be None, and may be set later if necessary
|
||||
(to deal with circular references between message types, for example).
|
||||
Likewise for extension_scope.
|
||||
"""
|
||||
super(FieldDescriptor, self).__init__(options, 'FieldOptions')
|
||||
self.name = name
|
||||
self.full_name = full_name
|
||||
self.index = index
|
||||
self.number = number
|
||||
self.type = type
|
||||
self.cpp_type = cpp_type
|
||||
self.label = label
|
||||
self.has_default_value = has_default_value
|
||||
self.default_value = default_value
|
||||
self.containing_type = containing_type
|
||||
self.message_type = message_type
|
||||
self.enum_type = enum_type
|
||||
self.is_extension = is_extension
|
||||
self.extension_scope = extension_scope
|
||||
self.containing_oneof = containing_oneof
|
||||
if api_implementation.Type() == 'cpp':
|
||||
if is_extension:
|
||||
if api_implementation.Version() == 2:
|
||||
# pylint: disable=protected-access
|
||||
self._cdescriptor = (
|
||||
_message.Message._GetExtensionDescriptor(full_name))
|
||||
# pylint: enable=protected-access
|
||||
else:
|
||||
self._cdescriptor = cpp_message.GetExtensionDescriptor(full_name)
|
||||
else:
|
||||
if api_implementation.Version() == 2:
|
||||
# pylint: disable=protected-access
|
||||
self._cdescriptor = _message.Message._GetFieldDescriptor(full_name)
|
||||
# pylint: enable=protected-access
|
||||
else:
|
||||
self._cdescriptor = cpp_message.GetFieldDescriptor(full_name)
|
||||
else:
|
||||
self._cdescriptor = None
|
||||
|
||||
@staticmethod
|
||||
def ProtoTypeToCppProtoType(proto_type):
|
||||
"""Converts from a Python proto type to a C++ Proto Type.
|
||||
|
||||
The Python ProtocolBuffer classes specify both the 'Python' datatype and the
|
||||
'C++' datatype - and they're not the same. This helper method should
|
||||
translate from one to another.
|
||||
|
||||
Args:
|
||||
proto_type: the Python proto type (descriptor.FieldDescriptor.TYPE_*)
|
||||
Returns:
|
||||
descriptor.FieldDescriptor.CPPTYPE_*, the C++ type.
|
||||
Raises:
|
||||
TypeTransformationError: when the Python proto type isn't known.
|
||||
"""
|
||||
try:
|
||||
return FieldDescriptor._PYTHON_TO_CPP_PROTO_TYPE_MAP[proto_type]
|
||||
except KeyError:
|
||||
raise TypeTransformationError('Unknown proto_type: %s' % proto_type)
|
||||
|
||||
|
||||
class EnumDescriptor(_NestedDescriptorBase):
|
||||
|
||||
"""Descriptor for an enum defined in a .proto file.
|
||||
|
||||
An EnumDescriptor instance has the following attributes:
|
||||
|
||||
name: (str) Name of the enum type.
|
||||
full_name: (str) Full name of the type, including package name
|
||||
and any enclosing type(s).
|
||||
|
||||
values: (list of EnumValueDescriptors) List of the values
|
||||
in this enum.
|
||||
values_by_name: (dict str -> EnumValueDescriptor) Same as |values|,
|
||||
but indexed by the "name" field of each EnumValueDescriptor.
|
||||
values_by_number: (dict int -> EnumValueDescriptor) Same as |values|,
|
||||
but indexed by the "number" field of each EnumValueDescriptor.
|
||||
containing_type: (Descriptor) Descriptor of the immediate containing
|
||||
type of this enum, or None if this is an enum defined at the
|
||||
top level in a .proto file. Set by Descriptor's constructor
|
||||
if we're passed into one.
|
||||
file: (FileDescriptor) Reference to file descriptor.
|
||||
options: (descriptor_pb2.EnumOptions) Enum options message or
|
||||
None to use default enum options.
|
||||
"""
|
||||
|
||||
def __init__(self, name, full_name, filename, values,
|
||||
containing_type=None, options=None, file=None,
|
||||
serialized_start=None, serialized_end=None):
|
||||
"""Arguments are as described in the attribute description above.
|
||||
|
||||
Note that filename is an obsolete argument, that is not used anymore.
|
||||
Please use file.name to access this as an attribute.
|
||||
"""
|
||||
super(EnumDescriptor, self).__init__(
|
||||
options, 'EnumOptions', name, full_name, file,
|
||||
containing_type, serialized_start=serialized_start,
|
||||
serialized_end=serialized_end)
|
||||
|
||||
self.values = values
|
||||
for value in self.values:
|
||||
value.type = self
|
||||
self.values_by_name = dict((v.name, v) for v in values)
|
||||
self.values_by_number = dict((v.number, v) for v in values)
|
||||
|
||||
def CopyToProto(self, proto):
|
||||
"""Copies this to a descriptor_pb2.EnumDescriptorProto.
|
||||
|
||||
Args:
|
||||
proto: An empty descriptor_pb2.EnumDescriptorProto.
|
||||
"""
|
||||
# This function is overriden to give a better doc comment.
|
||||
super(EnumDescriptor, self).CopyToProto(proto)
|
||||
|
||||
|
||||
class EnumValueDescriptor(DescriptorBase):
|
||||
|
||||
"""Descriptor for a single value within an enum.
|
||||
|
||||
name: (str) Name of this value.
|
||||
index: (int) Dense, 0-indexed index giving the order that this
|
||||
value appears textually within its enum in the .proto file.
|
||||
number: (int) Actual number assigned to this enum value.
|
||||
type: (EnumDescriptor) EnumDescriptor to which this value
|
||||
belongs. Set by EnumDescriptor's constructor if we're
|
||||
passed into one.
|
||||
options: (descriptor_pb2.EnumValueOptions) Enum value options message or
|
||||
None to use default enum value options options.
|
||||
"""
|
||||
|
||||
def __init__(self, name, index, number, type=None, options=None):
|
||||
"""Arguments are as described in the attribute description above."""
|
||||
super(EnumValueDescriptor, self).__init__(options, 'EnumValueOptions')
|
||||
self.name = name
|
||||
self.index = index
|
||||
self.number = number
|
||||
self.type = type
|
||||
|
||||
|
||||
class OneofDescriptor(object):
|
||||
"""Descriptor for a oneof field.
|
||||
|
||||
name: (str) Name of the oneof field.
|
||||
full_name: (str) Full name of the oneof field, including package name.
|
||||
index: (int) 0-based index giving the order of the oneof field inside
|
||||
its containing type.
|
||||
containing_type: (Descriptor) Descriptor of the protocol message
|
||||
type that contains this field. Set by the Descriptor constructor
|
||||
if we're passed into one.
|
||||
fields: (list of FieldDescriptor) The list of field descriptors this
|
||||
oneof can contain.
|
||||
"""
|
||||
|
||||
def __init__(self, name, full_name, index, containing_type, fields):
|
||||
"""Arguments are as described in the attribute description above."""
|
||||
self.name = name
|
||||
self.full_name = full_name
|
||||
self.index = index
|
||||
self.containing_type = containing_type
|
||||
self.fields = fields
|
||||
|
||||
|
||||
class ServiceDescriptor(_NestedDescriptorBase):
|
||||
|
||||
"""Descriptor for a service.
|
||||
|
||||
name: (str) Name of the service.
|
||||
full_name: (str) Full name of the service, including package name.
|
||||
index: (int) 0-indexed index giving the order that this services
|
||||
definition appears withing the .proto file.
|
||||
methods: (list of MethodDescriptor) List of methods provided by this
|
||||
service.
|
||||
options: (descriptor_pb2.ServiceOptions) Service options message or
|
||||
None to use default service options.
|
||||
file: (FileDescriptor) Reference to file info.
|
||||
"""
|
||||
|
||||
def __init__(self, name, full_name, index, methods, options=None, file=None,
|
||||
serialized_start=None, serialized_end=None):
|
||||
super(ServiceDescriptor, self).__init__(
|
||||
options, 'ServiceOptions', name, full_name, file,
|
||||
None, serialized_start=serialized_start,
|
||||
serialized_end=serialized_end)
|
||||
self.index = index
|
||||
self.methods = methods
|
||||
# Set the containing service for each method in this service.
|
||||
for method in self.methods:
|
||||
method.containing_service = self
|
||||
|
||||
def FindMethodByName(self, name):
|
||||
"""Searches for the specified method, and returns its descriptor."""
|
||||
for method in self.methods:
|
||||
if name == method.name:
|
||||
return method
|
||||
return None
|
||||
|
||||
def CopyToProto(self, proto):
|
||||
"""Copies this to a descriptor_pb2.ServiceDescriptorProto.
|
||||
|
||||
Args:
|
||||
proto: An empty descriptor_pb2.ServiceDescriptorProto.
|
||||
"""
|
||||
# This function is overriden to give a better doc comment.
|
||||
super(ServiceDescriptor, self).CopyToProto(proto)
|
||||
|
||||
|
||||
class MethodDescriptor(DescriptorBase):
|
||||
|
||||
"""Descriptor for a method in a service.
|
||||
|
||||
name: (str) Name of the method within the service.
|
||||
full_name: (str) Full name of method.
|
||||
index: (int) 0-indexed index of the method inside the service.
|
||||
containing_service: (ServiceDescriptor) The service that contains this
|
||||
method.
|
||||
input_type: The descriptor of the message that this method accepts.
|
||||
output_type: The descriptor of the message that this method returns.
|
||||
options: (descriptor_pb2.MethodOptions) Method options message or
|
||||
None to use default method options.
|
||||
"""
|
||||
|
||||
def __init__(self, name, full_name, index, containing_service,
|
||||
input_type, output_type, options=None):
|
||||
"""The arguments are as described in the description of MethodDescriptor
|
||||
attributes above.
|
||||
|
||||
Note that containing_service may be None, and may be set later if necessary.
|
||||
"""
|
||||
super(MethodDescriptor, self).__init__(options, 'MethodOptions')
|
||||
self.name = name
|
||||
self.full_name = full_name
|
||||
self.index = index
|
||||
self.containing_service = containing_service
|
||||
self.input_type = input_type
|
||||
self.output_type = output_type
|
||||
|
||||
|
||||
class FileDescriptor(DescriptorBase):
|
||||
"""Descriptor for a file. Mimics the descriptor_pb2.FileDescriptorProto.
|
||||
|
||||
Note that enum_types_by_name, extensions_by_name, and dependencies
|
||||
fields are only set by the message_factory module, and not by the
|
||||
generated proto code.
|
||||
|
||||
name: name of file, relative to root of source tree.
|
||||
package: name of the package
|
||||
serialized_pb: (str) Byte string of serialized
|
||||
descriptor_pb2.FileDescriptorProto.
|
||||
dependencies: List of other FileDescriptors this FileDescriptor depends on.
|
||||
message_types_by_name: Dict of message names of their descriptors.
|
||||
enum_types_by_name: Dict of enum names and their descriptors.
|
||||
extensions_by_name: Dict of extension names and their descriptors.
|
||||
"""
|
||||
|
||||
def __init__(self, name, package, options=None, serialized_pb=None,
|
||||
dependencies=None):
|
||||
"""Constructor."""
|
||||
super(FileDescriptor, self).__init__(options, 'FileOptions')
|
||||
|
||||
self.message_types_by_name = {}
|
||||
self.name = name
|
||||
self.package = package
|
||||
self.serialized_pb = serialized_pb
|
||||
|
||||
self.enum_types_by_name = {}
|
||||
self.extensions_by_name = {}
|
||||
self.dependencies = (dependencies or [])
|
||||
|
||||
if (api_implementation.Type() == 'cpp' and
|
||||
self.serialized_pb is not None):
|
||||
if api_implementation.Version() == 2:
|
||||
# pylint: disable=protected-access
|
||||
_message.Message._BuildFile(self.serialized_pb)
|
||||
# pylint: enable=protected-access
|
||||
else:
|
||||
cpp_message.BuildFile(self.serialized_pb)
|
||||
|
||||
def CopyToProto(self, proto):
|
||||
"""Copies this to a descriptor_pb2.FileDescriptorProto.
|
||||
|
||||
Args:
|
||||
proto: An empty descriptor_pb2.FileDescriptorProto.
|
||||
"""
|
||||
proto.ParseFromString(self.serialized_pb)
|
||||
|
||||
|
||||
def _ParseOptions(message, string):
|
||||
"""Parses serialized options.
|
||||
|
||||
This helper function is used to parse serialized options in generated
|
||||
proto2 files. It must not be used outside proto2.
|
||||
"""
|
||||
message.ParseFromString(string)
|
||||
return message
|
||||
|
||||
|
||||
def MakeDescriptor(desc_proto, package='', build_file_if_cpp=True):
  """Make a protobuf Descriptor given a DescriptorProto protobuf.

  Handles nested descriptors. Note that this is limited to the scope of defining
  a message inside of another message. Composite fields can currently only be
  resolved if the message is defined in the same scope as the field.

  Args:
    desc_proto: The descriptor_pb2.DescriptorProto protobuf message.
    package: Optional package name for the new message Descriptor (string).
    build_file_if_cpp: Update the C++ descriptor pool if api matches.
      Set to False on recursion, so no duplicates are created.
  Returns:
    A Descriptor for protobuf messages.
  """
  if api_implementation.Type() == 'cpp' and build_file_if_cpp:
    # The C++ implementation requires all descriptors to be backed by the same
    # definition in the C++ descriptor pool. To do this, we build a
    # FileDescriptorProto with the same definition as this descriptor and build
    # it into the pool.
    from google.protobuf import descriptor_pb2
    file_descriptor_proto = descriptor_pb2.FileDescriptorProto()
    file_descriptor_proto.message_type.add().MergeFrom(desc_proto)

    # Generate a random name for this proto file to prevent conflicts with
    # any imported ones. We need to specify a file name so BuildFile accepts
    # our FileDescriptorProto, but it is not important what that file name
    # is actually set to.
    proto_name = str(uuid.uuid4())

    if package:
      file_descriptor_proto.name = os.path.join(package.replace('.', '/'),
                                                proto_name + '.proto')
      file_descriptor_proto.package = package
    else:
      file_descriptor_proto.name = proto_name + '.proto'

    if api_implementation.Version() == 2:
      # pylint: disable=protected-access
      _message.Message._BuildFile(file_descriptor_proto.SerializeToString())
      # pylint: enable=protected-access
    else:
      cpp_message.BuildFile(file_descriptor_proto.SerializeToString())

  # full_message_name accumulates the dotted path components of this message.
  full_message_name = [desc_proto.name]
  if package: full_message_name.insert(0, package)

  # Create Descriptors for enum types
  enum_types = {}
  for enum_proto in desc_proto.enum_type:
    full_name = '.'.join(full_message_name + [enum_proto.name])
    enum_desc = EnumDescriptor(
      enum_proto.name, full_name, None, [
          EnumValueDescriptor(enum_val.name, ii, enum_val.number)
          for ii, enum_val in enumerate(enum_proto.value)])
    enum_types[full_name] = enum_desc

  # Create Descriptors for nested types
  nested_types = {}
  for nested_proto in desc_proto.nested_type:
    full_name = '.'.join(full_message_name + [nested_proto.name])
    # Nested types are just those defined inside of the message, not all types
    # used by fields in the message, so no loops are possible here.
    nested_desc = MakeDescriptor(nested_proto,
                                 package='.'.join(full_message_name),
                                 build_file_if_cpp=False)
    nested_types[full_name] = nested_desc

  fields = []
  for field_proto in desc_proto.field:
    full_name = '.'.join(full_message_name + [field_proto.name])
    enum_desc = None
    nested_desc = None
    if field_proto.HasField('type_name'):
      # Only the last path component of type_name is used for lookup, so only
      # types defined in this same scope (built above) can be resolved.
      type_name = field_proto.type_name
      full_type_name = '.'.join(full_message_name +
                                [type_name[type_name.rfind('.')+1:]])
      if full_type_name in nested_types:
        nested_desc = nested_types[full_type_name]
      elif full_type_name in enum_types:
        enum_desc = enum_types[full_type_name]
      # Else type_name references a non-local type, which isn't implemented
    # NOTE(review): field index is taken as number - 1, which assumes field
    # numbers are contiguous starting at 1 — TODO confirm for sparse numbering.
    field = FieldDescriptor(
        field_proto.name, full_name, field_proto.number - 1,
        field_proto.number, field_proto.type,
        FieldDescriptor.ProtoTypeToCppProtoType(field_proto.type),
        field_proto.label, None, nested_desc, enum_desc, None, False, None,
        has_default_value=False)
    fields.append(field)

  desc_name = '.'.join(full_message_name)
  return Descriptor(desc_proto.name, desc_name, None, None, fields,
                    nested_types.values(), enum_types.values(), [])
|
||||
@@ -0,0 +1,137 @@
|
||||
# Protocol Buffers - Google's data interchange format
|
||||
# Copyright 2008 Google Inc. All rights reserved.
|
||||
# https://developers.google.com/protocol-buffers/
|
||||
#
|
||||
# Redistribution and use in source and binary forms, with or without
|
||||
# modification, are permitted provided that the following conditions are
|
||||
# met:
|
||||
#
|
||||
# * Redistributions of source code must retain the above copyright
|
||||
# notice, this list of conditions and the following disclaimer.
|
||||
# * Redistributions in binary form must reproduce the above
|
||||
# copyright notice, this list of conditions and the following disclaimer
|
||||
# in the documentation and/or other materials provided with the
|
||||
# distribution.
|
||||
# * Neither the name of Google Inc. nor the names of its
|
||||
# contributors may be used to endorse or promote products derived from
|
||||
# this software without specific prior written permission.
|
||||
#
|
||||
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
||||
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
||||
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
||||
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
||||
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
||||
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
||||
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
||||
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
||||
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
||||
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
"""Provides a container for DescriptorProtos."""
|
||||
|
||||
__author__ = 'matthewtoia@google.com (Matt Toia)'
|
||||
|
||||
|
||||
class Error(Exception):
  """Base exception type for errors raised by this module."""
  pass
|
||||
|
||||
|
||||
# Subclasses the module-level Error base so callers can catch either type.
class DescriptorDatabaseConflictingDefinitionError(Error):
  """Raised when a proto is added with the same name & different descriptor."""
|
||||
|
||||
|
||||
class DescriptorDatabase(object):
  """A container accepting FileDescriptorProtos and maps DescriptorProtos."""

  def __init__(self):
    # Maps proto file name -> FileDescriptorProto.
    self._file_desc_protos_by_file = {}
    # Maps fully qualified symbol name -> FileDescriptorProto defining it.
    self._file_desc_protos_by_symbol = {}

  def Add(self, file_desc_proto):
    """Adds the FileDescriptorProto and its types to this database.

    Args:
      file_desc_proto: The FileDescriptorProto to add.
    Raises:
      DescriptorDatabaseConflictingDefinitionError: if an attempt is made to
        add a proto with the same name but a different definition than an
        existing proto in the database.
    """
    proto_name = file_desc_proto.name
    by_file = self._file_desc_protos_by_file
    if proto_name in by_file:
      # Re-adding an identical proto is a harmless no-op; a different
      # definition under the same name is a hard error.
      if by_file[proto_name] != file_desc_proto:
        raise DescriptorDatabaseConflictingDefinitionError(
            '%s already added, but with different descriptor.' % proto_name)
    else:
      by_file[proto_name] = file_desc_proto

    # Index every symbol (messages, nested types, enums) back to this file.
    package = file_desc_proto.package
    by_symbol = self._file_desc_protos_by_symbol
    for message in file_desc_proto.message_type:
      for symbol in _ExtractSymbols(message, package):
        by_symbol[symbol] = file_desc_proto
    for enum in file_desc_proto.enum_type:
      by_symbol['.'.join((package, enum.name))] = file_desc_proto

  def FindFileByName(self, name):
    """Finds the file descriptor proto by file name.

    Typically the file name is a relative path ending to a .proto file. The
    proto with the given name will have to have been added to this database
    using the Add method or else an error will be raised.

    Args:
      name: The file name to find.

    Returns:
      The file descriptor proto matching the name.

    Raises:
      KeyError if no file by the given name was added.
    """
    return self._file_desc_protos_by_file[name]

  def FindFileContainingSymbol(self, symbol):
    """Finds the file descriptor proto containing the specified symbol.

    The symbol should be a fully qualified name including the file descriptor's
    package and any containing messages. Some examples:

    'some.package.name.Message'
    'some.package.name.Message.NestedEnum'

    The file descriptor proto containing the specified symbol must be added to
    this database using the Add method or else an error will be raised.

    Args:
      symbol: The fully qualified symbol name.

    Returns:
      The file descriptor proto containing the symbol.

    Raises:
      KeyError if no file contains the specified symbol.
    """
    return self._file_desc_protos_by_symbol[symbol]
|
||||
|
||||
|
||||
def _ExtractSymbols(desc_proto, package):
|
||||
"""Pulls out all the symbols from a descriptor proto.
|
||||
|
||||
Args:
|
||||
desc_proto: The proto to extract symbols from.
|
||||
package: The package containing the descriptor type.
|
||||
|
||||
Yields:
|
||||
The fully qualified name found in the descriptor.
|
||||
"""
|
||||
|
||||
message_name = '.'.join((package, desc_proto.name))
|
||||
yield message_name
|
||||
for nested_type in desc_proto.nested_type:
|
||||
for symbol in _ExtractSymbols(nested_type, message_name):
|
||||
yield symbol
|
||||
for enum_type in desc_proto.enum_type:
|
||||
yield '.'.join((message_name, enum_type.name))
|
||||
@@ -0,0 +1,643 @@
|
||||
# Protocol Buffers - Google's data interchange format
|
||||
# Copyright 2008 Google Inc. All rights reserved.
|
||||
# https://developers.google.com/protocol-buffers/
|
||||
#
|
||||
# Redistribution and use in source and binary forms, with or without
|
||||
# modification, are permitted provided that the following conditions are
|
||||
# met:
|
||||
#
|
||||
# * Redistributions of source code must retain the above copyright
|
||||
# notice, this list of conditions and the following disclaimer.
|
||||
# * Redistributions in binary form must reproduce the above
|
||||
# copyright notice, this list of conditions and the following disclaimer
|
||||
# in the documentation and/or other materials provided with the
|
||||
# distribution.
|
||||
# * Neither the name of Google Inc. nor the names of its
|
||||
# contributors may be used to endorse or promote products derived from
|
||||
# this software without specific prior written permission.
|
||||
#
|
||||
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
||||
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
||||
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
||||
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
||||
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
||||
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
||||
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
||||
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
||||
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
||||
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
"""Provides DescriptorPool to use as a container for proto2 descriptors.
|
||||
|
||||
The DescriptorPool is used in conjunction with a DescriptorDatabase to maintain
|
||||
a collection of protocol buffer descriptors for use when dynamically creating
|
||||
message types at runtime.
|
||||
|
||||
For most applications protocol buffers should be used via modules generated by
|
||||
the protocol buffer compiler tool. This should only be used when the type of
|
||||
protocol buffers used in an application or library cannot be predetermined.
|
||||
|
||||
Below is a straightforward example on how to use this class:
|
||||
|
||||
pool = DescriptorPool()
|
||||
file_descriptor_protos = [ ... ]
|
||||
for file_descriptor_proto in file_descriptor_protos:
|
||||
pool.Add(file_descriptor_proto)
|
||||
my_message_descriptor = pool.FindMessageTypeByName('some.package.MessageType')
|
||||
|
||||
The message descriptor can be used in conjunction with the message_factory
|
||||
module in order to create a protocol buffer class that can be encoded and
|
||||
decoded.
|
||||
|
||||
If you want to get a Python class for the specified proto, use the
|
||||
helper functions inside google.protobuf.message_factory
|
||||
directly instead of this class.
|
||||
"""
|
||||
|
||||
__author__ = 'matthewtoia@google.com (Matt Toia)'
|
||||
|
||||
import sys
|
||||
|
||||
from google.protobuf import descriptor
|
||||
from google.protobuf import descriptor_database
|
||||
from google.protobuf import text_encoding
|
||||
|
||||
|
||||
def _NormalizeFullyQualifiedName(name):
|
||||
"""Remove leading period from fully-qualified type name.
|
||||
|
||||
Due to b/13860351 in descriptor_database.py, types in the root namespace are
|
||||
generated with a leading period. This function removes that prefix.
|
||||
|
||||
Args:
|
||||
name: A str, the fully-qualified symbol name.
|
||||
|
||||
Returns:
|
||||
A str, the normalized fully-qualified symbol name.
|
||||
"""
|
||||
return name.lstrip('.')
|
||||
|
||||
|
||||
class DescriptorPool(object):
  """A collection of protobufs dynamically constructed by descriptor protos."""

  def __init__(self, descriptor_db=None):
    """Initializes a Pool of proto buffs.

    The descriptor_db argument to the constructor is provided to allow
    specialized file descriptor proto lookup code to be triggered on demand. An
    example would be an implementation which will read and compile a file
    specified in a call to FindFileByName() and not require the call to Add()
    at all. Results from this database will be cached internally here as well.

    Args:
      descriptor_db: A secondary source of file descriptors.
    """

    # Primary storage of raw FileDescriptorProtos added via Add().
    self._internal_db = descriptor_database.DescriptorDatabase()
    # Optional fallback lookup source; consulted only on cache/db misses.
    self._descriptor_db = descriptor_db
    # Caches of already-built descriptor objects, keyed by full name / file.
    self._descriptors = {}
    self._enum_descriptors = {}
    self._file_descriptors = {}

  def Add(self, file_desc_proto):
    """Adds the FileDescriptorProto and its types to this pool.

    Args:
      file_desc_proto: The FileDescriptorProto to add.
    """

    self._internal_db.Add(file_desc_proto)

  def AddDescriptor(self, desc):
    """Adds a Descriptor to the pool, non-recursively.

    If the Descriptor contains nested messages or enums, the caller must
    explicitly register them. This method also registers the FileDescriptor
    associated with the message.

    Args:
      desc: A Descriptor.
    """
    if not isinstance(desc, descriptor.Descriptor):
      raise TypeError('Expected instance of descriptor.Descriptor.')

    self._descriptors[desc.full_name] = desc
    self.AddFileDescriptor(desc.file)

  def AddEnumDescriptor(self, enum_desc):
    """Adds an EnumDescriptor to the pool.

    This method also registers the FileDescriptor associated with the message.

    Args:
      enum_desc: An EnumDescriptor.
    """

    if not isinstance(enum_desc, descriptor.EnumDescriptor):
      raise TypeError('Expected instance of descriptor.EnumDescriptor.')

    self._enum_descriptors[enum_desc.full_name] = enum_desc
    self.AddFileDescriptor(enum_desc.file)

  def AddFileDescriptor(self, file_desc):
    """Adds a FileDescriptor to the pool, non-recursively.

    If the FileDescriptor contains messages or enums, the caller must explicitly
    register them.

    Args:
      file_desc: A FileDescriptor.
    """

    if not isinstance(file_desc, descriptor.FileDescriptor):
      raise TypeError('Expected instance of descriptor.FileDescriptor.')
    self._file_descriptors[file_desc.name] = file_desc

  def FindFileByName(self, file_name):
    """Gets a FileDescriptor by file name.

    Args:
      file_name: The path to the file to get a descriptor for.

    Returns:
      A FileDescriptor for the named file.

    Raises:
      KeyError: if the file can not be found in the pool.
    """

    # Fast path: already-built descriptor in the cache.
    try:
      return self._file_descriptors[file_name]
    except KeyError:
      pass

    # Fall back to the internal database, then the optional external one.
    try:
      file_proto = self._internal_db.FindFileByName(file_name)
    except KeyError:
      _, error, _ = sys.exc_info()  #PY25 compatible for GAE.
      if self._descriptor_db:
        file_proto = self._descriptor_db.FindFileByName(file_name)
      else:
        raise error
    if not file_proto:
      raise KeyError('Cannot find a file named %s' % file_name)
    return self._ConvertFileProtoToFileDescriptor(file_proto)

  def FindFileContainingSymbol(self, symbol):
    """Gets the FileDescriptor for the file containing the specified symbol.

    Args:
      symbol: The name of the symbol to search for.

    Returns:
      A FileDescriptor that contains the specified symbol.

    Raises:
      KeyError: if the file can not be found in the pool.
    """

    symbol = _NormalizeFullyQualifiedName(symbol)
    # Check both descriptor caches before touching the databases.
    try:
      return self._descriptors[symbol].file
    except KeyError:
      pass

    try:
      return self._enum_descriptors[symbol].file
    except KeyError:
      pass

    try:
      file_proto = self._internal_db.FindFileContainingSymbol(symbol)
    except KeyError:
      _, error, _ = sys.exc_info()  #PY25 compatible for GAE.
      if self._descriptor_db:
        file_proto = self._descriptor_db.FindFileContainingSymbol(symbol)
      else:
        raise error
    if not file_proto:
      raise KeyError('Cannot find a file containing %s' % symbol)
    return self._ConvertFileProtoToFileDescriptor(file_proto)

  def FindMessageTypeByName(self, full_name):
    """Loads the named descriptor from the pool.

    Args:
      full_name: The full name of the descriptor to load.

    Returns:
      The descriptor for the named type.
    """

    full_name = _NormalizeFullyQualifiedName(full_name)
    if full_name not in self._descriptors:
      # Building the containing file populates self._descriptors as a
      # side effect of _ConvertFileProtoToFileDescriptor.
      self.FindFileContainingSymbol(full_name)
    return self._descriptors[full_name]

  def FindEnumTypeByName(self, full_name):
    """Loads the named enum descriptor from the pool.

    Args:
      full_name: The full name of the enum descriptor to load.

    Returns:
      The enum descriptor for the named type.
    """

    full_name = _NormalizeFullyQualifiedName(full_name)
    if full_name not in self._enum_descriptors:
      # Same side-effect-driven population as FindMessageTypeByName.
      self.FindFileContainingSymbol(full_name)
    return self._enum_descriptors[full_name]

  def _ConvertFileProtoToFileDescriptor(self, file_proto):
    """Creates a FileDescriptor from a proto or returns a cached copy.

    This method also has the side effect of loading all the symbols found in
    the file into the appropriate dictionaries in the pool.

    Args:
      file_proto: The proto to convert.

    Returns:
      A FileDescriptor matching the passed in proto.
    """

    if file_proto.name not in self._file_descriptors:
      built_deps = list(self._GetDeps(file_proto.dependency))
      direct_deps = [self.FindFileByName(n) for n in file_proto.dependency]

      file_descriptor = descriptor.FileDescriptor(
          name=file_proto.name,
          package=file_proto.package,
          options=file_proto.options,
          serialized_pb=file_proto.SerializeToString(),
          dependencies=direct_deps)
      scope = {}

      # This loop extracts all the message and enum types from all the
      # dependencies of the file_proto. This is necessary to create the
      # scope of available message types when defining the passed in
      # file proto.
      for dependency in built_deps:
        scope.update(self._ExtractSymbols(
            dependency.message_types_by_name.values()))
        scope.update((_PrefixWithDot(enum.full_name), enum)
                     for enum in dependency.enum_types_by_name.values())

      for message_type in file_proto.message_type:
        message_desc = self._ConvertMessageDescriptor(
            message_type, file_proto.package, file_descriptor, scope)
        file_descriptor.message_types_by_name[message_desc.name] = message_desc

      for enum_type in file_proto.enum_type:
        file_descriptor.enum_types_by_name[enum_type.name] = (
            self._ConvertEnumDescriptor(enum_type, file_proto.package,
                                        file_descriptor, None, scope))

      for index, extension_proto in enumerate(file_proto.extension):
        extension_desc = self.MakeFieldDescriptor(
            extension_proto, file_proto.package, index, is_extension=True)
        extension_desc.containing_type = self._GetTypeFromScope(
            file_descriptor.package, extension_proto.extendee, scope)
        self.SetFieldType(extension_proto, extension_desc,
                          file_descriptor.package, scope)
        file_descriptor.extensions_by_name[extension_desc.name] = extension_desc

      # Field types can only be resolved after every message in the file has
      # been converted into scope, so this is a separate second pass.
      for desc_proto in file_proto.message_type:
        self.SetAllFieldTypes(file_proto.package, desc_proto, scope)

      if file_proto.package:
        desc_proto_prefix = _PrefixWithDot(file_proto.package)
      else:
        desc_proto_prefix = ''

      for desc_proto in file_proto.message_type:
        desc = self._GetTypeFromScope(desc_proto_prefix, desc_proto.name, scope)
        file_descriptor.message_types_by_name[desc_proto.name] = desc
      self.Add(file_proto)
      self._file_descriptors[file_proto.name] = file_descriptor

    return self._file_descriptors[file_proto.name]

  def _ConvertMessageDescriptor(self, desc_proto, package=None, file_desc=None,
                                scope=None):
    """Adds the proto to the pool in the specified package.

    Args:
      desc_proto: The descriptor_pb2.DescriptorProto protobuf message.
      package: The package the proto should be located in.
      file_desc: The file containing this message.
      scope: Dict mapping short and full symbols to message and enum types.

    Returns:
      The added descriptor.
    """

    if package:
      desc_name = '.'.join((package, desc_proto.name))
    else:
      desc_name = desc_proto.name

    if file_desc is None:
      file_name = None
    else:
      file_name = file_desc.name

    if scope is None:
      scope = {}

    # Convert children first so they are available in scope for fields.
    nested = [
        self._ConvertMessageDescriptor(nested, desc_name, file_desc, scope)
        for nested in desc_proto.nested_type]
    enums = [
        self._ConvertEnumDescriptor(enum, desc_name, file_desc, None, scope)
        for enum in desc_proto.enum_type]
    fields = [self.MakeFieldDescriptor(field, desc_name, index)
              for index, field in enumerate(desc_proto.field)]
    extensions = [
        self.MakeFieldDescriptor(extension, desc_name, index, is_extension=True)
        for index, extension in enumerate(desc_proto.extension)]
    oneofs = [
        descriptor.OneofDescriptor(desc.name, '.'.join((desc_name, desc.name)),
                                   index, None, [])
        for index, desc in enumerate(desc_proto.oneof_decl)]
    extension_ranges = [(r.start, r.end) for r in desc_proto.extension_range]
    if extension_ranges:
      is_extendable = True
    else:
      is_extendable = False
    desc = descriptor.Descriptor(
        name=desc_proto.name,
        full_name=desc_name,
        filename=file_name,
        containing_type=None,
        fields=fields,
        oneofs=oneofs,
        nested_types=nested,
        enum_types=enums,
        extensions=extensions,
        options=desc_proto.options,
        is_extendable=is_extendable,
        extension_ranges=extension_ranges,
        file=file_desc,
        serialized_start=None,
        serialized_end=None)
    # Back-link children to this newly created container.
    for nested in desc.nested_types:
      nested.containing_type = desc
    for enum in desc.enum_types:
      enum.containing_type = desc
    # Wire each oneof-member field to its OneofDescriptor and vice versa.
    for field_index, field_desc in enumerate(desc_proto.field):
      if field_desc.HasField('oneof_index'):
        oneof_index = field_desc.oneof_index
        oneofs[oneof_index].fields.append(fields[field_index])
        fields[field_index].containing_oneof = oneofs[oneof_index]

    scope[_PrefixWithDot(desc_name)] = desc
    self._descriptors[desc_name] = desc
    return desc

  def _ConvertEnumDescriptor(self, enum_proto, package=None, file_desc=None,
                             containing_type=None, scope=None):
    """Make a protobuf EnumDescriptor given an EnumDescriptorProto protobuf.

    Args:
      enum_proto: The descriptor_pb2.EnumDescriptorProto protobuf message.
      package: Optional package name for the new message EnumDescriptor.
      file_desc: The file containing the enum descriptor.
      containing_type: The type containing this enum.
      scope: Scope containing available types.

    Returns:
      The added descriptor
    """

    if package:
      enum_name = '.'.join((package, enum_proto.name))
    else:
      enum_name = enum_proto.name

    if file_desc is None:
      file_name = None
    else:
      file_name = file_desc.name

    values = [self._MakeEnumValueDescriptor(value, index)
              for index, value in enumerate(enum_proto.value)]
    desc = descriptor.EnumDescriptor(name=enum_proto.name,
                                     full_name=enum_name,
                                     filename=file_name,
                                     file=file_desc,
                                     values=values,
                                     containing_type=containing_type,
                                     options=enum_proto.options)
    scope['.%s' % enum_name] = desc
    self._enum_descriptors[enum_name] = desc
    return desc

  def MakeFieldDescriptor(self, field_proto, message_name, index,
                          is_extension=False):
    """Creates a field descriptor from a FieldDescriptorProto.

    For message and enum type fields, this method will do a look up
    in the pool for the appropriate descriptor for that type. If it
    is unavailable, it will fall back to the _source function to
    create it. If this type is still unavailable, construction will
    fail.

    Args:
      field_proto: The proto describing the field.
      message_name: The name of the containing message.
      index: Index of the field
      is_extension: Indication that this field is for an extension.

    Returns:
      An initialized FieldDescriptor object
    """

    if message_name:
      full_name = '.'.join((message_name, field_proto.name))
    else:
      full_name = field_proto.name

    # Type-related attributes (cpp_type, message_type, enum_type, defaults)
    # are left unset here and filled in later by SetFieldType.
    return descriptor.FieldDescriptor(
        name=field_proto.name,
        full_name=full_name,
        index=index,
        number=field_proto.number,
        type=field_proto.type,
        cpp_type=None,
        message_type=None,
        enum_type=None,
        containing_type=None,
        label=field_proto.label,
        has_default_value=False,
        default_value=None,
        is_extension=is_extension,
        extension_scope=None,
        options=field_proto.options)

  def SetAllFieldTypes(self, package, desc_proto, scope):
    """Sets all the descriptor's fields's types.

    This method also sets the containing types on any extensions.

    Args:
      package: The current package of desc_proto.
      desc_proto: The message descriptor to update.
      scope: Enclosing scope of available types.
    """

    package = _PrefixWithDot(package)

    main_desc = self._GetTypeFromScope(package, desc_proto.name, scope)

    if package == '.':
      nested_package = _PrefixWithDot(desc_proto.name)
    else:
      nested_package = '.'.join([package, desc_proto.name])

    # desc_proto.field and main_desc.fields were built in the same order,
    # so zip pairs each proto with its corresponding descriptor.
    for field_proto, field_desc in zip(desc_proto.field, main_desc.fields):
      self.SetFieldType(field_proto, field_desc, nested_package, scope)

    for extension_proto, extension_desc in (
        zip(desc_proto.extension, main_desc.extensions)):
      extension_desc.containing_type = self._GetTypeFromScope(
          nested_package, extension_proto.extendee, scope)
      self.SetFieldType(extension_proto, extension_desc, nested_package, scope)

    for nested_type in desc_proto.nested_type:
      self.SetAllFieldTypes(nested_package, nested_type, scope)

  def SetFieldType(self, field_proto, field_desc, package, scope):
    """Sets the field's type, cpp_type, message_type and enum_type.

    Args:
      field_proto: Data about the field in proto format.
      field_desc: The descriptor to modify.
      package: The package the field's container is in.
      scope: Enclosing scope of available types.
    """
    if field_proto.type_name:
      desc = self._GetTypeFromScope(package, field_proto.type_name, scope)
    else:
      desc = None

    if not field_proto.HasField('type'):
      # Infer the type from the resolved descriptor: a Descriptor means
      # message, anything else (an EnumDescriptor) means enum.
      if isinstance(desc, descriptor.Descriptor):
        field_proto.type = descriptor.FieldDescriptor.TYPE_MESSAGE
      else:
        field_proto.type = descriptor.FieldDescriptor.TYPE_ENUM

    field_desc.cpp_type = descriptor.FieldDescriptor.ProtoTypeToCppProtoType(
        field_proto.type)

    if (field_proto.type == descriptor.FieldDescriptor.TYPE_MESSAGE
        or field_proto.type == descriptor.FieldDescriptor.TYPE_GROUP):
      field_desc.message_type = desc

    if field_proto.type == descriptor.FieldDescriptor.TYPE_ENUM:
      field_desc.enum_type = desc

    if field_proto.label == descriptor.FieldDescriptor.LABEL_REPEATED:
      field_desc.has_default_value = False
      field_desc.default_value = []
    elif field_proto.HasField('default_value'):
      field_desc.has_default_value = True
      if (field_proto.type == descriptor.FieldDescriptor.TYPE_DOUBLE or
          field_proto.type == descriptor.FieldDescriptor.TYPE_FLOAT):
        field_desc.default_value = float(field_proto.default_value)
      elif field_proto.type == descriptor.FieldDescriptor.TYPE_STRING:
        field_desc.default_value = field_proto.default_value
      elif field_proto.type == descriptor.FieldDescriptor.TYPE_BOOL:
        field_desc.default_value = field_proto.default_value.lower() == 'true'
      elif field_proto.type == descriptor.FieldDescriptor.TYPE_ENUM:
        # NOTE(review): uses the enum value's .index (position), not .number —
        # matches this version's behavior; confirm against generated defaults.
        field_desc.default_value = field_desc.enum_type.values_by_name[
            field_proto.default_value].index
      elif field_proto.type == descriptor.FieldDescriptor.TYPE_BYTES:
        field_desc.default_value = text_encoding.CUnescape(
            field_proto.default_value)
      else:
        # All remaining scalar types are integral.
        field_desc.default_value = int(field_proto.default_value)
    else:
      field_desc.has_default_value = False
      field_desc.default_value = None

    field_desc.type = field_proto.type

  def _MakeEnumValueDescriptor(self, value_proto, index):
    """Creates a enum value descriptor object from a enum value proto.

    Args:
      value_proto: The proto describing the enum value.
      index: The index of the enum value.

    Returns:
      An initialized EnumValueDescriptor object.
    """

    return descriptor.EnumValueDescriptor(
        name=value_proto.name,
        index=index,
        number=value_proto.number,
        options=value_proto.options,
        type=None)

  def _ExtractSymbols(self, descriptors):
    """Pulls out all the symbols from descriptor protos.

    Args:
      descriptors: The messages to extract descriptors from.
    Yields:
      A two element tuple of the type name and descriptor object.
    """

    for desc in descriptors:
      yield (_PrefixWithDot(desc.full_name), desc)
      for symbol in self._ExtractSymbols(desc.nested_types):
        yield symbol
      for enum in desc.enum_types:
        yield (_PrefixWithDot(enum.full_name), enum)

  def _GetDeps(self, dependencies):
    """Recursively finds dependencies for file protos.

    Args:
      dependencies: The names of the files being depended on.

    Yields:
      Each direct and indirect dependency.
    """

    for dependency in dependencies:
      dep_desc = self.FindFileByName(dependency)
      yield dep_desc
      for parent_dep in dep_desc.dependencies:
        yield parent_dep

  def _GetTypeFromScope(self, package, type_name, scope):
    """Finds a given type name in the current scope.

    Args:
      package: The package the proto should be located in.
      type_name: The name of the type to be found in the scope.
      scope: Dict mapping short and full symbols to message and enum types.

    Returns:
      The descriptor for the requested type.
    """
    if type_name not in scope:
      # Walk outward from the innermost package, trying each enclosing
      # scope until the (possibly relative) type name resolves.
      components = _PrefixWithDot(package).split('.')
      while components:
        possible_match = '.'.join(components + [type_name])
        if possible_match in scope:
          type_name = possible_match
          break
        else:
          components.pop(-1)
    return scope[type_name]
|
||||
|
||||
|
||||
def _PrefixWithDot(name):
|
||||
return name if name.startswith('.') else '.%s' % name
|
||||
@@ -0,0 +1,139 @@
|
||||
// Protocol Buffers - Google's data interchange format
|
||||
// Copyright 2008 Google Inc. All rights reserved.
|
||||
// https://developers.google.com/protocol-buffers/
|
||||
//
|
||||
// Redistribution and use in source and binary forms, with or without
|
||||
// modification, are permitted provided that the following conditions are
|
||||
// met:
|
||||
//
|
||||
// * Redistributions of source code must retain the above copyright
|
||||
// notice, this list of conditions and the following disclaimer.
|
||||
// * Redistributions in binary form must reproduce the above
|
||||
// copyright notice, this list of conditions and the following disclaimer
|
||||
// in the documentation and/or other materials provided with the
|
||||
// distribution.
|
||||
// * Neither the name of Google Inc. nor the names of its
|
||||
// contributors may be used to endorse or promote products derived from
|
||||
// this software without specific prior written permission.
|
||||
//
|
||||
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
||||
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
||||
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
||||
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
||||
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
||||
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
||||
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
||||
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
||||
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
||||
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
#include <Python.h>

namespace google {
namespace protobuf {
namespace python {

// Compile-time implementation selector exported to Python as "api_version":
//   0 -> pure Python           (PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION=python)
//   1 -> C++ APIv1 (-DPYTHON_PROTO2_CPP_IMPL_V1; Python 2 only)
//   2 -> C++ APIv2 (-DPYTHON_PROTO2_CPP_IMPL_V2)
#ifdef PYTHON_PROTO2_CPP_IMPL_V1
#if PY_MAJOR_VERSION >= 3
#error "PYTHON_PROTO2_CPP_IMPL_V1 is not supported under Python 3."
#endif
static int kImplVersion = 1;
#else
#ifdef PYTHON_PROTO2_CPP_IMPL_V2
static int kImplVersion = 2;
#else
#ifdef PYTHON_PROTO2_PYTHON_IMPL
static int kImplVersion = 0;
#else

// No flag given: Python 3 defaults to the fast C++ APIv2, while Python 2
// stays on the pure-Python implementation until compatibility issues are
// worked around.
#if PY_MAJOR_VERSION >= 3
static int kImplVersion = 2;
#else
static int kImplVersion = 0;
#endif

#endif  // PYTHON_PROTO2_PYTHON_IMPL
#endif  // PYTHON_PROTO2_CPP_IMPL_V2
#endif  // PYTHON_PROTO2_CPP_IMPL_V1

static const char* kImplVersionName = "api_version";

static const char* kModuleName = "_api_implementation";
static const char kModuleDocstring[] =
"_api_implementation is a module that exposes compile-time constants that\n"
"determine the default API implementation to use for Python proto2.\n"
"\n"
"It complements api_implementation.py by setting defaults using compile-time\n"
"constants defined in C, such that one can set defaults at compilation\n"
"(e.g. with blaze flag --copt=-DPYTHON_PROTO2_CPP_IMPL_V2).";

#if PY_MAJOR_VERSION >= 3
static struct PyModuleDef _module = {
  PyModuleDef_HEAD_INIT,
  kModuleName,
  kModuleDocstring,
  -1,
  NULL,
  NULL,
  NULL,
  NULL,
  NULL
};
#define INITFUNC PyInit__api_implementation
#define INITFUNC_ERRORVAL NULL
#else
#define INITFUNC init_api_implementation
#define INITFUNC_ERRORVAL
#endif

extern "C" {
PyMODINIT_FUNC INITFUNC() {
#if PY_MAJOR_VERSION >= 3
  PyObject *module = PyModule_Create(&_module);
#else
  PyObject *module = Py_InitModule3(
      const_cast<char*>(kModuleName),
      NULL,
      const_cast<char*>(kModuleDocstring));
#endif
  if (module == NULL) {
    return INITFUNC_ERRORVAL;
  }

  // Adds the module variable "api_version".
  if (PyModule_AddIntConstant(
      module,
      const_cast<char*>(kImplVersionName),
      kImplVersion))
#if PY_MAJOR_VERSION < 3
    return;
#else
    { Py_DECREF(module); return NULL; }

  return module;
#endif
}
}

}  // namespace python
}  // namespace protobuf
}  // namespace google
||||
@@ -0,0 +1,89 @@
|
||||
# Protocol Buffers - Google's data interchange format
|
||||
# Copyright 2008 Google Inc. All rights reserved.
|
||||
# https://developers.google.com/protocol-buffers/
|
||||
#
|
||||
# Redistribution and use in source and binary forms, with or without
|
||||
# modification, are permitted provided that the following conditions are
|
||||
# met:
|
||||
#
|
||||
# * Redistributions of source code must retain the above copyright
|
||||
# notice, this list of conditions and the following disclaimer.
|
||||
# * Redistributions in binary form must reproduce the above
|
||||
# copyright notice, this list of conditions and the following disclaimer
|
||||
# in the documentation and/or other materials provided with the
|
||||
# distribution.
|
||||
# * Neither the name of Google Inc. nor the names of its
|
||||
# contributors may be used to endorse or promote products derived from
|
||||
# this software without specific prior written permission.
|
||||
#
|
||||
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
||||
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
||||
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
||||
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
||||
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
||||
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
||||
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
||||
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
||||
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
||||
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
"""Determine which implementation of the protobuf API is used in this process.
|
||||
"""
|
||||
|
||||
import os
|
||||
import sys
|
||||
|
||||
try:
|
||||
# pylint: disable=g-import-not-at-top
|
||||
from google.protobuf.internal import _api_implementation
|
||||
# The compile-time constants in the _api_implementation module can be used to
|
||||
# switch to a certain implementation of the Python API at build time.
|
||||
_api_version = _api_implementation.api_version
|
||||
del _api_implementation
|
||||
except ImportError:
|
||||
_api_version = 0
|
||||
|
||||
_default_implementation_type = (
|
||||
'python' if _api_version == 0 else 'cpp')
|
||||
_default_version_str = (
|
||||
'1' if _api_version <= 1 else '2')
|
||||
|
||||
# This environment variable can be used to switch to a certain implementation
|
||||
# of the Python API, overriding the compile-time constants in the
|
||||
# _api_implementation module. Right now only 'python' and 'cpp' are valid
|
||||
# values. Any other value will be ignored.
|
||||
_implementation_type = os.getenv('PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION',
|
||||
_default_implementation_type)
|
||||
|
||||
if _implementation_type != 'python':
|
||||
_implementation_type = 'cpp'
|
||||
|
||||
# This environment variable can be used to switch between the two
|
||||
# 'cpp' implementations, overriding the compile-time constants in the
|
||||
# _api_implementation module. Right now only 1 and 2 are valid values. Any other
|
||||
# value will be ignored.
|
||||
_implementation_version_str = os.getenv(
|
||||
'PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION_VERSION',
|
||||
_default_version_str)
|
||||
|
||||
if _implementation_version_str not in ('1', '2'):
|
||||
raise ValueError(
|
||||
"unsupported PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION_VERSION: '" +
|
||||
_implementation_version_str + "' (supported versions: 1, 2)"
|
||||
)
|
||||
|
||||
_implementation_version = int(_implementation_version_str)
|
||||
|
||||
|
||||
# Usage of this function is discouraged. Clients shouldn't care which
|
||||
# implementation of the API is in use. Note that there is no guarantee
|
||||
# that differences between APIs will be maintained.
|
||||
# Please don't use this function if possible.
|
||||
def Type():
|
||||
return _implementation_type
|
||||
|
||||
|
||||
# See comment on 'Type' above.
|
||||
def Version():
|
||||
return _implementation_version
|
||||
@@ -0,0 +1,63 @@
|
||||
#! /usr/bin/python
|
||||
#
|
||||
# Protocol Buffers - Google's data interchange format
|
||||
# Copyright 2008 Google Inc. All rights reserved.
|
||||
# https://developers.google.com/protocol-buffers/
|
||||
#
|
||||
# Redistribution and use in source and binary forms, with or without
|
||||
# modification, are permitted provided that the following conditions are
|
||||
# met:
|
||||
#
|
||||
# * Redistributions of source code must retain the above copyright
|
||||
# notice, this list of conditions and the following disclaimer.
|
||||
# * Redistributions in binary form must reproduce the above
|
||||
# copyright notice, this list of conditions and the following disclaimer
|
||||
# in the documentation and/or other materials provided with the
|
||||
# distribution.
|
||||
# * Neither the name of Google Inc. nor the names of its
|
||||
# contributors may be used to endorse or promote products derived from
|
||||
# this software without specific prior written permission.
|
||||
#
|
||||
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
||||
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
||||
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
||||
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
||||
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
||||
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
||||
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
||||
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
||||
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
||||
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
"""Test that the api_implementation defaults are what we expect."""
|
||||
|
||||
import os
|
||||
import sys
|
||||
# Clear environment implementation settings before the google3 imports.
|
||||
os.environ.pop('PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION', None)
|
||||
os.environ.pop('PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION_VERSION', None)
|
||||
|
||||
# pylint: disable=g-import-not-at-top
|
||||
from google.apputils import basetest
|
||||
from google.protobuf.internal import api_implementation
|
||||
|
||||
|
||||
class ApiImplementationDefaultTest(basetest.TestCase):
|
||||
|
||||
if sys.version_info.major <= 2:
|
||||
|
||||
def testThatPythonIsTheDefault(self):
|
||||
"""If -DPYTHON_PROTO_*IMPL* was given at build time, this may fail."""
|
||||
self.assertEqual('python', api_implementation.Type())
|
||||
|
||||
else:
|
||||
|
||||
def testThatCppApiV2IsTheDefault(self):
|
||||
"""If -DPYTHON_PROTO_*IMPL* was given at build time, this may fail."""
|
||||
self.assertEqual('cpp', api_implementation.Type())
|
||||
self.assertEqual(2, api_implementation.Version())
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
basetest.main()
|
||||
@@ -0,0 +1,269 @@
|
||||
# Protocol Buffers - Google's data interchange format
|
||||
# Copyright 2008 Google Inc. All rights reserved.
|
||||
# https://developers.google.com/protocol-buffers/
|
||||
#
|
||||
# Redistribution and use in source and binary forms, with or without
|
||||
# modification, are permitted provided that the following conditions are
|
||||
# met:
|
||||
#
|
||||
# * Redistributions of source code must retain the above copyright
|
||||
# notice, this list of conditions and the following disclaimer.
|
||||
# * Redistributions in binary form must reproduce the above
|
||||
# copyright notice, this list of conditions and the following disclaimer
|
||||
# in the documentation and/or other materials provided with the
|
||||
# distribution.
|
||||
# * Neither the name of Google Inc. nor the names of its
|
||||
# contributors may be used to endorse or promote products derived from
|
||||
# this software without specific prior written permission.
|
||||
#
|
||||
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
||||
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
||||
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
||||
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
||||
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
||||
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
||||
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
||||
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
||||
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
||||
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
"""Contains container classes to represent different protocol buffer types.
|
||||
|
||||
This file defines container classes which represent categories of protocol
|
||||
buffer field types which need extra maintenance. Currently these categories
|
||||
are:
|
||||
- Repeated scalar fields - These are all repeated fields which aren't
|
||||
composite (e.g. they are of simple types like int32, string, etc).
|
||||
- Repeated composite fields - Repeated fields which are composite. This
|
||||
includes groups and nested messages.
|
||||
"""
|
||||
|
||||
__author__ = 'petar@google.com (Petar Petrov)'
|
||||
|
||||
|
||||
class BaseContainer(object):

  """Behaviour shared by the repeated-field container classes."""

  # Minimizes memory usage and disallows assignment to other attributes.
  __slots__ = ['_message_listener', '_values']

  def __init__(self, message_listener):
    """
    Args:
      message_listener: A MessageListener implementation.
        The RepeatedScalarFieldContainer will call this object's
        Modified() method when it is modified.
    """
    self._message_listener = message_listener
    self._values = []

  def __getitem__(self, key):
    """Retrieves item by the specified key."""
    return self._values[key]

  def __len__(self):
    """Returns the number of elements in the container."""
    return len(self._values)

  def __ne__(self, other):
    """Checks if another instance isn't equal to this one."""
    # Concrete subclasses define __eq__; negate it here.
    return not (self == other)

  def __hash__(self):
    raise TypeError('unhashable object')

  def __repr__(self):
    return repr(self._values)

  def sort(self, *args, **kwargs):
    # 'sort_function' is the legacy spelling of the 'cmp' keyword; rewrite
    # it so old callers keep working.  NOTE(review): 'cmp' only exists on
    # Python 2's list.sort -- this mirrors the original upstream behavior.
    if 'sort_function' in kwargs:
      kwargs['cmp'] = kwargs.pop('sort_function')
    self._values.sort(*args, **kwargs)
||||
|
||||
|
||||
class RepeatedScalarFieldContainer(BaseContainer):

  """Simple, type-checked, list-like container for holding repeated scalars."""

  # Disallows assignment to other attributes.
  __slots__ = ['_type_checker']

  def __init__(self, message_listener, type_checker):
    """
    Args:
      message_listener: A MessageListener implementation.
        The RepeatedScalarFieldContainer will call this object's
        Modified() method when it is modified.
      type_checker: A type_checkers.ValueChecker instance to run on elements
        inserted into this container.
    """
    super(RepeatedScalarFieldContainer, self).__init__(message_listener)
    self._type_checker = type_checker

  def append(self, value):
    """Appends an item to the list. Similar to list.append()."""
    self._values.append(self._type_checker.CheckValue(value))
    # Only notify once; further edits while already dirty need no signal.
    if not self._message_listener.dirty:
      self._message_listener.Modified()

  def insert(self, key, value):
    """Inserts the item at the specified position. Similar to list.insert()."""
    self._values.insert(key, self._type_checker.CheckValue(value))
    if not self._message_listener.dirty:
      self._message_listener.Modified()

  def extend(self, elem_seq):
    """Extends by appending the given sequence. Similar to list.extend()."""
    if not elem_seq:
      return

    # Type-check every element up front so a failure leaves the container
    # unchanged.
    checked_values = [self._type_checker.CheckValue(elem) for elem in elem_seq]
    self._values.extend(checked_values)
    self._message_listener.Modified()

  def MergeFrom(self, other):
    """Appends the contents of another repeated field of the same type to this
    one. We do not check the types of the individual fields.
    """
    self._values.extend(other._values)
    self._message_listener.Modified()

  def remove(self, elem):
    """Removes an item from the list. Similar to list.remove()."""
    self._values.remove(elem)
    self._message_listener.Modified()

  def __setitem__(self, key, value):
    """Sets the item on the specified position."""
    if isinstance(key, slice):  # PY3
      if key.step is not None:
        raise ValueError('Extended slices not supported')
      self.__setslice__(key.start, key.stop, value)
    else:
      self._values[key] = self._type_checker.CheckValue(value)
      self._message_listener.Modified()

  def __getslice__(self, start, stop):
    """Retrieves the subset of items from between the specified indices."""
    return self._values[start:stop]

  def __setslice__(self, start, stop, values):
    """Sets the subset of items from between the specified indices."""
    checked_values = [self._type_checker.CheckValue(value) for value in values]
    self._values[start:stop] = checked_values
    self._message_listener.Modified()

  def __delitem__(self, key):
    """Deletes the item at the specified position."""
    del self._values[key]
    self._message_listener.Modified()

  def __delslice__(self, start, stop):
    """Deletes the subset of items from between the specified indices."""
    del self._values[start:stop]
    self._message_listener.Modified()

  def __eq__(self, other):
    """Compares the current instance with another one."""
    if self is other:
      return True
    # Fast path: same container type compares backing lists directly.
    if isinstance(other, self.__class__):
      return other._values == self._values
    # Otherwise fall back to comparing against an arbitrary sequence.
    return other == self._values
||||
|
||||
|
||||
class RepeatedCompositeFieldContainer(BaseContainer):

  """Simple, list-like container for holding repeated composite fields."""

  # Disallows assignment to other attributes.
  __slots__ = ['_message_descriptor']

  def __init__(self, message_listener, message_descriptor):
    """
    Note that we pass in a descriptor instead of the generated directly,
    since at the time we construct a _RepeatedCompositeFieldContainer we
    haven't yet necessarily initialized the type that will be contained in the
    container.

    Args:
      message_listener: A MessageListener implementation.
        The RepeatedCompositeFieldContainer will call this object's
        Modified() method when it is modified.
      message_descriptor: A Descriptor instance describing the protocol type
        that should be present in this container.  We'll use the
        _concrete_class field of this descriptor when the client calls add().
    """
    super(RepeatedCompositeFieldContainer, self).__init__(message_listener)
    self._message_descriptor = message_descriptor

  def add(self, **kwargs):
    """Adds a new element at the end of the list and returns it. Keyword
    arguments may be used to initialize the element.
    """
    element = self._message_descriptor._concrete_class(**kwargs)
    element._SetListener(self._message_listener)
    self._values.append(element)
    if not self._message_listener.dirty:
      self._message_listener.Modified()
    return element

  def extend(self, elem_seq):
    """Extends by appending the given sequence of elements of the same type
    as this one, copying each individual message.
    """
    # Hoist attribute lookups out of the copy loop.
    message_class = self._message_descriptor._concrete_class
    listener = self._message_listener
    values = self._values
    for src_message in elem_seq:
      copied = message_class()
      copied._SetListener(listener)
      copied.MergeFrom(src_message)
      values.append(copied)
    listener.Modified()

  def MergeFrom(self, other):
    """Appends the contents of another repeated field of the same type to this
    one, copying each individual message.
    """
    self.extend(other._values)

  def remove(self, elem):
    """Removes an item from the list. Similar to list.remove()."""
    self._values.remove(elem)
    self._message_listener.Modified()

  def __getslice__(self, start, stop):
    """Retrieves the subset of items from between the specified indices."""
    return self._values[start:stop]

  def __delitem__(self, key):
    """Deletes the item at the specified position."""
    del self._values[key]
    self._message_listener.Modified()

  def __delslice__(self, start, stop):
    """Deletes the subset of items from between the specified indices."""
    del self._values[start:stop]
    self._message_listener.Modified()

  def __eq__(self, other):
    """Compares the current instance with another one."""
    if self is other:
      return True
    if not isinstance(other, self.__class__):
      raise TypeError('Can only compare repeated composite fields against '
                      'other repeated composite fields.')
    return self._values == other._values
||||
@@ -0,0 +1,663 @@
|
||||
# Protocol Buffers - Google's data interchange format
|
||||
# Copyright 2008 Google Inc. All rights reserved.
|
||||
# https://developers.google.com/protocol-buffers/
|
||||
#
|
||||
# Redistribution and use in source and binary forms, with or without
|
||||
# modification, are permitted provided that the following conditions are
|
||||
# met:
|
||||
#
|
||||
# * Redistributions of source code must retain the above copyright
|
||||
# notice, this list of conditions and the following disclaimer.
|
||||
# * Redistributions in binary form must reproduce the above
|
||||
# copyright notice, this list of conditions and the following disclaimer
|
||||
# in the documentation and/or other materials provided with the
|
||||
# distribution.
|
||||
# * Neither the name of Google Inc. nor the names of its
|
||||
# contributors may be used to endorse or promote products derived from
|
||||
# this software without specific prior written permission.
|
||||
#
|
||||
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
||||
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
||||
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
||||
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
||||
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
||||
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
||||
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
||||
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
||||
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
||||
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
"""Contains helper functions used to create protocol message classes from
|
||||
Descriptor objects at runtime backed by the protocol buffer C++ API.
|
||||
"""
|
||||
|
||||
__author__ = 'petar@google.com (Petar Petrov)'
|
||||
|
||||
import copy_reg
|
||||
import operator
|
||||
from google.protobuf.internal import _net_proto2___python
|
||||
from google.protobuf.internal import enum_type_wrapper
|
||||
from google.protobuf import message
|
||||
|
||||
|
||||
_LABEL_REPEATED = _net_proto2___python.LABEL_REPEATED
|
||||
_LABEL_OPTIONAL = _net_proto2___python.LABEL_OPTIONAL
|
||||
_CPPTYPE_MESSAGE = _net_proto2___python.CPPTYPE_MESSAGE
|
||||
_TYPE_MESSAGE = _net_proto2___python.TYPE_MESSAGE
|
||||
|
||||
|
||||
def GetDescriptorPool():
  """Creates and returns a fresh C++ DescriptorPool object."""
  pool = _net_proto2___python.NewCDescriptorPool()
  return pool
||||
|
||||
|
||||
_pool = GetDescriptorPool()
|
||||
|
||||
|
||||
def GetFieldDescriptor(full_field_name):
  """Searches for a field descriptor given a full field name."""
  # Delegates to the shared module-level descriptor pool.
  return _pool.FindFieldByName(full_field_name)
||||
|
||||
|
||||
def BuildFile(content):
  """Registers a new proto file in the underlying C++ descriptor pool."""
  # Side effect only; the C++ layer owns the registered file.
  _net_proto2___python.BuildFile(content)
||||
|
||||
|
||||
def GetExtensionDescriptor(full_extension_name):
  """Searches for extension descriptor given a full field name."""
  # Delegates to the shared module-level descriptor pool.
  return _pool.FindExtensionByName(full_extension_name)
||||
|
||||
|
||||
def NewCMessage(full_message_name):
  """Creates a new C++ protocol message by its name."""
  message = _net_proto2___python.NewCMessage(full_message_name)
  return message
||||
|
||||
|
||||
def ScalarProperty(cdescriptor):
  """Returns a scalar property for the given descriptor."""

  def _Get(self):
    # Reads the scalar straight from the wrapped C message.
    return self._cmsg.GetScalar(cdescriptor)

  def _Set(self, value):
    # Writes the scalar straight into the wrapped C message.
    self._cmsg.SetScalar(cdescriptor, value)

  return property(_Get, _Set)
||||
|
||||
|
||||
def CompositeProperty(cdescriptor, message_type):
  """Returns a Python property the given composite field."""

  def _Get(self):
    # Lazily create and cache the Python wrapper for the sub-message so the
    # same object is handed out on every access.
    field_name = cdescriptor.name
    cached = self._composite_fields.get(field_name, None)
    if cached is not None:
      return cached
    wrapper = message_type._concrete_class(
        __cmessage=self._cmsg.NewSubMessage(cdescriptor))
    self._composite_fields[field_name] = wrapper
    return wrapper

  return property(_Get)
||||
|
||||
|
||||
class RepeatedScalarContainer(object):
  """Container for repeated scalar fields."""

  __slots__ = ['_message', '_cfield_descriptor', '_cmsg']

  def __init__(self, msg, cfield_descriptor):
    self._message = msg
    self._cmsg = msg._cmsg
    self._cfield_descriptor = cfield_descriptor

  def append(self, value):
    self._cmsg.AddRepeatedScalar(self._cfield_descriptor, value)

  def extend(self, sequence):
    for item in sequence:
      self.append(item)

  def insert(self, key, value):
    # Read-modify-write: the C message only supports whole-field assignment
    # for positional edits.
    current = self[slice(None, None, None)]
    current.insert(key, value)
    self._cmsg.AssignRepeatedScalar(self._cfield_descriptor, current)

  def remove(self, value):
    current = self[slice(None, None, None)]
    current.remove(value)
    self._cmsg.AssignRepeatedScalar(self._cfield_descriptor, current)

  def __setitem__(self, key, value):
    current = self[slice(None, None, None)]
    current[key] = value
    self._cmsg.AssignRepeatedScalar(self._cfield_descriptor, current)

  def __getitem__(self, key):
    return self._cmsg.GetRepeatedScalar(self._cfield_descriptor, key)

  def __delitem__(self, key):
    self._cmsg.DeleteRepeatedField(self._cfield_descriptor, key)

  def __len__(self):
    return len(self[slice(None, None, None)])

  def __eq__(self, other):
    if self is other:
      return True
    # NOTE(review): operator.isSequenceType exists only on Python 2, which
    # matches this module's copy_reg-era target interpreter.
    if not operator.isSequenceType(other):
      raise TypeError(
          'Can only compare repeated scalar fields against sequences.')
    # We are presumably comparing against some other sequence type.
    return other == self[slice(None, None, None)]

  def __ne__(self, other):
    return not self == other

  def __hash__(self):
    raise TypeError('unhashable object')

  def sort(self, *args, **kwargs):
    # Maintain compatibility with the previous interface.
    if 'sort_function' in kwargs:
      kwargs['cmp'] = kwargs.pop('sort_function')
    self._cmsg.AssignRepeatedScalar(self._cfield_descriptor,
                                    sorted(self, *args, **kwargs))
||||
|
||||
|
||||
def RepeatedScalarProperty(cdescriptor):
  """Returns a Python property the given repeated scalar field."""

  def _Get(self):
    # Create the container lazily and cache it on the owning message so all
    # reads share one container instance.
    container = self._composite_fields.get(cdescriptor.name, None)
    if container is None:
      container = RepeatedScalarContainer(self, cdescriptor)
      self._composite_fields[cdescriptor.name] = container
    return container

  def _Set(self, new_value):
    # Direct assignment would bypass the container; forbid it outright.
    raise AttributeError('Assignment not allowed to repeated field '
                         '"%s" in protocol message object.' % cdescriptor.name)

  doc = 'Magic attribute generated for "%s" proto field.' % cdescriptor.name
  return property(_Get, _Set, doc=doc)
||||
|
||||
|
||||
class RepeatedCompositeContainer(object):
  """Container for repeated composite (message-typed) fields.

  Wraps a repeated message field stored in the underlying C message
  (``_cmsg``); elements are exposed as instances of ``_subclass``.
  """

  __slots__ = ['_message', '_subclass', '_cfield_descriptor', '_cmsg']

  def __init__(self, msg, cfield_descriptor, subclass):
    # Keep a reference to the owning message wrapper: it owns the C storage.
    self._message = msg
    self._cmsg = msg._cmsg
    self._subclass = subclass
    self._cfield_descriptor = cfield_descriptor

  def add(self, **kwargs):
    """Appends a new element initialized from **kwargs and returns it."""
    cmessage = self._cmsg.AddMessage(self._cfield_descriptor)
    return self._subclass(__cmessage=cmessage, __owner=self._message, **kwargs)

  def extend(self, elem_seq):
    """Extends by appending the given sequence of elements of the same type
    as this one, copying each individual message.
    """
    for message in elem_seq:
      self.add().MergeFrom(message)

  def remove(self, value):
    """Removes the first element equal to *value* (ValueError if absent)."""
    # TODO(protocol-devel): This is inefficient as it needs to generate a
    # message pointer for each message only to do index(). Move this to a C++
    # extension function.
    self.__delitem__(self[slice(None, None, None)].index(value))

  def MergeFrom(self, other):
    """Appends a copy of every message in *other* to this field."""
    for message in other[:]:
      self.add().MergeFrom(message)

  def __getitem__(self, key):
    """Returns the wrapped message at *key*; a list for slice keys."""
    cmessages = self._cmsg.GetRepeatedMessage(
        self._cfield_descriptor, key)
    subclass = self._subclass
    # A non-list result means *key* was a single index.
    if not isinstance(cmessages, list):
      return subclass(__cmessage=cmessages, __owner=self._message)

    return [subclass(__cmessage=m, __owner=self._message) for m in cmessages]

  def __delitem__(self, key):
    """Deletes the element (or slice) at *key*."""
    self._cmsg.DeleteRepeatedField(
        self._cfield_descriptor, key)

  def __len__(self):
    """Returns the element count straight from the C message."""
    return self._cmsg.FieldLength(self._cfield_descriptor)

  def __eq__(self, other):
    """Compares the current instance with another one."""
    if self is other:
      return True
    if not isinstance(other, self.__class__):
      raise TypeError('Can only compare repeated composite fields against '
                      'other repeated composite fields.')
    messages = self[slice(None, None, None)]
    other_messages = other[slice(None, None, None)]
    return messages == other_messages

  def __hash__(self):
    # Mutable container: explicitly unhashable.
    raise TypeError('unhashable object')

  def sort(self, cmp=None, key=None, reverse=False, **kwargs):
    """Sorts the field in place without copying messages (by index swaps)."""
    # Maintain compatibility with the old interface.
    if cmp is None and 'sort_function' in kwargs:
      cmp = kwargs.pop('sort_function')

    # The cmp function, if provided, is passed the results of the key function,
    # so we only need to wrap one of them.
    if key is None:
      index_key = self.__getitem__
    else:
      index_key = lambda i: key(self[i])

    # Sort the list of current indexes by the underlying object.
    # NOTE(review): Python 2 only — range() returns a list here, and
    # list.sort accepts cmp=; both are gone in Python 3.
    indexes = range(len(self))
    indexes.sort(cmp=cmp, key=index_key, reverse=reverse)

    # Apply the transposition.
    for dest, src in enumerate(indexes):
      if dest == src:
        continue
      self._cmsg.SwapRepeatedFieldElements(self._cfield_descriptor, dest, src)
      # Don't swap the same value twice.
      indexes[src] = src
|
||||
|
||||
|
||||
def RepeatedCompositeProperty(cdescriptor, message_type):
  """Returns a Python property for the given repeated composite field."""

  def Getter(self):
    # Lazily create one container per field name and cache it on the
    # message instance so repeated accesses return the same object.
    container = self._composite_fields.get(cdescriptor.name, None)
    if container is None:
      container = RepeatedCompositeContainer(
          self, cdescriptor, message_type._concrete_class)
      self._composite_fields[cdescriptor.name] = container
    return container

  def Setter(self, new_value):
    # Direct assignment would bypass the C message; force callers to
    # mutate through the container instead.
    raise AttributeError('Assignment not allowed to repeated field '
                         '"%s" in protocol message object.' % cdescriptor.name)

  doc = 'Magic attribute generated for "%s" proto field.' % cdescriptor.name
  return property(Getter, Setter, doc=doc)
|
||||
|
||||
|
||||
class ExtensionDict(object):
  """Extension dictionary added to each protocol message.

  Maps extension FieldDescriptors to their current values; non-scalar
  values are wrapped lazily and cached in ``_values``.
  """

  def __init__(self, msg):
    self._message = msg
    self._cmsg = msg._cmsg
    # Cache of Python wrapper objects handed out for extension fields.
    self._values = {}

  def __setitem__(self, extension, value):
    """Assigns *value* to an optional scalar extension field."""
    # Lazy import to avoid a circular dependency with the descriptor module.
    from google.protobuf import descriptor
    if not isinstance(extension, descriptor.FieldDescriptor):
      raise KeyError('Bad extension %r.' % (extension,))
    cdescriptor = extension._cdescriptor
    # Direct assignment is only allowed for optional scalar extensions;
    # repeated and message-typed ones must be mutated via __getitem__.
    if (cdescriptor.label != _LABEL_OPTIONAL or
        cdescriptor.cpp_type == _CPPTYPE_MESSAGE):
      raise TypeError('Extension %r is repeated and/or a composite type.' % (
          extension.full_name,))
    self._cmsg.SetScalar(cdescriptor, value)
    self._values[extension] = value

  def __getitem__(self, extension):
    """Returns the current value of the given extension field."""
    from google.protobuf import descriptor
    if not isinstance(extension, descriptor.FieldDescriptor):
      raise KeyError('Bad extension %r.' % (extension,))

    cdescriptor = extension._cdescriptor
    # Optional scalars are read straight from the C message, uncached.
    if (cdescriptor.label != _LABEL_REPEATED and
        cdescriptor.cpp_type != _CPPTYPE_MESSAGE):
      return self._cmsg.GetScalar(cdescriptor)

    ext = self._values.get(extension, None)
    if ext is not None:
      return ext

    ext = self._CreateNewHandle(extension)
    self._values[extension] = ext
    return ext

  def ClearExtension(self, extension):
    """Clears the extension field and drops any cached wrapper."""
    from google.protobuf import descriptor
    if not isinstance(extension, descriptor.FieldDescriptor):
      raise KeyError('Bad extension %r.' % (extension,))
    self._cmsg.ClearFieldByDescriptor(extension._cdescriptor)
    if extension in self._values:
      del self._values[extension]

  def HasExtension(self, extension):
    """Returns True if the extension field is present in the message."""
    from google.protobuf import descriptor
    if not isinstance(extension, descriptor.FieldDescriptor):
      raise KeyError('Bad extension %r.' % (extension,))
    return self._cmsg.HasFieldByDescriptor(extension._cdescriptor)

  def _FindExtensionByName(self, name):
    """Tries to find a known extension with the specified name.

    Args:
      name: Extension full name.

    Returns:
      Extension field descriptor.
    """
    return self._message._extensions_by_name.get(name, None)

  def _CreateNewHandle(self, extension):
    # Builds the Python wrapper (sub-message or repeated container) for a
    # non-scalar extension field.
    cdescriptor = extension._cdescriptor
    if (cdescriptor.label != _LABEL_REPEATED and
        cdescriptor.cpp_type == _CPPTYPE_MESSAGE):
      cmessage = self._cmsg.NewSubMessage(cdescriptor)
      return extension.message_type._concrete_class(__cmessage=cmessage)

    if cdescriptor.label == _LABEL_REPEATED:
      if cdescriptor.cpp_type == _CPPTYPE_MESSAGE:
        return RepeatedCompositeContainer(
            self._message, cdescriptor, extension.message_type._concrete_class)
      else:
        return RepeatedScalarContainer(self._message, cdescriptor)
    # This shouldn't happen!
    assert False
    return None
|
||||
|
||||
|
||||
def NewMessage(bases, message_descriptor, dictionary):
  """Creates a new protocol message *class*.

  Mutates *dictionary* (the future class dict) in place, adding nested
  extensions, enum values, and C field descriptors; *bases* is returned
  unchanged for the metaclass machinery.
  """
  _AddClassAttributesForNestedExtensions(message_descriptor, dictionary)
  _AddEnumValues(message_descriptor, dictionary)
  _AddDescriptors(message_descriptor, dictionary)
  return bases
|
||||
|
||||
|
||||
def InitMessage(message_descriptor, cls):
  """Constructs a new message instance (called before instance's __init__)."""
  cls._extensions_by_name = {}
  _AddInitMethod(message_descriptor, cls)
  _AddMessageMethods(message_descriptor, cls)
  _AddPropertiesForExtensions(message_descriptor, cls)
  # Register pickle support; copy_reg is Python 2 (renamed copyreg in Py3).
  # Pickling round-trips through __getstate__ and a fresh cls() instance.
  copy_reg.pickle(cls, lambda obj: (cls, (), obj.__getstate__()))
|
||||
|
||||
|
||||
def _AddDescriptors(message_descriptor, dictionary):
  """Sets up a new protocol message class dictionary.

  Args:
    message_descriptor: A Descriptor instance describing this message type.
    dictionary: Class dictionary to which we'll add a '__slots__' entry.
  """
  # Stored under the literal key '__descriptors'; it is only accessed from
  # module-level functions here, where private-name mangling does not apply.
  dictionary['__descriptors'] = {}
  for field in message_descriptor.fields:
    dictionary['__descriptors'][field.name] = GetFieldDescriptor(
        field.full_name)

  # Slots cover one attribute per field plus the fixed internal attributes.
  # iterkeys() is Python 2 only.
  dictionary['__slots__'] = list(dictionary['__descriptors'].iterkeys()) + [
      '_cmsg', '_owner', '_composite_fields', 'Extensions', '_HACK_REFCOUNTS']
|
||||
|
||||
|
||||
def _AddEnumValues(message_descriptor, dictionary):
  """Sets class-level attributes for all enum fields defined in this message.

  Args:
    message_descriptor: Descriptor object for this message type.
    dictionary: Class dictionary that should be populated.
  """
  for enum_type in message_descriptor.enum_types:
    # Expose both the enum type wrapper (Msg.MyEnum) and each value as a
    # flat class attribute (Msg.MY_VALUE), matching generated-code style.
    dictionary[enum_type.name] = enum_type_wrapper.EnumTypeWrapper(enum_type)
    for enum_value in enum_type.values:
      dictionary[enum_value.name] = enum_value.number
|
||||
|
||||
|
||||
def _AddClassAttributesForNestedExtensions(message_descriptor, dictionary):
  """Adds class attributes for the nested extensions."""
  extension_dict = message_descriptor.extensions_by_name
  # iteritems() is Python 2 only.
  for extension_name, extension_field in extension_dict.iteritems():
    # A name clash would silently shadow an existing class attribute.
    assert extension_name not in dictionary
    dictionary[extension_name] = extension_field
|
||||
|
||||
|
||||
def _AddInitMethod(message_descriptor, cls):
  """Adds an __init__ method to cls."""

  # Create and attach message field properties to the message class.
  # This can be done just once per message class, since property setters and
  # getters are passed the message instance.
  # This makes message instantiation extremely fast, and at the same time it
  # doesn't require the creation of property objects for each message instance,
  # which saves a lot of memory.
  for field in message_descriptor.fields:
    field_cdescriptor = cls.__descriptors[field.name]
    if field.label == _LABEL_REPEATED:
      if field.cpp_type == _CPPTYPE_MESSAGE:
        value = RepeatedCompositeProperty(field_cdescriptor, field.message_type)
      else:
        value = RepeatedScalarProperty(field_cdescriptor)
    elif field.cpp_type == _CPPTYPE_MESSAGE:
      value = CompositeProperty(field_cdescriptor, field.message_type)
    else:
      value = ScalarProperty(field_cdescriptor)
    setattr(cls, field.name, value)

    # Attach a constant with the field number.
    constant_name = field.name.upper() + '_FIELD_NUMBER'
    setattr(cls, constant_name, field.number)

  def Init(self, **kwargs):
    """Message constructor."""
    # '__cmessage' wraps an existing C message (sub-message/element access);
    # otherwise a fresh C message is allocated for this instance.
    cmessage = kwargs.pop('__cmessage', None)
    if cmessage:
      self._cmsg = cmessage
    else:
      self._cmsg = NewCMessage(message_descriptor.full_name)

    # Keep a reference to the owner, as the owner keeps a reference to the
    # underlying protocol buffer message.
    owner = kwargs.pop('__owner', None)
    if owner:
      self._owner = owner

    if message_descriptor.is_extendable:
      self.Extensions = ExtensionDict(self)
    else:
      # Reference counting in the C++ code is broken and depends on
      # the Extensions reference to keep this object alive during unit
      # tests (see b/4856052).  Remove this once b/4945904 is fixed.
      self._HACK_REFCOUNTS = self
    self._composite_fields = {}

    # Remaining kwargs are field initializers.  iteritems() is Python 2 only.
    for field_name, field_value in kwargs.iteritems():
      field_cdescriptor = self.__descriptors.get(field_name, None)
      if not field_cdescriptor:
        raise ValueError('Protocol message has no "%s" field.' % field_name)
      if field_cdescriptor.label == _LABEL_REPEATED:
        if field_cdescriptor.cpp_type == _CPPTYPE_MESSAGE:
          # NOTE: field_name is deliberately rebound to the container here.
          field_name = getattr(self, field_name)
          for val in field_value:
            field_name.add().MergeFrom(val)
        else:
          getattr(self, field_name).extend(field_value)
      elif field_cdescriptor.cpp_type == _CPPTYPE_MESSAGE:
        getattr(self, field_name).MergeFrom(field_value)
      else:
        setattr(self, field_name, field_value)

  # Hide module/docstring so generated classes look uniform to callers.
  Init.__module__ = None
  Init.__doc__ = None
  cls.__init__ = Init
|
||||
|
||||
|
||||
def _IsMessageSetExtension(field):
  """Checks if a field is a message set extension.

  True only for an optional message-typed extension whose containing type
  uses message_set_wire_format and whose message type equals its own
  extension scope (the classic MessageSet idiom).
  """
  return (field.is_extension and
          field.containing_type.has_options and
          field.containing_type.GetOptions().message_set_wire_format and
          field.type == _TYPE_MESSAGE and
          field.message_type == field.extension_scope and
          field.label == _LABEL_OPTIONAL)
|
||||
|
||||
|
||||
def _AddMessageMethods(message_descriptor, cls):
  """Adds the methods to a protocol message class.

  Every local function defined below is attached to *cls* by the
  locals()-scan near the bottom, so the local names here ARE the public
  method names.
  """
  if message_descriptor.is_extendable:

    def ClearExtension(self, extension):
      self.Extensions.ClearExtension(extension)

    def HasExtension(self, extension):
      return self.Extensions.HasExtension(extension)

  def HasField(self, field_name):
    return self._cmsg.HasField(field_name)

  def ClearField(self, field_name):
    # If a composite wrapper was handed out for this field, detach it from
    # the C message so the wrapper stays usable after the clear.
    child_cmessage = None
    if field_name in self._composite_fields:
      child_field = self._composite_fields[field_name]
      del self._composite_fields[field_name]

      child_cdescriptor = self.__descriptors[field_name]
      # TODO(anuraag): Support clearing repeated message fields as well.
      if (child_cdescriptor.label != _LABEL_REPEATED and
          child_cdescriptor.cpp_type == _CPPTYPE_MESSAGE):
        child_field._owner = None
        child_cmessage = child_field._cmsg

    if child_cmessage is not None:
      self._cmsg.ClearField(field_name, child_cmessage)
    else:
      self._cmsg.ClearField(field_name)

  def Clear(self):
    # Collect sub-messages that must be released by the C layer so their
    # Python wrappers survive the clear.  iteritems() is Python 2 only.
    cmessages_to_release = []
    for field_name, child_field in self._composite_fields.iteritems():
      child_cdescriptor = self.__descriptors[field_name]
      # TODO(anuraag): Support clearing repeated message fields as well.
      if (child_cdescriptor.label != _LABEL_REPEATED and
          child_cdescriptor.cpp_type == _CPPTYPE_MESSAGE):
        child_field._owner = None
        cmessages_to_release.append((child_cdescriptor, child_field._cmsg))
    self._composite_fields.clear()
    self._cmsg.Clear(cmessages_to_release)

  def IsInitialized(self, errors=None):
    # When *errors* is supplied it is extended with the missing-field names.
    if self._cmsg.IsInitialized():
      return True
    if errors is not None:
      errors.extend(self.FindInitializationErrors());
    return False

  def SerializeToString(self):
    if not self.IsInitialized():
      raise message.EncodeError(
          'Message %s is missing required fields: %s' % (
          self._cmsg.full_name, ','.join(self.FindInitializationErrors())))
    return self._cmsg.SerializeToString()

  def SerializePartialToString(self):
    return self._cmsg.SerializePartialToString()

  def ParseFromString(self, serialized):
    self.Clear()
    self.MergeFromString(serialized)

  def MergeFromString(self, serialized):
    # The C layer signals parse failure with a negative byte count.
    byte_size = self._cmsg.MergeFromString(serialized)
    if byte_size < 0:
      raise message.DecodeError('Unable to merge from string.')
    return byte_size

  def MergeFrom(self, msg):
    if not isinstance(msg, cls):
      raise TypeError(
          "Parameter to MergeFrom() must be instance of same class: "
          "expected %s got %s." % (cls.__name__, type(msg).__name__))
    self._cmsg.MergeFrom(msg._cmsg)

  def CopyFrom(self, msg):
    self._cmsg.CopyFrom(msg._cmsg)

  def ByteSize(self):
    return self._cmsg.ByteSize()

  def SetInParent(self):
    return self._cmsg.SetInParent()

  def ListFields(self):
    # Returns (descriptor, value) pairs sorted by field number, covering
    # both regular fields and set extensions.
    all_fields = []
    field_list = self._cmsg.ListFields()
    fields_by_name = cls.DESCRIPTOR.fields_by_name
    for is_extension, field_name in field_list:
      if is_extension:
        extension = cls._extensions_by_name[field_name]
        all_fields.append((extension, self.Extensions[extension]))
      else:
        field_descriptor = fields_by_name[field_name]
        all_fields.append(
            (field_descriptor, getattr(self, field_name)))
    all_fields.sort(key=lambda item: item[0].number)
    return all_fields

  def FindInitializationErrors(self):
    return self._cmsg.FindInitializationErrors()

  def __str__(self):
    return str(self._cmsg)

  def __eq__(self, other):
    if self is other:
      return True
    if not isinstance(other, self.__class__):
      return False
    return self.ListFields() == other.ListFields()

  def __ne__(self, other):
    return not self == other

  def __hash__(self):
    raise TypeError('unhashable object')

  def __unicode__(self):
    # Lazy import to prevent circular import when text_format imports this file.
    from google.protobuf import text_format
    return text_format.MessageToString(self, as_utf8=True).decode('utf-8')

  # Attach the local methods to the message class.
  # iteritems() is Python 2 only.
  for key, value in locals().copy().iteritems():
    if key not in ('key', 'value', '__builtins__', '__name__', '__doc__'):
      setattr(cls, key, value)

  # Static methods:

  def RegisterExtension(extension_handle):
    extension_handle.containing_type = cls.DESCRIPTOR
    cls._extensions_by_name[extension_handle.full_name] = extension_handle

    if _IsMessageSetExtension(extension_handle):
      # MessageSet extension.  Also register under type name.
      cls._extensions_by_name[
          extension_handle.message_type.full_name] = extension_handle
  cls.RegisterExtension = staticmethod(RegisterExtension)

  def FromString(string):
    msg = cls()
    msg.MergeFromString(string)
    return msg
  cls.FromString = staticmethod(FromString)
|
||||
|
||||
|
||||
|
||||
def _AddPropertiesForExtensions(message_descriptor, cls):
  """Adds <NAME>_FIELD_NUMBER class constants for the nested extensions.

  NOTE: despite the function name, this attaches only the field-number
  constants for extensions declared inside this message type; no property
  objects are created here.
  """
  extension_dict = message_descriptor.extensions_by_name
  # iteritems() is Python 2 only.
  for extension_name, extension_field in extension_dict.iteritems():
    constant_name = extension_name.upper() + '_FIELD_NUMBER'
    setattr(cls, constant_name, extension_field.number)
|
||||
# ==== New file: google/protobuf/internal/decoder.py (stray diff hunk marker removed) ====
|
||||
# Protocol Buffers - Google's data interchange format
|
||||
# Copyright 2008 Google Inc. All rights reserved.
|
||||
# https://developers.google.com/protocol-buffers/
|
||||
#
|
||||
# Redistribution and use in source and binary forms, with or without
|
||||
# modification, are permitted provided that the following conditions are
|
||||
# met:
|
||||
#
|
||||
# * Redistributions of source code must retain the above copyright
|
||||
# notice, this list of conditions and the following disclaimer.
|
||||
# * Redistributions in binary form must reproduce the above
|
||||
# copyright notice, this list of conditions and the following disclaimer
|
||||
# in the documentation and/or other materials provided with the
|
||||
# distribution.
|
||||
# * Neither the name of Google Inc. nor the names of its
|
||||
# contributors may be used to endorse or promote products derived from
|
||||
# this software without specific prior written permission.
|
||||
#
|
||||
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
||||
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
||||
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
||||
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
||||
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
||||
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
||||
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
||||
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
||||
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
||||
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
#PY25 compatible for GAE.
|
||||
#
|
||||
# Copyright 2009 Google Inc. All Rights Reserved.
|
||||
|
||||
"""Code for decoding protocol buffer primitives.
|
||||
|
||||
This code is very similar to encoder.py -- read the docs for that module first.
|
||||
|
||||
A "decoder" is a function with the signature:
|
||||
Decode(buffer, pos, end, message, field_dict)
|
||||
The arguments are:
|
||||
buffer: The string containing the encoded message.
|
||||
pos: The current position in the string.
|
||||
end: The position in the string where the current message ends. May be
|
||||
less than len(buffer) if we're reading a sub-message.
|
||||
message: The message object into which we're parsing.
|
||||
field_dict: message._fields (avoids a hashtable lookup).
|
||||
The decoder reads the field and stores it into field_dict, returning the new
|
||||
buffer position. A decoder for a repeated field may proactively decode all of
|
||||
the elements of that field, if they appear consecutively.
|
||||
|
||||
Note that decoders may throw any of the following:
|
||||
IndexError: Indicates a truncated message.
|
||||
struct.error: Unpacking of a fixed-width field failed.
|
||||
message.DecodeError: Other errors.
|
||||
|
||||
Decoders are expected to raise an exception if they are called with pos > end.
|
||||
This allows callers to be lax about bounds checking: it's fineto read past
|
||||
"end" as long as you are sure that someone else will notice and throw an
|
||||
exception later on.
|
||||
|
||||
Something up the call stack is expected to catch IndexError and struct.error
|
||||
and convert them to message.DecodeError.
|
||||
|
||||
Decoders are constructed using decoder constructors with the signature:
|
||||
MakeDecoder(field_number, is_repeated, is_packed, key, new_default)
|
||||
The arguments are:
|
||||
field_number: The field number of the field we want to decode.
|
||||
is_repeated: Is the field a repeated field? (bool)
|
||||
is_packed: Is the field a packed field? (bool)
|
||||
key: The key to use when looking up the field within field_dict.
|
||||
(This is actually the FieldDescriptor but nothing in this
|
||||
file should depend on that.)
|
||||
new_default: A function which takes a message object as a parameter and
|
||||
returns a new instance of the default value for this field.
|
||||
(This is called for repeated fields and sub-messages, when an
|
||||
instance does not already exist.)
|
||||
|
||||
As with encoders, we define a decoder constructor for every type of field.
|
||||
Then, for every field of every message class we construct an actual decoder.
|
||||
That decoder goes into a dict indexed by tag, so when we decode a message
|
||||
we repeatedly read a tag, look up the corresponding decoder, and invoke it.
|
||||
"""
|
||||
|
||||
__author__ = 'kenton@google.com (Kenton Varda)'
|
||||
|
||||
import struct
|
||||
import sys ##PY25
|
||||
_PY2 = sys.version_info[0] < 3 ##PY25
|
||||
from google.protobuf.internal import encoder
|
||||
from google.protobuf.internal import wire_format
|
||||
from google.protobuf import message
|
||||
|
||||
|
||||
# This will overflow and thus become IEEE-754 "infinity". We would use
|
||||
# "float('inf')" but it doesn't work on Windows pre-Python-2.6.
|
||||
_POS_INF = 1e10000
|
||||
_NEG_INF = -_POS_INF
|
||||
_NAN = _POS_INF * 0
|
||||
|
||||
|
||||
# This is not for optimization, but rather to avoid conflicts with local
|
||||
# variables named "message".
|
||||
_DecodeError = message.DecodeError
|
||||
|
||||
|
||||
def _VarintDecoder(mask, result_type):
  """Return a decoder for a basic varint value (does not include tag).

  Decoded values will be bitwise-anded with the given mask before being
  returned, e.g. to limit them to 32 bits.  The returned decoder does not
  take the usual "end" parameter -- the caller is expected to do bounds
  checking after the fact (often the caller can defer such checking until
  later).  The decoder returns a (value, new_pos) pair.
  """

  local_ord = ord
  py2 = _PY2 ##PY25
##!PY25  py2 = str is bytes
  def DecodeVarint(buffer, pos):
    result = 0
    shift = 0
    while 1:
      b = local_ord(buffer[pos]) if py2 else buffer[pos]
      # Accumulate 7 payload bits per byte, least-significant group first;
      # the high bit of each byte is the continuation flag.
      result |= ((b & 0x7f) << shift)
      pos += 1
      if not (b & 0x80):
        result &= mask
        result = result_type(result)
        return (result, pos)
      shift += 7
      if shift >= 64:
        raise _DecodeError('Too many bytes when decoding varint.')
  return DecodeVarint
|
||||
|
||||
|
||||
def _SignedVarintDecoder(mask, result_type):
  """Like _VarintDecoder() but decodes signed values."""

  local_ord = ord
  py2 = _PY2 ##PY25
##!PY25  py2 = str is bytes
  def DecodeVarint(buffer, pos):
    result = 0
    shift = 0
    while 1:
      b = local_ord(buffer[pos]) if py2 else buffer[pos]
      result |= ((b & 0x7f) << shift)
      pos += 1
      if not (b & 0x80):
        # Two's-complement sign extension: a decoded value with bit 64 set
        # represents a negative number.
        if result > 0x7fffffffffffffff:
          result -= (1 << 64)
          result |= ~mask
        else:
          result &= mask
        result = result_type(result)
        return (result, pos)
      shift += 7
      if shift >= 64:
        raise _DecodeError('Too many bytes when decoding varint.')
  return DecodeVarint
|
||||
|
||||
# We force 32-bit values to int and 64-bit values to long to make
|
||||
# alternate implementations where the distinction is more significant
|
||||
# (e.g. the C++ implementation) simpler.
|
||||
|
||||
# NOTE(review): 'long' exists on Python 2 only; this module targets the
# Python 2 implementation (see the ##PY25 compatibility markers above).
_DecodeVarint = _VarintDecoder((1 << 64) - 1, long)
_DecodeSignedVarint = _SignedVarintDecoder((1 << 64) - 1, long)

# Use these versions for values which must be limited to 32 bits.
_DecodeVarint32 = _VarintDecoder((1 << 32) - 1, int)
_DecodeSignedVarint32 = _SignedVarintDecoder((1 << 32) - 1, int)
|
||||
|
||||
|
||||
def ReadTag(buffer, pos):
  """Read a tag from the buffer, and return a (tag_bytes, new_pos) tuple.

  We return the raw bytes of the tag rather than decoding them.  The raw
  bytes can then be used to look up the proper decoder.  This effectively
  allows us to trade some work that would be done in pure-python (decoding a
  varint) for work that is done in C (searching for a byte string in a hash
  table).  In a low-level language it would be much cheaper to decode the
  varint and use that, but not in Python.
  """

  py2 = _PY2 ##PY25
##!PY25  py2 = str is bytes
  # Scan past continuation bytes (high bit set); the tag runs from 'start'
  # up to and including the first byte whose high bit is clear.
  start = pos
  while (ord(buffer[pos]) if py2 else buffer[pos]) & 0x80:
    pos += 1
  pos += 1
  return (buffer[start:pos], pos)
|
||||
|
||||
|
||||
# --------------------------------------------------------------------
|
||||
|
||||
|
||||
def _SimpleDecoder(wire_type, decode_value):
  """Return a constructor for a decoder for fields of a particular type.

  Args:
      wire_type:  The field's wire type.
      decode_value:  A function which decodes an individual value, e.g.
        _DecodeVarint()
  """

  def SpecificDecoder(field_number, is_repeated, is_packed, key, new_default):
    if is_packed:
      local_DecodeVarint = _DecodeVarint
      def DecodePackedField(buffer, pos, end, message, field_dict):
        # Packed encoding: a single length-prefixed run of values.
        value = field_dict.get(key)
        if value is None:
          value = field_dict.setdefault(key, new_default(message))
        (endpoint, pos) = local_DecodeVarint(buffer, pos)
        endpoint += pos
        if endpoint > end:
          raise _DecodeError('Truncated message.')
        while pos < endpoint:
          (element, pos) = decode_value(buffer, pos)
          value.append(element)
        if pos > endpoint:
          del value[-1]   # Discard corrupt value.
          raise _DecodeError('Packed element was truncated.')
        return pos
      return DecodePackedField
    elif is_repeated:
      tag_bytes = encoder.TagBytes(field_number, wire_type)
      tag_len = len(tag_bytes)
      def DecodeRepeatedField(buffer, pos, end, message, field_dict):
        # Non-packed repeated: each element carries its own tag; keep
        # consuming as long as the next tag matches this field.
        value = field_dict.get(key)
        if value is None:
          value = field_dict.setdefault(key, new_default(message))
        while 1:
          (element, new_pos) = decode_value(buffer, pos)
          value.append(element)
          # Predict that the next tag is another copy of the same repeated
          # field.
          pos = new_pos + tag_len
          if buffer[new_pos:pos] != tag_bytes or new_pos >= end:
            # Prediction failed.  Return.
            if new_pos > end:
              raise _DecodeError('Truncated message.')
            return new_pos
      return DecodeRepeatedField
    else:
      def DecodeField(buffer, pos, end, message, field_dict):
        (field_dict[key], pos) = decode_value(buffer, pos)
        if pos > end:
          del field_dict[key]  # Discard corrupt value.
          raise _DecodeError('Truncated message.')
        return pos
      return DecodeField

  return SpecificDecoder
|
||||
|
||||
|
||||
def _ModifiedDecoder(wire_type, decode_value, modify_value):
  """Like SimpleDecoder but additionally invokes modify_value on every value
  before storing it.  Usually modify_value is ZigZagDecode.
  """

  # Reusing _SimpleDecoder is slightly slower than copying a bunch of code,
  # but not enough to make a significant difference.

  def InnerDecode(buffer, pos):
    raw_value, next_pos = decode_value(buffer, pos)
    return (modify_value(raw_value), next_pos)
  return _SimpleDecoder(wire_type, InnerDecode)
|
||||
|
||||
|
||||
def _StructPackDecoder(wire_type, format):
  """Return a constructor for a decoder for a fixed-width field.

  Args:
      wire_type:  The field's wire type.
      format:  The format string to pass to struct.unpack().
  """

  value_size = struct.calcsize(format)
  local_unpack = struct.unpack

  # Reusing _SimpleDecoder is slightly slower than copying a bunch of code, but
  # not enough to make a significant difference.

  # Note that we expect someone up-stack to catch struct.error and convert
  # it to _DecodeError -- this way we don't have to set up exception-
  # handling blocks every time we parse one value.

  def InnerDecode(buffer, pos):
    # Slice exactly value_size bytes; a short slice makes unpack raise.
    new_pos = pos + value_size
    result = local_unpack(format, buffer[pos:new_pos])[0]
    return (result, new_pos)
  return _SimpleDecoder(wire_type, InnerDecode)
|
||||
|
||||
|
||||
def _FloatDecoder():
  """Returns a decoder for a float field.

  This code works around a bug in struct.unpack for non-finite 32-bit
  floating-point values.
  """

  local_unpack = struct.unpack
  b = (lambda x:x) if _PY2 else lambda x:x.encode('latin1') ##PY25

  def InnerDecode(buffer, pos):
    # We expect a 32-bit value in little-endian byte order.  Bit 1 is the sign
    # bit, bits 2-9 represent the exponent, and bits 10-32 are the significand.
    new_pos = pos + 4
    float_bytes = buffer[pos:new_pos]

    # If this value has all its exponent bits set, then it's non-finite.
    # In Python 2.4, struct.unpack will convert it to a finite 64-bit value.
    # To avoid that, we parse it specially.
    if ((float_bytes[3:4] in b('\x7F\xFF')) ##PY25
##!PY25    if ((float_bytes[3:4] in b'\x7F\xFF')
        and (float_bytes[2:3] >= b('\x80'))): ##PY25
##!PY25        and (float_bytes[2:3] >= b'\x80')):
      # If at least one significand bit is set...
      if float_bytes[0:3] != b('\x00\x00\x80'): ##PY25
##!PY25      if float_bytes[0:3] != b'\x00\x00\x80':
        return (_NAN, new_pos)
      # If sign bit is set...
      if float_bytes[3:4] == b('\xFF'): ##PY25
##!PY25      if float_bytes[3:4] == b'\xFF':
        return (_NEG_INF, new_pos)
      return (_POS_INF, new_pos)

    # Note that we expect someone up-stack to catch struct.error and convert
    # it to _DecodeError -- this way we don't have to set up exception-
    # handling blocks every time we parse one value.
    result = local_unpack('<f', float_bytes)[0]
    return (result, new_pos)
  return _SimpleDecoder(wire_format.WIRETYPE_FIXED32, InnerDecode)
|
||||
|
||||
|
||||
def _DoubleDecoder():
  """Returns a decoder for a double field.

  This code works around a bug in struct.unpack for not-a-number.
  """

  local_unpack = struct.unpack
  # b() makes the byte-string literals below work on both Python 2 (plain
  # str) and Python 3 (encode to bytes); the ##PY25 / ##!PY25 pairs are the
  # build-time source variants for pre-2.6 interpreters.
  b = (lambda x:x) if _PY2 else lambda x:x.encode('latin1') ##PY25

  def InnerDecode(buffer, pos):
    """Decode one little-endian IEEE-754 double at pos; returns (value, new_pos)."""
    # We expect a 64-bit value in little-endian byte order.  Bit 1 is the sign
    # bit, bits 2-12 represent the exponent, and bits 13-64 are the significand.
    new_pos = pos + 8
    double_bytes = buffer[pos:new_pos]

    # If this value has all its exponent bits set and at least one significand
    # bit set, it's not a number.  In Python 2.4, struct.unpack will treat it
    # as inf or -inf.  To avoid that, we treat it specially.
##!PY25    if ((double_bytes[7:8] in b'\x7F\xFF')
##!PY25        and (double_bytes[6:7] >= b'\xF0')
##!PY25        and (double_bytes[0:7] != b'\x00\x00\x00\x00\x00\x00\xF0')):
    if ((double_bytes[7:8] in b('\x7F\xFF')) ##PY25
        and (double_bytes[6:7] >= b('\xF0')) ##PY25
        and (double_bytes[0:7] != b('\x00\x00\x00\x00\x00\x00\xF0'))): ##PY25
      return (_NAN, new_pos)

    # Note that we expect someone up-stack to catch struct.error and convert
    # it to _DecodeError -- this way we don't have to set up exception-
    # handling blocks every time we parse one value.
    result = local_unpack('<d', double_bytes)[0]
    return (result, new_pos)
  return _SimpleDecoder(wire_format.WIRETYPE_FIXED64, InnerDecode)
|
||||
|
||||
|
||||
def EnumDecoder(field_number, is_repeated, is_packed, key, new_default):
  """Returns a decoder for an enum field.

  Unlike the other varint decoders, enum decoding validates each number
  against the enum's declared values; unrecognized numbers are preserved in
  message._unknown_fields (so they survive reserialization) instead of being
  assigned to the field.
  """
  enum_type = key.enum_type
  if is_packed:
    local_DecodeVarint = _DecodeVarint
    def DecodePackedField(buffer, pos, end, message, field_dict):
      value = field_dict.get(key)
      if value is None:
        value = field_dict.setdefault(key, new_default(message))
      # A packed field is a length-prefixed run of varints; endpoint is the
      # position where the payload must end.
      (endpoint, pos) = local_DecodeVarint(buffer, pos)
      endpoint += pos
      if endpoint > end:
        raise _DecodeError('Truncated message.')
      while pos < endpoint:
        value_start_pos = pos
        (element, pos) = _DecodeSignedVarint32(buffer, pos)
        if element in enum_type.values_by_number:
          value.append(element)
        else:
          # Unknown enum value: keep the raw varint, re-tagged as an
          # unpacked VARINT field, so it round-trips.
          if not message._unknown_fields:
            message._unknown_fields = []
          tag_bytes = encoder.TagBytes(field_number,
                                       wire_format.WIRETYPE_VARINT)
          message._unknown_fields.append(
              (tag_bytes, buffer[value_start_pos:pos]))
      if pos > endpoint:
        # The last varint overran the declared payload length; roll back
        # whichever list it was appended to before reporting the error.
        if element in enum_type.values_by_number:
          del value[-1]   # Discard corrupt value.
        else:
          del message._unknown_fields[-1]
        raise _DecodeError('Packed element was truncated.')
      return pos
    return DecodePackedField
  elif is_repeated:
    tag_bytes = encoder.TagBytes(field_number, wire_format.WIRETYPE_VARINT)
    tag_len = len(tag_bytes)
    def DecodeRepeatedField(buffer, pos, end, message, field_dict):
      value = field_dict.get(key)
      if value is None:
        value = field_dict.setdefault(key, new_default(message))
      while 1:
        (element, new_pos) = _DecodeSignedVarint32(buffer, pos)
        if element in enum_type.values_by_number:
          value.append(element)
        else:
          # Unknown enum value: stash the raw bytes as an unknown field.
          if not message._unknown_fields:
            message._unknown_fields = []
          message._unknown_fields.append(
              (tag_bytes, buffer[pos:new_pos]))
        # Predict that the next tag is another copy of the same repeated
        # field.
        pos = new_pos + tag_len
        if buffer[new_pos:pos] != tag_bytes or new_pos >= end:
          # Prediction failed.  Return.
          if new_pos > end:
            raise _DecodeError('Truncated message.')
          return new_pos
    return DecodeRepeatedField
  else:
    def DecodeField(buffer, pos, end, message, field_dict):
      value_start_pos = pos
      (enum_value, pos) = _DecodeSignedVarint32(buffer, pos)
      if pos > end:
        raise _DecodeError('Truncated message.')
      if enum_value in enum_type.values_by_number:
        field_dict[key] = enum_value
      else:
        # Unknown enum value: keep the raw bytes as an unknown field instead
        # of assigning it to the field.
        if not message._unknown_fields:
          message._unknown_fields = []
        tag_bytes = encoder.TagBytes(field_number,
                                     wire_format.WIRETYPE_VARINT)
        message._unknown_fields.append(
            (tag_bytes, buffer[value_start_pos:pos]))
      return pos
    return DecodeField
|
||||
|
||||
|
||||
# --------------------------------------------------------------------
# Concrete decoder constructors for each scalar field type, produced by the
# generic factories above.  Each name below is a function with signature
#   (field_number, is_repeated, is_packed, key, new_default) -> decode fn.


Int32Decoder = _SimpleDecoder(
    wire_format.WIRETYPE_VARINT, _DecodeSignedVarint32)

Int64Decoder = _SimpleDecoder(
    wire_format.WIRETYPE_VARINT, _DecodeSignedVarint)

UInt32Decoder = _SimpleDecoder(wire_format.WIRETYPE_VARINT, _DecodeVarint32)
UInt64Decoder = _SimpleDecoder(wire_format.WIRETYPE_VARINT, _DecodeVarint)

# sint32/sint64 are zigzag-encoded varints.
SInt32Decoder = _ModifiedDecoder(
    wire_format.WIRETYPE_VARINT, _DecodeVarint32, wire_format.ZigZagDecode)
SInt64Decoder = _ModifiedDecoder(
    wire_format.WIRETYPE_VARINT, _DecodeVarint, wire_format.ZigZagDecode)

# Note that Python conveniently guarantees that when using the '<' prefix on
# formats, they will also have the same size across all platforms (as opposed
# to without the prefix, where their sizes depend on the C compiler's basic
# type sizes).
Fixed32Decoder  = _StructPackDecoder(wire_format.WIRETYPE_FIXED32, '<I')
Fixed64Decoder  = _StructPackDecoder(wire_format.WIRETYPE_FIXED64, '<Q')
SFixed32Decoder = _StructPackDecoder(wire_format.WIRETYPE_FIXED32, '<i')
SFixed64Decoder = _StructPackDecoder(wire_format.WIRETYPE_FIXED64, '<q')
FloatDecoder = _FloatDecoder()
DoubleDecoder = _DoubleDecoder()

BoolDecoder = _ModifiedDecoder(
    wire_format.WIRETYPE_VARINT, _DecodeVarint, bool)
|
||||
|
||||
|
||||
def StringDecoder(field_number, is_repeated, is_packed, key, new_default):
  """Returns a decoder for a string field.

  Values are decoded from UTF-8 bytes to unicode objects; on a decode error
  the UnicodeDecodeError is re-raised with the field's full name appended.
  """

  local_DecodeVarint = _DecodeVarint
  local_unicode = unicode

  def _ConvertToUnicode(byte_str):
    try:
      return local_unicode(byte_str, 'utf-8')
    except UnicodeDecodeError, e:
      # add more information to the error message and re-raise it.
      e.reason = '%s in field: %s' % (e, key.full_name)
      raise

  # Strings are length-delimited and are never packed.
  assert not is_packed
  if is_repeated:
    tag_bytes = encoder.TagBytes(field_number,
                                 wire_format.WIRETYPE_LENGTH_DELIMITED)
    tag_len = len(tag_bytes)
    def DecodeRepeatedField(buffer, pos, end, message, field_dict):
      value = field_dict.get(key)
      if value is None:
        value = field_dict.setdefault(key, new_default(message))
      while 1:
        (size, pos) = local_DecodeVarint(buffer, pos)
        new_pos = pos + size
        if new_pos > end:
          raise _DecodeError('Truncated string.')
        value.append(_ConvertToUnicode(buffer[pos:new_pos]))
        # Predict that the next tag is another copy of the same repeated field.
        pos = new_pos + tag_len
        if buffer[new_pos:pos] != tag_bytes or new_pos == end:
          # Prediction failed.  Return.
          return new_pos
    return DecodeRepeatedField
  else:
    def DecodeField(buffer, pos, end, message, field_dict):
      (size, pos) = local_DecodeVarint(buffer, pos)
      new_pos = pos + size
      if new_pos > end:
        raise _DecodeError('Truncated string.')
      field_dict[key] = _ConvertToUnicode(buffer[pos:new_pos])
      return new_pos
    return DecodeField
|
||||
|
||||
|
||||
def BytesDecoder(field_number, is_repeated, is_packed, key, new_default):
  """Returns a decoder for a bytes field."""

  decode_varint = _DecodeVarint

  # Bytes fields are length-delimited and are never packed.
  assert not is_packed

  if not is_repeated:
    def DecodeField(buffer, pos, end, message, field_dict):
      (size, pos) = decode_varint(buffer, pos)
      new_pos = pos + size
      if new_pos > end:
        raise _DecodeError('Truncated string.')
      field_dict[key] = buffer[pos:new_pos]
      return new_pos
    return DecodeField

  tag_bytes = encoder.TagBytes(field_number,
                               wire_format.WIRETYPE_LENGTH_DELIMITED)
  tag_len = len(tag_bytes)

  def DecodeRepeatedField(buffer, pos, end, message, field_dict):
    value = field_dict.get(key)
    if value is None:
      value = field_dict.setdefault(key, new_default(message))
    while True:
      (size, pos) = decode_varint(buffer, pos)
      data_end = pos + size
      if data_end > end:
        raise _DecodeError('Truncated string.')
      value.append(buffer[pos:data_end])
      # Speculatively assume the next tag repeats this same field; if the
      # guess is wrong (or the buffer is exhausted), return so the caller
      # re-dispatches on the actual tag.
      pos = data_end + tag_len
      if buffer[data_end:pos] != tag_bytes or data_end == end:
        return data_end
  return DecodeRepeatedField
|
||||
|
||||
|
||||
def GroupDecoder(field_number, is_repeated, is_packed, key, new_default):
  """Returns a decoder for a group field.

  A group is delimited by START_GROUP/END_GROUP tags rather than a length
  prefix, so after parsing the sub-message we must consume the matching end
  tag explicitly.
  """

  end_tag_bytes = encoder.TagBytes(field_number,
                                   wire_format.WIRETYPE_END_GROUP)
  end_tag_len = len(end_tag_bytes)

  # Groups are never packed.
  assert not is_packed
  if is_repeated:
    tag_bytes = encoder.TagBytes(field_number,
                                 wire_format.WIRETYPE_START_GROUP)
    tag_len = len(tag_bytes)
    def DecodeRepeatedField(buffer, pos, end, message, field_dict):
      value = field_dict.get(key)
      if value is None:
        value = field_dict.setdefault(key, new_default(message))
      while 1:
        # NOTE(review): value is re-fetched on every iteration even though it
        # was fetched just above -- presumably defensive against the field
        # container being replaced during parsing; confirm before simplifying.
        value = field_dict.get(key)
        if value is None:
          value = field_dict.setdefault(key, new_default(message))
        # Read sub-message.
        pos = value.add()._InternalParse(buffer, pos, end)
        # Read end tag.
        new_pos = pos+end_tag_len
        if buffer[pos:new_pos] != end_tag_bytes or new_pos > end:
          raise _DecodeError('Missing group end tag.')
        # Predict that the next tag is another copy of the same repeated field.
        pos = new_pos + tag_len
        if buffer[new_pos:pos] != tag_bytes or new_pos == end:
          # Prediction failed.  Return.
          return new_pos
    return DecodeRepeatedField
  else:
    def DecodeField(buffer, pos, end, message, field_dict):
      value = field_dict.get(key)
      if value is None:
        value = field_dict.setdefault(key, new_default(message))
      # Read sub-message.
      pos = value._InternalParse(buffer, pos, end)
      # Read end tag.
      new_pos = pos+end_tag_len
      if buffer[pos:new_pos] != end_tag_bytes or new_pos > end:
        raise _DecodeError('Missing group end tag.')
      return new_pos
    return DecodeField
|
||||
|
||||
|
||||
def MessageDecoder(field_number, is_repeated, is_packed, key, new_default):
  """Returns a decoder for a message field.

  Sub-messages are length-delimited: a varint size prefix followed by the
  serialized sub-message, which must consume exactly that many bytes.
  """

  local_DecodeVarint = _DecodeVarint

  # Message fields are never packed.
  assert not is_packed
  if is_repeated:
    tag_bytes = encoder.TagBytes(field_number,
                                 wire_format.WIRETYPE_LENGTH_DELIMITED)
    tag_len = len(tag_bytes)
    def DecodeRepeatedField(buffer, pos, end, message, field_dict):
      value = field_dict.get(key)
      if value is None:
        value = field_dict.setdefault(key, new_default(message))
      while 1:
        # NOTE(review): value is re-fetched on every iteration even though it
        # was fetched just above -- presumably defensive; confirm before
        # simplifying.
        value = field_dict.get(key)
        if value is None:
          value = field_dict.setdefault(key, new_default(message))
        # Read length.
        (size, pos) = local_DecodeVarint(buffer, pos)
        new_pos = pos + size
        if new_pos > end:
          raise _DecodeError('Truncated message.')
        # Read sub-message.
        if value.add()._InternalParse(buffer, pos, new_pos) != new_pos:
          # The only reason _InternalParse would return early is if it
          # encountered an end-group tag.
          raise _DecodeError('Unexpected end-group tag.')
        # Predict that the next tag is another copy of the same repeated field.
        pos = new_pos + tag_len
        if buffer[new_pos:pos] != tag_bytes or new_pos == end:
          # Prediction failed.  Return.
          return new_pos
    return DecodeRepeatedField
  else:
    def DecodeField(buffer, pos, end, message, field_dict):
      value = field_dict.get(key)
      if value is None:
        value = field_dict.setdefault(key, new_default(message))
      # Read length.
      (size, pos) = local_DecodeVarint(buffer, pos)
      new_pos = pos + size
      if new_pos > end:
        raise _DecodeError('Truncated message.')
      # Read sub-message.
      if value._InternalParse(buffer, pos, new_pos) != new_pos:
        # The only reason _InternalParse would return early is if it encountered
        # an end-group tag.
        raise _DecodeError('Unexpected end-group tag.')
      return new_pos
    return DecodeField
|
||||
|
||||
|
||||
# --------------------------------------------------------------------
|
||||
|
||||
MESSAGE_SET_ITEM_TAG = encoder.TagBytes(1, wire_format.WIRETYPE_START_GROUP)

def MessageSetItemDecoder(extensions_by_number):
  """Returns a decoder for a MessageSet item.

  The parameter is the _extensions_by_number map for the message class.

  The message set message looks like this:
    message MessageSet {
      repeated group Item = 1 {
        required int32 type_id = 2;
        required string message = 3;
      }
    }
  """

  type_id_tag_bytes = encoder.TagBytes(2, wire_format.WIRETYPE_VARINT)
  message_tag_bytes = encoder.TagBytes(3, wire_format.WIRETYPE_LENGTH_DELIMITED)
  item_end_tag_bytes = encoder.TagBytes(1, wire_format.WIRETYPE_END_GROUP)

  # Bind frequently-used globals to locals so the closure below resolves
  # them with fast local lookups.
  local_ReadTag = ReadTag
  local_DecodeVarint = _DecodeVarint
  local_SkipField = SkipField

  def DecodeItem(buffer, pos, end, message, field_dict):
    """Parse one MessageSet Item group starting at pos; returns the new pos."""
    message_set_item_start = pos
    type_id = -1
    message_start = -1
    message_end = -1

    # Technically, type_id and message can appear in any order, so we need
    # a little loop here.
    while 1:
      (tag_bytes, pos) = local_ReadTag(buffer, pos)
      if tag_bytes == type_id_tag_bytes:
        (type_id, pos) = local_DecodeVarint(buffer, pos)
      elif tag_bytes == message_tag_bytes:
        (size, message_start) = local_DecodeVarint(buffer, pos)
        pos = message_end = message_start + size
      elif tag_bytes == item_end_tag_bytes:
        break
      else:
        # Fix: use the hoisted local_SkipField binding (previously this
        # re-resolved the global SkipField, defeating the hoist above).
        pos = local_SkipField(buffer, pos, end, tag_bytes)
        if pos == -1:
          raise _DecodeError('Missing group end tag.')

    if pos > end:
      raise _DecodeError('Truncated message.')

    if type_id == -1:
      raise _DecodeError('MessageSet item missing type_id.')
    if message_start == -1:
      raise _DecodeError('MessageSet item missing message.')

    extension = extensions_by_number.get(type_id)
    if extension is not None:
      value = field_dict.get(extension)
      if value is None:
        value = field_dict.setdefault(
            extension, extension.message_type._concrete_class())
      if value._InternalParse(buffer, message_start, message_end) != message_end:
        # The only reason _InternalParse would return early is if it encountered
        # an end-group tag.
        raise _DecodeError('Unexpected end-group tag.')
    else:
      # Unregistered extension type: preserve the entire item as an unknown
      # field so it survives reserialization.
      if not message._unknown_fields:
        message._unknown_fields = []
      message._unknown_fields.append((MESSAGE_SET_ITEM_TAG,
                                      buffer[message_set_item_start:pos]))

    return pos

  return DecodeItem
|
||||
|
||||
# --------------------------------------------------------------------
|
||||
# Optimization is not as heavy here because calls to SkipField() are rare,
|
||||
# except for handling end-group tags.
|
||||
|
||||
def _SkipVarint(buffer, pos, end):
|
||||
"""Skip a varint value. Returns the new position."""
|
||||
# Previously ord(buffer[pos]) raised IndexError when pos is out of range.
|
||||
# With this code, ord(b'') raises TypeError. Both are handled in
|
||||
# python_message.py to generate a 'Truncated message' error.
|
||||
while ord(buffer[pos:pos+1]) & 0x80:
|
||||
pos += 1
|
||||
pos += 1
|
||||
if pos > end:
|
||||
raise _DecodeError('Truncated message.')
|
||||
return pos
|
||||
|
||||
def _SkipFixed64(buffer, pos, end):
|
||||
"""Skip a fixed64 value. Returns the new position."""
|
||||
|
||||
pos += 8
|
||||
if pos > end:
|
||||
raise _DecodeError('Truncated message.')
|
||||
return pos
|
||||
|
||||
def _SkipLengthDelimited(buffer, pos, end):
  """Skip a length-delimited value.  Returns the new position."""
  # Read the varint size prefix, then jump over that many payload bytes.
  (size, pos) = _DecodeVarint(buffer, pos)
  new_pos = pos + size
  if new_pos > end:
    raise _DecodeError('Truncated message.')
  return new_pos
|
||||
|
||||
def _SkipGroup(buffer, pos, end):
  """Skip sub-group.  Returns the new position."""
  while True:
    (tag_bytes, pos) = ReadTag(buffer, pos)
    skipped_to = SkipField(buffer, pos, end, tag_bytes)
    if skipped_to == -1:
      # The tag just read was this group's END_GROUP tag: stop right after
      # the tag itself.
      return pos
    pos = skipped_to
|
||||
|
||||
def _EndGroup(buffer, pos, end):
  """Skipping an END_GROUP tag returns -1 to tell the parent loop to break."""
  # buffer/pos/end are unused; the signature must match the other skip
  # functions dispatched from the WIRETYPE_TO_SKIPPER table.
  return -1
|
||||
|
||||
def _SkipFixed32(buffer, pos, end):
|
||||
"""Skip a fixed32 value. Returns the new position."""
|
||||
|
||||
pos += 4
|
||||
if pos > end:
|
||||
raise _DecodeError('Truncated message.')
|
||||
return pos
|
||||
|
||||
def _RaiseInvalidWireType(buffer, pos, end):
  """Skip function for unknown wire types.  Raises an exception."""
  # Wire types 6 and 7 are unassigned in the protobuf wire format, so
  # reaching this means the tag byte is corrupt.
  raise _DecodeError('Tag had invalid wire type.')
|
||||
|
||||
def _FieldSkipper():
  """Constructs the SkipField function."""

  # The index into this table is the wire type number (0..7); entries 6 and
  # 7 are unassigned wire types and therefore raise.
  skippers = (
      _SkipVarint,
      _SkipFixed64,
      _SkipLengthDelimited,
      _SkipGroup,
      _EndGroup,
      _SkipFixed32,
      _RaiseInvalidWireType,
      _RaiseInvalidWireType,
      )

  wiretype_mask = wire_format.TAG_TYPE_MASK

  def SkipField(buffer, pos, end, tag_bytes):
    """Skips a field with the specified tag.

    |pos| should point to the byte immediately after the tag.

    Returns:
        The new position (after the tag value), or -1 if the tag is an
        end-group tag (in which case the calling loop should break).
    """
    # The wire type is always in the first byte since varints are
    # little-endian.
    wire_type = ord(tag_bytes[0:1]) & wiretype_mask
    return skippers[wire_type](buffer, pos, end)

  return SkipField
|
||||
|
||||
# Module-level SkipField closure used by the decoders above and by callers
# in python_message.py.
SkipField = _FieldSkipper()
|
||||
@@ -0,0 +1,63 @@
|
||||
#! /usr/bin/python
|
||||
#
|
||||
# Protocol Buffers - Google's data interchange format
|
||||
# Copyright 2008 Google Inc. All rights reserved.
|
||||
# https://developers.google.com/protocol-buffers/
|
||||
#
|
||||
# Redistribution and use in source and binary forms, with or without
|
||||
# modification, are permitted provided that the following conditions are
|
||||
# met:
|
||||
#
|
||||
# * Redistributions of source code must retain the above copyright
|
||||
# notice, this list of conditions and the following disclaimer.
|
||||
# * Redistributions in binary form must reproduce the above
|
||||
# copyright notice, this list of conditions and the following disclaimer
|
||||
# in the documentation and/or other materials provided with the
|
||||
# distribution.
|
||||
# * Neither the name of Google Inc. nor the names of its
|
||||
# contributors may be used to endorse or promote products derived from
|
||||
# this software without specific prior written permission.
|
||||
#
|
||||
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
||||
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
||||
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
||||
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
||||
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
||||
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
||||
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
||||
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
||||
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
||||
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
"""Tests for google.protobuf.descriptor_database."""
|
||||
|
||||
__author__ = 'matthewtoia@google.com (Matt Toia)'
|
||||
|
||||
from google.apputils import basetest
|
||||
from google.protobuf import descriptor_pb2
|
||||
from google.protobuf.internal import factory_test2_pb2
|
||||
from google.protobuf import descriptor_database
|
||||
|
||||
|
||||
class DescriptorDatabaseTest(basetest.TestCase):
  """Tests for descriptor_database.DescriptorDatabase."""

  def testAdd(self):
    """Adding a FileDescriptorProto makes it findable by name and by symbol."""
    db = descriptor_database.DescriptorDatabase()
    file_desc_proto = descriptor_pb2.FileDescriptorProto.FromString(
        factory_test2_pb2.DESCRIPTOR.serialized_pb)
    db.Add(file_desc_proto)

    # The same proto should come back whether looked up by file name or by
    # any symbol (message, nested message, enum, nested enum) it defines.
    # assertEqual replaces the deprecated assertEquals alias.
    self.assertEqual(file_desc_proto, db.FindFileByName(
        'google/protobuf/internal/factory_test2.proto'))
    self.assertEqual(file_desc_proto, db.FindFileContainingSymbol(
        'google.protobuf.python.internal.Factory2Message'))
    self.assertEqual(file_desc_proto, db.FindFileContainingSymbol(
        'google.protobuf.python.internal.Factory2Message.NestedFactory2Message'))
    self.assertEqual(file_desc_proto, db.FindFileContainingSymbol(
        'google.protobuf.python.internal.Factory2Enum'))
    self.assertEqual(file_desc_proto, db.FindFileContainingSymbol(
        'google.protobuf.python.internal.Factory2Message.NestedFactory2Enum'))
|
||||
|
||||
if __name__ == '__main__':
  # Run via Google's apputils test runner rather than plain unittest.main().
  basetest.main()
|
||||
@@ -0,0 +1,564 @@
|
||||
#! /usr/bin/python
|
||||
#
|
||||
# Protocol Buffers - Google's data interchange format
|
||||
# Copyright 2008 Google Inc. All rights reserved.
|
||||
# https://developers.google.com/protocol-buffers/
|
||||
#
|
||||
# Redistribution and use in source and binary forms, with or without
|
||||
# modification, are permitted provided that the following conditions are
|
||||
# met:
|
||||
#
|
||||
# * Redistributions of source code must retain the above copyright
|
||||
# notice, this list of conditions and the following disclaimer.
|
||||
# * Redistributions in binary form must reproduce the above
|
||||
# copyright notice, this list of conditions and the following disclaimer
|
||||
# in the documentation and/or other materials provided with the
|
||||
# distribution.
|
||||
# * Neither the name of Google Inc. nor the names of its
|
||||
# contributors may be used to endorse or promote products derived from
|
||||
# this software without specific prior written permission.
|
||||
#
|
||||
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
||||
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
||||
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
||||
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
||||
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
||||
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
||||
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
||||
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
||||
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
||||
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
"""Tests for google.protobuf.descriptor_pool."""
|
||||
|
||||
__author__ = 'matthewtoia@google.com (Matt Toia)'
|
||||
|
||||
import os
|
||||
import unittest
|
||||
|
||||
from google.apputils import basetest
|
||||
from google.protobuf import unittest_pb2
|
||||
from google.protobuf import descriptor_pb2
|
||||
from google.protobuf.internal import api_implementation
|
||||
from google.protobuf.internal import descriptor_pool_test1_pb2
|
||||
from google.protobuf.internal import descriptor_pool_test2_pb2
|
||||
from google.protobuf.internal import factory_test1_pb2
|
||||
from google.protobuf.internal import factory_test2_pb2
|
||||
from google.protobuf import descriptor
|
||||
from google.protobuf import descriptor_database
|
||||
from google.protobuf import descriptor_pool
|
||||
|
||||
|
||||
class DescriptorPoolTest(basetest.TestCase):
|
||||
|
||||
def setUp(self):
|
||||
self.pool = descriptor_pool.DescriptorPool()
|
||||
self.factory_test1_fd = descriptor_pb2.FileDescriptorProto.FromString(
|
||||
factory_test1_pb2.DESCRIPTOR.serialized_pb)
|
||||
self.factory_test2_fd = descriptor_pb2.FileDescriptorProto.FromString(
|
||||
factory_test2_pb2.DESCRIPTOR.serialized_pb)
|
||||
self.pool.Add(self.factory_test1_fd)
|
||||
self.pool.Add(self.factory_test2_fd)
|
||||
|
||||
def testFindFileByName(self):
|
||||
name1 = 'google/protobuf/internal/factory_test1.proto'
|
||||
file_desc1 = self.pool.FindFileByName(name1)
|
||||
self.assertIsInstance(file_desc1, descriptor.FileDescriptor)
|
||||
self.assertEquals(name1, file_desc1.name)
|
||||
self.assertEquals('google.protobuf.python.internal', file_desc1.package)
|
||||
self.assertIn('Factory1Message', file_desc1.message_types_by_name)
|
||||
|
||||
name2 = 'google/protobuf/internal/factory_test2.proto'
|
||||
file_desc2 = self.pool.FindFileByName(name2)
|
||||
self.assertIsInstance(file_desc2, descriptor.FileDescriptor)
|
||||
self.assertEquals(name2, file_desc2.name)
|
||||
self.assertEquals('google.protobuf.python.internal', file_desc2.package)
|
||||
self.assertIn('Factory2Message', file_desc2.message_types_by_name)
|
||||
|
||||
def testFindFileByNameFailure(self):
|
||||
with self.assertRaises(KeyError):
|
||||
self.pool.FindFileByName('Does not exist')
|
||||
|
||||
def testFindFileContainingSymbol(self):
|
||||
file_desc1 = self.pool.FindFileContainingSymbol(
|
||||
'google.protobuf.python.internal.Factory1Message')
|
||||
self.assertIsInstance(file_desc1, descriptor.FileDescriptor)
|
||||
self.assertEquals('google/protobuf/internal/factory_test1.proto',
|
||||
file_desc1.name)
|
||||
self.assertEquals('google.protobuf.python.internal', file_desc1.package)
|
||||
self.assertIn('Factory1Message', file_desc1.message_types_by_name)
|
||||
|
||||
file_desc2 = self.pool.FindFileContainingSymbol(
|
||||
'google.protobuf.python.internal.Factory2Message')
|
||||
self.assertIsInstance(file_desc2, descriptor.FileDescriptor)
|
||||
self.assertEquals('google/protobuf/internal/factory_test2.proto',
|
||||
file_desc2.name)
|
||||
self.assertEquals('google.protobuf.python.internal', file_desc2.package)
|
||||
self.assertIn('Factory2Message', file_desc2.message_types_by_name)
|
||||
|
||||
def testFindFileContainingSymbolFailure(self):
|
||||
with self.assertRaises(KeyError):
|
||||
self.pool.FindFileContainingSymbol('Does not exist')
|
||||
|
||||
def testFindMessageTypeByName(self):
|
||||
msg1 = self.pool.FindMessageTypeByName(
|
||||
'google.protobuf.python.internal.Factory1Message')
|
||||
self.assertIsInstance(msg1, descriptor.Descriptor)
|
||||
self.assertEquals('Factory1Message', msg1.name)
|
||||
self.assertEquals('google.protobuf.python.internal.Factory1Message',
|
||||
msg1.full_name)
|
||||
self.assertEquals(None, msg1.containing_type)
|
||||
|
||||
nested_msg1 = msg1.nested_types[0]
|
||||
self.assertEquals('NestedFactory1Message', nested_msg1.name)
|
||||
self.assertEquals(msg1, nested_msg1.containing_type)
|
||||
|
||||
nested_enum1 = msg1.enum_types[0]
|
||||
self.assertEquals('NestedFactory1Enum', nested_enum1.name)
|
||||
self.assertEquals(msg1, nested_enum1.containing_type)
|
||||
|
||||
self.assertEquals(nested_msg1, msg1.fields_by_name[
|
||||
'nested_factory_1_message'].message_type)
|
||||
self.assertEquals(nested_enum1, msg1.fields_by_name[
|
||||
'nested_factory_1_enum'].enum_type)
|
||||
|
||||
msg2 = self.pool.FindMessageTypeByName(
|
||||
'google.protobuf.python.internal.Factory2Message')
|
||||
self.assertIsInstance(msg2, descriptor.Descriptor)
|
||||
self.assertEquals('Factory2Message', msg2.name)
|
||||
self.assertEquals('google.protobuf.python.internal.Factory2Message',
|
||||
msg2.full_name)
|
||||
self.assertIsNone(msg2.containing_type)
|
||||
|
||||
nested_msg2 = msg2.nested_types[0]
|
||||
self.assertEquals('NestedFactory2Message', nested_msg2.name)
|
||||
self.assertEquals(msg2, nested_msg2.containing_type)
|
||||
|
||||
nested_enum2 = msg2.enum_types[0]
|
||||
self.assertEquals('NestedFactory2Enum', nested_enum2.name)
|
||||
self.assertEquals(msg2, nested_enum2.containing_type)
|
||||
|
||||
self.assertEquals(nested_msg2, msg2.fields_by_name[
|
||||
'nested_factory_2_message'].message_type)
|
||||
self.assertEquals(nested_enum2, msg2.fields_by_name[
|
||||
'nested_factory_2_enum'].enum_type)
|
||||
|
||||
self.assertTrue(msg2.fields_by_name['int_with_default'].has_default_value)
|
||||
self.assertEquals(
|
||||
1776, msg2.fields_by_name['int_with_default'].default_value)
|
||||
|
||||
self.assertTrue(
|
||||
msg2.fields_by_name['double_with_default'].has_default_value)
|
||||
self.assertEquals(
|
||||
9.99, msg2.fields_by_name['double_with_default'].default_value)
|
||||
|
||||
self.assertTrue(
|
||||
msg2.fields_by_name['string_with_default'].has_default_value)
|
||||
self.assertEquals(
|
||||
'hello world', msg2.fields_by_name['string_with_default'].default_value)
|
||||
|
||||
self.assertTrue(msg2.fields_by_name['bool_with_default'].has_default_value)
|
||||
self.assertFalse(msg2.fields_by_name['bool_with_default'].default_value)
|
||||
|
||||
self.assertTrue(msg2.fields_by_name['enum_with_default'].has_default_value)
|
||||
self.assertEquals(
|
||||
1, msg2.fields_by_name['enum_with_default'].default_value)
|
||||
|
||||
msg3 = self.pool.FindMessageTypeByName(
|
||||
'google.protobuf.python.internal.Factory2Message.NestedFactory2Message')
|
||||
self.assertEquals(nested_msg2, msg3)
|
||||
|
||||
self.assertTrue(msg2.fields_by_name['bytes_with_default'].has_default_value)
|
||||
self.assertEquals(
|
||||
b'a\xfb\x00c',
|
||||
msg2.fields_by_name['bytes_with_default'].default_value)
|
||||
|
||||
self.assertEqual(1, len(msg2.oneofs))
|
||||
self.assertEqual(1, len(msg2.oneofs_by_name))
|
||||
self.assertEqual(2, len(msg2.oneofs[0].fields))
|
||||
for name in ['oneof_int', 'oneof_string']:
|
||||
self.assertEqual(msg2.oneofs[0],
|
||||
msg2.fields_by_name[name].containing_oneof)
|
||||
self.assertIn(msg2.fields_by_name[name], msg2.oneofs[0].fields)
|
||||
|
||||
def testFindMessageTypeByNameFailure(self):
|
||||
with self.assertRaises(KeyError):
|
||||
self.pool.FindMessageTypeByName('Does not exist')
|
||||
|
||||
def testFindEnumTypeByName(self):
|
||||
enum1 = self.pool.FindEnumTypeByName(
|
||||
'google.protobuf.python.internal.Factory1Enum')
|
||||
self.assertIsInstance(enum1, descriptor.EnumDescriptor)
|
||||
self.assertEquals(0, enum1.values_by_name['FACTORY_1_VALUE_0'].number)
|
||||
self.assertEquals(1, enum1.values_by_name['FACTORY_1_VALUE_1'].number)
|
||||
|
||||
nested_enum1 = self.pool.FindEnumTypeByName(
|
||||
'google.protobuf.python.internal.Factory1Message.NestedFactory1Enum')
|
||||
self.assertIsInstance(nested_enum1, descriptor.EnumDescriptor)
|
||||
self.assertEquals(
|
||||
0, nested_enum1.values_by_name['NESTED_FACTORY_1_VALUE_0'].number)
|
||||
self.assertEquals(
|
||||
1, nested_enum1.values_by_name['NESTED_FACTORY_1_VALUE_1'].number)
|
||||
|
||||
enum2 = self.pool.FindEnumTypeByName(
|
||||
'google.protobuf.python.internal.Factory2Enum')
|
||||
self.assertIsInstance(enum2, descriptor.EnumDescriptor)
|
||||
self.assertEquals(0, enum2.values_by_name['FACTORY_2_VALUE_0'].number)
|
||||
self.assertEquals(1, enum2.values_by_name['FACTORY_2_VALUE_1'].number)
|
||||
|
||||
nested_enum2 = self.pool.FindEnumTypeByName(
|
||||
'google.protobuf.python.internal.Factory2Message.NestedFactory2Enum')
|
||||
self.assertIsInstance(nested_enum2, descriptor.EnumDescriptor)
|
||||
self.assertEquals(
|
||||
0, nested_enum2.values_by_name['NESTED_FACTORY_2_VALUE_0'].number)
|
||||
self.assertEquals(
|
||||
1, nested_enum2.values_by_name['NESTED_FACTORY_2_VALUE_1'].number)
|
||||
|
||||
def testFindEnumTypeByNameFailure(self):
|
||||
with self.assertRaises(KeyError):
|
||||
self.pool.FindEnumTypeByName('Does not exist')
|
||||
|
||||
def testUserDefinedDB(self):
|
||||
db = descriptor_database.DescriptorDatabase()
|
||||
self.pool = descriptor_pool.DescriptorPool(db)
|
||||
db.Add(self.factory_test1_fd)
|
||||
db.Add(self.factory_test2_fd)
|
||||
self.testFindMessageTypeByName()
|
||||
|
||||
def testComplexNesting(self):
|
||||
test1_desc = descriptor_pb2.FileDescriptorProto.FromString(
|
||||
descriptor_pool_test1_pb2.DESCRIPTOR.serialized_pb)
|
||||
test2_desc = descriptor_pb2.FileDescriptorProto.FromString(
|
||||
descriptor_pool_test2_pb2.DESCRIPTOR.serialized_pb)
|
||||
self.pool.Add(test1_desc)
|
||||
self.pool.Add(test2_desc)
|
||||
TEST1_FILE.CheckFile(self, self.pool)
|
||||
TEST2_FILE.CheckFile(self, self.pool)
|
||||
|
||||
|
||||
|
||||
class ProtoFile(object):
|
||||
|
||||
def __init__(self, name, package, messages, dependencies=None):
|
||||
self.name = name
|
||||
self.package = package
|
||||
self.messages = messages
|
||||
self.dependencies = dependencies or []
|
||||
|
||||
def CheckFile(self, test, pool):
|
||||
file_desc = pool.FindFileByName(self.name)
|
||||
test.assertEquals(self.name, file_desc.name)
|
||||
test.assertEquals(self.package, file_desc.package)
|
||||
dependencies_names = [f.name for f in file_desc.dependencies]
|
||||
test.assertEqual(self.dependencies, dependencies_names)
|
||||
for name, msg_type in self.messages.items():
|
||||
msg_type.CheckType(test, None, name, file_desc)
|
||||
|
||||
|
||||
class EnumType(object):
|
||||
|
||||
def __init__(self, values):
|
||||
self.values = values
|
||||
|
||||
def CheckType(self, test, msg_desc, name, file_desc):
|
||||
enum_desc = msg_desc.enum_types_by_name[name]
|
||||
test.assertEqual(name, enum_desc.name)
|
||||
expected_enum_full_name = '.'.join([msg_desc.full_name, name])
|
||||
test.assertEqual(expected_enum_full_name, enum_desc.full_name)
|
||||
test.assertEqual(msg_desc, enum_desc.containing_type)
|
||||
test.assertEqual(file_desc, enum_desc.file)
|
||||
for index, (value, number) in enumerate(self.values):
|
||||
value_desc = enum_desc.values_by_name[value]
|
||||
test.assertEqual(value, value_desc.name)
|
||||
test.assertEqual(index, value_desc.index)
|
||||
test.assertEqual(number, value_desc.number)
|
||||
test.assertEqual(enum_desc, value_desc.type)
|
||||
test.assertIn(value, msg_desc.enum_values_by_name)
|
||||
|
||||
|
||||
class MessageType(object):
|
||||
|
||||
def __init__(self, type_dict, field_list, is_extendable=False,
|
||||
extensions=None):
|
||||
self.type_dict = type_dict
|
||||
self.field_list = field_list
|
||||
self.is_extendable = is_extendable
|
||||
self.extensions = extensions or []
|
||||
|
||||
def CheckType(self, test, containing_type_desc, name, file_desc):
|
||||
if containing_type_desc is None:
|
||||
desc = file_desc.message_types_by_name[name]
|
||||
expected_full_name = '.'.join([file_desc.package, name])
|
||||
else:
|
||||
desc = containing_type_desc.nested_types_by_name[name]
|
||||
expected_full_name = '.'.join([containing_type_desc.full_name, name])
|
||||
|
||||
test.assertEqual(name, desc.name)
|
||||
test.assertEqual(expected_full_name, desc.full_name)
|
||||
test.assertEqual(containing_type_desc, desc.containing_type)
|
||||
test.assertEqual(desc.file, file_desc)
|
||||
test.assertEqual(self.is_extendable, desc.is_extendable)
|
||||
for name, subtype in self.type_dict.items():
|
||||
subtype.CheckType(test, desc, name, file_desc)
|
||||
|
||||
for index, (name, field) in enumerate(self.field_list):
|
||||
field.CheckField(test, desc, name, index)
|
||||
|
||||
for index, (name, field) in enumerate(self.extensions):
|
||||
field.CheckField(test, desc, name, index)
|
||||
|
||||
|
||||
class EnumField(object):
|
||||
|
||||
def __init__(self, number, type_name, default_value):
|
||||
self.number = number
|
||||
self.type_name = type_name
|
||||
self.default_value = default_value
|
||||
|
||||
def CheckField(self, test, msg_desc, name, index):
|
||||
field_desc = msg_desc.fields_by_name[name]
|
||||
enum_desc = msg_desc.enum_types_by_name[self.type_name]
|
||||
test.assertEqual(name, field_desc.name)
|
||||
expected_field_full_name = '.'.join([msg_desc.full_name, name])
|
||||
test.assertEqual(expected_field_full_name, field_desc.full_name)
|
||||
test.assertEqual(index, field_desc.index)
|
||||
test.assertEqual(self.number, field_desc.number)
|
||||
test.assertEqual(descriptor.FieldDescriptor.TYPE_ENUM, field_desc.type)
|
||||
test.assertEqual(descriptor.FieldDescriptor.CPPTYPE_ENUM,
|
||||
field_desc.cpp_type)
|
||||
test.assertTrue(field_desc.has_default_value)
|
||||
test.assertEqual(enum_desc.values_by_name[self.default_value].index,
|
||||
field_desc.default_value)
|
||||
test.assertEqual(msg_desc, field_desc.containing_type)
|
||||
test.assertEqual(enum_desc, field_desc.enum_type)
|
||||
|
||||
|
||||
class MessageField(object):
|
||||
|
||||
def __init__(self, number, type_name):
|
||||
self.number = number
|
||||
self.type_name = type_name
|
||||
|
||||
def CheckField(self, test, msg_desc, name, index):
|
||||
field_desc = msg_desc.fields_by_name[name]
|
||||
field_type_desc = msg_desc.nested_types_by_name[self.type_name]
|
||||
test.assertEqual(name, field_desc.name)
|
||||
expected_field_full_name = '.'.join([msg_desc.full_name, name])
|
||||
test.assertEqual(expected_field_full_name, field_desc.full_name)
|
||||
test.assertEqual(index, field_desc.index)
|
||||
test.assertEqual(self.number, field_desc.number)
|
||||
test.assertEqual(descriptor.FieldDescriptor.TYPE_MESSAGE, field_desc.type)
|
||||
test.assertEqual(descriptor.FieldDescriptor.CPPTYPE_MESSAGE,
|
||||
field_desc.cpp_type)
|
||||
test.assertFalse(field_desc.has_default_value)
|
||||
test.assertEqual(msg_desc, field_desc.containing_type)
|
||||
test.assertEqual(field_type_desc, field_desc.message_type)
|
||||
|
||||
|
||||
class StringField(object):
|
||||
|
||||
def __init__(self, number, default_value):
|
||||
self.number = number
|
||||
self.default_value = default_value
|
||||
|
||||
def CheckField(self, test, msg_desc, name, index):
|
||||
field_desc = msg_desc.fields_by_name[name]
|
||||
test.assertEqual(name, field_desc.name)
|
||||
expected_field_full_name = '.'.join([msg_desc.full_name, name])
|
||||
test.assertEqual(expected_field_full_name, field_desc.full_name)
|
||||
test.assertEqual(index, field_desc.index)
|
||||
test.assertEqual(self.number, field_desc.number)
|
||||
test.assertEqual(descriptor.FieldDescriptor.TYPE_STRING, field_desc.type)
|
||||
test.assertEqual(descriptor.FieldDescriptor.CPPTYPE_STRING,
|
||||
field_desc.cpp_type)
|
||||
test.assertTrue(field_desc.has_default_value)
|
||||
test.assertEqual(self.default_value, field_desc.default_value)
|
||||
|
||||
|
||||
class ExtensionField(object):
|
||||
|
||||
def __init__(self, number, extended_type):
|
||||
self.number = number
|
||||
self.extended_type = extended_type
|
||||
|
||||
def CheckField(self, test, msg_desc, name, index):
|
||||
field_desc = msg_desc.extensions_by_name[name]
|
||||
test.assertEqual(name, field_desc.name)
|
||||
expected_field_full_name = '.'.join([msg_desc.full_name, name])
|
||||
test.assertEqual(expected_field_full_name, field_desc.full_name)
|
||||
test.assertEqual(self.number, field_desc.number)
|
||||
test.assertEqual(index, field_desc.index)
|
||||
test.assertEqual(descriptor.FieldDescriptor.TYPE_MESSAGE, field_desc.type)
|
||||
test.assertEqual(descriptor.FieldDescriptor.CPPTYPE_MESSAGE,
|
||||
field_desc.cpp_type)
|
||||
test.assertFalse(field_desc.has_default_value)
|
||||
test.assertTrue(field_desc.is_extension)
|
||||
test.assertEqual(msg_desc, field_desc.extension_scope)
|
||||
test.assertEqual(msg_desc, field_desc.message_type)
|
||||
test.assertEqual(self.extended_type, field_desc.containing_type.name)
|
||||
|
||||
|
||||
class AddDescriptorTest(basetest.TestCase):
|
||||
|
||||
def _TestMessage(self, prefix):
|
||||
pool = descriptor_pool.DescriptorPool()
|
||||
pool.AddDescriptor(unittest_pb2.TestAllTypes.DESCRIPTOR)
|
||||
self.assertEquals(
|
||||
'protobuf_unittest.TestAllTypes',
|
||||
pool.FindMessageTypeByName(
|
||||
prefix + 'protobuf_unittest.TestAllTypes').full_name)
|
||||
|
||||
# AddDescriptor is not recursive.
|
||||
with self.assertRaises(KeyError):
|
||||
pool.FindMessageTypeByName(
|
||||
prefix + 'protobuf_unittest.TestAllTypes.NestedMessage')
|
||||
|
||||
pool.AddDescriptor(unittest_pb2.TestAllTypes.NestedMessage.DESCRIPTOR)
|
||||
self.assertEquals(
|
||||
'protobuf_unittest.TestAllTypes.NestedMessage',
|
||||
pool.FindMessageTypeByName(
|
||||
prefix + 'protobuf_unittest.TestAllTypes.NestedMessage').full_name)
|
||||
|
||||
# Files are implicitly also indexed when messages are added.
|
||||
self.assertEquals(
|
||||
'google/protobuf/unittest.proto',
|
||||
pool.FindFileByName(
|
||||
'google/protobuf/unittest.proto').name)
|
||||
|
||||
self.assertEquals(
|
||||
'google/protobuf/unittest.proto',
|
||||
pool.FindFileContainingSymbol(
|
||||
prefix + 'protobuf_unittest.TestAllTypes.NestedMessage').name)
|
||||
|
||||
def testMessage(self):
|
||||
self._TestMessage('')
|
||||
self._TestMessage('.')
|
||||
|
||||
def _TestEnum(self, prefix):
|
||||
pool = descriptor_pool.DescriptorPool()
|
||||
pool.AddEnumDescriptor(unittest_pb2.ForeignEnum.DESCRIPTOR)
|
||||
self.assertEquals(
|
||||
'protobuf_unittest.ForeignEnum',
|
||||
pool.FindEnumTypeByName(
|
||||
prefix + 'protobuf_unittest.ForeignEnum').full_name)
|
||||
|
||||
# AddEnumDescriptor is not recursive.
|
||||
with self.assertRaises(KeyError):
|
||||
pool.FindEnumTypeByName(
|
||||
prefix + 'protobuf_unittest.ForeignEnum.NestedEnum')
|
||||
|
||||
pool.AddEnumDescriptor(unittest_pb2.TestAllTypes.NestedEnum.DESCRIPTOR)
|
||||
self.assertEquals(
|
||||
'protobuf_unittest.TestAllTypes.NestedEnum',
|
||||
pool.FindEnumTypeByName(
|
||||
prefix + 'protobuf_unittest.TestAllTypes.NestedEnum').full_name)
|
||||
|
||||
# Files are implicitly also indexed when enums are added.
|
||||
self.assertEquals(
|
||||
'google/protobuf/unittest.proto',
|
||||
pool.FindFileByName(
|
||||
'google/protobuf/unittest.proto').name)
|
||||
|
||||
self.assertEquals(
|
||||
'google/protobuf/unittest.proto',
|
||||
pool.FindFileContainingSymbol(
|
||||
prefix + 'protobuf_unittest.TestAllTypes.NestedEnum').name)
|
||||
|
||||
def testEnum(self):
|
||||
self._TestEnum('')
|
||||
self._TestEnum('.')
|
||||
|
||||
def testFile(self):
|
||||
pool = descriptor_pool.DescriptorPool()
|
||||
pool.AddFileDescriptor(unittest_pb2.DESCRIPTOR)
|
||||
self.assertEquals(
|
||||
'google/protobuf/unittest.proto',
|
||||
pool.FindFileByName(
|
||||
'google/protobuf/unittest.proto').name)
|
||||
|
||||
# AddFileDescriptor is not recursive; messages and enums within files must
|
||||
# be explicitly registered.
|
||||
with self.assertRaises(KeyError):
|
||||
pool.FindFileContainingSymbol(
|
||||
'protobuf_unittest.TestAllTypes')
|
||||
|
||||
|
||||
TEST1_FILE = ProtoFile(
|
||||
'google/protobuf/internal/descriptor_pool_test1.proto',
|
||||
'google.protobuf.python.internal',
|
||||
{
|
||||
'DescriptorPoolTest1': MessageType({
|
||||
'NestedEnum': EnumType([('ALPHA', 1), ('BETA', 2)]),
|
||||
'NestedMessage': MessageType({
|
||||
'NestedEnum': EnumType([('EPSILON', 5), ('ZETA', 6)]),
|
||||
'DeepNestedMessage': MessageType({
|
||||
'NestedEnum': EnumType([('ETA', 7), ('THETA', 8)]),
|
||||
}, [
|
||||
('nested_enum', EnumField(1, 'NestedEnum', 'ETA')),
|
||||
('nested_field', StringField(2, 'theta')),
|
||||
]),
|
||||
}, [
|
||||
('nested_enum', EnumField(1, 'NestedEnum', 'ZETA')),
|
||||
('nested_field', StringField(2, 'beta')),
|
||||
('deep_nested_message', MessageField(3, 'DeepNestedMessage')),
|
||||
])
|
||||
}, [
|
||||
('nested_enum', EnumField(1, 'NestedEnum', 'BETA')),
|
||||
('nested_message', MessageField(2, 'NestedMessage')),
|
||||
], is_extendable=True),
|
||||
|
||||
'DescriptorPoolTest2': MessageType({
|
||||
'NestedEnum': EnumType([('GAMMA', 3), ('DELTA', 4)]),
|
||||
'NestedMessage': MessageType({
|
||||
'NestedEnum': EnumType([('IOTA', 9), ('KAPPA', 10)]),
|
||||
'DeepNestedMessage': MessageType({
|
||||
'NestedEnum': EnumType([('LAMBDA', 11), ('MU', 12)]),
|
||||
}, [
|
||||
('nested_enum', EnumField(1, 'NestedEnum', 'MU')),
|
||||
('nested_field', StringField(2, 'lambda')),
|
||||
]),
|
||||
}, [
|
||||
('nested_enum', EnumField(1, 'NestedEnum', 'IOTA')),
|
||||
('nested_field', StringField(2, 'delta')),
|
||||
('deep_nested_message', MessageField(3, 'DeepNestedMessage')),
|
||||
])
|
||||
}, [
|
||||
('nested_enum', EnumField(1, 'NestedEnum', 'GAMMA')),
|
||||
('nested_message', MessageField(2, 'NestedMessage')),
|
||||
]),
|
||||
})
|
||||
|
||||
|
||||
TEST2_FILE = ProtoFile(
|
||||
'google/protobuf/internal/descriptor_pool_test2.proto',
|
||||
'google.protobuf.python.internal',
|
||||
{
|
||||
'DescriptorPoolTest3': MessageType({
|
||||
'NestedEnum': EnumType([('NU', 13), ('XI', 14)]),
|
||||
'NestedMessage': MessageType({
|
||||
'NestedEnum': EnumType([('OMICRON', 15), ('PI', 16)]),
|
||||
'DeepNestedMessage': MessageType({
|
||||
'NestedEnum': EnumType([('RHO', 17), ('SIGMA', 18)]),
|
||||
}, [
|
||||
('nested_enum', EnumField(1, 'NestedEnum', 'RHO')),
|
||||
('nested_field', StringField(2, 'sigma')),
|
||||
]),
|
||||
}, [
|
||||
('nested_enum', EnumField(1, 'NestedEnum', 'PI')),
|
||||
('nested_field', StringField(2, 'nu')),
|
||||
('deep_nested_message', MessageField(3, 'DeepNestedMessage')),
|
||||
])
|
||||
}, [
|
||||
('nested_enum', EnumField(1, 'NestedEnum', 'XI')),
|
||||
('nested_message', MessageField(2, 'NestedMessage')),
|
||||
], extensions=[
|
||||
('descriptor_pool_test',
|
||||
ExtensionField(1001, 'DescriptorPoolTest1')),
|
||||
]),
|
||||
},
|
||||
dependencies=['google/protobuf/internal/descriptor_pool_test1.proto'])
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
basetest.main()
|
||||
@@ -0,0 +1,94 @@
|
||||
// Protocol Buffers - Google's data interchange format
|
||||
// Copyright 2008 Google Inc. All rights reserved.
|
||||
// https://developers.google.com/protocol-buffers/
|
||||
//
|
||||
// Redistribution and use in source and binary forms, with or without
|
||||
// modification, are permitted provided that the following conditions are
|
||||
// met:
|
||||
//
|
||||
// * Redistributions of source code must retain the above copyright
|
||||
// notice, this list of conditions and the following disclaimer.
|
||||
// * Redistributions in binary form must reproduce the above
|
||||
// copyright notice, this list of conditions and the following disclaimer
|
||||
// in the documentation and/or other materials provided with the
|
||||
// distribution.
|
||||
// * Neither the name of Google Inc. nor the names of its
|
||||
// contributors may be used to endorse or promote products derived from
|
||||
// this software without specific prior written permission.
|
||||
//
|
||||
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
||||
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
||||
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
||||
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
||||
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
||||
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
||||
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
||||
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
||||
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
||||
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
package google.protobuf.python.internal;
|
||||
|
||||
|
||||
message DescriptorPoolTest1 {
|
||||
extensions 1000 to max;
|
||||
|
||||
enum NestedEnum {
|
||||
ALPHA = 1;
|
||||
BETA = 2;
|
||||
}
|
||||
|
||||
optional NestedEnum nested_enum = 1 [default = BETA];
|
||||
|
||||
message NestedMessage {
|
||||
enum NestedEnum {
|
||||
EPSILON = 5;
|
||||
ZETA = 6;
|
||||
}
|
||||
optional NestedEnum nested_enum = 1 [default = ZETA];
|
||||
optional string nested_field = 2 [default = "beta"];
|
||||
optional DeepNestedMessage deep_nested_message = 3;
|
||||
|
||||
message DeepNestedMessage {
|
||||
enum NestedEnum {
|
||||
ETA = 7;
|
||||
THETA = 8;
|
||||
}
|
||||
optional NestedEnum nested_enum = 1 [default = ETA];
|
||||
optional string nested_field = 2 [default = "theta"];
|
||||
}
|
||||
}
|
||||
|
||||
optional NestedMessage nested_message = 2;
|
||||
}
|
||||
|
||||
message DescriptorPoolTest2 {
|
||||
enum NestedEnum {
|
||||
GAMMA = 3;
|
||||
DELTA = 4;
|
||||
}
|
||||
|
||||
optional NestedEnum nested_enum = 1 [default = GAMMA];
|
||||
|
||||
message NestedMessage {
|
||||
enum NestedEnum {
|
||||
IOTA = 9;
|
||||
KAPPA = 10;
|
||||
}
|
||||
optional NestedEnum nested_enum = 1 [default = IOTA];
|
||||
optional string nested_field = 2 [default = "delta"];
|
||||
optional DeepNestedMessage deep_nested_message = 3;
|
||||
|
||||
message DeepNestedMessage {
|
||||
enum NestedEnum {
|
||||
LAMBDA = 11;
|
||||
MU = 12;
|
||||
}
|
||||
optional NestedEnum nested_enum = 1 [default = MU];
|
||||
optional string nested_field = 2 [default = "lambda"];
|
||||
}
|
||||
}
|
||||
|
||||
optional NestedMessage nested_message = 2;
|
||||
}
|
||||
@@ -0,0 +1,70 @@
|
||||
// Protocol Buffers - Google's data interchange format
|
||||
// Copyright 2008 Google Inc. All rights reserved.
|
||||
// https://developers.google.com/protocol-buffers/
|
||||
//
|
||||
// Redistribution and use in source and binary forms, with or without
|
||||
// modification, are permitted provided that the following conditions are
|
||||
// met:
|
||||
//
|
||||
// * Redistributions of source code must retain the above copyright
|
||||
// notice, this list of conditions and the following disclaimer.
|
||||
// * Redistributions in binary form must reproduce the above
|
||||
// copyright notice, this list of conditions and the following disclaimer
|
||||
// in the documentation and/or other materials provided with the
|
||||
// distribution.
|
||||
// * Neither the name of Google Inc. nor the names of its
|
||||
// contributors may be used to endorse or promote products derived from
|
||||
// this software without specific prior written permission.
|
||||
//
|
||||
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
||||
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
||||
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
||||
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
||||
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
||||
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
||||
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
||||
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
||||
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
||||
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
package google.protobuf.python.internal;
|
||||
|
||||
import "google/protobuf/internal/descriptor_pool_test1.proto";
|
||||
|
||||
|
||||
message DescriptorPoolTest3 {
|
||||
|
||||
extend DescriptorPoolTest1 {
|
||||
optional DescriptorPoolTest3 descriptor_pool_test = 1001;
|
||||
}
|
||||
|
||||
enum NestedEnum {
|
||||
NU = 13;
|
||||
XI = 14;
|
||||
}
|
||||
|
||||
optional NestedEnum nested_enum = 1 [default = XI];
|
||||
|
||||
message NestedMessage {
|
||||
enum NestedEnum {
|
||||
OMICRON = 15;
|
||||
PI = 16;
|
||||
}
|
||||
optional NestedEnum nested_enum = 1 [default = PI];
|
||||
optional string nested_field = 2 [default = "nu"];
|
||||
optional DeepNestedMessage deep_nested_message = 3;
|
||||
|
||||
message DeepNestedMessage {
|
||||
enum NestedEnum {
|
||||
RHO = 17;
|
||||
SIGMA = 18;
|
||||
}
|
||||
optional NestedEnum nested_enum = 1 [default = RHO];
|
||||
optional string nested_field = 2 [default = "sigma"];
|
||||
}
|
||||
}
|
||||
|
||||
optional NestedMessage nested_message = 2;
|
||||
}
|
||||
|
||||
@@ -0,0 +1,54 @@
|
||||
#! /usr/bin/python
|
||||
#
|
||||
# Protocol Buffers - Google's data interchange format
|
||||
# Copyright 2008 Google Inc. All rights reserved.
|
||||
# https://developers.google.com/protocol-buffers/
|
||||
#
|
||||
# Redistribution and use in source and binary forms, with or without
|
||||
# modification, are permitted provided that the following conditions are
|
||||
# met:
|
||||
#
|
||||
# * Redistributions of source code must retain the above copyright
|
||||
# notice, this list of conditions and the following disclaimer.
|
||||
# * Redistributions in binary form must reproduce the above
|
||||
# copyright notice, this list of conditions and the following disclaimer
|
||||
# in the documentation and/or other materials provided with the
|
||||
# distribution.
|
||||
# * Neither the name of Google Inc. nor the names of its
|
||||
# contributors may be used to endorse or promote products derived from
|
||||
# this software without specific prior written permission.
|
||||
#
|
||||
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
||||
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
||||
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
||||
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
||||
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
||||
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
||||
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
||||
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
||||
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
||||
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
"""Unittest for descriptor.py for the pure Python implementation."""
|
||||
|
||||
import os
|
||||
os.environ['PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION'] = 'python'
|
||||
|
||||
# We must set the implementation version above before the google3 imports.
|
||||
# pylint: disable=g-import-not-at-top
|
||||
from google.apputils import basetest
|
||||
from google.protobuf.internal import api_implementation
|
||||
# Run all tests from the original module by putting them in our namespace.
|
||||
# pylint: disable=wildcard-import
|
||||
from google.protobuf.internal.descriptor_test import *
|
||||
|
||||
|
||||
class ConfirmPurePythonTest(basetest.TestCase):
|
||||
|
||||
def testImplementationSetting(self):
|
||||
self.assertEqual('python', api_implementation.Type())
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
basetest.main()
|
||||
@@ -0,0 +1,669 @@
|
||||
#! /usr/bin/python
|
||||
#
|
||||
# Protocol Buffers - Google's data interchange format
|
||||
# Copyright 2008 Google Inc. All rights reserved.
|
||||
# https://developers.google.com/protocol-buffers/
|
||||
#
|
||||
# Redistribution and use in source and binary forms, with or without
|
||||
# modification, are permitted provided that the following conditions are
|
||||
# met:
|
||||
#
|
||||
# * Redistributions of source code must retain the above copyright
|
||||
# notice, this list of conditions and the following disclaimer.
|
||||
# * Redistributions in binary form must reproduce the above
|
||||
# copyright notice, this list of conditions and the following disclaimer
|
||||
# in the documentation and/or other materials provided with the
|
||||
# distribution.
|
||||
# * Neither the name of Google Inc. nor the names of its
|
||||
# contributors may be used to endorse or promote products derived from
|
||||
# this software without specific prior written permission.
|
||||
#
|
||||
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
||||
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
||||
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
||||
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
||||
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
||||
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
||||
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
||||
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
||||
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
||||
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
"""Unittest for google.protobuf.internal.descriptor."""
|
||||
|
||||
__author__ = 'robinson@google.com (Will Robinson)'
|
||||
|
||||
from google.apputils import basetest
|
||||
from google.protobuf import unittest_custom_options_pb2
|
||||
from google.protobuf import unittest_import_pb2
|
||||
from google.protobuf import unittest_pb2
|
||||
from google.protobuf import descriptor_pb2
|
||||
from google.protobuf import descriptor
|
||||
from google.protobuf import text_format
|
||||
|
||||
|
||||
TEST_EMPTY_MESSAGE_DESCRIPTOR_ASCII = """
|
||||
name: 'TestEmptyMessage'
|
||||
"""
|
||||
|
||||
|
||||
class DescriptorTest(basetest.TestCase):
|
||||
|
||||
def setUp(self):
|
||||
self.my_file = descriptor.FileDescriptor(
|
||||
name='some/filename/some.proto',
|
||||
package='protobuf_unittest'
|
||||
)
|
||||
self.my_enum = descriptor.EnumDescriptor(
|
||||
name='ForeignEnum',
|
||||
full_name='protobuf_unittest.ForeignEnum',
|
||||
filename=None,
|
||||
file=self.my_file,
|
||||
values=[
|
||||
descriptor.EnumValueDescriptor(name='FOREIGN_FOO', index=0, number=4),
|
||||
descriptor.EnumValueDescriptor(name='FOREIGN_BAR', index=1, number=5),
|
||||
descriptor.EnumValueDescriptor(name='FOREIGN_BAZ', index=2, number=6),
|
||||
])
|
||||
self.my_message = descriptor.Descriptor(
|
||||
name='NestedMessage',
|
||||
full_name='protobuf_unittest.TestAllTypes.NestedMessage',
|
||||
filename=None,
|
||||
file=self.my_file,
|
||||
containing_type=None,
|
||||
fields=[
|
||||
descriptor.FieldDescriptor(
|
||||
name='bb',
|
||||
full_name='protobuf_unittest.TestAllTypes.NestedMessage.bb',
|
||||
index=0, number=1,
|
||||
type=5, cpp_type=1, label=1,
|
||||
has_default_value=False, default_value=0,
|
||||
message_type=None, enum_type=None, containing_type=None,
|
||||
is_extension=False, extension_scope=None),
|
||||
],
|
||||
nested_types=[],
|
||||
enum_types=[
|
||||
self.my_enum,
|
||||
],
|
||||
extensions=[])
|
||||
self.my_method = descriptor.MethodDescriptor(
|
||||
name='Bar',
|
||||
full_name='protobuf_unittest.TestService.Bar',
|
||||
index=0,
|
||||
containing_service=None,
|
||||
input_type=None,
|
||||
output_type=None)
|
||||
self.my_service = descriptor.ServiceDescriptor(
|
||||
name='TestServiceWithOptions',
|
||||
full_name='protobuf_unittest.TestServiceWithOptions',
|
||||
file=self.my_file,
|
||||
index=0,
|
||||
methods=[
|
||||
self.my_method
|
||||
])
|
||||
|
||||
def testEnumValueName(self):
|
||||
self.assertEqual(self.my_message.EnumValueName('ForeignEnum', 4),
|
||||
'FOREIGN_FOO')
|
||||
|
||||
self.assertEqual(
|
||||
self.my_message.enum_types_by_name[
|
||||
'ForeignEnum'].values_by_number[4].name,
|
||||
self.my_message.EnumValueName('ForeignEnum', 4))
|
||||
|
||||
def testEnumFixups(self):
|
||||
self.assertEqual(self.my_enum, self.my_enum.values[0].type)
|
||||
|
||||
def testContainingTypeFixups(self):
|
||||
self.assertEqual(self.my_message, self.my_message.fields[0].containing_type)
|
||||
self.assertEqual(self.my_message, self.my_enum.containing_type)
|
||||
|
||||
def testContainingServiceFixups(self):
|
||||
self.assertEqual(self.my_service, self.my_method.containing_service)
|
||||
|
||||
def testGetOptions(self):
|
||||
self.assertEqual(self.my_enum.GetOptions(),
|
||||
descriptor_pb2.EnumOptions())
|
||||
self.assertEqual(self.my_enum.values[0].GetOptions(),
|
||||
descriptor_pb2.EnumValueOptions())
|
||||
self.assertEqual(self.my_message.GetOptions(),
|
||||
descriptor_pb2.MessageOptions())
|
||||
self.assertEqual(self.my_message.fields[0].GetOptions(),
|
||||
descriptor_pb2.FieldOptions())
|
||||
self.assertEqual(self.my_method.GetOptions(),
|
||||
descriptor_pb2.MethodOptions())
|
||||
self.assertEqual(self.my_service.GetOptions(),
|
||||
descriptor_pb2.ServiceOptions())
|
||||
|
||||
def testSimpleCustomOptions(self):
|
||||
file_descriptor = unittest_custom_options_pb2.DESCRIPTOR
|
||||
message_descriptor =\
|
||||
unittest_custom_options_pb2.TestMessageWithCustomOptions.DESCRIPTOR
|
||||
field_descriptor = message_descriptor.fields_by_name["field1"]
|
||||
enum_descriptor = message_descriptor.enum_types_by_name["AnEnum"]
|
||||
enum_value_descriptor =\
|
||||
message_descriptor.enum_values_by_name["ANENUM_VAL2"]
|
||||
service_descriptor =\
|
||||
unittest_custom_options_pb2.TestServiceWithCustomOptions.DESCRIPTOR
|
||||
method_descriptor = service_descriptor.FindMethodByName("Foo")
|
||||
|
||||
file_options = file_descriptor.GetOptions()
|
||||
file_opt1 = unittest_custom_options_pb2.file_opt1
|
||||
self.assertEqual(9876543210, file_options.Extensions[file_opt1])
|
||||
message_options = message_descriptor.GetOptions()
|
||||
message_opt1 = unittest_custom_options_pb2.message_opt1
|
||||
self.assertEqual(-56, message_options.Extensions[message_opt1])
|
||||
field_options = field_descriptor.GetOptions()
|
||||
field_opt1 = unittest_custom_options_pb2.field_opt1
|
||||
self.assertEqual(8765432109, field_options.Extensions[field_opt1])
|
||||
field_opt2 = unittest_custom_options_pb2.field_opt2
|
||||
self.assertEqual(42, field_options.Extensions[field_opt2])
|
||||
enum_options = enum_descriptor.GetOptions()
|
||||
enum_opt1 = unittest_custom_options_pb2.enum_opt1
|
||||
self.assertEqual(-789, enum_options.Extensions[enum_opt1])
|
||||
enum_value_options = enum_value_descriptor.GetOptions()
|
||||
enum_value_opt1 = unittest_custom_options_pb2.enum_value_opt1
|
||||
self.assertEqual(123, enum_value_options.Extensions[enum_value_opt1])
|
||||
|
||||
service_options = service_descriptor.GetOptions()
|
||||
service_opt1 = unittest_custom_options_pb2.service_opt1
|
||||
self.assertEqual(-9876543210, service_options.Extensions[service_opt1])
|
||||
method_options = method_descriptor.GetOptions()
|
||||
method_opt1 = unittest_custom_options_pb2.method_opt1
|
||||
self.assertEqual(unittest_custom_options_pb2.METHODOPT1_VAL2,
|
||||
method_options.Extensions[method_opt1])
|
||||
|
||||
def testDifferentCustomOptionTypes(self):
|
||||
kint32min = -2**31
|
||||
kint64min = -2**63
|
||||
kint32max = 2**31 - 1
|
||||
kint64max = 2**63 - 1
|
||||
kuint32max = 2**32 - 1
|
||||
kuint64max = 2**64 - 1
|
||||
|
||||
message_descriptor =\
|
||||
unittest_custom_options_pb2.CustomOptionMinIntegerValues.DESCRIPTOR
|
||||
message_options = message_descriptor.GetOptions()
|
||||
self.assertEqual(False, message_options.Extensions[
|
||||
unittest_custom_options_pb2.bool_opt])
|
||||
self.assertEqual(kint32min, message_options.Extensions[
|
||||
unittest_custom_options_pb2.int32_opt])
|
||||
self.assertEqual(kint64min, message_options.Extensions[
|
||||
unittest_custom_options_pb2.int64_opt])
|
||||
self.assertEqual(0, message_options.Extensions[
|
||||
unittest_custom_options_pb2.uint32_opt])
|
||||
self.assertEqual(0, message_options.Extensions[
|
||||
unittest_custom_options_pb2.uint64_opt])
|
||||
self.assertEqual(kint32min, message_options.Extensions[
|
||||
unittest_custom_options_pb2.sint32_opt])
|
||||
self.assertEqual(kint64min, message_options.Extensions[
|
||||
unittest_custom_options_pb2.sint64_opt])
|
||||
self.assertEqual(0, message_options.Extensions[
|
||||
unittest_custom_options_pb2.fixed32_opt])
|
||||
self.assertEqual(0, message_options.Extensions[
|
||||
unittest_custom_options_pb2.fixed64_opt])
|
||||
self.assertEqual(kint32min, message_options.Extensions[
|
||||
unittest_custom_options_pb2.sfixed32_opt])
|
||||
self.assertEqual(kint64min, message_options.Extensions[
|
||||
unittest_custom_options_pb2.sfixed64_opt])
|
||||
|
||||
message_descriptor =\
|
||||
unittest_custom_options_pb2.CustomOptionMaxIntegerValues.DESCRIPTOR
|
||||
message_options = message_descriptor.GetOptions()
|
||||
self.assertEqual(True, message_options.Extensions[
|
||||
unittest_custom_options_pb2.bool_opt])
|
||||
self.assertEqual(kint32max, message_options.Extensions[
|
||||
unittest_custom_options_pb2.int32_opt])
|
||||
self.assertEqual(kint64max, message_options.Extensions[
|
||||
unittest_custom_options_pb2.int64_opt])
|
||||
self.assertEqual(kuint32max, message_options.Extensions[
|
||||
unittest_custom_options_pb2.uint32_opt])
|
||||
self.assertEqual(kuint64max, message_options.Extensions[
|
||||
unittest_custom_options_pb2.uint64_opt])
|
||||
self.assertEqual(kint32max, message_options.Extensions[
|
||||
unittest_custom_options_pb2.sint32_opt])
|
||||
self.assertEqual(kint64max, message_options.Extensions[
|
||||
unittest_custom_options_pb2.sint64_opt])
|
||||
self.assertEqual(kuint32max, message_options.Extensions[
|
||||
unittest_custom_options_pb2.fixed32_opt])
|
||||
self.assertEqual(kuint64max, message_options.Extensions[
|
||||
unittest_custom_options_pb2.fixed64_opt])
|
||||
self.assertEqual(kint32max, message_options.Extensions[
|
||||
unittest_custom_options_pb2.sfixed32_opt])
|
||||
self.assertEqual(kint64max, message_options.Extensions[
|
||||
unittest_custom_options_pb2.sfixed64_opt])
|
||||
|
||||
message_descriptor =\
|
||||
unittest_custom_options_pb2.CustomOptionOtherValues.DESCRIPTOR
|
||||
message_options = message_descriptor.GetOptions()
|
||||
self.assertEqual(-100, message_options.Extensions[
|
||||
unittest_custom_options_pb2.int32_opt])
|
||||
self.assertAlmostEqual(12.3456789, message_options.Extensions[
|
||||
unittest_custom_options_pb2.float_opt], 6)
|
||||
self.assertAlmostEqual(1.234567890123456789, message_options.Extensions[
|
||||
unittest_custom_options_pb2.double_opt])
|
||||
self.assertEqual("Hello, \"World\"", message_options.Extensions[
|
||||
unittest_custom_options_pb2.string_opt])
|
||||
self.assertEqual(b"Hello\0World", message_options.Extensions[
|
||||
unittest_custom_options_pb2.bytes_opt])
|
||||
dummy_enum = unittest_custom_options_pb2.DummyMessageContainingEnum
|
||||
self.assertEqual(
|
||||
dummy_enum.TEST_OPTION_ENUM_TYPE2,
|
||||
message_options.Extensions[unittest_custom_options_pb2.enum_opt])
|
||||
|
||||
message_descriptor =\
|
||||
unittest_custom_options_pb2.SettingRealsFromPositiveInts.DESCRIPTOR
|
||||
message_options = message_descriptor.GetOptions()
|
||||
self.assertAlmostEqual(12, message_options.Extensions[
|
||||
unittest_custom_options_pb2.float_opt], 6)
|
||||
self.assertAlmostEqual(154, message_options.Extensions[
|
||||
unittest_custom_options_pb2.double_opt])
|
||||
|
||||
message_descriptor =\
|
||||
unittest_custom_options_pb2.SettingRealsFromNegativeInts.DESCRIPTOR
|
||||
message_options = message_descriptor.GetOptions()
|
||||
self.assertAlmostEqual(-12, message_options.Extensions[
|
||||
unittest_custom_options_pb2.float_opt], 6)
|
||||
self.assertAlmostEqual(-154, message_options.Extensions[
|
||||
unittest_custom_options_pb2.double_opt])
|
||||
|
||||
def testComplexExtensionOptions(self):
|
||||
descriptor =\
|
||||
unittest_custom_options_pb2.VariousComplexOptions.DESCRIPTOR
|
||||
options = descriptor.GetOptions()
|
||||
self.assertEqual(42, options.Extensions[
|
||||
unittest_custom_options_pb2.complex_opt1].foo)
|
||||
self.assertEqual(324, options.Extensions[
|
||||
unittest_custom_options_pb2.complex_opt1].Extensions[
|
||||
unittest_custom_options_pb2.quux])
|
||||
self.assertEqual(876, options.Extensions[
|
||||
unittest_custom_options_pb2.complex_opt1].Extensions[
|
||||
unittest_custom_options_pb2.corge].qux)
|
||||
self.assertEqual(987, options.Extensions[
|
||||
unittest_custom_options_pb2.complex_opt2].baz)
|
||||
self.assertEqual(654, options.Extensions[
|
||||
unittest_custom_options_pb2.complex_opt2].Extensions[
|
||||
unittest_custom_options_pb2.grault])
|
||||
self.assertEqual(743, options.Extensions[
|
||||
unittest_custom_options_pb2.complex_opt2].bar.foo)
|
||||
self.assertEqual(1999, options.Extensions[
|
||||
unittest_custom_options_pb2.complex_opt2].bar.Extensions[
|
||||
unittest_custom_options_pb2.quux])
|
||||
self.assertEqual(2008, options.Extensions[
|
||||
unittest_custom_options_pb2.complex_opt2].bar.Extensions[
|
||||
unittest_custom_options_pb2.corge].qux)
|
||||
self.assertEqual(741, options.Extensions[
|
||||
unittest_custom_options_pb2.complex_opt2].Extensions[
|
||||
unittest_custom_options_pb2.garply].foo)
|
||||
self.assertEqual(1998, options.Extensions[
|
||||
unittest_custom_options_pb2.complex_opt2].Extensions[
|
||||
unittest_custom_options_pb2.garply].Extensions[
|
||||
unittest_custom_options_pb2.quux])
|
||||
self.assertEqual(2121, options.Extensions[
|
||||
unittest_custom_options_pb2.complex_opt2].Extensions[
|
||||
unittest_custom_options_pb2.garply].Extensions[
|
||||
unittest_custom_options_pb2.corge].qux)
|
||||
self.assertEqual(1971, options.Extensions[
|
||||
unittest_custom_options_pb2.ComplexOptionType2
|
||||
.ComplexOptionType4.complex_opt4].waldo)
|
||||
self.assertEqual(321, options.Extensions[
|
||||
unittest_custom_options_pb2.complex_opt2].fred.waldo)
|
||||
self.assertEqual(9, options.Extensions[
|
||||
unittest_custom_options_pb2.complex_opt3].qux)
|
||||
self.assertEqual(22, options.Extensions[
|
||||
unittest_custom_options_pb2.complex_opt3].complexoptiontype5.plugh)
|
||||
self.assertEqual(24, options.Extensions[
|
||||
unittest_custom_options_pb2.complexopt6].xyzzy)
|
||||
|
||||
# Check that aggregate options were parsed and saved correctly in
|
||||
# the appropriate descriptors.
|
||||
def testAggregateOptions(self):
|
||||
file_descriptor = unittest_custom_options_pb2.DESCRIPTOR
|
||||
message_descriptor =\
|
||||
unittest_custom_options_pb2.AggregateMessage.DESCRIPTOR
|
||||
field_descriptor = message_descriptor.fields_by_name["fieldname"]
|
||||
enum_descriptor = unittest_custom_options_pb2.AggregateEnum.DESCRIPTOR
|
||||
enum_value_descriptor = enum_descriptor.values_by_name["VALUE"]
|
||||
service_descriptor =\
|
||||
unittest_custom_options_pb2.AggregateService.DESCRIPTOR
|
||||
method_descriptor = service_descriptor.FindMethodByName("Method")
|
||||
|
||||
# Tests for the different types of data embedded in fileopt
|
||||
file_options = file_descriptor.GetOptions().Extensions[
|
||||
unittest_custom_options_pb2.fileopt]
|
||||
self.assertEqual(100, file_options.i)
|
||||
self.assertEqual("FileAnnotation", file_options.s)
|
||||
self.assertEqual("NestedFileAnnotation", file_options.sub.s)
|
||||
self.assertEqual("FileExtensionAnnotation", file_options.file.Extensions[
|
||||
unittest_custom_options_pb2.fileopt].s)
|
||||
self.assertEqual("EmbeddedMessageSetElement", file_options.mset.Extensions[
|
||||
unittest_custom_options_pb2.AggregateMessageSetElement
|
||||
.message_set_extension].s)
|
||||
|
||||
# Simple tests for all the other types of annotations
|
||||
self.assertEqual(
|
||||
"MessageAnnotation",
|
||||
message_descriptor.GetOptions().Extensions[
|
||||
unittest_custom_options_pb2.msgopt].s)
|
||||
self.assertEqual(
|
||||
"FieldAnnotation",
|
||||
field_descriptor.GetOptions().Extensions[
|
||||
unittest_custom_options_pb2.fieldopt].s)
|
||||
self.assertEqual(
|
||||
"EnumAnnotation",
|
||||
enum_descriptor.GetOptions().Extensions[
|
||||
unittest_custom_options_pb2.enumopt].s)
|
||||
self.assertEqual(
|
||||
"EnumValueAnnotation",
|
||||
enum_value_descriptor.GetOptions().Extensions[
|
||||
unittest_custom_options_pb2.enumvalopt].s)
|
||||
self.assertEqual(
|
||||
"ServiceAnnotation",
|
||||
service_descriptor.GetOptions().Extensions[
|
||||
unittest_custom_options_pb2.serviceopt].s)
|
||||
self.assertEqual(
|
||||
"MethodAnnotation",
|
||||
method_descriptor.GetOptions().Extensions[
|
||||
unittest_custom_options_pb2.methodopt].s)
|
||||
|
||||
def testNestedOptions(self):
|
||||
nested_message =\
|
||||
unittest_custom_options_pb2.NestedOptionType.NestedMessage.DESCRIPTOR
|
||||
self.assertEqual(1001, nested_message.GetOptions().Extensions[
|
||||
unittest_custom_options_pb2.message_opt1])
|
||||
nested_field = nested_message.fields_by_name["nested_field"]
|
||||
self.assertEqual(1002, nested_field.GetOptions().Extensions[
|
||||
unittest_custom_options_pb2.field_opt1])
|
||||
outer_message =\
|
||||
unittest_custom_options_pb2.NestedOptionType.DESCRIPTOR
|
||||
nested_enum = outer_message.enum_types_by_name["NestedEnum"]
|
||||
self.assertEqual(1003, nested_enum.GetOptions().Extensions[
|
||||
unittest_custom_options_pb2.enum_opt1])
|
||||
nested_enum_value = outer_message.enum_values_by_name["NESTED_ENUM_VALUE"]
|
||||
self.assertEqual(1004, nested_enum_value.GetOptions().Extensions[
|
||||
unittest_custom_options_pb2.enum_value_opt1])
|
||||
nested_extension = outer_message.extensions_by_name["nested_extension"]
|
||||
self.assertEqual(1005, nested_extension.GetOptions().Extensions[
|
||||
unittest_custom_options_pb2.field_opt2])
|
||||
|
||||
def testFileDescriptorReferences(self):
|
||||
self.assertEqual(self.my_enum.file, self.my_file)
|
||||
self.assertEqual(self.my_message.file, self.my_file)
|
||||
|
||||
def testFileDescriptor(self):
|
||||
self.assertEqual(self.my_file.name, 'some/filename/some.proto')
|
||||
self.assertEqual(self.my_file.package, 'protobuf_unittest')
|
||||
|
||||
|
||||
class DescriptorCopyToProtoTest(basetest.TestCase):
|
||||
"""Tests for CopyTo functions of Descriptor."""
|
||||
|
||||
def _AssertProtoEqual(self, actual_proto, expected_class, expected_ascii):
|
||||
expected_proto = expected_class()
|
||||
text_format.Merge(expected_ascii, expected_proto)
|
||||
|
||||
self.assertEqual(
|
||||
actual_proto, expected_proto,
|
||||
'Not equal,\nActual:\n%s\nExpected:\n%s\n'
|
||||
% (str(actual_proto), str(expected_proto)))
|
||||
|
||||
def _InternalTestCopyToProto(self, desc, expected_proto_class,
|
||||
expected_proto_ascii):
|
||||
actual = expected_proto_class()
|
||||
desc.CopyToProto(actual)
|
||||
self._AssertProtoEqual(
|
||||
actual, expected_proto_class, expected_proto_ascii)
|
||||
|
||||
def testCopyToProto_EmptyMessage(self):
|
||||
self._InternalTestCopyToProto(
|
||||
unittest_pb2.TestEmptyMessage.DESCRIPTOR,
|
||||
descriptor_pb2.DescriptorProto,
|
||||
TEST_EMPTY_MESSAGE_DESCRIPTOR_ASCII)
|
||||
|
||||
def testCopyToProto_NestedMessage(self):
|
||||
TEST_NESTED_MESSAGE_ASCII = """
|
||||
name: 'NestedMessage'
|
||||
field: <
|
||||
name: 'bb'
|
||||
number: 1
|
||||
label: 1 # Optional
|
||||
type: 5 # TYPE_INT32
|
||||
>
|
||||
"""
|
||||
|
||||
self._InternalTestCopyToProto(
|
||||
unittest_pb2.TestAllTypes.NestedMessage.DESCRIPTOR,
|
||||
descriptor_pb2.DescriptorProto,
|
||||
TEST_NESTED_MESSAGE_ASCII)
|
||||
|
||||
def testCopyToProto_ForeignNestedMessage(self):
|
||||
TEST_FOREIGN_NESTED_ASCII = """
|
||||
name: 'TestForeignNested'
|
||||
field: <
|
||||
name: 'foreign_nested'
|
||||
number: 1
|
||||
label: 1 # Optional
|
||||
type: 11 # TYPE_MESSAGE
|
||||
type_name: '.protobuf_unittest.TestAllTypes.NestedMessage'
|
||||
>
|
||||
"""
|
||||
|
||||
self._InternalTestCopyToProto(
|
||||
unittest_pb2.TestForeignNested.DESCRIPTOR,
|
||||
descriptor_pb2.DescriptorProto,
|
||||
TEST_FOREIGN_NESTED_ASCII)
|
||||
|
||||
def testCopyToProto_ForeignEnum(self):
|
||||
TEST_FOREIGN_ENUM_ASCII = """
|
||||
name: 'ForeignEnum'
|
||||
value: <
|
||||
name: 'FOREIGN_FOO'
|
||||
number: 4
|
||||
>
|
||||
value: <
|
||||
name: 'FOREIGN_BAR'
|
||||
number: 5
|
||||
>
|
||||
value: <
|
||||
name: 'FOREIGN_BAZ'
|
||||
number: 6
|
||||
>
|
||||
"""
|
||||
|
||||
self._InternalTestCopyToProto(
|
||||
unittest_pb2._FOREIGNENUM,
|
||||
descriptor_pb2.EnumDescriptorProto,
|
||||
TEST_FOREIGN_ENUM_ASCII)
|
||||
|
||||
def testCopyToProto_Options(self):
|
||||
TEST_DEPRECATED_FIELDS_ASCII = """
|
||||
name: 'TestDeprecatedFields'
|
||||
field: <
|
||||
name: 'deprecated_int32'
|
||||
number: 1
|
||||
label: 1 # Optional
|
||||
type: 5 # TYPE_INT32
|
||||
options: <
|
||||
deprecated: true
|
||||
>
|
||||
>
|
||||
"""
|
||||
|
||||
self._InternalTestCopyToProto(
|
||||
unittest_pb2.TestDeprecatedFields.DESCRIPTOR,
|
||||
descriptor_pb2.DescriptorProto,
|
||||
TEST_DEPRECATED_FIELDS_ASCII)
|
||||
|
||||
def testCopyToProto_AllExtensions(self):
|
||||
TEST_EMPTY_MESSAGE_WITH_EXTENSIONS_ASCII = """
|
||||
name: 'TestEmptyMessageWithExtensions'
|
||||
extension_range: <
|
||||
start: 1
|
||||
end: 536870912
|
||||
>
|
||||
"""
|
||||
|
||||
self._InternalTestCopyToProto(
|
||||
unittest_pb2.TestEmptyMessageWithExtensions.DESCRIPTOR,
|
||||
descriptor_pb2.DescriptorProto,
|
||||
TEST_EMPTY_MESSAGE_WITH_EXTENSIONS_ASCII)
|
||||
|
||||
def testCopyToProto_SeveralExtensions(self):
|
||||
TEST_MESSAGE_WITH_SEVERAL_EXTENSIONS_ASCII = """
|
||||
name: 'TestMultipleExtensionRanges'
|
||||
extension_range: <
|
||||
start: 42
|
||||
end: 43
|
||||
>
|
||||
extension_range: <
|
||||
start: 4143
|
||||
end: 4244
|
||||
>
|
||||
extension_range: <
|
||||
start: 65536
|
||||
end: 536870912
|
||||
>
|
||||
"""
|
||||
|
||||
self._InternalTestCopyToProto(
|
||||
unittest_pb2.TestMultipleExtensionRanges.DESCRIPTOR,
|
||||
descriptor_pb2.DescriptorProto,
|
||||
TEST_MESSAGE_WITH_SEVERAL_EXTENSIONS_ASCII)
|
||||
|
||||
# Disable this test so we can make changes to the proto file.
|
||||
# TODO(xiaofeng): Enable this test after cl/55530659 is submitted.
|
||||
#
|
||||
# def testCopyToProto_FileDescriptor(self):
|
||||
# UNITTEST_IMPORT_FILE_DESCRIPTOR_ASCII = ("""
|
||||
# name: 'google/protobuf/unittest_import.proto'
|
||||
# package: 'protobuf_unittest_import'
|
||||
# dependency: 'google/protobuf/unittest_import_public.proto'
|
||||
# message_type: <
|
||||
# name: 'ImportMessage'
|
||||
# field: <
|
||||
# name: 'd'
|
||||
# number: 1
|
||||
# label: 1 # Optional
|
||||
# type: 5 # TYPE_INT32
|
||||
# >
|
||||
# >
|
||||
# """ +
|
||||
# """enum_type: <
|
||||
# name: 'ImportEnum'
|
||||
# value: <
|
||||
# name: 'IMPORT_FOO'
|
||||
# number: 7
|
||||
# >
|
||||
# value: <
|
||||
# name: 'IMPORT_BAR'
|
||||
# number: 8
|
||||
# >
|
||||
# value: <
|
||||
# name: 'IMPORT_BAZ'
|
||||
# number: 9
|
||||
# >
|
||||
# >
|
||||
# options: <
|
||||
# java_package: 'com.google.protobuf.test'
|
||||
# optimize_for: 1 # SPEED
|
||||
# >
|
||||
# public_dependency: 0
|
||||
# """)
|
||||
# self._InternalTestCopyToProto(
|
||||
# unittest_import_pb2.DESCRIPTOR,
|
||||
# descriptor_pb2.FileDescriptorProto,
|
||||
# UNITTEST_IMPORT_FILE_DESCRIPTOR_ASCII)
|
||||
|
||||
def testCopyToProto_ServiceDescriptor(self):
|
||||
TEST_SERVICE_ASCII = """
|
||||
name: 'TestService'
|
||||
method: <
|
||||
name: 'Foo'
|
||||
input_type: '.protobuf_unittest.FooRequest'
|
||||
output_type: '.protobuf_unittest.FooResponse'
|
||||
>
|
||||
method: <
|
||||
name: 'Bar'
|
||||
input_type: '.protobuf_unittest.BarRequest'
|
||||
output_type: '.protobuf_unittest.BarResponse'
|
||||
>
|
||||
"""
|
||||
self._InternalTestCopyToProto(
|
||||
unittest_pb2.TestService.DESCRIPTOR,
|
||||
descriptor_pb2.ServiceDescriptorProto,
|
||||
TEST_SERVICE_ASCII)
|
||||
|
||||
|
||||
class MakeDescriptorTest(basetest.TestCase):
|
||||
|
||||
def testMakeDescriptorWithNestedFields(self):
|
||||
file_descriptor_proto = descriptor_pb2.FileDescriptorProto()
|
||||
file_descriptor_proto.name = 'Foo2'
|
||||
message_type = file_descriptor_proto.message_type.add()
|
||||
message_type.name = file_descriptor_proto.name
|
||||
nested_type = message_type.nested_type.add()
|
||||
nested_type.name = 'Sub'
|
||||
enum_type = nested_type.enum_type.add()
|
||||
enum_type.name = 'FOO'
|
||||
enum_type_val = enum_type.value.add()
|
||||
enum_type_val.name = 'BAR'
|
||||
enum_type_val.number = 3
|
||||
field = message_type.field.add()
|
||||
field.number = 1
|
||||
field.name = 'uint64_field'
|
||||
field.label = descriptor.FieldDescriptor.LABEL_REQUIRED
|
||||
field.type = descriptor.FieldDescriptor.TYPE_UINT64
|
||||
field = message_type.field.add()
|
||||
field.number = 2
|
||||
field.name = 'nested_message_field'
|
||||
field.label = descriptor.FieldDescriptor.LABEL_REQUIRED
|
||||
field.type = descriptor.FieldDescriptor.TYPE_MESSAGE
|
||||
field.type_name = 'Sub'
|
||||
enum_field = nested_type.field.add()
|
||||
enum_field.number = 2
|
||||
enum_field.name = 'bar_field'
|
||||
enum_field.label = descriptor.FieldDescriptor.LABEL_REQUIRED
|
||||
enum_field.type = descriptor.FieldDescriptor.TYPE_ENUM
|
||||
enum_field.type_name = 'Foo2.Sub.FOO'
|
||||
|
||||
result = descriptor.MakeDescriptor(message_type)
|
||||
self.assertEqual(result.fields[0].cpp_type,
|
||||
descriptor.FieldDescriptor.CPPTYPE_UINT64)
|
||||
self.assertEqual(result.fields[1].cpp_type,
|
||||
descriptor.FieldDescriptor.CPPTYPE_MESSAGE)
|
||||
self.assertEqual(result.fields[1].message_type.containing_type,
|
||||
result)
|
||||
self.assertEqual(result.nested_types[0].fields[0].full_name,
|
||||
'Foo2.Sub.bar_field')
|
||||
self.assertEqual(result.nested_types[0].fields[0].enum_type,
|
||||
result.nested_types[0].enum_types[0])
|
||||
|
||||
def testMakeDescriptorWithUnsignedIntField(self):
|
||||
file_descriptor_proto = descriptor_pb2.FileDescriptorProto()
|
||||
file_descriptor_proto.name = 'Foo'
|
||||
message_type = file_descriptor_proto.message_type.add()
|
||||
message_type.name = file_descriptor_proto.name
|
||||
enum_type = message_type.enum_type.add()
|
||||
enum_type.name = 'FOO'
|
||||
enum_type_val = enum_type.value.add()
|
||||
enum_type_val.name = 'BAR'
|
||||
enum_type_val.number = 3
|
||||
field = message_type.field.add()
|
||||
field.number = 1
|
||||
field.name = 'uint64_field'
|
||||
field.label = descriptor.FieldDescriptor.LABEL_REQUIRED
|
||||
field.type = descriptor.FieldDescriptor.TYPE_UINT64
|
||||
enum_field = message_type.field.add()
|
||||
enum_field.number = 2
|
||||
enum_field.name = 'bar_field'
|
||||
enum_field.label = descriptor.FieldDescriptor.LABEL_REQUIRED
|
||||
enum_field.type = descriptor.FieldDescriptor.TYPE_ENUM
|
||||
enum_field.type_name = 'Foo.FOO'
|
||||
|
||||
result = descriptor.MakeDescriptor(message_type)
|
||||
self.assertEqual(result.fields[0].cpp_type,
|
||||
descriptor.FieldDescriptor.CPPTYPE_UINT64)
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
basetest.main()
|
||||
@@ -0,0 +1,788 @@
|
||||
# Protocol Buffers - Google's data interchange format
|
||||
# Copyright 2008 Google Inc. All rights reserved.
|
||||
# https://developers.google.com/protocol-buffers/
|
||||
#
|
||||
# Redistribution and use in source and binary forms, with or without
|
||||
# modification, are permitted provided that the following conditions are
|
||||
# met:
|
||||
#
|
||||
# * Redistributions of source code must retain the above copyright
|
||||
# notice, this list of conditions and the following disclaimer.
|
||||
# * Redistributions in binary form must reproduce the above
|
||||
# copyright notice, this list of conditions and the following disclaimer
|
||||
# in the documentation and/or other materials provided with the
|
||||
# distribution.
|
||||
# * Neither the name of Google Inc. nor the names of its
|
||||
# contributors may be used to endorse or promote products derived from
|
||||
# this software without specific prior written permission.
|
||||
#
|
||||
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
||||
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
||||
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
||||
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
||||
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
||||
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
||||
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
||||
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
||||
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
||||
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
#PY25 compatible for GAE.
|
||||
#
|
||||
# Copyright 2009 Google Inc. All Rights Reserved.
|
||||
|
||||
"""Code for encoding protocol message primitives.
|
||||
|
||||
Contains the logic for encoding every logical protocol field type
|
||||
into one of the 5 physical wire types.
|
||||
|
||||
This code is designed to push the Python interpreter's performance to the
|
||||
limits.
|
||||
|
||||
The basic idea is that at startup time, for every field (i.e. every
|
||||
FieldDescriptor) we construct two functions: a "sizer" and an "encoder". The
|
||||
sizer takes a value of this field's type and computes its byte size. The
|
||||
encoder takes a writer function and a value. It encodes the value into byte
|
||||
strings and invokes the writer function to write those strings. Typically the
|
||||
writer function is the write() method of a cStringIO.
|
||||
|
||||
We try to do as much work as possible when constructing the writer and the
|
||||
sizer rather than when calling them. In particular:
|
||||
* We copy any needed global functions to local variables, so that we do not need
|
||||
to do costly global table lookups at runtime.
|
||||
* Similarly, we try to do any attribute lookups at startup time if possible.
|
||||
* Every field's tag is encoded to bytes at startup, since it can't change at
|
||||
runtime.
|
||||
* Whatever component of the field size we can compute at startup, we do.
|
||||
* We *avoid* sharing code if doing so would make the code slower and not sharing
|
||||
does not burden us too much. For example, encoders for repeated fields do
|
||||
not just call the encoders for singular fields in a loop because this would
|
||||
add an extra function call overhead for every loop iteration; instead, we
|
||||
manually inline the single-value encoder into the loop.
|
||||
* If a Python function lacks a return statement, Python actually generates
|
||||
instructions to pop the result of the last statement off the stack, push
|
||||
None onto the stack, and then return that. If we really don't care what
|
||||
value is returned, then we can save two instructions by returning the
|
||||
result of the last statement. It looks funny but it helps.
|
||||
* We assume that type and bounds checking has happened at a higher level.
|
||||
"""
|
||||
|
||||
__author__ = 'kenton@google.com (Kenton Varda)'
|
||||
|
||||
import struct
|
||||
import sys ##PY25
|
||||
_PY2 = sys.version_info[0] < 3 ##PY25
|
||||
from google.protobuf.internal import wire_format
|
||||
|
||||
|
||||
# This will overflow and thus become IEEE-754 "infinity". We would use
|
||||
# "float('inf')" but it doesn't work on Windows pre-Python-2.6.
|
||||
_POS_INF = 1e10000
|
||||
_NEG_INF = -_POS_INF
|
||||
|
||||
|
||||
def _VarintSize(value):
|
||||
"""Compute the size of a varint value."""
|
||||
if value <= 0x7f: return 1
|
||||
if value <= 0x3fff: return 2
|
||||
if value <= 0x1fffff: return 3
|
||||
if value <= 0xfffffff: return 4
|
||||
if value <= 0x7ffffffff: return 5
|
||||
if value <= 0x3ffffffffff: return 6
|
||||
if value <= 0x1ffffffffffff: return 7
|
||||
if value <= 0xffffffffffffff: return 8
|
||||
if value <= 0x7fffffffffffffff: return 9
|
||||
return 10
|
||||
|
||||
|
||||
def _SignedVarintSize(value):
|
||||
"""Compute the size of a signed varint value."""
|
||||
if value < 0: return 10
|
||||
if value <= 0x7f: return 1
|
||||
if value <= 0x3fff: return 2
|
||||
if value <= 0x1fffff: return 3
|
||||
if value <= 0xfffffff: return 4
|
||||
if value <= 0x7ffffffff: return 5
|
||||
if value <= 0x3ffffffffff: return 6
|
||||
if value <= 0x1ffffffffffff: return 7
|
||||
if value <= 0xffffffffffffff: return 8
|
||||
if value <= 0x7fffffffffffffff: return 9
|
||||
return 10
|
||||
|
||||
|
||||
def _TagSize(field_number):
|
||||
"""Returns the number of bytes required to serialize a tag with this field
|
||||
number."""
|
||||
# Just pass in type 0, since the type won't affect the tag+type size.
|
||||
return _VarintSize(wire_format.PackTag(field_number, 0))
|
||||
|
||||
|
||||
# --------------------------------------------------------------------
|
||||
# In this section we define some generic sizers. Each of these functions
|
||||
# takes parameters specific to a particular field type, e.g. int32 or fixed64.
|
||||
# It returns another function which in turn takes parameters specific to a
|
||||
# particular field, e.g. the field number and whether it is repeated or packed.
|
||||
# Look at the next section to see how these are used.
|
||||
|
||||
|
||||
def _SimpleSizer(compute_value_size):
|
||||
"""A sizer which uses the function compute_value_size to compute the size of
|
||||
each value. Typically compute_value_size is _VarintSize."""
|
||||
|
||||
def SpecificSizer(field_number, is_repeated, is_packed):
|
||||
tag_size = _TagSize(field_number)
|
||||
if is_packed:
|
||||
local_VarintSize = _VarintSize
|
||||
def PackedFieldSize(value):
|
||||
result = 0
|
||||
for element in value:
|
||||
result += compute_value_size(element)
|
||||
return result + local_VarintSize(result) + tag_size
|
||||
return PackedFieldSize
|
||||
elif is_repeated:
|
||||
def RepeatedFieldSize(value):
|
||||
result = tag_size * len(value)
|
||||
for element in value:
|
||||
result += compute_value_size(element)
|
||||
return result
|
||||
return RepeatedFieldSize
|
||||
else:
|
||||
def FieldSize(value):
|
||||
return tag_size + compute_value_size(value)
|
||||
return FieldSize
|
||||
|
||||
return SpecificSizer
|
||||
|
||||
|
||||
def _ModifiedSizer(compute_value_size, modify_value):
|
||||
"""Like SimpleSizer, but modify_value is invoked on each value before it is
|
||||
passed to compute_value_size. modify_value is typically ZigZagEncode."""
|
||||
|
||||
def SpecificSizer(field_number, is_repeated, is_packed):
|
||||
tag_size = _TagSize(field_number)
|
||||
if is_packed:
|
||||
local_VarintSize = _VarintSize
|
||||
def PackedFieldSize(value):
|
||||
result = 0
|
||||
for element in value:
|
||||
result += compute_value_size(modify_value(element))
|
||||
return result + local_VarintSize(result) + tag_size
|
||||
return PackedFieldSize
|
||||
elif is_repeated:
|
||||
def RepeatedFieldSize(value):
|
||||
result = tag_size * len(value)
|
||||
for element in value:
|
||||
result += compute_value_size(modify_value(element))
|
||||
return result
|
||||
return RepeatedFieldSize
|
||||
else:
|
||||
def FieldSize(value):
|
||||
return tag_size + compute_value_size(modify_value(value))
|
||||
return FieldSize
|
||||
|
||||
return SpecificSizer
|
||||
|
||||
|
||||
def _FixedSizer(value_size):
|
||||
"""Like _SimpleSizer except for a fixed-size field. The input is the size
|
||||
of one value."""
|
||||
|
||||
def SpecificSizer(field_number, is_repeated, is_packed):
|
||||
tag_size = _TagSize(field_number)
|
||||
if is_packed:
|
||||
local_VarintSize = _VarintSize
|
||||
def PackedFieldSize(value):
|
||||
result = len(value) * value_size
|
||||
return result + local_VarintSize(result) + tag_size
|
||||
return PackedFieldSize
|
||||
elif is_repeated:
|
||||
element_size = value_size + tag_size
|
||||
def RepeatedFieldSize(value):
|
||||
return len(value) * element_size
|
||||
return RepeatedFieldSize
|
||||
else:
|
||||
field_size = value_size + tag_size
|
||||
def FieldSize(value):
|
||||
return field_size
|
||||
return FieldSize
|
||||
|
||||
return SpecificSizer
|
||||
|
||||
|
||||
# ====================================================================
|
||||
# Here we declare a sizer constructor for each field type. Each "sizer
|
||||
# constructor" is a function that takes (field_number, is_repeated, is_packed)
|
||||
# as parameters and returns a sizer, which in turn takes a field value as
|
||||
# a parameter and returns its encoded size.
|
||||
|
||||
|
||||
Int32Sizer = Int64Sizer = EnumSizer = _SimpleSizer(_SignedVarintSize)
|
||||
|
||||
UInt32Sizer = UInt64Sizer = _SimpleSizer(_VarintSize)
|
||||
|
||||
SInt32Sizer = SInt64Sizer = _ModifiedSizer(
|
||||
_SignedVarintSize, wire_format.ZigZagEncode)
|
||||
|
||||
Fixed32Sizer = SFixed32Sizer = FloatSizer = _FixedSizer(4)
|
||||
Fixed64Sizer = SFixed64Sizer = DoubleSizer = _FixedSizer(8)
|
||||
|
||||
BoolSizer = _FixedSizer(1)
|
||||
|
||||
|
||||
def StringSizer(field_number, is_repeated, is_packed):
|
||||
"""Returns a sizer for a string field."""
|
||||
|
||||
tag_size = _TagSize(field_number)
|
||||
local_VarintSize = _VarintSize
|
||||
local_len = len
|
||||
assert not is_packed
|
||||
if is_repeated:
|
||||
def RepeatedFieldSize(value):
|
||||
result = tag_size * len(value)
|
||||
for element in value:
|
||||
l = local_len(element.encode('utf-8'))
|
||||
result += local_VarintSize(l) + l
|
||||
return result
|
||||
return RepeatedFieldSize
|
||||
else:
|
||||
def FieldSize(value):
|
||||
l = local_len(value.encode('utf-8'))
|
||||
return tag_size + local_VarintSize(l) + l
|
||||
return FieldSize
|
||||
|
||||
|
||||
def BytesSizer(field_number, is_repeated, is_packed):
|
||||
"""Returns a sizer for a bytes field."""
|
||||
|
||||
tag_size = _TagSize(field_number)
|
||||
local_VarintSize = _VarintSize
|
||||
local_len = len
|
||||
assert not is_packed
|
||||
if is_repeated:
|
||||
def RepeatedFieldSize(value):
|
||||
result = tag_size * len(value)
|
||||
for element in value:
|
||||
l = local_len(element)
|
||||
result += local_VarintSize(l) + l
|
||||
return result
|
||||
return RepeatedFieldSize
|
||||
else:
|
||||
def FieldSize(value):
|
||||
l = local_len(value)
|
||||
return tag_size + local_VarintSize(l) + l
|
||||
return FieldSize
|
||||
|
||||
|
||||
def GroupSizer(field_number, is_repeated, is_packed):
|
||||
"""Returns a sizer for a group field."""
|
||||
|
||||
tag_size = _TagSize(field_number) * 2
|
||||
assert not is_packed
|
||||
if is_repeated:
|
||||
def RepeatedFieldSize(value):
|
||||
result = tag_size * len(value)
|
||||
for element in value:
|
||||
result += element.ByteSize()
|
||||
return result
|
||||
return RepeatedFieldSize
|
||||
else:
|
||||
def FieldSize(value):
|
||||
return tag_size + value.ByteSize()
|
||||
return FieldSize
|
||||
|
||||
|
||||
def MessageSizer(field_number, is_repeated, is_packed):
  """Returns a sizer for a message field.

  A submessage is serialized as tag + varint length prefix + payload, so
  the size is tag_size plus the prefix size plus the payload size.
  """

  tag_size = _TagSize(field_number)
  varint_size = _VarintSize
  assert not is_packed
  if not is_repeated:
    def FieldSize(value):
      payload_len = value.ByteSize()
      return tag_size + varint_size(payload_len) + payload_len
    return FieldSize

  def RepeatedFieldSize(value):
    total = tag_size * len(value)
    for message in value:
      payload_len = message.ByteSize()
      total += varint_size(payload_len) + payload_len
    return total
  return RepeatedFieldSize
|
||||
|
||||
|
||||
# --------------------------------------------------------------------
|
||||
# MessageSet is special.
|
||||
|
||||
|
||||
def MessageSetItemSizer(field_number):
  """Returns a sizer for extensions of MessageSet.

  The message set message looks like this:
    message MessageSet {
      repeated group Item = 1 {
        required int32 type_id = 2;
        required string message = 3;
      }
    }
  """
  # Fixed per-item overhead: the group's start/end tags, the type_id tag
  # and its varint value, and the message field's tag.  Only the payload
  # length prefix and payload vary per value.
  static_size = (_TagSize(1) * 2 + _TagSize(2) + _VarintSize(field_number) +
                 _TagSize(3))
  varint_size = _VarintSize

  def FieldSize(value):
    payload_len = value.ByteSize()
    return static_size + varint_size(payload_len) + payload_len

  return FieldSize
|
||||
|
||||
|
||||
# ====================================================================
|
||||
# Encoders!
|
||||
|
||||
|
||||
def _VarintEncoder():
  """Return an encoder for a basic varint value (does not include tag).

  The returned EncodeVarint(write, value) emits *value* as a base-128
  varint: seven bits per byte, least-significant group first, with the
  high bit set on every byte except the last.
  """

  # A single output byte is built with chr() on Python 2 and bytes((x,))
  # on Python 3.  The ##PY25 markers are consumed by the build-time
  # preprocessor and must be left intact.
  local_chr = _PY2 and chr or (lambda x: bytes((x,)))  ##PY25
##!PY25  local_chr = chr if bytes is str else lambda x: bytes((x,))
  def EncodeVarint(write, value):
    bits = value & 0x7f
    value >>= 7
    while value:
      # More bytes follow: set the continuation (0x80) bit.
      write(local_chr(0x80|bits))
      bits = value & 0x7f
      value >>= 7
    # Final byte: continuation bit clear.
    return write(local_chr(bits))

  return EncodeVarint
|
||||
|
||||
|
||||
def _SignedVarintEncoder():
  """Return an encoder for a basic signed varint value (does not include
  tag).

  Negative values are first mapped into the unsigned 64-bit range
  (two's-complement), so a negative number always serializes as ten bytes.
  """

  # See _VarintEncoder: the single-byte factory differs between Python 2
  # and 3, and the ##PY25 markers are build-time preprocessor directives.
  local_chr = _PY2 and chr or (lambda x: bytes((x,)))  ##PY25
##!PY25  local_chr = chr if bytes is str else lambda x: bytes((x,))
  def EncodeSignedVarint(write, value):
    if value < 0:
      # Reinterpret as an unsigned 64-bit integer.
      value += (1 << 64)
    bits = value & 0x7f
    value >>= 7
    while value:
      write(local_chr(0x80|bits))
      bits = value & 0x7f
      value >>= 7
    return write(local_chr(bits))

  return EncodeSignedVarint
|
||||
|
||||
|
||||
# Module-level encoder closures shared by all the sizers/encoders below.
_EncodeVarint = _VarintEncoder()
_EncodeSignedVarint = _SignedVarintEncoder()
|
||||
|
||||
|
||||
def _VarintBytes(value):
  """Encode the given integer as a varint and return the bytes. This is only
  called at startup time so it doesn't need to be fast."""

  pieces = []
  _EncodeVarint(pieces.append, value)
  # "".encode("latin1") is a portable spelling of b"" for the PY25 build;
  # the ##PY25 markers are build-time preprocessor directives.
  return "".encode("latin1").join(pieces)  ##PY25
##!PY25  return b"".join(pieces)
|
||||
|
||||
|
||||
def TagBytes(field_number, wire_type):
  """Encode the given tag and return the bytes.  Only called at startup."""

  # Pack (field_number, wire_type) into the on-wire tag integer, then
  # varint-encode it.
  packed_tag = wire_format.PackTag(field_number, wire_type)
  return _VarintBytes(packed_tag)
|
||||
|
||||
# --------------------------------------------------------------------
|
||||
# As with sizers (see above), we have a number of common encoder
|
||||
# implementations.
|
||||
|
||||
|
||||
def _SimpleEncoder(wire_type, encode_value, compute_value_size):
  """Return a constructor for an encoder for fields of a particular type.

  Args:
      wire_type:  The field's wire type, for encoding tags.
      encode_value:  A function which encodes an individual value, e.g.
        _EncodeVarint().
      compute_value_size:  A function which computes the size of an individual
        value, e.g. _VarintSize().
  """

  def SpecificEncoder(field_number, is_repeated, is_packed):
    if is_packed:
      # Packed repeated field: one length-delimited record holding every
      # element back-to-back, so the total payload size is computed first.
      tag_bytes = TagBytes(field_number, wire_format.WIRETYPE_LENGTH_DELIMITED)
      local_EncodeVarint = _EncodeVarint
      def EncodePackedField(write, value):
        write(tag_bytes)
        size = 0
        for element in value:
          size += compute_value_size(element)
        local_EncodeVarint(write, size)
        for element in value:
          encode_value(write, element)
      return EncodePackedField
    elif is_repeated:
      # Non-packed repeated field: each element carries its own tag.
      tag_bytes = TagBytes(field_number, wire_type)
      def EncodeRepeatedField(write, value):
        for element in value:
          write(tag_bytes)
          encode_value(write, element)
      return EncodeRepeatedField
    else:
      # Singular field: one tag, one value.
      tag_bytes = TagBytes(field_number, wire_type)
      def EncodeField(write, value):
        write(tag_bytes)
        return encode_value(write, value)
      return EncodeField

  return SpecificEncoder
|
||||
|
||||
|
||||
def _ModifiedEncoder(wire_type, encode_value, compute_value_size, modify_value):
  """Like SimpleEncoder but additionally invokes modify_value on every value
  before passing it to encode_value.  Usually modify_value is ZigZagEncode."""

  def SpecificEncoder(field_number, is_repeated, is_packed):
    if is_packed:
      tag_bytes = TagBytes(field_number, wire_format.WIRETYPE_LENGTH_DELIMITED)
      local_EncodeVarint = _EncodeVarint
      def EncodePackedField(write, value):
        write(tag_bytes)
        # The packed payload length must be known before any element is
        # written, so modify_value runs twice per element (size, then encode).
        size = 0
        for element in value:
          size += compute_value_size(modify_value(element))
        local_EncodeVarint(write, size)
        for element in value:
          encode_value(write, modify_value(element))
      return EncodePackedField
    elif is_repeated:
      tag_bytes = TagBytes(field_number, wire_type)
      def EncodeRepeatedField(write, value):
        for element in value:
          write(tag_bytes)
          encode_value(write, modify_value(element))
      return EncodeRepeatedField
    else:
      tag_bytes = TagBytes(field_number, wire_type)
      def EncodeField(write, value):
        write(tag_bytes)
        return encode_value(write, modify_value(value))
      return EncodeField

  return SpecificEncoder
|
||||
|
||||
|
||||
def _StructPackEncoder(wire_type, format):
  """Return a constructor for an encoder for a fixed-width field.

  Args:
      wire_type:  The field's wire type, for encoding tags.
      format:  The format string to pass to struct.pack().
  """

  value_size = struct.calcsize(format)

  def SpecificEncoder(field_number, is_repeated, is_packed):
    local_struct_pack = struct.pack
    if is_packed:
      tag_bytes = TagBytes(field_number, wire_format.WIRETYPE_LENGTH_DELIMITED)
      local_EncodeVarint = _EncodeVarint
      def EncodePackedField(write, value):
        write(tag_bytes)
        # Fixed-width elements: the payload length is simply
        # element count * element size — no per-element sizing pass needed.
        local_EncodeVarint(write, len(value) * value_size)
        for element in value:
          write(local_struct_pack(format, element))
      return EncodePackedField
    elif is_repeated:
      tag_bytes = TagBytes(field_number, wire_type)
      def EncodeRepeatedField(write, value):
        for element in value:
          write(tag_bytes)
          write(local_struct_pack(format, element))
      return EncodeRepeatedField
    else:
      tag_bytes = TagBytes(field_number, wire_type)
      def EncodeField(write, value):
        write(tag_bytes)
        return write(local_struct_pack(format, value))
      return EncodeField

  return SpecificEncoder
|
||||
|
||||
|
||||
def _FloatingPointEncoder(wire_type, format):
  """Return a constructor for an encoder for float fields.

  This is like StructPackEncoder, but catches errors that may be due to
  passing non-finite floating-point values to struct.pack, and makes a
  second attempt to encode those values.

  Args:
      wire_type:  The field's wire type, for encoding tags.
      format:  The format string to pass to struct.pack().
  """

  # b(...) produces a bytes literal on both Python 2 and 3; the ##PY25
  # markers are build-time preprocessor directives and must stay intact.
  b = _PY2 and (lambda x:x) or (lambda x:x.encode('latin1'))  ##PY25
  value_size = struct.calcsize(format)
  if value_size == 4:
    # IEEE-754 single precision: hard-coded little-endian encodings of
    # +inf, -inf and a quiet NaN.
    def EncodeNonFiniteOrRaise(write, value):
      # Remember that the serialized form uses little-endian byte order.
      if value == _POS_INF:
        write(b('\x00\x00\x80\x7F'))  ##PY25
##!PY25      write(b'\x00\x00\x80\x7F')
      elif value == _NEG_INF:
        write(b('\x00\x00\x80\xFF'))  ##PY25
##!PY25      write(b'\x00\x00\x80\xFF')
      elif value != value:  # NaN
        write(b('\x00\x00\xC0\x7F'))  ##PY25
##!PY25      write(b'\x00\x00\xC0\x7F')
      else:
        # Not a non-finite value after all: re-raise the original error.
        raise
  elif value_size == 8:
    # IEEE-754 double precision equivalents.
    def EncodeNonFiniteOrRaise(write, value):
      if value == _POS_INF:
        write(b('\x00\x00\x00\x00\x00\x00\xF0\x7F'))  ##PY25
##!PY25      write(b'\x00\x00\x00\x00\x00\x00\xF0\x7F')
      elif value == _NEG_INF:
        write(b('\x00\x00\x00\x00\x00\x00\xF0\xFF'))  ##PY25
##!PY25      write(b'\x00\x00\x00\x00\x00\x00\xF0\xFF')
      elif value != value:  # NaN
        write(b('\x00\x00\x00\x00\x00\x00\xF8\x7F'))  ##PY25
##!PY25      write(b'\x00\x00\x00\x00\x00\x00\xF8\x7F')
      else:
        raise
  else:
    raise ValueError('Can\'t encode floating-point values that are '
                     '%d bytes long (only 4 or 8)' % value_size)

  def SpecificEncoder(field_number, is_repeated, is_packed):
    local_struct_pack = struct.pack
    if is_packed:
      tag_bytes = TagBytes(field_number, wire_format.WIRETYPE_LENGTH_DELIMITED)
      local_EncodeVarint = _EncodeVarint
      def EncodePackedField(write, value):
        write(tag_bytes)
        local_EncodeVarint(write, len(value) * value_size)
        for element in value:
          # This try/except block is going to be faster than any code that
          # we could write to check whether element is finite.
          try:
            write(local_struct_pack(format, element))
          except SystemError:
            EncodeNonFiniteOrRaise(write, element)
      return EncodePackedField
    elif is_repeated:
      tag_bytes = TagBytes(field_number, wire_type)
      def EncodeRepeatedField(write, value):
        for element in value:
          write(tag_bytes)
          try:
            write(local_struct_pack(format, element))
          except SystemError:
            EncodeNonFiniteOrRaise(write, element)
      return EncodeRepeatedField
    else:
      tag_bytes = TagBytes(field_number, wire_type)
      def EncodeField(write, value):
        write(tag_bytes)
        try:
          write(local_struct_pack(format, value))
        except SystemError:
          EncodeNonFiniteOrRaise(write, value)
      return EncodeField

  return SpecificEncoder
|
||||
|
||||
|
||||
# ====================================================================
|
||||
# Here we declare an encoder constructor for each field type. These work
|
||||
# very similarly to sizer constructors, described earlier.
|
||||
|
||||
|
||||
# Signed int fields use the sign-extended varint encoding so negative
# values round-trip (always ten bytes on the wire when negative).
Int32Encoder = Int64Encoder = EnumEncoder = _SimpleEncoder(
    wire_format.WIRETYPE_VARINT, _EncodeSignedVarint, _SignedVarintSize)

UInt32Encoder = UInt64Encoder = _SimpleEncoder(
    wire_format.WIRETYPE_VARINT, _EncodeVarint, _VarintSize)

# sint32/sint64 zig-zag encode first so small negative values stay small.
SInt32Encoder = SInt64Encoder = _ModifiedEncoder(
    wire_format.WIRETYPE_VARINT, _EncodeVarint, _VarintSize,
    wire_format.ZigZagEncode)

# Note that Python conveniently guarantees that when using the '<' prefix on
# formats, they will also have the same size across all platforms (as opposed
# to without the prefix, where their sizes depend on the C compiler's basic
# type sizes).
Fixed32Encoder = _StructPackEncoder(wire_format.WIRETYPE_FIXED32, '<I')
Fixed64Encoder = _StructPackEncoder(wire_format.WIRETYPE_FIXED64, '<Q')
SFixed32Encoder = _StructPackEncoder(wire_format.WIRETYPE_FIXED32, '<i')
SFixed64Encoder = _StructPackEncoder(wire_format.WIRETYPE_FIXED64, '<q')
FloatEncoder = _FloatingPointEncoder(wire_format.WIRETYPE_FIXED32, '<f')
DoubleEncoder = _FloatingPointEncoder(wire_format.WIRETYPE_FIXED64, '<d')
|
||||
|
||||
|
||||
def BoolEncoder(field_number, is_repeated, is_packed):
  """Returns an encoder for a boolean field."""

  # Pre-built single-byte encodings of False/True; the ##PY25 marker pairs
  # are build-time preprocessor directives and must stay intact.
##!PY25  false_byte = b'\x00'
##!PY25  true_byte = b'\x01'
  false_byte = '\x00'.encode('latin1')  ##PY25
  true_byte = '\x01'.encode('latin1')  ##PY25
  if is_packed:
    tag_bytes = TagBytes(field_number, wire_format.WIRETYPE_LENGTH_DELIMITED)
    local_EncodeVarint = _EncodeVarint
    def EncodePackedField(write, value):
      write(tag_bytes)
      # Every bool is exactly one byte, so the payload length equals the
      # element count.
      local_EncodeVarint(write, len(value))
      for element in value:
        if element:
          write(true_byte)
        else:
          write(false_byte)
    return EncodePackedField
  elif is_repeated:
    tag_bytes = TagBytes(field_number, wire_format.WIRETYPE_VARINT)
    def EncodeRepeatedField(write, value):
      for element in value:
        write(tag_bytes)
        if element:
          write(true_byte)
        else:
          write(false_byte)
    return EncodeRepeatedField
  else:
    tag_bytes = TagBytes(field_number, wire_format.WIRETYPE_VARINT)
    def EncodeField(write, value):
      write(tag_bytes)
      if value:
        return write(true_byte)
      return write(false_byte)
    return EncodeField
|
||||
|
||||
|
||||
def StringEncoder(field_number, is_repeated, is_packed):
  """Returns an encoder for a string field.

  Strings are serialized UTF-8 encoded as tag + varint byte length +
  payload; packed encoding does not apply to length-delimited fields.
  """

  tag = TagBytes(field_number, wire_format.WIRETYPE_LENGTH_DELIMITED)
  emit_varint = _EncodeVarint
  byte_len = len
  assert not is_packed
  if not is_repeated:
    def EncodeField(write, value):
      utf8 = value.encode('utf-8')
      write(tag)
      emit_varint(write, byte_len(utf8))
      return write(utf8)
    return EncodeField

  def EncodeRepeatedField(write, value):
    for element in value:
      utf8 = element.encode('utf-8')
      write(tag)
      emit_varint(write, byte_len(utf8))
      write(utf8)
  return EncodeRepeatedField
|
||||
|
||||
|
||||
def BytesEncoder(field_number, is_repeated, is_packed):
  """Returns an encoder for a bytes field.

  Raw bytes are written as tag + varint byte length + payload; unlike
  string fields no encoding step is needed.
  """

  tag = TagBytes(field_number, wire_format.WIRETYPE_LENGTH_DELIMITED)
  emit_varint = _EncodeVarint
  byte_len = len
  assert not is_packed
  if not is_repeated:
    def EncodeField(write, value):
      write(tag)
      emit_varint(write, byte_len(value))
      return write(value)
    return EncodeField

  def EncodeRepeatedField(write, value):
    for chunk in value:
      write(tag)
      emit_varint(write, byte_len(chunk))
      write(chunk)
  return EncodeRepeatedField
|
||||
|
||||
|
||||
def GroupEncoder(field_number, is_repeated, is_packed):
  """Returns an encoder for a group field.

  A group is framed by START_GROUP / END_GROUP tags rather than a length
  prefix; the payload is serialized between the two tags.
  """

  start_tag = TagBytes(field_number, wire_format.WIRETYPE_START_GROUP)
  end_tag = TagBytes(field_number, wire_format.WIRETYPE_END_GROUP)
  assert not is_packed
  if not is_repeated:
    def EncodeField(write, value):
      write(start_tag)
      value._InternalSerialize(write)
      return write(end_tag)
    return EncodeField

  def EncodeRepeatedField(write, value):
    for group in value:
      write(start_tag)
      group._InternalSerialize(write)
      write(end_tag)
  return EncodeRepeatedField
|
||||
|
||||
|
||||
def MessageEncoder(field_number, is_repeated, is_packed):
  """Returns an encoder for a message field.

  Submessages are length-prefixed: tag, varint payload size (taken from
  ByteSize()), then the message's own serialization.
  """

  tag = TagBytes(field_number, wire_format.WIRETYPE_LENGTH_DELIMITED)
  emit_varint = _EncodeVarint
  assert not is_packed
  if not is_repeated:
    def EncodeField(write, value):
      write(tag)
      emit_varint(write, value.ByteSize())
      return value._InternalSerialize(write)
    return EncodeField

  def EncodeRepeatedField(write, value):
    for message in value:
      write(tag)
      emit_varint(write, message.ByteSize())
      message._InternalSerialize(write)
  return EncodeRepeatedField
|
||||
|
||||
|
||||
# --------------------------------------------------------------------
|
||||
# As before, MessageSet is special.
|
||||
|
||||
|
||||
def MessageSetItemEncoder(field_number):
  """Encoder for extensions of MessageSet.

  The message set message looks like this:
    message MessageSet {
      repeated group Item = 1 {
        required int32 type_id = 2;
        required string message = 3;
      }
    }
  """
  # Fixed prefix: group start tag, type_id tag + value, and the message
  # field's tag.  "".encode("latin1") is a portable b"" for the PY25
  # build; the ##PY25 markers are build-time preprocessor directives.
  start_bytes = "".encode("latin1").join([  ##PY25
##!PY25  start_bytes = b"".join([
      TagBytes(1, wire_format.WIRETYPE_START_GROUP),
      TagBytes(2, wire_format.WIRETYPE_VARINT),
      _VarintBytes(field_number),
      TagBytes(3, wire_format.WIRETYPE_LENGTH_DELIMITED)])
  end_bytes = TagBytes(1, wire_format.WIRETYPE_END_GROUP)
  local_EncodeVarint = _EncodeVarint

  def EncodeField(write, value):
    write(start_bytes)
    local_EncodeVarint(write, value.ByteSize())
    value._InternalSerialize(write)
    return write(end_bytes)

  return EncodeField
|
||||
@@ -0,0 +1,89 @@
|
||||
# Protocol Buffers - Google's data interchange format
|
||||
# Copyright 2008 Google Inc. All rights reserved.
|
||||
# https://developers.google.com/protocol-buffers/
|
||||
#
|
||||
# Redistribution and use in source and binary forms, with or without
|
||||
# modification, are permitted provided that the following conditions are
|
||||
# met:
|
||||
#
|
||||
# * Redistributions of source code must retain the above copyright
|
||||
# notice, this list of conditions and the following disclaimer.
|
||||
# * Redistributions in binary form must reproduce the above
|
||||
# copyright notice, this list of conditions and the following disclaimer
|
||||
# in the documentation and/or other materials provided with the
|
||||
# distribution.
|
||||
# * Neither the name of Google Inc. nor the names of its
|
||||
# contributors may be used to endorse or promote products derived from
|
||||
# this software without specific prior written permission.
|
||||
#
|
||||
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
||||
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
||||
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
||||
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
||||
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
||||
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
||||
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
||||
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
||||
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
||||
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
"""A simple wrapper around enum types to expose utility functions.
|
||||
|
||||
Instances are created as properties with the same name as the enum they wrap
|
||||
on proto classes. For usage, see:
|
||||
reflection_test.py
|
||||
"""
|
||||
|
||||
__author__ = 'rabsatt@google.com (Kevin Rabsatt)'
|
||||
|
||||
|
||||
class EnumTypeWrapper(object):
  """A utility for finding the names of enum values.

  Wraps an EnumDescriptor and exposes name<->number lookups plus
  dict-like keys()/values()/items() views of the enum.
  """

  # Overridden per instance in __init__; kept as a class attribute so the
  # name exists on the class itself.
  DESCRIPTOR = None

  def __init__(self, enum_type):
    """Inits EnumTypeWrapper with an EnumDescriptor."""
    self._enum_type = enum_type
    self.DESCRIPTOR = enum_type  # fixed: removed stray trailing semicolon

  def Name(self, number):
    """Returns a string containing the name of an enum value.

    Raises:
      ValueError: if the enum defines no value with the given number.
    """
    if number in self._enum_type.values_by_number:
      return self._enum_type.values_by_number[number].name
    raise ValueError('Enum %s has no name defined for value %d' % (
        self._enum_type.name, number))

  def Value(self, name):
    """Returns the value corresponding to the given enum name.

    Raises:
      ValueError: if the enum defines no value with the given name.
    """
    if name in self._enum_type.values_by_name:
      return self._enum_type.values_by_name[name].number
    raise ValueError('Enum %s has no value defined for name %s' % (
        self._enum_type.name, name))

  def keys(self):
    """Return a list of the string names in the enum.

    These are returned in the order they were defined in the .proto file.
    """

    return [value_descriptor.name
            for value_descriptor in self._enum_type.values]

  def values(self):
    """Return a list of the integer values in the enum.

    These are returned in the order they were defined in the .proto file.
    """

    return [value_descriptor.number
            for value_descriptor in self._enum_type.values]

  def items(self):
    """Return a list of the (name, value) pairs of the enum.

    These are returned in the order they were defined in the .proto file.
    """
    return [(value_descriptor.name, value_descriptor.number)
            for value_descriptor in self._enum_type.values]
|
||||
@@ -0,0 +1,57 @@
|
||||
// Protocol Buffers - Google's data interchange format
|
||||
// Copyright 2008 Google Inc. All rights reserved.
|
||||
// https://developers.google.com/protocol-buffers/
|
||||
//
|
||||
// Redistribution and use in source and binary forms, with or without
|
||||
// modification, are permitted provided that the following conditions are
|
||||
// met:
|
||||
//
|
||||
// * Redistributions of source code must retain the above copyright
|
||||
// notice, this list of conditions and the following disclaimer.
|
||||
// * Redistributions in binary form must reproduce the above
|
||||
// copyright notice, this list of conditions and the following disclaimer
|
||||
// in the documentation and/or other materials provided with the
|
||||
// distribution.
|
||||
// * Neither the name of Google Inc. nor the names of its
|
||||
// contributors may be used to endorse or promote products derived from
|
||||
// this software without specific prior written permission.
|
||||
//
|
||||
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
||||
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
||||
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
||||
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
||||
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
||||
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
||||
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
||||
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
||||
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
||||
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
// Author: matthewtoia@google.com (Matt Toia)
|
||||
|
||||
|
||||
package google.protobuf.python.internal;
|
||||
|
||||
|
||||
// Top-level enum used by the message_factory / descriptor_pool tests.
enum Factory1Enum {
  FACTORY_1_VALUE_0 = 0;
  FACTORY_1_VALUE_1 = 1;
}
|
||||
|
||||
// Exercises nested enums/messages, scalar and repeated fields, and an
// extension range that factory_test2.proto extends.
message Factory1Message {
  optional Factory1Enum factory_1_enum = 1;
  enum NestedFactory1Enum {
    NESTED_FACTORY_1_VALUE_0 = 0;
    NESTED_FACTORY_1_VALUE_1 = 1;
  }
  optional NestedFactory1Enum nested_factory_1_enum = 2;
  message NestedFactory1Message {
    optional string value = 1;
  }
  optional NestedFactory1Message nested_factory_1_message = 3;
  optional int32 scalar_value = 4;
  repeated string list_value = 5;

  // Field numbers 1000 and up are reserved for extensions (used by
  // factory_test2.proto).
  extensions 1000 to max;
}
|
||||
@@ -0,0 +1,92 @@
|
||||
// Protocol Buffers - Google's data interchange format
|
||||
// Copyright 2008 Google Inc. All rights reserved.
|
||||
// https://developers.google.com/protocol-buffers/
|
||||
//
|
||||
// Redistribution and use in source and binary forms, with or without
|
||||
// modification, are permitted provided that the following conditions are
|
||||
// met:
|
||||
//
|
||||
// * Redistributions of source code must retain the above copyright
|
||||
// notice, this list of conditions and the following disclaimer.
|
||||
// * Redistributions in binary form must reproduce the above
|
||||
// copyright notice, this list of conditions and the following disclaimer
|
||||
// in the documentation and/or other materials provided with the
|
||||
// distribution.
|
||||
// * Neither the name of Google Inc. nor the names of its
|
||||
// contributors may be used to endorse or promote products derived from
|
||||
// this software without specific prior written permission.
|
||||
//
|
||||
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
||||
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
||||
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
||||
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
||||
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
||||
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
||||
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
||||
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
||||
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
||||
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
// Author: matthewtoia@google.com (Matt Toia)
|
||||
|
||||
|
||||
package google.protobuf.python.internal;
|
||||
|
||||
import "google/protobuf/internal/factory_test1.proto";
|
||||
|
||||
|
||||
// Top-level enum for the second factory test file.
enum Factory2Enum {
  FACTORY_2_VALUE_0 = 0;
  FACTORY_2_VALUE_1 = 1;
}
|
||||
|
||||
// Exercises cross-file references into factory_test1.proto, groups,
// defaults, a oneof, and a self-referential (circular) message field.
message Factory2Message {
  required int32 mandatory = 1;
  optional Factory2Enum factory_2_enum = 2;
  enum NestedFactory2Enum {
    NESTED_FACTORY_2_VALUE_0 = 0;
    NESTED_FACTORY_2_VALUE_1 = 1;
  }
  optional NestedFactory2Enum nested_factory_2_enum = 3;
  message NestedFactory2Message {
    optional string value = 1;
  }
  optional NestedFactory2Message nested_factory_2_message = 4;
  // References to types declared in factory_test1.proto.
  optional Factory1Message factory_1_message = 5;
  optional Factory1Enum factory_1_enum = 6;
  optional Factory1Message.NestedFactory1Enum nested_factory_1_enum = 7;
  optional Factory1Message.NestedFactory1Message nested_factory_1_message = 8;
  // Direct self-reference.
  optional Factory2Message circular_message = 9;
  optional string scalar_value = 10;
  repeated string list_value = 11;
  repeated group Grouped = 12 {
    optional string part_1 = 13;
    optional string part_2 = 14;
  }
  // Mutual reference cycle with LoopMessage below.
  optional LoopMessage loop = 15;
  // One field of each flavor of explicit default value.
  optional int32 int_with_default = 16 [default = 1776];
  optional double double_with_default = 17 [default = 9.99];
  optional string string_with_default = 18 [default = "hello world"];
  optional bool bool_with_default = 19 [default = false];
  optional Factory2Enum enum_with_default = 20 [default = FACTORY_2_VALUE_1];
  optional bytes bytes_with_default = 21 [default = "a\373\000c"];

  // Message-scoped extension of a message from factory_test1.proto.
  extend Factory1Message {
    optional string one_more_field = 1001;
  }

  oneof oneof_field {
    int32 oneof_int = 22;
    string oneof_string = 23;
  }
}
|
||||
|
||||
// Completes the Factory2Message <-> LoopMessage reference cycle.
message LoopMessage {
  optional Factory2Message loop = 1;
}
|
||||
|
||||
// File-scope extension of a message declared in factory_test1.proto.
extend Factory1Message {
  optional string another_field = 1002;
}
|
||||
@@ -0,0 +1,343 @@
|
||||
#! /usr/bin/python
|
||||
#
|
||||
# Protocol Buffers - Google's data interchange format
|
||||
# Copyright 2008 Google Inc. All rights reserved.
|
||||
# https://developers.google.com/protocol-buffers/
|
||||
#
|
||||
# Redistribution and use in source and binary forms, with or without
|
||||
# modification, are permitted provided that the following conditions are
|
||||
# met:
|
||||
#
|
||||
# * Redistributions of source code must retain the above copyright
|
||||
# notice, this list of conditions and the following disclaimer.
|
||||
# * Redistributions in binary form must reproduce the above
|
||||
# copyright notice, this list of conditions and the following disclaimer
|
||||
# in the documentation and/or other materials provided with the
|
||||
# distribution.
|
||||
# * Neither the name of Google Inc. nor the names of its
|
||||
# contributors may be used to endorse or promote products derived from
|
||||
# this software without specific prior written permission.
|
||||
#
|
||||
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
||||
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
||||
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
||||
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
||||
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
||||
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
||||
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
||||
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
||||
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
||||
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
# TODO(robinson): Flesh this out considerably. We focused on reflection_test.py
|
||||
# first, since it's testing the subtler code, and since it provides decent
|
||||
# indirect testing of the protocol compiler output.
|
||||
|
||||
"""Unittest that directly tests the output of the pure-Python protocol
|
||||
compiler. See //google/protobuf/reflection_test.py for a test which
|
||||
further ensures that we can use Python protocol message objects as we expect.
|
||||
"""
|
||||
|
||||
__author__ = 'robinson@google.com (Will Robinson)'
|
||||
|
||||
from google.apputils import basetest
|
||||
from google.protobuf.internal import test_bad_identifiers_pb2
|
||||
from google.protobuf import unittest_custom_options_pb2
|
||||
from google.protobuf import unittest_import_pb2
|
||||
from google.protobuf import unittest_import_public_pb2
|
||||
from google.protobuf import unittest_mset_pb2
|
||||
from google.protobuf import unittest_no_generic_services_pb2
|
||||
from google.protobuf import unittest_pb2
|
||||
from google.protobuf import service
|
||||
from google.protobuf import symbol_database
|
||||
|
||||
MAX_EXTENSION = 536870912
|
||||
|
||||
|
||||
class GeneratorTest(basetest.TestCase):
|
||||
|
||||
  def testNestedMessageDescriptor(self):
    """A nested message type's descriptor equals the field's message_type."""
    field_name = 'optional_nested_message'
    proto_type = unittest_pb2.TestAllTypes
    self.assertEqual(
        proto_type.NestedMessage.DESCRIPTOR,
        proto_type.DESCRIPTOR.fields_by_name[field_name].message_type)
|
||||
|
||||
  def testEnums(self):
    """Generated enum constants carry the numbers declared in the .proto."""
    # We test only module-level enums here.
    # TODO(robinson): Examine descriptors directly to check
    # enum descriptor output.
    self.assertEqual(4, unittest_pb2.FOREIGN_FOO)
    self.assertEqual(5, unittest_pb2.FOREIGN_BAR)
    self.assertEqual(6, unittest_pb2.FOREIGN_BAZ)

    # Message-scoped enum values are visible both on instances and on the
    # generated class itself.
    proto = unittest_pb2.TestAllTypes()
    self.assertEqual(1, proto.FOO)
    self.assertEqual(1, unittest_pb2.TestAllTypes.FOO)
    self.assertEqual(2, proto.BAR)
    self.assertEqual(2, unittest_pb2.TestAllTypes.BAR)
    self.assertEqual(3, proto.BAZ)
    self.assertEqual(3, unittest_pb2.TestAllTypes.BAZ)
|
||||
|
||||
def testExtremeDefaultValues(self):
|
||||
message = unittest_pb2.TestExtremeDefaultValues()
|
||||
|
||||
# Python pre-2.6 does not have isinf() or isnan() functions, so we have
|
||||
# to provide our own.
|
||||
def isnan(val):
|
||||
# NaN is never equal to itself.
|
||||
return val != val
|
||||
def isinf(val):
|
||||
# Infinity times zero equals NaN.
|
||||
return not isnan(val) and isnan(val * 0)
|
||||
|
||||
self.assertTrue(isinf(message.inf_double))
|
||||
self.assertTrue(message.inf_double > 0)
|
||||
self.assertTrue(isinf(message.neg_inf_double))
|
||||
self.assertTrue(message.neg_inf_double < 0)
|
||||
self.assertTrue(isnan(message.nan_double))
|
||||
|
||||
self.assertTrue(isinf(message.inf_float))
|
||||
self.assertTrue(message.inf_float > 0)
|
||||
self.assertTrue(isinf(message.neg_inf_float))
|
||||
self.assertTrue(message.neg_inf_float < 0)
|
||||
self.assertTrue(isnan(message.nan_float))
|
||||
self.assertEqual("? ? ?? ?? ??? ??/ ??-", message.cpp_trigraph)
|
||||
|
||||
def testHasDefaultValues(self):
|
||||
desc = unittest_pb2.TestAllTypes.DESCRIPTOR
|
||||
|
||||
expected_has_default_by_name = {
|
||||
'optional_int32': False,
|
||||
'repeated_int32': False,
|
||||
'optional_nested_message': False,
|
||||
'default_int32': True,
|
||||
}
|
||||
|
||||
has_default_by_name = dict(
|
||||
[(f.name, f.has_default_value)
|
||||
for f in desc.fields
|
||||
if f.name in expected_has_default_by_name])
|
||||
self.assertEqual(expected_has_default_by_name, has_default_by_name)
|
||||
|
||||
def testContainingTypeBehaviorForExtensions(self):
|
||||
self.assertEqual(unittest_pb2.optional_int32_extension.containing_type,
|
||||
unittest_pb2.TestAllExtensions.DESCRIPTOR)
|
||||
self.assertEqual(unittest_pb2.TestRequired.single.containing_type,
|
||||
unittest_pb2.TestAllExtensions.DESCRIPTOR)
|
||||
|
||||
def testExtensionScope(self):
|
||||
self.assertEqual(unittest_pb2.optional_int32_extension.extension_scope,
|
||||
None)
|
||||
self.assertEqual(unittest_pb2.TestRequired.single.extension_scope,
|
||||
unittest_pb2.TestRequired.DESCRIPTOR)
|
||||
|
||||
def testIsExtension(self):
|
||||
self.assertTrue(unittest_pb2.optional_int32_extension.is_extension)
|
||||
self.assertTrue(unittest_pb2.TestRequired.single.is_extension)
|
||||
|
||||
message_descriptor = unittest_pb2.TestRequired.DESCRIPTOR
|
||||
non_extension_descriptor = message_descriptor.fields_by_name['a']
|
||||
self.assertTrue(not non_extension_descriptor.is_extension)
|
||||
|
||||
def testOptions(self):
|
||||
proto = unittest_mset_pb2.TestMessageSet()
|
||||
self.assertTrue(proto.DESCRIPTOR.GetOptions().message_set_wire_format)
|
||||
|
||||
def testMessageWithCustomOptions(self):
|
||||
proto = unittest_custom_options_pb2.TestMessageWithCustomOptions()
|
||||
enum_options = proto.DESCRIPTOR.enum_types_by_name['AnEnum'].GetOptions()
|
||||
self.assertTrue(enum_options is not None)
|
||||
# TODO(gps): We really should test for the presense of the enum_opt1
|
||||
# extension and for its value to be set to -789.
|
||||
|
||||
def testNestedTypes(self):
|
||||
self.assertEquals(
|
||||
set(unittest_pb2.TestAllTypes.DESCRIPTOR.nested_types),
|
||||
set([
|
||||
unittest_pb2.TestAllTypes.NestedMessage.DESCRIPTOR,
|
||||
unittest_pb2.TestAllTypes.OptionalGroup.DESCRIPTOR,
|
||||
unittest_pb2.TestAllTypes.RepeatedGroup.DESCRIPTOR,
|
||||
]))
|
||||
self.assertEqual(unittest_pb2.TestEmptyMessage.DESCRIPTOR.nested_types, [])
|
||||
self.assertEqual(
|
||||
unittest_pb2.TestAllTypes.NestedMessage.DESCRIPTOR.nested_types, [])
|
||||
|
||||
def testContainingType(self):
|
||||
self.assertTrue(
|
||||
unittest_pb2.TestEmptyMessage.DESCRIPTOR.containing_type is None)
|
||||
self.assertTrue(
|
||||
unittest_pb2.TestAllTypes.DESCRIPTOR.containing_type is None)
|
||||
self.assertEqual(
|
||||
unittest_pb2.TestAllTypes.NestedMessage.DESCRIPTOR.containing_type,
|
||||
unittest_pb2.TestAllTypes.DESCRIPTOR)
|
||||
self.assertEqual(
|
||||
unittest_pb2.TestAllTypes.NestedMessage.DESCRIPTOR.containing_type,
|
||||
unittest_pb2.TestAllTypes.DESCRIPTOR)
|
||||
self.assertEqual(
|
||||
unittest_pb2.TestAllTypes.RepeatedGroup.DESCRIPTOR.containing_type,
|
||||
unittest_pb2.TestAllTypes.DESCRIPTOR)
|
||||
|
||||
def testContainingTypeInEnumDescriptor(self):
|
||||
self.assertTrue(unittest_pb2._FOREIGNENUM.containing_type is None)
|
||||
self.assertEqual(unittest_pb2._TESTALLTYPES_NESTEDENUM.containing_type,
|
||||
unittest_pb2.TestAllTypes.DESCRIPTOR)
|
||||
|
||||
def testPackage(self):
|
||||
self.assertEqual(
|
||||
unittest_pb2.TestAllTypes.DESCRIPTOR.file.package,
|
||||
'protobuf_unittest')
|
||||
desc = unittest_pb2.TestAllTypes.NestedMessage.DESCRIPTOR
|
||||
self.assertEqual(desc.file.package, 'protobuf_unittest')
|
||||
self.assertEqual(
|
||||
unittest_import_pb2.ImportMessage.DESCRIPTOR.file.package,
|
||||
'protobuf_unittest_import')
|
||||
|
||||
self.assertEqual(
|
||||
unittest_pb2._FOREIGNENUM.file.package, 'protobuf_unittest')
|
||||
self.assertEqual(
|
||||
unittest_pb2._TESTALLTYPES_NESTEDENUM.file.package,
|
||||
'protobuf_unittest')
|
||||
self.assertEqual(
|
||||
unittest_import_pb2._IMPORTENUM.file.package,
|
||||
'protobuf_unittest_import')
|
||||
|
||||
def testExtensionRange(self):
|
||||
self.assertEqual(
|
||||
unittest_pb2.TestAllTypes.DESCRIPTOR.extension_ranges, [])
|
||||
self.assertEqual(
|
||||
unittest_pb2.TestAllExtensions.DESCRIPTOR.extension_ranges,
|
||||
[(1, MAX_EXTENSION)])
|
||||
self.assertEqual(
|
||||
unittest_pb2.TestMultipleExtensionRanges.DESCRIPTOR.extension_ranges,
|
||||
[(42, 43), (4143, 4244), (65536, MAX_EXTENSION)])
|
||||
|
||||
def testFileDescriptor(self):
|
||||
self.assertEqual(unittest_pb2.DESCRIPTOR.name,
|
||||
'google/protobuf/unittest.proto')
|
||||
self.assertEqual(unittest_pb2.DESCRIPTOR.package, 'protobuf_unittest')
|
||||
self.assertFalse(unittest_pb2.DESCRIPTOR.serialized_pb is None)
|
||||
self.assertEqual(unittest_pb2.DESCRIPTOR.dependencies,
|
||||
[unittest_import_pb2.DESCRIPTOR])
|
||||
self.assertEqual(unittest_import_pb2.DESCRIPTOR.dependencies,
|
||||
[unittest_import_public_pb2.DESCRIPTOR])
|
||||
|
||||
def testNoGenericServices(self):
|
||||
self.assertTrue(hasattr(unittest_no_generic_services_pb2, "TestMessage"))
|
||||
self.assertTrue(hasattr(unittest_no_generic_services_pb2, "FOO"))
|
||||
self.assertTrue(hasattr(unittest_no_generic_services_pb2, "test_extension"))
|
||||
|
||||
# Make sure unittest_no_generic_services_pb2 has no services subclassing
|
||||
# Proto2 Service class.
|
||||
if hasattr(unittest_no_generic_services_pb2, "TestService"):
|
||||
self.assertFalse(issubclass(unittest_no_generic_services_pb2.TestService,
|
||||
service.Service))
|
||||
|
||||
def testMessageTypesByName(self):
|
||||
file_type = unittest_pb2.DESCRIPTOR
|
||||
self.assertEqual(
|
||||
unittest_pb2._TESTALLTYPES,
|
||||
file_type.message_types_by_name[unittest_pb2._TESTALLTYPES.name])
|
||||
|
||||
# Nested messages shouldn't be included in the message_types_by_name
|
||||
# dictionary (like in the C++ API).
|
||||
self.assertFalse(
|
||||
unittest_pb2._TESTALLTYPES_NESTEDMESSAGE.name in
|
||||
file_type.message_types_by_name)
|
||||
|
||||
def testEnumTypesByName(self):
|
||||
file_type = unittest_pb2.DESCRIPTOR
|
||||
self.assertEqual(
|
||||
unittest_pb2._FOREIGNENUM,
|
||||
file_type.enum_types_by_name[unittest_pb2._FOREIGNENUM.name])
|
||||
|
||||
def testExtensionsByName(self):
|
||||
file_type = unittest_pb2.DESCRIPTOR
|
||||
self.assertEqual(
|
||||
unittest_pb2.my_extension_string,
|
||||
file_type.extensions_by_name[unittest_pb2.my_extension_string.name])
|
||||
|
||||
def testPublicImports(self):
|
||||
# Test public imports as embedded message.
|
||||
all_type_proto = unittest_pb2.TestAllTypes()
|
||||
self.assertEqual(0, all_type_proto.optional_public_import_message.e)
|
||||
|
||||
# PublicImportMessage is actually defined in unittest_import_public_pb2
|
||||
# module, and is public imported by unittest_import_pb2 module.
|
||||
public_import_proto = unittest_import_pb2.PublicImportMessage()
|
||||
self.assertEqual(0, public_import_proto.e)
|
||||
self.assertTrue(unittest_import_public_pb2.PublicImportMessage is
|
||||
unittest_import_pb2.PublicImportMessage)
|
||||
|
||||
def testBadIdentifiers(self):
|
||||
# We're just testing that the code was imported without problems.
|
||||
message = test_bad_identifiers_pb2.TestBadIdentifiers()
|
||||
self.assertEqual(message.Extensions[test_bad_identifiers_pb2.message],
|
||||
"foo")
|
||||
self.assertEqual(message.Extensions[test_bad_identifiers_pb2.descriptor],
|
||||
"bar")
|
||||
self.assertEqual(message.Extensions[test_bad_identifiers_pb2.reflection],
|
||||
"baz")
|
||||
self.assertEqual(message.Extensions[test_bad_identifiers_pb2.service],
|
||||
"qux")
|
||||
|
||||
def testOneof(self):
|
||||
desc = unittest_pb2.TestAllTypes.DESCRIPTOR
|
||||
self.assertEqual(1, len(desc.oneofs))
|
||||
self.assertEqual('oneof_field', desc.oneofs[0].name)
|
||||
self.assertEqual(0, desc.oneofs[0].index)
|
||||
self.assertIs(desc, desc.oneofs[0].containing_type)
|
||||
self.assertIs(desc.oneofs[0], desc.oneofs_by_name['oneof_field'])
|
||||
nested_names = set(['oneof_uint32', 'oneof_nested_message',
|
||||
'oneof_string', 'oneof_bytes'])
|
||||
self.assertSameElements(
|
||||
nested_names,
|
||||
[field.name for field in desc.oneofs[0].fields])
|
||||
for field_name, field_desc in desc.fields_by_name.iteritems():
|
||||
if field_name in nested_names:
|
||||
self.assertIs(desc.oneofs[0], field_desc.containing_oneof)
|
||||
else:
|
||||
self.assertIsNone(field_desc.containing_oneof)
|
||||
|
||||
|
||||
class SymbolDatabaseRegistrationTest(basetest.TestCase):
|
||||
"""Checks that messages, enums and files are correctly registered."""
|
||||
|
||||
def testGetSymbol(self):
|
||||
self.assertEquals(
|
||||
unittest_pb2.TestAllTypes, symbol_database.Default().GetSymbol(
|
||||
'protobuf_unittest.TestAllTypes'))
|
||||
self.assertEquals(
|
||||
unittest_pb2.TestAllTypes.NestedMessage,
|
||||
symbol_database.Default().GetSymbol(
|
||||
'protobuf_unittest.TestAllTypes.NestedMessage'))
|
||||
with self.assertRaises(KeyError):
|
||||
symbol_database.Default().GetSymbol('protobuf_unittest.NestedMessage')
|
||||
self.assertEquals(
|
||||
unittest_pb2.TestAllTypes.OptionalGroup,
|
||||
symbol_database.Default().GetSymbol(
|
||||
'protobuf_unittest.TestAllTypes.OptionalGroup'))
|
||||
self.assertEquals(
|
||||
unittest_pb2.TestAllTypes.RepeatedGroup,
|
||||
symbol_database.Default().GetSymbol(
|
||||
'protobuf_unittest.TestAllTypes.RepeatedGroup'))
|
||||
|
||||
def testEnums(self):
|
||||
self.assertEquals(
|
||||
'protobuf_unittest.ForeignEnum',
|
||||
symbol_database.Default().pool.FindEnumTypeByName(
|
||||
'protobuf_unittest.ForeignEnum').full_name)
|
||||
self.assertEquals(
|
||||
'protobuf_unittest.TestAllTypes.NestedEnum',
|
||||
symbol_database.Default().pool.FindEnumTypeByName(
|
||||
'protobuf_unittest.TestAllTypes.NestedEnum').full_name)
|
||||
|
||||
def testFindFileByName(self):
|
||||
self.assertEquals(
|
||||
'google/protobuf/unittest.proto',
|
||||
symbol_database.Default().pool.FindFileByName(
|
||||
'google/protobuf/unittest.proto').name)
|
||||
|
||||
if __name__ == '__main__':
|
||||
basetest.main()
|
||||
@@ -0,0 +1,54 @@
|
||||
#! /usr/bin/python
|
||||
#
|
||||
# Protocol Buffers - Google's data interchange format
|
||||
# Copyright 2008 Google Inc. All rights reserved.
|
||||
# https://developers.google.com/protocol-buffers/
|
||||
#
|
||||
# Redistribution and use in source and binary forms, with or without
|
||||
# modification, are permitted provided that the following conditions are
|
||||
# met:
|
||||
#
|
||||
# * Redistributions of source code must retain the above copyright
|
||||
# notice, this list of conditions and the following disclaimer.
|
||||
# * Redistributions in binary form must reproduce the above
|
||||
# copyright notice, this list of conditions and the following disclaimer
|
||||
# in the documentation and/or other materials provided with the
|
||||
# distribution.
|
||||
# * Neither the name of Google Inc. nor the names of its
|
||||
# contributors may be used to endorse or promote products derived from
|
||||
# this software without specific prior written permission.
|
||||
#
|
||||
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
||||
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
||||
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
||||
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
||||
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
||||
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
||||
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
||||
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
||||
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
||||
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
"""Tests for ..public.message_factory for the pure Python implementation."""
|
||||
|
||||
import os
|
||||
os.environ['PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION'] = 'python'
|
||||
|
||||
# We must set the implementation version above before the google3 imports.
|
||||
# pylint: disable=g-import-not-at-top
|
||||
from google.apputils import basetest
|
||||
from google.protobuf.internal import api_implementation
|
||||
# Run all tests from the original module by putting them in our namespace.
|
||||
# pylint: disable=wildcard-import
|
||||
from google.protobuf.internal.message_factory_test import *
|
||||
|
||||
|
||||
class ConfirmPurePythonTest(basetest.TestCase):
|
||||
|
||||
def testImplementationSetting(self):
|
||||
self.assertEqual('python', api_implementation.Type())
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
basetest.main()
|
||||
@@ -0,0 +1,131 @@
|
||||
#! /usr/bin/python
|
||||
#
|
||||
# Protocol Buffers - Google's data interchange format
|
||||
# Copyright 2008 Google Inc. All rights reserved.
|
||||
# https://developers.google.com/protocol-buffers/
|
||||
#
|
||||
# Redistribution and use in source and binary forms, with or without
|
||||
# modification, are permitted provided that the following conditions are
|
||||
# met:
|
||||
#
|
||||
# * Redistributions of source code must retain the above copyright
|
||||
# notice, this list of conditions and the following disclaimer.
|
||||
# * Redistributions in binary form must reproduce the above
|
||||
# copyright notice, this list of conditions and the following disclaimer
|
||||
# in the documentation and/or other materials provided with the
|
||||
# distribution.
|
||||
# * Neither the name of Google Inc. nor the names of its
|
||||
# contributors may be used to endorse or promote products derived from
|
||||
# this software without specific prior written permission.
|
||||
#
|
||||
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
||||
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
||||
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
||||
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
||||
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
||||
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
||||
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
||||
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
||||
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
||||
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
"""Tests for google.protobuf.message_factory."""
|
||||
|
||||
__author__ = 'matthewtoia@google.com (Matt Toia)'
|
||||
|
||||
from google.apputils import basetest
|
||||
from google.protobuf import descriptor_pb2
|
||||
from google.protobuf.internal import factory_test1_pb2
|
||||
from google.protobuf.internal import factory_test2_pb2
|
||||
from google.protobuf import descriptor_database
|
||||
from google.protobuf import descriptor_pool
|
||||
from google.protobuf import message_factory
|
||||
|
||||
|
||||
class MessageFactoryTest(basetest.TestCase):
|
||||
|
||||
def setUp(self):
|
||||
self.factory_test1_fd = descriptor_pb2.FileDescriptorProto.FromString(
|
||||
factory_test1_pb2.DESCRIPTOR.serialized_pb)
|
||||
self.factory_test2_fd = descriptor_pb2.FileDescriptorProto.FromString(
|
||||
factory_test2_pb2.DESCRIPTOR.serialized_pb)
|
||||
|
||||
def _ExerciseDynamicClass(self, cls):
|
||||
msg = cls()
|
||||
msg.mandatory = 42
|
||||
msg.nested_factory_2_enum = 0
|
||||
msg.nested_factory_2_message.value = 'nested message value'
|
||||
msg.factory_1_message.factory_1_enum = 1
|
||||
msg.factory_1_message.nested_factory_1_enum = 0
|
||||
msg.factory_1_message.nested_factory_1_message.value = (
|
||||
'nested message value')
|
||||
msg.factory_1_message.scalar_value = 22
|
||||
msg.factory_1_message.list_value.extend([u'one', u'two', u'three'])
|
||||
msg.factory_1_message.list_value.append(u'four')
|
||||
msg.factory_1_enum = 1
|
||||
msg.nested_factory_1_enum = 0
|
||||
msg.nested_factory_1_message.value = 'nested message value'
|
||||
msg.circular_message.mandatory = 1
|
||||
msg.circular_message.circular_message.mandatory = 2
|
||||
msg.circular_message.scalar_value = 'one deep'
|
||||
msg.scalar_value = 'zero deep'
|
||||
msg.list_value.extend([u'four', u'three', u'two'])
|
||||
msg.list_value.append(u'one')
|
||||
msg.grouped.add()
|
||||
msg.grouped[0].part_1 = 'hello'
|
||||
msg.grouped[0].part_2 = 'world'
|
||||
msg.grouped.add(part_1='testing', part_2='123')
|
||||
msg.loop.loop.mandatory = 2
|
||||
msg.loop.loop.loop.loop.mandatory = 4
|
||||
serialized = msg.SerializeToString()
|
||||
converted = factory_test2_pb2.Factory2Message.FromString(serialized)
|
||||
reserialized = converted.SerializeToString()
|
||||
self.assertEquals(serialized, reserialized)
|
||||
result = cls.FromString(reserialized)
|
||||
self.assertEquals(msg, result)
|
||||
|
||||
def testGetPrototype(self):
|
||||
db = descriptor_database.DescriptorDatabase()
|
||||
pool = descriptor_pool.DescriptorPool(db)
|
||||
db.Add(self.factory_test1_fd)
|
||||
db.Add(self.factory_test2_fd)
|
||||
factory = message_factory.MessageFactory()
|
||||
cls = factory.GetPrototype(pool.FindMessageTypeByName(
|
||||
'google.protobuf.python.internal.Factory2Message'))
|
||||
self.assertIsNot(cls, factory_test2_pb2.Factory2Message)
|
||||
self._ExerciseDynamicClass(cls)
|
||||
cls2 = factory.GetPrototype(pool.FindMessageTypeByName(
|
||||
'google.protobuf.python.internal.Factory2Message'))
|
||||
self.assertIs(cls, cls2)
|
||||
|
||||
def testGetMessages(self):
|
||||
# performed twice because multiple calls with the same input must be allowed
|
||||
for _ in range(2):
|
||||
messages = message_factory.GetMessages([self.factory_test2_fd,
|
||||
self.factory_test1_fd])
|
||||
self.assertContainsSubset(
|
||||
['google.protobuf.python.internal.Factory2Message',
|
||||
'google.protobuf.python.internal.Factory1Message'],
|
||||
messages.keys())
|
||||
self._ExerciseDynamicClass(
|
||||
messages['google.protobuf.python.internal.Factory2Message'])
|
||||
self.assertContainsSubset(
|
||||
['google.protobuf.python.internal.Factory2Message.one_more_field',
|
||||
'google.protobuf.python.internal.another_field'],
|
||||
(messages['google.protobuf.python.internal.Factory1Message']
|
||||
._extensions_by_name.keys()))
|
||||
factory_msg1 = messages['google.protobuf.python.internal.Factory1Message']
|
||||
msg1 = messages['google.protobuf.python.internal.Factory1Message']()
|
||||
ext1 = factory_msg1._extensions_by_name[
|
||||
'google.protobuf.python.internal.Factory2Message.one_more_field']
|
||||
ext2 = factory_msg1._extensions_by_name[
|
||||
'google.protobuf.python.internal.another_field']
|
||||
msg1.Extensions[ext1] = 'test1'
|
||||
msg1.Extensions[ext2] = 'test2'
|
||||
self.assertEquals('test1', msg1.Extensions[ext1])
|
||||
self.assertEquals('test2', msg1.Extensions[ext2])
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
basetest.main()
|
||||
@@ -0,0 +1,78 @@
|
||||
# Protocol Buffers - Google's data interchange format
|
||||
# Copyright 2008 Google Inc. All rights reserved.
|
||||
# https://developers.google.com/protocol-buffers/
|
||||
#
|
||||
# Redistribution and use in source and binary forms, with or without
|
||||
# modification, are permitted provided that the following conditions are
|
||||
# met:
|
||||
#
|
||||
# * Redistributions of source code must retain the above copyright
|
||||
# notice, this list of conditions and the following disclaimer.
|
||||
# * Redistributions in binary form must reproduce the above
|
||||
# copyright notice, this list of conditions and the following disclaimer
|
||||
# in the documentation and/or other materials provided with the
|
||||
# distribution.
|
||||
# * Neither the name of Google Inc. nor the names of its
|
||||
# contributors may be used to endorse or promote products derived from
|
||||
# this software without specific prior written permission.
|
||||
#
|
||||
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
||||
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
||||
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
||||
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
||||
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
||||
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
||||
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
||||
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
||||
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
||||
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
"""Defines a listener interface for observing certain
|
||||
state transitions on Message objects.
|
||||
|
||||
Also defines a null implementation of this interface.
|
||||
"""
|
||||
|
||||
__author__ = 'robinson@google.com (Will Robinson)'
|
||||
|
||||
|
||||
class MessageListener(object):
|
||||
|
||||
"""Listens for modifications made to a message. Meant to be registered via
|
||||
Message._SetListener().
|
||||
|
||||
Attributes:
|
||||
dirty: If True, then calling Modified() would be a no-op. This can be
|
||||
used to avoid these calls entirely in the common case.
|
||||
"""
|
||||
|
||||
def Modified(self):
|
||||
"""Called every time the message is modified in such a way that the parent
|
||||
message may need to be updated. This currently means either:
|
||||
(a) The message was modified for the first time, so the parent message
|
||||
should henceforth mark the message as present.
|
||||
(b) The message's cached byte size became dirty -- i.e. the message was
|
||||
modified for the first time after a previous call to ByteSize().
|
||||
Therefore the parent should also mark its byte size as dirty.
|
||||
Note that (a) implies (b), since new objects start out with a client cached
|
||||
size (zero). However, we document (a) explicitly because it is important.
|
||||
|
||||
Modified() will *only* be called in response to one of these two events --
|
||||
not every time the sub-message is modified.
|
||||
|
||||
Note that if the listener's |dirty| attribute is true, then calling
|
||||
Modified at the moment would be a no-op, so it can be skipped. Performance-
|
||||
sensitive callers should check this attribute directly before calling since
|
||||
it will be true most of the time.
|
||||
"""
|
||||
|
||||
raise NotImplementedError
|
||||
|
||||
|
||||
class NullMessageListener(object):
|
||||
|
||||
"""No-op MessageListener implementation."""
|
||||
|
||||
def Modified(self):
|
||||
pass
|
||||
@@ -0,0 +1,54 @@
|
||||
#! /usr/bin/python
|
||||
#
|
||||
# Protocol Buffers - Google's data interchange format
|
||||
# Copyright 2008 Google Inc. All rights reserved.
|
||||
# https://developers.google.com/protocol-buffers/
|
||||
#
|
||||
# Redistribution and use in source and binary forms, with or without
|
||||
# modification, are permitted provided that the following conditions are
|
||||
# met:
|
||||
#
|
||||
# * Redistributions of source code must retain the above copyright
|
||||
# notice, this list of conditions and the following disclaimer.
|
||||
# * Redistributions in binary form must reproduce the above
|
||||
# copyright notice, this list of conditions and the following disclaimer
|
||||
# in the documentation and/or other materials provided with the
|
||||
# distribution.
|
||||
# * Neither the name of Google Inc. nor the names of its
|
||||
# contributors may be used to endorse or promote products derived from
|
||||
# this software without specific prior written permission.
|
||||
#
|
||||
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
||||
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
||||
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
||||
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
||||
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
||||
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
||||
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
||||
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
||||
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
||||
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
"""Tests for ..public.message for the pure Python implementation."""
|
||||
|
||||
import os
|
||||
os.environ['PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION'] = 'python'
|
||||
|
||||
# We must set the implementation version above before the google3 imports.
|
||||
# pylint: disable=g-import-not-at-top
|
||||
from google.apputils import basetest
|
||||
from google.protobuf.internal import api_implementation
|
||||
# Run all tests from the original module by putting them in our namespace.
|
||||
# pylint: disable=wildcard-import
|
||||
from google.protobuf.internal.message_test import *
|
||||
|
||||
|
||||
class ConfirmPurePythonTest(basetest.TestCase):
|
||||
|
||||
def testImplementationSetting(self):
|
||||
self.assertEqual('python', api_implementation.Type())
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
basetest.main()
|
||||
@@ -0,0 +1,681 @@
|
||||
#! /usr/bin/python
|
||||
#
|
||||
# Protocol Buffers - Google's data interchange format
|
||||
# Copyright 2008 Google Inc. All rights reserved.
|
||||
# https://developers.google.com/protocol-buffers/
|
||||
#
|
||||
# Redistribution and use in source and binary forms, with or without
|
||||
# modification, are permitted provided that the following conditions are
|
||||
# met:
|
||||
#
|
||||
# * Redistributions of source code must retain the above copyright
|
||||
# notice, this list of conditions and the following disclaimer.
|
||||
# * Redistributions in binary form must reproduce the above
|
||||
# copyright notice, this list of conditions and the following disclaimer
|
||||
# in the documentation and/or other materials provided with the
|
||||
# distribution.
|
||||
# * Neither the name of Google Inc. nor the names of its
|
||||
# contributors may be used to endorse or promote products derived from
|
||||
# this software without specific prior written permission.
|
||||
#
|
||||
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
||||
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
||||
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
||||
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
||||
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
||||
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
||||
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
||||
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
||||
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
||||
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
"""Tests python protocol buffers against the golden message.
|
||||
|
||||
Note that the golden messages exercise every known field type, thus this
|
||||
test ends up exercising and verifying nearly all of the parsing and
|
||||
serialization code in the whole library.
|
||||
|
||||
TODO(kenton): Merge with wire_format_test? It doesn't make a whole lot of
|
||||
sense to call this a test of the "message" module, which only declares an
|
||||
abstract interface.
|
||||
"""
|
||||
|
||||
__author__ = 'gps@google.com (Gregory P. Smith)'
|
||||
|
||||
import copy
|
||||
import math
|
||||
import operator
|
||||
import pickle
|
||||
import sys
|
||||
|
||||
from google.apputils import basetest
|
||||
from google.protobuf import unittest_pb2
|
||||
from google.protobuf.internal import api_implementation
|
||||
from google.protobuf.internal import test_util
|
||||
from google.protobuf import message
|
||||
|
||||
# Python pre-2.6 does not have isinf() or isnan() functions, so we have
|
||||
# to provide our own.
|
||||
def isnan(val):
|
||||
# NaN is never equal to itself.
|
||||
return val != val
|
||||
def isinf(val):
|
||||
# Infinity times zero equals NaN.
|
||||
return not isnan(val) and isnan(val * 0)
|
||||
def IsPosInf(val):
|
||||
return isinf(val) and (val > 0)
|
||||
def IsNegInf(val):
|
||||
return isinf(val) and (val < 0)
|
||||
|
||||
|
||||
class MessageTest(basetest.TestCase):
|
||||
|
||||
def testBadUtf8String(self):
|
||||
if api_implementation.Type() != 'python':
|
||||
self.skipTest("Skipping testBadUtf8String, currently only the python "
|
||||
"api implementation raises UnicodeDecodeError when a "
|
||||
"string field contains bad utf-8.")
|
||||
bad_utf8_data = test_util.GoldenFileData('bad_utf8_string')
|
||||
with self.assertRaises(UnicodeDecodeError) as context:
|
||||
unittest_pb2.TestAllTypes.FromString(bad_utf8_data)
|
||||
self.assertIn('field: protobuf_unittest.TestAllTypes.optional_string',
|
||||
str(context.exception))
|
||||
|
||||
def testGoldenMessage(self):
|
||||
golden_data = test_util.GoldenFileData(
|
||||
'golden_message_oneof_implemented')
|
||||
golden_message = unittest_pb2.TestAllTypes()
|
||||
golden_message.ParseFromString(golden_data)
|
||||
test_util.ExpectAllFieldsSet(self, golden_message)
|
||||
self.assertEqual(golden_data, golden_message.SerializeToString())
|
||||
golden_copy = copy.deepcopy(golden_message)
|
||||
self.assertEqual(golden_data, golden_copy.SerializeToString())
|
||||
|
||||
def testGoldenExtensions(self):
|
||||
golden_data = test_util.GoldenFileData('golden_message')
|
||||
golden_message = unittest_pb2.TestAllExtensions()
|
||||
golden_message.ParseFromString(golden_data)
|
||||
all_set = unittest_pb2.TestAllExtensions()
|
||||
test_util.SetAllExtensions(all_set)
|
||||
self.assertEquals(all_set, golden_message)
|
||||
self.assertEqual(golden_data, golden_message.SerializeToString())
|
||||
golden_copy = copy.deepcopy(golden_message)
|
||||
self.assertEqual(golden_data, golden_copy.SerializeToString())
|
||||
|
||||
def testGoldenPackedMessage(self):
|
||||
golden_data = test_util.GoldenFileData('golden_packed_fields_message')
|
||||
golden_message = unittest_pb2.TestPackedTypes()
|
||||
golden_message.ParseFromString(golden_data)
|
||||
all_set = unittest_pb2.TestPackedTypes()
|
||||
test_util.SetAllPackedFields(all_set)
|
||||
self.assertEquals(all_set, golden_message)
|
||||
self.assertEqual(golden_data, all_set.SerializeToString())
|
||||
golden_copy = copy.deepcopy(golden_message)
|
||||
self.assertEqual(golden_data, golden_copy.SerializeToString())
|
||||
|
||||
def testGoldenPackedExtensions(self):
|
||||
golden_data = test_util.GoldenFileData('golden_packed_fields_message')
|
||||
golden_message = unittest_pb2.TestPackedExtensions()
|
||||
golden_message.ParseFromString(golden_data)
|
||||
all_set = unittest_pb2.TestPackedExtensions()
|
||||
test_util.SetAllPackedExtensions(all_set)
|
||||
self.assertEquals(all_set, golden_message)
|
||||
self.assertEqual(golden_data, all_set.SerializeToString())
|
||||
golden_copy = copy.deepcopy(golden_message)
|
||||
self.assertEqual(golden_data, golden_copy.SerializeToString())
|
||||
|
||||
def testPickleSupport(self):
|
||||
golden_data = test_util.GoldenFileData('golden_message')
|
||||
golden_message = unittest_pb2.TestAllTypes()
|
||||
golden_message.ParseFromString(golden_data)
|
||||
pickled_message = pickle.dumps(golden_message)
|
||||
|
||||
unpickled_message = pickle.loads(pickled_message)
|
||||
self.assertEquals(unpickled_message, golden_message)
|
||||
|
||||
|
||||
def testPickleIncompleteProto(self):
|
||||
golden_message = unittest_pb2.TestRequired(a=1)
|
||||
pickled_message = pickle.dumps(golden_message)
|
||||
|
||||
unpickled_message = pickle.loads(pickled_message)
|
||||
self.assertEquals(unpickled_message, golden_message)
|
||||
self.assertEquals(unpickled_message.a, 1)
|
||||
# This is still an incomplete proto - so serializing should fail
|
||||
self.assertRaises(message.EncodeError, unpickled_message.SerializeToString)
|
||||
|
||||
def testPositiveInfinity(self):
|
||||
golden_data = (b'\x5D\x00\x00\x80\x7F'
|
||||
b'\x61\x00\x00\x00\x00\x00\x00\xF0\x7F'
|
||||
b'\xCD\x02\x00\x00\x80\x7F'
|
||||
b'\xD1\x02\x00\x00\x00\x00\x00\x00\xF0\x7F')
|
||||
golden_message = unittest_pb2.TestAllTypes()
|
||||
golden_message.ParseFromString(golden_data)
|
||||
self.assertTrue(IsPosInf(golden_message.optional_float))
|
||||
self.assertTrue(IsPosInf(golden_message.optional_double))
|
||||
self.assertTrue(IsPosInf(golden_message.repeated_float[0]))
|
||||
self.assertTrue(IsPosInf(golden_message.repeated_double[0]))
|
||||
self.assertEqual(golden_data, golden_message.SerializeToString())
|
||||
|
||||
def testNegativeInfinity(self):
|
||||
golden_data = (b'\x5D\x00\x00\x80\xFF'
|
||||
b'\x61\x00\x00\x00\x00\x00\x00\xF0\xFF'
|
||||
b'\xCD\x02\x00\x00\x80\xFF'
|
||||
b'\xD1\x02\x00\x00\x00\x00\x00\x00\xF0\xFF')
|
||||
golden_message = unittest_pb2.TestAllTypes()
|
||||
golden_message.ParseFromString(golden_data)
|
||||
self.assertTrue(IsNegInf(golden_message.optional_float))
|
||||
self.assertTrue(IsNegInf(golden_message.optional_double))
|
||||
self.assertTrue(IsNegInf(golden_message.repeated_float[0]))
|
||||
self.assertTrue(IsNegInf(golden_message.repeated_double[0]))
|
||||
self.assertEqual(golden_data, golden_message.SerializeToString())
|
||||
|
||||
def testNotANumber(self):
|
||||
golden_data = (b'\x5D\x00\x00\xC0\x7F'
|
||||
b'\x61\x00\x00\x00\x00\x00\x00\xF8\x7F'
|
||||
b'\xCD\x02\x00\x00\xC0\x7F'
|
||||
b'\xD1\x02\x00\x00\x00\x00\x00\x00\xF8\x7F')
|
||||
golden_message = unittest_pb2.TestAllTypes()
|
||||
golden_message.ParseFromString(golden_data)
|
||||
self.assertTrue(isnan(golden_message.optional_float))
|
||||
self.assertTrue(isnan(golden_message.optional_double))
|
||||
self.assertTrue(isnan(golden_message.repeated_float[0]))
|
||||
self.assertTrue(isnan(golden_message.repeated_double[0]))
|
||||
|
||||
# The protocol buffer may serialize to any one of multiple different
|
||||
# representations of a NaN. Rather than verify a specific representation,
|
||||
# verify the serialized string can be converted into a correctly
|
||||
# behaving protocol buffer.
|
||||
serialized = golden_message.SerializeToString()
|
||||
message = unittest_pb2.TestAllTypes()
|
||||
message.ParseFromString(serialized)
|
||||
self.assertTrue(isnan(message.optional_float))
|
||||
self.assertTrue(isnan(message.optional_double))
|
||||
self.assertTrue(isnan(message.repeated_float[0]))
|
||||
self.assertTrue(isnan(message.repeated_double[0]))
|
||||
|
||||
def testPositiveInfinityPacked(self):
|
||||
golden_data = (b'\xA2\x06\x04\x00\x00\x80\x7F'
|
||||
b'\xAA\x06\x08\x00\x00\x00\x00\x00\x00\xF0\x7F')
|
||||
golden_message = unittest_pb2.TestPackedTypes()
|
||||
golden_message.ParseFromString(golden_data)
|
||||
self.assertTrue(IsPosInf(golden_message.packed_float[0]))
|
||||
self.assertTrue(IsPosInf(golden_message.packed_double[0]))
|
||||
self.assertEqual(golden_data, golden_message.SerializeToString())
|
||||
|
||||
def testNegativeInfinityPacked(self):
|
||||
golden_data = (b'\xA2\x06\x04\x00\x00\x80\xFF'
|
||||
b'\xAA\x06\x08\x00\x00\x00\x00\x00\x00\xF0\xFF')
|
||||
golden_message = unittest_pb2.TestPackedTypes()
|
||||
golden_message.ParseFromString(golden_data)
|
||||
self.assertTrue(IsNegInf(golden_message.packed_float[0]))
|
||||
self.assertTrue(IsNegInf(golden_message.packed_double[0]))
|
||||
self.assertEqual(golden_data, golden_message.SerializeToString())
|
||||
|
||||
def testNotANumberPacked(self):
|
||||
golden_data = (b'\xA2\x06\x04\x00\x00\xC0\x7F'
|
||||
b'\xAA\x06\x08\x00\x00\x00\x00\x00\x00\xF8\x7F')
|
||||
golden_message = unittest_pb2.TestPackedTypes()
|
||||
golden_message.ParseFromString(golden_data)
|
||||
self.assertTrue(isnan(golden_message.packed_float[0]))
|
||||
self.assertTrue(isnan(golden_message.packed_double[0]))
|
||||
|
||||
serialized = golden_message.SerializeToString()
|
||||
message = unittest_pb2.TestPackedTypes()
|
||||
message.ParseFromString(serialized)
|
||||
self.assertTrue(isnan(message.packed_float[0]))
|
||||
self.assertTrue(isnan(message.packed_double[0]))
|
||||
|
||||
def testExtremeFloatValues(self):
|
||||
message = unittest_pb2.TestAllTypes()
|
||||
|
||||
# Most positive exponent, no significand bits set.
|
||||
kMostPosExponentNoSigBits = math.pow(2, 127)
|
||||
message.optional_float = kMostPosExponentNoSigBits
|
||||
message.ParseFromString(message.SerializeToString())
|
||||
self.assertTrue(message.optional_float == kMostPosExponentNoSigBits)
|
||||
|
||||
# Most positive exponent, one significand bit set.
|
||||
kMostPosExponentOneSigBit = 1.5 * math.pow(2, 127)
|
||||
message.optional_float = kMostPosExponentOneSigBit
|
||||
message.ParseFromString(message.SerializeToString())
|
||||
self.assertTrue(message.optional_float == kMostPosExponentOneSigBit)
|
||||
|
||||
# Repeat last two cases with values of same magnitude, but negative.
|
||||
message.optional_float = -kMostPosExponentNoSigBits
|
||||
message.ParseFromString(message.SerializeToString())
|
||||
self.assertTrue(message.optional_float == -kMostPosExponentNoSigBits)
|
||||
|
||||
message.optional_float = -kMostPosExponentOneSigBit
|
||||
message.ParseFromString(message.SerializeToString())
|
||||
self.assertTrue(message.optional_float == -kMostPosExponentOneSigBit)
|
||||
|
||||
# Most negative exponent, no significand bits set.
|
||||
kMostNegExponentNoSigBits = math.pow(2, -127)
|
||||
message.optional_float = kMostNegExponentNoSigBits
|
||||
message.ParseFromString(message.SerializeToString())
|
||||
self.assertTrue(message.optional_float == kMostNegExponentNoSigBits)
|
||||
|
||||
# Most negative exponent, one significand bit set.
|
||||
kMostNegExponentOneSigBit = 1.5 * math.pow(2, -127)
|
||||
message.optional_float = kMostNegExponentOneSigBit
|
||||
message.ParseFromString(message.SerializeToString())
|
||||
self.assertTrue(message.optional_float == kMostNegExponentOneSigBit)
|
||||
|
||||
# Repeat last two cases with values of the same magnitude, but negative.
|
||||
message.optional_float = -kMostNegExponentNoSigBits
|
||||
message.ParseFromString(message.SerializeToString())
|
||||
self.assertTrue(message.optional_float == -kMostNegExponentNoSigBits)
|
||||
|
||||
message.optional_float = -kMostNegExponentOneSigBit
|
||||
message.ParseFromString(message.SerializeToString())
|
||||
self.assertTrue(message.optional_float == -kMostNegExponentOneSigBit)
|
||||
|
||||
def testExtremeDoubleValues(self):
|
||||
message = unittest_pb2.TestAllTypes()
|
||||
|
||||
# Most positive exponent, no significand bits set.
|
||||
kMostPosExponentNoSigBits = math.pow(2, 1023)
|
||||
message.optional_double = kMostPosExponentNoSigBits
|
||||
message.ParseFromString(message.SerializeToString())
|
||||
self.assertTrue(message.optional_double == kMostPosExponentNoSigBits)
|
||||
|
||||
# Most positive exponent, one significand bit set.
|
||||
kMostPosExponentOneSigBit = 1.5 * math.pow(2, 1023)
|
||||
message.optional_double = kMostPosExponentOneSigBit
|
||||
message.ParseFromString(message.SerializeToString())
|
||||
self.assertTrue(message.optional_double == kMostPosExponentOneSigBit)
|
||||
|
||||
# Repeat last two cases with values of same magnitude, but negative.
|
||||
message.optional_double = -kMostPosExponentNoSigBits
|
||||
message.ParseFromString(message.SerializeToString())
|
||||
self.assertTrue(message.optional_double == -kMostPosExponentNoSigBits)
|
||||
|
||||
message.optional_double = -kMostPosExponentOneSigBit
|
||||
message.ParseFromString(message.SerializeToString())
|
||||
self.assertTrue(message.optional_double == -kMostPosExponentOneSigBit)
|
||||
|
||||
# Most negative exponent, no significand bits set.
|
||||
kMostNegExponentNoSigBits = math.pow(2, -1023)
|
||||
message.optional_double = kMostNegExponentNoSigBits
|
||||
message.ParseFromString(message.SerializeToString())
|
||||
self.assertTrue(message.optional_double == kMostNegExponentNoSigBits)
|
||||
|
||||
# Most negative exponent, one significand bit set.
|
||||
kMostNegExponentOneSigBit = 1.5 * math.pow(2, -1023)
|
||||
message.optional_double = kMostNegExponentOneSigBit
|
||||
message.ParseFromString(message.SerializeToString())
|
||||
self.assertTrue(message.optional_double == kMostNegExponentOneSigBit)
|
||||
|
||||
# Repeat last two cases with values of the same magnitude, but negative.
|
||||
message.optional_double = -kMostNegExponentNoSigBits
|
||||
message.ParseFromString(message.SerializeToString())
|
||||
self.assertTrue(message.optional_double == -kMostNegExponentNoSigBits)
|
||||
|
||||
message.optional_double = -kMostNegExponentOneSigBit
|
||||
message.ParseFromString(message.SerializeToString())
|
||||
self.assertTrue(message.optional_double == -kMostNegExponentOneSigBit)
|
||||
|
||||
def testFloatPrinting(self):
|
||||
message = unittest_pb2.TestAllTypes()
|
||||
message.optional_float = 2.0
|
||||
self.assertEqual(str(message), 'optional_float: 2.0\n')
|
||||
|
||||
def testHighPrecisionFloatPrinting(self):
|
||||
message = unittest_pb2.TestAllTypes()
|
||||
message.optional_double = 0.12345678912345678
|
||||
if sys.version_info.major >= 3:
|
||||
self.assertEqual(str(message), 'optional_double: 0.12345678912345678\n')
|
||||
else:
|
||||
self.assertEqual(str(message), 'optional_double: 0.123456789123\n')
|
||||
|
||||
def testUnknownFieldPrinting(self):
|
||||
populated = unittest_pb2.TestAllTypes()
|
||||
test_util.SetAllNonLazyFields(populated)
|
||||
empty = unittest_pb2.TestEmptyMessage()
|
||||
empty.ParseFromString(populated.SerializeToString())
|
||||
self.assertEqual(str(empty), '')
|
||||
|
||||
def testSortingRepeatedScalarFieldsDefaultComparator(self):
|
||||
"""Check some different types with the default comparator."""
|
||||
message = unittest_pb2.TestAllTypes()
|
||||
|
||||
# TODO(mattp): would testing more scalar types strengthen test?
|
||||
message.repeated_int32.append(1)
|
||||
message.repeated_int32.append(3)
|
||||
message.repeated_int32.append(2)
|
||||
message.repeated_int32.sort()
|
||||
self.assertEqual(message.repeated_int32[0], 1)
|
||||
self.assertEqual(message.repeated_int32[1], 2)
|
||||
self.assertEqual(message.repeated_int32[2], 3)
|
||||
|
||||
message.repeated_float.append(1.1)
|
||||
message.repeated_float.append(1.3)
|
||||
message.repeated_float.append(1.2)
|
||||
message.repeated_float.sort()
|
||||
self.assertAlmostEqual(message.repeated_float[0], 1.1)
|
||||
self.assertAlmostEqual(message.repeated_float[1], 1.2)
|
||||
self.assertAlmostEqual(message.repeated_float[2], 1.3)
|
||||
|
||||
message.repeated_string.append('a')
|
||||
message.repeated_string.append('c')
|
||||
message.repeated_string.append('b')
|
||||
message.repeated_string.sort()
|
||||
self.assertEqual(message.repeated_string[0], 'a')
|
||||
self.assertEqual(message.repeated_string[1], 'b')
|
||||
self.assertEqual(message.repeated_string[2], 'c')
|
||||
|
||||
message.repeated_bytes.append(b'a')
|
||||
message.repeated_bytes.append(b'c')
|
||||
message.repeated_bytes.append(b'b')
|
||||
message.repeated_bytes.sort()
|
||||
self.assertEqual(message.repeated_bytes[0], b'a')
|
||||
self.assertEqual(message.repeated_bytes[1], b'b')
|
||||
self.assertEqual(message.repeated_bytes[2], b'c')
|
||||
|
||||
def testSortingRepeatedScalarFieldsCustomComparator(self):
|
||||
"""Check some different types with custom comparator."""
|
||||
message = unittest_pb2.TestAllTypes()
|
||||
|
||||
message.repeated_int32.append(-3)
|
||||
message.repeated_int32.append(-2)
|
||||
message.repeated_int32.append(-1)
|
||||
message.repeated_int32.sort(key=abs)
|
||||
self.assertEqual(message.repeated_int32[0], -1)
|
||||
self.assertEqual(message.repeated_int32[1], -2)
|
||||
self.assertEqual(message.repeated_int32[2], -3)
|
||||
|
||||
message.repeated_string.append('aaa')
|
||||
message.repeated_string.append('bb')
|
||||
message.repeated_string.append('c')
|
||||
message.repeated_string.sort(key=len)
|
||||
self.assertEqual(message.repeated_string[0], 'c')
|
||||
self.assertEqual(message.repeated_string[1], 'bb')
|
||||
self.assertEqual(message.repeated_string[2], 'aaa')
|
||||
|
||||
def testSortingRepeatedCompositeFieldsCustomComparator(self):
|
||||
"""Check passing a custom comparator to sort a repeated composite field."""
|
||||
message = unittest_pb2.TestAllTypes()
|
||||
|
||||
message.repeated_nested_message.add().bb = 1
|
||||
message.repeated_nested_message.add().bb = 3
|
||||
message.repeated_nested_message.add().bb = 2
|
||||
message.repeated_nested_message.add().bb = 6
|
||||
message.repeated_nested_message.add().bb = 5
|
||||
message.repeated_nested_message.add().bb = 4
|
||||
message.repeated_nested_message.sort(key=operator.attrgetter('bb'))
|
||||
self.assertEqual(message.repeated_nested_message[0].bb, 1)
|
||||
self.assertEqual(message.repeated_nested_message[1].bb, 2)
|
||||
self.assertEqual(message.repeated_nested_message[2].bb, 3)
|
||||
self.assertEqual(message.repeated_nested_message[3].bb, 4)
|
||||
self.assertEqual(message.repeated_nested_message[4].bb, 5)
|
||||
self.assertEqual(message.repeated_nested_message[5].bb, 6)
|
||||
|
||||
def testRepeatedCompositeFieldSortArguments(self):
|
||||
"""Check sorting a repeated composite field using list.sort() arguments."""
|
||||
message = unittest_pb2.TestAllTypes()
|
||||
|
||||
get_bb = operator.attrgetter('bb')
|
||||
cmp_bb = lambda a, b: cmp(a.bb, b.bb)
|
||||
message.repeated_nested_message.add().bb = 1
|
||||
message.repeated_nested_message.add().bb = 3
|
||||
message.repeated_nested_message.add().bb = 2
|
||||
message.repeated_nested_message.add().bb = 6
|
||||
message.repeated_nested_message.add().bb = 5
|
||||
message.repeated_nested_message.add().bb = 4
|
||||
message.repeated_nested_message.sort(key=get_bb)
|
||||
self.assertEqual([k.bb for k in message.repeated_nested_message],
|
||||
[1, 2, 3, 4, 5, 6])
|
||||
message.repeated_nested_message.sort(key=get_bb, reverse=True)
|
||||
self.assertEqual([k.bb for k in message.repeated_nested_message],
|
||||
[6, 5, 4, 3, 2, 1])
|
||||
if sys.version_info.major >= 3: return # No cmp sorting in PY3.
|
||||
message.repeated_nested_message.sort(sort_function=cmp_bb)
|
||||
self.assertEqual([k.bb for k in message.repeated_nested_message],
|
||||
[1, 2, 3, 4, 5, 6])
|
||||
message.repeated_nested_message.sort(cmp=cmp_bb, reverse=True)
|
||||
self.assertEqual([k.bb for k in message.repeated_nested_message],
|
||||
[6, 5, 4, 3, 2, 1])
|
||||
|
||||
def testRepeatedScalarFieldSortArguments(self):
|
||||
"""Check sorting a scalar field using list.sort() arguments."""
|
||||
message = unittest_pb2.TestAllTypes()
|
||||
|
||||
message.repeated_int32.append(-3)
|
||||
message.repeated_int32.append(-2)
|
||||
message.repeated_int32.append(-1)
|
||||
message.repeated_int32.sort(key=abs)
|
||||
self.assertEqual(list(message.repeated_int32), [-1, -2, -3])
|
||||
message.repeated_int32.sort(key=abs, reverse=True)
|
||||
self.assertEqual(list(message.repeated_int32), [-3, -2, -1])
|
||||
if sys.version_info.major < 3: # No cmp sorting in PY3.
|
||||
abs_cmp = lambda a, b: cmp(abs(a), abs(b))
|
||||
message.repeated_int32.sort(sort_function=abs_cmp)
|
||||
self.assertEqual(list(message.repeated_int32), [-1, -2, -3])
|
||||
message.repeated_int32.sort(cmp=abs_cmp, reverse=True)
|
||||
self.assertEqual(list(message.repeated_int32), [-3, -2, -1])
|
||||
|
||||
message.repeated_string.append('aaa')
|
||||
message.repeated_string.append('bb')
|
||||
message.repeated_string.append('c')
|
||||
message.repeated_string.sort(key=len)
|
||||
self.assertEqual(list(message.repeated_string), ['c', 'bb', 'aaa'])
|
||||
message.repeated_string.sort(key=len, reverse=True)
|
||||
self.assertEqual(list(message.repeated_string), ['aaa', 'bb', 'c'])
|
||||
if sys.version_info.major < 3: # No cmp sorting in PY3.
|
||||
len_cmp = lambda a, b: cmp(len(a), len(b))
|
||||
message.repeated_string.sort(sort_function=len_cmp)
|
||||
self.assertEqual(list(message.repeated_string), ['c', 'bb', 'aaa'])
|
||||
message.repeated_string.sort(cmp=len_cmp, reverse=True)
|
||||
self.assertEqual(list(message.repeated_string), ['aaa', 'bb', 'c'])
|
||||
|
||||
def testRepeatedFieldsComparable(self):
|
||||
m1 = unittest_pb2.TestAllTypes()
|
||||
m2 = unittest_pb2.TestAllTypes()
|
||||
m1.repeated_int32.append(0)
|
||||
m1.repeated_int32.append(1)
|
||||
m1.repeated_int32.append(2)
|
||||
m2.repeated_int32.append(0)
|
||||
m2.repeated_int32.append(1)
|
||||
m2.repeated_int32.append(2)
|
||||
m1.repeated_nested_message.add().bb = 1
|
||||
m1.repeated_nested_message.add().bb = 2
|
||||
m1.repeated_nested_message.add().bb = 3
|
||||
m2.repeated_nested_message.add().bb = 1
|
||||
m2.repeated_nested_message.add().bb = 2
|
||||
m2.repeated_nested_message.add().bb = 3
|
||||
|
||||
if sys.version_info.major >= 3: return # No cmp() in PY3.
|
||||
|
||||
# These comparisons should not raise errors.
|
||||
_ = m1 < m2
|
||||
_ = m1.repeated_nested_message < m2.repeated_nested_message
|
||||
|
||||
# Make sure cmp always works. If it wasn't defined, these would be
|
||||
# id() comparisons and would all fail.
|
||||
self.assertEqual(cmp(m1, m2), 0)
|
||||
self.assertEqual(cmp(m1.repeated_int32, m2.repeated_int32), 0)
|
||||
self.assertEqual(cmp(m1.repeated_int32, [0, 1, 2]), 0)
|
||||
self.assertEqual(cmp(m1.repeated_nested_message,
|
||||
m2.repeated_nested_message), 0)
|
||||
with self.assertRaises(TypeError):
|
||||
# Can't compare repeated composite containers to lists.
|
||||
cmp(m1.repeated_nested_message, m2.repeated_nested_message[:])
|
||||
|
||||
# TODO(anuraag): Implement extensiondict comparison in C++ and then add test
|
||||
|
||||
def testParsingMerge(self):
|
||||
"""Check the merge behavior when a required or optional field appears
|
||||
multiple times in the input."""
|
||||
messages = [
|
||||
unittest_pb2.TestAllTypes(),
|
||||
unittest_pb2.TestAllTypes(),
|
||||
unittest_pb2.TestAllTypes() ]
|
||||
messages[0].optional_int32 = 1
|
||||
messages[1].optional_int64 = 2
|
||||
messages[2].optional_int32 = 3
|
||||
messages[2].optional_string = 'hello'
|
||||
|
||||
merged_message = unittest_pb2.TestAllTypes()
|
||||
merged_message.optional_int32 = 3
|
||||
merged_message.optional_int64 = 2
|
||||
merged_message.optional_string = 'hello'
|
||||
|
||||
generator = unittest_pb2.TestParsingMerge.RepeatedFieldsGenerator()
|
||||
generator.field1.extend(messages)
|
||||
generator.field2.extend(messages)
|
||||
generator.field3.extend(messages)
|
||||
generator.ext1.extend(messages)
|
||||
generator.ext2.extend(messages)
|
||||
generator.group1.add().field1.MergeFrom(messages[0])
|
||||
generator.group1.add().field1.MergeFrom(messages[1])
|
||||
generator.group1.add().field1.MergeFrom(messages[2])
|
||||
generator.group2.add().field1.MergeFrom(messages[0])
|
||||
generator.group2.add().field1.MergeFrom(messages[1])
|
||||
generator.group2.add().field1.MergeFrom(messages[2])
|
||||
|
||||
data = generator.SerializeToString()
|
||||
parsing_merge = unittest_pb2.TestParsingMerge()
|
||||
parsing_merge.ParseFromString(data)
|
||||
|
||||
# Required and optional fields should be merged.
|
||||
self.assertEqual(parsing_merge.required_all_types, merged_message)
|
||||
self.assertEqual(parsing_merge.optional_all_types, merged_message)
|
||||
self.assertEqual(parsing_merge.optionalgroup.optional_group_all_types,
|
||||
merged_message)
|
||||
self.assertEqual(parsing_merge.Extensions[
|
||||
unittest_pb2.TestParsingMerge.optional_ext],
|
||||
merged_message)
|
||||
|
||||
# Repeated fields should not be merged.
|
||||
self.assertEqual(len(parsing_merge.repeated_all_types), 3)
|
||||
self.assertEqual(len(parsing_merge.repeatedgroup), 3)
|
||||
self.assertEqual(len(parsing_merge.Extensions[
|
||||
unittest_pb2.TestParsingMerge.repeated_ext]), 3)
|
||||
|
||||
def ensureNestedMessageExists(self, msg, attribute):
|
||||
"""Make sure that a nested message object exists.
|
||||
|
||||
As soon as a nested message attribute is accessed, it will be present in the
|
||||
_fields dict, without being marked as actually being set.
|
||||
"""
|
||||
getattr(msg, attribute)
|
||||
self.assertFalse(msg.HasField(attribute))
|
||||
|
||||
def testOneofGetCaseNonexistingField(self):
|
||||
m = unittest_pb2.TestAllTypes()
|
||||
self.assertRaises(ValueError, m.WhichOneof, 'no_such_oneof_field')
|
||||
|
||||
def testOneofSemantics(self):
|
||||
m = unittest_pb2.TestAllTypes()
|
||||
self.assertIs(None, m.WhichOneof('oneof_field'))
|
||||
|
||||
m.oneof_uint32 = 11
|
||||
self.assertEqual('oneof_uint32', m.WhichOneof('oneof_field'))
|
||||
self.assertTrue(m.HasField('oneof_uint32'))
|
||||
|
||||
m.oneof_string = u'foo'
|
||||
self.assertEqual('oneof_string', m.WhichOneof('oneof_field'))
|
||||
self.assertFalse(m.HasField('oneof_uint32'))
|
||||
self.assertTrue(m.HasField('oneof_string'))
|
||||
|
||||
m.oneof_nested_message.bb = 11
|
||||
self.assertEqual('oneof_nested_message', m.WhichOneof('oneof_field'))
|
||||
self.assertFalse(m.HasField('oneof_string'))
|
||||
self.assertTrue(m.HasField('oneof_nested_message'))
|
||||
|
||||
m.oneof_bytes = b'bb'
|
||||
self.assertEqual('oneof_bytes', m.WhichOneof('oneof_field'))
|
||||
self.assertFalse(m.HasField('oneof_nested_message'))
|
||||
self.assertTrue(m.HasField('oneof_bytes'))
|
||||
|
||||
def testOneofCompositeFieldReadAccess(self):
|
||||
m = unittest_pb2.TestAllTypes()
|
||||
m.oneof_uint32 = 11
|
||||
|
||||
self.ensureNestedMessageExists(m, 'oneof_nested_message')
|
||||
self.assertEqual('oneof_uint32', m.WhichOneof('oneof_field'))
|
||||
self.assertEqual(11, m.oneof_uint32)
|
||||
|
||||
def testOneofHasField(self):
|
||||
m = unittest_pb2.TestAllTypes()
|
||||
self.assertFalse(m.HasField('oneof_field'))
|
||||
m.oneof_uint32 = 11
|
||||
self.assertTrue(m.HasField('oneof_field'))
|
||||
m.oneof_bytes = b'bb'
|
||||
self.assertTrue(m.HasField('oneof_field'))
|
||||
m.ClearField('oneof_bytes')
|
||||
self.assertFalse(m.HasField('oneof_field'))
|
||||
|
||||
def testOneofClearField(self):
|
||||
m = unittest_pb2.TestAllTypes()
|
||||
m.oneof_uint32 = 11
|
||||
m.ClearField('oneof_field')
|
||||
self.assertFalse(m.HasField('oneof_field'))
|
||||
self.assertFalse(m.HasField('oneof_uint32'))
|
||||
self.assertIs(None, m.WhichOneof('oneof_field'))
|
||||
|
||||
def testOneofClearSetField(self):
|
||||
m = unittest_pb2.TestAllTypes()
|
||||
m.oneof_uint32 = 11
|
||||
m.ClearField('oneof_uint32')
|
||||
self.assertFalse(m.HasField('oneof_field'))
|
||||
self.assertFalse(m.HasField('oneof_uint32'))
|
||||
self.assertIs(None, m.WhichOneof('oneof_field'))
|
||||
|
||||
def testOneofClearUnsetField(self):
|
||||
m = unittest_pb2.TestAllTypes()
|
||||
m.oneof_uint32 = 11
|
||||
self.ensureNestedMessageExists(m, 'oneof_nested_message')
|
||||
m.ClearField('oneof_nested_message')
|
||||
self.assertEqual(11, m.oneof_uint32)
|
||||
self.assertTrue(m.HasField('oneof_field'))
|
||||
self.assertTrue(m.HasField('oneof_uint32'))
|
||||
self.assertEqual('oneof_uint32', m.WhichOneof('oneof_field'))
|
||||
|
||||
def testOneofDeserialize(self):
|
||||
m = unittest_pb2.TestAllTypes()
|
||||
m.oneof_uint32 = 11
|
||||
m2 = unittest_pb2.TestAllTypes()
|
||||
m2.ParseFromString(m.SerializeToString())
|
||||
self.assertEqual('oneof_uint32', m2.WhichOneof('oneof_field'))
|
||||
|
||||
def testSortEmptyRepeatedCompositeContainer(self):
|
||||
"""Exercise a scenario that has led to segfaults in the past.
|
||||
"""
|
||||
m = unittest_pb2.TestAllTypes()
|
||||
m.repeated_nested_message.sort()
|
||||
|
||||
def testHasFieldOnRepeatedField(self):
|
||||
"""Using HasField on a repeated field should raise an exception.
|
||||
"""
|
||||
m = unittest_pb2.TestAllTypes()
|
||||
with self.assertRaises(ValueError) as _:
|
||||
m.HasField('repeated_int32')
|
||||
|
||||
|
||||
class ValidTypeNamesTest(basetest.TestCase):
  """Verify repeated-field container type names are importable (a
  prerequisite for pickling them)."""

  def assertImportFromName(self, msg, base_name):
    """Check that type(msg)'s printed name can be imported.

    Parses "<type 'module.ClassName'>" to recover the module path and
    class name, asserts the class name matches the expected container
    naming pattern, then performs the import.
    """
    # Parse <type 'module.class_name'> to extra 'some.name' as a string.
    tp_name = str(type(msg)).split("'")[1]
    valid_names = ('Repeated%sContainer' % base_name,
                   'Repeated%sFieldContainer' % base_name)
    # Bug fix: the failure message previously read "does end with",
    # although it fires when the name does NOT end with a valid suffix.
    self.assertTrue(any(tp_name.endswith(v) for v in valid_names),
                    '%r does not end with any of %r' % (tp_name, valid_names))

    parts = tp_name.split('.')
    class_name = parts[-1]
    module_name = '.'.join(parts[:-1])
    __import__(module_name, fromlist=[class_name])

  def testTypeNamesCanBeImported(self):
    # If import doesn't work, pickling won't work either.
    pb = unittest_pb2.TestAllTypes()
    self.assertImportFromName(pb.repeated_int32, 'Scalar')
    self.assertImportFromName(pb.repeated_nested_message, 'Composite')
|
||||
|
||||
|
||||
if __name__ == '__main__':
  # basetest.main() discovers and runs every TestCase defined above.
  basetest.main()
|
||||
@@ -0,0 +1,50 @@
|
||||
// Protocol Buffers - Google's data interchange format
|
||||
// Copyright 2008 Google Inc. All rights reserved.
|
||||
// https://developers.google.com/protocol-buffers/
|
||||
//
|
||||
// Redistribution and use in source and binary forms, with or without
|
||||
// modification, are permitted provided that the following conditions are
|
||||
// met:
|
||||
//
|
||||
// * Redistributions of source code must retain the above copyright
|
||||
// notice, this list of conditions and the following disclaimer.
|
||||
// * Redistributions in binary form must reproduce the above
|
||||
// copyright notice, this list of conditions and the following disclaimer
|
||||
// in the documentation and/or other materials provided with the
|
||||
// distribution.
|
||||
// * Neither the name of Google Inc. nor the names of its
|
||||
// contributors may be used to endorse or promote products derived from
|
||||
// this software without specific prior written permission.
|
||||
//
|
||||
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
||||
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
||||
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
||||
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
||||
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
||||
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
||||
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
||||
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
||||
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
||||
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
package google.protobuf.python.internal;
|
||||
|
||||
// Enum with values 0 and 1 in optional, repeated, and packed-repeated
// fields; the companion of TestMissingEnumValues below.
message TestEnumValues {
  enum NestedEnum {
    ZERO = 0;
    ONE = 1;
  }
  optional NestedEnum optional_nested_enum = 1;
  repeated NestedEnum repeated_nested_enum = 2;
  repeated NestedEnum packed_nested_enum = 3 [packed = true];
}

// Same field layout and numbers as TestEnumValues, but the enum lacks
// values 0 and 1 — data written by TestEnumValues therefore arrives
// here as enum values unknown to this type.
message TestMissingEnumValues {
  enum NestedEnum {
    TWO = 2;
  }
  optional NestedEnum optional_nested_enum = 1;
  repeated NestedEnum repeated_nested_enum = 2;
  repeated NestedEnum packed_nested_enum = 3 [packed = true];
}
|
||||
@@ -0,0 +1,58 @@
|
||||
// Protocol Buffers - Google's data interchange format
|
||||
// Copyright 2008 Google Inc. All rights reserved.
|
||||
// https://developers.google.com/protocol-buffers/
|
||||
//
|
||||
// Redistribution and use in source and binary forms, with or without
|
||||
// modification, are permitted provided that the following conditions are
|
||||
// met:
|
||||
//
|
||||
// * Redistributions of source code must retain the above copyright
|
||||
// notice, this list of conditions and the following disclaimer.
|
||||
// * Redistributions in binary form must reproduce the above
|
||||
// copyright notice, this list of conditions and the following disclaimer
|
||||
// in the documentation and/or other materials provided with the
|
||||
// distribution.
|
||||
// * Neither the name of Google Inc. nor the names of its
|
||||
// contributors may be used to endorse or promote products derived from
|
||||
// this software without specific prior written permission.
|
||||
//
|
||||
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
||||
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
||||
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
||||
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
||||
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
||||
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
||||
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
||||
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
||||
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
||||
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
// Author: robinson@google.com (Will Robinson)
|
||||
|
||||
|
||||
package google.protobuf.internal;
|
||||
|
||||
|
||||
// Wraps an extendable submessage so extension handling can be tested
// on a nested (non-top-level) message.
message TopLevelMessage {
  optional ExtendedMessage submessage = 1;
}


// Carries no regular fields; all of its content arrives via the
// extensions declared below.
message ExtendedMessage {
  extensions 1 to max;
}


// Payload type used by the message-typed extensions below.
message ForeignMessage {
  optional int32 foreign_message_int = 1;
}


// File-scope extensions of ExtendedMessage covering scalar and
// message types, singular and repeated.
extend ExtendedMessage {
  optional int32 optional_int_extension = 1;
  optional ForeignMessage optional_message_extension = 2;

  repeated int32 repeated_int_extension = 3;
  repeated ForeignMessage repeated_message_extension = 4;
}
|
||||
@@ -0,0 +1,49 @@
|
||||
// Protocol Buffers - Google's data interchange format
|
||||
// Copyright 2008 Google Inc. All rights reserved.
|
||||
// https://developers.google.com/protocol-buffers/
|
||||
//
|
||||
// Redistribution and use in source and binary forms, with or without
|
||||
// modification, are permitted provided that the following conditions are
|
||||
// met:
|
||||
//
|
||||
// * Redistributions of source code must retain the above copyright
|
||||
// notice, this list of conditions and the following disclaimer.
|
||||
// * Redistributions in binary form must reproduce the above
|
||||
// copyright notice, this list of conditions and the following disclaimer
|
||||
// in the documentation and/or other materials provided with the
|
||||
// distribution.
|
||||
// * Neither the name of Google Inc. nor the names of its
|
||||
// contributors may be used to endorse or promote products derived from
|
||||
// this software without specific prior written permission.
|
||||
//
|
||||
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
||||
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
||||
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
||||
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
||||
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
||||
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
||||
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
||||
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
||||
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
||||
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
// Author: jasonh@google.com (Jason Hsueh)
|
||||
//
|
||||
// This file is used to test a corner case in the CPP implementation where the
|
||||
// generated C++ type is available for the extendee, but the extension is
|
||||
// defined in a file whose C++ type is not in the binary.
|
||||
|
||||
|
||||
import "google/protobuf/internal/more_extensions.proto";
|
||||
|
||||
package google.protobuf.internal;
|
||||
|
||||
message DynamicMessageType {
|
||||
optional int32 a = 1;
|
||||
}
|
||||
|
||||
extend ExtendedMessage {
|
||||
optional int32 dynamic_int32_extension = 100;
|
||||
optional DynamicMessageType dynamic_message_extension = 101;
|
||||
}
|
||||
@@ -0,0 +1,51 @@
|
||||
// Protocol Buffers - Google's data interchange format
|
||||
// Copyright 2008 Google Inc. All rights reserved.
|
||||
// https://developers.google.com/protocol-buffers/
|
||||
//
|
||||
// Redistribution and use in source and binary forms, with or without
|
||||
// modification, are permitted provided that the following conditions are
|
||||
// met:
|
||||
//
|
||||
// * Redistributions of source code must retain the above copyright
|
||||
// notice, this list of conditions and the following disclaimer.
|
||||
// * Redistributions in binary form must reproduce the above
|
||||
// copyright notice, this list of conditions and the following disclaimer
|
||||
// in the documentation and/or other materials provided with the
|
||||
// distribution.
|
||||
// * Neither the name of Google Inc. nor the names of its
|
||||
// contributors may be used to endorse or promote products derived from
|
||||
// this software without specific prior written permission.
|
||||
//
|
||||
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
||||
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
||||
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
||||
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
||||
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
||||
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
||||
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
||||
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
||||
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
||||
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
// Author: robinson@google.com (Will Robinson)
|
||||
|
||||
|
||||
package google.protobuf.internal;
|
||||
|
||||
// A message where tag numbers are listed out of order, to allow us to test our
|
||||
// canonicalization of serialized output, which should always be in tag order.
|
||||
// We also mix in some extensions for extra fun.
|
||||
message OutOfOrderFields {
|
||||
optional sint32 optional_sint32 = 5;
|
||||
extensions 4 to 4;
|
||||
optional uint32 optional_uint32 = 3;
|
||||
extensions 2 to 2;
|
||||
optional int32 optional_int32 = 1;
|
||||
};
|
||||
|
||||
|
||||
extend OutOfOrderFields {
|
||||
optional uint64 optional_uint64 = 4;
|
||||
optional int64 optional_int64 = 2;
|
||||
}
|
||||
File diff suppressed because it is too large
Load Diff
File diff suppressed because it is too large
Load Diff
@@ -0,0 +1,136 @@
|
||||
#! /usr/bin/python
|
||||
#
|
||||
# Protocol Buffers - Google's data interchange format
|
||||
# Copyright 2008 Google Inc. All rights reserved.
|
||||
# https://developers.google.com/protocol-buffers/
|
||||
#
|
||||
# Redistribution and use in source and binary forms, with or without
|
||||
# modification, are permitted provided that the following conditions are
|
||||
# met:
|
||||
#
|
||||
# * Redistributions of source code must retain the above copyright
|
||||
# notice, this list of conditions and the following disclaimer.
|
||||
# * Redistributions in binary form must reproduce the above
|
||||
# copyright notice, this list of conditions and the following disclaimer
|
||||
# in the documentation and/or other materials provided with the
|
||||
# distribution.
|
||||
# * Neither the name of Google Inc. nor the names of its
|
||||
# contributors may be used to endorse or promote products derived from
|
||||
# this software without specific prior written permission.
|
||||
#
|
||||
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
||||
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
||||
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
||||
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
||||
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
||||
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
||||
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
||||
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
||||
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
||||
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
"""Tests for google.protobuf.internal.service_reflection."""
|
||||
|
||||
__author__ = 'petar@google.com (Petar Petrov)'
|
||||
|
||||
from google.apputils import basetest
|
||||
from google.protobuf import unittest_pb2
|
||||
from google.protobuf import service_reflection
|
||||
from google.protobuf import service
|
||||
|
||||
|
||||
class FooUnitTest(basetest.TestCase):
|
||||
|
||||
def testService(self):
|
||||
class MockRpcChannel(service.RpcChannel):
|
||||
def CallMethod(self, method, controller, request, response, callback):
|
||||
self.method = method
|
||||
self.controller = controller
|
||||
self.request = request
|
||||
callback(response)
|
||||
|
||||
class MockRpcController(service.RpcController):
|
||||
def SetFailed(self, msg):
|
||||
self.failure_message = msg
|
||||
|
||||
self.callback_response = None
|
||||
|
||||
class MyService(unittest_pb2.TestService):
|
||||
pass
|
||||
|
||||
self.callback_response = None
|
||||
|
||||
def MyCallback(response):
|
||||
self.callback_response = response
|
||||
|
||||
rpc_controller = MockRpcController()
|
||||
channel = MockRpcChannel()
|
||||
srvc = MyService()
|
||||
srvc.Foo(rpc_controller, unittest_pb2.FooRequest(), MyCallback)
|
||||
self.assertEqual('Method Foo not implemented.',
|
||||
rpc_controller.failure_message)
|
||||
self.assertEqual(None, self.callback_response)
|
||||
|
||||
rpc_controller.failure_message = None
|
||||
|
||||
service_descriptor = unittest_pb2.TestService.GetDescriptor()
|
||||
srvc.CallMethod(service_descriptor.methods[1], rpc_controller,
|
||||
unittest_pb2.BarRequest(), MyCallback)
|
||||
self.assertEqual('Method Bar not implemented.',
|
||||
rpc_controller.failure_message)
|
||||
self.assertEqual(None, self.callback_response)
|
||||
|
||||
class MyServiceImpl(unittest_pb2.TestService):
|
||||
def Foo(self, rpc_controller, request, done):
|
||||
self.foo_called = True
|
||||
def Bar(self, rpc_controller, request, done):
|
||||
self.bar_called = True
|
||||
|
||||
srvc = MyServiceImpl()
|
||||
rpc_controller.failure_message = None
|
||||
srvc.Foo(rpc_controller, unittest_pb2.FooRequest(), MyCallback)
|
||||
self.assertEqual(None, rpc_controller.failure_message)
|
||||
self.assertEqual(True, srvc.foo_called)
|
||||
|
||||
rpc_controller.failure_message = None
|
||||
srvc.CallMethod(service_descriptor.methods[1], rpc_controller,
|
||||
unittest_pb2.BarRequest(), MyCallback)
|
||||
self.assertEqual(None, rpc_controller.failure_message)
|
||||
self.assertEqual(True, srvc.bar_called)
|
||||
|
||||
def testServiceStub(self):
|
||||
class MockRpcChannel(service.RpcChannel):
|
||||
def CallMethod(self, method, controller, request,
|
||||
response_class, callback):
|
||||
self.method = method
|
||||
self.controller = controller
|
||||
self.request = request
|
||||
callback(response_class())
|
||||
|
||||
self.callback_response = None
|
||||
|
||||
def MyCallback(response):
|
||||
self.callback_response = response
|
||||
|
||||
channel = MockRpcChannel()
|
||||
stub = unittest_pb2.TestService_Stub(channel)
|
||||
rpc_controller = 'controller'
|
||||
request = 'request'
|
||||
|
||||
# GetDescriptor now static, still works as instance method for compatability
|
||||
self.assertEqual(unittest_pb2.TestService_Stub.GetDescriptor(),
|
||||
stub.GetDescriptor())
|
||||
|
||||
# Invoke method.
|
||||
stub.Foo(rpc_controller, request, MyCallback)
|
||||
|
||||
self.assertTrue(isinstance(self.callback_response,
|
||||
unittest_pb2.FooResponse))
|
||||
self.assertEqual(request, channel.request)
|
||||
self.assertEqual(rpc_controller, channel.controller)
|
||||
self.assertEqual(stub.GetDescriptor().methods[0], channel.method)
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
basetest.main()
|
||||
@@ -0,0 +1,120 @@
|
||||
#! /usr/bin/python
|
||||
#
|
||||
# Protocol Buffers - Google's data interchange format
|
||||
# Copyright 2008 Google Inc. All rights reserved.
|
||||
# https://developers.google.com/protocol-buffers/
|
||||
#
|
||||
# Redistribution and use in source and binary forms, with or without
|
||||
# modification, are permitted provided that the following conditions are
|
||||
# met:
|
||||
#
|
||||
# * Redistributions of source code must retain the above copyright
|
||||
# notice, this list of conditions and the following disclaimer.
|
||||
# * Redistributions in binary form must reproduce the above
|
||||
# copyright notice, this list of conditions and the following disclaimer
|
||||
# in the documentation and/or other materials provided with the
|
||||
# distribution.
|
||||
# * Neither the name of Google Inc. nor the names of its
|
||||
# contributors may be used to endorse or promote products derived from
|
||||
# this software without specific prior written permission.
|
||||
#
|
||||
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
||||
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
||||
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
||||
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
||||
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
||||
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
||||
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
||||
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
||||
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
||||
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
"""Tests for google.protobuf.symbol_database."""
|
||||
|
||||
from google.apputils import basetest
|
||||
from google.protobuf import unittest_pb2
|
||||
from google.protobuf import symbol_database
|
||||
|
||||
|
||||
class SymbolDatabaseTest(basetest.TestCase):
|
||||
|
||||
def _Database(self):
|
||||
db = symbol_database.SymbolDatabase()
|
||||
# Register representative types from unittest_pb2.
|
||||
db.RegisterFileDescriptor(unittest_pb2.DESCRIPTOR)
|
||||
db.RegisterMessage(unittest_pb2.TestAllTypes)
|
||||
db.RegisterMessage(unittest_pb2.TestAllTypes.NestedMessage)
|
||||
db.RegisterMessage(unittest_pb2.TestAllTypes.OptionalGroup)
|
||||
db.RegisterMessage(unittest_pb2.TestAllTypes.RepeatedGroup)
|
||||
db.RegisterEnumDescriptor(unittest_pb2.ForeignEnum.DESCRIPTOR)
|
||||
db.RegisterEnumDescriptor(unittest_pb2.TestAllTypes.NestedEnum.DESCRIPTOR)
|
||||
return db
|
||||
|
||||
def testGetPrototype(self):
|
||||
instance = self._Database().GetPrototype(
|
||||
unittest_pb2.TestAllTypes.DESCRIPTOR)
|
||||
self.assertTrue(instance is unittest_pb2.TestAllTypes)
|
||||
|
||||
def testGetMessages(self):
|
||||
messages = self._Database().GetMessages(
|
||||
['google/protobuf/unittest.proto'])
|
||||
self.assertTrue(
|
||||
unittest_pb2.TestAllTypes is
|
||||
messages['protobuf_unittest.TestAllTypes'])
|
||||
|
||||
def testGetSymbol(self):
|
||||
self.assertEquals(
|
||||
unittest_pb2.TestAllTypes, self._Database().GetSymbol(
|
||||
'protobuf_unittest.TestAllTypes'))
|
||||
self.assertEquals(
|
||||
unittest_pb2.TestAllTypes.NestedMessage, self._Database().GetSymbol(
|
||||
'protobuf_unittest.TestAllTypes.NestedMessage'))
|
||||
self.assertEquals(
|
||||
unittest_pb2.TestAllTypes.OptionalGroup, self._Database().GetSymbol(
|
||||
'protobuf_unittest.TestAllTypes.OptionalGroup'))
|
||||
self.assertEquals(
|
||||
unittest_pb2.TestAllTypes.RepeatedGroup, self._Database().GetSymbol(
|
||||
'protobuf_unittest.TestAllTypes.RepeatedGroup'))
|
||||
|
||||
def testEnums(self):
|
||||
# Check registration of types in the pool.
|
||||
self.assertEquals(
|
||||
'protobuf_unittest.ForeignEnum',
|
||||
self._Database().pool.FindEnumTypeByName(
|
||||
'protobuf_unittest.ForeignEnum').full_name)
|
||||
self.assertEquals(
|
||||
'protobuf_unittest.TestAllTypes.NestedEnum',
|
||||
self._Database().pool.FindEnumTypeByName(
|
||||
'protobuf_unittest.TestAllTypes.NestedEnum').full_name)
|
||||
|
||||
def testFindMessageTypeByName(self):
|
||||
self.assertEquals(
|
||||
'protobuf_unittest.TestAllTypes',
|
||||
self._Database().pool.FindMessageTypeByName(
|
||||
'protobuf_unittest.TestAllTypes').full_name)
|
||||
self.assertEquals(
|
||||
'protobuf_unittest.TestAllTypes.NestedMessage',
|
||||
self._Database().pool.FindMessageTypeByName(
|
||||
'protobuf_unittest.TestAllTypes.NestedMessage').full_name)
|
||||
|
||||
def testFindFindContainingSymbol(self):
|
||||
# Lookup based on either enum or message.
|
||||
self.assertEquals(
|
||||
'google/protobuf/unittest.proto',
|
||||
self._Database().pool.FindFileContainingSymbol(
|
||||
'protobuf_unittest.TestAllTypes.NestedEnum').name)
|
||||
self.assertEquals(
|
||||
'google/protobuf/unittest.proto',
|
||||
self._Database().pool.FindFileContainingSymbol(
|
||||
'protobuf_unittest.TestAllTypes').name)
|
||||
|
||||
def testFindFileByName(self):
|
||||
self.assertEquals(
|
||||
'google/protobuf/unittest.proto',
|
||||
self._Database().pool.FindFileByName(
|
||||
'google/protobuf/unittest.proto').name)
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
basetest.main()
|
||||
@@ -0,0 +1,52 @@
|
||||
// Protocol Buffers - Google's data interchange format
|
||||
// Copyright 2008 Google Inc. All rights reserved.
|
||||
// https://developers.google.com/protocol-buffers/
|
||||
//
|
||||
// Redistribution and use in source and binary forms, with or without
|
||||
// modification, are permitted provided that the following conditions are
|
||||
// met:
|
||||
//
|
||||
// * Redistributions of source code must retain the above copyright
|
||||
// notice, this list of conditions and the following disclaimer.
|
||||
// * Redistributions in binary form must reproduce the above
|
||||
// copyright notice, this list of conditions and the following disclaimer
|
||||
// in the documentation and/or other materials provided with the
|
||||
// distribution.
|
||||
// * Neither the name of Google Inc. nor the names of its
|
||||
// contributors may be used to endorse or promote products derived from
|
||||
// this software without specific prior written permission.
|
||||
//
|
||||
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
||||
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
||||
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
||||
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
||||
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
||||
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
||||
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
||||
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
||||
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
||||
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
// Author: kenton@google.com (Kenton Varda)
|
||||
|
||||
|
||||
package protobuf_unittest;
|
||||
|
||||
option py_generic_services = true;
|
||||
|
||||
message TestBadIdentifiers {
|
||||
extensions 100 to max;
|
||||
}
|
||||
|
||||
// Make sure these reasonable extension names don't conflict with internal
|
||||
// variables.
|
||||
extend TestBadIdentifiers {
|
||||
optional string message = 100 [default="foo"];
|
||||
optional string descriptor = 101 [default="bar"];
|
||||
optional string reflection = 102 [default="baz"];
|
||||
optional string service = 103 [default="qux"];
|
||||
}
|
||||
|
||||
message AnotherMessage {}
|
||||
service AnotherService {}
|
||||
@@ -0,0 +1,662 @@
|
||||
# Protocol Buffers - Google's data interchange format
|
||||
# Copyright 2008 Google Inc. All rights reserved.
|
||||
# https://developers.google.com/protocol-buffers/
|
||||
#
|
||||
# Redistribution and use in source and binary forms, with or without
|
||||
# modification, are permitted provided that the following conditions are
|
||||
# met:
|
||||
#
|
||||
# * Redistributions of source code must retain the above copyright
|
||||
# notice, this list of conditions and the following disclaimer.
|
||||
# * Redistributions in binary form must reproduce the above
|
||||
# copyright notice, this list of conditions and the following disclaimer
|
||||
# in the documentation and/or other materials provided with the
|
||||
# distribution.
|
||||
# * Neither the name of Google Inc. nor the names of its
|
||||
# contributors may be used to endorse or promote products derived from
|
||||
# this software without specific prior written permission.
|
||||
#
|
||||
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
||||
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
||||
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
||||
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
||||
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
||||
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
||||
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
||||
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
||||
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
||||
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
"""Utilities for Python proto2 tests.
|
||||
|
||||
This is intentionally modeled on C++ code in
|
||||
//google/protobuf/test_util.*.
|
||||
"""
|
||||
|
||||
__author__ = 'robinson@google.com (Will Robinson)'
|
||||
|
||||
import os.path
|
||||
|
||||
from google.protobuf import unittest_import_pb2
|
||||
from google.protobuf import unittest_pb2
|
||||
|
||||
|
||||
def SetAllNonLazyFields(message):
|
||||
"""Sets every non-lazy field in the message to a unique value.
|
||||
|
||||
Args:
|
||||
message: A unittest_pb2.TestAllTypes instance.
|
||||
"""
|
||||
|
||||
#
|
||||
# Optional fields.
|
||||
#
|
||||
|
||||
message.optional_int32 = 101
|
||||
message.optional_int64 = 102
|
||||
message.optional_uint32 = 103
|
||||
message.optional_uint64 = 104
|
||||
message.optional_sint32 = 105
|
||||
message.optional_sint64 = 106
|
||||
message.optional_fixed32 = 107
|
||||
message.optional_fixed64 = 108
|
||||
message.optional_sfixed32 = 109
|
||||
message.optional_sfixed64 = 110
|
||||
message.optional_float = 111
|
||||
message.optional_double = 112
|
||||
message.optional_bool = True
|
||||
message.optional_string = u'115'
|
||||
message.optional_bytes = b'116'
|
||||
|
||||
message.optionalgroup.a = 117
|
||||
message.optional_nested_message.bb = 118
|
||||
message.optional_foreign_message.c = 119
|
||||
message.optional_import_message.d = 120
|
||||
message.optional_public_import_message.e = 126
|
||||
|
||||
message.optional_nested_enum = unittest_pb2.TestAllTypes.BAZ
|
||||
message.optional_foreign_enum = unittest_pb2.FOREIGN_BAZ
|
||||
message.optional_import_enum = unittest_import_pb2.IMPORT_BAZ
|
||||
|
||||
message.optional_string_piece = u'124'
|
||||
message.optional_cord = u'125'
|
||||
|
||||
#
|
||||
# Repeated fields.
|
||||
#
|
||||
|
||||
message.repeated_int32.append(201)
|
||||
message.repeated_int64.append(202)
|
||||
message.repeated_uint32.append(203)
|
||||
message.repeated_uint64.append(204)
|
||||
message.repeated_sint32.append(205)
|
||||
message.repeated_sint64.append(206)
|
||||
message.repeated_fixed32.append(207)
|
||||
message.repeated_fixed64.append(208)
|
||||
message.repeated_sfixed32.append(209)
|
||||
message.repeated_sfixed64.append(210)
|
||||
message.repeated_float.append(211)
|
||||
message.repeated_double.append(212)
|
||||
message.repeated_bool.append(True)
|
||||
message.repeated_string.append(u'215')
|
||||
message.repeated_bytes.append(b'216')
|
||||
|
||||
message.repeatedgroup.add().a = 217
|
||||
message.repeated_nested_message.add().bb = 218
|
||||
message.repeated_foreign_message.add().c = 219
|
||||
message.repeated_import_message.add().d = 220
|
||||
message.repeated_lazy_message.add().bb = 227
|
||||
|
||||
message.repeated_nested_enum.append(unittest_pb2.TestAllTypes.BAR)
|
||||
message.repeated_foreign_enum.append(unittest_pb2.FOREIGN_BAR)
|
||||
message.repeated_import_enum.append(unittest_import_pb2.IMPORT_BAR)
|
||||
|
||||
message.repeated_string_piece.append(u'224')
|
||||
message.repeated_cord.append(u'225')
|
||||
|
||||
# Add a second one of each field.
|
||||
message.repeated_int32.append(301)
|
||||
message.repeated_int64.append(302)
|
||||
message.repeated_uint32.append(303)
|
||||
message.repeated_uint64.append(304)
|
||||
message.repeated_sint32.append(305)
|
||||
message.repeated_sint64.append(306)
|
||||
message.repeated_fixed32.append(307)
|
||||
message.repeated_fixed64.append(308)
|
||||
message.repeated_sfixed32.append(309)
|
||||
message.repeated_sfixed64.append(310)
|
||||
message.repeated_float.append(311)
|
||||
message.repeated_double.append(312)
|
||||
message.repeated_bool.append(False)
|
||||
message.repeated_string.append(u'315')
|
||||
message.repeated_bytes.append(b'316')
|
||||
|
||||
message.repeatedgroup.add().a = 317
|
||||
message.repeated_nested_message.add().bb = 318
|
||||
message.repeated_foreign_message.add().c = 319
|
||||
message.repeated_import_message.add().d = 320
|
||||
message.repeated_lazy_message.add().bb = 327
|
||||
|
||||
message.repeated_nested_enum.append(unittest_pb2.TestAllTypes.BAZ)
|
||||
message.repeated_foreign_enum.append(unittest_pb2.FOREIGN_BAZ)
|
||||
message.repeated_import_enum.append(unittest_import_pb2.IMPORT_BAZ)
|
||||
|
||||
message.repeated_string_piece.append(u'324')
|
||||
message.repeated_cord.append(u'325')
|
||||
|
||||
#
|
||||
# Fields that have defaults.
|
||||
#
|
||||
|
||||
message.default_int32 = 401
|
||||
message.default_int64 = 402
|
||||
message.default_uint32 = 403
|
||||
message.default_uint64 = 404
|
||||
message.default_sint32 = 405
|
||||
message.default_sint64 = 406
|
||||
message.default_fixed32 = 407
|
||||
message.default_fixed64 = 408
|
||||
message.default_sfixed32 = 409
|
||||
message.default_sfixed64 = 410
|
||||
message.default_float = 411
|
||||
message.default_double = 412
|
||||
message.default_bool = False
|
||||
message.default_string = '415'
|
||||
message.default_bytes = b'416'
|
||||
|
||||
message.default_nested_enum = unittest_pb2.TestAllTypes.FOO
|
||||
message.default_foreign_enum = unittest_pb2.FOREIGN_FOO
|
||||
message.default_import_enum = unittest_import_pb2.IMPORT_FOO
|
||||
|
||||
message.default_string_piece = '424'
|
||||
message.default_cord = '425'
|
||||
|
||||
message.oneof_uint32 = 601
|
||||
message.oneof_nested_message.bb = 602
|
||||
message.oneof_string = '603'
|
||||
message.oneof_bytes = b'604'
|
||||
|
||||
|
||||
def SetAllFields(message):
|
||||
SetAllNonLazyFields(message)
|
||||
message.optional_lazy_message.bb = 127
|
||||
|
||||
|
||||
def SetAllExtensions(message):
|
||||
"""Sets every extension in the message to a unique value.
|
||||
|
||||
Args:
|
||||
message: A unittest_pb2.TestAllExtensions instance.
|
||||
"""
|
||||
|
||||
extensions = message.Extensions
|
||||
pb2 = unittest_pb2
|
||||
import_pb2 = unittest_import_pb2
|
||||
|
||||
#
|
||||
# Optional fields.
|
||||
#
|
||||
|
||||
extensions[pb2.optional_int32_extension] = 101
|
||||
extensions[pb2.optional_int64_extension] = 102
|
||||
extensions[pb2.optional_uint32_extension] = 103
|
||||
extensions[pb2.optional_uint64_extension] = 104
|
||||
extensions[pb2.optional_sint32_extension] = 105
|
||||
extensions[pb2.optional_sint64_extension] = 106
|
||||
extensions[pb2.optional_fixed32_extension] = 107
|
||||
extensions[pb2.optional_fixed64_extension] = 108
|
||||
extensions[pb2.optional_sfixed32_extension] = 109
|
||||
extensions[pb2.optional_sfixed64_extension] = 110
|
||||
extensions[pb2.optional_float_extension] = 111
|
||||
extensions[pb2.optional_double_extension] = 112
|
||||
extensions[pb2.optional_bool_extension] = True
|
||||
extensions[pb2.optional_string_extension] = u'115'
|
||||
extensions[pb2.optional_bytes_extension] = b'116'
|
||||
|
||||
extensions[pb2.optionalgroup_extension].a = 117
|
||||
extensions[pb2.optional_nested_message_extension].bb = 118
|
||||
extensions[pb2.optional_foreign_message_extension].c = 119
|
||||
extensions[pb2.optional_import_message_extension].d = 120
|
||||
extensions[pb2.optional_public_import_message_extension].e = 126
|
||||
extensions[pb2.optional_lazy_message_extension].bb = 127
|
||||
|
||||
extensions[pb2.optional_nested_enum_extension] = pb2.TestAllTypes.BAZ
|
||||
extensions[pb2.optional_nested_enum_extension] = pb2.TestAllTypes.BAZ
|
||||
extensions[pb2.optional_foreign_enum_extension] = pb2.FOREIGN_BAZ
|
||||
extensions[pb2.optional_import_enum_extension] = import_pb2.IMPORT_BAZ
|
||||
|
||||
extensions[pb2.optional_string_piece_extension] = u'124'
|
||||
extensions[pb2.optional_cord_extension] = u'125'
|
||||
|
||||
#
|
||||
# Repeated fields.
|
||||
#
|
||||
|
||||
extensions[pb2.repeated_int32_extension].append(201)
|
||||
extensions[pb2.repeated_int64_extension].append(202)
|
||||
extensions[pb2.repeated_uint32_extension].append(203)
|
||||
extensions[pb2.repeated_uint64_extension].append(204)
|
||||
extensions[pb2.repeated_sint32_extension].append(205)
|
||||
extensions[pb2.repeated_sint64_extension].append(206)
|
||||
extensions[pb2.repeated_fixed32_extension].append(207)
|
||||
extensions[pb2.repeated_fixed64_extension].append(208)
|
||||
extensions[pb2.repeated_sfixed32_extension].append(209)
|
||||
extensions[pb2.repeated_sfixed64_extension].append(210)
|
||||
extensions[pb2.repeated_float_extension].append(211)
|
||||
extensions[pb2.repeated_double_extension].append(212)
|
||||
extensions[pb2.repeated_bool_extension].append(True)
|
||||
extensions[pb2.repeated_string_extension].append(u'215')
|
||||
extensions[pb2.repeated_bytes_extension].append(b'216')
|
||||
|
||||
extensions[pb2.repeatedgroup_extension].add().a = 217
|
||||
extensions[pb2.repeated_nested_message_extension].add().bb = 218
|
||||
extensions[pb2.repeated_foreign_message_extension].add().c = 219
|
||||
extensions[pb2.repeated_import_message_extension].add().d = 220
|
||||
extensions[pb2.repeated_lazy_message_extension].add().bb = 227
|
||||
|
||||
extensions[pb2.repeated_nested_enum_extension].append(pb2.TestAllTypes.BAR)
|
||||
extensions[pb2.repeated_foreign_enum_extension].append(pb2.FOREIGN_BAR)
|
||||
extensions[pb2.repeated_import_enum_extension].append(import_pb2.IMPORT_BAR)
|
||||
|
||||
extensions[pb2.repeated_string_piece_extension].append(u'224')
|
||||
extensions[pb2.repeated_cord_extension].append(u'225')
|
||||
|
||||
# Append a second one of each field.
|
||||
extensions[pb2.repeated_int32_extension].append(301)
|
||||
extensions[pb2.repeated_int64_extension].append(302)
|
||||
extensions[pb2.repeated_uint32_extension].append(303)
|
||||
extensions[pb2.repeated_uint64_extension].append(304)
|
||||
extensions[pb2.repeated_sint32_extension].append(305)
|
||||
extensions[pb2.repeated_sint64_extension].append(306)
|
||||
extensions[pb2.repeated_fixed32_extension].append(307)
|
||||
extensions[pb2.repeated_fixed64_extension].append(308)
|
||||
extensions[pb2.repeated_sfixed32_extension].append(309)
|
||||
extensions[pb2.repeated_sfixed64_extension].append(310)
|
||||
extensions[pb2.repeated_float_extension].append(311)
|
||||
extensions[pb2.repeated_double_extension].append(312)
|
||||
extensions[pb2.repeated_bool_extension].append(False)
|
||||
extensions[pb2.repeated_string_extension].append(u'315')
|
||||
extensions[pb2.repeated_bytes_extension].append(b'316')
|
||||
|
||||
extensions[pb2.repeatedgroup_extension].add().a = 317
|
||||
extensions[pb2.repeated_nested_message_extension].add().bb = 318
|
||||
extensions[pb2.repeated_foreign_message_extension].add().c = 319
|
||||
extensions[pb2.repeated_import_message_extension].add().d = 320
|
||||
extensions[pb2.repeated_lazy_message_extension].add().bb = 327
|
||||
|
||||
extensions[pb2.repeated_nested_enum_extension].append(pb2.TestAllTypes.BAZ)
|
||||
extensions[pb2.repeated_foreign_enum_extension].append(pb2.FOREIGN_BAZ)
|
||||
extensions[pb2.repeated_import_enum_extension].append(import_pb2.IMPORT_BAZ)
|
||||
|
||||
extensions[pb2.repeated_string_piece_extension].append(u'324')
|
||||
extensions[pb2.repeated_cord_extension].append(u'325')
|
||||
|
||||
#
|
||||
# Fields with defaults.
|
||||
#
|
||||
|
||||
extensions[pb2.default_int32_extension] = 401
|
||||
extensions[pb2.default_int64_extension] = 402
|
||||
extensions[pb2.default_uint32_extension] = 403
|
||||
extensions[pb2.default_uint64_extension] = 404
|
||||
extensions[pb2.default_sint32_extension] = 405
|
||||
extensions[pb2.default_sint64_extension] = 406
|
||||
extensions[pb2.default_fixed32_extension] = 407
|
||||
extensions[pb2.default_fixed64_extension] = 408
|
||||
extensions[pb2.default_sfixed32_extension] = 409
|
||||
extensions[pb2.default_sfixed64_extension] = 410
|
||||
extensions[pb2.default_float_extension] = 411
|
||||
extensions[pb2.default_double_extension] = 412
|
||||
extensions[pb2.default_bool_extension] = False
|
||||
extensions[pb2.default_string_extension] = u'415'
|
||||
extensions[pb2.default_bytes_extension] = b'416'
|
||||
|
||||
extensions[pb2.default_nested_enum_extension] = pb2.TestAllTypes.FOO
|
||||
extensions[pb2.default_foreign_enum_extension] = pb2.FOREIGN_FOO
|
||||
extensions[pb2.default_import_enum_extension] = import_pb2.IMPORT_FOO
|
||||
|
||||
extensions[pb2.default_string_piece_extension] = u'424'
|
||||
extensions[pb2.default_cord_extension] = '425'
|
||||
|
||||
extensions[pb2.oneof_uint32_extension] = 601
|
||||
extensions[pb2.oneof_nested_message_extension].bb = 602
|
||||
extensions[pb2.oneof_string_extension] = u'603'
|
||||
extensions[pb2.oneof_bytes_extension] = b'604'
|
||||
|
||||
|
||||
def SetAllFieldsAndExtensions(message):
|
||||
"""Sets every field and extension in the message to a unique value.
|
||||
|
||||
Args:
|
||||
message: A unittest_pb2.TestAllExtensions message.
|
||||
"""
|
||||
message.my_int = 1
|
||||
message.my_string = 'foo'
|
||||
message.my_float = 1.0
|
||||
message.Extensions[unittest_pb2.my_extension_int] = 23
|
||||
message.Extensions[unittest_pb2.my_extension_string] = 'bar'
|
||||
|
||||
|
||||
def ExpectAllFieldsAndExtensionsInOrder(serialized):
|
||||
"""Ensures that serialized is the serialization we expect for a message
|
||||
filled with SetAllFieldsAndExtensions(). (Specifically, ensures that the
|
||||
serialization is in canonical, tag-number order).
|
||||
"""
|
||||
my_extension_int = unittest_pb2.my_extension_int
|
||||
my_extension_string = unittest_pb2.my_extension_string
|
||||
expected_strings = []
|
||||
message = unittest_pb2.TestFieldOrderings()
|
||||
message.my_int = 1 # Field 1.
|
||||
expected_strings.append(message.SerializeToString())
|
||||
message.Clear()
|
||||
message.Extensions[my_extension_int] = 23 # Field 5.
|
||||
expected_strings.append(message.SerializeToString())
|
||||
message.Clear()
|
||||
message.my_string = 'foo' # Field 11.
|
||||
expected_strings.append(message.SerializeToString())
|
||||
message.Clear()
|
||||
message.Extensions[my_extension_string] = 'bar' # Field 50.
|
||||
expected_strings.append(message.SerializeToString())
|
||||
message.Clear()
|
||||
message.my_float = 1.0
|
||||
expected_strings.append(message.SerializeToString())
|
||||
message.Clear()
|
||||
expected = b''.join(expected_strings)
|
||||
|
||||
if expected != serialized:
|
||||
raise ValueError('Expected %r, found %r' % (expected, serialized))
|
||||
|
||||
|
||||
def ExpectAllFieldsSet(test_case, message):
|
||||
"""Check all fields for correct values have after Set*Fields() is called."""
|
||||
test_case.assertTrue(message.HasField('optional_int32'))
|
||||
test_case.assertTrue(message.HasField('optional_int64'))
|
||||
test_case.assertTrue(message.HasField('optional_uint32'))
|
||||
test_case.assertTrue(message.HasField('optional_uint64'))
|
||||
test_case.assertTrue(message.HasField('optional_sint32'))
|
||||
test_case.assertTrue(message.HasField('optional_sint64'))
|
||||
test_case.assertTrue(message.HasField('optional_fixed32'))
|
||||
test_case.assertTrue(message.HasField('optional_fixed64'))
|
||||
test_case.assertTrue(message.HasField('optional_sfixed32'))
|
||||
test_case.assertTrue(message.HasField('optional_sfixed64'))
|
||||
test_case.assertTrue(message.HasField('optional_float'))
|
||||
test_case.assertTrue(message.HasField('optional_double'))
|
||||
test_case.assertTrue(message.HasField('optional_bool'))
|
||||
test_case.assertTrue(message.HasField('optional_string'))
|
||||
test_case.assertTrue(message.HasField('optional_bytes'))
|
||||
|
||||
test_case.assertTrue(message.HasField('optionalgroup'))
|
||||
test_case.assertTrue(message.HasField('optional_nested_message'))
|
||||
test_case.assertTrue(message.HasField('optional_foreign_message'))
|
||||
test_case.assertTrue(message.HasField('optional_import_message'))
|
||||
|
||||
test_case.assertTrue(message.optionalgroup.HasField('a'))
|
||||
test_case.assertTrue(message.optional_nested_message.HasField('bb'))
|
||||
test_case.assertTrue(message.optional_foreign_message.HasField('c'))
|
||||
test_case.assertTrue(message.optional_import_message.HasField('d'))
|
||||
|
||||
test_case.assertTrue(message.HasField('optional_nested_enum'))
|
||||
test_case.assertTrue(message.HasField('optional_foreign_enum'))
|
||||
test_case.assertTrue(message.HasField('optional_import_enum'))
|
||||
|
||||
test_case.assertTrue(message.HasField('optional_string_piece'))
|
||||
test_case.assertTrue(message.HasField('optional_cord'))
|
||||
|
||||
test_case.assertEqual(101, message.optional_int32)
|
||||
test_case.assertEqual(102, message.optional_int64)
|
||||
test_case.assertEqual(103, message.optional_uint32)
|
||||
test_case.assertEqual(104, message.optional_uint64)
|
||||
test_case.assertEqual(105, message.optional_sint32)
|
||||
test_case.assertEqual(106, message.optional_sint64)
|
||||
test_case.assertEqual(107, message.optional_fixed32)
|
||||
test_case.assertEqual(108, message.optional_fixed64)
|
||||
test_case.assertEqual(109, message.optional_sfixed32)
|
||||
test_case.assertEqual(110, message.optional_sfixed64)
|
||||
test_case.assertEqual(111, message.optional_float)
|
||||
test_case.assertEqual(112, message.optional_double)
|
||||
test_case.assertEqual(True, message.optional_bool)
|
||||
test_case.assertEqual('115', message.optional_string)
|
||||
test_case.assertEqual(b'116', message.optional_bytes)
|
||||
|
||||
test_case.assertEqual(117, message.optionalgroup.a)
|
||||
test_case.assertEqual(118, message.optional_nested_message.bb)
|
||||
test_case.assertEqual(119, message.optional_foreign_message.c)
|
||||
test_case.assertEqual(120, message.optional_import_message.d)
|
||||
test_case.assertEqual(126, message.optional_public_import_message.e)
|
||||
test_case.assertEqual(127, message.optional_lazy_message.bb)
|
||||
|
||||
test_case.assertEqual(unittest_pb2.TestAllTypes.BAZ,
|
||||
message.optional_nested_enum)
|
||||
test_case.assertEqual(unittest_pb2.FOREIGN_BAZ,
|
||||
message.optional_foreign_enum)
|
||||
test_case.assertEqual(unittest_import_pb2.IMPORT_BAZ,
|
||||
message.optional_import_enum)
|
||||
|
||||
# -----------------------------------------------------------------
|
||||
|
||||
test_case.assertEqual(2, len(message.repeated_int32))
|
||||
test_case.assertEqual(2, len(message.repeated_int64))
|
||||
test_case.assertEqual(2, len(message.repeated_uint32))
|
||||
test_case.assertEqual(2, len(message.repeated_uint64))
|
||||
test_case.assertEqual(2, len(message.repeated_sint32))
|
||||
test_case.assertEqual(2, len(message.repeated_sint64))
|
||||
test_case.assertEqual(2, len(message.repeated_fixed32))
|
||||
test_case.assertEqual(2, len(message.repeated_fixed64))
|
||||
test_case.assertEqual(2, len(message.repeated_sfixed32))
|
||||
test_case.assertEqual(2, len(message.repeated_sfixed64))
|
||||
test_case.assertEqual(2, len(message.repeated_float))
|
||||
test_case.assertEqual(2, len(message.repeated_double))
|
||||
test_case.assertEqual(2, len(message.repeated_bool))
|
||||
test_case.assertEqual(2, len(message.repeated_string))
|
||||
test_case.assertEqual(2, len(message.repeated_bytes))
|
||||
|
||||
test_case.assertEqual(2, len(message.repeatedgroup))
|
||||
test_case.assertEqual(2, len(message.repeated_nested_message))
|
||||
test_case.assertEqual(2, len(message.repeated_foreign_message))
|
||||
test_case.assertEqual(2, len(message.repeated_import_message))
|
||||
test_case.assertEqual(2, len(message.repeated_nested_enum))
|
||||
test_case.assertEqual(2, len(message.repeated_foreign_enum))
|
||||
test_case.assertEqual(2, len(message.repeated_import_enum))
|
||||
|
||||
test_case.assertEqual(2, len(message.repeated_string_piece))
|
||||
test_case.assertEqual(2, len(message.repeated_cord))
|
||||
|
||||
test_case.assertEqual(201, message.repeated_int32[0])
|
||||
test_case.assertEqual(202, message.repeated_int64[0])
|
||||
test_case.assertEqual(203, message.repeated_uint32[0])
|
||||
test_case.assertEqual(204, message.repeated_uint64[0])
|
||||
test_case.assertEqual(205, message.repeated_sint32[0])
|
||||
test_case.assertEqual(206, message.repeated_sint64[0])
|
||||
test_case.assertEqual(207, message.repeated_fixed32[0])
|
||||
test_case.assertEqual(208, message.repeated_fixed64[0])
|
||||
test_case.assertEqual(209, message.repeated_sfixed32[0])
|
||||
test_case.assertEqual(210, message.repeated_sfixed64[0])
|
||||
test_case.assertEqual(211, message.repeated_float[0])
|
||||
test_case.assertEqual(212, message.repeated_double[0])
|
||||
test_case.assertEqual(True, message.repeated_bool[0])
|
||||
test_case.assertEqual('215', message.repeated_string[0])
|
||||
test_case.assertEqual(b'216', message.repeated_bytes[0])
|
||||
|
||||
test_case.assertEqual(217, message.repeatedgroup[0].a)
|
||||
test_case.assertEqual(218, message.repeated_nested_message[0].bb)
|
||||
test_case.assertEqual(219, message.repeated_foreign_message[0].c)
|
||||
test_case.assertEqual(220, message.repeated_import_message[0].d)
|
||||
test_case.assertEqual(227, message.repeated_lazy_message[0].bb)
|
||||
|
||||
test_case.assertEqual(unittest_pb2.TestAllTypes.BAR,
|
||||
message.repeated_nested_enum[0])
|
||||
test_case.assertEqual(unittest_pb2.FOREIGN_BAR,
|
||||
message.repeated_foreign_enum[0])
|
||||
test_case.assertEqual(unittest_import_pb2.IMPORT_BAR,
|
||||
message.repeated_import_enum[0])
|
||||
|
||||
test_case.assertEqual(301, message.repeated_int32[1])
|
||||
test_case.assertEqual(302, message.repeated_int64[1])
|
||||
test_case.assertEqual(303, message.repeated_uint32[1])
|
||||
test_case.assertEqual(304, message.repeated_uint64[1])
|
||||
test_case.assertEqual(305, message.repeated_sint32[1])
|
||||
test_case.assertEqual(306, message.repeated_sint64[1])
|
||||
test_case.assertEqual(307, message.repeated_fixed32[1])
|
||||
test_case.assertEqual(308, message.repeated_fixed64[1])
|
||||
test_case.assertEqual(309, message.repeated_sfixed32[1])
|
||||
test_case.assertEqual(310, message.repeated_sfixed64[1])
|
||||
test_case.assertEqual(311, message.repeated_float[1])
|
||||
test_case.assertEqual(312, message.repeated_double[1])
|
||||
test_case.assertEqual(False, message.repeated_bool[1])
|
||||
test_case.assertEqual('315', message.repeated_string[1])
|
||||
test_case.assertEqual(b'316', message.repeated_bytes[1])
|
||||
|
||||
test_case.assertEqual(317, message.repeatedgroup[1].a)
|
||||
test_case.assertEqual(318, message.repeated_nested_message[1].bb)
|
||||
test_case.assertEqual(319, message.repeated_foreign_message[1].c)
|
||||
test_case.assertEqual(320, message.repeated_import_message[1].d)
|
||||
test_case.assertEqual(327, message.repeated_lazy_message[1].bb)
|
||||
|
||||
test_case.assertEqual(unittest_pb2.TestAllTypes.BAZ,
|
||||
message.repeated_nested_enum[1])
|
||||
test_case.assertEqual(unittest_pb2.FOREIGN_BAZ,
|
||||
message.repeated_foreign_enum[1])
|
||||
test_case.assertEqual(unittest_import_pb2.IMPORT_BAZ,
|
||||
message.repeated_import_enum[1])
|
||||
|
||||
# -----------------------------------------------------------------
|
||||
|
||||
test_case.assertTrue(message.HasField('default_int32'))
|
||||
test_case.assertTrue(message.HasField('default_int64'))
|
||||
test_case.assertTrue(message.HasField('default_uint32'))
|
||||
test_case.assertTrue(message.HasField('default_uint64'))
|
||||
test_case.assertTrue(message.HasField('default_sint32'))
|
||||
test_case.assertTrue(message.HasField('default_sint64'))
|
||||
test_case.assertTrue(message.HasField('default_fixed32'))
|
||||
test_case.assertTrue(message.HasField('default_fixed64'))
|
||||
test_case.assertTrue(message.HasField('default_sfixed32'))
|
||||
test_case.assertTrue(message.HasField('default_sfixed64'))
|
||||
test_case.assertTrue(message.HasField('default_float'))
|
||||
test_case.assertTrue(message.HasField('default_double'))
|
||||
test_case.assertTrue(message.HasField('default_bool'))
|
||||
test_case.assertTrue(message.HasField('default_string'))
|
||||
test_case.assertTrue(message.HasField('default_bytes'))
|
||||
|
||||
test_case.assertTrue(message.HasField('default_nested_enum'))
|
||||
test_case.assertTrue(message.HasField('default_foreign_enum'))
|
||||
test_case.assertTrue(message.HasField('default_import_enum'))
|
||||
|
||||
test_case.assertEqual(401, message.default_int32)
|
||||
test_case.assertEqual(402, message.default_int64)
|
||||
test_case.assertEqual(403, message.default_uint32)
|
||||
test_case.assertEqual(404, message.default_uint64)
|
||||
test_case.assertEqual(405, message.default_sint32)
|
||||
test_case.assertEqual(406, message.default_sint64)
|
||||
test_case.assertEqual(407, message.default_fixed32)
|
||||
test_case.assertEqual(408, message.default_fixed64)
|
||||
test_case.assertEqual(409, message.default_sfixed32)
|
||||
test_case.assertEqual(410, message.default_sfixed64)
|
||||
test_case.assertEqual(411, message.default_float)
|
||||
test_case.assertEqual(412, message.default_double)
|
||||
test_case.assertEqual(False, message.default_bool)
|
||||
test_case.assertEqual('415', message.default_string)
|
||||
test_case.assertEqual(b'416', message.default_bytes)
|
||||
|
||||
test_case.assertEqual(unittest_pb2.TestAllTypes.FOO,
|
||||
message.default_nested_enum)
|
||||
test_case.assertEqual(unittest_pb2.FOREIGN_FOO,
|
||||
message.default_foreign_enum)
|
||||
test_case.assertEqual(unittest_import_pb2.IMPORT_FOO,
|
||||
message.default_import_enum)
|
||||
|
||||
|
||||
def GoldenFile(filename):
|
||||
"""Finds the given golden file and returns a file object representing it."""
|
||||
|
||||
# Search up the directory tree looking for the C++ protobuf source code.
|
||||
path = '.'
|
||||
while os.path.exists(path):
|
||||
if os.path.exists(os.path.join(path, 'src/google/protobuf')):
|
||||
# Found it. Load the golden file from the testdata directory.
|
||||
full_path = os.path.join(path, 'src/google/protobuf/testdata', filename)
|
||||
return open(full_path, 'rb')
|
||||
path = os.path.join(path, '..')
|
||||
|
||||
raise RuntimeError(
|
||||
'Could not find golden files. This test must be run from within the '
|
||||
'protobuf source package so that it can read test data files from the '
|
||||
'C++ source tree.')
|
||||
|
||||
|
||||
def GoldenFileData(filename):
|
||||
"""Finds the given golden file and returns its contents."""
|
||||
with GoldenFile(filename) as f:
|
||||
return f.read()
|
||||
|
||||
|
||||
def SetAllPackedFields(message):
|
||||
"""Sets every field in the message to a unique value.
|
||||
|
||||
Args:
|
||||
message: A unittest_pb2.TestPackedTypes instance.
|
||||
"""
|
||||
message.packed_int32.extend([601, 701])
|
||||
message.packed_int64.extend([602, 702])
|
||||
message.packed_uint32.extend([603, 703])
|
||||
message.packed_uint64.extend([604, 704])
|
||||
message.packed_sint32.extend([605, 705])
|
||||
message.packed_sint64.extend([606, 706])
|
||||
message.packed_fixed32.extend([607, 707])
|
||||
message.packed_fixed64.extend([608, 708])
|
||||
message.packed_sfixed32.extend([609, 709])
|
||||
message.packed_sfixed64.extend([610, 710])
|
||||
message.packed_float.extend([611.0, 711.0])
|
||||
message.packed_double.extend([612.0, 712.0])
|
||||
message.packed_bool.extend([True, False])
|
||||
message.packed_enum.extend([unittest_pb2.FOREIGN_BAR,
|
||||
unittest_pb2.FOREIGN_BAZ])
|
||||
|
||||
|
||||
def SetAllPackedExtensions(message):
|
||||
"""Sets every extension in the message to a unique value.
|
||||
|
||||
Args:
|
||||
message: A unittest_pb2.TestPackedExtensions instance.
|
||||
"""
|
||||
extensions = message.Extensions
|
||||
pb2 = unittest_pb2
|
||||
|
||||
extensions[pb2.packed_int32_extension].extend([601, 701])
|
||||
extensions[pb2.packed_int64_extension].extend([602, 702])
|
||||
extensions[pb2.packed_uint32_extension].extend([603, 703])
|
||||
extensions[pb2.packed_uint64_extension].extend([604, 704])
|
||||
extensions[pb2.packed_sint32_extension].extend([605, 705])
|
||||
extensions[pb2.packed_sint64_extension].extend([606, 706])
|
||||
extensions[pb2.packed_fixed32_extension].extend([607, 707])
|
||||
extensions[pb2.packed_fixed64_extension].extend([608, 708])
|
||||
extensions[pb2.packed_sfixed32_extension].extend([609, 709])
|
||||
extensions[pb2.packed_sfixed64_extension].extend([610, 710])
|
||||
extensions[pb2.packed_float_extension].extend([611.0, 711.0])
|
||||
extensions[pb2.packed_double_extension].extend([612.0, 712.0])
|
||||
extensions[pb2.packed_bool_extension].extend([True, False])
|
||||
extensions[pb2.packed_enum_extension].extend([unittest_pb2.FOREIGN_BAR,
|
||||
unittest_pb2.FOREIGN_BAZ])
|
||||
|
||||
|
||||
def SetAllUnpackedFields(message):
|
||||
"""Sets every field in the message to a unique value.
|
||||
|
||||
Args:
|
||||
message: A unittest_pb2.TestUnpackedTypes instance.
|
||||
"""
|
||||
message.unpacked_int32.extend([601, 701])
|
||||
message.unpacked_int64.extend([602, 702])
|
||||
message.unpacked_uint32.extend([603, 703])
|
||||
message.unpacked_uint64.extend([604, 704])
|
||||
message.unpacked_sint32.extend([605, 705])
|
||||
message.unpacked_sint64.extend([606, 706])
|
||||
message.unpacked_fixed32.extend([607, 707])
|
||||
message.unpacked_fixed64.extend([608, 708])
|
||||
message.unpacked_sfixed32.extend([609, 709])
|
||||
message.unpacked_sfixed64.extend([610, 710])
|
||||
message.unpacked_float.extend([611.0, 711.0])
|
||||
message.unpacked_double.extend([612.0, 712.0])
|
||||
message.unpacked_bool.extend([True, False])
|
||||
message.unpacked_enum.extend([unittest_pb2.FOREIGN_BAR,
|
||||
unittest_pb2.FOREIGN_BAZ])
|
||||
@@ -0,0 +1,68 @@
|
||||
#! /usr/bin/python
|
||||
#
|
||||
# Protocol Buffers - Google's data interchange format
|
||||
# Copyright 2008 Google Inc. All rights reserved.
|
||||
# https://developers.google.com/protocol-buffers/
|
||||
#
|
||||
# Redistribution and use in source and binary forms, with or without
|
||||
# modification, are permitted provided that the following conditions are
|
||||
# met:
|
||||
#
|
||||
# * Redistributions of source code must retain the above copyright
|
||||
# notice, this list of conditions and the following disclaimer.
|
||||
# * Redistributions in binary form must reproduce the above
|
||||
# copyright notice, this list of conditions and the following disclaimer
|
||||
# in the documentation and/or other materials provided with the
|
||||
# distribution.
|
||||
# * Neither the name of Google Inc. nor the names of its
|
||||
# contributors may be used to endorse or promote products derived from
|
||||
# this software without specific prior written permission.
|
||||
#
|
||||
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
||||
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
||||
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
||||
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
||||
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
||||
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
||||
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
||||
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
||||
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
||||
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
"""Tests for google.protobuf.text_encoding."""
|
||||
|
||||
from google.apputils import basetest
|
||||
from google.protobuf import text_encoding
|
||||
|
||||
TEST_VALUES = [
|
||||
("foo\\rbar\\nbaz\\t",
|
||||
"foo\\rbar\\nbaz\\t",
|
||||
b"foo\rbar\nbaz\t"),
|
||||
("\\'full of \\\"sound\\\" and \\\"fury\\\"\\'",
|
||||
"\\'full of \\\"sound\\\" and \\\"fury\\\"\\'",
|
||||
b"'full of \"sound\" and \"fury\"'"),
|
||||
("signi\\\\fying\\\\ nothing\\\\",
|
||||
"signi\\\\fying\\\\ nothing\\\\",
|
||||
b"signi\\fying\\ nothing\\"),
|
||||
("\\010\\t\\n\\013\\014\\r",
|
||||
"\x08\\t\\n\x0b\x0c\\r",
|
||||
b"\010\011\012\013\014\015")]
|
||||
|
||||
|
||||
class TextEncodingTestCase(basetest.TestCase):
|
||||
def testCEscape(self):
|
||||
for escaped, escaped_utf8, unescaped in TEST_VALUES:
|
||||
self.assertEquals(escaped,
|
||||
text_encoding.CEscape(unescaped, as_utf8=False))
|
||||
self.assertEquals(escaped_utf8,
|
||||
text_encoding.CEscape(unescaped, as_utf8=True))
|
||||
|
||||
def testCUnescape(self):
|
||||
for escaped, escaped_utf8, unescaped in TEST_VALUES:
|
||||
self.assertEquals(unescaped, text_encoding.CUnescape(escaped))
|
||||
self.assertEquals(unescaped, text_encoding.CUnescape(escaped_utf8))
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
basetest.main()
|
||||
@@ -0,0 +1,743 @@
|
||||
#! /usr/bin/python
|
||||
#
|
||||
# Protocol Buffers - Google's data interchange format
|
||||
# Copyright 2008 Google Inc. All rights reserved.
|
||||
# https://developers.google.com/protocol-buffers/
|
||||
#
|
||||
# Redistribution and use in source and binary forms, with or without
|
||||
# modification, are permitted provided that the following conditions are
|
||||
# met:
|
||||
#
|
||||
# * Redistributions of source code must retain the above copyright
|
||||
# notice, this list of conditions and the following disclaimer.
|
||||
# * Redistributions in binary form must reproduce the above
|
||||
# copyright notice, this list of conditions and the following disclaimer
|
||||
# in the documentation and/or other materials provided with the
|
||||
# distribution.
|
||||
# * Neither the name of Google Inc. nor the names of its
|
||||
# contributors may be used to endorse or promote products derived from
|
||||
# this software without specific prior written permission.
|
||||
#
|
||||
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
||||
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
||||
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
||||
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
||||
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
||||
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
||||
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
||||
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
||||
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
||||
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
"""Test for google.protobuf.text_format."""
|
||||
|
||||
__author__ = 'kenton@google.com (Kenton Varda)'
|
||||
|
||||
import re
|
||||
|
||||
from google.apputils import basetest
|
||||
from google.protobuf import text_format
|
||||
from google.protobuf.internal import api_implementation
|
||||
from google.protobuf.internal import test_util
|
||||
from google.protobuf import unittest_pb2
|
||||
from google.protobuf import unittest_mset_pb2
|
||||
|
||||
class TextFormatTest(basetest.TestCase):
|
||||
|
||||
def ReadGolden(self, golden_filename):
|
||||
with test_util.GoldenFile(golden_filename) as f:
|
||||
return (f.readlines() if str is bytes else # PY3
|
||||
[golden_line.decode('utf-8') for golden_line in f])
|
||||
|
||||
def CompareToGoldenFile(self, text, golden_filename):
|
||||
golden_lines = self.ReadGolden(golden_filename)
|
||||
self.assertMultiLineEqual(text, ''.join(golden_lines))
|
||||
|
||||
def CompareToGoldenText(self, text, golden_text):
|
||||
self.assertMultiLineEqual(text, golden_text)
|
||||
|
||||
def testPrintAllFields(self):
|
||||
message = unittest_pb2.TestAllTypes()
|
||||
test_util.SetAllFields(message)
|
||||
self.CompareToGoldenFile(
|
||||
self.RemoveRedundantZeros(text_format.MessageToString(message)),
|
||||
'text_format_unittest_data_oneof_implemented.txt')
|
||||
|
||||
def testPrintInIndexOrder(self):
|
||||
message = unittest_pb2.TestFieldOrderings()
|
||||
message.my_string = '115'
|
||||
message.my_int = 101
|
||||
message.my_float = 111
|
||||
self.CompareToGoldenText(
|
||||
self.RemoveRedundantZeros(text_format.MessageToString(
|
||||
message, use_index_order=True)),
|
||||
'my_string: \"115\"\nmy_int: 101\nmy_float: 111\n')
|
||||
self.CompareToGoldenText(
|
||||
self.RemoveRedundantZeros(text_format.MessageToString(
|
||||
message)), 'my_int: 101\nmy_string: \"115\"\nmy_float: 111\n')
|
||||
|
||||
def testPrintAllExtensions(self):
|
||||
message = unittest_pb2.TestAllExtensions()
|
||||
test_util.SetAllExtensions(message)
|
||||
self.CompareToGoldenFile(
|
||||
self.RemoveRedundantZeros(text_format.MessageToString(message)),
|
||||
'text_format_unittest_extensions_data.txt')
|
||||
|
||||
def testPrintAllFieldsPointy(self):
|
||||
message = unittest_pb2.TestAllTypes()
|
||||
test_util.SetAllFields(message)
|
||||
self.CompareToGoldenFile(
|
||||
self.RemoveRedundantZeros(
|
||||
text_format.MessageToString(message, pointy_brackets=True)),
|
||||
'text_format_unittest_data_pointy_oneof.txt')
|
||||
|
||||
def testPrintAllExtensionsPointy(self):
|
||||
message = unittest_pb2.TestAllExtensions()
|
||||
test_util.SetAllExtensions(message)
|
||||
self.CompareToGoldenFile(
|
||||
self.RemoveRedundantZeros(text_format.MessageToString(
|
||||
message, pointy_brackets=True)),
|
||||
'text_format_unittest_extensions_data_pointy.txt')
|
||||
|
||||
def testPrintMessageSet(self):
|
||||
message = unittest_mset_pb2.TestMessageSetContainer()
|
||||
ext1 = unittest_mset_pb2.TestMessageSetExtension1.message_set_extension
|
||||
ext2 = unittest_mset_pb2.TestMessageSetExtension2.message_set_extension
|
||||
message.message_set.Extensions[ext1].i = 23
|
||||
message.message_set.Extensions[ext2].str = 'foo'
|
||||
self.CompareToGoldenText(
|
||||
text_format.MessageToString(message),
|
||||
'message_set {\n'
|
||||
' [protobuf_unittest.TestMessageSetExtension1] {\n'
|
||||
' i: 23\n'
|
||||
' }\n'
|
||||
' [protobuf_unittest.TestMessageSetExtension2] {\n'
|
||||
' str: \"foo\"\n'
|
||||
' }\n'
|
||||
'}\n')
|
||||
|
||||
def testPrintExotic(self):
|
||||
message = unittest_pb2.TestAllTypes()
|
||||
message.repeated_int64.append(-9223372036854775808)
|
||||
message.repeated_uint64.append(18446744073709551615)
|
||||
message.repeated_double.append(123.456)
|
||||
message.repeated_double.append(1.23e22)
|
||||
message.repeated_double.append(1.23e-18)
|
||||
message.repeated_string.append('\000\001\a\b\f\n\r\t\v\\\'"')
|
||||
message.repeated_string.append(u'\u00fc\ua71f')
|
||||
self.CompareToGoldenText(
|
||||
self.RemoveRedundantZeros(text_format.MessageToString(message)),
|
||||
'repeated_int64: -9223372036854775808\n'
|
||||
'repeated_uint64: 18446744073709551615\n'
|
||||
'repeated_double: 123.456\n'
|
||||
'repeated_double: 1.23e+22\n'
|
||||
'repeated_double: 1.23e-18\n'
|
||||
'repeated_string:'
|
||||
' "\\000\\001\\007\\010\\014\\n\\r\\t\\013\\\\\\\'\\""\n'
|
||||
'repeated_string: "\\303\\274\\352\\234\\237"\n')
|
||||
|
||||
def testPrintExoticUnicodeSubclass(self):
|
||||
class UnicodeSub(unicode):
|
||||
pass
|
||||
message = unittest_pb2.TestAllTypes()
|
||||
message.repeated_string.append(UnicodeSub(u'\u00fc\ua71f'))
|
||||
self.CompareToGoldenText(
|
||||
text_format.MessageToString(message),
|
||||
'repeated_string: "\\303\\274\\352\\234\\237"\n')
|
||||
|
||||
def testPrintNestedMessageAsOneLine(self):
|
||||
message = unittest_pb2.TestAllTypes()
|
||||
msg = message.repeated_nested_message.add()
|
||||
msg.bb = 42
|
||||
self.CompareToGoldenText(
|
||||
text_format.MessageToString(message, as_one_line=True),
|
||||
'repeated_nested_message { bb: 42 }')
|
||||
|
||||
def testPrintRepeatedFieldsAsOneLine(self):
|
||||
message = unittest_pb2.TestAllTypes()
|
||||
message.repeated_int32.append(1)
|
||||
message.repeated_int32.append(1)
|
||||
message.repeated_int32.append(3)
|
||||
message.repeated_string.append("Google")
|
||||
message.repeated_string.append("Zurich")
|
||||
self.CompareToGoldenText(
|
||||
text_format.MessageToString(message, as_one_line=True),
|
||||
'repeated_int32: 1 repeated_int32: 1 repeated_int32: 3 '
|
||||
'repeated_string: "Google" repeated_string: "Zurich"')
|
||||
|
||||
def testPrintNestedNewLineInStringAsOneLine(self):
|
||||
message = unittest_pb2.TestAllTypes()
|
||||
message.optional_string = "a\nnew\nline"
|
||||
self.CompareToGoldenText(
|
||||
text_format.MessageToString(message, as_one_line=True),
|
||||
'optional_string: "a\\nnew\\nline"')
|
||||
|
||||
def testPrintMessageSetAsOneLine(self):
|
||||
message = unittest_mset_pb2.TestMessageSetContainer()
|
||||
ext1 = unittest_mset_pb2.TestMessageSetExtension1.message_set_extension
|
||||
ext2 = unittest_mset_pb2.TestMessageSetExtension2.message_set_extension
|
||||
message.message_set.Extensions[ext1].i = 23
|
||||
message.message_set.Extensions[ext2].str = 'foo'
|
||||
self.CompareToGoldenText(
|
||||
text_format.MessageToString(message, as_one_line=True),
|
||||
'message_set {'
|
||||
' [protobuf_unittest.TestMessageSetExtension1] {'
|
||||
' i: 23'
|
||||
' }'
|
||||
' [protobuf_unittest.TestMessageSetExtension2] {'
|
||||
' str: \"foo\"'
|
||||
' }'
|
||||
' }')
|
||||
|
||||
def testPrintExoticAsOneLine(self):
|
||||
message = unittest_pb2.TestAllTypes()
|
||||
message.repeated_int64.append(-9223372036854775808)
|
||||
message.repeated_uint64.append(18446744073709551615)
|
||||
message.repeated_double.append(123.456)
|
||||
message.repeated_double.append(1.23e22)
|
||||
message.repeated_double.append(1.23e-18)
|
||||
message.repeated_string.append('\000\001\a\b\f\n\r\t\v\\\'"')
|
||||
message.repeated_string.append(u'\u00fc\ua71f')
|
||||
self.CompareToGoldenText(
|
||||
self.RemoveRedundantZeros(
|
||||
text_format.MessageToString(message, as_one_line=True)),
|
||||
'repeated_int64: -9223372036854775808'
|
||||
' repeated_uint64: 18446744073709551615'
|
||||
' repeated_double: 123.456'
|
||||
' repeated_double: 1.23e+22'
|
||||
' repeated_double: 1.23e-18'
|
||||
' repeated_string: '
|
||||
'"\\000\\001\\007\\010\\014\\n\\r\\t\\013\\\\\\\'\\""'
|
||||
' repeated_string: "\\303\\274\\352\\234\\237"')
|
||||
|
||||
def testRoundTripExoticAsOneLine(self):
|
||||
message = unittest_pb2.TestAllTypes()
|
||||
message.repeated_int64.append(-9223372036854775808)
|
||||
message.repeated_uint64.append(18446744073709551615)
|
||||
message.repeated_double.append(123.456)
|
||||
message.repeated_double.append(1.23e22)
|
||||
message.repeated_double.append(1.23e-18)
|
||||
message.repeated_string.append('\000\001\a\b\f\n\r\t\v\\\'"')
|
||||
message.repeated_string.append(u'\u00fc\ua71f')
|
||||
|
||||
# Test as_utf8 = False.
|
||||
wire_text = text_format.MessageToString(
|
||||
message, as_one_line=True, as_utf8=False)
|
||||
parsed_message = unittest_pb2.TestAllTypes()
|
||||
r = text_format.Parse(wire_text, parsed_message)
|
||||
self.assertIs(r, parsed_message)
|
||||
self.assertEquals(message, parsed_message)
|
||||
|
||||
# Test as_utf8 = True.
|
||||
wire_text = text_format.MessageToString(
|
||||
message, as_one_line=True, as_utf8=True)
|
||||
parsed_message = unittest_pb2.TestAllTypes()
|
||||
r = text_format.Parse(wire_text, parsed_message)
|
||||
self.assertIs(r, parsed_message)
|
||||
self.assertEquals(message, parsed_message,
|
||||
'\n%s != %s' % (message, parsed_message))
|
||||
|
||||
def testPrintRawUtf8String(self):
|
||||
message = unittest_pb2.TestAllTypes()
|
||||
message.repeated_string.append(u'\u00fc\ua71f')
|
||||
text = text_format.MessageToString(message, as_utf8=True)
|
||||
self.CompareToGoldenText(text, 'repeated_string: "\303\274\352\234\237"\n')
|
||||
parsed_message = unittest_pb2.TestAllTypes()
|
||||
text_format.Parse(text, parsed_message)
|
||||
self.assertEquals(message, parsed_message,
|
||||
'\n%s != %s' % (message, parsed_message))
|
||||
|
||||
def testPrintFloatFormat(self):
  """The float_format argument must be honored, including in sub-messages."""
  # Check that float_format argument is passed to sub-message formatting.
  message = unittest_pb2.NestedTestAllTypes()
  # We use 1.25 as it is a round number in binary.  The proto 32-bit float
  # will not gain additional imprecise digits as a 64-bit Python float and
  # show up in its str.  32-bit 1.2 is noisy when extended to 64-bit:
  #  >>> struct.unpack('f', struct.pack('f', 1.2))[0]
  #  1.2000000476837158
  #  >>> struct.unpack('f', struct.pack('f', 1.25))[0]
  #  1.25
  message.payload.optional_float = 1.25
  # Check rounding at 15 significant digits
  message.payload.optional_double = -.000003456789012345678
  # Check no decimal point.
  message.payload.repeated_float.append(-5642)
  # Check no trailing zeros.
  message.payload.repeated_double.append(.000078900)
  formatted_fields = ['optional_float: 1.25',
                      'optional_double: -3.45678901234568e-6',
                      'repeated_float: -5642',
                      'repeated_double: 7.89e-5']
  text_message = text_format.MessageToString(message, float_format='.15g')
  self.CompareToGoldenText(
      self.RemoveRedundantZeros(text_message),
      'payload {{\n  {}\n  {}\n  {}\n  {}\n}}\n'.format(*formatted_fields))
  # as_one_line=True is a separate code branch where float_format is passed.
  text_message = text_format.MessageToString(message, as_one_line=True,
                                             float_format='.15g')
  self.CompareToGoldenText(
      self.RemoveRedundantZeros(text_message),
      'payload {{ {} {} {} {} }}'.format(*formatted_fields))

def testMessageToString(self):
  """str(message) produces the text-format rendering."""
  message = unittest_pb2.ForeignMessage()
  message.c = 123
  self.assertEqual('c: 123\n', str(message))
|
||||
|
||||
def RemoveRedundantZeros(self, text):
  """Normalizes platform-dependent float formatting in text-format output.

  Some platforms print exponents with extra leading zeros (1e+005 instead
  of 1e+5); applying .replace() twice per sign strips up to two such zeros.
  Floating point fields are printed with a '.0' suffix even if they are
  actually integer numbers, so trailing '.0' is removed per line to match
  the golden files.

  Args:
    text: text-format output to normalize.

  Returns:
    The normalized text.
  """
  text = text.replace('e+0', 'e+').replace('e+0', 'e+') \
             .replace('e-0', 'e-').replace('e-0', 'e-')
  # Use a raw string: '\.' in a plain literal is an invalid escape sequence
  # (DeprecationWarning since Python 3.6, an error in newer interpreters).
  text = re.compile(r'\.0$', re.MULTILINE).sub('', text)
  return text
|
||||
|
||||
def testParseGolden(self):
  """Parsing the golden text file reproduces a fully-set TestAllTypes."""
  golden_text = '\n'.join(self.ReadGolden('text_format_unittest_data.txt'))
  parsed_message = unittest_pb2.TestAllTypes()
  r = text_format.Parse(golden_text, parsed_message)
  self.assertIs(r, parsed_message)  # Parse returns its message argument.

  message = unittest_pb2.TestAllTypes()
  test_util.SetAllFields(message)
  self.assertEquals(message, parsed_message)

def testParseGoldenExtensions(self):
  """Parsing the extensions golden file reproduces TestAllExtensions."""
  golden_text = '\n'.join(self.ReadGolden(
      'text_format_unittest_extensions_data.txt'))
  parsed_message = unittest_pb2.TestAllExtensions()
  text_format.Parse(golden_text, parsed_message)

  message = unittest_pb2.TestAllExtensions()
  test_util.SetAllExtensions(message)
  self.assertEquals(message, parsed_message)

def testParseAllFields(self):
  """MessageToString output round-trips through Parse for all field types."""
  message = unittest_pb2.TestAllTypes()
  test_util.SetAllFields(message)
  ascii_text = text_format.MessageToString(message)

  parsed_message = unittest_pb2.TestAllTypes()
  text_format.Parse(ascii_text, parsed_message)
  self.assertEqual(message, parsed_message)
  test_util.ExpectAllFieldsSet(self, message)

def testParseAllExtensions(self):
  """MessageToString output round-trips through Parse for all extensions."""
  message = unittest_pb2.TestAllExtensions()
  test_util.SetAllExtensions(message)
  ascii_text = text_format.MessageToString(message)

  parsed_message = unittest_pb2.TestAllExtensions()
  text_format.Parse(ascii_text, parsed_message)
  self.assertEqual(message, parsed_message)
|
||||
|
||||
def testParseMessageSet(self):
  """Parses MessageSet extensions given in [full.name] { ... } syntax."""
  message = unittest_pb2.TestAllTypes()
  text = ('repeated_uint64: 1\n'
          'repeated_uint64: 2\n')
  text_format.Parse(text, message)
  self.assertEqual(1, message.repeated_uint64[0])
  self.assertEqual(2, message.repeated_uint64[1])

  message = unittest_mset_pb2.TestMessageSetContainer()
  text = ('message_set {\n'
          '  [protobuf_unittest.TestMessageSetExtension1] {\n'
          '    i: 23\n'
          '  }\n'
          '  [protobuf_unittest.TestMessageSetExtension2] {\n'
          '    str: \"foo\"\n'
          '  }\n'
          '}\n')
  text_format.Parse(text, message)
  ext1 = unittest_mset_pb2.TestMessageSetExtension1.message_set_extension
  ext2 = unittest_mset_pb2.TestMessageSetExtension2.message_set_extension
  self.assertEquals(23, message.message_set.Extensions[ext1].i)
  self.assertEquals('foo', message.message_set.Extensions[ext2].str)

def testParseExotic(self):
  """Parses extreme numeric values, escape sequences and string literals."""
  message = unittest_pb2.TestAllTypes()
  text = ('repeated_int64: -9223372036854775808\n'
          'repeated_uint64: 18446744073709551615\n'
          'repeated_double: 123.456\n'
          'repeated_double: 1.23e+22\n'
          'repeated_double: 1.23e-18\n'
          'repeated_string: \n'
          '"\\000\\001\\007\\010\\014\\n\\r\\t\\013\\\\\\\'\\""\n'
          'repeated_string: "foo" \'corge\' "grault"\n'
          'repeated_string: "\\303\\274\\352\\234\\237"\n'
          'repeated_string: "\\xc3\\xbc"\n'
          'repeated_string: "\xc3\xbc"\n')
  text_format.Parse(text, message)

  self.assertEqual(-9223372036854775808, message.repeated_int64[0])
  self.assertEqual(18446744073709551615, message.repeated_uint64[0])
  self.assertEqual(123.456, message.repeated_double[0])
  self.assertEqual(1.23e22, message.repeated_double[1])
  self.assertEqual(1.23e-18, message.repeated_double[2])
  self.assertEqual(
      '\000\001\a\b\f\n\r\t\v\\\'"', message.repeated_string[0])
  # Adjacent string literals are concatenated, as in C.
  self.assertEqual('foocorgegrault', message.repeated_string[1])
  self.assertEqual(u'\u00fc\ua71f', message.repeated_string[2])
  self.assertEqual(u'\u00fc', message.repeated_string[3])
|
||||
|
||||
def testParseTrailingCommas(self):
  """Both ',' and ';' are accepted as optional field separators."""
  message = unittest_pb2.TestAllTypes()
  text = ('repeated_int64: 100;\n'
          'repeated_int64: 200;\n'
          'repeated_int64: 300,\n'
          'repeated_string: "one",\n'
          'repeated_string: "two";\n')
  text_format.Parse(text, message)

  self.assertEqual(100, message.repeated_int64[0])
  self.assertEqual(200, message.repeated_int64[1])
  self.assertEqual(300, message.repeated_int64[2])
  self.assertEqual(u'one', message.repeated_string[0])
  self.assertEqual(u'two', message.repeated_string[1])

def testParseEmptyText(self):
  """Parsing the empty string leaves the message untouched."""
  message = unittest_pb2.TestAllTypes()
  text = ''
  text_format.Parse(text, message)
  self.assertEquals(unittest_pb2.TestAllTypes(), message)

def testParseInvalidUtf8(self):
  """Byte escapes that do not form valid UTF-8 are rejected for strings."""
  message = unittest_pb2.TestAllTypes()
  text = 'repeated_string: "\\xc3\\xc3"'
  self.assertRaises(text_format.ParseError, text_format.Parse, text, message)
|
||||
|
||||
def testParseSingleWord(self):
  """A bare unknown identifier yields a ParseError with line:column info."""
  message = unittest_pb2.TestAllTypes()
  text = 'foo'
  self.assertRaisesWithLiteralMatch(
      text_format.ParseError,
      ('1:1 : Message type "protobuf_unittest.TestAllTypes" has no field named '
       '"foo".'),
      text_format.Parse, text, message)

def testParseUnknownField(self):
  """An unknown field name yields a ParseError with line:column info."""
  message = unittest_pb2.TestAllTypes()
  text = 'unknown_field: 8\n'
  self.assertRaisesWithLiteralMatch(
      text_format.ParseError,
      ('1:1 : Message type "protobuf_unittest.TestAllTypes" has no field named '
       '"unknown_field".'),
      text_format.Parse, text, message)

def testParseBadExtension(self):
  """Unregistered or unsupported [extension] names are rejected."""
  message = unittest_pb2.TestAllExtensions()
  text = '[unknown_extension]: 8\n'
  self.assertRaisesWithLiteralMatch(
      text_format.ParseError,
      '1:2 : Extension "unknown_extension" not registered.',
      text_format.Parse, text, message)
  # TestAllTypes declares no extension ranges, so any [ext] is an error.
  message = unittest_pb2.TestAllTypes()
  self.assertRaisesWithLiteralMatch(
      text_format.ParseError,
      ('1:2 : Message type "protobuf_unittest.TestAllTypes" does not have '
       'extensions.'),
      text_format.Parse, text, message)

def testParseGroupNotClosed(self):
  """An unterminated group reports the expected closing delimiter."""
  message = unittest_pb2.TestAllTypes()
  text = 'RepeatedGroup: <'
  self.assertRaisesWithLiteralMatch(
      text_format.ParseError, '1:16 : Expected ">".',
      text_format.Parse, text, message)

  text = 'RepeatedGroup: {'
  self.assertRaisesWithLiteralMatch(
      text_format.ParseError, '1:16 : Expected "}".',
      text_format.Parse, text, message)
|
||||
|
||||
def testParseEmptyGroup(self):
  """Empty {} and <> groups still mark the group field as present."""
  message = unittest_pb2.TestAllTypes()
  text = 'OptionalGroup: {}'
  text_format.Parse(text, message)
  self.assertTrue(message.HasField('optionalgroup'))

  message.Clear()

  message = unittest_pb2.TestAllTypes()
  text = 'OptionalGroup: <>'
  text_format.Parse(text, message)
  self.assertTrue(message.HasField('optionalgroup'))

def testParseBadEnumValue(self):
  """Unknown enum names and unknown numeric values both raise ParseError."""
  message = unittest_pb2.TestAllTypes()
  text = 'optional_nested_enum: BARR'
  self.assertRaisesWithLiteralMatch(
      text_format.ParseError,
      ('1:23 : Enum type "protobuf_unittest.TestAllTypes.NestedEnum" '
       'has no value named BARR.'),
      text_format.Parse, text, message)

  message = unittest_pb2.TestAllTypes()
  text = 'optional_nested_enum: 100'
  self.assertRaisesWithLiteralMatch(
      text_format.ParseError,
      ('1:23 : Enum type "protobuf_unittest.TestAllTypes.NestedEnum" '
       'has no value with number 100.'),
      text_format.Parse, text, message)

def testParseBadIntValue(self):
  """Non-numeric text for an integer field raises ParseError."""
  message = unittest_pb2.TestAllTypes()
  text = 'optional_int32: bork'
  self.assertRaisesWithLiteralMatch(
      text_format.ParseError,
      ('1:17 : Couldn\'t parse integer: bork'),
      text_format.Parse, text, message)
|
||||
|
||||
def testParseStringFieldUnescape(self):
  """Backslash escapes in string fields are unescaped exactly once.

  The raw-string literal below layers Python-level and text-format-level
  backslashes; each assertion spells the expected result via SLASH so the
  two escaping levels stay distinguishable.
  """
  message = unittest_pb2.TestAllTypes()
  text = r'''repeated_string: "\xf\x62"
             repeated_string: "\\xf\\x62"
             repeated_string: "\\\xf\\\x62"
             repeated_string: "\\\\xf\\\\x62"
             repeated_string: "\\\\\xf\\\\\x62"
             repeated_string: "\x5cx20"'''
  text_format.Parse(text, message)

  SLASH = '\\'
  self.assertEqual('\x0fb', message.repeated_string[0])
  self.assertEqual(SLASH + 'xf' + SLASH + 'x62', message.repeated_string[1])
  self.assertEqual(SLASH + '\x0f' + SLASH + 'b', message.repeated_string[2])
  self.assertEqual(SLASH + SLASH + 'xf' + SLASH + SLASH + 'x62',
                   message.repeated_string[3])
  self.assertEqual(SLASH + SLASH + '\x0f' + SLASH + SLASH + 'b',
                   message.repeated_string[4])
  self.assertEqual(SLASH + 'x20', message.repeated_string[5])
|
||||
|
||||
def testMergeRepeatedScalars(self):
  """Merge overwrites an optional scalar seen twice (last one wins)."""
  message = unittest_pb2.TestAllTypes()
  text = ('optional_int32: 42 '
          'optional_int32: 67')
  r = text_format.Merge(text, message)
  self.assertIs(r, message)
  self.assertEqual(67, message.optional_int32)

def testParseRepeatedScalars(self):
  """Parse, unlike Merge, rejects duplicate optional scalars."""
  message = unittest_pb2.TestAllTypes()
  text = ('optional_int32: 42 '
          'optional_int32: 67')
  self.assertRaisesWithLiteralMatch(
      text_format.ParseError,
      ('1:36 : Message type "protobuf_unittest.TestAllTypes" should not '
       'have multiple "optional_int32" fields.'),
      text_format.Parse, text, message)

def testMergeRepeatedNestedMessageScalars(self):
  """Merge combines repeated occurrences of an optional sub-message."""
  message = unittest_pb2.TestAllTypes()
  text = ('optional_nested_message { bb: 1 } '
          'optional_nested_message { bb: 2 }')
  r = text_format.Merge(text, message)
  self.assertTrue(r is message)
  self.assertEqual(2, message.optional_nested_message.bb)

def testParseRepeatedNestedMessageScalars(self):
  """Parse rejects duplicate scalars inside repeated sub-messages."""
  message = unittest_pb2.TestAllTypes()
  text = ('optional_nested_message { bb: 1 } '
          'optional_nested_message { bb: 2 }')
  self.assertRaisesWithLiteralMatch(
      text_format.ParseError,
      ('1:65 : Message type "protobuf_unittest.TestAllTypes.NestedMessage" '
       'should not have multiple "bb" fields.'),
      text_format.Parse, text, message)

def testMergeRepeatedExtensionScalars(self):
  """Merge overwrites an optional extension scalar seen twice."""
  message = unittest_pb2.TestAllExtensions()
  text = ('[protobuf_unittest.optional_int32_extension]: 42 '
          '[protobuf_unittest.optional_int32_extension]: 67')
  text_format.Merge(text, message)
  self.assertEqual(
      67,
      message.Extensions[unittest_pb2.optional_int32_extension])

def testParseRepeatedExtensionScalars(self):
  """Parse rejects duplicate optional extension scalars."""
  message = unittest_pb2.TestAllExtensions()
  text = ('[protobuf_unittest.optional_int32_extension]: 42 '
          '[protobuf_unittest.optional_int32_extension]: 67')
  self.assertRaisesWithLiteralMatch(
      text_format.ParseError,
      ('1:96 : Message type "protobuf_unittest.TestAllExtensions" '
       'should not have multiple '
       '"protobuf_unittest.optional_int32_extension" extensions.'),
      text_format.Parse, text, message)
|
||||
|
||||
def testParseLinesGolden(self):
  """ParseLines accepts an iterable of lines from the golden file."""
  opened = self.ReadGolden('text_format_unittest_data.txt')
  parsed_message = unittest_pb2.TestAllTypes()
  r = text_format.ParseLines(opened, parsed_message)
  self.assertIs(r, parsed_message)  # ParseLines returns its message argument.

  message = unittest_pb2.TestAllTypes()
  test_util.SetAllFields(message)
  self.assertEquals(message, parsed_message)

def testMergeLinesGolden(self):
  """MergeLines accepts an iterable of lines from the golden file."""
  opened = self.ReadGolden('text_format_unittest_data.txt')
  parsed_message = unittest_pb2.TestAllTypes()
  r = text_format.MergeLines(opened, parsed_message)
  self.assertIs(r, parsed_message)

  message = unittest_pb2.TestAllTypes()
  test_util.SetAllFields(message)
  self.assertEqual(message, parsed_message)

def testParseOneof(self):
  """Round-tripping preserves which member of a oneof is set."""
  m = unittest_pb2.TestAllTypes()
  m.oneof_uint32 = 11
  m2 = unittest_pb2.TestAllTypes()
  text_format.Parse(text_format.MessageToString(m), m2)
  self.assertEqual('oneof_uint32', m2.WhichOneof('oneof_field'))
|
||||
|
||||
|
||||
class TokenizerTest(basetest.TestCase):
  """Unit tests for the text-format tokenizer (text_format._Tokenizer)."""

  def testSimpleTokenCases(self):
    """Walks the tokenizer across every token kind it must recognize.

    Each entry in 'methods' is either a literal punctuation token (compared
    against tokenizer.token and skipped via NextToken), or a
    (consume_method, expected_value) pair.
    """
    text = ('identifier1:"string1"\n \n\n'
            'identifier2 : \n \n123 \n identifier3 :\'string\'\n'
            'identifiER_4 : 1.1e+2 ID5:-0.23 ID6:\'aaaa\\\'bbbb\'\n'
            'ID7 : "aa\\"bb"\n\n\n\n ID8: {A:inf B:-inf C:true D:false}\n'
            'ID9: 22 ID10: -111111111111111111 ID11: -22\n'
            'ID12: 2222222222222222222 ID13: 1.23456f ID14: 1.2e+2f '
            'false_bool: 0 true_BOOL:t \n true_bool1: 1 false_BOOL1:f ')
    tokenizer = text_format._Tokenizer(text.splitlines())
    methods = [(tokenizer.ConsumeIdentifier, 'identifier1'),
               ':',
               (tokenizer.ConsumeString, 'string1'),
               (tokenizer.ConsumeIdentifier, 'identifier2'),
               ':',
               (tokenizer.ConsumeInt32, 123),
               (tokenizer.ConsumeIdentifier, 'identifier3'),
               ':',
               (tokenizer.ConsumeString, 'string'),
               (tokenizer.ConsumeIdentifier, 'identifiER_4'),
               ':',
               (tokenizer.ConsumeFloat, 1.1e+2),
               (tokenizer.ConsumeIdentifier, 'ID5'),
               ':',
               (tokenizer.ConsumeFloat, -0.23),
               (tokenizer.ConsumeIdentifier, 'ID6'),
               ':',
               (tokenizer.ConsumeString, 'aaaa\'bbbb'),
               (tokenizer.ConsumeIdentifier, 'ID7'),
               ':',
               (tokenizer.ConsumeString, 'aa\"bb'),
               (tokenizer.ConsumeIdentifier, 'ID8'),
               ':',
               '{',
               (tokenizer.ConsumeIdentifier, 'A'),
               ':',
               (tokenizer.ConsumeFloat, float('inf')),
               (tokenizer.ConsumeIdentifier, 'B'),
               ':',
               (tokenizer.ConsumeFloat, -float('inf')),
               (tokenizer.ConsumeIdentifier, 'C'),
               ':',
               (tokenizer.ConsumeBool, True),
               (tokenizer.ConsumeIdentifier, 'D'),
               ':',
               (tokenizer.ConsumeBool, False),
               '}',
               (tokenizer.ConsumeIdentifier, 'ID9'),
               ':',
               (tokenizer.ConsumeUint32, 22),
               (tokenizer.ConsumeIdentifier, 'ID10'),
               ':',
               (tokenizer.ConsumeInt64, -111111111111111111),
               (tokenizer.ConsumeIdentifier, 'ID11'),
               ':',
               (tokenizer.ConsumeInt32, -22),
               (tokenizer.ConsumeIdentifier, 'ID12'),
               ':',
               (tokenizer.ConsumeUint64, 2222222222222222222),
               (tokenizer.ConsumeIdentifier, 'ID13'),
               ':',
               (tokenizer.ConsumeFloat, 1.23456),
               (tokenizer.ConsumeIdentifier, 'ID14'),
               ':',
               (tokenizer.ConsumeFloat, 1.2e+2),
               (tokenizer.ConsumeIdentifier, 'false_bool'),
               ':',
               (tokenizer.ConsumeBool, False),
               (tokenizer.ConsumeIdentifier, 'true_BOOL'),
               ':',
               (tokenizer.ConsumeBool, True),
               (tokenizer.ConsumeIdentifier, 'true_bool1'),
               ':',
               (tokenizer.ConsumeBool, True),
               (tokenizer.ConsumeIdentifier, 'false_BOOL1'),
               ':',
               (tokenizer.ConsumeBool, False)]

    i = 0
    while not tokenizer.AtEnd():
      m = methods[i]
      # isinstance instead of 'type(m) == str': same result for the literal
      # strings in the table, and the idiomatic (subclass-safe) type check.
      if isinstance(m, str):
        token = tokenizer.token
        self.assertEqual(token, m)
        tokenizer.NextToken()
      else:
        self.assertEqual(m[1], m[0]())
      i += 1

  def testConsumeIntegers(self):
    # This test only tests the failures in the integer parsing methods as well
    # as the '0' special cases.
    int64_max = (1 << 63) - 1
    uint32_max = (1 << 32) - 1
    text = '-1 %d %d' % (uint32_max + 1, int64_max + 1)
    tokenizer = text_format._Tokenizer(text.splitlines())
    # -1 is negative, so it is rejected by both unsigned consumers.
    self.assertRaises(text_format.ParseError, tokenizer.ConsumeUint32)
    self.assertRaises(text_format.ParseError, tokenizer.ConsumeUint64)
    self.assertEqual(-1, tokenizer.ConsumeInt32())

    # uint32_max + 1 overflows 32-bit consumers but fits in int64.
    self.assertRaises(text_format.ParseError, tokenizer.ConsumeUint32)
    self.assertRaises(text_format.ParseError, tokenizer.ConsumeInt32)
    self.assertEqual(uint32_max + 1, tokenizer.ConsumeInt64())

    # int64_max + 1 overflows int64 but fits in uint64.
    self.assertRaises(text_format.ParseError, tokenizer.ConsumeInt64)
    self.assertEqual(int64_max + 1, tokenizer.ConsumeUint64())
    self.assertTrue(tokenizer.AtEnd())

    # '-0' must be accepted by the unsigned consumers.
    text = '-0 -0 0 0'
    tokenizer = text_format._Tokenizer(text.splitlines())
    self.assertEqual(0, tokenizer.ConsumeUint32())
    self.assertEqual(0, tokenizer.ConsumeUint64())
    self.assertEqual(0, tokenizer.ConsumeUint32())
    self.assertEqual(0, tokenizer.ConsumeUint64())
    self.assertTrue(tokenizer.AtEnd())

  def testConsumeByteString(self):
    """Malformed string literals must raise ParseError."""
    text = '"string1\''  # Mismatched quote characters.
    tokenizer = text_format._Tokenizer(text.splitlines())
    self.assertRaises(text_format.ParseError, tokenizer.ConsumeByteString)

    text = 'string1"'  # Missing opening quote.
    tokenizer = text_format._Tokenizer(text.splitlines())
    self.assertRaises(text_format.ParseError, tokenizer.ConsumeByteString)

    text = '\n"\\xt"'  # Invalid hex escape digit.
    tokenizer = text_format._Tokenizer(text.splitlines())
    self.assertRaises(text_format.ParseError, tokenizer.ConsumeByteString)

    text = '\n"\\"'  # Escaped terminator: string never closes.
    tokenizer = text_format._Tokenizer(text.splitlines())
    self.assertRaises(text_format.ParseError, tokenizer.ConsumeByteString)

    text = '\n"\\x"'  # Hex escape with no digits.
    tokenizer = text_format._Tokenizer(text.splitlines())
    self.assertRaises(text_format.ParseError, tokenizer.ConsumeByteString)

  def testConsumeBool(self):
    """Arbitrary identifiers are not accepted as booleans."""
    text = 'not-a-bool'
    tokenizer = text_format._Tokenizer(text.splitlines())
    self.assertRaises(text_format.ParseError, tokenizer.ConsumeBool)
|
||||
|
||||
|
||||
if __name__ == '__main__':
  # basetest is Google's wrapper around unittest; main() runs all TestCases.
  basetest.main()
|
||||
@@ -0,0 +1,328 @@
|
||||
# Protocol Buffers - Google's data interchange format
|
||||
# Copyright 2008 Google Inc. All rights reserved.
|
||||
# https://developers.google.com/protocol-buffers/
|
||||
#
|
||||
# Redistribution and use in source and binary forms, with or without
|
||||
# modification, are permitted provided that the following conditions are
|
||||
# met:
|
||||
#
|
||||
# * Redistributions of source code must retain the above copyright
|
||||
# notice, this list of conditions and the following disclaimer.
|
||||
# * Redistributions in binary form must reproduce the above
|
||||
# copyright notice, this list of conditions and the following disclaimer
|
||||
# in the documentation and/or other materials provided with the
|
||||
# distribution.
|
||||
# * Neither the name of Google Inc. nor the names of its
|
||||
# contributors may be used to endorse or promote products derived from
|
||||
# this software without specific prior written permission.
|
||||
#
|
||||
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
||||
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
||||
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
||||
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
||||
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
||||
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
||||
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
||||
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
||||
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
||||
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
#PY25 compatible for GAE.
|
||||
#
|
||||
# Copyright 2008 Google Inc. All Rights Reserved.
|
||||
|
||||
"""Provides type checking routines.
|
||||
|
||||
This module defines type checking utilities in the forms of dictionaries:
|
||||
|
||||
VALUE_CHECKERS: A dictionary of field types and a value validation object.
|
||||
TYPE_TO_BYTE_SIZE_FN: A dictionary with field types and a size computing
|
||||
function.
|
||||
TYPE_TO_SERIALIZE_METHOD: A dictionary with field types and serialization
|
||||
function.
|
||||
FIELD_TYPE_TO_WIRE_TYPE: A dictionary with field typed and their
|
||||
coresponding wire types.
|
||||
TYPE_TO_DESERIALIZE_METHOD: A dictionary with field types and deserialization
|
||||
function.
|
||||
"""
|
||||
|
||||
__author__ = 'robinson@google.com (Will Robinson)'
|
||||
|
||||
import sys ##PY25
|
||||
if sys.version < '2.6': bytes = str ##PY25
|
||||
from google.protobuf.internal import api_implementation
|
||||
from google.protobuf.internal import decoder
|
||||
from google.protobuf.internal import encoder
|
||||
from google.protobuf.internal import wire_format
|
||||
from google.protobuf import descriptor
|
||||
|
||||
_FieldDescriptor = descriptor.FieldDescriptor
|
||||
|
||||
|
||||
def GetTypeChecker(field):
  """Returns a type checker for a message field of the specified types.

  Args:
    field: FieldDescriptor object for this field.

  Returns:
    An instance of TypeChecker which can be used to verify the types
    of values assigned to a field of the specified type.
  """
  is_text_field = (field.cpp_type == _FieldDescriptor.CPPTYPE_STRING and
                   field.type == _FieldDescriptor.TYPE_STRING)
  if is_text_field:
    # TYPE_STRING fields get the checker that normalizes str -> unicode;
    # TYPE_BYTES fields (also CPPTYPE_STRING) fall through to the table.
    return UnicodeValueChecker()
  elif field.cpp_type == _FieldDescriptor.CPPTYPE_ENUM:
    return EnumValueChecker(field.enum_type)
  else:
    return _VALUE_CHECKERS[field.cpp_type]
|
||||
|
||||
|
||||
# None of the typecheckers below make any attempt to guard against people
|
||||
# subclassing builtin types and doing weird things. We're not trying to
|
||||
# protect against malicious clients here, just people accidentally shooting
|
||||
# themselves in the foot in obvious ways.
|
||||
|
||||
class TypeChecker(object):

  """Type checker used to catch type errors as early as possible
  when the client is setting scalar fields in protocol messages.
  """

  def __init__(self, *acceptable_types):
    # Tuple of types accepted by CheckValue, passed straight to isinstance.
    self._acceptable_types = acceptable_types

  def CheckValue(self, proposed_value):
    """Type check the provided value and return it.

    The returned value might have been normalized to another type.
    """
    if isinstance(proposed_value, self._acceptable_types):
      return proposed_value
    raise TypeError('%.1024r has type %s, but expected one of: %s' %
                    (proposed_value, type(proposed_value),
                     self._acceptable_types))
|
||||
|
||||
|
||||
# IntValueChecker and its subclasses perform integer type-checks
# and bounds-checks.
class IntValueChecker(object):

  """Checker used for integer fields.  Performs type-check and range check.

  Subclasses must define _MIN and _MAX (inclusive bounds) and _TYPE (the
  Python type values are normalized to).
  """

  def CheckValue(self, proposed_value):
    # NOTE: 'long' is the Python 2 arbitrary-precision integer type; this
    # vendored module predates Python 3 support.
    if not isinstance(proposed_value, (int, long)):
      message = ('%.1024r has type %s, but expected one of: %s' %
                 (proposed_value, type(proposed_value), (int, long)))
      raise TypeError(message)
    if not self._MIN <= proposed_value <= self._MAX:
      raise ValueError('Value out of range: %d' % proposed_value)
    # We force 32-bit values to int and 64-bit values to long to make
    # alternate implementations where the distinction is more significant
    # (e.g. the C++ implementation) simpler.
    proposed_value = self._TYPE(proposed_value)
    return proposed_value
|
||||
|
||||
|
||||
class EnumValueChecker(object):

  """Checker used for enum fields.  Performs type-check and range check."""

  def __init__(self, enum_type):
    # EnumDescriptor whose values_by_number map defines the legal values.
    self._enum_type = enum_type

  def CheckValue(self, proposed_value):
    # 'long' is the Python 2 integer type; this vendored module is Python 2.
    if not isinstance(proposed_value, (int, long)):
      message = ('%.1024r has type %s, but expected one of: %s' %
                 (proposed_value, type(proposed_value), (int, long)))
      raise TypeError(message)
    # Only numbers with a declared enum value are accepted.
    if proposed_value not in self._enum_type.values_by_number:
      raise ValueError('Unknown enum value: %d' % proposed_value)
    return proposed_value
|
||||
|
||||
|
||||
class UnicodeValueChecker(object):

  """Checker used for string fields.

  Always returns a unicode value, even if the input is of type str.
  """

  def CheckValue(self, proposed_value):
    # 'unicode' (and the PY25 'bytes' alias defined at module top) are
    # Python 2 names; this vendored module predates Python 3 support.
    if not isinstance(proposed_value, (bytes, unicode)):
      message = ('%.1024r has type %s, but expected one of: %s' %
                 (proposed_value, type(proposed_value), (bytes, unicode)))
      raise TypeError(message)

    # If the value is of type 'bytes' make sure that it is in 7-bit ASCII
    # encoding.
    if isinstance(proposed_value, bytes):
      try:
        proposed_value = proposed_value.decode('ascii')
      except UnicodeDecodeError:
        raise ValueError('%.1024r has type bytes, but isn\'t in 7-bit ASCII '
                         'encoding. Non-ASCII strings must be converted to '
                         'unicode objects before being added.' %
                         (proposed_value))
    return proposed_value
|
||||
|
||||
|
||||
class Int32ValueChecker(IntValueChecker):
  # Bounds of a signed 32-bit integer.
  # We're sure to use ints instead of longs here since comparison may be more
  # efficient.
  _MIN = -2147483648
  _MAX = 2147483647
  _TYPE = int


class Uint32ValueChecker(IntValueChecker):
  # Bounds of an unsigned 32-bit integer.
  _MIN = 0
  _MAX = (1 << 32) - 1
  _TYPE = int


class Int64ValueChecker(IntValueChecker):
  # Bounds of a signed 64-bit integer; normalized to Python 2 'long'.
  _MIN = -(1 << 63)
  _MAX = (1 << 63) - 1
  _TYPE = long


class Uint64ValueChecker(IntValueChecker):
  # Bounds of an unsigned 64-bit integer; normalized to Python 2 'long'.
  _MIN = 0
  _MAX = (1 << 64) - 1
  _TYPE = long
|
||||
|
||||
|
||||
# Type-checkers for all scalar CPPTYPEs.
# NOTE: CPPTYPE_STRING maps to a plain bytes checker here; GetTypeChecker()
# substitutes UnicodeValueChecker for TYPE_STRING (text) fields.
_VALUE_CHECKERS = {
    _FieldDescriptor.CPPTYPE_INT32: Int32ValueChecker(),
    _FieldDescriptor.CPPTYPE_INT64: Int64ValueChecker(),
    _FieldDescriptor.CPPTYPE_UINT32: Uint32ValueChecker(),
    _FieldDescriptor.CPPTYPE_UINT64: Uint64ValueChecker(),
    _FieldDescriptor.CPPTYPE_DOUBLE: TypeChecker(
        float, int, long),
    _FieldDescriptor.CPPTYPE_FLOAT: TypeChecker(
        float, int, long),
    _FieldDescriptor.CPPTYPE_BOOL: TypeChecker(bool, int),
    _FieldDescriptor.CPPTYPE_STRING: TypeChecker(bytes),
    }
|
||||
|
||||
|
||||
# Map from field type to a function F, such that F(field_num, value)
# gives the total byte size for a value of the given type.  This
# byte size includes tag information and any other additional space
# associated with serializing "value".  Keys are FieldDescriptor.TYPE_*
# constants; values come from wire_format.
TYPE_TO_BYTE_SIZE_FN = {
    _FieldDescriptor.TYPE_DOUBLE: wire_format.DoubleByteSize,
    _FieldDescriptor.TYPE_FLOAT: wire_format.FloatByteSize,
    _FieldDescriptor.TYPE_INT64: wire_format.Int64ByteSize,
    _FieldDescriptor.TYPE_UINT64: wire_format.UInt64ByteSize,
    _FieldDescriptor.TYPE_INT32: wire_format.Int32ByteSize,
    _FieldDescriptor.TYPE_FIXED64: wire_format.Fixed64ByteSize,
    _FieldDescriptor.TYPE_FIXED32: wire_format.Fixed32ByteSize,
    _FieldDescriptor.TYPE_BOOL: wire_format.BoolByteSize,
    _FieldDescriptor.TYPE_STRING: wire_format.StringByteSize,
    _FieldDescriptor.TYPE_GROUP: wire_format.GroupByteSize,
    _FieldDescriptor.TYPE_MESSAGE: wire_format.MessageByteSize,
    _FieldDescriptor.TYPE_BYTES: wire_format.BytesByteSize,
    _FieldDescriptor.TYPE_UINT32: wire_format.UInt32ByteSize,
    _FieldDescriptor.TYPE_ENUM: wire_format.EnumByteSize,
    _FieldDescriptor.TYPE_SFIXED32: wire_format.SFixed32ByteSize,
    _FieldDescriptor.TYPE_SFIXED64: wire_format.SFixed64ByteSize,
    _FieldDescriptor.TYPE_SINT32: wire_format.SInt32ByteSize,
    _FieldDescriptor.TYPE_SINT64: wire_format.SInt64ByteSize
    }
|
||||
|
||||
|
||||
# Maps from field types to encoder constructors.  Keys are
# FieldDescriptor.TYPE_* constants; values come from encoder.
TYPE_TO_ENCODER = {
    _FieldDescriptor.TYPE_DOUBLE: encoder.DoubleEncoder,
    _FieldDescriptor.TYPE_FLOAT: encoder.FloatEncoder,
    _FieldDescriptor.TYPE_INT64: encoder.Int64Encoder,
    _FieldDescriptor.TYPE_UINT64: encoder.UInt64Encoder,
    _FieldDescriptor.TYPE_INT32: encoder.Int32Encoder,
    _FieldDescriptor.TYPE_FIXED64: encoder.Fixed64Encoder,
    _FieldDescriptor.TYPE_FIXED32: encoder.Fixed32Encoder,
    _FieldDescriptor.TYPE_BOOL: encoder.BoolEncoder,
    _FieldDescriptor.TYPE_STRING: encoder.StringEncoder,
    _FieldDescriptor.TYPE_GROUP: encoder.GroupEncoder,
    _FieldDescriptor.TYPE_MESSAGE: encoder.MessageEncoder,
    _FieldDescriptor.TYPE_BYTES: encoder.BytesEncoder,
    _FieldDescriptor.TYPE_UINT32: encoder.UInt32Encoder,
    _FieldDescriptor.TYPE_ENUM: encoder.EnumEncoder,
    _FieldDescriptor.TYPE_SFIXED32: encoder.SFixed32Encoder,
    _FieldDescriptor.TYPE_SFIXED64: encoder.SFixed64Encoder,
    _FieldDescriptor.TYPE_SINT32: encoder.SInt32Encoder,
    _FieldDescriptor.TYPE_SINT64: encoder.SInt64Encoder,
    }
|
||||
|
||||
|
||||
# Maps from field types to sizer constructors.  Parallels TYPE_TO_ENCODER;
# values come from encoder.
TYPE_TO_SIZER = {
    _FieldDescriptor.TYPE_DOUBLE: encoder.DoubleSizer,
    _FieldDescriptor.TYPE_FLOAT: encoder.FloatSizer,
    _FieldDescriptor.TYPE_INT64: encoder.Int64Sizer,
    _FieldDescriptor.TYPE_UINT64: encoder.UInt64Sizer,
    _FieldDescriptor.TYPE_INT32: encoder.Int32Sizer,
    _FieldDescriptor.TYPE_FIXED64: encoder.Fixed64Sizer,
    _FieldDescriptor.TYPE_FIXED32: encoder.Fixed32Sizer,
    _FieldDescriptor.TYPE_BOOL: encoder.BoolSizer,
    _FieldDescriptor.TYPE_STRING: encoder.StringSizer,
    _FieldDescriptor.TYPE_GROUP: encoder.GroupSizer,
    _FieldDescriptor.TYPE_MESSAGE: encoder.MessageSizer,
    _FieldDescriptor.TYPE_BYTES: encoder.BytesSizer,
    _FieldDescriptor.TYPE_UINT32: encoder.UInt32Sizer,
    _FieldDescriptor.TYPE_ENUM: encoder.EnumSizer,
    _FieldDescriptor.TYPE_SFIXED32: encoder.SFixed32Sizer,
    _FieldDescriptor.TYPE_SFIXED64: encoder.SFixed64Sizer,
    _FieldDescriptor.TYPE_SINT32: encoder.SInt32Sizer,
    _FieldDescriptor.TYPE_SINT64: encoder.SInt64Sizer,
    }
|
||||
|
||||
|
||||
# Maps from field type to a decoder constructor.  Keys are
# FieldDescriptor.TYPE_* constants; values come from decoder.
TYPE_TO_DECODER = {
    _FieldDescriptor.TYPE_DOUBLE: decoder.DoubleDecoder,
    _FieldDescriptor.TYPE_FLOAT: decoder.FloatDecoder,
    _FieldDescriptor.TYPE_INT64: decoder.Int64Decoder,
    _FieldDescriptor.TYPE_UINT64: decoder.UInt64Decoder,
    _FieldDescriptor.TYPE_INT32: decoder.Int32Decoder,
    _FieldDescriptor.TYPE_FIXED64: decoder.Fixed64Decoder,
    _FieldDescriptor.TYPE_FIXED32: decoder.Fixed32Decoder,
    _FieldDescriptor.TYPE_BOOL: decoder.BoolDecoder,
    _FieldDescriptor.TYPE_STRING: decoder.StringDecoder,
    _FieldDescriptor.TYPE_GROUP: decoder.GroupDecoder,
    _FieldDescriptor.TYPE_MESSAGE: decoder.MessageDecoder,
    _FieldDescriptor.TYPE_BYTES: decoder.BytesDecoder,
    _FieldDescriptor.TYPE_UINT32: decoder.UInt32Decoder,
    _FieldDescriptor.TYPE_ENUM: decoder.EnumDecoder,
    _FieldDescriptor.TYPE_SFIXED32: decoder.SFixed32Decoder,
    _FieldDescriptor.TYPE_SFIXED64: decoder.SFixed64Decoder,
    _FieldDescriptor.TYPE_SINT32: decoder.SInt32Decoder,
    _FieldDescriptor.TYPE_SINT64: decoder.SInt64Decoder,
    }
|
||||
|
||||
# Maps from field type to expected wiretype (WIRETYPE_* constants from
# wire_format), used to validate tags while decoding.
FIELD_TYPE_TO_WIRE_TYPE = {
    _FieldDescriptor.TYPE_DOUBLE: wire_format.WIRETYPE_FIXED64,
    _FieldDescriptor.TYPE_FLOAT: wire_format.WIRETYPE_FIXED32,
    _FieldDescriptor.TYPE_INT64: wire_format.WIRETYPE_VARINT,
    _FieldDescriptor.TYPE_UINT64: wire_format.WIRETYPE_VARINT,
    _FieldDescriptor.TYPE_INT32: wire_format.WIRETYPE_VARINT,
    _FieldDescriptor.TYPE_FIXED64: wire_format.WIRETYPE_FIXED64,
    _FieldDescriptor.TYPE_FIXED32: wire_format.WIRETYPE_FIXED32,
    _FieldDescriptor.TYPE_BOOL: wire_format.WIRETYPE_VARINT,
    _FieldDescriptor.TYPE_STRING:
      wire_format.WIRETYPE_LENGTH_DELIMITED,
    _FieldDescriptor.TYPE_GROUP: wire_format.WIRETYPE_START_GROUP,
    _FieldDescriptor.TYPE_MESSAGE:
      wire_format.WIRETYPE_LENGTH_DELIMITED,
    _FieldDescriptor.TYPE_BYTES:
      wire_format.WIRETYPE_LENGTH_DELIMITED,
    _FieldDescriptor.TYPE_UINT32: wire_format.WIRETYPE_VARINT,
    _FieldDescriptor.TYPE_ENUM: wire_format.WIRETYPE_VARINT,
    _FieldDescriptor.TYPE_SFIXED32: wire_format.WIRETYPE_FIXED32,
    _FieldDescriptor.TYPE_SFIXED64: wire_format.WIRETYPE_FIXED64,
    _FieldDescriptor.TYPE_SINT32: wire_format.WIRETYPE_VARINT,
    _FieldDescriptor.TYPE_SINT64: wire_format.WIRETYPE_VARINT,
    }
|
||||
@@ -0,0 +1,231 @@
|
||||
#! /usr/bin/python
|
||||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
# Protocol Buffers - Google's data interchange format
|
||||
# Copyright 2008 Google Inc. All rights reserved.
|
||||
# https://developers.google.com/protocol-buffers/
|
||||
#
|
||||
# Redistribution and use in source and binary forms, with or without
|
||||
# modification, are permitted provided that the following conditions are
|
||||
# met:
|
||||
#
|
||||
# * Redistributions of source code must retain the above copyright
|
||||
# notice, this list of conditions and the following disclaimer.
|
||||
# * Redistributions in binary form must reproduce the above
|
||||
# copyright notice, this list of conditions and the following disclaimer
|
||||
# in the documentation and/or other materials provided with the
|
||||
# distribution.
|
||||
# * Neither the name of Google Inc. nor the names of its
|
||||
# contributors may be used to endorse or promote products derived from
|
||||
# this software without specific prior written permission.
|
||||
#
|
||||
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
||||
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
||||
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
||||
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
||||
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
||||
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
||||
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
||||
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
||||
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
||||
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
"""Test for preservation of unknown fields in the pure Python implementation."""
|
||||
|
||||
__author__ = 'bohdank@google.com (Bohdan Koval)'
|
||||
|
||||
from google.apputils import basetest
|
||||
from google.protobuf import unittest_mset_pb2
|
||||
from google.protobuf import unittest_pb2
|
||||
from google.protobuf.internal import encoder
|
||||
from google.protobuf.internal import missing_enum_values_pb2
|
||||
from google.protobuf.internal import test_util
|
||||
from google.protobuf.internal import type_checkers
|
||||
|
||||
|
||||
class UnknownFieldsTest(basetest.TestCase):
  """Checks that unknown fields are preserved by the pure-Python runtime."""

  def setUp(self):
    self.descriptor = unittest_pb2.TestAllTypes.DESCRIPTOR
    self.all_fields = unittest_pb2.TestAllTypes()
    test_util.SetAllFields(self.all_fields)
    self.all_fields_data = self.all_fields.SerializeToString()
    # Parsing into an empty message records every field as unknown.
    self.empty_message = unittest_pb2.TestEmptyMessage()
    self.empty_message.ParseFromString(self.all_fields_data)
    self.unknown_fields = self.empty_message._unknown_fields

  def GetField(self, name):
    """Decodes and returns the unknown-field value recorded for `name`."""
    fd = self.descriptor.fields_by_name[name]
    wire_type = type_checkers.FIELD_TYPE_TO_WIRE_TYPE[fd.type]
    expected_tag = encoder.TagBytes(fd.number, wire_type)
    decoded = {}
    for tag_bytes, value in self.unknown_fields:
      if tag_bytes != expected_tag:
        continue
      field_decoder = unittest_pb2.TestAllTypes._decoders_by_tag[tag_bytes][0]
      field_decoder(value, 0, len(value), self.all_fields, decoded)
    return decoded[fd]

  def testEnum(self):
    self.assertEqual(self.all_fields.optional_nested_enum,
                     self.GetField('optional_nested_enum'))

  def testRepeatedEnum(self):
    self.assertEqual(self.all_fields.repeated_nested_enum,
                     self.GetField('repeated_nested_enum'))

  def testVarint(self):
    self.assertEqual(self.all_fields.optional_int32,
                     self.GetField('optional_int32'))

  def testFixed32(self):
    self.assertEqual(self.all_fields.optional_fixed32,
                     self.GetField('optional_fixed32'))

  def testFixed64(self):
    self.assertEqual(self.all_fields.optional_fixed64,
                     self.GetField('optional_fixed64'))

  def testLengthDelimited(self):
    self.assertEqual(self.all_fields.optional_string,
                     self.GetField('optional_string'))

  def testGroup(self):
    self.assertEqual(self.all_fields.optionalgroup,
                     self.GetField('optionalgroup'))

  def testSerialize(self):
    data = self.empty_message.SerializeToString()
    # Don't use assertEqual, so a failure doesn't dump raw binary to stdout.
    self.assertTrue(data == self.all_fields_data)

  def testCopyFrom(self):
    message = unittest_pb2.TestEmptyMessage()
    message.CopyFrom(self.empty_message)
    self.assertEqual(self.unknown_fields, message._unknown_fields)

  def testMergeFrom(self):
    message = unittest_pb2.TestAllTypes()
    message.optional_int32 = 1
    message.optional_uint32 = 2
    source = unittest_pb2.TestEmptyMessage()
    source.ParseFromString(message.SerializeToString())

    message.ClearField('optional_int32')
    message.optional_int64 = 3
    message.optional_uint32 = 4
    destination = unittest_pb2.TestEmptyMessage()
    destination.ParseFromString(message.SerializeToString())
    unknown_fields = destination._unknown_fields[:]

    # Merged-in unknown fields are appended after the existing ones.
    destination.MergeFrom(source)
    self.assertEqual(unknown_fields + source._unknown_fields,
                     destination._unknown_fields)

  def testClear(self):
    self.empty_message.Clear()
    self.assertEqual(0, len(self.empty_message._unknown_fields))

  def testByteSize(self):
    self.assertEqual(self.all_fields.ByteSize(), self.empty_message.ByteSize())

  def testUnknownExtensions(self):
    message = unittest_pb2.TestEmptyMessageWithExtensions()
    message.ParseFromString(self.all_fields_data)
    self.assertEqual(self.empty_message._unknown_fields,
                     message._unknown_fields)

  def testListFields(self):
    # ListFields() must not report unknown fields.
    self.assertEqual(0, len(self.empty_message.ListFields()))

  def testSerializeMessageSetWireFormatUnknownExtension(self):
    # Build a message-set payload containing an extension the parser
    # does not know about.
    raw = unittest_mset_pb2.RawMessageSet()
    item = raw.item.add()
    item.type_id = 1545009
    message1 = unittest_mset_pb2.TestMessageSetExtension1()
    message1.i = 12345
    item.message = message1.SerializeToString()
    serialized = raw.SerializeToString()

    # Parse it using the message-set wire format.
    proto = unittest_mset_pb2.TestMessageSet()
    proto.MergeFromString(serialized)

    # The unknown extension must round-trip unchanged.
    reserialized = proto.SerializeToString()
    new_raw = unittest_mset_pb2.RawMessageSet()
    new_raw.MergeFromString(reserialized)
    self.assertEqual(raw, new_raw)

  def testEquals(self):
    message = unittest_pb2.TestEmptyMessage()
    message.ParseFromString(self.all_fields_data)
    self.assertEqual(self.empty_message, message)

    self.all_fields.ClearField('optional_string')
    message.ParseFromString(self.all_fields.SerializeToString())
    self.assertNotEqual(self.empty_message, message)
|
||||
|
||||
|
||||
class UnknownEnumValuesTest(basetest.TestCase):
  """Tests handling of enum values the parsing message does not declare.

  Renamed from `UnknownFieldsTest`: a second class with that name would
  shadow the earlier class of the same name in this module, so the first
  class's tests would silently never be discovered or run.
  """

  def setUp(self):
    self.descriptor = missing_enum_values_pb2.TestEnumValues.DESCRIPTOR

    self.message = missing_enum_values_pb2.TestEnumValues()
    self.message.optional_nested_enum = (
        missing_enum_values_pb2.TestEnumValues.ZERO)
    self.message.repeated_nested_enum.extend([
        missing_enum_values_pb2.TestEnumValues.ZERO,
        missing_enum_values_pb2.TestEnumValues.ONE,
        ])
    self.message.packed_nested_enum.extend([
        missing_enum_values_pb2.TestEnumValues.ZERO,
        missing_enum_values_pb2.TestEnumValues.ONE,
        ])
    self.message_data = self.message.SerializeToString()
    # TestMissingEnumValues lacks some of the values set above, so parsing
    # records them as unknown fields.
    self.missing_message = missing_enum_values_pb2.TestMissingEnumValues()
    self.missing_message.ParseFromString(self.message_data)
    self.unknown_fields = self.missing_message._unknown_fields

  def GetField(self, name):
    """Decodes and returns the unknown-field value recorded for `name`."""
    field_descriptor = self.descriptor.fields_by_name[name]
    wire_type = type_checkers.FIELD_TYPE_TO_WIRE_TYPE[field_descriptor.type]
    field_tag = encoder.TagBytes(field_descriptor.number, wire_type)
    result_dict = {}
    for tag_bytes, value in self.unknown_fields:
      if tag_bytes == field_tag:
        decoder = missing_enum_values_pb2.TestEnumValues._decoders_by_tag[
            tag_bytes][0]
        decoder(value, 0, len(value), self.message, result_dict)
    return result_dict[field_descriptor]

  def testUnknownEnumValue(self):
    self.assertFalse(self.missing_message.HasField('optional_nested_enum'))
    value = self.GetField('optional_nested_enum')
    self.assertEqual(self.message.optional_nested_enum, value)

  def testUnknownRepeatedEnumValue(self):
    value = self.GetField('repeated_nested_enum')
    self.assertEqual(self.message.repeated_nested_enum, value)

  def testUnknownPackedEnumValue(self):
    value = self.GetField('packed_nested_enum')
    self.assertEqual(self.message.packed_nested_enum, value)

  def testRoundTrip(self):
    new_message = missing_enum_values_pb2.TestEnumValues()
    new_message.ParseFromString(self.missing_message.SerializeToString())
    self.assertEqual(self.message, new_message)
|
||||
|
||||
|
||||
# Run all test classes under google.apputils' test runner.
if __name__ == '__main__':
  basetest.main()
|
||||
@@ -0,0 +1,268 @@
|
||||
# Protocol Buffers - Google's data interchange format
|
||||
# Copyright 2008 Google Inc. All rights reserved.
|
||||
# https://developers.google.com/protocol-buffers/
|
||||
#
|
||||
# Redistribution and use in source and binary forms, with or without
|
||||
# modification, are permitted provided that the following conditions are
|
||||
# met:
|
||||
#
|
||||
# * Redistributions of source code must retain the above copyright
|
||||
# notice, this list of conditions and the following disclaimer.
|
||||
# * Redistributions in binary form must reproduce the above
|
||||
# copyright notice, this list of conditions and the following disclaimer
|
||||
# in the documentation and/or other materials provided with the
|
||||
# distribution.
|
||||
# * Neither the name of Google Inc. nor the names of its
|
||||
# contributors may be used to endorse or promote products derived from
|
||||
# this software without specific prior written permission.
|
||||
#
|
||||
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
||||
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
||||
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
||||
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
||||
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
||||
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
||||
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
||||
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
||||
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
||||
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
"""Constants and static functions to support protocol buffer wire format."""
|
||||
|
||||
__author__ = 'robinson@google.com (Will Robinson)'
|
||||
|
||||
import struct
|
||||
from google.protobuf import descriptor
|
||||
from google.protobuf import message
|
||||
|
||||
|
||||
TAG_TYPE_BITS = 3  # Number of bits used to hold type info in a proto tag.
TAG_TYPE_MASK = (1 << TAG_TYPE_BITS) - 1  # 0x7

# Wire type constants.  The least-significant TAG_TYPE_BITS bits of a
# varint-encoded tag-and-type value hold one of these; the values must
# match the WireType enum in google/protobuf/wire_format.h.
WIRETYPE_VARINT = 0
WIRETYPE_FIXED64 = 1
WIRETYPE_LENGTH_DELIMITED = 2
WIRETYPE_START_GROUP = 3
WIRETYPE_END_GROUP = 4
WIRETYPE_FIXED32 = 5
_WIRETYPE_MAX = 5


# Bounds for the fixed-width integer types.
INT32_MAX = (1 << 31) - 1
INT32_MIN = -(1 << 31)
UINT32_MAX = (1 << 32) - 1

INT64_MAX = (1 << 63) - 1
INT64_MIN = -(1 << 63)
UINT64_MAX = (1 << 64) - 1

# "struct" format strings for the little-endian fixed-width encodings.
FORMAT_UINT32_LITTLE_ENDIAN = '<I'
FORMAT_UINT64_LITTLE_ENDIAN = '<Q'
FORMAT_FLOAT_LITTLE_ENDIAN = '<f'
FORMAT_DOUBLE_LITTLE_ENDIAN = '<d'


# Sanity-check the struct formats.  Alternate AppendLittleEndian*()
# implementations would be required on any platform where these fail.
if struct.calcsize(FORMAT_UINT32_LITTLE_ENDIAN) != 4:
  raise AssertionError('Format "I" is not a 32-bit number.')
if struct.calcsize(FORMAT_UINT64_LITTLE_ENDIAN) != 8:
  raise AssertionError('Format "Q" is not a 64-bit number.')


def PackTag(field_number, wire_type):
  """Combines a field number and wire type into one wire-format tag value.

  Args:
    field_number: Expected to be an integer in the range [1, 1 << 29).
    wire_type: One of the WIRETYPE_* constants.

  Returns:
    The unsigned integer tag in standard protocol message wire format.

  Raises:
    message.EncodeError: If wire_type is outside [0, _WIRETYPE_MAX].
  """
  if not 0 <= wire_type <= _WIRETYPE_MAX:
    raise message.EncodeError('Unknown wire type: %d' % wire_type)
  return (field_number << TAG_TYPE_BITS) | wire_type


def UnpackTag(tag):
  """Inverse of PackTag(): returns a (field_number, wire_type) tuple."""
  return (tag >> TAG_TYPE_BITS), (tag & TAG_TYPE_MASK)


def ZigZagEncode(value):
  """ZigZag Transform: maps signed integers onto unsigned ones so they
  varint-encode efficiently.  See wire_format.h for details.
  """
  return value << 1 if value >= 0 else (value << 1) ^ (~0)


def ZigZagDecode(value):
  """Inverse of ZigZagEncode()."""
  return value >> 1 if not value & 0x1 else (value >> 1) ^ (~0)


# The *ByteSize() functions below return the number of bytes required to
# serialize "field number + type" information and then serialize the value.


def Int32ByteSize(field_number, int32):
  """Byte size of a tagged int32 field."""
  return Int64ByteSize(field_number, int32)


def Int32ByteSizeNoTag(int32):
  """Byte size of an int32 value without its tag."""
  return _VarUInt64ByteSizeNoTag(0xffffffffffffffff & int32)


def Int64ByteSize(field_number, int64):
  """Byte size of a tagged int64 field."""
  # Mask to the two's-complement uint64 before calling UInt64ByteSize();
  # negative values therefore always encode as 10-byte varints.
  return UInt64ByteSize(field_number, 0xffffffffffffffff & int64)


def UInt32ByteSize(field_number, uint32):
  """Byte size of a tagged uint32 field."""
  return UInt64ByteSize(field_number, uint32)


def UInt64ByteSize(field_number, uint64):
  """Byte size of a tagged uint64 field."""
  return TagByteSize(field_number) + _VarUInt64ByteSizeNoTag(uint64)


def SInt32ByteSize(field_number, int32):
  """Byte size of a tagged sint32 (ZigZag-encoded) field."""
  return UInt32ByteSize(field_number, ZigZagEncode(int32))


def SInt64ByteSize(field_number, int64):
  """Byte size of a tagged sint64 (ZigZag-encoded) field."""
  return UInt64ByteSize(field_number, ZigZagEncode(int64))


def Fixed32ByteSize(field_number, fixed32):
  """Byte size of a tagged fixed32 field: tag + 4 bytes."""
  return TagByteSize(field_number) + 4


def Fixed64ByteSize(field_number, fixed64):
  """Byte size of a tagged fixed64 field: tag + 8 bytes."""
  return TagByteSize(field_number) + 8


def SFixed32ByteSize(field_number, sfixed32):
  """Byte size of a tagged sfixed32 field: tag + 4 bytes."""
  return TagByteSize(field_number) + 4


def SFixed64ByteSize(field_number, sfixed64):
  """Byte size of a tagged sfixed64 field: tag + 8 bytes."""
  return TagByteSize(field_number) + 8


def FloatByteSize(field_number, flt):
  """Byte size of a tagged float field: tag + 4 bytes."""
  return TagByteSize(field_number) + 4


def DoubleByteSize(field_number, double):
  """Byte size of a tagged double field: tag + 8 bytes."""
  return TagByteSize(field_number) + 8


def BoolByteSize(field_number, b):
  """Byte size of a tagged bool field: tag + 1 byte."""
  return TagByteSize(field_number) + 1


def EnumByteSize(field_number, enum):
  """Byte size of a tagged enum field (varint-encoded like uint32)."""
  return UInt32ByteSize(field_number, enum)


def StringByteSize(field_number, string):
  """Byte size of a tagged string field, measured on its UTF-8 encoding."""
  return BytesByteSize(field_number, string.encode('utf-8'))


def BytesByteSize(field_number, b):
  """Byte size of a tagged bytes field: tag + length varint + contents."""
  return (TagByteSize(field_number)
          + _VarUInt64ByteSizeNoTag(len(b))
          + len(b))


def GroupByteSize(field_number, message):
  """Byte size of a group field: START and END tags bracket the payload."""
  return (2 * TagByteSize(field_number)
          + message.ByteSize())


def MessageByteSize(field_number, message):
  """Byte size of an embedded message field: tag + length varint + payload."""
  return (TagByteSize(field_number)
          + _VarUInt64ByteSizeNoTag(message.ByteSize())
          + message.ByteSize())


def MessageSetItemByteSize(field_number, msg):
  """Byte size of one MessageSet item carrying `msg` under `field_number`."""
  # Tags: the repeated group (field 1) opens and closes the item, plus one
  # tag for type_id (field 2) and one for message (field 3).
  total_size = (2 * TagByteSize(1) + TagByteSize(2) + TagByteSize(3))

  # Varint bytes for type_id.
  total_size += _VarUInt64ByteSizeNoTag(field_number)

  message_size = msg.ByteSize()

  # Varint bytes for the message length, then the message itself.
  total_size += _VarUInt64ByteSizeNoTag(message_size)
  total_size += message_size
  return total_size


def TagByteSize(field_number):
  """Returns the bytes required to serialize a tag with this field number."""
  # Any wire type gives the same tag size; use 0.
  return _VarUInt64ByteSizeNoTag(PackTag(field_number, 0))


# Private helper function for the *ByteSize() functions above.

def _VarUInt64ByteSizeNoTag(uint64):
  """Returns the number of bytes in the varint encoding of `uint64`.

  uint64 must be unsigned.

  Raises:
    message.EncodeError: If uint64 exceeds UINT64_MAX.
  """
  if uint64 > UINT64_MAX:
    raise message.EncodeError('Value out of range: %d' % uint64)
  # Each varint byte carries 7 payload bits.
  size = 1
  while uint64 > 0x7f:
    uint64 >>= 7
    size += 1
  return size
|
||||
|
||||
|
||||
# Field types for which [packed = true] is never valid: length-delimited
# and group-encoded types.
NON_PACKABLE_TYPES = (
    descriptor.FieldDescriptor.TYPE_STRING,
    descriptor.FieldDescriptor.TYPE_GROUP,
    descriptor.FieldDescriptor.TYPE_MESSAGE,
    descriptor.FieldDescriptor.TYPE_BYTES,
)


def IsTypePackable(field_type):
  """Return true iff packable = true is valid for fields of this type.

  Args:
    field_type: a FieldDescriptor::Type value.

  Returns:
    True iff fields of this type are packable.
  """
  return field_type not in NON_PACKABLE_TYPES
|
||||
@@ -0,0 +1,253 @@
|
||||
#! /usr/bin/python
|
||||
#
|
||||
# Protocol Buffers - Google's data interchange format
|
||||
# Copyright 2008 Google Inc. All rights reserved.
|
||||
# https://developers.google.com/protocol-buffers/
|
||||
#
|
||||
# Redistribution and use in source and binary forms, with or without
|
||||
# modification, are permitted provided that the following conditions are
|
||||
# met:
|
||||
#
|
||||
# * Redistributions of source code must retain the above copyright
|
||||
# notice, this list of conditions and the following disclaimer.
|
||||
# * Redistributions in binary form must reproduce the above
|
||||
# copyright notice, this list of conditions and the following disclaimer
|
||||
# in the documentation and/or other materials provided with the
|
||||
# distribution.
|
||||
# * Neither the name of Google Inc. nor the names of its
|
||||
# contributors may be used to endorse or promote products derived from
|
||||
# this software without specific prior written permission.
|
||||
#
|
||||
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
||||
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
||||
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
||||
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
||||
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
||||
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
||||
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
||||
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
||||
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
||||
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
"""Test for google.protobuf.internal.wire_format."""
|
||||
|
||||
__author__ = 'robinson@google.com (Will Robinson)'
|
||||
|
||||
from google.apputils import basetest
|
||||
from google.protobuf import message
|
||||
from google.protobuf.internal import wire_format
|
||||
|
||||
|
||||
class WireFormatTest(basetest.TestCase):
  """Unit tests for the helpers in google.protobuf.internal.wire_format."""

  def testPackTag(self):
    field_number = 0xabc
    tag_type = 2
    self.assertEqual((field_number << 3) | tag_type,
                     wire_format.PackTag(field_number, tag_type))
    # Wire types outside [0, 5] must be rejected.
    for bad_type in (6, -1):
      self.assertRaises(message.EncodeError,
                        wire_format.PackTag, field_number, bad_type)

  def testUnpackTag(self):
    # Field numbers chosen to require 1, 2 and 3 varint bytes.
    for expected_number in (1, 15, 16, 2047, 2048):
      for expected_type in range(6):  # Highest-numbered wiretype is 5.
        number, wiretype = wire_format.UnpackTag(
            wire_format.PackTag(expected_number, expected_type))
        self.assertEqual(expected_number, number)
        self.assertEqual(expected_type, wiretype)

    for bad_arg in (None, 'abc', 0.0, object()):
      self.assertRaises(TypeError, wire_format.UnpackTag, bad_arg)

  def testZigZagEncode(self):
    zigzag = wire_format.ZigZagEncode
    for arg, expected in ((0, 0), (-1, 1), (1, 2), (-2, 3), (2, 4),
                          (0x7fffffff, 0xfffffffe),
                          (-0x80000000, 0xffffffff),
                          (0x7fffffffffffffff, 0xfffffffffffffffe),
                          (-0x8000000000000000, 0xffffffffffffffff)):
      self.assertEqual(expected, zigzag(arg))

    for bad_arg in (None, 'abcd', 0.0, object()):
      self.assertRaises(TypeError, zigzag, bad_arg)

  def testZigZagDecode(self):
    zigzag = wire_format.ZigZagDecode
    for arg, expected in ((0, 0), (1, -1), (2, 1), (3, -2), (4, 2),
                          (0xfffffffe, 0x7fffffff),
                          (0xffffffff, -0x80000000),
                          (0xfffffffffffffffe, 0x7fffffffffffffff),
                          (0xffffffffffffffff, -0x8000000000000000)):
      self.assertEqual(expected, zigzag(arg))

    for bad_arg in (None, 'abcd', 0.0, object()):
      self.assertRaises(TypeError, zigzag, bad_arg)

  def NumericByteSizeTestHelper(self, byte_size_fn, value, expected_value_size):
    """Asserts byte_size_fn(field_number, value) for several tag widths."""
    # Field numbers chosen so the tag encodes in 1, 2 or 3 bytes.
    for field_number, tag_bytes in ((15, 1), (16, 2), (2047, 2), (2048, 3)):
      expected_size = expected_value_size + tag_bytes
      actual_size = byte_size_fn(field_number, value)
      self.assertEqual(expected_size, actual_size,
                       'byte_size_fn: %s, field_number: %d, value: %r\n'
                       'Expected: %d, Actual: %d'% (
          byte_size_fn, field_number, value, expected_size, actual_size))

  def testByteSizeFunctions(self):
    # Each entry: (byte-size function, value, expected size of the value).
    numeric_cases = (
        # Int32ByteSize().
        (wire_format.Int32ByteSize, 0, 1),
        (wire_format.Int32ByteSize, 127, 1),
        (wire_format.Int32ByteSize, 128, 2),
        (wire_format.Int32ByteSize, -1, 10),
        # Int64ByteSize().
        (wire_format.Int64ByteSize, 0, 1),
        (wire_format.Int64ByteSize, 127, 1),
        (wire_format.Int64ByteSize, 128, 2),
        (wire_format.Int64ByteSize, -1, 10),
        # UInt32ByteSize().
        (wire_format.UInt32ByteSize, 0, 1),
        (wire_format.UInt32ByteSize, 127, 1),
        (wire_format.UInt32ByteSize, 128, 2),
        (wire_format.UInt32ByteSize, wire_format.UINT32_MAX, 5),
        # UInt64ByteSize().
        (wire_format.UInt64ByteSize, 0, 1),
        (wire_format.UInt64ByteSize, 127, 1),
        (wire_format.UInt64ByteSize, 128, 2),
        (wire_format.UInt64ByteSize, wire_format.UINT64_MAX, 10),
        # SInt32ByteSize().
        (wire_format.SInt32ByteSize, 0, 1),
        (wire_format.SInt32ByteSize, -1, 1),
        (wire_format.SInt32ByteSize, 1, 1),
        (wire_format.SInt32ByteSize, -63, 1),
        (wire_format.SInt32ByteSize, 63, 1),
        (wire_format.SInt32ByteSize, -64, 1),
        (wire_format.SInt32ByteSize, 64, 2),
        # SInt64ByteSize().
        (wire_format.SInt64ByteSize, 0, 1),
        (wire_format.SInt64ByteSize, -1, 1),
        (wire_format.SInt64ByteSize, 1, 1),
        (wire_format.SInt64ByteSize, -63, 1),
        (wire_format.SInt64ByteSize, 63, 1),
        (wire_format.SInt64ByteSize, -64, 1),
        (wire_format.SInt64ByteSize, 64, 2),
        # Fixed32ByteSize().
        (wire_format.Fixed32ByteSize, 0, 4),
        (wire_format.Fixed32ByteSize, wire_format.UINT32_MAX, 4),
        # Fixed64ByteSize().
        (wire_format.Fixed64ByteSize, 0, 8),
        (wire_format.Fixed64ByteSize, wire_format.UINT64_MAX, 8),
        # SFixed32ByteSize().
        (wire_format.SFixed32ByteSize, 0, 4),
        (wire_format.SFixed32ByteSize, wire_format.INT32_MIN, 4),
        (wire_format.SFixed32ByteSize, wire_format.INT32_MAX, 4),
        # SFixed64ByteSize().
        (wire_format.SFixed64ByteSize, 0, 8),
        (wire_format.SFixed64ByteSize, wire_format.INT64_MIN, 8),
        (wire_format.SFixed64ByteSize, wire_format.INT64_MAX, 8),
        # FloatByteSize().
        (wire_format.FloatByteSize, 0.0, 4),
        (wire_format.FloatByteSize, 1000000000.0, 4),
        (wire_format.FloatByteSize, -1000000000.0, 4),
        # DoubleByteSize().
        (wire_format.DoubleByteSize, 0.0, 8),
        (wire_format.DoubleByteSize, 1000000000.0, 8),
        (wire_format.DoubleByteSize, -1000000000.0, 8),
        # BoolByteSize().
        (wire_format.BoolByteSize, False, 1),
        (wire_format.BoolByteSize, True, 1),
        # EnumByteSize().
        (wire_format.EnumByteSize, 0, 1),
        (wire_format.EnumByteSize, 127, 1),
        (wire_format.EnumByteSize, 128, 2),
        (wire_format.EnumByteSize, wire_format.UINT32_MAX, 5),
    )
    for byte_size_fn, value, expected_value_size in numeric_cases:
      self.NumericByteSizeTestHelper(byte_size_fn, value, expected_value_size)

    # Strings and bytes: tag + length varint + contents.
    for byte_size_fn in (wire_format.StringByteSize, wire_format.BytesByteSize):
      # 1 byte for tag, 1 byte for length, 3 bytes for contents.
      self.assertEqual(5, byte_size_fn(10, 'abc'))
      # 2 bytes for tag, 1 byte for length, 3 bytes for contents.
      self.assertEqual(6, byte_size_fn(16, 'abc'))
      # 2 bytes for tag, 2 bytes for length, 128 bytes for contents.
      self.assertEqual(132, byte_size_fn(16, 'a' * 128))

    # String sizes are computed on the UTF-8 encoding:
    # 1 byte for tag, 1 byte for length, 8 bytes for content.
    self.assertEqual(10, wire_format.StringByteSize(
        5, b'\xd0\xa2\xd0\xb5\xd1\x81\xd1\x82'.decode('utf-8')))

    class MockMessage(object):
      """Stand-in message exposing only ByteSize()."""

      def __init__(self, byte_size):
        self.byte_size = byte_size

      def ByteSize(self):
        return self.byte_size

    message_byte_size = 10
    mock_message = MockMessage(byte_size=message_byte_size)
    # Groups: (2 * tag size) bytes for begin/end tags, plus the payload.
    self.assertEqual(2 + message_byte_size,
                     wire_format.GroupByteSize(1, mock_message))
    self.assertEqual(4 + message_byte_size,
                     wire_format.GroupByteSize(16, mock_message))

    # Messages: tag + length varint + payload.
    self.assertEqual(2 + mock_message.byte_size,
                     wire_format.MessageByteSize(1, mock_message))
    self.assertEqual(3 + mock_message.byte_size,
                     wire_format.MessageByteSize(16, mock_message))
    mock_message.byte_size = 128  # Length now needs a 2-byte varint.
    self.assertEqual(4 + mock_message.byte_size,
                     wire_format.MessageByteSize(16, mock_message))

    # Message-set items: 4 bytes of tags, plus the length varint, plus the
    # type_id varint, plus the payload.
    mock_message.byte_size = 10
    self.assertEqual(mock_message.byte_size + 6,
                     wire_format.MessageSetItemByteSize(1, mock_message))

    mock_message.byte_size = 128
    self.assertEqual(mock_message.byte_size + 7,
                     wire_format.MessageSetItemByteSize(1, mock_message))

    # A 2-byte type_id adds one more byte.
    self.assertEqual(mock_message.byte_size + 8,
                     wire_format.MessageSetItemByteSize(128, mock_message))

    # A varint wider than 64 bits must be rejected.
    self.assertRaises(message.EncodeError,
                      wire_format.UInt64ByteSize, 1, 1 << 128)
|
||||
|
||||
|
||||
# Run the tests under google.apputils' test runner.
if __name__ == '__main__':
  basetest.main()
|
||||
284
csgo2/sdk/protobuf-2.6.1/python/google/protobuf/message.py
Normal file
284
csgo2/sdk/protobuf-2.6.1/python/google/protobuf/message.py
Normal file
@@ -0,0 +1,284 @@
|
||||
# Protocol Buffers - Google's data interchange format
|
||||
# Copyright 2008 Google Inc. All rights reserved.
|
||||
# https://developers.google.com/protocol-buffers/
|
||||
#
|
||||
# Redistribution and use in source and binary forms, with or without
|
||||
# modification, are permitted provided that the following conditions are
|
||||
# met:
|
||||
#
|
||||
# * Redistributions of source code must retain the above copyright
|
||||
# notice, this list of conditions and the following disclaimer.
|
||||
# * Redistributions in binary form must reproduce the above
|
||||
# copyright notice, this list of conditions and the following disclaimer
|
||||
# in the documentation and/or other materials provided with the
|
||||
# distribution.
|
||||
# * Neither the name of Google Inc. nor the names of its
|
||||
# contributors may be used to endorse or promote products derived from
|
||||
# this software without specific prior written permission.
|
||||
#
|
||||
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
||||
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
||||
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
||||
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
||||
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
||||
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
||||
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
||||
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
||||
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
||||
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
# TODO(robinson): We should just make these methods all "pure-virtual" and move
|
||||
# all implementation out, into reflection.py for now.
|
||||
|
||||
|
||||
"""Contains an abstract base class for protocol messages."""
|
||||
|
||||
__author__ = 'robinson@google.com (Will Robinson)'
|
||||
|
||||
|
||||
class Error(Exception): pass
|
||||
class DecodeError(Error): pass
|
||||
class EncodeError(Error): pass
|
||||
|
||||
|
||||
class Message(object):
|
||||
|
||||
"""Abstract base class for protocol messages.
|
||||
|
||||
Protocol message classes are almost always generated by the protocol
|
||||
compiler. These generated types subclass Message and implement the methods
|
||||
shown below.
|
||||
|
||||
TODO(robinson): Link to an HTML document here.
|
||||
|
||||
TODO(robinson): Document that instances of this class will also
|
||||
have an Extensions attribute with __getitem__ and __setitem__.
|
||||
Again, not sure how to best convey this.
|
||||
|
||||
TODO(robinson): Document that the class must also have a static
|
||||
RegisterExtension(extension_field) method.
|
||||
Not sure how to best express at this point.
|
||||
"""
|
||||
|
||||
# TODO(robinson): Document these fields and methods.
|
||||
|
||||
__slots__ = []
|
||||
|
||||
DESCRIPTOR = None
|
||||
|
||||
def __deepcopy__(self, memo=None):
|
||||
clone = type(self)()
|
||||
clone.MergeFrom(self)
|
||||
return clone
|
||||
|
||||
def __eq__(self, other_msg):
|
||||
"""Recursively compares two messages by value and structure."""
|
||||
raise NotImplementedError
|
||||
|
||||
def __ne__(self, other_msg):
|
||||
# Can't just say self != other_msg, since that would infinitely recurse. :)
|
||||
return not self == other_msg
|
||||
|
||||
def __hash__(self):
|
||||
raise TypeError('unhashable object')
|
||||
|
||||
def __str__(self):
|
||||
"""Outputs a human-readable representation of the message."""
|
||||
raise NotImplementedError
|
||||
|
||||
def __unicode__(self):
|
||||
"""Outputs a human-readable representation of the message."""
|
||||
raise NotImplementedError
|
||||
|
||||
def MergeFrom(self, other_msg):
|
||||
"""Merges the contents of the specified message into current message.
|
||||
|
||||
This method merges the contents of the specified message into the current
|
||||
message. Singular fields that are set in the specified message overwrite
|
||||
the corresponding fields in the current message. Repeated fields are
|
||||
appended. Singular sub-messages and groups are recursively merged.
|
||||
|
||||
Args:
|
||||
other_msg: Message to merge into the current message.
|
||||
"""
|
||||
raise NotImplementedError
|
||||
|
||||
def CopyFrom(self, other_msg):
|
||||
"""Copies the content of the specified message into the current message.
|
||||
|
||||
The method clears the current message and then merges the specified
|
||||
message using MergeFrom.
|
||||
|
||||
Args:
|
||||
other_msg: Message to copy into the current one.
|
||||
"""
|
||||
if self is other_msg:
|
||||
return
|
||||
self.Clear()
|
||||
self.MergeFrom(other_msg)
|
||||
|
||||
def Clear(self):
|
||||
"""Clears all data that was set in the message."""
|
||||
raise NotImplementedError
|
||||
|
||||
def SetInParent(self):
|
||||
"""Mark this as present in the parent.
|
||||
|
||||
This normally happens automatically when you assign a field of a
|
||||
sub-message, but sometimes you want to make the sub-message
|
||||
present while keeping it empty. If you find yourself using this,
|
||||
you may want to reconsider your design."""
|
||||
raise NotImplementedError
|
||||
|
||||
def IsInitialized(self):
|
||||
"""Checks if the message is initialized.
|
||||
|
||||
Returns:
|
||||
The method returns True if the message is initialized (i.e. all of its
|
||||
required fields are set).
|
||||
"""
|
||||
raise NotImplementedError
|
||||
|
||||
# TODO(robinson): MergeFromString() should probably return None and be
|
||||
# implemented in terms of a helper that returns the # of bytes read. Our
|
||||
# deserialization routines would use the helper when recursively
|
||||
# deserializing, but the end user would almost always just want the no-return
|
||||
# MergeFromString().
|
||||
|
||||
def MergeFromString(self, serialized):
|
||||
"""Merges serialized protocol buffer data into this message.
|
||||
|
||||
When we find a field in |serialized| that is already present
|
||||
in this message:
|
||||
- If it's a "repeated" field, we append to the end of our list.
|
||||
- Else, if it's a scalar, we overwrite our field.
|
||||
- Else, (it's a nonrepeated composite), we recursively merge
|
||||
into the existing composite.
|
||||
|
||||
TODO(robinson): Document handling of unknown fields.
|
||||
|
||||
Args:
|
||||
serialized: Any object that allows us to call buffer(serialized)
|
||||
to access a string of bytes using the buffer interface.
|
||||
|
||||
TODO(robinson): When we switch to a helper, this will return None.
|
||||
|
||||
Returns:
|
||||
The number of bytes read from |serialized|.
|
||||
For non-group messages, this will always be len(serialized),
|
||||
but for messages which are actually groups, this will
|
||||
generally be less than len(serialized), since we must
|
||||
stop when we reach an END_GROUP tag. Note that if
|
||||
we *do* stop because of an END_GROUP tag, the number
|
||||
of bytes returned does not include the bytes
|
||||
for the END_GROUP tag information.
|
||||
"""
|
||||
raise NotImplementedError
|
||||
|
||||
def ParseFromString(self, serialized):
|
||||
"""Parse serialized protocol buffer data into this message.
|
||||
|
||||
Like MergeFromString(), except we clear the object first and
|
||||
do not return the value that MergeFromString returns.
|
||||
"""
|
||||
self.Clear()
|
||||
self.MergeFromString(serialized)
|
||||
|
||||
def SerializeToString(self):
|
||||
"""Serializes the protocol message to a binary string.
|
||||
|
||||
Returns:
|
||||
A binary string representation of the message if all of the required
|
||||
fields in the message are set (i.e. the message is initialized).
|
||||
|
||||
Raises:
|
||||
message.EncodeError if the message isn't initialized.
|
||||
"""
|
||||
raise NotImplementedError
|
||||
|
||||
def SerializePartialToString(self):
|
||||
"""Serializes the protocol message to a binary string.
|
||||
|
||||
This method is similar to SerializeToString but doesn't check if the
|
||||
message is initialized.
|
||||
|
||||
Returns:
|
||||
A string representation of the partial message.
|
||||
"""
|
||||
raise NotImplementedError
|
||||
|
||||
# TODO(robinson): Decide whether we like these better
|
||||
# than auto-generated has_foo() and clear_foo() methods
|
||||
# on the instances themselves. This way is less consistent
|
||||
# with C++, but it makes reflection-type access easier and
|
||||
# reduces the number of magically autogenerated things.
|
||||
#
|
||||
# TODO(robinson): Be sure to document (and test) exactly
|
||||
# which field names are accepted here. Are we case-sensitive?
|
||||
# What do we do with fields that share names with Python keywords
|
||||
# like 'lambda' and 'yield'?
|
||||
#
|
||||
# nnorwitz says:
|
||||
# """
|
||||
# Typically (in python), an underscore is appended to names that are
|
||||
# keywords. So they would become lambda_ or yield_.
|
||||
# """
|
||||
def ListFields(self):
|
||||
"""Returns a list of (FieldDescriptor, value) tuples for all
|
||||
fields in the message which are not empty. A singular field is non-empty
|
||||
if HasField() would return true, and a repeated field is non-empty if
|
||||
it contains at least one element. The fields are ordered by field
|
||||
number"""
|
||||
raise NotImplementedError
|
||||
|
||||
def HasField(self, field_name):
|
||||
"""Checks if a certain field is set for the message. Note if the
|
||||
field_name is not defined in the message descriptor, ValueError will be
|
||||
raised."""
|
||||
raise NotImplementedError
|
||||
|
||||
def ClearField(self, field_name):
|
||||
raise NotImplementedError
|
||||
|
||||
def HasExtension(self, extension_handle):
|
||||
raise NotImplementedError
|
||||
|
||||
def ClearExtension(self, extension_handle):
|
||||
raise NotImplementedError
|
||||
|
||||
def ByteSize(self):
|
||||
"""Returns the serialized size of this message.
|
||||
Recursively calls ByteSize() on all contained messages.
|
||||
"""
|
||||
raise NotImplementedError
|
||||
|
||||
def _SetListener(self, message_listener):
|
||||
"""Internal method used by the protocol message implementation.
|
||||
Clients should not call this directly.
|
||||
|
||||
Sets a listener that this message will call on certain state transitions.
|
||||
|
||||
The purpose of this method is to register back-edges from children to
|
||||
parents at runtime, for the purpose of setting "has" bits and
|
||||
byte-size-dirty bits in the parent and ancestor objects whenever a child or
|
||||
descendant object is modified.
|
||||
|
||||
If the client wants to disconnect this Message from the object tree, she
|
||||
explicitly sets callback to None.
|
||||
|
||||
If message_listener is None, unregisters any existing listener. Otherwise,
|
||||
message_listener must implement the MessageListener interface in
|
||||
internal/message_listener.py, and we discard any listener registered
|
||||
via a previous _SetListener() call.
|
||||
"""
|
||||
raise NotImplementedError
|
||||
|
||||
def __getstate__(self):
|
||||
"""Support the pickle protocol."""
|
||||
return dict(serialized=self.SerializePartialToString())
|
||||
|
||||
def __setstate__(self, state):
|
||||
"""Support the pickle protocol."""
|
||||
self.__init__()
|
||||
self.ParseFromString(state['serialized'])
|
||||
@@ -0,0 +1,155 @@
|
||||
# Protocol Buffers - Google's data interchange format
|
||||
# Copyright 2008 Google Inc. All rights reserved.
|
||||
# https://developers.google.com/protocol-buffers/
|
||||
#
|
||||
# Redistribution and use in source and binary forms, with or without
|
||||
# modification, are permitted provided that the following conditions are
|
||||
# met:
|
||||
#
|
||||
# * Redistributions of source code must retain the above copyright
|
||||
# notice, this list of conditions and the following disclaimer.
|
||||
# * Redistributions in binary form must reproduce the above
|
||||
# copyright notice, this list of conditions and the following disclaimer
|
||||
# in the documentation and/or other materials provided with the
|
||||
# distribution.
|
||||
# * Neither the name of Google Inc. nor the names of its
|
||||
# contributors may be used to endorse or promote products derived from
|
||||
# this software without specific prior written permission.
|
||||
#
|
||||
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
||||
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
||||
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
||||
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
||||
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
||||
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
||||
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
||||
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
||||
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
||||
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
#PY25 compatible for GAE.
|
||||
#
|
||||
# Copyright 2012 Google Inc. All Rights Reserved.
|
||||
|
||||
"""Provides a factory class for generating dynamic messages.
|
||||
|
||||
The easiest way to use this class is if you have access to the FileDescriptor
|
||||
protos containing the messages you want to create you can just do the following:
|
||||
|
||||
message_classes = message_factory.GetMessages(iterable_of_file_descriptors)
|
||||
my_proto_instance = message_classes['some.proto.package.MessageName']()
|
||||
"""
|
||||
|
||||
__author__ = 'matthewtoia@google.com (Matt Toia)'
|
||||
|
||||
import sys ##PY25
|
||||
from google.protobuf import descriptor_database
|
||||
from google.protobuf import descriptor_pool
|
||||
from google.protobuf import message
|
||||
from google.protobuf import reflection
|
||||
|
||||
|
||||
class MessageFactory(object):
|
||||
"""Factory for creating Proto2 messages from descriptors in a pool."""
|
||||
|
||||
def __init__(self, pool=None):
|
||||
"""Initializes a new factory."""
|
||||
self.pool = (pool or descriptor_pool.DescriptorPool(
|
||||
descriptor_database.DescriptorDatabase()))
|
||||
|
||||
# local cache of all classes built from protobuf descriptors
|
||||
self._classes = {}
|
||||
|
||||
def GetPrototype(self, descriptor):
|
||||
"""Builds a proto2 message class based on the passed in descriptor.
|
||||
|
||||
Passing a descriptor with a fully qualified name matching a previous
|
||||
invocation will cause the same class to be returned.
|
||||
|
||||
Args:
|
||||
descriptor: The descriptor to build from.
|
||||
|
||||
Returns:
|
||||
A class describing the passed in descriptor.
|
||||
"""
|
||||
if descriptor.full_name not in self._classes:
|
||||
descriptor_name = descriptor.name
|
||||
if sys.version_info[0] < 3: ##PY25
|
||||
##!PY25 if str is bytes: # PY2
|
||||
descriptor_name = descriptor.name.encode('ascii', 'ignore')
|
||||
result_class = reflection.GeneratedProtocolMessageType(
|
||||
descriptor_name,
|
||||
(message.Message,),
|
||||
{'DESCRIPTOR': descriptor, '__module__': None})
|
||||
# If module not set, it wrongly points to the reflection.py module.
|
||||
self._classes[descriptor.full_name] = result_class
|
||||
for field in descriptor.fields:
|
||||
if field.message_type:
|
||||
self.GetPrototype(field.message_type)
|
||||
for extension in result_class.DESCRIPTOR.extensions:
|
||||
if extension.containing_type.full_name not in self._classes:
|
||||
self.GetPrototype(extension.containing_type)
|
||||
extended_class = self._classes[extension.containing_type.full_name]
|
||||
extended_class.RegisterExtension(extension)
|
||||
return self._classes[descriptor.full_name]
|
||||
|
||||
def GetMessages(self, files):
|
||||
"""Gets all the messages from a specified file.
|
||||
|
||||
This will find and resolve dependencies, failing if the descriptor
|
||||
pool cannot satisfy them.
|
||||
|
||||
Args:
|
||||
files: The file names to extract messages from.
|
||||
|
||||
Returns:
|
||||
A dictionary mapping proto names to the message classes. This will include
|
||||
any dependent messages as well as any messages defined in the same file as
|
||||
a specified message.
|
||||
"""
|
||||
result = {}
|
||||
for file_name in files:
|
||||
file_desc = self.pool.FindFileByName(file_name)
|
||||
for name, msg in file_desc.message_types_by_name.iteritems():
|
||||
if file_desc.package:
|
||||
full_name = '.'.join([file_desc.package, name])
|
||||
else:
|
||||
full_name = msg.name
|
||||
result[full_name] = self.GetPrototype(
|
||||
self.pool.FindMessageTypeByName(full_name))
|
||||
|
||||
# While the extension FieldDescriptors are created by the descriptor pool,
|
||||
# the python classes created in the factory need them to be registered
|
||||
# explicitly, which is done below.
|
||||
#
|
||||
# The call to RegisterExtension will specifically check if the
|
||||
# extension was already registered on the object and either
|
||||
# ignore the registration if the original was the same, or raise
|
||||
# an error if they were different.
|
||||
|
||||
for name, extension in file_desc.extensions_by_name.iteritems():
|
||||
if extension.containing_type.full_name not in self._classes:
|
||||
self.GetPrototype(extension.containing_type)
|
||||
extended_class = self._classes[extension.containing_type.full_name]
|
||||
extended_class.RegisterExtension(extension)
|
||||
return result
|
||||
|
||||
|
||||
_FACTORY = MessageFactory()
|
||||
|
||||
|
||||
def GetMessages(file_protos):
|
||||
"""Builds a dictionary of all the messages available in a set of files.
|
||||
|
||||
Args:
|
||||
file_protos: A sequence of file protos to build messages out of.
|
||||
|
||||
Returns:
|
||||
A dictionary mapping proto names to the message classes. This will include
|
||||
any dependent messages as well as any messages defined in the same file as
|
||||
a specified message.
|
||||
"""
|
||||
for file_proto in file_protos:
|
||||
_FACTORY.pool.Add(file_proto)
|
||||
return _FACTORY.GetMessages([file_proto.name for file_proto in file_protos])
|
||||
@@ -0,0 +1,6 @@
|
||||
This is the 'v2' C++ implementation for python proto2.
|
||||
|
||||
It is active when:
|
||||
|
||||
PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION=cpp
|
||||
PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION_VERSION=2
|
||||
@@ -0,0 +1,61 @@
|
||||
# Protocol Buffers - Google's data interchange format
|
||||
# Copyright 2008 Google Inc. All rights reserved.
|
||||
# https://developers.google.com/protocol-buffers/
|
||||
#
|
||||
# Redistribution and use in source and binary forms, with or without
|
||||
# modification, are permitted provided that the following conditions are
|
||||
# met:
|
||||
#
|
||||
# * Redistributions of source code must retain the above copyright
|
||||
# notice, this list of conditions and the following disclaimer.
|
||||
# * Redistributions in binary form must reproduce the above
|
||||
# copyright notice, this list of conditions and the following disclaimer
|
||||
# in the documentation and/or other materials provided with the
|
||||
# distribution.
|
||||
# * Neither the name of Google Inc. nor the names of its
|
||||
# contributors may be used to endorse or promote products derived from
|
||||
# this software without specific prior written permission.
|
||||
#
|
||||
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
||||
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
||||
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
||||
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
||||
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
||||
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
||||
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
||||
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
||||
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
||||
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
"""Protocol message implementation hooks for C++ implementation.
|
||||
|
||||
Contains helper functions used to create protocol message classes from
|
||||
Descriptor objects at runtime backed by the protocol buffer C++ API.
|
||||
"""
|
||||
|
||||
__author__ = 'tibell@google.com (Johan Tibell)'
|
||||
|
||||
from google.protobuf.pyext import _message
|
||||
from google.protobuf import message
|
||||
|
||||
|
||||
def NewMessage(bases, message_descriptor, dictionary):
|
||||
"""Creates a new protocol message *class*."""
|
||||
new_bases = []
|
||||
for base in bases:
|
||||
if base is message.Message:
|
||||
# _message.Message must come before message.Message as it
|
||||
# overrides methods in that class.
|
||||
new_bases.append(_message.Message)
|
||||
new_bases.append(base)
|
||||
return tuple(new_bases)
|
||||
|
||||
|
||||
def InitMessage(message_descriptor, cls):
|
||||
"""Constructs a new message instance (called before instance's __init__)."""
|
||||
|
||||
def SubInit(self, **kwargs):
|
||||
super(cls, self).__init__(message_descriptor, **kwargs)
|
||||
cls.__init__ = SubInit
|
||||
cls.AddDescriptors(message_descriptor)
|
||||
@@ -0,0 +1,357 @@
|
||||
// Protocol Buffers - Google's data interchange format
|
||||
// Copyright 2008 Google Inc. All rights reserved.
|
||||
// https://developers.google.com/protocol-buffers/
|
||||
//
|
||||
// Redistribution and use in source and binary forms, with or without
|
||||
// modification, are permitted provided that the following conditions are
|
||||
// met:
|
||||
//
|
||||
// * Redistributions of source code must retain the above copyright
|
||||
// notice, this list of conditions and the following disclaimer.
|
||||
// * Redistributions in binary form must reproduce the above
|
||||
// copyright notice, this list of conditions and the following disclaimer
|
||||
// in the documentation and/or other materials provided with the
|
||||
// distribution.
|
||||
// * Neither the name of Google Inc. nor the names of its
|
||||
// contributors may be used to endorse or promote products derived from
|
||||
// this software without specific prior written permission.
|
||||
//
|
||||
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
||||
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
||||
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
||||
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
||||
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
||||
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
||||
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
||||
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
||||
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
||||
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
// Author: petar@google.com (Petar Petrov)
|
||||
|
||||
#include <Python.h>
|
||||
#include <string>
|
||||
|
||||
#include <google/protobuf/descriptor.pb.h>
|
||||
#include <google/protobuf/pyext/descriptor.h>
|
||||
#include <google/protobuf/pyext/scoped_pyobject_ptr.h>
|
||||
|
||||
#define C(str) const_cast<char*>(str)
|
||||
|
||||
#if PY_MAJOR_VERSION >= 3
|
||||
#define PyString_FromStringAndSize PyUnicode_FromStringAndSize
|
||||
#define PyInt_FromLong PyLong_FromLong
|
||||
#if PY_VERSION_HEX < 0x03030000
|
||||
#error "Python 3.0 - 3.2 are not supported."
|
||||
#else
|
||||
#define PyString_AsString(ob) \
|
||||
(PyUnicode_Check(ob)? PyUnicode_AsUTF8(ob): PyBytes_AS_STRING(ob))
|
||||
#endif
|
||||
#endif
|
||||
|
||||
namespace google {
|
||||
namespace protobuf {
|
||||
namespace python {
|
||||
|
||||
|
||||
#ifndef PyVarObject_HEAD_INIT
|
||||
#define PyVarObject_HEAD_INIT(type, size) PyObject_HEAD_INIT(type) size,
|
||||
#endif
|
||||
#ifndef Py_TYPE
|
||||
#define Py_TYPE(ob) (((PyObject*)(ob))->ob_type)
|
||||
#endif
|
||||
|
||||
|
||||
static google::protobuf::DescriptorPool* g_descriptor_pool = NULL;
|
||||
|
||||
namespace cfield_descriptor {
|
||||
|
||||
static void Dealloc(CFieldDescriptor* self) {
|
||||
Py_CLEAR(self->descriptor_field);
|
||||
Py_TYPE(self)->tp_free(reinterpret_cast<PyObject*>(self));
|
||||
}
|
||||
|
||||
static PyObject* GetFullName(CFieldDescriptor* self, void *closure) {
|
||||
return PyString_FromStringAndSize(
|
||||
self->descriptor->full_name().c_str(),
|
||||
self->descriptor->full_name().size());
|
||||
}
|
||||
|
||||
static PyObject* GetName(CFieldDescriptor *self, void *closure) {
|
||||
return PyString_FromStringAndSize(
|
||||
self->descriptor->name().c_str(),
|
||||
self->descriptor->name().size());
|
||||
}
|
||||
|
||||
static PyObject* GetCppType(CFieldDescriptor *self, void *closure) {
|
||||
return PyInt_FromLong(self->descriptor->cpp_type());
|
||||
}
|
||||
|
||||
static PyObject* GetLabel(CFieldDescriptor *self, void *closure) {
|
||||
return PyInt_FromLong(self->descriptor->label());
|
||||
}
|
||||
|
||||
static PyObject* GetID(CFieldDescriptor *self, void *closure) {
|
||||
return PyLong_FromVoidPtr(self);
|
||||
}
|
||||
|
||||
static PyGetSetDef Getters[] = {
|
||||
{ C("full_name"), (getter)GetFullName, NULL, "Full name", NULL},
|
||||
{ C("name"), (getter)GetName, NULL, "last name", NULL},
|
||||
{ C("cpp_type"), (getter)GetCppType, NULL, "C++ Type", NULL},
|
||||
{ C("label"), (getter)GetLabel, NULL, "Label", NULL},
|
||||
{ C("id"), (getter)GetID, NULL, "ID", NULL},
|
||||
{NULL}
|
||||
};
|
||||
|
||||
} // namespace cfield_descriptor
|
||||
|
||||
PyTypeObject CFieldDescriptor_Type = {
|
||||
PyVarObject_HEAD_INIT(&PyType_Type, 0)
|
||||
C("google.protobuf.internal."
|
||||
"_net_proto2___python."
|
||||
"CFieldDescriptor"), // tp_name
|
||||
sizeof(CFieldDescriptor), // tp_basicsize
|
||||
0, // tp_itemsize
|
||||
(destructor)cfield_descriptor::Dealloc, // tp_dealloc
|
||||
0, // tp_print
|
||||
0, // tp_getattr
|
||||
0, // tp_setattr
|
||||
0, // tp_compare
|
||||
0, // tp_repr
|
||||
0, // tp_as_number
|
||||
0, // tp_as_sequence
|
||||
0, // tp_as_mapping
|
||||
0, // tp_hash
|
||||
0, // tp_call
|
||||
0, // tp_str
|
||||
0, // tp_getattro
|
||||
0, // tp_setattro
|
||||
0, // tp_as_buffer
|
||||
Py_TPFLAGS_DEFAULT, // tp_flags
|
||||
C("A Field Descriptor"), // tp_doc
|
||||
0, // tp_traverse
|
||||
0, // tp_clear
|
||||
0, // tp_richcompare
|
||||
0, // tp_weaklistoffset
|
||||
0, // tp_iter
|
||||
0, // tp_iternext
|
||||
0, // tp_methods
|
||||
0, // tp_members
|
||||
cfield_descriptor::Getters, // tp_getset
|
||||
0, // tp_base
|
||||
0, // tp_dict
|
||||
0, // tp_descr_get
|
||||
0, // tp_descr_set
|
||||
0, // tp_dictoffset
|
||||
0, // tp_init
|
||||
PyType_GenericAlloc, // tp_alloc
|
||||
PyType_GenericNew, // tp_new
|
||||
PyObject_Del, // tp_free
|
||||
};
|
||||
|
||||
namespace cdescriptor_pool {
|
||||
|
||||
static void Dealloc(CDescriptorPool* self) {
|
||||
Py_TYPE(self)->tp_free(reinterpret_cast<PyObject*>(self));
|
||||
}
|
||||
|
||||
static PyObject* NewCDescriptor(
|
||||
const google::protobuf::FieldDescriptor* field_descriptor) {
|
||||
CFieldDescriptor* cfield_descriptor = PyObject_New(
|
||||
CFieldDescriptor, &CFieldDescriptor_Type);
|
||||
if (cfield_descriptor == NULL) {
|
||||
return NULL;
|
||||
}
|
||||
cfield_descriptor->descriptor = field_descriptor;
|
||||
cfield_descriptor->descriptor_field = NULL;
|
||||
|
||||
return reinterpret_cast<PyObject*>(cfield_descriptor);
|
||||
}
|
||||
|
||||
PyObject* FindFieldByName(CDescriptorPool* self, PyObject* name) {
|
||||
const char* full_field_name = PyString_AsString(name);
|
||||
if (full_field_name == NULL) {
|
||||
return NULL;
|
||||
}
|
||||
|
||||
const google::protobuf::FieldDescriptor* field_descriptor = NULL;
|
||||
|
||||
field_descriptor = self->pool->FindFieldByName(full_field_name);
|
||||
|
||||
if (field_descriptor == NULL) {
|
||||
PyErr_Format(PyExc_TypeError, "Couldn't find field %.200s",
|
||||
full_field_name);
|
||||
return NULL;
|
||||
}
|
||||
|
||||
return NewCDescriptor(field_descriptor);
|
||||
}
|
||||
|
||||
PyObject* FindExtensionByName(CDescriptorPool* self, PyObject* arg) {
|
||||
const char* full_field_name = PyString_AsString(arg);
|
||||
if (full_field_name == NULL) {
|
||||
return NULL;
|
||||
}
|
||||
|
||||
const google::protobuf::FieldDescriptor* field_descriptor =
|
||||
self->pool->FindExtensionByName(full_field_name);
|
||||
if (field_descriptor == NULL) {
|
||||
PyErr_Format(PyExc_TypeError, "Couldn't find field %.200s",
|
||||
full_field_name);
|
||||
return NULL;
|
||||
}
|
||||
|
||||
return NewCDescriptor(field_descriptor);
|
||||
}
|
||||
|
||||
static PyMethodDef Methods[] = {
|
||||
{ C("FindFieldByName"),
|
||||
(PyCFunction)FindFieldByName,
|
||||
METH_O,
|
||||
C("Searches for a field descriptor by full name.") },
|
||||
{ C("FindExtensionByName"),
|
||||
(PyCFunction)FindExtensionByName,
|
||||
METH_O,
|
||||
C("Searches for extension descriptor by full name.") },
|
||||
{NULL}
|
||||
};
|
||||
|
||||
} // namespace cdescriptor_pool
|
||||
|
||||
PyTypeObject CDescriptorPool_Type = {
|
||||
PyVarObject_HEAD_INIT(&PyType_Type, 0)
|
||||
C("google.protobuf.internal."
|
||||
"_net_proto2___python."
|
||||
"CFieldDescriptor"), // tp_name
|
||||
sizeof(CDescriptorPool), // tp_basicsize
|
||||
0, // tp_itemsize
|
||||
(destructor)cdescriptor_pool::Dealloc, // tp_dealloc
|
||||
0, // tp_print
|
||||
0, // tp_getattr
|
||||
0, // tp_setattr
|
||||
0, // tp_compare
|
||||
0, // tp_repr
|
||||
0, // tp_as_number
|
||||
0, // tp_as_sequence
|
||||
0, // tp_as_mapping
|
||||
0, // tp_hash
|
||||
0, // tp_call
|
||||
0, // tp_str
|
||||
0, // tp_getattro
|
||||
0, // tp_setattro
|
||||
0, // tp_as_buffer
|
||||
Py_TPFLAGS_DEFAULT, // tp_flags
|
||||
C("A Descriptor Pool"), // tp_doc
|
||||
0, // tp_traverse
|
||||
0, // tp_clear
|
||||
0, // tp_richcompare
|
||||
0, // tp_weaklistoffset
|
||||
0, // tp_iter
|
||||
0, // tp_iternext
|
||||
cdescriptor_pool::Methods, // tp_methods
|
||||
0, // tp_members
|
||||
0, // tp_getset
|
||||
0, // tp_base
|
||||
0, // tp_dict
|
||||
0, // tp_descr_get
|
||||
0, // tp_descr_set
|
||||
0, // tp_dictoffset
|
||||
0, // tp_init
|
||||
PyType_GenericAlloc, // tp_alloc
|
||||
PyType_GenericNew, // tp_new
|
||||
PyObject_Del, // tp_free
|
||||
};
|
||||
|
||||
google::protobuf::DescriptorPool* GetDescriptorPool() {
|
||||
if (g_descriptor_pool == NULL) {
|
||||
g_descriptor_pool = new google::protobuf::DescriptorPool(
|
||||
google::protobuf::DescriptorPool::generated_pool());
|
||||
}
|
||||
return g_descriptor_pool;
|
||||
}
|
||||
|
||||
PyObject* Python_NewCDescriptorPool(PyObject* ignored, PyObject* args) {
|
||||
CDescriptorPool* cdescriptor_pool = PyObject_New(
|
||||
CDescriptorPool, &CDescriptorPool_Type);
|
||||
if (cdescriptor_pool == NULL) {
|
||||
return NULL;
|
||||
}
|
||||
cdescriptor_pool->pool = GetDescriptorPool();
|
||||
return reinterpret_cast<PyObject*>(cdescriptor_pool);
|
||||
}
|
||||
|
||||
|
||||
// Collects errors that occur during proto file building to allow them to be
|
||||
// propagated in the python exception instead of only living in ERROR logs.
|
||||
class BuildFileErrorCollector : public google::protobuf::DescriptorPool::ErrorCollector {
|
||||
public:
|
||||
BuildFileErrorCollector() : error_message(""), had_errors(false) {}
|
||||
|
||||
void AddError(const string& filename, const string& element_name,
|
||||
const Message* descriptor, ErrorLocation location,
|
||||
const string& message) {
|
||||
// Replicates the logging behavior that happens in the C++ implementation
|
||||
// when an error collector is not passed in.
|
||||
if (!had_errors) {
|
||||
error_message +=
|
||||
("Invalid proto descriptor for file \"" + filename + "\":\n");
|
||||
}
|
||||
// As this only happens on failure and will result in the program not
|
||||
// running at all, no effort is made to optimize this string manipulation.
|
||||
error_message += (" " + element_name + ": " + message + "\n");
|
||||
}
|
||||
|
||||
string error_message;
|
||||
bool had_errors;
|
||||
};
|
||||
|
||||
PyObject* Python_BuildFile(PyObject* ignored, PyObject* arg) {
|
||||
char* message_type;
|
||||
Py_ssize_t message_len;
|
||||
|
||||
if (PyBytes_AsStringAndSize(arg, &message_type, &message_len) < 0) {
|
||||
return NULL;
|
||||
}
|
||||
|
||||
google::protobuf::FileDescriptorProto file_proto;
|
||||
if (!file_proto.ParseFromArray(message_type, message_len)) {
|
||||
PyErr_SetString(PyExc_TypeError, "Couldn't parse file content!");
|
||||
return NULL;
|
||||
}
|
||||
|
||||
if (google::protobuf::DescriptorPool::generated_pool()->FindFileByName(
|
||||
file_proto.name()) != NULL) {
|
||||
Py_RETURN_NONE;
|
||||
}
|
||||
|
||||
BuildFileErrorCollector error_collector;
|
||||
const google::protobuf::FileDescriptor* descriptor =
|
||||
GetDescriptorPool()->BuildFileCollectingErrors(file_proto,
|
||||
&error_collector);
|
||||
if (descriptor == NULL) {
|
||||
PyErr_Format(PyExc_TypeError,
|
||||
"Couldn't build proto file into descriptor pool!\n%s",
|
||||
error_collector.error_message.c_str());
|
||||
return NULL;
|
||||
}
|
||||
|
||||
Py_RETURN_NONE;
|
||||
}
|
||||
|
||||
bool InitDescriptor() {
|
||||
CFieldDescriptor_Type.tp_new = PyType_GenericNew;
|
||||
if (PyType_Ready(&CFieldDescriptor_Type) < 0)
|
||||
return false;
|
||||
|
||||
CDescriptorPool_Type.tp_new = PyType_GenericNew;
|
||||
if (PyType_Ready(&CDescriptorPool_Type) < 0)
|
||||
return false;
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
} // namespace python
|
||||
} // namespace protobuf
|
||||
} // namespace google
|
||||
@@ -0,0 +1,96 @@
|
||||
// Protocol Buffers - Google's data interchange format
|
||||
// Copyright 2008 Google Inc. All rights reserved.
|
||||
// https://developers.google.com/protocol-buffers/
|
||||
//
|
||||
// Redistribution and use in source and binary forms, with or without
|
||||
// modification, are permitted provided that the following conditions are
|
||||
// met:
|
||||
//
|
||||
// * Redistributions of source code must retain the above copyright
|
||||
// notice, this list of conditions and the following disclaimer.
|
||||
// * Redistributions in binary form must reproduce the above
|
||||
// copyright notice, this list of conditions and the following disclaimer
|
||||
// in the documentation and/or other materials provided with the
|
||||
// distribution.
|
||||
// * Neither the name of Google Inc. nor the names of its
|
||||
// contributors may be used to endorse or promote products derived from
|
||||
// this software without specific prior written permission.
|
||||
//
|
||||
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
||||
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
||||
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
||||
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
||||
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
||||
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
||||
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
||||
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
||||
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
||||
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
// Author: petar@google.com (Petar Petrov)
|
||||
|
||||
#ifndef GOOGLE_PROTOBUF_PYTHON_CPP_DESCRIPTOR_H__
|
||||
#define GOOGLE_PROTOBUF_PYTHON_CPP_DESCRIPTOR_H__
|
||||
|
||||
#include <Python.h>
|
||||
#include <structmember.h>
|
||||
|
||||
#include <google/protobuf/descriptor.h>
|
||||
|
||||
#if PY_VERSION_HEX < 0x02050000 && !defined(PY_SSIZE_T_MIN)
|
||||
typedef int Py_ssize_t;
|
||||
#define PY_SSIZE_T_MAX INT_MAX
|
||||
#define PY_SSIZE_T_MIN INT_MIN
|
||||
#endif
|
||||
|
||||
namespace google {
|
||||
namespace protobuf {
|
||||
namespace python {
|
||||
|
||||
typedef struct CFieldDescriptor {
|
||||
PyObject_HEAD
|
||||
|
||||
// The proto2 descriptor that this object represents.
|
||||
const google::protobuf::FieldDescriptor* descriptor;
|
||||
|
||||
// Reference to the original field object in the Python DESCRIPTOR.
|
||||
PyObject* descriptor_field;
|
||||
} CFieldDescriptor;
|
||||
|
||||
typedef struct {
|
||||
PyObject_HEAD
|
||||
|
||||
const google::protobuf::DescriptorPool* pool;
|
||||
} CDescriptorPool;
|
||||
|
||||
extern PyTypeObject CFieldDescriptor_Type;
|
||||
|
||||
extern PyTypeObject CDescriptorPool_Type;
|
||||
|
||||
namespace cdescriptor_pool {
|
||||
|
||||
// Looks up a field by name. Returns a CDescriptor corresponding to
|
||||
// the field on success, or NULL on failure.
|
||||
//
|
||||
// Returns a new reference.
|
||||
PyObject* FindFieldByName(CDescriptorPool* self, PyObject* name);
|
||||
|
||||
// Looks up an extension by name. Returns a CDescriptor corresponding
|
||||
// to the field on success, or NULL on failure.
|
||||
//
|
||||
// Returns a new reference.
|
||||
PyObject* FindExtensionByName(CDescriptorPool* self, PyObject* arg);
|
||||
|
||||
} // namespace cdescriptor_pool
|
||||
|
||||
PyObject* Python_NewCDescriptorPool(PyObject* ignored, PyObject* args);
|
||||
PyObject* Python_BuildFile(PyObject* ignored, PyObject* args);
|
||||
bool InitDescriptor();
|
||||
google::protobuf::DescriptorPool* GetDescriptorPool();
|
||||
|
||||
} // namespace python
|
||||
} // namespace protobuf
|
||||
|
||||
} // namespace google
|
||||
#endif // GOOGLE_PROTOBUF_PYTHON_CPP_DESCRIPTOR_H__
|
||||
@@ -0,0 +1,58 @@
|
||||
#! /usr/bin/python
|
||||
#
|
||||
# Protocol Buffers - Google's data interchange format
|
||||
# Copyright 2008 Google Inc. All rights reserved.
|
||||
# https://developers.google.com/protocol-buffers/
|
||||
#
|
||||
# Redistribution and use in source and binary forms, with or without
|
||||
# modification, are permitted provided that the following conditions are
|
||||
# met:
|
||||
#
|
||||
# * Redistributions of source code must retain the above copyright
|
||||
# notice, this list of conditions and the following disclaimer.
|
||||
# * Redistributions in binary form must reproduce the above
|
||||
# copyright notice, this list of conditions and the following disclaimer
|
||||
# in the documentation and/or other materials provided with the
|
||||
# distribution.
|
||||
# * Neither the name of Google Inc. nor the names of its
|
||||
# contributors may be used to endorse or promote products derived from
|
||||
# this software without specific prior written permission.
|
||||
#
|
||||
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
||||
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
||||
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
||||
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
||||
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
||||
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
||||
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
||||
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
||||
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
||||
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
"""Tests for google.protobuf.pyext behavior."""
|
||||
|
||||
__author__ = 'anuraag@google.com (Anuraag Agrawal)'
|
||||
|
||||
import os
|
||||
os.environ['PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION'] = 'cpp'
|
||||
os.environ['PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION_VERSION'] = '2'
|
||||
|
||||
# We must set the implementation version above before the google3 imports.
|
||||
# pylint: disable=g-import-not-at-top
|
||||
from google.apputils import basetest
|
||||
from google.protobuf.internal import api_implementation
|
||||
# Run all tests from the original module by putting them in our namespace.
|
||||
# pylint: disable=wildcard-import
|
||||
from google.protobuf.internal.descriptor_test import *
|
||||
|
||||
|
||||
class ConfirmCppApi2Test(basetest.TestCase):
|
||||
|
||||
def testImplementationSetting(self):
|
||||
self.assertEqual('cpp', api_implementation.Type())
|
||||
self.assertEqual(2, api_implementation.Version())
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
basetest.main()
|
||||
@@ -0,0 +1,338 @@
|
||||
// Protocol Buffers - Google's data interchange format
|
||||
// Copyright 2008 Google Inc. All rights reserved.
|
||||
// https://developers.google.com/protocol-buffers/
|
||||
//
|
||||
// Redistribution and use in source and binary forms, with or without
|
||||
// modification, are permitted provided that the following conditions are
|
||||
// met:
|
||||
//
|
||||
// * Redistributions of source code must retain the above copyright
|
||||
// notice, this list of conditions and the following disclaimer.
|
||||
// * Redistributions in binary form must reproduce the above
|
||||
// copyright notice, this list of conditions and the following disclaimer
|
||||
// in the documentation and/or other materials provided with the
|
||||
// distribution.
|
||||
// * Neither the name of Google Inc. nor the names of its
|
||||
// contributors may be used to endorse or promote products derived from
|
||||
// this software without specific prior written permission.
|
||||
//
|
||||
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
||||
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
||||
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
||||
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
||||
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
||||
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
||||
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
||||
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
||||
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
||||
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
// Author: anuraag@google.com (Anuraag Agrawal)
|
||||
// Author: tibell@google.com (Johan Tibell)
|
||||
|
||||
#include <google/protobuf/pyext/extension_dict.h>
|
||||
|
||||
#include <google/protobuf/stubs/common.h>
|
||||
#include <google/protobuf/descriptor.h>
|
||||
#include <google/protobuf/dynamic_message.h>
|
||||
#include <google/protobuf/message.h>
|
||||
#include <google/protobuf/pyext/descriptor.h>
|
||||
#include <google/protobuf/pyext/message.h>
|
||||
#include <google/protobuf/pyext/repeated_composite_container.h>
|
||||
#include <google/protobuf/pyext/repeated_scalar_container.h>
|
||||
#include <google/protobuf/pyext/scoped_pyobject_ptr.h>
|
||||
#include <google/protobuf/stubs/shared_ptr.h>
|
||||
|
||||
namespace google {
|
||||
namespace protobuf {
|
||||
namespace python {
|
||||
|
||||
extern google::protobuf::DynamicMessageFactory* global_message_factory;
|
||||
|
||||
namespace extension_dict {
|
||||
|
||||
// TODO(tibell): Always use self->message for clarity, just like in
|
||||
// RepeatedCompositeContainer.
|
||||
static google::protobuf::Message* GetMessage(ExtensionDict* self) {
|
||||
if (self->parent != NULL) {
|
||||
return self->parent->message;
|
||||
} else {
|
||||
return self->message;
|
||||
}
|
||||
}
|
||||
|
||||
CFieldDescriptor* InternalGetCDescriptorFromExtension(PyObject* extension) {
|
||||
PyObject* cdescriptor = PyObject_GetAttrString(extension, "_cdescriptor");
|
||||
if (cdescriptor == NULL) {
|
||||
PyErr_SetString(PyExc_KeyError, "Unregistered extension.");
|
||||
return NULL;
|
||||
}
|
||||
if (!PyObject_TypeCheck(cdescriptor, &CFieldDescriptor_Type)) {
|
||||
PyErr_SetString(PyExc_TypeError, "Not a CFieldDescriptor");
|
||||
Py_DECREF(cdescriptor);
|
||||
return NULL;
|
||||
}
|
||||
CFieldDescriptor* descriptor =
|
||||
reinterpret_cast<CFieldDescriptor*>(cdescriptor);
|
||||
return descriptor;
|
||||
}
|
||||
|
||||
PyObject* len(ExtensionDict* self) {
|
||||
#if PY_MAJOR_VERSION >= 3
|
||||
return PyLong_FromLong(PyDict_Size(self->values));
|
||||
#else
|
||||
return PyInt_FromLong(PyDict_Size(self->values));
|
||||
#endif
|
||||
}
|
||||
|
||||
// TODO(tibell): Use VisitCompositeField.
|
||||
int ReleaseExtension(ExtensionDict* self,
|
||||
PyObject* extension,
|
||||
const google::protobuf::FieldDescriptor* descriptor) {
|
||||
if (descriptor->label() == google::protobuf::FieldDescriptor::LABEL_REPEATED) {
|
||||
if (descriptor->cpp_type() ==
|
||||
google::protobuf::FieldDescriptor::CPPTYPE_MESSAGE) {
|
||||
if (repeated_composite_container::Release(
|
||||
reinterpret_cast<RepeatedCompositeContainer*>(
|
||||
extension)) < 0) {
|
||||
return -1;
|
||||
}
|
||||
} else {
|
||||
if (repeated_scalar_container::Release(
|
||||
reinterpret_cast<RepeatedScalarContainer*>(
|
||||
extension)) < 0) {
|
||||
return -1;
|
||||
}
|
||||
}
|
||||
} else if (descriptor->cpp_type() ==
|
||||
google::protobuf::FieldDescriptor::CPPTYPE_MESSAGE) {
|
||||
if (cmessage::ReleaseSubMessage(
|
||||
GetMessage(self), descriptor,
|
||||
reinterpret_cast<CMessage*>(extension)) < 0) {
|
||||
return -1;
|
||||
}
|
||||
}
|
||||
|
||||
return 0;
|
||||
}
|
||||
|
||||
PyObject* subscript(ExtensionDict* self, PyObject* key) {
|
||||
CFieldDescriptor* cdescriptor = InternalGetCDescriptorFromExtension(
|
||||
key);
|
||||
if (cdescriptor == NULL) {
|
||||
return NULL;
|
||||
}
|
||||
ScopedPyObjectPtr py_cdescriptor(reinterpret_cast<PyObject*>(cdescriptor));
|
||||
const google::protobuf::FieldDescriptor* descriptor = cdescriptor->descriptor;
|
||||
if (descriptor == NULL) {
|
||||
return NULL;
|
||||
}
|
||||
if (descriptor->label() != FieldDescriptor::LABEL_REPEATED &&
|
||||
descriptor->cpp_type() != FieldDescriptor::CPPTYPE_MESSAGE) {
|
||||
return cmessage::InternalGetScalar(self->parent, descriptor);
|
||||
}
|
||||
|
||||
PyObject* value = PyDict_GetItem(self->values, key);
|
||||
if (value != NULL) {
|
||||
Py_INCREF(value);
|
||||
return value;
|
||||
}
|
||||
|
||||
if (descriptor->label() != FieldDescriptor::LABEL_REPEATED &&
|
||||
descriptor->cpp_type() == FieldDescriptor::CPPTYPE_MESSAGE) {
|
||||
PyObject* sub_message = cmessage::InternalGetSubMessage(
|
||||
self->parent, cdescriptor);
|
||||
if (sub_message == NULL) {
|
||||
return NULL;
|
||||
}
|
||||
PyDict_SetItem(self->values, key, sub_message);
|
||||
return sub_message;
|
||||
}
|
||||
|
||||
if (descriptor->label() == FieldDescriptor::LABEL_REPEATED) {
|
||||
if (descriptor->cpp_type() == FieldDescriptor::CPPTYPE_MESSAGE) {
|
||||
// COPIED
|
||||
PyObject* py_container = PyObject_CallObject(
|
||||
reinterpret_cast<PyObject*>(&RepeatedCompositeContainer_Type),
|
||||
NULL);
|
||||
if (py_container == NULL) {
|
||||
return NULL;
|
||||
}
|
||||
RepeatedCompositeContainer* container =
|
||||
reinterpret_cast<RepeatedCompositeContainer*>(py_container);
|
||||
PyObject* field = cdescriptor->descriptor_field;
|
||||
PyObject* message_type = PyObject_GetAttrString(field, "message_type");
|
||||
PyObject* concrete_class = PyObject_GetAttrString(message_type,
|
||||
"_concrete_class");
|
||||
container->owner = self->owner;
|
||||
container->parent = self->parent;
|
||||
container->message = self->parent->message;
|
||||
container->parent_field = cdescriptor;
|
||||
container->subclass_init = concrete_class;
|
||||
Py_DECREF(message_type);
|
||||
PyDict_SetItem(self->values, key, py_container);
|
||||
return py_container;
|
||||
} else {
|
||||
// COPIED
|
||||
ScopedPyObjectPtr init_args(PyTuple_Pack(2, self->parent, cdescriptor));
|
||||
PyObject* py_container = PyObject_CallObject(
|
||||
reinterpret_cast<PyObject*>(&RepeatedScalarContainer_Type),
|
||||
init_args);
|
||||
if (py_container == NULL) {
|
||||
return NULL;
|
||||
}
|
||||
PyDict_SetItem(self->values, key, py_container);
|
||||
return py_container;
|
||||
}
|
||||
}
|
||||
PyErr_SetString(PyExc_ValueError, "control reached unexpected line");
|
||||
return NULL;
|
||||
}
|
||||
|
||||
int ass_subscript(ExtensionDict* self, PyObject* key, PyObject* value) {
|
||||
CFieldDescriptor* cdescriptor = InternalGetCDescriptorFromExtension(
|
||||
key);
|
||||
if (cdescriptor == NULL) {
|
||||
return -1;
|
||||
}
|
||||
ScopedPyObjectPtr py_cdescriptor(reinterpret_cast<PyObject*>(cdescriptor));
|
||||
const google::protobuf::FieldDescriptor* descriptor = cdescriptor->descriptor;
|
||||
if (descriptor->label() != FieldDescriptor::LABEL_OPTIONAL ||
|
||||
descriptor->cpp_type() == FieldDescriptor::CPPTYPE_MESSAGE) {
|
||||
PyErr_SetString(PyExc_TypeError, "Extension is repeated and/or composite "
|
||||
"type");
|
||||
return -1;
|
||||
}
|
||||
cmessage::AssureWritable(self->parent);
|
||||
if (cmessage::InternalSetScalar(self->parent, descriptor, value) < 0) {
|
||||
return -1;
|
||||
}
|
||||
// TODO(tibell): We shouldn't write scalars to the cache.
|
||||
PyDict_SetItem(self->values, key, value);
|
||||
return 0;
|
||||
}
|
||||
|
||||
PyObject* ClearExtension(ExtensionDict* self, PyObject* extension) {
|
||||
CFieldDescriptor* cdescriptor = InternalGetCDescriptorFromExtension(
|
||||
extension);
|
||||
if (cdescriptor == NULL) {
|
||||
return NULL;
|
||||
}
|
||||
ScopedPyObjectPtr py_cdescriptor(reinterpret_cast<PyObject*>(cdescriptor));
|
||||
PyObject* value = PyDict_GetItem(self->values, extension);
|
||||
if (value != NULL) {
|
||||
if (ReleaseExtension(self, value, cdescriptor->descriptor) < 0) {
|
||||
return NULL;
|
||||
}
|
||||
}
|
||||
if (cmessage::ClearFieldByDescriptor(self->parent,
|
||||
cdescriptor->descriptor) == NULL) {
|
||||
return NULL;
|
||||
}
|
||||
if (PyDict_DelItem(self->values, extension) < 0) {
|
||||
PyErr_Clear();
|
||||
}
|
||||
Py_RETURN_NONE;
|
||||
}
|
||||
|
||||
PyObject* HasExtension(ExtensionDict* self, PyObject* extension) {
|
||||
CFieldDescriptor* cdescriptor = InternalGetCDescriptorFromExtension(
|
||||
extension);
|
||||
if (cdescriptor == NULL) {
|
||||
return NULL;
|
||||
}
|
||||
ScopedPyObjectPtr py_cdescriptor(reinterpret_cast<PyObject*>(cdescriptor));
|
||||
PyObject* result = cmessage::HasFieldByDescriptor(
|
||||
self->parent, cdescriptor->descriptor);
|
||||
return result;
|
||||
}
|
||||
|
||||
PyObject* _FindExtensionByName(ExtensionDict* self, PyObject* name) {
|
||||
ScopedPyObjectPtr extensions_by_name(PyObject_GetAttrString(
|
||||
reinterpret_cast<PyObject*>(self->parent), "_extensions_by_name"));
|
||||
if (extensions_by_name == NULL) {
|
||||
return NULL;
|
||||
}
|
||||
PyObject* result = PyDict_GetItem(extensions_by_name, name);
|
||||
if (result == NULL) {
|
||||
Py_RETURN_NONE;
|
||||
} else {
|
||||
Py_INCREF(result);
|
||||
return result;
|
||||
}
|
||||
}
|
||||
|
||||
int init(ExtensionDict* self, PyObject* args, PyObject* kwargs) {
|
||||
self->parent = NULL;
|
||||
self->message = NULL;
|
||||
self->values = PyDict_New();
|
||||
return 0;
|
||||
}
|
||||
|
||||
void dealloc(ExtensionDict* self) {
|
||||
Py_CLEAR(self->values);
|
||||
self->owner.reset();
|
||||
Py_TYPE(self)->tp_free(reinterpret_cast<PyObject*>(self));
|
||||
}
|
||||
|
||||
static PyMappingMethods MpMethods = {
|
||||
(lenfunc)len, /* mp_length */
|
||||
(binaryfunc)subscript, /* mp_subscript */
|
||||
(objobjargproc)ass_subscript,/* mp_ass_subscript */
|
||||
};
|
||||
|
||||
#define EDMETHOD(name, args, doc) { #name, (PyCFunction)name, args, doc }
|
||||
static PyMethodDef Methods[] = {
|
||||
EDMETHOD(ClearExtension, METH_O, "Clears an extension from the object."),
|
||||
EDMETHOD(HasExtension, METH_O, "Checks if the object has an extension."),
|
||||
EDMETHOD(_FindExtensionByName, METH_O,
|
||||
"Finds an extension by name."),
|
||||
{ NULL, NULL }
|
||||
};
|
||||
|
||||
} // namespace extension_dict
|
||||
|
||||
PyTypeObject ExtensionDict_Type = {
|
||||
PyVarObject_HEAD_INIT(&PyType_Type, 0)
|
||||
"google.protobuf.internal."
|
||||
"cpp._message.ExtensionDict", // tp_name
|
||||
sizeof(ExtensionDict), // tp_basicsize
|
||||
0, // tp_itemsize
|
||||
(destructor)extension_dict::dealloc, // tp_dealloc
|
||||
0, // tp_print
|
||||
0, // tp_getattr
|
||||
0, // tp_setattr
|
||||
0, // tp_compare
|
||||
0, // tp_repr
|
||||
0, // tp_as_number
|
||||
0, // tp_as_sequence
|
||||
&extension_dict::MpMethods, // tp_as_mapping
|
||||
0, // tp_hash
|
||||
0, // tp_call
|
||||
0, // tp_str
|
||||
0, // tp_getattro
|
||||
0, // tp_setattro
|
||||
0, // tp_as_buffer
|
||||
Py_TPFLAGS_DEFAULT, // tp_flags
|
||||
"An extension dict", // tp_doc
|
||||
0, // tp_traverse
|
||||
0, // tp_clear
|
||||
0, // tp_richcompare
|
||||
0, // tp_weaklistoffset
|
||||
0, // tp_iter
|
||||
0, // tp_iternext
|
||||
extension_dict::Methods, // tp_methods
|
||||
0, // tp_members
|
||||
0, // tp_getset
|
||||
0, // tp_base
|
||||
0, // tp_dict
|
||||
0, // tp_descr_get
|
||||
0, // tp_descr_set
|
||||
0, // tp_dictoffset
|
||||
(initproc)extension_dict::init, // tp_init
|
||||
};
|
||||
|
||||
} // namespace python
|
||||
} // namespace protobuf
|
||||
} // namespace google
|
||||
@@ -0,0 +1,123 @@
|
||||
// Protocol Buffers - Google's data interchange format
|
||||
// Copyright 2008 Google Inc. All rights reserved.
|
||||
// https://developers.google.com/protocol-buffers/
|
||||
//
|
||||
// Redistribution and use in source and binary forms, with or without
|
||||
// modification, are permitted provided that the following conditions are
|
||||
// met:
|
||||
//
|
||||
// * Redistributions of source code must retain the above copyright
|
||||
// notice, this list of conditions and the following disclaimer.
|
||||
// * Redistributions in binary form must reproduce the above
|
||||
// copyright notice, this list of conditions and the following disclaimer
|
||||
// in the documentation and/or other materials provided with the
|
||||
// distribution.
|
||||
// * Neither the name of Google Inc. nor the names of its
|
||||
// contributors may be used to endorse or promote products derived from
|
||||
// this software without specific prior written permission.
|
||||
//
|
||||
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
||||
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
||||
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
||||
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
||||
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
||||
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
||||
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
||||
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
||||
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
||||
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
// Author: anuraag@google.com (Anuraag Agrawal)
|
||||
// Author: tibell@google.com (Johan Tibell)
|
||||
|
||||
#ifndef GOOGLE_PROTOBUF_PYTHON_CPP_EXTENSION_DICT_H__
|
||||
#define GOOGLE_PROTOBUF_PYTHON_CPP_EXTENSION_DICT_H__
|
||||
|
||||
#include <Python.h>
|
||||
|
||||
#include <memory>
|
||||
#ifndef _SHARED_PTR_H
|
||||
#include <google/protobuf/stubs/shared_ptr.h>
|
||||
#endif
|
||||
|
||||
|
||||
namespace google {
|
||||
namespace protobuf {
|
||||
|
||||
class Message;
|
||||
class FieldDescriptor;
|
||||
|
||||
using internal::shared_ptr;
|
||||
|
||||
namespace python {
|
||||
|
||||
struct CMessage;
|
||||
struct CFieldDescriptor;
|
||||
|
||||
typedef struct ExtensionDict {
|
||||
PyObject_HEAD;
|
||||
shared_ptr<Message> owner;
|
||||
CMessage* parent;
|
||||
Message* message;
|
||||
PyObject* values;
|
||||
} ExtensionDict;
|
||||
|
||||
extern PyTypeObject ExtensionDict_Type;
|
||||
|
||||
namespace extension_dict {
|
||||
|
||||
// Gets the _cdescriptor reference to a CFieldDescriptor object given a
|
||||
// python descriptor object.
|
||||
//
|
||||
// Returns a new reference.
|
||||
CFieldDescriptor* InternalGetCDescriptorFromExtension(PyObject* extension);
|
||||
|
||||
// Gets the number of extension values in this ExtensionDict as a python object.
|
||||
//
|
||||
// Returns a new reference.
|
||||
PyObject* len(ExtensionDict* self);
|
||||
|
||||
// Releases extensions referenced outside this dictionary to keep outside
|
||||
// references alive.
|
||||
//
|
||||
// Returns 0 on success, -1 on failure.
|
||||
int ReleaseExtension(ExtensionDict* self,
|
||||
PyObject* extension,
|
||||
const google::protobuf::FieldDescriptor* descriptor);
|
||||
|
||||
// Gets an extension from the dict for the given extension descriptor.
|
||||
//
|
||||
// Returns a new reference.
|
||||
PyObject* subscript(ExtensionDict* self, PyObject* key);
|
||||
|
||||
// Assigns a value to an extension in the dict. Can only be used for singular
|
||||
// simple types.
|
||||
//
|
||||
// Returns 0 on success, -1 on failure.
|
||||
int ass_subscript(ExtensionDict* self, PyObject* key, PyObject* value);
|
||||
|
||||
// Clears an extension from the dict. Will release the extension if there
|
||||
// is still an external reference left to it.
|
||||
//
|
||||
// Returns None on success.
|
||||
PyObject* ClearExtension(ExtensionDict* self,
|
||||
PyObject* extension);
|
||||
|
||||
// Checks if the dict has an extension.
|
||||
//
|
||||
// Returns a new python boolean reference.
|
||||
PyObject* HasExtension(ExtensionDict* self, PyObject* extension);
|
||||
|
||||
// Gets an extension from the dict given the extension name as opposed to
|
||||
// descriptor.
|
||||
//
|
||||
// Returns a new reference.
|
||||
PyObject* _FindExtensionByName(ExtensionDict* self, PyObject* name);
|
||||
|
||||
} // namespace extension_dict
|
||||
} // namespace python
|
||||
} // namespace protobuf
|
||||
|
||||
} // namespace google
|
||||
#endif // GOOGLE_PROTOBUF_PYTHON_CPP_EXTENSION_DICT_H__
|
||||
2561
csgo2/sdk/protobuf-2.6.1/python/google/protobuf/pyext/message.cc
Normal file
2561
csgo2/sdk/protobuf-2.6.1/python/google/protobuf/pyext/message.cc
Normal file
File diff suppressed because it is too large
Load Diff
305
csgo2/sdk/protobuf-2.6.1/python/google/protobuf/pyext/message.h
Normal file
305
csgo2/sdk/protobuf-2.6.1/python/google/protobuf/pyext/message.h
Normal file
@@ -0,0 +1,305 @@
|
||||
// Protocol Buffers - Google's data interchange format
|
||||
// Copyright 2008 Google Inc. All rights reserved.
|
||||
// https://developers.google.com/protocol-buffers/
|
||||
//
|
||||
// Redistribution and use in source and binary forms, with or without
|
||||
// modification, are permitted provided that the following conditions are
|
||||
// met:
|
||||
//
|
||||
// * Redistributions of source code must retain the above copyright
|
||||
// notice, this list of conditions and the following disclaimer.
|
||||
// * Redistributions in binary form must reproduce the above
|
||||
// copyright notice, this list of conditions and the following disclaimer
|
||||
// in the documentation and/or other materials provided with the
|
||||
// distribution.
|
||||
// * Neither the name of Google Inc. nor the names of its
|
||||
// contributors may be used to endorse or promote products derived from
|
||||
// this software without specific prior written permission.
|
||||
//
|
||||
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
||||
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
||||
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
||||
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
||||
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
||||
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
||||
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
||||
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
||||
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
||||
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
// Author: anuraag@google.com (Anuraag Agrawal)
|
||||
// Author: tibell@google.com (Johan Tibell)
|
||||
|
||||
#ifndef GOOGLE_PROTOBUF_PYTHON_CPP_MESSAGE_H__
|
||||
#define GOOGLE_PROTOBUF_PYTHON_CPP_MESSAGE_H__
|
||||
|
||||
#include <Python.h>
|
||||
|
||||
#include <memory>
|
||||
#ifndef _SHARED_PTR_H
|
||||
#include <google/protobuf/stubs/shared_ptr.h>
|
||||
#endif
|
||||
#include <string>
|
||||
|
||||
|
||||
namespace google {
|
||||
namespace protobuf {
|
||||
|
||||
class Message;
|
||||
class Reflection;
|
||||
class FieldDescriptor;
|
||||
|
||||
using internal::shared_ptr;
|
||||
|
||||
namespace python {
|
||||
|
||||
struct CFieldDescriptor;
|
||||
struct ExtensionDict;
|
||||
|
||||
typedef struct CMessage {
|
||||
PyObject_HEAD;
|
||||
|
||||
// This is the top-level C++ Message object that owns the whole
|
||||
// proto tree. Every Python CMessage holds a reference to it in
|
||||
// order to keep it alive as long as there's a Python object that
|
||||
// references any part of the tree.
|
||||
shared_ptr<Message> owner;
|
||||
|
||||
// Weak reference to a parent CMessage object. This is NULL for any top-level
|
||||
// message and is set for any child message (i.e. a child submessage or a
|
||||
// part of a repeated composite field).
|
||||
//
|
||||
// Used to make sure all ancestors are also mutable when first modifying
|
||||
// a child submessage (in other words, turning a default message instance
|
||||
// into a mutable one).
|
||||
//
|
||||
// If a submessage is released (becomes a new top-level message), this field
|
||||
// MUST be set to NULL. The parent may get deallocated and further attempts
|
||||
// to use this pointer will result in a crash.
|
||||
struct CMessage* parent;
|
||||
|
||||
// Weak reference to the parent's descriptor that describes this submessage.
|
||||
// Used together with the parent's message when making a default message
|
||||
// instance mutable.
|
||||
// TODO(anuraag): With a bit of work on the Python/C++ layer, it should be
|
||||
// possible to make this a direct pointer to a C++ FieldDescriptor, this would
|
||||
// be easier if this implementation replaces upstream.
|
||||
CFieldDescriptor* parent_field;
|
||||
|
||||
// Pointer to the C++ Message object for this CMessage. The
|
||||
// CMessage does not own this pointer.
|
||||
Message* message;
|
||||
|
||||
// Indicates this submessage is pointing to a default instance of a message.
|
||||
// Submessages are always first created as read only messages and are then
|
||||
// made writable, at which point this field is set to false.
|
||||
bool read_only;
|
||||
|
||||
// A reference to a Python dictionary containing CMessage,
|
||||
// RepeatedCompositeContainer, and RepeatedScalarContainer
|
||||
// objects. Used as a cache to make sure we don't have to make a
|
||||
// Python wrapper for the C++ Message objects on every access, or
|
||||
// deal with the synchronization nightmare that could create.
|
||||
PyObject* composite_fields;
|
||||
|
||||
// A reference to the dictionary containing the message's extensions.
|
||||
// Similar to composite_fields, acting as a cache, but also contains the
|
||||
// required extension dict logic.
|
||||
ExtensionDict* extensions;
|
||||
} CMessage;
|
||||
|
||||
extern PyTypeObject CMessage_Type;
|
||||
|
||||
namespace cmessage {
|
||||
|
||||
// Create a new empty message that can be populated by the parent.
|
||||
PyObject* NewEmpty(PyObject* type);
|
||||
|
||||
// Release a submessage from its proto tree, making it a new top-level messgae.
|
||||
// A new message will be created if this is a read-only default instance.
|
||||
//
|
||||
// Corresponds to reflection api method ReleaseMessage.
|
||||
int ReleaseSubMessage(google::protobuf::Message* message,
|
||||
const google::protobuf::FieldDescriptor* field_descriptor,
|
||||
CMessage* child_cmessage);
|
||||
|
||||
// Initializes a new CMessage instance for a submessage. Only called once per
|
||||
// submessage as the result is cached in composite_fields.
|
||||
//
|
||||
// Corresponds to reflection api method GetMessage.
|
||||
PyObject* InternalGetSubMessage(CMessage* self,
|
||||
CFieldDescriptor* cfield_descriptor);
|
||||
|
||||
// Deletes a range of C++ submessages in a repeated field (following a
|
||||
// removal in a RepeatedCompositeContainer).
|
||||
//
|
||||
// Releases messages to the provided cmessage_list if it is not NULL rather
|
||||
// than just removing them from the underlying proto. This cmessage_list must
|
||||
// have a CMessage for each underlying submessage. The CMessages refered to
|
||||
// by slice will be removed from cmessage_list by this function.
|
||||
//
|
||||
// Corresponds to reflection api method RemoveLast.
|
||||
int InternalDeleteRepeatedField(google::protobuf::Message* message,
|
||||
const google::protobuf::FieldDescriptor* field_descriptor,
|
||||
PyObject* slice, PyObject* cmessage_list);
|
||||
|
||||
// Sets the specified scalar value to the message.
|
||||
int InternalSetScalar(CMessage* self,
|
||||
const google::protobuf::FieldDescriptor* field_descriptor,
|
||||
PyObject* value);
|
||||
|
||||
// Retrieves the specified scalar value from the message.
|
||||
//
|
||||
// Returns a new python reference.
|
||||
PyObject* InternalGetScalar(CMessage* self,
|
||||
const google::protobuf::FieldDescriptor* field_descriptor);
|
||||
|
||||
// Clears the message, removing all contained data. Extension dictionary and
|
||||
// submessages are released first if there are remaining external references.
|
||||
//
|
||||
// Corresponds to message api method Clear.
|
||||
PyObject* Clear(CMessage* self);
|
||||
|
||||
// Clears the data described by the given descriptor. Used to clear extensions
|
||||
// (which don't have names). Extension release is handled by ExtensionDict
|
||||
// class, not this function.
|
||||
// TODO(anuraag): Try to make this discrepancy in release semantics with
|
||||
// ClearField less confusing.
|
||||
//
|
||||
// Corresponds to reflection api method ClearField.
|
||||
PyObject* ClearFieldByDescriptor(
|
||||
CMessage* self,
|
||||
const google::protobuf::FieldDescriptor* descriptor);
|
||||
|
||||
// Clears the data for the given field name. The message is released if there
|
||||
// are any external references.
|
||||
//
|
||||
// Corresponds to reflection api method ClearField.
|
||||
PyObject* ClearField(CMessage* self, PyObject* arg);
|
||||
|
||||
// Checks if the message has the field described by the descriptor. Used for
|
||||
// extensions (which have no name).
|
||||
//
|
||||
// Corresponds to reflection api method HasField
|
||||
PyObject* HasFieldByDescriptor(
|
||||
CMessage* self, const google::protobuf::FieldDescriptor* field_descriptor);
|
||||
|
||||
// Checks if the message has the named field.
|
||||
//
|
||||
// Corresponds to reflection api method HasField.
|
||||
PyObject* HasField(CMessage* self, PyObject* arg);
|
||||
|
||||
// Initializes constants/enum values on a message. This is called by
|
||||
// RepeatedCompositeContainer and ExtensionDict after calling the constructor.
|
||||
// TODO(anuraag): Make it always called from within the constructor since it can
|
||||
int InitAttributes(CMessage* self, PyObject* descriptor, PyObject* kwargs);
|
||||
|
||||
PyObject* MergeFrom(CMessage* self, PyObject* arg);
|
||||
|
||||
// Retrieves an attribute named 'name' from CMessage 'self'. Returns
|
||||
// the attribute value on success, or NULL on failure.
|
||||
//
|
||||
// Returns a new reference.
|
||||
PyObject* GetAttr(CMessage* self, PyObject* name);
|
||||
|
||||
// Set the value of the attribute named 'name', for CMessage 'self',
|
||||
// to the value 'value'. Returns -1 on failure.
|
||||
int SetAttr(CMessage* self, PyObject* name, PyObject* value);
|
||||
|
||||
PyObject* FindInitializationErrors(CMessage* self);
|
||||
|
||||
// Set the owner field of self and any children of self, recursively.
|
||||
// Used when self is being released and thus has a new owner (the
|
||||
// released Message.)
|
||||
int SetOwner(CMessage* self, const shared_ptr<Message>& new_owner);
|
||||
|
||||
int AssureWritable(CMessage* self);
|
||||
|
||||
} // namespace cmessage
|
||||
|
||||
/* Is 64bit */
|
||||
#define IS_64BIT (SIZEOF_LONG == 8)
|
||||
|
||||
#define FIELD_BELONGS_TO_MESSAGE(field_descriptor, message) \
|
||||
((message)->GetDescriptor() == (field_descriptor)->containing_type())
|
||||
|
||||
#define FIELD_IS_REPEATED(field_descriptor) \
|
||||
((field_descriptor)->label() == google::protobuf::FieldDescriptor::LABEL_REPEATED)
|
||||
|
||||
#define GOOGLE_CHECK_GET_INT32(arg, value, err) \
|
||||
int32 value; \
|
||||
if (!CheckAndGetInteger(arg, &value, kint32min_py, kint32max_py)) { \
|
||||
return err; \
|
||||
}
|
||||
|
||||
#define GOOGLE_CHECK_GET_INT64(arg, value, err) \
|
||||
int64 value; \
|
||||
if (!CheckAndGetInteger(arg, &value, kint64min_py, kint64max_py)) { \
|
||||
return err; \
|
||||
}
|
||||
|
||||
#define GOOGLE_CHECK_GET_UINT32(arg, value, err) \
|
||||
uint32 value; \
|
||||
if (!CheckAndGetInteger(arg, &value, kPythonZero, kuint32max_py)) { \
|
||||
return err; \
|
||||
}
|
||||
|
||||
#define GOOGLE_CHECK_GET_UINT64(arg, value, err) \
|
||||
uint64 value; \
|
||||
if (!CheckAndGetInteger(arg, &value, kPythonZero, kuint64max_py)) { \
|
||||
return err; \
|
||||
}
|
||||
|
||||
#define GOOGLE_CHECK_GET_FLOAT(arg, value, err) \
|
||||
float value; \
|
||||
if (!CheckAndGetFloat(arg, &value)) { \
|
||||
return err; \
|
||||
} \
|
||||
|
||||
#define GOOGLE_CHECK_GET_DOUBLE(arg, value, err) \
|
||||
double value; \
|
||||
if (!CheckAndGetDouble(arg, &value)) { \
|
||||
return err; \
|
||||
}
|
||||
|
||||
#define GOOGLE_CHECK_GET_BOOL(arg, value, err) \
|
||||
bool value; \
|
||||
if (!CheckAndGetBool(arg, &value)) { \
|
||||
return err; \
|
||||
}
|
||||
|
||||
|
||||
extern PyObject* kPythonZero;
|
||||
extern PyObject* kint32min_py;
|
||||
extern PyObject* kint32max_py;
|
||||
extern PyObject* kuint32max_py;
|
||||
extern PyObject* kint64min_py;
|
||||
extern PyObject* kint64max_py;
|
||||
extern PyObject* kuint64max_py;
|
||||
|
||||
#define C(str) const_cast<char*>(str)
|
||||
|
||||
void FormatTypeError(PyObject* arg, char* expected_types);
|
||||
template<class T>
|
||||
bool CheckAndGetInteger(
|
||||
PyObject* arg, T* value, PyObject* min, PyObject* max);
|
||||
bool CheckAndGetDouble(PyObject* arg, double* value);
|
||||
bool CheckAndGetFloat(PyObject* arg, float* value);
|
||||
bool CheckAndGetBool(PyObject* arg, bool* value);
|
||||
bool CheckAndSetString(
|
||||
PyObject* arg, google::protobuf::Message* message,
|
||||
const google::protobuf::FieldDescriptor* descriptor,
|
||||
const google::protobuf::Reflection* reflection,
|
||||
bool append,
|
||||
int index);
|
||||
PyObject* ToStringObject(
|
||||
const google::protobuf::FieldDescriptor* descriptor, string value);
|
||||
|
||||
extern PyObject* PickleError_class;
|
||||
|
||||
} // namespace python
|
||||
} // namespace protobuf
|
||||
|
||||
} // namespace google
|
||||
#endif // GOOGLE_PROTOBUF_PYTHON_CPP_MESSAGE_H__
|
||||
@@ -0,0 +1,56 @@
|
||||
#! /usr/bin/python
|
||||
#
|
||||
# Protocol Buffers - Google's data interchange format
|
||||
# Copyright 2008 Google Inc. All rights reserved.
|
||||
# https://developers.google.com/protocol-buffers/
|
||||
#
|
||||
# Redistribution and use in source and binary forms, with or without
|
||||
# modification, are permitted provided that the following conditions are
|
||||
# met:
|
||||
#
|
||||
# * Redistributions of source code must retain the above copyright
|
||||
# notice, this list of conditions and the following disclaimer.
|
||||
# * Redistributions in binary form must reproduce the above
|
||||
# copyright notice, this list of conditions and the following disclaimer
|
||||
# in the documentation and/or other materials provided with the
|
||||
# distribution.
|
||||
# * Neither the name of Google Inc. nor the names of its
|
||||
# contributors may be used to endorse or promote products derived from
|
||||
# this software without specific prior written permission.
|
||||
#
|
||||
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
||||
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
||||
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
||||
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
||||
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
||||
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
||||
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
||||
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
||||
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
||||
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
"""Tests for google.protobuf.message_factory."""
|
||||
|
||||
import os
|
||||
os.environ['PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION'] = 'cpp'
|
||||
os.environ['PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION_VERSION'] = '2'
|
||||
|
||||
# We must set the implementation version above before the google3 imports.
|
||||
# pylint: disable=g-import-not-at-top
|
||||
from google.apputils import basetest
|
||||
from google.protobuf.internal import api_implementation
|
||||
# Run all tests from the original module by putting them in our namespace.
|
||||
# pylint: disable=wildcard-import
|
||||
from google.protobuf.internal.message_factory_test import *
|
||||
|
||||
|
||||
class ConfirmCppApi2Test(basetest.TestCase):
|
||||
|
||||
def testImplementationSetting(self):
|
||||
self.assertEqual('cpp', api_implementation.Type())
|
||||
self.assertEqual(2, api_implementation.Version())
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
basetest.main()
|
||||
@@ -0,0 +1,38 @@
|
||||
// Protocol Buffers - Google's data interchange format
|
||||
// Copyright 2008 Google Inc. All rights reserved.
|
||||
// https://developers.google.com/protocol-buffers/
|
||||
//
|
||||
// Redistribution and use in source and binary forms, with or without
|
||||
// modification, are permitted provided that the following conditions are
|
||||
// met:
|
||||
//
|
||||
// * Redistributions of source code must retain the above copyright
|
||||
// notice, this list of conditions and the following disclaimer.
|
||||
// * Redistributions in binary form must reproduce the above
|
||||
// copyright notice, this list of conditions and the following disclaimer
|
||||
// in the documentation and/or other materials provided with the
|
||||
// distribution.
|
||||
// * Neither the name of Google Inc. nor the names of its
|
||||
// contributors may be used to endorse or promote products derived from
|
||||
// this software without specific prior written permission.
|
||||
//
|
||||
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
||||
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
||||
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
||||
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
||||
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
||||
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
||||
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
||||
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
||||
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
||||
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
import "google/protobuf/internal/cpp/proto1_api_test.proto";
|
||||
|
||||
package google.protobuf.python.internal;
|
||||
|
||||
message TestNestedProto1APIMessage {
|
||||
optional int32 a = 1;
|
||||
optional TestMessage.NestedMessage b = 2;
|
||||
}
|
||||
@@ -0,0 +1,66 @@
|
||||
// Protocol Buffers - Google's data interchange format
|
||||
// Copyright 2008 Google Inc. All rights reserved.
|
||||
// https://developers.google.com/protocol-buffers/
|
||||
//
|
||||
// Redistribution and use in source and binary forms, with or without
|
||||
// modification, are permitted provided that the following conditions are
|
||||
// met:
|
||||
//
|
||||
// * Redistributions of source code must retain the above copyright
|
||||
// notice, this list of conditions and the following disclaimer.
|
||||
// * Redistributions in binary form must reproduce the above
|
||||
// copyright notice, this list of conditions and the following disclaimer
|
||||
// in the documentation and/or other materials provided with the
|
||||
// distribution.
|
||||
// * Neither the name of Google Inc. nor the names of its
|
||||
// contributors may be used to endorse or promote products derived from
|
||||
// this software without specific prior written permission.
|
||||
//
|
||||
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
||||
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
||||
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
||||
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
||||
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
||||
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
||||
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
||||
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
||||
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
||||
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
// Author: tibell@google.com (Johan Tibell)
|
||||
//
|
||||
// These message definitions are used to exercises known corner cases
|
||||
// in the C++ implementation of the Python API.
|
||||
|
||||
|
||||
package google.protobuf.python.internal;
|
||||
|
||||
// Protos optimized for SPEED use a strict superset of the generated code
|
||||
// of equivalent ones optimized for CODE_SIZE, so we should optimize all our
|
||||
// tests for speed unless explicitly testing code size optimization.
|
||||
option optimize_for = SPEED;
|
||||
|
||||
message TestAllTypes {
|
||||
message NestedMessage {
|
||||
optional int32 bb = 1;
|
||||
optional ForeignMessage cc = 2;
|
||||
}
|
||||
|
||||
repeated NestedMessage repeated_nested_message = 1;
|
||||
optional NestedMessage optional_nested_message = 2;
|
||||
optional int32 optional_int32 = 3;
|
||||
}
|
||||
|
||||
message ForeignMessage {
|
||||
optional int32 c = 1;
|
||||
repeated int32 d = 2;
|
||||
}
|
||||
|
||||
message TestAllExtensions {
|
||||
extensions 1 to max;
|
||||
}
|
||||
|
||||
extend TestAllExtensions {
|
||||
optional TestAllTypes.NestedMessage optional_nested_message_extension = 1;
|
||||
}
|
||||
@@ -0,0 +1,57 @@
|
||||
// Protocol Buffers - Google's data interchange format
|
||||
// Copyright 2008 Google Inc. All rights reserved.
|
||||
// https://developers.google.com/protocol-buffers/
|
||||
//
|
||||
// Redistribution and use in source and binary forms, with or without
|
||||
// modification, are permitted provided that the following conditions are
|
||||
// met:
|
||||
//
|
||||
// * Redistributions of source code must retain the above copyright
|
||||
// notice, this list of conditions and the following disclaimer.
|
||||
// * Redistributions in binary form must reproduce the above
|
||||
// copyright notice, this list of conditions and the following disclaimer
|
||||
// in the documentation and/or other materials provided with the
|
||||
// distribution.
|
||||
// * Neither the name of Google Inc. nor the names of its
|
||||
// contributors may be used to endorse or promote products derived from
|
||||
// this software without specific prior written permission.
|
||||
//
|
||||
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
||||
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
||||
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
||||
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
||||
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
||||
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
||||
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
||||
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
||||
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
||||
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
// Author: qrczak@google.com (Marcin Kowalczyk)
|
||||
//
|
||||
// This module exposes the C proto inside the given Python proto, in
|
||||
// case the Python proto is implemented with a C proto.
|
||||
|
||||
#ifndef GOOGLE_PROTOBUF_PYTHON_PYTHON_PROTOBUF_H__
|
||||
#define GOOGLE_PROTOBUF_PYTHON_PYTHON_PROTOBUF_H__
|
||||
|
||||
#include <Python.h>
|
||||
|
||||
namespace google {
|
||||
namespace protobuf {
|
||||
|
||||
class Message;
|
||||
|
||||
namespace python {
|
||||
|
||||
// Return the pointer to the C proto inside the given Python proto,
|
||||
// or NULL when this is not a Python proto implemented with a C proto.
|
||||
const Message* GetCProtoInsidePyProto(PyObject* msg);
|
||||
Message* MutableCProtoInsidePyProto(PyObject* msg);
|
||||
|
||||
} // namespace python
|
||||
} // namespace protobuf
|
||||
|
||||
} // namespace google
|
||||
#endif // GOOGLE_PROTOBUF_PYTHON_PYTHON_PROTOBUF_H__
|
||||
@@ -0,0 +1,94 @@
|
||||
#! /usr/bin/python
|
||||
# -*- coding: utf-8 -*-
|
||||
#
|
||||
# Protocol Buffers - Google's data interchange format
|
||||
# Copyright 2008 Google Inc. All rights reserved.
|
||||
# https://developers.google.com/protocol-buffers/
|
||||
#
|
||||
# Redistribution and use in source and binary forms, with or without
|
||||
# modification, are permitted provided that the following conditions are
|
||||
# met:
|
||||
#
|
||||
# * Redistributions of source code must retain the above copyright
|
||||
# notice, this list of conditions and the following disclaimer.
|
||||
# * Redistributions in binary form must reproduce the above
|
||||
# copyright notice, this list of conditions and the following disclaimer
|
||||
# in the documentation and/or other materials provided with the
|
||||
# distribution.
|
||||
# * Neither the name of Google Inc. nor the names of its
|
||||
# contributors may be used to endorse or promote products derived from
|
||||
# this software without specific prior written permission.
|
||||
#
|
||||
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
||||
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
||||
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
||||
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
||||
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
||||
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
||||
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
||||
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
||||
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
||||
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
"""Unittest for reflection.py, which tests the generated C++ implementation."""
|
||||
|
||||
__author__ = 'jasonh@google.com (Jason Hsueh)'
|
||||
|
||||
import os
|
||||
os.environ['PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION'] = 'cpp'
|
||||
os.environ['PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION_VERSION'] = '2'
|
||||
|
||||
from google.apputils import basetest
|
||||
from google.protobuf.internal import api_implementation
|
||||
from google.protobuf.internal import more_extensions_dynamic_pb2
|
||||
from google.protobuf.internal import more_extensions_pb2
|
||||
from google.protobuf.internal.reflection_test import *
|
||||
|
||||
|
||||
class ReflectionCppTest(basetest.TestCase):
|
||||
def testImplementationSetting(self):
|
||||
self.assertEqual('cpp', api_implementation.Type())
|
||||
self.assertEqual(2, api_implementation.Version())
|
||||
|
||||
def testExtensionOfGeneratedTypeInDynamicFile(self):
|
||||
"""Tests that a file built dynamically can extend a generated C++ type.
|
||||
|
||||
The C++ implementation uses a DescriptorPool that has the generated
|
||||
DescriptorPool as an underlay. Typically, a type can only find
|
||||
extensions in its own pool. With the python C-extension, the generated C++
|
||||
extendee may be available, but not the extension. This tests that the
|
||||
C-extension implements the correct special handling to make such extensions
|
||||
available.
|
||||
"""
|
||||
pb1 = more_extensions_pb2.ExtendedMessage()
|
||||
# Test that basic accessors work.
|
||||
self.assertFalse(
|
||||
pb1.HasExtension(more_extensions_dynamic_pb2.dynamic_int32_extension))
|
||||
self.assertFalse(
|
||||
pb1.HasExtension(more_extensions_dynamic_pb2.dynamic_message_extension))
|
||||
pb1.Extensions[more_extensions_dynamic_pb2.dynamic_int32_extension] = 17
|
||||
pb1.Extensions[more_extensions_dynamic_pb2.dynamic_message_extension].a = 24
|
||||
self.assertTrue(
|
||||
pb1.HasExtension(more_extensions_dynamic_pb2.dynamic_int32_extension))
|
||||
self.assertTrue(
|
||||
pb1.HasExtension(more_extensions_dynamic_pb2.dynamic_message_extension))
|
||||
|
||||
# Now serialize the data and parse to a new message.
|
||||
pb2 = more_extensions_pb2.ExtendedMessage()
|
||||
pb2.MergeFromString(pb1.SerializeToString())
|
||||
|
||||
self.assertTrue(
|
||||
pb2.HasExtension(more_extensions_dynamic_pb2.dynamic_int32_extension))
|
||||
self.assertTrue(
|
||||
pb2.HasExtension(more_extensions_dynamic_pb2.dynamic_message_extension))
|
||||
self.assertEqual(
|
||||
17, pb2.Extensions[more_extensions_dynamic_pb2.dynamic_int32_extension])
|
||||
self.assertEqual(
|
||||
24,
|
||||
pb2.Extensions[more_extensions_dynamic_pb2.dynamic_message_extension].a)
|
||||
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
basetest.main()
|
||||
@@ -0,0 +1,763 @@
|
||||
// Protocol Buffers - Google's data interchange format
|
||||
// Copyright 2008 Google Inc. All rights reserved.
|
||||
// https://developers.google.com/protocol-buffers/
|
||||
//
|
||||
// Redistribution and use in source and binary forms, with or without
|
||||
// modification, are permitted provided that the following conditions are
|
||||
// met:
|
||||
//
|
||||
// * Redistributions of source code must retain the above copyright
|
||||
// notice, this list of conditions and the following disclaimer.
|
||||
// * Redistributions in binary form must reproduce the above
|
||||
// copyright notice, this list of conditions and the following disclaimer
|
||||
// in the documentation and/or other materials provided with the
|
||||
// distribution.
|
||||
// * Neither the name of Google Inc. nor the names of its
|
||||
// contributors may be used to endorse or promote products derived from
|
||||
// this software without specific prior written permission.
|
||||
//
|
||||
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
||||
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
||||
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
||||
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
||||
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
||||
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
||||
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
||||
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
||||
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
||||
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
// Author: anuraag@google.com (Anuraag Agrawal)
|
||||
// Author: tibell@google.com (Johan Tibell)
|
||||
|
||||
#include <google/protobuf/pyext/repeated_composite_container.h>
|
||||
|
||||
#include <memory>
|
||||
#ifndef _SHARED_PTR_H
|
||||
#include <google/protobuf/stubs/shared_ptr.h>
|
||||
#endif
|
||||
|
||||
#include <google/protobuf/stubs/common.h>
|
||||
#include <google/protobuf/descriptor.h>
|
||||
#include <google/protobuf/dynamic_message.h>
|
||||
#include <google/protobuf/message.h>
|
||||
#include <google/protobuf/pyext/descriptor.h>
|
||||
#include <google/protobuf/pyext/message.h>
|
||||
#include <google/protobuf/pyext/scoped_pyobject_ptr.h>
|
||||
|
||||
#if PY_MAJOR_VERSION >= 3
|
||||
#define PyInt_Check PyLong_Check
|
||||
#define PyInt_AsLong PyLong_AsLong
|
||||
#define PyInt_FromLong PyLong_FromLong
|
||||
#endif
|
||||
|
||||
namespace google {
|
||||
namespace protobuf {
|
||||
namespace python {
|
||||
|
||||
extern google::protobuf::DynamicMessageFactory* global_message_factory;
|
||||
|
||||
namespace repeated_composite_container {
|
||||
|
||||
// TODO(tibell): We might also want to check:
|
||||
// GOOGLE_CHECK_NOTNULL((self)->owner.get());
|
||||
#define GOOGLE_CHECK_ATTACHED(self) \
|
||||
do { \
|
||||
GOOGLE_CHECK_NOTNULL((self)->message); \
|
||||
GOOGLE_CHECK_NOTNULL((self)->parent_field); \
|
||||
} while (0);
|
||||
|
||||
#define GOOGLE_CHECK_RELEASED(self) \
|
||||
do { \
|
||||
GOOGLE_CHECK((self)->owner.get() == NULL); \
|
||||
GOOGLE_CHECK((self)->message == NULL); \
|
||||
GOOGLE_CHECK((self)->parent_field == NULL); \
|
||||
GOOGLE_CHECK((self)->parent == NULL); \
|
||||
} while (0);
|
||||
|
||||
// Returns a new reference.
|
||||
static PyObject* GetKey(PyObject* x) {
|
||||
// Just the identity function.
|
||||
Py_INCREF(x);
|
||||
return x;
|
||||
}
|
||||
|
||||
#define GET_KEY(keyfunc, value) \
|
||||
((keyfunc) == NULL ? \
|
||||
GetKey((value)) : \
|
||||
PyObject_CallFunctionObjArgs((keyfunc), (value), NULL))
|
||||
|
||||
// Converts a comparison function that returns -1, 0, or 1 into a
|
||||
// less-than predicate.
|
||||
//
|
||||
// Returns -1 on error, 1 if x < y, 0 if x >= y.
|
||||
static int islt(PyObject *x, PyObject *y, PyObject *compare) {
|
||||
if (compare == NULL)
|
||||
return PyObject_RichCompareBool(x, y, Py_LT);
|
||||
|
||||
ScopedPyObjectPtr res(PyObject_CallFunctionObjArgs(compare, x, y, NULL));
|
||||
if (res == NULL)
|
||||
return -1;
|
||||
if (!PyInt_Check(res)) {
|
||||
PyErr_Format(PyExc_TypeError,
|
||||
"comparison function must return int, not %.200s",
|
||||
Py_TYPE(res)->tp_name);
|
||||
return -1;
|
||||
}
|
||||
return PyInt_AsLong(res) < 0;
|
||||
}
|
||||
|
||||
// Copied from uarrsort.c but swaps memcpy swaps with protobuf/python swaps
|
||||
// TODO(anuraag): Is there a better way to do this then reinventing the wheel?
|
||||
static int InternalQuickSort(RepeatedCompositeContainer* self,
|
||||
Py_ssize_t start,
|
||||
Py_ssize_t limit,
|
||||
PyObject* cmp,
|
||||
PyObject* keyfunc) {
|
||||
if (limit - start <= 1)
|
||||
return 0; // Nothing to sort.
|
||||
|
||||
GOOGLE_CHECK_ATTACHED(self);
|
||||
|
||||
google::protobuf::Message* message = self->message;
|
||||
const google::protobuf::Reflection* reflection = message->GetReflection();
|
||||
const google::protobuf::FieldDescriptor* descriptor = self->parent_field->descriptor;
|
||||
Py_ssize_t left;
|
||||
Py_ssize_t right;
|
||||
|
||||
PyObject* children = self->child_messages;
|
||||
|
||||
do {
|
||||
left = start;
|
||||
right = limit;
|
||||
ScopedPyObjectPtr mid(
|
||||
GET_KEY(keyfunc, PyList_GET_ITEM(children, (start + limit) / 2)));
|
||||
do {
|
||||
ScopedPyObjectPtr key(GET_KEY(keyfunc, PyList_GET_ITEM(children, left)));
|
||||
int is_lt = islt(key, mid, cmp);
|
||||
if (is_lt == -1)
|
||||
return -1;
|
||||
/* array[left]<x */
|
||||
while (is_lt) {
|
||||
++left;
|
||||
ScopedPyObjectPtr key(GET_KEY(keyfunc,
|
||||
PyList_GET_ITEM(children, left)));
|
||||
is_lt = islt(key, mid, cmp);
|
||||
if (is_lt == -1)
|
||||
return -1;
|
||||
}
|
||||
key.reset(GET_KEY(keyfunc, PyList_GET_ITEM(children, right - 1)));
|
||||
is_lt = islt(mid, key, cmp);
|
||||
if (is_lt == -1)
|
||||
return -1;
|
||||
while (is_lt) {
|
||||
--right;
|
||||
ScopedPyObjectPtr key(GET_KEY(keyfunc,
|
||||
PyList_GET_ITEM(children, right - 1)));
|
||||
is_lt = islt(mid, key, cmp);
|
||||
if (is_lt == -1)
|
||||
return -1;
|
||||
}
|
||||
if (left < right) {
|
||||
--right;
|
||||
if (left < right) {
|
||||
reflection->SwapElements(message, descriptor, left, right);
|
||||
PyObject* tmp = PyList_GET_ITEM(children, left);
|
||||
PyList_SET_ITEM(children, left, PyList_GET_ITEM(children, right));
|
||||
PyList_SET_ITEM(children, right, tmp);
|
||||
}
|
||||
++left;
|
||||
}
|
||||
} while (left < right);
|
||||
|
||||
if ((right - start) < (limit - left)) {
|
||||
/* sort [start..right[ */
|
||||
if (start < (right - 1)) {
|
||||
InternalQuickSort(self, start, right, cmp, keyfunc);
|
||||
}
|
||||
|
||||
/* sort [left..limit[ */
|
||||
start = left;
|
||||
} else {
|
||||
/* sort [left..limit[ */
|
||||
if (left < (limit - 1)) {
|
||||
InternalQuickSort(self, left, limit, cmp, keyfunc);
|
||||
}
|
||||
|
||||
/* sort [start..right[ */
|
||||
limit = right;
|
||||
}
|
||||
} while (start < (limit - 1));
|
||||
|
||||
return 0;
|
||||
}
|
||||
|
||||
#undef GET_KEY
|
||||
|
||||
// ---------------------------------------------------------------------
|
||||
// len()
|
||||
|
||||
static Py_ssize_t Length(RepeatedCompositeContainer* self) {
|
||||
google::protobuf::Message* message = self->message;
|
||||
if (message != NULL) {
|
||||
return message->GetReflection()->FieldSize(*message,
|
||||
self->parent_field->descriptor);
|
||||
} else {
|
||||
// The container has been released (i.e. by a call to Clear() or
|
||||
// ClearField() on the parent) and thus there's no message.
|
||||
return PyList_GET_SIZE(self->child_messages);
|
||||
}
|
||||
}
|
||||
|
||||
// Synchronizes self->child_messages with the underlying C++ repeated
// field by wrapping any trailing messages that were added out-of-band
// (e.g. by a MergeFrom on the parent) in new CMessage children.
//
// Returns 0 if successful; returns -1 and sets an exception if
// unsuccessful.
static int UpdateChildMessages(RepeatedCompositeContainer* self) {
  if (self->message == NULL)
    return 0;

  // A MergeFrom on a parent message could have caused extra messages to be
  // added in the underlying protobuf so add them to our list. They can never
  // be removed in such a way so there's no need to worry about that.
  Py_ssize_t message_length = Length(self);
  Py_ssize_t child_length = PyList_GET_SIZE(self->child_messages);
  google::protobuf::Message* message = self->message;
  const google::protobuf::Reflection* reflection = message->GetReflection();
  for (Py_ssize_t i = child_length; i < message_length; ++i) {
    const Message& sub_message = reflection->GetRepeatedMessage(
        *(self->message), self->parent_field->descriptor, i);
    ScopedPyObjectPtr py_cmsg(cmessage::NewEmpty(self->subclass_init));
    if (py_cmsg == NULL) {
      return -1;
    }
    CMessage* cmsg = reinterpret_cast<CMessage*>(py_cmsg.get());
    // The child does not own the C++ sub-message; the shared 'owner'
    // keeps the whole proto tree alive.
    cmsg->owner = self->owner;
    cmsg->message = const_cast<google::protobuf::Message*>(&sub_message);
    cmsg->parent = self->parent;
    if (cmessage::InitAttributes(cmsg, NULL, NULL) < 0) {
      return -1;
    }
    PyList_Append(self->child_messages, py_cmsg);
  }
  return 0;
}
|
||||
|
||||
// ---------------------------------------------------------------------
|
||||
// add()
|
||||
|
||||
// add() implementation for an attached container: appends a new message
// to the underlying C++ repeated field and mirrors it with a new
// CMessage child initialized from kwargs.
//
// Returns a new reference on success; returns NULL and sets an
// exception on failure.
static PyObject* AddToAttached(RepeatedCompositeContainer* self,
                               PyObject* args,
                               PyObject* kwargs) {
  GOOGLE_CHECK_ATTACHED(self);

  if (UpdateChildMessages(self) < 0) {
    return NULL;
  }
  if (cmessage::AssureWritable(self->parent) == -1)
    return NULL;
  google::protobuf::Message* message = self->message;
  google::protobuf::Message* sub_message =
      message->GetReflection()->AddMessage(message,
                                           self->parent_field->descriptor);
  PyObject* py_cmsg = cmessage::NewEmpty(self->subclass_init);
  if (py_cmsg == NULL) {
    return NULL;
  }
  CMessage* cmsg = reinterpret_cast<CMessage*>(py_cmsg);

  // The child shares ownership of the tree with this container.
  cmsg->owner = self->owner;
  cmsg->message = sub_message;
  cmsg->parent = self->parent;
  // cmessage::InitAttributes must be called after cmsg->message has
  // been set.
  if (cmessage::InitAttributes(cmsg, NULL, kwargs) < 0) {
    Py_DECREF(py_cmsg);
    return NULL;
  }
  PyList_Append(self->child_messages, py_cmsg);
  return py_cmsg;
}
|
||||
|
||||
// add() implementation for a released container: there is no underlying
// C++ repeated field to append to, so a standalone message is created
// via subclass_init and appended only to the Python child list.
//
// Returns a new reference on success; returns NULL and sets an
// exception on failure.
static PyObject* AddToReleased(RepeatedCompositeContainer* self,
                               PyObject* args,
                               PyObject* kwargs) {
  GOOGLE_CHECK_RELEASED(self);

  // Create the CMessage
  PyObject* py_cmsg = PyObject_CallObject(self->subclass_init, NULL);
  if (py_cmsg == NULL)
    return NULL;
  CMessage* cmsg = reinterpret_cast<CMessage*>(py_cmsg);
  if (cmessage::InitAttributes(cmsg, NULL, kwargs) < 0) {
    Py_DECREF(py_cmsg);
    return NULL;
  }

  // The Message got created by the call to subclass_init above and
  // it set self->owner to the newly allocated message.

  PyList_Append(self->child_messages, py_cmsg);
  return py_cmsg;
}
|
||||
|
||||
// Appends a new CMessage to the container, initialized from kwargs, and
// returns a new reference to it (NULL + exception on failure).
//
// Dispatches on attachment state: a released container (no underlying
// C++ message) only grows the Python child list, while an attached one
// also mutates the underlying repeated field.
PyObject* Add(RepeatedCompositeContainer* self,
              PyObject* args,
              PyObject* kwargs) {
  return (self->message == NULL)
             ? AddToReleased(self, args, kwargs)
             : AddToAttached(self, args, kwargs);
}
|
||||
|
||||
// ---------------------------------------------------------------------
|
||||
// extend()
|
||||
|
||||
// Appends one new message per element of the iterable 'value', merging
// each element's contents into the freshly added message. Every element
// must be a CMessage.
//
// Returns None on success; returns NULL and sets an exception on
// failure.
PyObject* Extend(RepeatedCompositeContainer* self, PyObject* value) {
  cmessage::AssureWritable(self->parent);
  if (UpdateChildMessages(self) < 0) {
    return NULL;
  }
  ScopedPyObjectPtr iter(PyObject_GetIter(value));
  if (iter == NULL) {
    PyErr_SetString(PyExc_TypeError, "Value must be iterable");
    return NULL;
  }
  ScopedPyObjectPtr next;
  while ((next.reset(PyIter_Next(iter))) != NULL) {
    if (!PyObject_TypeCheck(next, &CMessage_Type)) {
      PyErr_SetString(PyExc_TypeError, "Not a cmessage");
      return NULL;
    }
    // Add() returns the new tail element; merge the source into it.
    ScopedPyObjectPtr new_message(Add(self, NULL, NULL));
    if (new_message == NULL) {
      return NULL;
    }
    CMessage* new_cmessage = reinterpret_cast<CMessage*>(new_message.get());
    if (cmessage::MergeFrom(new_cmessage, next) == NULL) {
      return NULL;
    }
  }
  // PyIter_Next returns NULL both on exhaustion and on error;
  // distinguish by checking for a pending exception.
  if (PyErr_Occurred()) {
    return NULL;
  }
  Py_RETURN_NONE;
}
|
||||
|
||||
// Appends a new message for each element of 'other', merging the data
// in — equivalent to extend(). Returns None on success; NULL and an
// exception on failure.
PyObject* MergeFrom(RepeatedCompositeContainer* self, PyObject* other) {
  // Refresh the child cache first; Extend() performs the appending.
  return (UpdateChildMessages(self) < 0) ? NULL : Extend(self, other);
}
|
||||
|
||||
// __getitem__ implementation. Supports integer and slice subscripts.
//
// Returns a new reference to the child CMessage for an integer index,
// or a new list of children for a slice; returns NULL and sets an
// exception on failure.
PyObject* Subscript(RepeatedCompositeContainer* self, PyObject* slice) {
  if (UpdateChildMessages(self) < 0) {
    return NULL;
  }
  Py_ssize_t from;
  Py_ssize_t to;
  Py_ssize_t step;
  Py_ssize_t length = Length(self);
  Py_ssize_t slicelength;
  if (PySlice_Check(slice)) {
// Python 3 takes the slice object directly; Python 2 requires a
// PySliceObject* cast.
#if PY_MAJOR_VERSION >= 3
    if (PySlice_GetIndicesEx(slice,
#else
    if (PySlice_GetIndicesEx(reinterpret_cast<PySliceObject*>(slice),
#endif
        length, &from, &to, &step, &slicelength) == -1) {
      return NULL;
    }
    return PyList_GetSlice(self->child_messages, from, to);
  } else if (PyInt_Check(slice) || PyLong_Check(slice)) {
    from = to = PyLong_AsLong(slice);
    // Normalize a negative index relative to the end.
    if (from < 0) {
      from = to = length + from;
    }
    // PyList_GetItem bounds-checks and sets IndexError if out of range.
    PyObject* result = PyList_GetItem(self->child_messages, from);
    if (result == NULL) {
      return NULL;
    }
    Py_INCREF(result);
    return result;
  }
  PyErr_SetString(PyExc_TypeError, "index must be an integer or slice");
  return NULL;
}
|
||||
|
||||
// __setitem__ / __delitem__ implementation. Only deletion (value ==
// NULL) is supported; assignment raises TypeError.
//
// Returns 0 on success, -1 (with an exception set) on failure.
int AssignSubscript(RepeatedCompositeContainer* self,
                    PyObject* slice,
                    PyObject* value) {
  if (UpdateChildMessages(self) < 0) {
    return -1;
  }
  if (value != NULL) {
    PyErr_SetString(PyExc_TypeError, "does not support assignment");
    return -1;
  }

  // Delete from the underlying Message, if any.
  if (self->message != NULL) {
    // Attached: the helper removes from both the C++ field and the
    // Python child list.
    if (cmessage::InternalDeleteRepeatedField(self->message,
                                              self->parent_field->descriptor,
                                              slice,
                                              self->child_messages) < 0) {
      return -1;
    }
  } else {
    // Released: only the Python child list exists.
    Py_ssize_t from;
    Py_ssize_t to;
    Py_ssize_t step;
    Py_ssize_t length = Length(self);
    Py_ssize_t slicelength;
    if (PySlice_Check(slice)) {
#if PY_MAJOR_VERSION >= 3
      if (PySlice_GetIndicesEx(slice,
#else
      if (PySlice_GetIndicesEx(reinterpret_cast<PySliceObject*>(slice),
#endif
          length, &from, &to, &step, &slicelength) == -1) {
        return -1;
      }
      return PySequence_DelSlice(self->child_messages, from, to);
    } else if (PyInt_Check(slice) || PyLong_Check(slice)) {
      from = to = PyLong_AsLong(slice);
      // Normalize a negative index relative to the end.
      if (from < 0) {
        from = to = length + from;
      }
      return PySequence_DelItem(self->child_messages, from);
    }
  }

  return 0;
}
|
||||
|
||||
// remove() implementation: deletes the first child equal to 'value'.
//
// Returns None on success; returns NULL and sets an exception (e.g.
// ValueError from PySequence_Index if not found) on failure.
static PyObject* Remove(RepeatedCompositeContainer* self, PyObject* value) {
  if (UpdateChildMessages(self) < 0) {
    return NULL;
  }
  Py_ssize_t index = PySequence_Index(self->child_messages, value);
  if (index == -1) {
    return NULL;
  }
  // Reuse the deletion path so the C++ field stays in sync.
  ScopedPyObjectPtr py_index(PyLong_FromLong(index));
  if (AssignSubscript(self, py_index, NULL) < 0) {
    return NULL;
  }
  Py_RETURN_NONE;
}
|
||||
|
||||
// Rich comparison (==, !=) against another repeated composite field.
// Other comparison operators return NotImplemented; comparing against a
// non-container raises TypeError.
static PyObject* RichCompare(RepeatedCompositeContainer* self,
                             PyObject* other,
                             int opid) {
  if (UpdateChildMessages(self) < 0) {
    return NULL;
  }
  if (!PyObject_TypeCheck(other, &RepeatedCompositeContainer_Type)) {
    PyErr_SetString(PyExc_TypeError,
                    "Can only compare repeated composite fields "
                    "against other repeated composite fields.");
    return NULL;
  }
  if (opid == Py_EQ || opid == Py_NE) {
    // Materialize both sides as Python lists via a full slice and defer
    // to list comparison.
    // TODO(anuraag): Don't make new lists just for this...
    ScopedPyObjectPtr full_slice(PySlice_New(NULL, NULL, NULL));
    if (full_slice == NULL) {
      return NULL;
    }
    ScopedPyObjectPtr list(Subscript(self, full_slice));
    if (list == NULL) {
      return NULL;
    }
    ScopedPyObjectPtr other_list(
        Subscript(
            reinterpret_cast<RepeatedCompositeContainer*>(other), full_slice));
    if (other_list == NULL) {
      return NULL;
    }
    return PyObject_RichCompare(list, other_list, opid);
  } else {
    Py_INCREF(Py_NotImplemented);
    return Py_NotImplemented;
  }
}
|
||||
|
||||
// ---------------------------------------------------------------------
|
||||
// sort()
|
||||
|
||||
// sort() implementation for an attached container: quicksorts the
// underlying C++ repeated field (keeping the Python child list in
// lockstep), then reverses both if reverse=True was passed.
//
// Accepts the list.sort()-style keywords cmp, key and reverse.
// Returns None on success; NULL and an exception on failure.
static PyObject* SortAttached(RepeatedCompositeContainer* self,
                              PyObject* args,
                              PyObject* kwds) {
  // Sort the underlying Message array.
  PyObject *compare = NULL;
  int reverse = 0;
  PyObject *keyfunc = NULL;
  static char *kwlist[] = {"cmp", "key", "reverse", 0};

  if (args != NULL) {
    if (!PyArg_ParseTupleAndKeywords(args, kwds, "|OOi:sort",
                                     kwlist, &compare, &keyfunc, &reverse))
      return NULL;
  }
  // Treat explicit None the same as "not given".
  if (compare == Py_None)
    compare = NULL;
  if (keyfunc == Py_None)
    keyfunc = NULL;

  const Py_ssize_t length = Length(self);
  if (InternalQuickSort(self, 0, length, compare, keyfunc) < 0)
    return NULL;

  // Finally reverse the result if requested.
  if (reverse) {
    google::protobuf::Message* message = self->message;
    const google::protobuf::Reflection* reflection = message->GetReflection();
    const google::protobuf::FieldDescriptor* descriptor = self->parent_field->descriptor;

    // Reverse the Message array.
    for (int i = 0; i < length / 2; ++i)
      reflection->SwapElements(message, descriptor, i, length - i - 1);

    // Reverse the Python list.
    ScopedPyObjectPtr res(PyObject_CallMethod(self->child_messages,
                                              "reverse", NULL));
    if (res == NULL)
      return NULL;
  }

  Py_RETURN_NONE;
}
|
||||
|
||||
// sort() implementation for a released container: with no underlying
// C++ field, simply delegates to list.sort() on the cached child list.
//
// Returns None on success; NULL and an exception on failure.
static PyObject* SortReleased(RepeatedCompositeContainer* self,
                              PyObject* args,
                              PyObject* kwds) {
  ScopedPyObjectPtr m(PyObject_GetAttrString(self->child_messages, "sort"));
  if (m == NULL)
    return NULL;
  if (PyObject_Call(m, args, kwds) == NULL)
    return NULL;
  Py_RETURN_NONE;
}
|
||||
|
||||
// sort() entry point: normalizes the legacy 'sort_function' keyword to
// 'cmp', refreshes the child cache, and dispatches to the attached or
// released implementation.
static PyObject* Sort(RepeatedCompositeContainer* self,
                      PyObject* args,
                      PyObject* kwds) {
  // Support the old sort_function argument for backwards
  // compatibility.
  if (kwds != NULL) {
    PyObject* sort_func = PyDict_GetItemString(kwds, "sort_function");
    if (sort_func != NULL) {
      // Must set before deleting as sort_func is a borrowed reference
      // and kwds might be the only thing keeping it alive.
      PyDict_SetItemString(kwds, "cmp", sort_func);
      PyDict_DelItemString(kwds, "sort_function");
    }
  }

  if (UpdateChildMessages(self) < 0)
    return NULL;
  if (self->message == NULL) {
    return SortReleased(self, args, kwds);
  } else {
    return SortAttached(self, args, kwds);
  }
}
|
||||
|
||||
// ---------------------------------------------------------------------
|
||||
|
||||
// sq_item implementation: returns a new reference to the child at
// 'index' (negative indices count from the end). Returns NULL and sets
// IndexError (via PyList_GetItem) if out of range.
static PyObject* Item(RepeatedCompositeContainer* self, Py_ssize_t index) {
  if (UpdateChildMessages(self) < 0) {
    return NULL;
  }
  Py_ssize_t length = Length(self);
  if (index < 0) {
    index = length + index;
  }
  // PyList_GetItem bounds-checks and returns a borrowed reference.
  PyObject* item = PyList_GetItem(self->child_messages, index);
  if (item == NULL) {
    return NULL;
  }
  Py_INCREF(item);
  return item;
}
|
||||
|
||||
// Removes the last element of the repeated field 'field' on 'message'
// and returns it. The caller takes ownership of the returned Message.
// If the field is empty, a fresh mutable message of type 'type' is
// returned instead.
Message* ReleaseLast(const FieldDescriptor* field,
                     const Descriptor* type,
                     Message* message) {
  GOOGLE_CHECK_NOTNULL(field);
  GOOGLE_CHECK_NOTNULL(type);
  GOOGLE_CHECK_NOTNULL(message);

  Message* released_message = message->GetReflection()->ReleaseLast(
      message, field);
  // TODO(tibell): Deal with proto1.

  // ReleaseMessage will return NULL which differs from
  // child_cmessage->message, if the field does not exist. In this case,
  // the latter points to the default instance via a const_cast<>, so we
  // have to reset it to a new mutable object since we are taking ownership.
  if (released_message == NULL) {
    const Message* prototype = global_message_factory->GetPrototype(type);
    GOOGLE_CHECK_NOTNULL(prototype);
    return prototype->New();
  } else {
    return released_message;
  }
}
|
||||
|
||||
// Releases the last element of the repeated field 'field' on 'message'
// and transfers ownership of the released Message to 'cmessage',
// detaching it from its former parent.
void ReleaseLastTo(const FieldDescriptor* field,
                   Message* message,
                   CMessage* cmessage) {
  GOOGLE_CHECK_NOTNULL(field);
  GOOGLE_CHECK_NOTNULL(message);
  GOOGLE_CHECK_NOTNULL(cmessage);

  shared_ptr<Message> released_message(
      ReleaseLast(field, cmessage->message->GetDescriptor(), message));
  // The cmessage now stands alone: no parent, writable, and owning its
  // own Message via the shared_ptr.
  cmessage->parent = NULL;
  cmessage->parent_field = NULL;
  cmessage->message = released_message.get();
  cmessage->read_only = false;
  cmessage::SetOwner(cmessage, released_message);
}
|
||||
|
||||
// Called to release a container using
// ClearField('container_field_name') on the parent. Each child message
// takes ownership of its own C++ Message, and the container detaches
// from the parent tree.
//
// Returns 0 on success, -1 on failure.
int Release(RepeatedCompositeContainer* self) {
  if (UpdateChildMessages(self) < 0) {
    PyErr_WriteUnraisable(PyBytes_FromString("Failed to update released "
                                             "messages"));
    return -1;
  }

  Message* message = self->message;
  const FieldDescriptor* field = self->parent_field->descriptor;

  // The reflection API only lets us release the last message in a
  // repeated field. Therefore we iterate through the children
  // starting with the last one.
  const Py_ssize_t size = PyList_GET_SIZE(self->child_messages);
  GOOGLE_DCHECK_EQ(size, message->GetReflection()->FieldSize(*message, field));
  for (Py_ssize_t i = size - 1; i >= 0; --i) {
    CMessage* child_cmessage = reinterpret_cast<CMessage*>(
        PyList_GET_ITEM(self->child_messages, i));
    ReleaseLastTo(field, message, child_cmessage);
  }

  // Detach from containing message.
  self->parent = NULL;
  self->parent_field = NULL;
  self->message = NULL;
  self->owner.reset();

  return 0;
}
|
||||
|
||||
// Points this container and all of its children at 'new_owner', the
// shared_ptr keeping the whole proto tree alive. Only valid while
// attached.
//
// Returns 0 on success, -1 on failure.
int SetOwner(RepeatedCompositeContainer* self,
             const shared_ptr<Message>& new_owner) {
  GOOGLE_CHECK_ATTACHED(self);

  self->owner = new_owner;
  // Propagate the new owner to every cached child message.
  const Py_ssize_t n = PyList_GET_SIZE(self->child_messages);
  for (Py_ssize_t i = 0; i < n; ++i) {
    PyObject* msg = PyList_GET_ITEM(self->child_messages, i);
    if (cmessage::SetOwner(reinterpret_cast<CMessage*>(msg), new_owner) == -1) {
      return -1;
    }
  }
  return 0;
}
|
||||
|
||||
// tp_init: zero-initializes all fields and allocates the (initially
// empty) Python list of child messages. The remaining fields are filled
// in later by the code that attaches the container to a parent.
//
// Returns 0 on success, -1 (with an exception set) on failure.
static int Init(RepeatedCompositeContainer* self,
                PyObject* args,
                PyObject* kwargs) {
  self->message = NULL;
  self->parent = NULL;
  self->parent_field = NULL;
  self->subclass_init = NULL;
  self->child_messages = PyList_New(0);
  // PyList_New returns NULL on allocation failure; propagate the error
  // rather than leaving a NULL list that later PyList_GET_SIZE /
  // PyList_Append calls would dereference.
  if (self->child_messages == NULL) {
    return -1;
  }
  return 0;
}
|
||||
|
||||
// tp_dealloc: drops the child list and the owning shared_ptr, then
// frees the object itself.
static void Dealloc(RepeatedCompositeContainer* self) {
  Py_CLEAR(self->child_messages);
  // TODO(tibell): Do we need to call delete on these objects to make
  // sure their destructors are called?
  self->owner.reset();
  Py_TYPE(self)->tp_free(reinterpret_cast<PyObject*>(self));
}
|
||||
|
||||
// Sequence protocol: len() and integer indexing.
static PySequenceMethods SqMethods = {
  (lenfunc)Length,        /* sq_length */
  0,                      /* sq_concat */
  0,                      /* sq_repeat */
  (ssizeargfunc)Item      /* sq_item */
};
|
||||
|
||||
// Mapping protocol: enables slice subscripts and del container[i].
static PyMappingMethods MpMethods = {
  (lenfunc)Length,               /* mp_length */
  (binaryfunc)Subscript,         /* mp_subscript */
  (objobjargproc)AssignSubscript,/* mp_ass_subscript */
};
|
||||
|
||||
// Python-visible methods of the repeated composite container.
static PyMethodDef Methods[] = {
  { "add", (PyCFunction) Add, METH_VARARGS | METH_KEYWORDS,
    "Adds an object to the repeated container." },
  { "extend", (PyCFunction) Extend, METH_O,
    "Adds objects to the repeated container." },
  { "remove", (PyCFunction) Remove, METH_O,
    "Removes an object from the repeated container." },
  { "sort", (PyCFunction) Sort, METH_VARARGS | METH_KEYWORDS,
    "Sorts the repeated container." },
  { "MergeFrom", (PyCFunction) MergeFrom, METH_O,
    "Adds objects to the repeated container." },
  { NULL, NULL }
};
|
||||
|
||||
} // namespace repeated_composite_container
|
||||
|
||||
// Type object for the repeated composite (message) field container.
PyTypeObject RepeatedCompositeContainer_Type = {
  PyVarObject_HEAD_INIT(&PyType_Type, 0)
  "google.protobuf.internal."
  "cpp._message.RepeatedCompositeContainer",  // tp_name
  sizeof(RepeatedCompositeContainer),     // tp_basicsize
  0,                                   //  tp_itemsize
  (destructor)repeated_composite_container::Dealloc,  //  tp_dealloc
  0,                                   //  tp_print
  0,                                   //  tp_getattr
  0,                                   //  tp_setattr
  0,                                   //  tp_compare
  0,                                   //  tp_repr
  0,                                   //  tp_as_number
  &repeated_composite_container::SqMethods,   //  tp_as_sequence
  &repeated_composite_container::MpMethods,   //  tp_as_mapping
  0,                                   //  tp_hash
  0,                                   //  tp_call
  0,                                   //  tp_str
  0,                                   //  tp_getattro
  0,                                   //  tp_setattro
  0,                                   //  tp_as_buffer
  Py_TPFLAGS_DEFAULT,                  //  tp_flags
  // Was "A Repeated scalar container" — a copy-paste from the scalar
  // container type; this type holds composite (message) elements.
  "A Repeated composite container",    //  tp_doc
  0,                                   //  tp_traverse
  0,                                   //  tp_clear
  (richcmpfunc)repeated_composite_container::RichCompare,  //  tp_richcompare
  0,                                   //  tp_weaklistoffset
  0,                                   //  tp_iter
  0,                                   //  tp_iternext
  repeated_composite_container::Methods,   //  tp_methods
  0,                                   //  tp_members
  0,                                   //  tp_getset
  0,                                   //  tp_base
  0,                                   //  tp_dict
  0,                                   //  tp_descr_get
  0,                                   //  tp_descr_set
  0,                                   //  tp_dictoffset
  (initproc)repeated_composite_container::Init,  //  tp_init
};
|
||||
|
||||
} // namespace python
|
||||
} // namespace protobuf
|
||||
} // namespace google
|
||||
@@ -0,0 +1,172 @@
|
||||
// Protocol Buffers - Google's data interchange format
|
||||
// Copyright 2008 Google Inc. All rights reserved.
|
||||
// https://developers.google.com/protocol-buffers/
|
||||
//
|
||||
// Redistribution and use in source and binary forms, with or without
|
||||
// modification, are permitted provided that the following conditions are
|
||||
// met:
|
||||
//
|
||||
// * Redistributions of source code must retain the above copyright
|
||||
// notice, this list of conditions and the following disclaimer.
|
||||
// * Redistributions in binary form must reproduce the above
|
||||
// copyright notice, this list of conditions and the following disclaimer
|
||||
// in the documentation and/or other materials provided with the
|
||||
// distribution.
|
||||
// * Neither the name of Google Inc. nor the names of its
|
||||
// contributors may be used to endorse or promote products derived from
|
||||
// this software without specific prior written permission.
|
||||
//
|
||||
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
||||
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
||||
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
||||
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
||||
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
||||
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
||||
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
||||
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
||||
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
||||
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
// Author: anuraag@google.com (Anuraag Agrawal)
|
||||
// Author: tibell@google.com (Johan Tibell)
|
||||
|
||||
#ifndef GOOGLE_PROTOBUF_PYTHON_CPP_REPEATED_COMPOSITE_CONTAINER_H__
|
||||
#define GOOGLE_PROTOBUF_PYTHON_CPP_REPEATED_COMPOSITE_CONTAINER_H__
|
||||
|
||||
#include <Python.h>
|
||||
|
||||
#include <memory>
|
||||
#ifndef _SHARED_PTR_H
|
||||
#include <google/protobuf/stubs/shared_ptr.h>
|
||||
#endif
|
||||
#include <string>
|
||||
#include <vector>
|
||||
|
||||
|
||||
namespace google {
|
||||
namespace protobuf {
|
||||
|
||||
class FieldDescriptor;
|
||||
class Message;
|
||||
|
||||
using internal::shared_ptr;
|
||||
|
||||
namespace python {
|
||||
|
||||
struct CMessage;
|
||||
struct CFieldDescriptor;
|
||||
|
||||
// A RepeatedCompositeContainer can be in one of two states: attached
// or released.
//
// When in the attached state all modifications to the container are
// done both on the 'message' and on the 'child_messages'
// list. In this state all Messages referred to by the children in
// 'child_messages' are owned by the 'owner'.
//
// When in the released state 'message', 'owner', 'parent', and
// 'parent_field' are NULL.
typedef struct RepeatedCompositeContainer {
  PyObject_HEAD;

  // This is the top-level C++ Message object that owns the whole
  // proto tree. Every Python RepeatedCompositeContainer holds a
  // reference to it in order to keep it alive as long as there's a
  // Python object that references any part of the tree.
  shared_ptr<Message> owner;

  // Weak reference to parent object. May be NULL. Used to make sure
  // the parent is writable before modifying the
  // RepeatedCompositeContainer.
  CMessage* parent;

  // A descriptor used to modify the underlying 'message'.
  CFieldDescriptor* parent_field;

  // Pointer to the C++ Message that contains this container. The
  // RepeatedCompositeContainer does not own this pointer.
  //
  // If NULL, this message has been released from its parent (by
  // calling Clear() or ClearField() on the parent.
  Message* message;

  // A callable that is used to create new child messages.
  PyObject* subclass_init;

  // A list of child messages.
  PyObject* child_messages;
} RepeatedCompositeContainer;
|
||||
|
||||
extern PyTypeObject RepeatedCompositeContainer_Type;
|
||||
|
||||
namespace repeated_composite_container {
|
||||
|
||||
// Returns the number of items in this repeated composite container.
|
||||
static Py_ssize_t Length(RepeatedCompositeContainer* self);
|
||||
|
||||
// Appends a new CMessage to the container and returns it. The
|
||||
// CMessage is initialized using the content of kwargs.
|
||||
//
|
||||
// Returns a new reference if successful; returns NULL and sets an
|
||||
// exception if unsuccessful.
|
||||
PyObject* Add(RepeatedCompositeContainer* self,
|
||||
PyObject* args,
|
||||
PyObject* kwargs);
|
||||
|
||||
// Appends all the CMessages in the input iterator to the container.
|
||||
//
|
||||
// Returns None if successful; returns NULL and sets an exception if
|
||||
// unsuccessful.
|
||||
PyObject* Extend(RepeatedCompositeContainer* self, PyObject* value);
|
||||
|
||||
// Appends a new message to the container for each message in the
|
||||
// input iterator, merging each data element in. Equivalent to extend.
|
||||
//
|
||||
// Returns None if successful; returns NULL and sets an exception if
|
||||
// unsuccessful.
|
||||
PyObject* MergeFrom(RepeatedCompositeContainer* self, PyObject* other);
|
||||
|
||||
// Accesses messages in the container.
|
||||
//
|
||||
// Returns a new reference to the message for an integer parameter.
|
||||
// Returns a new reference to a list of messages for a slice.
|
||||
PyObject* Subscript(RepeatedCompositeContainer* self, PyObject* slice);
|
||||
|
||||
// Deletes items from the container (cannot be used for assignment).
|
||||
//
|
||||
// Returns 0 on success, -1 on failure.
|
||||
int AssignSubscript(RepeatedCompositeContainer* self,
|
||||
PyObject* slice,
|
||||
PyObject* value);
|
||||
|
||||
// Releases the messages in the container to the given message.
|
||||
//
|
||||
// Returns 0 on success, -1 on failure.
|
||||
int ReleaseToMessage(RepeatedCompositeContainer* self,
|
||||
google::protobuf::Message* new_message);
|
||||
|
||||
// Releases the messages in the container to a new message.
|
||||
//
|
||||
// Returns 0 on success, -1 on failure.
|
||||
int Release(RepeatedCompositeContainer* self);
|
||||
|
||||
// Returns 0 on success, -1 on failure.
|
||||
int SetOwner(RepeatedCompositeContainer* self,
|
||||
const shared_ptr<Message>& new_owner);
|
||||
|
||||
// Removes the last element of the repeated message field 'field' on
|
||||
// the Message 'message', and transfers the ownership of the released
|
||||
// Message to 'cmessage'.
|
||||
//
|
||||
// Corresponds to reflection api method ReleaseMessage.
|
||||
void ReleaseLastTo(const FieldDescriptor* field,
|
||||
Message* message,
|
||||
CMessage* cmessage);
|
||||
|
||||
} // namespace repeated_composite_container
|
||||
} // namespace python
|
||||
} // namespace protobuf
|
||||
|
||||
} // namespace google
|
||||
#endif // GOOGLE_PROTOBUF_PYTHON_CPP_REPEATED_COMPOSITE_CONTAINER_H__
|
||||
@@ -0,0 +1,825 @@
|
||||
// Protocol Buffers - Google's data interchange format
|
||||
// Copyright 2008 Google Inc. All rights reserved.
|
||||
// https://developers.google.com/protocol-buffers/
|
||||
//
|
||||
// Redistribution and use in source and binary forms, with or without
|
||||
// modification, are permitted provided that the following conditions are
|
||||
// met:
|
||||
//
|
||||
// * Redistributions of source code must retain the above copyright
|
||||
// notice, this list of conditions and the following disclaimer.
|
||||
// * Redistributions in binary form must reproduce the above
|
||||
// copyright notice, this list of conditions and the following disclaimer
|
||||
// in the documentation and/or other materials provided with the
|
||||
// distribution.
|
||||
// * Neither the name of Google Inc. nor the names of its
|
||||
// contributors may be used to endorse or promote products derived from
|
||||
// this software without specific prior written permission.
|
||||
//
|
||||
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
||||
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
||||
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
||||
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
||||
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
||||
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
||||
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
||||
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
||||
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
||||
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
// Author: anuraag@google.com (Anuraag Agrawal)
|
||||
// Author: tibell@google.com (Johan Tibell)
|
||||
|
||||
#include <google/protobuf/pyext/repeated_scalar_container.h>
|
||||
|
||||
#include <memory>
|
||||
#ifndef _SHARED_PTR_H
|
||||
#include <google/protobuf/stubs/shared_ptr.h>
|
||||
#endif
|
||||
|
||||
#include <google/protobuf/stubs/common.h>
|
||||
#include <google/protobuf/descriptor.h>
|
||||
#include <google/protobuf/dynamic_message.h>
|
||||
#include <google/protobuf/message.h>
|
||||
#include <google/protobuf/pyext/descriptor.h>
|
||||
#include <google/protobuf/pyext/message.h>
|
||||
#include <google/protobuf/pyext/scoped_pyobject_ptr.h>
|
||||
|
||||
#if PY_MAJOR_VERSION >= 3
|
||||
#define PyInt_FromLong PyLong_FromLong
|
||||
#if PY_VERSION_HEX < 0x03030000
|
||||
#error "Python 3.0 - 3.2 are not supported."
|
||||
#else
|
||||
#define PyString_AsString(ob) \
|
||||
(PyUnicode_Check(ob)? PyUnicode_AsUTF8(ob): PyBytes_AS_STRING(ob))
|
||||
#endif
|
||||
#endif
|
||||
|
||||
namespace google {
|
||||
namespace protobuf {
|
||||
namespace python {
|
||||
|
||||
extern google::protobuf::DynamicMessageFactory* global_message_factory;
|
||||
|
||||
namespace repeated_scalar_container {
|
||||
|
||||
// Replaces the entire contents of the repeated scalar field with the
// elements of the Python list 'list': clears the field, then appends
// each element in order.
//
// Returns 0 on success, -1 (with an exception set) on failure. Note a
// failure mid-loop leaves the field partially assigned.
static int InternalAssignRepeatedField(
    RepeatedScalarContainer* self, PyObject* list) {
  self->message->GetReflection()->ClearField(self->message,
                                             self->parent_field->descriptor);
  for (Py_ssize_t i = 0; i < PyList_GET_SIZE(list); ++i) {
    PyObject* value = PyList_GET_ITEM(list, i);
    if (Append(self, value) == NULL) {
      return -1;
    }
  }
  return 0;
}
|
||||
|
||||
// Returns the number of elements in the repeated scalar field, read
// from the underlying C++ Message via reflection.
static Py_ssize_t Len(RepeatedScalarContainer* self) {
  google::protobuf::Message* message = self->message;
  return message->GetReflection()->FieldSize(*message,
                                             self->parent_field->descriptor);
}
|
||||
|
||||
// sq_ass_item implementation: sets (or, when arg == NULL, deletes) the
// element at 'index'. Negative indices count from the end. The value is
// converted and stored according to the field's C++ type; enum values
// are validated against the enum descriptor.
//
// Returns 0 on success, -1 (with an exception set) on failure.
static int AssignItem(RepeatedScalarContainer* self,
                      Py_ssize_t index,
                      PyObject* arg) {
  cmessage::AssureWritable(self->parent);
  google::protobuf::Message* message = self->message;
  const google::protobuf::FieldDescriptor* field_descriptor =
      self->parent_field->descriptor;
  if (!FIELD_BELONGS_TO_MESSAGE(field_descriptor, message)) {
    PyErr_SetString(
        PyExc_KeyError, "Field does not belong to message!");
    return -1;
  }

  const google::protobuf::Reflection* reflection = message->GetReflection();
  int field_size = reflection->FieldSize(*message, field_descriptor);
  // Normalize a negative index relative to the end, then bounds-check.
  if (index < 0) {
    index = field_size + index;
  }
  if (index < 0 || index >= field_size) {
    PyErr_Format(PyExc_IndexError,
                 "list assignment index (%d) out of range",
                 static_cast<int>(index));
    return -1;
  }

  // arg == NULL means deletion (del container[index]).
  if (arg == NULL) {
    ScopedPyObjectPtr py_index(PyLong_FromLong(index));
    return cmessage::InternalDeleteRepeatedField(message, field_descriptor,
                                                 py_index, NULL);
  }

  // Reject sequences (other than str/bytes) — elements must be scalars.
  if (PySequence_Check(arg) && !(PyBytes_Check(arg) || PyUnicode_Check(arg))) {
    PyErr_SetString(PyExc_TypeError, "Value must be scalar");
    return -1;
  }

  // Convert and store according to the field's declared C++ type. The
  // GOOGLE_CHECK_GET_* macros convert 'arg' into 'value' and return -1
  // on conversion failure.
  switch (field_descriptor->cpp_type()) {
    case google::protobuf::FieldDescriptor::CPPTYPE_INT32: {
      GOOGLE_CHECK_GET_INT32(arg, value, -1);
      reflection->SetRepeatedInt32(message, field_descriptor, index, value);
      break;
    }
    case google::protobuf::FieldDescriptor::CPPTYPE_INT64: {
      GOOGLE_CHECK_GET_INT64(arg, value, -1);
      reflection->SetRepeatedInt64(message, field_descriptor, index, value);
      break;
    }
    case google::protobuf::FieldDescriptor::CPPTYPE_UINT32: {
      GOOGLE_CHECK_GET_UINT32(arg, value, -1);
      reflection->SetRepeatedUInt32(message, field_descriptor, index, value);
      break;
    }
    case google::protobuf::FieldDescriptor::CPPTYPE_UINT64: {
      GOOGLE_CHECK_GET_UINT64(arg, value, -1);
      reflection->SetRepeatedUInt64(message, field_descriptor, index, value);
      break;
    }
    case google::protobuf::FieldDescriptor::CPPTYPE_FLOAT: {
      GOOGLE_CHECK_GET_FLOAT(arg, value, -1);
      reflection->SetRepeatedFloat(message, field_descriptor, index, value);
      break;
    }
    case google::protobuf::FieldDescriptor::CPPTYPE_DOUBLE: {
      GOOGLE_CHECK_GET_DOUBLE(arg, value, -1);
      reflection->SetRepeatedDouble(message, field_descriptor, index, value);
      break;
    }
    case google::protobuf::FieldDescriptor::CPPTYPE_BOOL: {
      GOOGLE_CHECK_GET_BOOL(arg, value, -1);
      reflection->SetRepeatedBool(message, field_descriptor, index, value);
      break;
    }
    case google::protobuf::FieldDescriptor::CPPTYPE_STRING: {
      if (!CheckAndSetString(
          arg, message, field_descriptor, reflection, false, index)) {
        return -1;
      }
      break;
    }
    case google::protobuf::FieldDescriptor::CPPTYPE_ENUM: {
      GOOGLE_CHECK_GET_INT32(arg, value, -1);
      // The integer must name an existing value of the enum type.
      const google::protobuf::EnumDescriptor* enum_descriptor =
          field_descriptor->enum_type();
      const google::protobuf::EnumValueDescriptor* enum_value =
          enum_descriptor->FindValueByNumber(value);
      if (enum_value != NULL) {
        reflection->SetRepeatedEnum(message, field_descriptor, index,
                                    enum_value);
      } else {
        ScopedPyObjectPtr s(PyObject_Str(arg));
        if (s != NULL) {
          PyErr_Format(PyExc_ValueError, "Unknown enum value: %s",
                       PyString_AsString(s.get()));
        }
        return -1;
      }
      break;
    }
    default:
      PyErr_Format(
          PyExc_SystemError, "Adding value to a field of unknown type %d",
          field_descriptor->cpp_type());
      return -1;
  }
  return 0;
}
|
||||
|
||||
static PyObject* Item(RepeatedScalarContainer* self, Py_ssize_t index) {
|
||||
google::protobuf::Message* message = self->message;
|
||||
const google::protobuf::FieldDescriptor* field_descriptor =
|
||||
self->parent_field->descriptor;
|
||||
const google::protobuf::Reflection* reflection = message->GetReflection();
|
||||
|
||||
int field_size = reflection->FieldSize(*message, field_descriptor);
|
||||
if (index < 0) {
|
||||
index = field_size + index;
|
||||
}
|
||||
if (index < 0 || index >= field_size) {
|
||||
PyErr_Format(PyExc_IndexError,
|
||||
"list assignment index (%d) out of range",
|
||||
static_cast<int>(index));
|
||||
return NULL;
|
||||
}
|
||||
|
||||
PyObject* result = NULL;
|
||||
switch (field_descriptor->cpp_type()) {
|
||||
case google::protobuf::FieldDescriptor::CPPTYPE_INT32: {
|
||||
int32 value = reflection->GetRepeatedInt32(
|
||||
*message, field_descriptor, index);
|
||||
result = PyInt_FromLong(value);
|
||||
break;
|
||||
}
|
||||
case google::protobuf::FieldDescriptor::CPPTYPE_INT64: {
|
||||
int64 value = reflection->GetRepeatedInt64(
|
||||
*message, field_descriptor, index);
|
||||
result = PyLong_FromLongLong(value);
|
||||
break;
|
||||
}
|
||||
case google::protobuf::FieldDescriptor::CPPTYPE_UINT32: {
|
||||
uint32 value = reflection->GetRepeatedUInt32(
|
||||
*message, field_descriptor, index);
|
||||
result = PyLong_FromLongLong(value);
|
||||
break;
|
||||
}
|
||||
case google::protobuf::FieldDescriptor::CPPTYPE_UINT64: {
|
||||
uint64 value = reflection->GetRepeatedUInt64(
|
||||
*message, field_descriptor, index);
|
||||
result = PyLong_FromUnsignedLongLong(value);
|
||||
break;
|
||||
}
|
||||
case google::protobuf::FieldDescriptor::CPPTYPE_FLOAT: {
|
||||
float value = reflection->GetRepeatedFloat(
|
||||
*message, field_descriptor, index);
|
||||
result = PyFloat_FromDouble(value);
|
||||
break;
|
||||
}
|
||||
case google::protobuf::FieldDescriptor::CPPTYPE_DOUBLE: {
|
||||
double value = reflection->GetRepeatedDouble(
|
||||
*message, field_descriptor, index);
|
||||
result = PyFloat_FromDouble(value);
|
||||
break;
|
||||
}
|
||||
case google::protobuf::FieldDescriptor::CPPTYPE_BOOL: {
|
||||
bool value = reflection->GetRepeatedBool(
|
||||
*message, field_descriptor, index);
|
||||
result = PyBool_FromLong(value ? 1 : 0);
|
||||
break;
|
||||
}
|
||||
case google::protobuf::FieldDescriptor::CPPTYPE_ENUM: {
|
||||
const google::protobuf::EnumValueDescriptor* enum_value =
|
||||
message->GetReflection()->GetRepeatedEnum(
|
||||
*message, field_descriptor, index);
|
||||
result = PyInt_FromLong(enum_value->number());
|
||||
break;
|
||||
}
|
||||
case google::protobuf::FieldDescriptor::CPPTYPE_STRING: {
|
||||
string value = reflection->GetRepeatedString(
|
||||
*message, field_descriptor, index);
|
||||
result = ToStringObject(field_descriptor, value);
|
||||
break;
|
||||
}
|
||||
case google::protobuf::FieldDescriptor::CPPTYPE_MESSAGE: {
|
||||
PyObject* py_cmsg = PyObject_CallObject(reinterpret_cast<PyObject*>(
|
||||
&CMessage_Type), NULL);
|
||||
if (py_cmsg == NULL) {
|
||||
return NULL;
|
||||
}
|
||||
CMessage* cmsg = reinterpret_cast<CMessage*>(py_cmsg);
|
||||
const google::protobuf::Message& msg = reflection->GetRepeatedMessage(
|
||||
*message, field_descriptor, index);
|
||||
cmsg->owner = self->owner;
|
||||
cmsg->parent = self->parent;
|
||||
cmsg->message = const_cast<google::protobuf::Message*>(&msg);
|
||||
cmsg->read_only = false;
|
||||
result = reinterpret_cast<PyObject*>(py_cmsg);
|
||||
break;
|
||||
}
|
||||
default:
|
||||
PyErr_Format(
|
||||
PyExc_SystemError,
|
||||
"Getting value from a repeated field of unknown type %d",
|
||||
field_descriptor->cpp_type());
|
||||
}
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
static PyObject* Subscript(RepeatedScalarContainer* self, PyObject* slice) {
|
||||
Py_ssize_t from;
|
||||
Py_ssize_t to;
|
||||
Py_ssize_t step;
|
||||
Py_ssize_t length;
|
||||
Py_ssize_t slicelength;
|
||||
bool return_list = false;
|
||||
#if PY_MAJOR_VERSION < 3
|
||||
if (PyInt_Check(slice)) {
|
||||
from = to = PyInt_AsLong(slice);
|
||||
} else // NOLINT
|
||||
#endif
|
||||
if (PyLong_Check(slice)) {
|
||||
from = to = PyLong_AsLong(slice);
|
||||
} else if (PySlice_Check(slice)) {
|
||||
length = Len(self);
|
||||
#if PY_MAJOR_VERSION >= 3
|
||||
if (PySlice_GetIndicesEx(slice,
|
||||
#else
|
||||
if (PySlice_GetIndicesEx(reinterpret_cast<PySliceObject*>(slice),
|
||||
#endif
|
||||
length, &from, &to, &step, &slicelength) == -1) {
|
||||
return NULL;
|
||||
}
|
||||
return_list = true;
|
||||
} else {
|
||||
PyErr_SetString(PyExc_TypeError, "list indices must be integers");
|
||||
return NULL;
|
||||
}
|
||||
|
||||
if (!return_list) {
|
||||
return Item(self, from);
|
||||
}
|
||||
|
||||
PyObject* list = PyList_New(0);
|
||||
if (list == NULL) {
|
||||
return NULL;
|
||||
}
|
||||
if (from <= to) {
|
||||
if (step < 0) {
|
||||
return list;
|
||||
}
|
||||
for (Py_ssize_t index = from; index < to; index += step) {
|
||||
if (index < 0 || index >= length) {
|
||||
break;
|
||||
}
|
||||
ScopedPyObjectPtr s(Item(self, index));
|
||||
PyList_Append(list, s);
|
||||
}
|
||||
} else {
|
||||
if (step > 0) {
|
||||
return list;
|
||||
}
|
||||
for (Py_ssize_t index = from; index > to; index += step) {
|
||||
if (index < 0 || index >= length) {
|
||||
break;
|
||||
}
|
||||
ScopedPyObjectPtr s(Item(self, index));
|
||||
PyList_Append(list, s);
|
||||
}
|
||||
}
|
||||
return list;
|
||||
}
|
||||
|
||||
PyObject* Append(RepeatedScalarContainer* self, PyObject* item) {
|
||||
cmessage::AssureWritable(self->parent);
|
||||
google::protobuf::Message* message = self->message;
|
||||
const google::protobuf::FieldDescriptor* field_descriptor =
|
||||
self->parent_field->descriptor;
|
||||
|
||||
if (!FIELD_BELONGS_TO_MESSAGE(field_descriptor, message)) {
|
||||
PyErr_SetString(
|
||||
PyExc_KeyError, "Field does not belong to message!");
|
||||
return NULL;
|
||||
}
|
||||
|
||||
const google::protobuf::Reflection* reflection = message->GetReflection();
|
||||
switch (field_descriptor->cpp_type()) {
|
||||
case google::protobuf::FieldDescriptor::CPPTYPE_INT32: {
|
||||
GOOGLE_CHECK_GET_INT32(item, value, NULL);
|
||||
reflection->AddInt32(message, field_descriptor, value);
|
||||
break;
|
||||
}
|
||||
case google::protobuf::FieldDescriptor::CPPTYPE_INT64: {
|
||||
GOOGLE_CHECK_GET_INT64(item, value, NULL);
|
||||
reflection->AddInt64(message, field_descriptor, value);
|
||||
break;
|
||||
}
|
||||
case google::protobuf::FieldDescriptor::CPPTYPE_UINT32: {
|
||||
GOOGLE_CHECK_GET_UINT32(item, value, NULL);
|
||||
reflection->AddUInt32(message, field_descriptor, value);
|
||||
break;
|
||||
}
|
||||
case google::protobuf::FieldDescriptor::CPPTYPE_UINT64: {
|
||||
GOOGLE_CHECK_GET_UINT64(item, value, NULL);
|
||||
reflection->AddUInt64(message, field_descriptor, value);
|
||||
break;
|
||||
}
|
||||
case google::protobuf::FieldDescriptor::CPPTYPE_FLOAT: {
|
||||
GOOGLE_CHECK_GET_FLOAT(item, value, NULL);
|
||||
reflection->AddFloat(message, field_descriptor, value);
|
||||
break;
|
||||
}
|
||||
case google::protobuf::FieldDescriptor::CPPTYPE_DOUBLE: {
|
||||
GOOGLE_CHECK_GET_DOUBLE(item, value, NULL);
|
||||
reflection->AddDouble(message, field_descriptor, value);
|
||||
break;
|
||||
}
|
||||
case google::protobuf::FieldDescriptor::CPPTYPE_BOOL: {
|
||||
GOOGLE_CHECK_GET_BOOL(item, value, NULL);
|
||||
reflection->AddBool(message, field_descriptor, value);
|
||||
break;
|
||||
}
|
||||
case google::protobuf::FieldDescriptor::CPPTYPE_STRING: {
|
||||
if (!CheckAndSetString(
|
||||
item, message, field_descriptor, reflection, true, -1)) {
|
||||
return NULL;
|
||||
}
|
||||
break;
|
||||
}
|
||||
case google::protobuf::FieldDescriptor::CPPTYPE_ENUM: {
|
||||
GOOGLE_CHECK_GET_INT32(item, value, NULL);
|
||||
const google::protobuf::EnumDescriptor* enum_descriptor =
|
||||
field_descriptor->enum_type();
|
||||
const google::protobuf::EnumValueDescriptor* enum_value =
|
||||
enum_descriptor->FindValueByNumber(value);
|
||||
if (enum_value != NULL) {
|
||||
reflection->AddEnum(message, field_descriptor, enum_value);
|
||||
} else {
|
||||
ScopedPyObjectPtr s(PyObject_Str(item));
|
||||
if (s != NULL) {
|
||||
PyErr_Format(PyExc_ValueError, "Unknown enum value: %s",
|
||||
PyString_AsString(s.get()));
|
||||
}
|
||||
return NULL;
|
||||
}
|
||||
break;
|
||||
}
|
||||
default:
|
||||
PyErr_Format(
|
||||
PyExc_SystemError, "Adding value to a field of unknown type %d",
|
||||
field_descriptor->cpp_type());
|
||||
return NULL;
|
||||
}
|
||||
|
||||
Py_RETURN_NONE;
|
||||
}
|
||||
|
||||
static int AssSubscript(RepeatedScalarContainer* self,
|
||||
PyObject* slice,
|
||||
PyObject* value) {
|
||||
Py_ssize_t from;
|
||||
Py_ssize_t to;
|
||||
Py_ssize_t step;
|
||||
Py_ssize_t length;
|
||||
Py_ssize_t slicelength;
|
||||
bool create_list = false;
|
||||
|
||||
cmessage::AssureWritable(self->parent);
|
||||
google::protobuf::Message* message = self->message;
|
||||
const google::protobuf::FieldDescriptor* field_descriptor =
|
||||
self->parent_field->descriptor;
|
||||
|
||||
#if PY_MAJOR_VERSION < 3
|
||||
if (PyInt_Check(slice)) {
|
||||
from = to = PyInt_AsLong(slice);
|
||||
} else
|
||||
#endif
|
||||
if (PyLong_Check(slice)) {
|
||||
from = to = PyLong_AsLong(slice);
|
||||
} else if (PySlice_Check(slice)) {
|
||||
const google::protobuf::Reflection* reflection = message->GetReflection();
|
||||
length = reflection->FieldSize(*message, field_descriptor);
|
||||
#if PY_MAJOR_VERSION >= 3
|
||||
if (PySlice_GetIndicesEx(slice,
|
||||
#else
|
||||
if (PySlice_GetIndicesEx(reinterpret_cast<PySliceObject*>(slice),
|
||||
#endif
|
||||
length, &from, &to, &step, &slicelength) == -1) {
|
||||
return -1;
|
||||
}
|
||||
create_list = true;
|
||||
} else {
|
||||
PyErr_SetString(PyExc_TypeError, "list indices must be integers");
|
||||
return -1;
|
||||
}
|
||||
|
||||
if (value == NULL) {
|
||||
return cmessage::InternalDeleteRepeatedField(
|
||||
message, field_descriptor, slice, NULL);
|
||||
}
|
||||
|
||||
if (!create_list) {
|
||||
return AssignItem(self, from, value);
|
||||
}
|
||||
|
||||
ScopedPyObjectPtr full_slice(PySlice_New(NULL, NULL, NULL));
|
||||
if (full_slice == NULL) {
|
||||
return -1;
|
||||
}
|
||||
ScopedPyObjectPtr new_list(Subscript(self, full_slice));
|
||||
if (new_list == NULL) {
|
||||
return -1;
|
||||
}
|
||||
if (PySequence_SetSlice(new_list, from, to, value) < 0) {
|
||||
return -1;
|
||||
}
|
||||
|
||||
return InternalAssignRepeatedField(self, new_list);
|
||||
}
|
||||
|
||||
PyObject* Extend(RepeatedScalarContainer* self, PyObject* value) {
|
||||
cmessage::AssureWritable(self->parent);
|
||||
if (PyObject_Not(value)) {
|
||||
Py_RETURN_NONE;
|
||||
}
|
||||
ScopedPyObjectPtr iter(PyObject_GetIter(value));
|
||||
if (iter == NULL) {
|
||||
PyErr_SetString(PyExc_TypeError, "Value must be iterable");
|
||||
return NULL;
|
||||
}
|
||||
ScopedPyObjectPtr next;
|
||||
while ((next.reset(PyIter_Next(iter))) != NULL) {
|
||||
if (Append(self, next) == NULL) {
|
||||
return NULL;
|
||||
}
|
||||
}
|
||||
if (PyErr_Occurred()) {
|
||||
return NULL;
|
||||
}
|
||||
Py_RETURN_NONE;
|
||||
}
|
||||
|
||||
static PyObject* Insert(RepeatedScalarContainer* self, PyObject* args) {
|
||||
Py_ssize_t index;
|
||||
PyObject* value;
|
||||
if (!PyArg_ParseTuple(args, "lO", &index, &value)) {
|
||||
return NULL;
|
||||
}
|
||||
ScopedPyObjectPtr full_slice(PySlice_New(NULL, NULL, NULL));
|
||||
ScopedPyObjectPtr new_list(Subscript(self, full_slice));
|
||||
if (PyList_Insert(new_list, index, value) < 0) {
|
||||
return NULL;
|
||||
}
|
||||
int ret = InternalAssignRepeatedField(self, new_list);
|
||||
if (ret < 0) {
|
||||
return NULL;
|
||||
}
|
||||
Py_RETURN_NONE;
|
||||
}
|
||||
|
||||
static PyObject* Remove(RepeatedScalarContainer* self, PyObject* value) {
|
||||
Py_ssize_t match_index = -1;
|
||||
for (Py_ssize_t i = 0; i < Len(self); ++i) {
|
||||
ScopedPyObjectPtr elem(Item(self, i));
|
||||
if (PyObject_RichCompareBool(elem, value, Py_EQ)) {
|
||||
match_index = i;
|
||||
break;
|
||||
}
|
||||
}
|
||||
if (match_index == -1) {
|
||||
PyErr_SetString(PyExc_ValueError, "remove(x): x not in container");
|
||||
return NULL;
|
||||
}
|
||||
if (AssignItem(self, match_index, NULL) < 0) {
|
||||
return NULL;
|
||||
}
|
||||
Py_RETURN_NONE;
|
||||
}
|
||||
|
||||
static PyObject* RichCompare(RepeatedScalarContainer* self,
|
||||
PyObject* other,
|
||||
int opid) {
|
||||
if (opid != Py_EQ && opid != Py_NE) {
|
||||
Py_INCREF(Py_NotImplemented);
|
||||
return Py_NotImplemented;
|
||||
}
|
||||
|
||||
// Copy the contents of this repeated scalar container, and other if it is
|
||||
// also a repeated scalar container, into Python lists so we can delegate
|
||||
// to the list's compare method.
|
||||
|
||||
ScopedPyObjectPtr full_slice(PySlice_New(NULL, NULL, NULL));
|
||||
if (full_slice == NULL) {
|
||||
return NULL;
|
||||
}
|
||||
|
||||
ScopedPyObjectPtr other_list_deleter;
|
||||
if (PyObject_TypeCheck(other, &RepeatedScalarContainer_Type)) {
|
||||
other_list_deleter.reset(Subscript(
|
||||
reinterpret_cast<RepeatedScalarContainer*>(other), full_slice));
|
||||
other = other_list_deleter.get();
|
||||
}
|
||||
|
||||
ScopedPyObjectPtr list(Subscript(self, full_slice));
|
||||
if (list == NULL) {
|
||||
return NULL;
|
||||
}
|
||||
return PyObject_RichCompare(list, other, opid);
|
||||
}
|
||||
|
||||
PyObject* Reduce(RepeatedScalarContainer* unused_self) {
|
||||
PyErr_Format(
|
||||
PickleError_class,
|
||||
"can't pickle repeated message fields, convert to list first");
|
||||
return NULL;
|
||||
}
|
||||
|
||||
static PyObject* Sort(RepeatedScalarContainer* self,
|
||||
PyObject* args,
|
||||
PyObject* kwds) {
|
||||
// Support the old sort_function argument for backwards
|
||||
// compatibility.
|
||||
if (kwds != NULL) {
|
||||
PyObject* sort_func = PyDict_GetItemString(kwds, "sort_function");
|
||||
if (sort_func != NULL) {
|
||||
// Must set before deleting as sort_func is a borrowed reference
|
||||
// and kwds might be the only thing keeping it alive.
|
||||
if (PyDict_SetItemString(kwds, "cmp", sort_func) == -1)
|
||||
return NULL;
|
||||
if (PyDict_DelItemString(kwds, "sort_function") == -1)
|
||||
return NULL;
|
||||
}
|
||||
}
|
||||
|
||||
ScopedPyObjectPtr full_slice(PySlice_New(NULL, NULL, NULL));
|
||||
if (full_slice == NULL) {
|
||||
return NULL;
|
||||
}
|
||||
ScopedPyObjectPtr list(Subscript(self, full_slice));
|
||||
if (list == NULL) {
|
||||
return NULL;
|
||||
}
|
||||
ScopedPyObjectPtr m(PyObject_GetAttrString(list, "sort"));
|
||||
if (m == NULL) {
|
||||
return NULL;
|
||||
}
|
||||
ScopedPyObjectPtr res(PyObject_Call(m, args, kwds));
|
||||
if (res == NULL) {
|
||||
return NULL;
|
||||
}
|
||||
int ret = InternalAssignRepeatedField(self, list);
|
||||
if (ret < 0) {
|
||||
return NULL;
|
||||
}
|
||||
Py_RETURN_NONE;
|
||||
}
|
||||
|
||||
static int Init(RepeatedScalarContainer* self,
|
||||
PyObject* args,
|
||||
PyObject* kwargs) {
|
||||
PyObject* py_parent;
|
||||
PyObject* py_parent_field;
|
||||
if (!PyArg_UnpackTuple(args, "__init__()", 2, 2, &py_parent,
|
||||
&py_parent_field)) {
|
||||
return -1;
|
||||
}
|
||||
|
||||
if (!PyObject_TypeCheck(py_parent, &CMessage_Type)) {
|
||||
PyErr_Format(PyExc_TypeError,
|
||||
"expect %s, but got %s",
|
||||
CMessage_Type.tp_name,
|
||||
Py_TYPE(py_parent)->tp_name);
|
||||
return -1;
|
||||
}
|
||||
|
||||
if (!PyObject_TypeCheck(py_parent_field, &CFieldDescriptor_Type)) {
|
||||
PyErr_Format(PyExc_TypeError,
|
||||
"expect %s, but got %s",
|
||||
CFieldDescriptor_Type.tp_name,
|
||||
Py_TYPE(py_parent_field)->tp_name);
|
||||
return -1;
|
||||
}
|
||||
|
||||
CMessage* cmessage = reinterpret_cast<CMessage*>(py_parent);
|
||||
CFieldDescriptor* cdescriptor = reinterpret_cast<CFieldDescriptor*>(
|
||||
py_parent_field);
|
||||
|
||||
if (!FIELD_BELONGS_TO_MESSAGE(cdescriptor->descriptor, cmessage->message)) {
|
||||
PyErr_SetString(
|
||||
PyExc_KeyError, "Field does not belong to message!");
|
||||
return -1;
|
||||
}
|
||||
|
||||
self->message = cmessage->message;
|
||||
self->parent = cmessage;
|
||||
self->parent_field = cdescriptor;
|
||||
self->owner = cmessage->owner;
|
||||
return 0;
|
||||
}
|
||||
|
||||
// Initializes the underlying Message object of "to" so it becomes a new parent
|
||||
// repeated scalar, and copies all the values from "from" to it. A child scalar
|
||||
// container can be released by passing it as both from and to (e.g. making it
|
||||
// the recipient of the new parent message and copying the values from itself).
|
||||
static int InitializeAndCopyToParentContainer(
|
||||
RepeatedScalarContainer* from,
|
||||
RepeatedScalarContainer* to) {
|
||||
ScopedPyObjectPtr full_slice(PySlice_New(NULL, NULL, NULL));
|
||||
if (full_slice == NULL) {
|
||||
return -1;
|
||||
}
|
||||
ScopedPyObjectPtr values(Subscript(from, full_slice));
|
||||
if (values == NULL) {
|
||||
return -1;
|
||||
}
|
||||
google::protobuf::Message* new_message = global_message_factory->GetPrototype(
|
||||
from->message->GetDescriptor())->New();
|
||||
to->parent = NULL;
|
||||
// TODO(anuraag): Document why it's OK to hang on to parent_field,
|
||||
// even though it's a weak reference. It ought to be enough to
|
||||
// hold on to the FieldDescriptor only.
|
||||
to->parent_field = from->parent_field;
|
||||
to->message = new_message;
|
||||
to->owner.reset(new_message);
|
||||
if (InternalAssignRepeatedField(to, values) < 0) {
|
||||
return -1;
|
||||
}
|
||||
return 0;
|
||||
}
|
||||
|
||||
int Release(RepeatedScalarContainer* self) {
|
||||
return InitializeAndCopyToParentContainer(self, self);
|
||||
}
|
||||
|
||||
PyObject* DeepCopy(RepeatedScalarContainer* self, PyObject* arg) {
|
||||
ScopedPyObjectPtr init_args(
|
||||
PyTuple_Pack(2, self->parent, self->parent_field));
|
||||
PyObject* clone = PyObject_CallObject(
|
||||
reinterpret_cast<PyObject*>(&RepeatedScalarContainer_Type), init_args);
|
||||
if (clone == NULL) {
|
||||
return NULL;
|
||||
}
|
||||
if (!PyObject_TypeCheck(clone, &RepeatedScalarContainer_Type)) {
|
||||
Py_DECREF(clone);
|
||||
return NULL;
|
||||
}
|
||||
if (InitializeAndCopyToParentContainer(
|
||||
self, reinterpret_cast<RepeatedScalarContainer*>(clone)) < 0) {
|
||||
Py_DECREF(clone);
|
||||
return NULL;
|
||||
}
|
||||
return clone;
|
||||
}
|
||||
|
||||
static void Dealloc(RepeatedScalarContainer* self) {
|
||||
self->owner.reset();
|
||||
Py_TYPE(self)->tp_free(reinterpret_cast<PyObject*>(self));
|
||||
}
|
||||
|
||||
void SetOwner(RepeatedScalarContainer* self,
|
||||
const shared_ptr<Message>& new_owner) {
|
||||
self->owner = new_owner;
|
||||
}
|
||||
|
||||
static PySequenceMethods SqMethods = {
|
||||
(lenfunc)Len, /* sq_length */
|
||||
0, /* sq_concat */
|
||||
0, /* sq_repeat */
|
||||
(ssizeargfunc)Item, /* sq_item */
|
||||
0, /* sq_slice */
|
||||
(ssizeobjargproc)AssignItem /* sq_ass_item */
|
||||
};
|
||||
|
||||
static PyMappingMethods MpMethods = {
|
||||
(lenfunc)Len, /* mp_length */
|
||||
(binaryfunc)Subscript, /* mp_subscript */
|
||||
(objobjargproc)AssSubscript, /* mp_ass_subscript */
|
||||
};
|
||||
|
||||
static PyMethodDef Methods[] = {
|
||||
{ "__deepcopy__", (PyCFunction)DeepCopy, METH_VARARGS,
|
||||
"Makes a deep copy of the class." },
|
||||
{ "__reduce__", (PyCFunction)Reduce, METH_NOARGS,
|
||||
"Outputs picklable representation of the repeated field." },
|
||||
{ "append", (PyCFunction)Append, METH_O,
|
||||
"Appends an object to the repeated container." },
|
||||
{ "extend", (PyCFunction)Extend, METH_O,
|
||||
"Appends objects to the repeated container." },
|
||||
{ "insert", (PyCFunction)Insert, METH_VARARGS,
|
||||
"Appends objects to the repeated container." },
|
||||
{ "remove", (PyCFunction)Remove, METH_O,
|
||||
"Removes an object from the repeated container." },
|
||||
{ "sort", (PyCFunction)Sort, METH_VARARGS | METH_KEYWORDS,
|
||||
"Sorts the repeated container."},
|
||||
{ NULL, NULL }
|
||||
};
|
||||
|
||||
} // namespace repeated_scalar_container
|
||||
|
||||
PyTypeObject RepeatedScalarContainer_Type = {
|
||||
PyVarObject_HEAD_INIT(&PyType_Type, 0)
|
||||
"google.protobuf.internal."
|
||||
"cpp._message.RepeatedScalarContainer", // tp_name
|
||||
sizeof(RepeatedScalarContainer), // tp_basicsize
|
||||
0, // tp_itemsize
|
||||
(destructor)repeated_scalar_container::Dealloc, // tp_dealloc
|
||||
0, // tp_print
|
||||
0, // tp_getattr
|
||||
0, // tp_setattr
|
||||
0, // tp_compare
|
||||
0, // tp_repr
|
||||
0, // tp_as_number
|
||||
&repeated_scalar_container::SqMethods, // tp_as_sequence
|
||||
&repeated_scalar_container::MpMethods, // tp_as_mapping
|
||||
0, // tp_hash
|
||||
0, // tp_call
|
||||
0, // tp_str
|
||||
0, // tp_getattro
|
||||
0, // tp_setattro
|
||||
0, // tp_as_buffer
|
||||
Py_TPFLAGS_DEFAULT, // tp_flags
|
||||
"A Repeated scalar container", // tp_doc
|
||||
0, // tp_traverse
|
||||
0, // tp_clear
|
||||
(richcmpfunc)repeated_scalar_container::RichCompare, // tp_richcompare
|
||||
0, // tp_weaklistoffset
|
||||
0, // tp_iter
|
||||
0, // tp_iternext
|
||||
repeated_scalar_container::Methods, // tp_methods
|
||||
0, // tp_members
|
||||
0, // tp_getset
|
||||
0, // tp_base
|
||||
0, // tp_dict
|
||||
0, // tp_descr_get
|
||||
0, // tp_descr_set
|
||||
0, // tp_dictoffset
|
||||
(initproc)repeated_scalar_container::Init, // tp_init
|
||||
};
|
||||
|
||||
} // namespace python
|
||||
} // namespace protobuf
|
||||
} // namespace google
|
||||
@@ -0,0 +1,112 @@
|
||||
// Protocol Buffers - Google's data interchange format
|
||||
// Copyright 2008 Google Inc. All rights reserved.
|
||||
// https://developers.google.com/protocol-buffers/
|
||||
//
|
||||
// Redistribution and use in source and binary forms, with or without
|
||||
// modification, are permitted provided that the following conditions are
|
||||
// met:
|
||||
//
|
||||
// * Redistributions of source code must retain the above copyright
|
||||
// notice, this list of conditions and the following disclaimer.
|
||||
// * Redistributions in binary form must reproduce the above
|
||||
// copyright notice, this list of conditions and the following disclaimer
|
||||
// in the documentation and/or other materials provided with the
|
||||
// distribution.
|
||||
// * Neither the name of Google Inc. nor the names of its
|
||||
// contributors may be used to endorse or promote products derived from
|
||||
// this software without specific prior written permission.
|
||||
//
|
||||
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
||||
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
||||
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
||||
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
||||
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
||||
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
||||
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
||||
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
||||
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
||||
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
// Author: anuraag@google.com (Anuraag Agrawal)
|
||||
// Author: tibell@google.com (Johan Tibell)
|
||||
|
||||
#ifndef GOOGLE_PROTOBUF_PYTHON_CPP_REPEATED_SCALAR_CONTAINER_H__
|
||||
#define GOOGLE_PROTOBUF_PYTHON_CPP_REPEATED_SCALAR_CONTAINER_H__
|
||||
|
||||
#include <Python.h>
|
||||
|
||||
#include <memory>
|
||||
#ifndef _SHARED_PTR_H
|
||||
#include <google/protobuf/stubs/shared_ptr.h>
|
||||
#endif
|
||||
|
||||
|
||||
namespace google {
|
||||
namespace protobuf {
|
||||
|
||||
class Message;
|
||||
|
||||
using internal::shared_ptr;
|
||||
|
||||
namespace python {
|
||||
|
||||
struct CFieldDescriptor;
|
||||
struct CMessage;
|
||||
|
||||
typedef struct RepeatedScalarContainer {
|
||||
PyObject_HEAD;
|
||||
|
||||
// This is the top-level C++ Message object that owns the whole
|
||||
// proto tree. Every Python RepeatedScalarContainer holds a
|
||||
// reference to it in order to keep it alive as long as there's a
|
||||
// Python object that references any part of the tree.
|
||||
shared_ptr<Message> owner;
|
||||
|
||||
// Pointer to the C++ Message that contains this container. The
|
||||
// RepeatedScalarContainer does not own this pointer.
|
||||
Message* message;
|
||||
|
||||
// Weak reference to a parent CMessage object (i.e. may be NULL.)
|
||||
//
|
||||
// Used to make sure all ancestors are also mutable when first
|
||||
// modifying the container.
|
||||
CMessage* parent;
|
||||
|
||||
// Weak reference to the parent's descriptor that describes this
|
||||
// field. Used together with the parent's message when making a
|
||||
// default message instance mutable.
|
||||
CFieldDescriptor* parent_field;
|
||||
} RepeatedScalarContainer;
|
||||
|
||||
extern PyTypeObject RepeatedScalarContainer_Type;
|
||||
|
||||
namespace repeated_scalar_container {
|
||||
|
||||
// Appends the scalar 'item' to the end of the container 'self'.
|
||||
//
|
||||
// Returns None if successful; returns NULL and sets an exception if
|
||||
// unsuccessful.
|
||||
PyObject* Append(RepeatedScalarContainer* self, PyObject* item);
|
||||
|
||||
// Releases the messages in the container to a new message.
|
||||
//
|
||||
// Returns 0 on success, -1 on failure.
|
||||
int Release(RepeatedScalarContainer* self);
|
||||
|
||||
// Appends all the elements in the input iterator to the container.
|
||||
//
|
||||
// Returns None if successful; returns NULL and sets an exception if
|
||||
// unsuccessful.
|
||||
PyObject* Extend(RepeatedScalarContainer* self, PyObject* value);
|
||||
|
||||
// Set the owner field of self and any children of self.
|
||||
void SetOwner(RepeatedScalarContainer* self,
|
||||
const shared_ptr<Message>& new_owner);
|
||||
|
||||
} // namespace repeated_scalar_container
|
||||
} // namespace python
|
||||
} // namespace protobuf
|
||||
|
||||
} // namespace google
|
||||
#endif // GOOGLE_PROTOBUF_PYTHON_CPP_REPEATED_SCALAR_CONTAINER_H__
|
||||
@@ -0,0 +1,95 @@
|
||||
// Protocol Buffers - Google's data interchange format
|
||||
// Copyright 2008 Google Inc. All rights reserved.
|
||||
// https://developers.google.com/protocol-buffers/
|
||||
//
|
||||
// Redistribution and use in source and binary forms, with or without
|
||||
// modification, are permitted provided that the following conditions are
|
||||
// met:
|
||||
//
|
||||
// * Redistributions of source code must retain the above copyright
|
||||
// notice, this list of conditions and the following disclaimer.
|
||||
// * Redistributions in binary form must reproduce the above
|
||||
// copyright notice, this list of conditions and the following disclaimer
|
||||
// in the documentation and/or other materials provided with the
|
||||
// distribution.
|
||||
// * Neither the name of Google Inc. nor the names of its
|
||||
// contributors may be used to endorse or promote products derived from
|
||||
// this software without specific prior written permission.
|
||||
//
|
||||
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
||||
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
||||
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
||||
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
||||
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
||||
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
||||
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
||||
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
||||
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
||||
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
// Author: tibell@google.com (Johan Tibell)
|
||||
|
||||
#ifndef GOOGLE_PROTOBUF_PYTHON_CPP_SCOPED_PYOBJECT_PTR_H__
|
||||
#define GOOGLE_PROTOBUF_PYTHON_CPP_SCOPED_PYOBJECT_PTR_H__
|
||||
|
||||
#include <Python.h>
|
||||
|
||||
namespace google {
|
||||
class ScopedPyObjectPtr {
|
||||
public:
|
||||
// Constructor. Defaults to intializing with NULL.
|
||||
// There is no way to create an uninitialized ScopedPyObjectPtr.
|
||||
explicit ScopedPyObjectPtr(PyObject* p = NULL) : ptr_(p) { }
|
||||
|
||||
// Destructor. If there is a PyObject object, delete it.
|
||||
~ScopedPyObjectPtr() {
|
||||
Py_XDECREF(ptr_);
|
||||
}
|
||||
|
||||
// Reset. Deletes the current owned object, if any.
|
||||
// Then takes ownership of a new object, if given.
|
||||
// this->reset(this->get()) works.
|
||||
PyObject* reset(PyObject* p = NULL) {
|
||||
if (p != ptr_) {
|
||||
Py_XDECREF(ptr_);
|
||||
ptr_ = p;
|
||||
}
|
||||
return ptr_;
|
||||
}
|
||||
|
||||
// Releases ownership of the object.
|
||||
PyObject* release() {
|
||||
PyObject* p = ptr_;
|
||||
ptr_ = NULL;
|
||||
return p;
|
||||
}
|
||||
|
||||
operator PyObject*() { return ptr_; }
|
||||
|
||||
PyObject* operator->() const {
|
||||
assert(ptr_ != NULL);
|
||||
return ptr_;
|
||||
}
|
||||
|
||||
PyObject* get() const { return ptr_; }
|
||||
|
||||
Py_ssize_t refcnt() const { return Py_REFCNT(ptr_); }
|
||||
|
||||
void inc() const { Py_INCREF(ptr_); }
|
||||
|
||||
// Comparison operators.
|
||||
// These return whether a ScopedPyObjectPtr and a raw pointer
|
||||
// refer to the same object, not just to two different but equal
|
||||
// objects.
|
||||
bool operator==(const PyObject* p) const { return ptr_ == p; }
|
||||
bool operator!=(const PyObject* p) const { return ptr_ != p; }
|
||||
|
||||
private:
|
||||
PyObject* ptr_;
|
||||
|
||||
GOOGLE_DISALLOW_EVIL_CONSTRUCTORS(ScopedPyObjectPtr);
|
||||
};
|
||||
|
||||
} // namespace google
|
||||
#endif // GOOGLE_PROTOBUF_PYTHON_CPP_SCOPED_PYOBJECT_PTR_H__
|
||||
205
csgo2/sdk/protobuf-2.6.1/python/google/protobuf/reflection.py
Normal file
205
csgo2/sdk/protobuf-2.6.1/python/google/protobuf/reflection.py
Normal file
@@ -0,0 +1,205 @@
|
||||
# Protocol Buffers - Google's data interchange format
|
||||
# Copyright 2008 Google Inc. All rights reserved.
|
||||
# https://developers.google.com/protocol-buffers/
|
||||
#
|
||||
# Redistribution and use in source and binary forms, with or without
|
||||
# modification, are permitted provided that the following conditions are
|
||||
# met:
|
||||
#
|
||||
# * Redistributions of source code must retain the above copyright
|
||||
# notice, this list of conditions and the following disclaimer.
|
||||
# * Redistributions in binary form must reproduce the above
|
||||
# copyright notice, this list of conditions and the following disclaimer
|
||||
# in the documentation and/or other materials provided with the
|
||||
# distribution.
|
||||
# * Neither the name of Google Inc. nor the names of its
|
||||
# contributors may be used to endorse or promote products derived from
|
||||
# this software without specific prior written permission.
|
||||
#
|
||||
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
||||
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
||||
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
||||
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
||||
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
||||
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
||||
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
||||
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
||||
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
||||
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
# This code is meant to work on Python 2.4 and above only.
|
||||
|
||||
"""Contains a metaclass and helper functions used to create
|
||||
protocol message classes from Descriptor objects at runtime.
|
||||
|
||||
Recall that a metaclass is the "type" of a class.
|
||||
(A class is to a metaclass what an instance is to a class.)
|
||||
|
||||
In this case, we use the GeneratedProtocolMessageType metaclass
|
||||
to inject all the useful functionality into the classes
|
||||
output by the protocol compiler at compile-time.
|
||||
|
||||
The upshot of all this is that the real implementation
|
||||
details for ALL pure-Python protocol buffers are *here in
|
||||
this file*.
|
||||
"""
|
||||
|
||||
__author__ = 'robinson@google.com (Will Robinson)'
|
||||
|
||||
|
||||
from google.protobuf.internal import api_implementation
|
||||
from google.protobuf import descriptor as descriptor_mod
|
||||
from google.protobuf import message
|
||||
|
||||
_FieldDescriptor = descriptor_mod.FieldDescriptor
|
||||
|
||||
|
||||
# Select the concrete message implementation at import time.  When the
# C++-accelerated implementation is available, prefer it: API version 2
# lives in google.protobuf.pyext, version 1 in google.protobuf.internal.
# Otherwise fall back to the pure-Python implementation.  The rest of this
# module only uses the two entry points bound here.
if api_implementation.Type() == 'cpp':
  if api_implementation.Version() == 2:
    from google.protobuf.pyext import cpp_message
    _NewMessage = cpp_message.NewMessage
    _InitMessage = cpp_message.InitMessage
  else:
    from google.protobuf.internal import cpp_message
    _NewMessage = cpp_message.NewMessage
    _InitMessage = cpp_message.InitMessage
else:
  from google.protobuf.internal import python_message
  _NewMessage = python_message.NewMessage
  _InitMessage = python_message.InitMessage
|
||||
|
||||
|
||||
class GeneratedProtocolMessageType(type):

  """Metaclass that builds full protocol message classes at runtime.

  Given a class dictionary containing a Descriptor under DESCRIPTOR, this
  metaclass injects the complete Message API: implementations of every
  method declared on the Message class, plus properties for getting and
  setting all fields.  Slots are also installed so that assigning to a
  nonexistent field raises instead of being silently dropped from
  serialization.

  The protocol compiler emits classes that use this metaclass.  Clients
  may also build classes by hand, as in this example:

    mydescriptor = Descriptor(.....)
    class MyProtoClass(Message):
      __metaclass__ = GeneratedProtocolMessageType
      DESCRIPTOR = mydescriptor
    myproto_instance = MyProtoClass()
    myproto.foo_field = 23
    ...

  This manual pattern does not handle nested types; use
  reflection.MakeClass() instead to build the whole class tree.
  """

  # Key must stay consistent with the protocol-compiler code in
  # proto2/compiler/internal/generator.*.
  _DESCRIPTOR_KEY = 'DESCRIPTOR'

  def __new__(cls, name, bases, dictionary):
    """Allocates the runtime-generated class.

    __new__ (rather than __init__) is overridden because it is apparently
    the only hook where __slots__ can be meaningfully installed on the
    class being created.

    Args:
      name: Name of the class (required by the metaclass protocol).
      bases: Base classes of the class being constructed (should be
        message.Message); required by the metaclass protocol.
      dictionary: Class dict; dictionary[_DESCRIPTOR_KEY] must hold a
        Descriptor object describing this protocol message type.

    Returns:
      The newly allocated class.
    """
    msg_descriptor = dictionary[GeneratedProtocolMessageType._DESCRIPTOR_KEY]
    bases = _NewMessage(bases, msg_descriptor, dictionary)
    parent = super(GeneratedProtocolMessageType, cls)

    generated_class = parent.__new__(cls, name, bases, dictionary)
    # Link the descriptor back to the class implementing it.
    setattr(msg_descriptor, '_concrete_class', generated_class)
    return generated_class

  def __init__(cls, name, bases, dictionary):
    """Populates the freshly allocated class.

    Adds enum getters, an __init__ method, implementations of all Message
    methods, and properties for every field in the protocol type.

    Args:
      name: Name of the class (required by the metaclass protocol).
      bases: Base classes of the class being constructed (should be
        message.Message); required by the metaclass protocol.
      dictionary: Class dict; dictionary[_DESCRIPTOR_KEY] must hold a
        Descriptor object describing this protocol message type.
    """
    msg_descriptor = dictionary[GeneratedProtocolMessageType._DESCRIPTOR_KEY]
    _InitMessage(msg_descriptor, cls)
    parent = super(GeneratedProtocolMessageType, cls)
    parent.__init__(name, bases, dictionary)
|
||||
|
||||
|
||||
def ParseMessage(descriptor, byte_str):
  """Builds a message class for *descriptor* and parses *byte_str* into it.

  Args:
    descriptor: Protobuf Descriptor object.
    byte_str: Serialized protocol buffer byte string.

  Returns:
    A freshly created protobuf Message instance populated from byte_str.
  """
  message_class = MakeClass(descriptor)
  instance = message_class()
  instance.ParseFromString(byte_str)
  return instance
|
||||
|
||||
|
||||
def MakeClass(descriptor):
  """Builds the message class described by *descriptor*, nested types included.

  Composite descriptors are handled by recursing into each nested type and
  attaching the resulting class to the parent as an attribute.  This is the
  dynamic equivalent of:

    class Parent(message.Message):
      __metaclass__ = GeneratedProtocolMessageType
      DESCRIPTOR = descriptor
      class Child(message.Message):
        __metaclass__ = GeneratedProtocolMessageType
        DESCRIPTOR = descriptor.nested_types[0]

  Sample usage:
    file_descriptor = descriptor_pb2.FileDescriptorProto()
    file_descriptor.ParseFromString(proto2_string)
    msg_descriptor = descriptor.MakeDescriptor(file_descriptor.message_type[0])
    msg_class = reflection.MakeClass(msg_descriptor)
    msg = msg_class()

  Args:
    descriptor: A descriptor.Descriptor object describing the protobuf.
  Returns:
    The Message class object described by the descriptor.
  """
  class_dict = {}
  # Recurse first so nested message classes become attributes of the parent.
  for nested_name, nested_descriptor in descriptor.nested_types_by_name.items():
    class_dict[nested_name] = MakeClass(nested_descriptor)

  class_dict[GeneratedProtocolMessageType._DESCRIPTOR_KEY] = descriptor

  return GeneratedProtocolMessageType(str(descriptor.name), (message.Message,),
                                      class_dict)
|
||||
226
csgo2/sdk/protobuf-2.6.1/python/google/protobuf/service.py
Normal file
226
csgo2/sdk/protobuf-2.6.1/python/google/protobuf/service.py
Normal file
@@ -0,0 +1,226 @@
|
||||
# Protocol Buffers - Google's data interchange format
|
||||
# Copyright 2008 Google Inc. All rights reserved.
|
||||
# https://developers.google.com/protocol-buffers/
|
||||
#
|
||||
# Redistribution and use in source and binary forms, with or without
|
||||
# modification, are permitted provided that the following conditions are
|
||||
# met:
|
||||
#
|
||||
# * Redistributions of source code must retain the above copyright
|
||||
# notice, this list of conditions and the following disclaimer.
|
||||
# * Redistributions in binary form must reproduce the above
|
||||
# copyright notice, this list of conditions and the following disclaimer
|
||||
# in the documentation and/or other materials provided with the
|
||||
# distribution.
|
||||
# * Neither the name of Google Inc. nor the names of its
|
||||
# contributors may be used to endorse or promote products derived from
|
||||
# this software without specific prior written permission.
|
||||
#
|
||||
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
||||
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
||||
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
||||
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
||||
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
||||
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
||||
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
||||
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
||||
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
||||
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
"""DEPRECATED: Declares the RPC service interfaces.
|
||||
|
||||
This module declares the abstract interfaces underlying proto2 RPC
|
||||
services. These are intended to be independent of any particular RPC
|
||||
implementation, so that proto2 services can be used on top of a variety
|
||||
of implementations. Starting with version 2.3.0, RPC implementations should
|
||||
not try to build on these, but should instead provide code generator plugins
|
||||
which generate code specific to the particular RPC implementation. This way
|
||||
the generated code can be more appropriate for the implementation in use
|
||||
and can avoid unnecessary layers of indirection.
|
||||
"""
|
||||
|
||||
__author__ = 'petar@google.com (Petar Petrov)'
|
||||
|
||||
|
||||
class RpcException(Exception):
  """Raised when a blocking RPC method call fails."""
  pass
|
||||
|
||||
|
||||
class Service(object):

  """Abstract base interface for protocol-buffer-based RPC services.

  Services are abstract classes — implemented either by concrete servers
  or by generated stubs — and both derive from this interface.  Its
  methods make it possible to invoke a service's methods without knowing
  the service's exact type at compile time, analogous to the Message
  interface.
  """

  # NOTE: deliberately takes no `self`; generated classes replace this
  # with a staticmethod.
  def GetDescriptor():
    """Retrieves this service's descriptor."""
    raise NotImplementedError

  def CallMethod(self, method_descriptor, rpc_controller,
                 request, done):
    """Invokes the method that method_descriptor identifies.

    When "done" is None the call blocks and the response message is
    returned directly; otherwise the call is asynchronous and "done" is
    invoked later with the response value.

    In the blocking case, RpcException is raised on error.

    Preconditions:
    * method_descriptor.service == GetDescriptor
    * request is of the exact same classes as returned by
      GetRequestClass(method).
    * After the call has started, the request must not be modified.
    * "rpc_controller" is of the correct type for the RPC implementation
      being used by this Service.  For stubs, the "correct type" depends
      on the RpcChannel which the stub is using.

    Postconditions:
    * "done" will be called when the method is complete.  This may be
      before CallMethod() returns or at some point in the future.
    * If the RPC failed, the response value passed to "done" will be
      None.  Further details about the failure can be found by querying
      the RpcController.
    """
    raise NotImplementedError

  def GetRequestClass(self, method_descriptor):
    """Returns the class of the request message for the specified method.

    CallMethod() requires the request to be a particular subclass of
    Message; this accessor exposes that required type.

    Example:
      method = service.GetDescriptor().FindMethodByName("Foo")
      request = stub.GetRequestClass(method)()
      request.ParseFromString(input)
      service.CallMethod(method, request, callback)
    """
    raise NotImplementedError

  def GetResponseClass(self, method_descriptor):
    """Returns the class of the response message for the specified method.

    Not strictly necessary — the RpcChannel's CallMethod constructs the
    response message itself — but provided in case a caller wants to know
    the response type ahead of time.
    """
    raise NotImplementedError
|
||||
|
||||
|
||||
class RpcController(object):

  """An RpcController mediates a single method call.

  Its primary purpose is to expose settings specific to the RPC
  implementation and to surface RPC-level errors.  The interface is a
  deliberate "least common denominator" that every implementation is
  expected to support; specific implementations may offer richer features
  (e.g. deadline propagation).
  """

  # Client-side methods below

  def Reset(self):
    """Resets the RpcController to its initial state.

    Once reset, the controller may be reused for a new call.  Must not be
    invoked while an RPC is in progress.
    """
    raise NotImplementedError

  def Failed(self):
    """Returns true if the call failed.

    Only meaningful after a call has finished; Failed() must not be
    called before then.  The possible failure reasons depend on the RPC
    implementation, and when Failed() returns true the contents of the
    response message are undefined.
    """
    raise NotImplementedError

  def ErrorText(self):
    """If Failed is true, returns a human-readable description of the error."""
    raise NotImplementedError

  def StartCancel(self):
    """Initiate cancellation.

    Tells the RPC system that the caller would like the call canceled.
    The system may cancel it immediately, after a delay, or not at all.
    If the call is canceled, the "done" callback still fires and the
    RpcController reports the call as failed at that point.
    """
    raise NotImplementedError

  # Server-side methods below

  def SetFailed(self, reason):
    """Sets a failure reason.

    Makes Failed() return true on the client side, with "reason"
    incorporated into the ErrorText() message.  Machine-readable failure
    information belongs in your response protocol buffer instead — do
    NOT use SetFailed() for it.
    """
    raise NotImplementedError

  def IsCanceled(self):
    """Checks if the client cancelled the RPC.

    When true, the server may as well give up on replying, although it
    must still invoke the final "done" callback.
    """
    raise NotImplementedError

  def NotifyOnCancel(self, callback):
    """Sets a callback to invoke on cancel.

    The callback runs exactly once: immediately if the RPC is already
    canceled when NotifyOnCancel() is called, on cancellation if it
    happens later, or after completion if the RPC finishes uncanceled.

    NotifyOnCancel() must be called no more than once per request.
    """
    raise NotImplementedError
|
||||
|
||||
|
||||
class RpcChannel(object):

  """Abstract interface for an RPC channel.

  An RpcChannel is a communication line to a service — possibly running
  on another machine — over which that service's methods can be invoked.
  Normally you should not use an RpcChannel directly; wrap it in a stub
  {@link Service} instead.

  Example:
    RpcChannel channel = rpcImpl.Channel("remotehost.example.com:1234")
    RpcController controller = rpcImpl.Controller()
    MyService service = MyService_Stub(channel)
    service.MyMethod(controller, request, callback)
  """

  def CallMethod(self, method_descriptor, rpc_controller,
                 request, response_class, done):
    """Calls the method identified by the descriptor.

    Invokes the given method of the remote service.  The signature mirrors
    Service.CallMethod(), with one relaxed requirement: the request object
    does not have to be of any specific class, as long as its descriptor
    is method.input_type.
    """
    raise NotImplementedError
|
||||
@@ -0,0 +1,284 @@
|
||||
# Protocol Buffers - Google's data interchange format
|
||||
# Copyright 2008 Google Inc. All rights reserved.
|
||||
# https://developers.google.com/protocol-buffers/
|
||||
#
|
||||
# Redistribution and use in source and binary forms, with or without
|
||||
# modification, are permitted provided that the following conditions are
|
||||
# met:
|
||||
#
|
||||
# * Redistributions of source code must retain the above copyright
|
||||
# notice, this list of conditions and the following disclaimer.
|
||||
# * Redistributions in binary form must reproduce the above
|
||||
# copyright notice, this list of conditions and the following disclaimer
|
||||
# in the documentation and/or other materials provided with the
|
||||
# distribution.
|
||||
# * Neither the name of Google Inc. nor the names of its
|
||||
# contributors may be used to endorse or promote products derived from
|
||||
# this software without specific prior written permission.
|
||||
#
|
||||
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
||||
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
||||
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
||||
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
||||
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
||||
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
||||
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
||||
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
||||
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
||||
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
"""Contains metaclasses used to create protocol service and service stub
|
||||
classes from ServiceDescriptor objects at runtime.
|
||||
|
||||
The GeneratedServiceType and GeneratedServiceStubType metaclasses are used to
|
||||
inject all useful functionality into the classes output by the protocol
|
||||
compiler at compile-time.
|
||||
"""
|
||||
|
||||
__author__ = 'petar@google.com (Petar Petrov)'
|
||||
|
||||
|
||||
class GeneratedServiceType(type):

  """Metaclass for service classes created at runtime from ServiceDescriptors.

  This metaclass installs implementations of every method described in the
  Service class, plus properties for getting/setting all fields in the
  protocol message.

  The protocol compiler emits service classes that use this metaclass, but
  clients may also build their own classes manually:

    mydescriptor = ServiceDescriptor(.....)
    class MyProtoService(service.Service):
      __metaclass__ = GeneratedServiceType
      DESCRIPTOR = mydescriptor
    myservice_instance = MyProtoService()
    ...
  """

  _DESCRIPTOR_KEY = 'DESCRIPTOR'

  def __init__(cls, name, bases, dictionary):
    """Creates a message service class.

    Args:
      name: Name of the class (ignored, but required by the metaclass
        protocol).
      bases: Base classes of the class being constructed.
      dictionary: The class dictionary; dictionary[_DESCRIPTOR_KEY] must
        contain a ServiceDescriptor object describing this protocol
        service type.
    """
    # Subclasses of an already-generated service carry no descriptor of
    # their own; leave them untouched.
    if GeneratedServiceType._DESCRIPTOR_KEY not in dictionary:
      return
    svc_descriptor = dictionary[GeneratedServiceType._DESCRIPTOR_KEY]
    _ServiceBuilder(svc_descriptor).BuildService(cls)
|
||||
|
||||
|
||||
class GeneratedServiceStubType(GeneratedServiceType):

  """Metaclass for service stubs created at runtime from ServiceDescriptors.

  Plays the same role as GeneratedServiceType, except that it produces the
  service stub classes rather than the service classes themselves.
  """

  _DESCRIPTOR_KEY = 'DESCRIPTOR'

  def __init__(cls, name, bases, dictionary):
    """Creates a message service stub class.

    Args:
      name: Name of the class (ignored, here).
      bases: Base classes of the class being constructed.
      dictionary: The class dictionary; dictionary[_DESCRIPTOR_KEY] must
        contain a ServiceDescriptor object describing this protocol
        service type.
    """
    super(GeneratedServiceStubType, cls).__init__(name, bases, dictionary)
    # Subclasses of an already-generated stub carry no descriptor of
    # their own; nothing more to install.
    if GeneratedServiceStubType._DESCRIPTOR_KEY not in dictionary:
      return
    stub_descriptor = dictionary[GeneratedServiceStubType._DESCRIPTOR_KEY]
    _ServiceStubBuilder(stub_descriptor).BuildServiceStub(cls)
|
||||
|
||||
|
||||
class _ServiceBuilder(object):

  """This class constructs a protocol service class using a service descriptor.

  Given a service descriptor, this class constructs a class that represents
  the specified service descriptor. One service builder instance constructs
  exactly one service class. That means all instances of that class share the
  same builder.
  """

  def __init__(self, service_descriptor):
    """Initializes an instance of the service class builder.

    Args:
      service_descriptor: ServiceDescriptor to use when constructing the
        service class.
    """
    # The descriptor drives everything BuildService() installs on the class.
    self.descriptor = service_descriptor

  def BuildService(self, cls):
    """Constructs the service class.

    Args:
      cls: The class that will be constructed.
    """

    # CallMethod needs to operate with an instance of the Service class. This
    # internal wrapper function exists only to be able to pass the service
    # instance to the method that does the real CallMethod work.
    def _WrapCallMethod(srvc, method_descriptor,
                        rpc_controller, request, callback):
      return self._CallMethod(srvc, method_descriptor,
                       rpc_controller, request, callback)
    # NOTE(review): this keeps a reference to the most recently built class
    # on the builder itself.
    self.cls = cls
    cls.CallMethod = _WrapCallMethod
    # GetDescriptor is a staticmethod whose lambda closes over this builder,
    # so it needs no service instance to answer.
    cls.GetDescriptor = staticmethod(lambda: self.descriptor)
    cls.GetDescriptor.__doc__ = "Returns the service descriptor."
    cls.GetRequestClass = self._GetRequestClass
    cls.GetResponseClass = self._GetResponseClass
    # Every method declared in the descriptor starts as a not-implemented
    # stub; concrete service implementations override them.
    for method in self.descriptor.methods:
      setattr(cls, method.name, self._GenerateNonImplementedMethod(method))

  def _CallMethod(self, srvc, method_descriptor,
                  rpc_controller, request, callback):
    """Calls the method described by a given method descriptor.

    Args:
      srvc: Instance of the service for which this method is called.
      method_descriptor: Descriptor that represent the method to call.
      rpc_controller: RPC controller to use for this method's execution.
      request: Request protocol message.
      callback: A callback to invoke after the method has completed.
    """
    if method_descriptor.containing_service != self.descriptor:
      raise RuntimeError(
          'CallMethod() given method descriptor for wrong service type.')
    # Dispatch by method name so that overridden implementations on the
    # service instance are found.
    method = getattr(srvc, method_descriptor.name)
    return method(rpc_controller, request, callback)

  def _GetRequestClass(self, method_descriptor):
    """Returns the class of the request protocol message.

    Args:
      method_descriptor: Descriptor of the method for which to return the
        request protocol message class.

    Returns:
      A class that represents the input protocol message of the specified
      method.
    """
    if method_descriptor.containing_service != self.descriptor:
      raise RuntimeError(
          'GetRequestClass() given method descriptor for wrong service type.')
    return method_descriptor.input_type._concrete_class

  def _GetResponseClass(self, method_descriptor):
    """Returns the class of the response protocol message.

    Args:
      method_descriptor: Descriptor of the method for which to return the
        response protocol message class.

    Returns:
      A class that represents the output protocol message of the specified
      method.
    """
    if method_descriptor.containing_service != self.descriptor:
      raise RuntimeError(
          'GetResponseClass() given method descriptor for wrong service type.')
    return method_descriptor.output_type._concrete_class

  def _GenerateNonImplementedMethod(self, method):
    """Generates and returns a method that can be set for a service methods.

    Args:
      method: Descriptor of the service method for which a method is to be
        generated.

    Returns:
      A method that can be added to the service class.
    """
    # The lambda closes over `method`, so each generated stub reports its
    # own method name when invoked.
    return lambda inst, rpc_controller, request, callback: (
        self._NonImplementedMethod(method.name, rpc_controller, callback))

  def _NonImplementedMethod(self, method_name, rpc_controller, callback):
    """The body of all methods in the generated service class.

    Args:
      method_name: Name of the method being executed.
      rpc_controller: RPC controller used to execute this method.
      callback: A callback which will be invoked when the method finishes.
    """
    # Reports failure through the controller rather than raising, then
    # completes the call via the callback with no response.
    rpc_controller.SetFailed('Method %s not implemented.' % method_name)
    callback(None)
|
||||
|
||||
|
||||
class _ServiceStubBuilder(object):

  """Constructs a protocol service stub class using a service descriptor.

  Given a service descriptor, this class constructs a suitable stub class.
  A stub is just a type-safe wrapper around an RpcChannel which emulates a
  local implementation of the service.

  One service stub builder instance constructs exactly one class. It means all
  instances of that class share the same service stub builder.
  """

  def __init__(self, service_descriptor):
    """Initializes an instance of the service stub class builder.

    Args:
      service_descriptor: ServiceDescriptor to use when constructing the
        stub class.
    """
    self.descriptor = service_descriptor

  def BuildServiceStub(self, cls):
    """Constructs the stub class.

    Args:
      cls: The class that will be constructed.
    """

    # A stub's only state is the channel that every call is forwarded over.
    def _ServiceStubInit(stub, rpc_channel):
      stub.rpc_channel = rpc_channel
    # NOTE(review): this keeps a reference to the most recently built class
    # on the builder itself.
    self.cls = cls
    # Replaces the class constructor, so stubs are created directly with a
    # channel: MyService_Stub(channel).
    cls.__init__ = _ServiceStubInit
    # Each method declared in the descriptor becomes a forwarder to the
    # channel.
    for method in self.descriptor.methods:
      setattr(cls, method.name, self._GenerateStubMethod(method))

  def _GenerateStubMethod(self, method):
    # Returns a per-method forwarder.  The lambda closes over `method`;
    # `callback=None` makes the generated call blocking by default.
    return (lambda inst, rpc_controller, request, callback=None:
        self._StubMethod(inst, method, rpc_controller, request, callback))

  def _StubMethod(self, stub, method_descriptor,
                  rpc_controller, request, callback):
    """The body of all service methods in the generated stub class.

    Args:
      stub: Stub instance.
      method_descriptor: Descriptor of the invoked method.
      rpc_controller: Rpc controller to execute the method.
      request: Request protocol message.
      callback: A callback to execute when the method finishes.
    Returns:
      Response message (in case of blocking call).
    """
    # The response class is taken from the descriptor so the channel can
    # construct the right message type.
    return stub.rpc_channel.CallMethod(
        method_descriptor, rpc_controller, request,
        method_descriptor.output_type._concrete_class, callback)
|
||||
@@ -0,0 +1,185 @@
|
||||
# Protocol Buffers - Google's data interchange format
|
||||
# Copyright 2008 Google Inc. All rights reserved.
|
||||
# https://developers.google.com/protocol-buffers/
|
||||
#
|
||||
# Redistribution and use in source and binary forms, with or without
|
||||
# modification, are permitted provided that the following conditions are
|
||||
# met:
|
||||
#
|
||||
# * Redistributions of source code must retain the above copyright
|
||||
# notice, this list of conditions and the following disclaimer.
|
||||
# * Redistributions in binary form must reproduce the above
|
||||
# copyright notice, this list of conditions and the following disclaimer
|
||||
# in the documentation and/or other materials provided with the
|
||||
# distribution.
|
||||
# * Neither the name of Google Inc. nor the names of its
|
||||
# contributors may be used to endorse or promote products derived from
|
||||
# this software without specific prior written permission.
|
||||
#
|
||||
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
||||
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
||||
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
||||
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
||||
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
||||
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
||||
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
||||
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
||||
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
||||
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
"""A database of Python protocol buffer generated symbols.
|
||||
|
||||
SymbolDatabase makes it easy to create new instances of a registered type, given
|
||||
only the type's protocol buffer symbol name. Once all symbols are registered,
|
||||
they can be accessed using either the MessageFactory interface which
|
||||
SymbolDatabase exposes, or the DescriptorPool interface of the underlying
|
||||
pool.
|
||||
|
||||
Example usage:
|
||||
|
||||
db = symbol_database.SymbolDatabase()
|
||||
|
||||
# Register symbols of interest, from one or multiple files.
|
||||
db.RegisterFileDescriptor(my_proto_pb2.DESCRIPTOR)
|
||||
db.RegisterMessage(my_proto_pb2.MyMessage)
|
||||
db.RegisterEnumDescriptor(my_proto_pb2.MyEnum.DESCRIPTOR)
|
||||
|
||||
# The database can be used as a MessageFactory, to generate types based on
|
||||
# their name:
|
||||
types = db.GetMessages(['my_proto.proto'])
|
||||
my_message_instance = types['MyMessage']()
|
||||
|
||||
# The database's underlying descriptor pool can be queried, so it's not
|
||||
# necessary to know a type's filename to be able to generate it:
|
||||
filename = db.pool.FindFileContainingSymbol('MyMessage')
|
||||
my_message_instance = db.GetMessages([filename])['MyMessage']()
|
||||
|
||||
# This functionality is also provided directly via a convenience method:
|
||||
my_message_instance = db.GetSymbol('MyMessage')()
|
||||
"""
|
||||
|
||||
|
||||
from google.protobuf import descriptor_pool
|
||||
|
||||
|
||||
class SymbolDatabase(object):
  """A database of Python generated symbols.

  SymbolDatabase also models message_factory.MessageFactory.

  The symbol database can be used to keep a global registry of all protocol
  buffer types used within a program.
  """

  def __init__(self):
    """Constructor."""
    # Maps a message's full name to its generated class.
    self._symbols = {}
    # Maps a .proto file name to a {full_name: message class} dict.
    self._symbols_by_file = {}
    self.pool = descriptor_pool.DescriptorPool()

  def RegisterMessage(self, message):
    """Registers the given message type in the local database.

    Args:
      message: a message.Message, to be registered.

    Returns:
      The provided message.
    """
    desc = message.DESCRIPTOR
    self._symbols[desc.full_name] = message
    # setdefault creates the per-file registry on first use and keeps any
    # previously registered messages for the same file.
    self._symbols_by_file.setdefault(desc.file.name, {})[desc.full_name] = (
        message)
    self.pool.AddDescriptor(desc)
    return message

  def RegisterEnumDescriptor(self, enum_descriptor):
    """Registers the given enum descriptor in the local database.

    Args:
      enum_descriptor: a descriptor.EnumDescriptor.

    Returns:
      The provided descriptor.
    """
    self.pool.AddEnumDescriptor(enum_descriptor)
    return enum_descriptor

  def RegisterFileDescriptor(self, file_descriptor):
    """Registers the given file descriptor in the local database.

    Args:
      file_descriptor: a descriptor.FileDescriptor.

    Returns:
      The provided descriptor.
    """
    self.pool.AddFileDescriptor(file_descriptor)
    # Fix: the docstring promises the descriptor back, but this method
    # previously returned None, unlike its sibling Register* methods.
    return file_descriptor

  def GetSymbol(self, symbol):
    """Tries to find a symbol in the local database.

    Currently, this method only returns message.Message instances; however, it
    may be extended in future to support other symbol types.

    Args:
      symbol: A str, a protocol buffer symbol.

    Returns:
      A Python class corresponding to the symbol.

    Raises:
      KeyError: if the symbol could not be found.
    """
    return self._symbols[symbol]

  def GetPrototype(self, descriptor):
    """Builds a proto2 message class based on the passed in descriptor.

    Passing a descriptor with a fully qualified name matching a previous
    invocation will cause the same class to be returned.

    Args:
      descriptor: The descriptor to build from.

    Returns:
      A class describing the passed in descriptor.
    """
    return self.GetSymbol(descriptor.full_name)

  def GetMessages(self, files):
    """Gets all the messages from a specified file.

    This will find and resolve dependencies, failing if they are not registered
    in the symbol database.

    Args:
      files: The file names to extract messages from.

    Returns:
      A dictionary mapping proto names to the message classes. This will
      include any dependent messages as well as any messages defined in the
      same file as a specified message.

    Raises:
      KeyError: if a file could not be found.
    """
    result = {}
    for f in files:
      result.update(self._symbols_by_file[f])
    return result
||||
# Process-wide shared database instance returned by Default().
_DEFAULT = SymbolDatabase()


def Default():
  """Returns the default SymbolDatabase."""
  return _DEFAULT
110
csgo2/sdk/protobuf-2.6.1/python/google/protobuf/text_encoding.py
Normal file
110
csgo2/sdk/protobuf-2.6.1/python/google/protobuf/text_encoding.py
Normal file
@@ -0,0 +1,110 @@
|
||||
# Protocol Buffers - Google's data interchange format
|
||||
# Copyright 2008 Google Inc. All rights reserved.
|
||||
# https://developers.google.com/protocol-buffers/
|
||||
#
|
||||
# Redistribution and use in source and binary forms, with or without
|
||||
# modification, are permitted provided that the following conditions are
|
||||
# met:
|
||||
#
|
||||
# * Redistributions of source code must retain the above copyright
|
||||
# notice, this list of conditions and the following disclaimer.
|
||||
# * Redistributions in binary form must reproduce the above
|
||||
# copyright notice, this list of conditions and the following disclaimer
|
||||
# in the documentation and/or other materials provided with the
|
||||
# distribution.
|
||||
# * Neither the name of Google Inc. nor the names of its
|
||||
# contributors may be used to endorse or promote products derived from
|
||||
# this software without specific prior written permission.
|
||||
#
|
||||
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
||||
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
||||
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
||||
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
||||
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
||||
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
||||
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
||||
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
||||
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
||||
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
#PY25 compatible for GAE.
|
||||
#
|
||||
"""Encoding related utilities."""
|
||||
|
||||
import re
|
||||
import sys ##PY25
|
||||
|
||||
# Lookup table for utf8: byte value -> escaped text.  Only quotes and
# backslash are escaped (plus the readability escapes \t \n \r \').
# NOTE: xrange makes this module Python 2 only.
_cescape_utf8_to_str = [chr(i) for i in xrange(0, 256)]
_cescape_utf8_to_str[9] = r'\t'  # optional escape
_cescape_utf8_to_str[10] = r'\n'  # optional escape
_cescape_utf8_to_str[13] = r'\r'  # optional escape
_cescape_utf8_to_str[39] = r"\'"  # optional escape

_cescape_utf8_to_str[34] = r'\"'  # necessary escape
_cescape_utf8_to_str[92] = r'\\'  # necessary escape

# Lookup table for non-utf8, with necessary escapes at (o >= 127 or o < 32):
# everything outside printable ASCII becomes a three-digit octal escape.
_cescape_byte_to_str = ([r'\%03o' % i for i in xrange(0, 32)] +
                        [chr(i) for i in xrange(32, 127)] +
                        [r'\%03o' % i for i in xrange(127, 256)])
_cescape_byte_to_str[9] = r'\t'  # optional escape
_cescape_byte_to_str[10] = r'\n'  # optional escape
_cescape_byte_to_str[13] = r'\r'  # optional escape
_cescape_byte_to_str[39] = r"\'"  # optional escape

_cescape_byte_to_str[34] = r'\"'  # necessary escape
_cescape_byte_to_str[92] = r'\\'  # necessary escape
||||
def CEscape(text, as_utf8):
  """Escape a bytes string for use in an ascii protocol buffer.

  text.encode('string_escape') does not seem to satisfy our needs as it
  encodes unprintable characters using two-digit hex escapes whereas our
  C++ unescaping function allows hex escapes to be any length.  So,
  "\0011".encode('string_escape') ends up being "\\x011", which will be
  decoded in C++ as a single-character string with char code 0x11.

  Args:
    text: A byte string to be escaped
    as_utf8: Specifies if result should be returned in UTF-8 encoding
  Returns:
    Escaped string
  """
  # PY3 hack: make Ord work for str and bytes:
  # //platforms/networking/data uses unicode here, hence basestring.
  # NOTE(review): `basestring` exists only on Python 2; this branch never
  # runs unmodified on Python 3 despite the "PY3 hack" comment.
  Ord = ord if isinstance(text, basestring) else lambda x: x
  # Select the table: utf8 mode escapes only the necessary/optional set,
  # byte mode additionally octal-escapes all non-printable-ASCII bytes.
  if as_utf8:
    return ''.join(_cescape_utf8_to_str[Ord(c)] for c in text)
  return ''.join(_cescape_byte_to_str[Ord(c)] for c in text)
||||
# Matches a single-digit hex escape (e.g. r'\xf') whose backslash run length
# is captured in group 1; used by CUnescape to pad it to two digits.
_CUNESCAPE_HEX = re.compile(r'(\\+)x([0-9a-fA-F])(?![0-9a-fA-F])')
# High-bit bytes (>= 127) rendered as octal escapes; ASCII passes through.
_cescape_highbit_to_str = ([chr(i) for i in range(0, 127)] +
                           [r'\%03o' % i for i in range(127, 256)])
||||
def CUnescape(text):
  """Unescape a text string with C-style escape sequences to UTF-8 bytes."""

  def ReplaceHex(m):
    # Only replace the match if the number of leading back slashes is odd. i.e.
    # the slash itself is not escaped.
    if len(m.group(1)) & 1:
      return m.group(1) + 'x0' + m.group(2)
    return m.group(0)

  # This is required because the 'string_escape' encoding doesn't
  # allow single-digit hex escapes (like '\xf').
  result = _CUNESCAPE_HEX.sub(ReplaceHex, text)

  # Python 2 path: the builtin codec decodes all C-style escapes directly.
  if sys.version_info[0] < 3:  ##PY25
##!PY25  if str is bytes:  # PY2
    return result.decode('string_escape')
  # Python 3 path (dead given the ##PY25 guard above): octal-escape high
  # bytes first so the unicode_escape round-trip preserves them.
  result = ''.join(_cescape_highbit_to_str[ord(c)] for c in result)
  return (result.encode('ascii')  # Make it bytes to allow decode.
          .decode('unicode_escape')
          # Make it bytes again to return the proper type.
          .encode('raw_unicode_escape'))
873
csgo2/sdk/protobuf-2.6.1/python/google/protobuf/text_format.py
Normal file
873
csgo2/sdk/protobuf-2.6.1/python/google/protobuf/text_format.py
Normal file
@@ -0,0 +1,873 @@
|
||||
# Protocol Buffers - Google's data interchange format
|
||||
# Copyright 2008 Google Inc. All rights reserved.
|
||||
# https://developers.google.com/protocol-buffers/
|
||||
#
|
||||
# Redistribution and use in source and binary forms, with or without
|
||||
# modification, are permitted provided that the following conditions are
|
||||
# met:
|
||||
#
|
||||
# * Redistributions of source code must retain the above copyright
|
||||
# notice, this list of conditions and the following disclaimer.
|
||||
# * Redistributions in binary form must reproduce the above
|
||||
# copyright notice, this list of conditions and the following disclaimer
|
||||
# in the documentation and/or other materials provided with the
|
||||
# distribution.
|
||||
# * Neither the name of Google Inc. nor the names of its
|
||||
# contributors may be used to endorse or promote products derived from
|
||||
# this software without specific prior written permission.
|
||||
#
|
||||
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
|
||||
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
|
||||
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
|
||||
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
|
||||
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
|
||||
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
|
||||
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
|
||||
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
|
||||
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
||||
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
||||
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||
|
||||
#PY25 compatible for GAE.
|
||||
#
|
||||
# Copyright 2007 Google Inc. All Rights Reserved.
|
||||
|
||||
"""Contains routines for printing protocol messages in text format."""
|
||||
|
||||
__author__ = 'kenton@google.com (Kenton Varda)'
|
||||
|
||||
import cStringIO
|
||||
import re
|
||||
|
||||
from google.protobuf.internal import type_checkers
|
||||
from google.protobuf import descriptor
|
||||
from google.protobuf import text_encoding
|
||||
|
||||
# Public API of this module.
__all__ = ['MessageToString', 'PrintMessage', 'PrintField',
           'PrintFieldValue', 'Merge']


# Range checkers for parsed integers, one per (signedness, width) combination.
# NOTE(review): the tuple order appears significant for indexed lookup by the
# integer-parsing helpers defined later in this file — confirm before reordering.
_INTEGER_CHECKERS = (type_checkers.Uint32ValueChecker(),
                     type_checkers.Int32ValueChecker(),
                     type_checkers.Uint64ValueChecker(),
                     type_checkers.Int64ValueChecker())
# Accepted textual spellings of float specials ("inf", "-Infinity", "nanf"...).
_FLOAT_INFINITY = re.compile('-?inf(?:inity)?f?', re.IGNORECASE)
_FLOAT_NAN = re.compile('nanf?', re.IGNORECASE)
# cpp_types whose formatting honors the float_format argument.
_FLOAT_TYPES = frozenset([descriptor.FieldDescriptor.CPPTYPE_FLOAT,
                          descriptor.FieldDescriptor.CPPTYPE_DOUBLE])
||||
class Error(Exception):
  """Base exception for all errors raised by the text_format module."""


class ParseError(Error):
  """Raised when the ASCII text of a message cannot be parsed."""
||||
def MessageToString(message, as_utf8=False, as_one_line=False,
                    pointy_brackets=False, use_index_order=False,
                    float_format=None):
  """Convert protobuf message to text format.

  Floating point values can be formatted compactly with 15 digits of
  precision (which is the most that IEEE 754 "double" can guarantee)
  using float_format='.15g'.

  Args:
    message: The protocol buffers message.
    as_utf8: Produce text output in UTF8 format.
    as_one_line: Don't introduce newlines between fields.
    pointy_brackets: If True, use angle brackets instead of curly braces for
      nesting.
    use_index_order: If True, print fields of a proto message using the order
      defined in source code instead of the field number. By default, use the
      field number order.
    float_format: If set, use this to specify floating point number formatting
      (per the "Format Specification Mini-Language"); otherwise, str() is used.

  Returns:
    A string of the text formatted protocol buffer message.
  """
  # cStringIO is Python 2 only; render into an in-memory buffer.
  out = cStringIO.StringIO()
  PrintMessage(message, out, as_utf8=as_utf8, as_one_line=as_one_line,
               pointy_brackets=pointy_brackets,
               use_index_order=use_index_order,
               float_format=float_format)
  result = out.getvalue()
  out.close()
  if as_one_line:
    # One-line mode emits a trailing separator space after every field.
    return result.rstrip()
  return result
||||
def PrintMessage(message, out, indent=0, as_utf8=False, as_one_line=False,
                 pointy_brackets=False, use_index_order=False,
                 float_format=None):
  """Write the text-format rendering of every set field of `message` to `out`.

  Args:
    message: The protocol buffers message to render.
    out: A writable file-like object receiving the output.
    indent: Number of leading spaces for each field line.
    as_utf8: Produce text output in UTF8 format.
    as_one_line: Don't introduce newlines between fields.
    pointy_brackets: If True, use angle brackets instead of curly braces.
    use_index_order: If True, order fields by source-code declaration index
      instead of field number.
    float_format: Optional format spec for float/double values.
  """
  fields = message.ListFields()
  if use_index_order:
    fields.sort(key=lambda entry: entry[0].index)
  for field, value in fields:
    # Treat a singular value as a one-element sequence so repeated and
    # singular fields share the same emission loop.
    if field.label == descriptor.FieldDescriptor.LABEL_REPEATED:
      elements = value
    else:
      elements = [value]
    for element in elements:
      PrintField(field, element, out, indent, as_utf8, as_one_line,
                 pointy_brackets=pointy_brackets,
                 float_format=float_format)
||||
def PrintField(field, value, out, indent=0, as_utf8=False, as_one_line=False,
               pointy_brackets=False, float_format=None):
  """Print a single field name/value pair.  For repeated fields, the value
  should be a single element."""

  out.write(' ' * indent)
  if field.is_extension:
    # Extensions are printed as "[full.name]".
    out.write('[')
    # MessageSet special case: print the contained message's type name
    # instead of the extension field's own name.
    if (field.containing_type.GetOptions().message_set_wire_format and
        field.type == descriptor.FieldDescriptor.TYPE_MESSAGE and
        field.message_type == field.extension_scope and
        field.label == descriptor.FieldDescriptor.LABEL_OPTIONAL):
      out.write(field.message_type.full_name)
    else:
      out.write(field.full_name)
    out.write(']')
  elif field.type == descriptor.FieldDescriptor.TYPE_GROUP:
    # For groups, use the capitalized name.
    out.write(field.message_type.name)
  else:
    out.write(field.name)

  if field.cpp_type != descriptor.FieldDescriptor.CPPTYPE_MESSAGE:
    # The colon is optional in this case, but our cross-language golden files
    # don't include it.
    out.write(': ')

  PrintFieldValue(field, value, out, indent, as_utf8, as_one_line,
                  pointy_brackets=pointy_brackets,
                  float_format=float_format)
  # Fields are separated by a space in one-line mode, a newline otherwise.
  if as_one_line:
    out.write(' ')
  else:
    out.write('\n')
||||
def PrintFieldValue(field, value, out, indent=0, as_utf8=False,
                    as_one_line=False, pointy_brackets=False,
                    float_format=None):
  """Print a single field value (not including name).  For repeated fields,
  the value should be a single element."""

  if pointy_brackets:
    openb = '<'
    closeb = '>'
  else:
    openb = '{'
    closeb = '}'

  if field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_MESSAGE:
    # Nested message: recurse with two extra spaces of indent (multi-line)
    # or no indent change (one-line mode).
    if as_one_line:
      out.write(' %s ' % openb)
      PrintMessage(value, out, indent, as_utf8, as_one_line,
                   pointy_brackets=pointy_brackets,
                   float_format=float_format)
      out.write(closeb)
    else:
      out.write(' %s\n' % openb)
      PrintMessage(value, out, indent + 2, as_utf8, as_one_line,
                   pointy_brackets=pointy_brackets,
                   float_format=float_format)
      out.write(' ' * indent + closeb)
  elif field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_ENUM:
    enum_value = field.enum_type.values_by_number.get(value, None)
    if enum_value is not None:
      out.write(enum_value.name)
    else:
      # Unknown enum number: fall back to the raw integer.
      out.write(str(value))
  elif field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_STRING:
    out.write('\"')
    # NOTE: `unicode` makes this branch Python 2 only.
    if isinstance(value, unicode):
      out_value = value.encode('utf-8')
    else:
      out_value = value
    if field.type == descriptor.FieldDescriptor.TYPE_BYTES:
      # We need to escape non-UTF8 chars in TYPE_BYTES field.
      out_as_utf8 = False
    else:
      out_as_utf8 = as_utf8
    out.write(text_encoding.CEscape(out_value, out_as_utf8))
    out.write('\"')
  elif field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_BOOL:
    # Booleans print as lowercase literals, matching the C++ formatter.
    if value:
      out.write('true')
    else:
      out.write('false')
  elif field.cpp_type in _FLOAT_TYPES and float_format is not None:
    out.write('{1:{0}}'.format(float_format, value))
  else:
    out.write(str(value))
||||
def _ParseOrMerge(lines, message, allow_multiple_scalars):
  """Converts an ASCII representation of a protocol message into a message.

  Args:
    lines: Lines of a message's ASCII representation.
    message: A protocol buffer message to merge into.
    allow_multiple_scalars: Determines if repeated values for a non-repeated
      field are permitted, e.g., the string "foo: 1 foo: 2" for a
      required/optional field named "foo".

  Raises:
    ParseError: On ASCII parsing problems.
  """
  tokenizer = _Tokenizer(lines)
  # Each iteration consumes exactly one top-level field.
  while not tokenizer.AtEnd():
    _MergeField(tokenizer, message, allow_multiple_scalars)
||||
def Parse(text, message):
  """Parses an ASCII representation of a protocol message into a message.

  Args:
    text: Message ASCII representation.
    message: A protocol buffer message to merge into.

  Returns:
    The same message passed as argument.

  Raises:
    ParseError: On ASCII parsing problems.
  """
  # Accept bytes input by decoding it to text first.
  if not isinstance(text, str):
    text = text.decode('utf-8')
  return ParseLines(text.split('\n'), message)
||||
def Merge(text, message):
  """Parses an ASCII representation of a protocol message into a message.

  Like Parse(), but allows repeated values for a non-repeated field, and uses
  the last one.

  Args:
    text: Message ASCII representation.
    message: A protocol buffer message to merge into.

  Returns:
    The same message passed as argument.

  Raises:
    ParseError: On ASCII parsing problems.
  """
  lines = text.split('\n')
  return MergeLines(lines, message)
||||
def ParseLines(lines, message):
  """Parses an ASCII representation of a protocol message into a message.

  Args:
    lines: An iterable of lines of a message's ASCII representation.
    message: A protocol buffer message to merge into.

  Returns:
    The same message passed as argument.

  Raises:
    ParseError: On ASCII parsing problems.
  """
  # Strict mode: duplicate scalar values for a singular field are an error.
  _ParseOrMerge(lines, message, allow_multiple_scalars=False)
  return message
||||
def MergeLines(lines, message):
  """Parses an ASCII representation of a protocol message into a message.

  Args:
    lines: An iterable of lines of a message's ASCII representation.
    message: A protocol buffer message to merge into.

  Returns:
    The same message passed as argument.

  Raises:
    ParseError: On ASCII parsing problems.
  """
  # Merge mode: duplicate scalar values are allowed; the last one wins.
  _ParseOrMerge(lines, message, allow_multiple_scalars=True)
  return message
||||
def _MergeField(tokenizer, message, allow_multiple_scalars):
  """Merges a single protocol message field into a message.

  Args:
    tokenizer: A tokenizer to parse the field name and values.
    message: A protocol message to record the data.
    allow_multiple_scalars: Determines if repeated values for a non-repeated
      field are permitted, e.g., the string "foo: 1 foo: 2" for a
      required/optional field named "foo".

  Raises:
    ParseError: In case of ASCII parsing problems.
  """
  message_descriptor = message.DESCRIPTOR
  if tokenizer.TryConsume('['):
    # '[qualified.extension.name]' syntax: resolve an extension field.
    name = [tokenizer.ConsumeIdentifier()]
    while tokenizer.TryConsume('.'):
      name.append(tokenizer.ConsumeIdentifier())
    name = '.'.join(name)

    if not message_descriptor.is_extendable:
      raise tokenizer.ParseErrorPreviousToken(
          'Message type "%s" does not have extensions.' %
          message_descriptor.full_name)
    # pylint: disable=protected-access
    field = message.Extensions._FindExtensionByName(name)
    # pylint: enable=protected-access
    if not field:
      raise tokenizer.ParseErrorPreviousToken(
          'Extension "%s" not registered.' % name)
    elif message_descriptor != field.containing_type:
      raise tokenizer.ParseErrorPreviousToken(
          'Extension "%s" does not extend message type "%s".' % (
              name, message_descriptor.full_name))
    tokenizer.Consume(']')
  else:
    name = tokenizer.ConsumeIdentifier()
    field = message_descriptor.fields_by_name.get(name, None)

    # Group names are expected to be capitalized as they appear in the
    # .proto file, which actually matches their type names, not their field
    # names.
    if not field:
      field = message_descriptor.fields_by_name.get(name.lower(), None)
      if field and field.type != descriptor.FieldDescriptor.TYPE_GROUP:
        field = None

    if (field and field.type == descriptor.FieldDescriptor.TYPE_GROUP and
        field.message_type.name != name):
      field = None

    if not field:
      raise tokenizer.ParseErrorPreviousToken(
          'Message type "%s" has no field named "%s".' % (
              message_descriptor.full_name, name))

  if field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_MESSAGE:
    # Composite field: a brace- or angle-bracket-delimited block; the colon
    # before the opening bracket is optional.
    tokenizer.TryConsume(':')

    if tokenizer.TryConsume('<'):
      end_token = '>'
    else:
      tokenizer.Consume('{')
      end_token = '}'

    if field.label == descriptor.FieldDescriptor.LABEL_REPEATED:
      if field.is_extension:
        sub_message = message.Extensions[field].add()
      else:
        sub_message = getattr(message, field.name).add()
    else:
      if field.is_extension:
        sub_message = message.Extensions[field]
      else:
        sub_message = getattr(message, field.name)
      # Mark the field as present even if the bracketed block is empty.
      sub_message.SetInParent()

    # Recurse for each field inside the block until the closing bracket.
    while not tokenizer.TryConsume(end_token):
      if tokenizer.AtEnd():
        raise tokenizer.ParseErrorPreviousToken('Expected "%s".' % (end_token))
      _MergeField(tokenizer, sub_message, allow_multiple_scalars)
  else:
    _MergeScalarField(tokenizer, message, field, allow_multiple_scalars)

  # For historical reasons, fields may optionally be separated by commas or
  # semicolons.
  if not tokenizer.TryConsume(','):
    tokenizer.TryConsume(';')
||||
def _MergeScalarField(tokenizer, message, field, allow_multiple_scalars):
  """Merges a single protocol message scalar field into a message.

  Args:
    tokenizer: A tokenizer to parse the field value.
    message: A protocol message to record the data.
    field: The descriptor of the field to be merged.
    allow_multiple_scalars: Determines if repeated values for a non-repeated
      field are permitted, e.g., the string "foo: 1 foo: 2" for a
      required/optional field named "foo".

  Raises:
    ParseError: In case of ASCII parsing problems.
    RuntimeError: On runtime errors.
  """
  tokenizer.Consume(':')
  value = None

  # Dispatch on the declared wire type to the matching tokenizer consumer;
  # each Consume* validates the value's range/format for that type.
  if field.type in (descriptor.FieldDescriptor.TYPE_INT32,
                    descriptor.FieldDescriptor.TYPE_SINT32,
                    descriptor.FieldDescriptor.TYPE_SFIXED32):
    value = tokenizer.ConsumeInt32()
  elif field.type in (descriptor.FieldDescriptor.TYPE_INT64,
                      descriptor.FieldDescriptor.TYPE_SINT64,
                      descriptor.FieldDescriptor.TYPE_SFIXED64):
    value = tokenizer.ConsumeInt64()
  elif field.type in (descriptor.FieldDescriptor.TYPE_UINT32,
                      descriptor.FieldDescriptor.TYPE_FIXED32):
    value = tokenizer.ConsumeUint32()
  elif field.type in (descriptor.FieldDescriptor.TYPE_UINT64,
                      descriptor.FieldDescriptor.TYPE_FIXED64):
    value = tokenizer.ConsumeUint64()
  elif field.type in (descriptor.FieldDescriptor.TYPE_FLOAT,
                      descriptor.FieldDescriptor.TYPE_DOUBLE):
    value = tokenizer.ConsumeFloat()
  elif field.type == descriptor.FieldDescriptor.TYPE_BOOL:
    value = tokenizer.ConsumeBool()
  elif field.type == descriptor.FieldDescriptor.TYPE_STRING:
    value = tokenizer.ConsumeString()
  elif field.type == descriptor.FieldDescriptor.TYPE_BYTES:
    value = tokenizer.ConsumeByteString()
  elif field.type == descriptor.FieldDescriptor.TYPE_ENUM:
    value = tokenizer.ConsumeEnum(field)
  else:
    raise RuntimeError('Unknown field type %d' % field.type)

  if field.label == descriptor.FieldDescriptor.LABEL_REPEATED:
    if field.is_extension:
      message.Extensions[field].append(value)
    else:
      getattr(message, field.name).append(value)
  else:
    # Singular field: unless merging (allow_multiple_scalars), seeing the
    # field twice is an error; otherwise the last value wins.
    if field.is_extension:
      if not allow_multiple_scalars and message.HasExtension(field):
        raise tokenizer.ParseErrorPreviousToken(
            'Message type "%s" should not have multiple "%s" extensions.' %
            (message.DESCRIPTOR.full_name, field.full_name))
      else:
        message.Extensions[field] = value
    else:
      if not allow_multiple_scalars and message.HasField(field.name):
        raise tokenizer.ParseErrorPreviousToken(
            'Message type "%s" should not have multiple "%s" fields.' %
            (message.DESCRIPTOR.full_name, field.name))
      else:
        setattr(message, field.name, value)
||||
class _Tokenizer(object):
|
||||
"""Protocol buffer ASCII representation tokenizer.
|
||||
|
||||
This class handles the lower level string parsing by splitting it into
|
||||
meaningful tokens.
|
||||
|
||||
It was directly ported from the Java protocol buffer API.
|
||||
"""
|
||||
|
||||
_WHITESPACE = re.compile('(\\s|(#.*$))+', re.MULTILINE)
|
||||
_TOKEN = re.compile(
|
||||
'[a-zA-Z_][0-9a-zA-Z_+-]*|' # an identifier
|
||||
'[0-9+-][0-9a-zA-Z_.+-]*|' # a number
|
||||
'\"([^\"\n\\\\]|\\\\.)*(\"|\\\\?$)|' # a double-quoted string
|
||||
'\'([^\'\n\\\\]|\\\\.)*(\'|\\\\?$)') # a single-quoted string
|
||||
_IDENTIFIER = re.compile(r'\w+')
|
||||
|
||||
def __init__(self, lines):
|
||||
self._position = 0
|
||||
self._line = -1
|
||||
self._column = 0
|
||||
self._token_start = None
|
||||
self.token = ''
|
||||
self._lines = iter(lines)
|
||||
self._current_line = ''
|
||||
self._previous_line = 0
|
||||
self._previous_column = 0
|
||||
self._more_lines = True
|
||||
self._SkipWhitespace()
|
||||
self.NextToken()
|
||||
|
||||
def AtEnd(self):
|
||||
"""Checks the end of the text was reached.
|
||||
|
||||
Returns:
|
||||
True iff the end was reached.
|
||||
"""
|
||||
return not self.token
|
||||
|
||||
def _PopLine(self):
|
||||
while len(self._current_line) <= self._column:
|
||||
try:
|
||||
self._current_line = self._lines.next()
|
||||
except StopIteration:
|
||||
self._current_line = ''
|
||||
self._more_lines = False
|
||||
return
|
||||
else:
|
||||
self._line += 1
|
||||
self._column = 0
|
||||
|
||||
def _SkipWhitespace(self):
|
||||
while True:
|
||||
self._PopLine()
|
||||
match = self._WHITESPACE.match(self._current_line, self._column)
|
||||
if not match:
|
||||
break
|
||||
length = len(match.group(0))
|
||||
self._column += length
|
||||
|
||||
def TryConsume(self, token):
|
||||
"""Tries to consume a given piece of text.
|
||||
|
||||
Args:
|
||||
token: Text to consume.
|
||||
|
||||
Returns:
|
||||
True iff the text was consumed.
|
||||
"""
|
||||
if self.token == token:
|
||||
self.NextToken()
|
||||
return True
|
||||
return False
|
||||
|
||||
def Consume(self, token):
|
||||
"""Consumes a piece of text.
|
||||
|
||||
Args:
|
||||
token: Text to consume.
|
||||
|
||||
Raises:
|
||||
ParseError: If the text couldn't be consumed.
|
||||
"""
|
||||
if not self.TryConsume(token):
|
||||
raise self._ParseError('Expected "%s".' % token)
|
||||
|
||||
def ConsumeIdentifier(self):
|
||||
"""Consumes protocol message field identifier.
|
||||
|
||||
Returns:
|
||||
Identifier string.
|
||||
|
||||
Raises:
|
||||
ParseError: If an identifier couldn't be consumed.
|
||||
"""
|
||||
result = self.token
|
||||
if not self._IDENTIFIER.match(result):
|
||||
raise self._ParseError('Expected identifier.')
|
||||
self.NextToken()
|
||||
return result
|
||||
|
||||
def ConsumeInt32(self):
|
||||
"""Consumes a signed 32bit integer number.
|
||||
|
||||
Returns:
|
||||
The integer parsed.
|
||||
|
||||
Raises:
|
||||
ParseError: If a signed 32bit integer couldn't be consumed.
|
||||
"""
|
||||
try:
|
||||
result = ParseInteger(self.token, is_signed=True, is_long=False)
|
||||
except ValueError, e:
|
||||
raise self._ParseError(str(e))
|
||||
self.NextToken()
|
||||
return result
|
||||
|
||||
def ConsumeUint32(self):
|
||||
"""Consumes an unsigned 32bit integer number.
|
||||
|
||||
Returns:
|
||||
The integer parsed.
|
||||
|
||||
Raises:
|
||||
ParseError: If an unsigned 32bit integer couldn't be consumed.
|
||||
"""
|
||||
try:
|
||||
result = ParseInteger(self.token, is_signed=False, is_long=False)
|
||||
except ValueError, e:
|
||||
raise self._ParseError(str(e))
|
||||
self.NextToken()
|
||||
return result
|
||||
|
||||
def ConsumeInt64(self):
|
||||
"""Consumes a signed 64bit integer number.
|
||||
|
||||
Returns:
|
||||
The integer parsed.
|
||||
|
||||
Raises:
|
||||
ParseError: If a signed 64bit integer couldn't be consumed.
|
||||
"""
|
||||
try:
|
||||
result = ParseInteger(self.token, is_signed=True, is_long=True)
|
||||
except ValueError, e:
|
||||
raise self._ParseError(str(e))
|
||||
self.NextToken()
|
||||
return result
|
||||
|
||||
def ConsumeUint64(self):
|
||||
"""Consumes an unsigned 64bit integer number.
|
||||
|
||||
Returns:
|
||||
The integer parsed.
|
||||
|
||||
Raises:
|
||||
ParseError: If an unsigned 64bit integer couldn't be consumed.
|
||||
"""
|
||||
try:
|
||||
result = ParseInteger(self.token, is_signed=False, is_long=True)
|
||||
except ValueError, e:
|
||||
raise self._ParseError(str(e))
|
||||
self.NextToken()
|
||||
return result
|
||||
|
||||
def ConsumeFloat(self):
|
||||
"""Consumes an floating point number.
|
||||
|
||||
Returns:
|
||||
The number parsed.
|
||||
|
||||
Raises:
|
||||
ParseError: If a floating point number couldn't be consumed.
|
||||
"""
|
||||
try:
|
||||
result = ParseFloat(self.token)
|
||||
except ValueError, e:
|
||||
raise self._ParseError(str(e))
|
||||
self.NextToken()
|
||||
return result
|
||||
|
||||
def ConsumeBool(self):
|
||||
"""Consumes a boolean value.
|
||||
|
||||
Returns:
|
||||
The bool parsed.
|
||||
|
||||
Raises:
|
||||
ParseError: If a boolean value couldn't be consumed.
|
||||
"""
|
||||
try:
|
||||
result = ParseBool(self.token)
|
||||
except ValueError, e:
|
||||
raise self._ParseError(str(e))
|
||||
self.NextToken()
|
||||
return result
|
||||
|
||||
def ConsumeString(self):
|
||||
"""Consumes a string value.
|
||||
|
||||
Returns:
|
||||
The string parsed.
|
||||
|
||||
Raises:
|
||||
ParseError: If a string value couldn't be consumed.
|
||||
"""
|
||||
the_bytes = self.ConsumeByteString()
|
||||
try:
|
||||
return unicode(the_bytes, 'utf-8')
|
||||
except UnicodeDecodeError, e:
|
||||
raise self._StringParseError(e)
|
||||
|
||||
def ConsumeByteString(self):
|
||||
"""Consumes a byte array value.
|
||||
|
||||
Returns:
|
||||
The array parsed (as a string).
|
||||
|
||||
Raises:
|
||||
ParseError: If a byte array value couldn't be consumed.
|
||||
"""
|
||||
the_list = [self._ConsumeSingleByteString()]
|
||||
while self.token and self.token[0] in ('\'', '"'):
|
||||
the_list.append(self._ConsumeSingleByteString())
|
||||
return ''.encode('latin1').join(the_list) ##PY25
|
||||
##!PY25 return b''.join(the_list)
|
||||
|
||||
def _ConsumeSingleByteString(self):
|
||||
"""Consume one token of a string literal.
|
||||
|
||||
String literals (whether bytes or text) can come in multiple adjacent
|
||||
tokens which are automatically concatenated, like in C or Python. This
|
||||
method only consumes one token.
|
||||
"""
|
||||
text = self.token
|
||||
if len(text) < 1 or text[0] not in ('\'', '"'):
|
||||
raise self._ParseError('Expected string.')
|
||||
|
||||
if len(text) < 2 or text[-1] != text[0]:
|
||||
raise self._ParseError('String missing ending quote.')
|
||||
|
||||
try:
|
||||
result = text_encoding.CUnescape(text[1:-1])
|
||||
except ValueError, e:
|
||||
raise self._ParseError(str(e))
|
||||
self.NextToken()
|
||||
return result
|
||||
|
||||
def ConsumeEnum(self, field):
|
||||
try:
|
||||
result = ParseEnum(field, self.token)
|
||||
except ValueError, e:
|
||||
raise self._ParseError(str(e))
|
||||
self.NextToken()
|
||||
return result
|
||||
|
||||
def ParseErrorPreviousToken(self, message):
|
||||
"""Creates and *returns* a ParseError for the previously read token.
|
||||
|
||||
Args:
|
||||
message: A message to set for the exception.
|
||||
|
||||
Returns:
|
||||
A ParseError instance.
|
||||
"""
|
||||
return ParseError('%d:%d : %s' % (
|
||||
self._previous_line + 1, self._previous_column + 1, message))
|
||||
|
||||
def _ParseError(self, message):
|
||||
"""Creates and *returns* a ParseError for the current token."""
|
||||
return ParseError('%d:%d : %s' % (
|
||||
self._line + 1, self._column + 1, message))
|
||||
|
||||
def _StringParseError(self, e):
|
||||
return self._ParseError('Couldn\'t parse string: ' + str(e))
|
||||
|
||||
def NextToken(self):
|
||||
"""Reads the next meaningful token."""
|
||||
self._previous_line = self._line
|
||||
self._previous_column = self._column
|
||||
|
||||
self._column += len(self.token)
|
||||
self._SkipWhitespace()
|
||||
|
||||
if not self._more_lines:
|
||||
self.token = ''
|
||||
return
|
||||
|
||||
match = self._TOKEN.match(self._current_line, self._column)
|
||||
if match:
|
||||
token = match.group(0)
|
||||
self.token = token
|
||||
else:
|
||||
self.token = self._current_line[self._column]
|
||||
|
||||
|
||||
def ParseInteger(text, is_signed=False, is_long=False):
  """Parses an integer.

  Args:
    text: The text to parse.
    is_signed: True if a signed integer must be parsed.
    is_long: True if a long integer must be parsed.

  Returns:
    The integer value.

  Raises:
    ValueError: Thrown Iff the text is not a valid integer.
  """
  # We force 32-bit values to int and 64-bit values to long to make
  # alternate implementations where the distinction is more significant
  # (e.g. the C++ implementation) simpler. Base 0 lets int()/long() accept
  # decimal, hex and octal spellings.
  try:
    if is_long:
      value = long(text, 0)
    else:
      value = int(text, 0)
  except ValueError:
    raise ValueError('Couldn\'t parse integer: %s' % text)

  # Range-check the value with the matching bounds checker, which raises
  # on out-of-range input. Exceptions are handled by callers.
  checker = _INTEGER_CHECKERS[2 * int(is_long) + int(is_signed)]
  checker.CheckValue(value)
  return value
||||
def ParseFloat(text):
  """Parse a floating point number.

  Args:
    text: Text to parse.

  Returns:
    The number parsed.

  Raises:
    ValueError: If a floating point number couldn't be parsed.
  """
  # Fast path: Python-compatible float syntax.
  try:
    return float(text)
  except ValueError:
    pass

  # Alternative spellings used by the text format.
  if _FLOAT_INFINITY.match(text):
    return float('-inf') if text[0] == '-' else float('inf')
  if _FLOAT_NAN.match(text):
    return float('nan')

  # Assume a C-style '1.0f' suffix as the last resort.
  try:
    return float(text.rstrip('f'))
  except ValueError:
    raise ValueError('Couldn\'t parse float: %s' % text)
||||
def ParseBool(text):
  """Parse a boolean value.

  Args:
    text: Text to parse.

  Returns:
    Boolean values parsed

  Raises:
    ValueError: If text is not a valid boolean.
  """
  # Exactly the spellings the text format accepts; anything else is an error.
  spellings = {
      'true': True, 't': True, '1': True,
      'false': False, 'f': False, '0': False,
  }
  try:
    return spellings[text]
  except KeyError:
    raise ValueError('Expected "true" or "false".')
||||
def ParseEnum(field, value):
  """Parse an enum value.

  The value can be specified by a number (the enum value), or by
  a string literal (the enum name).

  Args:
    field: Enum field descriptor.
    value: String value.

  Returns:
    Enum value number.

  Raises:
    ValueError: If the enum value could not be parsed.
  """
  descriptor = field.enum_type

  # Decide whether the token is numeric; base 0 accepts hex/octal too.
  try:
    numeric = int(value, 0)
  except ValueError:
    numeric = None

  if numeric is None:
    # Look the value up by its identifier.
    match = descriptor.values_by_name.get(value, None)
    if match is None:
      raise ValueError(
          'Enum type "%s" has no value named %s.' % (
              descriptor.full_name, value))
  else:
    # Look the value up by its number.
    match = descriptor.values_by_number.get(numeric, None)
    if match is None:
      raise ValueError(
          'Enum type "%s" has no value with number %d.' % (
              descriptor.full_name, numeric))
  return match.number
||||
Reference in New Issue
Block a user