Adding mysql and mysql.connector package

pull/4/head
sgoudham 4 years ago
parent 923820c45f
commit 7f2c3872db

@ -0,0 +1,179 @@
"""
MySQLdb - A DB API v2.0 compatible interface to MySQL.
This package is a wrapper around _mysql, which mostly implements the
MySQL C API.
connect() -- connects to server
See the C API specification and the MySQL documentation for more info
on other items.
For information on how MySQLdb handles type conversion, see the
MySQLdb.converters module.
"""
try:
from MySQLdb.release import version_info
from . import _mysql
assert version_info == _mysql.version_info
except Exception:
raise ImportError(
"this is MySQLdb version {}, but _mysql is version {!r}\n_mysql: {!r}".format(
version_info, _mysql.version_info, _mysql.__file__
)
)
from ._mysql import (
NotSupportedError,
OperationalError,
get_client_info,
ProgrammingError,
Error,
InterfaceError,
debug,
IntegrityError,
string_literal,
MySQLError,
DataError,
escape,
escape_string,
DatabaseError,
InternalError,
Warning,
)
from MySQLdb.constants import FIELD_TYPE
from MySQLdb.times import (
Date,
Time,
Timestamp,
DateFromTicks,
TimeFromTicks,
TimestampFromTicks,
)
try:
frozenset
except NameError:
from sets import ImmutableSet as frozenset
threadsafety = 1
apilevel = "2.0"
paramstyle = "format"
class DBAPISet(frozenset):
"""A special type of set for which A == x is true if A is a
DBAPISet and x is a member of that set."""
def __eq__(self, other):
if isinstance(other, DBAPISet):
return not self.difference(other)
return other in self
STRING = DBAPISet([FIELD_TYPE.ENUM, FIELD_TYPE.STRING, FIELD_TYPE.VAR_STRING])
BINARY = DBAPISet(
[
FIELD_TYPE.BLOB,
FIELD_TYPE.LONG_BLOB,
FIELD_TYPE.MEDIUM_BLOB,
FIELD_TYPE.TINY_BLOB,
]
)
NUMBER = DBAPISet(
[
FIELD_TYPE.DECIMAL,
FIELD_TYPE.DOUBLE,
FIELD_TYPE.FLOAT,
FIELD_TYPE.INT24,
FIELD_TYPE.LONG,
FIELD_TYPE.LONGLONG,
FIELD_TYPE.TINY,
FIELD_TYPE.YEAR,
FIELD_TYPE.NEWDECIMAL,
]
)
DATE = DBAPISet([FIELD_TYPE.DATE])
TIME = DBAPISet([FIELD_TYPE.TIME])
TIMESTAMP = DBAPISet([FIELD_TYPE.TIMESTAMP, FIELD_TYPE.DATETIME])
DATETIME = TIMESTAMP
ROWID = DBAPISet()
def test_DBAPISet_set_equality():
assert STRING == STRING
def test_DBAPISet_set_inequality():
assert STRING != NUMBER
def test_DBAPISet_set_equality_membership():
assert FIELD_TYPE.VAR_STRING == STRING
def test_DBAPISet_set_inequality_membership():
assert FIELD_TYPE.DATE != STRING
def Binary(x):
return bytes(x)
def Connect(*args, **kwargs):
"""Factory function for connections.Connection."""
from MySQLdb.connections import Connection
return Connection(*args, **kwargs)
connect = Connection = Connect
__all__ = [
"BINARY",
"Binary",
"Connect",
"Connection",
"DATE",
"Date",
"Time",
"Timestamp",
"DateFromTicks",
"TimeFromTicks",
"TimestampFromTicks",
"DataError",
"DatabaseError",
"Error",
"FIELD_TYPE",
"IntegrityError",
"InterfaceError",
"InternalError",
"MySQLError",
"NUMBER",
"NotSupportedError",
"DBAPISet",
"OperationalError",
"ProgrammingError",
"ROWID",
"STRING",
"TIME",
"TIMESTAMP",
"Warning",
"apilevel",
"connect",
"connections",
"constants",
"converters",
"cursors",
"debug",
"escape",
"escape_string",
"get_client_info",
"paramstyle",
"string_literal",
"threadsafety",
"version_info",
]
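
# Usage sketch (illustrative only): the DB API entry points exported above.
# Host, credentials, and the table name are placeholders, not values taken
# from this repository.
def _example_query():
    conn = connect(host="localhost", user="example_user",
                   password="example_pass", database="example_db")
    try:
        cur = conn.cursor()
        # paramstyle is "format": use %s placeholders and pass the arguments
        # separately so the driver escapes them.
        cur.execute("SELECT id, name FROM example_table WHERE id > %s", (0,))
        return cur.fetchall()
    finally:
        conn.close()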

@ -0,0 +1,69 @@
"""Exception classes for _mysql and MySQLdb.
These classes are dictated by the DB API v2.0:
https://www.python.org/dev/peps/pep-0249/
"""
class MySQLError(Exception):
"""Exception related to operation with MySQL."""
class Warning(Warning, MySQLError):
"""Exception raised for important warnings like data truncations
while inserting, etc."""
class Error(MySQLError):
"""Exception that is the base class of all other error exceptions
(not Warning)."""
class InterfaceError(Error):
"""Exception raised for errors that are related to the database
interface rather than the database itself."""
class DatabaseError(Error):
"""Exception raised for errors that are related to the
database."""
class DataError(DatabaseError):
"""Exception raised for errors that are due to problems with the
processed data like division by zero, numeric value out of range,
etc."""
class OperationalError(DatabaseError):
"""Exception raised for errors that are related to the database's
operation and not necessarily under the control of the programmer,
e.g. an unexpected disconnect occurs, the data source name is not
found, a transaction could not be processed, a memory allocation
error occurred during processing, etc."""
class IntegrityError(DatabaseError):
"""Exception raised when the relational integrity of the database
is affected, e.g. a foreign key check fails, duplicate key,
etc."""
class InternalError(DatabaseError):
"""Exception raised when the database encounters an internal
error, e.g. the cursor is not valid anymore, the transaction is
out of sync, etc."""
class ProgrammingError(DatabaseError):
"""Exception raised for programming errors, e.g. table not found
or already exists, syntax error in the SQL statement, wrong number
of parameters specified, etc."""
class NotSupportedError(DatabaseError):
"""Exception raised in case a method or database API was used
which is not supported by the database, e.g. requesting a
.rollback() on a connection that does not support transaction or
has transactions turned off."""
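
# Sketch of how this hierarchy is typically used (illustrative only): catching
# Error covers every class above except Warning, while the narrower classes
# let callers special-case particular failures.  `conn` is assumed to be an
# open MySQLdb connection.
def _run_query(conn, sql, args=None):
    try:
        cur = conn.cursor()
        cur.execute(sql, args)
        return cur.fetchall()
    except IntegrityError:
        # duplicate key, foreign key violation, ...
        raise
    except OperationalError:
        # lost connection, lock wait timeout, ...
        raise
    except Error as exc:
        # catch-all for the remaining DB API error classes
        raise RuntimeError("query failed: %r" % (exc,)) from exc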

@ -0,0 +1,360 @@
"""
This module implements connections for MySQLdb. Presently there is
only one class: Connection. Others are unlikely. However, you might
want to make your own subclasses. In most cases, you will probably
override Connection.default_cursor with a non-standard Cursor class.
"""
import re
from . import cursors, _mysql
from ._exceptions import (
Warning,
Error,
InterfaceError,
DataError,
DatabaseError,
OperationalError,
IntegrityError,
InternalError,
NotSupportedError,
ProgrammingError,
)
# Mapping from MySQL charset name to Python codec name
_charset_to_encoding = {
"utf8mb4": "utf8",
"utf8mb3": "utf8",
"latin1": "cp1252",
"koi8r": "koi8_r",
"koi8u": "koi8_u",
}
re_numeric_part = re.compile(r"^(\d+)")
def numeric_part(s):
"""Returns the leading numeric part of a string.
>>> numeric_part("20-alpha")
20
>>> numeric_part("foo")
>>> numeric_part("16b")
16
"""
m = re_numeric_part.match(s)
if m:
return int(m.group(1))
return None
class Connection(_mysql.connection):
"""MySQL Database Connection Object"""
default_cursor = cursors.Cursor
def __init__(self, *args, **kwargs):
"""
Create a connection to the database. It is strongly recommended
that you only use keyword parameters. Consult the MySQL C API
documentation for more information.
:param str host: host to connect
:param str user: user to connect as
:param str password: password to use
:param str passwd: alias of password, for backward compatibility
:param str database: database to use
:param str db: alias of database, for backward compatibility
:param int port: TCP/IP port to connect to
:param str unix_socket: location of unix_socket to use
:param dict conv: conversion dictionary, see MySQLdb.converters
:param int connect_timeout:
number of seconds to wait before the connection attempt fails.
:param bool compress: if set, compression is enabled
:param str named_pipe: if set, a named pipe is used to connect (Windows only)
:param str init_command:
command which is run once the connection is created
:param str read_default_file:
file from which default client values are read
:param str read_default_group:
configuration group to use from the default file
:param type cursorclass:
class object, used to create cursors (keyword only)
:param bool use_unicode:
If True, text-like columns are returned as unicode objects
using the connection's character set. Otherwise, text-like
columns are returned as bytes. Unicode objects will always
be encoded to the connection's character set regardless of
this setting.
Defaults to True.
:param str charset:
If supplied, the connection character set will be changed
to this character set.
:param str auth_plugin:
If supplied, the connection default authentication plugin will be
changed to this value. Example values:
`mysql_native_password` or `caching_sha2_password`
:param str sql_mode:
If supplied, the session SQL mode will be changed to this
setting.
For more details and legal values, see the MySQL documentation.
:param int client_flag:
flags to use or 0 (see MySQL docs or constants/CLIENTS.py)
:param str ssl_mode:
specify the security settings for connection to the server;
see the MySQL documentation for more details
(mysql_option(), MYSQL_OPT_SSL_MODE).
Only one of 'DISABLED', 'PREFERRED', 'REQUIRED',
'VERIFY_CA', 'VERIFY_IDENTITY' can be specified.
:param dict ssl:
dictionary or mapping containing SSL connection parameters;
see the MySQL documentation for more details
(mysql_ssl_set()). If this is set, and the client does not
support SSL, NotSupportedError will be raised.
:param bool local_infile:
if True, enables LOAD LOCAL INFILE; if False, disables it
:param bool autocommit:
If False (default), autocommit is disabled.
If True, autocommit is enabled.
If None, autocommit isn't set and server default is used.
:param bool binary_prefix:
If set, the '_binary' prefix will be used for raw byte query
arguments (e.g. Binary). This is disabled by default.
There are a number of undocumented, non-standard methods. See the
documentation for the MySQL C API for some hints on what they do.
"""
from MySQLdb.constants import CLIENT, FIELD_TYPE
from MySQLdb.converters import conversions, _bytes_or_str
from weakref import proxy
kwargs2 = kwargs.copy()
if "database" in kwargs2:
kwargs2["db"] = kwargs2.pop("database")
if "password" in kwargs2:
kwargs2["passwd"] = kwargs2.pop("password")
if "conv" in kwargs:
conv = kwargs["conv"]
else:
conv = conversions
conv2 = {}
for k, v in conv.items():
if isinstance(k, int) and isinstance(v, list):
conv2[k] = v[:]
else:
conv2[k] = v
kwargs2["conv"] = conv2
cursorclass = kwargs2.pop("cursorclass", self.default_cursor)
charset = kwargs2.get("charset", "")
use_unicode = kwargs2.pop("use_unicode", True)
sql_mode = kwargs2.pop("sql_mode", "")
self._binary_prefix = kwargs2.pop("binary_prefix", False)
client_flag = kwargs.get("client_flag", 0)
client_version = tuple(
[numeric_part(n) for n in _mysql.get_client_info().split(".")[:2]]
)
if client_version >= (4, 1):
client_flag |= CLIENT.MULTI_STATEMENTS
if client_version >= (5, 0):
client_flag |= CLIENT.MULTI_RESULTS
kwargs2["client_flag"] = client_flag
# PEP-249 requires autocommit to be initially off
autocommit = kwargs2.pop("autocommit", False)
super().__init__(*args, **kwargs2)
self.cursorclass = cursorclass
self.encoders = {k: v for k, v in conv.items() if type(k) is not int}
# XXX THIS IS GARBAGE: While this is just a garbage and undocumented,
# Django 1.11 depends on it. And they don't fix it because
# they are in security-only fix mode.
# So keep this garbage for now. This will be removed in 1.5.
# See PyMySQL/mysqlclient-python#306
self.encoders[bytes] = bytes
self._server_version = tuple(
[numeric_part(n) for n in self.get_server_info().split(".")[:2]]
)
self.encoding = "ascii" # overridden in set_character_set()
db = proxy(self)
def unicode_literal(u, dummy=None):
return db.string_literal(u.encode(db.encoding))
if not charset:
charset = self.character_set_name()
self.set_character_set(charset)
if sql_mode:
self.set_sql_mode(sql_mode)
if use_unicode:
for t in (
FIELD_TYPE.STRING,
FIELD_TYPE.VAR_STRING,
FIELD_TYPE.VARCHAR,
FIELD_TYPE.TINY_BLOB,
FIELD_TYPE.MEDIUM_BLOB,
FIELD_TYPE.LONG_BLOB,
FIELD_TYPE.BLOB,
):
self.converter[t] = _bytes_or_str
# Unlike other string/blob types, JSON is always text.
# MySQL may return JSON with charset==binary.
self.converter[FIELD_TYPE.JSON] = str
self.encoders[str] = unicode_literal
self._transactional = self.server_capabilities & CLIENT.TRANSACTIONS
if self._transactional:
if autocommit is not None:
self.autocommit(autocommit)
self.messages = []
def __enter__(self):
return self
def __exit__(self, exc_type, exc_value, traceback):
self.close()
def autocommit(self, on):
on = bool(on)
if self.get_autocommit() != on:
_mysql.connection.autocommit(self, on)
def cursor(self, cursorclass=None):
"""
Create a cursor on which queries may be performed. The
optional cursorclass parameter is used to create the
Cursor. By default, self.cursorclass=cursors.Cursor is
used.
"""
return (cursorclass or self.cursorclass)(self)
def query(self, query):
# Since _mysql releases GIL while querying, we need immutable buffer.
if isinstance(query, bytearray):
query = bytes(query)
_mysql.connection.query(self, query)
def _bytes_literal(self, bs):
assert isinstance(bs, (bytes, bytearray))
x = self.string_literal(bs) # x is escaped and quoted bytes
if self._binary_prefix:
return b"_binary" + x
return x
def _tuple_literal(self, t):
return b"(%s)" % (b",".join(map(self.literal, t)))
def literal(self, o):
"""If o is a single object, returns an SQL literal as a string.
If o is a non-string sequence, the items of the sequence are
converted and returned as a sequence.
Non-standard. For internal use; do not use this in your
applications.
"""
if isinstance(o, str):
s = self.string_literal(o.encode(self.encoding))
elif isinstance(o, bytearray):
s = self._bytes_literal(o)
elif isinstance(o, bytes):
s = self._bytes_literal(o)
elif isinstance(o, (tuple, list)):
s = self._tuple_literal(o)
else:
s = self.escape(o, self.encoders)
if isinstance(s, str):
s = s.encode(self.encoding)
assert isinstance(s, bytes)
return s
def begin(self):
"""Explicitly begin a connection.
This method is not used when autocommit=False (default).
"""
self.query(b"BEGIN")
if not hasattr(_mysql.connection, "warning_count"):
def warning_count(self):
"""Return the number of warnings generated from the
last query. This is derived from the info() method."""
info = self.info()
if info:
return int(info.split()[-1])
else:
return 0
def set_character_set(self, charset):
"""Set the connection character set to charset. The character
set can only be changed in MySQL-4.1 and newer. If you try
to change the character set from the current value in an
older version, NotSupportedError will be raised."""
py_charset = _charset_to_encoding.get(charset, charset)
if self.character_set_name() != charset:
try:
super().set_character_set(charset)
except AttributeError:
if self._server_version < (4, 1):
raise NotSupportedError("server is too old to set charset")
self.query("SET NAMES %s" % charset)
self.store_result()
self.encoding = py_charset
def set_sql_mode(self, sql_mode):
"""Set the connection sql_mode. See MySQL documentation for
legal values."""
if self._server_version < (4, 1):
raise NotSupportedError("server is too old to set sql_mode")
self.query("SET SESSION sql_mode='%s'" % sql_mode)
self.store_result()
def show_warnings(self):
"""Return detailed information about warnings as a
sequence of tuples of (Level, Code, Message). This
is only supported in MySQL-4.1 and up. If your server
is an earlier version, an empty sequence is returned."""
if self._server_version < (4, 1):
return ()
self.query("SHOW WARNINGS")
r = self.store_result()
warnings = r.fetch_row(0)
return warnings
Warning = Warning
Error = Error
InterfaceError = InterfaceError
DatabaseError = DatabaseError
DataError = DataError
OperationalError = OperationalError
IntegrityError = IntegrityError
InternalError = InternalError
ProgrammingError = ProgrammingError
NotSupportedError = NotSupportedError
# vim: colorcolumn=100
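
# Usage sketch (illustrative only): __enter__/__exit__ above only return and
# close the connection, so commits stay explicit; autocommit is off by default
# per PEP-249.  Connection parameters and the table name are placeholders.
def _example_insert(value):
    with Connection(host="localhost", user="example_user",
                    password="example_pass", database="example_db",
                    charset="utf8mb4") as conn:
        cur = conn.cursor()
        cur.execute("INSERT INTO example_table (name) VALUES (%s)", (value,))
        conn.commit()  # nothing is committed automatically on __exit__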

@ -0,0 +1,27 @@
"""MySQL CLIENT constants
These constants are used when creating the connection. Use bitwise-OR
(|) to combine options together, and pass them as the client_flag
parameter to MySQLdb.Connection. For more information on these flags,
see the MySQL C API documentation for mysql_real_connect().
"""
LONG_PASSWORD = 1
FOUND_ROWS = 2
LONG_FLAG = 4
CONNECT_WITH_DB = 8
NO_SCHEMA = 16
COMPRESS = 32
ODBC = 64
LOCAL_FILES = 128
IGNORE_SPACE = 256
CHANGE_USER = 512
INTERACTIVE = 1024
SSL = 2048
IGNORE_SIGPIPE = 4096
TRANSACTIONS = 8192 # mysql_com.h was WRONG prior to 3.23.35
RESERVED = 16384
SECURE_CONNECTION = 32768
MULTI_STATEMENTS = 65536
MULTI_RESULTS = 131072
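
# Sketch (illustrative only): combine the flags above with bitwise OR and pass
# the result through the connection's client_flag argument.  Connection
# parameters are placeholders.
def _example_connect_with_flags():
    import MySQLdb

    flags = FOUND_ROWS | INTERACTIVE
    return MySQLdb.connect(host="localhost", user="example_user",
                           password="example_pass", database="example_db",
                           client_flag=flags)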

@ -0,0 +1,105 @@
"""MySQL Connection Errors
Nearly all of these raise OperationalError. COMMANDS_OUT_OF_SYNC
raises ProgrammingError.
"""
if __name__ == "__main__":
"""
Usage: python CR.py [/path/to/mysql/errmsg.h ...] >> CR.py
"""
import fileinput
import re
data = {}
error_last = None
for line in fileinput.input():
line = re.sub(r"/\*.*?\*/", "", line)
m = re.match(r"^\s*#define\s+CR_([A-Z0-9_]+)\s+(\d+)(\s.*|$)", line)
if m:
name = m.group(1)
value = int(m.group(2))
if name == "ERROR_LAST":
if error_last is None or error_last < value:
error_last = value
continue
if value not in data:
data[value] = set()
data[value].add(name)
for value, names in sorted(data.items()):
for name in sorted(names):
print("{} = {}".format(name, value))
if error_last is not None:
print("ERROR_LAST = %s" % error_last)
ERROR_FIRST = 2000
MIN_ERROR = 2000
UNKNOWN_ERROR = 2000
SOCKET_CREATE_ERROR = 2001
CONNECTION_ERROR = 2002
CONN_HOST_ERROR = 2003
IPSOCK_ERROR = 2004
UNKNOWN_HOST = 2005
SERVER_GONE_ERROR = 2006
VERSION_ERROR = 2007
OUT_OF_MEMORY = 2008
WRONG_HOST_INFO = 2009
LOCALHOST_CONNECTION = 2010
TCP_CONNECTION = 2011
SERVER_HANDSHAKE_ERR = 2012
SERVER_LOST = 2013
COMMANDS_OUT_OF_SYNC = 2014
NAMEDPIPE_CONNECTION = 2015
NAMEDPIPEWAIT_ERROR = 2016
NAMEDPIPEOPEN_ERROR = 2017
NAMEDPIPESETSTATE_ERROR = 2018
CANT_READ_CHARSET = 2019
NET_PACKET_TOO_LARGE = 2020
EMBEDDED_CONNECTION = 2021
PROBE_SLAVE_STATUS = 2022
PROBE_SLAVE_HOSTS = 2023
PROBE_SLAVE_CONNECT = 2024
PROBE_MASTER_CONNECT = 2025
SSL_CONNECTION_ERROR = 2026
MALFORMED_PACKET = 2027
WRONG_LICENSE = 2028
NULL_POINTER = 2029
NO_PREPARE_STMT = 2030
PARAMS_NOT_BOUND = 2031
DATA_TRUNCATED = 2032
NO_PARAMETERS_EXISTS = 2033
INVALID_PARAMETER_NO = 2034
INVALID_BUFFER_USE = 2035
UNSUPPORTED_PARAM_TYPE = 2036
SHARED_MEMORY_CONNECTION = 2037
SHARED_MEMORY_CONNECT_REQUEST_ERROR = 2038
SHARED_MEMORY_CONNECT_ANSWER_ERROR = 2039
SHARED_MEMORY_CONNECT_FILE_MAP_ERROR = 2040
SHARED_MEMORY_CONNECT_MAP_ERROR = 2041
SHARED_MEMORY_FILE_MAP_ERROR = 2042
SHARED_MEMORY_MAP_ERROR = 2043
SHARED_MEMORY_EVENT_ERROR = 2044
SHARED_MEMORY_CONNECT_ABANDONED_ERROR = 2045
SHARED_MEMORY_CONNECT_SET_ERROR = 2046
CONN_UNKNOW_PROTOCOL = 2047
INVALID_CONN_HANDLE = 2048
UNUSED_1 = 2049
FETCH_CANCELED = 2050
NO_DATA = 2051
NO_STMT_METADATA = 2052
NO_RESULT_SET = 2053
NOT_IMPLEMENTED = 2054
SERVER_LOST_EXTENDED = 2055
STMT_CLOSED = 2056
NEW_STMT_METADATA = 2057
ALREADY_CONNECTED = 2058
AUTH_PLUGIN_CANNOT_LOAD = 2059
DUPLICATE_CONNECTION_ATTR = 2060
AUTH_PLUGIN_ERR = 2061
INSECURE_API_ERR = 2062
FILE_NAME_TOO_LONG = 2063
SSL_FIPS_MODE_ERR = 2064
MAX_ERROR = 2999
ERROR_LAST = 2064
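
# Sketch (illustrative only): these client-side error numbers appear as the
# first element of OperationalError.args, which makes simple reconnect/retry
# checks possible.
def _is_connection_lost(exc):
    import MySQLdb

    return isinstance(exc, MySQLdb.OperationalError) and exc.args[0] in (
        SERVER_GONE_ERROR,  # 2006: MySQL server has gone away
        SERVER_LOST,        # 2013: lost connection to server during query
    )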

@ -0,0 +1,827 @@
"""MySQL ER Constants
These constants are error codes for the bulk of the error conditions
that may occur.
"""
if __name__ == "__main__":
"""
Usage: python ER.py [/path/to/mysql/mysqld_error.h ...] >> ER.py
"""
import fileinput
import re
data = {}
error_last = None
for line in fileinput.input():
line = re.sub(r"/\*.*?\*/", "", line)
m = re.match(r"^\s*#define\s+((ER|WARN)_[A-Z0-9_]+)\s+(\d+)\s*", line)
if m:
name = m.group(1)
if name.startswith("ER_"):
name = name[3:]
value = int(m.group(3))
if name == "ERROR_LAST":
if error_last is None or error_last < value:
error_last = value
continue
if value not in data:
data[value] = set()
data[value].add(name)
for value, names in sorted(data.items()):
for name in sorted(names):
print("{} = {}".format(name, value))
if error_last is not None:
print("ERROR_LAST = %s" % error_last)
ERROR_FIRST = 1000
NO = 1002
YES = 1003
CANT_CREATE_FILE = 1004
CANT_CREATE_TABLE = 1005
CANT_CREATE_DB = 1006
DB_CREATE_EXISTS = 1007
DB_DROP_EXISTS = 1008
DB_DROP_RMDIR = 1010
CANT_FIND_SYSTEM_REC = 1012
CANT_GET_STAT = 1013
CANT_LOCK = 1015
CANT_OPEN_FILE = 1016
FILE_NOT_FOUND = 1017
CANT_READ_DIR = 1018
CHECKREAD = 1020
DUP_KEY = 1022
ERROR_ON_READ = 1024
ERROR_ON_RENAME = 1025
ERROR_ON_WRITE = 1026
FILE_USED = 1027
FILSORT_ABORT = 1028
GET_ERRNO = 1030
ILLEGAL_HA = 1031
KEY_NOT_FOUND = 1032
NOT_FORM_FILE = 1033
NOT_KEYFILE = 1034
OLD_KEYFILE = 1035
OPEN_AS_READONLY = 1036
OUTOFMEMORY = 1037
OUT_OF_SORTMEMORY = 1038
CON_COUNT_ERROR = 1040
OUT_OF_RESOURCES = 1041
BAD_HOST_ERROR = 1042
HANDSHAKE_ERROR = 1043
DBACCESS_DENIED_ERROR = 1044
ACCESS_DENIED_ERROR = 1045
NO_DB_ERROR = 1046
UNKNOWN_COM_ERROR = 1047
BAD_NULL_ERROR = 1048
BAD_DB_ERROR = 1049
TABLE_EXISTS_ERROR = 1050
BAD_TABLE_ERROR = 1051
NON_UNIQ_ERROR = 1052
SERVER_SHUTDOWN = 1053
BAD_FIELD_ERROR = 1054
WRONG_FIELD_WITH_GROUP = 1055
WRONG_GROUP_FIELD = 1056
WRONG_SUM_SELECT = 1057
WRONG_VALUE_COUNT = 1058
TOO_LONG_IDENT = 1059
DUP_FIELDNAME = 1060
DUP_KEYNAME = 1061
DUP_ENTRY = 1062
WRONG_FIELD_SPEC = 1063
PARSE_ERROR = 1064
EMPTY_QUERY = 1065
NONUNIQ_TABLE = 1066
INVALID_DEFAULT = 1067
MULTIPLE_PRI_KEY = 1068
TOO_MANY_KEYS = 1069
TOO_MANY_KEY_PARTS = 1070
TOO_LONG_KEY = 1071
KEY_COLUMN_DOES_NOT_EXITS = 1072
BLOB_USED_AS_KEY = 1073
TOO_BIG_FIELDLENGTH = 1074
WRONG_AUTO_KEY = 1075
READY = 1076
SHUTDOWN_COMPLETE = 1079
FORCING_CLOSE = 1080
IPSOCK_ERROR = 1081
NO_SUCH_INDEX = 1082
WRONG_FIELD_TERMINATORS = 1083
BLOBS_AND_NO_TERMINATED = 1084
TEXTFILE_NOT_READABLE = 1085
FILE_EXISTS_ERROR = 1086
LOAD_INFO = 1087
ALTER_INFO = 1088
WRONG_SUB_KEY = 1089
CANT_REMOVE_ALL_FIELDS = 1090
CANT_DROP_FIELD_OR_KEY = 1091
INSERT_INFO = 1092
UPDATE_TABLE_USED = 1093
NO_SUCH_THREAD = 1094
KILL_DENIED_ERROR = 1095
NO_TABLES_USED = 1096
TOO_BIG_SET = 1097
NO_UNIQUE_LOGFILE = 1098
TABLE_NOT_LOCKED_FOR_WRITE = 1099
TABLE_NOT_LOCKED = 1100
BLOB_CANT_HAVE_DEFAULT = 1101
WRONG_DB_NAME = 1102
WRONG_TABLE_NAME = 1103
TOO_BIG_SELECT = 1104
UNKNOWN_ERROR = 1105
UNKNOWN_PROCEDURE = 1106
WRONG_PARAMCOUNT_TO_PROCEDURE = 1107
WRONG_PARAMETERS_TO_PROCEDURE = 1108
UNKNOWN_TABLE = 1109
FIELD_SPECIFIED_TWICE = 1110
INVALID_GROUP_FUNC_USE = 1111
UNSUPPORTED_EXTENSION = 1112
TABLE_MUST_HAVE_COLUMNS = 1113
RECORD_FILE_FULL = 1114
UNKNOWN_CHARACTER_SET = 1115
TOO_MANY_TABLES = 1116
TOO_MANY_FIELDS = 1117
TOO_BIG_ROWSIZE = 1118
STACK_OVERRUN = 1119
WRONG_OUTER_JOIN_UNUSED = 1120
NULL_COLUMN_IN_INDEX = 1121
CANT_FIND_UDF = 1122
CANT_INITIALIZE_UDF = 1123
UDF_NO_PATHS = 1124
UDF_EXISTS = 1125
CANT_OPEN_LIBRARY = 1126
CANT_FIND_DL_ENTRY = 1127
FUNCTION_NOT_DEFINED = 1128
HOST_IS_BLOCKED = 1129
HOST_NOT_PRIVILEGED = 1130
PASSWORD_ANONYMOUS_USER = 1131
PASSWORD_NOT_ALLOWED = 1132
PASSWORD_NO_MATCH = 1133
UPDATE_INFO = 1134
CANT_CREATE_THREAD = 1135
WRONG_VALUE_COUNT_ON_ROW = 1136
CANT_REOPEN_TABLE = 1137
INVALID_USE_OF_NULL = 1138
REGEXP_ERROR = 1139
MIX_OF_GROUP_FUNC_AND_FIELDS = 1140
NONEXISTING_GRANT = 1141
TABLEACCESS_DENIED_ERROR = 1142
COLUMNACCESS_DENIED_ERROR = 1143
ILLEGAL_GRANT_FOR_TABLE = 1144
GRANT_WRONG_HOST_OR_USER = 1145
NO_SUCH_TABLE = 1146
NONEXISTING_TABLE_GRANT = 1147
NOT_ALLOWED_COMMAND = 1148
SYNTAX_ERROR = 1149
ABORTING_CONNECTION = 1152
NET_PACKET_TOO_LARGE = 1153
NET_READ_ERROR_FROM_PIPE = 1154
NET_FCNTL_ERROR = 1155
NET_PACKETS_OUT_OF_ORDER = 1156
NET_UNCOMPRESS_ERROR = 1157
NET_READ_ERROR = 1158
NET_READ_INTERRUPTED = 1159
NET_ERROR_ON_WRITE = 1160
NET_WRITE_INTERRUPTED = 1161
TOO_LONG_STRING = 1162
TABLE_CANT_HANDLE_BLOB = 1163
TABLE_CANT_HANDLE_AUTO_INCREMENT = 1164
WRONG_COLUMN_NAME = 1166
WRONG_KEY_COLUMN = 1167
WRONG_MRG_TABLE = 1168
DUP_UNIQUE = 1169
BLOB_KEY_WITHOUT_LENGTH = 1170
PRIMARY_CANT_HAVE_NULL = 1171
TOO_MANY_ROWS = 1172
REQUIRES_PRIMARY_KEY = 1173
UPDATE_WITHOUT_KEY_IN_SAFE_MODE = 1175
KEY_DOES_NOT_EXITS = 1176
CHECK_NO_SUCH_TABLE = 1177
CHECK_NOT_IMPLEMENTED = 1178
CANT_DO_THIS_DURING_AN_TRANSACTION = 1179
ERROR_DURING_COMMIT = 1180
ERROR_DURING_ROLLBACK = 1181
ERROR_DURING_FLUSH_LOGS = 1182
NEW_ABORTING_CONNECTION = 1184
MASTER = 1188
MASTER_NET_READ = 1189
MASTER_NET_WRITE = 1190
FT_MATCHING_KEY_NOT_FOUND = 1191
LOCK_OR_ACTIVE_TRANSACTION = 1192
UNKNOWN_SYSTEM_VARIABLE = 1193
CRASHED_ON_USAGE = 1194
CRASHED_ON_REPAIR = 1195
WARNING_NOT_COMPLETE_ROLLBACK = 1196
TRANS_CACHE_FULL = 1197
SLAVE_NOT_RUNNING = 1199
BAD_SLAVE = 1200
MASTER_INFO = 1201
SLAVE_THREAD = 1202
TOO_MANY_USER_CONNECTIONS = 1203
SET_CONSTANTS_ONLY = 1204
LOCK_WAIT_TIMEOUT = 1205
LOCK_TABLE_FULL = 1206
READ_ONLY_TRANSACTION = 1207
WRONG_ARGUMENTS = 1210
NO_PERMISSION_TO_CREATE_USER = 1211
LOCK_DEADLOCK = 1213
TABLE_CANT_HANDLE_FT = 1214
CANNOT_ADD_FOREIGN = 1215
NO_REFERENCED_ROW = 1216
ROW_IS_REFERENCED = 1217
CONNECT_TO_MASTER = 1218
ERROR_WHEN_EXECUTING_COMMAND = 1220
WRONG_USAGE = 1221
WRONG_NUMBER_OF_COLUMNS_IN_SELECT = 1222
CANT_UPDATE_WITH_READLOCK = 1223
MIXING_NOT_ALLOWED = 1224
DUP_ARGUMENT = 1225
USER_LIMIT_REACHED = 1226
SPECIFIC_ACCESS_DENIED_ERROR = 1227
LOCAL_VARIABLE = 1228
GLOBAL_VARIABLE = 1229
NO_DEFAULT = 1230
WRONG_VALUE_FOR_VAR = 1231
WRONG_TYPE_FOR_VAR = 1232
VAR_CANT_BE_READ = 1233
CANT_USE_OPTION_HERE = 1234
NOT_SUPPORTED_YET = 1235
MASTER_FATAL_ERROR_READING_BINLOG = 1236
SLAVE_IGNORED_TABLE = 1237
INCORRECT_GLOBAL_LOCAL_VAR = 1238
WRONG_FK_DEF = 1239
KEY_REF_DO_NOT_MATCH_TABLE_REF = 1240
OPERAND_COLUMNS = 1241
SUBQUERY_NO_1_ROW = 1242
UNKNOWN_STMT_HANDLER = 1243
CORRUPT_HELP_DB = 1244
AUTO_CONVERT = 1246
ILLEGAL_REFERENCE = 1247
DERIVED_MUST_HAVE_ALIAS = 1248
SELECT_REDUCED = 1249
TABLENAME_NOT_ALLOWED_HERE = 1250
NOT_SUPPORTED_AUTH_MODE = 1251
SPATIAL_CANT_HAVE_NULL = 1252
COLLATION_CHARSET_MISMATCH = 1253
TOO_BIG_FOR_UNCOMPRESS = 1256
ZLIB_Z_MEM_ERROR = 1257
ZLIB_Z_BUF_ERROR = 1258
ZLIB_Z_DATA_ERROR = 1259
CUT_VALUE_GROUP_CONCAT = 1260
WARN_TOO_FEW_RECORDS = 1261
WARN_TOO_MANY_RECORDS = 1262
WARN_NULL_TO_NOTNULL = 1263
WARN_DATA_OUT_OF_RANGE = 1264
WARN_DATA_TRUNCATED = 1265
WARN_USING_OTHER_HANDLER = 1266
CANT_AGGREGATE_2COLLATIONS = 1267
REVOKE_GRANTS = 1269
CANT_AGGREGATE_3COLLATIONS = 1270
CANT_AGGREGATE_NCOLLATIONS = 1271
VARIABLE_IS_NOT_STRUCT = 1272
UNKNOWN_COLLATION = 1273
SLAVE_IGNORED_SSL_PARAMS = 1274
SERVER_IS_IN_SECURE_AUTH_MODE = 1275
WARN_FIELD_RESOLVED = 1276
BAD_SLAVE_UNTIL_COND = 1277
MISSING_SKIP_SLAVE = 1278
UNTIL_COND_IGNORED = 1279
WRONG_NAME_FOR_INDEX = 1280
WRONG_NAME_FOR_CATALOG = 1281
BAD_FT_COLUMN = 1283
UNKNOWN_KEY_CACHE = 1284
WARN_HOSTNAME_WONT_WORK = 1285
UNKNOWN_STORAGE_ENGINE = 1286
WARN_DEPRECATED_SYNTAX = 1287
NON_UPDATABLE_TABLE = 1288
FEATURE_DISABLED = 1289
OPTION_PREVENTS_STATEMENT = 1290
DUPLICATED_VALUE_IN_TYPE = 1291
TRUNCATED_WRONG_VALUE = 1292
INVALID_ON_UPDATE = 1294
UNSUPPORTED_PS = 1295
GET_ERRMSG = 1296
GET_TEMPORARY_ERRMSG = 1297
UNKNOWN_TIME_ZONE = 1298
WARN_INVALID_TIMESTAMP = 1299
INVALID_CHARACTER_STRING = 1300
WARN_ALLOWED_PACKET_OVERFLOWED = 1301
CONFLICTING_DECLARATIONS = 1302
SP_NO_RECURSIVE_CREATE = 1303
SP_ALREADY_EXISTS = 1304
SP_DOES_NOT_EXIST = 1305
SP_DROP_FAILED = 1306
SP_STORE_FAILED = 1307
SP_LILABEL_MISMATCH = 1308
SP_LABEL_REDEFINE = 1309
SP_LABEL_MISMATCH = 1310
SP_UNINIT_VAR = 1311
SP_BADSELECT = 1312
SP_BADRETURN = 1313
SP_BADSTATEMENT = 1314
UPDATE_LOG_DEPRECATED_IGNORED = 1315
UPDATE_LOG_DEPRECATED_TRANSLATED = 1316
QUERY_INTERRUPTED = 1317
SP_WRONG_NO_OF_ARGS = 1318
SP_COND_MISMATCH = 1319
SP_NORETURN = 1320
SP_NORETURNEND = 1321
SP_BAD_CURSOR_QUERY = 1322
SP_BAD_CURSOR_SELECT = 1323
SP_CURSOR_MISMATCH = 1324
SP_CURSOR_ALREADY_OPEN = 1325
SP_CURSOR_NOT_OPEN = 1326
SP_UNDECLARED_VAR = 1327
SP_WRONG_NO_OF_FETCH_ARGS = 1328
SP_FETCH_NO_DATA = 1329
SP_DUP_PARAM = 1330
SP_DUP_VAR = 1331
SP_DUP_COND = 1332
SP_DUP_CURS = 1333
SP_CANT_ALTER = 1334
SP_SUBSELECT_NYI = 1335
STMT_NOT_ALLOWED_IN_SF_OR_TRG = 1336
SP_VARCOND_AFTER_CURSHNDLR = 1337
SP_CURSOR_AFTER_HANDLER = 1338
SP_CASE_NOT_FOUND = 1339
FPARSER_TOO_BIG_FILE = 1340
FPARSER_BAD_HEADER = 1341
FPARSER_EOF_IN_COMMENT = 1342
FPARSER_ERROR_IN_PARAMETER = 1343
FPARSER_EOF_IN_UNKNOWN_PARAMETER = 1344
VIEW_NO_EXPLAIN = 1345
WRONG_OBJECT = 1347
NONUPDATEABLE_COLUMN = 1348
VIEW_SELECT_CLAUSE = 1350
VIEW_SELECT_VARIABLE = 1351
VIEW_SELECT_TMPTABLE = 1352
VIEW_WRONG_LIST = 1353
WARN_VIEW_MERGE = 1354
WARN_VIEW_WITHOUT_KEY = 1355
VIEW_INVALID = 1356
SP_NO_DROP_SP = 1357
TRG_ALREADY_EXISTS = 1359
TRG_DOES_NOT_EXIST = 1360
TRG_ON_VIEW_OR_TEMP_TABLE = 1361
TRG_CANT_CHANGE_ROW = 1362
TRG_NO_SUCH_ROW_IN_TRG = 1363
NO_DEFAULT_FOR_FIELD = 1364
DIVISION_BY_ZERO = 1365
TRUNCATED_WRONG_VALUE_FOR_FIELD = 1366
ILLEGAL_VALUE_FOR_TYPE = 1367
VIEW_NONUPD_CHECK = 1368
VIEW_CHECK_FAILED = 1369
PROCACCESS_DENIED_ERROR = 1370
RELAY_LOG_FAIL = 1371
UNKNOWN_TARGET_BINLOG = 1373
IO_ERR_LOG_INDEX_READ = 1374
BINLOG_PURGE_PROHIBITED = 1375
FSEEK_FAIL = 1376
BINLOG_PURGE_FATAL_ERR = 1377
LOG_IN_USE = 1378
LOG_PURGE_UNKNOWN_ERR = 1379
RELAY_LOG_INIT = 1380
NO_BINARY_LOGGING = 1381
RESERVED_SYNTAX = 1382
PS_MANY_PARAM = 1390
KEY_PART_0 = 1391
VIEW_CHECKSUM = 1392
VIEW_MULTIUPDATE = 1393
VIEW_NO_INSERT_FIELD_LIST = 1394
VIEW_DELETE_MERGE_VIEW = 1395
CANNOT_USER = 1396
XAER_NOTA = 1397
XAER_INVAL = 1398
XAER_RMFAIL = 1399
XAER_OUTSIDE = 1400
XAER_RMERR = 1401
XA_RBROLLBACK = 1402
NONEXISTING_PROC_GRANT = 1403
PROC_AUTO_GRANT_FAIL = 1404
PROC_AUTO_REVOKE_FAIL = 1405
DATA_TOO_LONG = 1406
SP_BAD_SQLSTATE = 1407
STARTUP = 1408
LOAD_FROM_FIXED_SIZE_ROWS_TO_VAR = 1409
CANT_CREATE_USER_WITH_GRANT = 1410
WRONG_VALUE_FOR_TYPE = 1411
TABLE_DEF_CHANGED = 1412
SP_DUP_HANDLER = 1413
SP_NOT_VAR_ARG = 1414
SP_NO_RETSET = 1415
CANT_CREATE_GEOMETRY_OBJECT = 1416
BINLOG_UNSAFE_ROUTINE = 1418
BINLOG_CREATE_ROUTINE_NEED_SUPER = 1419
STMT_HAS_NO_OPEN_CURSOR = 1421
COMMIT_NOT_ALLOWED_IN_SF_OR_TRG = 1422
NO_DEFAULT_FOR_VIEW_FIELD = 1423
SP_NO_RECURSION = 1424
TOO_BIG_SCALE = 1425
TOO_BIG_PRECISION = 1426
M_BIGGER_THAN_D = 1427
WRONG_LOCK_OF_SYSTEM_TABLE = 1428
CONNECT_TO_FOREIGN_DATA_SOURCE = 1429
QUERY_ON_FOREIGN_DATA_SOURCE = 1430
FOREIGN_DATA_SOURCE_DOESNT_EXIST = 1431
FOREIGN_DATA_STRING_INVALID_CANT_CREATE = 1432
FOREIGN_DATA_STRING_INVALID = 1433
TRG_IN_WRONG_SCHEMA = 1435
STACK_OVERRUN_NEED_MORE = 1436
TOO_LONG_BODY = 1437
WARN_CANT_DROP_DEFAULT_KEYCACHE = 1438
TOO_BIG_DISPLAYWIDTH = 1439
XAER_DUPID = 1440
DATETIME_FUNCTION_OVERFLOW = 1441
CANT_UPDATE_USED_TABLE_IN_SF_OR_TRG = 1442
VIEW_PREVENT_UPDATE = 1443
PS_NO_RECURSION = 1444
SP_CANT_SET_AUTOCOMMIT = 1445
VIEW_FRM_NO_USER = 1447
VIEW_OTHER_USER = 1448
NO_SUCH_USER = 1449
FORBID_SCHEMA_CHANGE = 1450
ROW_IS_REFERENCED_2 = 1451
NO_REFERENCED_ROW_2 = 1452
SP_BAD_VAR_SHADOW = 1453
TRG_NO_DEFINER = 1454
OLD_FILE_FORMAT = 1455
SP_RECURSION_LIMIT = 1456
SP_WRONG_NAME = 1458
TABLE_NEEDS_UPGRADE = 1459
SP_NO_AGGREGATE = 1460
MAX_PREPARED_STMT_COUNT_REACHED = 1461
VIEW_RECURSIVE = 1462
NON_GROUPING_FIELD_USED = 1463
TABLE_CANT_HANDLE_SPKEYS = 1464
NO_TRIGGERS_ON_SYSTEM_SCHEMA = 1465
REMOVED_SPACES = 1466
AUTOINC_READ_FAILED = 1467
USERNAME = 1468
HOSTNAME = 1469
WRONG_STRING_LENGTH = 1470
NON_INSERTABLE_TABLE = 1471
ADMIN_WRONG_MRG_TABLE = 1472
TOO_HIGH_LEVEL_OF_NESTING_FOR_SELECT = 1473
NAME_BECOMES_EMPTY = 1474
AMBIGUOUS_FIELD_TERM = 1475
FOREIGN_SERVER_EXISTS = 1476
FOREIGN_SERVER_DOESNT_EXIST = 1477
ILLEGAL_HA_CREATE_OPTION = 1478
PARTITION_REQUIRES_VALUES_ERROR = 1479
PARTITION_WRONG_VALUES_ERROR = 1480
PARTITION_MAXVALUE_ERROR = 1481
PARTITION_WRONG_NO_PART_ERROR = 1484
PARTITION_WRONG_NO_SUBPART_ERROR = 1485
WRONG_EXPR_IN_PARTITION_FUNC_ERROR = 1486
FIELD_NOT_FOUND_PART_ERROR = 1488
INCONSISTENT_PARTITION_INFO_ERROR = 1490
PARTITION_FUNC_NOT_ALLOWED_ERROR = 1491
PARTITIONS_MUST_BE_DEFINED_ERROR = 1492
RANGE_NOT_INCREASING_ERROR = 1493
INCONSISTENT_TYPE_OF_FUNCTIONS_ERROR = 1494
MULTIPLE_DEF_CONST_IN_LIST_PART_ERROR = 1495
PARTITION_ENTRY_ERROR = 1496
MIX_HANDLER_ERROR = 1497
PARTITION_NOT_DEFINED_ERROR = 1498
TOO_MANY_PARTITIONS_ERROR = 1499
SUBPARTITION_ERROR = 1500
CANT_CREATE_HANDLER_FILE = 1501
BLOB_FIELD_IN_PART_FUNC_ERROR = 1502
UNIQUE_KEY_NEED_ALL_FIELDS_IN_PF = 1503
NO_PARTS_ERROR = 1504
PARTITION_MGMT_ON_NONPARTITIONED = 1505
FOREIGN_KEY_ON_PARTITIONED = 1506
DROP_PARTITION_NON_EXISTENT = 1507
DROP_LAST_PARTITION = 1508
COALESCE_ONLY_ON_HASH_PARTITION = 1509
REORG_HASH_ONLY_ON_SAME_NO = 1510
REORG_NO_PARAM_ERROR = 1511
ONLY_ON_RANGE_LIST_PARTITION = 1512
ADD_PARTITION_SUBPART_ERROR = 1513
ADD_PARTITION_NO_NEW_PARTITION = 1514
COALESCE_PARTITION_NO_PARTITION = 1515
REORG_PARTITION_NOT_EXIST = 1516
SAME_NAME_PARTITION = 1517
NO_BINLOG_ERROR = 1518
CONSECUTIVE_REORG_PARTITIONS = 1519
REORG_OUTSIDE_RANGE = 1520
PARTITION_FUNCTION_FAILURE = 1521
LIMITED_PART_RANGE = 1523
PLUGIN_IS_NOT_LOADED = 1524
WRONG_VALUE = 1525
NO_PARTITION_FOR_GIVEN_VALUE = 1526
FILEGROUP_OPTION_ONLY_ONCE = 1527
CREATE_FILEGROUP_FAILED = 1528
DROP_FILEGROUP_FAILED = 1529
TABLESPACE_AUTO_EXTEND_ERROR = 1530
WRONG_SIZE_NUMBER = 1531
SIZE_OVERFLOW_ERROR = 1532
ALTER_FILEGROUP_FAILED = 1533
BINLOG_ROW_LOGGING_FAILED = 1534
EVENT_ALREADY_EXISTS = 1537
EVENT_DOES_NOT_EXIST = 1539
EVENT_INTERVAL_NOT_POSITIVE_OR_TOO_BIG = 1542
EVENT_ENDS_BEFORE_STARTS = 1543
EVENT_EXEC_TIME_IN_THE_PAST = 1544
EVENT_SAME_NAME = 1551
DROP_INDEX_FK = 1553
WARN_DEPRECATED_SYNTAX_WITH_VER = 1554
CANT_LOCK_LOG_TABLE = 1556
FOREIGN_DUPLICATE_KEY_OLD_UNUSED = 1557
COL_COUNT_DOESNT_MATCH_PLEASE_UPDATE = 1558
TEMP_TABLE_PREVENTS_SWITCH_OUT_OF_RBR = 1559
STORED_FUNCTION_PREVENTS_SWITCH_BINLOG_FORMAT = 1560
PARTITION_NO_TEMPORARY = 1562
PARTITION_CONST_DOMAIN_ERROR = 1563
PARTITION_FUNCTION_IS_NOT_ALLOWED = 1564
NULL_IN_VALUES_LESS_THAN = 1566
WRONG_PARTITION_NAME = 1567
CANT_CHANGE_TX_CHARACTERISTICS = 1568
DUP_ENTRY_AUTOINCREMENT_CASE = 1569
EVENT_SET_VAR_ERROR = 1571
PARTITION_MERGE_ERROR = 1572
BASE64_DECODE_ERROR = 1575
EVENT_RECURSION_FORBIDDEN = 1576
ONLY_INTEGERS_ALLOWED = 1578
UNSUPORTED_LOG_ENGINE = 1579
BAD_LOG_STATEMENT = 1580
CANT_RENAME_LOG_TABLE = 1581
WRONG_PARAMCOUNT_TO_NATIVE_FCT = 1582
WRONG_PARAMETERS_TO_NATIVE_FCT = 1583
WRONG_PARAMETERS_TO_STORED_FCT = 1584
NATIVE_FCT_NAME_COLLISION = 1585
DUP_ENTRY_WITH_KEY_NAME = 1586
BINLOG_PURGE_EMFILE = 1587
EVENT_CANNOT_CREATE_IN_THE_PAST = 1588
EVENT_CANNOT_ALTER_IN_THE_PAST = 1589
NO_PARTITION_FOR_GIVEN_VALUE_SILENT = 1591
BINLOG_UNSAFE_STATEMENT = 1592
BINLOG_FATAL_ERROR = 1593
BINLOG_LOGGING_IMPOSSIBLE = 1598
VIEW_NO_CREATION_CTX = 1599
VIEW_INVALID_CREATION_CTX = 1600
TRG_CORRUPTED_FILE = 1602
TRG_NO_CREATION_CTX = 1603
TRG_INVALID_CREATION_CTX = 1604
EVENT_INVALID_CREATION_CTX = 1605
TRG_CANT_OPEN_TABLE = 1606
NO_FORMAT_DESCRIPTION_EVENT_BEFORE_BINLOG_STATEMENT = 1609
SLAVE_CORRUPT_EVENT = 1610
LOG_PURGE_NO_FILE = 1612
XA_RBTIMEOUT = 1613
XA_RBDEADLOCK = 1614
NEED_REPREPARE = 1615
WARN_NO_MASTER_INFO = 1617
WARN_OPTION_IGNORED = 1618
PLUGIN_DELETE_BUILTIN = 1619
WARN_PLUGIN_BUSY = 1620
VARIABLE_IS_READONLY = 1621
WARN_ENGINE_TRANSACTION_ROLLBACK = 1622
SLAVE_HEARTBEAT_VALUE_OUT_OF_RANGE = 1624
NDB_REPLICATION_SCHEMA_ERROR = 1625
CONFLICT_FN_PARSE_ERROR = 1626
EXCEPTIONS_WRITE_ERROR = 1627
TOO_LONG_TABLE_COMMENT = 1628
TOO_LONG_FIELD_COMMENT = 1629
FUNC_INEXISTENT_NAME_COLLISION = 1630
DATABASE_NAME = 1631
TABLE_NAME = 1632
PARTITION_NAME = 1633
SUBPARTITION_NAME = 1634
TEMPORARY_NAME = 1635
RENAMED_NAME = 1636
TOO_MANY_CONCURRENT_TRXS = 1637
WARN_NON_ASCII_SEPARATOR_NOT_IMPLEMENTED = 1638
DEBUG_SYNC_TIMEOUT = 1639
DEBUG_SYNC_HIT_LIMIT = 1640
DUP_SIGNAL_SET = 1641
SIGNAL_WARN = 1642
SIGNAL_NOT_FOUND = 1643
SIGNAL_EXCEPTION = 1644
RESIGNAL_WITHOUT_ACTIVE_HANDLER = 1645
SIGNAL_BAD_CONDITION_TYPE = 1646
WARN_COND_ITEM_TRUNCATED = 1647
COND_ITEM_TOO_LONG = 1648
UNKNOWN_LOCALE = 1649
SLAVE_IGNORE_SERVER_IDS = 1650
SAME_NAME_PARTITION_FIELD = 1652
PARTITION_COLUMN_LIST_ERROR = 1653
WRONG_TYPE_COLUMN_VALUE_ERROR = 1654
TOO_MANY_PARTITION_FUNC_FIELDS_ERROR = 1655
MAXVALUE_IN_VALUES_IN = 1656
TOO_MANY_VALUES_ERROR = 1657
ROW_SINGLE_PARTITION_FIELD_ERROR = 1658
FIELD_TYPE_NOT_ALLOWED_AS_PARTITION_FIELD = 1659
PARTITION_FIELDS_TOO_LONG = 1660
BINLOG_ROW_ENGINE_AND_STMT_ENGINE = 1661
BINLOG_ROW_MODE_AND_STMT_ENGINE = 1662
BINLOG_UNSAFE_AND_STMT_ENGINE = 1663
BINLOG_ROW_INJECTION_AND_STMT_ENGINE = 1664
BINLOG_STMT_MODE_AND_ROW_ENGINE = 1665
BINLOG_ROW_INJECTION_AND_STMT_MODE = 1666
BINLOG_MULTIPLE_ENGINES_AND_SELF_LOGGING_ENGINE = 1667
BINLOG_UNSAFE_LIMIT = 1668
BINLOG_UNSAFE_SYSTEM_TABLE = 1670
BINLOG_UNSAFE_AUTOINC_COLUMNS = 1671
BINLOG_UNSAFE_UDF = 1672
BINLOG_UNSAFE_SYSTEM_VARIABLE = 1673
BINLOG_UNSAFE_SYSTEM_FUNCTION = 1674
BINLOG_UNSAFE_NONTRANS_AFTER_TRANS = 1675
MESSAGE_AND_STATEMENT = 1676
SLAVE_CANT_CREATE_CONVERSION = 1678
INSIDE_TRANSACTION_PREVENTS_SWITCH_BINLOG_FORMAT = 1679
PATH_LENGTH = 1680
WARN_DEPRECATED_SYNTAX_NO_REPLACEMENT = 1681
WRONG_NATIVE_TABLE_STRUCTURE = 1682
WRONG_PERFSCHEMA_USAGE = 1683
WARN_I_S_SKIPPED_TABLE = 1684
INSIDE_TRANSACTION_PREVENTS_SWITCH_BINLOG_DIRECT = 1685
STORED_FUNCTION_PREVENTS_SWITCH_BINLOG_DIRECT = 1686
SPATIAL_MUST_HAVE_GEOM_COL = 1687
TOO_LONG_INDEX_COMMENT = 1688
LOCK_ABORTED = 1689
DATA_OUT_OF_RANGE = 1690
WRONG_SPVAR_TYPE_IN_LIMIT = 1691
BINLOG_UNSAFE_MULTIPLE_ENGINES_AND_SELF_LOGGING_ENGINE = 1692
BINLOG_UNSAFE_MIXED_STATEMENT = 1693
INSIDE_TRANSACTION_PREVENTS_SWITCH_SQL_LOG_BIN = 1694
STORED_FUNCTION_PREVENTS_SWITCH_SQL_LOG_BIN = 1695
FAILED_READ_FROM_PAR_FILE = 1696
VALUES_IS_NOT_INT_TYPE_ERROR = 1697
ACCESS_DENIED_NO_PASSWORD_ERROR = 1698
SET_PASSWORD_AUTH_PLUGIN = 1699
TRUNCATE_ILLEGAL_FK = 1701
PLUGIN_IS_PERMANENT = 1702
SLAVE_HEARTBEAT_VALUE_OUT_OF_RANGE_MIN = 1703
SLAVE_HEARTBEAT_VALUE_OUT_OF_RANGE_MAX = 1704
STMT_CACHE_FULL = 1705
MULTI_UPDATE_KEY_CONFLICT = 1706
TABLE_NEEDS_REBUILD = 1707
WARN_OPTION_BELOW_LIMIT = 1708
INDEX_COLUMN_TOO_LONG = 1709
ERROR_IN_TRIGGER_BODY = 1710
ERROR_IN_UNKNOWN_TRIGGER_BODY = 1711
INDEX_CORRUPT = 1712
UNDO_RECORD_TOO_BIG = 1713
BINLOG_UNSAFE_INSERT_IGNORE_SELECT = 1714
BINLOG_UNSAFE_INSERT_SELECT_UPDATE = 1715
BINLOG_UNSAFE_REPLACE_SELECT = 1716
BINLOG_UNSAFE_CREATE_IGNORE_SELECT = 1717
BINLOG_UNSAFE_CREATE_REPLACE_SELECT = 1718
BINLOG_UNSAFE_UPDATE_IGNORE = 1719
PLUGIN_NO_UNINSTALL = 1720
PLUGIN_NO_INSTALL = 1721
BINLOG_UNSAFE_WRITE_AUTOINC_SELECT = 1722
BINLOG_UNSAFE_CREATE_SELECT_AUTOINC = 1723
BINLOG_UNSAFE_INSERT_TWO_KEYS = 1724
TABLE_IN_FK_CHECK = 1725
UNSUPPORTED_ENGINE = 1726
BINLOG_UNSAFE_AUTOINC_NOT_FIRST = 1727
CANNOT_LOAD_FROM_TABLE_V2 = 1728
MASTER_DELAY_VALUE_OUT_OF_RANGE = 1729
ONLY_FD_AND_RBR_EVENTS_ALLOWED_IN_BINLOG_STATEMENT = 1730
PARTITION_EXCHANGE_DIFFERENT_OPTION = 1731
PARTITION_EXCHANGE_PART_TABLE = 1732
PARTITION_EXCHANGE_TEMP_TABLE = 1733
PARTITION_INSTEAD_OF_SUBPARTITION = 1734
UNKNOWN_PARTITION = 1735
TABLES_DIFFERENT_METADATA = 1736
ROW_DOES_NOT_MATCH_PARTITION = 1737
BINLOG_CACHE_SIZE_GREATER_THAN_MAX = 1738
WARN_INDEX_NOT_APPLICABLE = 1739
PARTITION_EXCHANGE_FOREIGN_KEY = 1740
RPL_INFO_DATA_TOO_LONG = 1742
BINLOG_STMT_CACHE_SIZE_GREATER_THAN_MAX = 1745
CANT_UPDATE_TABLE_IN_CREATE_TABLE_SELECT = 1746
PARTITION_CLAUSE_ON_NONPARTITIONED = 1747
ROW_DOES_NOT_MATCH_GIVEN_PARTITION_SET = 1748
CHANGE_RPL_INFO_REPOSITORY_FAILURE = 1750
WARNING_NOT_COMPLETE_ROLLBACK_WITH_CREATED_TEMP_TABLE = 1751
WARNING_NOT_COMPLETE_ROLLBACK_WITH_DROPPED_TEMP_TABLE = 1752
MTS_FEATURE_IS_NOT_SUPPORTED = 1753
MTS_UPDATED_DBS_GREATER_MAX = 1754
MTS_CANT_PARALLEL = 1755
MTS_INCONSISTENT_DATA = 1756
FULLTEXT_NOT_SUPPORTED_WITH_PARTITIONING = 1757
DA_INVALID_CONDITION_NUMBER = 1758
INSECURE_PLAIN_TEXT = 1759
INSECURE_CHANGE_MASTER = 1760
FOREIGN_DUPLICATE_KEY_WITH_CHILD_INFO = 1761
FOREIGN_DUPLICATE_KEY_WITHOUT_CHILD_INFO = 1762
SQLTHREAD_WITH_SECURE_SLAVE = 1763
TABLE_HAS_NO_FT = 1764
VARIABLE_NOT_SETTABLE_IN_SF_OR_TRIGGER = 1765
VARIABLE_NOT_SETTABLE_IN_TRANSACTION = 1766
SET_STATEMENT_CANNOT_INVOKE_FUNCTION = 1769
GTID_NEXT_CANT_BE_AUTOMATIC_IF_GTID_NEXT_LIST_IS_NON_NULL = 1770
MALFORMED_GTID_SET_SPECIFICATION = 1772
MALFORMED_GTID_SET_ENCODING = 1773
MALFORMED_GTID_SPECIFICATION = 1774
GNO_EXHAUSTED = 1775
BAD_SLAVE_AUTO_POSITION = 1776
AUTO_POSITION_REQUIRES_GTID_MODE_NOT_OFF = 1777
CANT_DO_IMPLICIT_COMMIT_IN_TRX_WHEN_GTID_NEXT_IS_SET = 1778
GTID_MODE_ON_REQUIRES_ENFORCE_GTID_CONSISTENCY_ON = 1779
CANT_SET_GTID_NEXT_TO_GTID_WHEN_GTID_MODE_IS_OFF = 1781
CANT_SET_GTID_NEXT_TO_ANONYMOUS_WHEN_GTID_MODE_IS_ON = 1782
CANT_SET_GTID_NEXT_LIST_TO_NON_NULL_WHEN_GTID_MODE_IS_OFF = 1783
GTID_UNSAFE_NON_TRANSACTIONAL_TABLE = 1785
GTID_UNSAFE_CREATE_SELECT = 1786
GTID_UNSAFE_CREATE_DROP_TEMPORARY_TABLE_IN_TRANSACTION = 1787
GTID_MODE_CAN_ONLY_CHANGE_ONE_STEP_AT_A_TIME = 1788
MASTER_HAS_PURGED_REQUIRED_GTIDS = 1789
CANT_SET_GTID_NEXT_WHEN_OWNING_GTID = 1790
UNKNOWN_EXPLAIN_FORMAT = 1791
CANT_EXECUTE_IN_READ_ONLY_TRANSACTION = 1792
TOO_LONG_TABLE_PARTITION_COMMENT = 1793
SLAVE_CONFIGURATION = 1794
INNODB_FT_LIMIT = 1795
INNODB_NO_FT_TEMP_TABLE = 1796
INNODB_FT_WRONG_DOCID_COLUMN = 1797
INNODB_FT_WRONG_DOCID_INDEX = 1798
INNODB_ONLINE_LOG_TOO_BIG = 1799
UNKNOWN_ALTER_ALGORITHM = 1800
UNKNOWN_ALTER_LOCK = 1801
MTS_CHANGE_MASTER_CANT_RUN_WITH_GAPS = 1802
MTS_RECOVERY_FAILURE = 1803
MTS_RESET_WORKERS = 1804
COL_COUNT_DOESNT_MATCH_CORRUPTED_V2 = 1805
SLAVE_SILENT_RETRY_TRANSACTION = 1806
DISCARD_FK_CHECKS_RUNNING = 1807
TABLE_SCHEMA_MISMATCH = 1808
TABLE_IN_SYSTEM_TABLESPACE = 1809
IO_READ_ERROR = 1810
IO_WRITE_ERROR = 1811
TABLESPACE_MISSING = 1812
TABLESPACE_EXISTS = 1813
TABLESPACE_DISCARDED = 1814
INTERNAL_ERROR = 1815
INNODB_IMPORT_ERROR = 1816
INNODB_INDEX_CORRUPT = 1817
INVALID_YEAR_COLUMN_LENGTH = 1818
NOT_VALID_PASSWORD = 1819
MUST_CHANGE_PASSWORD = 1820
FK_NO_INDEX_CHILD = 1821
FK_NO_INDEX_PARENT = 1822
FK_FAIL_ADD_SYSTEM = 1823
FK_CANNOT_OPEN_PARENT = 1824
FK_INCORRECT_OPTION = 1825
FK_DUP_NAME = 1826
PASSWORD_FORMAT = 1827
FK_COLUMN_CANNOT_DROP = 1828
FK_COLUMN_CANNOT_DROP_CHILD = 1829
FK_COLUMN_NOT_NULL = 1830
DUP_INDEX = 1831
FK_COLUMN_CANNOT_CHANGE = 1832
FK_COLUMN_CANNOT_CHANGE_CHILD = 1833
MALFORMED_PACKET = 1835
READ_ONLY_MODE = 1836
GTID_NEXT_TYPE_UNDEFINED_GTID = 1837
VARIABLE_NOT_SETTABLE_IN_SP = 1838
CANT_SET_GTID_PURGED_WHEN_GTID_EXECUTED_IS_NOT_EMPTY = 1840
CANT_SET_GTID_PURGED_WHEN_OWNED_GTIDS_IS_NOT_EMPTY = 1841
GTID_PURGED_WAS_CHANGED = 1842
GTID_EXECUTED_WAS_CHANGED = 1843
BINLOG_STMT_MODE_AND_NO_REPL_TABLES = 1844
ALTER_OPERATION_NOT_SUPPORTED = 1845
ALTER_OPERATION_NOT_SUPPORTED_REASON = 1846
ALTER_OPERATION_NOT_SUPPORTED_REASON_COPY = 1847
ALTER_OPERATION_NOT_SUPPORTED_REASON_PARTITION = 1848
ALTER_OPERATION_NOT_SUPPORTED_REASON_FK_RENAME = 1849
ALTER_OPERATION_NOT_SUPPORTED_REASON_COLUMN_TYPE = 1850
ALTER_OPERATION_NOT_SUPPORTED_REASON_FK_CHECK = 1851
ALTER_OPERATION_NOT_SUPPORTED_REASON_NOPK = 1853
ALTER_OPERATION_NOT_SUPPORTED_REASON_AUTOINC = 1854
ALTER_OPERATION_NOT_SUPPORTED_REASON_HIDDEN_FTS = 1855
ALTER_OPERATION_NOT_SUPPORTED_REASON_CHANGE_FTS = 1856
ALTER_OPERATION_NOT_SUPPORTED_REASON_FTS = 1857
SQL_SLAVE_SKIP_COUNTER_NOT_SETTABLE_IN_GTID_MODE = 1858
DUP_UNKNOWN_IN_INDEX = 1859
IDENT_CAUSES_TOO_LONG_PATH = 1860
ALTER_OPERATION_NOT_SUPPORTED_REASON_NOT_NULL = 1861
MUST_CHANGE_PASSWORD_LOGIN = 1862
ROW_IN_WRONG_PARTITION = 1863
MTS_EVENT_BIGGER_PENDING_JOBS_SIZE_MAX = 1864
BINLOG_LOGICAL_CORRUPTION = 1866
WARN_PURGE_LOG_IN_USE = 1867
WARN_PURGE_LOG_IS_ACTIVE = 1868
AUTO_INCREMENT_CONFLICT = 1869
WARN_ON_BLOCKHOLE_IN_RBR = 1870
SLAVE_MI_INIT_REPOSITORY = 1871
SLAVE_RLI_INIT_REPOSITORY = 1872
ACCESS_DENIED_CHANGE_USER_ERROR = 1873
INNODB_READ_ONLY = 1874
STOP_SLAVE_SQL_THREAD_TIMEOUT = 1875
STOP_SLAVE_IO_THREAD_TIMEOUT = 1876
TABLE_CORRUPT = 1877
TEMP_FILE_WRITE_FAILURE = 1878
INNODB_FT_AUX_NOT_HEX_ID = 1879
OLD_TEMPORALS_UPGRADED = 1880
INNODB_FORCED_RECOVERY = 1881
AES_INVALID_IV = 1882
PLUGIN_CANNOT_BE_UNINSTALLED = 1883
GTID_UNSAFE_BINLOG_SPLITTABLE_STATEMENT_AND_ASSIGNED_GTID = 1884
SLAVE_HAS_MORE_GTIDS_THAN_MASTER = 1885
MISSING_KEY = 1886
ERROR_LAST = 1973
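
# Sketch (illustrative only): server error numbers are exposed the same way as
# the CR constants, as the first element of the exception args, so a
# duplicate-key insert can be told apart from other integrity failures.
# The cursor and table name are placeholders.
def _insert_ignoring_duplicates(cursor, name):
    import MySQLdb

    try:
        cursor.execute("INSERT INTO example_table (name) VALUES (%s)", (name,))
    except MySQLdb.IntegrityError as exc:
        if exc.args[0] != DUP_ENTRY:  # 1062
            raise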

@ -0,0 +1,40 @@
"""MySQL FIELD_TYPE Constants
These constants represent the various column (field) types that are
supported by MySQL.
"""
DECIMAL = 0
TINY = 1
SHORT = 2
LONG = 3
FLOAT = 4
DOUBLE = 5
NULL = 6
TIMESTAMP = 7
LONGLONG = 8
INT24 = 9
DATE = 10
TIME = 11
DATETIME = 12
YEAR = 13
# NEWDATE = 14 # Internal to MySQL.
VARCHAR = 15
BIT = 16
# TIMESTAMP2 = 17
# DATETIME2 = 18
# TIME2 = 19
JSON = 245
NEWDECIMAL = 246
ENUM = 247
SET = 248
TINY_BLOB = 249
MEDIUM_BLOB = 250
LONG_BLOB = 251
BLOB = 252
VAR_STRING = 253
STRING = 254
GEOMETRY = 255
CHAR = TINY
INTERVAL = ENUM
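
# Sketch (illustrative only): each entry of cursor.description carries one of
# the type codes above as its second field; the DBAPISet groups exported by
# MySQLdb (STRING, NUMBER, ...) compare equal to their members, so either
# style works.  `cursor` is assumed to hold an executed result.
def _classify_columns(cursor):
    import MySQLdb

    kinds = {}
    for description in cursor.description:
        name, type_code = description[0], description[1]
        if type_code == JSON:
            kinds[name] = "json"
        elif type_code == MySQLdb.NUMBER:  # DBAPISet membership test
            kinds[name] = "number"
        elif type_code == MySQLdb.STRING:
            kinds[name] = "string"
        else:
            kinds[name] = "other"
    return kinds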

@ -0,0 +1,23 @@
"""MySQL FLAG Constants
These flags are used along with the FIELD_TYPE to indicate various
properties of columns in a result set.
"""
NOT_NULL = 1
PRI_KEY = 2
UNIQUE_KEY = 4
MULTIPLE_KEY = 8
BLOB = 16
UNSIGNED = 32
ZEROFILL = 64
BINARY = 128
ENUM = 256
AUTO_INCREMENT = 512
TIMESTAMP = 1024
SET = 2048
NUM = 32768
PART_KEY = 16384
GROUP = 32768
UNIQUE = 65536
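
# Sketch (illustrative only): cursor.description_flags (a non-standard
# extension, see cursors.BaseCursor) holds one bitmask per result column;
# individual properties are tested with bitwise AND.
def _not_null_columns(cursor):
    return [
        description[0]
        for description, flags in zip(cursor.description,
                                      cursor.description_flags)
        if flags & NOT_NULL
    ]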

@ -0,0 +1 @@
__all__ = ["CR", "FIELD_TYPE", "CLIENT", "ER", "FLAG"]

@ -0,0 +1,139 @@
"""MySQLdb type conversion module
This module handles all the type conversions for MySQL. If the default
type conversions aren't what you need, you can make your own. The
dictionary conversions maps some kind of type to a conversion function
which returns the corresponding value:
Key: FIELD_TYPE.* (from MySQLdb.constants)
Conversion function:
Arguments: string
Returns: Python object
Key: Python type object (from types) or class
Conversion function:
Arguments: Python object of indicated type or class AND
conversion dictionary
Returns: SQL literal value
Notes: Most conversion functions can ignore the dictionary, but
it is a required parameter. It is necessary for converting
things like sequences and instances.
Don't modify conversions if you can avoid it. Instead, make copies
(with the copy() method), modify the copies, and then pass them to
MySQLdb.connect().
"""
from decimal import Decimal
from MySQLdb._mysql import string_literal
from MySQLdb.constants import FIELD_TYPE, FLAG
from MySQLdb.times import (
Date,
DateTimeType,
DateTime2literal,
DateTimeDeltaType,
DateTimeDelta2literal,
DateTime_or_None,
TimeDelta_or_None,
Date_or_None,
)
from MySQLdb._exceptions import ProgrammingError
import array
NoneType = type(None)
try:
ArrayType = array.ArrayType
except AttributeError:
ArrayType = array.array
def Bool2Str(s, d):
return b"1" if s else b"0"
def Set2Str(s, d):
# Only supports ASCII strings. Not tested.
return string_literal(",".join(s))
def Thing2Str(s, d):
"""Convert something into a string via str()."""
return str(s)
def Float2Str(o, d):
s = repr(o)
if s in ("inf", "nan"):
raise ProgrammingError("%s can not be used with MySQL" % s)
if "e" not in s:
s += "e0"
return s
def None2NULL(o, d):
"""Convert None to NULL."""
return b"NULL"
def Thing2Literal(o, d):
"""Convert something into a SQL string literal. If using
MySQL-3.23 or newer, string_literal() is a method of the
_mysql.MYSQL object, and this function will be overridden with
that method when the connection is created."""
return string_literal(o)
def Decimal2Literal(o, d):
return format(o, "f")
def array2Str(o, d):
return Thing2Literal(o.tostring(), d)
# bytes or str regarding to BINARY_FLAG.
_bytes_or_str = ((FLAG.BINARY, bytes), (None, str))
conversions = {
int: Thing2Str,
float: Float2Str,
NoneType: None2NULL,
ArrayType: array2Str,
bool: Bool2Str,
Date: Thing2Literal,
DateTimeType: DateTime2literal,
DateTimeDeltaType: DateTimeDelta2literal,
set: Set2Str,
Decimal: Decimal2Literal,
FIELD_TYPE.TINY: int,
FIELD_TYPE.SHORT: int,
FIELD_TYPE.LONG: int,
FIELD_TYPE.FLOAT: float,
FIELD_TYPE.DOUBLE: float,
FIELD_TYPE.DECIMAL: Decimal,
FIELD_TYPE.NEWDECIMAL: Decimal,
FIELD_TYPE.LONGLONG: int,
FIELD_TYPE.INT24: int,
FIELD_TYPE.YEAR: int,
FIELD_TYPE.TIMESTAMP: DateTime_or_None,
FIELD_TYPE.DATETIME: DateTime_or_None,
FIELD_TYPE.TIME: TimeDelta_or_None,
FIELD_TYPE.DATE: Date_or_None,
FIELD_TYPE.TINY_BLOB: bytes,
FIELD_TYPE.MEDIUM_BLOB: bytes,
FIELD_TYPE.LONG_BLOB: bytes,
FIELD_TYPE.BLOB: bytes,
FIELD_TYPE.STRING: bytes,
FIELD_TYPE.VAR_STRING: bytes,
FIELD_TYPE.VARCHAR: bytes,
FIELD_TYPE.JSON: bytes,
}
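
# Sketch (illustrative only), following the module docstring: copy the default
# dictionary, adjust an entry, and hand the copy to connect() via conv=.
# Connection parameters are placeholders.
def _connect_with_float_decimals():
    import MySQLdb

    my_conv = conversions.copy()
    # Return DECIMAL/NEWDECIMAL columns as float instead of decimal.Decimal.
    my_conv[FIELD_TYPE.DECIMAL] = float
    my_conv[FIELD_TYPE.NEWDECIMAL] = float
    return MySQLdb.connect(host="localhost", user="example_user",
                           password="example_pass", database="example_db",
                           conv=my_conv)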

@ -0,0 +1,489 @@
"""MySQLdb Cursors
This module implements Cursors of various types for MySQLdb. By
default, MySQLdb uses the Cursor class.
"""
import re
from ._exceptions import ProgrammingError
#: Regular expression for :meth:`Cursor.executemany`.
#: executemany only supports simple bulk inserts.
#: You can use it to load large datasets.
RE_INSERT_VALUES = re.compile(
"".join(
[
r"\s*((?:INSERT|REPLACE)\b.+\bVALUES?\s*)",
r"(\(\s*(?:%s|%\(.+\)s)\s*(?:,\s*(?:%s|%\(.+\)s)\s*)*\))",
r"(\s*(?:ON DUPLICATE.*)?);?\s*\Z",
]
),
re.IGNORECASE | re.DOTALL,
)
class BaseCursor:
"""A base for Cursor classes. Useful attributes:
description
A tuple of DB API 7-tuples describing the columns in
the last executed query; see PEP-249 for details.
description_flags
Tuple of column flags for last query, one entry per column
in the result set. Values correspond to those in
MySQLdb.constants.FLAG. See MySQL documentation (C API)
for more information. Non-standard extension.
arraysize
default number of rows fetchmany() will fetch
"""
#: Max statement size which :meth:`executemany` generates.
#:
#: Max size of allowed statement is max_allowed_packet - packet_header_size.
#: Default value of max_allowed_packet is 1048576.
max_stmt_length = 64 * 1024
from ._exceptions import (
MySQLError,
Warning,
Error,
InterfaceError,
DatabaseError,
DataError,
OperationalError,
IntegrityError,
InternalError,
ProgrammingError,
NotSupportedError,
)
connection = None
def __init__(self, connection):
self.connection = connection
self.description = None
self.description_flags = None
self.rowcount = -1
self.arraysize = 1
self._executed = None
self.lastrowid = None
self._result = None
self.rownumber = None
self._rows = None
def close(self):
"""Close the cursor. No further queries will be possible."""
try:
if self.connection is None:
return
while self.nextset():
pass
finally:
self.connection = None
self._result = None
def __enter__(self):
return self
def __exit__(self, *exc_info):
del exc_info
self.close()
def _escape_args(self, args, conn):
encoding = conn.encoding
literal = conn.literal
def ensure_bytes(x):
if isinstance(x, str):
return x.encode(encoding)
elif isinstance(x, tuple):
return tuple(map(ensure_bytes, x))
elif isinstance(x, list):
return list(map(ensure_bytes, x))
return x
if isinstance(args, (tuple, list)):
ret = tuple(literal(ensure_bytes(arg)) for arg in args)
elif isinstance(args, dict):
ret = {
ensure_bytes(key): literal(ensure_bytes(val))
for (key, val) in args.items()
}
else:
# If it's not a dictionary, try escaping it anyway.
# Worst case it will raise a ValueError.
ret = literal(ensure_bytes(args))
ensure_bytes = None # break circular reference
return ret
def _check_executed(self):
if not self._executed:
raise ProgrammingError("execute() first")
def nextset(self):
"""Advance to the next result set.
Returns None if there are no more result sets.
"""
if self._executed:
self.fetchall()
db = self._get_db()
nr = db.next_result()
if nr == -1:
return None
self._do_get_result(db)
self._post_get_result()
return 1
def _do_get_result(self, db):
self._result = result = self._get_result()
if result is None:
self.description = self.description_flags = None
else:
self.description = result.describe()
self.description_flags = result.field_flags()
self.rowcount = db.affected_rows()
self.rownumber = 0
self.lastrowid = db.insert_id()
def _post_get_result(self):
pass
def setinputsizes(self, *args):
"""Does nothing, required by DB API."""
def setoutputsizes(self, *args):
"""Does nothing, required by DB API."""
def _get_db(self):
con = self.connection
if con is None:
raise ProgrammingError("cursor closed")
return con
def execute(self, query, args=None):
"""Execute a query.
query -- string, query to execute on server
args -- optional sequence or mapping, parameters to use with query.
Note: If args is a sequence, then %s must be used as the
parameter placeholder in the query. If a mapping is used,
%(key)s must be used as the placeholder.
Returns an integer representing the number of rows affected, if any.
"""
while self.nextset():
pass
db = self._get_db()
if isinstance(query, str):
query = query.encode(db.encoding)
if args is not None:
if isinstance(args, dict):
nargs = {}
for key, item in args.items():
if isinstance(key, str):
key = key.encode(db.encoding)
nargs[key] = db.literal(item)
args = nargs
else:
args = tuple(map(db.literal, args))
try:
query = query % args
except TypeError as m:
raise ProgrammingError(str(m))
assert isinstance(query, (bytes, bytearray))
res = self._query(query)
return res
def executemany(self, query, args):
# type: (str, list) -> int
"""Execute a multi-row query.
:param query: query to execute on server
:param args: Sequence of sequences or mappings. It is used as the parameters.
:return: Number of rows affected, if any.
This method improves performance on multiple-row INSERT and
REPLACE. Otherwise it is equivalent to looping over args with
execute().
"""
if not args:
return
m = RE_INSERT_VALUES.match(query)
if m:
q_prefix = m.group(1) % ()
q_values = m.group(2).rstrip()
q_postfix = m.group(3) or ""
assert q_values[0] == "(" and q_values[-1] == ")"
return self._do_execute_many(
q_prefix,
q_values,
q_postfix,
args,
self.max_stmt_length,
self._get_db().encoding,
)
self.rowcount = sum(self.execute(query, arg) for arg in args)
return self.rowcount
def _do_execute_many(
self, prefix, values, postfix, args, max_stmt_length, encoding
):
conn = self._get_db()
escape = self._escape_args
if isinstance(prefix, str):
prefix = prefix.encode(encoding)
if isinstance(values, str):
values = values.encode(encoding)
if isinstance(postfix, str):
postfix = postfix.encode(encoding)
sql = bytearray(prefix)
args = iter(args)
v = values % escape(next(args), conn)
sql += v
rows = 0
for arg in args:
v = values % escape(arg, conn)
if len(sql) + len(v) + len(postfix) + 1 > max_stmt_length:
rows += self.execute(sql + postfix)
sql = bytearray(prefix)
else:
sql += b","
sql += v
rows += self.execute(sql + postfix)
self.rowcount = rows
return rows
def callproc(self, procname, args=()):
"""Execute stored procedure procname with args
procname -- string, name of procedure to execute on server
args -- Sequence of parameters to use with procedure
Returns the original args.
Compatibility warning: PEP-249 specifies that any modified
parameters must be returned. This is currently impossible
as they are only available by storing them in a server
variable and then retrieving them with a query. Since stored
procedures return zero or more result sets, there is no
reliable way to get at OUT or INOUT parameters via callproc.
The server variables are named @_procname_n, where procname
is the parameter above and n is the position of the parameter
(from zero). Once all result sets generated by the procedure
have been fetched, you can issue a SELECT @_procname_0, ...
query using .execute() to get any OUT or INOUT values.
Compatibility warning: The act of calling a stored procedure
itself creates an empty result set. This appears after any
result sets generated by the procedure. This is non-standard
behavior with respect to the DB-API. Be sure to use nextset()
to advance through all result sets; otherwise you may get
disconnected.
"""
db = self._get_db()
if isinstance(procname, str):
procname = procname.encode(db.encoding)
if args:
fmt = b"@_" + procname + b"_%d=%s"
q = b"SET %s" % b",".join(
fmt % (index, db.literal(arg)) for index, arg in enumerate(args)
)
self._query(q)
self.nextset()
q = b"CALL %s(%s)" % (
procname,
b",".join([b"@_%s_%d" % (procname, i) for i in range(len(args))]),
)
self._query(q)
return args
def _query(self, q):
db = self._get_db()
self._result = None
db.query(q)
self._do_get_result(db)
self._post_get_result()
self._executed = q
return self.rowcount
def _fetch_row(self, size=1):
if not self._result:
return ()
return self._result.fetch_row(size, self._fetch_type)
def __iter__(self):
return iter(self.fetchone, None)
Warning = Warning
Error = Error
InterfaceError = InterfaceError
DatabaseError = DatabaseError
DataError = DataError
OperationalError = OperationalError
IntegrityError = IntegrityError
InternalError = InternalError
ProgrammingError = ProgrammingError
NotSupportedError = NotSupportedError
class CursorStoreResultMixIn:
"""This is a MixIn class which causes the entire result set to be
stored on the client side, i.e. it uses mysql_store_result(). If the
result set can be very large, consider adding a LIMIT clause to your
query, or using CursorUseResultMixIn instead."""
def _get_result(self):
return self._get_db().store_result()
def _post_get_result(self):
self._rows = self._fetch_row(0)
self._result = None
def fetchone(self):
"""Fetches a single row from the cursor. None indicates that
no more rows are available."""
self._check_executed()
if self.rownumber >= len(self._rows):
return None
result = self._rows[self.rownumber]
self.rownumber = self.rownumber + 1
return result
def fetchmany(self, size=None):
"""Fetch up to size rows from the cursor. Result set may be smaller
than size. If size is not defined, cursor.arraysize is used."""
self._check_executed()
end = self.rownumber + (size or self.arraysize)
result = self._rows[self.rownumber : end]
self.rownumber = min(end, len(self._rows))
return result
def fetchall(self):
"""Fetchs all available rows from the cursor."""
self._check_executed()
if self.rownumber:
result = self._rows[self.rownumber :]
else:
result = self._rows
self.rownumber = len(self._rows)
return result
def scroll(self, value, mode="relative"):
"""Scroll the cursor in the result set to a new position according
to mode.
If mode is 'relative' (default), value is taken as offset to
the current position in the result set, if set to 'absolute',
value states an absolute target position."""
self._check_executed()
if mode == "relative":
r = self.rownumber + value
elif mode == "absolute":
r = value
else:
raise ProgrammingError("unknown scroll mode %s" % repr(mode))
if r < 0 or r >= len(self._rows):
raise IndexError("out of range")
self.rownumber = r
def __iter__(self):
self._check_executed()
result = self.rownumber and self._rows[self.rownumber :] or self._rows
return iter(result)
class CursorUseResultMixIn:
"""This is a MixIn class which causes the result set to be stored
in the server and sent row-by-row to client side, i.e. it uses
mysql_use_result(). You MUST retrieve the entire result set and
close() the cursor before additional queries can be performed on
the connection."""
def _get_result(self):
return self._get_db().use_result()
def fetchone(self):
"""Fetches a single row from the cursor."""
self._check_executed()
r = self._fetch_row(1)
if not r:
return None
self.rownumber = self.rownumber + 1
return r[0]
def fetchmany(self, size=None):
"""Fetch up to size rows from the cursor. Result set may be smaller
than size. If size is not defined, cursor.arraysize is used."""
self._check_executed()
r = self._fetch_row(size or self.arraysize)
self.rownumber = self.rownumber + len(r)
return r
def fetchall(self):
"""Fetchs all available rows from the cursor."""
self._check_executed()
r = self._fetch_row(0)
self.rownumber = self.rownumber + len(r)
return r
def __iter__(self):
return self
def next(self):
row = self.fetchone()
if row is None:
raise StopIteration
return row
__next__ = next
class CursorTupleRowsMixIn:
"""This is a MixIn class that causes all rows to be returned as tuples,
which is the standard form required by DB API."""
_fetch_type = 0
class CursorDictRowsMixIn:
"""This is a MixIn class that causes all rows to be returned as
dictionaries. This is a non-standard feature."""
_fetch_type = 1
class Cursor(CursorStoreResultMixIn, CursorTupleRowsMixIn, BaseCursor):
"""This is the standard Cursor class that returns rows as tuples
and stores the result set in the client."""
class DictCursor(CursorStoreResultMixIn, CursorDictRowsMixIn, BaseCursor):
"""This is a Cursor class that returns rows as dictionaries and
stores the result set in the client."""
class SSCursor(CursorUseResultMixIn, CursorTupleRowsMixIn, BaseCursor):
"""This is a Cursor class that returns rows as tuples and stores
the result set in the server."""
class SSDictCursor(CursorUseResultMixIn, CursorDictRowsMixIn, BaseCursor):
"""This is a Cursor class that returns rows as dictionaries and
stores the result set in the server."""
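An editorial usage sketch, not part of cursors.py: how the cursor classes above and callproc's OUT-parameter convention are typically driven. The connection parameters and the stored procedure name add_one are hypothetical, and a reachable MySQL server is assumed.
# Editorial example only, not part of the package.
import MySQLdb
from MySQLdb.cursors import DictCursor

conn = MySQLdb.connect(host="localhost", user="user", password="secret", database="test")
try:
    # Default Cursor: client-side buffering, rows returned as tuples.
    cur = conn.cursor()
    cur.execute("SELECT 1 + %s", (2,))
    print(cur.fetchone())            # (3,)

    # DictCursor: same buffering, rows returned as dictionaries keyed by column name.
    dcur = conn.cursor(DictCursor)
    dcur.execute("SELECT 1 AS one")
    print(dcur.fetchone())           # {'one': 1}

    # callproc(): as its docstring explains, OUT/INOUT values are not returned
    # directly; read them back from the @_procname_n server variables once
    # every result set has been consumed with nextset().
    cur.callproc("add_one", (41, 0))  # hypothetical procedure with an OUT argument
    while cur.nextset() is not None:
        pass
    cur.execute("SELECT @_add_one_0, @_add_one_1")
    print(cur.fetchone())
finally:
    conn.close()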

@ -0,0 +1,4 @@
__author__ = "Inada Naoki <songofacandy@gmail.com>"
version_info = (2, 0, 1, 'final', 0)
__version__ = "2.0.1"

@ -0,0 +1,146 @@
"""times module
This module provides some Date and Time classes for dealing with MySQL data.
Use the Python datetime module to handle date and time columns.
"""
from time import localtime
from datetime import date, datetime, time, timedelta
from MySQLdb._mysql import string_literal
Date = date
Time = time
TimeDelta = timedelta
Timestamp = datetime
DateTimeDeltaType = timedelta
DateTimeType = datetime
def DateFromTicks(ticks):
"""Convert UNIX ticks into a date instance."""
return date(*localtime(ticks)[:3])
def TimeFromTicks(ticks):
"""Convert UNIX ticks into a time instance."""
return time(*localtime(ticks)[3:6])
def TimestampFromTicks(ticks):
"""Convert UNIX ticks into a datetime instance."""
return datetime(*localtime(ticks)[:6])
format_TIME = format_DATE = str
def format_TIMEDELTA(v):
seconds = int(v.seconds) % 60
minutes = int(v.seconds // 60) % 60
hours = int(v.seconds // 3600) % 24
return "%d %d:%d:%d" % (v.days, hours, minutes, seconds)
def format_TIMESTAMP(d):
"""
:type d: datetime.datetime
"""
if d.microsecond:
fmt = " ".join(
[
"{0.year:04}-{0.month:02}-{0.day:02}",
"{0.hour:02}:{0.minute:02}:{0.second:02}.{0.microsecond:06}",
]
)
else:
fmt = " ".join(
[
"{0.year:04}-{0.month:02}-{0.day:02}",
"{0.hour:02}:{0.minute:02}:{0.second:02}",
]
)
return fmt.format(d)
def DateTime_or_None(s):
try:
if len(s) < 11:
return Date_or_None(s)
micros = s[20:]
if len(micros) == 0:
# 12:00:00
micros = 0
elif len(micros) < 7:
# 12:00:00.123456
micros = int(micros) * 10 ** (6 - len(micros))
else:
return None
return datetime(
int(s[:4]), # year
int(s[5:7]), # month
int(s[8:10]), # day
int(s[11:13] or 0), # hour
int(s[14:16] or 0), # minute
int(s[17:19] or 0), # second
micros, # microsecond
)
except ValueError:
return None
def TimeDelta_or_None(s):
try:
h, m, s = s.split(":")
if "." in s:
s, ms = s.split(".")
ms = ms.ljust(6, "0")
else:
ms = 0
if h[0] == "-":
negative = True
else:
negative = False
h, m, s, ms = abs(int(h)), int(m), int(s), int(ms)
td = timedelta(hours=h, minutes=m, seconds=s, microseconds=ms)
if negative:
return -td
else:
return td
except ValueError:
# unpacking or int/float conversion failed
return None
def Time_or_None(s):
try:
h, m, s = s.split(":")
if "." in s:
s, ms = s.split(".")
ms = ms.ljust(6, "0")
else:
ms = 0
h, m, s, ms = int(h), int(m), int(s), int(ms)
return time(hour=h, minute=m, second=s, microsecond=ms)
except ValueError:
return None
def Date_or_None(s):
try:
        return date(int(s[:4]), int(s[5:7]), int(s[8:10]))  # year, month, day
except ValueError:
return None
def DateTime2literal(d, c):
"""Format a DateTime object as an ISO timestamp."""
return string_literal(format_TIMESTAMP(d))
def DateTimeDelta2literal(d, c):
"""Format a DateTimeDelta object as a time."""
return string_literal(format_TIMEDELTA(d))
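An editorial sketch, not part of times.py, showing how the parsers above behave on a few arbitrary example strings.
# Editorial example only, not part of the module.
from MySQLdb.times import (
    DateTime_or_None,
    TimeDelta_or_None,
    Date_or_None,
    format_TIMESTAMP,
)

print(DateTime_or_None("2021-03-01 12:34:56.250000"))  # datetime with microseconds
print(DateTime_or_None("2021-03-01"))                  # short input falls back to a date
print(TimeDelta_or_None("-01:30:00"))                  # negative timedelta
print(Date_or_None("not a date"))                      # None on parse failure
print(format_TIMESTAMP(DateTime_or_None("2021-03-01 12:34:56")))  # "2021-03-01 12:34:56"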

@ -0,0 +1,39 @@
# Protocol Buffers - Google's data interchange format
# Copyright 2008 Google Inc. All rights reserved.
# https://developers.google.com/protocol-buffers/
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
# Copyright 2007 Google Inc. All Rights Reserved.
__version__ = '3.12.2'
if __name__ != '__main__':
try:
__import__('pkg_resources').declare_namespace(__name__)
except ImportError:
__path__ = __import__('pkgutil').extend_path(__path__, __name__)

@ -0,0 +1,78 @@
# -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: google/protobuf/any.proto
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
DESCRIPTOR = _descriptor.FileDescriptor(
name='google/protobuf/any.proto',
package='google.protobuf',
syntax='proto3',
serialized_options=b'\n\023com.google.protobufB\010AnyProtoP\001Z%github.com/golang/protobuf/ptypes/any\242\002\003GPB\252\002\036Google.Protobuf.WellKnownTypes',
create_key=_descriptor._internal_create_key,
serialized_pb=b'\n\x19google/protobuf/any.proto\x12\x0fgoogle.protobuf\"&\n\x03\x41ny\x12\x10\n\x08type_url\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\x0c\x42o\n\x13\x63om.google.protobufB\x08\x41nyProtoP\x01Z%github.com/golang/protobuf/ptypes/any\xa2\x02\x03GPB\xaa\x02\x1eGoogle.Protobuf.WellKnownTypesb\x06proto3'
)
_ANY = _descriptor.Descriptor(
name='Any',
full_name='google.protobuf.Any',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='type_url', full_name='google.protobuf.Any.type_url', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='value', full_name='google.protobuf.Any.value', index=1,
number=2, type=12, cpp_type=9, label=1,
has_default_value=False, default_value=b"",
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=46,
serialized_end=84,
)
DESCRIPTOR.message_types_by_name['Any'] = _ANY
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
Any = _reflection.GeneratedProtocolMessageType('Any', (_message.Message,), {
'DESCRIPTOR' : _ANY,
'__module__' : 'google.protobuf.any_pb2'
# @@protoc_insertion_point(class_scope:google.protobuf.Any)
})
_sym_db.RegisterMessage(Any)
DESCRIPTOR._options = None
# @@protoc_insertion_point(module_scope)
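An editorial sketch, not part of the generated module, of the usual Pack/Unpack round trip on Any; it borrows Duration from duration_pb2, which this commit also adds.
# Editorial example only, not part of any_pb2.py.
from google.protobuf import any_pb2
from google.protobuf import duration_pb2

payload = duration_pb2.Duration(seconds=90)
wrapper = any_pb2.Any()
wrapper.Pack(payload)                      # sets type_url and the serialized value

unpacked = duration_pb2.Duration()
if wrapper.Is(duration_pb2.Duration.DESCRIPTOR):
    wrapper.Unpack(unpacked)
print(wrapper.type_url, unpacked.seconds)  # ...google.protobuf.Duration 90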

@ -0,0 +1,88 @@
# -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: google/protobuf/any_test.proto
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
from google.protobuf import any_pb2 as google_dot_protobuf_dot_any__pb2
DESCRIPTOR = _descriptor.FileDescriptor(
name='google/protobuf/any_test.proto',
package='protobuf_unittest',
syntax='proto3',
serialized_options=None,
create_key=_descriptor._internal_create_key,
serialized_pb=b'\n\x1egoogle/protobuf/any_test.proto\x12\x11protobuf_unittest\x1a\x19google/protobuf/any.proto\"y\n\x07TestAny\x12\x13\n\x0bint32_value\x18\x01 \x01(\x05\x12\'\n\tany_value\x18\x02 \x01(\x0b\x32\x14.google.protobuf.Any\x12\x30\n\x12repeated_any_value\x18\x03 \x03(\x0b\x32\x14.google.protobuf.Anyb\x06proto3'
,
dependencies=[google_dot_protobuf_dot_any__pb2.DESCRIPTOR,])
_TESTANY = _descriptor.Descriptor(
name='TestAny',
full_name='protobuf_unittest.TestAny',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='int32_value', full_name='protobuf_unittest.TestAny.int32_value', index=0,
number=1, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='any_value', full_name='protobuf_unittest.TestAny.any_value', index=1,
number=2, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='repeated_any_value', full_name='protobuf_unittest.TestAny.repeated_any_value', index=2,
number=3, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=80,
serialized_end=201,
)
_TESTANY.fields_by_name['any_value'].message_type = google_dot_protobuf_dot_any__pb2._ANY
_TESTANY.fields_by_name['repeated_any_value'].message_type = google_dot_protobuf_dot_any__pb2._ANY
DESCRIPTOR.message_types_by_name['TestAny'] = _TESTANY
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
TestAny = _reflection.GeneratedProtocolMessageType('TestAny', (_message.Message,), {
'DESCRIPTOR' : _TESTANY,
'__module__' : 'google.protobuf.any_test_pb2'
# @@protoc_insertion_point(class_scope:protobuf_unittest.TestAny)
})
_sym_db.RegisterMessage(TestAny)
# @@protoc_insertion_point(module_scope)

@ -0,0 +1,252 @@
# -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: google/protobuf/api.proto
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
from google.protobuf import source_context_pb2 as google_dot_protobuf_dot_source__context__pb2
from google.protobuf import type_pb2 as google_dot_protobuf_dot_type__pb2
DESCRIPTOR = _descriptor.FileDescriptor(
name='google/protobuf/api.proto',
package='google.protobuf',
syntax='proto3',
serialized_options=b'\n\023com.google.protobufB\010ApiProtoP\001Z+google.golang.org/genproto/protobuf/api;api\242\002\003GPB\252\002\036Google.Protobuf.WellKnownTypes',
create_key=_descriptor._internal_create_key,
serialized_pb=b'\n\x19google/protobuf/api.proto\x12\x0fgoogle.protobuf\x1a$google/protobuf/source_context.proto\x1a\x1agoogle/protobuf/type.proto\"\x81\x02\n\x03\x41pi\x12\x0c\n\x04name\x18\x01 \x01(\t\x12(\n\x07methods\x18\x02 \x03(\x0b\x32\x17.google.protobuf.Method\x12(\n\x07options\x18\x03 \x03(\x0b\x32\x17.google.protobuf.Option\x12\x0f\n\x07version\x18\x04 \x01(\t\x12\x36\n\x0esource_context\x18\x05 \x01(\x0b\x32\x1e.google.protobuf.SourceContext\x12&\n\x06mixins\x18\x06 \x03(\x0b\x32\x16.google.protobuf.Mixin\x12\'\n\x06syntax\x18\x07 \x01(\x0e\x32\x17.google.protobuf.Syntax\"\xd5\x01\n\x06Method\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x18\n\x10request_type_url\x18\x02 \x01(\t\x12\x19\n\x11request_streaming\x18\x03 \x01(\x08\x12\x19\n\x11response_type_url\x18\x04 \x01(\t\x12\x1a\n\x12response_streaming\x18\x05 \x01(\x08\x12(\n\x07options\x18\x06 \x03(\x0b\x32\x17.google.protobuf.Option\x12\'\n\x06syntax\x18\x07 \x01(\x0e\x32\x17.google.protobuf.Syntax\"#\n\x05Mixin\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x0c\n\x04root\x18\x02 \x01(\tBu\n\x13\x63om.google.protobufB\x08\x41piProtoP\x01Z+google.golang.org/genproto/protobuf/api;api\xa2\x02\x03GPB\xaa\x02\x1eGoogle.Protobuf.WellKnownTypesb\x06proto3'
,
dependencies=[google_dot_protobuf_dot_source__context__pb2.DESCRIPTOR,google_dot_protobuf_dot_type__pb2.DESCRIPTOR,])
_API = _descriptor.Descriptor(
name='Api',
full_name='google.protobuf.Api',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='name', full_name='google.protobuf.Api.name', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='methods', full_name='google.protobuf.Api.methods', index=1,
number=2, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='options', full_name='google.protobuf.Api.options', index=2,
number=3, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='version', full_name='google.protobuf.Api.version', index=3,
number=4, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='source_context', full_name='google.protobuf.Api.source_context', index=4,
number=5, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='mixins', full_name='google.protobuf.Api.mixins', index=5,
number=6, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='syntax', full_name='google.protobuf.Api.syntax', index=6,
number=7, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=113,
serialized_end=370,
)
_METHOD = _descriptor.Descriptor(
name='Method',
full_name='google.protobuf.Method',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='name', full_name='google.protobuf.Method.name', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='request_type_url', full_name='google.protobuf.Method.request_type_url', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='request_streaming', full_name='google.protobuf.Method.request_streaming', index=2,
number=3, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='response_type_url', full_name='google.protobuf.Method.response_type_url', index=3,
number=4, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='response_streaming', full_name='google.protobuf.Method.response_streaming', index=4,
number=5, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='options', full_name='google.protobuf.Method.options', index=5,
number=6, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='syntax', full_name='google.protobuf.Method.syntax', index=6,
number=7, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=373,
serialized_end=586,
)
_MIXIN = _descriptor.Descriptor(
name='Mixin',
full_name='google.protobuf.Mixin',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='name', full_name='google.protobuf.Mixin.name', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='root', full_name='google.protobuf.Mixin.root', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=588,
serialized_end=623,
)
_API.fields_by_name['methods'].message_type = _METHOD
_API.fields_by_name['options'].message_type = google_dot_protobuf_dot_type__pb2._OPTION
_API.fields_by_name['source_context'].message_type = google_dot_protobuf_dot_source__context__pb2._SOURCECONTEXT
_API.fields_by_name['mixins'].message_type = _MIXIN
_API.fields_by_name['syntax'].enum_type = google_dot_protobuf_dot_type__pb2._SYNTAX
_METHOD.fields_by_name['options'].message_type = google_dot_protobuf_dot_type__pb2._OPTION
_METHOD.fields_by_name['syntax'].enum_type = google_dot_protobuf_dot_type__pb2._SYNTAX
DESCRIPTOR.message_types_by_name['Api'] = _API
DESCRIPTOR.message_types_by_name['Method'] = _METHOD
DESCRIPTOR.message_types_by_name['Mixin'] = _MIXIN
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
Api = _reflection.GeneratedProtocolMessageType('Api', (_message.Message,), {
'DESCRIPTOR' : _API,
'__module__' : 'google.protobuf.api_pb2'
# @@protoc_insertion_point(class_scope:google.protobuf.Api)
})
_sym_db.RegisterMessage(Api)
Method = _reflection.GeneratedProtocolMessageType('Method', (_message.Message,), {
'DESCRIPTOR' : _METHOD,
'__module__' : 'google.protobuf.api_pb2'
# @@protoc_insertion_point(class_scope:google.protobuf.Method)
})
_sym_db.RegisterMessage(Method)
Mixin = _reflection.GeneratedProtocolMessageType('Mixin', (_message.Message,), {
'DESCRIPTOR' : _MIXIN,
'__module__' : 'google.protobuf.api_pb2'
# @@protoc_insertion_point(class_scope:google.protobuf.Mixin)
})
_sym_db.RegisterMessage(Mixin)
DESCRIPTOR._options = None
# @@protoc_insertion_point(module_scope)

@ -0,0 +1,293 @@
# -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: google/protobuf/compiler/plugin.proto
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
from google.protobuf import descriptor_pb2 as google_dot_protobuf_dot_descriptor__pb2
DESCRIPTOR = _descriptor.FileDescriptor(
name='google/protobuf/compiler/plugin.proto',
package='google.protobuf.compiler',
syntax='proto2',
serialized_options=b'\n\034com.google.protobuf.compilerB\014PluginProtosZ9github.com/golang/protobuf/protoc-gen-go/plugin;plugin_go',
create_key=_descriptor._internal_create_key,
serialized_pb=b'\n%google/protobuf/compiler/plugin.proto\x12\x18google.protobuf.compiler\x1a google/protobuf/descriptor.proto\"F\n\x07Version\x12\r\n\x05major\x18\x01 \x01(\x05\x12\r\n\x05minor\x18\x02 \x01(\x05\x12\r\n\x05patch\x18\x03 \x01(\x05\x12\x0e\n\x06suffix\x18\x04 \x01(\t\"\xba\x01\n\x14\x43odeGeneratorRequest\x12\x18\n\x10\x66ile_to_generate\x18\x01 \x03(\t\x12\x11\n\tparameter\x18\x02 \x01(\t\x12\x38\n\nproto_file\x18\x0f \x03(\x0b\x32$.google.protobuf.FileDescriptorProto\x12;\n\x10\x63ompiler_version\x18\x03 \x01(\x0b\x32!.google.protobuf.compiler.Version\"\x80\x02\n\x15\x43odeGeneratorResponse\x12\r\n\x05\x65rror\x18\x01 \x01(\t\x12\x1a\n\x12supported_features\x18\x02 \x01(\x04\x12\x42\n\x04\x66ile\x18\x0f \x03(\x0b\x32\x34.google.protobuf.compiler.CodeGeneratorResponse.File\x1a>\n\x04\x46ile\x12\x0c\n\x04name\x18\x01 \x01(\t\x12\x17\n\x0finsertion_point\x18\x02 \x01(\t\x12\x0f\n\x07\x63ontent\x18\x0f \x01(\t\"8\n\x07\x46\x65\x61ture\x12\x10\n\x0c\x46\x45\x41TURE_NONE\x10\x00\x12\x1b\n\x17\x46\x45\x41TURE_PROTO3_OPTIONAL\x10\x01\x42g\n\x1c\x63om.google.protobuf.compilerB\x0cPluginProtosZ9github.com/golang/protobuf/protoc-gen-go/plugin;plugin_go'
,
dependencies=[google_dot_protobuf_dot_descriptor__pb2.DESCRIPTOR,])
_CODEGENERATORRESPONSE_FEATURE = _descriptor.EnumDescriptor(
name='Feature',
full_name='google.protobuf.compiler.CodeGeneratorResponse.Feature',
filename=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key,
values=[
_descriptor.EnumValueDescriptor(
name='FEATURE_NONE', index=0, number=0,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='FEATURE_PROTO3_OPTIONAL', index=1, number=1,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
],
containing_type=None,
serialized_options=None,
serialized_start=563,
serialized_end=619,
)
_sym_db.RegisterEnumDescriptor(_CODEGENERATORRESPONSE_FEATURE)
_VERSION = _descriptor.Descriptor(
name='Version',
full_name='google.protobuf.compiler.Version',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='major', full_name='google.protobuf.compiler.Version.major', index=0,
number=1, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='minor', full_name='google.protobuf.compiler.Version.minor', index=1,
number=2, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='patch', full_name='google.protobuf.compiler.Version.patch', index=2,
number=3, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='suffix', full_name='google.protobuf.compiler.Version.suffix', index=3,
number=4, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=101,
serialized_end=171,
)
_CODEGENERATORREQUEST = _descriptor.Descriptor(
name='CodeGeneratorRequest',
full_name='google.protobuf.compiler.CodeGeneratorRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='file_to_generate', full_name='google.protobuf.compiler.CodeGeneratorRequest.file_to_generate', index=0,
number=1, type=9, cpp_type=9, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='parameter', full_name='google.protobuf.compiler.CodeGeneratorRequest.parameter', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='proto_file', full_name='google.protobuf.compiler.CodeGeneratorRequest.proto_file', index=2,
number=15, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='compiler_version', full_name='google.protobuf.compiler.CodeGeneratorRequest.compiler_version', index=3,
number=3, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=174,
serialized_end=360,
)
_CODEGENERATORRESPONSE_FILE = _descriptor.Descriptor(
name='File',
full_name='google.protobuf.compiler.CodeGeneratorResponse.File',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='name', full_name='google.protobuf.compiler.CodeGeneratorResponse.File.name', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='insertion_point', full_name='google.protobuf.compiler.CodeGeneratorResponse.File.insertion_point', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='content', full_name='google.protobuf.compiler.CodeGeneratorResponse.File.content', index=2,
number=15, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=499,
serialized_end=561,
)
_CODEGENERATORRESPONSE = _descriptor.Descriptor(
name='CodeGeneratorResponse',
full_name='google.protobuf.compiler.CodeGeneratorResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='error', full_name='google.protobuf.compiler.CodeGeneratorResponse.error', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='supported_features', full_name='google.protobuf.compiler.CodeGeneratorResponse.supported_features', index=1,
number=2, type=4, cpp_type=4, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='file', full_name='google.protobuf.compiler.CodeGeneratorResponse.file', index=2,
number=15, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[_CODEGENERATORRESPONSE_FILE, ],
enum_types=[
_CODEGENERATORRESPONSE_FEATURE,
],
serialized_options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=363,
serialized_end=619,
)
_CODEGENERATORREQUEST.fields_by_name['proto_file'].message_type = google_dot_protobuf_dot_descriptor__pb2._FILEDESCRIPTORPROTO
_CODEGENERATORREQUEST.fields_by_name['compiler_version'].message_type = _VERSION
_CODEGENERATORRESPONSE_FILE.containing_type = _CODEGENERATORRESPONSE
_CODEGENERATORRESPONSE.fields_by_name['file'].message_type = _CODEGENERATORRESPONSE_FILE
_CODEGENERATORRESPONSE_FEATURE.containing_type = _CODEGENERATORRESPONSE
DESCRIPTOR.message_types_by_name['Version'] = _VERSION
DESCRIPTOR.message_types_by_name['CodeGeneratorRequest'] = _CODEGENERATORREQUEST
DESCRIPTOR.message_types_by_name['CodeGeneratorResponse'] = _CODEGENERATORRESPONSE
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
Version = _reflection.GeneratedProtocolMessageType('Version', (_message.Message,), {
'DESCRIPTOR' : _VERSION,
'__module__' : 'google.protobuf.compiler.plugin_pb2'
# @@protoc_insertion_point(class_scope:google.protobuf.compiler.Version)
})
_sym_db.RegisterMessage(Version)
CodeGeneratorRequest = _reflection.GeneratedProtocolMessageType('CodeGeneratorRequest', (_message.Message,), {
'DESCRIPTOR' : _CODEGENERATORREQUEST,
'__module__' : 'google.protobuf.compiler.plugin_pb2'
# @@protoc_insertion_point(class_scope:google.protobuf.compiler.CodeGeneratorRequest)
})
_sym_db.RegisterMessage(CodeGeneratorRequest)
CodeGeneratorResponse = _reflection.GeneratedProtocolMessageType('CodeGeneratorResponse', (_message.Message,), {
'File' : _reflection.GeneratedProtocolMessageType('File', (_message.Message,), {
'DESCRIPTOR' : _CODEGENERATORRESPONSE_FILE,
'__module__' : 'google.protobuf.compiler.plugin_pb2'
# @@protoc_insertion_point(class_scope:google.protobuf.compiler.CodeGeneratorResponse.File)
})
,
'DESCRIPTOR' : _CODEGENERATORRESPONSE,
'__module__' : 'google.protobuf.compiler.plugin_pb2'
# @@protoc_insertion_point(class_scope:google.protobuf.compiler.CodeGeneratorResponse)
})
_sym_db.RegisterMessage(CodeGeneratorResponse)
_sym_db.RegisterMessage(CodeGeneratorResponse.File)
DESCRIPTOR._options = None
# @@protoc_insertion_point(module_scope)
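An editorial sketch, not part of the generated module, of the standard protoc plugin loop built on these messages: protoc writes a serialized CodeGeneratorRequest to the plugin's stdin and expects a serialized CodeGeneratorResponse on stdout. The generated file naming below is hypothetical.
# Editorial example only, not part of plugin_pb2.py.
import sys

from google.protobuf.compiler import plugin_pb2


def main():
    # protoc hands the plugin a serialized request on stdin.
    request = plugin_pb2.CodeGeneratorRequest.FromString(sys.stdin.buffer.read())
    response = plugin_pb2.CodeGeneratorResponse()
    for proto_file in request.proto_file:
        if proto_file.name not in request.file_to_generate:
            continue
        # Emit one (hypothetical) summary file per requested .proto file.
        generated = response.file.add()
        generated.name = proto_file.name.replace(".proto", "_summary.txt")
        generated.content = "messages: %s\n" % ", ".join(
            m.name for m in proto_file.message_type
        )
    # The serialized response goes back to protoc on stdout.
    sys.stdout.buffer.write(response.SerializeToString())


if __name__ == "__main__":
    main()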

File diff suppressed because it is too large

@ -0,0 +1,177 @@
# Protocol Buffers - Google's data interchange format
# Copyright 2008 Google Inc. All rights reserved.
# https://developers.google.com/protocol-buffers/
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Provides a container for DescriptorProtos."""
__author__ = 'matthewtoia@google.com (Matt Toia)'
import warnings
class Error(Exception):
pass
class DescriptorDatabaseConflictingDefinitionError(Error):
"""Raised when a proto is added with the same name & different descriptor."""
class DescriptorDatabase(object):
"""A container accepting FileDescriptorProtos and maps DescriptorProtos."""
def __init__(self):
self._file_desc_protos_by_file = {}
self._file_desc_protos_by_symbol = {}
def Add(self, file_desc_proto):
"""Adds the FileDescriptorProto and its types to this database.
Args:
file_desc_proto: The FileDescriptorProto to add.
Raises:
DescriptorDatabaseConflictingDefinitionError: if an attempt is made to
add a proto with the same name but different definition than an
          existing proto in the database.
"""
proto_name = file_desc_proto.name
if proto_name not in self._file_desc_protos_by_file:
self._file_desc_protos_by_file[proto_name] = file_desc_proto
elif self._file_desc_protos_by_file[proto_name] != file_desc_proto:
raise DescriptorDatabaseConflictingDefinitionError(
'%s already added, but with different descriptor.' % proto_name)
else:
return
# Add all the top-level descriptors to the index.
package = file_desc_proto.package
for message in file_desc_proto.message_type:
for name in _ExtractSymbols(message, package):
self._AddSymbol(name, file_desc_proto)
for enum in file_desc_proto.enum_type:
self._AddSymbol(('.'.join((package, enum.name))), file_desc_proto)
for enum_value in enum.value:
self._file_desc_protos_by_symbol[
'.'.join((package, enum_value.name))] = file_desc_proto
for extension in file_desc_proto.extension:
self._AddSymbol(('.'.join((package, extension.name))), file_desc_proto)
for service in file_desc_proto.service:
self._AddSymbol(('.'.join((package, service.name))), file_desc_proto)
def FindFileByName(self, name):
"""Finds the file descriptor proto by file name.
    Typically the file name is a relative path ending in a .proto file. The
proto with the given name will have to have been added to this database
using the Add method or else an error will be raised.
Args:
name: The file name to find.
Returns:
The file descriptor proto matching the name.
Raises:
KeyError if no file by the given name was added.
"""
return self._file_desc_protos_by_file[name]
def FindFileContainingSymbol(self, symbol):
"""Finds the file descriptor proto containing the specified symbol.
The symbol should be a fully qualified name including the file descriptor's
package and any containing messages. Some examples:
'some.package.name.Message'
'some.package.name.Message.NestedEnum'
'some.package.name.Message.some_field'
The file descriptor proto containing the specified symbol must be added to
this database using the Add method or else an error will be raised.
Args:
symbol: The fully qualified symbol name.
Returns:
The file descriptor proto containing the symbol.
Raises:
KeyError if no file contains the specified symbol.
"""
try:
return self._file_desc_protos_by_symbol[symbol]
except KeyError:
# Fields, enum values, and nested extensions are not in
# _file_desc_protos_by_symbol. Try to find the top level
# descriptor. Non-existent nested symbol under a valid top level
# descriptor can also be found. The behavior is the same with
# protobuf C++.
top_level, _, _ = symbol.rpartition('.')
try:
return self._file_desc_protos_by_symbol[top_level]
except KeyError:
# Raise the original symbol as a KeyError for better diagnostics.
raise KeyError(symbol)
def FindFileContainingExtension(self, extendee_name, extension_number):
# TODO(jieluo): implement this API.
return None
def FindAllExtensionNumbers(self, extendee_name):
# TODO(jieluo): implement this API.
return []
def _AddSymbol(self, name, file_desc_proto):
if name in self._file_desc_protos_by_symbol:
warn_msg = ('Conflict register for file "' + file_desc_proto.name +
'": ' + name +
' is already defined in file "' +
self._file_desc_protos_by_symbol[name].name + '"')
warnings.warn(warn_msg, RuntimeWarning)
self._file_desc_protos_by_symbol[name] = file_desc_proto
def _ExtractSymbols(desc_proto, package):
"""Pulls out all the symbols from a descriptor proto.
Args:
desc_proto: The proto to extract symbols from.
package: The package containing the descriptor type.
Yields:
The fully qualified name found in the descriptor.
"""
message_name = package + '.' + desc_proto.name if package else desc_proto.name
yield message_name
for nested_type in desc_proto.nested_type:
for symbol in _ExtractSymbols(nested_type, message_name):
yield symbol
for enum_type in desc_proto.enum_type:
yield '.'.join((message_name, enum_type.name))
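An editorial sketch, not part of descriptor_database.py: registering a hand-built FileDescriptorProto and looking it up by file name and by symbol. The file, package, and message names are made up.
# Editorial example only, not part of the module.
from google.protobuf import descriptor_pb2
from google.protobuf import descriptor_database

file_proto = descriptor_pb2.FileDescriptorProto()
file_proto.name = "demo/thing.proto"
file_proto.package = "demo"
file_proto.message_type.add().name = "Thing"

db = descriptor_database.DescriptorDatabase()
db.Add(file_proto)

assert db.FindFileByName("demo/thing.proto") is file_proto
assert db.FindFileContainingSymbol("demo.Thing") is file_proto
# Nested symbols fall back to the containing top-level descriptor.
assert db.FindFileContainingSymbol("demo.Thing.some_field") is file_proto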

File diff suppressed because one or more lines are too long

File diff suppressed because it is too large

@ -0,0 +1,78 @@
# -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: google/protobuf/duration.proto
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
DESCRIPTOR = _descriptor.FileDescriptor(
name='google/protobuf/duration.proto',
package='google.protobuf',
syntax='proto3',
serialized_options=b'\n\023com.google.protobufB\rDurationProtoP\001Z*github.com/golang/protobuf/ptypes/duration\370\001\001\242\002\003GPB\252\002\036Google.Protobuf.WellKnownTypes',
create_key=_descriptor._internal_create_key,
serialized_pb=b'\n\x1egoogle/protobuf/duration.proto\x12\x0fgoogle.protobuf\"*\n\x08\x44uration\x12\x0f\n\x07seconds\x18\x01 \x01(\x03\x12\r\n\x05nanos\x18\x02 \x01(\x05\x42|\n\x13\x63om.google.protobufB\rDurationProtoP\x01Z*github.com/golang/protobuf/ptypes/duration\xf8\x01\x01\xa2\x02\x03GPB\xaa\x02\x1eGoogle.Protobuf.WellKnownTypesb\x06proto3'
)
_DURATION = _descriptor.Descriptor(
name='Duration',
full_name='google.protobuf.Duration',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='seconds', full_name='google.protobuf.Duration.seconds', index=0,
number=1, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='nanos', full_name='google.protobuf.Duration.nanos', index=1,
number=2, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=51,
serialized_end=93,
)
DESCRIPTOR.message_types_by_name['Duration'] = _DURATION
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
Duration = _reflection.GeneratedProtocolMessageType('Duration', (_message.Message,), {
'DESCRIPTOR' : _DURATION,
'__module__' : 'google.protobuf.duration_pb2'
# @@protoc_insertion_point(class_scope:google.protobuf.Duration)
})
_sym_db.RegisterMessage(Duration)
DESCRIPTOR._options = None
# @@protoc_insertion_point(module_scope)
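An editorial sketch, not part of the generated module, converting between Duration and datetime.timedelta via the well-known-type helpers attached to the generated class.
# Editorial example only, not part of duration_pb2.py.
from datetime import timedelta

from google.protobuf import duration_pb2

d = duration_pb2.Duration()
d.FromTimedelta(timedelta(minutes=1, milliseconds=500))
print(d.seconds, d.nanos)   # 60 500000000
print(d.ToTimedelta())      # 0:01:00.500000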

@ -0,0 +1,64 @@
# -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: google/protobuf/empty.proto
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
DESCRIPTOR = _descriptor.FileDescriptor(
name='google/protobuf/empty.proto',
package='google.protobuf',
syntax='proto3',
serialized_options=b'\n\023com.google.protobufB\nEmptyProtoP\001Z\'github.com/golang/protobuf/ptypes/empty\370\001\001\242\002\003GPB\252\002\036Google.Protobuf.WellKnownTypes',
create_key=_descriptor._internal_create_key,
serialized_pb=b'\n\x1bgoogle/protobuf/empty.proto\x12\x0fgoogle.protobuf\"\x07\n\x05\x45mptyBv\n\x13\x63om.google.protobufB\nEmptyProtoP\x01Z\'github.com/golang/protobuf/ptypes/empty\xf8\x01\x01\xa2\x02\x03GPB\xaa\x02\x1eGoogle.Protobuf.WellKnownTypesb\x06proto3'
)
_EMPTY = _descriptor.Descriptor(
name='Empty',
full_name='google.protobuf.Empty',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=48,
serialized_end=55,
)
DESCRIPTOR.message_types_by_name['Empty'] = _EMPTY
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
Empty = _reflection.GeneratedProtocolMessageType('Empty', (_message.Message,), {
'DESCRIPTOR' : _EMPTY,
'__module__' : 'google.protobuf.empty_pb2'
# @@protoc_insertion_point(class_scope:google.protobuf.Empty)
})
_sym_db.RegisterMessage(Empty)
DESCRIPTOR._options = None
# @@protoc_insertion_point(module_scope)

@ -0,0 +1,71 @@
# -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: google/protobuf/field_mask.proto
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
DESCRIPTOR = _descriptor.FileDescriptor(
name='google/protobuf/field_mask.proto',
package='google.protobuf',
syntax='proto3',
serialized_options=b'\n\023com.google.protobufB\016FieldMaskProtoP\001Z9google.golang.org/genproto/protobuf/field_mask;field_mask\370\001\001\242\002\003GPB\252\002\036Google.Protobuf.WellKnownTypes',
create_key=_descriptor._internal_create_key,
serialized_pb=b'\n google/protobuf/field_mask.proto\x12\x0fgoogle.protobuf\"\x1a\n\tFieldMask\x12\r\n\x05paths\x18\x01 \x03(\tB\x8c\x01\n\x13\x63om.google.protobufB\x0e\x46ieldMaskProtoP\x01Z9google.golang.org/genproto/protobuf/field_mask;field_mask\xf8\x01\x01\xa2\x02\x03GPB\xaa\x02\x1eGoogle.Protobuf.WellKnownTypesb\x06proto3'
)
_FIELDMASK = _descriptor.Descriptor(
name='FieldMask',
full_name='google.protobuf.FieldMask',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='paths', full_name='google.protobuf.FieldMask.paths', index=0,
number=1, type=9, cpp_type=9, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=53,
serialized_end=79,
)
DESCRIPTOR.message_types_by_name['FieldMask'] = _FIELDMASK
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
FieldMask = _reflection.GeneratedProtocolMessageType('FieldMask', (_message.Message,), {
'DESCRIPTOR' : _FIELDMASK,
'__module__' : 'google.protobuf.field_mask_pb2'
# @@protoc_insertion_point(class_scope:google.protobuf.FieldMask)
})
_sym_db.RegisterMessage(FieldMask)
DESCRIPTOR._options = None
# @@protoc_insertion_point(module_scope)
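An editorial sketch, not part of the generated module, building a FieldMask from a comma-separated path string via the well-known-type helpers; the paths are arbitrary.
# Editorial example only, not part of field_mask_pb2.py.
from google.protobuf import field_mask_pb2

mask = field_mask_pb2.FieldMask()
mask.FromJsonString("name,address.street")
print(list(mask.paths))     # ['name', 'address.street']
print(mask.ToJsonString())  # name,address.street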

@ -0,0 +1,30 @@
# Protocol Buffers - Google's data interchange format
# Copyright 2008 Google Inc. All rights reserved.
# https://developers.google.com/protocol-buffers/
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

@ -0,0 +1,449 @@
#! /usr/bin/env python
#
# Protocol Buffers - Google's data interchange format
# Copyright 2008 Google Inc. All rights reserved.
# https://developers.google.com/protocol-buffers/
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Adds support for parameterized tests to Python's unittest TestCase class.
A parameterized test is a method in a test case that is invoked with different
argument tuples.
A simple example:
class AdditionExample(parameterized.TestCase):
@parameterized.parameters(
(1, 2, 3),
(4, 5, 9),
(1, 1, 3))
def testAddition(self, op1, op2, result):
self.assertEqual(result, op1 + op2)
Each invocation is a separate test case and properly isolated just
like a normal test method, with its own setUp/tearDown cycle. In the
example above, there are three separate testcases, one of which will
fail due to an assertion error (1 + 1 != 3).
Parameters for individual test cases can be tuples (with positional parameters)
or dictionaries (with named parameters):
class AdditionExample(parameterized.TestCase):
@parameterized.parameters(
{'op1': 1, 'op2': 2, 'result': 3},
{'op1': 4, 'op2': 5, 'result': 9},
)
def testAddition(self, op1, op2, result):
self.assertEqual(result, op1 + op2)
If a parameterized test fails, the error message will show the
original test name (which is modified internally) and the arguments
for the specific invocation, which are part of the string returned by
the shortDescription() method on test cases.
The id method of the test, used internally by the unittest framework,
is also modified to show the arguments. To make sure that test names
stay the same across several invocations, object representations like
>>> class Foo(object):
... pass
>>> repr(Foo())
'<__main__.Foo object at 0x23d8610>'
are turned into '<__main__.Foo>'. For even more descriptive names,
especially in test logs, you can use the named_parameters decorator. In
this case, only tuples are supported, and the first parameter has to
be a string (or an object that returns an apt name when converted via
str()):
class NamedExample(parameterized.TestCase):
@parameterized.named_parameters(
('Normal', 'aa', 'aaa', True),
('EmptyPrefix', '', 'abc', True),
('BothEmpty', '', '', True))
def testStartsWith(self, prefix, string, result):
        self.assertEqual(result, string.startswith(prefix))
Named tests also have the benefit that they can be run individually
from the command line:
$ testmodule.py NamedExample.testStartsWithNormal
.
--------------------------------------------------------------------
Ran 1 test in 0.000s
OK
Parameterized Classes
=====================
If invocation arguments are shared across test methods in a single
TestCase class, instead of decorating all test methods
individually, the class itself can be decorated:
@parameterized.parameters(
      (1, 2, 3),
(4, 5, 9))
class ArithmeticTest(parameterized.TestCase):
def testAdd(self, arg1, arg2, result):
self.assertEqual(arg1 + arg2, result)
      def testSubtract(self, arg1, arg2, result):
self.assertEqual(result - arg1, arg2)
Inputs from Iterables
=====================
If parameters should be shared across several test cases, or are dynamically
created from other sources, a single non-tuple iterable can be passed into
the decorator. This iterable will be used to obtain the test cases:
class AdditionExample(parameterized.TestCase):
@parameterized.parameters(
        (c.op1, c.op2, c.result) for c in testcases
)
def testAddition(self, op1, op2, result):
self.assertEqual(result, op1 + op2)
Single-Argument Test Methods
============================
If a test method takes only one argument, the single argument does not need to
be wrapped into a tuple:
class NegativeNumberExample(parameterized.TestCase):
@parameterized.parameters(
-1, -3, -4, -5
)
def testIsNegative(self, arg):
self.assertTrue(IsNegative(arg))
"""
__author__ = 'tmarek@google.com (Torsten Marek)'
import functools
import re
import types
try:
import unittest2 as unittest
except ImportError:
import unittest
import uuid
import six
try:
# Since python 3
import collections.abc as collections_abc
except ImportError:
# Won't work after python 3.8
import collections as collections_abc
ADDR_RE = re.compile(r'\<([a-zA-Z0-9_\-\.]+) object at 0x[a-fA-F0-9]+\>')
_SEPARATOR = uuid.uuid1().hex
_FIRST_ARG = object()
_ARGUMENT_REPR = object()
def _CleanRepr(obj):
return ADDR_RE.sub(r'<\1>', repr(obj))
# Helper function formerly from the unittest module, removed from it in
# Python 2.7.
def _StrClass(cls):
return '%s.%s' % (cls.__module__, cls.__name__)
def _NonStringIterable(obj):
return (isinstance(obj, collections_abc.Iterable) and not
isinstance(obj, six.string_types))
def _FormatParameterList(testcase_params):
if isinstance(testcase_params, collections_abc.Mapping):
return ', '.join('%s=%s' % (argname, _CleanRepr(value))
for argname, value in testcase_params.items())
elif _NonStringIterable(testcase_params):
return ', '.join(map(_CleanRepr, testcase_params))
else:
return _FormatParameterList((testcase_params,))
class _ParameterizedTestIter(object):
"""Callable and iterable class for producing new test cases."""
def __init__(self, test_method, testcases, naming_type):
"""Returns concrete test functions for a test and a list of parameters.
The naming_type is used to determine the name of the concrete
functions as reported by the unittest framework. If naming_type is
_FIRST_ARG, the testcases must be tuples, and the first element must
have a string representation that is a valid Python identifier.
Args:
test_method: The decorated test method.
testcases: (list of tuple/dict) A list of parameter
tuples/dicts for individual test invocations.
      naming_type: The test naming type, either _FIRST_ARG or _ARGUMENT_REPR.
"""
self._test_method = test_method
self.testcases = testcases
self._naming_type = naming_type
def __call__(self, *args, **kwargs):
raise RuntimeError('You appear to be running a parameterized test case '
'without having inherited from parameterized.'
'TestCase. This is bad because none of '
'your test cases are actually being run.')
def __iter__(self):
test_method = self._test_method
naming_type = self._naming_type
def MakeBoundParamTest(testcase_params):
@functools.wraps(test_method)
def BoundParamTest(self):
if isinstance(testcase_params, collections_abc.Mapping):
test_method(self, **testcase_params)
elif _NonStringIterable(testcase_params):
test_method(self, *testcase_params)
else:
test_method(self, testcase_params)
if naming_type is _FIRST_ARG:
# Signal the metaclass that the name of the test function is unique
# and descriptive.
BoundParamTest.__x_use_name__ = True
BoundParamTest.__name__ += str(testcase_params[0])
testcase_params = testcase_params[1:]
elif naming_type is _ARGUMENT_REPR:
# __x_extra_id__ is used to pass naming information to the __new__
# method of TestGeneratorMetaclass.
# The metaclass will make sure to create a unique, but nondescriptive
# name for this test.
BoundParamTest.__x_extra_id__ = '(%s)' % (
_FormatParameterList(testcase_params),)
else:
raise RuntimeError('%s is not a valid naming type.' % (naming_type,))
BoundParamTest.__doc__ = '%s(%s)' % (
BoundParamTest.__name__, _FormatParameterList(testcase_params))
if test_method.__doc__:
BoundParamTest.__doc__ += '\n%s' % (test_method.__doc__,)
return BoundParamTest
return (MakeBoundParamTest(c) for c in self.testcases)
def _IsSingletonList(testcases):
"""True iff testcases contains only a single non-tuple element."""
return len(testcases) == 1 and not isinstance(testcases[0], tuple)
def _ModifyClass(class_object, testcases, naming_type):
assert not getattr(class_object, '_id_suffix', None), (
'Cannot add parameters to %s,'
' which already has parameterized methods.' % (class_object,))
class_object._id_suffix = id_suffix = {}
# We change the size of __dict__ while we iterate over it,
# which Python 3.x will complain about, so use copy().
for name, obj in class_object.__dict__.copy().items():
if (name.startswith(unittest.TestLoader.testMethodPrefix)
and isinstance(obj, types.FunctionType)):
delattr(class_object, name)
methods = {}
_UpdateClassDictForParamTestCase(
methods, id_suffix, name,
_ParameterizedTestIter(obj, testcases, naming_type))
for name, meth in methods.items():
setattr(class_object, name, meth)
def _ParameterDecorator(naming_type, testcases):
"""Implementation of the parameterization decorators.
Args:
naming_type: The naming type.
testcases: Testcase parameters.
Returns:
A function for modifying the decorated object.
"""
def _Apply(obj):
if isinstance(obj, type):
_ModifyClass(
obj,
list(testcases) if not isinstance(testcases, collections_abc.Sequence)
else testcases,
naming_type)
return obj
else:
return _ParameterizedTestIter(obj, testcases, naming_type)
if _IsSingletonList(testcases):
assert _NonStringIterable(testcases[0]), (
'Single parameter argument must be a non-string iterable')
testcases = testcases[0]
return _Apply
def parameters(*testcases): # pylint: disable=invalid-name
"""A decorator for creating parameterized tests.
See the module docstring for a usage example.
Args:
*testcases: Parameters for the decorated method, either a single
iterable, or a list of tuples/dicts/objects (for tests
with only one argument).
Returns:
A test generator to be handled by TestGeneratorMetaclass.
"""
return _ParameterDecorator(_ARGUMENT_REPR, testcases)
def named_parameters(*testcases): # pylint: disable=invalid-name
"""A decorator for creating parameterized tests.
See the module docstring for a usage example. The first element of
each parameter tuple should be a string and will be appended to the
name of the test method.
Args:
*testcases: Parameters for the decorated method, either a single
iterable, or a list of tuples.
Returns:
A test generator to be handled by TestGeneratorMetaclass.
"""
return _ParameterDecorator(_FIRST_ARG, testcases)
class TestGeneratorMetaclass(type):
"""Metaclass for test cases with test generators.
A test generator is an iterable in a testcase that produces callables. These
callables must be single-argument methods. These methods are injected into
the class namespace and the original iterable is removed. If the name of the
iterable conforms to the test pattern, the injected methods will be picked
up as tests by the unittest framework.
In general, it is supposed to be used in conjunction with the
parameters decorator.
"""
def __new__(mcs, class_name, bases, dct):
dct['_id_suffix'] = id_suffix = {}
for name, obj in dct.items():
if (name.startswith(unittest.TestLoader.testMethodPrefix) and
_NonStringIterable(obj)):
iterator = iter(obj)
dct.pop(name)
_UpdateClassDictForParamTestCase(dct, id_suffix, name, iterator)
return type.__new__(mcs, class_name, bases, dct)
def _UpdateClassDictForParamTestCase(dct, id_suffix, name, iterator):
"""Adds individual test cases to a dictionary.
Args:
dct: The target dictionary.
id_suffix: The dictionary for mapping names to test IDs.
name: The original name of the test case.
iterator: The iterator generating the individual test cases.
"""
for idx, func in enumerate(iterator):
assert callable(func), 'Test generators must yield callables, got %r' % (
func,)
if getattr(func, '__x_use_name__', False):
new_name = func.__name__
else:
new_name = '%s%s%d' % (name, _SEPARATOR, idx)
assert new_name not in dct, (
'Name of parameterized test case "%s" not unique' % (new_name,))
dct[new_name] = func
id_suffix[new_name] = getattr(func, '__x_extra_id__', '')
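# Illustrative note (not part of the public API): for a method testAddition
# decorated via @parameterized.parameters with three parameter tuples, the
# loop above injects entries named 'testAddition<sep>0', 'testAddition<sep>1',
# 'testAddition<sep>2', where <sep> is the uuid hex stored in _SEPARATOR.
# With named_parameters, the descriptive __name__ built in MakeBoundParamTest
# (e.g. 'testStartsWithNormal') is used instead.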
class TestCase(unittest.TestCase):
"""Base class for test cases using the parameters decorator."""
__metaclass__ = TestGeneratorMetaclass
def _OriginalName(self):
return self._testMethodName.split(_SEPARATOR)[0]
def __str__(self):
return '%s (%s)' % (self._OriginalName(), _StrClass(self.__class__))
def id(self): # pylint: disable=invalid-name
"""Returns the descriptive ID of the test.
This is used internally by the unittesting framework to get a name
for the test to be used in reports.
Returns:
The test id.
"""
return '%s.%s%s' % (_StrClass(self.__class__),
self._OriginalName(),
self._id_suffix.get(self._testMethodName, ''))
def CoopTestCase(other_base_class):
"""Returns a new base class with a cooperative metaclass base.
This enables the TestCase to be used in combination
with other base classes that have custom metaclasses, such as
mox.MoxTestBase.
Only works with metaclasses that do not override type.__new__.
Example:
import google3
import mox
from google3.testing.pybase import parameterized
class ExampleTest(parameterized.CoopTestCase(mox.MoxTestBase)):
...
Args:
other_base_class: (class) A test case base class.
Returns:
A new class object.
"""
metaclass = type(
'CoopMetaclass',
(other_base_class.__metaclass__,
TestGeneratorMetaclass), {})
return metaclass(
'CoopTestCase',
(other_base_class, TestCase), {})

@ -0,0 +1,185 @@
# -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: google/protobuf/internal/any_test.proto
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
from google.protobuf import any_pb2 as google_dot_protobuf_dot_any__pb2
DESCRIPTOR = _descriptor.FileDescriptor(
name='google/protobuf/internal/any_test.proto',
package='google.protobuf.internal',
syntax='proto2',
serialized_options=None,
create_key=_descriptor._internal_create_key,
serialized_pb=b'\n\'google/protobuf/internal/any_test.proto\x12\x18google.protobuf.internal\x1a\x19google/protobuf/any.proto\"\xc0\x01\n\x07TestAny\x12#\n\x05value\x18\x01 \x01(\x0b\x32\x14.google.protobuf.Any\x12\x11\n\tint_value\x18\x02 \x01(\x05\x12\x42\n\tmap_value\x18\x03 \x03(\x0b\x32/.google.protobuf.internal.TestAny.MapValueEntry\x1a/\n\rMapValueEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\r\n\x05value\x18\x02 \x01(\x05:\x02\x38\x01*\x08\x08\n\x10\x80\x80\x80\x80\x02\"\x85\x01\n\x11TestAnyExtension1\x12\t\n\x01i\x18\x0f \x01(\x05\x32\x65\n\nextension1\x12!.google.protobuf.internal.TestAny\x18\xab\xff\xf6. \x01(\x0b\x32+.google.protobuf.internal.TestAnyExtension1'
,
dependencies=[google_dot_protobuf_dot_any__pb2.DESCRIPTOR,])
_TESTANY_MAPVALUEENTRY = _descriptor.Descriptor(
name='MapValueEntry',
full_name='google.protobuf.internal.TestAny.MapValueEntry',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='key', full_name='google.protobuf.internal.TestAny.MapValueEntry.key', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='value', full_name='google.protobuf.internal.TestAny.MapValueEntry.value', index=1,
number=2, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=b'8\001',
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=232,
serialized_end=279,
)
_TESTANY = _descriptor.Descriptor(
name='TestAny',
full_name='google.protobuf.internal.TestAny',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='value', full_name='google.protobuf.internal.TestAny.value', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='int_value', full_name='google.protobuf.internal.TestAny.int_value', index=1,
number=2, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='map_value', full_name='google.protobuf.internal.TestAny.map_value', index=2,
number=3, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[_TESTANY_MAPVALUEENTRY, ],
enum_types=[
],
serialized_options=None,
is_extendable=True,
syntax='proto2',
extension_ranges=[(10, 536870912), ],
oneofs=[
],
serialized_start=97,
serialized_end=289,
)
_TESTANYEXTENSION1 = _descriptor.Descriptor(
name='TestAnyExtension1',
full_name='google.protobuf.internal.TestAnyExtension1',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='i', full_name='google.protobuf.internal.TestAnyExtension1.i', index=0,
number=15, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
_descriptor.FieldDescriptor(
name='extension1', full_name='google.protobuf.internal.TestAnyExtension1.extension1', index=0,
number=98418603, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=True, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=292,
serialized_end=425,
)
_TESTANY_MAPVALUEENTRY.containing_type = _TESTANY
_TESTANY.fields_by_name['value'].message_type = google_dot_protobuf_dot_any__pb2._ANY
_TESTANY.fields_by_name['map_value'].message_type = _TESTANY_MAPVALUEENTRY
DESCRIPTOR.message_types_by_name['TestAny'] = _TESTANY
DESCRIPTOR.message_types_by_name['TestAnyExtension1'] = _TESTANYEXTENSION1
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
TestAny = _reflection.GeneratedProtocolMessageType('TestAny', (_message.Message,), {
'MapValueEntry' : _reflection.GeneratedProtocolMessageType('MapValueEntry', (_message.Message,), {
'DESCRIPTOR' : _TESTANY_MAPVALUEENTRY,
'__module__' : 'google.protobuf.internal.any_test_pb2'
# @@protoc_insertion_point(class_scope:google.protobuf.internal.TestAny.MapValueEntry)
})
,
'DESCRIPTOR' : _TESTANY,
'__module__' : 'google.protobuf.internal.any_test_pb2'
# @@protoc_insertion_point(class_scope:google.protobuf.internal.TestAny)
})
_sym_db.RegisterMessage(TestAny)
_sym_db.RegisterMessage(TestAny.MapValueEntry)
TestAnyExtension1 = _reflection.GeneratedProtocolMessageType('TestAnyExtension1', (_message.Message,), {
'DESCRIPTOR' : _TESTANYEXTENSION1,
'__module__' : 'google.protobuf.internal.any_test_pb2'
# @@protoc_insertion_point(class_scope:google.protobuf.internal.TestAnyExtension1)
})
_sym_db.RegisterMessage(TestAnyExtension1)
_TESTANYEXTENSION1.extensions_by_name['extension1'].message_type = _TESTANYEXTENSION1
TestAny.RegisterExtension(_TESTANYEXTENSION1.extensions_by_name['extension1'])
_TESTANY_MAPVALUEENTRY._options = None
# @@protoc_insertion_point(module_scope)

@ -0,0 +1,153 @@
# Protocol Buffers - Google's data interchange format
# Copyright 2008 Google Inc. All rights reserved.
# https://developers.google.com/protocol-buffers/
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Determine which implementation of the protobuf API is used in this process.
"""
import os
import warnings
import sys
try:
# pylint: disable=g-import-not-at-top
from google.protobuf.internal import _api_implementation
# The compile-time constants in the _api_implementation module can be used to
# switch to a certain implementation of the Python API at build time.
_api_version = _api_implementation.api_version
_proto_extension_modules_exist_in_build = True
except ImportError:
_api_version = -1 # Unspecified by compiler flags.
_proto_extension_modules_exist_in_build = False
if _api_version == 1:
raise ValueError('api_version=1 is no longer supported.')
if _api_version < 0: # Still unspecified?
try:
# The presence of this module in a build allows the proto implementation to
# be upgraded merely via build deps rather than a compiler flag or the
# runtime environment variable.
# pylint: disable=g-import-not-at-top
from google.protobuf import _use_fast_cpp_protos
# Work around a known issue in the classic bootstrap .par import hook.
if not _use_fast_cpp_protos:
raise ImportError('_use_fast_cpp_protos import succeeded but was None')
del _use_fast_cpp_protos
_api_version = 2
except ImportError:
try:
# pylint: disable=g-import-not-at-top
from google.protobuf.internal import use_pure_python
del use_pure_python # Avoids a pylint error and namespace pollution.
except ImportError:
# TODO(b/74017912): It's unsafe to enable :use_fast_cpp_protos by default;
# it can cause data loss if you have any Python-only extensions to any
# message passed back and forth with C++ code.
#
# TODO(b/17427486): Once that bug is fixed, we want to make both Python 2
# and Python 3 default to `_api_version = 2` (C++ implementation V2).
pass
_default_implementation_type = (
'python' if _api_version <= 0 else 'cpp')
# This environment variable can be used to switch to a certain implementation
# of the Python API, overriding the compile-time constants in the
# _api_implementation module. Right now only 'python' and 'cpp' are valid
# values. Any other value will be ignored.
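# Illustrative only (assumes a POSIX shell); the variable is read once, at
# import time of this module:
#   PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION=python python your_script.py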
_implementation_type = os.getenv('PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION',
_default_implementation_type)
if _implementation_type != 'python':
_implementation_type = 'cpp'
if 'PyPy' in sys.version and _implementation_type == 'cpp':
warnings.warn('PyPy does not work yet with cpp protocol buffers. '
'Falling back to the python implementation.')
_implementation_type = 'python'
# This environment variable can be used to switch between the two
# 'cpp' implementations, overriding the compile-time constants in the
# _api_implementation module. Right now only '2' is supported. Any other
# value will cause an error to be raised.
_implementation_version_str = os.getenv(
'PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION_VERSION', '2')
if _implementation_version_str != '2':
raise ValueError(
'unsupported PROTOCOL_BUFFERS_PYTHON_IMPLEMENTATION_VERSION: "' +
_implementation_version_str + '" (supported versions: 2)'
)
_implementation_version = int(_implementation_version_str)
# Detect if serialization should be deterministic by default
try:
# The presence of this module in a build allows the proto implementation to
# be upgraded merely via build deps.
#
# NOTE: Merely importing this automatically enables deterministic proto
# serialization for C++ code, but we still need to export it as a boolean so
# that we can do the same for `_implementation_type == 'python'`.
#
# NOTE2: It is possible for C++ code to enable deterministic serialization by
# default _without_ affecting Python code, if the C++ implementation is not in
# use by this module. That is intended behavior, so we don't actually expose
# this boolean outside of this module.
#
# pylint: disable=g-import-not-at-top,unused-import
from google.protobuf import enable_deterministic_proto_serialization
_python_deterministic_proto_serialization = True
except ImportError:
_python_deterministic_proto_serialization = False
# Usage of this function is discouraged. Clients shouldn't care which
# implementation of the API is in use. Note that there is no guarantee
# that differences between APIs will be maintained.
# Please don't use this function if possible.
def Type():
return _implementation_type
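# Illustrative sketch of the (discouraged) call above, e.g. for a caller that
# must branch on the active backend:
#   if api_implementation.Type() == 'cpp':
#     ...  # exercise behavior specific to the C++ implementation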
def _SetType(implementation_type):
"""Never use! Only for protobuf benchmark."""
global _implementation_type
_implementation_type = implementation_type
# See comment on 'Type' above.
def Version():
return _implementation_version
# For internal use only
def IsPythonDefaultSerializationDeterministic():
return _python_deterministic_proto_serialization

@ -0,0 +1,781 @@
# Protocol Buffers - Google's data interchange format
# Copyright 2008 Google Inc. All rights reserved.
# https://developers.google.com/protocol-buffers/
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Contains container classes to represent different protocol buffer types.
This file defines container classes which represent categories of protocol
buffer field types which need extra maintenance. Currently these categories
are:
- Repeated scalar fields - These are all repeated fields which aren't
composite (e.g. they are of simple types like int32, string, etc).
- Repeated composite fields - Repeated fields which are composite. This
includes groups and nested messages.
"""
__author__ = 'petar@google.com (Petar Petrov)'
import sys
try:
  import collections.abc as collections_abc
except ImportError:
  # This fallback applies for all versions of Python before 3.3, where
  # collections.abc does not exist yet.
  import collections as collections_abc
if sys.version_info[0] < 3:
# We would use collections_abc.MutableMapping all the time, but in Python 2
# it doesn't define __slots__. This causes two significant problems:
#
# 1. we can't disallow arbitrary attribute assignment, even if our derived
# classes *do* define __slots__.
#
# 2. we can't safely derive a C type from it without __slots__ defined (the
# interpreter expects to find a dict at tp_dictoffset, which we can't
# robustly provide. And we don't want an instance dict anyway.
#
# So this is the Python 2.7 definition of Mapping/MutableMapping functions
# verbatim, except that:
# 1. We declare __slots__.
# 2. We don't declare this as a virtual base class. The classes defined
# in collections_abc are the interesting base classes, not us.
#
# Note: deriving from object is critical. It is the only thing that makes
# this a true type, allowing us to derive from it in C++ cleanly and making
# __slots__ properly disallow arbitrary element assignment.
class Mapping(object):
__slots__ = ()
def get(self, key, default=None):
try:
return self[key]
except KeyError:
return default
def __contains__(self, key):
try:
self[key]
except KeyError:
return False
else:
return True
def iterkeys(self):
return iter(self)
def itervalues(self):
for key in self:
yield self[key]
def iteritems(self):
for key in self:
yield (key, self[key])
def keys(self):
return list(self)
def items(self):
return [(key, self[key]) for key in self]
def values(self):
return [self[key] for key in self]
# Mappings are not hashable by default, but subclasses can change this
__hash__ = None
def __eq__(self, other):
if not isinstance(other, collections_abc.Mapping):
return NotImplemented
return dict(self.items()) == dict(other.items())
def __ne__(self, other):
return not (self == other)
class MutableMapping(Mapping):
__slots__ = ()
__marker = object()
def pop(self, key, default=__marker):
try:
value = self[key]
except KeyError:
if default is self.__marker:
raise
return default
else:
del self[key]
return value
def popitem(self):
try:
key = next(iter(self))
except StopIteration:
raise KeyError
value = self[key]
del self[key]
return key, value
def clear(self):
try:
while True:
self.popitem()
except KeyError:
pass
def update(*args, **kwds):
if len(args) > 2:
raise TypeError("update() takes at most 2 positional "
"arguments ({} given)".format(len(args)))
elif not args:
raise TypeError("update() takes at least 1 argument (0 given)")
self = args[0]
other = args[1] if len(args) >= 2 else ()
if isinstance(other, Mapping):
for key in other:
self[key] = other[key]
elif hasattr(other, "keys"):
for key in other.keys():
self[key] = other[key]
else:
for key, value in other:
self[key] = value
for key, value in kwds.items():
self[key] = value
def setdefault(self, key, default=None):
try:
return self[key]
except KeyError:
self[key] = default
return default
collections_abc.Mapping.register(Mapping)
collections_abc.MutableMapping.register(MutableMapping)
else:
# In Python 3 we can just use MutableMapping directly, because it defines
# __slots__.
MutableMapping = collections_abc.MutableMapping
class BaseContainer(object):
"""Base container class."""
# Minimizes memory usage and disallows assignment to other attributes.
__slots__ = ['_message_listener', '_values']
def __init__(self, message_listener):
"""
Args:
message_listener: A MessageListener implementation.
The RepeatedScalarFieldContainer will call this object's
Modified() method when it is modified.
"""
self._message_listener = message_listener
self._values = []
def __getitem__(self, key):
"""Retrieves item by the specified key."""
return self._values[key]
def __len__(self):
"""Returns the number of elements in the container."""
return len(self._values)
def __ne__(self, other):
"""Checks if another instance isn't equal to this one."""
# The concrete classes should define __eq__.
return not self == other
def __hash__(self):
raise TypeError('unhashable object')
def __repr__(self):
return repr(self._values)
def sort(self, *args, **kwargs):
# Continue to support the old sort_function keyword argument.
# This is expected to be a rare occurrence, so use LBYL to avoid
# the overhead of actually catching KeyError.
if 'sort_function' in kwargs:
kwargs['cmp'] = kwargs.pop('sort_function')
self._values.sort(*args, **kwargs)
collections_abc.MutableSequence.register(BaseContainer)
class RepeatedScalarFieldContainer(BaseContainer):
"""Simple, type-checked, list-like container for holding repeated scalars."""
# Disallows assignment to other attributes.
__slots__ = ['_type_checker']
def __init__(self, message_listener, type_checker):
"""Args:
message_listener: A MessageListener implementation. The
RepeatedScalarFieldContainer will call this object's Modified() method
when it is modified.
type_checker: A type_checkers.ValueChecker instance to run on elements
inserted into this container.
"""
super(RepeatedScalarFieldContainer, self).__init__(message_listener)
self._type_checker = type_checker
def append(self, value):
"""Appends an item to the list. Similar to list.append()."""
self._values.append(self._type_checker.CheckValue(value))
if not self._message_listener.dirty:
self._message_listener.Modified()
def insert(self, key, value):
"""Inserts the item at the specified position. Similar to list.insert()."""
self._values.insert(key, self._type_checker.CheckValue(value))
if not self._message_listener.dirty:
self._message_listener.Modified()
def extend(self, elem_seq):
"""Extends by appending the given iterable. Similar to list.extend()."""
if elem_seq is None:
return
try:
elem_seq_iter = iter(elem_seq)
except TypeError:
if not elem_seq:
# silently ignore falsy inputs :-/.
# TODO(ptucker): Deprecate this behavior. b/18413862
return
raise
new_values = [self._type_checker.CheckValue(elem) for elem in elem_seq_iter]
if new_values:
self._values.extend(new_values)
self._message_listener.Modified()
def MergeFrom(self, other):
"""Appends the contents of another repeated field of the same type to this
one. We do not check the types of the individual fields.
"""
self._values.extend(other._values)
self._message_listener.Modified()
def remove(self, elem):
"""Removes an item from the list. Similar to list.remove()."""
self._values.remove(elem)
self._message_listener.Modified()
def pop(self, key=-1):
"""Removes and returns an item at a given index. Similar to list.pop()."""
value = self._values[key]
self.__delitem__(key)
return value
def __setitem__(self, key, value):
"""Sets the item on the specified position."""
if isinstance(key, slice): # PY3
if key.step is not None:
raise ValueError('Extended slices not supported')
self.__setslice__(key.start, key.stop, value)
else:
self._values[key] = self._type_checker.CheckValue(value)
self._message_listener.Modified()
def __getslice__(self, start, stop):
"""Retrieves the subset of items from between the specified indices."""
return self._values[start:stop]
def __setslice__(self, start, stop, values):
"""Sets the subset of items from between the specified indices."""
new_values = []
for value in values:
new_values.append(self._type_checker.CheckValue(value))
self._values[start:stop] = new_values
self._message_listener.Modified()
def __delitem__(self, key):
"""Deletes the item at the specified position."""
del self._values[key]
self._message_listener.Modified()
def __delslice__(self, start, stop):
"""Deletes the subset of items from between the specified indices."""
del self._values[start:stop]
self._message_listener.Modified()
def __eq__(self, other):
"""Compares the current instance with another one."""
if self is other:
return True
# Special case for the same type which should be common and fast.
if isinstance(other, self.__class__):
return other._values == self._values
# We are presumably comparing against some other sequence type.
return other == self._values
class RepeatedCompositeFieldContainer(BaseContainer):
"""Simple, list-like container for holding repeated composite fields."""
# Disallows assignment to other attributes.
__slots__ = ['_message_descriptor']
def __init__(self, message_listener, message_descriptor):
"""
    Note that we pass in a descriptor instead of the generated class directly,
since at the time we construct a _RepeatedCompositeFieldContainer we
haven't yet necessarily initialized the type that will be contained in the
container.
Args:
message_listener: A MessageListener implementation.
The RepeatedCompositeFieldContainer will call this object's
Modified() method when it is modified.
message_descriptor: A Descriptor instance describing the protocol type
that should be present in this container. We'll use the
_concrete_class field of this descriptor when the client calls add().
"""
super(RepeatedCompositeFieldContainer, self).__init__(message_listener)
self._message_descriptor = message_descriptor
def add(self, **kwargs):
"""Adds a new element at the end of the list and returns it. Keyword
arguments may be used to initialize the element.
"""
new_element = self._message_descriptor._concrete_class(**kwargs)
new_element._SetListener(self._message_listener)
self._values.append(new_element)
if not self._message_listener.dirty:
self._message_listener.Modified()
return new_element
def append(self, value):
"""Appends one element by copying the message."""
new_element = self._message_descriptor._concrete_class()
new_element._SetListener(self._message_listener)
new_element.CopyFrom(value)
self._values.append(new_element)
if not self._message_listener.dirty:
self._message_listener.Modified()
def insert(self, key, value):
"""Inserts the item at the specified position by copying."""
new_element = self._message_descriptor._concrete_class()
new_element._SetListener(self._message_listener)
new_element.CopyFrom(value)
self._values.insert(key, new_element)
if not self._message_listener.dirty:
self._message_listener.Modified()
def extend(self, elem_seq):
"""Extends by appending the given sequence of elements of the same type
as this one, copying each individual message.
"""
message_class = self._message_descriptor._concrete_class
listener = self._message_listener
values = self._values
for message in elem_seq:
new_element = message_class()
new_element._SetListener(listener)
new_element.MergeFrom(message)
values.append(new_element)
listener.Modified()
def MergeFrom(self, other):
"""Appends the contents of another repeated field of the same type to this
one, copying each individual message.
"""
self.extend(other._values)
def remove(self, elem):
"""Removes an item from the list. Similar to list.remove()."""
self._values.remove(elem)
self._message_listener.Modified()
def pop(self, key=-1):
"""Removes and returns an item at a given index. Similar to list.pop()."""
value = self._values[key]
self.__delitem__(key)
return value
def __getslice__(self, start, stop):
"""Retrieves the subset of items from between the specified indices."""
return self._values[start:stop]
def __delitem__(self, key):
"""Deletes the item at the specified position."""
del self._values[key]
self._message_listener.Modified()
def __delslice__(self, start, stop):
"""Deletes the subset of items from between the specified indices."""
del self._values[start:stop]
self._message_listener.Modified()
def __eq__(self, other):
"""Compares the current instance with another one."""
if self is other:
return True
if not isinstance(other, self.__class__):
raise TypeError('Can only compare repeated composite fields against '
'other repeated composite fields.')
return self._values == other._values
class ScalarMap(MutableMapping):
"""Simple, type-checked, dict-like container for holding repeated scalars."""
# Disallows assignment to other attributes.
__slots__ = ['_key_checker', '_value_checker', '_values', '_message_listener',
'_entry_descriptor']
def __init__(self, message_listener, key_checker, value_checker,
entry_descriptor):
"""
Args:
message_listener: A MessageListener implementation.
The ScalarMap will call this object's Modified() method when it
is modified.
key_checker: A type_checkers.ValueChecker instance to run on keys
inserted into this container.
value_checker: A type_checkers.ValueChecker instance to run on values
inserted into this container.
entry_descriptor: The MessageDescriptor of a map entry: key and value.
"""
self._message_listener = message_listener
self._key_checker = key_checker
self._value_checker = value_checker
self._entry_descriptor = entry_descriptor
self._values = {}
def __getitem__(self, key):
try:
return self._values[key]
except KeyError:
key = self._key_checker.CheckValue(key)
val = self._value_checker.DefaultValue()
self._values[key] = val
return val
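  # Illustrative note: unlike a plain dict, a bare lookup on a missing key
  # inserts that key with the value type's default, e.g. (hypothetical names):
  #   v = scalar_map['new_key']    # creates 'new_key' mapped to the default
  #   'new_key' in scalar_map      # now True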
def __contains__(self, item):
# We check the key's type to match the strong-typing flavor of the API.
# Also this makes it easier to match the behavior of the C++ implementation.
self._key_checker.CheckValue(item)
return item in self._values
# We need to override this explicitly, because our defaultdict-like behavior
# will make the default implementation (from our base class) always insert
# the key.
def get(self, key, default=None):
if key in self:
return self[key]
else:
return default
def __setitem__(self, key, value):
checked_key = self._key_checker.CheckValue(key)
checked_value = self._value_checker.CheckValue(value)
self._values[checked_key] = checked_value
self._message_listener.Modified()
def __delitem__(self, key):
del self._values[key]
self._message_listener.Modified()
def __len__(self):
return len(self._values)
def __iter__(self):
return iter(self._values)
def __repr__(self):
return repr(self._values)
def MergeFrom(self, other):
self._values.update(other._values)
self._message_listener.Modified()
def InvalidateIterators(self):
# It appears that the only way to reliably invalidate iterators to
# self._values is to ensure that its size changes.
original = self._values
self._values = original.copy()
original[None] = None
# This is defined in the abstract base, but we can do it much more cheaply.
def clear(self):
self._values.clear()
self._message_listener.Modified()
def GetEntryClass(self):
return self._entry_descriptor._concrete_class
class MessageMap(MutableMapping):
"""Simple, type-checked, dict-like container for with submessage values."""
# Disallows assignment to other attributes.
__slots__ = ['_key_checker', '_values', '_message_listener',
'_message_descriptor', '_entry_descriptor']
def __init__(self, message_listener, message_descriptor, key_checker,
entry_descriptor):
"""
Args:
      message_listener: A MessageListener implementation.
        The MessageMap will call this object's Modified() method when it
        is modified.
      message_descriptor: A Descriptor instance describing the protocol type
        of the submessage values held in this container.
      key_checker: A type_checkers.ValueChecker instance to run on keys
        inserted into this container.
      entry_descriptor: The MessageDescriptor of a map entry: key and value.
"""
self._message_listener = message_listener
self._message_descriptor = message_descriptor
self._key_checker = key_checker
self._entry_descriptor = entry_descriptor
self._values = {}
def __getitem__(self, key):
key = self._key_checker.CheckValue(key)
try:
return self._values[key]
except KeyError:
new_element = self._message_descriptor._concrete_class()
new_element._SetListener(self._message_listener)
self._values[key] = new_element
self._message_listener.Modified()
return new_element
def get_or_create(self, key):
"""get_or_create() is an alias for getitem (ie. map[key]).
Args:
key: The key to get or create in the map.
This is useful in cases where you want to be explicit that the call is
mutating the map. This can avoid lint errors for statements like this
that otherwise would appear to be pointless statements:
msg.my_map[key]
"""
return self[key]
# We need to override this explicitly, because our defaultdict-like behavior
# will make the default implementation (from our base class) always insert
# the key.
def get(self, key, default=None):
if key in self:
return self[key]
else:
return default
def __contains__(self, item):
item = self._key_checker.CheckValue(item)
return item in self._values
def __setitem__(self, key, value):
raise ValueError('May not set values directly, call my_map[key].foo = 5')
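  # Illustrative sketch of the supported mutation patterns (names hypothetical):
  #   msg.my_map[key].foo = 5           # creates the submessage if needed
  #   msg.my_map[key].CopyFrom(other)   # or copy in a whole submessage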
def __delitem__(self, key):
key = self._key_checker.CheckValue(key)
del self._values[key]
self._message_listener.Modified()
def __len__(self):
return len(self._values)
def __iter__(self):
return iter(self._values)
def __repr__(self):
return repr(self._values)
def MergeFrom(self, other):
for key in other:
# According to documentation: "When parsing from the wire or when merging,
# if there are duplicate map keys the last key seen is used".
if key in self:
del self[key]
self[key].CopyFrom(other[key])
# self._message_listener.Modified() not required here, because
# mutations to submessages already propagate.
def InvalidateIterators(self):
# It appears that the only way to reliably invalidate iterators to
# self._values is to ensure that its size changes.
original = self._values
self._values = original.copy()
original[None] = None
# This is defined in the abstract base, but we can do it much more cheaply.
def clear(self):
self._values.clear()
self._message_listener.Modified()
def GetEntryClass(self):
return self._entry_descriptor._concrete_class
class _UnknownField(object):
"""A parsed unknown field."""
# Disallows assignment to other attributes.
__slots__ = ['_field_number', '_wire_type', '_data']
def __init__(self, field_number, wire_type, data):
self._field_number = field_number
self._wire_type = wire_type
self._data = data
return
def __lt__(self, other):
# pylint: disable=protected-access
return self._field_number < other._field_number
def __eq__(self, other):
if self is other:
return True
# pylint: disable=protected-access
return (self._field_number == other._field_number and
self._wire_type == other._wire_type and
self._data == other._data)
class UnknownFieldRef(object):
def __init__(self, parent, index):
self._parent = parent
self._index = index
return
def _check_valid(self):
if not self._parent:
raise ValueError('UnknownField does not exist. '
'The parent message might be cleared.')
if self._index >= len(self._parent):
raise ValueError('UnknownField does not exist. '
'The parent message might be cleared.')
@property
def field_number(self):
self._check_valid()
# pylint: disable=protected-access
return self._parent._internal_get(self._index)._field_number
@property
def wire_type(self):
self._check_valid()
# pylint: disable=protected-access
return self._parent._internal_get(self._index)._wire_type
@property
def data(self):
self._check_valid()
# pylint: disable=protected-access
return self._parent._internal_get(self._index)._data
class UnknownFieldSet(object):
"""UnknownField container"""
# Disallows assignment to other attributes.
__slots__ = ['_values']
def __init__(self):
self._values = []
def __getitem__(self, index):
if self._values is None:
raise ValueError('UnknownFields does not exist. '
'The parent message might be cleared.')
size = len(self._values)
if index < 0:
index += size
if index < 0 or index >= size:
      raise IndexError('index %d out of range' % index)
return UnknownFieldRef(self, index)
def _internal_get(self, index):
return self._values[index]
def __len__(self):
if self._values is None:
raise ValueError('UnknownFields does not exist. '
'The parent message might be cleared.')
return len(self._values)
def _add(self, field_number, wire_type, data):
unknown_field = _UnknownField(field_number, wire_type, data)
self._values.append(unknown_field)
return unknown_field
def __iter__(self):
for i in range(len(self)):
yield UnknownFieldRef(self, i)
def _extend(self, other):
if other is None:
return
# pylint: disable=protected-access
self._values.extend(other._values)
def __eq__(self, other):
if self is other:
return True
# Sort unknown fields because their order shouldn't
# affect equality test.
values = list(self._values)
if other is None:
return not values
values.sort()
# pylint: disable=protected-access
other_values = sorted(other._values)
return values == other_values
def _clear(self):
for value in self._values:
# pylint: disable=protected-access
if isinstance(value._data, UnknownFieldSet):
value._data._clear() # pylint: disable=protected-access
self._values = None

File diff suppressed because it is too large

@ -0,0 +1,132 @@
#! /usr/bin/env python
#
# Protocol Buffers - Google's data interchange format
# Copyright 2008 Google Inc. All rights reserved.
# https://developers.google.com/protocol-buffers/
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Tests for google.protobuf.descriptor_database."""
__author__ = 'matthewtoia@google.com (Matt Toia)'
try:
import unittest2 as unittest #PY26
except ImportError:
import unittest
import warnings
from google.protobuf import unittest_pb2
from google.protobuf import descriptor_pb2
from google.protobuf.internal import factory_test2_pb2
from google.protobuf.internal import no_package_pb2
from google.protobuf.internal import testing_refleaks
from google.protobuf import descriptor_database
@testing_refleaks.TestCase
class DescriptorDatabaseTest(unittest.TestCase):
def testAdd(self):
db = descriptor_database.DescriptorDatabase()
file_desc_proto = descriptor_pb2.FileDescriptorProto.FromString(
factory_test2_pb2.DESCRIPTOR.serialized_pb)
file_desc_proto2 = descriptor_pb2.FileDescriptorProto.FromString(
no_package_pb2.DESCRIPTOR.serialized_pb)
db.Add(file_desc_proto)
db.Add(file_desc_proto2)
self.assertEqual(file_desc_proto, db.FindFileByName(
'google/protobuf/internal/factory_test2.proto'))
# Can find message type.
self.assertEqual(file_desc_proto, db.FindFileContainingSymbol(
'google.protobuf.python.internal.Factory2Message'))
# Can find nested message type.
self.assertEqual(file_desc_proto, db.FindFileContainingSymbol(
'google.protobuf.python.internal.Factory2Message.NestedFactory2Message'))
# Can find enum type.
self.assertEqual(file_desc_proto, db.FindFileContainingSymbol(
'google.protobuf.python.internal.Factory2Enum'))
# Can find nested enum type.
self.assertEqual(file_desc_proto, db.FindFileContainingSymbol(
'google.protobuf.python.internal.Factory2Message.NestedFactory2Enum'))
self.assertEqual(file_desc_proto, db.FindFileContainingSymbol(
'google.protobuf.python.internal.MessageWithNestedEnumOnly.NestedEnum'))
# Can find field.
self.assertEqual(file_desc_proto, db.FindFileContainingSymbol(
'google.protobuf.python.internal.Factory2Message.list_field'))
# Can find enum value.
self.assertEqual(file_desc_proto, db.FindFileContainingSymbol(
'google.protobuf.python.internal.Factory2Enum.FACTORY_2_VALUE_0'))
self.assertEqual(file_desc_proto, db.FindFileContainingSymbol(
'google.protobuf.python.internal.FACTORY_2_VALUE_0'))
self.assertEqual(file_desc_proto2, db.FindFileContainingSymbol(
'.NO_PACKAGE_VALUE_0'))
# Can find top level extension.
self.assertEqual(file_desc_proto, db.FindFileContainingSymbol(
'google.protobuf.python.internal.another_field'))
# Can find nested extension inside a message.
self.assertEqual(file_desc_proto, db.FindFileContainingSymbol(
'google.protobuf.python.internal.Factory2Message.one_more_field'))
# Can find service.
file_desc_proto2 = descriptor_pb2.FileDescriptorProto.FromString(
unittest_pb2.DESCRIPTOR.serialized_pb)
db.Add(file_desc_proto2)
self.assertEqual(file_desc_proto2, db.FindFileContainingSymbol(
'protobuf_unittest.TestService'))
# Non-existent field under a valid top level symbol can also be
# found. The behavior is the same with protobuf C++.
self.assertEqual(file_desc_proto2, db.FindFileContainingSymbol(
'protobuf_unittest.TestAllTypes.none_field'))
with self.assertRaisesRegexp(KeyError, r'\'protobuf_unittest\.NoneMessage\''):
db.FindFileContainingSymbol('protobuf_unittest.NoneMessage')
def testConflictRegister(self):
db = descriptor_database.DescriptorDatabase()
unittest_fd = descriptor_pb2.FileDescriptorProto.FromString(
unittest_pb2.DESCRIPTOR.serialized_pb)
db.Add(unittest_fd)
conflict_fd = descriptor_pb2.FileDescriptorProto.FromString(
unittest_pb2.DESCRIPTOR.serialized_pb)
conflict_fd.name = 'other_file2'
with warnings.catch_warnings(record=True) as w:
# Cause all warnings to always be triggered.
warnings.simplefilter('always')
db.Add(conflict_fd)
self.assertTrue(len(w))
self.assertIs(w[0].category, RuntimeWarning)
self.assertIn('Conflict register for file "other_file2": ',
str(w[0].message))
self.assertIn('already defined in file '
'"google/protobuf/unittest.proto"',
str(w[0].message))
if __name__ == '__main__':
unittest.main()

@ -0,0 +1,498 @@
# -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: google/protobuf/internal/descriptor_pool_test1.proto
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
DESCRIPTOR = _descriptor.FileDescriptor(
name='google/protobuf/internal/descriptor_pool_test1.proto',
package='google.protobuf.python.internal',
syntax='proto2',
serialized_options=None,
create_key=_descriptor._internal_create_key,
serialized_pb=b'\n4google/protobuf/internal/descriptor_pool_test1.proto\x12\x1fgoogle.protobuf.python.internal\"\xfb\x05\n\x13\x44\x65scriptorPoolTest1\x12Z\n\x0bnested_enum\x18\x01 \x01(\x0e\x32?.google.protobuf.python.internal.DescriptorPoolTest1.NestedEnum:\x04\x42\x45TA\x12Z\n\x0enested_message\x18\x02 \x01(\x0b\x32\x42.google.protobuf.python.internal.DescriptorPoolTest1.NestedMessage\x1a\xfd\x03\n\rNestedMessage\x12h\n\x0bnested_enum\x18\x01 \x01(\x0e\x32M.google.protobuf.python.internal.DescriptorPoolTest1.NestedMessage.NestedEnum:\x04ZETA\x12\x1a\n\x0cnested_field\x18\x02 \x01(\t:\x04\x62\x65ta\x12q\n\x13\x64\x65\x65p_nested_message\x18\x03 \x01(\x0b\x32T.google.protobuf.python.internal.DescriptorPoolTest1.NestedMessage.DeepNestedMessage\x1a\xcd\x01\n\x11\x44\x65\x65pNestedMessage\x12y\n\x0bnested_enum\x18\x01 \x01(\x0e\x32_.google.protobuf.python.internal.DescriptorPoolTest1.NestedMessage.DeepNestedMessage.NestedEnum:\x03\x45TA\x12\x1b\n\x0cnested_field\x18\x02 \x01(\t:\x05theta\" \n\nNestedEnum\x12\x07\n\x03\x45TA\x10\x07\x12\t\n\x05THETA\x10\x08\"#\n\nNestedEnum\x12\x0b\n\x07\x45PSILON\x10\x05\x12\x08\n\x04ZETA\x10\x06\"!\n\nNestedEnum\x12\t\n\x05\x41LPHA\x10\x01\x12\x08\n\x04\x42\x45TA\x10\x02*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02\"\xf1\x05\n\x13\x44\x65scriptorPoolTest2\x12[\n\x0bnested_enum\x18\x01 \x01(\x0e\x32?.google.protobuf.python.internal.DescriptorPoolTest2.NestedEnum:\x05GAMMA\x12Z\n\x0enested_message\x18\x02 \x01(\x0b\x32\x42.google.protobuf.python.internal.DescriptorPoolTest2.NestedMessage\x1a\xfc\x03\n\rNestedMessage\x12h\n\x0bnested_enum\x18\x01 \x01(\x0e\x32M.google.protobuf.python.internal.DescriptorPoolTest2.NestedMessage.NestedEnum:\x04IOTA\x12\x1b\n\x0cnested_field\x18\x02 \x01(\t:\x05\x64\x65lta\x12q\n\x13\x64\x65\x65p_nested_message\x18\x03 \x01(\x0b\x32T.google.protobuf.python.internal.DescriptorPoolTest2.NestedMessage.DeepNestedMessage\x1a\xcd\x01\n\x11\x44\x65\x65pNestedMessage\x12x\n\x0bnested_enum\x18\x01 \x01(\x0e\x32_.google.protobuf.python.internal.DescriptorPoolTest2.NestedMessage.DeepNestedMessage.NestedEnum:\x02MU\x12\x1c\n\x0cnested_field\x18\x02 \x01(\t:\x06lambda\" \n\nNestedEnum\x12\n\n\x06LAMBDA\x10\x0b\x12\x06\n\x02MU\x10\x0c\"!\n\nNestedEnum\x12\x08\n\x04IOTA\x10\t\x12\t\n\x05KAPPA\x10\n\"\"\n\nNestedEnum\x12\t\n\x05GAMMA\x10\x03\x12\t\n\x05\x44\x45LTA\x10\x04'
)
_DESCRIPTORPOOLTEST1_NESTEDMESSAGE_DEEPNESTEDMESSAGE_NESTEDENUM = _descriptor.EnumDescriptor(
name='NestedEnum',
full_name='google.protobuf.python.internal.DescriptorPoolTest1.NestedMessage.DeepNestedMessage.NestedEnum',
filename=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key,
values=[
_descriptor.EnumValueDescriptor(
name='ETA', index=0, number=7,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='THETA', index=1, number=8,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
],
containing_type=None,
serialized_options=None,
serialized_start=738,
serialized_end=770,
)
_sym_db.RegisterEnumDescriptor(_DESCRIPTORPOOLTEST1_NESTEDMESSAGE_DEEPNESTEDMESSAGE_NESTEDENUM)
_DESCRIPTORPOOLTEST1_NESTEDMESSAGE_NESTEDENUM = _descriptor.EnumDescriptor(
name='NestedEnum',
full_name='google.protobuf.python.internal.DescriptorPoolTest1.NestedMessage.NestedEnum',
filename=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key,
values=[
_descriptor.EnumValueDescriptor(
name='EPSILON', index=0, number=5,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='ZETA', index=1, number=6,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
],
containing_type=None,
serialized_options=None,
serialized_start=772,
serialized_end=807,
)
_sym_db.RegisterEnumDescriptor(_DESCRIPTORPOOLTEST1_NESTEDMESSAGE_NESTEDENUM)
_DESCRIPTORPOOLTEST1_NESTEDENUM = _descriptor.EnumDescriptor(
name='NestedEnum',
full_name='google.protobuf.python.internal.DescriptorPoolTest1.NestedEnum',
filename=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key,
values=[
_descriptor.EnumValueDescriptor(
name='ALPHA', index=0, number=1,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='BETA', index=1, number=2,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
],
containing_type=None,
serialized_options=None,
serialized_start=809,
serialized_end=842,
)
_sym_db.RegisterEnumDescriptor(_DESCRIPTORPOOLTEST1_NESTEDENUM)
_DESCRIPTORPOOLTEST2_NESTEDMESSAGE_DEEPNESTEDMESSAGE_NESTEDENUM = _descriptor.EnumDescriptor(
name='NestedEnum',
full_name='google.protobuf.python.internal.DescriptorPoolTest2.NestedMessage.DeepNestedMessage.NestedEnum',
filename=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key,
values=[
_descriptor.EnumValueDescriptor(
name='LAMBDA', index=0, number=11,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='MU', index=1, number=12,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
],
containing_type=None,
serialized_options=None,
serialized_start=1506,
serialized_end=1538,
)
_sym_db.RegisterEnumDescriptor(_DESCRIPTORPOOLTEST2_NESTEDMESSAGE_DEEPNESTEDMESSAGE_NESTEDENUM)
_DESCRIPTORPOOLTEST2_NESTEDMESSAGE_NESTEDENUM = _descriptor.EnumDescriptor(
name='NestedEnum',
full_name='google.protobuf.python.internal.DescriptorPoolTest2.NestedMessage.NestedEnum',
filename=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key,
values=[
_descriptor.EnumValueDescriptor(
name='IOTA', index=0, number=9,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='KAPPA', index=1, number=10,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
],
containing_type=None,
serialized_options=None,
serialized_start=1540,
serialized_end=1573,
)
_sym_db.RegisterEnumDescriptor(_DESCRIPTORPOOLTEST2_NESTEDMESSAGE_NESTEDENUM)
_DESCRIPTORPOOLTEST2_NESTEDENUM = _descriptor.EnumDescriptor(
name='NestedEnum',
full_name='google.protobuf.python.internal.DescriptorPoolTest2.NestedEnum',
filename=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key,
values=[
_descriptor.EnumValueDescriptor(
name='GAMMA', index=0, number=3,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='DELTA', index=1, number=4,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
],
containing_type=None,
serialized_options=None,
serialized_start=1575,
serialized_end=1609,
)
_sym_db.RegisterEnumDescriptor(_DESCRIPTORPOOLTEST2_NESTEDENUM)
_DESCRIPTORPOOLTEST1_NESTEDMESSAGE_DEEPNESTEDMESSAGE = _descriptor.Descriptor(
name='DeepNestedMessage',
full_name='google.protobuf.python.internal.DescriptorPoolTest1.NestedMessage.DeepNestedMessage',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='nested_enum', full_name='google.protobuf.python.internal.DescriptorPoolTest1.NestedMessage.DeepNestedMessage.nested_enum', index=0,
number=1, type=14, cpp_type=8, label=1,
has_default_value=True, default_value=7,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='nested_field', full_name='google.protobuf.python.internal.DescriptorPoolTest1.NestedMessage.DeepNestedMessage.nested_field', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=True, default_value=b"theta".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
_DESCRIPTORPOOLTEST1_NESTEDMESSAGE_DEEPNESTEDMESSAGE_NESTEDENUM,
],
serialized_options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=565,
serialized_end=770,
)
_DESCRIPTORPOOLTEST1_NESTEDMESSAGE = _descriptor.Descriptor(
name='NestedMessage',
full_name='google.protobuf.python.internal.DescriptorPoolTest1.NestedMessage',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='nested_enum', full_name='google.protobuf.python.internal.DescriptorPoolTest1.NestedMessage.nested_enum', index=0,
number=1, type=14, cpp_type=8, label=1,
has_default_value=True, default_value=6,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='nested_field', full_name='google.protobuf.python.internal.DescriptorPoolTest1.NestedMessage.nested_field', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=True, default_value=b"beta".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='deep_nested_message', full_name='google.protobuf.python.internal.DescriptorPoolTest1.NestedMessage.deep_nested_message', index=2,
number=3, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[_DESCRIPTORPOOLTEST1_NESTEDMESSAGE_DEEPNESTEDMESSAGE, ],
enum_types=[
_DESCRIPTORPOOLTEST1_NESTEDMESSAGE_NESTEDENUM,
],
serialized_options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=298,
serialized_end=807,
)
_DESCRIPTORPOOLTEST1 = _descriptor.Descriptor(
name='DescriptorPoolTest1',
full_name='google.protobuf.python.internal.DescriptorPoolTest1',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='nested_enum', full_name='google.protobuf.python.internal.DescriptorPoolTest1.nested_enum', index=0,
number=1, type=14, cpp_type=8, label=1,
has_default_value=True, default_value=2,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='nested_message', full_name='google.protobuf.python.internal.DescriptorPoolTest1.nested_message', index=1,
number=2, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[_DESCRIPTORPOOLTEST1_NESTEDMESSAGE, ],
enum_types=[
_DESCRIPTORPOOLTEST1_NESTEDENUM,
],
serialized_options=None,
is_extendable=True,
syntax='proto2',
extension_ranges=[(1000, 536870912), ],
oneofs=[
],
serialized_start=90,
serialized_end=853,
)
_DESCRIPTORPOOLTEST2_NESTEDMESSAGE_DEEPNESTEDMESSAGE = _descriptor.Descriptor(
name='DeepNestedMessage',
full_name='google.protobuf.python.internal.DescriptorPoolTest2.NestedMessage.DeepNestedMessage',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='nested_enum', full_name='google.protobuf.python.internal.DescriptorPoolTest2.NestedMessage.DeepNestedMessage.nested_enum', index=0,
number=1, type=14, cpp_type=8, label=1,
has_default_value=True, default_value=12,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='nested_field', full_name='google.protobuf.python.internal.DescriptorPoolTest2.NestedMessage.DeepNestedMessage.nested_field', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=True, default_value=b"lambda".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
_DESCRIPTORPOOLTEST2_NESTEDMESSAGE_DEEPNESTEDMESSAGE_NESTEDENUM,
],
serialized_options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=1333,
serialized_end=1538,
)
_DESCRIPTORPOOLTEST2_NESTEDMESSAGE = _descriptor.Descriptor(
name='NestedMessage',
full_name='google.protobuf.python.internal.DescriptorPoolTest2.NestedMessage',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='nested_enum', full_name='google.protobuf.python.internal.DescriptorPoolTest2.NestedMessage.nested_enum', index=0,
number=1, type=14, cpp_type=8, label=1,
has_default_value=True, default_value=9,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='nested_field', full_name='google.protobuf.python.internal.DescriptorPoolTest2.NestedMessage.nested_field', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=True, default_value=b"delta".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='deep_nested_message', full_name='google.protobuf.python.internal.DescriptorPoolTest2.NestedMessage.deep_nested_message', index=2,
number=3, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[_DESCRIPTORPOOLTEST2_NESTEDMESSAGE_DEEPNESTEDMESSAGE, ],
enum_types=[
_DESCRIPTORPOOLTEST2_NESTEDMESSAGE_NESTEDENUM,
],
serialized_options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=1065,
serialized_end=1573,
)
_DESCRIPTORPOOLTEST2 = _descriptor.Descriptor(
name='DescriptorPoolTest2',
full_name='google.protobuf.python.internal.DescriptorPoolTest2',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='nested_enum', full_name='google.protobuf.python.internal.DescriptorPoolTest2.nested_enum', index=0,
number=1, type=14, cpp_type=8, label=1,
has_default_value=True, default_value=3,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='nested_message', full_name='google.protobuf.python.internal.DescriptorPoolTest2.nested_message', index=1,
number=2, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[_DESCRIPTORPOOLTEST2_NESTEDMESSAGE, ],
enum_types=[
_DESCRIPTORPOOLTEST2_NESTEDENUM,
],
serialized_options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=856,
serialized_end=1609,
)
_DESCRIPTORPOOLTEST1_NESTEDMESSAGE_DEEPNESTEDMESSAGE.fields_by_name['nested_enum'].enum_type = _DESCRIPTORPOOLTEST1_NESTEDMESSAGE_DEEPNESTEDMESSAGE_NESTEDENUM
_DESCRIPTORPOOLTEST1_NESTEDMESSAGE_DEEPNESTEDMESSAGE.containing_type = _DESCRIPTORPOOLTEST1_NESTEDMESSAGE
_DESCRIPTORPOOLTEST1_NESTEDMESSAGE_DEEPNESTEDMESSAGE_NESTEDENUM.containing_type = _DESCRIPTORPOOLTEST1_NESTEDMESSAGE_DEEPNESTEDMESSAGE
_DESCRIPTORPOOLTEST1_NESTEDMESSAGE.fields_by_name['nested_enum'].enum_type = _DESCRIPTORPOOLTEST1_NESTEDMESSAGE_NESTEDENUM
_DESCRIPTORPOOLTEST1_NESTEDMESSAGE.fields_by_name['deep_nested_message'].message_type = _DESCRIPTORPOOLTEST1_NESTEDMESSAGE_DEEPNESTEDMESSAGE
_DESCRIPTORPOOLTEST1_NESTEDMESSAGE.containing_type = _DESCRIPTORPOOLTEST1
_DESCRIPTORPOOLTEST1_NESTEDMESSAGE_NESTEDENUM.containing_type = _DESCRIPTORPOOLTEST1_NESTEDMESSAGE
_DESCRIPTORPOOLTEST1.fields_by_name['nested_enum'].enum_type = _DESCRIPTORPOOLTEST1_NESTEDENUM
_DESCRIPTORPOOLTEST1.fields_by_name['nested_message'].message_type = _DESCRIPTORPOOLTEST1_NESTEDMESSAGE
_DESCRIPTORPOOLTEST1_NESTEDENUM.containing_type = _DESCRIPTORPOOLTEST1
_DESCRIPTORPOOLTEST2_NESTEDMESSAGE_DEEPNESTEDMESSAGE.fields_by_name['nested_enum'].enum_type = _DESCRIPTORPOOLTEST2_NESTEDMESSAGE_DEEPNESTEDMESSAGE_NESTEDENUM
_DESCRIPTORPOOLTEST2_NESTEDMESSAGE_DEEPNESTEDMESSAGE.containing_type = _DESCRIPTORPOOLTEST2_NESTEDMESSAGE
_DESCRIPTORPOOLTEST2_NESTEDMESSAGE_DEEPNESTEDMESSAGE_NESTEDENUM.containing_type = _DESCRIPTORPOOLTEST2_NESTEDMESSAGE_DEEPNESTEDMESSAGE
_DESCRIPTORPOOLTEST2_NESTEDMESSAGE.fields_by_name['nested_enum'].enum_type = _DESCRIPTORPOOLTEST2_NESTEDMESSAGE_NESTEDENUM
_DESCRIPTORPOOLTEST2_NESTEDMESSAGE.fields_by_name['deep_nested_message'].message_type = _DESCRIPTORPOOLTEST2_NESTEDMESSAGE_DEEPNESTEDMESSAGE
_DESCRIPTORPOOLTEST2_NESTEDMESSAGE.containing_type = _DESCRIPTORPOOLTEST2
_DESCRIPTORPOOLTEST2_NESTEDMESSAGE_NESTEDENUM.containing_type = _DESCRIPTORPOOLTEST2_NESTEDMESSAGE
_DESCRIPTORPOOLTEST2.fields_by_name['nested_enum'].enum_type = _DESCRIPTORPOOLTEST2_NESTEDENUM
_DESCRIPTORPOOLTEST2.fields_by_name['nested_message'].message_type = _DESCRIPTORPOOLTEST2_NESTEDMESSAGE
_DESCRIPTORPOOLTEST2_NESTEDENUM.containing_type = _DESCRIPTORPOOLTEST2
DESCRIPTOR.message_types_by_name['DescriptorPoolTest1'] = _DESCRIPTORPOOLTEST1
DESCRIPTOR.message_types_by_name['DescriptorPoolTest2'] = _DESCRIPTORPOOLTEST2
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
DescriptorPoolTest1 = _reflection.GeneratedProtocolMessageType('DescriptorPoolTest1', (_message.Message,), {
'NestedMessage' : _reflection.GeneratedProtocolMessageType('NestedMessage', (_message.Message,), {
'DeepNestedMessage' : _reflection.GeneratedProtocolMessageType('DeepNestedMessage', (_message.Message,), {
'DESCRIPTOR' : _DESCRIPTORPOOLTEST1_NESTEDMESSAGE_DEEPNESTEDMESSAGE,
'__module__' : 'google.protobuf.internal.descriptor_pool_test1_pb2'
# @@protoc_insertion_point(class_scope:google.protobuf.python.internal.DescriptorPoolTest1.NestedMessage.DeepNestedMessage)
})
,
'DESCRIPTOR' : _DESCRIPTORPOOLTEST1_NESTEDMESSAGE,
'__module__' : 'google.protobuf.internal.descriptor_pool_test1_pb2'
# @@protoc_insertion_point(class_scope:google.protobuf.python.internal.DescriptorPoolTest1.NestedMessage)
})
,
'DESCRIPTOR' : _DESCRIPTORPOOLTEST1,
'__module__' : 'google.protobuf.internal.descriptor_pool_test1_pb2'
# @@protoc_insertion_point(class_scope:google.protobuf.python.internal.DescriptorPoolTest1)
})
_sym_db.RegisterMessage(DescriptorPoolTest1)
_sym_db.RegisterMessage(DescriptorPoolTest1.NestedMessage)
_sym_db.RegisterMessage(DescriptorPoolTest1.NestedMessage.DeepNestedMessage)
DescriptorPoolTest2 = _reflection.GeneratedProtocolMessageType('DescriptorPoolTest2', (_message.Message,), {
'NestedMessage' : _reflection.GeneratedProtocolMessageType('NestedMessage', (_message.Message,), {
'DeepNestedMessage' : _reflection.GeneratedProtocolMessageType('DeepNestedMessage', (_message.Message,), {
'DESCRIPTOR' : _DESCRIPTORPOOLTEST2_NESTEDMESSAGE_DEEPNESTEDMESSAGE,
'__module__' : 'google.protobuf.internal.descriptor_pool_test1_pb2'
# @@protoc_insertion_point(class_scope:google.protobuf.python.internal.DescriptorPoolTest2.NestedMessage.DeepNestedMessage)
})
,
'DESCRIPTOR' : _DESCRIPTORPOOLTEST2_NESTEDMESSAGE,
'__module__' : 'google.protobuf.internal.descriptor_pool_test1_pb2'
# @@protoc_insertion_point(class_scope:google.protobuf.python.internal.DescriptorPoolTest2.NestedMessage)
})
,
'DESCRIPTOR' : _DESCRIPTORPOOLTEST2,
'__module__' : 'google.protobuf.internal.descriptor_pool_test1_pb2'
# @@protoc_insertion_point(class_scope:google.protobuf.python.internal.DescriptorPoolTest2)
})
_sym_db.RegisterMessage(DescriptorPoolTest2)
_sym_db.RegisterMessage(DescriptorPoolTest2.NestedMessage)
_sym_db.RegisterMessage(DescriptorPoolTest2.NestedMessage.DeepNestedMessage)
# @@protoc_insertion_point(module_scope)

@ -0,0 +1,278 @@
# -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: google/protobuf/internal/descriptor_pool_test2.proto
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
from google.protobuf.internal import descriptor_pool_test1_pb2 as google_dot_protobuf_dot_internal_dot_descriptor__pool__test1__pb2
from google.protobuf.internal import more_messages_pb2 as google_dot_protobuf_dot_internal_dot_more__messages__pb2
from google.protobuf.internal.more_messages_pb2 import *
DESCRIPTOR = _descriptor.FileDescriptor(
name='google/protobuf/internal/descriptor_pool_test2.proto',
package='google.protobuf.python.internal',
syntax='proto2',
serialized_options=None,
create_key=_descriptor._internal_create_key,
serialized_pb=b'\n4google/protobuf/internal/descriptor_pool_test2.proto\x12\x1fgoogle.protobuf.python.internal\x1a\x34google/protobuf/internal/descriptor_pool_test1.proto\x1a,google/protobuf/internal/more_messages.proto\"\xef\x06\n\x13\x44\x65scriptorPoolTest3\x12X\n\x0bnested_enum\x18\x01 \x01(\x0e\x32?.google.protobuf.python.internal.DescriptorPoolTest3.NestedEnum:\x02XI\x12Z\n\x0enested_message\x18\x02 \x01(\x0b\x32\x42.google.protobuf.python.internal.DescriptorPoolTest3.NestedMessage\x1a\xf7\x03\n\rNestedMessage\x12\x66\n\x0bnested_enum\x18\x01 \x01(\x0e\x32M.google.protobuf.python.internal.DescriptorPoolTest3.NestedMessage.NestedEnum:\x02PI\x12\x18\n\x0cnested_field\x18\x02 \x01(\t:\x02nu\x12q\n\x13\x64\x65\x65p_nested_message\x18\x03 \x01(\x0b\x32T.google.protobuf.python.internal.DescriptorPoolTest3.NestedMessage.DeepNestedMessage\x1a\xcd\x01\n\x11\x44\x65\x65pNestedMessage\x12y\n\x0bnested_enum\x18\x01 \x01(\x0e\x32_.google.protobuf.python.internal.DescriptorPoolTest3.NestedMessage.DeepNestedMessage.NestedEnum:\x03RHO\x12\x1b\n\x0cnested_field\x18\x02 \x01(\t:\x05sigma\" \n\nNestedEnum\x12\x07\n\x03RHO\x10\x11\x12\t\n\x05SIGMA\x10\x12\"!\n\nNestedEnum\x12\x0b\n\x07OMICRON\x10\x0f\x12\x06\n\x02PI\x10\x10\"\x1c\n\nNestedEnum\x12\x06\n\x02NU\x10\r\x12\x06\n\x02XI\x10\x0e\x32\x89\x01\n\x14\x64\x65scriptor_pool_test\x12\x34.google.protobuf.python.internal.DescriptorPoolTest1\x18\xe9\x07 \x01(\x0b\x32\x34.google.protobuf.python.internal.DescriptorPoolTest3P\x01'
,
dependencies=[google_dot_protobuf_dot_internal_dot_descriptor__pool__test1__pb2.DESCRIPTOR,google_dot_protobuf_dot_internal_dot_more__messages__pb2.DESCRIPTOR,],
public_dependencies=[google_dot_protobuf_dot_internal_dot_more__messages__pb2.DESCRIPTOR,])
_DESCRIPTORPOOLTEST3_NESTEDMESSAGE_DEEPNESTEDMESSAGE_NESTEDENUM = _descriptor.EnumDescriptor(
name='NestedEnum',
full_name='google.protobuf.python.internal.DescriptorPoolTest3.NestedMessage.DeepNestedMessage.NestedEnum',
filename=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key,
values=[
_descriptor.EnumValueDescriptor(
name='RHO', index=0, number=17,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='SIGMA', index=1, number=18,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
],
containing_type=None,
serialized_options=None,
serialized_start=832,
serialized_end=864,
)
_sym_db.RegisterEnumDescriptor(_DESCRIPTORPOOLTEST3_NESTEDMESSAGE_DEEPNESTEDMESSAGE_NESTEDENUM)
_DESCRIPTORPOOLTEST3_NESTEDMESSAGE_NESTEDENUM = _descriptor.EnumDescriptor(
name='NestedEnum',
full_name='google.protobuf.python.internal.DescriptorPoolTest3.NestedMessage.NestedEnum',
filename=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key,
values=[
_descriptor.EnumValueDescriptor(
name='OMICRON', index=0, number=15,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='PI', index=1, number=16,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
],
containing_type=None,
serialized_options=None,
serialized_start=866,
serialized_end=899,
)
_sym_db.RegisterEnumDescriptor(_DESCRIPTORPOOLTEST3_NESTEDMESSAGE_NESTEDENUM)
_DESCRIPTORPOOLTEST3_NESTEDENUM = _descriptor.EnumDescriptor(
name='NestedEnum',
full_name='google.protobuf.python.internal.DescriptorPoolTest3.NestedEnum',
filename=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key,
values=[
_descriptor.EnumValueDescriptor(
name='NU', index=0, number=13,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='XI', index=1, number=14,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
],
containing_type=None,
serialized_options=None,
serialized_start=901,
serialized_end=929,
)
_sym_db.RegisterEnumDescriptor(_DESCRIPTORPOOLTEST3_NESTEDENUM)
_DESCRIPTORPOOLTEST3_NESTEDMESSAGE_DEEPNESTEDMESSAGE = _descriptor.Descriptor(
name='DeepNestedMessage',
full_name='google.protobuf.python.internal.DescriptorPoolTest3.NestedMessage.DeepNestedMessage',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='nested_enum', full_name='google.protobuf.python.internal.DescriptorPoolTest3.NestedMessage.DeepNestedMessage.nested_enum', index=0,
number=1, type=14, cpp_type=8, label=1,
has_default_value=True, default_value=17,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='nested_field', full_name='google.protobuf.python.internal.DescriptorPoolTest3.NestedMessage.DeepNestedMessage.nested_field', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=True, default_value=b"sigma".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
_DESCRIPTORPOOLTEST3_NESTEDMESSAGE_DEEPNESTEDMESSAGE_NESTEDENUM,
],
serialized_options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=659,
serialized_end=864,
)
_DESCRIPTORPOOLTEST3_NESTEDMESSAGE = _descriptor.Descriptor(
name='NestedMessage',
full_name='google.protobuf.python.internal.DescriptorPoolTest3.NestedMessage',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='nested_enum', full_name='google.protobuf.python.internal.DescriptorPoolTest3.NestedMessage.nested_enum', index=0,
number=1, type=14, cpp_type=8, label=1,
has_default_value=True, default_value=16,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='nested_field', full_name='google.protobuf.python.internal.DescriptorPoolTest3.NestedMessage.nested_field', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=True, default_value=b"nu".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='deep_nested_message', full_name='google.protobuf.python.internal.DescriptorPoolTest3.NestedMessage.deep_nested_message', index=2,
number=3, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[_DESCRIPTORPOOLTEST3_NESTEDMESSAGE_DEEPNESTEDMESSAGE, ],
enum_types=[
_DESCRIPTORPOOLTEST3_NESTEDMESSAGE_NESTEDENUM,
],
serialized_options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=396,
serialized_end=899,
)
_DESCRIPTORPOOLTEST3 = _descriptor.Descriptor(
name='DescriptorPoolTest3',
full_name='google.protobuf.python.internal.DescriptorPoolTest3',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='nested_enum', full_name='google.protobuf.python.internal.DescriptorPoolTest3.nested_enum', index=0,
number=1, type=14, cpp_type=8, label=1,
has_default_value=True, default_value=14,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='nested_message', full_name='google.protobuf.python.internal.DescriptorPoolTest3.nested_message', index=1,
number=2, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
_descriptor.FieldDescriptor(
name='descriptor_pool_test', full_name='google.protobuf.python.internal.DescriptorPoolTest3.descriptor_pool_test', index=0,
number=1001, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=True, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
nested_types=[_DESCRIPTORPOOLTEST3_NESTEDMESSAGE, ],
enum_types=[
_DESCRIPTORPOOLTEST3_NESTEDENUM,
],
serialized_options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=190,
serialized_end=1069,
)
_DESCRIPTORPOOLTEST3_NESTEDMESSAGE_DEEPNESTEDMESSAGE.fields_by_name['nested_enum'].enum_type = _DESCRIPTORPOOLTEST3_NESTEDMESSAGE_DEEPNESTEDMESSAGE_NESTEDENUM
_DESCRIPTORPOOLTEST3_NESTEDMESSAGE_DEEPNESTEDMESSAGE.containing_type = _DESCRIPTORPOOLTEST3_NESTEDMESSAGE
_DESCRIPTORPOOLTEST3_NESTEDMESSAGE_DEEPNESTEDMESSAGE_NESTEDENUM.containing_type = _DESCRIPTORPOOLTEST3_NESTEDMESSAGE_DEEPNESTEDMESSAGE
_DESCRIPTORPOOLTEST3_NESTEDMESSAGE.fields_by_name['nested_enum'].enum_type = _DESCRIPTORPOOLTEST3_NESTEDMESSAGE_NESTEDENUM
_DESCRIPTORPOOLTEST3_NESTEDMESSAGE.fields_by_name['deep_nested_message'].message_type = _DESCRIPTORPOOLTEST3_NESTEDMESSAGE_DEEPNESTEDMESSAGE
_DESCRIPTORPOOLTEST3_NESTEDMESSAGE.containing_type = _DESCRIPTORPOOLTEST3
_DESCRIPTORPOOLTEST3_NESTEDMESSAGE_NESTEDENUM.containing_type = _DESCRIPTORPOOLTEST3_NESTEDMESSAGE
_DESCRIPTORPOOLTEST3.fields_by_name['nested_enum'].enum_type = _DESCRIPTORPOOLTEST3_NESTEDENUM
_DESCRIPTORPOOLTEST3.fields_by_name['nested_message'].message_type = _DESCRIPTORPOOLTEST3_NESTEDMESSAGE
_DESCRIPTORPOOLTEST3_NESTEDENUM.containing_type = _DESCRIPTORPOOLTEST3
DESCRIPTOR.message_types_by_name['DescriptorPoolTest3'] = _DESCRIPTORPOOLTEST3
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
DescriptorPoolTest3 = _reflection.GeneratedProtocolMessageType('DescriptorPoolTest3', (_message.Message,), {
'NestedMessage' : _reflection.GeneratedProtocolMessageType('NestedMessage', (_message.Message,), {
'DeepNestedMessage' : _reflection.GeneratedProtocolMessageType('DeepNestedMessage', (_message.Message,), {
'DESCRIPTOR' : _DESCRIPTORPOOLTEST3_NESTEDMESSAGE_DEEPNESTEDMESSAGE,
'__module__' : 'google.protobuf.internal.descriptor_pool_test2_pb2'
# @@protoc_insertion_point(class_scope:google.protobuf.python.internal.DescriptorPoolTest3.NestedMessage.DeepNestedMessage)
})
,
'DESCRIPTOR' : _DESCRIPTORPOOLTEST3_NESTEDMESSAGE,
'__module__' : 'google.protobuf.internal.descriptor_pool_test2_pb2'
# @@protoc_insertion_point(class_scope:google.protobuf.python.internal.DescriptorPoolTest3.NestedMessage)
})
,
'DESCRIPTOR' : _DESCRIPTORPOOLTEST3,
'__module__' : 'google.protobuf.internal.descriptor_pool_test2_pb2'
# @@protoc_insertion_point(class_scope:google.protobuf.python.internal.DescriptorPoolTest3)
})
_sym_db.RegisterMessage(DescriptorPoolTest3)
_sym_db.RegisterMessage(DescriptorPoolTest3.NestedMessage)
_sym_db.RegisterMessage(DescriptorPoolTest3.NestedMessage.DeepNestedMessage)
_DESCRIPTORPOOLTEST3.extensions_by_name['descriptor_pool_test'].message_type = _DESCRIPTORPOOLTEST3
google_dot_protobuf_dot_internal_dot_descriptor__pool__test1__pb2.DescriptorPoolTest1.RegisterExtension(_DESCRIPTORPOOLTEST3.extensions_by_name['descriptor_pool_test'])
# @@protoc_insertion_point(module_scope)

@ -0,0 +1,828 @@
# Protocol Buffers - Google's data interchange format
# Copyright 2008 Google Inc. All rights reserved.
# https://developers.google.com/protocol-buffers/
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Code for encoding protocol message primitives.
Contains the logic for encoding every logical protocol field type
into one of the 5 physical wire types.
This code is designed to push the Python interpreter's performance to the
limits.
The basic idea is that at startup time, for every field (i.e. every
FieldDescriptor) we construct two functions: a "sizer" and an "encoder". The
sizer takes a value of this field's type and computes its byte size. The
encoder takes a writer function and a value. It encodes the value into byte
strings and invokes the writer function to write those strings. Typically the
writer function is the write() method of a BytesIO.
We try to do as much work as possible when constructing the writer and the
sizer rather than when calling them. In particular:
* We copy any needed global functions to local variables, so that we do not need
to do costly global table lookups at runtime.
* Similarly, we try to do any attribute lookups at startup time if possible.
* Every field's tag is encoded to bytes at startup, since it can't change at
runtime.
* Whatever component of the field size we can compute at startup, we do.
* We *avoid* sharing code if doing so would make the code slower and not sharing
does not burden us too much. For example, encoders for repeated fields do
not just call the encoders for singular fields in a loop because this would
add an extra function call overhead for every loop iteration; instead, we
manually inline the single-value encoder into the loop.
* If a Python function lacks a return statement, Python actually generates
instructions to pop the result of the last statement off the stack, push
None onto the stack, and then return that. If we really don't care what
value is returned, then we can save two instructions by returning the
result of the last statement. It looks funny but it helps.
* We assume that type and bounds checking has happened at a higher level.
"""
__author__ = 'kenton@google.com (Kenton Varda)'
import struct
import six
from google.protobuf.internal import wire_format
# This will overflow and thus become IEEE-754 "infinity". We would use
# "float('inf')" but it doesn't work on Windows pre-Python-2.6.
_POS_INF = 1e10000
_NEG_INF = -_POS_INF
def _VarintSize(value):
"""Compute the size of a varint value."""
if value <= 0x7f: return 1
if value <= 0x3fff: return 2
if value <= 0x1fffff: return 3
if value <= 0xfffffff: return 4
if value <= 0x7ffffffff: return 5
if value <= 0x3ffffffffff: return 6
if value <= 0x1ffffffffffff: return 7
if value <= 0xffffffffffffff: return 8
if value <= 0x7fffffffffffffff: return 9
return 10
def _SignedVarintSize(value):
"""Compute the size of a signed varint value."""
if value < 0: return 10
if value <= 0x7f: return 1
if value <= 0x3fff: return 2
if value <= 0x1fffff: return 3
if value <= 0xfffffff: return 4
if value <= 0x7ffffffff: return 5
if value <= 0x3ffffffffff: return 6
if value <= 0x1ffffffffffff: return 7
if value <= 0xffffffffffffff: return 8
if value <= 0x7fffffffffffffff: return 9
return 10
def _TagSize(field_number):
"""Returns the number of bytes required to serialize a tag with this field
number."""
# Just pass in type 0, since the type won't affect the tag+type size.
return _VarintSize(wire_format.PackTag(field_number, 0))
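# Illustrative sketch (not part of the upstream file; the _example_* name is
# hypothetical): a few concrete varint and tag sizes, wrapped in an unused
# helper so nothing runs at import time. Values up to 127 fit in one byte,
# field numbers below 16 pack into a single tag byte.
def _example_varint_and_tag_sizes():
  assert _VarintSize(127) == 1   # fits in 7 bits
  assert _VarintSize(150) == 2   # needs a continuation byte
  assert _VarintSize(300) == 2
  assert _TagSize(1) == 1        # (1 << 3) | wire_type is a one-byte varint
  assert _TagSize(16) == 2       # field numbers >= 16 need two tag bytes
  return True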
# --------------------------------------------------------------------
# In this section we define some generic sizers. Each of these functions
# takes parameters specific to a particular field type, e.g. int32 or fixed64.
# It returns another function which in turn takes parameters specific to a
# particular field, e.g. the field number and whether it is repeated or packed.
# Look at the next section to see how these are used.
def _SimpleSizer(compute_value_size):
"""A sizer which uses the function compute_value_size to compute the size of
each value. Typically compute_value_size is _VarintSize."""
def SpecificSizer(field_number, is_repeated, is_packed):
tag_size = _TagSize(field_number)
if is_packed:
local_VarintSize = _VarintSize
def PackedFieldSize(value):
result = 0
for element in value:
result += compute_value_size(element)
return result + local_VarintSize(result) + tag_size
return PackedFieldSize
elif is_repeated:
def RepeatedFieldSize(value):
result = tag_size * len(value)
for element in value:
result += compute_value_size(element)
return result
return RepeatedFieldSize
else:
def FieldSize(value):
return tag_size + compute_value_size(value)
return FieldSize
return SpecificSizer
def _ModifiedSizer(compute_value_size, modify_value):
"""Like SimpleSizer, but modify_value is invoked on each value before it is
passed to compute_value_size. modify_value is typically ZigZagEncode."""
def SpecificSizer(field_number, is_repeated, is_packed):
tag_size = _TagSize(field_number)
if is_packed:
local_VarintSize = _VarintSize
def PackedFieldSize(value):
result = 0
for element in value:
result += compute_value_size(modify_value(element))
return result + local_VarintSize(result) + tag_size
return PackedFieldSize
elif is_repeated:
def RepeatedFieldSize(value):
result = tag_size * len(value)
for element in value:
result += compute_value_size(modify_value(element))
return result
return RepeatedFieldSize
else:
def FieldSize(value):
return tag_size + compute_value_size(modify_value(value))
return FieldSize
return SpecificSizer
def _FixedSizer(value_size):
"""Like _SimpleSizer except for a fixed-size field. The input is the size
of one value."""
def SpecificSizer(field_number, is_repeated, is_packed):
tag_size = _TagSize(field_number)
if is_packed:
local_VarintSize = _VarintSize
def PackedFieldSize(value):
result = len(value) * value_size
return result + local_VarintSize(result) + tag_size
return PackedFieldSize
elif is_repeated:
element_size = value_size + tag_size
def RepeatedFieldSize(value):
return len(value) * element_size
return RepeatedFieldSize
else:
field_size = value_size + tag_size
def FieldSize(value):
return field_size
return FieldSize
return SpecificSizer
# ====================================================================
# Here we declare a sizer constructor for each field type. Each "sizer
# constructor" is a function that takes (field_number, is_repeated, is_packed)
# as parameters and returns a sizer, which in turn takes a field value as
# a parameter and returns its encoded size.
Int32Sizer = Int64Sizer = EnumSizer = _SimpleSizer(_SignedVarintSize)
UInt32Sizer = UInt64Sizer = _SimpleSizer(_VarintSize)
SInt32Sizer = SInt64Sizer = _ModifiedSizer(
_SignedVarintSize, wire_format.ZigZagEncode)
Fixed32Sizer = SFixed32Sizer = FloatSizer = _FixedSizer(4)
Fixed64Sizer = SFixed64Sizer = DoubleSizer = _FixedSizer(8)
BoolSizer = _FixedSizer(1)
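# Illustrative sketch (not part of the upstream file; the _example_* name is
# hypothetical): each name above is a sizer constructor, and calling it with
# (field_number, is_repeated, is_packed) yields the sizer that is applied to a
# field value. Kept in an unused helper so nothing runs at import time.
def _example_int32_sizer():
  sizer = Int32Sizer(1, is_repeated=False, is_packed=False)
  # 1 tag byte + 2 varint bytes for the value 150.
  assert sizer(150) == 3
  packed_sizer = Int32Sizer(1, is_repeated=True, is_packed=True)
  # 1 tag byte + 1 length byte + 3 one-byte values.
  assert packed_sizer([1, 2, 3]) == 5
  return sizer, packed_sizer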
def StringSizer(field_number, is_repeated, is_packed):
"""Returns a sizer for a string field."""
tag_size = _TagSize(field_number)
local_VarintSize = _VarintSize
local_len = len
assert not is_packed
if is_repeated:
def RepeatedFieldSize(value):
result = tag_size * len(value)
for element in value:
l = local_len(element.encode('utf-8'))
result += local_VarintSize(l) + l
return result
return RepeatedFieldSize
else:
def FieldSize(value):
l = local_len(value.encode('utf-8'))
return tag_size + local_VarintSize(l) + l
return FieldSize
def BytesSizer(field_number, is_repeated, is_packed):
"""Returns a sizer for a bytes field."""
tag_size = _TagSize(field_number)
local_VarintSize = _VarintSize
local_len = len
assert not is_packed
if is_repeated:
def RepeatedFieldSize(value):
result = tag_size * len(value)
for element in value:
l = local_len(element)
result += local_VarintSize(l) + l
return result
return RepeatedFieldSize
else:
def FieldSize(value):
l = local_len(value)
return tag_size + local_VarintSize(l) + l
return FieldSize
def GroupSizer(field_number, is_repeated, is_packed):
"""Returns a sizer for a group field."""
tag_size = _TagSize(field_number) * 2
assert not is_packed
if is_repeated:
def RepeatedFieldSize(value):
result = tag_size * len(value)
for element in value:
result += element.ByteSize()
return result
return RepeatedFieldSize
else:
def FieldSize(value):
return tag_size + value.ByteSize()
return FieldSize
def MessageSizer(field_number, is_repeated, is_packed):
"""Returns a sizer for a message field."""
tag_size = _TagSize(field_number)
local_VarintSize = _VarintSize
assert not is_packed
if is_repeated:
def RepeatedFieldSize(value):
result = tag_size * len(value)
for element in value:
l = element.ByteSize()
result += local_VarintSize(l) + l
return result
return RepeatedFieldSize
else:
def FieldSize(value):
l = value.ByteSize()
return tag_size + local_VarintSize(l) + l
return FieldSize
# --------------------------------------------------------------------
# MessageSet is special: it needs custom logic to compute its size properly.
def MessageSetItemSizer(field_number):
"""Returns a sizer for extensions of MessageSet.
The message set message looks like this:
message MessageSet {
repeated group Item = 1 {
required int32 type_id = 2;
required string message = 3;
}
}
"""
static_size = (_TagSize(1) * 2 + _TagSize(2) + _VarintSize(field_number) +
_TagSize(3))
local_VarintSize = _VarintSize
def FieldSize(value):
l = value.ByteSize()
return static_size + local_VarintSize(l) + l
return FieldSize
# --------------------------------------------------------------------
# Map is special: it needs custom logic to compute its size properly.
def MapSizer(field_descriptor, is_message_map):
"""Returns a sizer for a map field."""
# Can't look at field_descriptor.message_type._concrete_class because it may
# not have been initialized yet.
message_type = field_descriptor.message_type
message_sizer = MessageSizer(field_descriptor.number, False, False)
def FieldSize(map_value):
total = 0
for key in map_value:
value = map_value[key]
      # It's wasteful to create these entry messages and throw them away a
      # moment later, since the actual encode will do the same thing. But there
      # is no obvious way to avoid this within the current design without a lot
      # of code duplication. For a message map, value.ByteSize() must be called
      # so that the submessage's cached size is up to date.
entry_msg = message_type._concrete_class(key=key, value=value)
total += message_sizer(entry_msg)
if is_message_map:
value.ByteSize()
return total
return FieldSize
# ====================================================================
# Encoders!
def _VarintEncoder():
"""Return an encoder for a basic varint value (does not include tag)."""
def EncodeVarint(write, value, unused_deterministic=None):
bits = value & 0x7f
value >>= 7
while value:
write(six.int2byte(0x80|bits))
bits = value & 0x7f
value >>= 7
return write(six.int2byte(bits))
return EncodeVarint
def _SignedVarintEncoder():
"""Return an encoder for a basic signed varint value (does not include
tag)."""
def EncodeSignedVarint(write, value, unused_deterministic=None):
if value < 0:
value += (1 << 64)
bits = value & 0x7f
value >>= 7
while value:
write(six.int2byte(0x80|bits))
bits = value & 0x7f
value >>= 7
return write(six.int2byte(bits))
return EncodeSignedVarint
_EncodeVarint = _VarintEncoder()
_EncodeSignedVarint = _SignedVarintEncoder()
def _VarintBytes(value):
"""Encode the given integer as a varint and return the bytes. This is only
called at startup time so it doesn't need to be fast."""
pieces = []
_EncodeVarint(pieces.append, value, True)
return b"".join(pieces)
def TagBytes(field_number, wire_type):
"""Encode the given tag and return the bytes. Only called at startup."""
return six.binary_type(
_VarintBytes(wire_format.PackTag(field_number, wire_type)))
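# Illustrative sketch (not part of the upstream file; the _example_* name is
# hypothetical): the startup-time helpers above produce raw bytes. 300 encodes
# as b'\xac\x02', and the tag for field 1 with the varint wire type is the
# single byte b'\x08'.
def _example_varint_and_tag_bytes():
  assert _VarintBytes(300) == b'\xac\x02'
  assert TagBytes(1, wire_format.WIRETYPE_VARINT) == b'\x08'
  return True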
# --------------------------------------------------------------------
# As with sizers (see above), we have a number of common encoder
# implementations.
def _SimpleEncoder(wire_type, encode_value, compute_value_size):
"""Return a constructor for an encoder for fields of a particular type.
Args:
wire_type: The field's wire type, for encoding tags.
encode_value: A function which encodes an individual value, e.g.
_EncodeVarint().
compute_value_size: A function which computes the size of an individual
value, e.g. _VarintSize().
"""
def SpecificEncoder(field_number, is_repeated, is_packed):
if is_packed:
tag_bytes = TagBytes(field_number, wire_format.WIRETYPE_LENGTH_DELIMITED)
local_EncodeVarint = _EncodeVarint
def EncodePackedField(write, value, deterministic):
write(tag_bytes)
size = 0
for element in value:
size += compute_value_size(element)
local_EncodeVarint(write, size, deterministic)
for element in value:
encode_value(write, element, deterministic)
return EncodePackedField
elif is_repeated:
tag_bytes = TagBytes(field_number, wire_type)
def EncodeRepeatedField(write, value, deterministic):
for element in value:
write(tag_bytes)
encode_value(write, element, deterministic)
return EncodeRepeatedField
else:
tag_bytes = TagBytes(field_number, wire_type)
def EncodeField(write, value, deterministic):
write(tag_bytes)
return encode_value(write, value, deterministic)
return EncodeField
return SpecificEncoder
def _ModifiedEncoder(wire_type, encode_value, compute_value_size, modify_value):
"""Like SimpleEncoder but additionally invokes modify_value on every value
before passing it to encode_value. Usually modify_value is ZigZagEncode."""
def SpecificEncoder(field_number, is_repeated, is_packed):
if is_packed:
tag_bytes = TagBytes(field_number, wire_format.WIRETYPE_LENGTH_DELIMITED)
local_EncodeVarint = _EncodeVarint
def EncodePackedField(write, value, deterministic):
write(tag_bytes)
size = 0
for element in value:
size += compute_value_size(modify_value(element))
local_EncodeVarint(write, size, deterministic)
for element in value:
encode_value(write, modify_value(element), deterministic)
return EncodePackedField
elif is_repeated:
tag_bytes = TagBytes(field_number, wire_type)
def EncodeRepeatedField(write, value, deterministic):
for element in value:
write(tag_bytes)
encode_value(write, modify_value(element), deterministic)
return EncodeRepeatedField
else:
tag_bytes = TagBytes(field_number, wire_type)
def EncodeField(write, value, deterministic):
write(tag_bytes)
return encode_value(write, modify_value(value), deterministic)
return EncodeField
return SpecificEncoder
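# Illustrative sketch (not part of the upstream file; the _example_* name is
# hypothetical): _ModifiedEncoder with wire_format.ZigZagEncode is exactly how
# the sint encoders below are built. ZigZagEncode(-2) is 3, so field 1 holding
# the sint32 value -2 serializes to b'\x08\x03'.
def _example_zigzag_encoder():
  import io
  sint32_encoder = _ModifiedEncoder(
      wire_format.WIRETYPE_VARINT, _EncodeVarint, _VarintSize,
      wire_format.ZigZagEncode)
  encode = sint32_encoder(1, is_repeated=False, is_packed=False)
  buf = io.BytesIO()
  encode(buf.write, -2, deterministic=False)
  assert buf.getvalue() == b'\x08\x03'
  return buf.getvalue()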
def _StructPackEncoder(wire_type, format):
"""Return a constructor for an encoder for a fixed-width field.
Args:
wire_type: The field's wire type, for encoding tags.
format: The format string to pass to struct.pack().
"""
value_size = struct.calcsize(format)
def SpecificEncoder(field_number, is_repeated, is_packed):
local_struct_pack = struct.pack
if is_packed:
tag_bytes = TagBytes(field_number, wire_format.WIRETYPE_LENGTH_DELIMITED)
local_EncodeVarint = _EncodeVarint
def EncodePackedField(write, value, deterministic):
write(tag_bytes)
local_EncodeVarint(write, len(value) * value_size, deterministic)
for element in value:
write(local_struct_pack(format, element))
return EncodePackedField
elif is_repeated:
tag_bytes = TagBytes(field_number, wire_type)
def EncodeRepeatedField(write, value, unused_deterministic=None):
for element in value:
write(tag_bytes)
write(local_struct_pack(format, element))
return EncodeRepeatedField
else:
tag_bytes = TagBytes(field_number, wire_type)
def EncodeField(write, value, unused_deterministic=None):
write(tag_bytes)
return write(local_struct_pack(format, value))
return EncodeField
return SpecificEncoder
def _FloatingPointEncoder(wire_type, format):
"""Return a constructor for an encoder for float fields.
This is like StructPackEncoder, but catches errors that may be due to
passing non-finite floating-point values to struct.pack, and makes a
second attempt to encode those values.
Args:
wire_type: The field's wire type, for encoding tags.
format: The format string to pass to struct.pack().
"""
value_size = struct.calcsize(format)
if value_size == 4:
def EncodeNonFiniteOrRaise(write, value):
# Remember that the serialized form uses little-endian byte order.
if value == _POS_INF:
write(b'\x00\x00\x80\x7F')
elif value == _NEG_INF:
write(b'\x00\x00\x80\xFF')
elif value != value: # NaN
write(b'\x00\x00\xC0\x7F')
else:
raise
elif value_size == 8:
def EncodeNonFiniteOrRaise(write, value):
if value == _POS_INF:
write(b'\x00\x00\x00\x00\x00\x00\xF0\x7F')
elif value == _NEG_INF:
write(b'\x00\x00\x00\x00\x00\x00\xF0\xFF')
elif value != value: # NaN
write(b'\x00\x00\x00\x00\x00\x00\xF8\x7F')
else:
raise
else:
raise ValueError('Can\'t encode floating-point values that are '
'%d bytes long (only 4 or 8)' % value_size)
def SpecificEncoder(field_number, is_repeated, is_packed):
local_struct_pack = struct.pack
if is_packed:
tag_bytes = TagBytes(field_number, wire_format.WIRETYPE_LENGTH_DELIMITED)
local_EncodeVarint = _EncodeVarint
def EncodePackedField(write, value, deterministic):
write(tag_bytes)
local_EncodeVarint(write, len(value) * value_size, deterministic)
for element in value:
# This try/except block is going to be faster than any code that
# we could write to check whether element is finite.
try:
write(local_struct_pack(format, element))
except SystemError:
EncodeNonFiniteOrRaise(write, element)
return EncodePackedField
elif is_repeated:
tag_bytes = TagBytes(field_number, wire_type)
def EncodeRepeatedField(write, value, unused_deterministic=None):
for element in value:
write(tag_bytes)
try:
write(local_struct_pack(format, element))
except SystemError:
EncodeNonFiniteOrRaise(write, element)
return EncodeRepeatedField
else:
tag_bytes = TagBytes(field_number, wire_type)
def EncodeField(write, value, unused_deterministic=None):
write(tag_bytes)
try:
write(local_struct_pack(format, value))
except SystemError:
EncodeNonFiniteOrRaise(write, value)
return EncodeField
return SpecificEncoder
# ====================================================================
# Here we declare an encoder constructor for each field type. These work
# very similarly to sizer constructors, described earlier.
Int32Encoder = Int64Encoder = EnumEncoder = _SimpleEncoder(
wire_format.WIRETYPE_VARINT, _EncodeSignedVarint, _SignedVarintSize)
UInt32Encoder = UInt64Encoder = _SimpleEncoder(
wire_format.WIRETYPE_VARINT, _EncodeVarint, _VarintSize)
SInt32Encoder = SInt64Encoder = _ModifiedEncoder(
wire_format.WIRETYPE_VARINT, _EncodeVarint, _VarintSize,
wire_format.ZigZagEncode)
# Note that Python conveniently guarantees that when using the '<' prefix on
# formats, they will also have the same size across all platforms (as opposed
# to without the prefix, where their sizes depend on the C compiler's basic
# type sizes).
Fixed32Encoder = _StructPackEncoder(wire_format.WIRETYPE_FIXED32, '<I')
Fixed64Encoder = _StructPackEncoder(wire_format.WIRETYPE_FIXED64, '<Q')
SFixed32Encoder = _StructPackEncoder(wire_format.WIRETYPE_FIXED32, '<i')
SFixed64Encoder = _StructPackEncoder(wire_format.WIRETYPE_FIXED64, '<q')
FloatEncoder = _FloatingPointEncoder(wire_format.WIRETYPE_FIXED32, '<f')
DoubleEncoder = _FloatingPointEncoder(wire_format.WIRETYPE_FIXED64, '<d')
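# Illustrative sketch (not part of the upstream file; the _example_* name is
# hypothetical): wiring a concrete encoder to a BytesIO buffer. Field 1
# holding the int32 value 150 serializes to the three bytes b'\x08\x96\x01'.
def _example_int32_encoding():
  import io
  encode = Int32Encoder(1, is_repeated=False, is_packed=False)
  buf = io.BytesIO()
  encode(buf.write, 150, deterministic=False)
  assert buf.getvalue() == b'\x08\x96\x01'
  return buf.getvalue()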
def BoolEncoder(field_number, is_repeated, is_packed):
"""Returns an encoder for a boolean field."""
false_byte = b'\x00'
true_byte = b'\x01'
if is_packed:
tag_bytes = TagBytes(field_number, wire_format.WIRETYPE_LENGTH_DELIMITED)
local_EncodeVarint = _EncodeVarint
def EncodePackedField(write, value, deterministic):
write(tag_bytes)
local_EncodeVarint(write, len(value), deterministic)
for element in value:
if element:
write(true_byte)
else:
write(false_byte)
return EncodePackedField
elif is_repeated:
tag_bytes = TagBytes(field_number, wire_format.WIRETYPE_VARINT)
def EncodeRepeatedField(write, value, unused_deterministic=None):
for element in value:
write(tag_bytes)
if element:
write(true_byte)
else:
write(false_byte)
return EncodeRepeatedField
else:
tag_bytes = TagBytes(field_number, wire_format.WIRETYPE_VARINT)
def EncodeField(write, value, unused_deterministic=None):
write(tag_bytes)
if value:
return write(true_byte)
return write(false_byte)
return EncodeField
def StringEncoder(field_number, is_repeated, is_packed):
"""Returns an encoder for a string field."""
tag = TagBytes(field_number, wire_format.WIRETYPE_LENGTH_DELIMITED)
local_EncodeVarint = _EncodeVarint
local_len = len
assert not is_packed
if is_repeated:
def EncodeRepeatedField(write, value, deterministic):
for element in value:
encoded = element.encode('utf-8')
write(tag)
local_EncodeVarint(write, local_len(encoded), deterministic)
write(encoded)
return EncodeRepeatedField
else:
def EncodeField(write, value, deterministic):
encoded = value.encode('utf-8')
write(tag)
local_EncodeVarint(write, local_len(encoded), deterministic)
return write(encoded)
return EncodeField
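# Illustrative sketch (not part of the upstream file; the _example_* name is
# hypothetical): a string field is written as tag, UTF-8 byte length, then the
# bytes. Field 2 holding "testing" serializes to b'\x12\x07testing'.
def _example_string_encoding():
  import io
  encode = StringEncoder(2, is_repeated=False, is_packed=False)
  buf = io.BytesIO()
  encode(buf.write, u'testing', deterministic=False)
  assert buf.getvalue() == b'\x12\x07testing'
  return buf.getvalue()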
def BytesEncoder(field_number, is_repeated, is_packed):
"""Returns an encoder for a bytes field."""
tag = TagBytes(field_number, wire_format.WIRETYPE_LENGTH_DELIMITED)
local_EncodeVarint = _EncodeVarint
local_len = len
assert not is_packed
if is_repeated:
def EncodeRepeatedField(write, value, deterministic):
for element in value:
write(tag)
local_EncodeVarint(write, local_len(element), deterministic)
write(element)
return EncodeRepeatedField
else:
def EncodeField(write, value, deterministic):
write(tag)
local_EncodeVarint(write, local_len(value), deterministic)
return write(value)
return EncodeField
def GroupEncoder(field_number, is_repeated, is_packed):
"""Returns an encoder for a group field."""
start_tag = TagBytes(field_number, wire_format.WIRETYPE_START_GROUP)
end_tag = TagBytes(field_number, wire_format.WIRETYPE_END_GROUP)
assert not is_packed
if is_repeated:
def EncodeRepeatedField(write, value, deterministic):
for element in value:
write(start_tag)
element._InternalSerialize(write, deterministic)
write(end_tag)
return EncodeRepeatedField
else:
def EncodeField(write, value, deterministic):
write(start_tag)
value._InternalSerialize(write, deterministic)
return write(end_tag)
return EncodeField
def MessageEncoder(field_number, is_repeated, is_packed):
"""Returns an encoder for a message field."""
tag = TagBytes(field_number, wire_format.WIRETYPE_LENGTH_DELIMITED)
local_EncodeVarint = _EncodeVarint
assert not is_packed
if is_repeated:
def EncodeRepeatedField(write, value, deterministic):
for element in value:
write(tag)
local_EncodeVarint(write, element.ByteSize(), deterministic)
element._InternalSerialize(write, deterministic)
return EncodeRepeatedField
else:
def EncodeField(write, value, deterministic):
write(tag)
local_EncodeVarint(write, value.ByteSize(), deterministic)
return value._InternalSerialize(write, deterministic)
return EncodeField
# --------------------------------------------------------------------
# As before, MessageSet is special.
def MessageSetItemEncoder(field_number):
"""Encoder for extensions of MessageSet.
The message set message looks like this:
message MessageSet {
repeated group Item = 1 {
required int32 type_id = 2;
required string message = 3;
}
}
"""
start_bytes = b"".join([
TagBytes(1, wire_format.WIRETYPE_START_GROUP),
TagBytes(2, wire_format.WIRETYPE_VARINT),
_VarintBytes(field_number),
TagBytes(3, wire_format.WIRETYPE_LENGTH_DELIMITED)])
end_bytes = TagBytes(1, wire_format.WIRETYPE_END_GROUP)
local_EncodeVarint = _EncodeVarint
def EncodeField(write, value, deterministic):
write(start_bytes)
local_EncodeVarint(write, value.ByteSize(), deterministic)
value._InternalSerialize(write, deterministic)
return write(end_bytes)
return EncodeField
# --------------------------------------------------------------------
# As before, Map is special.
def MapEncoder(field_descriptor):
"""Encoder for extensions of MessageSet.
Maps always have a wire format like this:
message MapEntry {
key_type key = 1;
value_type value = 2;
}
repeated MapEntry map = N;
"""
# Can't look at field_descriptor.message_type._concrete_class because it may
# not have been initialized yet.
message_type = field_descriptor.message_type
encode_message = MessageEncoder(field_descriptor.number, False, False)
def EncodeField(write, value, deterministic):
value_keys = sorted(value.keys()) if deterministic else value
for key in value_keys:
entry_msg = message_type._concrete_class(key=key, value=value[key])
encode_message(write, entry_msg, deterministic)
return EncodeField

@ -0,0 +1,103 @@
# Protocol Buffers - Google's data interchange format
# Copyright 2008 Google Inc. All rights reserved.
# https://developers.google.com/protocol-buffers/
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""A simple wrapper around enum types to expose utility functions.
Instances are created as properties with the same name as the enum they wrap
on proto classes. For usage, see:
reflection_test.py
"""
__author__ = 'rabsatt@google.com (Kevin Rabsatt)'
import six
class EnumTypeWrapper(object):
"""A utility for finding the names of enum values."""
DESCRIPTOR = None
def __init__(self, enum_type):
"""Inits EnumTypeWrapper with an EnumDescriptor."""
self._enum_type = enum_type
self.DESCRIPTOR = enum_type
def Name(self, number):
"""Returns a string containing the name of an enum value."""
if number in self._enum_type.values_by_number:
return self._enum_type.values_by_number[number].name
if not isinstance(number, six.integer_types):
raise TypeError('Enum value for %s must be an int, but got %r.' % (
self._enum_type.name, number))
else:
# %r here to handle the odd case when you pass in a boolean.
raise ValueError('Enum %s has no name defined for value %r' % (
self._enum_type.name, number))
def Value(self, name):
"""Returns the value corresponding to the given enum name."""
if name in self._enum_type.values_by_name:
return self._enum_type.values_by_name[name].number
raise ValueError('Enum %s has no value defined for name %s' % (
self._enum_type.name, name))
def keys(self):
"""Return a list of the string names in the enum.
These are returned in the order they were defined in the .proto file.
"""
return [value_descriptor.name
for value_descriptor in self._enum_type.values]
def values(self):
"""Return a list of the integer values in the enum.
These are returned in the order they were defined in the .proto file.
"""
return [value_descriptor.number
for value_descriptor in self._enum_type.values]
def items(self):
"""Return a list of the (name, value) pairs of the enum.
These are returned in the order they were defined in the .proto file.
"""
return [(value_descriptor.name, value_descriptor.number)
for value_descriptor in self._enum_type.values]
def __getattr__(self, name):
"""Returns the value corresponding to the given enum name."""
if name in self._enum_type.values_by_name:
return self._enum_type.values_by_name[name].number
raise AttributeError
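
# Example usage (illustrative sketch, not part of the upstream module): the
# wrapper is normally reached through a generated module such as
# factory_test1_pb2, which is added elsewhere in this change.
#
#   from google.protobuf.internal import factory_test1_pb2
#
#   enum = factory_test1_pb2.Factory1Enum       # an EnumTypeWrapper
#   enum.Name(0)                                # -> 'FACTORY_1_VALUE_0'
#   enum.Value('FACTORY_1_VALUE_1')             # -> 1
#   enum.keys()                                 # -> ['FACTORY_1_VALUE_0', 'FACTORY_1_VALUE_1']
#   enum.items()                                # -> [('FACTORY_1_VALUE_0', 0), ('FACTORY_1_VALUE_1', 1)]
#   enum.FACTORY_1_VALUE_1                      # attribute lookup via __getattr__ -> 1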

@ -0,0 +1,213 @@
# Protocol Buffers - Google's data interchange format
# Copyright 2008 Google Inc. All rights reserved.
# https://developers.google.com/protocol-buffers/
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Contains _ExtensionDict class to represent extensions.
"""
from google.protobuf.internal import type_checkers
from google.protobuf.descriptor import FieldDescriptor
def _VerifyExtensionHandle(message, extension_handle):
"""Verify that the given extension handle is valid."""
if not isinstance(extension_handle, FieldDescriptor):
raise KeyError('HasExtension() expects an extension handle, got: %s' %
extension_handle)
if not extension_handle.is_extension:
raise KeyError('"%s" is not an extension.' % extension_handle.full_name)
if not extension_handle.containing_type:
raise KeyError('"%s" is missing a containing_type.'
% extension_handle.full_name)
if extension_handle.containing_type is not message.DESCRIPTOR:
raise KeyError('Extension "%s" extends message type "%s", but this '
'message is of type "%s".' %
(extension_handle.full_name,
extension_handle.containing_type.full_name,
message.DESCRIPTOR.full_name))
# TODO(robinson): Unify error handling of "unknown extension" crap.
# TODO(robinson): Support iteritems()-style iteration over all
# extensions with the "has" bits turned on?
class _ExtensionDict(object):
"""Dict-like container for Extension fields on proto instances.
Note that in all cases we expect extension handles to be
FieldDescriptors.
"""
def __init__(self, extended_message):
"""
Args:
extended_message: Message instance for which we are the Extensions dict.
"""
self._extended_message = extended_message
def __getitem__(self, extension_handle):
"""Returns the current value of the given extension handle."""
_VerifyExtensionHandle(self._extended_message, extension_handle)
result = self._extended_message._fields.get(extension_handle)
if result is not None:
return result
if extension_handle.label == FieldDescriptor.LABEL_REPEATED:
result = extension_handle._default_constructor(self._extended_message)
elif extension_handle.cpp_type == FieldDescriptor.CPPTYPE_MESSAGE:
message_type = extension_handle.message_type
if not hasattr(message_type, '_concrete_class'):
# pylint: disable=protected-access
self._extended_message._FACTORY.GetPrototype(message_type)
assert getattr(extension_handle.message_type, '_concrete_class', None), (
'Uninitialized concrete class found for field %r (message type %r)'
% (extension_handle.full_name,
extension_handle.message_type.full_name))
result = extension_handle.message_type._concrete_class()
try:
result._SetListener(self._extended_message._listener_for_children)
except ReferenceError:
pass
else:
# Singular scalar -- just return the default without inserting into the
# dict.
return extension_handle.default_value
# Atomically check if another thread has preempted us and, if not, swap
# in the new object we just created. If someone has preempted us, we
# take that object and discard ours.
# WARNING: We are relying on setdefault() being atomic. This is true
# in CPython but we haven't investigated others. This warning appears
# in several other locations in this file.
result = self._extended_message._fields.setdefault(
extension_handle, result)
return result
def __eq__(self, other):
if not isinstance(other, self.__class__):
return False
my_fields = self._extended_message.ListFields()
other_fields = other._extended_message.ListFields()
# Get rid of non-extension fields.
my_fields = [field for field in my_fields if field.is_extension]
other_fields = [field for field in other_fields if field.is_extension]
return my_fields == other_fields
def __ne__(self, other):
return not self == other
def __len__(self):
fields = self._extended_message.ListFields()
# Get rid of non-extension fields.
extension_fields = [field for field in fields if field[0].is_extension]
return len(extension_fields)
def __hash__(self):
raise TypeError('unhashable object')
# Note that this is only meaningful for non-repeated, scalar extension
# fields. Note also that we may have to call _Modified() when we do
# successfully set a field this way, to set any necessary "has" bits in the
# ancestors of the extended message.
def __setitem__(self, extension_handle, value):
"""If extension_handle specifies a non-repeated, scalar extension
field, sets the value of that field.
"""
_VerifyExtensionHandle(self._extended_message, extension_handle)
if (extension_handle.label == FieldDescriptor.LABEL_REPEATED or
extension_handle.cpp_type == FieldDescriptor.CPPTYPE_MESSAGE):
raise TypeError(
'Cannot assign to extension "%s" because it is a repeated or '
'composite type.' % extension_handle.full_name)
# It's slightly wasteful to lookup the type checker each time,
# but we expect this to be a vanishingly uncommon case anyway.
type_checker = type_checkers.GetTypeChecker(extension_handle)
# pylint: disable=protected-access
self._extended_message._fields[extension_handle] = (
type_checker.CheckValue(value))
self._extended_message._Modified()
def __delitem__(self, extension_handle):
self._extended_message.ClearExtension(extension_handle)
def _FindExtensionByName(self, name):
"""Tries to find a known extension with the specified name.
Args:
name: Extension full name.
Returns:
Extension field descriptor.
"""
return self._extended_message._extensions_by_name.get(name, None)
def _FindExtensionByNumber(self, number):
"""Tries to find a known extension with the field number.
Args:
number: Extension field number.
Returns:
Extension field descriptor.
"""
return self._extended_message._extensions_by_number.get(number, None)
def __iter__(self):
# Return a generator over the populated extension fields
return (f[0] for f in self._extended_message.ListFields()
if f[0].is_extension)
def __contains__(self, extension_handle):
_VerifyExtensionHandle(self._extended_message, extension_handle)
if extension_handle not in self._extended_message._fields:
return False
if extension_handle.label == FieldDescriptor.LABEL_REPEATED:
return bool(self._extended_message._fields.get(extension_handle))
if extension_handle.cpp_type == FieldDescriptor.CPPTYPE_MESSAGE:
value = self._extended_message._fields.get(extension_handle)
# pylint: disable=protected-access
return value is not None and value._is_present_in_parent
return True
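
# Example usage (illustrative sketch, not upstream code): a generated message's
# Extensions property is an _ExtensionDict keyed by extension FieldDescriptors.
# The names below come from the factory_test*_pb2 modules added elsewhere in
# this change.
#
#   from google.protobuf.internal import factory_test1_pb2, factory_test2_pb2
#
#   msg = factory_test1_pb2.Factory1Message()
#   handle = factory_test2_pb2.another_field    # string extension on Factory1Message
#   msg.Extensions[handle] = 'hello'            # __setitem__ (scalar, non-repeated)
#   assert handle in msg.Extensions             # __contains__
#   assert msg.Extensions[handle] == 'hello'    # __getitem__
#   del msg.Extensions[handle]                  # __delitem__ -> ClearExtension()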

@ -0,0 +1,304 @@
# -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: google/protobuf/internal/factory_test1.proto
from google.protobuf.internal import enum_type_wrapper
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
DESCRIPTOR = _descriptor.FileDescriptor(
name='google/protobuf/internal/factory_test1.proto',
package='google.protobuf.python.internal',
syntax='proto2',
serialized_options=None,
create_key=_descriptor._internal_create_key,
serialized_pb=b'\n,google/protobuf/internal/factory_test1.proto\x12\x1fgoogle.protobuf.python.internal\"\xd5\x03\n\x0f\x46\x61\x63tory1Message\x12\x45\n\x0e\x66\x61\x63tory_1_enum\x18\x01 \x01(\x0e\x32-.google.protobuf.python.internal.Factory1Enum\x12\x62\n\x15nested_factory_1_enum\x18\x02 \x01(\x0e\x32\x43.google.protobuf.python.internal.Factory1Message.NestedFactory1Enum\x12h\n\x18nested_factory_1_message\x18\x03 \x01(\x0b\x32\x46.google.protobuf.python.internal.Factory1Message.NestedFactory1Message\x12\x14\n\x0cscalar_value\x18\x04 \x01(\x05\x12\x12\n\nlist_value\x18\x05 \x03(\t\x1a&\n\x15NestedFactory1Message\x12\r\n\x05value\x18\x01 \x01(\t\"P\n\x12NestedFactory1Enum\x12\x1c\n\x18NESTED_FACTORY_1_VALUE_0\x10\x00\x12\x1c\n\x18NESTED_FACTORY_1_VALUE_1\x10\x01*\t\x08\xe8\x07\x10\x80\x80\x80\x80\x02\")\n\x15\x46\x61\x63tory1MethodRequest\x12\x10\n\x08\x61rgument\x18\x01 \x01(\t\"(\n\x16\x46\x61\x63tory1MethodResponse\x12\x0e\n\x06result\x18\x01 \x01(\t*<\n\x0c\x46\x61\x63tory1Enum\x12\x15\n\x11\x46\x41\x43TORY_1_VALUE_0\x10\x00\x12\x15\n\x11\x46\x41\x43TORY_1_VALUE_1\x10\x01\x32\x97\x01\n\x0f\x46\x61\x63tory1Service\x12\x83\x01\n\x0e\x46\x61\x63tory1Method\x12\x36.google.protobuf.python.internal.Factory1MethodRequest\x1a\x37.google.protobuf.python.internal.Factory1MethodResponse\"\x00'
)
_FACTORY1ENUM = _descriptor.EnumDescriptor(
name='Factory1Enum',
full_name='google.protobuf.python.internal.Factory1Enum',
filename=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key,
values=[
_descriptor.EnumValueDescriptor(
name='FACTORY_1_VALUE_0', index=0, number=0,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='FACTORY_1_VALUE_1', index=1, number=1,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
],
containing_type=None,
serialized_options=None,
serialized_start=638,
serialized_end=698,
)
_sym_db.RegisterEnumDescriptor(_FACTORY1ENUM)
Factory1Enum = enum_type_wrapper.EnumTypeWrapper(_FACTORY1ENUM)
FACTORY_1_VALUE_0 = 0
FACTORY_1_VALUE_1 = 1
_FACTORY1MESSAGE_NESTEDFACTORY1ENUM = _descriptor.EnumDescriptor(
name='NestedFactory1Enum',
full_name='google.protobuf.python.internal.Factory1Message.NestedFactory1Enum',
filename=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key,
values=[
_descriptor.EnumValueDescriptor(
name='NESTED_FACTORY_1_VALUE_0', index=0, number=0,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='NESTED_FACTORY_1_VALUE_1', index=1, number=1,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
],
containing_type=None,
serialized_options=None,
serialized_start=460,
serialized_end=540,
)
_sym_db.RegisterEnumDescriptor(_FACTORY1MESSAGE_NESTEDFACTORY1ENUM)
_FACTORY1MESSAGE_NESTEDFACTORY1MESSAGE = _descriptor.Descriptor(
name='NestedFactory1Message',
full_name='google.protobuf.python.internal.Factory1Message.NestedFactory1Message',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='value', full_name='google.protobuf.python.internal.Factory1Message.NestedFactory1Message.value', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=420,
serialized_end=458,
)
_FACTORY1MESSAGE = _descriptor.Descriptor(
name='Factory1Message',
full_name='google.protobuf.python.internal.Factory1Message',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='factory_1_enum', full_name='google.protobuf.python.internal.Factory1Message.factory_1_enum', index=0,
number=1, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='nested_factory_1_enum', full_name='google.protobuf.python.internal.Factory1Message.nested_factory_1_enum', index=1,
number=2, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='nested_factory_1_message', full_name='google.protobuf.python.internal.Factory1Message.nested_factory_1_message', index=2,
number=3, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='scalar_value', full_name='google.protobuf.python.internal.Factory1Message.scalar_value', index=3,
number=4, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='list_value', full_name='google.protobuf.python.internal.Factory1Message.list_value', index=4,
number=5, type=9, cpp_type=9, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[_FACTORY1MESSAGE_NESTEDFACTORY1MESSAGE, ],
enum_types=[
_FACTORY1MESSAGE_NESTEDFACTORY1ENUM,
],
serialized_options=None,
is_extendable=True,
syntax='proto2',
extension_ranges=[(1000, 536870912), ],
oneofs=[
],
serialized_start=82,
serialized_end=551,
)
_FACTORY1METHODREQUEST = _descriptor.Descriptor(
name='Factory1MethodRequest',
full_name='google.protobuf.python.internal.Factory1MethodRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='argument', full_name='google.protobuf.python.internal.Factory1MethodRequest.argument', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=553,
serialized_end=594,
)
_FACTORY1METHODRESPONSE = _descriptor.Descriptor(
name='Factory1MethodResponse',
full_name='google.protobuf.python.internal.Factory1MethodResponse',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='result', full_name='google.protobuf.python.internal.Factory1MethodResponse.result', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=596,
serialized_end=636,
)
_FACTORY1MESSAGE_NESTEDFACTORY1MESSAGE.containing_type = _FACTORY1MESSAGE
_FACTORY1MESSAGE.fields_by_name['factory_1_enum'].enum_type = _FACTORY1ENUM
_FACTORY1MESSAGE.fields_by_name['nested_factory_1_enum'].enum_type = _FACTORY1MESSAGE_NESTEDFACTORY1ENUM
_FACTORY1MESSAGE.fields_by_name['nested_factory_1_message'].message_type = _FACTORY1MESSAGE_NESTEDFACTORY1MESSAGE
_FACTORY1MESSAGE_NESTEDFACTORY1ENUM.containing_type = _FACTORY1MESSAGE
DESCRIPTOR.message_types_by_name['Factory1Message'] = _FACTORY1MESSAGE
DESCRIPTOR.message_types_by_name['Factory1MethodRequest'] = _FACTORY1METHODREQUEST
DESCRIPTOR.message_types_by_name['Factory1MethodResponse'] = _FACTORY1METHODRESPONSE
DESCRIPTOR.enum_types_by_name['Factory1Enum'] = _FACTORY1ENUM
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
Factory1Message = _reflection.GeneratedProtocolMessageType('Factory1Message', (_message.Message,), {
'NestedFactory1Message' : _reflection.GeneratedProtocolMessageType('NestedFactory1Message', (_message.Message,), {
'DESCRIPTOR' : _FACTORY1MESSAGE_NESTEDFACTORY1MESSAGE,
'__module__' : 'google.protobuf.internal.factory_test1_pb2'
# @@protoc_insertion_point(class_scope:google.protobuf.python.internal.Factory1Message.NestedFactory1Message)
})
,
'DESCRIPTOR' : _FACTORY1MESSAGE,
'__module__' : 'google.protobuf.internal.factory_test1_pb2'
# @@protoc_insertion_point(class_scope:google.protobuf.python.internal.Factory1Message)
})
_sym_db.RegisterMessage(Factory1Message)
_sym_db.RegisterMessage(Factory1Message.NestedFactory1Message)
Factory1MethodRequest = _reflection.GeneratedProtocolMessageType('Factory1MethodRequest', (_message.Message,), {
'DESCRIPTOR' : _FACTORY1METHODREQUEST,
'__module__' : 'google.protobuf.internal.factory_test1_pb2'
# @@protoc_insertion_point(class_scope:google.protobuf.python.internal.Factory1MethodRequest)
})
_sym_db.RegisterMessage(Factory1MethodRequest)
Factory1MethodResponse = _reflection.GeneratedProtocolMessageType('Factory1MethodResponse', (_message.Message,), {
'DESCRIPTOR' : _FACTORY1METHODRESPONSE,
'__module__' : 'google.protobuf.internal.factory_test1_pb2'
# @@protoc_insertion_point(class_scope:google.protobuf.python.internal.Factory1MethodResponse)
})
_sym_db.RegisterMessage(Factory1MethodResponse)
_FACTORY1SERVICE = _descriptor.ServiceDescriptor(
name='Factory1Service',
full_name='google.protobuf.python.internal.Factory1Service',
file=DESCRIPTOR,
index=0,
serialized_options=None,
create_key=_descriptor._internal_create_key,
serialized_start=701,
serialized_end=852,
methods=[
_descriptor.MethodDescriptor(
name='Factory1Method',
full_name='google.protobuf.python.internal.Factory1Service.Factory1Method',
index=0,
containing_service=None,
input_type=_FACTORY1METHODREQUEST,
output_type=_FACTORY1METHODRESPONSE,
serialized_options=None,
create_key=_descriptor._internal_create_key,
),
])
_sym_db.RegisterServiceDescriptor(_FACTORY1SERVICE)
DESCRIPTOR.services_by_name['Factory1Service'] = _FACTORY1SERVICE
# @@protoc_insertion_point(module_scope)

@ -0,0 +1,533 @@
# -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: google/protobuf/internal/factory_test2.proto
from google.protobuf.internal import enum_type_wrapper
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
from google.protobuf.internal import factory_test1_pb2 as google_dot_protobuf_dot_internal_dot_factory__test1__pb2
DESCRIPTOR = _descriptor.FileDescriptor(
name='google/protobuf/internal/factory_test2.proto',
package='google.protobuf.python.internal',
syntax='proto2',
serialized_options=None,
create_key=_descriptor._internal_create_key,
serialized_pb=b'\n,google/protobuf/internal/factory_test2.proto\x12\x1fgoogle.protobuf.python.internal\x1a,google/protobuf/internal/factory_test1.proto\"\xd8\x0b\n\x0f\x46\x61\x63tory2Message\x12\x11\n\tmandatory\x18\x01 \x02(\x05\x12\x45\n\x0e\x66\x61\x63tory_2_enum\x18\x02 \x01(\x0e\x32-.google.protobuf.python.internal.Factory2Enum\x12\x62\n\x15nested_factory_2_enum\x18\x03 \x01(\x0e\x32\x43.google.protobuf.python.internal.Factory2Message.NestedFactory2Enum\x12h\n\x18nested_factory_2_message\x18\x04 \x01(\x0b\x32\x46.google.protobuf.python.internal.Factory2Message.NestedFactory2Message\x12K\n\x11\x66\x61\x63tory_1_message\x18\x05 \x01(\x0b\x32\x30.google.protobuf.python.internal.Factory1Message\x12\x45\n\x0e\x66\x61\x63tory_1_enum\x18\x06 \x01(\x0e\x32-.google.protobuf.python.internal.Factory1Enum\x12\x62\n\x15nested_factory_1_enum\x18\x07 \x01(\x0e\x32\x43.google.protobuf.python.internal.Factory1Message.NestedFactory1Enum\x12h\n\x18nested_factory_1_message\x18\x08 \x01(\x0b\x32\x46.google.protobuf.python.internal.Factory1Message.NestedFactory1Message\x12J\n\x10\x63ircular_message\x18\t \x01(\x0b\x32\x30.google.protobuf.python.internal.Factory2Message\x12\x14\n\x0cscalar_value\x18\n \x01(\t\x12\x12\n\nlist_value\x18\x0b \x03(\t\x12I\n\x07grouped\x18\x0c \x03(\n28.google.protobuf.python.internal.Factory2Message.Grouped\x12:\n\x04loop\x18\x0f \x01(\x0b\x32,.google.protobuf.python.internal.LoopMessage\x12\x1e\n\x10int_with_default\x18\x10 \x01(\x05:\x04\x31\x37\x37\x36\x12!\n\x13\x64ouble_with_default\x18\x11 \x01(\x01:\x04\x39.99\x12(\n\x13string_with_default\x18\x12 \x01(\t:\x0bhello world\x12 \n\x11\x62ool_with_default\x18\x13 \x01(\x08:\x05\x66\x61lse\x12[\n\x11\x65num_with_default\x18\x14 \x01(\x0e\x32-.google.protobuf.python.internal.Factory2Enum:\x11\x46\x41\x43TORY_2_VALUE_1\x12&\n\x12\x62ytes_with_default\x18\x15 \x01(\x0c:\na\\373\\000c\x12\x13\n\toneof_int\x18\x16 \x01(\x05H\x00\x12\x16\n\x0coneof_string\x18\x17 \x01(\tH\x00\x1a&\n\x15NestedFactory2Message\x12\r\n\x05value\x18\x01 \x01(\t\x1a)\n\x07Grouped\x12\x0e\n\x06part_1\x18\r \x01(\t\x12\x0e\n\x06part_2\x18\x0e \x01(\t\"P\n\x12NestedFactory2Enum\x12\x1c\n\x18NESTED_FACTORY_2_VALUE_0\x10\x00\x12\x1c\n\x18NESTED_FACTORY_2_VALUE_1\x10\x01\x32I\n\x0eone_more_field\x12\x30.google.protobuf.python.internal.Factory1Message\x18\xe9\x07 \x01(\tB\r\n\x0boneof_field\"M\n\x0bLoopMessage\x12>\n\x04loop\x18\x01 \x01(\x0b\x32\x30.google.protobuf.python.internal.Factory2Message\"D\n\x19MessageWithNestedEnumOnly\"\'\n\nNestedEnum\x12\x19\n\x15NESTED_MESSAGE_ENUM_0\x10\x00\"\'\n\x11MessageWithOption\x12\x0e\n\x06\x66ield1\x18\x01 \x01(\x05:\x02\x10\x01*<\n\x0c\x46\x61\x63tory2Enum\x12\x15\n\x11\x46\x41\x43TORY_2_VALUE_0\x10\x00\x12\x15\n\x11\x46\x41\x43TORY_2_VALUE_1\x10\x01:H\n\ranother_field\x12\x30.google.protobuf.python.internal.Factory1Message\x18\xea\x07 \x01(\t'
,
dependencies=[google_dot_protobuf_dot_internal_dot_factory__test1__pb2.DESCRIPTOR,])
_FACTORY2ENUM = _descriptor.EnumDescriptor(
name='Factory2Enum',
full_name='google.protobuf.python.internal.Factory2Enum',
filename=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key,
values=[
_descriptor.EnumValueDescriptor(
name='FACTORY_2_VALUE_0', index=0, number=0,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='FACTORY_2_VALUE_1', index=1, number=1,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
],
containing_type=None,
serialized_options=None,
serialized_start=1816,
serialized_end=1876,
)
_sym_db.RegisterEnumDescriptor(_FACTORY2ENUM)
Factory2Enum = enum_type_wrapper.EnumTypeWrapper(_FACTORY2ENUM)
FACTORY_2_VALUE_0 = 0
FACTORY_2_VALUE_1 = 1
ANOTHER_FIELD_FIELD_NUMBER = 1002
another_field = _descriptor.FieldDescriptor(
name='another_field', full_name='google.protobuf.python.internal.another_field', index=0,
number=1002, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=True, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key)
_FACTORY2MESSAGE_NESTEDFACTORY2ENUM = _descriptor.EnumDescriptor(
name='NestedFactory2Enum',
full_name='google.protobuf.python.internal.Factory2Message.NestedFactory2Enum',
filename=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key,
values=[
_descriptor.EnumValueDescriptor(
name='NESTED_FACTORY_2_VALUE_0', index=0, number=0,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='NESTED_FACTORY_2_VALUE_1', index=1, number=1,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
],
containing_type=None,
serialized_options=None,
serialized_start=1454,
serialized_end=1534,
)
_sym_db.RegisterEnumDescriptor(_FACTORY2MESSAGE_NESTEDFACTORY2ENUM)
_MESSAGEWITHNESTEDENUMONLY_NESTEDENUM = _descriptor.EnumDescriptor(
name='NestedEnum',
full_name='google.protobuf.python.internal.MessageWithNestedEnumOnly.NestedEnum',
filename=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key,
values=[
_descriptor.EnumValueDescriptor(
name='NESTED_MESSAGE_ENUM_0', index=0, number=0,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
],
containing_type=None,
serialized_options=None,
serialized_start=1734,
serialized_end=1773,
)
_sym_db.RegisterEnumDescriptor(_MESSAGEWITHNESTEDENUMONLY_NESTEDENUM)
_FACTORY2MESSAGE_NESTEDFACTORY2MESSAGE = _descriptor.Descriptor(
name='NestedFactory2Message',
full_name='google.protobuf.python.internal.Factory2Message.NestedFactory2Message',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='value', full_name='google.protobuf.python.internal.Factory2Message.NestedFactory2Message.value', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=1371,
serialized_end=1409,
)
_FACTORY2MESSAGE_GROUPED = _descriptor.Descriptor(
name='Grouped',
full_name='google.protobuf.python.internal.Factory2Message.Grouped',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='part_1', full_name='google.protobuf.python.internal.Factory2Message.Grouped.part_1', index=0,
number=13, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='part_2', full_name='google.protobuf.python.internal.Factory2Message.Grouped.part_2', index=1,
number=14, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=1411,
serialized_end=1452,
)
_FACTORY2MESSAGE = _descriptor.Descriptor(
name='Factory2Message',
full_name='google.protobuf.python.internal.Factory2Message',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='mandatory', full_name='google.protobuf.python.internal.Factory2Message.mandatory', index=0,
number=1, type=5, cpp_type=1, label=2,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='factory_2_enum', full_name='google.protobuf.python.internal.Factory2Message.factory_2_enum', index=1,
number=2, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='nested_factory_2_enum', full_name='google.protobuf.python.internal.Factory2Message.nested_factory_2_enum', index=2,
number=3, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='nested_factory_2_message', full_name='google.protobuf.python.internal.Factory2Message.nested_factory_2_message', index=3,
number=4, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='factory_1_message', full_name='google.protobuf.python.internal.Factory2Message.factory_1_message', index=4,
number=5, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='factory_1_enum', full_name='google.protobuf.python.internal.Factory2Message.factory_1_enum', index=5,
number=6, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='nested_factory_1_enum', full_name='google.protobuf.python.internal.Factory2Message.nested_factory_1_enum', index=6,
number=7, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='nested_factory_1_message', full_name='google.protobuf.python.internal.Factory2Message.nested_factory_1_message', index=7,
number=8, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='circular_message', full_name='google.protobuf.python.internal.Factory2Message.circular_message', index=8,
number=9, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='scalar_value', full_name='google.protobuf.python.internal.Factory2Message.scalar_value', index=9,
number=10, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='list_value', full_name='google.protobuf.python.internal.Factory2Message.list_value', index=10,
number=11, type=9, cpp_type=9, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='grouped', full_name='google.protobuf.python.internal.Factory2Message.grouped', index=11,
number=12, type=10, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='loop', full_name='google.protobuf.python.internal.Factory2Message.loop', index=12,
number=15, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='int_with_default', full_name='google.protobuf.python.internal.Factory2Message.int_with_default', index=13,
number=16, type=5, cpp_type=1, label=1,
has_default_value=True, default_value=1776,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='double_with_default', full_name='google.protobuf.python.internal.Factory2Message.double_with_default', index=14,
number=17, type=1, cpp_type=5, label=1,
has_default_value=True, default_value=float(9.99),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='string_with_default', full_name='google.protobuf.python.internal.Factory2Message.string_with_default', index=15,
number=18, type=9, cpp_type=9, label=1,
has_default_value=True, default_value=b"hello world".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='bool_with_default', full_name='google.protobuf.python.internal.Factory2Message.bool_with_default', index=16,
number=19, type=8, cpp_type=7, label=1,
has_default_value=True, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='enum_with_default', full_name='google.protobuf.python.internal.Factory2Message.enum_with_default', index=17,
number=20, type=14, cpp_type=8, label=1,
has_default_value=True, default_value=1,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='bytes_with_default', full_name='google.protobuf.python.internal.Factory2Message.bytes_with_default', index=18,
number=21, type=12, cpp_type=9, label=1,
has_default_value=True, default_value=b"a\373\000c",
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='oneof_int', full_name='google.protobuf.python.internal.Factory2Message.oneof_int', index=19,
number=22, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='oneof_string', full_name='google.protobuf.python.internal.Factory2Message.oneof_string', index=20,
number=23, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
_descriptor.FieldDescriptor(
name='one_more_field', full_name='google.protobuf.python.internal.Factory2Message.one_more_field', index=0,
number=1001, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=True, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
nested_types=[_FACTORY2MESSAGE_NESTEDFACTORY2MESSAGE, _FACTORY2MESSAGE_GROUPED, ],
enum_types=[
_FACTORY2MESSAGE_NESTEDFACTORY2ENUM,
],
serialized_options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
_descriptor.OneofDescriptor(
name='oneof_field', full_name='google.protobuf.python.internal.Factory2Message.oneof_field',
index=0, containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[]),
],
serialized_start=128,
serialized_end=1624,
)
_LOOPMESSAGE = _descriptor.Descriptor(
name='LoopMessage',
full_name='google.protobuf.python.internal.LoopMessage',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='loop', full_name='google.protobuf.python.internal.LoopMessage.loop', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=1626,
serialized_end=1703,
)
_MESSAGEWITHNESTEDENUMONLY = _descriptor.Descriptor(
name='MessageWithNestedEnumOnly',
full_name='google.protobuf.python.internal.MessageWithNestedEnumOnly',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
],
extensions=[
],
nested_types=[],
enum_types=[
_MESSAGEWITHNESTEDENUMONLY_NESTEDENUM,
],
serialized_options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=1705,
serialized_end=1773,
)
_MESSAGEWITHOPTION = _descriptor.Descriptor(
name='MessageWithOption',
full_name='google.protobuf.python.internal.MessageWithOption',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='field1', full_name='google.protobuf.python.internal.MessageWithOption.field1', index=0,
number=1, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=b'\020\001',
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=1775,
serialized_end=1814,
)
_FACTORY2MESSAGE_NESTEDFACTORY2MESSAGE.containing_type = _FACTORY2MESSAGE
_FACTORY2MESSAGE_GROUPED.containing_type = _FACTORY2MESSAGE
_FACTORY2MESSAGE.fields_by_name['factory_2_enum'].enum_type = _FACTORY2ENUM
_FACTORY2MESSAGE.fields_by_name['nested_factory_2_enum'].enum_type = _FACTORY2MESSAGE_NESTEDFACTORY2ENUM
_FACTORY2MESSAGE.fields_by_name['nested_factory_2_message'].message_type = _FACTORY2MESSAGE_NESTEDFACTORY2MESSAGE
_FACTORY2MESSAGE.fields_by_name['factory_1_message'].message_type = google_dot_protobuf_dot_internal_dot_factory__test1__pb2._FACTORY1MESSAGE
_FACTORY2MESSAGE.fields_by_name['factory_1_enum'].enum_type = google_dot_protobuf_dot_internal_dot_factory__test1__pb2._FACTORY1ENUM
_FACTORY2MESSAGE.fields_by_name['nested_factory_1_enum'].enum_type = google_dot_protobuf_dot_internal_dot_factory__test1__pb2._FACTORY1MESSAGE_NESTEDFACTORY1ENUM
_FACTORY2MESSAGE.fields_by_name['nested_factory_1_message'].message_type = google_dot_protobuf_dot_internal_dot_factory__test1__pb2._FACTORY1MESSAGE_NESTEDFACTORY1MESSAGE
_FACTORY2MESSAGE.fields_by_name['circular_message'].message_type = _FACTORY2MESSAGE
_FACTORY2MESSAGE.fields_by_name['grouped'].message_type = _FACTORY2MESSAGE_GROUPED
_FACTORY2MESSAGE.fields_by_name['loop'].message_type = _LOOPMESSAGE
_FACTORY2MESSAGE.fields_by_name['enum_with_default'].enum_type = _FACTORY2ENUM
_FACTORY2MESSAGE_NESTEDFACTORY2ENUM.containing_type = _FACTORY2MESSAGE
_FACTORY2MESSAGE.oneofs_by_name['oneof_field'].fields.append(
_FACTORY2MESSAGE.fields_by_name['oneof_int'])
_FACTORY2MESSAGE.fields_by_name['oneof_int'].containing_oneof = _FACTORY2MESSAGE.oneofs_by_name['oneof_field']
_FACTORY2MESSAGE.oneofs_by_name['oneof_field'].fields.append(
_FACTORY2MESSAGE.fields_by_name['oneof_string'])
_FACTORY2MESSAGE.fields_by_name['oneof_string'].containing_oneof = _FACTORY2MESSAGE.oneofs_by_name['oneof_field']
_LOOPMESSAGE.fields_by_name['loop'].message_type = _FACTORY2MESSAGE
_MESSAGEWITHNESTEDENUMONLY_NESTEDENUM.containing_type = _MESSAGEWITHNESTEDENUMONLY
DESCRIPTOR.message_types_by_name['Factory2Message'] = _FACTORY2MESSAGE
DESCRIPTOR.message_types_by_name['LoopMessage'] = _LOOPMESSAGE
DESCRIPTOR.message_types_by_name['MessageWithNestedEnumOnly'] = _MESSAGEWITHNESTEDENUMONLY
DESCRIPTOR.message_types_by_name['MessageWithOption'] = _MESSAGEWITHOPTION
DESCRIPTOR.enum_types_by_name['Factory2Enum'] = _FACTORY2ENUM
DESCRIPTOR.extensions_by_name['another_field'] = another_field
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
Factory2Message = _reflection.GeneratedProtocolMessageType('Factory2Message', (_message.Message,), {
'NestedFactory2Message' : _reflection.GeneratedProtocolMessageType('NestedFactory2Message', (_message.Message,), {
'DESCRIPTOR' : _FACTORY2MESSAGE_NESTEDFACTORY2MESSAGE,
'__module__' : 'google.protobuf.internal.factory_test2_pb2'
# @@protoc_insertion_point(class_scope:google.protobuf.python.internal.Factory2Message.NestedFactory2Message)
})
,
'Grouped' : _reflection.GeneratedProtocolMessageType('Grouped', (_message.Message,), {
'DESCRIPTOR' : _FACTORY2MESSAGE_GROUPED,
'__module__' : 'google.protobuf.internal.factory_test2_pb2'
# @@protoc_insertion_point(class_scope:google.protobuf.python.internal.Factory2Message.Grouped)
})
,
'DESCRIPTOR' : _FACTORY2MESSAGE,
'__module__' : 'google.protobuf.internal.factory_test2_pb2'
# @@protoc_insertion_point(class_scope:google.protobuf.python.internal.Factory2Message)
})
_sym_db.RegisterMessage(Factory2Message)
_sym_db.RegisterMessage(Factory2Message.NestedFactory2Message)
_sym_db.RegisterMessage(Factory2Message.Grouped)
LoopMessage = _reflection.GeneratedProtocolMessageType('LoopMessage', (_message.Message,), {
'DESCRIPTOR' : _LOOPMESSAGE,
'__module__' : 'google.protobuf.internal.factory_test2_pb2'
# @@protoc_insertion_point(class_scope:google.protobuf.python.internal.LoopMessage)
})
_sym_db.RegisterMessage(LoopMessage)
MessageWithNestedEnumOnly = _reflection.GeneratedProtocolMessageType('MessageWithNestedEnumOnly', (_message.Message,), {
'DESCRIPTOR' : _MESSAGEWITHNESTEDENUMONLY,
'__module__' : 'google.protobuf.internal.factory_test2_pb2'
# @@protoc_insertion_point(class_scope:google.protobuf.python.internal.MessageWithNestedEnumOnly)
})
_sym_db.RegisterMessage(MessageWithNestedEnumOnly)
MessageWithOption = _reflection.GeneratedProtocolMessageType('MessageWithOption', (_message.Message,), {
'DESCRIPTOR' : _MESSAGEWITHOPTION,
'__module__' : 'google.protobuf.internal.factory_test2_pb2'
# @@protoc_insertion_point(class_scope:google.protobuf.python.internal.MessageWithOption)
})
_sym_db.RegisterMessage(MessageWithOption)
google_dot_protobuf_dot_internal_dot_factory__test1__pb2.Factory1Message.RegisterExtension(another_field)
google_dot_protobuf_dot_internal_dot_factory__test1__pb2.Factory1Message.RegisterExtension(_FACTORY2MESSAGE.extensions_by_name['one_more_field'])
_MESSAGEWITHOPTION._options = None
# @@protoc_insertion_point(module_scope)
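# Example (illustrative sketch): the proto2 defaults declared in the
# Factory2Message descriptor above surface on a fresh instance, e.g.
#
#   msg = Factory2Message()
#   msg.int_with_default        # -> 1776
#   msg.double_with_default     # -> 9.99
#   msg.string_with_default     # -> 'hello world'
#   msg.bool_with_default       # -> False
#   msg.enum_with_default       # -> FACTORY_2_VALUE_1 (== 1)
#   msg.bytes_with_default      # -> b'a\xfb\x00c'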

@ -0,0 +1,83 @@
# -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: google/protobuf/internal/file_options_test.proto
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
from google.protobuf import descriptor_pb2 as google_dot_protobuf_dot_descriptor__pb2
DESCRIPTOR = _descriptor.FileDescriptor(
name='google/protobuf/internal/file_options_test.proto',
package='google.protobuf.python.internal',
syntax='proto2',
serialized_options=None,
create_key=_descriptor._internal_create_key,
serialized_pb=b'\n0google/protobuf/internal/file_options_test.proto\x12\x1fgoogle.protobuf.python.internal\x1a google/protobuf/descriptor.proto\"\x1e\n\nFooOptions\x12\x10\n\x08\x66oo_name\x18\x01 \x01(\t:a\n\x0b\x66oo_options\x12\x1c.google.protobuf.FileOptions\x18\xac\xec\xb6\x39 \x01(\x0b\x32+.google.protobuf.python.internal.FooOptions'
,
dependencies=[google_dot_protobuf_dot_descriptor__pb2.DESCRIPTOR,])
FOO_OPTIONS_FIELD_NUMBER = 120436268
foo_options = _descriptor.FieldDescriptor(
name='foo_options', full_name='google.protobuf.python.internal.foo_options', index=0,
number=120436268, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=True, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key)
_FOOOPTIONS = _descriptor.Descriptor(
name='FooOptions',
full_name='google.protobuf.python.internal.FooOptions',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='foo_name', full_name='google.protobuf.python.internal.FooOptions.foo_name', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=119,
serialized_end=149,
)
DESCRIPTOR.message_types_by_name['FooOptions'] = _FOOOPTIONS
DESCRIPTOR.extensions_by_name['foo_options'] = foo_options
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
FooOptions = _reflection.GeneratedProtocolMessageType('FooOptions', (_message.Message,), {
'DESCRIPTOR' : _FOOOPTIONS,
'__module__' : 'google.protobuf.internal.file_options_test_pb2'
# @@protoc_insertion_point(class_scope:google.protobuf.python.internal.FooOptions)
})
_sym_db.RegisterMessage(FooOptions)
foo_options.message_type = _FOOOPTIONS
google_dot_protobuf_dot_descriptor__pb2.FileOptions.RegisterExtension(foo_options)
# @@protoc_insertion_point(module_scope)

@ -0,0 +1,359 @@
#! /usr/bin/env python
#
# Protocol Buffers - Google's data interchange format
# Copyright 2008 Google Inc. All rights reserved.
# https://developers.google.com/protocol-buffers/
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
# TODO(robinson): Flesh this out considerably. We focused on reflection_test.py
# first, since it's testing the subtler code, and since it provides decent
# indirect testing of the protocol compiler output.
"""Unittest that directly tests the output of the pure-Python protocol
compiler. See //google/protobuf/internal/reflection_test.py for a test which
further ensures that we can use Python protocol message objects as we expect.
"""
__author__ = 'robinson@google.com (Will Robinson)'
try:
import unittest2 as unittest #PY26
except ImportError:
import unittest
from google.protobuf.internal import test_bad_identifiers_pb2
from google.protobuf import unittest_custom_options_pb2
from google.protobuf import unittest_import_pb2
from google.protobuf import unittest_import_public_pb2
from google.protobuf import unittest_mset_pb2
from google.protobuf import unittest_mset_wire_format_pb2
from google.protobuf import unittest_no_generic_services_pb2
from google.protobuf import unittest_pb2
from google.protobuf import service
from google.protobuf import symbol_database
MAX_EXTENSION = 536870912
class GeneratorTest(unittest.TestCase):
def testNestedMessageDescriptor(self):
field_name = 'optional_nested_message'
proto_type = unittest_pb2.TestAllTypes
self.assertEqual(
proto_type.NestedMessage.DESCRIPTOR,
proto_type.DESCRIPTOR.fields_by_name[field_name].message_type)
def testEnums(self):
# We test only module-level enums here.
# TODO(robinson): Examine descriptors directly to check
# enum descriptor output.
self.assertEqual(4, unittest_pb2.FOREIGN_FOO)
self.assertEqual(5, unittest_pb2.FOREIGN_BAR)
self.assertEqual(6, unittest_pb2.FOREIGN_BAZ)
proto = unittest_pb2.TestAllTypes()
self.assertEqual(1, proto.FOO)
self.assertEqual(1, unittest_pb2.TestAllTypes.FOO)
self.assertEqual(2, proto.BAR)
self.assertEqual(2, unittest_pb2.TestAllTypes.BAR)
self.assertEqual(3, proto.BAZ)
self.assertEqual(3, unittest_pb2.TestAllTypes.BAZ)
def testExtremeDefaultValues(self):
message = unittest_pb2.TestExtremeDefaultValues()
# Python pre-2.6 does not have isinf() or isnan() functions, so we have
# to provide our own.
def isnan(val):
# NaN is never equal to itself.
return val != val
def isinf(val):
# Infinity times zero equals NaN.
return not isnan(val) and isnan(val * 0)
self.assertTrue(isinf(message.inf_double))
self.assertTrue(message.inf_double > 0)
self.assertTrue(isinf(message.neg_inf_double))
self.assertTrue(message.neg_inf_double < 0)
self.assertTrue(isnan(message.nan_double))
self.assertTrue(isinf(message.inf_float))
self.assertTrue(message.inf_float > 0)
self.assertTrue(isinf(message.neg_inf_float))
self.assertTrue(message.neg_inf_float < 0)
self.assertTrue(isnan(message.nan_float))
self.assertEqual("? ? ?? ?? ??? ??/ ??-", message.cpp_trigraph)
def testHasDefaultValues(self):
desc = unittest_pb2.TestAllTypes.DESCRIPTOR
expected_has_default_by_name = {
'optional_int32': False,
'repeated_int32': False,
'optional_nested_message': False,
'default_int32': True,
}
has_default_by_name = dict(
[(f.name, f.has_default_value)
for f in desc.fields
if f.name in expected_has_default_by_name])
self.assertEqual(expected_has_default_by_name, has_default_by_name)
def testContainingTypeBehaviorForExtensions(self):
self.assertEqual(unittest_pb2.optional_int32_extension.containing_type,
unittest_pb2.TestAllExtensions.DESCRIPTOR)
self.assertEqual(unittest_pb2.TestRequired.single.containing_type,
unittest_pb2.TestAllExtensions.DESCRIPTOR)
def testExtensionScope(self):
self.assertEqual(unittest_pb2.optional_int32_extension.extension_scope,
None)
self.assertEqual(unittest_pb2.TestRequired.single.extension_scope,
unittest_pb2.TestRequired.DESCRIPTOR)
def testIsExtension(self):
self.assertTrue(unittest_pb2.optional_int32_extension.is_extension)
self.assertTrue(unittest_pb2.TestRequired.single.is_extension)
message_descriptor = unittest_pb2.TestRequired.DESCRIPTOR
non_extension_descriptor = message_descriptor.fields_by_name['a']
self.assertTrue(not non_extension_descriptor.is_extension)
def testOptions(self):
proto = unittest_mset_wire_format_pb2.TestMessageSet()
self.assertTrue(proto.DESCRIPTOR.GetOptions().message_set_wire_format)
def testMessageWithCustomOptions(self):
proto = unittest_custom_options_pb2.TestMessageWithCustomOptions()
enum_options = proto.DESCRIPTOR.enum_types_by_name['AnEnum'].GetOptions()
self.assertTrue(enum_options is not None)
# TODO(gps): We really should test for the presence of the enum_opt1
# extension and for its value to be set to -789.
def testNestedTypes(self):
self.assertEqual(
set(unittest_pb2.TestAllTypes.DESCRIPTOR.nested_types),
set([
unittest_pb2.TestAllTypes.NestedMessage.DESCRIPTOR,
unittest_pb2.TestAllTypes.OptionalGroup.DESCRIPTOR,
unittest_pb2.TestAllTypes.RepeatedGroup.DESCRIPTOR,
]))
self.assertEqual(unittest_pb2.TestEmptyMessage.DESCRIPTOR.nested_types, [])
self.assertEqual(
unittest_pb2.TestAllTypes.NestedMessage.DESCRIPTOR.nested_types, [])
def testContainingType(self):
self.assertTrue(
unittest_pb2.TestEmptyMessage.DESCRIPTOR.containing_type is None)
self.assertTrue(
unittest_pb2.TestAllTypes.DESCRIPTOR.containing_type is None)
self.assertEqual(
unittest_pb2.TestAllTypes.NestedMessage.DESCRIPTOR.containing_type,
unittest_pb2.TestAllTypes.DESCRIPTOR)
self.assertEqual(
unittest_pb2.TestAllTypes.NestedMessage.DESCRIPTOR.containing_type,
unittest_pb2.TestAllTypes.DESCRIPTOR)
self.assertEqual(
unittest_pb2.TestAllTypes.RepeatedGroup.DESCRIPTOR.containing_type,
unittest_pb2.TestAllTypes.DESCRIPTOR)
def testContainingTypeInEnumDescriptor(self):
self.assertTrue(unittest_pb2._FOREIGNENUM.containing_type is None)
self.assertEqual(unittest_pb2._TESTALLTYPES_NESTEDENUM.containing_type,
unittest_pb2.TestAllTypes.DESCRIPTOR)
def testPackage(self):
self.assertEqual(
unittest_pb2.TestAllTypes.DESCRIPTOR.file.package,
'protobuf_unittest')
desc = unittest_pb2.TestAllTypes.NestedMessage.DESCRIPTOR
self.assertEqual(desc.file.package, 'protobuf_unittest')
self.assertEqual(
unittest_import_pb2.ImportMessage.DESCRIPTOR.file.package,
'protobuf_unittest_import')
self.assertEqual(
unittest_pb2._FOREIGNENUM.file.package, 'protobuf_unittest')
self.assertEqual(
unittest_pb2._TESTALLTYPES_NESTEDENUM.file.package,
'protobuf_unittest')
self.assertEqual(
unittest_import_pb2._IMPORTENUM.file.package,
'protobuf_unittest_import')
def testExtensionRange(self):
self.assertEqual(
unittest_pb2.TestAllTypes.DESCRIPTOR.extension_ranges, [])
self.assertEqual(
unittest_pb2.TestAllExtensions.DESCRIPTOR.extension_ranges,
[(1, MAX_EXTENSION)])
self.assertEqual(
unittest_pb2.TestMultipleExtensionRanges.DESCRIPTOR.extension_ranges,
[(42, 43), (4143, 4244), (65536, MAX_EXTENSION)])
def testFileDescriptor(self):
self.assertEqual(unittest_pb2.DESCRIPTOR.name,
'google/protobuf/unittest.proto')
self.assertEqual(unittest_pb2.DESCRIPTOR.package, 'protobuf_unittest')
self.assertFalse(unittest_pb2.DESCRIPTOR.serialized_pb is None)
self.assertEqual(unittest_pb2.DESCRIPTOR.dependencies,
[unittest_import_pb2.DESCRIPTOR])
self.assertEqual(unittest_import_pb2.DESCRIPTOR.dependencies,
[unittest_import_public_pb2.DESCRIPTOR])
self.assertEqual(unittest_import_pb2.DESCRIPTOR.public_dependencies,
[unittest_import_public_pb2.DESCRIPTOR])
def testNoGenericServices(self):
self.assertTrue(hasattr(unittest_no_generic_services_pb2, "TestMessage"))
self.assertTrue(hasattr(unittest_no_generic_services_pb2, "FOO"))
self.assertTrue(hasattr(unittest_no_generic_services_pb2, "test_extension"))
# Make sure unittest_no_generic_services_pb2 has no services subclassing
# Proto2 Service class.
if hasattr(unittest_no_generic_services_pb2, "TestService"):
self.assertFalse(issubclass(unittest_no_generic_services_pb2.TestService,
service.Service))
def testMessageTypesByName(self):
file_type = unittest_pb2.DESCRIPTOR
self.assertEqual(
unittest_pb2._TESTALLTYPES,
file_type.message_types_by_name[unittest_pb2._TESTALLTYPES.name])
# Nested messages shouldn't be included in the message_types_by_name
# dictionary (like in the C++ API).
self.assertFalse(
unittest_pb2._TESTALLTYPES_NESTEDMESSAGE.name in
file_type.message_types_by_name)
def testEnumTypesByName(self):
file_type = unittest_pb2.DESCRIPTOR
self.assertEqual(
unittest_pb2._FOREIGNENUM,
file_type.enum_types_by_name[unittest_pb2._FOREIGNENUM.name])
def testExtensionsByName(self):
file_type = unittest_pb2.DESCRIPTOR
self.assertEqual(
unittest_pb2.my_extension_string,
file_type.extensions_by_name[unittest_pb2.my_extension_string.name])
def testPublicImports(self):
# Test public imports as embedded message.
all_type_proto = unittest_pb2.TestAllTypes()
self.assertEqual(0, all_type_proto.optional_public_import_message.e)
# PublicImportMessage is actually defined in the unittest_import_public_pb2
# module and is publicly imported by the unittest_import_pb2 module.
public_import_proto = unittest_import_pb2.PublicImportMessage()
self.assertEqual(0, public_import_proto.e)
self.assertTrue(unittest_import_public_pb2.PublicImportMessage is
unittest_import_pb2.PublicImportMessage)
def testBadIdentifiers(self):
# We're just testing that the code was imported without problems.
message = test_bad_identifiers_pb2.TestBadIdentifiers()
self.assertEqual(message.Extensions[test_bad_identifiers_pb2.message],
"foo")
self.assertEqual(message.Extensions[test_bad_identifiers_pb2.descriptor],
"bar")
self.assertEqual(message.Extensions[test_bad_identifiers_pb2.reflection],
"baz")
self.assertEqual(message.Extensions[test_bad_identifiers_pb2.service],
"qux")
def testOneof(self):
desc = unittest_pb2.TestAllTypes.DESCRIPTOR
self.assertEqual(1, len(desc.oneofs))
self.assertEqual('oneof_field', desc.oneofs[0].name)
self.assertEqual(0, desc.oneofs[0].index)
self.assertIs(desc, desc.oneofs[0].containing_type)
self.assertIs(desc.oneofs[0], desc.oneofs_by_name['oneof_field'])
nested_names = set(['oneof_uint32', 'oneof_nested_message',
'oneof_string', 'oneof_bytes'])
self.assertEqual(
nested_names,
set([field.name for field in desc.oneofs[0].fields]))
for field_name, field_desc in desc.fields_by_name.items():
if field_name in nested_names:
self.assertIs(desc.oneofs[0], field_desc.containing_oneof)
else:
self.assertIsNone(field_desc.containing_oneof)
def testEnumWithDupValue(self):
self.assertEqual('FOO1',
unittest_pb2.TestEnumWithDupValue.Name(unittest_pb2.FOO1))
self.assertEqual('FOO1',
unittest_pb2.TestEnumWithDupValue.Name(unittest_pb2.FOO2))
self.assertEqual('BAR1',
unittest_pb2.TestEnumWithDupValue.Name(unittest_pb2.BAR1))
self.assertEqual('BAR1',
unittest_pb2.TestEnumWithDupValue.Name(unittest_pb2.BAR2))
class SymbolDatabaseRegistrationTest(unittest.TestCase):
"""Checks that messages, enums and files are correctly registered."""
def testGetSymbol(self):
self.assertEqual(
unittest_pb2.TestAllTypes, symbol_database.Default().GetSymbol(
'protobuf_unittest.TestAllTypes'))
self.assertEqual(
unittest_pb2.TestAllTypes.NestedMessage,
symbol_database.Default().GetSymbol(
'protobuf_unittest.TestAllTypes.NestedMessage'))
with self.assertRaises(KeyError):
symbol_database.Default().GetSymbol('protobuf_unittest.NestedMessage')
self.assertEqual(
unittest_pb2.TestAllTypes.OptionalGroup,
symbol_database.Default().GetSymbol(
'protobuf_unittest.TestAllTypes.OptionalGroup'))
self.assertEqual(
unittest_pb2.TestAllTypes.RepeatedGroup,
symbol_database.Default().GetSymbol(
'protobuf_unittest.TestAllTypes.RepeatedGroup'))
def testEnums(self):
self.assertEqual(
'protobuf_unittest.ForeignEnum',
symbol_database.Default().pool.FindEnumTypeByName(
'protobuf_unittest.ForeignEnum').full_name)
self.assertEqual(
'protobuf_unittest.TestAllTypes.NestedEnum',
symbol_database.Default().pool.FindEnumTypeByName(
'protobuf_unittest.TestAllTypes.NestedEnum').full_name)
def testFindFileByName(self):
self.assertEqual(
'google/protobuf/unittest.proto',
symbol_database.Default().pool.FindFileByName(
'google/protobuf/unittest.proto').name)
if __name__ == '__main__':
unittest.main()

@ -0,0 +1,33 @@
# Protocol Buffers - Google's data interchange format
# Copyright 2008 Google Inc. All rights reserved.
# https://developers.google.com/protocol-buffers/
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Sample module importing a nested proto from itself."""
from google.protobuf.internal.import_test_package import outer_pb2 as myproto

@ -0,0 +1,70 @@
# -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: google/protobuf/internal/import_test_package/inner.proto
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
DESCRIPTOR = _descriptor.FileDescriptor(
name='google/protobuf/internal/import_test_package/inner.proto',
package='google.protobuf.python.internal.import_test_package',
syntax='proto2',
serialized_options=None,
create_key=_descriptor._internal_create_key,
serialized_pb=b'\n8google/protobuf/internal/import_test_package/inner.proto\x12\x33google.protobuf.python.internal.import_test_package\"\x1a\n\x05Inner\x12\x11\n\x05value\x18\x01 \x01(\x05:\x02\x35\x37'
)
_INNER = _descriptor.Descriptor(
name='Inner',
full_name='google.protobuf.python.internal.import_test_package.Inner',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='value', full_name='google.protobuf.python.internal.import_test_package.Inner.value', index=0,
number=1, type=5, cpp_type=1, label=1,
has_default_value=True, default_value=57,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=113,
serialized_end=139,
)
DESCRIPTOR.message_types_by_name['Inner'] = _INNER
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
Inner = _reflection.GeneratedProtocolMessageType('Inner', (_message.Message,), {
'DESCRIPTOR' : _INNER,
'__module__' : 'google.protobuf.internal.import_test_package.inner_pb2'
# @@protoc_insertion_point(class_scope:google.protobuf.python.internal.import_test_package.Inner)
})
_sym_db.RegisterMessage(Inner)
# @@protoc_insertion_point(module_scope)

@ -0,0 +1,73 @@
# -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: google/protobuf/internal/import_test_package/outer.proto
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
from google.protobuf.internal.import_test_package import inner_pb2 as google_dot_protobuf_dot_internal_dot_import__test__package_dot_inner__pb2
DESCRIPTOR = _descriptor.FileDescriptor(
name='google/protobuf/internal/import_test_package/outer.proto',
package='google.protobuf.python.internal.import_test_package',
syntax='proto2',
serialized_options=None,
create_key=_descriptor._internal_create_key,
serialized_pb=b'\n8google/protobuf/internal/import_test_package/outer.proto\x12\x33google.protobuf.python.internal.import_test_package\x1a\x38google/protobuf/internal/import_test_package/inner.proto\"R\n\x05Outer\x12I\n\x05inner\x18\x01 \x01(\x0b\x32:.google.protobuf.python.internal.import_test_package.Inner'
,
dependencies=[google_dot_protobuf_dot_internal_dot_import__test__package_dot_inner__pb2.DESCRIPTOR,])
_OUTER = _descriptor.Descriptor(
name='Outer',
full_name='google.protobuf.python.internal.import_test_package.Outer',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='inner', full_name='google.protobuf.python.internal.import_test_package.Outer.inner', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=171,
serialized_end=253,
)
_OUTER.fields_by_name['inner'].message_type = google_dot_protobuf_dot_internal_dot_import__test__package_dot_inner__pb2._INNER
DESCRIPTOR.message_types_by_name['Outer'] = _OUTER
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
Outer = _reflection.GeneratedProtocolMessageType('Outer', (_message.Message,), {
'DESCRIPTOR' : _OUTER,
'__module__' : 'google.protobuf.internal.import_test_package.outer_pb2'
# @@protoc_insertion_point(class_scope:google.protobuf.python.internal.import_test_package.Outer)
})
_sym_db.RegisterMessage(Outer)
# @@protoc_insertion_point(module_scope)

@ -0,0 +1,105 @@
#! /usr/bin/env python
#
# Protocol Buffers - Google's data interchange format
# Copyright 2008 Google Inc. All rights reserved.
# https://developers.google.com/protocol-buffers/
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Tests for google.protobuf.internal.keywords."""
import unittest
from google.protobuf.internal import more_messages_pb2
from google.protobuf import descriptor_pool
class KeywordsConflictTest(unittest.TestCase):
def setUp(self):
super(KeywordsConflictTest, self).setUp()
self.pool = descriptor_pool.Default()
def testMessage(self):
message = getattr(more_messages_pb2, 'class')()
message.int_field = 123
self.assertEqual(message.int_field, 123)
des = self.pool.FindMessageTypeByName('google.protobuf.internal.class')
self.assertEqual(des.name, 'class')
def testNestedMessage(self):
message = getattr(more_messages_pb2, 'class')()
message.nested_message.field = 234
self.assertEqual(message.nested_message.field, 234)
des = self.pool.FindMessageTypeByName('google.protobuf.internal.class.try')
self.assertEqual(des.name, 'try')
def testField(self):
message = getattr(more_messages_pb2, 'class')()
setattr(message, 'if', 123)
setattr(message, 'as', 1)
self.assertEqual(getattr(message, 'if'), 123)
self.assertEqual(getattr(message, 'as'), 1)
def testEnum(self):
class_ = getattr(more_messages_pb2, 'class')
message = class_()
# Normal enum value.
message.enum_field = more_messages_pb2.default
self.assertEqual(message.enum_field, more_messages_pb2.default)
# Top level enum value.
message.enum_field = getattr(more_messages_pb2, 'else')
self.assertEqual(message.enum_field, 1)
# Nested enum value
message.nested_enum_field = getattr(class_, 'True')
self.assertEqual(message.nested_enum_field, 1)
def testExtension(self):
message = getattr(more_messages_pb2, 'class')()
# Top level extension
extension1 = getattr(more_messages_pb2, 'continue')
message.Extensions[extension1] = 456
self.assertEqual(message.Extensions[extension1], 456)
# Non-top-level extension
extension2 = getattr(more_messages_pb2.ExtendClass, 'return')
message.Extensions[extension2] = 789
self.assertEqual(message.Extensions[extension2], 789)
def testExtensionForNestedMessage(self):
message = getattr(more_messages_pb2, 'class')()
extension = getattr(more_messages_pb2, 'with')
message.nested_message.Extensions[extension] = 999
self.assertEqual(message.nested_message.Extensions[extension], 999)
def testFullKeywordUsed(self):
message = more_messages_pb2.TestFullKeyword()
message.field2.int_field = 123
if __name__ == '__main__':
unittest.main()

@ -0,0 +1,223 @@
#! /usr/bin/env python
#
# Protocol Buffers - Google's data interchange format
# Copyright 2008 Google Inc. All rights reserved.
# https://developers.google.com/protocol-buffers/
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Tests for google.protobuf.message_factory."""
__author__ = 'matthewtoia@google.com (Matt Toia)'
try:
import unittest2 as unittest #PY26
except ImportError:
import unittest
from google.protobuf import descriptor_pb2
from google.protobuf.internal import api_implementation
from google.protobuf.internal import factory_test1_pb2
from google.protobuf.internal import factory_test2_pb2
from google.protobuf.internal import testing_refleaks
from google.protobuf import descriptor_database
from google.protobuf import descriptor_pool
from google.protobuf import message_factory
@testing_refleaks.TestCase
class MessageFactoryTest(unittest.TestCase):
def setUp(self):
self.factory_test1_fd = descriptor_pb2.FileDescriptorProto.FromString(
factory_test1_pb2.DESCRIPTOR.serialized_pb)
self.factory_test2_fd = descriptor_pb2.FileDescriptorProto.FromString(
factory_test2_pb2.DESCRIPTOR.serialized_pb)
def _ExerciseDynamicClass(self, cls):
msg = cls()
msg.mandatory = 42
msg.nested_factory_2_enum = 0
msg.nested_factory_2_message.value = 'nested message value'
msg.factory_1_message.factory_1_enum = 1
msg.factory_1_message.nested_factory_1_enum = 0
msg.factory_1_message.nested_factory_1_message.value = (
'nested message value')
msg.factory_1_message.scalar_value = 22
msg.factory_1_message.list_value.extend([u'one', u'two', u'three'])
msg.factory_1_message.list_value.append(u'four')
msg.factory_1_enum = 1
msg.nested_factory_1_enum = 0
msg.nested_factory_1_message.value = 'nested message value'
msg.circular_message.mandatory = 1
msg.circular_message.circular_message.mandatory = 2
msg.circular_message.scalar_value = 'one deep'
msg.scalar_value = 'zero deep'
msg.list_value.extend([u'four', u'three', u'two'])
msg.list_value.append(u'one')
msg.grouped.add()
msg.grouped[0].part_1 = 'hello'
msg.grouped[0].part_2 = 'world'
msg.grouped.add(part_1='testing', part_2='123')
msg.loop.loop.mandatory = 2
msg.loop.loop.loop.loop.mandatory = 4
serialized = msg.SerializeToString()
converted = factory_test2_pb2.Factory2Message.FromString(serialized)
reserialized = converted.SerializeToString()
self.assertEqual(serialized, reserialized)
result = cls.FromString(reserialized)
self.assertEqual(msg, result)
def testGetPrototype(self):
db = descriptor_database.DescriptorDatabase()
pool = descriptor_pool.DescriptorPool(db)
db.Add(self.factory_test1_fd)
db.Add(self.factory_test2_fd)
factory = message_factory.MessageFactory()
cls = factory.GetPrototype(pool.FindMessageTypeByName(
'google.protobuf.python.internal.Factory2Message'))
self.assertFalse(cls is factory_test2_pb2.Factory2Message)
self._ExerciseDynamicClass(cls)
cls2 = factory.GetPrototype(pool.FindMessageTypeByName(
'google.protobuf.python.internal.Factory2Message'))
self.assertTrue(cls is cls2)
def testGetMessages(self):
# Performed twice because multiple calls with the same input must be allowed.
for _ in range(2):
# GetMessages should work regardless of the order in which the
# FileDescriptorProtos are provided. In particular, the function should
# succeed when the files are not in the topological order of dependencies.
# Assuming factory_test2_fd depends on factory_test1_fd.
self.assertIn(self.factory_test1_fd.name,
self.factory_test2_fd.dependency)
# GetMessages should work when a file comes before its dependencies:
# factory_test2_fd comes before factory_test1_fd.
messages = message_factory.GetMessages([self.factory_test2_fd,
self.factory_test1_fd])
self.assertTrue(
set(['google.protobuf.python.internal.Factory2Message',
'google.protobuf.python.internal.Factory1Message'],
).issubset(set(messages.keys())))
self._ExerciseDynamicClass(
messages['google.protobuf.python.internal.Factory2Message'])
factory_msg1 = messages['google.protobuf.python.internal.Factory1Message']
self.assertTrue(set(
['google.protobuf.python.internal.Factory2Message.one_more_field',
'google.protobuf.python.internal.another_field'],).issubset(set(
ext.full_name
for ext in factory_msg1.DESCRIPTOR.file.pool.FindAllExtensions(
factory_msg1.DESCRIPTOR))))
msg1 = messages['google.protobuf.python.internal.Factory1Message']()
ext1 = msg1.Extensions._FindExtensionByName(
'google.protobuf.python.internal.Factory2Message.one_more_field')
ext2 = msg1.Extensions._FindExtensionByName(
'google.protobuf.python.internal.another_field')
self.assertEqual(0, len(msg1.Extensions))
msg1.Extensions[ext1] = 'test1'
msg1.Extensions[ext2] = 'test2'
self.assertEqual('test1', msg1.Extensions[ext1])
self.assertEqual('test2', msg1.Extensions[ext2])
self.assertEqual(None,
msg1.Extensions._FindExtensionByNumber(12321))
self.assertEqual(2, len(msg1.Extensions))
if api_implementation.Type() == 'cpp':
self.assertRaises(TypeError,
msg1.Extensions._FindExtensionByName, 0)
self.assertRaises(TypeError,
msg1.Extensions._FindExtensionByNumber, '')
else:
self.assertEqual(None,
msg1.Extensions._FindExtensionByName(0))
self.assertEqual(None,
msg1.Extensions._FindExtensionByNumber(''))
def testDuplicateExtensionNumber(self):
pool = descriptor_pool.DescriptorPool()
factory = message_factory.MessageFactory(pool=pool)
# Add Container message.
f = descriptor_pb2.FileDescriptorProto()
f.name = 'google/protobuf/internal/container.proto'
f.package = 'google.protobuf.python.internal'
msg = f.message_type.add()
msg.name = 'Container'
rng = msg.extension_range.add()
rng.start = 1
rng.end = 10
pool.Add(f)
msgs = factory.GetMessages([f.name])
self.assertIn('google.protobuf.python.internal.Container', msgs)
# Extend container.
f = descriptor_pb2.FileDescriptorProto()
f.name = 'google/protobuf/internal/extension.proto'
f.package = 'google.protobuf.python.internal'
f.dependency.append('google/protobuf/internal/container.proto')
msg = f.message_type.add()
msg.name = 'Extension'
ext = msg.extension.add()
ext.name = 'extension_field'
ext.number = 2
ext.label = descriptor_pb2.FieldDescriptorProto.LABEL_OPTIONAL
ext.type_name = 'Extension'
ext.extendee = 'Container'
pool.Add(f)
msgs = factory.GetMessages([f.name])
self.assertIn('google.protobuf.python.internal.Extension', msgs)
# Add Duplicate extending the same field number.
f = descriptor_pb2.FileDescriptorProto()
f.name = 'google/protobuf/internal/duplicate.proto'
f.package = 'google.protobuf.python.internal'
f.dependency.append('google/protobuf/internal/container.proto')
msg = f.message_type.add()
msg.name = 'Duplicate'
ext = msg.extension.add()
ext.name = 'extension_field'
ext.number = 2
ext.label = descriptor_pb2.FieldDescriptorProto.LABEL_OPTIONAL
ext.type_name = 'Duplicate'
ext.extendee = 'Container'
pool.Add(f)
with self.assertRaises(Exception) as cm:
factory.GetMessages([f.name])
self.assertIn(str(cm.exception),
['Extensions '
'"google.protobuf.python.internal.Duplicate.extension_field" and'
' "google.protobuf.python.internal.Extension.extension_field"'
' both try to extend message type'
' "google.protobuf.python.internal.Container"'
' with field number 2.',
'Double registration of Extensions'])
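# Illustrative sketch, not part of the upstream test: the minimal GetMessages
# workflow exercised above, assuming factory_test1.proto has no dependencies of
# its own (factory_test2.proto is the file that depends on it); the helper name
# below is chosen for this example only.
def _example_build_dynamic_factory1():
  file_proto = descriptor_pb2.FileDescriptorProto.FromString(
      factory_test1_pb2.DESCRIPTOR.serialized_pb)
  # GetMessages maps full message names to dynamically generated classes.
  classes = message_factory.GetMessages([file_proto])
  cls = classes['google.protobuf.python.internal.Factory1Message']
  msg = cls(scalar_value=22)  # scalar_value is set the same way in the tests.
  return msg.SerializeToString()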
if __name__ == '__main__':
unittest.main()

@ -0,0 +1,78 @@
# Protocol Buffers - Google's data interchange format
# Copyright 2008 Google Inc. All rights reserved.
# https://developers.google.com/protocol-buffers/
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Defines a listener interface for observing certain
state transitions on Message objects.
Also defines a null implementation of this interface.
"""
__author__ = 'robinson@google.com (Will Robinson)'
class MessageListener(object):
"""Listens for modifications made to a message. Meant to be registered via
Message._SetListener().
Attributes:
dirty: If True, then calling Modified() would be a no-op. This can be
used to avoid these calls entirely in the common case.
"""
def Modified(self):
"""Called every time the message is modified in such a way that the parent
message may need to be updated. This currently means either:
(a) The message was modified for the first time, so the parent message
should henceforth mark the message as present.
(b) The message's cached byte size became dirty -- i.e. the message was
modified for the first time after a previous call to ByteSize().
Therefore the parent should also mark its byte size as dirty.
Note that (a) implies (b), since new objects start out with a clean cached
size (zero). However, we document (a) explicitly because it is important.
Modified() will *only* be called in response to one of these two events --
not every time the sub-message is modified.
Note that if the listener's |dirty| attribute is true, then calling
Modified at the moment would be a no-op, so it can be skipped. Performance-
sensitive callers should check this attribute directly before calling since
it will be true most of the time.
"""
raise NotImplementedError
class NullMessageListener(object):
"""No-op MessageListener implementation."""
def Modified(self):
pass
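# Illustrative sketch, not part of the upstream module: a listener that follows
# the Modified() contract documented above; the class name is an assumption
# made for this example only.
class DirtyFlagListener(object):
  """Records the first modification via the documented 'dirty' flag."""

  def __init__(self):
    self.dirty = False

  def Modified(self):
    # Once dirty, further notifications are redundant; callers may check the
    # 'dirty' attribute directly and skip the call, as the docstring suggests.
    self.dirty = True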

@ -0,0 +1,213 @@
# -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: google/protobuf/internal/message_set_extensions.proto
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
DESCRIPTOR = _descriptor.FileDescriptor(
name='google/protobuf/internal/message_set_extensions.proto',
package='google.protobuf.internal',
syntax='proto2',
serialized_options=None,
create_key=_descriptor._internal_create_key,
serialized_pb=b'\n5google/protobuf/internal/message_set_extensions.proto\x12\x18google.protobuf.internal\"\x1e\n\x0eTestMessageSet*\x08\x08\x04\x10\xff\xff\xff\xff\x07:\x02\x08\x01\"\xa5\x01\n\x18TestMessageSetExtension1\x12\t\n\x01i\x18\x0f \x01(\x05\x32~\n\x15message_set_extension\x12(.google.protobuf.internal.TestMessageSet\x18\xab\xff\xf6. \x01(\x0b\x32\x32.google.protobuf.internal.TestMessageSetExtension1\"\xa7\x01\n\x18TestMessageSetExtension2\x12\x0b\n\x03str\x18\x19 \x01(\t2~\n\x15message_set_extension\x12(.google.protobuf.internal.TestMessageSet\x18\xca\xff\xf6. \x01(\x0b\x32\x32.google.protobuf.internal.TestMessageSetExtension2\"(\n\x18TestMessageSetExtension3\x12\x0c\n\x04text\x18# \x01(\t:\x7f\n\x16message_set_extension3\x12(.google.protobuf.internal.TestMessageSet\x18\xdf\xff\xf6. \x01(\x0b\x32\x32.google.protobuf.internal.TestMessageSetExtension3'
)
MESSAGE_SET_EXTENSION3_FIELD_NUMBER = 98418655
message_set_extension3 = _descriptor.FieldDescriptor(
name='message_set_extension3', full_name='google.protobuf.internal.message_set_extension3', index=0,
number=98418655, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=True, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key)
_TESTMESSAGESET = _descriptor.Descriptor(
name='TestMessageSet',
full_name='google.protobuf.internal.TestMessageSet',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=b'\010\001',
is_extendable=True,
syntax='proto2',
extension_ranges=[(4, 2147483647), ],
oneofs=[
],
serialized_start=83,
serialized_end=113,
)
_TESTMESSAGESETEXTENSION1 = _descriptor.Descriptor(
name='TestMessageSetExtension1',
full_name='google.protobuf.internal.TestMessageSetExtension1',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='i', full_name='google.protobuf.internal.TestMessageSetExtension1.i', index=0,
number=15, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
_descriptor.FieldDescriptor(
name='message_set_extension', full_name='google.protobuf.internal.TestMessageSetExtension1.message_set_extension', index=0,
number=98418603, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=True, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=116,
serialized_end=281,
)
_TESTMESSAGESETEXTENSION2 = _descriptor.Descriptor(
name='TestMessageSetExtension2',
full_name='google.protobuf.internal.TestMessageSetExtension2',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='str', full_name='google.protobuf.internal.TestMessageSetExtension2.str', index=0,
number=25, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
_descriptor.FieldDescriptor(
name='message_set_extension', full_name='google.protobuf.internal.TestMessageSetExtension2.message_set_extension', index=0,
number=98418634, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=True, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=284,
serialized_end=451,
)
_TESTMESSAGESETEXTENSION3 = _descriptor.Descriptor(
name='TestMessageSetExtension3',
full_name='google.protobuf.internal.TestMessageSetExtension3',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='text', full_name='google.protobuf.internal.TestMessageSetExtension3.text', index=0,
number=35, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=453,
serialized_end=493,
)
DESCRIPTOR.message_types_by_name['TestMessageSet'] = _TESTMESSAGESET
DESCRIPTOR.message_types_by_name['TestMessageSetExtension1'] = _TESTMESSAGESETEXTENSION1
DESCRIPTOR.message_types_by_name['TestMessageSetExtension2'] = _TESTMESSAGESETEXTENSION2
DESCRIPTOR.message_types_by_name['TestMessageSetExtension3'] = _TESTMESSAGESETEXTENSION3
DESCRIPTOR.extensions_by_name['message_set_extension3'] = message_set_extension3
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
TestMessageSet = _reflection.GeneratedProtocolMessageType('TestMessageSet', (_message.Message,), {
'DESCRIPTOR' : _TESTMESSAGESET,
'__module__' : 'google.protobuf.internal.message_set_extensions_pb2'
# @@protoc_insertion_point(class_scope:google.protobuf.internal.TestMessageSet)
})
_sym_db.RegisterMessage(TestMessageSet)
TestMessageSetExtension1 = _reflection.GeneratedProtocolMessageType('TestMessageSetExtension1', (_message.Message,), {
'DESCRIPTOR' : _TESTMESSAGESETEXTENSION1,
'__module__' : 'google.protobuf.internal.message_set_extensions_pb2'
# @@protoc_insertion_point(class_scope:google.protobuf.internal.TestMessageSetExtension1)
})
_sym_db.RegisterMessage(TestMessageSetExtension1)
TestMessageSetExtension2 = _reflection.GeneratedProtocolMessageType('TestMessageSetExtension2', (_message.Message,), {
'DESCRIPTOR' : _TESTMESSAGESETEXTENSION2,
'__module__' : 'google.protobuf.internal.message_set_extensions_pb2'
# @@protoc_insertion_point(class_scope:google.protobuf.internal.TestMessageSetExtension2)
})
_sym_db.RegisterMessage(TestMessageSetExtension2)
TestMessageSetExtension3 = _reflection.GeneratedProtocolMessageType('TestMessageSetExtension3', (_message.Message,), {
'DESCRIPTOR' : _TESTMESSAGESETEXTENSION3,
'__module__' : 'google.protobuf.internal.message_set_extensions_pb2'
# @@protoc_insertion_point(class_scope:google.protobuf.internal.TestMessageSetExtension3)
})
_sym_db.RegisterMessage(TestMessageSetExtension3)
message_set_extension3.message_type = _TESTMESSAGESETEXTENSION3
TestMessageSet.RegisterExtension(message_set_extension3)
_TESTMESSAGESETEXTENSION1.extensions_by_name['message_set_extension'].message_type = _TESTMESSAGESETEXTENSION1
TestMessageSet.RegisterExtension(_TESTMESSAGESETEXTENSION1.extensions_by_name['message_set_extension'])
_TESTMESSAGESETEXTENSION2.extensions_by_name['message_set_extension'].message_type = _TESTMESSAGESETEXTENSION2
TestMessageSet.RegisterExtension(_TESTMESSAGESETEXTENSION2.extensions_by_name['message_set_extension'])
_TESTMESSAGESET._options = None
# @@protoc_insertion_point(module_scope)

@ -0,0 +1,235 @@
# -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: google/protobuf/internal/missing_enum_values.proto
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
DESCRIPTOR = _descriptor.FileDescriptor(
name='google/protobuf/internal/missing_enum_values.proto',
package='google.protobuf.python.internal',
syntax='proto2',
serialized_options=None,
create_key=_descriptor._internal_create_key,
serialized_pb=b'\n2google/protobuf/internal/missing_enum_values.proto\x12\x1fgoogle.protobuf.python.internal\"\xc1\x02\n\x0eTestEnumValues\x12X\n\x14optional_nested_enum\x18\x01 \x01(\x0e\x32:.google.protobuf.python.internal.TestEnumValues.NestedEnum\x12X\n\x14repeated_nested_enum\x18\x02 \x03(\x0e\x32:.google.protobuf.python.internal.TestEnumValues.NestedEnum\x12Z\n\x12packed_nested_enum\x18\x03 \x03(\x0e\x32:.google.protobuf.python.internal.TestEnumValues.NestedEnumB\x02\x10\x01\"\x1f\n\nNestedEnum\x12\x08\n\x04ZERO\x10\x00\x12\x07\n\x03ONE\x10\x01\"\xd3\x02\n\x15TestMissingEnumValues\x12_\n\x14optional_nested_enum\x18\x01 \x01(\x0e\x32\x41.google.protobuf.python.internal.TestMissingEnumValues.NestedEnum\x12_\n\x14repeated_nested_enum\x18\x02 \x03(\x0e\x32\x41.google.protobuf.python.internal.TestMissingEnumValues.NestedEnum\x12\x61\n\x12packed_nested_enum\x18\x03 \x03(\x0e\x32\x41.google.protobuf.python.internal.TestMissingEnumValues.NestedEnumB\x02\x10\x01\"\x15\n\nNestedEnum\x12\x07\n\x03TWO\x10\x02\"\x1b\n\nJustString\x12\r\n\x05\x64ummy\x18\x01 \x02(\t'
)
_TESTENUMVALUES_NESTEDENUM = _descriptor.EnumDescriptor(
name='NestedEnum',
full_name='google.protobuf.python.internal.TestEnumValues.NestedEnum',
filename=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key,
values=[
_descriptor.EnumValueDescriptor(
name='ZERO', index=0, number=0,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='ONE', index=1, number=1,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
],
containing_type=None,
serialized_options=None,
serialized_start=378,
serialized_end=409,
)
_sym_db.RegisterEnumDescriptor(_TESTENUMVALUES_NESTEDENUM)
_TESTMISSINGENUMVALUES_NESTEDENUM = _descriptor.EnumDescriptor(
name='NestedEnum',
full_name='google.protobuf.python.internal.TestMissingEnumValues.NestedEnum',
filename=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key,
values=[
_descriptor.EnumValueDescriptor(
name='TWO', index=0, number=2,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
],
containing_type=None,
serialized_options=None,
serialized_start=730,
serialized_end=751,
)
_sym_db.RegisterEnumDescriptor(_TESTMISSINGENUMVALUES_NESTEDENUM)
_TESTENUMVALUES = _descriptor.Descriptor(
name='TestEnumValues',
full_name='google.protobuf.python.internal.TestEnumValues',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='optional_nested_enum', full_name='google.protobuf.python.internal.TestEnumValues.optional_nested_enum', index=0,
number=1, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='repeated_nested_enum', full_name='google.protobuf.python.internal.TestEnumValues.repeated_nested_enum', index=1,
number=2, type=14, cpp_type=8, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='packed_nested_enum', full_name='google.protobuf.python.internal.TestEnumValues.packed_nested_enum', index=2,
number=3, type=14, cpp_type=8, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=b'\020\001', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
_TESTENUMVALUES_NESTEDENUM,
],
serialized_options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=88,
serialized_end=409,
)
_TESTMISSINGENUMVALUES = _descriptor.Descriptor(
name='TestMissingEnumValues',
full_name='google.protobuf.python.internal.TestMissingEnumValues',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='optional_nested_enum', full_name='google.protobuf.python.internal.TestMissingEnumValues.optional_nested_enum', index=0,
number=1, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=2,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='repeated_nested_enum', full_name='google.protobuf.python.internal.TestMissingEnumValues.repeated_nested_enum', index=1,
number=2, type=14, cpp_type=8, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='packed_nested_enum', full_name='google.protobuf.python.internal.TestMissingEnumValues.packed_nested_enum', index=2,
number=3, type=14, cpp_type=8, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=b'\020\001', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
_TESTMISSINGENUMVALUES_NESTEDENUM,
],
serialized_options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=412,
serialized_end=751,
)
_JUSTSTRING = _descriptor.Descriptor(
name='JustString',
full_name='google.protobuf.python.internal.JustString',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='dummy', full_name='google.protobuf.python.internal.JustString.dummy', index=0,
number=1, type=9, cpp_type=9, label=2,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=753,
serialized_end=780,
)
_TESTENUMVALUES.fields_by_name['optional_nested_enum'].enum_type = _TESTENUMVALUES_NESTEDENUM
_TESTENUMVALUES.fields_by_name['repeated_nested_enum'].enum_type = _TESTENUMVALUES_NESTEDENUM
_TESTENUMVALUES.fields_by_name['packed_nested_enum'].enum_type = _TESTENUMVALUES_NESTEDENUM
_TESTENUMVALUES_NESTEDENUM.containing_type = _TESTENUMVALUES
_TESTMISSINGENUMVALUES.fields_by_name['optional_nested_enum'].enum_type = _TESTMISSINGENUMVALUES_NESTEDENUM
_TESTMISSINGENUMVALUES.fields_by_name['repeated_nested_enum'].enum_type = _TESTMISSINGENUMVALUES_NESTEDENUM
_TESTMISSINGENUMVALUES.fields_by_name['packed_nested_enum'].enum_type = _TESTMISSINGENUMVALUES_NESTEDENUM
_TESTMISSINGENUMVALUES_NESTEDENUM.containing_type = _TESTMISSINGENUMVALUES
DESCRIPTOR.message_types_by_name['TestEnumValues'] = _TESTENUMVALUES
DESCRIPTOR.message_types_by_name['TestMissingEnumValues'] = _TESTMISSINGENUMVALUES
DESCRIPTOR.message_types_by_name['JustString'] = _JUSTSTRING
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
TestEnumValues = _reflection.GeneratedProtocolMessageType('TestEnumValues', (_message.Message,), {
'DESCRIPTOR' : _TESTENUMVALUES,
'__module__' : 'google.protobuf.internal.missing_enum_values_pb2'
# @@protoc_insertion_point(class_scope:google.protobuf.python.internal.TestEnumValues)
})
_sym_db.RegisterMessage(TestEnumValues)
TestMissingEnumValues = _reflection.GeneratedProtocolMessageType('TestMissingEnumValues', (_message.Message,), {
'DESCRIPTOR' : _TESTMISSINGENUMVALUES,
'__module__' : 'google.protobuf.internal.missing_enum_values_pb2'
# @@protoc_insertion_point(class_scope:google.protobuf.python.internal.TestMissingEnumValues)
})
_sym_db.RegisterMessage(TestMissingEnumValues)
JustString = _reflection.GeneratedProtocolMessageType('JustString', (_message.Message,), {
'DESCRIPTOR' : _JUSTSTRING,
'__module__' : 'google.protobuf.internal.missing_enum_values_pb2'
# @@protoc_insertion_point(class_scope:google.protobuf.python.internal.JustString)
})
_sym_db.RegisterMessage(JustString)
_TESTENUMVALUES.fields_by_name['packed_nested_enum']._options = None
_TESTMISSINGENUMVALUES.fields_by_name['packed_nested_enum']._options = None
# @@protoc_insertion_point(module_scope)

@ -0,0 +1,104 @@
# -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: google/protobuf/internal/more_extensions_dynamic.proto
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
from google.protobuf.internal import more_extensions_pb2 as google_dot_protobuf_dot_internal_dot_more__extensions__pb2
DESCRIPTOR = _descriptor.FileDescriptor(
name='google/protobuf/internal/more_extensions_dynamic.proto',
package='google.protobuf.internal',
syntax='proto2',
serialized_options=None,
create_key=_descriptor._internal_create_key,
serialized_pb=b'\n6google/protobuf/internal/more_extensions_dynamic.proto\x12\x18google.protobuf.internal\x1a.google/protobuf/internal/more_extensions.proto\"\x1f\n\x12\x44ynamicMessageType\x12\t\n\x01\x61\x18\x01 \x01(\x05:J\n\x17\x64ynamic_int32_extension\x12).google.protobuf.internal.ExtendedMessage\x18\x64 \x01(\x05:z\n\x19\x64ynamic_message_extension\x12).google.protobuf.internal.ExtendedMessage\x18\x65 \x01(\x0b\x32,.google.protobuf.internal.DynamicMessageType:\x83\x01\n\"repeated_dynamic_message_extension\x12).google.protobuf.internal.ExtendedMessage\x18\x66 \x03(\x0b\x32,.google.protobuf.internal.DynamicMessageType'
,
dependencies=[google_dot_protobuf_dot_internal_dot_more__extensions__pb2.DESCRIPTOR,])
DYNAMIC_INT32_EXTENSION_FIELD_NUMBER = 100
dynamic_int32_extension = _descriptor.FieldDescriptor(
name='dynamic_int32_extension', full_name='google.protobuf.internal.dynamic_int32_extension', index=0,
number=100, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=True, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key)
DYNAMIC_MESSAGE_EXTENSION_FIELD_NUMBER = 101
dynamic_message_extension = _descriptor.FieldDescriptor(
name='dynamic_message_extension', full_name='google.protobuf.internal.dynamic_message_extension', index=1,
number=101, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=True, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key)
REPEATED_DYNAMIC_MESSAGE_EXTENSION_FIELD_NUMBER = 102
repeated_dynamic_message_extension = _descriptor.FieldDescriptor(
name='repeated_dynamic_message_extension', full_name='google.protobuf.internal.repeated_dynamic_message_extension', index=2,
number=102, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=True, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key)
_DYNAMICMESSAGETYPE = _descriptor.Descriptor(
name='DynamicMessageType',
full_name='google.protobuf.internal.DynamicMessageType',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='a', full_name='google.protobuf.internal.DynamicMessageType.a', index=0,
number=1, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=132,
serialized_end=163,
)
DESCRIPTOR.message_types_by_name['DynamicMessageType'] = _DYNAMICMESSAGETYPE
DESCRIPTOR.extensions_by_name['dynamic_int32_extension'] = dynamic_int32_extension
DESCRIPTOR.extensions_by_name['dynamic_message_extension'] = dynamic_message_extension
DESCRIPTOR.extensions_by_name['repeated_dynamic_message_extension'] = repeated_dynamic_message_extension
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
DynamicMessageType = _reflection.GeneratedProtocolMessageType('DynamicMessageType', (_message.Message,), {
'DESCRIPTOR' : _DYNAMICMESSAGETYPE,
'__module__' : 'google.protobuf.internal.more_extensions_dynamic_pb2'
# @@protoc_insertion_point(class_scope:google.protobuf.internal.DynamicMessageType)
})
_sym_db.RegisterMessage(DynamicMessageType)
google_dot_protobuf_dot_internal_dot_more__extensions__pb2.ExtendedMessage.RegisterExtension(dynamic_int32_extension)
dynamic_message_extension.message_type = _DYNAMICMESSAGETYPE
google_dot_protobuf_dot_internal_dot_more__extensions__pb2.ExtendedMessage.RegisterExtension(dynamic_message_extension)
repeated_dynamic_message_extension.message_type = _DYNAMICMESSAGETYPE
google_dot_protobuf_dot_internal_dot_more__extensions__pb2.ExtendedMessage.RegisterExtension(repeated_dynamic_message_extension)
# @@protoc_insertion_point(module_scope)

@ -0,0 +1,200 @@
# -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: google/protobuf/internal/more_extensions.proto
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
DESCRIPTOR = _descriptor.FileDescriptor(
name='google/protobuf/internal/more_extensions.proto',
package='google.protobuf.internal',
syntax='proto2',
serialized_options=None,
create_key=_descriptor._internal_create_key,
serialized_pb=b'\n.google/protobuf/internal/more_extensions.proto\x12\x18google.protobuf.internal\"P\n\x0fTopLevelMessage\x12=\n\nsubmessage\x18\x01 \x01(\x0b\x32).google.protobuf.internal.ExtendedMessage\"K\n\x0f\x45xtendedMessage\x12\x17\n\x0eoptional_int32\x18\xe9\x07 \x01(\x05\x12\x18\n\x0frepeated_string\x18\xea\x07 \x03(\t*\x05\x08\x01\x10\xe8\x07\"-\n\x0e\x46oreignMessage\x12\x1b\n\x13\x66oreign_message_int\x18\x01 \x01(\x05:I\n\x16optional_int_extension\x12).google.protobuf.internal.ExtendedMessage\x18\x01 \x01(\x05:w\n\x1aoptional_message_extension\x12).google.protobuf.internal.ExtendedMessage\x18\x02 \x01(\x0b\x32(.google.protobuf.internal.ForeignMessage:I\n\x16repeated_int_extension\x12).google.protobuf.internal.ExtendedMessage\x18\x03 \x03(\x05:w\n\x1arepeated_message_extension\x12).google.protobuf.internal.ExtendedMessage\x18\x04 \x03(\x0b\x32(.google.protobuf.internal.ForeignMessage'
)
OPTIONAL_INT_EXTENSION_FIELD_NUMBER = 1
optional_int_extension = _descriptor.FieldDescriptor(
name='optional_int_extension', full_name='google.protobuf.internal.optional_int_extension', index=0,
number=1, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=True, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key)
OPTIONAL_MESSAGE_EXTENSION_FIELD_NUMBER = 2
optional_message_extension = _descriptor.FieldDescriptor(
name='optional_message_extension', full_name='google.protobuf.internal.optional_message_extension', index=1,
number=2, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=True, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key)
REPEATED_INT_EXTENSION_FIELD_NUMBER = 3
repeated_int_extension = _descriptor.FieldDescriptor(
name='repeated_int_extension', full_name='google.protobuf.internal.repeated_int_extension', index=2,
number=3, type=5, cpp_type=1, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=True, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key)
REPEATED_MESSAGE_EXTENSION_FIELD_NUMBER = 4
repeated_message_extension = _descriptor.FieldDescriptor(
name='repeated_message_extension', full_name='google.protobuf.internal.repeated_message_extension', index=3,
number=4, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=True, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key)
_TOPLEVELMESSAGE = _descriptor.Descriptor(
name='TopLevelMessage',
full_name='google.protobuf.internal.TopLevelMessage',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='submessage', full_name='google.protobuf.internal.TopLevelMessage.submessage', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=76,
serialized_end=156,
)
_EXTENDEDMESSAGE = _descriptor.Descriptor(
name='ExtendedMessage',
full_name='google.protobuf.internal.ExtendedMessage',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='optional_int32', full_name='google.protobuf.internal.ExtendedMessage.optional_int32', index=0,
number=1001, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='repeated_string', full_name='google.protobuf.internal.ExtendedMessage.repeated_string', index=1,
number=1002, type=9, cpp_type=9, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=True,
syntax='proto2',
extension_ranges=[(1, 1000), ],
oneofs=[
],
serialized_start=158,
serialized_end=233,
)
_FOREIGNMESSAGE = _descriptor.Descriptor(
name='ForeignMessage',
full_name='google.protobuf.internal.ForeignMessage',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='foreign_message_int', full_name='google.protobuf.internal.ForeignMessage.foreign_message_int', index=0,
number=1, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=235,
serialized_end=280,
)
_TOPLEVELMESSAGE.fields_by_name['submessage'].message_type = _EXTENDEDMESSAGE
DESCRIPTOR.message_types_by_name['TopLevelMessage'] = _TOPLEVELMESSAGE
DESCRIPTOR.message_types_by_name['ExtendedMessage'] = _EXTENDEDMESSAGE
DESCRIPTOR.message_types_by_name['ForeignMessage'] = _FOREIGNMESSAGE
DESCRIPTOR.extensions_by_name['optional_int_extension'] = optional_int_extension
DESCRIPTOR.extensions_by_name['optional_message_extension'] = optional_message_extension
DESCRIPTOR.extensions_by_name['repeated_int_extension'] = repeated_int_extension
DESCRIPTOR.extensions_by_name['repeated_message_extension'] = repeated_message_extension
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
TopLevelMessage = _reflection.GeneratedProtocolMessageType('TopLevelMessage', (_message.Message,), {
'DESCRIPTOR' : _TOPLEVELMESSAGE,
'__module__' : 'google.protobuf.internal.more_extensions_pb2'
# @@protoc_insertion_point(class_scope:google.protobuf.internal.TopLevelMessage)
})
_sym_db.RegisterMessage(TopLevelMessage)
ExtendedMessage = _reflection.GeneratedProtocolMessageType('ExtendedMessage', (_message.Message,), {
'DESCRIPTOR' : _EXTENDEDMESSAGE,
'__module__' : 'google.protobuf.internal.more_extensions_pb2'
# @@protoc_insertion_point(class_scope:google.protobuf.internal.ExtendedMessage)
})
_sym_db.RegisterMessage(ExtendedMessage)
ForeignMessage = _reflection.GeneratedProtocolMessageType('ForeignMessage', (_message.Message,), {
'DESCRIPTOR' : _FOREIGNMESSAGE,
'__module__' : 'google.protobuf.internal.more_extensions_pb2'
# @@protoc_insertion_point(class_scope:google.protobuf.internal.ForeignMessage)
})
_sym_db.RegisterMessage(ForeignMessage)
ExtendedMessage.RegisterExtension(optional_int_extension)
optional_message_extension.message_type = _FOREIGNMESSAGE
ExtendedMessage.RegisterExtension(optional_message_extension)
ExtendedMessage.RegisterExtension(repeated_int_extension)
repeated_message_extension.message_type = _FOREIGNMESSAGE
ExtendedMessage.RegisterExtension(repeated_message_extension)
# @@protoc_insertion_point(module_scope)
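A minimal usage sketch (not part of the generated file above), assuming the module imports as google.protobuf.internal.more_extensions_pb2: extension values are read and written through the Extensions map rather than as plain attributes.

from google.protobuf.internal import more_extensions_pb2

msg = more_extensions_pb2.ExtendedMessage()
# Singular extension: assign through the Extensions map.
msg.Extensions[more_extensions_pb2.optional_int_extension] = 42
# Repeated message extension: add() returns a ForeignMessage to fill in.
msg.Extensions[more_extensions_pb2.repeated_message_extension].add().foreign_message_int = 7
assert msg.HasExtension(more_extensions_pb2.optional_int_extension)
assert msg.Extensions[more_extensions_pb2.optional_int_extension] == 42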

File diff suppressed because one or more lines are too long

@@ -0,0 +1,101 @@
# -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: google/protobuf/internal/no_package.proto
from google.protobuf.internal import enum_type_wrapper
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
DESCRIPTOR = _descriptor.FileDescriptor(
name='google/protobuf/internal/no_package.proto',
package='',
syntax='proto2',
serialized_options=None,
create_key=_descriptor._internal_create_key,
serialized_pb=b'\n)google/protobuf/internal/no_package.proto\";\n\x10NoPackageMessage\x12\'\n\x0fno_package_enum\x18\x01 \x01(\x0e\x32\x0e.NoPackageEnum*?\n\rNoPackageEnum\x12\x16\n\x12NO_PACKAGE_VALUE_0\x10\x00\x12\x16\n\x12NO_PACKAGE_VALUE_1\x10\x01'
)
_NOPACKAGEENUM = _descriptor.EnumDescriptor(
name='NoPackageEnum',
full_name='NoPackageEnum',
filename=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key,
values=[
_descriptor.EnumValueDescriptor(
name='NO_PACKAGE_VALUE_0', index=0, number=0,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='NO_PACKAGE_VALUE_1', index=1, number=1,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
],
containing_type=None,
serialized_options=None,
serialized_start=106,
serialized_end=169,
)
_sym_db.RegisterEnumDescriptor(_NOPACKAGEENUM)
NoPackageEnum = enum_type_wrapper.EnumTypeWrapper(_NOPACKAGEENUM)
NO_PACKAGE_VALUE_0 = 0
NO_PACKAGE_VALUE_1 = 1
_NOPACKAGEMESSAGE = _descriptor.Descriptor(
name='NoPackageMessage',
full_name='NoPackageMessage',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='no_package_enum', full_name='NoPackageMessage.no_package_enum', index=0,
number=1, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=45,
serialized_end=104,
)
_NOPACKAGEMESSAGE.fields_by_name['no_package_enum'].enum_type = _NOPACKAGEENUM
DESCRIPTOR.message_types_by_name['NoPackageMessage'] = _NOPACKAGEMESSAGE
DESCRIPTOR.enum_types_by_name['NoPackageEnum'] = _NOPACKAGEENUM
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
NoPackageMessage = _reflection.GeneratedProtocolMessageType('NoPackageMessage', (_message.Message,), {
'DESCRIPTOR' : _NOPACKAGEMESSAGE,
'__module__' : 'google.protobuf.internal.no_package_pb2'
# @@protoc_insertion_point(class_scope:NoPackageMessage)
})
_sym_db.RegisterMessage(NoPackageMessage)
# @@protoc_insertion_point(module_scope)
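A short sketch (not part of the commit) of how the EnumTypeWrapper above is typically used, assuming the module imports as google.protobuf.internal.no_package_pb2:

from google.protobuf.internal import no_package_pb2

msg = no_package_pb2.NoPackageMessage(
    no_package_enum=no_package_pb2.NO_PACKAGE_VALUE_1)
# The wrapper maps between enum value names and numbers.
assert no_package_pb2.NoPackageEnum.Name(msg.no_package_enum) == 'NO_PACKAGE_VALUE_1'
assert no_package_pb2.NoPackageEnum.Value('NO_PACKAGE_VALUE_0') == 0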

@@ -0,0 +1,340 @@
# -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: google/protobuf/internal/packed_field_test.proto
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
DESCRIPTOR = _descriptor.FileDescriptor(
name='google/protobuf/internal/packed_field_test.proto',
package='google.protobuf.python.internal',
syntax='proto3',
serialized_options=None,
create_key=_descriptor._internal_create_key,
serialized_pb=b'\n0google/protobuf/internal/packed_field_test.proto\x12\x1fgoogle.protobuf.python.internal\"\xdb\x03\n\x0fTestPackedTypes\x12\x16\n\x0erepeated_int32\x18\x01 \x03(\x05\x12\x16\n\x0erepeated_int64\x18\x02 \x03(\x03\x12\x17\n\x0frepeated_uint32\x18\x03 \x03(\r\x12\x17\n\x0frepeated_uint64\x18\x04 \x03(\x04\x12\x17\n\x0frepeated_sint32\x18\x05 \x03(\x11\x12\x17\n\x0frepeated_sint64\x18\x06 \x03(\x12\x12\x18\n\x10repeated_fixed32\x18\x07 \x03(\x07\x12\x18\n\x10repeated_fixed64\x18\x08 \x03(\x06\x12\x19\n\x11repeated_sfixed32\x18\t \x03(\x0f\x12\x19\n\x11repeated_sfixed64\x18\n \x03(\x10\x12\x16\n\x0erepeated_float\x18\x0b \x03(\x02\x12\x17\n\x0frepeated_double\x18\x0c \x03(\x01\x12\x15\n\rrepeated_bool\x18\r \x03(\x08\x12Y\n\x14repeated_nested_enum\x18\x0e \x03(\x0e\x32;.google.protobuf.python.internal.TestPackedTypes.NestedEnum\"\'\n\nNestedEnum\x12\x07\n\x03\x46OO\x10\x00\x12\x07\n\x03\x42\x41R\x10\x01\x12\x07\n\x03\x42\x41Z\x10\x02\"\xec\x03\n\x11TestUnpackedTypes\x12\x1a\n\x0erepeated_int32\x18\x01 \x03(\x05\x42\x02\x10\x00\x12\x1a\n\x0erepeated_int64\x18\x02 \x03(\x03\x42\x02\x10\x00\x12\x1b\n\x0frepeated_uint32\x18\x03 \x03(\rB\x02\x10\x00\x12\x1b\n\x0frepeated_uint64\x18\x04 \x03(\x04\x42\x02\x10\x00\x12\x1b\n\x0frepeated_sint32\x18\x05 \x03(\x11\x42\x02\x10\x00\x12\x1b\n\x0frepeated_sint64\x18\x06 \x03(\x12\x42\x02\x10\x00\x12\x1c\n\x10repeated_fixed32\x18\x07 \x03(\x07\x42\x02\x10\x00\x12\x1c\n\x10repeated_fixed64\x18\x08 \x03(\x06\x42\x02\x10\x00\x12\x1d\n\x11repeated_sfixed32\x18\t \x03(\x0f\x42\x02\x10\x00\x12\x1d\n\x11repeated_sfixed64\x18\n \x03(\x10\x42\x02\x10\x00\x12\x1a\n\x0erepeated_float\x18\x0b \x03(\x02\x42\x02\x10\x00\x12\x1b\n\x0frepeated_double\x18\x0c \x03(\x01\x42\x02\x10\x00\x12\x19\n\rrepeated_bool\x18\r \x03(\x08\x42\x02\x10\x00\x12]\n\x14repeated_nested_enum\x18\x0e \x03(\x0e\x32;.google.protobuf.python.internal.TestPackedTypes.NestedEnumB\x02\x10\x00\x62\x06proto3'
)
_TESTPACKEDTYPES_NESTEDENUM = _descriptor.EnumDescriptor(
name='NestedEnum',
full_name='google.protobuf.python.internal.TestPackedTypes.NestedEnum',
filename=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key,
values=[
_descriptor.EnumValueDescriptor(
name='FOO', index=0, number=0,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='BAR', index=1, number=1,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='BAZ', index=2, number=2,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
],
containing_type=None,
serialized_options=None,
serialized_start=522,
serialized_end=561,
)
_sym_db.RegisterEnumDescriptor(_TESTPACKEDTYPES_NESTEDENUM)
_TESTPACKEDTYPES = _descriptor.Descriptor(
name='TestPackedTypes',
full_name='google.protobuf.python.internal.TestPackedTypes',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='repeated_int32', full_name='google.protobuf.python.internal.TestPackedTypes.repeated_int32', index=0,
number=1, type=5, cpp_type=1, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='repeated_int64', full_name='google.protobuf.python.internal.TestPackedTypes.repeated_int64', index=1,
number=2, type=3, cpp_type=2, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='repeated_uint32', full_name='google.protobuf.python.internal.TestPackedTypes.repeated_uint32', index=2,
number=3, type=13, cpp_type=3, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='repeated_uint64', full_name='google.protobuf.python.internal.TestPackedTypes.repeated_uint64', index=3,
number=4, type=4, cpp_type=4, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='repeated_sint32', full_name='google.protobuf.python.internal.TestPackedTypes.repeated_sint32', index=4,
number=5, type=17, cpp_type=1, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='repeated_sint64', full_name='google.protobuf.python.internal.TestPackedTypes.repeated_sint64', index=5,
number=6, type=18, cpp_type=2, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='repeated_fixed32', full_name='google.protobuf.python.internal.TestPackedTypes.repeated_fixed32', index=6,
number=7, type=7, cpp_type=3, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='repeated_fixed64', full_name='google.protobuf.python.internal.TestPackedTypes.repeated_fixed64', index=7,
number=8, type=6, cpp_type=4, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='repeated_sfixed32', full_name='google.protobuf.python.internal.TestPackedTypes.repeated_sfixed32', index=8,
number=9, type=15, cpp_type=1, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='repeated_sfixed64', full_name='google.protobuf.python.internal.TestPackedTypes.repeated_sfixed64', index=9,
number=10, type=16, cpp_type=2, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='repeated_float', full_name='google.protobuf.python.internal.TestPackedTypes.repeated_float', index=10,
number=11, type=2, cpp_type=6, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='repeated_double', full_name='google.protobuf.python.internal.TestPackedTypes.repeated_double', index=11,
number=12, type=1, cpp_type=5, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='repeated_bool', full_name='google.protobuf.python.internal.TestPackedTypes.repeated_bool', index=12,
number=13, type=8, cpp_type=7, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='repeated_nested_enum', full_name='google.protobuf.python.internal.TestPackedTypes.repeated_nested_enum', index=13,
number=14, type=14, cpp_type=8, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
_TESTPACKEDTYPES_NESTEDENUM,
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=86,
serialized_end=561,
)
_TESTUNPACKEDTYPES = _descriptor.Descriptor(
name='TestUnpackedTypes',
full_name='google.protobuf.python.internal.TestUnpackedTypes',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='repeated_int32', full_name='google.protobuf.python.internal.TestUnpackedTypes.repeated_int32', index=0,
number=1, type=5, cpp_type=1, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=b'\020\000', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='repeated_int64', full_name='google.protobuf.python.internal.TestUnpackedTypes.repeated_int64', index=1,
number=2, type=3, cpp_type=2, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=b'\020\000', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='repeated_uint32', full_name='google.protobuf.python.internal.TestUnpackedTypes.repeated_uint32', index=2,
number=3, type=13, cpp_type=3, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=b'\020\000', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='repeated_uint64', full_name='google.protobuf.python.internal.TestUnpackedTypes.repeated_uint64', index=3,
number=4, type=4, cpp_type=4, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=b'\020\000', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='repeated_sint32', full_name='google.protobuf.python.internal.TestUnpackedTypes.repeated_sint32', index=4,
number=5, type=17, cpp_type=1, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=b'\020\000', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='repeated_sint64', full_name='google.protobuf.python.internal.TestUnpackedTypes.repeated_sint64', index=5,
number=6, type=18, cpp_type=2, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=b'\020\000', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='repeated_fixed32', full_name='google.protobuf.python.internal.TestUnpackedTypes.repeated_fixed32', index=6,
number=7, type=7, cpp_type=3, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=b'\020\000', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='repeated_fixed64', full_name='google.protobuf.python.internal.TestUnpackedTypes.repeated_fixed64', index=7,
number=8, type=6, cpp_type=4, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=b'\020\000', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='repeated_sfixed32', full_name='google.protobuf.python.internal.TestUnpackedTypes.repeated_sfixed32', index=8,
number=9, type=15, cpp_type=1, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=b'\020\000', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='repeated_sfixed64', full_name='google.protobuf.python.internal.TestUnpackedTypes.repeated_sfixed64', index=9,
number=10, type=16, cpp_type=2, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=b'\020\000', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='repeated_float', full_name='google.protobuf.python.internal.TestUnpackedTypes.repeated_float', index=10,
number=11, type=2, cpp_type=6, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=b'\020\000', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='repeated_double', full_name='google.protobuf.python.internal.TestUnpackedTypes.repeated_double', index=11,
number=12, type=1, cpp_type=5, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=b'\020\000', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='repeated_bool', full_name='google.protobuf.python.internal.TestUnpackedTypes.repeated_bool', index=12,
number=13, type=8, cpp_type=7, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=b'\020\000', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='repeated_nested_enum', full_name='google.protobuf.python.internal.TestUnpackedTypes.repeated_nested_enum', index=13,
number=14, type=14, cpp_type=8, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=b'\020\000', file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=564,
serialized_end=1056,
)
_TESTPACKEDTYPES.fields_by_name['repeated_nested_enum'].enum_type = _TESTPACKEDTYPES_NESTEDENUM
_TESTPACKEDTYPES_NESTEDENUM.containing_type = _TESTPACKEDTYPES
_TESTUNPACKEDTYPES.fields_by_name['repeated_nested_enum'].enum_type = _TESTPACKEDTYPES_NESTEDENUM
DESCRIPTOR.message_types_by_name['TestPackedTypes'] = _TESTPACKEDTYPES
DESCRIPTOR.message_types_by_name['TestUnpackedTypes'] = _TESTUNPACKEDTYPES
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
TestPackedTypes = _reflection.GeneratedProtocolMessageType('TestPackedTypes', (_message.Message,), {
'DESCRIPTOR' : _TESTPACKEDTYPES,
'__module__' : 'google.protobuf.internal.packed_field_test_pb2'
# @@protoc_insertion_point(class_scope:google.protobuf.python.internal.TestPackedTypes)
})
_sym_db.RegisterMessage(TestPackedTypes)
TestUnpackedTypes = _reflection.GeneratedProtocolMessageType('TestUnpackedTypes', (_message.Message,), {
'DESCRIPTOR' : _TESTUNPACKEDTYPES,
'__module__' : 'google.protobuf.internal.packed_field_test_pb2'
# @@protoc_insertion_point(class_scope:google.protobuf.python.internal.TestUnpackedTypes)
})
_sym_db.RegisterMessage(TestUnpackedTypes)
_TESTUNPACKEDTYPES.fields_by_name['repeated_int32']._options = None
_TESTUNPACKEDTYPES.fields_by_name['repeated_int64']._options = None
_TESTUNPACKEDTYPES.fields_by_name['repeated_uint32']._options = None
_TESTUNPACKEDTYPES.fields_by_name['repeated_uint64']._options = None
_TESTUNPACKEDTYPES.fields_by_name['repeated_sint32']._options = None
_TESTUNPACKEDTYPES.fields_by_name['repeated_sint64']._options = None
_TESTUNPACKEDTYPES.fields_by_name['repeated_fixed32']._options = None
_TESTUNPACKEDTYPES.fields_by_name['repeated_fixed64']._options = None
_TESTUNPACKEDTYPES.fields_by_name['repeated_sfixed32']._options = None
_TESTUNPACKEDTYPES.fields_by_name['repeated_sfixed64']._options = None
_TESTUNPACKEDTYPES.fields_by_name['repeated_float']._options = None
_TESTUNPACKEDTYPES.fields_by_name['repeated_double']._options = None
_TESTUNPACKEDTYPES.fields_by_name['repeated_bool']._options = None
_TESTUNPACKEDTYPES.fields_by_name['repeated_nested_enum']._options = None
# @@protoc_insertion_point(module_scope)
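The two messages above differ only in the [packed = false] field options on TestUnpackedTypes, so the same values should serialize to more bytes there (one tag per element instead of a single length-delimited block). A rough check, not part of the commit:

from google.protobuf.internal import packed_field_test_pb2

packed = packed_field_test_pb2.TestPackedTypes(repeated_int32=[1, 2, 3])
unpacked = packed_field_test_pb2.TestUnpackedTypes(repeated_int32=[1, 2, 3])
# Packed encoding: one tag + length + three varints; unpacked repeats the tag per element.
assert len(packed.SerializeToString()) < len(unpacked.SerializeToString())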

@@ -0,0 +1,96 @@
#! /usr/bin/env python
#
# Protocol Buffers - Google's data interchange format
# Copyright 2008 Google Inc. All rights reserved.
# https://developers.google.com/protocol-buffers/
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Tests for google.protobuf.proto_builder."""
try:
from collections import OrderedDict
except ImportError:
from ordereddict import OrderedDict #PY26
try:
import unittest2 as unittest
except ImportError:
import unittest
from google.protobuf import descriptor_pb2
from google.protobuf import descriptor_pool
from google.protobuf import proto_builder
from google.protobuf import text_format
class ProtoBuilderTest(unittest.TestCase):
def setUp(self):
self.ordered_fields = OrderedDict([
('foo', descriptor_pb2.FieldDescriptorProto.TYPE_INT64),
('bar', descriptor_pb2.FieldDescriptorProto.TYPE_STRING),
])
self._fields = dict(self.ordered_fields)
def testMakeSimpleProtoClass(self):
"""Test that we can create a proto class."""
proto_cls = proto_builder.MakeSimpleProtoClass(
self._fields,
full_name='net.proto2.python.public.proto_builder_test.Test')
proto = proto_cls()
proto.foo = 12345
proto.bar = 'asdf'
self.assertMultiLineEqual(
'bar: "asdf"\nfoo: 12345\n', text_format.MessageToString(proto))
def testOrderedFields(self):
"""Test that the field order is maintained when given an OrderedDict."""
proto_cls = proto_builder.MakeSimpleProtoClass(
self.ordered_fields,
full_name='net.proto2.python.public.proto_builder_test.OrderedTest')
proto = proto_cls()
proto.foo = 12345
proto.bar = 'asdf'
self.assertMultiLineEqual(
'foo: 12345\nbar: "asdf"\n', text_format.MessageToString(proto))
def testMakeSameProtoClassTwice(self):
"""Test that the DescriptorPool is used."""
pool = descriptor_pool.DescriptorPool()
proto_cls1 = proto_builder.MakeSimpleProtoClass(
self._fields,
full_name='net.proto2.python.public.proto_builder_test.Test',
pool=pool)
proto_cls2 = proto_builder.MakeSimpleProtoClass(
self._fields,
full_name='net.proto2.python.public.proto_builder_test.Test',
pool=pool)
self.assertIs(proto_cls1.DESCRIPTOR, proto_cls2.DESCRIPTOR)
if __name__ == '__main__':
unittest.main()
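As a companion to the tests above (an illustrative sketch only; the full name below is made up), the class returned by MakeSimpleProtoClass behaves like any generated message and round-trips through the wire format:

from google.protobuf import descriptor_pb2
from google.protobuf import proto_builder

RoundTrip = proto_builder.MakeSimpleProtoClass(
    {'foo': descriptor_pb2.FieldDescriptorProto.TYPE_INT64},
    full_name='example.RoundTrip')  # hypothetical full name
original = RoundTrip(foo=12345)
copy = RoundTrip.FromString(original.SerializeToString())
assert copy.foo == 12345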

@@ -0,0 +1,144 @@
#! /usr/bin/env python
#
# Protocol Buffers - Google's data interchange format
# Copyright 2008 Google Inc. All rights reserved.
# https://developers.google.com/protocol-buffers/
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Tests for google.protobuf.internal.service_reflection."""
__author__ = 'petar@google.com (Petar Petrov)'
try:
import unittest2 as unittest #PY26
except ImportError:
import unittest
from google.protobuf import unittest_pb2
from google.protobuf import service_reflection
from google.protobuf import service
class FooUnitTest(unittest.TestCase):
def testService(self):
class MockRpcChannel(service.RpcChannel):
def CallMethod(self, method, controller, request, response, callback):
self.method = method
self.controller = controller
self.request = request
callback(response)
class MockRpcController(service.RpcController):
def SetFailed(self, msg):
self.failure_message = msg
self.callback_response = None
class MyService(unittest_pb2.TestService):
pass
self.callback_response = None
def MyCallback(response):
self.callback_response = response
rpc_controller = MockRpcController()
channel = MockRpcChannel()
srvc = MyService()
srvc.Foo(rpc_controller, unittest_pb2.FooRequest(), MyCallback)
self.assertEqual('Method Foo not implemented.',
rpc_controller.failure_message)
self.assertEqual(None, self.callback_response)
rpc_controller.failure_message = None
service_descriptor = unittest_pb2.TestService.GetDescriptor()
srvc.CallMethod(service_descriptor.methods[1], rpc_controller,
unittest_pb2.BarRequest(), MyCallback)
self.assertTrue(srvc.GetRequestClass(service_descriptor.methods[1]) is
unittest_pb2.BarRequest)
self.assertTrue(srvc.GetResponseClass(service_descriptor.methods[1]) is
unittest_pb2.BarResponse)
self.assertEqual('Method Bar not implemented.',
rpc_controller.failure_message)
self.assertEqual(None, self.callback_response)
class MyServiceImpl(unittest_pb2.TestService):
def Foo(self, rpc_controller, request, done):
self.foo_called = True
def Bar(self, rpc_controller, request, done):
self.bar_called = True
srvc = MyServiceImpl()
rpc_controller.failure_message = None
srvc.Foo(rpc_controller, unittest_pb2.FooRequest(), MyCallback)
self.assertEqual(None, rpc_controller.failure_message)
self.assertEqual(True, srvc.foo_called)
rpc_controller.failure_message = None
srvc.CallMethod(service_descriptor.methods[1], rpc_controller,
unittest_pb2.BarRequest(), MyCallback)
self.assertEqual(None, rpc_controller.failure_message)
self.assertEqual(True, srvc.bar_called)
def testServiceStub(self):
class MockRpcChannel(service.RpcChannel):
def CallMethod(self, method, controller, request,
response_class, callback):
self.method = method
self.controller = controller
self.request = request
callback(response_class())
self.callback_response = None
def MyCallback(response):
self.callback_response = response
channel = MockRpcChannel()
stub = unittest_pb2.TestService_Stub(channel)
rpc_controller = 'controller'
request = 'request'
# GetDescriptor now static, still works as instance method for compatibility
self.assertEqual(unittest_pb2.TestService_Stub.GetDescriptor(),
stub.GetDescriptor())
# Invoke method.
stub.Foo(rpc_controller, request, MyCallback)
self.assertIsInstance(self.callback_response, unittest_pb2.FooResponse)
self.assertEqual(request, channel.request)
self.assertEqual(rpc_controller, channel.controller)
self.assertEqual(stub.GetDescriptor().methods[0], channel.method)
if __name__ == '__main__':
unittest.main()
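A quick illustration (not from the commit) of the descriptor the test relies on: the generated service class and its stub share one ServiceDescriptor, whose methods list drives CallMethod dispatch, with methods[0] being Foo and methods[1] being Bar.

from google.protobuf import unittest_pb2

desc = unittest_pb2.TestService.GetDescriptor()
assert [m.name for m in desc.methods] == ['Foo', 'Bar']
# Per-method request/response classes are available on any service instance,
# as exercised by GetRequestClass()/GetResponseClass() in the test above.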

@@ -0,0 +1,138 @@
#! /usr/bin/env python
#
# Protocol Buffers - Google's data interchange format
# Copyright 2008 Google Inc. All rights reserved.
# https://developers.google.com/protocol-buffers/
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Tests for google.protobuf.symbol_database."""
try:
import unittest2 as unittest #PY26
except ImportError:
import unittest
from google.protobuf import unittest_pb2
from google.protobuf import descriptor
from google.protobuf import descriptor_pool
from google.protobuf import symbol_database
class SymbolDatabaseTest(unittest.TestCase):
def _Database(self):
if descriptor._USE_C_DESCRIPTORS:
# The C++ implementation does not allow mixing descriptors from
# different pools.
db = symbol_database.SymbolDatabase(pool=descriptor_pool.Default())
else:
db = symbol_database.SymbolDatabase()
# Register representative types from unittest_pb2.
db.RegisterFileDescriptor(unittest_pb2.DESCRIPTOR)
db.RegisterMessage(unittest_pb2.TestAllTypes)
db.RegisterMessage(unittest_pb2.TestAllTypes.NestedMessage)
db.RegisterMessage(unittest_pb2.TestAllTypes.OptionalGroup)
db.RegisterMessage(unittest_pb2.TestAllTypes.RepeatedGroup)
db.RegisterEnumDescriptor(unittest_pb2.ForeignEnum.DESCRIPTOR)
db.RegisterEnumDescriptor(unittest_pb2.TestAllTypes.NestedEnum.DESCRIPTOR)
db.RegisterServiceDescriptor(unittest_pb2._TESTSERVICE)
return db
def testGetPrototype(self):
instance = self._Database().GetPrototype(
unittest_pb2.TestAllTypes.DESCRIPTOR)
self.assertTrue(instance is unittest_pb2.TestAllTypes)
def testGetMessages(self):
messages = self._Database().GetMessages(
['google/protobuf/unittest.proto'])
self.assertTrue(
unittest_pb2.TestAllTypes is
messages['protobuf_unittest.TestAllTypes'])
def testGetSymbol(self):
self.assertEqual(
unittest_pb2.TestAllTypes, self._Database().GetSymbol(
'protobuf_unittest.TestAllTypes'))
self.assertEqual(
unittest_pb2.TestAllTypes.NestedMessage, self._Database().GetSymbol(
'protobuf_unittest.TestAllTypes.NestedMessage'))
self.assertEqual(
unittest_pb2.TestAllTypes.OptionalGroup, self._Database().GetSymbol(
'protobuf_unittest.TestAllTypes.OptionalGroup'))
self.assertEqual(
unittest_pb2.TestAllTypes.RepeatedGroup, self._Database().GetSymbol(
'protobuf_unittest.TestAllTypes.RepeatedGroup'))
def testEnums(self):
# Check registration of types in the pool.
self.assertEqual(
'protobuf_unittest.ForeignEnum',
self._Database().pool.FindEnumTypeByName(
'protobuf_unittest.ForeignEnum').full_name)
self.assertEqual(
'protobuf_unittest.TestAllTypes.NestedEnum',
self._Database().pool.FindEnumTypeByName(
'protobuf_unittest.TestAllTypes.NestedEnum').full_name)
def testFindMessageTypeByName(self):
self.assertEqual(
'protobuf_unittest.TestAllTypes',
self._Database().pool.FindMessageTypeByName(
'protobuf_unittest.TestAllTypes').full_name)
self.assertEqual(
'protobuf_unittest.TestAllTypes.NestedMessage',
self._Database().pool.FindMessageTypeByName(
'protobuf_unittest.TestAllTypes.NestedMessage').full_name)
def testFindServiceByName(self):
self.assertEqual(
'protobuf_unittest.TestService',
self._Database().pool.FindServiceByName(
'protobuf_unittest.TestService').full_name)
def testFindFileContainingSymbol(self):
# Lookup based on either enum or message.
self.assertEqual(
'google/protobuf/unittest.proto',
self._Database().pool.FindFileContainingSymbol(
'protobuf_unittest.TestAllTypes.NestedEnum').name)
self.assertEqual(
'google/protobuf/unittest.proto',
self._Database().pool.FindFileContainingSymbol(
'protobuf_unittest.TestAllTypes').name)
def testFindFileByName(self):
self.assertEqual(
'google/protobuf/unittest.proto',
self._Database().pool.FindFileByName(
'google/protobuf/unittest.proto').name)
if __name__ == '__main__':
unittest.main()
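Outside of tests, generated modules register themselves with the default database on import, so lookups usually need no explicit Register* calls. A minimal sketch (an assumption about typical usage, not part of the commit):

from google.protobuf import symbol_database
from google.protobuf import unittest_pb2  # importing the module registers its types

db = symbol_database.Default()
assert db.GetSymbol('protobuf_unittest.TestAllTypes') is unittest_pb2.TestAllTypes
assert db.pool.FindMessageTypeByName(
    'protobuf_unittest.TestAllTypes').full_name == 'protobuf_unittest.TestAllTypes'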

@@ -0,0 +1,166 @@
# -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: google/protobuf/internal/test_bad_identifiers.proto
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
from google.protobuf import service as _service
from google.protobuf import service_reflection
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
DESCRIPTOR = _descriptor.FileDescriptor(
name='google/protobuf/internal/test_bad_identifiers.proto',
package='protobuf_unittest',
syntax='proto2',
serialized_options=b'\220\001\001',
create_key=_descriptor._internal_create_key,
serialized_pb=b'\n3google/protobuf/internal/test_bad_identifiers.proto\x12\x11protobuf_unittest\"\x1e\n\x12TestBadIdentifiers*\x08\x08\x64\x10\x80\x80\x80\x80\x02\"\x10\n\x0e\x41notherMessage2\x10\n\x0e\x41notherService:;\n\x07message\x12%.protobuf_unittest.TestBadIdentifiers\x18\x64 \x01(\t:\x03\x66oo:>\n\ndescriptor\x12%.protobuf_unittest.TestBadIdentifiers\x18\x65 \x01(\t:\x03\x62\x61r:>\n\nreflection\x12%.protobuf_unittest.TestBadIdentifiers\x18\x66 \x01(\t:\x03\x62\x61z:;\n\x07service\x12%.protobuf_unittest.TestBadIdentifiers\x18g \x01(\t:\x03quxB\x03\x90\x01\x01'
)
MESSAGE_FIELD_NUMBER = 100
message = _descriptor.FieldDescriptor(
name='message', full_name='protobuf_unittest.message', index=0,
number=100, type=9, cpp_type=9, label=1,
has_default_value=True, default_value=b"foo".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=True, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key)
DESCRIPTOR_FIELD_NUMBER = 101
descriptor = _descriptor.FieldDescriptor(
name='descriptor', full_name='protobuf_unittest.descriptor', index=1,
number=101, type=9, cpp_type=9, label=1,
has_default_value=True, default_value=b"bar".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=True, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key)
REFLECTION_FIELD_NUMBER = 102
reflection = _descriptor.FieldDescriptor(
name='reflection', full_name='protobuf_unittest.reflection', index=2,
number=102, type=9, cpp_type=9, label=1,
has_default_value=True, default_value=b"baz".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=True, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key)
SERVICE_FIELD_NUMBER = 103
service = _descriptor.FieldDescriptor(
name='service', full_name='protobuf_unittest.service', index=3,
number=103, type=9, cpp_type=9, label=1,
has_default_value=True, default_value=b"qux".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=True, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key)
_TESTBADIDENTIFIERS = _descriptor.Descriptor(
name='TestBadIdentifiers',
full_name='protobuf_unittest.TestBadIdentifiers',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=True,
syntax='proto2',
extension_ranges=[(100, 536870912), ],
oneofs=[
],
serialized_start=74,
serialized_end=104,
)
_ANOTHERMESSAGE = _descriptor.Descriptor(
name='AnotherMessage',
full_name='protobuf_unittest.AnotherMessage',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=106,
serialized_end=122,
)
DESCRIPTOR.message_types_by_name['TestBadIdentifiers'] = _TESTBADIDENTIFIERS
DESCRIPTOR.message_types_by_name['AnotherMessage'] = _ANOTHERMESSAGE
DESCRIPTOR.extensions_by_name['message'] = message
DESCRIPTOR.extensions_by_name['descriptor'] = descriptor
DESCRIPTOR.extensions_by_name['reflection'] = reflection
DESCRIPTOR.extensions_by_name['service'] = service
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
TestBadIdentifiers = _reflection.GeneratedProtocolMessageType('TestBadIdentifiers', (_message.Message,), {
'DESCRIPTOR' : _TESTBADIDENTIFIERS,
'__module__' : 'google.protobuf.internal.test_bad_identifiers_pb2'
# @@protoc_insertion_point(class_scope:protobuf_unittest.TestBadIdentifiers)
})
_sym_db.RegisterMessage(TestBadIdentifiers)
AnotherMessage = _reflection.GeneratedProtocolMessageType('AnotherMessage', (_message.Message,), {
'DESCRIPTOR' : _ANOTHERMESSAGE,
'__module__' : 'google.protobuf.internal.test_bad_identifiers_pb2'
# @@protoc_insertion_point(class_scope:protobuf_unittest.AnotherMessage)
})
_sym_db.RegisterMessage(AnotherMessage)
TestBadIdentifiers.RegisterExtension(message)
TestBadIdentifiers.RegisterExtension(descriptor)
TestBadIdentifiers.RegisterExtension(reflection)
TestBadIdentifiers.RegisterExtension(service)
DESCRIPTOR._options = None
_ANOTHERSERVICE = _descriptor.ServiceDescriptor(
name='AnotherService',
full_name='protobuf_unittest.AnotherService',
file=DESCRIPTOR,
index=0,
serialized_options=None,
create_key=_descriptor._internal_create_key,
serialized_start=124,
serialized_end=140,
methods=[
])
_sym_db.RegisterServiceDescriptor(_ANOTHERSERVICE)
DESCRIPTOR.services_by_name['AnotherService'] = _ANOTHERSERVICE
AnotherService = service_reflection.GeneratedServiceType('AnotherService', (_service.Service,), dict(
DESCRIPTOR = _ANOTHERSERVICE,
__module__ = 'google.protobuf.internal.test_bad_identifiers_pb2'
))
AnotherService_Stub = service_reflection.GeneratedServiceStubType('AnotherService_Stub', (AnotherService,), dict(
DESCRIPTOR = _ANOTHERSERVICE,
__module__ = 'google.protobuf.internal.test_bad_identifiers_pb2'
))
# @@protoc_insertion_point(module_scope)
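The point of this file is that the extension names message, descriptor, reflection and service collide with common protobuf module names; they remain reachable as module attributes and keep their declared string defaults. A brief sketch (not part of the commit):

from google.protobuf.internal import test_bad_identifiers_pb2 as bad_ids

msg = bad_ids.TestBadIdentifiers()
# Unset singular extensions report their declared defaults.
assert msg.Extensions[bad_ids.message] == 'foo'
assert msg.Extensions[bad_ids.descriptor] == 'bar'
assert msg.Extensions[bad_ids.reflection] == 'baz'
assert msg.Extensions[bad_ids.service] == 'qux'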

@@ -0,0 +1,410 @@
# -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: google/protobuf/internal/test_proto3_optional.proto
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
DESCRIPTOR = _descriptor.FileDescriptor(
name='google/protobuf/internal/test_proto3_optional.proto',
package='google.protobuf.python.internal',
syntax='proto3',
serialized_options=None,
create_key=_descriptor._internal_create_key,
serialized_pb=b'\n3google/protobuf/internal/test_proto3_optional.proto\x12\x1fgoogle.protobuf.python.internal\"\xf8\x08\n\x12TestProto3Optional\x12\x1b\n\x0eoptional_int32\x18\x01 \x01(\x05H\x00\x88\x01\x01\x12\x1b\n\x0eoptional_int64\x18\x02 \x01(\x03H\x01\x88\x01\x01\x12\x1c\n\x0foptional_uint32\x18\x03 \x01(\rH\x02\x88\x01\x01\x12\x1c\n\x0foptional_uint64\x18\x04 \x01(\x04H\x03\x88\x01\x01\x12\x1c\n\x0foptional_sint32\x18\x05 \x01(\x11H\x04\x88\x01\x01\x12\x1c\n\x0foptional_sint64\x18\x06 \x01(\x12H\x05\x88\x01\x01\x12\x1d\n\x10optional_fixed32\x18\x07 \x01(\x07H\x06\x88\x01\x01\x12\x1d\n\x10optional_fixed64\x18\x08 \x01(\x06H\x07\x88\x01\x01\x12\x1e\n\x11optional_sfixed32\x18\t \x01(\x0fH\x08\x88\x01\x01\x12\x1e\n\x11optional_sfixed64\x18\n \x01(\x10H\t\x88\x01\x01\x12\x1b\n\x0eoptional_float\x18\x0b \x01(\x02H\n\x88\x01\x01\x12\x1c\n\x0foptional_double\x18\x0c \x01(\x01H\x0b\x88\x01\x01\x12\x1a\n\roptional_bool\x18\r \x01(\x08H\x0c\x88\x01\x01\x12\x1c\n\x0foptional_string\x18\x0e \x01(\tH\r\x88\x01\x01\x12\x1b\n\x0eoptional_bytes\x18\x0f \x01(\x0cH\x0e\x88\x01\x01\x12g\n\x17optional_nested_message\x18\x12 \x01(\x0b\x32\x41.google.protobuf.python.internal.TestProto3Optional.NestedMessageH\x0f\x88\x01\x01\x12\x61\n\x14optional_nested_enum\x18\x15 \x01(\x0e\x32>.google.protobuf.python.internal.TestProto3Optional.NestedEnumH\x10\x88\x01\x01\x1a\'\n\rNestedMessage\x12\x0f\n\x02\x62\x62\x18\x01 \x01(\x05H\x00\x88\x01\x01\x42\x05\n\x03_bb\"J\n\nNestedEnum\x12\x0f\n\x0bUNSPECIFIED\x10\x00\x12\x07\n\x03\x46OO\x10\x01\x12\x07\n\x03\x42\x41R\x10\x02\x12\x07\n\x03\x42\x41Z\x10\x03\x12\x10\n\x03NEG\x10\xff\xff\xff\xff\xff\xff\xff\xff\xff\x01\x42\x11\n\x0f_optional_int32B\x11\n\x0f_optional_int64B\x12\n\x10_optional_uint32B\x12\n\x10_optional_uint64B\x12\n\x10_optional_sint32B\x12\n\x10_optional_sint64B\x13\n\x11_optional_fixed32B\x13\n\x11_optional_fixed64B\x14\n\x12_optional_sfixed32B\x14\n\x12_optional_sfixed64B\x11\n\x0f_optional_floatB\x12\n\x10_optional_doubleB\x10\n\x0e_optional_boolB\x12\n\x10_optional_stringB\x11\n\x0f_optional_bytesB\x1a\n\x18_optional_nested_messageB\x17\n\x15_optional_nested_enumb\x06proto3'
)
_TESTPROTO3OPTIONAL_NESTEDENUM = _descriptor.EnumDescriptor(
name='NestedEnum',
full_name='google.protobuf.python.internal.TestProto3Optional.NestedEnum',
filename=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key,
values=[
_descriptor.EnumValueDescriptor(
name='UNSPECIFIED', index=0, number=0,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='FOO', index=1, number=1,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='BAR', index=2, number=2,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='BAZ', index=3, number=3,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='NEG', index=4, number=-1,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
],
containing_type=None,
serialized_options=None,
serialized_start=806,
serialized_end=880,
)
_sym_db.RegisterEnumDescriptor(_TESTPROTO3OPTIONAL_NESTEDENUM)
_TESTPROTO3OPTIONAL_NESTEDMESSAGE = _descriptor.Descriptor(
name='NestedMessage',
full_name='google.protobuf.python.internal.TestProto3Optional.NestedMessage',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='bb', full_name='google.protobuf.python.internal.TestProto3Optional.NestedMessage.bb', index=0,
number=1, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
_descriptor.OneofDescriptor(
name='_bb', full_name='google.protobuf.python.internal.TestProto3Optional.NestedMessage._bb',
index=0, containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[]),
],
serialized_start=765,
serialized_end=804,
)
_TESTPROTO3OPTIONAL = _descriptor.Descriptor(
name='TestProto3Optional',
full_name='google.protobuf.python.internal.TestProto3Optional',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='optional_int32', full_name='google.protobuf.python.internal.TestProto3Optional.optional_int32', index=0,
number=1, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='optional_int64', full_name='google.protobuf.python.internal.TestProto3Optional.optional_int64', index=1,
number=2, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='optional_uint32', full_name='google.protobuf.python.internal.TestProto3Optional.optional_uint32', index=2,
number=3, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='optional_uint64', full_name='google.protobuf.python.internal.TestProto3Optional.optional_uint64', index=3,
number=4, type=4, cpp_type=4, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='optional_sint32', full_name='google.protobuf.python.internal.TestProto3Optional.optional_sint32', index=4,
number=5, type=17, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='optional_sint64', full_name='google.protobuf.python.internal.TestProto3Optional.optional_sint64', index=5,
number=6, type=18, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='optional_fixed32', full_name='google.protobuf.python.internal.TestProto3Optional.optional_fixed32', index=6,
number=7, type=7, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='optional_fixed64', full_name='google.protobuf.python.internal.TestProto3Optional.optional_fixed64', index=7,
number=8, type=6, cpp_type=4, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='optional_sfixed32', full_name='google.protobuf.python.internal.TestProto3Optional.optional_sfixed32', index=8,
number=9, type=15, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='optional_sfixed64', full_name='google.protobuf.python.internal.TestProto3Optional.optional_sfixed64', index=9,
number=10, type=16, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='optional_float', full_name='google.protobuf.python.internal.TestProto3Optional.optional_float', index=10,
number=11, type=2, cpp_type=6, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='optional_double', full_name='google.protobuf.python.internal.TestProto3Optional.optional_double', index=11,
number=12, type=1, cpp_type=5, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='optional_bool', full_name='google.protobuf.python.internal.TestProto3Optional.optional_bool', index=12,
number=13, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='optional_string', full_name='google.protobuf.python.internal.TestProto3Optional.optional_string', index=13,
number=14, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='optional_bytes', full_name='google.protobuf.python.internal.TestProto3Optional.optional_bytes', index=14,
number=15, type=12, cpp_type=9, label=1,
has_default_value=False, default_value=b"",
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='optional_nested_message', full_name='google.protobuf.python.internal.TestProto3Optional.optional_nested_message', index=15,
number=18, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='optional_nested_enum', full_name='google.protobuf.python.internal.TestProto3Optional.optional_nested_enum', index=16,
number=21, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[_TESTPROTO3OPTIONAL_NESTEDMESSAGE, ],
enum_types=[
_TESTPROTO3OPTIONAL_NESTEDENUM,
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
_descriptor.OneofDescriptor(
name='_optional_int32', full_name='google.protobuf.python.internal.TestProto3Optional._optional_int32',
index=0, containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[]),
_descriptor.OneofDescriptor(
name='_optional_int64', full_name='google.protobuf.python.internal.TestProto3Optional._optional_int64',
index=1, containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[]),
_descriptor.OneofDescriptor(
name='_optional_uint32', full_name='google.protobuf.python.internal.TestProto3Optional._optional_uint32',
index=2, containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[]),
_descriptor.OneofDescriptor(
name='_optional_uint64', full_name='google.protobuf.python.internal.TestProto3Optional._optional_uint64',
index=3, containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[]),
_descriptor.OneofDescriptor(
name='_optional_sint32', full_name='google.protobuf.python.internal.TestProto3Optional._optional_sint32',
index=4, containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[]),
_descriptor.OneofDescriptor(
name='_optional_sint64', full_name='google.protobuf.python.internal.TestProto3Optional._optional_sint64',
index=5, containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[]),
_descriptor.OneofDescriptor(
name='_optional_fixed32', full_name='google.protobuf.python.internal.TestProto3Optional._optional_fixed32',
index=6, containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[]),
_descriptor.OneofDescriptor(
name='_optional_fixed64', full_name='google.protobuf.python.internal.TestProto3Optional._optional_fixed64',
index=7, containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[]),
_descriptor.OneofDescriptor(
name='_optional_sfixed32', full_name='google.protobuf.python.internal.TestProto3Optional._optional_sfixed32',
index=8, containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[]),
_descriptor.OneofDescriptor(
name='_optional_sfixed64', full_name='google.protobuf.python.internal.TestProto3Optional._optional_sfixed64',
index=9, containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[]),
_descriptor.OneofDescriptor(
name='_optional_float', full_name='google.protobuf.python.internal.TestProto3Optional._optional_float',
index=10, containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[]),
_descriptor.OneofDescriptor(
name='_optional_double', full_name='google.protobuf.python.internal.TestProto3Optional._optional_double',
index=11, containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[]),
_descriptor.OneofDescriptor(
name='_optional_bool', full_name='google.protobuf.python.internal.TestProto3Optional._optional_bool',
index=12, containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[]),
_descriptor.OneofDescriptor(
name='_optional_string', full_name='google.protobuf.python.internal.TestProto3Optional._optional_string',
index=13, containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[]),
_descriptor.OneofDescriptor(
name='_optional_bytes', full_name='google.protobuf.python.internal.TestProto3Optional._optional_bytes',
index=14, containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[]),
_descriptor.OneofDescriptor(
name='_optional_nested_message', full_name='google.protobuf.python.internal.TestProto3Optional._optional_nested_message',
index=15, containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[]),
_descriptor.OneofDescriptor(
name='_optional_nested_enum', full_name='google.protobuf.python.internal.TestProto3Optional._optional_nested_enum',
index=16, containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[]),
],
serialized_start=89,
serialized_end=1233,
)
_TESTPROTO3OPTIONAL_NESTEDMESSAGE.containing_type = _TESTPROTO3OPTIONAL
_TESTPROTO3OPTIONAL_NESTEDMESSAGE.oneofs_by_name['_bb'].fields.append(
_TESTPROTO3OPTIONAL_NESTEDMESSAGE.fields_by_name['bb'])
_TESTPROTO3OPTIONAL_NESTEDMESSAGE.fields_by_name['bb'].containing_oneof = _TESTPROTO3OPTIONAL_NESTEDMESSAGE.oneofs_by_name['_bb']
_TESTPROTO3OPTIONAL.fields_by_name['optional_nested_message'].message_type = _TESTPROTO3OPTIONAL_NESTEDMESSAGE
_TESTPROTO3OPTIONAL.fields_by_name['optional_nested_enum'].enum_type = _TESTPROTO3OPTIONAL_NESTEDENUM
_TESTPROTO3OPTIONAL_NESTEDENUM.containing_type = _TESTPROTO3OPTIONAL
_TESTPROTO3OPTIONAL.oneofs_by_name['_optional_int32'].fields.append(
_TESTPROTO3OPTIONAL.fields_by_name['optional_int32'])
_TESTPROTO3OPTIONAL.fields_by_name['optional_int32'].containing_oneof = _TESTPROTO3OPTIONAL.oneofs_by_name['_optional_int32']
_TESTPROTO3OPTIONAL.oneofs_by_name['_optional_int64'].fields.append(
_TESTPROTO3OPTIONAL.fields_by_name['optional_int64'])
_TESTPROTO3OPTIONAL.fields_by_name['optional_int64'].containing_oneof = _TESTPROTO3OPTIONAL.oneofs_by_name['_optional_int64']
_TESTPROTO3OPTIONAL.oneofs_by_name['_optional_uint32'].fields.append(
_TESTPROTO3OPTIONAL.fields_by_name['optional_uint32'])
_TESTPROTO3OPTIONAL.fields_by_name['optional_uint32'].containing_oneof = _TESTPROTO3OPTIONAL.oneofs_by_name['_optional_uint32']
_TESTPROTO3OPTIONAL.oneofs_by_name['_optional_uint64'].fields.append(
_TESTPROTO3OPTIONAL.fields_by_name['optional_uint64'])
_TESTPROTO3OPTIONAL.fields_by_name['optional_uint64'].containing_oneof = _TESTPROTO3OPTIONAL.oneofs_by_name['_optional_uint64']
_TESTPROTO3OPTIONAL.oneofs_by_name['_optional_sint32'].fields.append(
_TESTPROTO3OPTIONAL.fields_by_name['optional_sint32'])
_TESTPROTO3OPTIONAL.fields_by_name['optional_sint32'].containing_oneof = _TESTPROTO3OPTIONAL.oneofs_by_name['_optional_sint32']
_TESTPROTO3OPTIONAL.oneofs_by_name['_optional_sint64'].fields.append(
_TESTPROTO3OPTIONAL.fields_by_name['optional_sint64'])
_TESTPROTO3OPTIONAL.fields_by_name['optional_sint64'].containing_oneof = _TESTPROTO3OPTIONAL.oneofs_by_name['_optional_sint64']
_TESTPROTO3OPTIONAL.oneofs_by_name['_optional_fixed32'].fields.append(
_TESTPROTO3OPTIONAL.fields_by_name['optional_fixed32'])
_TESTPROTO3OPTIONAL.fields_by_name['optional_fixed32'].containing_oneof = _TESTPROTO3OPTIONAL.oneofs_by_name['_optional_fixed32']
_TESTPROTO3OPTIONAL.oneofs_by_name['_optional_fixed64'].fields.append(
_TESTPROTO3OPTIONAL.fields_by_name['optional_fixed64'])
_TESTPROTO3OPTIONAL.fields_by_name['optional_fixed64'].containing_oneof = _TESTPROTO3OPTIONAL.oneofs_by_name['_optional_fixed64']
_TESTPROTO3OPTIONAL.oneofs_by_name['_optional_sfixed32'].fields.append(
_TESTPROTO3OPTIONAL.fields_by_name['optional_sfixed32'])
_TESTPROTO3OPTIONAL.fields_by_name['optional_sfixed32'].containing_oneof = _TESTPROTO3OPTIONAL.oneofs_by_name['_optional_sfixed32']
_TESTPROTO3OPTIONAL.oneofs_by_name['_optional_sfixed64'].fields.append(
_TESTPROTO3OPTIONAL.fields_by_name['optional_sfixed64'])
_TESTPROTO3OPTIONAL.fields_by_name['optional_sfixed64'].containing_oneof = _TESTPROTO3OPTIONAL.oneofs_by_name['_optional_sfixed64']
_TESTPROTO3OPTIONAL.oneofs_by_name['_optional_float'].fields.append(
_TESTPROTO3OPTIONAL.fields_by_name['optional_float'])
_TESTPROTO3OPTIONAL.fields_by_name['optional_float'].containing_oneof = _TESTPROTO3OPTIONAL.oneofs_by_name['_optional_float']
_TESTPROTO3OPTIONAL.oneofs_by_name['_optional_double'].fields.append(
_TESTPROTO3OPTIONAL.fields_by_name['optional_double'])
_TESTPROTO3OPTIONAL.fields_by_name['optional_double'].containing_oneof = _TESTPROTO3OPTIONAL.oneofs_by_name['_optional_double']
_TESTPROTO3OPTIONAL.oneofs_by_name['_optional_bool'].fields.append(
_TESTPROTO3OPTIONAL.fields_by_name['optional_bool'])
_TESTPROTO3OPTIONAL.fields_by_name['optional_bool'].containing_oneof = _TESTPROTO3OPTIONAL.oneofs_by_name['_optional_bool']
_TESTPROTO3OPTIONAL.oneofs_by_name['_optional_string'].fields.append(
_TESTPROTO3OPTIONAL.fields_by_name['optional_string'])
_TESTPROTO3OPTIONAL.fields_by_name['optional_string'].containing_oneof = _TESTPROTO3OPTIONAL.oneofs_by_name['_optional_string']
_TESTPROTO3OPTIONAL.oneofs_by_name['_optional_bytes'].fields.append(
_TESTPROTO3OPTIONAL.fields_by_name['optional_bytes'])
_TESTPROTO3OPTIONAL.fields_by_name['optional_bytes'].containing_oneof = _TESTPROTO3OPTIONAL.oneofs_by_name['_optional_bytes']
_TESTPROTO3OPTIONAL.oneofs_by_name['_optional_nested_message'].fields.append(
_TESTPROTO3OPTIONAL.fields_by_name['optional_nested_message'])
_TESTPROTO3OPTIONAL.fields_by_name['optional_nested_message'].containing_oneof = _TESTPROTO3OPTIONAL.oneofs_by_name['_optional_nested_message']
_TESTPROTO3OPTIONAL.oneofs_by_name['_optional_nested_enum'].fields.append(
_TESTPROTO3OPTIONAL.fields_by_name['optional_nested_enum'])
_TESTPROTO3OPTIONAL.fields_by_name['optional_nested_enum'].containing_oneof = _TESTPROTO3OPTIONAL.oneofs_by_name['_optional_nested_enum']
DESCRIPTOR.message_types_by_name['TestProto3Optional'] = _TESTPROTO3OPTIONAL
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
TestProto3Optional = _reflection.GeneratedProtocolMessageType('TestProto3Optional', (_message.Message,), {
'NestedMessage' : _reflection.GeneratedProtocolMessageType('NestedMessage', (_message.Message,), {
'DESCRIPTOR' : _TESTPROTO3OPTIONAL_NESTEDMESSAGE,
'__module__' : 'google.protobuf.internal.test_proto3_optional_pb2'
# @@protoc_insertion_point(class_scope:google.protobuf.python.internal.TestProto3Optional.NestedMessage)
})
,
'DESCRIPTOR' : _TESTPROTO3OPTIONAL,
'__module__' : 'google.protobuf.internal.test_proto3_optional_pb2'
# @@protoc_insertion_point(class_scope:google.protobuf.python.internal.TestProto3Optional)
})
_sym_db.RegisterMessage(TestProto3Optional)
_sym_db.RegisterMessage(TestProto3Optional.NestedMessage)
# @@protoc_insertion_point(module_scope)

@ -0,0 +1,868 @@
# Protocol Buffers - Google's data interchange format
# Copyright 2008 Google Inc. All rights reserved.
# https://developers.google.com/protocol-buffers/
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Utilities for Python proto2 tests.
This is intentionally modeled on C++ code in
//google/protobuf/test_util.*.
"""
__author__ = 'robinson@google.com (Will Robinson)'
import numbers
import operator
import os.path
from google.protobuf import unittest_import_pb2
from google.protobuf import unittest_pb2
try:
long # Python 2
except NameError:
long = int # Python 3
# Tests whether the given TestAllTypes message is proto2 or not.
# This is used to gate several fields/features that only exist
# for the proto2 version of the message.
def IsProto2(message):
return message.DESCRIPTOR.syntax == "proto2"
def SetAllNonLazyFields(message):
"""Sets every non-lazy field in the message to a unique value.
Args:
message: A TestAllTypes instance.
"""
#
# Optional fields.
#
message.optional_int32 = 101
message.optional_int64 = 102
message.optional_uint32 = 103
message.optional_uint64 = 104
message.optional_sint32 = 105
message.optional_sint64 = 106
message.optional_fixed32 = 107
message.optional_fixed64 = 108
message.optional_sfixed32 = 109
message.optional_sfixed64 = 110
message.optional_float = 111
message.optional_double = 112
message.optional_bool = True
message.optional_string = u'115'
message.optional_bytes = b'116'
if IsProto2(message):
message.optionalgroup.a = 117
message.optional_nested_message.bb = 118
message.optional_foreign_message.c = 119
message.optional_import_message.d = 120
message.optional_public_import_message.e = 126
message.optional_nested_enum = unittest_pb2.TestAllTypes.BAZ
message.optional_foreign_enum = unittest_pb2.FOREIGN_BAZ
if IsProto2(message):
message.optional_import_enum = unittest_import_pb2.IMPORT_BAZ
message.optional_string_piece = u'124'
message.optional_cord = u'125'
#
# Repeated fields.
#
message.repeated_int32.append(201)
message.repeated_int64.append(202)
message.repeated_uint32.append(203)
message.repeated_uint64.append(204)
message.repeated_sint32.append(205)
message.repeated_sint64.append(206)
message.repeated_fixed32.append(207)
message.repeated_fixed64.append(208)
message.repeated_sfixed32.append(209)
message.repeated_sfixed64.append(210)
message.repeated_float.append(211)
message.repeated_double.append(212)
message.repeated_bool.append(True)
message.repeated_string.append(u'215')
message.repeated_bytes.append(b'216')
if IsProto2(message):
message.repeatedgroup.add().a = 217
message.repeated_nested_message.add().bb = 218
message.repeated_foreign_message.add().c = 219
message.repeated_import_message.add().d = 220
message.repeated_lazy_message.add().bb = 227
message.repeated_nested_enum.append(unittest_pb2.TestAllTypes.BAR)
message.repeated_foreign_enum.append(unittest_pb2.FOREIGN_BAR)
if IsProto2(message):
message.repeated_import_enum.append(unittest_import_pb2.IMPORT_BAR)
message.repeated_string_piece.append(u'224')
message.repeated_cord.append(u'225')
# Add a second one of each field and set value by index.
message.repeated_int32.append(0)
message.repeated_int64.append(0)
message.repeated_uint32.append(0)
message.repeated_uint64.append(0)
message.repeated_sint32.append(0)
message.repeated_sint64.append(0)
message.repeated_fixed32.append(0)
message.repeated_fixed64.append(0)
message.repeated_sfixed32.append(0)
message.repeated_sfixed64.append(0)
message.repeated_float.append(0)
message.repeated_double.append(0)
message.repeated_bool.append(True)
message.repeated_string.append(u'0')
message.repeated_bytes.append(b'0')
message.repeated_int32[1] = 301
message.repeated_int64[1] = 302
message.repeated_uint32[1] = 303
message.repeated_uint64[1] = 304
message.repeated_sint32[1] = 305
message.repeated_sint64[1] = 306
message.repeated_fixed32[1] = 307
message.repeated_fixed64[1] = 308
message.repeated_sfixed32[1] = 309
message.repeated_sfixed64[1] = 310
message.repeated_float[1] = 311
message.repeated_double[1] = 312
message.repeated_bool[1] = False
message.repeated_string[1] = u'315'
message.repeated_bytes[1] = b'316'
if IsProto2(message):
message.repeatedgroup.add().a = 317
message.repeated_nested_message.add().bb = 318
message.repeated_foreign_message.add().c = 319
message.repeated_import_message.add().d = 320
message.repeated_lazy_message.add().bb = 327
message.repeated_nested_enum.append(unittest_pb2.TestAllTypes.BAR)
message.repeated_nested_enum[1] = unittest_pb2.TestAllTypes.BAZ
message.repeated_foreign_enum.append(unittest_pb2.FOREIGN_BAZ)
if IsProto2(message):
message.repeated_import_enum.append(unittest_import_pb2.IMPORT_BAZ)
message.repeated_string_piece.append(u'324')
message.repeated_cord.append(u'325')
#
# Fields that have defaults.
#
if IsProto2(message):
message.default_int32 = 401
message.default_int64 = 402
message.default_uint32 = 403
message.default_uint64 = 404
message.default_sint32 = 405
message.default_sint64 = 406
message.default_fixed32 = 407
message.default_fixed64 = 408
message.default_sfixed32 = 409
message.default_sfixed64 = 410
message.default_float = 411
message.default_double = 412
message.default_bool = False
message.default_string = '415'
message.default_bytes = b'416'
message.default_nested_enum = unittest_pb2.TestAllTypes.FOO
message.default_foreign_enum = unittest_pb2.FOREIGN_FOO
message.default_import_enum = unittest_import_pb2.IMPORT_FOO
message.default_string_piece = '424'
message.default_cord = '425'
message.oneof_uint32 = 601
message.oneof_nested_message.bb = 602
message.oneof_string = '603'
message.oneof_bytes = b'604'
def SetAllFields(message):
SetAllNonLazyFields(message)
message.optional_lazy_message.bb = 127
def SetAllExtensions(message):
"""Sets every extension in the message to a unique value.
Args:
message: A unittest_pb2.TestAllExtensions instance.
"""
extensions = message.Extensions
pb2 = unittest_pb2
import_pb2 = unittest_import_pb2
#
# Optional fields.
#
extensions[pb2.optional_int32_extension] = 101
extensions[pb2.optional_int64_extension] = 102
extensions[pb2.optional_uint32_extension] = 103
extensions[pb2.optional_uint64_extension] = 104
extensions[pb2.optional_sint32_extension] = 105
extensions[pb2.optional_sint64_extension] = 106
extensions[pb2.optional_fixed32_extension] = 107
extensions[pb2.optional_fixed64_extension] = 108
extensions[pb2.optional_sfixed32_extension] = 109
extensions[pb2.optional_sfixed64_extension] = 110
extensions[pb2.optional_float_extension] = 111
extensions[pb2.optional_double_extension] = 112
extensions[pb2.optional_bool_extension] = True
extensions[pb2.optional_string_extension] = u'115'
extensions[pb2.optional_bytes_extension] = b'116'
extensions[pb2.optionalgroup_extension].a = 117
extensions[pb2.optional_nested_message_extension].bb = 118
extensions[pb2.optional_foreign_message_extension].c = 119
extensions[pb2.optional_import_message_extension].d = 120
extensions[pb2.optional_public_import_message_extension].e = 126
extensions[pb2.optional_lazy_message_extension].bb = 127
extensions[pb2.optional_nested_enum_extension] = pb2.TestAllTypes.BAZ
extensions[pb2.optional_nested_enum_extension] = pb2.TestAllTypes.BAZ
extensions[pb2.optional_foreign_enum_extension] = pb2.FOREIGN_BAZ
extensions[pb2.optional_import_enum_extension] = import_pb2.IMPORT_BAZ
extensions[pb2.optional_string_piece_extension] = u'124'
extensions[pb2.optional_cord_extension] = u'125'
#
# Repeated fields.
#
extensions[pb2.repeated_int32_extension].append(201)
extensions[pb2.repeated_int64_extension].append(202)
extensions[pb2.repeated_uint32_extension].append(203)
extensions[pb2.repeated_uint64_extension].append(204)
extensions[pb2.repeated_sint32_extension].append(205)
extensions[pb2.repeated_sint64_extension].append(206)
extensions[pb2.repeated_fixed32_extension].append(207)
extensions[pb2.repeated_fixed64_extension].append(208)
extensions[pb2.repeated_sfixed32_extension].append(209)
extensions[pb2.repeated_sfixed64_extension].append(210)
extensions[pb2.repeated_float_extension].append(211)
extensions[pb2.repeated_double_extension].append(212)
extensions[pb2.repeated_bool_extension].append(True)
extensions[pb2.repeated_string_extension].append(u'215')
extensions[pb2.repeated_bytes_extension].append(b'216')
extensions[pb2.repeatedgroup_extension].add().a = 217
extensions[pb2.repeated_nested_message_extension].add().bb = 218
extensions[pb2.repeated_foreign_message_extension].add().c = 219
extensions[pb2.repeated_import_message_extension].add().d = 220
extensions[pb2.repeated_lazy_message_extension].add().bb = 227
extensions[pb2.repeated_nested_enum_extension].append(pb2.TestAllTypes.BAR)
extensions[pb2.repeated_foreign_enum_extension].append(pb2.FOREIGN_BAR)
extensions[pb2.repeated_import_enum_extension].append(import_pb2.IMPORT_BAR)
extensions[pb2.repeated_string_piece_extension].append(u'224')
extensions[pb2.repeated_cord_extension].append(u'225')
# Append a second one of each field.
extensions[pb2.repeated_int32_extension].append(301)
extensions[pb2.repeated_int64_extension].append(302)
extensions[pb2.repeated_uint32_extension].append(303)
extensions[pb2.repeated_uint64_extension].append(304)
extensions[pb2.repeated_sint32_extension].append(305)
extensions[pb2.repeated_sint64_extension].append(306)
extensions[pb2.repeated_fixed32_extension].append(307)
extensions[pb2.repeated_fixed64_extension].append(308)
extensions[pb2.repeated_sfixed32_extension].append(309)
extensions[pb2.repeated_sfixed64_extension].append(310)
extensions[pb2.repeated_float_extension].append(311)
extensions[pb2.repeated_double_extension].append(312)
extensions[pb2.repeated_bool_extension].append(False)
extensions[pb2.repeated_string_extension].append(u'315')
extensions[pb2.repeated_bytes_extension].append(b'316')
extensions[pb2.repeatedgroup_extension].add().a = 317
extensions[pb2.repeated_nested_message_extension].add().bb = 318
extensions[pb2.repeated_foreign_message_extension].add().c = 319
extensions[pb2.repeated_import_message_extension].add().d = 320
extensions[pb2.repeated_lazy_message_extension].add().bb = 327
extensions[pb2.repeated_nested_enum_extension].append(pb2.TestAllTypes.BAZ)
extensions[pb2.repeated_foreign_enum_extension].append(pb2.FOREIGN_BAZ)
extensions[pb2.repeated_import_enum_extension].append(import_pb2.IMPORT_BAZ)
extensions[pb2.repeated_string_piece_extension].append(u'324')
extensions[pb2.repeated_cord_extension].append(u'325')
#
# Fields with defaults.
#
extensions[pb2.default_int32_extension] = 401
extensions[pb2.default_int64_extension] = 402
extensions[pb2.default_uint32_extension] = 403
extensions[pb2.default_uint64_extension] = 404
extensions[pb2.default_sint32_extension] = 405
extensions[pb2.default_sint64_extension] = 406
extensions[pb2.default_fixed32_extension] = 407
extensions[pb2.default_fixed64_extension] = 408
extensions[pb2.default_sfixed32_extension] = 409
extensions[pb2.default_sfixed64_extension] = 410
extensions[pb2.default_float_extension] = 411
extensions[pb2.default_double_extension] = 412
extensions[pb2.default_bool_extension] = False
extensions[pb2.default_string_extension] = u'415'
extensions[pb2.default_bytes_extension] = b'416'
extensions[pb2.default_nested_enum_extension] = pb2.TestAllTypes.FOO
extensions[pb2.default_foreign_enum_extension] = pb2.FOREIGN_FOO
extensions[pb2.default_import_enum_extension] = import_pb2.IMPORT_FOO
extensions[pb2.default_string_piece_extension] = u'424'
extensions[pb2.default_cord_extension] = '425'
extensions[pb2.oneof_uint32_extension] = 601
extensions[pb2.oneof_nested_message_extension].bb = 602
extensions[pb2.oneof_string_extension] = u'603'
extensions[pb2.oneof_bytes_extension] = b'604'
def SetAllFieldsAndExtensions(message):
"""Sets every field and extension in the message to a unique value.
Args:
message: A unittest_pb2.TestFieldOrderings instance (it carries my_int,
my_string, my_float and the my_extension_* extensions set below).
"""
message.my_int = 1
message.my_string = 'foo'
message.my_float = 1.0
message.Extensions[unittest_pb2.my_extension_int] = 23
message.Extensions[unittest_pb2.my_extension_string] = 'bar'
def ExpectAllFieldsAndExtensionsInOrder(serialized):
"""Ensures that serialized is the serialization we expect for a message
filled with SetAllFieldsAndExtensions(). (Specifically, ensures that the
serialization is in canonical, tag-number order).
"""
my_extension_int = unittest_pb2.my_extension_int
my_extension_string = unittest_pb2.my_extension_string
expected_strings = []
message = unittest_pb2.TestFieldOrderings()
message.my_int = 1 # Field 1.
expected_strings.append(message.SerializeToString())
message.Clear()
message.Extensions[my_extension_int] = 23 # Field 5.
expected_strings.append(message.SerializeToString())
message.Clear()
message.my_string = 'foo' # Field 11.
expected_strings.append(message.SerializeToString())
message.Clear()
message.Extensions[my_extension_string] = 'bar' # Field 50.
expected_strings.append(message.SerializeToString())
message.Clear()
message.my_float = 1.0
expected_strings.append(message.SerializeToString())
message.Clear()
expected = b''.join(expected_strings)
if expected != serialized:
raise ValueError('Expected %r, found %r' % (expected, serialized))
def ExpectAllFieldsSet(test_case, message):
"""Check all fields for correct values have after Set*Fields() is called."""
test_case.assertTrue(message.HasField('optional_int32'))
test_case.assertTrue(message.HasField('optional_int64'))
test_case.assertTrue(message.HasField('optional_uint32'))
test_case.assertTrue(message.HasField('optional_uint64'))
test_case.assertTrue(message.HasField('optional_sint32'))
test_case.assertTrue(message.HasField('optional_sint64'))
test_case.assertTrue(message.HasField('optional_fixed32'))
test_case.assertTrue(message.HasField('optional_fixed64'))
test_case.assertTrue(message.HasField('optional_sfixed32'))
test_case.assertTrue(message.HasField('optional_sfixed64'))
test_case.assertTrue(message.HasField('optional_float'))
test_case.assertTrue(message.HasField('optional_double'))
test_case.assertTrue(message.HasField('optional_bool'))
test_case.assertTrue(message.HasField('optional_string'))
test_case.assertTrue(message.HasField('optional_bytes'))
if IsProto2(message):
test_case.assertTrue(message.HasField('optionalgroup'))
test_case.assertTrue(message.HasField('optional_nested_message'))
test_case.assertTrue(message.HasField('optional_foreign_message'))
test_case.assertTrue(message.HasField('optional_import_message'))
test_case.assertTrue(message.optionalgroup.HasField('a'))
test_case.assertTrue(message.optional_nested_message.HasField('bb'))
test_case.assertTrue(message.optional_foreign_message.HasField('c'))
test_case.assertTrue(message.optional_import_message.HasField('d'))
test_case.assertTrue(message.HasField('optional_nested_enum'))
test_case.assertTrue(message.HasField('optional_foreign_enum'))
if IsProto2(message):
test_case.assertTrue(message.HasField('optional_import_enum'))
test_case.assertTrue(message.HasField('optional_string_piece'))
test_case.assertTrue(message.HasField('optional_cord'))
test_case.assertEqual(101, message.optional_int32)
test_case.assertEqual(102, message.optional_int64)
test_case.assertEqual(103, message.optional_uint32)
test_case.assertEqual(104, message.optional_uint64)
test_case.assertEqual(105, message.optional_sint32)
test_case.assertEqual(106, message.optional_sint64)
test_case.assertEqual(107, message.optional_fixed32)
test_case.assertEqual(108, message.optional_fixed64)
test_case.assertEqual(109, message.optional_sfixed32)
test_case.assertEqual(110, message.optional_sfixed64)
test_case.assertEqual(111, message.optional_float)
test_case.assertEqual(112, message.optional_double)
test_case.assertEqual(True, message.optional_bool)
test_case.assertEqual('115', message.optional_string)
test_case.assertEqual(b'116', message.optional_bytes)
if IsProto2(message):
test_case.assertEqual(117, message.optionalgroup.a)
test_case.assertEqual(118, message.optional_nested_message.bb)
test_case.assertEqual(119, message.optional_foreign_message.c)
test_case.assertEqual(120, message.optional_import_message.d)
test_case.assertEqual(126, message.optional_public_import_message.e)
test_case.assertEqual(127, message.optional_lazy_message.bb)
test_case.assertEqual(unittest_pb2.TestAllTypes.BAZ,
message.optional_nested_enum)
test_case.assertEqual(unittest_pb2.FOREIGN_BAZ,
message.optional_foreign_enum)
if IsProto2(message):
test_case.assertEqual(unittest_import_pb2.IMPORT_BAZ,
message.optional_import_enum)
# -----------------------------------------------------------------
test_case.assertEqual(2, len(message.repeated_int32))
test_case.assertEqual(2, len(message.repeated_int64))
test_case.assertEqual(2, len(message.repeated_uint32))
test_case.assertEqual(2, len(message.repeated_uint64))
test_case.assertEqual(2, len(message.repeated_sint32))
test_case.assertEqual(2, len(message.repeated_sint64))
test_case.assertEqual(2, len(message.repeated_fixed32))
test_case.assertEqual(2, len(message.repeated_fixed64))
test_case.assertEqual(2, len(message.repeated_sfixed32))
test_case.assertEqual(2, len(message.repeated_sfixed64))
test_case.assertEqual(2, len(message.repeated_float))
test_case.assertEqual(2, len(message.repeated_double))
test_case.assertEqual(2, len(message.repeated_bool))
test_case.assertEqual(2, len(message.repeated_string))
test_case.assertEqual(2, len(message.repeated_bytes))
if IsProto2(message):
test_case.assertEqual(2, len(message.repeatedgroup))
test_case.assertEqual(2, len(message.repeated_nested_message))
test_case.assertEqual(2, len(message.repeated_foreign_message))
test_case.assertEqual(2, len(message.repeated_import_message))
test_case.assertEqual(2, len(message.repeated_nested_enum))
test_case.assertEqual(2, len(message.repeated_foreign_enum))
if IsProto2(message):
test_case.assertEqual(2, len(message.repeated_import_enum))
test_case.assertEqual(2, len(message.repeated_string_piece))
test_case.assertEqual(2, len(message.repeated_cord))
test_case.assertEqual(201, message.repeated_int32[0])
test_case.assertEqual(202, message.repeated_int64[0])
test_case.assertEqual(203, message.repeated_uint32[0])
test_case.assertEqual(204, message.repeated_uint64[0])
test_case.assertEqual(205, message.repeated_sint32[0])
test_case.assertEqual(206, message.repeated_sint64[0])
test_case.assertEqual(207, message.repeated_fixed32[0])
test_case.assertEqual(208, message.repeated_fixed64[0])
test_case.assertEqual(209, message.repeated_sfixed32[0])
test_case.assertEqual(210, message.repeated_sfixed64[0])
test_case.assertEqual(211, message.repeated_float[0])
test_case.assertEqual(212, message.repeated_double[0])
test_case.assertEqual(True, message.repeated_bool[0])
test_case.assertEqual('215', message.repeated_string[0])
test_case.assertEqual(b'216', message.repeated_bytes[0])
if IsProto2(message):
test_case.assertEqual(217, message.repeatedgroup[0].a)
test_case.assertEqual(218, message.repeated_nested_message[0].bb)
test_case.assertEqual(219, message.repeated_foreign_message[0].c)
test_case.assertEqual(220, message.repeated_import_message[0].d)
test_case.assertEqual(227, message.repeated_lazy_message[0].bb)
test_case.assertEqual(unittest_pb2.TestAllTypes.BAR,
message.repeated_nested_enum[0])
test_case.assertEqual(unittest_pb2.FOREIGN_BAR,
message.repeated_foreign_enum[0])
if IsProto2(message):
test_case.assertEqual(unittest_import_pb2.IMPORT_BAR,
message.repeated_import_enum[0])
test_case.assertEqual(301, message.repeated_int32[1])
test_case.assertEqual(302, message.repeated_int64[1])
test_case.assertEqual(303, message.repeated_uint32[1])
test_case.assertEqual(304, message.repeated_uint64[1])
test_case.assertEqual(305, message.repeated_sint32[1])
test_case.assertEqual(306, message.repeated_sint64[1])
test_case.assertEqual(307, message.repeated_fixed32[1])
test_case.assertEqual(308, message.repeated_fixed64[1])
test_case.assertEqual(309, message.repeated_sfixed32[1])
test_case.assertEqual(310, message.repeated_sfixed64[1])
test_case.assertEqual(311, message.repeated_float[1])
test_case.assertEqual(312, message.repeated_double[1])
test_case.assertEqual(False, message.repeated_bool[1])
test_case.assertEqual('315', message.repeated_string[1])
test_case.assertEqual(b'316', message.repeated_bytes[1])
if IsProto2(message):
test_case.assertEqual(317, message.repeatedgroup[1].a)
test_case.assertEqual(318, message.repeated_nested_message[1].bb)
test_case.assertEqual(319, message.repeated_foreign_message[1].c)
test_case.assertEqual(320, message.repeated_import_message[1].d)
test_case.assertEqual(327, message.repeated_lazy_message[1].bb)
test_case.assertEqual(unittest_pb2.TestAllTypes.BAZ,
message.repeated_nested_enum[1])
test_case.assertEqual(unittest_pb2.FOREIGN_BAZ,
message.repeated_foreign_enum[1])
if IsProto2(message):
test_case.assertEqual(unittest_import_pb2.IMPORT_BAZ,
message.repeated_import_enum[1])
# -----------------------------------------------------------------
if IsProto2(message):
test_case.assertTrue(message.HasField('default_int32'))
test_case.assertTrue(message.HasField('default_int64'))
test_case.assertTrue(message.HasField('default_uint32'))
test_case.assertTrue(message.HasField('default_uint64'))
test_case.assertTrue(message.HasField('default_sint32'))
test_case.assertTrue(message.HasField('default_sint64'))
test_case.assertTrue(message.HasField('default_fixed32'))
test_case.assertTrue(message.HasField('default_fixed64'))
test_case.assertTrue(message.HasField('default_sfixed32'))
test_case.assertTrue(message.HasField('default_sfixed64'))
test_case.assertTrue(message.HasField('default_float'))
test_case.assertTrue(message.HasField('default_double'))
test_case.assertTrue(message.HasField('default_bool'))
test_case.assertTrue(message.HasField('default_string'))
test_case.assertTrue(message.HasField('default_bytes'))
test_case.assertTrue(message.HasField('default_nested_enum'))
test_case.assertTrue(message.HasField('default_foreign_enum'))
test_case.assertTrue(message.HasField('default_import_enum'))
test_case.assertEqual(401, message.default_int32)
test_case.assertEqual(402, message.default_int64)
test_case.assertEqual(403, message.default_uint32)
test_case.assertEqual(404, message.default_uint64)
test_case.assertEqual(405, message.default_sint32)
test_case.assertEqual(406, message.default_sint64)
test_case.assertEqual(407, message.default_fixed32)
test_case.assertEqual(408, message.default_fixed64)
test_case.assertEqual(409, message.default_sfixed32)
test_case.assertEqual(410, message.default_sfixed64)
test_case.assertEqual(411, message.default_float)
test_case.assertEqual(412, message.default_double)
test_case.assertEqual(False, message.default_bool)
test_case.assertEqual('415', message.default_string)
test_case.assertEqual(b'416', message.default_bytes)
test_case.assertEqual(unittest_pb2.TestAllTypes.FOO,
message.default_nested_enum)
test_case.assertEqual(unittest_pb2.FOREIGN_FOO,
message.default_foreign_enum)
test_case.assertEqual(unittest_import_pb2.IMPORT_FOO,
message.default_import_enum)
def GoldenFile(filename):
"""Finds the given golden file and returns a file object representing it."""
# Search up the directory tree looking for the C++ protobuf source code.
path = '.'
while os.path.exists(path):
if os.path.exists(os.path.join(path, 'src/google/protobuf')):
# Found it. Load the golden file from the testdata directory.
full_path = os.path.join(path, 'src/google/protobuf/testdata', filename)
return open(full_path, 'rb')
path = os.path.join(path, '..')
# Search internally.
path = '.'
full_path = os.path.join(path, 'third_party/py/google/protobuf/testdata',
filename)
if os.path.exists(full_path):
# Found it. Load the golden file from the testdata directory.
return open(full_path, 'rb')
raise RuntimeError(
'Could not find golden files. This test must be run from within the '
'protobuf source package so that it can read test data files from the '
'C++ source tree.')
def GoldenFileData(filename):
"""Finds the given golden file and returns its contents."""
with GoldenFile(filename) as f:
return f.read()
def SetAllPackedFields(message):
"""Sets every field in the message to a unique value.
Args:
message: A TestPackedTypes instance.
"""
message.packed_int32.extend([601, 701])
message.packed_int64.extend([602, 702])
message.packed_uint32.extend([603, 703])
message.packed_uint64.extend([604, 704])
message.packed_sint32.extend([605, 705])
message.packed_sint64.extend([606, 706])
message.packed_fixed32.extend([607, 707])
message.packed_fixed64.extend([608, 708])
message.packed_sfixed32.extend([609, 709])
message.packed_sfixed64.extend([610, 710])
message.packed_float.extend([611.0, 711.0])
message.packed_double.extend([612.0, 712.0])
message.packed_bool.extend([True, False])
message.packed_enum.extend([unittest_pb2.FOREIGN_BAR,
unittest_pb2.FOREIGN_BAZ])
def SetAllPackedExtensions(message):
"""Sets every extension in the message to a unique value.
Args:
message: A unittest_pb2.TestPackedExtensions instance.
"""
extensions = message.Extensions
pb2 = unittest_pb2
extensions[pb2.packed_int32_extension].extend([601, 701])
extensions[pb2.packed_int64_extension].extend([602, 702])
extensions[pb2.packed_uint32_extension].extend([603, 703])
extensions[pb2.packed_uint64_extension].extend([604, 704])
extensions[pb2.packed_sint32_extension].extend([605, 705])
extensions[pb2.packed_sint64_extension].extend([606, 706])
extensions[pb2.packed_fixed32_extension].extend([607, 707])
extensions[pb2.packed_fixed64_extension].extend([608, 708])
extensions[pb2.packed_sfixed32_extension].extend([609, 709])
extensions[pb2.packed_sfixed64_extension].extend([610, 710])
extensions[pb2.packed_float_extension].extend([611.0, 711.0])
extensions[pb2.packed_double_extension].extend([612.0, 712.0])
extensions[pb2.packed_bool_extension].extend([True, False])
extensions[pb2.packed_enum_extension].extend([unittest_pb2.FOREIGN_BAR,
unittest_pb2.FOREIGN_BAZ])
def SetAllUnpackedFields(message):
"""Sets every field in the message to a unique value.
Args:
message: A unittest_pb2.TestUnpackedTypes instance.
"""
message.unpacked_int32.extend([601, 701])
message.unpacked_int64.extend([602, 702])
message.unpacked_uint32.extend([603, 703])
message.unpacked_uint64.extend([604, 704])
message.unpacked_sint32.extend([605, 705])
message.unpacked_sint64.extend([606, 706])
message.unpacked_fixed32.extend([607, 707])
message.unpacked_fixed64.extend([608, 708])
message.unpacked_sfixed32.extend([609, 709])
message.unpacked_sfixed64.extend([610, 710])
message.unpacked_float.extend([611.0, 711.0])
message.unpacked_double.extend([612.0, 712.0])
message.unpacked_bool.extend([True, False])
message.unpacked_enum.extend([unittest_pb2.FOREIGN_BAR,
unittest_pb2.FOREIGN_BAZ])
class NonStandardInteger(numbers.Integral):
"""An integer object that does not subclass int.
This is used to verify that both C++ and regular proto systems can handle
integers other than int and long, and that they handle them in predictable
ways.
NonStandardInteger is the minimal legal specification for a custom Integral.
As such, it does not support 0 < x < 5 and it is not hashable.
Note: This is added here instead of relying on numpy or a similar library
with custom integers to limit dependencies.
"""
def __init__(self, val, error_string_on_conversion=None):
assert isinstance(val, numbers.Integral)
if isinstance(val, NonStandardInteger):
val = val.val
self.val = val
self.error_string_on_conversion = error_string_on_conversion
def __long__(self):
if self.error_string_on_conversion:
raise RuntimeError(self.error_string_on_conversion)
return long(self.val)
def __abs__(self):
return NonStandardInteger(operator.abs(self.val))
def __add__(self, y):
return NonStandardInteger(operator.add(self.val, y))
def __div__(self, y):
return NonStandardInteger(operator.div(self.val, y))
def __eq__(self, y):
return operator.eq(self.val, y)
def __floordiv__(self, y):
return NonStandardInteger(operator.floordiv(self.val, y))
def __truediv__(self, y):
return NonStandardInteger(operator.truediv(self.val, y))
def __invert__(self):
return NonStandardInteger(operator.invert(self.val))
def __mod__(self, y):
return NonStandardInteger(operator.mod(self.val, y))
def __mul__(self, y):
return NonStandardInteger(operator.mul(self.val, y))
def __neg__(self):
return NonStandardInteger(operator.neg(self.val))
def __pos__(self):
return NonStandardInteger(operator.pos(self.val))
def __pow__(self, y):
return NonStandardInteger(operator.pow(self.val, y))
def __trunc__(self):
return int(self.val)
def __radd__(self, y):
return NonStandardInteger(operator.add(y, self.val))
def __rdiv__(self, y):
return NonStandardInteger(operator.div(y, self.val))
def __rmod__(self, y):
return NonStandardInteger(operator.mod(y, self.val))
def __rmul__(self, y):
return NonStandardInteger(operator.mul(y, self.val))
def __rpow__(self, y):
return NonStandardInteger(operator.pow(y, self.val))
def __rfloordiv__(self, y):
return NonStandardInteger(operator.floordiv(y, self.val))
def __rtruediv__(self, y):
return NonStandardInteger(operator.truediv(y, self.val))
def __lshift__(self, y):
return NonStandardInteger(operator.lshift(self.val, y))
def __rshift__(self, y):
return NonStandardInteger(operator.rshift(self.val, y))
def __rlshift__(self, y):
return NonStandardInteger(operator.lshift(y, self.val))
def __rrshift__(self, y):
return NonStandardInteger(operator.rshift(y, self.val))
def __le__(self, y):
if isinstance(y, NonStandardInteger):
y = y.val
return operator.le(self.val, y)
def __lt__(self, y):
if isinstance(y, NonStandardInteger):
y = y.val
return operator.lt(self.val, y)
def __and__(self, y):
return NonStandardInteger(operator.and_(self.val, y))
def __or__(self, y):
return NonStandardInteger(operator.or_(self.val, y))
def __xor__(self, y):
return NonStandardInteger(operator.xor(self.val, y))
def __rand__(self, y):
return NonStandardInteger(operator.and_(y, self.val))
def __ror__(self, y):
return NonStandardInteger(operator.or_(y, self.val))
def __rxor__(self, y):
return NonStandardInteger(operator.xor(y, self.val))
def __bool__(self):
return self.val
def __nonzero__(self):
return self.val
def __ceil__(self):
return self
def __floor__(self):
return self
def __int__(self):
if self.error_string_on_conversion:
raise RuntimeError(self.error_string_on_conversion)
return int(self.val)
def __round__(self):
return self
def __repr__(self):
return 'NonStandardInteger(%s)' % self.val
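# Illustrative sketch (hypothetical helper, never called anywhere): how a test
# might exercise NonStandardInteger against a scalar proto field. Per the type
# checkers used by the runtime, an int32 field should accept any numbers.Integral
# and normalize it back to a plain int.
def _ExampleAssignNonStandardInteger():
  message = unittest_pb2.TestAllTypes()
  message.optional_int32 = NonStandardInteger(101)
  assert message.optional_int32 == 101
  return message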

@ -0,0 +1,133 @@
# Protocol Buffers - Google's data interchange format
# Copyright 2008 Google Inc. All rights reserved.
# https://developers.google.com/protocol-buffers/
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""A subclass of unittest.TestCase which checks for reference leaks.
To use:
- Use testing_refleak.BaseTestCase instead of unittest.TestCase
- Configure and compile Python with --with-pydebug
If sys.gettotalrefcount() is not available (because Python was built without
the Py_DEBUG option), then this module is a no-op and tests will run normally.
"""
import gc
import sys
try:
import copy_reg as copyreg #PY26
except ImportError:
import copyreg
try:
import unittest2 as unittest #PY26
except ImportError:
import unittest
class LocalTestResult(unittest.TestResult):
"""A TestResult which forwards events to a parent object, except for Skips."""
def __init__(self, parent_result):
unittest.TestResult.__init__(self)
self.parent_result = parent_result
def addError(self, test, error):
self.parent_result.addError(test, error)
def addFailure(self, test, error):
self.parent_result.addFailure(test, error)
def addSkip(self, test, reason):
pass
class ReferenceLeakCheckerMixin(object):
"""A mixin class for TestCase, which checks reference counts."""
NB_RUNS = 3
def run(self, result=None):
# python_message.py registers all Message classes to some pickle global
# registry, which makes the classes immortal.
# We save a copy of this registry, and reset it before we count references.
self._saved_pickle_registry = copyreg.dispatch_table.copy()
# Run the test twice, to warm up the instance attributes.
super(ReferenceLeakCheckerMixin, self).run(result=result)
super(ReferenceLeakCheckerMixin, self).run(result=result)
oldrefcount = 0
local_result = LocalTestResult(result)
refcount_deltas = []
for _ in range(self.NB_RUNS):
oldrefcount = self._getRefcounts()
super(ReferenceLeakCheckerMixin, self).run(result=local_result)
newrefcount = self._getRefcounts()
refcount_deltas.append(newrefcount - oldrefcount)
print(refcount_deltas, self)
try:
self.assertEqual(refcount_deltas, [0] * self.NB_RUNS)
except Exception: # pylint: disable=broad-except
result.addError(self, sys.exc_info())
def _getRefcounts(self):
copyreg.dispatch_table.clear()
copyreg.dispatch_table.update(self._saved_pickle_registry)
# It is sometimes necessary to gc.collect() multiple times, to ensure
# that all objects can be collected.
gc.collect()
gc.collect()
gc.collect()
return sys.gettotalrefcount()
if hasattr(sys, 'gettotalrefcount'):
def TestCase(test_class):
new_bases = (ReferenceLeakCheckerMixin,) + test_class.__bases__
new_class = type(test_class)(
test_class.__name__, new_bases, dict(test_class.__dict__))
return new_class
SkipReferenceLeakChecker = unittest.skip
else:
# When PyDEBUG is not enabled, run the tests normally.
def TestCase(test_class):
return test_class
def SkipReferenceLeakChecker(reason):
del reason # Don't skip, so don't need a reason.
def Same(func):
return func
return Same
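# Illustrative usage sketch (hypothetical test module, shown as comments only so
# this file's behavior is unchanged): decorate an ordinary TestCase with the
# TestCase wrapper defined above; under a --with-pydebug build each test is
# re-run and its refcount delta is checked.
#
#   import unittest
#   from google.protobuf.internal import testing_refleak
#
#   @testing_refleak.TestCase
#   class MyMessageTest(unittest.TestCase):
#
#     def testCreateAndParse(self):
#       pass  # exercise message creation / parsing here
#
#     @testing_refleak.SkipReferenceLeakChecker('intentionally caches objects')
#     def testNotLeakChecked(self):
#       pass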

@ -0,0 +1,72 @@
#! /usr/bin/env python
#
# Protocol Buffers - Google's data interchange format
# Copyright 2008 Google Inc. All rights reserved.
# https://developers.google.com/protocol-buffers/
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Tests for google.protobuf.text_encoding."""
try:
import unittest2 as unittest #PY26
except ImportError:
import unittest
from google.protobuf import text_encoding
TEST_VALUES = [
("foo\\rbar\\nbaz\\t",
"foo\\rbar\\nbaz\\t",
b"foo\rbar\nbaz\t"),
("\\'full of \\\"sound\\\" and \\\"fury\\\"\\'",
"\\'full of \\\"sound\\\" and \\\"fury\\\"\\'",
b"'full of \"sound\" and \"fury\"'"),
("signi\\\\fying\\\\ nothing\\\\",
"signi\\\\fying\\\\ nothing\\\\",
b"signi\\fying\\ nothing\\"),
("\\010\\t\\n\\013\\014\\r",
"\x08\\t\\n\x0b\x0c\\r",
b"\010\011\012\013\014\015")]
class TextEncodingTestCase(unittest.TestCase):
def testCEscape(self):
for escaped, escaped_utf8, unescaped in TEST_VALUES:
self.assertEqual(escaped,
text_encoding.CEscape(unescaped, as_utf8=False))
self.assertEqual(escaped_utf8,
text_encoding.CEscape(unescaped, as_utf8=True))
def testCUnescape(self):
for escaped, escaped_utf8, unescaped in TEST_VALUES:
self.assertEqual(unescaped, text_encoding.CUnescape(escaped))
self.assertEqual(unescaped, text_encoding.CUnescape(escaped_utf8))
if __name__ == "__main__":
unittest.main()

@ -0,0 +1,431 @@
# Protocol Buffers - Google's data interchange format
# Copyright 2008 Google Inc. All rights reserved.
# https://developers.google.com/protocol-buffers/
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Provides type checking routines.
This module defines type checking utilities in the forms of dictionaries:
VALUE_CHECKERS: A dictionary of field types and a value validation object.
TYPE_TO_BYTE_SIZE_FN: A dictionary with field types and a size computing
function.
TYPE_TO_SERIALIZE_METHOD: A dictionary with field types and a serialization
function.
FIELD_TYPE_TO_WIRE_TYPE: A dictionary with field types and their
corresponding wire types.
TYPE_TO_DESERIALIZE_METHOD: A dictionary with field types and a deserialization
function.
"""
__author__ = 'robinson@google.com (Will Robinson)'
try:
import ctypes
except Exception: # pylint: disable=broad-except
ctypes = None
import struct
import numbers
import six
if six.PY3:
long = int
from google.protobuf.internal import api_implementation
from google.protobuf.internal import decoder
from google.protobuf.internal import encoder
from google.protobuf.internal import wire_format
from google.protobuf import descriptor
_FieldDescriptor = descriptor.FieldDescriptor
def TruncateToFourByteFloat(original):
if ctypes:
return ctypes.c_float(original).value
else:
return struct.unpack('<f', struct.pack('<f', original))[0]
def ToShortestFloat(original):
"""Returns the shortest float that has same value in wire."""
# Return the original value if it is not truncated. This may happen
# if someone mixes this code with an old protobuf runtime.
# TODO(jieluo): Remove it after maybe 2022.
if TruncateToFourByteFloat(original) != original:
return original
# All 4 byte floats have between 6 and 9 significant digits, so we
# start with 6 as the lower bound.
# It has to be iterative because using '.9g' directly cannot get rid
# of the noise for most values. For example, if a float_field is set to 0.9,
# '.9g' will print 0.899999976.
precision = 6
rounded = float('{0:.{1}g}'.format(original, precision))
while TruncateToFourByteFloat(rounded) != original:
precision += 1
rounded = float('{0:.{1}g}'.format(original, precision))
return rounded
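# Illustrative sketch (hypothetical helper, never called): a worked example of the
# precision search above. A float field set to 0.9 actually stores the nearest
# 4-byte float, whose double value prints as 0.8999999761581421; ToShortestFloat
# recovers the short form again.
def _ExampleToShortestFloat():
  stored = TruncateToFourByteFloat(0.9)  # 0.8999999761581421
  assert ToShortestFloat(stored) == 0.9
  return stored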
def SupportsOpenEnums(field_descriptor):
return field_descriptor.containing_type.syntax == "proto3"
def GetTypeChecker(field):
"""Returns a type checker for a message field of the specified types.
Args:
field: FieldDescriptor object for this field.
Returns:
An instance of TypeChecker which can be used to verify the types
of values assigned to a field of the specified type.
"""
if (field.cpp_type == _FieldDescriptor.CPPTYPE_STRING and
field.type == _FieldDescriptor.TYPE_STRING):
return UnicodeValueChecker()
if field.cpp_type == _FieldDescriptor.CPPTYPE_ENUM:
if SupportsOpenEnums(field):
# When open enums are supported, any int32 can be assigned.
return _VALUE_CHECKERS[_FieldDescriptor.CPPTYPE_INT32]
else:
return EnumValueChecker(field.enum_type)
return _VALUE_CHECKERS[field.cpp_type]
# None of the typecheckers below make any attempt to guard against people
# subclassing builtin types and doing weird things. We're not trying to
# protect against malicious clients here, just people accidentally shooting
# themselves in the foot in obvious ways.
class TypeChecker(object):
"""Type checker used to catch type errors as early as possible
when the client is setting scalar fields in protocol messages.
"""
def __init__(self, *acceptable_types):
self._acceptable_types = acceptable_types
def CheckValue(self, proposed_value):
"""Type check the provided value and return it.
The returned value might have been normalized to another type.
"""
if not isinstance(proposed_value, self._acceptable_types):
message = ('%.1024r has type %s, but expected one of: %s' %
(proposed_value, type(proposed_value), self._acceptable_types))
raise TypeError(message)
# Some field types (float, double and bool) accept other types; we must
# convert to the correct type in such cases.
if self._acceptable_types:
if self._acceptable_types[0] in (bool, float):
return self._acceptable_types[0](proposed_value)
return proposed_value
class TypeCheckerWithDefault(TypeChecker):
def __init__(self, default_value, *acceptable_types):
TypeChecker.__init__(self, *acceptable_types)
self._default_value = default_value
def DefaultValue(self):
return self._default_value
# IntValueChecker and its subclasses perform integer type-checks
# and bounds-checks.
class IntValueChecker(object):
"""Checker used for integer fields. Performs type-check and range check."""
def CheckValue(self, proposed_value):
if not isinstance(proposed_value, numbers.Integral):
message = ('%.1024r has type %s, but expected one of: %s' %
(proposed_value, type(proposed_value), six.integer_types))
raise TypeError(message)
if not self._MIN <= int(proposed_value) <= self._MAX:
raise ValueError('Value out of range: %d' % proposed_value)
# We force 32-bit values to int and 64-bit values to long to make
# alternate implementations where the distinction is more significant
# (e.g. the C++ implementation) simpler.
proposed_value = self._TYPE(proposed_value)
return proposed_value
def DefaultValue(self):
return 0
class EnumValueChecker(object):
"""Checker used for enum fields. Performs type-check and range check."""
def __init__(self, enum_type):
self._enum_type = enum_type
def CheckValue(self, proposed_value):
if not isinstance(proposed_value, numbers.Integral):
message = ('%.1024r has type %s, but expected one of: %s' %
(proposed_value, type(proposed_value), six.integer_types))
raise TypeError(message)
if int(proposed_value) not in self._enum_type.values_by_number:
raise ValueError('Unknown enum value: %d' % proposed_value)
return proposed_value
def DefaultValue(self):
return self._enum_type.values[0].number
class UnicodeValueChecker(object):
"""Checker used for string fields.
Always returns a unicode value, even if the input is of type str.
"""
def CheckValue(self, proposed_value):
if not isinstance(proposed_value, (bytes, six.text_type)):
message = ('%.1024r has type %s, but expected one of: %s' %
(proposed_value, type(proposed_value), (bytes, six.text_type)))
raise TypeError(message)
# If the value is of type 'bytes' make sure that it is valid UTF-8 data.
if isinstance(proposed_value, bytes):
try:
proposed_value = proposed_value.decode('utf-8')
except UnicodeDecodeError:
raise ValueError('%.1024r has type bytes, but isn\'t valid UTF-8 '
'encoding. Non-UTF-8 strings must be converted to '
'unicode objects before being added.' %
(proposed_value))
else:
try:
proposed_value.encode('utf8')
except UnicodeEncodeError:
raise ValueError('%.1024r isn\'t a valid unicode string and '
'can\'t be encoded in UTF-8.'%
(proposed_value))
return proposed_value
def DefaultValue(self):
return u""
class Int32ValueChecker(IntValueChecker):
# We're sure to use ints instead of longs here since comparison may be more
# efficient.
_MIN = -2147483648
_MAX = 2147483647
_TYPE = int
class Uint32ValueChecker(IntValueChecker):
_MIN = 0
_MAX = (1 << 32) - 1
_TYPE = int
class Int64ValueChecker(IntValueChecker):
_MIN = -(1 << 63)
_MAX = (1 << 63) - 1
_TYPE = long
class Uint64ValueChecker(IntValueChecker):
_MIN = 0
_MAX = (1 << 64) - 1
_TYPE = long
# The max 4 bytes float is about 3.4028234663852886e+38
_FLOAT_MAX = float.fromhex('0x1.fffffep+127')
_FLOAT_MIN = -_FLOAT_MAX
_INF = float('inf')
_NEG_INF = float('-inf')
class FloatValueChecker(object):
"""Checker used for float fields. Performs type-check and range check.
Values exceeding a 32-bit float will be converted to inf/-inf.
"""
def CheckValue(self, proposed_value):
"""Check and convert proposed_value to float."""
if not isinstance(proposed_value, numbers.Real):
message = ('%.1024r has type %s, but expected one of: numbers.Real' %
(proposed_value, type(proposed_value)))
raise TypeError(message)
converted_value = float(proposed_value)
# This inf rounding matches the C++ proto SafeDoubleToFloat logic.
if converted_value > _FLOAT_MAX:
return _INF
if converted_value < _FLOAT_MIN:
return _NEG_INF
return TruncateToFourByteFloat(converted_value)
def DefaultValue(self):
return 0.0
# Type-checkers for all scalar CPPTYPEs.
_VALUE_CHECKERS = {
_FieldDescriptor.CPPTYPE_INT32: Int32ValueChecker(),
_FieldDescriptor.CPPTYPE_INT64: Int64ValueChecker(),
_FieldDescriptor.CPPTYPE_UINT32: Uint32ValueChecker(),
_FieldDescriptor.CPPTYPE_UINT64: Uint64ValueChecker(),
_FieldDescriptor.CPPTYPE_DOUBLE: TypeCheckerWithDefault(
0.0, float, numbers.Real),
_FieldDescriptor.CPPTYPE_FLOAT: FloatValueChecker(),
_FieldDescriptor.CPPTYPE_BOOL: TypeCheckerWithDefault(
False, bool, numbers.Integral),
_FieldDescriptor.CPPTYPE_STRING: TypeCheckerWithDefault(b'', bytes),
}
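# Illustrative sketch (hypothetical helper, never called): the checkers above are
# what GetTypeChecker() hands back for most scalar fields. CheckValue() raises
# TypeError for non-integral input, ValueError outside [-2**31, 2**31 - 1], and
# otherwise returns the value coerced to int.
def _ExampleCheckInt32(proposed_value):
  checker = _VALUE_CHECKERS[_FieldDescriptor.CPPTYPE_INT32]
  return checker.CheckValue(proposed_value)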
# Map from field type to a function F, such that F(field_num, value)
# gives the total byte size for a value of the given type. This
# byte size includes tag information and any other additional space
# associated with serializing "value".
TYPE_TO_BYTE_SIZE_FN = {
_FieldDescriptor.TYPE_DOUBLE: wire_format.DoubleByteSize,
_FieldDescriptor.TYPE_FLOAT: wire_format.FloatByteSize,
_FieldDescriptor.TYPE_INT64: wire_format.Int64ByteSize,
_FieldDescriptor.TYPE_UINT64: wire_format.UInt64ByteSize,
_FieldDescriptor.TYPE_INT32: wire_format.Int32ByteSize,
_FieldDescriptor.TYPE_FIXED64: wire_format.Fixed64ByteSize,
_FieldDescriptor.TYPE_FIXED32: wire_format.Fixed32ByteSize,
_FieldDescriptor.TYPE_BOOL: wire_format.BoolByteSize,
_FieldDescriptor.TYPE_STRING: wire_format.StringByteSize,
_FieldDescriptor.TYPE_GROUP: wire_format.GroupByteSize,
_FieldDescriptor.TYPE_MESSAGE: wire_format.MessageByteSize,
_FieldDescriptor.TYPE_BYTES: wire_format.BytesByteSize,
_FieldDescriptor.TYPE_UINT32: wire_format.UInt32ByteSize,
_FieldDescriptor.TYPE_ENUM: wire_format.EnumByteSize,
_FieldDescriptor.TYPE_SFIXED32: wire_format.SFixed32ByteSize,
_FieldDescriptor.TYPE_SFIXED64: wire_format.SFixed64ByteSize,
_FieldDescriptor.TYPE_SINT32: wire_format.SInt32ByteSize,
_FieldDescriptor.TYPE_SINT64: wire_format.SInt64ByteSize
}
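# Editorial sketch (not part of the original module): using the table above to
# size a field. The field number and value are illustrative; Int32ByteSize
# counts the tag varint plus the varint encoding of the value.
def _byte_size_table_sketch():
  size_fn = TYPE_TO_BYTE_SIZE_FN[_FieldDescriptor.TYPE_INT32]
  # Field number 1 needs a 1-byte tag and the value 300 needs 2 varint bytes.
  assert size_fn(1, 300) == 3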
# Maps from field types to encoder constructors.
TYPE_TO_ENCODER = {
_FieldDescriptor.TYPE_DOUBLE: encoder.DoubleEncoder,
_FieldDescriptor.TYPE_FLOAT: encoder.FloatEncoder,
_FieldDescriptor.TYPE_INT64: encoder.Int64Encoder,
_FieldDescriptor.TYPE_UINT64: encoder.UInt64Encoder,
_FieldDescriptor.TYPE_INT32: encoder.Int32Encoder,
_FieldDescriptor.TYPE_FIXED64: encoder.Fixed64Encoder,
_FieldDescriptor.TYPE_FIXED32: encoder.Fixed32Encoder,
_FieldDescriptor.TYPE_BOOL: encoder.BoolEncoder,
_FieldDescriptor.TYPE_STRING: encoder.StringEncoder,
_FieldDescriptor.TYPE_GROUP: encoder.GroupEncoder,
_FieldDescriptor.TYPE_MESSAGE: encoder.MessageEncoder,
_FieldDescriptor.TYPE_BYTES: encoder.BytesEncoder,
_FieldDescriptor.TYPE_UINT32: encoder.UInt32Encoder,
_FieldDescriptor.TYPE_ENUM: encoder.EnumEncoder,
_FieldDescriptor.TYPE_SFIXED32: encoder.SFixed32Encoder,
_FieldDescriptor.TYPE_SFIXED64: encoder.SFixed64Encoder,
_FieldDescriptor.TYPE_SINT32: encoder.SInt32Encoder,
_FieldDescriptor.TYPE_SINT64: encoder.SInt64Encoder,
}
# Maps from field types to sizer constructors.
TYPE_TO_SIZER = {
_FieldDescriptor.TYPE_DOUBLE: encoder.DoubleSizer,
_FieldDescriptor.TYPE_FLOAT: encoder.FloatSizer,
_FieldDescriptor.TYPE_INT64: encoder.Int64Sizer,
_FieldDescriptor.TYPE_UINT64: encoder.UInt64Sizer,
_FieldDescriptor.TYPE_INT32: encoder.Int32Sizer,
_FieldDescriptor.TYPE_FIXED64: encoder.Fixed64Sizer,
_FieldDescriptor.TYPE_FIXED32: encoder.Fixed32Sizer,
_FieldDescriptor.TYPE_BOOL: encoder.BoolSizer,
_FieldDescriptor.TYPE_STRING: encoder.StringSizer,
_FieldDescriptor.TYPE_GROUP: encoder.GroupSizer,
_FieldDescriptor.TYPE_MESSAGE: encoder.MessageSizer,
_FieldDescriptor.TYPE_BYTES: encoder.BytesSizer,
_FieldDescriptor.TYPE_UINT32: encoder.UInt32Sizer,
_FieldDescriptor.TYPE_ENUM: encoder.EnumSizer,
_FieldDescriptor.TYPE_SFIXED32: encoder.SFixed32Sizer,
_FieldDescriptor.TYPE_SFIXED64: encoder.SFixed64Sizer,
_FieldDescriptor.TYPE_SINT32: encoder.SInt32Sizer,
_FieldDescriptor.TYPE_SINT64: encoder.SInt64Sizer,
}
# Maps from field type to a decoder constructor.
TYPE_TO_DECODER = {
_FieldDescriptor.TYPE_DOUBLE: decoder.DoubleDecoder,
_FieldDescriptor.TYPE_FLOAT: decoder.FloatDecoder,
_FieldDescriptor.TYPE_INT64: decoder.Int64Decoder,
_FieldDescriptor.TYPE_UINT64: decoder.UInt64Decoder,
_FieldDescriptor.TYPE_INT32: decoder.Int32Decoder,
_FieldDescriptor.TYPE_FIXED64: decoder.Fixed64Decoder,
_FieldDescriptor.TYPE_FIXED32: decoder.Fixed32Decoder,
_FieldDescriptor.TYPE_BOOL: decoder.BoolDecoder,
_FieldDescriptor.TYPE_STRING: decoder.StringDecoder,
_FieldDescriptor.TYPE_GROUP: decoder.GroupDecoder,
_FieldDescriptor.TYPE_MESSAGE: decoder.MessageDecoder,
_FieldDescriptor.TYPE_BYTES: decoder.BytesDecoder,
_FieldDescriptor.TYPE_UINT32: decoder.UInt32Decoder,
_FieldDescriptor.TYPE_ENUM: decoder.EnumDecoder,
_FieldDescriptor.TYPE_SFIXED32: decoder.SFixed32Decoder,
_FieldDescriptor.TYPE_SFIXED64: decoder.SFixed64Decoder,
_FieldDescriptor.TYPE_SINT32: decoder.SInt32Decoder,
_FieldDescriptor.TYPE_SINT64: decoder.SInt64Decoder,
}
# Maps from field type to expected wiretype.
FIELD_TYPE_TO_WIRE_TYPE = {
_FieldDescriptor.TYPE_DOUBLE: wire_format.WIRETYPE_FIXED64,
_FieldDescriptor.TYPE_FLOAT: wire_format.WIRETYPE_FIXED32,
_FieldDescriptor.TYPE_INT64: wire_format.WIRETYPE_VARINT,
_FieldDescriptor.TYPE_UINT64: wire_format.WIRETYPE_VARINT,
_FieldDescriptor.TYPE_INT32: wire_format.WIRETYPE_VARINT,
_FieldDescriptor.TYPE_FIXED64: wire_format.WIRETYPE_FIXED64,
_FieldDescriptor.TYPE_FIXED32: wire_format.WIRETYPE_FIXED32,
_FieldDescriptor.TYPE_BOOL: wire_format.WIRETYPE_VARINT,
_FieldDescriptor.TYPE_STRING:
wire_format.WIRETYPE_LENGTH_DELIMITED,
_FieldDescriptor.TYPE_GROUP: wire_format.WIRETYPE_START_GROUP,
_FieldDescriptor.TYPE_MESSAGE:
wire_format.WIRETYPE_LENGTH_DELIMITED,
_FieldDescriptor.TYPE_BYTES:
wire_format.WIRETYPE_LENGTH_DELIMITED,
_FieldDescriptor.TYPE_UINT32: wire_format.WIRETYPE_VARINT,
_FieldDescriptor.TYPE_ENUM: wire_format.WIRETYPE_VARINT,
_FieldDescriptor.TYPE_SFIXED32: wire_format.WIRETYPE_FIXED32,
_FieldDescriptor.TYPE_SFIXED64: wire_format.WIRETYPE_FIXED64,
_FieldDescriptor.TYPE_SINT32: wire_format.WIRETYPE_VARINT,
_FieldDescriptor.TYPE_SINT64: wire_format.WIRETYPE_VARINT,
}

@ -0,0 +1,454 @@
#! /usr/bin/env python
# -*- coding: utf-8 -*-
#
# Protocol Buffers - Google's data interchange format
# Copyright 2008 Google Inc. All rights reserved.
# https://developers.google.com/protocol-buffers/
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Test for preservation of unknown fields in the pure Python implementation."""
__author__ = 'bohdank@google.com (Bohdan Koval)'
try:
import unittest2 as unittest #PY26
except ImportError:
import unittest
from google.protobuf import map_unittest_pb2
from google.protobuf import unittest_mset_pb2
from google.protobuf import unittest_pb2
from google.protobuf import unittest_proto3_arena_pb2
from google.protobuf.internal import api_implementation
from google.protobuf.internal import encoder
from google.protobuf.internal import message_set_extensions_pb2
from google.protobuf.internal import missing_enum_values_pb2
from google.protobuf.internal import test_util
from google.protobuf.internal import testing_refleaks
from google.protobuf.internal import type_checkers
from google.protobuf.internal import wire_format
from google.protobuf import descriptor
@testing_refleaks.TestCase
class UnknownFieldsTest(unittest.TestCase):
def setUp(self):
self.descriptor = unittest_pb2.TestAllTypes.DESCRIPTOR
self.all_fields = unittest_pb2.TestAllTypes()
test_util.SetAllFields(self.all_fields)
self.all_fields_data = self.all_fields.SerializeToString()
self.empty_message = unittest_pb2.TestEmptyMessage()
self.empty_message.ParseFromString(self.all_fields_data)
def testSerialize(self):
data = self.empty_message.SerializeToString()
# Don't use assertEqual because we don't want to dump raw binary data to
# stdout.
self.assertTrue(data == self.all_fields_data)
def testSerializeProto3(self):
# Verify proto3 unknown fields behavior.
message = unittest_proto3_arena_pb2.TestEmptyMessage()
message.ParseFromString(self.all_fields_data)
self.assertEqual(self.all_fields_data, message.SerializeToString())
def testByteSize(self):
self.assertEqual(self.all_fields.ByteSize(), self.empty_message.ByteSize())
def testListFields(self):
# Make sure ListFields doesn't return unknown fields.
self.assertEqual(0, len(self.empty_message.ListFields()))
def testSerializeMessageSetWireFormatUnknownExtension(self):
# Create a message using the message set wire format with an unknown
# message.
raw = unittest_mset_pb2.RawMessageSet()
# Add an unknown extension.
item = raw.item.add()
item.type_id = 98218603
message1 = message_set_extensions_pb2.TestMessageSetExtension1()
message1.i = 12345
item.message = message1.SerializeToString()
serialized = raw.SerializeToString()
# Parse message using the message set wire format.
proto = message_set_extensions_pb2.TestMessageSet()
proto.MergeFromString(serialized)
unknown_fields = proto.UnknownFields()
self.assertEqual(len(unknown_fields), 1)
# The unknown field should hold wire format data that can be parsed back
# into the original message.
self.assertEqual(unknown_fields[0].field_number, item.type_id)
self.assertEqual(unknown_fields[0].wire_type,
wire_format.WIRETYPE_LENGTH_DELIMITED)
d = unknown_fields[0].data
message_new = message_set_extensions_pb2.TestMessageSetExtension1()
message_new.ParseFromString(d)
self.assertEqual(message1, message_new)
# Verify that the unknown extension is serialized unchanged
reserialized = proto.SerializeToString()
new_raw = unittest_mset_pb2.RawMessageSet()
new_raw.MergeFromString(reserialized)
self.assertEqual(raw, new_raw)
def testEquals(self):
message = unittest_pb2.TestEmptyMessage()
message.ParseFromString(self.all_fields_data)
self.assertEqual(self.empty_message, message)
self.all_fields.ClearField('optional_string')
message.ParseFromString(self.all_fields.SerializeToString())
self.assertNotEqual(self.empty_message, message)
def testDiscardUnknownFields(self):
self.empty_message.DiscardUnknownFields()
self.assertEqual(b'', self.empty_message.SerializeToString())
# Test message field and repeated message field.
message = unittest_pb2.TestAllTypes()
other_message = unittest_pb2.TestAllTypes()
other_message.optional_string = 'discard'
message.optional_nested_message.ParseFromString(
other_message.SerializeToString())
message.repeated_nested_message.add().ParseFromString(
other_message.SerializeToString())
self.assertNotEqual(
b'', message.optional_nested_message.SerializeToString())
self.assertNotEqual(
b'', message.repeated_nested_message[0].SerializeToString())
message.DiscardUnknownFields()
self.assertEqual(b'', message.optional_nested_message.SerializeToString())
self.assertEqual(
b'', message.repeated_nested_message[0].SerializeToString())
msg = map_unittest_pb2.TestMap()
msg.map_int32_all_types[1].optional_nested_message.ParseFromString(
other_message.SerializeToString())
msg.map_string_string['1'] = 'test'
self.assertNotEqual(
b'',
msg.map_int32_all_types[1].optional_nested_message.SerializeToString())
msg.DiscardUnknownFields()
self.assertEqual(
b'',
msg.map_int32_all_types[1].optional_nested_message.SerializeToString())
@testing_refleaks.TestCase
class UnknownFieldsAccessorsTest(unittest.TestCase):
def setUp(self):
self.descriptor = unittest_pb2.TestAllTypes.DESCRIPTOR
self.all_fields = unittest_pb2.TestAllTypes()
test_util.SetAllFields(self.all_fields)
self.all_fields_data = self.all_fields.SerializeToString()
self.empty_message = unittest_pb2.TestEmptyMessage()
self.empty_message.ParseFromString(self.all_fields_data)
# InternalCheckUnknownField() is an additional pure-Python check that inspects
# the internal representation of unknown fields. It cannot be used with the
# C++ implementation because it accesses protected members.
# The check is kept for historical reasons; it is not strictly necessary
# because the serialized string is already verified.
# TODO(jieluo): Remove message._unknown_fields.
def InternalCheckUnknownField(self, name, expected_value):
if api_implementation.Type() == 'cpp':
return
field_descriptor = self.descriptor.fields_by_name[name]
wire_type = type_checkers.FIELD_TYPE_TO_WIRE_TYPE[field_descriptor.type]
field_tag = encoder.TagBytes(field_descriptor.number, wire_type)
result_dict = {}
for tag_bytes, value in self.empty_message._unknown_fields:
if tag_bytes == field_tag:
decoder = unittest_pb2.TestAllTypes._decoders_by_tag[tag_bytes][0]
decoder(memoryview(value), 0, len(value), self.all_fields, result_dict)
self.assertEqual(expected_value, result_dict[field_descriptor])
def CheckUnknownField(self, name, unknown_fields, expected_value):
field_descriptor = self.descriptor.fields_by_name[name]
expected_type = type_checkers.FIELD_TYPE_TO_WIRE_TYPE[
field_descriptor.type]
for unknown_field in unknown_fields:
if unknown_field.field_number == field_descriptor.number:
self.assertEqual(expected_type, unknown_field.wire_type)
if expected_type == 3:
# Check group
self.assertEqual(expected_value[0],
unknown_field.data[0].field_number)
self.assertEqual(expected_value[1], unknown_field.data[0].wire_type)
self.assertEqual(expected_value[2], unknown_field.data[0].data)
continue
if expected_type == wire_format.WIRETYPE_LENGTH_DELIMITED:
self.assertIn(type(unknown_field.data), (str, bytes))
if field_descriptor.label == descriptor.FieldDescriptor.LABEL_REPEATED:
self.assertIn(unknown_field.data, expected_value)
else:
self.assertEqual(expected_value, unknown_field.data)
def testCheckUnknownFieldValue(self):
unknown_fields = self.empty_message.UnknownFields()
# Test enum.
self.CheckUnknownField('optional_nested_enum',
unknown_fields,
self.all_fields.optional_nested_enum)
self.InternalCheckUnknownField('optional_nested_enum',
self.all_fields.optional_nested_enum)
# Test repeated enum.
self.CheckUnknownField('repeated_nested_enum',
unknown_fields,
self.all_fields.repeated_nested_enum)
self.InternalCheckUnknownField('repeated_nested_enum',
self.all_fields.repeated_nested_enum)
# Test varint.
self.CheckUnknownField('optional_int32',
unknown_fields,
self.all_fields.optional_int32)
self.InternalCheckUnknownField('optional_int32',
self.all_fields.optional_int32)
# Test fixed32.
self.CheckUnknownField('optional_fixed32',
unknown_fields,
self.all_fields.optional_fixed32)
self.InternalCheckUnknownField('optional_fixed32',
self.all_fields.optional_fixed32)
# Test fixed64.
self.CheckUnknownField('optional_fixed64',
unknown_fields,
self.all_fields.optional_fixed64)
self.InternalCheckUnknownField('optional_fixed64',
self.all_fields.optional_fixed64)
# Test length delimited.
self.CheckUnknownField('optional_string',
unknown_fields,
self.all_fields.optional_string.encode('utf-8'))
self.InternalCheckUnknownField('optional_string',
self.all_fields.optional_string)
# Test group.
self.CheckUnknownField('optionalgroup',
unknown_fields,
(17, 0, 117))
self.InternalCheckUnknownField('optionalgroup',
self.all_fields.optionalgroup)
self.assertEqual(97, len(unknown_fields))
def testCopyFrom(self):
message = unittest_pb2.TestEmptyMessage()
message.CopyFrom(self.empty_message)
self.assertEqual(message.SerializeToString(), self.all_fields_data)
def testMergeFrom(self):
message = unittest_pb2.TestAllTypes()
message.optional_int32 = 1
message.optional_uint32 = 2
source = unittest_pb2.TestEmptyMessage()
source.ParseFromString(message.SerializeToString())
message.ClearField('optional_int32')
message.optional_int64 = 3
message.optional_uint32 = 4
destination = unittest_pb2.TestEmptyMessage()
unknown_fields = destination.UnknownFields()
self.assertEqual(0, len(unknown_fields))
destination.ParseFromString(message.SerializeToString())
# ParseFromString clears the message, so the previously obtained unknown
# fields view becomes invalid.
with self.assertRaises(ValueError) as context:
len(unknown_fields)
self.assertIn('UnknownFields does not exist.',
str(context.exception))
unknown_fields = destination.UnknownFields()
self.assertEqual(2, len(unknown_fields))
destination.MergeFrom(source)
self.assertEqual(4, len(unknown_fields))
# Check that the fields were correctly merged, even though they are stored
# in the unknown fields set.
message.ParseFromString(destination.SerializeToString())
self.assertEqual(message.optional_int32, 1)
self.assertEqual(message.optional_uint32, 2)
self.assertEqual(message.optional_int64, 3)
def testClear(self):
unknown_fields = self.empty_message.UnknownFields()
self.empty_message.Clear()
# All cleared, even unknown fields.
self.assertEqual(self.empty_message.SerializeToString(), b'')
with self.assertRaises(ValueError) as context:
len(unknown_fields)
self.assertIn('UnknownFields does not exist.',
str(context.exception))
def testSubUnknownFields(self):
message = unittest_pb2.TestAllTypes()
message.optionalgroup.a = 123
destination = unittest_pb2.TestEmptyMessage()
destination.ParseFromString(message.SerializeToString())
sub_unknown_fields = destination.UnknownFields()[0].data
self.assertEqual(1, len(sub_unknown_fields))
self.assertEqual(sub_unknown_fields[0].data, 123)
destination.Clear()
with self.assertRaises(ValueError) as context:
len(sub_unknown_fields)
self.assertIn('UnknownFields does not exist.',
str(context.exception))
with self.assertRaises(ValueError) as context:
# pylint: disable=pointless-statement
sub_unknown_fields[0]
self.assertIn('UnknownFields does not exist.',
str(context.exception))
message.Clear()
message.optional_uint32 = 456
nested_message = unittest_pb2.NestedTestAllTypes()
nested_message.payload.optional_nested_message.ParseFromString(
message.SerializeToString())
unknown_fields = (
nested_message.payload.optional_nested_message.UnknownFields())
self.assertEqual(unknown_fields[0].data, 456)
nested_message.ClearField('payload')
self.assertEqual(unknown_fields[0].data, 456)
unknown_fields = (
nested_message.payload.optional_nested_message.UnknownFields())
self.assertEqual(0, len(unknown_fields))
def testUnknownField(self):
message = unittest_pb2.TestAllTypes()
message.optional_int32 = 123
destination = unittest_pb2.TestEmptyMessage()
destination.ParseFromString(message.SerializeToString())
unknown_field = destination.UnknownFields()[0]
destination.Clear()
with self.assertRaises(ValueError) as context:
unknown_field.data # pylint: disable=pointless-statement
self.assertIn('The parent message might be cleared.',
str(context.exception))
def testUnknownExtensions(self):
message = unittest_pb2.TestEmptyMessageWithExtensions()
message.ParseFromString(self.all_fields_data)
self.assertEqual(len(message.UnknownFields()), 97)
self.assertEqual(message.SerializeToString(), self.all_fields_data)
@testing_refleaks.TestCase
class UnknownEnumValuesTest(unittest.TestCase):
def setUp(self):
self.descriptor = missing_enum_values_pb2.TestEnumValues.DESCRIPTOR
self.message = missing_enum_values_pb2.TestEnumValues()
# TestEnumValues.ZERO = 0, but that value does not exist in the other NestedEnum.
self.message.optional_nested_enum = (
missing_enum_values_pb2.TestEnumValues.ZERO)
self.message.repeated_nested_enum.extend([
missing_enum_values_pb2.TestEnumValues.ZERO,
missing_enum_values_pb2.TestEnumValues.ONE,
])
self.message.packed_nested_enum.extend([
missing_enum_values_pb2.TestEnumValues.ZERO,
missing_enum_values_pb2.TestEnumValues.ONE,
])
self.message_data = self.message.SerializeToString()
self.missing_message = missing_enum_values_pb2.TestMissingEnumValues()
self.missing_message.ParseFromString(self.message_data)
# CheckUnknownField() is an additional pure-Python check that inspects
# the internal representation of unknown fields. It cannot be used with the
# C++ implementation because it accesses protected members.
# The check is kept for historical reasons; it is not strictly necessary
# because the serialized string is already verified.
def CheckUnknownField(self, name, expected_value):
field_descriptor = self.descriptor.fields_by_name[name]
unknown_fields = self.missing_message.UnknownFields()
count = 0
for field in unknown_fields:
if field.field_number == field_descriptor.number:
count += 1
if field_descriptor.label == descriptor.FieldDescriptor.LABEL_REPEATED:
self.assertIn(field.data, expected_value)
else:
self.assertEqual(expected_value, field.data)
if field_descriptor.label == descriptor.FieldDescriptor.LABEL_REPEATED:
self.assertEqual(count, len(expected_value))
else:
self.assertEqual(count, 1)
def testUnknownParseMismatchEnumValue(self):
just_string = missing_enum_values_pb2.JustString()
just_string.dummy = 'blah'
missing = missing_enum_values_pb2.TestEnumValues()
# The parse is invalid, storing the string proto into the set of
# unknown fields.
missing.ParseFromString(just_string.SerializeToString())
# Fetching the enum field shouldn't crash; instead it returns the
# default value.
self.assertEqual(missing.optional_nested_enum, 0)
def testUnknownEnumValue(self):
self.assertFalse(self.missing_message.HasField('optional_nested_enum'))
self.assertEqual(self.missing_message.optional_nested_enum, 2)
# Clear does not do anything.
serialized = self.missing_message.SerializeToString()
self.missing_message.ClearField('optional_nested_enum')
self.assertEqual(self.missing_message.SerializeToString(), serialized)
def testUnknownRepeatedEnumValue(self):
self.assertEqual([], self.missing_message.repeated_nested_enum)
def testUnknownPackedEnumValue(self):
self.assertEqual([], self.missing_message.packed_nested_enum)
def testCheckUnknownFieldValueForEnum(self):
unknown_fields = self.missing_message.UnknownFields()
self.assertEqual(len(unknown_fields), 5)
self.CheckUnknownField('optional_nested_enum',
self.message.optional_nested_enum)
self.CheckUnknownField('repeated_nested_enum',
self.message.repeated_nested_enum)
self.CheckUnknownField('packed_nested_enum',
self.message.packed_nested_enum)
def testRoundTrip(self):
new_message = missing_enum_values_pb2.TestEnumValues()
new_message.ParseFromString(self.missing_message.SerializeToString())
self.assertEqual(self.message, new_message)
if __name__ == '__main__':
unittest.main()

@ -0,0 +1,859 @@
# Protocol Buffers - Google's data interchange format
# Copyright 2008 Google Inc. All rights reserved.
# https://developers.google.com/protocol-buffers/
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Contains well known classes.
This file defines well-known classes that need extra maintenance, including:
- Any
- Duration
- FieldMask
- Struct
- Timestamp
"""
__author__ = 'jieluo@google.com (Jie Luo)'
import calendar
from datetime import datetime
from datetime import timedelta
import six
try:
# Since python 3
import collections.abc as collections_abc
except ImportError:
# Won't work after python 3.8
import collections as collections_abc
from google.protobuf.descriptor import FieldDescriptor
_TIMESTAMPFOMAT = '%Y-%m-%dT%H:%M:%S'
_NANOS_PER_SECOND = 1000000000
_NANOS_PER_MILLISECOND = 1000000
_NANOS_PER_MICROSECOND = 1000
_MILLIS_PER_SECOND = 1000
_MICROS_PER_SECOND = 1000000
_SECONDS_PER_DAY = 24 * 3600
_DURATION_SECONDS_MAX = 315576000000
class Any(object):
"""Class for Any Message type."""
__slots__ = ()
def Pack(self, msg, type_url_prefix='type.googleapis.com/',
deterministic=None):
"""Packs the specified message into current Any message."""
if len(type_url_prefix) < 1 or type_url_prefix[-1] != '/':
self.type_url = '%s/%s' % (type_url_prefix, msg.DESCRIPTOR.full_name)
else:
self.type_url = '%s%s' % (type_url_prefix, msg.DESCRIPTOR.full_name)
self.value = msg.SerializeToString(deterministic=deterministic)
def Unpack(self, msg):
"""Unpacks the current Any message into specified message."""
descriptor = msg.DESCRIPTOR
if not self.Is(descriptor):
return False
msg.ParseFromString(self.value)
return True
def TypeName(self):
"""Returns the protobuf type name of the inner message."""
# Only the last part is used: b/25630112
return self.type_url.split('/')[-1]
def Is(self, descriptor):
"""Checks if this Any represents the given protobuf type."""
return '/' in self.type_url and self.TypeName() == descriptor.full_name
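# Editorial sketch (not part of the original module): a Pack/Unpack round trip
# through the generated Any message, which mixes in the methods above. Assumes
# the standard generated modules any_pb2 and timestamp_pb2 are importable.
def _any_round_trip_sketch():
  from google.protobuf import any_pb2
  from google.protobuf import timestamp_pb2
  ts = timestamp_pb2.Timestamp(seconds=1)
  wrapper = any_pb2.Any()
  wrapper.Pack(ts)
  # The default prefix yields 'type.googleapis.com/google.protobuf.Timestamp'.
  assert wrapper.TypeName() == 'google.protobuf.Timestamp'
  unpacked = timestamp_pb2.Timestamp()
  assert wrapper.Unpack(unpacked) and unpacked.seconds == 1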
_EPOCH_DATETIME = datetime.utcfromtimestamp(0)
class Timestamp(object):
"""Class for Timestamp message type."""
__slots__ = ()
def ToJsonString(self):
"""Converts Timestamp to RFC 3339 date string format.
Returns:
A string converted from timestamp. The string is always Z-normalized
and uses 3, 6 or 9 fractional digits as required to represent the
exact time. Example of the return format: '1972-01-01T10:00:20.021Z'
"""
nanos = self.nanos % _NANOS_PER_SECOND
total_sec = self.seconds + (self.nanos - nanos) // _NANOS_PER_SECOND
seconds = total_sec % _SECONDS_PER_DAY
days = (total_sec - seconds) // _SECONDS_PER_DAY
dt = datetime(1970, 1, 1) + timedelta(days, seconds)
result = dt.isoformat()
if (nanos % 1e9) == 0:
# If there are 0 fractional digits, the fractional
# point '.' should be omitted when serializing.
return result + 'Z'
if (nanos % 1e6) == 0:
# Serialize 3 fractional digits.
return result + '.%03dZ' % (nanos / 1e6)
if (nanos % 1e3) == 0:
# Serialize 6 fractional digits.
return result + '.%06dZ' % (nanos / 1e3)
# Serialize 9 fractional digits.
return result + '.%09dZ' % nanos
def FromJsonString(self, value):
"""Parse a RFC 3339 date string format to Timestamp.
Args:
value: A date string. Any fractional digits (or none) and any offset are
accepted as long as they fit into nanosecond precision.
Example of accepted format: '1972-01-01T10:00:20.021-05:00'
Raises:
ValueError: On parsing problems.
"""
timezone_offset = value.find('Z')
if timezone_offset == -1:
timezone_offset = value.find('+')
if timezone_offset == -1:
timezone_offset = value.rfind('-')
if timezone_offset == -1:
raise ValueError(
'Failed to parse timestamp: missing valid timezone offset.')
time_value = value[0:timezone_offset]
# Parse datetime and nanos.
point_position = time_value.find('.')
if point_position == -1:
second_value = time_value
nano_value = ''
else:
second_value = time_value[:point_position]
nano_value = time_value[point_position + 1:]
date_object = datetime.strptime(second_value, _TIMESTAMPFOMAT)
td = date_object - datetime(1970, 1, 1)
seconds = td.seconds + td.days * _SECONDS_PER_DAY
if len(nano_value) > 9:
raise ValueError(
'Failed to parse Timestamp: nanos {0} more than '
'9 fractional digits.'.format(nano_value))
if nano_value:
nanos = round(float('0.' + nano_value) * 1e9)
else:
nanos = 0
# Parse timezone offsets.
if value[timezone_offset] == 'Z':
if len(value) != timezone_offset + 1:
raise ValueError('Failed to parse timestamp: invalid trailing'
' data {0}.'.format(value))
else:
timezone = value[timezone_offset:]
pos = timezone.find(':')
if pos == -1:
raise ValueError(
'Invalid timezone offset value: {0}.'.format(timezone))
if timezone[0] == '+':
seconds -= (int(timezone[1:pos])*60+int(timezone[pos+1:]))*60
else:
seconds += (int(timezone[1:pos])*60+int(timezone[pos+1:]))*60
# Set seconds and nanos
self.seconds = int(seconds)
self.nanos = int(nanos)
def GetCurrentTime(self):
"""Get the current UTC into Timestamp."""
self.FromDatetime(datetime.utcnow())
def ToNanoseconds(self):
"""Converts Timestamp to nanoseconds since epoch."""
return self.seconds * _NANOS_PER_SECOND + self.nanos
def ToMicroseconds(self):
"""Converts Timestamp to microseconds since epoch."""
return (self.seconds * _MICROS_PER_SECOND +
self.nanos // _NANOS_PER_MICROSECOND)
def ToMilliseconds(self):
"""Converts Timestamp to milliseconds since epoch."""
return (self.seconds * _MILLIS_PER_SECOND +
self.nanos // _NANOS_PER_MILLISECOND)
def ToSeconds(self):
"""Converts Timestamp to seconds since epoch."""
return self.seconds
def FromNanoseconds(self, nanos):
"""Converts nanoseconds since epoch to Timestamp."""
self.seconds = nanos // _NANOS_PER_SECOND
self.nanos = nanos % _NANOS_PER_SECOND
def FromMicroseconds(self, micros):
"""Converts microseconds since epoch to Timestamp."""
self.seconds = micros // _MICROS_PER_SECOND
self.nanos = (micros % _MICROS_PER_SECOND) * _NANOS_PER_MICROSECOND
def FromMilliseconds(self, millis):
"""Converts milliseconds since epoch to Timestamp."""
self.seconds = millis // _MILLIS_PER_SECOND
self.nanos = (millis % _MILLIS_PER_SECOND) * _NANOS_PER_MILLISECOND
def FromSeconds(self, seconds):
"""Converts seconds since epoch to Timestamp."""
self.seconds = seconds
self.nanos = 0
def ToDatetime(self):
"""Converts Timestamp to datetime."""
return _EPOCH_DATETIME + timedelta(
seconds=self.seconds, microseconds=_RoundTowardZero(
self.nanos, _NANOS_PER_MICROSECOND))
def FromDatetime(self, dt):
"""Converts datetime to Timestamp."""
# Using this guide: http://wiki.python.org/moin/WorkingWithTime
# And this conversion guide: http://docs.python.org/library/time.html
# Turn the date parameter into a tuple (struct_time) that can then be
# manipulated into a long value of seconds. During the conversion from
# struct_time to long, the source date is in UTC, and so it follows that the
# correct transformation is calendar.timegm()
self.seconds = calendar.timegm(dt.utctimetuple())
self.nanos = dt.microsecond * _NANOS_PER_MICROSECOND
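# Editorial sketch (not part of the original module): an RFC 3339 round trip
# with the Timestamp methods above, via the generated timestamp_pb2.Timestamp
# (assumed available).
def _timestamp_json_sketch():
  from google.protobuf import timestamp_pb2
  ts = timestamp_pb2.Timestamp(seconds=63108020, nanos=21000000)
  assert ts.ToJsonString() == '1972-01-01T10:00:20.021Z'
  parsed = timestamp_pb2.Timestamp()
  parsed.FromJsonString('1972-01-01T10:00:20.021-05:00')
  # The -05:00 offset is folded into the seconds field.
  assert parsed.seconds == 63108020 + 5 * 3600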
class Duration(object):
"""Class for Duration message type."""
__slots__ = ()
def ToJsonString(self):
"""Converts Duration to string format.
Returns:
A string converted from self. The string will contain
3, 6, or 9 fractional digits depending on the precision required to
represent the exact Duration value. For example: "1s", "1.010s",
"1.000000100s", "-3.100s"
"""
_CheckDurationValid(self.seconds, self.nanos)
if self.seconds < 0 or self.nanos < 0:
result = '-'
seconds = - self.seconds + int((0 - self.nanos) // 1e9)
nanos = (0 - self.nanos) % 1e9
else:
result = ''
seconds = self.seconds + int(self.nanos // 1e9)
nanos = self.nanos % 1e9
result += '%d' % seconds
if (nanos % 1e9) == 0:
# If there are 0 fractional digits, the fractional
# point '.' should be omitted when serializing.
return result + 's'
if (nanos % 1e6) == 0:
# Serialize 3 fractional digits.
return result + '.%03ds' % (nanos / 1e6)
if (nanos % 1e3) == 0:
# Serialize 6 fractional digits.
return result + '.%06ds' % (nanos / 1e3)
# Serialize 9 fractional digits.
return result + '.%09ds' % nanos
def FromJsonString(self, value):
"""Converts a string to Duration.
Args:
value: A string to be converted. The string must end with 's'. Any
fractional digits (or none) are accepted as long as they fit into
nanosecond precision. For example: "1s", "1.01s", "1.0000001s", "-3.100s".
Raises:
ValueError: On parsing problems.
"""
if len(value) < 1 or value[-1] != 's':
raise ValueError(
'Duration must end with letter "s": {0}.'.format(value))
try:
pos = value.find('.')
if pos == -1:
seconds = int(value[:-1])
nanos = 0
else:
seconds = int(value[:pos])
if value[0] == '-':
nanos = int(round(float('-0{0}'.format(value[pos: -1])) *1e9))
else:
nanos = int(round(float('0{0}'.format(value[pos: -1])) *1e9))
_CheckDurationValid(seconds, nanos)
self.seconds = seconds
self.nanos = nanos
except ValueError as e:
raise ValueError(
'Couldn\'t parse duration: {0} : {1}.'.format(value, e))
def ToNanoseconds(self):
"""Converts a Duration to nanoseconds."""
return self.seconds * _NANOS_PER_SECOND + self.nanos
def ToMicroseconds(self):
"""Converts a Duration to microseconds."""
micros = _RoundTowardZero(self.nanos, _NANOS_PER_MICROSECOND)
return self.seconds * _MICROS_PER_SECOND + micros
def ToMilliseconds(self):
"""Converts a Duration to milliseconds."""
millis = _RoundTowardZero(self.nanos, _NANOS_PER_MILLISECOND)
return self.seconds * _MILLIS_PER_SECOND + millis
def ToSeconds(self):
"""Converts a Duration to seconds."""
return self.seconds
def FromNanoseconds(self, nanos):
"""Converts nanoseconds to Duration."""
self._NormalizeDuration(nanos // _NANOS_PER_SECOND,
nanos % _NANOS_PER_SECOND)
def FromMicroseconds(self, micros):
"""Converts microseconds to Duration."""
self._NormalizeDuration(
micros // _MICROS_PER_SECOND,
(micros % _MICROS_PER_SECOND) * _NANOS_PER_MICROSECOND)
def FromMilliseconds(self, millis):
"""Converts milliseconds to Duration."""
self._NormalizeDuration(
millis // _MILLIS_PER_SECOND,
(millis % _MILLIS_PER_SECOND) * _NANOS_PER_MILLISECOND)
def FromSeconds(self, seconds):
"""Converts seconds to Duration."""
self.seconds = seconds
self.nanos = 0
def ToTimedelta(self):
"""Converts Duration to timedelta."""
return timedelta(
seconds=self.seconds, microseconds=_RoundTowardZero(
self.nanos, _NANOS_PER_MICROSECOND))
def FromTimedelta(self, td):
"""Converts timedelta to Duration."""
self._NormalizeDuration(td.seconds + td.days * _SECONDS_PER_DAY,
td.microseconds * _NANOS_PER_MICROSECOND)
def _NormalizeDuration(self, seconds, nanos):
"""Set Duration by seconds and nanos."""
# Force nanos to be negative if the duration is negative.
if seconds < 0 and nanos > 0:
seconds += 1
nanos -= _NANOS_PER_SECOND
self.seconds = seconds
self.nanos = nanos
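# Editorial sketch (not part of the original module): Duration JSON formatting
# and parsing with the methods above, via the generated duration_pb2.Duration
# (assumed available).
def _duration_json_sketch():
  from google.protobuf import duration_pb2
  d = duration_pb2.Duration(seconds=1, nanos=10000000)
  assert d.ToJsonString() == '1.010s'
  d.FromJsonString('-3.100s')
  # The sign is carried by both fields, as enforced by _CheckDurationValid below.
  assert (d.seconds, d.nanos) == (-3, -100000000)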
def _CheckDurationValid(seconds, nanos):
if seconds < -_DURATION_SECONDS_MAX or seconds > _DURATION_SECONDS_MAX:
raise ValueError(
'Duration is not valid: Seconds {0} must be in range '
'[-315576000000, 315576000000].'.format(seconds))
if nanos <= -_NANOS_PER_SECOND or nanos >= _NANOS_PER_SECOND:
raise ValueError(
'Duration is not valid: Nanos {0} must be in range '
'[-999999999, 999999999].'.format(nanos))
if (nanos < 0 and seconds > 0) or (nanos > 0 and seconds < 0):
raise ValueError(
'Duration is not valid: Sign mismatch.')
def _RoundTowardZero(value, divider):
"""Truncates the remainder part after division."""
# For some languages, the sign of the remainder is implementation
# dependent if any of the operands is negative. Here we enforce
# "rounded toward zero" semantics. For example, for (-5) / 2 an
# implementation may give -3 as the result with the remainder being
# 1. This function ensures we always return -2 (closer to zero).
result = value // divider
remainder = value % divider
if result < 0 and remainder > 0:
return result + 1
else:
return result
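# Editorial note (not part of the original module): Python's floor division
# rounds toward negative infinity, so -5 // 2 == -3, while the helper above
# returns -2, matching C-style truncation toward zero.
def _round_toward_zero_sketch():
  assert -5 // 2 == -3
  assert _RoundTowardZero(-5, 2) == -2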
class FieldMask(object):
"""Class for FieldMask message type."""
__slots__ = ()
def ToJsonString(self):
"""Converts FieldMask to string according to proto3 JSON spec."""
camelcase_paths = []
for path in self.paths:
camelcase_paths.append(_SnakeCaseToCamelCase(path))
return ','.join(camelcase_paths)
def FromJsonString(self, value):
"""Converts string to FieldMask according to proto3 JSON spec."""
self.Clear()
if value:
for path in value.split(','):
self.paths.append(_CamelCaseToSnakeCase(path))
def IsValidForDescriptor(self, message_descriptor):
"""Checks whether the FieldMask is valid for Message Descriptor."""
for path in self.paths:
if not _IsValidPath(message_descriptor, path):
return False
return True
def AllFieldsFromDescriptor(self, message_descriptor):
"""Gets all direct fields of Message Descriptor to FieldMask."""
self.Clear()
for field in message_descriptor.fields:
self.paths.append(field.name)
def CanonicalFormFromMask(self, mask):
"""Converts a FieldMask to the canonical form.
Removes paths that are covered by another path. For example,
"foo.bar" is covered by "foo" and will be removed if "foo"
is also in the FieldMask. Then sorts all paths in alphabetical order.
Args:
mask: The original FieldMask to be converted.
"""
tree = _FieldMaskTree(mask)
tree.ToFieldMask(self)
def Union(self, mask1, mask2):
"""Merges mask1 and mask2 into this FieldMask."""
_CheckFieldMaskMessage(mask1)
_CheckFieldMaskMessage(mask2)
tree = _FieldMaskTree(mask1)
tree.MergeFromFieldMask(mask2)
tree.ToFieldMask(self)
def Intersect(self, mask1, mask2):
"""Intersects mask1 and mask2 into this FieldMask."""
_CheckFieldMaskMessage(mask1)
_CheckFieldMaskMessage(mask2)
tree = _FieldMaskTree(mask1)
intersection = _FieldMaskTree()
for path in mask2.paths:
tree.IntersectPath(path, intersection)
intersection.ToFieldMask(self)
def MergeMessage(
self, source, destination,
replace_message_field=False, replace_repeated_field=False):
"""Merges fields specified in FieldMask from source to destination.
Args:
source: Source message.
destination: The destination message to be merged into.
replace_message_field: Replace message field if True. Merge message
field if False.
replace_repeated_field: Replace repeated field if True. Append
elements of repeated field if False.
"""
tree = _FieldMaskTree(self)
tree.MergeMessage(
source, destination, replace_message_field, replace_repeated_field)
def _IsValidPath(message_descriptor, path):
"""Checks whether the path is valid for Message Descriptor."""
parts = path.split('.')
last = parts.pop()
for name in parts:
field = message_descriptor.fields_by_name.get(name)
if (field is None or
field.label == FieldDescriptor.LABEL_REPEATED or
field.type != FieldDescriptor.TYPE_MESSAGE):
return False
message_descriptor = field.message_type
return last in message_descriptor.fields_by_name
def _CheckFieldMaskMessage(message):
"""Raises ValueError if message is not a FieldMask."""
message_descriptor = message.DESCRIPTOR
if (message_descriptor.name != 'FieldMask' or
message_descriptor.file.name != 'google/protobuf/field_mask.proto'):
raise ValueError('Message {0} is not a FieldMask.'.format(
message_descriptor.full_name))
def _SnakeCaseToCamelCase(path_name):
"""Converts a path name from snake_case to camelCase."""
result = []
after_underscore = False
for c in path_name:
if c.isupper():
raise ValueError(
'Fail to print FieldMask to Json string: Path name '
'{0} must not contain uppercase letters.'.format(path_name))
if after_underscore:
if c.islower():
result.append(c.upper())
after_underscore = False
else:
raise ValueError(
'Fail to print FieldMask to Json string: The '
'character after a "_" must be a lowercase letter '
'in path name {0}.'.format(path_name))
elif c == '_':
after_underscore = True
else:
result += c
if after_underscore:
raise ValueError('Fail to print FieldMask to Json string: Trailing "_" '
'in path name {0}.'.format(path_name))
return ''.join(result)
def _CamelCaseToSnakeCase(path_name):
"""Converts a field name from camelCase to snake_case."""
result = []
for c in path_name:
if c == '_':
raise ValueError('Fail to parse FieldMask: Path name '
'{0} must not contain "_"s.'.format(path_name))
if c.isupper():
result += '_'
result += c.lower()
else:
result += c
return ''.join(result)
class _FieldMaskTree(object):
"""Represents a FieldMask in a tree structure.
For example, given a FieldMask "foo.bar,foo.baz,bar.baz",
the FieldMaskTree will be:
[_root] -+- foo -+- bar
| |
| +- baz
|
+- bar --- baz
In the tree, each leaf node represents a field path.
"""
__slots__ = ('_root',)
def __init__(self, field_mask=None):
"""Initializes the tree by FieldMask."""
self._root = {}
if field_mask:
self.MergeFromFieldMask(field_mask)
def MergeFromFieldMask(self, field_mask):
"""Merges a FieldMask to the tree."""
for path in field_mask.paths:
self.AddPath(path)
def AddPath(self, path):
"""Adds a field path into the tree.
If the field path to add is a sub-path of an existing field path
in the tree (i.e., a leaf node), it means the tree already matches
the given path so nothing will be added to the tree. If the path
matches an existing non-leaf node in the tree, that non-leaf node
will be turned into a leaf node with all its children removed because
the path matches all the node's children. Otherwise, a new path will
be added.
Args:
path: The field path to add.
"""
node = self._root
for name in path.split('.'):
if name not in node:
node[name] = {}
elif not node[name]:
# Pre-existing empty node implies we already have this entire tree.
return
node = node[name]
# Remove any sub-trees we might have had.
node.clear()
def ToFieldMask(self, field_mask):
"""Converts the tree to a FieldMask."""
field_mask.Clear()
_AddFieldPaths(self._root, '', field_mask)
def IntersectPath(self, path, intersection):
"""Calculates the intersection part of a field path with this tree.
Args:
path: The field path to intersect with this tree.
intersection: The output tree in which the intersection part is recorded.
"""
node = self._root
for name in path.split('.'):
if name not in node:
return
elif not node[name]:
intersection.AddPath(path)
return
node = node[name]
intersection.AddLeafNodes(path, node)
def AddLeafNodes(self, prefix, node):
"""Adds leaf nodes begin with prefix to this tree."""
if not node:
self.AddPath(prefix)
for name in node:
child_path = prefix + '.' + name
self.AddLeafNodes(child_path, node[name])
def MergeMessage(
self, source, destination,
replace_message, replace_repeated):
"""Merge all fields specified by this tree from source to destination."""
_MergeMessage(
self._root, source, destination, replace_message, replace_repeated)
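# Editorial sketch (not part of the original module): the covering behavior
# described in AddPath above, observed through FieldMask.CanonicalFormFromMask
# and the generated field_mask_pb2.FieldMask (assumed available).
def _field_mask_tree_sketch():
  from google.protobuf import field_mask_pb2
  mask = field_mask_pb2.FieldMask(paths=['foo.bar', 'baz', 'foo'])
  canonical = field_mask_pb2.FieldMask()
  canonical.CanonicalFormFromMask(mask)
  # 'foo.bar' is covered by 'foo', so only the sorted covering paths remain.
  assert list(canonical.paths) == ['baz', 'foo']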
def _StrConvert(value):
"""Converts value to str if it is not."""
# This file is imported by the C extension, and some methods like ClearField
# require a native str for the field name. Python 2 and Python 3 have
# different text types, so the value may arrive here as unicode.
if not isinstance(value, str):
return value.encode('utf-8')
return value
def _MergeMessage(
node, source, destination, replace_message, replace_repeated):
"""Merge all fields specified by a sub-tree from source to destination."""
source_descriptor = source.DESCRIPTOR
for name in node:
child = node[name]
field = source_descriptor.fields_by_name[name]
if field is None:
raise ValueError('Error: Can\'t find field {0} in message {1}.'.format(
name, source_descriptor.full_name))
if child:
# Sub-paths are only allowed for singular message fields.
if (field.label == FieldDescriptor.LABEL_REPEATED or
field.cpp_type != FieldDescriptor.CPPTYPE_MESSAGE):
raise ValueError('Error: Field {0} in message {1} is not a singular '
'message field and cannot have sub-fields.'.format(
name, source_descriptor.full_name))
if source.HasField(name):
_MergeMessage(
child, getattr(source, name), getattr(destination, name),
replace_message, replace_repeated)
continue
if field.label == FieldDescriptor.LABEL_REPEATED:
if replace_repeated:
destination.ClearField(_StrConvert(name))
repeated_source = getattr(source, name)
repeated_destination = getattr(destination, name)
repeated_destination.MergeFrom(repeated_source)
else:
if field.cpp_type == FieldDescriptor.CPPTYPE_MESSAGE:
if replace_message:
destination.ClearField(_StrConvert(name))
if source.HasField(name):
getattr(destination, name).MergeFrom(getattr(source, name))
else:
setattr(destination, name, getattr(source, name))
def _AddFieldPaths(node, prefix, field_mask):
"""Adds the field paths descended from node to field_mask."""
if not node and prefix:
field_mask.paths.append(prefix)
return
for name in sorted(node):
if prefix:
child_path = prefix + '.' + name
else:
child_path = name
_AddFieldPaths(node[name], child_path, field_mask)
_INT_OR_FLOAT = six.integer_types + (float,)
def _SetStructValue(struct_value, value):
if value is None:
struct_value.null_value = 0
elif isinstance(value, bool):
# Note: this check must come before the number check because in Python
# True and False are also considered numbers.
struct_value.bool_value = value
elif isinstance(value, six.string_types):
struct_value.string_value = value
elif isinstance(value, _INT_OR_FLOAT):
struct_value.number_value = value
elif isinstance(value, (dict, Struct)):
struct_value.struct_value.Clear()
struct_value.struct_value.update(value)
elif isinstance(value, (list, ListValue)):
struct_value.list_value.Clear()
struct_value.list_value.extend(value)
else:
raise ValueError('Unexpected type')
def _GetStructValue(struct_value):
which = struct_value.WhichOneof('kind')
if which == 'struct_value':
return struct_value.struct_value
elif which == 'null_value':
return None
elif which == 'number_value':
return struct_value.number_value
elif which == 'string_value':
return struct_value.string_value
elif which == 'bool_value':
return struct_value.bool_value
elif which == 'list_value':
return struct_value.list_value
elif which is None:
raise ValueError('Value not set')
class Struct(object):
"""Class for Struct message type."""
__slots__ = ()
def __getitem__(self, key):
return _GetStructValue(self.fields[key])
def __contains__(self, item):
return item in self.fields
def __setitem__(self, key, value):
_SetStructValue(self.fields[key], value)
def __delitem__(self, key):
del self.fields[key]
def __len__(self):
return len(self.fields)
def __iter__(self):
return iter(self.fields)
def keys(self): # pylint: disable=invalid-name
return self.fields.keys()
def values(self): # pylint: disable=invalid-name
return [self[key] for key in self]
def items(self): # pylint: disable=invalid-name
return [(key, self[key]) for key in self]
def get_or_create_list(self, key):
"""Returns a list for this key, creating if it didn't exist already."""
if not self.fields[key].HasField('list_value'):
# Clear will mark list_value modified which will indeed create a list.
self.fields[key].list_value.Clear()
return self.fields[key].list_value
def get_or_create_struct(self, key):
"""Returns a struct for this key, creating if it didn't exist already."""
if not self.fields[key].HasField('struct_value'):
# Clear will mark struct_value modified which will indeed create a struct.
self.fields[key].struct_value.Clear()
return self.fields[key].struct_value
def update(self, dictionary): # pylint: disable=invalid-name
for key, value in dictionary.items():
_SetStructValue(self.fields[key], value)
collections_abc.MutableMapping.register(Struct)
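# Editorial sketch (not part of the original module): the dict-style access
# provided by the Struct mixin above, via the generated struct_pb2.Struct
# (assumed available). Numbers come back as floats because Struct stores them
# in a double-valued field.
def _struct_sketch():
  from google.protobuf import struct_pb2
  s = struct_pb2.Struct()
  s.update({'name': 'proto', 'count': 3, 'flag': True})
  assert s['name'] == 'proto' and s['count'] == 3.0 and s['flag'] is True
  s['nested'] = {'a': 1}  # stored as a nested Struct
  assert s['nested']['a'] == 1.0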
class ListValue(object):
"""Class for ListValue message type."""
__slots__ = ()
def __len__(self):
return len(self.values)
def append(self, value):
_SetStructValue(self.values.add(), value)
def extend(self, elem_seq):
for value in elem_seq:
self.append(value)
def __getitem__(self, index):
"""Retrieves item by the specified index."""
return _GetStructValue(self.values.__getitem__(index))
def __setitem__(self, index, value):
_SetStructValue(self.values.__getitem__(index), value)
def __delitem__(self, key):
del self.values[key]
def items(self):
for i in range(len(self)):
yield self[i]
def add_struct(self):
"""Appends and returns a struct value as the next value in the list."""
struct_value = self.values.add().struct_value
# Clear will mark struct_value modified which will indeed create a struct.
struct_value.Clear()
return struct_value
def add_list(self):
"""Appends and returns a list value as the next value in the list."""
list_value = self.values.add().list_value
# Clear will mark list_value modified which will indeed create a list.
list_value.Clear()
return list_value
collections_abc.MutableSequence.register(ListValue)
WKTBASES = {
'google.protobuf.Any': Any,
'google.protobuf.Duration': Duration,
'google.protobuf.FieldMask': FieldMask,
'google.protobuf.ListValue': ListValue,
'google.protobuf.Struct': Struct,
'google.protobuf.Timestamp': Timestamp,
}

@ -0,0 +1,268 @@
# Protocol Buffers - Google's data interchange format
# Copyright 2008 Google Inc. All rights reserved.
# https://developers.google.com/protocol-buffers/
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Constants and static functions to support protocol buffer wire format."""
__author__ = 'robinson@google.com (Will Robinson)'
import struct
from google.protobuf import descriptor
from google.protobuf import message
TAG_TYPE_BITS = 3 # Number of bits used to hold type info in a proto tag.
TAG_TYPE_MASK = (1 << TAG_TYPE_BITS) - 1 # 0x7
# These numbers identify the wire type of a protocol buffer value.
# We use the least-significant TAG_TYPE_BITS bits of the varint-encoded
# tag-and-type to store one of these WIRETYPE_* constants.
# These values must match WireType enum in google/protobuf/wire_format.h.
WIRETYPE_VARINT = 0
WIRETYPE_FIXED64 = 1
WIRETYPE_LENGTH_DELIMITED = 2
WIRETYPE_START_GROUP = 3
WIRETYPE_END_GROUP = 4
WIRETYPE_FIXED32 = 5
_WIRETYPE_MAX = 5
# Bounds for various integer types.
INT32_MAX = int((1 << 31) - 1)
INT32_MIN = int(-(1 << 31))
UINT32_MAX = (1 << 32) - 1
INT64_MAX = (1 << 63) - 1
INT64_MIN = -(1 << 63)
UINT64_MAX = (1 << 64) - 1
# "struct" format strings that will encode/decode the specified formats.
FORMAT_UINT32_LITTLE_ENDIAN = '<I'
FORMAT_UINT64_LITTLE_ENDIAN = '<Q'
FORMAT_FLOAT_LITTLE_ENDIAN = '<f'
FORMAT_DOUBLE_LITTLE_ENDIAN = '<d'
# We'll have to provide alternate implementations of AppendLittleEndian*() on
# any architectures where these checks fail.
if struct.calcsize(FORMAT_UINT32_LITTLE_ENDIAN) != 4:
raise AssertionError('Format "I" is not a 32-bit number.')
if struct.calcsize(FORMAT_UINT64_LITTLE_ENDIAN) != 8:
raise AssertionError('Format "Q" is not a 64-bit number.')
def PackTag(field_number, wire_type):
"""Returns an unsigned 32-bit integer that encodes the field number and
wire type information in standard protocol message wire format.
Args:
field_number: Expected to be an integer in the range [1, 1 << 29)
wire_type: One of the WIRETYPE_* constants.
"""
if not 0 <= wire_type <= _WIRETYPE_MAX:
raise message.EncodeError('Unknown wire type: %d' % wire_type)
return (field_number << TAG_TYPE_BITS) | wire_type
def UnpackTag(tag):
"""The inverse of PackTag(). Given an unsigned 32-bit number,
returns a (field_number, wire_type) tuple.
"""
return (tag >> TAG_TYPE_BITS), (tag & TAG_TYPE_MASK)
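# Editorial sketch (not part of the original module): the tag layout described
# above. Field number 5 with WIRETYPE_LENGTH_DELIMITED packs to
# (5 << TAG_TYPE_BITS) | 2 == 0x2a, and UnpackTag recovers both parts.
def _pack_tag_sketch():
  tag = PackTag(5, WIRETYPE_LENGTH_DELIMITED)
  assert tag == 0x2a
  assert UnpackTag(tag) == (5, WIRETYPE_LENGTH_DELIMITED)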
def ZigZagEncode(value):
"""ZigZag Transform: Encodes signed integers so that they can be
effectively used with varint encoding. See wire_format.h for
more details.
"""
if value >= 0:
return value << 1
return (value << 1) ^ (~0)
def ZigZagDecode(value):
"""Inverse of ZigZagEncode()."""
if not value & 0x1:
return value >> 1
return (value >> 1) ^ (~0)
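# Editorial sketch (not part of the original module): ZigZag interleaves
# positive and negative values so small magnitudes stay small under varint
# encoding, and decoding inverts it exactly.
def _zigzag_sketch():
  assert [ZigZagEncode(v) for v in (0, -1, 1, -2, 2)] == [0, 1, 2, 3, 4]
  assert ZigZagDecode(ZigZagEncode(-123456789)) == -123456789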
# The *ByteSize() functions below return the number of bytes required to
# serialize "field number + type" information and then serialize the value.
def Int32ByteSize(field_number, int32):
return Int64ByteSize(field_number, int32)
def Int32ByteSizeNoTag(int32):
return _VarUInt64ByteSizeNoTag(0xffffffffffffffff & int32)
def Int64ByteSize(field_number, int64):
# Have to convert to uint before calling UInt64ByteSize().
return UInt64ByteSize(field_number, 0xffffffffffffffff & int64)
def UInt32ByteSize(field_number, uint32):
return UInt64ByteSize(field_number, uint32)
def UInt64ByteSize(field_number, uint64):
return TagByteSize(field_number) + _VarUInt64ByteSizeNoTag(uint64)
def SInt32ByteSize(field_number, int32):
return UInt32ByteSize(field_number, ZigZagEncode(int32))
def SInt64ByteSize(field_number, int64):
return UInt64ByteSize(field_number, ZigZagEncode(int64))
def Fixed32ByteSize(field_number, fixed32):
return TagByteSize(field_number) + 4
def Fixed64ByteSize(field_number, fixed64):
return TagByteSize(field_number) + 8
def SFixed32ByteSize(field_number, sfixed32):
return TagByteSize(field_number) + 4
def SFixed64ByteSize(field_number, sfixed64):
return TagByteSize(field_number) + 8
def FloatByteSize(field_number, flt):
return TagByteSize(field_number) + 4
def DoubleByteSize(field_number, double):
return TagByteSize(field_number) + 8
def BoolByteSize(field_number, b):
return TagByteSize(field_number) + 1
def EnumByteSize(field_number, enum):
return UInt32ByteSize(field_number, enum)
def StringByteSize(field_number, string):
return BytesByteSize(field_number, string.encode('utf-8'))
def BytesByteSize(field_number, b):
return (TagByteSize(field_number)
+ _VarUInt64ByteSizeNoTag(len(b))
+ len(b))
def GroupByteSize(field_number, message):
return (2 * TagByteSize(field_number) # START and END group.
+ message.ByteSize())
def MessageByteSize(field_number, message):
return (TagByteSize(field_number)
+ _VarUInt64ByteSizeNoTag(message.ByteSize())
+ message.ByteSize())
def MessageSetItemByteSize(field_number, msg):
# First compute the sizes of the tags.
# There are two tags for the beginning and end of the repeated group (field
# number 1), one tag with field number 2 (type_id), and one tag with field
# number 3 (message).
total_size = (2 * TagByteSize(1) + TagByteSize(2) + TagByteSize(3))
# Add the number of bytes for type_id.
total_size += _VarUInt64ByteSizeNoTag(field_number)
message_size = msg.ByteSize()
# The number of bytes for encoding the length of the message.
total_size += _VarUInt64ByteSizeNoTag(message_size)
# The size of the message.
total_size += message_size
return total_size
def TagByteSize(field_number):
"""Returns the bytes required to serialize a tag with this field number."""
# Just pass in type 0, since the type won't affect the tag+type size.
return _VarUInt64ByteSizeNoTag(PackTag(field_number, 0))
# Private helper function for the *ByteSize() functions above.
def _VarUInt64ByteSizeNoTag(uint64):
"""Returns the number of bytes required to serialize a single varint
using boundary value comparisons. (unrolled loop optimization -WPierce)
uint64 must be unsigned.
"""
if uint64 <= 0x7f: return 1
if uint64 <= 0x3fff: return 2
if uint64 <= 0x1fffff: return 3
if uint64 <= 0xfffffff: return 4
if uint64 <= 0x7ffffffff: return 5
if uint64 <= 0x3ffffffffff: return 6
if uint64 <= 0x1ffffffffffff: return 7
if uint64 <= 0xffffffffffffff: return 8
if uint64 <= 0x7fffffffffffffff: return 9
if uint64 > UINT64_MAX:
raise message.EncodeError('Value out of range: %d' % uint64)
return 10
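# Editorial check (not part of the original module): each additional 7 bits of
# payload costs one more varint byte, which is exactly what the boundary
# comparisons above encode.
def _varint_size_sketch():
  assert _VarUInt64ByteSizeNoTag(0x7f) == 1
  assert _VarUInt64ByteSizeNoTag(0x80) == 2
  assert _VarUInt64ByteSizeNoTag(1 << 63) == 10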
NON_PACKABLE_TYPES = (
descriptor.FieldDescriptor.TYPE_STRING,
descriptor.FieldDescriptor.TYPE_GROUP,
descriptor.FieldDescriptor.TYPE_MESSAGE,
descriptor.FieldDescriptor.TYPE_BYTES
)
def IsTypePackable(field_type):
"""Return true iff packable = true is valid for fields of this type.
Args:
field_type: a FieldDescriptor::Type value.
Returns:
True iff fields of this type are packable.
"""
return field_type not in NON_PACKABLE_TYPES

@ -0,0 +1,257 @@
#! /usr/bin/env python
#
# Protocol Buffers - Google's data interchange format
# Copyright 2008 Google Inc. All rights reserved.
# https://developers.google.com/protocol-buffers/
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Test for google.protobuf.internal.wire_format."""
__author__ = 'robinson@google.com (Will Robinson)'
try:
import unittest2 as unittest #PY26
except ImportError:
import unittest
from google.protobuf import message
from google.protobuf.internal import wire_format
class WireFormatTest(unittest.TestCase):
def testPackTag(self):
field_number = 0xabc
tag_type = 2
self.assertEqual((field_number << 3) | tag_type,
wire_format.PackTag(field_number, tag_type))
PackTag = wire_format.PackTag
# Number too high.
self.assertRaises(message.EncodeError, PackTag, field_number, 6)
# Number too low.
self.assertRaises(message.EncodeError, PackTag, field_number, -1)
def testUnpackTag(self):
# Test field numbers that will require various varint sizes.
for expected_field_number in (1, 15, 16, 2047, 2048):
for expected_wire_type in range(6): # Highest-numbered wiretype is 5.
field_number, wire_type = wire_format.UnpackTag(
wire_format.PackTag(expected_field_number, expected_wire_type))
self.assertEqual(expected_field_number, field_number)
self.assertEqual(expected_wire_type, wire_type)
self.assertRaises(TypeError, wire_format.UnpackTag, None)
self.assertRaises(TypeError, wire_format.UnpackTag, 'abc')
self.assertRaises(TypeError, wire_format.UnpackTag, 0.0)
self.assertRaises(TypeError, wire_format.UnpackTag, object())
def testZigZagEncode(self):
Z = wire_format.ZigZagEncode
self.assertEqual(0, Z(0))
self.assertEqual(1, Z(-1))
self.assertEqual(2, Z(1))
self.assertEqual(3, Z(-2))
self.assertEqual(4, Z(2))
self.assertEqual(0xfffffffe, Z(0x7fffffff))
self.assertEqual(0xffffffff, Z(-0x80000000))
self.assertEqual(0xfffffffffffffffe, Z(0x7fffffffffffffff))
self.assertEqual(0xffffffffffffffff, Z(-0x8000000000000000))
self.assertRaises(TypeError, Z, None)
self.assertRaises(TypeError, Z, 'abcd')
self.assertRaises(TypeError, Z, 0.0)
self.assertRaises(TypeError, Z, object())
def testZigZagDecode(self):
Z = wire_format.ZigZagDecode
self.assertEqual(0, Z(0))
self.assertEqual(-1, Z(1))
self.assertEqual(1, Z(2))
self.assertEqual(-2, Z(3))
self.assertEqual(2, Z(4))
self.assertEqual(0x7fffffff, Z(0xfffffffe))
self.assertEqual(-0x80000000, Z(0xffffffff))
self.assertEqual(0x7fffffffffffffff, Z(0xfffffffffffffffe))
self.assertEqual(-0x8000000000000000, Z(0xffffffffffffffff))
self.assertRaises(TypeError, Z, None)
self.assertRaises(TypeError, Z, 'abcd')
self.assertRaises(TypeError, Z, 0.0)
self.assertRaises(TypeError, Z, object())
def NumericByteSizeTestHelper(self, byte_size_fn, value, expected_value_size):
# Use field numbers that cause various byte sizes for the tag information.
for field_number, tag_bytes in ((15, 1), (16, 2), (2047, 2), (2048, 3)):
expected_size = expected_value_size + tag_bytes
actual_size = byte_size_fn(field_number, value)
self.assertEqual(expected_size, actual_size,
'byte_size_fn: %s, field_number: %d, value: %r\n'
                       'Expected: %d, Actual: %d' % (
byte_size_fn, field_number, value, expected_size, actual_size))
def testByteSizeFunctions(self):
# Test all numeric *ByteSize() functions.
NUMERIC_ARGS = [
# Int32ByteSize().
[wire_format.Int32ByteSize, 0, 1],
[wire_format.Int32ByteSize, 127, 1],
[wire_format.Int32ByteSize, 128, 2],
[wire_format.Int32ByteSize, -1, 10],
# Int64ByteSize().
[wire_format.Int64ByteSize, 0, 1],
[wire_format.Int64ByteSize, 127, 1],
[wire_format.Int64ByteSize, 128, 2],
[wire_format.Int64ByteSize, -1, 10],
# UInt32ByteSize().
[wire_format.UInt32ByteSize, 0, 1],
[wire_format.UInt32ByteSize, 127, 1],
[wire_format.UInt32ByteSize, 128, 2],
[wire_format.UInt32ByteSize, wire_format.UINT32_MAX, 5],
# UInt64ByteSize().
[wire_format.UInt64ByteSize, 0, 1],
[wire_format.UInt64ByteSize, 127, 1],
[wire_format.UInt64ByteSize, 128, 2],
[wire_format.UInt64ByteSize, wire_format.UINT64_MAX, 10],
# SInt32ByteSize().
[wire_format.SInt32ByteSize, 0, 1],
[wire_format.SInt32ByteSize, -1, 1],
[wire_format.SInt32ByteSize, 1, 1],
[wire_format.SInt32ByteSize, -63, 1],
[wire_format.SInt32ByteSize, 63, 1],
[wire_format.SInt32ByteSize, -64, 1],
[wire_format.SInt32ByteSize, 64, 2],
# SInt64ByteSize().
[wire_format.SInt64ByteSize, 0, 1],
[wire_format.SInt64ByteSize, -1, 1],
[wire_format.SInt64ByteSize, 1, 1],
[wire_format.SInt64ByteSize, -63, 1],
[wire_format.SInt64ByteSize, 63, 1],
[wire_format.SInt64ByteSize, -64, 1],
[wire_format.SInt64ByteSize, 64, 2],
# Fixed32ByteSize().
[wire_format.Fixed32ByteSize, 0, 4],
[wire_format.Fixed32ByteSize, wire_format.UINT32_MAX, 4],
# Fixed64ByteSize().
[wire_format.Fixed64ByteSize, 0, 8],
[wire_format.Fixed64ByteSize, wire_format.UINT64_MAX, 8],
# SFixed32ByteSize().
[wire_format.SFixed32ByteSize, 0, 4],
[wire_format.SFixed32ByteSize, wire_format.INT32_MIN, 4],
[wire_format.SFixed32ByteSize, wire_format.INT32_MAX, 4],
# SFixed64ByteSize().
[wire_format.SFixed64ByteSize, 0, 8],
[wire_format.SFixed64ByteSize, wire_format.INT64_MIN, 8],
[wire_format.SFixed64ByteSize, wire_format.INT64_MAX, 8],
# FloatByteSize().
[wire_format.FloatByteSize, 0.0, 4],
[wire_format.FloatByteSize, 1000000000.0, 4],
[wire_format.FloatByteSize, -1000000000.0, 4],
# DoubleByteSize().
[wire_format.DoubleByteSize, 0.0, 8],
[wire_format.DoubleByteSize, 1000000000.0, 8],
[wire_format.DoubleByteSize, -1000000000.0, 8],
# BoolByteSize().
[wire_format.BoolByteSize, False, 1],
[wire_format.BoolByteSize, True, 1],
# EnumByteSize().
[wire_format.EnumByteSize, 0, 1],
[wire_format.EnumByteSize, 127, 1],
[wire_format.EnumByteSize, 128, 2],
[wire_format.EnumByteSize, wire_format.UINT32_MAX, 5],
]
for args in NUMERIC_ARGS:
self.NumericByteSizeTestHelper(*args)
# Test strings and bytes.
for byte_size_fn in (wire_format.StringByteSize, wire_format.BytesByteSize):
# 1 byte for tag, 1 byte for length, 3 bytes for contents.
self.assertEqual(5, byte_size_fn(10, 'abc'))
# 2 bytes for tag, 1 byte for length, 3 bytes for contents.
self.assertEqual(6, byte_size_fn(16, 'abc'))
# 2 bytes for tag, 2 bytes for length, 128 bytes for contents.
self.assertEqual(132, byte_size_fn(16, 'a' * 128))
# Test UTF-8 string byte size calculation.
# 1 byte for tag, 1 byte for length, 8 bytes for content.
self.assertEqual(10, wire_format.StringByteSize(
5, b'\xd0\xa2\xd0\xb5\xd1\x81\xd1\x82'.decode('utf-8')))
class MockMessage(object):
def __init__(self, byte_size):
self.byte_size = byte_size
def ByteSize(self):
return self.byte_size
message_byte_size = 10
mock_message = MockMessage(byte_size=message_byte_size)
# Test groups.
# (2 * 1) bytes for begin and end tags, plus message_byte_size.
self.assertEqual(2 + message_byte_size,
wire_format.GroupByteSize(1, mock_message))
# (2 * 2) bytes for begin and end tags, plus message_byte_size.
self.assertEqual(4 + message_byte_size,
wire_format.GroupByteSize(16, mock_message))
# Test messages.
# 1 byte for tag, plus 1 byte for length, plus contents.
self.assertEqual(2 + mock_message.byte_size,
wire_format.MessageByteSize(1, mock_message))
# 2 bytes for tag, plus 1 byte for length, plus contents.
self.assertEqual(3 + mock_message.byte_size,
wire_format.MessageByteSize(16, mock_message))
# 2 bytes for tag, plus 2 bytes for length, plus contents.
mock_message.byte_size = 128
self.assertEqual(4 + mock_message.byte_size,
wire_format.MessageByteSize(16, mock_message))
# Test message set item byte size.
# 4 bytes for tags, plus 1 byte for length, plus 1 byte for type_id,
# plus contents.
mock_message.byte_size = 10
self.assertEqual(mock_message.byte_size + 6,
wire_format.MessageSetItemByteSize(1, mock_message))
# 4 bytes for tags, plus 2 bytes for length, plus 1 byte for type_id,
# plus contents.
mock_message.byte_size = 128
self.assertEqual(mock_message.byte_size + 7,
wire_format.MessageSetItemByteSize(1, mock_message))
    # 4 bytes for tags, plus 2 bytes for length, plus 2 bytes for type_id,
# plus contents.
self.assertEqual(mock_message.byte_size + 8,
wire_format.MessageSetItemByteSize(128, mock_message))
# Too-long varint.
self.assertRaises(message.EncodeError,
wire_format.UInt64ByteSize, 1, 1 << 128)
if __name__ == '__main__':
unittest.main()

@ -0,0 +1,834 @@
# Protocol Buffers - Google's data interchange format
# Copyright 2008 Google Inc. All rights reserved.
# https://developers.google.com/protocol-buffers/
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Contains routines for printing protocol messages in JSON format.
Simple usage example:
# Create a proto object and serialize it to a json format string.
message = my_proto_pb2.MyMessage(foo='bar')
json_string = json_format.MessageToJson(message)
# Parse a json format string to proto object.
message = json_format.Parse(json_string, my_proto_pb2.MyMessage())
"""
__author__ = 'jieluo@google.com (Jie Luo)'
# pylint: disable=g-statement-before-imports,g-import-not-at-top
try:
from collections import OrderedDict
except ImportError:
from ordereddict import OrderedDict # PY26
# pylint: enable=g-statement-before-imports,g-import-not-at-top
import base64
import json
import math
from operator import methodcaller
import re
import sys
import six
from google.protobuf.internal import type_checkers
from google.protobuf import descriptor
from google.protobuf import symbol_database
_TIMESTAMPFOMAT = '%Y-%m-%dT%H:%M:%S'
_INT_TYPES = frozenset([descriptor.FieldDescriptor.CPPTYPE_INT32,
descriptor.FieldDescriptor.CPPTYPE_UINT32,
descriptor.FieldDescriptor.CPPTYPE_INT64,
descriptor.FieldDescriptor.CPPTYPE_UINT64])
_INT64_TYPES = frozenset([descriptor.FieldDescriptor.CPPTYPE_INT64,
descriptor.FieldDescriptor.CPPTYPE_UINT64])
_FLOAT_TYPES = frozenset([descriptor.FieldDescriptor.CPPTYPE_FLOAT,
descriptor.FieldDescriptor.CPPTYPE_DOUBLE])
_INFINITY = 'Infinity'
_NEG_INFINITY = '-Infinity'
_NAN = 'NaN'
_UNPAIRED_SURROGATE_PATTERN = re.compile(six.u(
r'[\ud800-\udbff](?![\udc00-\udfff])|(?<![\ud800-\udbff])[\udc00-\udfff]'
))
_VALID_EXTENSION_NAME = re.compile(r'\[[a-zA-Z0-9\._]*\]$')
class Error(Exception):
"""Top-level module error for json_format."""
class SerializeToJsonError(Error):
"""Thrown if serialization to JSON fails."""
class ParseError(Error):
"""Thrown in case of parsing error."""
def MessageToJson(
message,
including_default_value_fields=False,
preserving_proto_field_name=False,
indent=2,
sort_keys=False,
use_integers_for_enums=False,
descriptor_pool=None,
float_precision=None):
"""Converts protobuf message to JSON format.
Args:
message: The protocol buffers message instance to serialize.
including_default_value_fields: If True, singular primitive fields,
repeated fields, and map fields will always be serialized. If
False, only serialize non-empty fields. Singular message fields
and oneof fields are not affected by this option.
preserving_proto_field_name: If True, use the original proto field
names as defined in the .proto file. If False, convert the field
names to lowerCamelCase.
indent: The JSON object will be pretty-printed with this indent level.
An indent level of 0 or negative will only insert newlines.
sort_keys: If True, then the output will be sorted by field names.
use_integers_for_enums: If true, print integers instead of enum names.
descriptor_pool: A Descriptor Pool for resolving types. If None use the
default.
float_precision: If set, use this to specify float field valid digits.
Returns:
A string containing the JSON formatted protocol buffer message.
"""
printer = _Printer(
including_default_value_fields,
preserving_proto_field_name,
use_integers_for_enums,
descriptor_pool,
float_precision=float_precision)
return printer.ToJsonString(message, indent, sort_keys)
def MessageToDict(
message,
including_default_value_fields=False,
preserving_proto_field_name=False,
use_integers_for_enums=False,
descriptor_pool=None,
float_precision=None):
"""Converts protobuf message to a dictionary.
When the dictionary is encoded to JSON, it conforms to proto3 JSON spec.
Args:
message: The protocol buffers message instance to serialize.
including_default_value_fields: If True, singular primitive fields,
repeated fields, and map fields will always be serialized. If
False, only serialize non-empty fields. Singular message fields
and oneof fields are not affected by this option.
preserving_proto_field_name: If True, use the original proto field
names as defined in the .proto file. If False, convert the field
names to lowerCamelCase.
use_integers_for_enums: If true, print integers instead of enum names.
descriptor_pool: A Descriptor Pool for resolving types. If None use the
default.
float_precision: If set, use this to specify float field valid digits.
Returns:
A dict representation of the protocol buffer message.
"""
printer = _Printer(
including_default_value_fields,
preserving_proto_field_name,
use_integers_for_enums,
descriptor_pool,
float_precision=float_precision)
# pylint: disable=protected-access
return printer._MessageToJsonObject(message)
def _IsMapEntry(field):
return (field.type == descriptor.FieldDescriptor.TYPE_MESSAGE and
field.message_type.has_options and
field.message_type.GetOptions().map_entry)
class _Printer(object):
"""JSON format printer for protocol message."""
def __init__(
self,
including_default_value_fields=False,
preserving_proto_field_name=False,
use_integers_for_enums=False,
descriptor_pool=None,
float_precision=None):
self.including_default_value_fields = including_default_value_fields
self.preserving_proto_field_name = preserving_proto_field_name
self.use_integers_for_enums = use_integers_for_enums
self.descriptor_pool = descriptor_pool
# TODO(jieluo): change the float precision default to 8 valid digits.
if float_precision:
self.float_format = '.{}g'.format(float_precision)
else:
self.float_format = None
def ToJsonString(self, message, indent, sort_keys):
js = self._MessageToJsonObject(message)
return json.dumps(js, indent=indent, sort_keys=sort_keys)
def _MessageToJsonObject(self, message):
"""Converts message to an object according to Proto3 JSON Specification."""
message_descriptor = message.DESCRIPTOR
full_name = message_descriptor.full_name
if _IsWrapperMessage(message_descriptor):
return self._WrapperMessageToJsonObject(message)
if full_name in _WKTJSONMETHODS:
return methodcaller(_WKTJSONMETHODS[full_name][0], message)(self)
js = {}
return self._RegularMessageToJsonObject(message, js)
def _RegularMessageToJsonObject(self, message, js):
"""Converts normal message according to Proto3 JSON Specification."""
fields = message.ListFields()
try:
for field, value in fields:
if self.preserving_proto_field_name:
name = field.name
else:
name = field.json_name
if _IsMapEntry(field):
# Convert a map field.
v_field = field.message_type.fields_by_name['value']
js_map = {}
for key in value:
if isinstance(key, bool):
if key:
recorded_key = 'true'
else:
recorded_key = 'false'
else:
recorded_key = key
js_map[recorded_key] = self._FieldToJsonObject(
v_field, value[key])
js[name] = js_map
elif field.label == descriptor.FieldDescriptor.LABEL_REPEATED:
# Convert a repeated field.
js[name] = [self._FieldToJsonObject(field, k)
for k in value]
elif field.is_extension:
full_qualifier = field.full_name[:-len(field.name)]
name = '[%s%s]' % (full_qualifier, name)
js[name] = self._FieldToJsonObject(field, value)
else:
js[name] = self._FieldToJsonObject(field, value)
# Serialize default value if including_default_value_fields is True.
if self.including_default_value_fields:
message_descriptor = message.DESCRIPTOR
for field in message_descriptor.fields:
# Singular message fields and oneof fields will not be affected.
if ((field.label != descriptor.FieldDescriptor.LABEL_REPEATED and
field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_MESSAGE) or
field.containing_oneof):
continue
if self.preserving_proto_field_name:
name = field.name
else:
name = field.json_name
if name in js:
# Skip the field which has been serialized already.
continue
if _IsMapEntry(field):
js[name] = {}
elif field.label == descriptor.FieldDescriptor.LABEL_REPEATED:
js[name] = []
else:
js[name] = self._FieldToJsonObject(field, field.default_value)
except ValueError as e:
raise SerializeToJsonError(
'Failed to serialize {0} field: {1}.'.format(field.name, e))
return js
def _FieldToJsonObject(self, field, value):
"""Converts field value according to Proto3 JSON Specification."""
if field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_MESSAGE:
return self._MessageToJsonObject(value)
elif field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_ENUM:
if self.use_integers_for_enums:
return value
enum_value = field.enum_type.values_by_number.get(value, None)
if enum_value is not None:
return enum_value.name
else:
if field.file.syntax == 'proto3':
return value
raise SerializeToJsonError('Enum field contains an integer value '
                                   'which cannot be mapped to an enum value.')
elif field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_STRING:
if field.type == descriptor.FieldDescriptor.TYPE_BYTES:
# Use base64 Data encoding for bytes
return base64.b64encode(value).decode('utf-8')
else:
return value
elif field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_BOOL:
return bool(value)
elif field.cpp_type in _INT64_TYPES:
return str(value)
elif field.cpp_type in _FLOAT_TYPES:
if math.isinf(value):
if value < 0.0:
return _NEG_INFINITY
else:
return _INFINITY
if math.isnan(value):
return _NAN
if field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_FLOAT:
if self.float_format:
return float(format(value, self.float_format))
else:
return type_checkers.ToShortestFloat(value)
return value
def _AnyMessageToJsonObject(self, message):
"""Converts Any message according to Proto3 JSON Specification."""
if not message.ListFields():
return {}
# Must print @type first, use OrderedDict instead of {}
js = OrderedDict()
type_url = message.type_url
js['@type'] = type_url
sub_message = _CreateMessageFromTypeUrl(type_url, self.descriptor_pool)
sub_message.ParseFromString(message.value)
message_descriptor = sub_message.DESCRIPTOR
full_name = message_descriptor.full_name
if _IsWrapperMessage(message_descriptor):
js['value'] = self._WrapperMessageToJsonObject(sub_message)
return js
if full_name in _WKTJSONMETHODS:
js['value'] = methodcaller(_WKTJSONMETHODS[full_name][0],
sub_message)(self)
return js
return self._RegularMessageToJsonObject(sub_message, js)
def _GenericMessageToJsonObject(self, message):
"""Converts message according to Proto3 JSON Specification."""
    # Duration, Timestamp and FieldMask have a ToJsonString method to do the
    # conversion. Users can also call the method directly.
return message.ToJsonString()
def _ValueMessageToJsonObject(self, message):
"""Converts Value message according to Proto3 JSON Specification."""
which = message.WhichOneof('kind')
    # If the Value message is not set, treat it as null_value when serializing
    # to JSON. The parsed-back result will then differ from the original message.
if which is None or which == 'null_value':
return None
if which == 'list_value':
return self._ListValueMessageToJsonObject(message.list_value)
if which == 'struct_value':
value = message.struct_value
else:
value = getattr(message, which)
oneof_descriptor = message.DESCRIPTOR.fields_by_name[which]
return self._FieldToJsonObject(oneof_descriptor, value)
def _ListValueMessageToJsonObject(self, message):
"""Converts ListValue message according to Proto3 JSON Specification."""
return [self._ValueMessageToJsonObject(value)
for value in message.values]
def _StructMessageToJsonObject(self, message):
"""Converts Struct message according to Proto3 JSON Specification."""
fields = message.fields
ret = {}
for key in fields:
ret[key] = self._ValueMessageToJsonObject(fields[key])
return ret
def _WrapperMessageToJsonObject(self, message):
return self._FieldToJsonObject(
message.DESCRIPTOR.fields_by_name['value'], message.value)
def _IsWrapperMessage(message_descriptor):
return message_descriptor.file.name == 'google/protobuf/wrappers.proto'
def _DuplicateChecker(js):
result = {}
for name, value in js:
if name in result:
raise ParseError('Failed to load JSON: duplicate key {0}.'.format(name))
result[name] = value
return result
def _CreateMessageFromTypeUrl(type_url, descriptor_pool):
"""Creates a message from a type URL."""
db = symbol_database.Default()
pool = db.pool if descriptor_pool is None else descriptor_pool
type_name = type_url.split('/')[-1]
try:
message_descriptor = pool.FindMessageTypeByName(type_name)
except KeyError:
raise TypeError(
'Can not find message descriptor by type_url: {0}.'.format(type_url))
message_class = db.GetPrototype(message_descriptor)
return message_class()
def Parse(text, message, ignore_unknown_fields=False, descriptor_pool=None):
"""Parses a JSON representation of a protocol message into a message.
Args:
text: Message JSON representation.
message: A protocol buffer message to merge into.
ignore_unknown_fields: If True, do not raise errors for unknown fields.
descriptor_pool: A Descriptor Pool for resolving types. If None use the
default.
Returns:
The same message passed as argument.
  Raises:
ParseError: On JSON parsing problems.
"""
if not isinstance(text, six.text_type): text = text.decode('utf-8')
try:
js = json.loads(text, object_pairs_hook=_DuplicateChecker)
except ValueError as e:
raise ParseError('Failed to load JSON: {0}.'.format(str(e)))
return ParseDict(js, message, ignore_unknown_fields, descriptor_pool)
def ParseDict(js_dict,
message,
ignore_unknown_fields=False,
descriptor_pool=None):
"""Parses a JSON dictionary representation into a message.
Args:
js_dict: Dict representation of a JSON message.
message: A protocol buffer message to merge into.
ignore_unknown_fields: If True, do not raise errors for unknown fields.
descriptor_pool: A Descriptor Pool for resolving types. If None use the
default.
Returns:
The same message passed as argument.
"""
parser = _Parser(ignore_unknown_fields, descriptor_pool)
parser.ConvertMessage(js_dict, message)
return message
_INT_OR_FLOAT = six.integer_types + (float,)
class _Parser(object):
"""JSON format parser for protocol message."""
def __init__(self, ignore_unknown_fields, descriptor_pool):
self.ignore_unknown_fields = ignore_unknown_fields
self.descriptor_pool = descriptor_pool
def ConvertMessage(self, value, message):
"""Convert a JSON object into a message.
Args:
value: A JSON object.
message: A WKT or regular protocol message to record the data.
Raises:
ParseError: In case of convert problems.
"""
message_descriptor = message.DESCRIPTOR
full_name = message_descriptor.full_name
if _IsWrapperMessage(message_descriptor):
self._ConvertWrapperMessage(value, message)
elif full_name in _WKTJSONMETHODS:
methodcaller(_WKTJSONMETHODS[full_name][1], value, message)(self)
else:
self._ConvertFieldValuePair(value, message)
def _ConvertFieldValuePair(self, js, message):
"""Convert field value pairs into regular message.
Args:
js: A JSON object to convert the field value pairs.
message: A regular protocol message to record the data.
Raises:
ParseError: In case of problems converting.
"""
names = []
message_descriptor = message.DESCRIPTOR
fields_by_json_name = dict((f.json_name, f)
for f in message_descriptor.fields)
for name in js:
try:
field = fields_by_json_name.get(name, None)
if not field:
field = message_descriptor.fields_by_name.get(name, None)
if not field and _VALID_EXTENSION_NAME.match(name):
if not message_descriptor.is_extendable:
raise ParseError('Message type {0} does not have extensions'.format(
message_descriptor.full_name))
identifier = name[1:-1] # strip [] brackets
# pylint: disable=protected-access
field = message.Extensions._FindExtensionByName(identifier)
# pylint: enable=protected-access
if not field:
# Try looking for extension by the message type name, dropping the
# field name following the final . separator in full_name.
identifier = '.'.join(identifier.split('.')[:-1])
# pylint: disable=protected-access
field = message.Extensions._FindExtensionByName(identifier)
# pylint: enable=protected-access
if not field:
if self.ignore_unknown_fields:
continue
raise ParseError(
('Message type "{0}" has no field named "{1}".\n'
' Available Fields(except extensions): {2}').format(
message_descriptor.full_name, name,
[f.json_name for f in message_descriptor.fields]))
if name in names:
raise ParseError('Message type "{0}" should not have multiple '
'"{1}" fields.'.format(
message.DESCRIPTOR.full_name, name))
names.append(name)
# Check no other oneof field is parsed.
if field.containing_oneof is not None:
oneof_name = field.containing_oneof.name
if oneof_name in names:
raise ParseError('Message type "{0}" should not have multiple '
'"{1}" oneof fields.'.format(
message.DESCRIPTOR.full_name, oneof_name))
names.append(oneof_name)
value = js[name]
if value is None:
if (field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_MESSAGE
and field.message_type.full_name == 'google.protobuf.Value'):
sub_message = getattr(message, field.name)
sub_message.null_value = 0
else:
message.ClearField(field.name)
continue
# Parse field value.
if _IsMapEntry(field):
message.ClearField(field.name)
self._ConvertMapFieldValue(value, message, field)
elif field.label == descriptor.FieldDescriptor.LABEL_REPEATED:
message.ClearField(field.name)
if not isinstance(value, list):
raise ParseError('repeated field {0} must be in [] which is '
'{1}.'.format(name, value))
if field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_MESSAGE:
# Repeated message field.
for item in value:
sub_message = getattr(message, field.name).add()
# None is a null_value in Value.
if (item is None and
sub_message.DESCRIPTOR.full_name != 'google.protobuf.Value'):
raise ParseError('null is not allowed to be used as an element'
' in a repeated field.')
self.ConvertMessage(item, sub_message)
else:
# Repeated scalar field.
for item in value:
if item is None:
raise ParseError('null is not allowed to be used as an element'
' in a repeated field.')
getattr(message, field.name).append(
_ConvertScalarFieldValue(item, field))
elif field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_MESSAGE:
if field.is_extension:
sub_message = message.Extensions[field]
else:
sub_message = getattr(message, field.name)
sub_message.SetInParent()
self.ConvertMessage(value, sub_message)
else:
if field.is_extension:
message.Extensions[field] = _ConvertScalarFieldValue(value, field)
else:
setattr(message, field.name, _ConvertScalarFieldValue(value, field))
except ParseError as e:
if field and field.containing_oneof is None:
raise ParseError('Failed to parse {0} field: {1}.'.format(name, e))
else:
raise ParseError(str(e))
except ValueError as e:
raise ParseError('Failed to parse {0} field: {1}.'.format(name, e))
except TypeError as e:
raise ParseError('Failed to parse {0} field: {1}.'.format(name, e))
def _ConvertAnyMessage(self, value, message):
"""Convert a JSON representation into Any message."""
if isinstance(value, dict) and not value:
return
try:
type_url = value['@type']
except KeyError:
raise ParseError('@type is missing when parsing any message.')
sub_message = _CreateMessageFromTypeUrl(type_url, self.descriptor_pool)
message_descriptor = sub_message.DESCRIPTOR
full_name = message_descriptor.full_name
if _IsWrapperMessage(message_descriptor):
self._ConvertWrapperMessage(value['value'], sub_message)
elif full_name in _WKTJSONMETHODS:
methodcaller(
_WKTJSONMETHODS[full_name][1], value['value'], sub_message)(self)
else:
del value['@type']
self._ConvertFieldValuePair(value, sub_message)
value['@type'] = type_url
# Sets Any message
message.value = sub_message.SerializeToString()
message.type_url = type_url
def _ConvertGenericMessage(self, value, message):
"""Convert a JSON representation into message with FromJsonString."""
# Duration, Timestamp, FieldMask have a FromJsonString method to do the
# conversion. Users can also call the method directly.
try:
message.FromJsonString(value)
except ValueError as e:
raise ParseError(e)
def _ConvertValueMessage(self, value, message):
"""Convert a JSON representation into Value message."""
if isinstance(value, dict):
self._ConvertStructMessage(value, message.struct_value)
elif isinstance(value, list):
      self._ConvertListValueMessage(value, message.list_value)
elif value is None:
message.null_value = 0
elif isinstance(value, bool):
message.bool_value = value
elif isinstance(value, six.string_types):
message.string_value = value
elif isinstance(value, _INT_OR_FLOAT):
message.number_value = value
else:
raise ParseError('Unexpected type for Value message.')
def _ConvertListValueMessage(self, value, message):
"""Convert a JSON representation into ListValue message."""
if not isinstance(value, list):
raise ParseError(
'ListValue must be in [] which is {0}.'.format(value))
message.ClearField('values')
for item in value:
self._ConvertValueMessage(item, message.values.add())
def _ConvertStructMessage(self, value, message):
"""Convert a JSON representation into Struct message."""
if not isinstance(value, dict):
raise ParseError(
'Struct must be in a dict which is {0}.'.format(value))
# Clear will mark the struct as modified so it will be created even if
# there are no values.
message.Clear()
for key in value:
self._ConvertValueMessage(value[key], message.fields[key])
return
def _ConvertWrapperMessage(self, value, message):
"""Convert a JSON representation into Wrapper message."""
field = message.DESCRIPTOR.fields_by_name['value']
setattr(message, 'value', _ConvertScalarFieldValue(value, field))
def _ConvertMapFieldValue(self, value, message, field):
"""Convert map field value for a message map field.
Args:
value: A JSON object to convert the map field value.
message: A protocol message to record the converted data.
field: The descriptor of the map field to be converted.
Raises:
ParseError: In case of convert problems.
"""
if not isinstance(value, dict):
raise ParseError(
'Map field {0} must be in a dict which is {1}.'.format(
field.name, value))
key_field = field.message_type.fields_by_name['key']
value_field = field.message_type.fields_by_name['value']
for key in value:
key_value = _ConvertScalarFieldValue(key, key_field, True)
if value_field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_MESSAGE:
self.ConvertMessage(value[key], getattr(
message, field.name)[key_value])
else:
getattr(message, field.name)[key_value] = _ConvertScalarFieldValue(
value[key], value_field)
def _ConvertScalarFieldValue(value, field, require_str=False):
"""Convert a single scalar field value.
Args:
value: A scalar value to convert the scalar field value.
field: The descriptor of the field to convert.
require_str: If True, the field value must be a str.
Returns:
The converted scalar field value
Raises:
ParseError: In case of convert problems.
"""
if field.cpp_type in _INT_TYPES:
return _ConvertInteger(value)
elif field.cpp_type in _FLOAT_TYPES:
return _ConvertFloat(value)
elif field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_BOOL:
return _ConvertBool(value, require_str)
elif field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_STRING:
if field.type == descriptor.FieldDescriptor.TYPE_BYTES:
return base64.b64decode(value)
else:
# Checking for unpaired surrogates appears to be unreliable,
# depending on the specific Python version, so we check manually.
if _UNPAIRED_SURROGATE_PATTERN.search(value):
raise ParseError('Unpaired surrogate')
return value
elif field.cpp_type == descriptor.FieldDescriptor.CPPTYPE_ENUM:
# Convert an enum value.
enum_value = field.enum_type.values_by_name.get(value, None)
if enum_value is None:
try:
number = int(value)
enum_value = field.enum_type.values_by_number.get(number, None)
except ValueError:
raise ParseError('Invalid enum value {0} for enum type {1}.'.format(
value, field.enum_type.full_name))
if enum_value is None:
if field.file.syntax == 'proto3':
# Proto3 accepts unknown enums.
return number
raise ParseError('Invalid enum value {0} for enum type {1}.'.format(
value, field.enum_type.full_name))
return enum_value.number
def _ConvertInteger(value):
"""Convert an integer.
Args:
value: A scalar value to convert.
Returns:
The integer value.
Raises:
ParseError: If an integer couldn't be consumed.
"""
if isinstance(value, float) and not value.is_integer():
raise ParseError('Couldn\'t parse integer: {0}.'.format(value))
if isinstance(value, six.text_type) and value.find(' ') != -1:
raise ParseError('Couldn\'t parse integer: "{0}".'.format(value))
return int(value)
def _ConvertFloat(value):
"""Convert an floating point number."""
if value == 'nan':
raise ParseError('Couldn\'t parse float "nan", use "NaN" instead.')
try:
# Assume Python compatible syntax.
return float(value)
except ValueError:
# Check alternative spellings.
if value == _NEG_INFINITY:
return float('-inf')
elif value == _INFINITY:
return float('inf')
elif value == _NAN:
return float('nan')
else:
raise ParseError('Couldn\'t parse float: {0}.'.format(value))
def _ConvertBool(value, require_str):
"""Convert a boolean value.
Args:
value: A scalar value to convert.
require_str: If True, value must be a str.
Returns:
The bool parsed.
Raises:
ParseError: If a boolean value couldn't be consumed.
"""
if require_str:
if value == 'true':
return True
elif value == 'false':
return False
else:
raise ParseError('Expected "true" or "false", not {0}.'.format(value))
if not isinstance(value, bool):
raise ParseError('Expected true or false without quotes.')
return value
_WKTJSONMETHODS = {
'google.protobuf.Any': ['_AnyMessageToJsonObject',
'_ConvertAnyMessage'],
'google.protobuf.Duration': ['_GenericMessageToJsonObject',
'_ConvertGenericMessage'],
'google.protobuf.FieldMask': ['_GenericMessageToJsonObject',
'_ConvertGenericMessage'],
'google.protobuf.ListValue': ['_ListValueMessageToJsonObject',
'_ConvertListValueMessage'],
'google.protobuf.Struct': ['_StructMessageToJsonObject',
'_ConvertStructMessage'],
'google.protobuf.Timestamp': ['_GenericMessageToJsonObject',
'_ConvertGenericMessage'],
'google.protobuf.Value': ['_ValueMessageToJsonObject',
'_ConvertValueMessage']
}
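# A minimal round-trip sketch of MessageToJson()/Parse() above, assuming the
# generated well-known type module google.protobuf.struct_pb2 is importable.
if __name__ == '__main__':
  from google.protobuf import struct_pb2
  s = struct_pb2.Struct()
  s['name'] = 'example'         # Struct supports dict-style item assignment.
  s['count'] = 3
  as_json = MessageToJson(s)    # Dispatched to _StructMessageToJsonObject.
  parsed = Parse(as_json, struct_pb2.Struct())
  assert parsed['name'] == 'example'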

File diff suppressed because one or more lines are too long

@ -0,0 +1,413 @@
# Protocol Buffers - Google's data interchange format
# Copyright 2008 Google Inc. All rights reserved.
# https://developers.google.com/protocol-buffers/
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
# TODO(robinson): We should just make these methods all "pure-virtual" and move
# all implementation out, into reflection.py for now.
"""Contains an abstract base class for protocol messages."""
__author__ = 'robinson@google.com (Will Robinson)'
class Error(Exception):
"""Base error type for this module."""
pass
class DecodeError(Error):
"""Exception raised when deserializing messages."""
pass
class EncodeError(Error):
"""Exception raised when serializing messages."""
pass
class Message(object):
"""Abstract base class for protocol messages.
Protocol message classes are almost always generated by the protocol
compiler. These generated types subclass Message and implement the methods
shown below.
"""
# TODO(robinson): Link to an HTML document here.
# TODO(robinson): Document that instances of this class will also
# have an Extensions attribute with __getitem__ and __setitem__.
# Again, not sure how to best convey this.
# TODO(robinson): Document that the class must also have a static
# RegisterExtension(extension_field) method.
# Not sure how to best express at this point.
# TODO(robinson): Document these fields and methods.
__slots__ = []
#: The :class:`google.protobuf.descriptor.Descriptor` for this message type.
DESCRIPTOR = None
def __deepcopy__(self, memo=None):
clone = type(self)()
clone.MergeFrom(self)
return clone
def __eq__(self, other_msg):
"""Recursively compares two messages by value and structure."""
raise NotImplementedError
def __ne__(self, other_msg):
# Can't just say self != other_msg, since that would infinitely recurse. :)
return not self == other_msg
def __hash__(self):
raise TypeError('unhashable object')
def __str__(self):
"""Outputs a human-readable representation of the message."""
raise NotImplementedError
def __unicode__(self):
"""Outputs a human-readable representation of the message."""
raise NotImplementedError
def MergeFrom(self, other_msg):
"""Merges the contents of the specified message into current message.
This method merges the contents of the specified message into the current
message. Singular fields that are set in the specified message overwrite
the corresponding fields in the current message. Repeated fields are
appended. Singular sub-messages and groups are recursively merged.
Args:
other_msg (Message): A message to merge into the current message.
"""
raise NotImplementedError
def CopyFrom(self, other_msg):
"""Copies the content of the specified message into the current message.
The method clears the current message and then merges the specified
message using MergeFrom.
Args:
other_msg (Message): A message to copy into the current one.
"""
if self is other_msg:
return
self.Clear()
self.MergeFrom(other_msg)
def Clear(self):
"""Clears all data that was set in the message."""
raise NotImplementedError
def SetInParent(self):
"""Mark this as present in the parent.
This normally happens automatically when you assign a field of a
sub-message, but sometimes you want to make the sub-message
present while keeping it empty. If you find yourself using this,
you may want to reconsider your design.
"""
raise NotImplementedError
def IsInitialized(self):
"""Checks if the message is initialized.
Returns:
bool: The method returns True if the message is initialized (i.e. all of
its required fields are set).
"""
raise NotImplementedError
# TODO(robinson): MergeFromString() should probably return None and be
# implemented in terms of a helper that returns the # of bytes read. Our
# deserialization routines would use the helper when recursively
# deserializing, but the end user would almost always just want the no-return
# MergeFromString().
def MergeFromString(self, serialized):
"""Merges serialized protocol buffer data into this message.
When we find a field in `serialized` that is already present
in this message:
- If it's a "repeated" field, we append to the end of our list.
- Else, if it's a scalar, we overwrite our field.
- Else, (it's a nonrepeated composite), we recursively merge
into the existing composite.
Args:
serialized (bytes): Any object that allows us to call
``memoryview(serialized)`` to access a string of bytes using the
buffer interface.
Returns:
int: The number of bytes read from `serialized`.
For non-group messages, this will always be `len(serialized)`,
but for messages which are actually groups, this will
generally be less than `len(serialized)`, since we must
stop when we reach an ``END_GROUP`` tag. Note that if
we *do* stop because of an ``END_GROUP`` tag, the number
of bytes returned does not include the bytes
for the ``END_GROUP`` tag information.
Raises:
DecodeError: if the input cannot be parsed.
"""
# TODO(robinson): Document handling of unknown fields.
# TODO(robinson): When we switch to a helper, this will return None.
raise NotImplementedError
def ParseFromString(self, serialized):
"""Parse serialized protocol buffer data into this message.
Like :func:`MergeFromString()`, except we clear the object first.
"""
self.Clear()
return self.MergeFromString(serialized)
def SerializeToString(self, **kwargs):
"""Serializes the protocol message to a binary string.
Keyword Args:
deterministic (bool): If true, requests deterministic serialization
of the protobuf, with predictable ordering of map keys.
Returns:
A binary string representation of the message if all of the required
fields in the message are set (i.e. the message is initialized).
Raises:
EncodeError: if the message isn't initialized (see :func:`IsInitialized`).
"""
raise NotImplementedError
def SerializePartialToString(self, **kwargs):
"""Serializes the protocol message to a binary string.
This method is similar to SerializeToString but doesn't check if the
message is initialized.
Keyword Args:
deterministic (bool): If true, requests deterministic serialization
of the protobuf, with predictable ordering of map keys.
Returns:
bytes: A serialized representation of the partial message.
"""
raise NotImplementedError
# TODO(robinson): Decide whether we like these better
# than auto-generated has_foo() and clear_foo() methods
# on the instances themselves. This way is less consistent
# with C++, but it makes reflection-type access easier and
# reduces the number of magically autogenerated things.
#
# TODO(robinson): Be sure to document (and test) exactly
# which field names are accepted here. Are we case-sensitive?
# What do we do with fields that share names with Python keywords
# like 'lambda' and 'yield'?
#
# nnorwitz says:
# """
# Typically (in python), an underscore is appended to names that are
# keywords. So they would become lambda_ or yield_.
# """
def ListFields(self):
"""Returns a list of (FieldDescriptor, value) tuples for present fields.
A message field is non-empty if HasField() would return true. A singular
primitive field is non-empty if HasField() would return true in proto2 or it
is non zero in proto3. A repeated field is non-empty if it contains at least
one element. The fields are ordered by field number.
Returns:
list[tuple(FieldDescriptor, value)]: field descriptors and values
for all fields in the message which are not empty. The values vary by
field type.
"""
raise NotImplementedError
def HasField(self, field_name):
"""Checks if a certain field is set for the message.
For a oneof group, checks if any field inside is set. Note that if the
field_name is not defined in the message descriptor, :exc:`ValueError` will
be raised.
Args:
field_name (str): The name of the field to check for presence.
Returns:
bool: Whether a value has been set for the named field.
Raises:
ValueError: if the `field_name` is not a member of this message.
"""
raise NotImplementedError
def ClearField(self, field_name):
"""Clears the contents of a given field.
    Inside a oneof group, clears the field set. If the name refers to neither a
    defined field nor a oneof group, :exc:`ValueError` is raised.
Args:
field_name (str): The name of the field to check for presence.
Raises:
ValueError: if the `field_name` is not a member of this message.
"""
raise NotImplementedError
def WhichOneof(self, oneof_group):
"""Returns the name of the field that is set inside a oneof group.
If no field is set, returns None.
Args:
oneof_group (str): the name of the oneof group to check.
Returns:
str or None: The name of the group that is set, or None.
Raises:
ValueError: no group with the given name exists
"""
raise NotImplementedError
def HasExtension(self, extension_handle):
"""Checks if a certain extension is present for this message.
Extensions are retrieved using the :attr:`Extensions` mapping (if present).
Args:
extension_handle: The handle for the extension to check.
Returns:
bool: Whether the extension is present for this message.
Raises:
KeyError: if the extension is repeated. Similar to repeated fields,
there is no separate notion of presence: a "not present" repeated
extension is an empty list.
"""
raise NotImplementedError
def ClearExtension(self, extension_handle):
"""Clears the contents of a given extension.
Args:
extension_handle: The handle for the extension to clear.
"""
raise NotImplementedError
def UnknownFields(self):
"""Returns the UnknownFieldSet.
Returns:
UnknownFieldSet: The unknown fields stored in this message.
"""
raise NotImplementedError
def DiscardUnknownFields(self):
"""Clears all fields in the :class:`UnknownFieldSet`.
    This operation is recursive for nested messages.
"""
raise NotImplementedError
def ByteSize(self):
"""Returns the serialized size of this message.
Recursively calls ByteSize() on all contained messages.
Returns:
int: The number of bytes required to serialize this message.
"""
raise NotImplementedError
def _SetListener(self, message_listener):
"""Internal method used by the protocol message implementation.
Clients should not call this directly.
Sets a listener that this message will call on certain state transitions.
The purpose of this method is to register back-edges from children to
parents at runtime, for the purpose of setting "has" bits and
byte-size-dirty bits in the parent and ancestor objects whenever a child or
descendant object is modified.
If the client wants to disconnect this Message from the object tree, she
explicitly sets callback to None.
If message_listener is None, unregisters any existing listener. Otherwise,
message_listener must implement the MessageListener interface in
internal/message_listener.py, and we discard any listener registered
via a previous _SetListener() call.
"""
raise NotImplementedError
def __getstate__(self):
"""Support the pickle protocol."""
return dict(serialized=self.SerializePartialToString())
def __setstate__(self, state):
"""Support the pickle protocol."""
self.__init__()
serialized = state['serialized']
# On Python 3, using encoding='latin1' is required for unpickling
# protos pickled by Python 2.
if not isinstance(serialized, bytes):
serialized = serialized.encode('latin1')
self.ParseFromString(serialized)
def __reduce__(self):
message_descriptor = self.DESCRIPTOR
if message_descriptor.containing_type is None:
return type(self), (), self.__getstate__()
# the message type must be nested.
# Python does not pickle nested classes; use the symbol_database on the
# receiving end.
container = message_descriptor
return (_InternalConstructMessage, (container.full_name,),
self.__getstate__())
def _InternalConstructMessage(full_name):
"""Constructs a nested message."""
from google.protobuf import symbol_database # pylint:disable=g-import-not-at-top
return symbol_database.Default().GetSymbol(full_name)()
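# A minimal sketch of the abstract Message API above in action. Concrete
# behaviour comes from generated classes; this assumes the generated well-known
# type module google.protobuf.wrappers_pb2 is available.
if __name__ == '__main__':
  from google.protobuf import wrappers_pb2
  a = wrappers_pb2.StringValue(value='hello')
  b = wrappers_pb2.StringValue()
  b.CopyFrom(a)                  # Clear() followed by MergeFrom(), per the docstring.
  wire = a.SerializeToString()   # Binary wire-format bytes.
  c = wrappers_pb2.StringValue()
  c.ParseFromString(wire)        # Clear() followed by MergeFromString().
  assert b.value == c.value == 'hello'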

@ -0,0 +1,162 @@
# Protocol Buffers - Google's data interchange format
# Copyright 2008 Google Inc. All rights reserved.
# https://developers.google.com/protocol-buffers/
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Provides a factory class for generating dynamic messages.
If you have access to the FileDescriptor protos containing the messages you
want to create, the easiest way to use this class is to do the following:
message_classes = message_factory.GetMessages(iterable_of_file_descriptors)
my_proto_instance = message_classes['some.proto.package.MessageName']()
"""
__author__ = 'matthewtoia@google.com (Matt Toia)'
from google.protobuf.internal import api_implementation
from google.protobuf import descriptor_pool
from google.protobuf import message
if api_implementation.Type() == 'cpp':
from google.protobuf.pyext import cpp_message as message_impl
else:
from google.protobuf.internal import python_message as message_impl
# The type of all Message classes.
_GENERATED_PROTOCOL_MESSAGE_TYPE = message_impl.GeneratedProtocolMessageType
class MessageFactory(object):
"""Factory for creating Proto2 messages from descriptors in a pool."""
def __init__(self, pool=None):
"""Initializes a new factory."""
self.pool = pool or descriptor_pool.DescriptorPool()
# local cache of all classes built from protobuf descriptors
self._classes = {}
def GetPrototype(self, descriptor):
"""Builds a proto2 message class based on the passed in descriptor.
Passing a descriptor with a fully qualified name matching a previous
invocation will cause the same class to be returned.
Args:
descriptor: The descriptor to build from.
Returns:
A class describing the passed in descriptor.
"""
if descriptor not in self._classes:
descriptor_name = descriptor.name
if str is bytes: # PY2
descriptor_name = descriptor.name.encode('ascii', 'ignore')
result_class = _GENERATED_PROTOCOL_MESSAGE_TYPE(
descriptor_name,
(message.Message,),
{'DESCRIPTOR': descriptor, '__module__': None})
# pylint: disable=protected-access
result_class._FACTORY = self
      # If __module__ is not set, it wrongly points to the message_factory module.
self._classes[descriptor] = result_class
for field in descriptor.fields:
if field.message_type:
self.GetPrototype(field.message_type)
for extension in result_class.DESCRIPTOR.extensions:
if extension.containing_type not in self._classes:
self.GetPrototype(extension.containing_type)
extended_class = self._classes[extension.containing_type]
extended_class.RegisterExtension(extension)
return self._classes[descriptor]
def GetMessages(self, files):
"""Gets all the messages from a specified file.
This will find and resolve dependencies, failing if the descriptor
pool cannot satisfy them.
Args:
files: The file names to extract messages from.
Returns:
A dictionary mapping proto names to the message classes. This will include
any dependent messages as well as any messages defined in the same file as
a specified message.
"""
result = {}
for file_name in files:
file_desc = self.pool.FindFileByName(file_name)
for desc in file_desc.message_types_by_name.values():
result[desc.full_name] = self.GetPrototype(desc)
# While the extension FieldDescriptors are created by the descriptor pool,
# the python classes created in the factory need them to be registered
# explicitly, which is done below.
#
# The call to RegisterExtension will specifically check if the
# extension was already registered on the object and either
# ignore the registration if the original was the same, or raise
# an error if they were different.
for extension in file_desc.extensions_by_name.values():
if extension.containing_type not in self._classes:
self.GetPrototype(extension.containing_type)
extended_class = self._classes[extension.containing_type]
extended_class.RegisterExtension(extension)
return result
_FACTORY = MessageFactory()
def GetMessages(file_protos):
"""Builds a dictionary of all the messages available in a set of files.
Args:
file_protos: Iterable of FileDescriptorProto to build messages out of.
Returns:
A dictionary mapping proto names to the message classes. This will include
any dependent messages as well as any messages defined in the same file as
a specified message.
"""
  # The cpp implementation of the protocol buffer library requires adding the
  # messages in topological order of the dependency graph.
file_by_name = {file_proto.name: file_proto for file_proto in file_protos}
def _AddFile(file_proto):
for dependency in file_proto.dependency:
if dependency in file_by_name:
# Remove from elements to be visited, in order to cut cycles.
_AddFile(file_by_name.pop(dependency))
_FACTORY.pool.Add(file_proto)
while file_by_name:
_AddFile(file_by_name.popitem()[1])
return _FACTORY.GetMessages([file_proto.name for file_proto in file_protos])
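# A minimal sketch of the module-level GetMessages() helper above, building a
# class from a hand-written FileDescriptorProto. The file name, package, and
# field below are made up for the example.
if __name__ == '__main__':
  from google.protobuf import descriptor_pb2
  file_proto = descriptor_pb2.FileDescriptorProto()
  file_proto.name = 'example_runtime.proto'
  file_proto.package = 'example.runtime'
  msg_proto = file_proto.message_type.add()
  msg_proto.name = 'RuntimeMsg'
  field_proto = msg_proto.field.add()
  field_proto.name = 'text'
  field_proto.number = 1
  field_proto.label = descriptor_pb2.FieldDescriptorProto.LABEL_OPTIONAL
  field_proto.type = descriptor_pb2.FieldDescriptorProto.TYPE_STRING
  classes = GetMessages([file_proto])
  RuntimeMsg = classes['example.runtime.RuntimeMsg']
  assert RuntimeMsg(text='hi').text == 'hi'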

@ -0,0 +1,130 @@
# Protocol Buffers - Google's data interchange format
# Copyright 2008 Google Inc. All rights reserved.
# https://developers.google.com/protocol-buffers/
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Dynamic Protobuf class creator."""
try:
from collections import OrderedDict
except ImportError:
from ordereddict import OrderedDict #PY26
import hashlib
import os
from google.protobuf import descriptor_pb2
from google.protobuf import message_factory
def _GetMessageFromFactory(factory, full_name):
"""Get a proto class from the MessageFactory by name.
Args:
factory: a MessageFactory instance.
full_name: str, the fully qualified name of the proto type.
Returns:
A class, for the type identified by full_name.
Raises:
KeyError, if the proto is not found in the factory's descriptor pool.
"""
proto_descriptor = factory.pool.FindMessageTypeByName(full_name)
proto_cls = factory.GetPrototype(proto_descriptor)
return proto_cls
def MakeSimpleProtoClass(fields, full_name=None, pool=None):
"""Create a Protobuf class whose fields are basic types.
Note: this doesn't validate field names!
Args:
fields: dict of {name: field_type} mappings for each field in the proto. If
this is an OrderedDict the order will be maintained, otherwise the
fields will be sorted by name.
full_name: optional str, the fully-qualified name of the proto type.
pool: optional DescriptorPool instance.
Returns:
a class, the new protobuf class with a FileDescriptor.
"""
factory = message_factory.MessageFactory(pool=pool)
if full_name is not None:
try:
proto_cls = _GetMessageFromFactory(factory, full_name)
return proto_cls
except KeyError:
# The factory's DescriptorPool doesn't know about this class yet.
pass
# Get a list of (name, field_type) tuples from the fields dict. If fields was
# an OrderedDict we keep the order, but otherwise we sort the fields to ensure
# consistent ordering.
field_items = fields.items()
if not isinstance(fields, OrderedDict):
field_items = sorted(field_items)
# Use a consistent file name that is unlikely to conflict with any imported
# proto files.
fields_hash = hashlib.sha1()
for f_name, f_type in field_items:
fields_hash.update(f_name.encode('utf-8'))
fields_hash.update(str(f_type).encode('utf-8'))
proto_file_name = fields_hash.hexdigest() + '.proto'
# If the proto is anonymous, use the same hash to name it.
if full_name is None:
full_name = ('net.proto2.python.public.proto_builder.AnonymousProto_' +
fields_hash.hexdigest())
try:
proto_cls = _GetMessageFromFactory(factory, full_name)
return proto_cls
except KeyError:
# The factory's DescriptorPool doesn't know about this class yet.
pass
# This is the first time we see this proto: add a new descriptor to the pool.
factory.pool.Add(
_MakeFileDescriptorProto(proto_file_name, full_name, field_items))
return _GetMessageFromFactory(factory, full_name)
def _MakeFileDescriptorProto(proto_file_name, full_name, field_items):
"""Populate FileDescriptorProto for MessageFactory's DescriptorPool."""
package, name = full_name.rsplit('.', 1)
file_proto = descriptor_pb2.FileDescriptorProto()
file_proto.name = os.path.join(package.replace('.', '/'), proto_file_name)
file_proto.package = package
desc_proto = file_proto.message_type.add()
desc_proto.name = name
for f_number, (f_name, f_type) in enumerate(field_items, 1):
field_proto = desc_proto.field.add()
field_proto.name = f_name
field_proto.number = f_number
field_proto.label = descriptor_pb2.FieldDescriptorProto.LABEL_OPTIONAL
field_proto.type = f_type
return file_proto
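# Illustrative usage sketch (not part of the upstream module): MakeSimpleProtoClass()
# above turns a plain {name: field_type} mapping into a message class. The field
# names and the full_name 'example.UserProto' are arbitrary example values.
if __name__ == '__main__':
  demo_fields = OrderedDict([
      ('user_id', descriptor_pb2.FieldDescriptorProto.TYPE_INT64),
      ('email', descriptor_pb2.FieldDescriptorProto.TYPE_STRING),
  ])
  UserProto = MakeSimpleProtoClass(demo_fields, full_name='example.UserProto')
  user = UserProto(user_id=7, email='someone@example.com')
  # The dynamically built class behaves like any generated message class.
  assert UserProto.FromString(user.SerializeToString()).email == 'someone@example.com'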

@ -0,0 +1,4 @@
try:
__import__('pkg_resources').declare_namespace(__name__)
except ImportError:
__path__ = __import__('pkgutil').extend_path(__path__, __name__)

@ -0,0 +1,65 @@
# Protocol Buffers - Google's data interchange format
# Copyright 2008 Google Inc. All rights reserved.
# https://developers.google.com/protocol-buffers/
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Protocol message implementation hooks for C++ implementation.
Contains helper functions used to create protocol message classes from
Descriptor objects at runtime backed by the protocol buffer C++ API.
"""
__author__ = 'tibell@google.com (Johan Tibell)'
from google.protobuf.pyext import _message
class GeneratedProtocolMessageType(_message.MessageMeta):
"""Metaclass for protocol message classes created at runtime from Descriptors.
The protocol compiler currently uses this metaclass to create protocol
message classes at runtime. Clients can also manually create their own
classes at runtime, as in this example:
mydescriptor = Descriptor(.....)
factory = symbol_database.Default()
factory.pool.AddDescriptor(mydescriptor)
MyProtoClass = factory.GetPrototype(mydescriptor)
myproto_instance = MyProtoClass()
myproto_instance.foo_field = 23
...
The above example will not work for nested types. If you wish to include them,
use reflection.MakeClass() instead of manually instantiating the class in
order to create the appropriate class structure.
"""
# Must be consistent with the protocol-compiler code in
# proto2/compiler/internal/generator.*.
_DESCRIPTOR_KEY = 'DESCRIPTOR'

@ -0,0 +1,237 @@
# -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: google/protobuf/pyext/python.proto
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
DESCRIPTOR = _descriptor.FileDescriptor(
name='google/protobuf/pyext/python.proto',
package='google.protobuf.python.internal',
syntax='proto2',
serialized_options=b'H\001',
create_key=_descriptor._internal_create_key,
serialized_pb=b'\n\"google/protobuf/pyext/python.proto\x12\x1fgoogle.protobuf.python.internal\"\xbc\x02\n\x0cTestAllTypes\x12\\\n\x17repeated_nested_message\x18\x01 \x03(\x0b\x32;.google.protobuf.python.internal.TestAllTypes.NestedMessage\x12\\\n\x17optional_nested_message\x18\x02 \x01(\x0b\x32;.google.protobuf.python.internal.TestAllTypes.NestedMessage\x12\x16\n\x0eoptional_int32\x18\x03 \x01(\x05\x1aX\n\rNestedMessage\x12\n\n\x02\x62\x62\x18\x01 \x01(\x05\x12;\n\x02\x63\x63\x18\x02 \x01(\x0b\x32/.google.protobuf.python.internal.ForeignMessage\"&\n\x0e\x46oreignMessage\x12\t\n\x01\x63\x18\x01 \x01(\x05\x12\t\n\x01\x64\x18\x02 \x03(\x05\"\x1d\n\x11TestAllExtensions*\x08\x08\x01\x10\x80\x80\x80\x80\x02:\x9a\x01\n!optional_nested_message_extension\x12\x32.google.protobuf.python.internal.TestAllExtensions\x18\x01 \x01(\x0b\x32;.google.protobuf.python.internal.TestAllTypes.NestedMessage:\x9a\x01\n!repeated_nested_message_extension\x12\x32.google.protobuf.python.internal.TestAllExtensions\x18\x02 \x03(\x0b\x32;.google.protobuf.python.internal.TestAllTypes.NestedMessageB\x02H\x01'
)
OPTIONAL_NESTED_MESSAGE_EXTENSION_FIELD_NUMBER = 1
optional_nested_message_extension = _descriptor.FieldDescriptor(
name='optional_nested_message_extension', full_name='google.protobuf.python.internal.optional_nested_message_extension', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=True, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key)
REPEATED_NESTED_MESSAGE_EXTENSION_FIELD_NUMBER = 2
repeated_nested_message_extension = _descriptor.FieldDescriptor(
name='repeated_nested_message_extension', full_name='google.protobuf.python.internal.repeated_nested_message_extension', index=1,
number=2, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=True, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key)
_TESTALLTYPES_NESTEDMESSAGE = _descriptor.Descriptor(
name='NestedMessage',
full_name='google.protobuf.python.internal.TestAllTypes.NestedMessage',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='bb', full_name='google.protobuf.python.internal.TestAllTypes.NestedMessage.bb', index=0,
number=1, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='cc', full_name='google.protobuf.python.internal.TestAllTypes.NestedMessage.cc', index=1,
number=2, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=300,
serialized_end=388,
)
_TESTALLTYPES = _descriptor.Descriptor(
name='TestAllTypes',
full_name='google.protobuf.python.internal.TestAllTypes',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='repeated_nested_message', full_name='google.protobuf.python.internal.TestAllTypes.repeated_nested_message', index=0,
number=1, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='optional_nested_message', full_name='google.protobuf.python.internal.TestAllTypes.optional_nested_message', index=1,
number=2, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='optional_int32', full_name='google.protobuf.python.internal.TestAllTypes.optional_int32', index=2,
number=3, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[_TESTALLTYPES_NESTEDMESSAGE, ],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=72,
serialized_end=388,
)
_FOREIGNMESSAGE = _descriptor.Descriptor(
name='ForeignMessage',
full_name='google.protobuf.python.internal.ForeignMessage',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='c', full_name='google.protobuf.python.internal.ForeignMessage.c', index=0,
number=1, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='d', full_name='google.protobuf.python.internal.ForeignMessage.d', index=1,
number=2, type=5, cpp_type=1, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=390,
serialized_end=428,
)
_TESTALLEXTENSIONS = _descriptor.Descriptor(
name='TestAllExtensions',
full_name='google.protobuf.python.internal.TestAllExtensions',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=True,
syntax='proto2',
extension_ranges=[(1, 536870912), ],
oneofs=[
],
serialized_start=430,
serialized_end=459,
)
_TESTALLTYPES_NESTEDMESSAGE.fields_by_name['cc'].message_type = _FOREIGNMESSAGE
_TESTALLTYPES_NESTEDMESSAGE.containing_type = _TESTALLTYPES
_TESTALLTYPES.fields_by_name['repeated_nested_message'].message_type = _TESTALLTYPES_NESTEDMESSAGE
_TESTALLTYPES.fields_by_name['optional_nested_message'].message_type = _TESTALLTYPES_NESTEDMESSAGE
DESCRIPTOR.message_types_by_name['TestAllTypes'] = _TESTALLTYPES
DESCRIPTOR.message_types_by_name['ForeignMessage'] = _FOREIGNMESSAGE
DESCRIPTOR.message_types_by_name['TestAllExtensions'] = _TESTALLEXTENSIONS
DESCRIPTOR.extensions_by_name['optional_nested_message_extension'] = optional_nested_message_extension
DESCRIPTOR.extensions_by_name['repeated_nested_message_extension'] = repeated_nested_message_extension
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
TestAllTypes = _reflection.GeneratedProtocolMessageType('TestAllTypes', (_message.Message,), {
'NestedMessage' : _reflection.GeneratedProtocolMessageType('NestedMessage', (_message.Message,), {
'DESCRIPTOR' : _TESTALLTYPES_NESTEDMESSAGE,
'__module__' : 'google.protobuf.pyext.python_pb2'
# @@protoc_insertion_point(class_scope:google.protobuf.python.internal.TestAllTypes.NestedMessage)
})
,
'DESCRIPTOR' : _TESTALLTYPES,
'__module__' : 'google.protobuf.pyext.python_pb2'
# @@protoc_insertion_point(class_scope:google.protobuf.python.internal.TestAllTypes)
})
_sym_db.RegisterMessage(TestAllTypes)
_sym_db.RegisterMessage(TestAllTypes.NestedMessage)
ForeignMessage = _reflection.GeneratedProtocolMessageType('ForeignMessage', (_message.Message,), {
'DESCRIPTOR' : _FOREIGNMESSAGE,
'__module__' : 'google.protobuf.pyext.python_pb2'
# @@protoc_insertion_point(class_scope:google.protobuf.python.internal.ForeignMessage)
})
_sym_db.RegisterMessage(ForeignMessage)
TestAllExtensions = _reflection.GeneratedProtocolMessageType('TestAllExtensions', (_message.Message,), {
'DESCRIPTOR' : _TESTALLEXTENSIONS,
'__module__' : 'google.protobuf.pyext.python_pb2'
# @@protoc_insertion_point(class_scope:google.protobuf.python.internal.TestAllExtensions)
})
_sym_db.RegisterMessage(TestAllExtensions)
optional_nested_message_extension.message_type = _TESTALLTYPES_NESTEDMESSAGE
TestAllExtensions.RegisterExtension(optional_nested_message_extension)
repeated_nested_message_extension.message_type = _TESTALLTYPES_NESTEDMESSAGE
TestAllExtensions.RegisterExtension(repeated_nested_message_extension)
DESCRIPTOR._options = None
# @@protoc_insertion_point(module_scope)

@ -0,0 +1,95 @@
# Protocol Buffers - Google's data interchange format
# Copyright 2008 Google Inc. All rights reserved.
# https://developers.google.com/protocol-buffers/
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
# This code is meant to work on Python 2.4 and above only.
"""Contains a metaclass and helper functions used to create
protocol message classes from Descriptor objects at runtime.
Recall that a metaclass is the "type" of a class.
(A class is to a metaclass what an instance is to a class.)
In this case, we use the GeneratedProtocolMessageType metaclass
to inject all the useful functionality into the classes
output by the protocol compiler at compile-time.
The upshot of all this is that the real implementation
details for ALL pure-Python protocol buffers are *here in
this file*.
"""
__author__ = 'robinson@google.com (Will Robinson)'
from google.protobuf import message_factory
from google.protobuf import symbol_database
# The type of all Message classes.
# Part of the public interface, but normally only used by message factories.
GeneratedProtocolMessageType = message_factory._GENERATED_PROTOCOL_MESSAGE_TYPE
MESSAGE_CLASS_CACHE = {}
# Deprecated. Please NEVER use reflection.ParseMessage().
def ParseMessage(descriptor, byte_str):
"""Generate a new Message instance from this Descriptor and a byte string.
DEPRECATED: ParseMessage is deprecated because it is using MakeClass().
Please use MessageFactory.GetPrototype() instead.
Args:
descriptor: Protobuf Descriptor object
byte_str: Serialized protocol buffer byte string
Returns:
Newly created protobuf Message object.
"""
result_class = MakeClass(descriptor)
new_msg = result_class()
new_msg.ParseFromString(byte_str)
return new_msg
# Deprecated. Please NEVER use reflection.MakeClass().
def MakeClass(descriptor):
"""Construct a class object for a protobuf described by descriptor.
DEPRECATED: use MessageFactory.GetPrototype() instead.
Args:
descriptor: A descriptor.Descriptor object describing the protobuf.
Returns:
The Message class object described by the descriptor.
"""
# Original implementation leads to duplicate message classes, which won't play
# well with extensions. Message factory info is also missing.
# Redirect to message_factory.
return symbol_database.Default().GetPrototype(descriptor)
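# Hedged usage sketch (not part of the upstream module): the deprecated helpers
# above defer to the symbol database, so both routes below resolve to the same
# class. struct_pb2 is used only because it is a readily available generated
# module.
if __name__ == '__main__':
  from google.protobuf import struct_pb2

  desc = struct_pb2.Struct.DESCRIPTOR
  LegacyStruct = MakeClass(desc)                                  # deprecated route
  PreferredStruct = symbol_database.Default().GetPrototype(desc)  # preferred route
  assert LegacyStruct is PreferredStruct is struct_pb2.Struct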

@ -0,0 +1,228 @@
# Protocol Buffers - Google's data interchange format
# Copyright 2008 Google Inc. All rights reserved.
# https://developers.google.com/protocol-buffers/
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""DEPRECATED: Declares the RPC service interfaces.
This module declares the abstract interfaces underlying proto2 RPC
services. These are intended to be independent of any particular RPC
implementation, so that proto2 services can be used on top of a variety
of implementations. Starting with version 2.3.0, RPC implementations should
not try to build on these, but should instead provide code generator plugins
which generate code specific to the particular RPC implementation. This way
the generated code can be more appropriate for the implementation in use
and can avoid unnecessary layers of indirection.
"""
__author__ = 'petar@google.com (Petar Petrov)'
class RpcException(Exception):
"""Exception raised on failed blocking RPC method call."""
pass
class Service(object):
"""Abstract base interface for protocol-buffer-based RPC services.
Services themselves are abstract classes (implemented either by servers or as
stubs), but they subclass this base interface. The methods of this
interface can be used to call the methods of the service without knowing
its exact type at compile time (analogous to the Message interface).
"""
def GetDescriptor():
"""Retrieves this service's descriptor."""
raise NotImplementedError
def CallMethod(self, method_descriptor, rpc_controller,
request, done):
"""Calls a method of the service specified by method_descriptor.
If "done" is None then the call is blocking and the response
message will be returned directly. Otherwise the call is asynchronous
and "done" will later be called with the response value.
In the blocking case, RpcException will be raised on error.
Preconditions:
* method_descriptor.service == GetDescriptor
* request is of the exact same class as returned by
GetRequestClass(method).
* After the call has started, the request must not be modified.
* "rpc_controller" is of the correct type for the RPC implementation being
used by this Service. For stubs, the "correct type" depends on the
RpcChannel which the stub is using.
Postconditions:
* "done" will be called when the method is complete. This may be
before CallMethod() returns or it may be at some point in the future.
* If the RPC failed, the response value passed to "done" will be None.
Further details about the failure can be found by querying the
RpcController.
"""
raise NotImplementedError
def GetRequestClass(self, method_descriptor):
"""Returns the class of the request message for the specified method.
CallMethod() requires that the request is of a particular subclass of
Message. GetRequestClass() gets the default instance of this required
type.
Example:
method = service.GetDescriptor().FindMethodByName("Foo")
request = stub.GetRequestClass(method)()
request.ParseFromString(input)
service.CallMethod(method, request, callback)
"""
raise NotImplementedError
def GetResponseClass(self, method_descriptor):
"""Returns the class of the response message for the specified method.
This method isn't really needed, as the RpcChannel's CallMethod constructs
the response protocol message. It's provided anyway in case it is useful
for the caller to know the response type in advance.
"""
raise NotImplementedError
class RpcController(object):
"""An RpcController mediates a single method call.
The primary purpose of the controller is to provide a way to manipulate
settings specific to the RPC implementation and to find out about RPC-level
errors. The methods provided by the RpcController interface are intended
to be a "least common denominator" set of features which we expect all
implementations to support. Specific implementations may provide more
advanced features (e.g. deadline propagation).
"""
# Client-side methods below
def Reset(self):
"""Resets the RpcController to its initial state.
After the RpcController has been reset, it may be reused in
a new call. Must not be called while an RPC is in progress.
"""
raise NotImplementedError
def Failed(self):
"""Returns true if the call failed.
After a call has finished, returns true if the call failed. The possible
reasons for failure depend on the RPC implementation. Failed() must not
be called before a call has finished. If Failed() returns true, the
contents of the response message are undefined.
"""
raise NotImplementedError
def ErrorText(self):
"""If Failed is true, returns a human-readable description of the error."""
raise NotImplementedError
def StartCancel(self):
"""Initiate cancellation.
Advises the RPC system that the caller desires that the RPC call be
canceled. The RPC system may cancel it immediately, may wait awhile and
then cancel it, or may not even cancel the call at all. If the call is
canceled, the "done" callback will still be called and the RpcController
will indicate that the call failed at that time.
"""
raise NotImplementedError
# Server-side methods below
def SetFailed(self, reason):
"""Sets a failure reason.
Causes Failed() to return true on the client side. "reason" will be
incorporated into the message returned by ErrorText(). If you find
you need to return machine-readable information about failures, you
should incorporate it into your response protocol buffer and should
NOT call SetFailed().
"""
raise NotImplementedError
def IsCanceled(self):
"""Checks if the client cancelled the RPC.
If true, indicates that the client canceled the RPC, so the server may
as well give up on replying to it. The server should still call the
final "done" callback.
"""
raise NotImplementedError
def NotifyOnCancel(self, callback):
"""Sets a callback to invoke on cancel.
Asks that the given callback be called when the RPC is canceled. The
callback will always be called exactly once. If the RPC completes without
being canceled, the callback will be called after completion. If the RPC
has already been canceled when NotifyOnCancel() is called, the callback
will be called immediately.
NotifyOnCancel() must be called no more than once per request.
"""
raise NotImplementedError
class RpcChannel(object):
"""Abstract interface for an RPC channel.
An RpcChannel represents a communication line to a service which can be used
to call that service's methods. The service may be running on another
machine. Normally, you should not use an RpcChannel directly, but instead
construct a stub Service wrapping it. Example:
channel = rpcImpl.Channel("remotehost.example.com:1234")
controller = rpcImpl.Controller()
service = MyService_Stub(channel)
service.MyMethod(controller, request, callback)
"""
def CallMethod(self, method_descriptor, rpc_controller,
request, response_class, done):
"""Calls the method identified by the descriptor.
Call the given method of the remote service. The signature of this
procedure looks the same as Service.CallMethod(), but the requirements
are less strict in one important way: the request object doesn't have to
be of any specific class as long as its descriptor is method.input_type.
"""
raise NotImplementedError
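# Minimal illustrative RpcController (a sketch added here, not upstream code):
# it only records failure state, which is enough for simple in-process testing
# of generated service stubs.
class SimpleRpcController(RpcController):
  """Trivial RpcController that tracks failure state in memory."""

  def __init__(self):
    self.Reset()

  def Reset(self):
    self._failed = False
    self._error_text = None

  def Failed(self):
    return self._failed

  def ErrorText(self):
    return self._error_text

  def StartCancel(self):
    # In-process calls complete immediately, so cancellation is a no-op here.
    pass

  def SetFailed(self, reason):
    self._failed = True
    self._error_text = reason

  def IsCanceled(self):
    return False

  def NotifyOnCancel(self, callback):
    # This toy controller never cancels, so the callback is simply ignored.
    pass


if __name__ == '__main__':
  controller = SimpleRpcController()
  controller.SetFailed('backend unavailable')  # example error text
  assert controller.Failed() and controller.ErrorText() == 'backend unavailable'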

@ -0,0 +1,300 @@
# Protocol Buffers - Google's data interchange format
# Copyright 2008 Google Inc. All rights reserved.
# https://developers.google.com/protocol-buffers/
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Contains metaclasses used to create protocol service and service stub
classes from ServiceDescriptor objects at runtime.
The GeneratedServiceType and GeneratedServiceStubType metaclasses are used to
inject all useful functionality into the classes output by the protocol
compiler at compile-time.
"""
__author__ = 'petar@google.com (Petar Petrov)'
from google.protobuf.internal import api_implementation
if api_implementation.Type() == 'cpp':
# pylint: disable=g-import-not-at-top
from google.protobuf.pyext import _message
class GeneratedServiceType(type):
"""Metaclass for service classes created at runtime from ServiceDescriptors.
Implementations for all methods described in the Service class are added here
by this class. We also create properties to allow getting/setting all fields
in the protocol message.
The protocol compiler currently uses this metaclass to create protocol service
classes at runtime. Clients can also manually create their own classes at
runtime, as in this example::
mydescriptor = ServiceDescriptor(.....)
class MyProtoService(service.Service):
__metaclass__ = GeneratedServiceType
DESCRIPTOR = mydescriptor
myservice_instance = MyProtoService()
# ...
"""
_DESCRIPTOR_KEY = 'DESCRIPTOR'
def __init__(cls, name, bases, dictionary):
"""Creates a message service class.
Args:
name: Name of the class (ignored, but required by the metaclass
protocol).
bases: Base classes of the class being constructed.
dictionary: The class dictionary of the class being constructed.
dictionary[_DESCRIPTOR_KEY] must contain a ServiceDescriptor object
describing this protocol service type.
"""
# Don't do anything if this class doesn't have a descriptor. This happens
# when a service class is subclassed.
if GeneratedServiceType._DESCRIPTOR_KEY not in dictionary:
return
descriptor = dictionary[GeneratedServiceType._DESCRIPTOR_KEY]
if isinstance(descriptor, str):
descriptor = _message.default_pool.FindServiceByName(descriptor)
dictionary[GeneratedServiceType._DESCRIPTOR_KEY] = descriptor
service_builder = _ServiceBuilder(descriptor)
service_builder.BuildService(cls)
cls.DESCRIPTOR = descriptor
class GeneratedServiceStubType(GeneratedServiceType):
"""Metaclass for service stubs created at runtime from ServiceDescriptors.
This class has similar responsibilities as GeneratedServiceType, except that
it creates the service stub classes.
"""
_DESCRIPTOR_KEY = 'DESCRIPTOR'
def __init__(cls, name, bases, dictionary):
"""Creates a message service stub class.
Args:
name: Name of the class (ignored here).
bases: Base classes of the class being constructed.
dictionary: The class dictionary of the class being constructed.
dictionary[_DESCRIPTOR_KEY] must contain a ServiceDescriptor object
describing this protocol service type.
"""
descriptor = dictionary.get(cls._DESCRIPTOR_KEY)
if isinstance(descriptor, str):
descriptor = _message.default_pool.FindServiceByName(descriptor)
dictionary[GeneratedServiceStubType._DESCRIPTOR_KEY] = descriptor
super(GeneratedServiceStubType, cls).__init__(name, bases, dictionary)
# Don't do anything if this class doesn't have a descriptor. This happens
# when a service stub is subclassed.
if GeneratedServiceStubType._DESCRIPTOR_KEY not in dictionary:
return
service_stub_builder = _ServiceStubBuilder(descriptor)
service_stub_builder.BuildServiceStub(cls)
class _ServiceBuilder(object):
"""This class constructs a protocol service class using a service descriptor.
Given a service descriptor, this class constructs a class that represents
the specified service descriptor. One service builder instance constructs
exactly one service class. That means all instances of that class share the
same builder.
"""
def __init__(self, service_descriptor):
"""Initializes an instance of the service class builder.
Args:
service_descriptor: ServiceDescriptor to use when constructing the
service class.
"""
self.descriptor = service_descriptor
def BuildService(self, cls):
"""Constructs the service class.
Args:
cls: The class that will be constructed.
"""
# CallMethod needs to operate with an instance of the Service class. This
# internal wrapper function exists only to be able to pass the service
# instance to the method that does the real CallMethod work.
def _WrapCallMethod(srvc, method_descriptor,
rpc_controller, request, callback):
return self._CallMethod(srvc, method_descriptor,
rpc_controller, request, callback)
self.cls = cls
cls.CallMethod = _WrapCallMethod
cls.GetDescriptor = staticmethod(lambda: self.descriptor)
cls.GetDescriptor.__doc__ = "Returns the service descriptor."
cls.GetRequestClass = self._GetRequestClass
cls.GetResponseClass = self._GetResponseClass
for method in self.descriptor.methods:
setattr(cls, method.name, self._GenerateNonImplementedMethod(method))
def _CallMethod(self, srvc, method_descriptor,
rpc_controller, request, callback):
"""Calls the method described by a given method descriptor.
Args:
srvc: Instance of the service for which this method is called.
method_descriptor: Descriptor that represents the method to call.
rpc_controller: RPC controller to use for this method's execution.
request: Request protocol message.
callback: A callback to invoke after the method has completed.
"""
if method_descriptor.containing_service != self.descriptor:
raise RuntimeError(
'CallMethod() given method descriptor for wrong service type.')
method = getattr(srvc, method_descriptor.name)
return method(rpc_controller, request, callback)
def _GetRequestClass(self, method_descriptor):
"""Returns the class of the request protocol message.
Args:
method_descriptor: Descriptor of the method for which to return the
request protocol message class.
Returns:
A class that represents the input protocol message of the specified
method.
"""
if method_descriptor.containing_service != self.descriptor:
raise RuntimeError(
'GetRequestClass() given method descriptor for wrong service type.')
return method_descriptor.input_type._concrete_class
def _GetResponseClass(self, method_descriptor):
"""Returns the class of the response protocol message.
Args:
method_descriptor: Descriptor of the method for which to return the
response protocol message class.
Returns:
A class that represents the output protocol message of the specified
method.
"""
if method_descriptor.containing_service != self.descriptor:
raise RuntimeError(
'GetResponseClass() given method descriptor for wrong service type.')
return method_descriptor.output_type._concrete_class
def _GenerateNonImplementedMethod(self, method):
"""Generates and returns a method that can be set for a service methods.
Args:
method: Descriptor of the service method for which a method is to be
generated.
Returns:
A method that can be added to the service class.
"""
return lambda inst, rpc_controller, request, callback: (
self._NonImplementedMethod(method.name, rpc_controller, callback))
def _NonImplementedMethod(self, method_name, rpc_controller, callback):
"""The body of all methods in the generated service class.
Args:
method_name: Name of the method being executed.
rpc_controller: RPC controller used to execute this method.
callback: A callback which will be invoked when the method finishes.
"""
rpc_controller.SetFailed('Method %s not implemented.' % method_name)
callback(None)
class _ServiceStubBuilder(object):
"""Constructs a protocol service stub class using a service descriptor.
Given a service descriptor, this class constructs a suitable stub class.
A stub is just a type-safe wrapper around an RpcChannel which emulates a
local implementation of the service.
One service stub builder instance constructs exactly one class. It means all
instances of that class share the same service stub builder.
"""
def __init__(self, service_descriptor):
"""Initializes an instance of the service stub class builder.
Args:
service_descriptor: ServiceDescriptor to use when constructing the
stub class.
"""
self.descriptor = service_descriptor
def BuildServiceStub(self, cls):
"""Constructs the stub class.
Args:
cls: The class that will be constructed.
"""
def _ServiceStubInit(stub, rpc_channel):
stub.rpc_channel = rpc_channel
self.cls = cls
cls.__init__ = _ServiceStubInit
for method in self.descriptor.methods:
setattr(cls, method.name, self._GenerateStubMethod(method))
def _GenerateStubMethod(self, method):
return (lambda inst, rpc_controller, request, callback=None:
self._StubMethod(inst, method, rpc_controller, request, callback))
def _StubMethod(self, stub, method_descriptor,
rpc_controller, request, callback):
"""The body of all service methods in the generated stub class.
Args:
stub: Stub instance.
method_descriptor: Descriptor of the invoked method.
rpc_controller: Rpc controller to execute the method.
request: Request protocol message.
callback: A callback to execute when the method finishes.
Returns:
Response message (in case of blocking call).
"""
return stub.rpc_channel.CallMethod(
method_descriptor, rpc_controller, request,
method_descriptor.output_type._concrete_class, callback)
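# Hedged end-to-end sketch (not part of the upstream module): build a tiny
# service descriptor at runtime and let GeneratedServiceType above wire up the
# class. Every name here ('example/pinger_demo.proto', 'Pinger', 'Ping') is
# invented for the example.
if __name__ == '__main__':
  from google.protobuf import descriptor_pb2
  from google.protobuf import descriptor_pool
  from google.protobuf import service

  file_proto = descriptor_pb2.FileDescriptorProto()
  file_proto.name = 'example/pinger_demo.proto'
  file_proto.package = 'example'
  file_proto.message_type.add().name = 'Ping'
  svc_proto = file_proto.service.add()
  svc_proto.name = 'Pinger'
  method_proto = svc_proto.method.add()
  method_proto.name = 'Ping'
  method_proto.input_type = '.example.Ping'
  method_proto.output_type = '.example.Ping'

  pool = descriptor_pool.DescriptorPool()
  pool.Add(file_proto)
  svc_desc = pool.FindServiceByName('example.Pinger')

  # Invoking the metaclass directly mirrors what generated _pb2 modules do.
  PingerService = GeneratedServiceType(
      'PingerService', (service.Service,),
      {'DESCRIPTOR': svc_desc, '__module__': __name__})

  assert PingerService.GetDescriptor() is svc_desc
  assert callable(getattr(PingerService, 'Ping'))  # stubbed, not-implemented method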

@ -0,0 +1,71 @@
# -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: google/protobuf/source_context.proto
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
DESCRIPTOR = _descriptor.FileDescriptor(
name='google/protobuf/source_context.proto',
package='google.protobuf',
syntax='proto3',
serialized_options=b'\n\023com.google.protobufB\022SourceContextProtoP\001ZAgoogle.golang.org/genproto/protobuf/source_context;source_context\242\002\003GPB\252\002\036Google.Protobuf.WellKnownTypes',
create_key=_descriptor._internal_create_key,
serialized_pb=b'\n$google/protobuf/source_context.proto\x12\x0fgoogle.protobuf\"\"\n\rSourceContext\x12\x11\n\tfile_name\x18\x01 \x01(\tB\x95\x01\n\x13\x63om.google.protobufB\x12SourceContextProtoP\x01ZAgoogle.golang.org/genproto/protobuf/source_context;source_context\xa2\x02\x03GPB\xaa\x02\x1eGoogle.Protobuf.WellKnownTypesb\x06proto3'
)
_SOURCECONTEXT = _descriptor.Descriptor(
name='SourceContext',
full_name='google.protobuf.SourceContext',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='file_name', full_name='google.protobuf.SourceContext.file_name', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=57,
serialized_end=91,
)
DESCRIPTOR.message_types_by_name['SourceContext'] = _SOURCECONTEXT
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
SourceContext = _reflection.GeneratedProtocolMessageType('SourceContext', (_message.Message,), {
'DESCRIPTOR' : _SOURCECONTEXT,
'__module__' : 'google.protobuf.source_context_pb2'
# @@protoc_insertion_point(class_scope:google.protobuf.SourceContext)
})
_sym_db.RegisterMessage(SourceContext)
DESCRIPTOR._options = None
# @@protoc_insertion_point(module_scope)
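# Brief usage sketch (not generated output): SourceContext simply records which
# .proto file an API element was defined in; the path below is an example value.
if __name__ == '__main__':
  ctx = SourceContext(file_name='example/api.proto')
  assert SourceContext.FromString(ctx.SerializeToString()).file_name == 'example/api.proto'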

@ -0,0 +1,287 @@
# -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: google/protobuf/struct.proto
from google.protobuf.internal import enum_type_wrapper
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
DESCRIPTOR = _descriptor.FileDescriptor(
name='google/protobuf/struct.proto',
package='google.protobuf',
syntax='proto3',
serialized_options=b'\n\023com.google.protobufB\013StructProtoP\001Z1github.com/golang/protobuf/ptypes/struct;structpb\370\001\001\242\002\003GPB\252\002\036Google.Protobuf.WellKnownTypes',
create_key=_descriptor._internal_create_key,
serialized_pb=b'\n\x1cgoogle/protobuf/struct.proto\x12\x0fgoogle.protobuf\"\x84\x01\n\x06Struct\x12\x33\n\x06\x66ields\x18\x01 \x03(\x0b\x32#.google.protobuf.Struct.FieldsEntry\x1a\x45\n\x0b\x46ieldsEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12%\n\x05value\x18\x02 \x01(\x0b\x32\x16.google.protobuf.Value:\x02\x38\x01\"\xea\x01\n\x05Value\x12\x30\n\nnull_value\x18\x01 \x01(\x0e\x32\x1a.google.protobuf.NullValueH\x00\x12\x16\n\x0cnumber_value\x18\x02 \x01(\x01H\x00\x12\x16\n\x0cstring_value\x18\x03 \x01(\tH\x00\x12\x14\n\nbool_value\x18\x04 \x01(\x08H\x00\x12/\n\x0cstruct_value\x18\x05 \x01(\x0b\x32\x17.google.protobuf.StructH\x00\x12\x30\n\nlist_value\x18\x06 \x01(\x0b\x32\x1a.google.protobuf.ListValueH\x00\x42\x06\n\x04kind\"3\n\tListValue\x12&\n\x06values\x18\x01 \x03(\x0b\x32\x16.google.protobuf.Value*\x1b\n\tNullValue\x12\x0e\n\nNULL_VALUE\x10\x00\x42\x81\x01\n\x13\x63om.google.protobufB\x0bStructProtoP\x01Z1github.com/golang/protobuf/ptypes/struct;structpb\xf8\x01\x01\xa2\x02\x03GPB\xaa\x02\x1eGoogle.Protobuf.WellKnownTypesb\x06proto3'
)
_NULLVALUE = _descriptor.EnumDescriptor(
name='NullValue',
full_name='google.protobuf.NullValue',
filename=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key,
values=[
_descriptor.EnumValueDescriptor(
name='NULL_VALUE', index=0, number=0,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
],
containing_type=None,
serialized_options=None,
serialized_start=474,
serialized_end=501,
)
_sym_db.RegisterEnumDescriptor(_NULLVALUE)
NullValue = enum_type_wrapper.EnumTypeWrapper(_NULLVALUE)
NULL_VALUE = 0
_STRUCT_FIELDSENTRY = _descriptor.Descriptor(
name='FieldsEntry',
full_name='google.protobuf.Struct.FieldsEntry',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='key', full_name='google.protobuf.Struct.FieldsEntry.key', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='value', full_name='google.protobuf.Struct.FieldsEntry.value', index=1,
number=2, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=b'8\001',
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=113,
serialized_end=182,
)
_STRUCT = _descriptor.Descriptor(
name='Struct',
full_name='google.protobuf.Struct',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='fields', full_name='google.protobuf.Struct.fields', index=0,
number=1, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[_STRUCT_FIELDSENTRY, ],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=50,
serialized_end=182,
)
_VALUE = _descriptor.Descriptor(
name='Value',
full_name='google.protobuf.Value',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='null_value', full_name='google.protobuf.Value.null_value', index=0,
number=1, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='number_value', full_name='google.protobuf.Value.number_value', index=1,
number=2, type=1, cpp_type=5, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='string_value', full_name='google.protobuf.Value.string_value', index=2,
number=3, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='bool_value', full_name='google.protobuf.Value.bool_value', index=3,
number=4, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='struct_value', full_name='google.protobuf.Value.struct_value', index=4,
number=5, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='list_value', full_name='google.protobuf.Value.list_value', index=5,
number=6, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
_descriptor.OneofDescriptor(
name='kind', full_name='google.protobuf.Value.kind',
index=0, containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[]),
],
serialized_start=185,
serialized_end=419,
)
_LISTVALUE = _descriptor.Descriptor(
name='ListValue',
full_name='google.protobuf.ListValue',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='values', full_name='google.protobuf.ListValue.values', index=0,
number=1, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=421,
serialized_end=472,
)
_STRUCT_FIELDSENTRY.fields_by_name['value'].message_type = _VALUE
_STRUCT_FIELDSENTRY.containing_type = _STRUCT
_STRUCT.fields_by_name['fields'].message_type = _STRUCT_FIELDSENTRY
_VALUE.fields_by_name['null_value'].enum_type = _NULLVALUE
_VALUE.fields_by_name['struct_value'].message_type = _STRUCT
_VALUE.fields_by_name['list_value'].message_type = _LISTVALUE
_VALUE.oneofs_by_name['kind'].fields.append(
_VALUE.fields_by_name['null_value'])
_VALUE.fields_by_name['null_value'].containing_oneof = _VALUE.oneofs_by_name['kind']
_VALUE.oneofs_by_name['kind'].fields.append(
_VALUE.fields_by_name['number_value'])
_VALUE.fields_by_name['number_value'].containing_oneof = _VALUE.oneofs_by_name['kind']
_VALUE.oneofs_by_name['kind'].fields.append(
_VALUE.fields_by_name['string_value'])
_VALUE.fields_by_name['string_value'].containing_oneof = _VALUE.oneofs_by_name['kind']
_VALUE.oneofs_by_name['kind'].fields.append(
_VALUE.fields_by_name['bool_value'])
_VALUE.fields_by_name['bool_value'].containing_oneof = _VALUE.oneofs_by_name['kind']
_VALUE.oneofs_by_name['kind'].fields.append(
_VALUE.fields_by_name['struct_value'])
_VALUE.fields_by_name['struct_value'].containing_oneof = _VALUE.oneofs_by_name['kind']
_VALUE.oneofs_by_name['kind'].fields.append(
_VALUE.fields_by_name['list_value'])
_VALUE.fields_by_name['list_value'].containing_oneof = _VALUE.oneofs_by_name['kind']
_LISTVALUE.fields_by_name['values'].message_type = _VALUE
DESCRIPTOR.message_types_by_name['Struct'] = _STRUCT
DESCRIPTOR.message_types_by_name['Value'] = _VALUE
DESCRIPTOR.message_types_by_name['ListValue'] = _LISTVALUE
DESCRIPTOR.enum_types_by_name['NullValue'] = _NULLVALUE
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
Struct = _reflection.GeneratedProtocolMessageType('Struct', (_message.Message,), {
'FieldsEntry' : _reflection.GeneratedProtocolMessageType('FieldsEntry', (_message.Message,), {
'DESCRIPTOR' : _STRUCT_FIELDSENTRY,
'__module__' : 'google.protobuf.struct_pb2'
# @@protoc_insertion_point(class_scope:google.protobuf.Struct.FieldsEntry)
})
,
'DESCRIPTOR' : _STRUCT,
'__module__' : 'google.protobuf.struct_pb2'
# @@protoc_insertion_point(class_scope:google.protobuf.Struct)
})
_sym_db.RegisterMessage(Struct)
_sym_db.RegisterMessage(Struct.FieldsEntry)
Value = _reflection.GeneratedProtocolMessageType('Value', (_message.Message,), {
'DESCRIPTOR' : _VALUE,
'__module__' : 'google.protobuf.struct_pb2'
# @@protoc_insertion_point(class_scope:google.protobuf.Value)
})
_sym_db.RegisterMessage(Value)
ListValue = _reflection.GeneratedProtocolMessageType('ListValue', (_message.Message,), {
'DESCRIPTOR' : _LISTVALUE,
'__module__' : 'google.protobuf.struct_pb2'
# @@protoc_insertion_point(class_scope:google.protobuf.ListValue)
})
_sym_db.RegisterMessage(ListValue)
DESCRIPTOR._options = None
_STRUCT_FIELDSENTRY._options = None
# @@protoc_insertion_point(module_scope)
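# Short usage sketch (not generated output): Struct and Value are the JSON-like
# well-known types; the dictionary contents below are arbitrary example data.
if __name__ == '__main__':
  s = Struct()
  s.update({'name': 'demo', 'count': 3, 'enabled': True})  # dict-style helper API
  assert s['name'] == 'demo' and s['count'] == 3
  v = Value(string_value='hello')
  assert v.WhichOneof('kind') == 'string_value'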

@ -0,0 +1,194 @@
# Protocol Buffers - Google's data interchange format
# Copyright 2008 Google Inc. All rights reserved.
# https://developers.google.com/protocol-buffers/
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""A database of Python protocol buffer generated symbols.
SymbolDatabase is the MessageFactory for messages generated at compile time,
and makes it easy to create new instances of a registered type, given only the
type's protocol buffer symbol name.
Example usage::
db = symbol_database.SymbolDatabase()
# Register symbols of interest, from one or multiple files.
db.RegisterFileDescriptor(my_proto_pb2.DESCRIPTOR)
db.RegisterMessage(my_proto_pb2.MyMessage)
db.RegisterEnumDescriptor(my_proto_pb2.MyEnum.DESCRIPTOR)
# The database can be used as a MessageFactory, to generate types based on
# their name:
types = db.GetMessages(['my_proto.proto'])
my_message_instance = types['MyMessage']()
# The database's underlying descriptor pool can be queried, so it's not
# necessary to know a type's filename to be able to generate it:
filename = db.pool.FindFileContainingSymbol('MyMessage')
my_message_instance = db.GetMessages([filename])['MyMessage']()
# This functionality is also provided directly via a convenience method:
my_message_instance = db.GetSymbol('MyMessage')()
"""
from google.protobuf.internal import api_implementation
from google.protobuf import descriptor_pool
from google.protobuf import message_factory
class SymbolDatabase(message_factory.MessageFactory):
"""A database of Python generated symbols."""
def RegisterMessage(self, message):
"""Registers the given message type in the local database.
Calls to GetSymbol() and GetMessages() will return messages registered here.
Args:
message: A :class:`google.protobuf.message.Message` subclass (or
instance); its descriptor will be registered.
Returns:
The provided message.
"""
desc = message.DESCRIPTOR
self._classes[desc] = message
self.RegisterMessageDescriptor(desc)
return message
def RegisterMessageDescriptor(self, message_descriptor):
"""Registers the given message descriptor in the local database.
Args:
message_descriptor (Descriptor): the message descriptor to add.
"""
if api_implementation.Type() == 'python':
# pylint: disable=protected-access
self.pool._AddDescriptor(message_descriptor)
def RegisterEnumDescriptor(self, enum_descriptor):
"""Registers the given enum descriptor in the local database.
Args:
enum_descriptor (EnumDescriptor): The enum descriptor to register.
Returns:
EnumDescriptor: The provided descriptor.
"""
if api_implementation.Type() == 'python':
# pylint: disable=protected-access
self.pool._AddEnumDescriptor(enum_descriptor)
return enum_descriptor
def RegisterServiceDescriptor(self, service_descriptor):
"""Registers the given service descriptor in the local database.
Args:
service_descriptor (ServiceDescriptor): the service descriptor to
register.
"""
if api_implementation.Type() == 'python':
# pylint: disable=protected-access
self.pool._AddServiceDescriptor(service_descriptor)
def RegisterFileDescriptor(self, file_descriptor):
"""Registers the given file descriptor in the local database.
Args:
file_descriptor (FileDescriptor): The file descriptor to register.
"""
if api_implementation.Type() == 'python':
# pylint: disable=protected-access
self.pool._InternalAddFileDescriptor(file_descriptor)
def GetSymbol(self, symbol):
"""Tries to find a symbol in the local database.
Currently, this method only returns message.Message instances; however, it
may be extended in the future to support other symbol types.
Args:
symbol (str): a protocol buffer symbol.
Returns:
A Python class corresponding to the symbol.
Raises:
KeyError: if the symbol could not be found.
"""
return self._classes[self.pool.FindMessageTypeByName(symbol)]
def GetMessages(self, files):
# TODO(amauryfa): Fix the differences with MessageFactory.
"""Gets all registered messages from a specified file.
Only messages already created and registered will be returned; this is the
case for imported _pb2 modules. Unlike MessageFactory, this version also
returns already-defined nested messages, but it does not register any
message extensions.
Args:
files (list[str]): The file names to extract messages from.
Returns:
A dictionary mapping proto names to the message classes.
Raises:
KeyError: if a file could not be found.
"""
def _GetAllMessages(desc):
"""Walk a message Descriptor and recursively yields all message names."""
yield desc
for msg_desc in desc.nested_types:
for nested_desc in _GetAllMessages(msg_desc):
yield nested_desc
result = {}
for file_name in files:
file_desc = self.pool.FindFileByName(file_name)
for msg_desc in file_desc.message_types_by_name.values():
for desc in _GetAllMessages(msg_desc):
try:
result[desc.full_name] = self._classes[desc]
except KeyError:
# This descriptor has no registered class, skip it.
pass
return result
_DEFAULT = SymbolDatabase(pool=descriptor_pool.Default())
def Default():
"""Returns the default SymbolDatabase."""
return _DEFAULT
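A minimal usage sketch for the SymbolDatabase defined above, assuming a hypothetical protoc-generated module my_proto_pb2 (the placeholder name already used in the module docstring's example) that defines a MyMessage type:

import my_proto_pb2  # hypothetical protoc-generated module
from google.protobuf import symbol_database

db = symbol_database.Default()  # shared database backed by the default descriptor pool

# Register the generated file and message so they can be looked up by name.
db.RegisterFileDescriptor(my_proto_pb2.DESCRIPTOR)
db.RegisterMessage(my_proto_pb2.MyMessage)

# Instantiate a registered type from its symbol name alone...
msg = db.GetSymbol('MyMessage')()

# ...or collect every (nested) message class defined in a file.
classes = db.GetMessages(['my_proto.proto'])
other = classes['MyMessage']()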

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

@ -0,0 +1,117 @@
# Protocol Buffers - Google's data interchange format
# Copyright 2008 Google Inc. All rights reserved.
# https://developers.google.com/protocol-buffers/
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Encoding related utilities."""
import re
import six
_cescape_chr_to_symbol_map = {}
_cescape_chr_to_symbol_map[9] = r'\t' # optional escape
_cescape_chr_to_symbol_map[10] = r'\n' # optional escape
_cescape_chr_to_symbol_map[13] = r'\r' # optional escape
_cescape_chr_to_symbol_map[34] = r'\"' # necessary escape
_cescape_chr_to_symbol_map[39] = r"\'" # optional escape
_cescape_chr_to_symbol_map[92] = r'\\' # necessary escape
# Lookup table for unicode
_cescape_unicode_to_str = [chr(i) for i in range(0, 256)]
for byte, string in _cescape_chr_to_symbol_map.items():
_cescape_unicode_to_str[byte] = string
# Lookup table for non-utf8, with necessary escapes at (o >= 127 or o < 32)
_cescape_byte_to_str = ([r'\%03o' % i for i in range(0, 32)] +
[chr(i) for i in range(32, 127)] +
[r'\%03o' % i for i in range(127, 256)])
for byte, string in _cescape_chr_to_symbol_map.items():
_cescape_byte_to_str[byte] = string
del byte, string
def CEscape(text, as_utf8):
# type: (...) -> str
"""Escape a bytes string for use in an text protocol buffer.
Args:
text: A byte string to be escaped.
as_utf8: Specifies if result may contain non-ASCII characters.
In Python 3 this allows unescaped non-ASCII Unicode characters.
In Python 2 the return value will be valid UTF-8 rather than only ASCII.
Returns:
Escaped string (str).
"""
# Python's text.encode() 'string_escape' or 'unicode_escape' codecs do not
# satisfy our needs; they encode unprintable characters using two-digit hex
# escapes whereas our C++ unescaping function allows hex escapes to be any
# length. So, "\0011".encode('string_escape') ends up being "\\x011", which
# will be decoded in C++ as a single-character string with char code 0x11.
if six.PY3:
text_is_unicode = isinstance(text, str)
if as_utf8 and text_is_unicode:
# We're already unicode, no processing beyond control char escapes.
return text.translate(_cescape_chr_to_symbol_map)
ord_ = ord if text_is_unicode else lambda x: x # bytes iterate as ints.
else:
ord_ = ord # PY2
if as_utf8:
return ''.join(_cescape_unicode_to_str[ord_(c)] for c in text)
return ''.join(_cescape_byte_to_str[ord_(c)] for c in text)
_CUNESCAPE_HEX = re.compile(r'(\\+)x([0-9a-fA-F])(?![0-9a-fA-F])')
def CUnescape(text):
# type: (str) -> bytes
"""Unescape a text string with C-style escape sequences to UTF-8 bytes.
Args:
text: The data to parse in a str.
Returns:
A byte string.
"""
def ReplaceHex(m):
# Only replace the match if the number of leading backslashes is odd, i.e.
# the backslash itself is not escaped.
if len(m.group(1)) & 1:
return m.group(1) + 'x0' + m.group(2)
return m.group(0)
# This is required because the 'string_escape' encoding doesn't
# allow single-digit hex escapes (like '\xf').
result = _CUNESCAPE_HEX.sub(ReplaceHex, text)
if six.PY2:
return result.decode('string_escape')
return (result.encode('utf-8') # PY3: Make it bytes to allow decode.
.decode('unicode_escape')
# Make it bytes again to return the proper type.
.encode('raw_unicode_escape'))
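A minimal round-trip sketch for the two helpers above, assuming the usual import path google.protobuf.text_encoding (Python 3 code path):

from google.protobuf.text_encoding import CEscape, CUnescape

raw = b'hello "world"\n\x01'
escaped = CEscape(raw, as_utf8=False)  # escaped == 'hello \\"world\\"\\n\\001'
assert CUnescape(escaped) == raw       # unescaping restores the original bytes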

File diff suppressed because it is too large

Some files were not shown because too many files have changed in this diff
