From 334961054d875641d150eec4d6938f6f824ea655 Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Tue, 2 Dec 2014 14:42:00 -0500 Subject: [PATCH 1/6] Add initializer for top-level '_gcloud_vendor' package. --- _gcloud_vendor/__init__.py | 8 ++++++++ 1 file changed, 8 insertions(+) create mode 100644 _gcloud_vendor/__init__.py diff --git a/_gcloud_vendor/__init__.py b/_gcloud_vendor/__init__.py new file mode 100644 index 000000000000..9ee34b0c867b --- /dev/null +++ b/_gcloud_vendor/__init__.py @@ -0,0 +1,8 @@ +"""Dependencies "vendored in", due to dependencies, Python versions, etc. + +Current set +----------- + +``apitools`` (pending release to PyPI, plus acceptable Python version + support for its dependencies). Review before M2. +""" From 83affe2a60ab95800ff305b3563356e0320ca740 Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Tue, 2 Dec 2014 14:37:55 -0500 Subject: [PATCH 2/6] Don't run pep8/pylint on vendored-in sources. --- run_pylint.py | 6 ++++++ tox.ini | 2 +- 2 files changed, 7 insertions(+), 1 deletion(-) diff --git a/run_pylint.py b/run_pylint.py index fa3c7f7cd1a8..a6b6684d0594 100644 --- a/run_pylint.py +++ b/run_pylint.py @@ -14,6 +14,9 @@ import sys +IGNORED_DIRECTORIES = [ + '_gcloud_vendor/', +] IGNORED_FILES = [ 'gcloud/datastore/datastore_v1_pb2.py', 'docs/conf.py', @@ -73,6 +76,9 @@ def make_test_rc(base_rc_filename, additions_dict, target_filename): def valid_filename(filename): """Checks if a file is a Python file and is not ignored.""" + for directory in IGNORED_DIRECTORIES: + if filename.startswith(directory): + return False return (filename.endswith('.py') and filename not in IGNORED_FILES) diff --git a/tox.ini b/tox.ini index 9e6c1c65421d..437b711b4626 100644 --- a/tox.ini +++ b/tox.ini @@ -49,7 +49,7 @@ deps = Sphinx [pep8] -exclude = gcloud/datastore/datastore_v1_pb2.py,docs/conf.py,*.egg/,.*/ +exclude = gcloud/datastore/datastore_v1_pb2.py,docs/conf.py,*.egg/,.*/,_gcloud_vendor/ verbose = 1 [testenv:lint] From 4c27079cf6d7f9814b36cfd16f3402455f768094 Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Tue, 2 Dec 2014 14:32:17 -0500 Subject: [PATCH 3/6] Vendor in 'apitools/base/py' and parents. Pulled from e5a5c36e24926310712d20b93b4cdd02424a81f5. 
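(On the lint change in the previous patch: the new IGNORED_DIRECTORIES prefix check in run_pylint.py short-circuits before the extension and IGNORED_FILES checks, so vendored sources are skipped wholesale. Illustratively, with hypothetical paths:

    >>> valid_filename('_gcloud_vendor/apitools/base/py/util.py')
    False
    >>> valid_filename('gcloud/storage/connection.py')
    True
    >>> valid_filename('gcloud/datastore/datastore_v1_pb2.py')
    False

The pep8 exclusion added to tox.ini keeps the two tools in agreement.)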
--- _gcloud_vendor/apitools/__init__.py | 5 + _gcloud_vendor/apitools/base/__init__.py | 5 + _gcloud_vendor/apitools/base/py/__init__.py | 15 + _gcloud_vendor/apitools/base/py/app2.py | 347 +++++++++ _gcloud_vendor/apitools/base/py/base_api.py | 583 ++++++++++++++ .../apitools/base/py/base_api_test.py | 113 +++ _gcloud_vendor/apitools/base/py/base_cli.py | 151 ++++ _gcloud_vendor/apitools/base/py/batch.py | 441 +++++++++++ _gcloud_vendor/apitools/base/py/cli.py | 13 + .../apitools/base/py/credentials_lib.py | 221 ++++++ .../apitools/base/py/credentials_lib_test.py | 54 ++ _gcloud_vendor/apitools/base/py/encoding.py | 486 ++++++++++++ .../apitools/base/py/encoding_test.py | 269 +++++++ _gcloud_vendor/apitools/base/py/exceptions.py | 100 +++ .../apitools/base/py/extra_types.py | 283 +++++++ .../apitools/base/py/extra_types_test.py | 175 +++++ .../apitools/base/py/http_wrapper.py | 182 +++++ _gcloud_vendor/apitools/base/py/list_pager.py | 49 ++ _gcloud_vendor/apitools/base/py/transfer.py | 716 ++++++++++++++++++ _gcloud_vendor/apitools/base/py/util.py | 165 ++++ 20 files changed, 4373 insertions(+) create mode 100644 _gcloud_vendor/apitools/__init__.py create mode 100644 _gcloud_vendor/apitools/base/__init__.py create mode 100644 _gcloud_vendor/apitools/base/py/__init__.py create mode 100644 _gcloud_vendor/apitools/base/py/app2.py create mode 100644 _gcloud_vendor/apitools/base/py/base_api.py create mode 100644 _gcloud_vendor/apitools/base/py/base_api_test.py create mode 100644 _gcloud_vendor/apitools/base/py/base_cli.py create mode 100644 _gcloud_vendor/apitools/base/py/batch.py create mode 100644 _gcloud_vendor/apitools/base/py/cli.py create mode 100644 _gcloud_vendor/apitools/base/py/credentials_lib.py create mode 100644 _gcloud_vendor/apitools/base/py/credentials_lib_test.py create mode 100644 _gcloud_vendor/apitools/base/py/encoding.py create mode 100644 _gcloud_vendor/apitools/base/py/encoding_test.py create mode 100644 _gcloud_vendor/apitools/base/py/exceptions.py create mode 100644 _gcloud_vendor/apitools/base/py/extra_types.py create mode 100644 _gcloud_vendor/apitools/base/py/extra_types_test.py create mode 100644 _gcloud_vendor/apitools/base/py/http_wrapper.py create mode 100644 _gcloud_vendor/apitools/base/py/list_pager.py create mode 100644 _gcloud_vendor/apitools/base/py/transfer.py create mode 100644 _gcloud_vendor/apitools/base/py/util.py diff --git a/_gcloud_vendor/apitools/__init__.py b/_gcloud_vendor/apitools/__init__.py new file mode 100644 index 000000000000..54fa3d53924e --- /dev/null +++ b/_gcloud_vendor/apitools/__init__.py @@ -0,0 +1,5 @@ +#!/usr/bin/env python +"""Shared __init__.py for apitools.""" + +from pkgutil import extend_path +__path__ = extend_path(__path__, __name__) diff --git a/_gcloud_vendor/apitools/base/__init__.py b/_gcloud_vendor/apitools/base/__init__.py new file mode 100644 index 000000000000..54fa3d53924e --- /dev/null +++ b/_gcloud_vendor/apitools/base/__init__.py @@ -0,0 +1,5 @@ +#!/usr/bin/env python +"""Shared __init__.py for apitools.""" + +from pkgutil import extend_path +__path__ = extend_path(__path__, __name__) diff --git a/_gcloud_vendor/apitools/base/py/__init__.py b/_gcloud_vendor/apitools/base/py/__init__.py new file mode 100644 index 000000000000..cbf7f86f3485 --- /dev/null +++ b/_gcloud_vendor/apitools/base/py/__init__.py @@ -0,0 +1,15 @@ +#!/usr/bin/env python +"""Top-level imports for apitools base files.""" + +# pylint:disable=wildcard-import +from apitools.base.py.base_api import * +from apitools.base.py.batch import * +from 
apitools.base.py.credentials_lib import * +from apitools.base.py.encoding import * +from apitools.base.py.exceptions import * +from apitools.base.py.extra_types import * +from apitools.base.py.http_wrapper import * +from apitools.base.py.list_pager import * +from apitools.base.py.transfer import * +from apitools.base.py.util import * + diff --git a/_gcloud_vendor/apitools/base/py/app2.py b/_gcloud_vendor/apitools/base/py/app2.py new file mode 100644 index 000000000000..2a90d5525649 --- /dev/null +++ b/_gcloud_vendor/apitools/base/py/app2.py @@ -0,0 +1,347 @@ +#!/usr/bin/env python +"""Appcommands-compatible command class with extra fixins.""" + +import cmd +import inspect +import pdb +import shlex +import sys +import traceback +import types + +from google.apputils import app +from google.apputils import appcommands +import gflags as flags + +__all__ = [ + 'NewCmd', + 'Repl', +] + +flags.DEFINE_boolean( + 'debug_mode', False, + 'Show tracebacks on Python exceptions.') +flags.DEFINE_boolean( + 'headless', False, + 'Assume no user is at the controlling console.') +FLAGS = flags.FLAGS + + +def _SafeMakeAscii(s): + if isinstance(s, unicode): + return s.encode('ascii') + elif isinstance(s, str): + return s.decode('ascii') + else: + return unicode(s).encode('ascii', 'backslashreplace') + + +class NewCmd(appcommands.Cmd): + """Featureful extension of appcommands.Cmd.""" + + def __init__(self, name, flag_values): + super(NewCmd, self).__init__(name, flag_values) + run_with_args = getattr(self, 'RunWithArgs', None) + self._new_style = isinstance(run_with_args, types.MethodType) + if self._new_style: + func = run_with_args.im_func + + argspec = inspect.getargspec(func) + if argspec.args and argspec.args[0] == 'self': + argspec = argspec._replace( # pylint: disable=protected-access + args=argspec.args[1:]) + self._argspec = argspec + # TODO(craigcitro): Do we really want to support all this + # nonsense? + self._star_args = self._argspec.varargs is not None + self._star_kwds = self._argspec.keywords is not None + self._max_args = len(self._argspec.args or ()) + self._min_args = self._max_args - len(self._argspec.defaults or ()) + if self._star_args: + self._max_args = sys.maxint + + self._debug_mode = FLAGS.debug_mode + self.surface_in_shell = True + self.__doc__ = self.RunWithArgs.__doc__ + + def __getattr__(self, name): + if name in self._command_flags: + return self._command_flags[name].value + return super(NewCmd, self).__getattribute__(name) + + def _GetFlag(self, flagname): + if flagname in self._command_flags: + return self._command_flags[flagname] + else: + return None + + def Run(self, argv): + """Run this command. + + If self is a new-style command, we set up arguments and call + self.RunWithArgs, gracefully handling exceptions. If not, we + simply call self.Run(argv). + + Args: + argv: List of arguments as strings. + + Returns: + 0 on success, nonzero on failure. + """ + if not self._new_style: + return super(NewCmd, self).Run(argv) + + # TODO(craigcitro): We need to save and restore flags each time so + # that we can per-command flags in the REPL. 
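# (Arity sketch, command hypothetical: a NewCmd subclass defining
#
#      def RunWithArgs(self, bucket, prefix=None):
#          ...
#
#  has inspect.getargspec args == ['bucket', 'prefix'] once 'self' is
#  stripped and defaults == (None,), so _min_args == 1 and
#  _max_args == 2; the positional-count checks below reject any other
#  number of arguments.)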
+ args = argv[1:] + fail = None + if len(args) < self._min_args: + fail = 'Not enough positional args; found %d, expected at least %d' % ( + len(args), self._min_args) + if len(args) > self._max_args: + fail = 'Too many positional args; found %d, expected at most %d' % ( + len(args), self._max_args) + if fail: + print fail + if self.usage: + print 'Usage: %s' % (self.usage,) + return 1 + + if self._debug_mode: + return self.RunDebug(args, {}) + else: + return self.RunSafely(args, {}) + + def RunCmdLoop(self, argv): + """Hook for use in cmd.Cmd-based command shells.""" + try: + args = shlex.split(argv) + except ValueError as e: + raise SyntaxError(self.EncodeForPrinting(e)) + return self.Run([self._command_name] + args) + + @staticmethod + def EncodeForPrinting(s): + """Safely encode a string as the encoding for sys.stdout.""" + encoding = sys.stdout.encoding or 'ascii' + return unicode(s).encode(encoding, 'backslashreplace') + + def _FormatError(self, e): + """Hook for subclasses to modify how error messages are printed.""" + return _SafeMakeAscii(e) + + def _HandleError(self, e): + message = self._FormatError(e) + print 'Exception raised in %s operation: %s' % (self._command_name, message) + return 1 + + def _IsDebuggableException(self, e): + """Hook for subclasses to skip debugging on certain exceptions.""" + return not isinstance(e, app.UsageError) + + def RunDebug(self, args, kwds): + """Run this command in debug mode.""" + try: + return_value = self.RunWithArgs(*args, **kwds) + except BaseException, e: + # Don't break into the debugger for expected exceptions. + if not self._IsDebuggableException(e): + return self._HandleError(e) + print + print '****************************************************' + print '** Unexpected Exception raised in execution! **' + if FLAGS.headless: + print '** --headless mode enabled, exiting. **' + print '** See STDERR for traceback. **' + else: + print '** --debug_mode enabled, starting pdb. **' + print '****************************************************' + print + traceback.print_exc() + print + if not FLAGS.headless: + pdb.post_mortem() + return 1 + return return_value + + def RunSafely(self, args, kwds): + """Run this command, turning exceptions into print statements.""" + try: + return_value = self.RunWithArgs(*args, **kwds) + except BaseException, e: + return self._HandleError(e) + return return_value + + +class CommandLoop(cmd.Cmd): + """Instance of cmd.Cmd built to work with NewCmd.""" + + class TerminateSignal(Exception): + """Exception type used for signaling loop completion.""" + + def __init__(self, commands, prompt): + cmd.Cmd.__init__(self) + self._commands = {'help': commands['help']} + self._special_command_names = ['help', 'repl', 'EOF'] + for name, command in commands.iteritems(): + if (name not in self._special_command_names and + isinstance(command, NewCmd) and + command.surface_in_shell): + self._commands[name] = command + setattr(self, 'do_%s' % (name,), command.RunCmdLoop) + self._default_prompt = prompt + self._set_prompt() + self._last_return_code = 0 + + @property + def last_return_code(self): + return self._last_return_code + + def _set_prompt(self): + self.prompt = self._default_prompt + + def do_EOF(self, *unused_args): + """Terminate the running command loop. + + This function raises an exception to avoid the need to do + potentially-error-prone string parsing inside onecmd. + + Args: + *unused_args: unused. + + Returns: + Never returns. + + Raises: + CommandLoop.TerminateSignal: always. 
+ """ + raise CommandLoop.TerminateSignal() + + def postloop(self): + print 'Goodbye.' + + def completedefault(self, unused_text, line, unused_begidx, unused_endidx): + if not line: + return [] + else: + command_name = line.partition(' ')[0].lower() + usage = '' + if command_name in self._commands: + usage = self._commands[command_name].usage + if usage: + print + print usage + print '%s%s' % (self.prompt, line), + return [] + + def emptyline(self): + print 'Available commands:', + print ' '.join(list(self._commands)) + + def precmd(self, line): + """Preprocess the shell input.""" + if line == 'EOF': + return line + if line.startswith('exit') or line.startswith('quit'): + return 'EOF' + words = line.strip().split() + if len(words) == 1 and words[0] not in ['help', 'ls', 'version']: + return 'help %s' % (line.strip(),) + return line + + def onecmd(self, line): + """Process a single command. + + Runs a single command, and stores the return code in + self._last_return_code. Always returns False unless the command + was EOF. + + Args: + line: (str) Command line to process. + + Returns: + A bool signaling whether or not the command loop should terminate. + """ + try: + self._last_return_code = cmd.Cmd.onecmd(self, line) + except CommandLoop.TerminateSignal: + return True + except BaseException as e: + name = line.split(' ')[0] + print 'Error running %s:' % name + print e + self._last_return_code = 1 + return False + + def get_names(self): + names = dir(self) + commands = (name for name in self._commands + if name not in self._special_command_names) + names.extend('do_%s' % (name,) for name in commands) + names.remove('do_EOF') + return names + + def do_help(self, command_name): + """Print the help for command_name (if present) or general help.""" + + # TODO(craigcitro): Add command-specific flags. + def FormatOneCmd(name, command, command_names): + indent_size = appcommands.GetMaxCommandLength() + 3 + if len(command_names) > 1: + indent = ' ' * indent_size + command_help = flags.TextWrap( + command.CommandGetHelp('', cmd_names=command_names), + indent=indent, + firstline_indent='') + first_help_line, _, rest = command_help.partition('\n') + first_line = '%-*s%s' % (indent_size, name + ':', first_help_line) + return '\n'.join((first_line, rest)) + else: + default_indent = ' ' + return '\n' + flags.TextWrap( + command.CommandGetHelp('', cmd_names=command_names), + indent=default_indent, + firstline_indent=default_indent) + '\n' + + if not command_name: + print '\nHelp for commands:\n' + command_names = list(self._commands) + print '\n\n'.join( + FormatOneCmd(name, command, command_names) + for name, command in self._commands.iteritems() + if name not in self._special_command_names) + print + elif command_name in self._commands: + print FormatOneCmd(command_name, self._commands[command_name], + command_names=[command_name]) + return 0 + + def postcmd(self, stop, line): + return bool(stop) or line == 'EOF' +# pylint: enable=g-bad-name + + +class Repl(NewCmd): + """Start an interactive session.""" + PROMPT = '> ' + + def __init__(self, name, fv): + super(Repl, self).__init__(name, fv) + self.surface_in_shell = False + flags.DEFINE_string( + 'prompt', '', + 'Prompt to use for interactive shell.', + flag_values=fv) + + def RunWithArgs(self): + """Start an interactive session.""" + prompt = FLAGS.prompt or self.PROMPT + repl = CommandLoop(appcommands.GetCommandList(), prompt=prompt) + print 'Welcome! 
(Type help for more information.)' + while True: + try: + repl.cmdloop() + break + except KeyboardInterrupt: + print + return repl.last_return_code diff --git a/_gcloud_vendor/apitools/base/py/base_api.py b/_gcloud_vendor/apitools/base/py/base_api.py new file mode 100644 index 000000000000..2ee5fcf233d5 --- /dev/null +++ b/_gcloud_vendor/apitools/base/py/base_api.py @@ -0,0 +1,583 @@ +#!/usr/bin/env python +"""Base class for api services.""" + +import contextlib +import datetime +import httplib +import logging +import pprint +import types +import urllib +import urlparse + + +from protorpc import message_types +from protorpc import messages + +from apitools.base.py import credentials_lib +from apitools.base.py import encoding +from apitools.base.py import exceptions +from apitools.base.py import http_wrapper +from apitools.base.py import util + +__all__ = [ + 'ApiMethodInfo', + 'ApiUploadInfo', + 'BaseApiClient', + 'BaseApiService', + 'NormalizeApiEndpoint', +] + +# TODO(craigcitro): Remove this once we quiet the spurious logging in +# oauth2client (or drop oauth2client). +logging.getLogger('oauth2client.util').setLevel(logging.ERROR) + +_MAX_URL_LENGTH = 2048 + + +class ApiUploadInfo(messages.Message): + """Media upload information for a method. + + Fields: + accept: (repeated) MIME Media Ranges for acceptable media uploads + to this method. + max_size: (integer) Maximum size of a media upload, such as 3MB + or 1TB (converted to an integer). + resumable_path: Path to use for resumable uploads. + resumable_multipart: (boolean) Whether or not the resumable endpoint + supports multipart uploads. + simple_path: Path to use for simple uploads. + simple_multipart: (boolean) Whether or not the simple endpoint + supports multipart uploads. + """ + accept = messages.StringField(1, repeated=True) + max_size = messages.IntegerField(2) + resumable_path = messages.StringField(3) + resumable_multipart = messages.BooleanField(4) + simple_path = messages.StringField(5) + simple_multipart = messages.BooleanField(6) + + +class ApiMethodInfo(messages.Message): + """Configuration info for an API method. + + All fields are strings unless noted otherwise. + + Fields: + relative_path: Relative path for this method. + method_id: ID for this method. + http_method: HTTP verb to use for this method. + path_params: (repeated) path parameters for this method. + query_params: (repeated) query parameters for this method. + ordered_params: (repeated) ordered list of parameters for + this method. + description: description of this method. + request_type_name: name of the request type. + response_type_name: name of the response type. + request_field: if not null, the field to pass as the body + of this POST request. may also be the REQUEST_IS_BODY + value below to indicate the whole message is the body. + upload_config: (ApiUploadInfo) Information about the upload + configuration supported by this method. + supports_download: (boolean) If True, this method supports + downloading the request via the `alt=media` query + parameter. 
+ """ + + relative_path = messages.StringField(1) + method_id = messages.StringField(2) + http_method = messages.StringField(3) + path_params = messages.StringField(4, repeated=True) + query_params = messages.StringField(5, repeated=True) + ordered_params = messages.StringField(6, repeated=True) + description = messages.StringField(7) + request_type_name = messages.StringField(8) + response_type_name = messages.StringField(9) + request_field = messages.StringField(10, default='') + upload_config = messages.MessageField(ApiUploadInfo, 11) + supports_download = messages.BooleanField(12, default=False) +REQUEST_IS_BODY = '' + + +def _LoadClass(name, messages_module): + if name.startswith('message_types.'): + _, _, classname = name.partition('.') + return getattr(message_types, classname) + elif '.' not in name: + return getattr(messages_module, name) + else: + raise exceptions.GeneratedClientError('Unknown class %s' % name) + + +def _RequireClassAttrs(obj, attrs): + for attr in attrs: + attr_name = attr.upper() + if not hasattr(obj, '%s' % attr_name) or not getattr(obj, attr_name): + msg = 'No %s specified for object of class %s.' % ( + attr_name, type(obj).__name__) + raise exceptions.GeneratedClientError(msg) + + +def NormalizeApiEndpoint(api_endpoint): + if not api_endpoint.endswith('/'): + api_endpoint += '/' + return api_endpoint + + +class _UrlBuilder(object): + """Convenient container for url data.""" + + def __init__(self, base_url, relative_path=None, query_params=None): + components = urlparse.urlsplit(urlparse.urljoin( + base_url, relative_path or '')) + if components.fragment: + raise exceptions.ConfigurationValueError( + 'Unexpected url fragment: %s' % components.fragment) + self.query_params = urlparse.parse_qs(components.query or '') + if query_params is not None: + self.query_params.update(query_params) + self.__scheme = components.scheme + self.__netloc = components.netloc + self.relative_path = components.path or '' + + @classmethod + def FromUrl(cls, url): + urlparts = urlparse.urlsplit(url) + query_params = urlparse.parse_qs(urlparts.query) + base_url = urlparse.urlunsplit(( + urlparts.scheme, urlparts.netloc, '', None, None)) + relative_path = urlparts.path or '' + return cls(base_url, relative_path=relative_path, query_params=query_params) + + @property + def base_url(self): + return urlparse.urlunsplit((self.__scheme, self.__netloc, '', '', '')) + + @base_url.setter + def base_url(self, value): + components = urlparse.urlsplit(value) + if components.path or components.query or components.fragment: + raise exceptions.ConfigurationValueError('Invalid base url: %s' % value) + self.__scheme = components.scheme + self.__netloc = components.netloc + + @property + def query(self): + # TODO(craigcitro): In the case that some of the query params are + # non-ASCII, we may silently fail to encode correctly. We should + # figure out who is responsible for owning the object -> str + # conversion. 
+ return urllib.urlencode(self.query_params, doseq=True) + + @property + def url(self): + if '{' in self.relative_path or '}' in self.relative_path: + raise exceptions.ConfigurationValueError( + 'Cannot create url with relative path %s' % self.relative_path) + return urlparse.urlunsplit(( + self.__scheme, self.__netloc, self.relative_path, self.query, '')) + + +class BaseApiClient(object): + """Base class for client libraries.""" + MESSAGES_MODULE = None + + _API_KEY = '' + _CLIENT_ID = '' + _CLIENT_SECRET = '' + _PACKAGE = '' + _SCOPES = [] + _USER_AGENT = '' + + def __init__(self, url, credentials=None, get_credentials=True, http=None, + model=None, log_request=False, log_response=False, num_retries=5, + credentials_args=None, default_global_params=None, + additional_http_headers=None): + _RequireClassAttrs(self, ('_package', '_scopes', 'messages_module')) + if default_global_params is not None: + util.Typecheck(default_global_params, self.params_type) + self.__default_global_params = default_global_params + self.log_request = log_request + self.log_response = log_response + self.__num_retries = 5 + # We let the @property machinery below do our validation. + self.num_retries = num_retries + self._credentials = credentials + if get_credentials and not credentials: + credentials_args = credentials_args or {} + self._SetCredentials(**credentials_args) + self._url = NormalizeApiEndpoint(url) + self._http = http or http_wrapper.GetHttp() + # Note that "no credentials" is totally possible. + if self._credentials is not None: + self._http = self._credentials.authorize(self._http) + # TODO(craigcitro): Remove this field when we switch to proto2. + self.__include_fields = None + + self.additional_http_headers = additional_http_headers or {} + + # TODO(craigcitro): Finish deprecating these fields. + _ = model + + self.__response_type_model = 'proto' + + def _SetCredentials(self, **kwds): + """Fetch credentials, and set them for this client. + + Note that we can't simply return credentials, since creating them + may involve side-effecting self. + + Args: + **kwds: Additional keyword arguments are passed on to GetCredentials. + + Returns: + None. Sets self._credentials. + """ + args = { + 'api_key': self._API_KEY, + 'client': self, + 'client_id': self._CLIENT_ID, + 'client_secret': self._CLIENT_SECRET, + 'package_name': self._PACKAGE, + 'scopes': self._SCOPES, + 'user_agent': self._USER_AGENT, + } + args.update(kwds) + # TODO(craigcitro): It's a bit dangerous to pass this + # still-half-initialized self into this method, but we might need + # to set attributes on it associated with our credentials. + # Consider another way around this (maybe a callback?) and whether + # or not it's worth it. 
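# (Usage sketch, values hypothetical: a generated client defining
#  _PACKAGE = 'storage' and
#  _SCOPES = ['https://www.googleapis.com/auth/devstorage.read_write']
#  reaches this call with those values in `args`; per-instance
#  overrides travel through credentials_args, e.g.
#      MyClient(url, credentials_args={
#          'service_account_name': 'svc@example.com',
#          'service_account_keyfile': '/path/to/key.p12'}).)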
+ self._credentials = credentials_lib.GetCredentials(**args) + + @classmethod + def ClientInfo(cls): + return { + 'client_id': cls._CLIENT_ID, + 'client_secret': cls._CLIENT_SECRET, + 'scope': ' '.join(sorted(util.NormalizeScopes(cls._SCOPES))), + 'user_agent': cls._USER_AGENT, + } + + @property + def base_model_class(self): + return None + + @property + def http(self): + return self._http + + @property + def url(self): + return self._url + + @classmethod + def GetScopes(cls): + return cls._SCOPES + + @property + def params_type(self): + return _LoadClass('StandardQueryParameters', self.MESSAGES_MODULE) + + @property + def user_agent(self): + return self._USER_AGENT + + @property + def _default_global_params(self): + if self.__default_global_params is None: + self.__default_global_params = self.params_type() + return self.__default_global_params + + def AddGlobalParam(self, name, value): + params = self._default_global_params + setattr(params, name, value) + + @property + def global_params(self): + return encoding.CopyProtoMessage(self._default_global_params) + + @contextlib.contextmanager + def IncludeFields(self, include_fields): + self.__include_fields = include_fields + yield + self.__include_fields = None + + @property + def response_type_model(self): + return self.__response_type_model + + @contextlib.contextmanager + def JsonResponseModel(self): + """In this context, return raw JSON instead of proto.""" + old_model = self.response_type_model + self.__response_type_model = 'json' + yield + self.__response_type_model = old_model + + @property + def num_retries(self): + return self.__num_retries + + @num_retries.setter + def num_retries(self, value): + util.Typecheck(value, (int, long)) + if value < 0: + raise exceptions.InvalidDataError( + 'Cannot have negative value for num_retries') + self.__num_retries = value + + @contextlib.contextmanager + def WithRetries(self, num_retries): + old_num_retries = self.num_retries + self.num_retries = num_retries + yield + self.num_retries = old_num_retries + + def ProcessRequest(self, method_config, request): + """Hook for pre-processing of requests.""" + if self.log_request: + logging.info( + 'Calling method %s with %s: %s', method_config.method_id, + method_config.request_type_name, request) + return request + + def ProcessHttpRequest(self, http_request): + """Hook for pre-processing of http requests.""" + http_request.headers.update(self.additional_http_headers) + if self.log_request: + logging.info('Making http %s to %s', + http_request.http_method, http_request.url) + logging.info('Headers: %s', pprint.pformat(http_request.headers)) + if http_request.body: + # TODO(craigcitro): Make this safe to print in the case of + # non-printable body characters. + logging.info('Body:\n%s', http_request.body) + else: + logging.info('Body: (none)') + return http_request + + def ProcessResponse(self, method_config, response): + if self.log_response: + logging.info('Response of type %s: %s', + method_config.response_type_name, response) + return response + + # TODO(craigcitro): Decide where these two functions should live. 
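# (Usage sketch, service name hypothetical: the context managers above
#  compose, and each restores its previous setting on exit:
#
#      with client.WithRetries(10):
#          with client.JsonResponseModel():
#              raw_json = service.Get(request)  # a str, not a proto
# )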
+ def SerializeMessage(self, message): + return encoding.MessageToJson(message, include_fields=self.__include_fields) + + def DeserializeMessage(self, response_type, data): + """Deserialize the given data as method_config.response_type.""" + try: + message = encoding.JsonToMessage(response_type, data) + except (exceptions.InvalidDataFromServerError, + messages.ValidationError) as e: + raise exceptions.InvalidDataFromServerError( + 'Error decoding response "%s" as type %s: %s' % ( + data, response_type.__name__, e)) + return message + + def FinalizeTransferUrl(self, url): + """Modify the url for a given transfer, based on auth and version.""" + url_builder = _UrlBuilder.FromUrl(url) + if self.global_params.key: + url_builder.query_params['key'] = self.global_params.key + return url_builder.url + + +class BaseApiService(object): + """Base class for generated API services.""" + + def __init__(self, client): + self.__client = client + self._method_configs = {} + self._upload_configs = {} + + @property + def _client(self): + return self.__client + + @property + def client(self): + return self.__client + + def GetMethodConfig(self, method): + return self._method_configs[method] + + def GetUploadConfig(self, method): + return self._upload_configs.get(method) + + def GetRequestType(self, method): + method_config = self.GetMethodConfig(method) + return getattr(self.client.MESSAGES_MODULE, + method_config.request_type_name) + + def GetResponseType(self, method): + method_config = self.GetMethodConfig(method) + return getattr(self.client.MESSAGES_MODULE, + method_config.response_type_name) + + def __CombineGlobalParams(self, global_params, default_params): + util.Typecheck(global_params, (types.NoneType, self.__client.params_type)) + result = self.__client.params_type() + global_params = global_params or self.__client.params_type() + for field in result.all_fields(): + value = (global_params.get_assigned_value(field.name) or + default_params.get_assigned_value(field.name)) + if value not in (None, [], ()): + setattr(result, field.name, value) + return result + + def __ConstructQueryParams(self, query_params, request, global_params): + """Construct a dictionary of query parameters for this request.""" + global_params = self.__CombineGlobalParams( + global_params, self.__client.global_params) + query_info = dict((field.name, getattr(global_params, field.name)) + for field in self.__client.params_type.all_fields()) + query_info.update( + (param, getattr(request, param, None)) for param in query_params) + query_info = dict((k, v) for k, v in query_info.iteritems() + if v is not None) + for k, v in query_info.iteritems(): + if isinstance(v, unicode): + query_info[k] = v.encode('utf8') + elif isinstance(v, str): + query_info[k] = v.decode('utf8') + elif isinstance(v, datetime.datetime): + query_info[k] = v.isoformat() + return query_info + + def __ConstructRelativePath(self, method_config, request, relative_path=None): + """Determine the relative path for request.""" + params = dict([(param, getattr(request, param, None)) + for param in method_config.path_params]) + return util.ExpandRelativePath(method_config, params, + relative_path=relative_path) + + def __FinalizeRequest(self, http_request, url_builder): + """Make any final general adjustments to the request.""" + if (http_request.http_method == 'GET' and + len(http_request.url) > _MAX_URL_LENGTH): + http_request.http_method = 'POST' + http_request.headers['x-http-method-override'] = 'GET' + http_request.headers['content-type'] = 
'application/x-www-form-urlencoded' + http_request.body = url_builder.query + url_builder.query_params = {} + http_request.url = url_builder.url + + def __ProcessHttpResponse(self, method_config, http_response): + """Process the given http response.""" + if http_response.status_code not in (httplib.OK, httplib.NO_CONTENT): + raise exceptions.HttpError.FromResponse(http_response) + if http_response.status_code == httplib.NO_CONTENT: + # TODO(craigcitro): Find out why _replace doesn't seem to work here. + http_response = http_wrapper.Response( + info=http_response.info, content='{}', + request_url=http_response.request_url) + if self.__client.response_type_model == 'json': + return http_response.content + else: + response_type = _LoadClass( + method_config.response_type_name, self.__client.MESSAGES_MODULE) + return self.__client.DeserializeMessage( + response_type, http_response.content) + + def __SetBaseHeaders(self, http_request, client): + """Fill in the basic headers on http_request.""" + # TODO(craigcitro): Make the default a little better here, and + # include the apitools version. + user_agent = client.user_agent or 'apitools-client/1.0' + http_request.headers['user-agent'] = user_agent + http_request.headers['accept'] = 'application/json' + http_request.headers['accept-encoding'] = 'gzip, deflate' + + def __SetBody(self, http_request, method_config, request, upload): + """Fill in the body on http_request.""" + if not method_config.request_field: + return + + request_type = _LoadClass( + method_config.request_type_name, self.__client.MESSAGES_MODULE) + if method_config.request_field == REQUEST_IS_BODY: + body_value = request + body_type = request_type + else: + body_value = getattr(request, method_config.request_field) + body_field = request_type.field_by_name(method_config.request_field) + util.Typecheck(body_field, messages.MessageField) + body_type = body_field.type + + if upload and not body_value: + # We're going to fill in the body later. + return + util.Typecheck(body_value, body_type) + http_request.headers['content-type'] = 'application/json' + http_request.body = self.__client.SerializeMessage(body_value) + + def PrepareHttpRequest(self, method_config, request, global_params=None, + upload=None, upload_config=None, download=None): + """Prepares an HTTP request to be sent.""" + request_type = _LoadClass( + method_config.request_type_name, self.__client.MESSAGES_MODULE) + util.Typecheck(request, request_type) + request = self.__client.ProcessRequest(method_config, request) + + http_request = http_wrapper.Request(http_method=method_config.http_method) + self.__SetBaseHeaders(http_request, self.__client) + self.__SetBody(http_request, method_config, request, upload) + + url_builder = _UrlBuilder( + self.__client.url, relative_path=method_config.relative_path) + url_builder.query_params = self.__ConstructQueryParams( + method_config.query_params, request, global_params) + + # It's important that upload and download go before we fill in the + # relative path, so that they can replace it. 
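# (A generated service typically funnels each API method into
#  _RunMethod, defined just below; sketched with hypothetical names:
#
#      class ObjectsService(base_api.BaseApiService):
#          def Get(self, request, global_params=None):
#              config = self.GetMethodConfig('Get')
#              return self._RunMethod(config, request,
#                                     global_params=global_params)
# )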
+ if upload is not None: + upload.ConfigureRequest(upload_config, http_request, url_builder) + if download is not None: + download.ConfigureRequest(http_request, url_builder) + + url_builder.relative_path = self.__ConstructRelativePath( + method_config, request, relative_path=url_builder.relative_path) + self.__FinalizeRequest(http_request, url_builder) + + return self.__client.ProcessHttpRequest(http_request) + + def _RunMethod(self, method_config, request, global_params=None, + upload=None, upload_config=None, download=None): + """Call this method with request.""" + if upload is not None and download is not None: + # TODO(craigcitro): This just involves refactoring the logic + # below into callbacks that we can pass around; in particular, + # the order should be that the upload gets the initial request, + # and then passes its reply to a download if one exists, and + # then that goes to ProcessResponse and is returned. + raise exceptions.NotYetImplementedError( + 'Cannot yet use both upload and download at once') + + http_request = self.PrepareHttpRequest( + method_config, request, global_params, upload, upload_config, download) + + # TODO(craigcitro): Make num_retries customizable on Transfer + # objects, and pass in self.__client.num_retries when initializing + # an upload or download. + if download is not None: + download.InitializeDownload(http_request, client=self.client) + return + + http_response = None + if upload is not None: + http_response = upload.InitializeUpload(http_request, client=self.client) + if http_response is None: + http_response = http_wrapper.MakeRequest( + self.__client.http, http_request, retries=self.__client.num_retries) + + return self.ProcessHttpResponse(method_config, http_response) + + def ProcessHttpResponse(self, method_config, http_response): + """Convert an HTTP response to the expected message type.""" + return self.__client.ProcessResponse( + method_config, + self.__ProcessHttpResponse(method_config, http_response)) diff --git a/_gcloud_vendor/apitools/base/py/base_api_test.py b/_gcloud_vendor/apitools/base/py/base_api_test.py new file mode 100644 index 000000000000..6d43dead1d91 --- /dev/null +++ b/_gcloud_vendor/apitools/base/py/base_api_test.py @@ -0,0 +1,113 @@ +#!/usr/bin/env python + + +import datetime +import sys +import urllib + +from protorpc import message_types +from protorpc import messages + +import unittest2 + +from apitools.base.py import base_api +from apitools.base.py import http_wrapper + + +class SimpleMessage(messages.Message): + field = messages.StringField(1) + + +class MessageWithTime(messages.Message): + timestamp = message_types.DateTimeField(1) + + +class StandardQueryParameters(messages.Message): + field = messages.StringField(1) + + +class FakeCredentials(object): + + def authorize(self, _): # pylint: disable=invalid-name + return None + + +class FakeClient(base_api.BaseApiClient): + MESSAGES_MODULE = sys.modules[__name__] + _PACKAGE = 'package' + _SCOPES = ['scope1'] + _CLIENT_ID = 'client_id' + _CLIENT_SECRET = 'client_secret' + + +class FakeService(base_api.BaseApiService): + + def __init__(self, client=None): + client = client or FakeClient( + 'http://www.example.com/', credentials=FakeCredentials()) + super(FakeService, self).__init__(client) + + +class BaseApiTest(unittest2.TestCase): + + def __GetFakeClient(self): + return FakeClient('', credentials=FakeCredentials()) + + def testUrlNormalization(self): + client = FakeClient('http://www.googleapis.com', get_credentials=False) + 
self.assertTrue(client.url.endswith('/')) + + def testNoCredentials(self): + client = FakeClient('', get_credentials=False) + self.assertIsNotNone(client) + self.assertIsNone(client._credentials) + + def testIncludeEmptyFieldsClient(self): + msg = SimpleMessage() + client = self.__GetFakeClient() + self.assertEqual('{}', client.SerializeMessage(msg)) + with client.IncludeFields(('field',)): + self.assertEqual('{"field": null}', client.SerializeMessage(msg)) + + def testJsonResponse(self): + method_config = base_api.ApiMethodInfo(response_type_name='SimpleMessage') + service = FakeService() + http_response = http_wrapper.Response( + info={'status': '200'}, content='{"field": "abc"}', + request_url='http://www.google.com') + response_message = SimpleMessage(field='abc') + self.assertEqual(response_message, service.ProcessHttpResponse( + method_config, http_response)) + with service.client.JsonResponseModel(): + self.assertEqual(http_response.content, service.ProcessHttpResponse( + method_config, http_response)) + + def testAdditionalHeaders(self): + additional_headers = {'Request-Is-Awesome': '1'} + client = self.__GetFakeClient() + + # No headers to start + http_request = http_wrapper.Request('http://www.example.com') + new_request = client.ProcessHttpRequest(http_request) + self.assertFalse('Request-Is-Awesome' in new_request.headers) + + # Add a new header and ensure it's added to the request. + client.additional_http_headers = additional_headers + http_request = http_wrapper.Request('http://www.example.com') + new_request = client.ProcessHttpRequest(http_request) + self.assertTrue('Request-Is-Awesome' in new_request.headers) + + def testQueryEncoding(self): + method_config = base_api.ApiMethodInfo( + request_type_name='MessageWithTime', query_params=['timestamp']) + service = FakeService() + request = MessageWithTime( + timestamp=datetime.datetime(2014, 10, 07, 12, 53, 13)) + http_request = service.PrepareHttpRequest(method_config, request) + + url_timestamp = urllib.quote(request.timestamp.isoformat()) + self.assertTrue(http_request.url.endswith(url_timestamp)) + + +if __name__ == '__main__': + unittest2.main() diff --git a/_gcloud_vendor/apitools/base/py/base_cli.py b/_gcloud_vendor/apitools/base/py/base_cli.py new file mode 100644 index 000000000000..f9d7d1a40058 --- /dev/null +++ b/_gcloud_vendor/apitools/base/py/base_cli.py @@ -0,0 +1,151 @@ +#!/usr/bin/env python +"""Base script for generated CLI.""" + + +import atexit +import code +import logging +import os +import readline +import rlcompleter +import sys + +from google.apputils import appcommands +import gflags as flags + +from apitools.base.py import encoding +from apitools.base.py import exceptions + +__all__ = [ + 'ConsoleWithReadline', + 'DeclareBaseFlags', + 'FormatOutput', + 'SetupLogger', + 'run_main', +] + + +# TODO(craigcitro): We should move all the flags for the +# StandardQueryParameters into this file, so that they can be used +# elsewhere easily. + +_BASE_FLAGS_DECLARED = False +_OUTPUT_FORMATTER_MAP = { + 'protorpc': lambda x: x, + 'json': encoding.MessageToJson, +} + + +def DeclareBaseFlags(): + """Declare base flags for all CLIs.""" + # TODO(craigcitro): FlagValidators? 
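# (Behavior sketch for the formatter map above: with
#  --output_format=json on the command line, FormatOutput below
#  serializes a protorpc message through encoding.MessageToJson, while
#  the default 'protorpc' entry is the identity and hands the message
#  object back unchanged.)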
+ global _BASE_FLAGS_DECLARED + if _BASE_FLAGS_DECLARED: + return + flags.DEFINE_boolean( + 'log_request', False, + 'Log requests.') + flags.DEFINE_boolean( + 'log_response', False, + 'Log responses.') + flags.DEFINE_boolean( + 'log_request_response', False, + 'Log requests and responses.') + flags.DEFINE_enum( + 'output_format', + 'protorpc', + _OUTPUT_FORMATTER_MAP.viewkeys(), + 'Display format for results.') + + _BASE_FLAGS_DECLARED = True + +# NOTE: This is specified here so that it can be read by other files +# without depending on the flag to be registered. +TRACE_HELP = ( + 'A tracing token of the form "token:" ' + 'to include in api requests.') +FLAGS = flags.FLAGS + + +def SetupLogger(): + if FLAGS.log_request or FLAGS.log_response or FLAGS.log_request_response: + logging.basicConfig() + logging.getLogger().setLevel(logging.INFO) + + +def FormatOutput(message, output_format=None): + """Convert the output to the user-specified format.""" + output_format = output_format or FLAGS.output_format + formatter = _OUTPUT_FORMATTER_MAP.get(FLAGS.output_format) + if formatter is None: + raise exceptions.UserError('Unknown output format: %s' % output_format) + return formatter(message) + + +class _SmartCompleter(rlcompleter.Completer): + + def _callable_postfix(self, val, word): + if ('(' in readline.get_line_buffer() or + not callable(val)): + return word + else: + return word + '(' + + def complete(self, text, state): + if not readline.get_line_buffer().strip(): + if not state: + return ' ' + else: + return None + return rlcompleter.Completer.complete(self, text, state) + + +class ConsoleWithReadline(code.InteractiveConsole): + """InteractiveConsole with readline, tab completion, and history.""" + + def __init__(self, env, filename='', histfile=None): + new_locals = dict(env) + new_locals.update({ + '_SmartCompleter': _SmartCompleter, + 'readline': readline, + 'rlcompleter': rlcompleter, + }) + code.InteractiveConsole.__init__(self, new_locals, filename) + readline.parse_and_bind('tab: complete') + readline.set_completer(_SmartCompleter(new_locals).complete) + if histfile is not None: + histfile = os.path.expanduser(histfile) + if os.path.exists(histfile): + readline.read_history_file(histfile) + atexit.register(lambda: readline.write_history_file(histfile)) + + +def run_main(): + """Function to be used as setuptools script entry point. + + Appcommands assumes that it always runs as __main__, but launching + via a setuptools-generated entry_point breaks this rule. We do some + trickery here to make sure that appcommands and flags find their + state where they expect to by faking ourselves as __main__. + """ + + # Put the flags for this module somewhere the flags module will look + # for them. + # pylint: disable=protected-access + new_name = flags._GetMainModule() + sys.modules[new_name] = sys.modules['__main__'] + for flag in FLAGS.FlagsByModuleDict().get(__name__, []): + FLAGS._RegisterFlagByModule(new_name, flag) + for key_flag in FLAGS.KeyFlagsByModuleDict().get(__name__, []): + FLAGS._RegisterKeyFlagForModule(new_name, key_flag) + # pylint: enable=protected-access + + # Now set __main__ appropriately so that appcommands will be + # happy. 
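# (With that in place, a client package can expose this function as a
#  console script; setup.py sketch, project and module names
#  hypothetical:
#
#      entry_points={
#          'console_scripts': ['my_api = my_api.client_cli:run_main'],
#      }
# )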
+ sys.modules['__main__'] = sys.modules[__name__] + appcommands.Run() + sys.modules['__main__'] = sys.modules.pop(new_name) + + +if __name__ == '__main__': + appcommands.Run() diff --git a/_gcloud_vendor/apitools/base/py/batch.py b/_gcloud_vendor/apitools/base/py/batch.py new file mode 100644 index 000000000000..eaf5eba2ea57 --- /dev/null +++ b/_gcloud_vendor/apitools/base/py/batch.py @@ -0,0 +1,441 @@ +#!/usr/bin/env python +"""Library for handling batch HTTP requests for apitools.""" + +import collections +import email.generator as generator +import email.mime.multipart as mime_multipart +import email.mime.nonmultipart as mime_nonmultipart +import email.parser as email_parser +import httplib +import itertools +import StringIO +import time +import urllib +import urlparse +import uuid + +from apitools.base.py import exceptions +from apitools.base.py import http_wrapper + +__all__ = [ + 'BatchApiRequest', +] + + +class RequestResponseAndHandler(collections.namedtuple( + 'RequestResponseAndHandler', ['request', 'response', 'handler'])): + """Container for data related to completing an HTTP request. + + This contains an HTTP request, its response, and a callback for handling + the response from the server. + + Attributes: + request: An http_wrapper.Request object representing the HTTP request. + response: The http_wrapper.Response object returned from the server. + handler: A callback function accepting two arguments, response + and exception. Response is an http_wrapper.Response object, and + exception is an apiclient.errors.HttpError object if an error + occurred, or otherwise None. + """ + + +class BatchApiRequest(object): + + class ApiCall(object): + """Holds request and response information for each request. + + ApiCalls are ultimately exposed to the client once the HTTP batch request + has been completed. + + Attributes: + http_request: A client-supplied http_wrapper.Request to be + submitted to the server. + response: A http_wrapper.Response object given by the server as a + response to the user request, or None if an error occurred. + exception: An apiclient.errors.HttpError object if an error + occurred, or None. + """ + + def __init__(self, request, retryable_codes, service, method_config): + """Initialize an individual API request. + + Args: + request: An http_wrapper.Request object. + retryable_codes: A list of integer HTTP codes that can be retried. + service: A service inheriting from base_api.BaseApiService. + method_config: Method config for the desired API request. + """ + self.__retryable_codes = list( + set(retryable_codes + [httplib.UNAUTHORIZED])) + self.__http_response = None + self.__service = service + self.__method_config = method_config + + self.http_request = request + # TODO(user): Add some validation to these fields. + self.__response = None + self.__exception = None + + @property + def is_error(self): + return self.exception is not None + + @property + def response(self): + return self.__response + + @property + def exception(self): + return self.__exception + + @property + def authorization_failed(self): + return (self.__http_response and ( + self.__http_response.status_code == httplib.UNAUTHORIZED)) + + @property + def terminal_state(self): + return (self.__http_response and ( + self.__http_response.status_code not in self.__retryable_codes)) + + def HandleResponse(self, http_response, exception): + """Handles an incoming http response to the request in http_request. + + This is intended to be used as a callback function for + BatchHttpRequest.Add. 
+ + Args: + http_response: Deserialized http_wrapper.Response object. + exception: apiclient.errors.HttpError object if an error occurred. + """ + self.__http_response = http_response + self.__exception = exception + if self.terminal_state and not self.__exception: + self.__response = self.__service.ProcessHttpResponse( + self.__method_config, self.__http_response) + + def __init__(self, batch_url=None, retryable_codes=None): + """Initialize a batch API request object. + + Args: + batch_url: Base URL for batch API calls. + retryable_codes: A list of integer HTTP codes that can be retried. + """ + self.api_requests = [] + self.retryable_codes = retryable_codes or [] + self.batch_url = batch_url or 'https://www.googleapis.com/batch' + + def Add(self, service, method, request, global_params=None): + """Add a request to the batch. + + Args: + service: A class inheriting base_api.BaseApiService. + method: A string indicated desired method from the service. See + the example in the class docstring. + request: An input message appropriate for the specified service.method. + global_params: Optional additional parameters to pass into + method.PrepareHttpRequest. + + Returns: + None + """ + # Retrieve the configs for the desired method and service. + method_config = service.GetMethodConfig(method) + upload_config = service.GetUploadConfig(method) + + # Prepare the HTTP Request. + http_request = service.PrepareHttpRequest( + method_config, request, global_params=global_params, + upload_config=upload_config) + + # Create the request and add it to our master list. + api_request = self.ApiCall( + http_request, self.retryable_codes, service, method_config) + self.api_requests.append(api_request) + + def Execute(self, http, sleep_between_polls=5, max_retries=5): + """Execute all of the requests in the batch. + + Args: + http: httplib2.Http object for use in the request. + sleep_between_polls: Integer number of seconds to sleep between polls. + max_retries: Max retries. Any requests that have not succeeded by + this number of retries simply report the last response or + exception, whatever it happened to be. + + Returns: + List of ApiCalls. + """ + requests = [request for request in self.api_requests if not + request.terminal_state] + + for attempt in xrange(max_retries): + if attempt: + time.sleep(sleep_between_polls) + + # Create a batch_http_request object and populate it with incomplete + # requests. + batch_http_request = BatchHttpRequest(batch_url=self.batch_url) + for request in requests: + batch_http_request.Add(request.http_request, request.HandleResponse) + batch_http_request.Execute(http) + + # Collect retryable requests. + requests = [request for request in self.api_requests if not + request.terminal_state] + + if (any(request.authorization_failed for request in requests) + and hasattr(http.request, 'credentials')): + http.request.credentials.refresh(http) + + if not requests: + break + + return self.api_requests + + +class BatchHttpRequest(object): + """Batches multiple http_wrapper.Request objects into a single request.""" + + def __init__(self, batch_url, callback=None): + """Constructor for a BatchHttpRequest. + + Args: + batch_url: URL to send batch requests to. + callback: A callback to be called for each response, of the + form callback(response, exception). The first parameter is + the deserialized Response object. The second is an + apiclient.errors.HttpError exception object if an HTTP error + occurred while processing the request, or None if no error occurred. 
+ """ + # Endpoint to which these requests are sent. + self.__batch_url = batch_url + + # Global callback to be called for each individual response in the batch. + self.__callback = callback + + # List of requests, responses and handlers. + self.__request_response_handlers = {} + + # The last auto generated id. + self.__last_auto_id = itertools.count() + + # Unique ID on which to base the Content-ID headers. + self.__base_id = uuid.uuid4() + + def _ConvertIdToHeader(self, request_id): + """Convert an id to a Content-ID header value. + + Args: + request_id: String identifier for a individual request. + + Returns: + A Content-ID header with the id_ encoded into it. A UUID is prepended to + the value because Content-ID headers are supposed to be universally + unique. + """ + return '<%s+%s>' % (self.__base_id, urllib.quote(request_id)) + + @staticmethod + def _ConvertHeaderToId(header): + """Convert a Content-ID header value to an id. + + Presumes the Content-ID header conforms to the format that + _ConvertIdToHeader() returns. + + Args: + header: A string indicating the Content-ID header value. + + Returns: + The extracted id value. + + Raises: + BatchError if the header is not in the expected format. + """ + if not (header.startswith('<') or header.endswith('>')): + raise exceptions.BatchError('Invalid value for Content-ID: %s' % header) + if '+' not in header: + raise exceptions.BatchError('Invalid value for Content-ID: %s' % header) + _, request_id = header[1:-1].rsplit('+', 1) + + return urllib.unquote(request_id) + + def _SerializeRequest(self, request): + """Convert a http_wrapper.Request object into a string. + + Args: + request: A http_wrapper.Request to serialize. + + Returns: + The request as a string in application/http format. + """ + # Construct status line + parsed = urlparse.urlsplit(request.url) + request_line = urlparse.urlunsplit( + (None, None, parsed.path, parsed.query, None)) + status_line = request.http_method + ' ' + request_line + ' HTTP/1.1\n' + major, minor = request.headers.get( + 'content-type', 'application/json').split('/') + msg = mime_nonmultipart.MIMENonMultipart(major, minor) + + # MIMENonMultipart adds its own Content-Type header. + # Keep all of the other headers in headers. + for key, value in request.headers.iteritems(): + if key == 'content-type': + continue + msg[key] = value + + msg['Host'] = parsed.netloc + msg.set_unixfrom(None) + + if request.body is not None: + msg.set_payload(request.body) + + # Serialize the mime message. + str_io = StringIO.StringIO() + # maxheaderlen=0 means don't line wrap headers. + gen = generator.Generator(str_io, maxheaderlen=0) + gen.flatten(msg, unixfrom=False) + body = str_io.getvalue() + + # Strip off the \n\n that the MIME lib tacks onto the end of the payload. + if request.body is None: + body = body[:-2] + + return status_line.encode('utf-8') + body + + def _DeserializeResponse(self, payload): + """Convert string into Response and content. + + Args: + payload: Header and body string to be deserialized. + + Returns: + A Response object + """ + # Strip off the status line. + status_line, payload = payload.split('\n', 1) + _, status, _ = status_line.split(' ', 2) + + # Parse the rest of the response. + parser = email_parser.Parser() + msg = parser.parsestr(payload) + + # Get the headers. + info = dict(msg) + info['status'] = status + + # Create Response from the parsed headers. + content = msg.get_payload() + + return http_wrapper.Response(info, content, self.__batch_url) + + def _NewId(self): + """Create a new id. 
+ + Auto incrementing number that avoids conflicts with ids already used. + + Returns: + A new unique id string. + """ + return str(self.__last_auto_id.next()) + + def Add(self, request, callback=None): + """Add a new request. + + Args: + request: A http_wrapper.Request to add to the batch. + callback: A callback to be called for this response, of the + form callback(response, exception). The first parameter is the + deserialized response object. The second is an + apiclient.errors.HttpError exception object if an HTTP error + occurred while processing the request, or None if no errors occurred. + + Returns: + None + """ + self.__request_response_handlers[self._NewId()] = RequestResponseAndHandler( + request, None, callback) + + def _Execute(self, http): + """Serialize batch request, send to server, process response. + + Args: + http: A httplib2.Http object to be used to make the request with. + + Raises: + httplib2.HttpLib2Error if a transport error has occured. + apiclient.errors.BatchError if the response is the wrong format. + """ + message = mime_multipart.MIMEMultipart('mixed') + # Message should not write out its own headers. + setattr(message, '_write_headers', lambda self: None) + + # Add all the individual requests. + for key in self.__request_response_handlers: + msg = mime_nonmultipart.MIMENonMultipart('application', 'http') + msg['Content-Transfer-Encoding'] = 'binary' + msg['Content-ID'] = self._ConvertIdToHeader(key) + + body = self._SerializeRequest( + self.__request_response_handlers[key].request) + msg.set_payload(body) + message.attach(msg) + + request = http_wrapper.Request(self.__batch_url, 'POST') + request.body = message.as_string() + request.headers['content-type'] = ( + 'multipart/mixed; boundary="%s"') % message.get_boundary() + + response = http_wrapper.MakeRequest(http, request) + + if response.status_code >= 300: + raise exceptions.HttpError.FromResponse(response) + + # Prepend with a content-type header so Parser can handle it. + header = 'content-type: %s\r\n\r\n' % response.info['content-type'] + + parser = email_parser.Parser() + mime_response = parser.parsestr(header + response.content) + + if not mime_response.is_multipart(): + raise exceptions.BatchError('Response not in multipart/mixed format.') + + for part in mime_response.get_payload(): + request_id = self._ConvertHeaderToId(part['Content-ID']) + response = self._DeserializeResponse(part.get_payload()) + + # Disable protected access because namedtuple._replace(...) + # is not actually meant to be protected. + self.__request_response_handlers[request_id] = ( + self.__request_response_handlers[request_id]._replace( # pylint: disable=protected-access + response=response)) + + def Execute(self, http): + """Execute all the requests as a single batched HTTP request. + + Args: + http: A httplib2.Http object to be used with the request. + + Returns: + None + + Raises: + BatchError if the response is the wrong format. 
+ """ + + self._Execute(http) + + for key in self.__request_response_handlers: + response = self.__request_response_handlers[key].response + callback = self.__request_response_handlers[key].handler + + exception = None + + if response.status_code >= 300: + exception = exceptions.HttpError.FromResponse(response) + + if callback is not None: + callback(response, exception) + if self.__callback is not None: + self.__callback(response, exception) diff --git a/_gcloud_vendor/apitools/base/py/cli.py b/_gcloud_vendor/apitools/base/py/cli.py new file mode 100644 index 000000000000..b24470bb3d83 --- /dev/null +++ b/_gcloud_vendor/apitools/base/py/cli.py @@ -0,0 +1,13 @@ +#!/usr/bin/env python +"""Top-level import for all CLI-related functionality in apitools. + +Note that importing this file will ultimately have side-effects, and +may require imports not available in all environments (such as App +Engine). In particular, picking up some readline-related imports can +cause pain. +""" + +# pylint:disable=wildcard-import + +from apitools.base.py.app2 import * +from apitools.base.py.base_cli import * diff --git a/_gcloud_vendor/apitools/base/py/credentials_lib.py b/_gcloud_vendor/apitools/base/py/credentials_lib.py new file mode 100644 index 000000000000..b4d660d70a31 --- /dev/null +++ b/_gcloud_vendor/apitools/base/py/credentials_lib.py @@ -0,0 +1,221 @@ +#!/usr/bin/env python +"""Common credentials classes and constructors.""" + +import httplib +import json +import os +import urllib2 + + +import httplib2 +import oauth2client +import oauth2client.client +import oauth2client.gce +import oauth2client.multistore_file +import oauth2client.tools + +try: + from gflags import FLAGS +except ImportError: + FLAGS = None + +import logging + +from apitools.base.py import exceptions +from apitools.base.py import util + +__all__ = [ + 'CredentialsFromFile', + 'GaeAssertionCredentials', + 'GceAssertionCredentials', + 'GetCredentials', + 'ServiceAccountCredentials', + 'ServiceAccountCredentialsFromFile', +] + + + +# TODO(craigcitro): Expose the extra args here somewhere higher up, +# possibly as flags in the generated CLI. +def GetCredentials(package_name, scopes, client_id, client_secret, user_agent, + credentials_filename=None, + service_account_name=None, service_account_keyfile=None, + api_key=None, client=None): + """Attempt to get credentials, using an oauth dance as the last resort.""" + scopes = util.NormalizeScopes(scopes) + # TODO(craigcitro): Error checking. 
+  client_info = {
+      'client_id': client_id,
+      'client_secret': client_secret,
+      'scope': ' '.join(sorted(util.NormalizeScopes(scopes))),
+      'user_agent': user_agent or '%s-generated/0.1' % package_name,
+  }
+  if service_account_name is not None:
+    credentials = ServiceAccountCredentialsFromFile(
+        service_account_name, service_account_keyfile, scopes)
+    if credentials is not None:
+      return credentials
+  credentials = GaeAssertionCredentials.Get(scopes)
+  if credentials is not None:
+    return credentials
+  credentials = GceAssertionCredentials.Get(scopes)
+  if credentials is not None:
+    return credentials
+  credentials_filename = credentials_filename or os.path.expanduser(
+      '~/.apitools.token')
+  credentials = CredentialsFromFile(credentials_filename, client_info)
+  if credentials is not None:
+    return credentials
+  raise exceptions.CredentialsError('Could not create valid credentials')
+
+
+def ServiceAccountCredentialsFromFile(
+    service_account_name, private_key_filename, scopes):
+  with open(private_key_filename) as key_file:
+    return ServiceAccountCredentials(
+        service_account_name, key_file.read(), scopes)
+
+
+def ServiceAccountCredentials(service_account_name, private_key, scopes):
+  scopes = util.NormalizeScopes(scopes)
+  return oauth2client.client.SignedJwtAssertionCredentials(
+      service_account_name, private_key, scopes)
+
+
+# TODO(craigcitro): We override to add some utility code, and to
+# update the old refresh implementation. Either push this code into
+# oauth2client or drop oauth2client.
+class GceAssertionCredentials(oauth2client.gce.AppAssertionCredentials):
+  """Assertion credentials for GCE instances."""
+
+  def __init__(self, scopes=None, service_account_name='default', **kwds):
+    if not util.DetectGce():
+      raise exceptions.ResourceUnavailableError(
+          'GCE credentials requested outside a GCE instance')
+    self.__service_account_name = service_account_name
+    if scopes:
+      scope_ls = util.NormalizeScopes(scopes)
+      instance_scopes = self.GetInstanceScopes()
+      if not scope_ls.issubset(instance_scopes):
+        raise exceptions.CredentialsError(
+            'Instance did not have access to scopes %s' % (
+                sorted(list(scope_ls - instance_scopes)),))
+    else:
+      scopes = self.GetInstanceScopes()
+    super(GceAssertionCredentials, self).__init__(scopes, **kwds)
+
+  @classmethod
+  def Get(cls, *args, **kwds):
+    try:
+      return cls(*args, **kwds)
+    except exceptions.Error:
+      return None
+
+  def GetInstanceScopes(self):
+    # Extra header requirement can be found here:
+    # https://developers.google.com/compute/docs/metadata
+    scopes_uri = (
+        'http://metadata.google.internal/computeMetadata/v1/instance/'
+        'service-accounts/%s/scopes') % self.__service_account_name
+    additional_headers = {'X-Google-Metadata-Request': 'True'}
+    request = urllib2.Request(scopes_uri, headers=additional_headers)
+    try:
+      response = urllib2.urlopen(request)
+    except urllib2.URLError as e:
+      raise exceptions.CommunicationError(
+          'Could not reach metadata service: %s' % e.reason)
+    return util.NormalizeScopes(scope.strip() for scope in response.readlines())
+
+  def _refresh(self, do_request):
+    """Refresh self.access_token.
+
+    Args:
+      do_request: A function matching httplib2.Http.request's signature.
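+
+    Raises:
+      CredentialsError: If the metadata service returns a non-OK status
+          or a response that is not valid JSON.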
+ """ + token_uri = ( + 'http://metadata.google.internal/computeMetadata/v1beta1/instance/' + 'service-accounts/%s/token') % self.__service_account_name + extra_headers = {'X-Google-Metadata-Request': 'True'} + response, content = do_request(token_uri, headers=extra_headers) + if response.status != httplib.OK: + raise exceptions.CredentialsError( + 'Error refreshing credentials: %s' % content) + try: + credential_info = json.loads(content) + except ValueError: + raise exceptions.CredentialsError( + 'Invalid credentials response: %s' % content) + self.access_token = credential_info['access_token'] + + +# TODO(craigcitro): Currently, we can't even *load* +# `oauth2client.appengine` without being on appengine, because of how +# it handles imports. Fix that by splitting that module into +# GAE-specific and GAE-independent bits, and guarding imports. +class GaeAssertionCredentials(oauth2client.client.AssertionCredentials): + """Assertion credentials for Google App Engine apps.""" + + def __init__(self, scopes, **kwds): + if not util.DetectGae(): + raise exceptions.ResourceUnavailableError( + 'GCE credentials requested outside a GCE instance') + self._scopes = list(util.NormalizeScopes(scopes)) + super(GaeAssertionCredentials, self).__init__(None, **kwds) + + @classmethod + def Get(cls, *args, **kwds): + try: + return cls(*args, **kwds) + except exceptions.Error: + return None + + @classmethod + def from_json(cls, json_data): + data = json.loads(json_data) + return GaeAssertionCredentials(data['_scopes']) + + def _refresh(self, _): + """Refresh self.access_token. + + Args: + _: (ignored) A function matching httplib2.Http.request's signature. + """ + from google.appengine.api import app_identity + try: + token, _ = app_identity.get_access_token(self._scopes) + except app_identity.Error as e: + raise exceptions.CredentialsError(str(e)) + self.access_token = token + + +# TODO(craigcitro): Switch this from taking a path to taking a stream. +def CredentialsFromFile(path, client_info): + """Read credentials from a file.""" + credential_store = oauth2client.multistore_file.get_credential_storage( + path, + client_info['client_id'], + client_info['user_agent'], + client_info['scope']) + if hasattr(FLAGS, 'auth_local_webserver'): + FLAGS.auth_local_webserver = False + credentials = credential_store.get() + if credentials is None or credentials.invalid: + print 'Generating new OAuth credentials ...' + while True: + # If authorization fails, we want to retry, rather than let this + # cascade up and get caught elsewhere. If users want out of the + # retry loop, they can ^C. + try: + flow = oauth2client.client.OAuth2WebServerFlow(**client_info) + credentials = oauth2client.tools.run(flow, credential_store) + break + except (oauth2client.client.FlowExchangeError, SystemExit) as e: + # Here SystemExit is "no credential at all", and the + # FlowExchangeError is "invalid" -- usually because you reused + # a token. 
+ print 'Invalid authorization: %s' % (e,) + except httplib2.HttpLib2Error as e: + print 'Communication error: %s' % (e,) + raise exceptions.CredentialsError( + 'Communication error creating credentials: %s' % e) + return credentials diff --git a/_gcloud_vendor/apitools/base/py/credentials_lib_test.py b/_gcloud_vendor/apitools/base/py/credentials_lib_test.py new file mode 100644 index 000000000000..e4e461c1e9ca --- /dev/null +++ b/_gcloud_vendor/apitools/base/py/credentials_lib_test.py @@ -0,0 +1,54 @@ +#!/usr/bin/env python + + +import httplib +import re +import StringIO +import urllib2 + +import mock +import unittest2 + +from apitools.base.py import credentials_lib +from apitools.base.py import util + + +def CreateUriValidator(uri_regexp, content=''): + def CheckUri(uri, headers=None): + if 'X-Google-Metadata-Request' not in headers: + raise ValueError('Missing required header') + if uri_regexp.match(uri): + message = content + status = httplib.OK + else: + message = 'Expected uri matching pattern %s' % uri_regexp.pattern + status = httplib.BAD_REQUEST + return type('HttpResponse', (object,), {'status': status})(), message + return CheckUri + + +class CredentialsLibTest(unittest2.TestCase): + + def _GetServiceCreds(self, service_account_name=None, scopes=None): + scopes = scopes or ['scope1'] + kwargs = {} + if service_account_name is not None: + kwargs['service_account_name'] = service_account_name + service_account_name = service_account_name or 'default' + with mock.patch.object(urllib2, 'urlopen', autospec=True) as urllib_mock: + urllib_mock.return_value = StringIO.StringIO(''.join(scopes)) + with mock.patch.object(util, 'DetectGce', autospec=True) as mock_util: + mock_util.return_value = True + validator = CreateUriValidator( + re.compile(r'.*/%s/.*' % service_account_name), + content='{"access_token": "token"}') + credentials = credentials_lib.GceAssertionCredentials(scopes, **kwargs) + self.assertIsNone(credentials._refresh(validator)) + + def testGceServiceAccounts(self): + self._GetServiceCreds() + self._GetServiceCreds(service_account_name='my_service_account') + + +if __name__ == '__main__': + unittest2.main() diff --git a/_gcloud_vendor/apitools/base/py/encoding.py b/_gcloud_vendor/apitools/base/py/encoding.py new file mode 100644 index 000000000000..c44897f12757 --- /dev/null +++ b/_gcloud_vendor/apitools/base/py/encoding.py @@ -0,0 +1,486 @@ +#!/usr/bin/env python +"""Common code for converting proto to other formats, such as JSON.""" + +import base64 +import collections +import datetime +import json +import logging + + +from protorpc import message_types +from protorpc import messages +from protorpc import protojson + +from apitools.base.py import exceptions + +__all__ = [ + 'CopyProtoMessage', + 'JsonToMessage', + 'MessageToJson', + 'DictToMessage', + 'MessageToDict', + 'PyValueToMessage', + 'MessageToPyValue', + 'MessageToRepr', +] + + +_Codec = collections.namedtuple('_Codec', ['encoder', 'decoder']) +CodecResult = collections.namedtuple('CodecResult', ['value', 'complete']) + + +# TODO(craigcitro): Make these non-global. 
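+# Module-level registries for the decorators below: a map from message
+# class to its unrecognized-field container, and custom codec maps
+# keyed by message class, field instance, and field type respectively.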
+_UNRECOGNIZED_FIELD_MAPPINGS = {} +_CUSTOM_MESSAGE_CODECS = {} +_CUSTOM_FIELD_CODECS = {} +_FIELD_TYPE_CODECS = {} + + +def MapUnrecognizedFields(field_name): + """Register field_name as a container for unrecognized fields in message.""" + def Register(cls): + _UNRECOGNIZED_FIELD_MAPPINGS[cls] = field_name + return cls + return Register + + +def RegisterCustomMessageCodec(encoder, decoder): + """Register a custom encoder/decoder for this message class.""" + def Register(cls): + _CUSTOM_MESSAGE_CODECS[cls] = _Codec(encoder=encoder, decoder=decoder) + return cls + return Register + + +def RegisterCustomFieldCodec(encoder, decoder): + """Register a custom encoder/decoder for this field.""" + def Register(field): + _CUSTOM_FIELD_CODECS[field] = _Codec(encoder=encoder, decoder=decoder) + return field + return Register + + +def RegisterFieldTypeCodec(encoder, decoder): + """Register a custom encoder/decoder for all fields of this type.""" + def Register(field_type): + _FIELD_TYPE_CODECS[field_type] = _Codec(encoder=encoder, decoder=decoder) + return field_type + return Register + + +# TODO(craigcitro): Delete this function with the switch to proto2. +def CopyProtoMessage(message): + codec = protojson.ProtoJson() + return codec.decode_message(type(message), codec.encode_message(message)) + + +def MessageToJson(message, include_fields=None): + """Convert the given message to JSON.""" + result = _ProtoJsonApiTools.Get().encode_message(message) + return _IncludeFields(result, message, include_fields) + + +def JsonToMessage(message_type, message): + """Convert the given JSON to a message of type message_type.""" + return _ProtoJsonApiTools.Get().decode_message(message_type, message) + + +# TODO(craigcitro): Do this directly, instead of via JSON. +def DictToMessage(d, message_type): + """Convert the given dictionary to a message of type message_type.""" + return JsonToMessage(message_type, json.dumps(d)) + + +def MessageToDict(message): + """Convert the given message to a dictionary.""" + return json.loads(MessageToJson(message)) + + +def PyValueToMessage(message_type, value): + """Convert the given python value to a message of type message_type.""" + return JsonToMessage(message_type, json.dumps(value)) + + +def MessageToPyValue(message): + """Convert the given message to a python value.""" + return json.loads(MessageToJson(message)) + + +def MessageToRepr(msg, multiline=False, **kwargs): + """Return a repr-style string for a protorpc message. + + protorpc.Message.__repr__ does not return anything that could be considered + python code. Adding this function lets us print a protorpc message in such + a way that it could be pasted into code later, and used to compare against + other things. + + Args: + msg: protorpc.Message, the message to be repr'd. + multiline: bool, True if the returned string should have each field + assignment on its own line. + **kwargs: {str:str}, Additional flags for how to format the string. + + Known **kwargs: + shortstrings: bool, True if all string values should be truncated at + 100 characters, since when mocking the contents typically don't matter + except for IDs, and IDs are usually less than 100 characters. + no_modules: bool, True if the long module name should not be printed with + each type. + + Returns: + str, A string of valid python (assuming the right imports have been made) + that recreates the message passed into this function. + """ + + # TODO(user): craigcitro suggests a pretty-printer from apitools/gen. 
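+  # 'indent' rides along in **kwargs; IndentKwargs bumps it by four
+  # spaces so nested messages and lists are printed one level deeper.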
+ + indent = kwargs.get('indent', 0) + + def IndentKwargs(kwargs): + kwargs = dict(kwargs) + kwargs['indent'] = kwargs.get('indent', 0) + 4 + return kwargs + + if isinstance(msg, list): + s = '[' + for item in msg: + if multiline: + s += '\n' + ' '*(indent + 4) + s += MessageToRepr( + item, multiline=multiline, **IndentKwargs(kwargs)) + ',' + if multiline: + s += '\n' + ' '*indent + s += ']' + return s + + if isinstance(msg, messages.Message): + s = type(msg).__name__ + '(' + if not kwargs.get('no_modules'): + s = msg.__module__ + '.' + s + names = sorted([field.name for field in msg.all_fields()]) + for name in names: + field = msg.field_by_name(name) + if multiline: + s += '\n' + ' '*(indent + 4) + value = getattr(msg, field.name) + s += field.name + '=' + MessageToRepr( + value, multiline=multiline, **IndentKwargs(kwargs)) + ',' + if multiline: + s += '\n'+' '*indent + s += ')' + return s + + if isinstance(msg, basestring): + if kwargs.get('shortstrings') and len(msg) > 100: + msg = msg[:100] + + if isinstance(msg, datetime.datetime): + + class SpecialTZInfo(datetime.tzinfo): + + def __init__(self, offset): + super(SpecialTZInfo, self).__init__() + self.offset = offset + + def __repr__(self): + s = 'TimeZoneOffset(' + repr(self.offset) + ')' + if not kwargs.get('no_modules'): + s = 'protorpc.util.' + s + return s + + msg = datetime.datetime( + msg.year, msg.month, msg.day, msg.hour, msg.minute, msg.second, + msg.microsecond, SpecialTZInfo(msg.tzinfo.utcoffset(0))) + + return repr(msg) + + +def _GetField(message, field_path): + for field in field_path: + if field not in dir(message): + raise KeyError('no field "%s"' % field) + message = getattr(message, field) + return message + + +def _SetField(dictblob, field_path, value): + for field in field_path[:-1]: + dictblob[field] = {} + dictblob = dictblob[field] + dictblob[field_path[-1]] = value + + +def _IncludeFields(encoded_message, message, include_fields): + """Add the requested fields to the encoded message.""" + if include_fields is None: + return encoded_message + result = json.loads(encoded_message) + for field_name in include_fields: + try: + value = _GetField(message, field_name.split('.')) + nullvalue = None + if isinstance(value, list): + nullvalue = [] + except KeyError: + raise exceptions.InvalidDataError( + 'No field named %s in message of type %s' % ( + field_name, type(message))) + _SetField(result, field_name.split('.'), nullvalue) + return json.dumps(result) + + +def _GetFieldCodecs(field, attr): + result = [ + getattr(_CUSTOM_FIELD_CODECS.get(field), attr, None), + getattr(_FIELD_TYPE_CODECS.get(type(field)), attr, None), + ] + return [x for x in result if x is not None] + + +class _ProtoJsonApiTools(protojson.ProtoJson): + """JSON encoder used by apitools clients.""" + _INSTANCE = None + + @classmethod + def Get(cls): + if cls._INSTANCE is None: + cls._INSTANCE = cls() + return cls._INSTANCE + + def decode_message(self, message_type, encoded_message): + if message_type in _CUSTOM_MESSAGE_CODECS: + return _CUSTOM_MESSAGE_CODECS[message_type].decoder(encoded_message) + # We turn off the default logging in protorpc. We may want to + # remove this later. 
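+    # Save and restore the root logger level so this suppression does
+    # not leak into the caller's logging configuration.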
+ old_level = logging.getLogger().level + logging.getLogger().setLevel(logging.ERROR) + result = super(_ProtoJsonApiTools, self).decode_message( + message_type, encoded_message) + logging.getLogger().setLevel(old_level) + result = _ProcessUnknownEnums(result, encoded_message) + result = _ProcessUnknownMessages(result, encoded_message) + return _DecodeUnknownFields(result, encoded_message) + + def decode_field(self, field, value): + """Decode the given JSON value. + + Args: + field: a messages.Field for the field we're decoding. + value: a python value we'd like to decode. + + Returns: + A value suitable for assignment to field. + """ + for decoder in _GetFieldCodecs(field, 'decoder'): + result = decoder(field, value) + value = result.value + if result.complete: + return value + if isinstance(field, messages.MessageField): + field_value = self.decode_message(field.message_type, json.dumps(value)) + elif isinstance(field, messages.EnumField): + try: + field_value = super(_ProtoJsonApiTools, self).decode_field(field, value) + except messages.DecodeError: + if not isinstance(value, basestring): + raise + field_value = None + else: + field_value = super(_ProtoJsonApiTools, self).decode_field(field, value) + return field_value + + def encode_message(self, message): + if isinstance(message, messages.FieldList): + return '[%s]' % (', '.join(self.encode_message(x) for x in message)) + if type(message) in _CUSTOM_MESSAGE_CODECS: + return _CUSTOM_MESSAGE_CODECS[type(message)].encoder(message) + message = _EncodeUnknownFields(message) + return super(_ProtoJsonApiTools, self).encode_message(message) + + def encode_field(self, field, value): + """Encode the given value as JSON. + + Args: + field: a messages.Field for the field we're encoding. + value: a value for field. + + Returns: + A python value suitable for json.dumps. + """ + for encoder in _GetFieldCodecs(field, 'encoder'): + result = encoder(field, value) + value = result.value + if result.complete: + return value + if (isinstance(field, messages.MessageField) and + not isinstance(field, message_types.DateTimeField)): + value = json.loads(self.encode_message(value)) + return super(_ProtoJsonApiTools, self).encode_field(field, value) + + +# TODO(craigcitro): Fold this and _IncludeFields in as codecs. +def _DecodeUnknownFields(message, encoded_message): + """Rewrite unknown fields in message into message.destination.""" + destination = _UNRECOGNIZED_FIELD_MAPPINGS.get(type(message)) + if destination is None: + return message + pair_field = message.field_by_name(destination) + if not isinstance(pair_field, messages.MessageField): + raise exceptions.InvalidDataFromServerError( + 'Unrecognized fields must be mapped to a compound ' + 'message type.') + pair_type = pair_field.message_type + # TODO(craigcitro): Add more error checking around the pair + # type being exactly what we suspect (field names, etc). + if isinstance(pair_type.value, messages.MessageField): + new_values = _DecodeUnknownMessages( + message, json.loads(encoded_message), pair_type) + else: + new_values = _DecodeUnrecognizedFields(message, pair_type) + setattr(message, destination, new_values) + # We could probably get away with not setting this, but + # why not clear it? 
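+  # '_Message__unrecognized_fields' is the name-mangled spelling of the
+  # Message-private attribute; emptying it here drops the raw values we
+  # just copied into the destination field.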
+ setattr(message, '_Message__unrecognized_fields', {}) + return message + + +def _DecodeUnknownMessages(message, encoded_message, pair_type): + """Process unknown fields in encoded_message of a message type.""" + field_type = pair_type.value.type + new_values = [] + all_field_names = [x.name for x in message.all_fields()] + for name, value_dict in encoded_message.iteritems(): + if name in all_field_names: + continue + value = PyValueToMessage(field_type, value_dict) + new_pair = pair_type(key=name, value=value) + new_values.append(new_pair) + return new_values + + +def _DecodeUnrecognizedFields(message, pair_type): + """Process unrecognized fields in message.""" + new_values = [] + for unknown_field in message.all_unrecognized_fields(): + # TODO(craigcitro): Consider validating the variant if + # the assignment below doesn't take care of it. It may + # also be necessary to check it in the case that the + # type has multiple encodings. + value, _ = message.get_unrecognized_field_info(unknown_field) + value_type = pair_type.field_by_name('value') + if isinstance(value_type, messages.MessageField): + decoded_value = DictToMessage(value, pair_type.value.message_type) + else: + decoded_value = value + new_pair = pair_type(key=str(unknown_field), value=decoded_value) + new_values.append(new_pair) + return new_values + + +def _EncodeUnknownFields(message): + """Remap unknown fields in message out of message.source.""" + source = _UNRECOGNIZED_FIELD_MAPPINGS.get(type(message)) + if source is None: + return message + result = CopyProtoMessage(message) + pairs_field = message.field_by_name(source) + if not isinstance(pairs_field, messages.MessageField): + raise exceptions.InvalidUserInputError( + 'Invalid pairs field %s' % pairs_field) + pairs_type = pairs_field.message_type + value_variant = pairs_type.field_by_name('value').variant + pairs = getattr(message, source) + for pair in pairs: + if value_variant == messages.Variant.MESSAGE: + encoded_value = MessageToDict(pair.value) + else: + encoded_value = pair.value + result.set_unrecognized_field(pair.key, encoded_value, value_variant) + setattr(result, source, []) + return result + + +def _SafeEncodeBytes(field, value): + """Encode the bytes in value as urlsafe base64.""" + try: + if field.repeated: + result = [base64.urlsafe_b64encode(byte) for byte in value] + else: + result = base64.urlsafe_b64encode(value) + complete = True + except TypeError: + result = value + complete = False + return CodecResult(value=result, complete=complete) + + +def _SafeDecodeBytes(unused_field, value): + """Decode the urlsafe base64 value into bytes.""" + try: + result = base64.urlsafe_b64decode(str(value)) + complete = True + except TypeError: + result = value + complete = False + return CodecResult(value=result, complete=complete) + + +def _ProcessUnknownEnums(message, encoded_message): + """Add unknown enum values from encoded_message as unknown fields. + + ProtoRPC diverges from the usual protocol buffer behavior here and + doesn't allow unknown fields. Throwing on unknown fields makes it + impossible to let servers add new enum values and stay compatible + with older clients, which isn't reasonable for us. We simply store + unrecognized enum values as unknown fields, and all is well. + + Args: + message: Proto message we've decoded thus far. + encoded_message: JSON string we're decoding. + + Returns: + message, with any unknown enums stored as unrecognized fields. 
+ """ + if not encoded_message: + return message + decoded_message = json.loads(encoded_message) + for field in message.all_fields(): + if (isinstance(field, messages.EnumField) and + field.name in decoded_message and + message.get_assigned_value(field.name) is None): + message.set_unrecognized_field(field.name, decoded_message[field.name], + messages.Variant.ENUM) + return message + + +def _ProcessUnknownMessages(message, encoded_message): + """Store any remaining unknown fields as strings. + + ProtoRPC currently ignores unknown values for which no type can be + determined (and logs a "No variant found" message). For the purposes + of reserializing, this is quite harmful (since it throws away + information). Here we simply add those as unknown fields of type + string (so that they can easily be reserialized). + + Args: + message: Proto message we've decoded thus far. + encoded_message: JSON string we're decoding. + + Returns: + message, with any remaining unrecognized fields saved. + """ + if not encoded_message: + return message + decoded_message = json.loads(encoded_message) + message_fields = [x.name for x in message.all_fields()] + list( + message.all_unrecognized_fields()) + missing_fields = [x for x in decoded_message.iterkeys() + if x not in message_fields] + for field_name in missing_fields: + message.set_unrecognized_field(field_name, decoded_message[field_name], + messages.Variant.STRING) + return message + + +RegisterFieldTypeCodec(_SafeEncodeBytes, _SafeDecodeBytes)(messages.BytesField) diff --git a/_gcloud_vendor/apitools/base/py/encoding_test.py b/_gcloud_vendor/apitools/base/py/encoding_test.py new file mode 100644 index 000000000000..77224d6e3582 --- /dev/null +++ b/_gcloud_vendor/apitools/base/py/encoding_test.py @@ -0,0 +1,269 @@ +#!/usr/bin/env python + + +import base64 +import datetime +import json + +from protorpc import message_types +from protorpc import messages +from protorpc import util +import unittest2 + +from apitools.base.py import encoding + + +class SimpleMessage(messages.Message): + field = messages.StringField(1) + repfield = messages.StringField(2, repeated=True) + + +class BytesMessage(messages.Message): + field = messages.BytesField(1) + repfield = messages.BytesField(2, repeated=True) + + +class TimeMessage(messages.Message): + timefield = message_types.DateTimeField(3) + + +@encoding.MapUnrecognizedFields('additional_properties') +class AdditionalPropertiesMessage(messages.Message): + + class AdditionalProperty(messages.Message): + key = messages.StringField(1) + value = messages.StringField(2) + + additional_properties = messages.MessageField( + AdditionalProperty, 1, repeated=True) + + +class CompoundPropertyType(messages.Message): + index = messages.IntegerField(1) + name = messages.StringField(2) + + +class MessageWithEnum(messages.Message): + + class ThisEnum(messages.Enum): + VALUE_ONE = 1 + VALUE_TWO = 2 + + field_one = messages.EnumField(ThisEnum, 1) + field_two = messages.EnumField(ThisEnum, 2, default=ThisEnum.VALUE_TWO) + ignored_field = messages.EnumField(ThisEnum, 3) + + +@encoding.MapUnrecognizedFields('additional_properties') +class AdditionalMessagePropertiesMessage(messages.Message): + + class AdditionalProperty(messages.Message): + key = messages.StringField(1) + value = messages.MessageField(CompoundPropertyType, 2) + + additional_properties = messages.MessageField( + 'AdditionalProperty', 1, repeated=True) + + +class HasNestedMessage(messages.Message): + nested = messages.MessageField(AdditionalPropertiesMessage, 1) + nested_list 
= messages.StringField(2, repeated=True) + + +class ExtraNestedMessage(messages.Message): + nested = messages.MessageField(HasNestedMessage, 1) + + +class EncodingTest(unittest2.TestCase): + + def testCopyProtoMessage(self): + msg = SimpleMessage(field='abc') + new_msg = encoding.CopyProtoMessage(msg) + self.assertEqual(msg.field, new_msg.field) + msg.field = 'def' + self.assertNotEqual(msg.field, new_msg.field) + + def testBytesEncoding(self): + b64_str = 'AAc+' + b64_msg = '{"field": "%s"}' % b64_str + urlsafe_b64_str = 'AAc-' + urlsafe_b64_msg = '{"field": "%s"}' % urlsafe_b64_str + data = base64.b64decode(b64_str) + msg = BytesMessage(field=data) + self.assertEqual(msg, encoding.JsonToMessage(BytesMessage, urlsafe_b64_msg)) + self.assertEqual(msg, encoding.JsonToMessage(BytesMessage, b64_msg)) + self.assertEqual(urlsafe_b64_msg, encoding.MessageToJson(msg)) + + enc_rep_msg = '{"repfield": ["%(b)s", "%(b)s"]}' % { + 'b': urlsafe_b64_str, + } + rep_msg = BytesMessage(repfield=[data, data]) + self.assertEqual(rep_msg, encoding.JsonToMessage(BytesMessage, enc_rep_msg)) + self.assertEqual(enc_rep_msg, encoding.MessageToJson(rep_msg)) + + def testIncludeFields(self): + msg = SimpleMessage() + self.assertEqual('{}', encoding.MessageToJson(msg)) + self.assertEqual( + '{"field": null}', + encoding.MessageToJson(msg, include_fields=['field'])) + self.assertEqual( + '{"repfield": []}', + encoding.MessageToJson(msg, include_fields=['repfield'])) + + def testNestedIncludeFields(self): + msg = HasNestedMessage( + nested=AdditionalPropertiesMessage( + additional_properties=[])) + self.assertEqual( + '{"nested": null}', + encoding.MessageToJson(msg, include_fields=['nested'])) + self.assertEqual( + '{"nested": {"additional_properties": []}}', + encoding.MessageToJson( + msg, include_fields=['nested.additional_properties'])) + msg = ExtraNestedMessage(nested=msg) + self.assertEqual( + '{"nested": {"nested": null}}', + encoding.MessageToJson(msg, include_fields=['nested.nested'])) + self.assertEqual( + '{"nested": {"nested_list": []}}', + encoding.MessageToJson(msg, include_fields=['nested.nested_list'])) + self.assertEqual( + '{"nested": {"nested": {"additional_properties": []}}}', + encoding.MessageToJson( + msg, include_fields=['nested.nested.additional_properties'])) + + def testAdditionalPropertyMapping(self): + msg = AdditionalPropertiesMessage() + msg.additional_properties = [ + AdditionalPropertiesMessage.AdditionalProperty( + key='key_one', value='value_one'), + AdditionalPropertiesMessage.AdditionalProperty( + key='key_two', value='value_two'), + ] + + encoded_msg = encoding.MessageToJson(msg) + self.assertEqual( + {'key_one': 'value_one', 'key_two': 'value_two'}, + json.loads(encoded_msg)) + + new_msg = encoding.JsonToMessage(type(msg), encoded_msg) + self.assertEqual( + set(('key_one', 'key_two')), + set([x.key for x in new_msg.additional_properties])) + self.assertIsNot(msg, new_msg) + + new_msg.additional_properties.pop() + self.assertEqual(1, len(new_msg.additional_properties)) + self.assertEqual(2, len(msg.additional_properties)) + + def testAdditionalMessageProperties(self): + json_msg = '{"input": {"index": 0, "name": "output"}}' + result = encoding.JsonToMessage( + AdditionalMessagePropertiesMessage, json_msg) + self.assertEqual(1, len(result.additional_properties)) + self.assertEqual(0, result.additional_properties[0].value.index) + + def testNestedFieldMapping(self): + nested_msg = AdditionalPropertiesMessage() + nested_msg.additional_properties = [ + 
AdditionalPropertiesMessage.AdditionalProperty( + key='key_one', value='value_one'), + AdditionalPropertiesMessage.AdditionalProperty( + key='key_two', value='value_two'), + ] + msg = HasNestedMessage(nested=nested_msg) + + encoded_msg = encoding.MessageToJson(msg) + self.assertEqual( + {'nested': {'key_one': 'value_one', 'key_two': 'value_two'}}, + json.loads(encoded_msg)) + + new_msg = encoding.JsonToMessage(type(msg), encoded_msg) + self.assertEqual( + set(('key_one', 'key_two')), + set([x.key for x in new_msg.nested.additional_properties])) + + new_msg.nested.additional_properties.pop() + self.assertEqual(1, len(new_msg.nested.additional_properties)) + self.assertEqual(2, len(msg.nested.additional_properties)) + + def testValidEnums(self): + message_json = '{"field_one": "VALUE_ONE"}' + message = encoding.JsonToMessage(MessageWithEnum, message_json) + self.assertEqual(MessageWithEnum.ThisEnum.VALUE_ONE, message.field_one) + self.assertEqual(MessageWithEnum.ThisEnum.VALUE_TWO, message.field_two) + self.assertEqual(json.loads(message_json), + json.loads(encoding.MessageToJson(message))) + + def testIgnoredEnums(self): + json_with_typo = '{"field_one": "VALUE_OEN"}' + message = encoding.JsonToMessage(MessageWithEnum, json_with_typo) + self.assertEqual(None, message.field_one) + self.assertEqual(('VALUE_OEN', messages.Variant.ENUM), + message.get_unrecognized_field_info('field_one')) + self.assertEqual(json.loads(json_with_typo), + json.loads(encoding.MessageToJson(message))) + + empty_json = '' + message = encoding.JsonToMessage(MessageWithEnum, empty_json) + self.assertEqual(None, message.field_one) + + def testIgnoredEnumsWithDefaults(self): + json_with_typo = '{"field_two": "VALUE_OEN"}' + message = encoding.JsonToMessage(MessageWithEnum, json_with_typo) + self.assertEqual(MessageWithEnum.ThisEnum.VALUE_TWO, message.field_two) + self.assertEqual(json.loads(json_with_typo), + json.loads(encoding.MessageToJson(message))) + + def testUnknownNestedRoundtrip(self): + json_message = '{"field": "abc", "submessage": {"a": 1, "b": "foo"}}' + message = encoding.JsonToMessage(SimpleMessage, json_message) + self.assertEqual(json.loads(json_message), + json.loads(encoding.MessageToJson(message))) + + def testJsonDatetime(self): + msg = TimeMessage(timefield=datetime.datetime( + 2014, 7, 2, 23, 33, 25, 541000, + tzinfo=util.TimeZoneOffset(datetime.timedelta(0)))) + self.assertEqual( + '{"timefield": "2014-07-02T23:33:25.541000+00:00"}', + encoding.MessageToJson(msg)) + + def testMessageToRepr(self): + # pylint:disable=bad-whitespace, Using the same string returned by + # MessageToRepr, with the module names fixed. + msg = SimpleMessage(field='field',repfield=['field','field',],) + self.assertEqual( + encoding.MessageToRepr(msg), + r"%s.SimpleMessage(field='field',repfield=['field','field',],)" % ( + __name__,)) + self.assertEqual( + encoding.MessageToRepr(msg, no_modules=True), + r"SimpleMessage(field='field',repfield=['field','field',],)") + + def testMessageToReprWithTime(self): + msg = TimeMessage(timefield=datetime.datetime( + 2014, 7, 2, 23, 33, 25, 541000, + tzinfo=util.TimeZoneOffset(datetime.timedelta(0)))) + self.assertEqual( + encoding.MessageToRepr(msg, multiline=True), + # pylint:disable=line-too-long, Too much effort to make MessageToRepr + # wrap lines properly. 
+ """\ +%s.TimeMessage( + timefield=datetime.datetime(2014, 7, 2, 23, 33, 25, 541000, tzinfo=protorpc.util.TimeZoneOffset(datetime.timedelta(0))), +)""" % __name__) + self.assertEqual( + encoding.MessageToRepr(msg, multiline=True, no_modules=True), + # pylint:disable=line-too-long, Too much effort to make MessageToRepr + # wrap lines properly. + """\ +TimeMessage( + timefield=datetime.datetime(2014, 7, 2, 23, 33, 25, 541000, tzinfo=TimeZoneOffset(datetime.timedelta(0))), +)""") + + +if __name__ == '__main__': + unittest2.main() diff --git a/_gcloud_vendor/apitools/base/py/exceptions.py b/_gcloud_vendor/apitools/base/py/exceptions.py new file mode 100644 index 000000000000..55faa4970ebb --- /dev/null +++ b/_gcloud_vendor/apitools/base/py/exceptions.py @@ -0,0 +1,100 @@ +#!/usr/bin/env python +"""Exceptions for generated client libraries.""" + + +class Error(Exception): + """Base class for all exceptions.""" + + +class TypecheckError(Error, TypeError): + """An object of an incorrect type is provided.""" + + +class NotFoundError(Error): + """A specified resource could not be found.""" + + +class UserError(Error): + """Base class for errors related to user input.""" + + +class InvalidDataError(Error): + """Base class for any invalid data error.""" + + +class CommunicationError(Error): + """Any communication error talking to an API server.""" + + +class HttpError(CommunicationError): + """Error making a request. Soon to be HttpError.""" + + def __init__(self, response, content, url): + super(HttpError, self).__init__() + self.response = response + self.content = content + self.url = url + + def __str__(self): + content = self.content.decode('ascii', 'replace') + return 'HttpError accessing <%s>: response: <%s>, content <%s>' % ( + self.url, self.response, content) + + @property + def status_code(self): + # TODO(craigcitro): Turn this into something better than a + # KeyError if there is no status. 
+ return int(self.response['status']) + + @classmethod + def FromResponse(cls, http_response): + return cls(http_response.info, http_response.content, + http_response.request_url) + + +class InvalidUserInputError(InvalidDataError): + """User-provided input is invalid.""" + + +class InvalidDataFromServerError(InvalidDataError, CommunicationError): + """Data received from the server is malformed.""" + + +class BatchError(Error): + """Error generated while constructing a batch request.""" + + +class ConfigurationError(Error): + """Base class for configuration errors.""" + + +class GeneratedClientError(Error): + """The generated client configuration is invalid.""" + + +class ConfigurationValueError(UserError): + """Some part of the user-specified client configuration is invalid.""" + + +class ResourceUnavailableError(Error): + """User requested an unavailable resource.""" + + +class CredentialsError(Error): + """Errors related to invalid credentials.""" + + +class TransferError(CommunicationError): + """Errors related to transfers.""" + + +class TransferInvalidError(TransferError): + """The given transfer is invalid.""" + + +class NotYetImplementedError(GeneratedClientError): + """This functionality is not yet implemented.""" + + +class StreamExhausted(Error): + """Attempted to read more bytes from a stream than were available.""" diff --git a/_gcloud_vendor/apitools/base/py/extra_types.py b/_gcloud_vendor/apitools/base/py/extra_types.py new file mode 100644 index 000000000000..4b15683a8d49 --- /dev/null +++ b/_gcloud_vendor/apitools/base/py/extra_types.py @@ -0,0 +1,283 @@ +#!/usr/bin/env python +"""Extra types understood by apitools. + +This file will be replaced by a .proto file when we switch to proto2 +from protorpc. +""" + +import collections +import datetime +import json +import numbers + +from protorpc import message_types +from protorpc import messages +from protorpc import protojson + +from apitools.base.py import encoding +from apitools.base.py import exceptions +from apitools.base.py import util + +__all__ = [ + 'DateField', + 'DateTimeMessage', + 'JsonArray', + 'JsonObject', + 'JsonValue', + 'JsonProtoEncoder', + 'JsonProtoDecoder', +] + +# We import from protorpc. +# pylint:disable=invalid-name +DateTimeMessage = message_types.DateTimeMessage +# pylint:enable=invalid-name + + +class DateField(messages.Field): + """Field definition for Date values.""" + + # We insert our own metaclass here to avoid letting ProtoRPC + # register this as the default field type for strings. + # * since ProtoRPC does this via metaclasses, we don't have any + # choice but to use one ourselves + # * since a subclass's metaclass must inherit from its superclass's + # metaclass, we're forced to have this hard-to-read inheritance. 
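+  # The override below bypasses messages.Field.__metaclass__.__init__
+  # (where that registration happens) by delegating straight to its
+  # superclass's __init__.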
+  #
+  class __metaclass__(messages.Field.__metaclass__):  # pylint: disable=invalid-name
+
+    def __init__(cls, name, bases, dct):  # pylint: disable=no-self-argument
+      super(messages.Field.__metaclass__, cls).__init__(name, bases, dct)
+
+  VARIANTS = frozenset([messages.Variant.STRING])
+  DEFAULT_VARIANT = messages.Variant.STRING
+  type = datetime.date
+
+
+def _ValidateJsonValue(json_value):
+  entries = [(f, json_value.get_assigned_value(f.name))
+             for f in json_value.all_fields()]
+  assigned_entries = [(f, value) for f, value in entries if value is not None]
+  if len(assigned_entries) != 1:
+    raise exceptions.InvalidDataError('Malformed JsonValue: %s' % json_value)
+
+
+def _JsonValueToPythonValue(json_value):
+  """Convert the given JsonValue to a json string."""
+  util.Typecheck(json_value, JsonValue)
+  _ValidateJsonValue(json_value)
+  if json_value.is_null:
+    return None
+  entries = [(f, json_value.get_assigned_value(f.name))
+             for f in json_value.all_fields()]
+  assigned_entries = [(f, value) for f, value in entries if value is not None]
+  field, value = assigned_entries[0]
+  if not isinstance(field, messages.MessageField):
+    return value
+  elif field.message_type is JsonObject:
+    return _JsonObjectToPythonValue(value)
+  elif field.message_type is JsonArray:
+    return _JsonArrayToPythonValue(value)
+
+
+def _JsonObjectToPythonValue(json_value):
+  util.Typecheck(json_value, JsonObject)
+  return dict([(prop.key, _JsonValueToPythonValue(prop.value)) for prop
+               in json_value.properties])
+
+
+def _JsonArrayToPythonValue(json_value):
+  util.Typecheck(json_value, JsonArray)
+  return [_JsonValueToPythonValue(e) for e in json_value.entries]
+
+
+# Inclusive bounds of the int64 range.
+_MAXINT64 = (1 << 63) - 1
+_MININT64 = -(1 << 63)
+
+
+def _PythonValueToJsonValue(py_value):
+  """Convert the given python value to a JsonValue."""
+  if py_value is None:
+    return JsonValue(is_null=True)
+  if isinstance(py_value, bool):
+    return JsonValue(boolean_value=py_value)
+  if isinstance(py_value, basestring):
+    return JsonValue(string_value=py_value)
+  if isinstance(py_value, numbers.Number):
+    if isinstance(py_value, (int, long)):
+      if _MININT64 <= py_value <= _MAXINT64:
+        return JsonValue(integer_value=py_value)
+    return JsonValue(double_value=float(py_value))
+  if isinstance(py_value, dict):
+    return JsonValue(object_value=_PythonValueToJsonObject(py_value))
+  if isinstance(py_value, collections.Iterable):
+    return JsonValue(array_value=_PythonValueToJsonArray(py_value))
+  raise exceptions.InvalidDataError(
+      'Cannot convert "%s" to JsonValue' % py_value)
+
+
+def _PythonValueToJsonObject(py_value):
+  util.Typecheck(py_value, dict)
+  return JsonObject(
+      properties=[
+          JsonObject.Property(key=key, value=_PythonValueToJsonValue(value))
+          for key, value in py_value.iteritems()])
+
+
+def _PythonValueToJsonArray(py_value):
+  return JsonArray(entries=map(_PythonValueToJsonValue, py_value))
+
+
+class JsonValue(messages.Message):
+  """Any valid JSON value."""
+  # Is this JSON object `null`?
+  is_null = messages.BooleanField(1, default=False)
+
+  # Exactly one of the following is provided if is_null is False; none
+  # should be provided if is_null is True.
+  boolean_value = messages.BooleanField(2)
+  string_value = messages.StringField(3)
+  # We keep two numeric fields to keep int64 round-trips exact.
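+  # (Large int64 values cannot be represented exactly as doubles, so
+  # forcing them through double_value would silently lose precision.)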
+ double_value = messages.FloatField(4, variant=messages.Variant.DOUBLE) + integer_value = messages.IntegerField(5, variant=messages.Variant.INT64) + # Compound types + object_value = messages.MessageField('JsonObject', 6) + array_value = messages.MessageField('JsonArray', 7) + + +class JsonObject(messages.Message): + """A JSON object value. + + Messages: + Property: A property of a JsonObject. + + Fields: + properties: A list of properties of a JsonObject. + """ + + class Property(messages.Message): + """A property of a JSON object. + + Fields: + key: Name of the property. + value: A JsonValue attribute. + """ + key = messages.StringField(1) + value = messages.MessageField(JsonValue, 2) + + properties = messages.MessageField(Property, 1, repeated=True) + + +class JsonArray(messages.Message): + """A JSON array value.""" + entries = messages.MessageField(JsonValue, 1, repeated=True) + + +_JSON_PROTO_TO_PYTHON_MAP = { + JsonArray: _JsonArrayToPythonValue, + JsonObject: _JsonObjectToPythonValue, + JsonValue: _JsonValueToPythonValue, +} +_JSON_PROTO_TYPES = tuple(_JSON_PROTO_TO_PYTHON_MAP.keys()) + + +def _JsonProtoToPythonValue(json_proto): + util.Typecheck(json_proto, _JSON_PROTO_TYPES) + return _JSON_PROTO_TO_PYTHON_MAP[type(json_proto)](json_proto) + + +def _PythonValueToJsonProto(py_value): + if isinstance(py_value, dict): + return _PythonValueToJsonObject(py_value) + if (isinstance(py_value, collections.Iterable) and + not isinstance(py_value, basestring)): + return _PythonValueToJsonArray(py_value) + return _PythonValueToJsonValue(py_value) + + +def _JsonProtoToJson(json_proto, unused_encoder=None): + return json.dumps(_JsonProtoToPythonValue(json_proto)) + + +def _JsonToJsonProto(json_data, unused_decoder=None): + return _PythonValueToJsonProto(json.loads(json_data)) + + +def _JsonToJsonValue(json_data, unused_decoder=None): + result = _PythonValueToJsonProto(json.loads(json_data)) + if isinstance(result, JsonValue): + return result + elif isinstance(result, JsonObject): + return JsonValue(object_value=result) + elif isinstance(result, JsonArray): + return JsonValue(array_value=result) + else: + raise exceptions.InvalidDataError( + 'Malformed JsonValue: %s' % json_data) + + +# pylint:disable=invalid-name +JsonProtoEncoder = _JsonProtoToJson +JsonProtoDecoder = _JsonToJsonProto +# pylint:enable=invalid-name +encoding.RegisterCustomMessageCodec( + encoder=JsonProtoEncoder, decoder=_JsonToJsonValue)(JsonValue) +encoding.RegisterCustomMessageCodec( + encoder=JsonProtoEncoder, decoder=JsonProtoDecoder)(JsonObject) +encoding.RegisterCustomMessageCodec( + encoder=JsonProtoEncoder, decoder=JsonProtoDecoder)(JsonArray) + + +def _EncodeDateTimeField(field, value): + result = protojson.ProtoJson().encode_field(field, value) + return encoding.CodecResult(value=result, complete=True) + + +def _DecodeDateTimeField(unused_field, value): + result = protojson.ProtoJson().decode_field( + message_types.DateTimeField(1), value) + return encoding.CodecResult(value=result, complete=True) + + +encoding.RegisterFieldTypeCodec(_EncodeDateTimeField, _DecodeDateTimeField)( + message_types.DateTimeField) + + +def _EncodeInt64Field(field, value): + """Handle the special case of int64 as a string.""" + capabilities = [ + messages.Variant.INT64, + messages.Variant.UINT64, + ] + if field.variant not in capabilities: + return encoding.CodecResult(value=value, complete=False) + + if field.repeated: + result = [str(x) for x in value] + else: + result = str(value) + return encoding.CodecResult(value=result, 
complete=True) + + +def _DecodeInt64Field(unused_field, value): + # Don't need to do anything special, they're decoded just fine + return encoding.CodecResult(value=value, complete=False) + +encoding.RegisterFieldTypeCodec(_EncodeInt64Field, _DecodeInt64Field)( + messages.IntegerField) + + +def _EncodeDateField(field, value): + """Encoder for datetime.date objects.""" + if field.repeated: + result = [d.isoformat() for d in value] + else: + result = value.isoformat() + return encoding.CodecResult(value=result, complete=True) + + +def _DecodeDateField(unused_field, value): + date = datetime.datetime.strptime(value, '%Y-%m-%d').date() + return encoding.CodecResult(value=date, complete=True) + +encoding.RegisterFieldTypeCodec(_EncodeDateField, _DecodeDateField)(DateField) diff --git a/_gcloud_vendor/apitools/base/py/extra_types_test.py b/_gcloud_vendor/apitools/base/py/extra_types_test.py new file mode 100644 index 000000000000..457c606a2a99 --- /dev/null +++ b/_gcloud_vendor/apitools/base/py/extra_types_test.py @@ -0,0 +1,175 @@ +#!/usr/bin/env python + + +import datetime +import json +import math + +from protorpc import messages +import unittest2 + +from apitools.base.py import encoding +from apitools.base.py import exceptions +from apitools.base.py import extra_types + + +class ExtraTypesTest(unittest2.TestCase): + + def assertRoundTrip(self, value): + if isinstance(value, extra_types._JSON_PROTO_TYPES): + self.assertEqual( + value, + extra_types._PythonValueToJsonProto( + extra_types._JsonProtoToPythonValue(value))) + else: + self.assertEqual( + value, + extra_types._JsonProtoToPythonValue( + extra_types._PythonValueToJsonProto(value))) + + def assertTranslations(self, py_value, json_proto): + self.assertEqual(py_value, extra_types._JsonProtoToPythonValue(json_proto)) + self.assertEqual(json_proto, extra_types._PythonValueToJsonProto(py_value)) + + def testInvalidProtos(self): + with self.assertRaises(exceptions.InvalidDataError): + extra_types._ValidateJsonValue(extra_types.JsonValue()) + with self.assertRaises(exceptions.InvalidDataError): + extra_types._ValidateJsonValue( + extra_types.JsonValue(is_null=True, string_value='a')) + with self.assertRaises(exceptions.InvalidDataError): + extra_types._ValidateJsonValue( + extra_types.JsonValue(integer_value=3, string_value='a')) + + def testNullEncoding(self): + self.assertTranslations(None, extra_types.JsonValue(is_null=True)) + + def testJsonNumberEncoding(self): + seventeen = extra_types.JsonValue(integer_value=17) + self.assertRoundTrip(17) + self.assertRoundTrip(seventeen) + self.assertTranslations(17, seventeen) + + json_pi = extra_types.JsonValue(double_value=math.pi) + self.assertRoundTrip(math.pi) + self.assertRoundTrip(json_pi) + self.assertTranslations(math.pi, json_pi) + + def testArrayEncoding(self): + array = [3, 'four', False] + json_array = extra_types.JsonArray(entries=[ + extra_types.JsonValue(integer_value=3), + extra_types.JsonValue(string_value='four'), + extra_types.JsonValue(boolean_value=False), + ]) + self.assertRoundTrip(array) + self.assertRoundTrip(json_array) + self.assertTranslations(array, json_array) + + def testArrayAsValue(self): + array_json = '[3, "four", false]' + array = [3, 'four', False] + value = encoding.JsonToMessage(extra_types.JsonValue, array_json) + self.assertTrue(isinstance(value, extra_types.JsonValue)) + self.assertEqual(array, encoding.MessageToPyValue(value)) + + def testObjectAsValue(self): + obj_json = '{"works": true}' + obj = {'works': True} + value = 
encoding.JsonToMessage(extra_types.JsonValue, obj_json) + self.assertTrue(isinstance(value, extra_types.JsonValue)) + self.assertEqual(obj, encoding.MessageToPyValue(value)) + + def testDictEncoding(self): + d = {'a': 6, 'b': 'eleventeen'} + json_d = extra_types.JsonObject(properties=[ + extra_types.JsonObject.Property( + key='a', value=extra_types.JsonValue(integer_value=6)), + extra_types.JsonObject.Property( + key='b', value=extra_types.JsonValue(string_value='eleventeen')), + ]) + self.assertRoundTrip(d) + # We don't know json_d will round-trip, because of randomness in + # python dictionary iteration ordering. We also need to force + # comparison as lists, since hashing protos isn't helpful. + translated_properties = extra_types._PythonValueToJsonProto(d).properties + for p in json_d.properties: + self.assertIn(p, translated_properties) + for p in translated_properties: + self.assertIn(p, json_d.properties) + + def testJsonObjectPropertyTranslation(self): + value = extra_types.JsonValue(string_value='abc') + obj = extra_types.JsonObject(properties=[ + extra_types.JsonObject.Property(key='attr_name', value=value)]) + json_value = '"abc"' + json_obj = '{"attr_name": "abc"}' + + self.assertRoundTrip(value) + self.assertRoundTrip(obj) + self.assertRoundTrip(json_value) + self.assertRoundTrip(json_obj) + + self.assertEqual(json_value, encoding.MessageToJson(value)) + self.assertEqual(json_obj, encoding.MessageToJson(obj)) + + def testDateField(self): + + class DateMsg(messages.Message): + start_date = extra_types.DateField(1) + all_dates = extra_types.DateField(2, repeated=True) + + msg = DateMsg( + start_date=datetime.date(1752, 9, 9), all_dates=[ + datetime.date(1979, 5, 6), + datetime.date(1980, 10, 24), + datetime.date(1981, 1, 19), + ]) + json_msg = json.dumps({ + 'start_date': '1752-09-09', 'all_dates': [ + '1979-05-06', '1980-10-24', '1981-01-19', + ]}) + self.assertEqual(json_msg, encoding.MessageToJson(msg)) + self.assertEqual(msg, encoding.JsonToMessage(DateMsg, json_msg)) + + def testInt64(self): + # Testing roundtrip of type 'long' + + class DogeMsg(messages.Message): + such_string = messages.StringField(1) + wow = messages.IntegerField(2, variant=messages.Variant.INT64) + very_unsigned = messages.IntegerField(3, variant=messages.Variant.UINT64) + much_repeated = messages.IntegerField( + 4, variant=messages.Variant.INT64, repeated=True) + + def MtoJ(msg): + return encoding.MessageToJson(msg) + + def JtoM(class_type, json_str): + return encoding.JsonToMessage(class_type, json_str) + + def DoRoundtrip(class_type, json_msg=None, message=None, times=4): + if json_msg: + json_msg = MtoJ(JtoM(class_type, json_msg)) + if message: + message = JtoM(class_type, MtoJ(message)) + if times == 0: + result = json_msg if json_msg else message + return result + return DoRoundtrip(class_type=class_type, json_msg=json_msg, + message=message, times=times - 1) + + # Single + json_msg = ('{"such_string": "poot", "wow": "-1234",' + ' "very_unsigned": "999", "much_repeated": ["123", "456"]}') + out_json = MtoJ(JtoM(DogeMsg, json_msg)) + self.assertEqual(json.loads(out_json)['wow'], '-1234') + + # Repeated test case + msg = DogeMsg(such_string='wow', wow=-1234, + very_unsigned=800, much_repeated=[123, 456]) + self.assertEqual(msg, DoRoundtrip(DogeMsg, message=msg)) + + +if __name__ == '__main__': + unittest2.main() diff --git a/_gcloud_vendor/apitools/base/py/http_wrapper.py b/_gcloud_vendor/apitools/base/py/http_wrapper.py new file mode 100644 index 000000000000..8c3ee281f541 --- /dev/null +++ 
b/_gcloud_vendor/apitools/base/py/http_wrapper.py @@ -0,0 +1,182 @@ +#!/usr/bin/env python +"""HTTP wrapper for apitools. + +This library wraps the underlying http library we use, which is +currently httplib2. +""" + +import collections +import httplib +import logging +import socket +import time +import urlparse + +import httplib2 + +from apitools.base.py import exceptions +from apitools.base.py import util + +__all__ = [ + 'GetHttp', + 'MakeRequest', + 'Request', +] + + +# 308 and 429 don't have names in httplib. +RESUME_INCOMPLETE = 308 +TOO_MANY_REQUESTS = 429 +_REDIRECT_STATUS_CODES = ( + httplib.MOVED_PERMANENTLY, + httplib.FOUND, + httplib.SEE_OTHER, + httplib.TEMPORARY_REDIRECT, + RESUME_INCOMPLETE, +) + + +class Request(object): + """Class encapsulating the data for an HTTP request.""" + + def __init__(self, url='', http_method='GET', headers=None, body=''): + self.url = url + self.http_method = http_method + self.headers = headers or {} + self.__body = None + self.body = body + + @property + def body(self): + return self.__body + + @body.setter + def body(self, value): + self.__body = value + if value is not None: + self.headers['content-length'] = str(len(self.__body)) + else: + self.headers.pop('content-length', None) + + +# Note: currently the order of fields here is important, since we want +# to be able to pass in the result from httplib2.request. +class Response(collections.namedtuple( + 'HttpResponse', ['info', 'content', 'request_url'])): + """Class encapsulating data for an HTTP response.""" + __slots__ = () + + def __len__(self): + def ProcessContentRange(content_range): + _, _, range_spec = content_range.partition(' ') + byte_range, _, _ = range_spec.partition('/') + start, _, end = byte_range.partition('-') + return int(end) - int(start) + 1 + + if '-content-encoding' in self.info and 'content-range' in self.info: + # httplib2 rewrites content-length in the case of a compressed + # transfer; we can't trust the content-length header in that + # case, but we *can* trust content-range, if it's present. + return ProcessContentRange(self.info['content-range']) + elif 'content-length' in self.info: + return int(self.info.get('content-length')) + elif 'content-range' in self.info: + return ProcessContentRange(self.info['content-range']) + return len(self.content) + + @property + def status_code(self): + return int(self.info['status']) + + @property + def retry_after(self): + if 'retry-after' in self.info: + return int(self.info['retry-after']) + + @property + def is_redirect(self): + return (self.status_code in _REDIRECT_STATUS_CODES and + 'location' in self.info) + + +def MakeRequest(http, http_request, retries=5, redirections=5): + """Send http_request via the given http. + + This wrapper exists to handle translation between the plain httplib2 + request/response types and the Request and Response types above. + This will also be the hook for error/retry handling. + + Args: + http: An httplib2.Http instance, or a http multiplexer that delegates to + an underlying http, for example, HTTPMultiplexer. + http_request: A Request to send. + retries: (int, default 5) Number of retries to attempt on 5XX replies. + redirections: (int, default 5) Number of redirects to follow. + + Returns: + A Response object. + + Raises: + InvalidDataFromServerError: if there is no response after retries. + """ + response = None + exc = None + connection_type = None + # Handle overrides for connection types. 
This is used if the caller + # wants control over the underlying connection for managing callbacks + # or hash digestion. + if getattr(http, 'connections', None): + url_scheme = urlparse.urlsplit(http_request.url).scheme + if url_scheme and url_scheme in http.connections: + connection_type = http.connections[url_scheme] + for retry in xrange(retries + 1): + # Note that the str() calls here are important for working around + # some funny business with message construction and unicode in + # httplib itself. See, eg, + # http://bugs.python.org/issue11898 + info = None + try: + info, content = http.request( + str(http_request.url), method=str(http_request.http_method), + body=http_request.body, headers=http_request.headers, + redirections=redirections, connection_type=connection_type) + except httplib.BadStatusLine as e: + logging.error('Caught BadStatusLine from httplib, retrying: %s', e) + exc = e + except socket.error as e: + if http_request.http_method != 'GET': + raise + logging.error('Caught socket error, retrying: %s', e) + exc = e + except httplib.IncompleteRead as e: + if http_request.http_method != 'GET': + raise + logging.error('Caught IncompleteRead error, retrying: %s', e) + exc = e + if info is not None: + response = Response(info, content, http_request.url) + if (response.status_code < 500 and + response.status_code != TOO_MANY_REQUESTS and + not response.retry_after): + break + logging.info('Retrying request to url <%s> after status code %s.', + response.request_url, response.status_code) + elif isinstance(exc, httplib.IncompleteRead): + logging.info('Retrying request to url <%s> after incomplete read.', + str(http_request.url)) + else: + logging.info('Retrying request to url <%s> after connection break.', + str(http_request.url)) + # TODO(craigcitro): Make this timeout configurable. + if response: + time.sleep(response.retry_after or util.CalculateWaitForRetry(retry)) + else: + time.sleep(util.CalculateWaitForRetry(retry)) + if response is None: + raise exceptions.InvalidDataFromServerError( + 'HTTP error on final retry: %s' % exc) + return response + + +def GetHttp(): + return httplib2.Http() diff --git a/_gcloud_vendor/apitools/base/py/list_pager.py b/_gcloud_vendor/apitools/base/py/list_pager.py new file mode 100644 index 000000000000..d8f5971da336 --- /dev/null +++ b/_gcloud_vendor/apitools/base/py/list_pager.py @@ -0,0 +1,49 @@ +#!/usr/bin/env python +"""A helper function that executes a series of List queries for many APIs.""" + +import copy + +__all__ = [ + 'YieldFromList', +] + + +def YieldFromList( + service, request, limit=None, batch_size=100, + method='List', field='items', predicate=None): + """Make a series of List requests, keeping track of page tokens. + + Args: + service: apitools_base.BaseApiService, A service with a .List() method. + request: protorpc.messages.Message, The request message corresponding to the + service's .List() method, with all the attributes populated except + the .maxResults and .pageToken attributes. + limit: int, The maximum number of records to yield. None if all available + records should be yielded. + batch_size: int, The number of items to retrieve per request. + method: str, The name of the method used to fetch resources. + field: str, The field in the response that will be a list of items. + predicate: lambda, A function that returns true for items to be yielded. + + Yields: + protorpc.message.Message, The resources listed by the service. 
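+
+  Example (a sketch; 'client.rows' and 'ListRowsRequest' are hypothetical
+  names, not part of this module):
+
+    request = ListRowsRequest()
+    for row in YieldFromList(client.rows, request, limit=10):
+      print row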
+ + """ + request = copy.deepcopy(request) + request.maxResults = batch_size + request.pageToken = None + while limit is None or limit: + response = getattr(service, method)(request) + items = getattr(response, field) + if predicate: + items = filter(predicate, items) + for item in items: + yield item + if limit is None: + continue + limit -= 1 + if not limit: + return + request.pageToken = response.nextPageToken + if not request.pageToken: + return diff --git a/_gcloud_vendor/apitools/base/py/transfer.py b/_gcloud_vendor/apitools/base/py/transfer.py new file mode 100644 index 000000000000..610ef2d5868b --- /dev/null +++ b/_gcloud_vendor/apitools/base/py/transfer.py @@ -0,0 +1,716 @@ +#!/usr/bin/env python +"""Upload and download support for apitools.""" + +import email.generator as email_generator +import email.mime.multipart as mime_multipart +import email.mime.nonmultipart as mime_nonmultipart +import httplib +import io +import json +import mimetypes +import os +import StringIO +import threading + +from apitools.base.py import exceptions +from apitools.base.py import http_wrapper +from apitools.base.py import util + +__all__ = [ + 'Download', + 'Upload', +] + +_RESUMABLE_UPLOAD_THRESHOLD = 5 << 20 +_SIMPLE_UPLOAD = 'simple' +_RESUMABLE_UPLOAD = 'resumable' + + +class _Transfer(object): + """Generic bits common to Uploads and Downloads.""" + + def __init__(self, stream, close_stream=False, chunksize=None, + auto_transfer=True, http=None): + self.__bytes_http = None + self.__close_stream = close_stream + self.__http = http + self.__stream = stream + self.__url = None + + self.auto_transfer = auto_transfer + self.chunksize = chunksize or 1048576L + + def __repr__(self): + return str(self) + + @property + def close_stream(self): + return self.__close_stream + + @property + def http(self): + return self.__http + + @property + def bytes_http(self): + return self.__bytes_http or self.http + + @bytes_http.setter + def bytes_http(self, value): + self.__bytes_http = value + + @property + def stream(self): + return self.__stream + + @property + def url(self): + return self.__url + + def _Initialize(self, http, url): + """Initialize this download by setting self.http and self.url. + + We want the user to be able to override self.http by having set + the value in the constructor; in that case, we ignore the provided + http. + + Args: + http: An httplib2.Http instance or None. + url: The url for this transfer. + + Returns: + None. Initializes self. + """ + self.EnsureUninitialized() + if self.http is None: + self.__http = http or http_wrapper.GetHttp() + self.__url = url + + @property + def initialized(self): + return self.url is not None and self.http is not None + + @property + def _type_name(self): + return type(self).__name__ + + def EnsureInitialized(self): + if not self.initialized: + raise exceptions.TransferInvalidError( + 'Cannot use uninitialized %s', self._type_name) + + def EnsureUninitialized(self): + if self.initialized: + raise exceptions.TransferInvalidError( + 'Cannot re-initialize %s', self._type_name) + + def __del__(self): + if self.__close_stream: + self.__stream.close() + + def _ExecuteCallback(self, callback, response): + # TODO(craigcitro): Push these into a queue. + if callback is not None: + threading.Thread(target=callback, args=(response, self)).start() + + +class Download(_Transfer): + """Data for a single download. + + Public attributes: + chunksize: default chunksize to use for transfers. 
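+    progress: number of bytes downloaded so far.
+    total_size: total byte size of the download, once known.
+
+  Example (a sketch; the filename is illustrative):
+
+    download = Download.FromFile('/tmp/out.dat', overwrite=True)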
+ """ + _ACCEPTABLE_STATUSES = set(( + httplib.OK, + httplib.NO_CONTENT, + httplib.PARTIAL_CONTENT, + httplib.REQUESTED_RANGE_NOT_SATISFIABLE, + )) + _REQUIRED_SERIALIZATION_KEYS = set(( + 'auto_transfer', 'progress', 'total_size', 'url')) + + def __init__(self, *args, **kwds): + super(Download, self).__init__(*args, **kwds) + self.__initial_response = None + self.__progress = 0 + self.__total_size = None + + @property + def progress(self): + return self.__progress + + @classmethod + def FromFile(cls, filename, overwrite=False, auto_transfer=True): + """Create a new download object from a filename.""" + path = os.path.expanduser(filename) + if os.path.exists(path) and not overwrite: + raise exceptions.InvalidUserInputError( + 'File %s exists and overwrite not specified' % path) + return cls(open(path, 'wb'), close_stream=True, auto_transfer=auto_transfer) + + @classmethod + def FromStream(cls, stream, auto_transfer=True): + """Create a new Download object from a stream.""" + return cls(stream, auto_transfer=auto_transfer) + + @classmethod + def FromData(cls, stream, json_data, http=None, auto_transfer=None): + """Create a new Download object from a stream and serialized data.""" + info = json.loads(json_data) + missing_keys = cls._REQUIRED_SERIALIZATION_KEYS - set(info.keys()) + if missing_keys: + raise exceptions.InvalidDataError( + 'Invalid serialization data, missing keys: %s' % ( + ', '.join(missing_keys))) + download = cls.FromStream(stream) + if auto_transfer is not None: + download.auto_transfer = auto_transfer + else: + download.auto_transfer = info['auto_transfer'] + setattr(download, '_Download__progress', info['progress']) + setattr(download, '_Download__total_size', info['total_size']) + download._Initialize(http, info['url']) # pylint: disable=protected-access + return download + + @property + def serialization_data(self): + self.EnsureInitialized() + return { + 'auto_transfer': self.auto_transfer, + 'progress': self.progress, + 'total_size': self.total_size, + 'url': self.url, + } + + @property + def total_size(self): + return self.__total_size + + def __str__(self): + if not self.initialized: + return 'Download (uninitialized)' + else: + return 'Download with %d/%s bytes transferred from url %s' % ( + self.progress, self.total_size, self.url) + + def ConfigureRequest(self, http_request, url_builder): + url_builder.query_params['alt'] = 'media' + http_request.headers['Range'] = 'bytes=0-%d' % (self.chunksize - 1,) + + def __SetTotal(self, info): + if 'content-range' in info: + _, _, total = info['content-range'].rpartition('/') + if total != '*': + self.__total_size = int(total) + # Note "total_size is None" means we don't know it; if no size + # info was returned on our initial range request, that means we + # have a 0-byte file. (That last statement has been verified + # empirically, but is not clearly documented anywhere.) + if self.total_size is None: + self.__total_size = 0 + + def InitializeDownload(self, http_request, http=None, client=None): + """Initialize this download by making a request. + + Args: + http_request: The HttpRequest to use to initialize this download. + http: The httplib2.Http instance for this request. + client: If provided, let this client process the final URL before + sending any additional requests. If client is provided and + http is not, client.http will be used instead. 
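+
+    Raises:
+      exceptions.UserError: if neither 'http' nor 'client' is provided.
+      exceptions.HttpError: if the initial request returns a status
+        code outside of _ACCEPTABLE_STATUSES.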
+ """ + self.EnsureUninitialized() + if http is None and client is None: + raise exceptions.UserError('Must provide client or http.') + http = http or client.http + if client is not None: + http_request.url = client.FinalizeTransferUrl(http_request.url) + response = http_wrapper.MakeRequest(self.bytes_http or http, http_request) + if response.status_code not in self._ACCEPTABLE_STATUSES: + raise exceptions.HttpError.FromResponse(response) + self.__initial_response = response + self.__SetTotal(response.info) + url = response.info.get('content-location', response.request_url) + if client is not None: + url = client.FinalizeTransferUrl(url) + self._Initialize(http, url) + # Unless the user has requested otherwise, we want to just + # go ahead and pump the bytes now. + if self.auto_transfer: + self.StreamInChunks() + + @staticmethod + def _ArgPrinter(response, unused_download): + if 'content-range' in response.info: + print 'Received %s' % response.info['content-range'] + else: + print 'Received %d bytes' % len(response) + + @staticmethod + def _CompletePrinter(*unused_args): + print 'Download complete' + + def __NormalizeStartEnd(self, start, end=None): + if end is not None: + if start < 0: + raise exceptions.TransferInvalidError( + 'Cannot have end index with negative start index') + elif start >= self.total_size: + raise exceptions.TransferInvalidError( + 'Cannot have start index greater than total size') + end = min(end, self.total_size - 1) + if end < start: + raise exceptions.TransferInvalidError( + 'Range requested with end[%s] < start[%s]' % (end, start)) + return start, end + else: + if start < 0: + start = max(0, start + self.total_size) + return start, self.total_size + + def __SetRangeHeader(self, request, start, end=None): + if start < 0: + request.headers['range'] = 'bytes=%d' % start + elif end is None: + request.headers['range'] = 'bytes=%d-' % start + else: + request.headers['range'] = 'bytes=%d-%d' % (start, end) + + def __GetChunk(self, start, end=None, additional_headers=None): + """Retrieve a chunk, and return the full response.""" + self.EnsureInitialized() + end_byte = min(end or start + self.chunksize, self.total_size) + request = http_wrapper.Request(url=self.url) + self.__SetRangeHeader(request, start, end=end_byte) + if additional_headers is not None: + request.headers.update(additional_headers) + return http_wrapper.MakeRequest(self.bytes_http, request) + + def __ProcessResponse(self, response): + """Process this response (by updating self and writing to self.stream).""" + if response.status_code not in self._ACCEPTABLE_STATUSES: + raise exceptions.TransferInvalidError(response.content) + if response.status_code in (httplib.OK, httplib.PARTIAL_CONTENT): + self.stream.write(response.content) + self.__progress += len(response) + elif response.status_code == httplib.NO_CONTENT: + # It's important to write something to the stream for the case + # of a 0-byte download to a file, as otherwise python won't + # create the file. + self.stream.write('') + return response + + def GetRange(self, start, end=None, additional_headers=None): + """Retrieve a given byte range from this download, inclusive. + + Range must be of one of these three forms: + * 0 <= start, end = None: Fetch from start to the end of the file. + * 0 <= start <= end: Fetch the bytes from start to end. + * start < 0, end = None: Fetch the last -start bytes of the file. + + (These variations correspond to those described in the HTTP 1.1 + protocol for range headers in RFC 2616, sec. 14.35.1.) 
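+
+    For example (illustrative): GetRange(0, 99) fetches the first 100
+    bytes, and GetRange(-100) fetches the last 100 bytes of the file.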
+
+    Args:
+      start: (int) Where to start fetching bytes. (See above.)
+      end: (int, optional) Where to stop fetching bytes. (See above.)
+      additional_headers: (dict, optional) Any additional headers to
+        pass with the request.
+
+    Returns:
+      None. Streams bytes into self.stream.
+    """
+    self.EnsureInitialized()
+    progress, end = self.__NormalizeStartEnd(start, end)
+    while progress < end:
+      chunk_end = min(progress + self.chunksize, end)
+      response = self.__GetChunk(progress, end=chunk_end,
+                                 additional_headers=additional_headers)
+      response = self.__ProcessResponse(response)
+      progress += len(response)
+      if not response:
+        raise exceptions.TransferInvalidError(
+            'Zero bytes unexpectedly returned in download response')
+
+  def StreamInChunks(self, callback=None, finish_callback=None,
+                     additional_headers=None):
+    """Stream the entire download."""
+    callback = callback or self._ArgPrinter
+    finish_callback = finish_callback or self._CompletePrinter
+
+    self.EnsureInitialized()
+    while True:
+      if self.__initial_response is not None:
+        response = self.__initial_response
+        self.__initial_response = None
+      else:
+        response = self.__GetChunk(self.progress,
+                                   additional_headers=additional_headers)
+      response = self.__ProcessResponse(response)
+      self._ExecuteCallback(callback, response)
+      if (response.status_code == httplib.OK or
+          self.progress >= self.total_size):
+        break
+    self._ExecuteCallback(finish_callback, response)
+
+
+class Upload(_Transfer):
+  """Data for a single Upload.
+
+  Fields:
+    stream: The stream to upload.
+    mime_type: MIME type of the upload.
+    total_size: (optional) Total upload size for the stream.
+    close_stream: (default: False) Whether or not we should close the
+      stream when finished with the upload.
+    auto_transfer: (default: True) If True, stream all bytes as soon as
+      the upload is created.
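+
+  Example (a sketch; the path and MIME type are illustrative):
+
+    upload = Upload.FromFile('/tmp/data.txt', mime_type='text/plain')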
+ """ + _REQUIRED_SERIALIZATION_KEYS = set(( + 'auto_transfer', 'mime_type', 'total_size', 'url')) + + def __init__(self, stream, mime_type, total_size=None, http=None, + close_stream=False, chunksize=None, auto_transfer=True): + super(Upload, self).__init__( + stream, close_stream=close_stream, chunksize=chunksize, + auto_transfer=auto_transfer, http=http) + self.__complete = False + self.__mime_type = mime_type + self.__progress = 0 + self.__server_chunk_granularity = None + self.__strategy = None + + self.total_size = total_size + + @property + def progress(self): + return self.__progress + + @classmethod + def FromFile(cls, filename, mime_type=None, auto_transfer=True): + """Create a new Upload object from a filename.""" + path = os.path.expanduser(filename) + if not os.path.exists(path): + raise exceptions.NotFoundError('Could not find file %s' % path) + if not mime_type: + mime_type, _ = mimetypes.guess_type(path) + if mime_type is None: + raise exceptions.InvalidUserInputError( + 'Could not guess mime type for %s' % path) + size = os.stat(path).st_size + return cls(open(path, 'rb'), mime_type, total_size=size, close_stream=True, + auto_transfer=auto_transfer) + + @classmethod + def FromStream(cls, stream, mime_type, total_size=None, auto_transfer=True): + """Create a new Upload object from a stream.""" + if mime_type is None: + raise exceptions.InvalidUserInputError( + 'No mime_type specified for stream') + return cls(stream, mime_type, total_size=total_size, close_stream=False, + auto_transfer=auto_transfer) + + @classmethod + def FromData(cls, stream, json_data, http, auto_transfer=None): + """Create a new Upload of stream from serialized json_data using http.""" + info = json.loads(json_data) + missing_keys = cls._REQUIRED_SERIALIZATION_KEYS - set(info.keys()) + if missing_keys: + raise exceptions.InvalidDataError( + 'Invalid serialization data, missing keys: %s' % ( + ', '.join(missing_keys))) + upload = cls.FromStream(stream, info['mime_type'], + total_size=info.get('total_size')) + if isinstance(stream, io.IOBase) and not stream.seekable(): + raise exceptions.InvalidUserInputError( + 'Cannot restart resumable upload on non-seekable stream') + if auto_transfer is not None: + upload.auto_transfer = auto_transfer + else: + upload.auto_transfer = info['auto_transfer'] + upload.strategy = _RESUMABLE_UPLOAD + upload._Initialize(http, info['url']) # pylint: disable=protected-access + upload._RefreshResumableUploadState() # pylint: disable=protected-access + upload.EnsureInitialized() + if upload.auto_transfer: + upload.StreamInChunks() + return upload + + @property + def serialization_data(self): + self.EnsureInitialized() + if self.strategy != _RESUMABLE_UPLOAD: + raise exceptions.InvalidDataError( + 'Serialization only supported for resumable uploads') + return { + 'auto_transfer': self.auto_transfer, + 'mime_type': self.mime_type, + 'total_size': self.total_size, + 'url': self.url, + } + + @property + def complete(self): + return self.__complete + + @property + def mime_type(self): + return self.__mime_type + + def __str__(self): + if not self.initialized: + return 'Upload (uninitialized)' + else: + return 'Upload with %d/%s bytes transferred for url %s' % ( + self.progress, self.total_size or '???', self.url) + + @property + def strategy(self): + return self.__strategy + + @strategy.setter + def strategy(self, value): + if value not in (_SIMPLE_UPLOAD, _RESUMABLE_UPLOAD): + raise exceptions.UserError(( + 'Invalid value "%s" for upload strategy, must be one of ' + '"simple" or 
"resumable".') % value) + self.__strategy = value + + @property + def total_size(self): + return self.__total_size + + @total_size.setter + def total_size(self, value): + self.EnsureUninitialized() + self.__total_size = value + + def __SetDefaultUploadStrategy(self, upload_config, http_request): + """Determine and set the default upload strategy for this upload. + + We generally prefer simple or multipart, unless we're forced to + use resumable. This happens when any of (1) the upload is too + large, (2) the simple endpoint doesn't support multipart requests + and we have metadata, or (3) there is no simple upload endpoint. + + Args: + upload_config: Configuration for the upload endpoint. + http_request: The associated http request. + + Returns: + None. + """ + if self.strategy is not None: + return + strategy = _SIMPLE_UPLOAD + if (self.total_size is not None and + self.total_size > _RESUMABLE_UPLOAD_THRESHOLD): + strategy = _RESUMABLE_UPLOAD + if http_request.body and not upload_config.simple_multipart: + strategy = _RESUMABLE_UPLOAD + if not upload_config.simple_path: + strategy = _RESUMABLE_UPLOAD + self.strategy = strategy + + def ConfigureRequest(self, upload_config, http_request, url_builder): + """Configure the request and url for this upload.""" + # Validate total_size vs. max_size + if (self.total_size and upload_config.max_size and + self.total_size > upload_config.max_size): + raise exceptions.InvalidUserInputError( + 'Upload too big: %s larger than max size %s' % ( + self.total_size, upload_config.max_size)) + # Validate mime type + if not util.AcceptableMimeType(upload_config.accept, self.mime_type): + raise exceptions.InvalidUserInputError( + 'MIME type %s does not match any accepted MIME ranges %s' % ( + self.mime_type, upload_config.accept)) + + self.__SetDefaultUploadStrategy(upload_config, http_request) + if self.strategy == _SIMPLE_UPLOAD: + url_builder.relative_path = upload_config.simple_path + if http_request.body: + url_builder.query_params['uploadType'] = 'multipart' + self.__ConfigureMultipartRequest(http_request) + else: + url_builder.query_params['uploadType'] = 'media' + self.__ConfigureMediaRequest(http_request) + else: + url_builder.relative_path = upload_config.resumable_path + url_builder.query_params['uploadType'] = 'resumable' + self.__ConfigureResumableRequest(http_request) + + def __ConfigureMediaRequest(self, http_request): + """Configure http_request as a simple request for this upload.""" + http_request.headers['content-type'] = self.mime_type + http_request.body = self.stream.read() + + def __ConfigureMultipartRequest(self, http_request): + """Configure http_request as a multipart request for this upload.""" + # This is a multipart/related upload. + msg_root = mime_multipart.MIMEMultipart('related') + # msg_root should not write out its own headers + setattr(msg_root, '_write_headers', lambda self: None) + + # attach the body as one part + msg = mime_nonmultipart.MIMENonMultipart( + *http_request.headers['content-type'].split('/')) + msg.set_payload(http_request.body) + msg_root.attach(msg) + + # attach the media as the second part + msg = mime_nonmultipart.MIMENonMultipart(*self.mime_type.split('/')) + msg['Content-Transfer-Encoding'] = 'binary' + msg.set_payload(self.stream.read()) + msg_root.attach(msg) + + # encode the body: note that we can't use `as_string`, because + # it plays games with `From ` lines. 
+ fp = StringIO.StringIO() + g = email_generator.Generator(fp, mangle_from_=False) + g.flatten(msg_root, unixfrom=False) + http_request.body = fp.getvalue() + + multipart_boundary = msg_root.get_boundary() + http_request.headers['content-type'] = ( + 'multipart/related; boundary=%r' % multipart_boundary) + + def __ConfigureResumableRequest(self, http_request): + http_request.headers['X-Upload-Content-Type'] = self.mime_type + if self.total_size is not None: + http_request.headers['X-Upload-Content-Length'] = str(self.total_size) + + def _RefreshResumableUploadState(self): + """Talk to the server and refresh the state of this resumable upload.""" + if self.strategy != _RESUMABLE_UPLOAD: + return + self.EnsureInitialized() + refresh_request = http_wrapper.Request( + url=self.url, http_method='PUT', headers={'Content-Range': 'bytes */*'}) + refresh_response = http_wrapper.MakeRequest( + self.http, refresh_request, redirections=0) + range_header = refresh_response.info.get( + 'Range', refresh_response.info.get('range')) + if refresh_response.status_code in (httplib.OK, httplib.CREATED): + self.__complete = True + elif refresh_response.status_code == http_wrapper.RESUME_INCOMPLETE: + if range_header is None: + self.__progress = 0 + else: + self.__progress = self.__GetLastByte(range_header) + 1 + self.stream.seek(self.progress) + else: + raise exceptions.HttpError.FromResponse(refresh_response) + + def InitializeUpload(self, http_request, http=None, client=None): + """Initialize this upload from the given http_request.""" + if self.strategy is None: + raise exceptions.UserError( + 'No upload strategy set; did you call ConfigureRequest?') + if http is None and client is None: + raise exceptions.UserError('Must provide client or http.') + if self.strategy != _RESUMABLE_UPLOAD: + return + if self.total_size is None: + raise exceptions.InvalidUserInputError( + 'Cannot stream upload without total size') + http = http or client.http + if client is not None: + http_request.url = client.FinalizeTransferUrl(http_request.url) + self.EnsureUninitialized() + http_response = http_wrapper.MakeRequest(http, http_request) + if http_response.status_code != httplib.OK: + raise exceptions.HttpError.FromResponse(http_response) + + self.__server_chunk_granularity = http_response.info.get( + 'X-Goog-Upload-Chunk-Granularity') + self.__ValidateChunksize() + url = http_response.info['location'] + if client is not None: + url = client.FinalizeTransferUrl(url) + self._Initialize(http, url) + + # Unless the user has requested otherwise, we want to just + # go ahead and pump the bytes now. + if self.auto_transfer: + return self.StreamInChunks() + + def __GetLastByte(self, range_header): + _, _, end = range_header.partition('-') + # TODO(craigcitro): Validate start == 0? 
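+    # range_header has the form 'bytes=0-<end>'; partition('-') leaves
+    # the final byte offset in 'end'.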
+ return int(end) + + def __ValidateChunksize(self, chunksize=None): + if self.__server_chunk_granularity is None: + return + chunksize = chunksize or self.chunksize + if chunksize % self.__server_chunk_granularity: + raise exceptions.ConfigurationValueError( + 'Server requires chunksize to be a multiple of %d', + self.__server_chunk_granularity) + + @staticmethod + def _ArgPrinter(response, unused_upload): + print 'Sent %s' % response.info['range'] + + @staticmethod + def _CompletePrinter(*unused_args): + print 'Upload complete' + + def StreamInChunks(self, callback=None, finish_callback=None, + additional_headers=None): + """Send this (resumable) upload in chunks.""" + if self.strategy != _RESUMABLE_UPLOAD: + raise exceptions.InvalidUserInputError( + 'Cannot stream non-resumable upload') + if self.total_size is None: + raise exceptions.InvalidUserInputError( + 'Cannot stream upload without total size') + callback = callback or self._ArgPrinter + finish_callback = finish_callback or self._CompletePrinter + response = None + self.__ValidateChunksize(self.chunksize) + self.EnsureInitialized() + while not self.complete: + response = self.__SendChunk(self.stream.tell(), + additional_headers=additional_headers) + if response.status_code in (httplib.OK, httplib.CREATED): + self.__complete = True + break + self.__progress = self.__GetLastByte(response.info['range']) + if self.progress + 1 != self.stream.tell(): + # TODO(craigcitro): Add a better way to recover here. + raise exceptions.CommunicationError( + 'Failed to transfer all bytes in chunk, upload paused at byte ' + '%d' % self.progress) + self._ExecuteCallback(callback, response) + self._ExecuteCallback(finish_callback, response) + return response + + def __SendChunk(self, start, additional_headers=None, data=None): + """Send the specified chunk.""" + self.EnsureInitialized() + if data is None: + data = self.stream.read(self.chunksize) + end = start + len(data) + + request = http_wrapper.Request(url=self.url, http_method='PUT', body=data) + request.headers['Content-Type'] = self.mime_type + if data: + request.headers['Content-Range'] = 'bytes %s-%s/%s' % ( + start, end - 1, self.total_size) + if additional_headers: + request.headers.update(additional_headers) + + response = http_wrapper.MakeRequest(self.bytes_http, request) + if response.status_code not in (httplib.OK, httplib.CREATED, + http_wrapper.RESUME_INCOMPLETE): + raise exceptions.HttpError.FromResponse(response) + if response.status_code in (httplib.OK, httplib.CREATED): + return response + # TODO(craigcitro): Add retries on no progress? + last_byte = self.__GetLastByte(response.info['range']) + if last_byte + 1 != end: + new_start = last_byte + 1 - start + response = self.__SendChunk(last_byte + 1, data=data[new_start:]) + return response diff --git a/_gcloud_vendor/apitools/base/py/util.py b/_gcloud_vendor/apitools/base/py/util.py new file mode 100644 index 000000000000..cd882a7e9342 --- /dev/null +++ b/_gcloud_vendor/apitools/base/py/util.py @@ -0,0 +1,165 @@ +#!/usr/bin/env python +"""Assorted utilities shared between parts of apitools.""" + +import collections +import httplib +import os +import random +import types +import urllib +import urllib2 + +from apitools.base.py import exceptions + +__all__ = [ + 'DetectGae', + 'DetectGce', +] + +_RESERVED_URI_CHARS = r":/?#[]@!$&'()*+,;=" + + +def DetectGae(): + """Determine whether or not we're running on GAE. 
+ + This is based on: + https://developers.google.com/appengine/docs/python/#The_Environment + + Returns: + True iff we're running on GAE. + """ + server_software = os.environ.get('SERVER_SOFTWARE', '') + return (server_software.startswith('Development/') or + server_software.startswith('Google App Engine/')) + + +def DetectGce(): + """Determine whether or not we're running on GCE. + + This is based on: + https://cloud.google.com/compute/docs/metadata#runninggce + + Returns: + True iff we're running on a GCE instance. + """ + try: + o = urllib2.urlopen('http://metadata.google.internal') + except urllib2.URLError: + return False + return (o.getcode() == httplib.OK and + o.headers.get('metadata-flavor') == 'Google') + + +def NormalizeScopes(scope_spec): + """Normalize scope_spec to a set of strings.""" + if isinstance(scope_spec, types.StringTypes): + return set(scope_spec.split(' ')) + elif isinstance(scope_spec, collections.Iterable): + return set(scope_spec) + raise exceptions.TypecheckError( + 'NormalizeScopes expected string or iterable, found %s' % ( + type(scope_spec),)) + + +def Typecheck(arg, arg_type, msg=None): + if not isinstance(arg, arg_type): + if msg is None: + if isinstance(arg_type, tuple): + msg = 'Type of arg is "%s", not one of %r' % (type(arg), arg_type) + else: + msg = 'Type of arg is "%s", not "%s"' % (type(arg), arg_type) + raise exceptions.TypecheckError(msg) + return arg + + +def ExpandRelativePath(method_config, params, relative_path=None): + """Determine the relative path for request.""" + path = relative_path or method_config.relative_path or '' + + for param in method_config.path_params: + param_template = '{%s}' % param + # For more details about "reserved word expansion", see: + # http://tools.ietf.org/html/rfc6570#section-3.2.2 + reserved_chars = '' + reserved_template = '{+%s}' % param + if reserved_template in path: + reserved_chars = _RESERVED_URI_CHARS + path = path.replace(reserved_template, param_template) + if param_template not in path: + raise exceptions.InvalidUserInputError( + 'Missing path parameter %s' % param) + try: + # TODO(craigcitro): Do we want to support some sophisticated + # mapping here? + value = params[param] + except KeyError: + raise exceptions.InvalidUserInputError( + 'Request missing required parameter %s' % param) + if value is None: + raise exceptions.InvalidUserInputError( + 'Request missing required parameter %s' % param) + try: + if not isinstance(value, basestring): + value = str(value) + path = path.replace(param_template, + urllib.quote(value.encode('utf_8'), reserved_chars)) + except TypeError as e: + raise exceptions.InvalidUserInputError( + 'Error setting required parameter %s to value %s: %s' % ( + param, value, e)) + return path + + +def CalculateWaitForRetry(retry_attempt, max_wait=60): + """Calculates amount of time to wait before a retry attempt. + + Wait time grows exponentially with the number of attempts. + A random amount of jitter is added to spread out retry attempts from different + clients. + + Args: + retry_attempt: Retry attempt counter. + max_wait: Upper bound for wait time. + + Returns: + Amount of time to wait before retrying request. + """ + + wait_time = 2 ** retry_attempt + # randrange requires a nonzero interval, so we want to drop it if + # the range is too small for jitter. 
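+  # For example, retry_attempt=3 gives a base wait of 8 seconds plus
+  # jitter drawn from [-4, 4), capped at max_wait.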
+  if retry_attempt:
+    max_jitter = (2 ** retry_attempt) / 2
+    wait_time += random.randrange(-max_jitter, max_jitter)
+  return min(wait_time, max_wait)
+
+
+def AcceptableMimeType(accept_patterns, mime_type):
+  """Return True iff mime_type is acceptable for one of accept_patterns.
+
+  Note that this function assumes that all patterns in accept_patterns
+  will be simple types of the form "type/subtype", where one or both
+  of these can be "*". We do not support parameters (i.e. "; q=") in
+  patterns.
+
+  Args:
+    accept_patterns: list of acceptable MIME types.
+    mime_type: the mime type we would like to match.
+
+  Returns:
+    Whether or not mime_type matches (at least) one of these patterns.
+  """
+  unsupported_patterns = [p for p in accept_patterns if ';' in p]
+  if unsupported_patterns:
+    raise exceptions.GeneratedClientError(
+        'MIME patterns with parameter unsupported: "%s"' % ', '.join(
+            unsupported_patterns))
+  def MimeTypeMatches(pattern, mime_type):
+    """Return True iff mime_type is acceptable for pattern."""
+    # Some systems use a single '*' instead of '*/*'.
+    if pattern == '*':
+      pattern = '*/*'
+    return all(accept in ('*', provided) for accept, provided
+               in zip(pattern.split('/'), mime_type.split('/')))
+
+  return any(MimeTypeMatches(pattern, mime_type) for pattern in accept_patterns)

From 565750ee7d19742b520dd62e2a4ff38325987284 Mon Sep 17 00:00:00 2001
From: Tres Seaver
Date: Tue, 2 Dec 2014 14:34:22 -0500
Subject: [PATCH 4/6] Rip out namespace package support and convenience imports.

---
 _gcloud_vendor/apitools/__init__.py         |  6 +-----
 _gcloud_vendor/apitools/base/__init__.py    |  6 +-----
 _gcloud_vendor/apitools/base/py/__init__.py | 16 +---------------
 3 files changed, 3 insertions(+), 25 deletions(-)

diff --git a/_gcloud_vendor/apitools/__init__.py b/_gcloud_vendor/apitools/__init__.py
index 54fa3d53924e..9870b5e53b94 100644
--- a/_gcloud_vendor/apitools/__init__.py
+++ b/_gcloud_vendor/apitools/__init__.py
@@ -1,5 +1 @@
-#!/usr/bin/env python
-"""Shared __init__.py for apitools."""
-
-from pkgutil import extend_path
-__path__ = extend_path(__path__, __name__)
+"""Package stub."""
diff --git a/_gcloud_vendor/apitools/base/__init__.py b/_gcloud_vendor/apitools/base/__init__.py
index 54fa3d53924e..9870b5e53b94 100644
--- a/_gcloud_vendor/apitools/base/__init__.py
+++ b/_gcloud_vendor/apitools/base/__init__.py
@@ -1,5 +1 @@
-#!/usr/bin/env python
-"""Shared __init__.py for apitools."""
-
-from pkgutil import extend_path
-__path__ = extend_path(__path__, __name__)
+"""Package stub."""
diff --git a/_gcloud_vendor/apitools/base/py/__init__.py b/_gcloud_vendor/apitools/base/py/__init__.py
index cbf7f86f3485..9870b5e53b94 100644
--- a/_gcloud_vendor/apitools/base/py/__init__.py
+++ b/_gcloud_vendor/apitools/base/py/__init__.py
@@ -1,15 +1 @@
-#!/usr/bin/env python
-"""Top-level imports for apitools base files."""
-
-# pylint:disable=wildcard-import
-from apitools.base.py.base_api import *
-from apitools.base.py.batch import *
-from apitools.base.py.credentials_lib import *
-from apitools.base.py.encoding import *
-from apitools.base.py.exceptions import *
-from apitools.base.py.extra_types import *
-from apitools.base.py.http_wrapper import *
-from apitools.base.py.list_pager import *
-from apitools.base.py.transfer import *
-from apitools.base.py.util import *
-
+"""Package stub."""

From 67b06019549a4db8168ff4c5171c9d701ac94a15 Mon Sep 17 00:00:00 2001
From: Tres Seaver
Date: Tue, 2 Dec 2014 14:41:25 -0500
Subject: [PATCH 5/6] Rip out non-transport-related modules.
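
Keep only the transport-oriented pieces ('exceptions', 'http_wrapper',
'transfer', and 'util'); the CLI helpers, base API machinery, batch
support, credentials, encoding and extra-types support, and the list
pager (plus their tests) all go.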
--- _gcloud_vendor/apitools/base/py/app2.py | 347 ----------- _gcloud_vendor/apitools/base/py/base_api.py | 583 ------------------ .../apitools/base/py/base_api_test.py | 113 ---- _gcloud_vendor/apitools/base/py/base_cli.py | 151 ----- _gcloud_vendor/apitools/base/py/batch.py | 441 ------------- _gcloud_vendor/apitools/base/py/cli.py | 13 - .../apitools/base/py/credentials_lib.py | 221 ------- .../apitools/base/py/credentials_lib_test.py | 54 -- _gcloud_vendor/apitools/base/py/encoding.py | 486 --------------- .../apitools/base/py/encoding_test.py | 269 -------- .../apitools/base/py/extra_types.py | 283 --------- .../apitools/base/py/extra_types_test.py | 175 ------ _gcloud_vendor/apitools/base/py/list_pager.py | 49 -- 13 files changed, 3185 deletions(-) delete mode 100644 _gcloud_vendor/apitools/base/py/app2.py delete mode 100644 _gcloud_vendor/apitools/base/py/base_api.py delete mode 100644 _gcloud_vendor/apitools/base/py/base_api_test.py delete mode 100644 _gcloud_vendor/apitools/base/py/base_cli.py delete mode 100644 _gcloud_vendor/apitools/base/py/batch.py delete mode 100644 _gcloud_vendor/apitools/base/py/cli.py delete mode 100644 _gcloud_vendor/apitools/base/py/credentials_lib.py delete mode 100644 _gcloud_vendor/apitools/base/py/credentials_lib_test.py delete mode 100644 _gcloud_vendor/apitools/base/py/encoding.py delete mode 100644 _gcloud_vendor/apitools/base/py/encoding_test.py delete mode 100644 _gcloud_vendor/apitools/base/py/extra_types.py delete mode 100644 _gcloud_vendor/apitools/base/py/extra_types_test.py delete mode 100644 _gcloud_vendor/apitools/base/py/list_pager.py diff --git a/_gcloud_vendor/apitools/base/py/app2.py b/_gcloud_vendor/apitools/base/py/app2.py deleted file mode 100644 index 2a90d5525649..000000000000 --- a/_gcloud_vendor/apitools/base/py/app2.py +++ /dev/null @@ -1,347 +0,0 @@ -#!/usr/bin/env python -"""Appcommands-compatible command class with extra fixins.""" - -import cmd -import inspect -import pdb -import shlex -import sys -import traceback -import types - -from google.apputils import app -from google.apputils import appcommands -import gflags as flags - -__all__ = [ - 'NewCmd', - 'Repl', -] - -flags.DEFINE_boolean( - 'debug_mode', False, - 'Show tracebacks on Python exceptions.') -flags.DEFINE_boolean( - 'headless', False, - 'Assume no user is at the controlling console.') -FLAGS = flags.FLAGS - - -def _SafeMakeAscii(s): - if isinstance(s, unicode): - return s.encode('ascii') - elif isinstance(s, str): - return s.decode('ascii') - else: - return unicode(s).encode('ascii', 'backslashreplace') - - -class NewCmd(appcommands.Cmd): - """Featureful extension of appcommands.Cmd.""" - - def __init__(self, name, flag_values): - super(NewCmd, self).__init__(name, flag_values) - run_with_args = getattr(self, 'RunWithArgs', None) - self._new_style = isinstance(run_with_args, types.MethodType) - if self._new_style: - func = run_with_args.im_func - - argspec = inspect.getargspec(func) - if argspec.args and argspec.args[0] == 'self': - argspec = argspec._replace( # pylint: disable=protected-access - args=argspec.args[1:]) - self._argspec = argspec - # TODO(craigcitro): Do we really want to support all this - # nonsense? 
- self._star_args = self._argspec.varargs is not None - self._star_kwds = self._argspec.keywords is not None - self._max_args = len(self._argspec.args or ()) - self._min_args = self._max_args - len(self._argspec.defaults or ()) - if self._star_args: - self._max_args = sys.maxint - - self._debug_mode = FLAGS.debug_mode - self.surface_in_shell = True - self.__doc__ = self.RunWithArgs.__doc__ - - def __getattr__(self, name): - if name in self._command_flags: - return self._command_flags[name].value - return super(NewCmd, self).__getattribute__(name) - - def _GetFlag(self, flagname): - if flagname in self._command_flags: - return self._command_flags[flagname] - else: - return None - - def Run(self, argv): - """Run this command. - - If self is a new-style command, we set up arguments and call - self.RunWithArgs, gracefully handling exceptions. If not, we - simply call self.Run(argv). - - Args: - argv: List of arguments as strings. - - Returns: - 0 on success, nonzero on failure. - """ - if not self._new_style: - return super(NewCmd, self).Run(argv) - - # TODO(craigcitro): We need to save and restore flags each time so - # that we can per-command flags in the REPL. - args = argv[1:] - fail = None - if len(args) < self._min_args: - fail = 'Not enough positional args; found %d, expected at least %d' % ( - len(args), self._min_args) - if len(args) > self._max_args: - fail = 'Too many positional args; found %d, expected at most %d' % ( - len(args), self._max_args) - if fail: - print fail - if self.usage: - print 'Usage: %s' % (self.usage,) - return 1 - - if self._debug_mode: - return self.RunDebug(args, {}) - else: - return self.RunSafely(args, {}) - - def RunCmdLoop(self, argv): - """Hook for use in cmd.Cmd-based command shells.""" - try: - args = shlex.split(argv) - except ValueError as e: - raise SyntaxError(self.EncodeForPrinting(e)) - return self.Run([self._command_name] + args) - - @staticmethod - def EncodeForPrinting(s): - """Safely encode a string as the encoding for sys.stdout.""" - encoding = sys.stdout.encoding or 'ascii' - return unicode(s).encode(encoding, 'backslashreplace') - - def _FormatError(self, e): - """Hook for subclasses to modify how error messages are printed.""" - return _SafeMakeAscii(e) - - def _HandleError(self, e): - message = self._FormatError(e) - print 'Exception raised in %s operation: %s' % (self._command_name, message) - return 1 - - def _IsDebuggableException(self, e): - """Hook for subclasses to skip debugging on certain exceptions.""" - return not isinstance(e, app.UsageError) - - def RunDebug(self, args, kwds): - """Run this command in debug mode.""" - try: - return_value = self.RunWithArgs(*args, **kwds) - except BaseException, e: - # Don't break into the debugger for expected exceptions. - if not self._IsDebuggableException(e): - return self._HandleError(e) - print - print '****************************************************' - print '** Unexpected Exception raised in execution! **' - if FLAGS.headless: - print '** --headless mode enabled, exiting. **' - print '** See STDERR for traceback. **' - else: - print '** --debug_mode enabled, starting pdb. 
**' - print '****************************************************' - print - traceback.print_exc() - print - if not FLAGS.headless: - pdb.post_mortem() - return 1 - return return_value - - def RunSafely(self, args, kwds): - """Run this command, turning exceptions into print statements.""" - try: - return_value = self.RunWithArgs(*args, **kwds) - except BaseException, e: - return self._HandleError(e) - return return_value - - -class CommandLoop(cmd.Cmd): - """Instance of cmd.Cmd built to work with NewCmd.""" - - class TerminateSignal(Exception): - """Exception type used for signaling loop completion.""" - - def __init__(self, commands, prompt): - cmd.Cmd.__init__(self) - self._commands = {'help': commands['help']} - self._special_command_names = ['help', 'repl', 'EOF'] - for name, command in commands.iteritems(): - if (name not in self._special_command_names and - isinstance(command, NewCmd) and - command.surface_in_shell): - self._commands[name] = command - setattr(self, 'do_%s' % (name,), command.RunCmdLoop) - self._default_prompt = prompt - self._set_prompt() - self._last_return_code = 0 - - @property - def last_return_code(self): - return self._last_return_code - - def _set_prompt(self): - self.prompt = self._default_prompt - - def do_EOF(self, *unused_args): - """Terminate the running command loop. - - This function raises an exception to avoid the need to do - potentially-error-prone string parsing inside onecmd. - - Args: - *unused_args: unused. - - Returns: - Never returns. - - Raises: - CommandLoop.TerminateSignal: always. - """ - raise CommandLoop.TerminateSignal() - - def postloop(self): - print 'Goodbye.' - - def completedefault(self, unused_text, line, unused_begidx, unused_endidx): - if not line: - return [] - else: - command_name = line.partition(' ')[0].lower() - usage = '' - if command_name in self._commands: - usage = self._commands[command_name].usage - if usage: - print - print usage - print '%s%s' % (self.prompt, line), - return [] - - def emptyline(self): - print 'Available commands:', - print ' '.join(list(self._commands)) - - def precmd(self, line): - """Preprocess the shell input.""" - if line == 'EOF': - return line - if line.startswith('exit') or line.startswith('quit'): - return 'EOF' - words = line.strip().split() - if len(words) == 1 and words[0] not in ['help', 'ls', 'version']: - return 'help %s' % (line.strip(),) - return line - - def onecmd(self, line): - """Process a single command. - - Runs a single command, and stores the return code in - self._last_return_code. Always returns False unless the command - was EOF. - - Args: - line: (str) Command line to process. - - Returns: - A bool signaling whether or not the command loop should terminate. - """ - try: - self._last_return_code = cmd.Cmd.onecmd(self, line) - except CommandLoop.TerminateSignal: - return True - except BaseException as e: - name = line.split(' ')[0] - print 'Error running %s:' % name - print e - self._last_return_code = 1 - return False - - def get_names(self): - names = dir(self) - commands = (name for name in self._commands - if name not in self._special_command_names) - names.extend('do_%s' % (name,) for name in commands) - names.remove('do_EOF') - return names - - def do_help(self, command_name): - """Print the help for command_name (if present) or general help.""" - - # TODO(craigcitro): Add command-specific flags. 
- def FormatOneCmd(name, command, command_names): - indent_size = appcommands.GetMaxCommandLength() + 3 - if len(command_names) > 1: - indent = ' ' * indent_size - command_help = flags.TextWrap( - command.CommandGetHelp('', cmd_names=command_names), - indent=indent, - firstline_indent='') - first_help_line, _, rest = command_help.partition('\n') - first_line = '%-*s%s' % (indent_size, name + ':', first_help_line) - return '\n'.join((first_line, rest)) - else: - default_indent = ' ' - return '\n' + flags.TextWrap( - command.CommandGetHelp('', cmd_names=command_names), - indent=default_indent, - firstline_indent=default_indent) + '\n' - - if not command_name: - print '\nHelp for commands:\n' - command_names = list(self._commands) - print '\n\n'.join( - FormatOneCmd(name, command, command_names) - for name, command in self._commands.iteritems() - if name not in self._special_command_names) - print - elif command_name in self._commands: - print FormatOneCmd(command_name, self._commands[command_name], - command_names=[command_name]) - return 0 - - def postcmd(self, stop, line): - return bool(stop) or line == 'EOF' -# pylint: enable=g-bad-name - - -class Repl(NewCmd): - """Start an interactive session.""" - PROMPT = '> ' - - def __init__(self, name, fv): - super(Repl, self).__init__(name, fv) - self.surface_in_shell = False - flags.DEFINE_string( - 'prompt', '', - 'Prompt to use for interactive shell.', - flag_values=fv) - - def RunWithArgs(self): - """Start an interactive session.""" - prompt = FLAGS.prompt or self.PROMPT - repl = CommandLoop(appcommands.GetCommandList(), prompt=prompt) - print 'Welcome! (Type help for more information.)' - while True: - try: - repl.cmdloop() - break - except KeyboardInterrupt: - print - return repl.last_return_code diff --git a/_gcloud_vendor/apitools/base/py/base_api.py b/_gcloud_vendor/apitools/base/py/base_api.py deleted file mode 100644 index 2ee5fcf233d5..000000000000 --- a/_gcloud_vendor/apitools/base/py/base_api.py +++ /dev/null @@ -1,583 +0,0 @@ -#!/usr/bin/env python -"""Base class for api services.""" - -import contextlib -import datetime -import httplib -import logging -import pprint -import types -import urllib -import urlparse - - -from protorpc import message_types -from protorpc import messages - -from apitools.base.py import credentials_lib -from apitools.base.py import encoding -from apitools.base.py import exceptions -from apitools.base.py import http_wrapper -from apitools.base.py import util - -__all__ = [ - 'ApiMethodInfo', - 'ApiUploadInfo', - 'BaseApiClient', - 'BaseApiService', - 'NormalizeApiEndpoint', -] - -# TODO(craigcitro): Remove this once we quiet the spurious logging in -# oauth2client (or drop oauth2client). -logging.getLogger('oauth2client.util').setLevel(logging.ERROR) - -_MAX_URL_LENGTH = 2048 - - -class ApiUploadInfo(messages.Message): - """Media upload information for a method. - - Fields: - accept: (repeated) MIME Media Ranges for acceptable media uploads - to this method. - max_size: (integer) Maximum size of a media upload, such as 3MB - or 1TB (converted to an integer). - resumable_path: Path to use for resumable uploads. - resumable_multipart: (boolean) Whether or not the resumable endpoint - supports multipart uploads. - simple_path: Path to use for simple uploads. - simple_multipart: (boolean) Whether or not the simple endpoint - supports multipart uploads. 
- """ - accept = messages.StringField(1, repeated=True) - max_size = messages.IntegerField(2) - resumable_path = messages.StringField(3) - resumable_multipart = messages.BooleanField(4) - simple_path = messages.StringField(5) - simple_multipart = messages.BooleanField(6) - - -class ApiMethodInfo(messages.Message): - """Configuration info for an API method. - - All fields are strings unless noted otherwise. - - Fields: - relative_path: Relative path for this method. - method_id: ID for this method. - http_method: HTTP verb to use for this method. - path_params: (repeated) path parameters for this method. - query_params: (repeated) query parameters for this method. - ordered_params: (repeated) ordered list of parameters for - this method. - description: description of this method. - request_type_name: name of the request type. - response_type_name: name of the response type. - request_field: if not null, the field to pass as the body - of this POST request. may also be the REQUEST_IS_BODY - value below to indicate the whole message is the body. - upload_config: (ApiUploadInfo) Information about the upload - configuration supported by this method. - supports_download: (boolean) If True, this method supports - downloading the request via the `alt=media` query - parameter. - """ - - relative_path = messages.StringField(1) - method_id = messages.StringField(2) - http_method = messages.StringField(3) - path_params = messages.StringField(4, repeated=True) - query_params = messages.StringField(5, repeated=True) - ordered_params = messages.StringField(6, repeated=True) - description = messages.StringField(7) - request_type_name = messages.StringField(8) - response_type_name = messages.StringField(9) - request_field = messages.StringField(10, default='') - upload_config = messages.MessageField(ApiUploadInfo, 11) - supports_download = messages.BooleanField(12, default=False) -REQUEST_IS_BODY = '' - - -def _LoadClass(name, messages_module): - if name.startswith('message_types.'): - _, _, classname = name.partition('.') - return getattr(message_types, classname) - elif '.' not in name: - return getattr(messages_module, name) - else: - raise exceptions.GeneratedClientError('Unknown class %s' % name) - - -def _RequireClassAttrs(obj, attrs): - for attr in attrs: - attr_name = attr.upper() - if not hasattr(obj, '%s' % attr_name) or not getattr(obj, attr_name): - msg = 'No %s specified for object of class %s.' 
% ( - attr_name, type(obj).__name__) - raise exceptions.GeneratedClientError(msg) - - -def NormalizeApiEndpoint(api_endpoint): - if not api_endpoint.endswith('/'): - api_endpoint += '/' - return api_endpoint - - -class _UrlBuilder(object): - """Convenient container for url data.""" - - def __init__(self, base_url, relative_path=None, query_params=None): - components = urlparse.urlsplit(urlparse.urljoin( - base_url, relative_path or '')) - if components.fragment: - raise exceptions.ConfigurationValueError( - 'Unexpected url fragment: %s' % components.fragment) - self.query_params = urlparse.parse_qs(components.query or '') - if query_params is not None: - self.query_params.update(query_params) - self.__scheme = components.scheme - self.__netloc = components.netloc - self.relative_path = components.path or '' - - @classmethod - def FromUrl(cls, url): - urlparts = urlparse.urlsplit(url) - query_params = urlparse.parse_qs(urlparts.query) - base_url = urlparse.urlunsplit(( - urlparts.scheme, urlparts.netloc, '', None, None)) - relative_path = urlparts.path or '' - return cls(base_url, relative_path=relative_path, query_params=query_params) - - @property - def base_url(self): - return urlparse.urlunsplit((self.__scheme, self.__netloc, '', '', '')) - - @base_url.setter - def base_url(self, value): - components = urlparse.urlsplit(value) - if components.path or components.query or components.fragment: - raise exceptions.ConfigurationValueError('Invalid base url: %s' % value) - self.__scheme = components.scheme - self.__netloc = components.netloc - - @property - def query(self): - # TODO(craigcitro): In the case that some of the query params are - # non-ASCII, we may silently fail to encode correctly. We should - # figure out who is responsible for owning the object -> str - # conversion. - return urllib.urlencode(self.query_params, doseq=True) - - @property - def url(self): - if '{' in self.relative_path or '}' in self.relative_path: - raise exceptions.ConfigurationValueError( - 'Cannot create url with relative path %s' % self.relative_path) - return urlparse.urlunsplit(( - self.__scheme, self.__netloc, self.relative_path, self.query, '')) - - -class BaseApiClient(object): - """Base class for client libraries.""" - MESSAGES_MODULE = None - - _API_KEY = '' - _CLIENT_ID = '' - _CLIENT_SECRET = '' - _PACKAGE = '' - _SCOPES = [] - _USER_AGENT = '' - - def __init__(self, url, credentials=None, get_credentials=True, http=None, - model=None, log_request=False, log_response=False, num_retries=5, - credentials_args=None, default_global_params=None, - additional_http_headers=None): - _RequireClassAttrs(self, ('_package', '_scopes', 'messages_module')) - if default_global_params is not None: - util.Typecheck(default_global_params, self.params_type) - self.__default_global_params = default_global_params - self.log_request = log_request - self.log_response = log_response - self.__num_retries = 5 - # We let the @property machinery below do our validation. - self.num_retries = num_retries - self._credentials = credentials - if get_credentials and not credentials: - credentials_args = credentials_args or {} - self._SetCredentials(**credentials_args) - self._url = NormalizeApiEndpoint(url) - self._http = http or http_wrapper.GetHttp() - # Note that "no credentials" is totally possible. - if self._credentials is not None: - self._http = self._credentials.authorize(self._http) - # TODO(craigcitro): Remove this field when we switch to proto2. 
- self.__include_fields = None - - self.additional_http_headers = additional_http_headers or {} - - # TODO(craigcitro): Finish deprecating these fields. - _ = model - - self.__response_type_model = 'proto' - - def _SetCredentials(self, **kwds): - """Fetch credentials, and set them for this client. - - Note that we can't simply return credentials, since creating them - may involve side-effecting self. - - Args: - **kwds: Additional keyword arguments are passed on to GetCredentials. - - Returns: - None. Sets self._credentials. - """ - args = { - 'api_key': self._API_KEY, - 'client': self, - 'client_id': self._CLIENT_ID, - 'client_secret': self._CLIENT_SECRET, - 'package_name': self._PACKAGE, - 'scopes': self._SCOPES, - 'user_agent': self._USER_AGENT, - } - args.update(kwds) - # TODO(craigcitro): It's a bit dangerous to pass this - # still-half-initialized self into this method, but we might need - # to set attributes on it associated with our credentials. - # Consider another way around this (maybe a callback?) and whether - # or not it's worth it. - self._credentials = credentials_lib.GetCredentials(**args) - - @classmethod - def ClientInfo(cls): - return { - 'client_id': cls._CLIENT_ID, - 'client_secret': cls._CLIENT_SECRET, - 'scope': ' '.join(sorted(util.NormalizeScopes(cls._SCOPES))), - 'user_agent': cls._USER_AGENT, - } - - @property - def base_model_class(self): - return None - - @property - def http(self): - return self._http - - @property - def url(self): - return self._url - - @classmethod - def GetScopes(cls): - return cls._SCOPES - - @property - def params_type(self): - return _LoadClass('StandardQueryParameters', self.MESSAGES_MODULE) - - @property - def user_agent(self): - return self._USER_AGENT - - @property - def _default_global_params(self): - if self.__default_global_params is None: - self.__default_global_params = self.params_type() - return self.__default_global_params - - def AddGlobalParam(self, name, value): - params = self._default_global_params - setattr(params, name, value) - - @property - def global_params(self): - return encoding.CopyProtoMessage(self._default_global_params) - - @contextlib.contextmanager - def IncludeFields(self, include_fields): - self.__include_fields = include_fields - yield - self.__include_fields = None - - @property - def response_type_model(self): - return self.__response_type_model - - @contextlib.contextmanager - def JsonResponseModel(self): - """In this context, return raw JSON instead of proto.""" - old_model = self.response_type_model - self.__response_type_model = 'json' - yield - self.__response_type_model = old_model - - @property - def num_retries(self): - return self.__num_retries - - @num_retries.setter - def num_retries(self, value): - util.Typecheck(value, (int, long)) - if value < 0: - raise exceptions.InvalidDataError( - 'Cannot have negative value for num_retries') - self.__num_retries = value - - @contextlib.contextmanager - def WithRetries(self, num_retries): - old_num_retries = self.num_retries - self.num_retries = num_retries - yield - self.num_retries = old_num_retries - - def ProcessRequest(self, method_config, request): - """Hook for pre-processing of requests.""" - if self.log_request: - logging.info( - 'Calling method %s with %s: %s', method_config.method_id, - method_config.request_type_name, request) - return request - - def ProcessHttpRequest(self, http_request): - """Hook for pre-processing of http requests.""" - http_request.headers.update(self.additional_http_headers) - if self.log_request: - 
logging.info('Making http %s to %s', - http_request.http_method, http_request.url) - logging.info('Headers: %s', pprint.pformat(http_request.headers)) - if http_request.body: - # TODO(craigcitro): Make this safe to print in the case of - # non-printable body characters. - logging.info('Body:\n%s', http_request.body) - else: - logging.info('Body: (none)') - return http_request - - def ProcessResponse(self, method_config, response): - if self.log_response: - logging.info('Response of type %s: %s', - method_config.response_type_name, response) - return response - - # TODO(craigcitro): Decide where these two functions should live. - def SerializeMessage(self, message): - return encoding.MessageToJson(message, include_fields=self.__include_fields) - - def DeserializeMessage(self, response_type, data): - """Deserialize the given data as method_config.response_type.""" - try: - message = encoding.JsonToMessage(response_type, data) - except (exceptions.InvalidDataFromServerError, - messages.ValidationError) as e: - raise exceptions.InvalidDataFromServerError( - 'Error decoding response "%s" as type %s: %s' % ( - data, response_type.__name__, e)) - return message - - def FinalizeTransferUrl(self, url): - """Modify the url for a given transfer, based on auth and version.""" - url_builder = _UrlBuilder.FromUrl(url) - if self.global_params.key: - url_builder.query_params['key'] = self.global_params.key - return url_builder.url - - -class BaseApiService(object): - """Base class for generated API services.""" - - def __init__(self, client): - self.__client = client - self._method_configs = {} - self._upload_configs = {} - - @property - def _client(self): - return self.__client - - @property - def client(self): - return self.__client - - def GetMethodConfig(self, method): - return self._method_configs[method] - - def GetUploadConfig(self, method): - return self._upload_configs.get(method) - - def GetRequestType(self, method): - method_config = self.GetMethodConfig(method) - return getattr(self.client.MESSAGES_MODULE, - method_config.request_type_name) - - def GetResponseType(self, method): - method_config = self.GetMethodConfig(method) - return getattr(self.client.MESSAGES_MODULE, - method_config.response_type_name) - - def __CombineGlobalParams(self, global_params, default_params): - util.Typecheck(global_params, (types.NoneType, self.__client.params_type)) - result = self.__client.params_type() - global_params = global_params or self.__client.params_type() - for field in result.all_fields(): - value = (global_params.get_assigned_value(field.name) or - default_params.get_assigned_value(field.name)) - if value not in (None, [], ()): - setattr(result, field.name, value) - return result - - def __ConstructQueryParams(self, query_params, request, global_params): - """Construct a dictionary of query parameters for this request.""" - global_params = self.__CombineGlobalParams( - global_params, self.__client.global_params) - query_info = dict((field.name, getattr(global_params, field.name)) - for field in self.__client.params_type.all_fields()) - query_info.update( - (param, getattr(request, param, None)) for param in query_params) - query_info = dict((k, v) for k, v in query_info.iteritems() - if v is not None) - for k, v in query_info.iteritems(): - if isinstance(v, unicode): - query_info[k] = v.encode('utf8') - elif isinstance(v, str): - query_info[k] = v.decode('utf8') - elif isinstance(v, datetime.datetime): - query_info[k] = v.isoformat() - return query_info - - def __ConstructRelativePath(self, 
method_config, request, relative_path=None): - """Determine the relative path for request.""" - params = dict([(param, getattr(request, param, None)) - for param in method_config.path_params]) - return util.ExpandRelativePath(method_config, params, - relative_path=relative_path) - - def __FinalizeRequest(self, http_request, url_builder): - """Make any final general adjustments to the request.""" - if (http_request.http_method == 'GET' and - len(http_request.url) > _MAX_URL_LENGTH): - http_request.http_method = 'POST' - http_request.headers['x-http-method-override'] = 'GET' - http_request.headers['content-type'] = 'application/x-www-form-urlencoded' - http_request.body = url_builder.query - url_builder.query_params = {} - http_request.url = url_builder.url - - def __ProcessHttpResponse(self, method_config, http_response): - """Process the given http response.""" - if http_response.status_code not in (httplib.OK, httplib.NO_CONTENT): - raise exceptions.HttpError.FromResponse(http_response) - if http_response.status_code == httplib.NO_CONTENT: - # TODO(craigcitro): Find out why _replace doesn't seem to work here. - http_response = http_wrapper.Response( - info=http_response.info, content='{}', - request_url=http_response.request_url) - if self.__client.response_type_model == 'json': - return http_response.content - else: - response_type = _LoadClass( - method_config.response_type_name, self.__client.MESSAGES_MODULE) - return self.__client.DeserializeMessage( - response_type, http_response.content) - - def __SetBaseHeaders(self, http_request, client): - """Fill in the basic headers on http_request.""" - # TODO(craigcitro): Make the default a little better here, and - # include the apitools version. - user_agent = client.user_agent or 'apitools-client/1.0' - http_request.headers['user-agent'] = user_agent - http_request.headers['accept'] = 'application/json' - http_request.headers['accept-encoding'] = 'gzip, deflate' - - def __SetBody(self, http_request, method_config, request, upload): - """Fill in the body on http_request.""" - if not method_config.request_field: - return - - request_type = _LoadClass( - method_config.request_type_name, self.__client.MESSAGES_MODULE) - if method_config.request_field == REQUEST_IS_BODY: - body_value = request - body_type = request_type - else: - body_value = getattr(request, method_config.request_field) - body_field = request_type.field_by_name(method_config.request_field) - util.Typecheck(body_field, messages.MessageField) - body_type = body_field.type - - if upload and not body_value: - # We're going to fill in the body later. 
- return - util.Typecheck(body_value, body_type) - http_request.headers['content-type'] = 'application/json' - http_request.body = self.__client.SerializeMessage(body_value) - - def PrepareHttpRequest(self, method_config, request, global_params=None, - upload=None, upload_config=None, download=None): - """Prepares an HTTP request to be sent.""" - request_type = _LoadClass( - method_config.request_type_name, self.__client.MESSAGES_MODULE) - util.Typecheck(request, request_type) - request = self.__client.ProcessRequest(method_config, request) - - http_request = http_wrapper.Request(http_method=method_config.http_method) - self.__SetBaseHeaders(http_request, self.__client) - self.__SetBody(http_request, method_config, request, upload) - - url_builder = _UrlBuilder( - self.__client.url, relative_path=method_config.relative_path) - url_builder.query_params = self.__ConstructQueryParams( - method_config.query_params, request, global_params) - - # It's important that upload and download go before we fill in the - # relative path, so that they can replace it. - if upload is not None: - upload.ConfigureRequest(upload_config, http_request, url_builder) - if download is not None: - download.ConfigureRequest(http_request, url_builder) - - url_builder.relative_path = self.__ConstructRelativePath( - method_config, request, relative_path=url_builder.relative_path) - self.__FinalizeRequest(http_request, url_builder) - - return self.__client.ProcessHttpRequest(http_request) - - def _RunMethod(self, method_config, request, global_params=None, - upload=None, upload_config=None, download=None): - """Call this method with request.""" - if upload is not None and download is not None: - # TODO(craigcitro): This just involves refactoring the logic - # below into callbacks that we can pass around; in particular, - # the order should be that the upload gets the initial request, - # and then passes its reply to a download if one exists, and - # then that goes to ProcessResponse and is returned. - raise exceptions.NotYetImplementedError( - 'Cannot yet use both upload and download at once') - - http_request = self.PrepareHttpRequest( - method_config, request, global_params, upload, upload_config, download) - - # TODO(craigcitro): Make num_retries customizable on Transfer - # objects, and pass in self.__client.num_retries when initializing - # an upload or download. 
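
# WithRetries and JsonResponseModel above follow one pattern: a
# contextlib context manager that swaps an attribute and restores it on
# exit. A standalone rendering of that pattern (class name invented):
import contextlib

class Client(object):
    def __init__(self):
        self.num_retries = 5

    @contextlib.contextmanager
    def WithRetries(self, num_retries):
        old_num_retries = self.num_retries
        self.num_retries = num_retries
        yield
        # Like the original, this restores only on a clean exit.
        self.num_retries = old_num_retries

client = Client()
with client.WithRetries(0):
    print client.num_retries  # 0
print client.num_retries      # back to 5
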
- if download is not None: - download.InitializeDownload(http_request, client=self.client) - return - - http_response = None - if upload is not None: - http_response = upload.InitializeUpload(http_request, client=self.client) - if http_response is None: - http_response = http_wrapper.MakeRequest( - self.__client.http, http_request, retries=self.__client.num_retries) - - return self.ProcessHttpResponse(method_config, http_response) - - def ProcessHttpResponse(self, method_config, http_response): - """Convert an HTTP response to the expected message type.""" - return self.__client.ProcessResponse( - method_config, - self.__ProcessHttpResponse(method_config, http_response)) diff --git a/_gcloud_vendor/apitools/base/py/base_api_test.py b/_gcloud_vendor/apitools/base/py/base_api_test.py deleted file mode 100644 index 6d43dead1d91..000000000000 --- a/_gcloud_vendor/apitools/base/py/base_api_test.py +++ /dev/null @@ -1,113 +0,0 @@ -#!/usr/bin/env python - - -import datetime -import sys -import urllib - -from protorpc import message_types -from protorpc import messages - -import unittest2 - -from apitools.base.py import base_api -from apitools.base.py import http_wrapper - - -class SimpleMessage(messages.Message): - field = messages.StringField(1) - - -class MessageWithTime(messages.Message): - timestamp = message_types.DateTimeField(1) - - -class StandardQueryParameters(messages.Message): - field = messages.StringField(1) - - -class FakeCredentials(object): - - def authorize(self, _): # pylint: disable=invalid-name - return None - - -class FakeClient(base_api.BaseApiClient): - MESSAGES_MODULE = sys.modules[__name__] - _PACKAGE = 'package' - _SCOPES = ['scope1'] - _CLIENT_ID = 'client_id' - _CLIENT_SECRET = 'client_secret' - - -class FakeService(base_api.BaseApiService): - - def __init__(self, client=None): - client = client or FakeClient( - 'http://www.example.com/', credentials=FakeCredentials()) - super(FakeService, self).__init__(client) - - -class BaseApiTest(unittest2.TestCase): - - def __GetFakeClient(self): - return FakeClient('', credentials=FakeCredentials()) - - def testUrlNormalization(self): - client = FakeClient('http://www.googleapis.com', get_credentials=False) - self.assertTrue(client.url.endswith('/')) - - def testNoCredentials(self): - client = FakeClient('', get_credentials=False) - self.assertIsNotNone(client) - self.assertIsNone(client._credentials) - - def testIncludeEmptyFieldsClient(self): - msg = SimpleMessage() - client = self.__GetFakeClient() - self.assertEqual('{}', client.SerializeMessage(msg)) - with client.IncludeFields(('field',)): - self.assertEqual('{"field": null}', client.SerializeMessage(msg)) - - def testJsonResponse(self): - method_config = base_api.ApiMethodInfo(response_type_name='SimpleMessage') - service = FakeService() - http_response = http_wrapper.Response( - info={'status': '200'}, content='{"field": "abc"}', - request_url='http://www.google.com') - response_message = SimpleMessage(field='abc') - self.assertEqual(response_message, service.ProcessHttpResponse( - method_config, http_response)) - with service.client.JsonResponseModel(): - self.assertEqual(http_response.content, service.ProcessHttpResponse( - method_config, http_response)) - - def testAdditionalHeaders(self): - additional_headers = {'Request-Is-Awesome': '1'} - client = self.__GetFakeClient() - - # No headers to start - http_request = http_wrapper.Request('http://www.example.com') - new_request = client.ProcessHttpRequest(http_request) - self.assertFalse('Request-Is-Awesome' in 
new_request.headers) - - # Add a new header and ensure it's added to the request. - client.additional_http_headers = additional_headers - http_request = http_wrapper.Request('http://www.example.com') - new_request = client.ProcessHttpRequest(http_request) - self.assertTrue('Request-Is-Awesome' in new_request.headers) - - def testQueryEncoding(self): - method_config = base_api.ApiMethodInfo( - request_type_name='MessageWithTime', query_params=['timestamp']) - service = FakeService() - request = MessageWithTime( - timestamp=datetime.datetime(2014, 10, 07, 12, 53, 13)) - http_request = service.PrepareHttpRequest(method_config, request) - - url_timestamp = urllib.quote(request.timestamp.isoformat()) - self.assertTrue(http_request.url.endswith(url_timestamp)) - - -if __name__ == '__main__': - unittest2.main() diff --git a/_gcloud_vendor/apitools/base/py/base_cli.py b/_gcloud_vendor/apitools/base/py/base_cli.py deleted file mode 100644 index f9d7d1a40058..000000000000 --- a/_gcloud_vendor/apitools/base/py/base_cli.py +++ /dev/null @@ -1,151 +0,0 @@ -#!/usr/bin/env python -"""Base script for generated CLI.""" - - -import atexit -import code -import logging -import os -import readline -import rlcompleter -import sys - -from google.apputils import appcommands -import gflags as flags - -from apitools.base.py import encoding -from apitools.base.py import exceptions - -__all__ = [ - 'ConsoleWithReadline', - 'DeclareBaseFlags', - 'FormatOutput', - 'SetupLogger', - 'run_main', -] - - -# TODO(craigcitro): We should move all the flags for the -# StandardQueryParameters into this file, so that they can be used -# elsewhere easily. - -_BASE_FLAGS_DECLARED = False -_OUTPUT_FORMATTER_MAP = { - 'protorpc': lambda x: x, - 'json': encoding.MessageToJson, -} - - -def DeclareBaseFlags(): - """Declare base flags for all CLIs.""" - # TODO(craigcitro): FlagValidators? - global _BASE_FLAGS_DECLARED - if _BASE_FLAGS_DECLARED: - return - flags.DEFINE_boolean( - 'log_request', False, - 'Log requests.') - flags.DEFINE_boolean( - 'log_response', False, - 'Log responses.') - flags.DEFINE_boolean( - 'log_request_response', False, - 'Log requests and responses.') - flags.DEFINE_enum( - 'output_format', - 'protorpc', - _OUTPUT_FORMATTER_MAP.viewkeys(), - 'Display format for results.') - - _BASE_FLAGS_DECLARED = True - -# NOTE: This is specified here so that it can be read by other files -# without depending on the flag to be registered. 
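
# The flag handling above keys output formatting off a plain dispatch
# dict. The same pattern in isolation, with json.dumps standing in for
# encoding.MessageToJson:
import json

_FORMATTERS = {
    'protorpc': lambda x: x,
    'json': json.dumps,
}

def format_output(message, output_format='protorpc'):
    formatter = _FORMATTERS.get(output_format)
    if formatter is None:
        raise ValueError('Unknown output format: %s' % output_format)
    return formatter(message)

print format_output({'field': 'abc'}, output_format='json')
# {"field": "abc"}
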
-TRACE_HELP = ( - 'A tracing token of the form "token:" ' - 'to include in api requests.') -FLAGS = flags.FLAGS - - -def SetupLogger(): - if FLAGS.log_request or FLAGS.log_response or FLAGS.log_request_response: - logging.basicConfig() - logging.getLogger().setLevel(logging.INFO) - - -def FormatOutput(message, output_format=None): - """Convert the output to the user-specified format.""" - output_format = output_format or FLAGS.output_format - formatter = _OUTPUT_FORMATTER_MAP.get(FLAGS.output_format) - if formatter is None: - raise exceptions.UserError('Unknown output format: %s' % output_format) - return formatter(message) - - -class _SmartCompleter(rlcompleter.Completer): - - def _callable_postfix(self, val, word): - if ('(' in readline.get_line_buffer() or - not callable(val)): - return word - else: - return word + '(' - - def complete(self, text, state): - if not readline.get_line_buffer().strip(): - if not state: - return ' ' - else: - return None - return rlcompleter.Completer.complete(self, text, state) - - -class ConsoleWithReadline(code.InteractiveConsole): - """InteractiveConsole with readline, tab completion, and history.""" - - def __init__(self, env, filename='', histfile=None): - new_locals = dict(env) - new_locals.update({ - '_SmartCompleter': _SmartCompleter, - 'readline': readline, - 'rlcompleter': rlcompleter, - }) - code.InteractiveConsole.__init__(self, new_locals, filename) - readline.parse_and_bind('tab: complete') - readline.set_completer(_SmartCompleter(new_locals).complete) - if histfile is not None: - histfile = os.path.expanduser(histfile) - if os.path.exists(histfile): - readline.read_history_file(histfile) - atexit.register(lambda: readline.write_history_file(histfile)) - - -def run_main(): - """Function to be used as setuptools script entry point. - - Appcommands assumes that it always runs as __main__, but launching - via a setuptools-generated entry_point breaks this rule. We do some - trickery here to make sure that appcommands and flags find their - state where they expect to by faking ourselves as __main__. - """ - - # Put the flags for this module somewhere the flags module will look - # for them. - # pylint: disable=protected-access - new_name = flags._GetMainModule() - sys.modules[new_name] = sys.modules['__main__'] - for flag in FLAGS.FlagsByModuleDict().get(__name__, []): - FLAGS._RegisterFlagByModule(new_name, flag) - for key_flag in FLAGS.KeyFlagsByModuleDict().get(__name__, []): - FLAGS._RegisterKeyFlagForModule(new_name, key_flag) - # pylint: enable=protected-access - - # Now set __main__ appropriately so that appcommands will be - # happy. 
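
# A standalone sketch of the readline wiring ConsoleWithReadline does
# above: tab completion plus a history file that persists across runs.
# The history path here is illustrative.
import atexit
import code
import os
import readline
import rlcompleter

env = {'readline': readline, 'rlcompleter': rlcompleter}
readline.parse_and_bind('tab: complete')
readline.set_completer(rlcompleter.Completer(env).complete)
histfile = os.path.expanduser('~/.example_history')
if os.path.exists(histfile):
    readline.read_history_file(histfile)
atexit.register(lambda: readline.write_history_file(histfile))
code.interact(local=env)
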
- sys.modules['__main__'] = sys.modules[__name__] - appcommands.Run() - sys.modules['__main__'] = sys.modules.pop(new_name) - - -if __name__ == '__main__': - appcommands.Run() diff --git a/_gcloud_vendor/apitools/base/py/batch.py b/_gcloud_vendor/apitools/base/py/batch.py deleted file mode 100644 index eaf5eba2ea57..000000000000 --- a/_gcloud_vendor/apitools/base/py/batch.py +++ /dev/null @@ -1,441 +0,0 @@ -#!/usr/bin/env python -"""Library for handling batch HTTP requests for apitools.""" - -import collections -import email.generator as generator -import email.mime.multipart as mime_multipart -import email.mime.nonmultipart as mime_nonmultipart -import email.parser as email_parser -import httplib -import itertools -import StringIO -import time -import urllib -import urlparse -import uuid - -from apitools.base.py import exceptions -from apitools.base.py import http_wrapper - -__all__ = [ - 'BatchApiRequest', -] - - -class RequestResponseAndHandler(collections.namedtuple( - 'RequestResponseAndHandler', ['request', 'response', 'handler'])): - """Container for data related to completing an HTTP request. - - This contains an HTTP request, its response, and a callback for handling - the response from the server. - - Attributes: - request: An http_wrapper.Request object representing the HTTP request. - response: The http_wrapper.Response object returned from the server. - handler: A callback function accepting two arguments, response - and exception. Response is an http_wrapper.Response object, and - exception is an apiclient.errors.HttpError object if an error - occurred, or otherwise None. - """ - - -class BatchApiRequest(object): - - class ApiCall(object): - """Holds request and response information for each request. - - ApiCalls are ultimately exposed to the client once the HTTP batch request - has been completed. - - Attributes: - http_request: A client-supplied http_wrapper.Request to be - submitted to the server. - response: A http_wrapper.Response object given by the server as a - response to the user request, or None if an error occurred. - exception: An apiclient.errors.HttpError object if an error - occurred, or None. - """ - - def __init__(self, request, retryable_codes, service, method_config): - """Initialize an individual API request. - - Args: - request: An http_wrapper.Request object. - retryable_codes: A list of integer HTTP codes that can be retried. - service: A service inheriting from base_api.BaseApiService. - method_config: Method config for the desired API request. - """ - self.__retryable_codes = list( - set(retryable_codes + [httplib.UNAUTHORIZED])) - self.__http_response = None - self.__service = service - self.__method_config = method_config - - self.http_request = request - # TODO(user): Add some validation to these fields. - self.__response = None - self.__exception = None - - @property - def is_error(self): - return self.exception is not None - - @property - def response(self): - return self.__response - - @property - def exception(self): - return self.__exception - - @property - def authorization_failed(self): - return (self.__http_response and ( - self.__http_response.status_code == httplib.UNAUTHORIZED)) - - @property - def terminal_state(self): - return (self.__http_response and ( - self.__http_response.status_code not in self.__retryable_codes)) - - def HandleResponse(self, http_response, exception): - """Handles an incoming http response to the request in http_request. - - This is intended to be used as a callback function for - BatchHttpRequest.Add. 
- - Args: - http_response: Deserialized http_wrapper.Response object. - exception: apiclient.errors.HttpError object if an error occurred. - """ - self.__http_response = http_response - self.__exception = exception - if self.terminal_state and not self.__exception: - self.__response = self.__service.ProcessHttpResponse( - self.__method_config, self.__http_response) - - def __init__(self, batch_url=None, retryable_codes=None): - """Initialize a batch API request object. - - Args: - batch_url: Base URL for batch API calls. - retryable_codes: A list of integer HTTP codes that can be retried. - """ - self.api_requests = [] - self.retryable_codes = retryable_codes or [] - self.batch_url = batch_url or 'https://www.googleapis.com/batch' - - def Add(self, service, method, request, global_params=None): - """Add a request to the batch. - - Args: - service: A class inheriting base_api.BaseApiService. - method: A string indicated desired method from the service. See - the example in the class docstring. - request: An input message appropriate for the specified service.method. - global_params: Optional additional parameters to pass into - method.PrepareHttpRequest. - - Returns: - None - """ - # Retrieve the configs for the desired method and service. - method_config = service.GetMethodConfig(method) - upload_config = service.GetUploadConfig(method) - - # Prepare the HTTP Request. - http_request = service.PrepareHttpRequest( - method_config, request, global_params=global_params, - upload_config=upload_config) - - # Create the request and add it to our master list. - api_request = self.ApiCall( - http_request, self.retryable_codes, service, method_config) - self.api_requests.append(api_request) - - def Execute(self, http, sleep_between_polls=5, max_retries=5): - """Execute all of the requests in the batch. - - Args: - http: httplib2.Http object for use in the request. - sleep_between_polls: Integer number of seconds to sleep between polls. - max_retries: Max retries. Any requests that have not succeeded by - this number of retries simply report the last response or - exception, whatever it happened to be. - - Returns: - List of ApiCalls. - """ - requests = [request for request in self.api_requests if not - request.terminal_state] - - for attempt in xrange(max_retries): - if attempt: - time.sleep(sleep_between_polls) - - # Create a batch_http_request object and populate it with incomplete - # requests. - batch_http_request = BatchHttpRequest(batch_url=self.batch_url) - for request in requests: - batch_http_request.Add(request.http_request, request.HandleResponse) - batch_http_request.Execute(http) - - # Collect retryable requests. - requests = [request for request in self.api_requests if not - request.terminal_state] - - if (any(request.authorization_failed for request in requests) - and hasattr(http.request, 'credentials')): - http.request.credentials.refresh(http) - - if not requests: - break - - return self.api_requests - - -class BatchHttpRequest(object): - """Batches multiple http_wrapper.Request objects into a single request.""" - - def __init__(self, batch_url, callback=None): - """Constructor for a BatchHttpRequest. - - Args: - batch_url: URL to send batch requests to. - callback: A callback to be called for each response, of the - form callback(response, exception). The first parameter is - the deserialized Response object. The second is an - apiclient.errors.HttpError exception object if an HTTP error - occurred while processing the request, or None if no error occurred. 
- """ - # Endpoint to which these requests are sent. - self.__batch_url = batch_url - - # Global callback to be called for each individual response in the batch. - self.__callback = callback - - # List of requests, responses and handlers. - self.__request_response_handlers = {} - - # The last auto generated id. - self.__last_auto_id = itertools.count() - - # Unique ID on which to base the Content-ID headers. - self.__base_id = uuid.uuid4() - - def _ConvertIdToHeader(self, request_id): - """Convert an id to a Content-ID header value. - - Args: - request_id: String identifier for a individual request. - - Returns: - A Content-ID header with the id_ encoded into it. A UUID is prepended to - the value because Content-ID headers are supposed to be universally - unique. - """ - return '<%s+%s>' % (self.__base_id, urllib.quote(request_id)) - - @staticmethod - def _ConvertHeaderToId(header): - """Convert a Content-ID header value to an id. - - Presumes the Content-ID header conforms to the format that - _ConvertIdToHeader() returns. - - Args: - header: A string indicating the Content-ID header value. - - Returns: - The extracted id value. - - Raises: - BatchError if the header is not in the expected format. - """ - if not (header.startswith('<') or header.endswith('>')): - raise exceptions.BatchError('Invalid value for Content-ID: %s' % header) - if '+' not in header: - raise exceptions.BatchError('Invalid value for Content-ID: %s' % header) - _, request_id = header[1:-1].rsplit('+', 1) - - return urllib.unquote(request_id) - - def _SerializeRequest(self, request): - """Convert a http_wrapper.Request object into a string. - - Args: - request: A http_wrapper.Request to serialize. - - Returns: - The request as a string in application/http format. - """ - # Construct status line - parsed = urlparse.urlsplit(request.url) - request_line = urlparse.urlunsplit( - (None, None, parsed.path, parsed.query, None)) - status_line = request.http_method + ' ' + request_line + ' HTTP/1.1\n' - major, minor = request.headers.get( - 'content-type', 'application/json').split('/') - msg = mime_nonmultipart.MIMENonMultipart(major, minor) - - # MIMENonMultipart adds its own Content-Type header. - # Keep all of the other headers in headers. - for key, value in request.headers.iteritems(): - if key == 'content-type': - continue - msg[key] = value - - msg['Host'] = parsed.netloc - msg.set_unixfrom(None) - - if request.body is not None: - msg.set_payload(request.body) - - # Serialize the mime message. - str_io = StringIO.StringIO() - # maxheaderlen=0 means don't line wrap headers. - gen = generator.Generator(str_io, maxheaderlen=0) - gen.flatten(msg, unixfrom=False) - body = str_io.getvalue() - - # Strip off the \n\n that the MIME lib tacks onto the end of the payload. - if request.body is None: - body = body[:-2] - - return status_line.encode('utf-8') + body - - def _DeserializeResponse(self, payload): - """Convert string into Response and content. - - Args: - payload: Header and body string to be deserialized. - - Returns: - A Response object - """ - # Strip off the status line. - status_line, payload = payload.split('\n', 1) - _, status, _ = status_line.split(' ', 2) - - # Parse the rest of the response. - parser = email_parser.Parser() - msg = parser.parsestr(payload) - - # Get the headers. - info = dict(msg) - info['status'] = status - - # Create Response from the parsed headers. - content = msg.get_payload() - - return http_wrapper.Response(info, content, self.__batch_url) - - def _NewId(self): - """Create a new id. 
- - Auto incrementing number that avoids conflicts with ids already used. - - Returns: - A new unique id string. - """ - return str(self.__last_auto_id.next()) - - def Add(self, request, callback=None): - """Add a new request. - - Args: - request: A http_wrapper.Request to add to the batch. - callback: A callback to be called for this response, of the - form callback(response, exception). The first parameter is the - deserialized response object. The second is an - apiclient.errors.HttpError exception object if an HTTP error - occurred while processing the request, or None if no errors occurred. - - Returns: - None - """ - self.__request_response_handlers[self._NewId()] = RequestResponseAndHandler( - request, None, callback) - - def _Execute(self, http): - """Serialize batch request, send to server, process response. - - Args: - http: A httplib2.Http object to be used to make the request with. - - Raises: - httplib2.HttpLib2Error if a transport error has occured. - apiclient.errors.BatchError if the response is the wrong format. - """ - message = mime_multipart.MIMEMultipart('mixed') - # Message should not write out its own headers. - setattr(message, '_write_headers', lambda self: None) - - # Add all the individual requests. - for key in self.__request_response_handlers: - msg = mime_nonmultipart.MIMENonMultipart('application', 'http') - msg['Content-Transfer-Encoding'] = 'binary' - msg['Content-ID'] = self._ConvertIdToHeader(key) - - body = self._SerializeRequest( - self.__request_response_handlers[key].request) - msg.set_payload(body) - message.attach(msg) - - request = http_wrapper.Request(self.__batch_url, 'POST') - request.body = message.as_string() - request.headers['content-type'] = ( - 'multipart/mixed; boundary="%s"') % message.get_boundary() - - response = http_wrapper.MakeRequest(http, request) - - if response.status_code >= 300: - raise exceptions.HttpError.FromResponse(response) - - # Prepend with a content-type header so Parser can handle it. - header = 'content-type: %s\r\n\r\n' % response.info['content-type'] - - parser = email_parser.Parser() - mime_response = parser.parsestr(header + response.content) - - if not mime_response.is_multipart(): - raise exceptions.BatchError('Response not in multipart/mixed format.') - - for part in mime_response.get_payload(): - request_id = self._ConvertHeaderToId(part['Content-ID']) - response = self._DeserializeResponse(part.get_payload()) - - # Disable protected access because namedtuple._replace(...) - # is not actually meant to be protected. - self.__request_response_handlers[request_id] = ( - self.__request_response_handlers[request_id]._replace( # pylint: disable=protected-access - response=response)) - - def Execute(self, http): - """Execute all the requests as a single batched HTTP request. - - Args: - http: A httplib2.Http object to be used with the request. - - Returns: - None - - Raises: - BatchError if the response is the wrong format. 
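
# _ConvertIdToHeader/_ConvertHeaderToId above are a small quote-and-
# bracket scheme: a UUID plus the request id, '+'-separated inside
# angle brackets. A standalone roundtrip:
import urllib
import uuid

base_id = uuid.uuid4()
header = '<%s+%s>' % (base_id, urllib.quote('42'))
_, request_id = header[1:-1].rsplit('+', 1)
assert urllib.unquote(request_id) == '42'
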
- """ - - self._Execute(http) - - for key in self.__request_response_handlers: - response = self.__request_response_handlers[key].response - callback = self.__request_response_handlers[key].handler - - exception = None - - if response.status_code >= 300: - exception = exceptions.HttpError.FromResponse(response) - - if callback is not None: - callback(response, exception) - if self.__callback is not None: - self.__callback(response, exception) diff --git a/_gcloud_vendor/apitools/base/py/cli.py b/_gcloud_vendor/apitools/base/py/cli.py deleted file mode 100644 index b24470bb3d83..000000000000 --- a/_gcloud_vendor/apitools/base/py/cli.py +++ /dev/null @@ -1,13 +0,0 @@ -#!/usr/bin/env python -"""Top-level import for all CLI-related functionality in apitools. - -Note that importing this file will ultimately have side-effects, and -may require imports not available in all environments (such as App -Engine). In particular, picking up some readline-related imports can -cause pain. -""" - -# pylint:disable=wildcard-import - -from apitools.base.py.app2 import * -from apitools.base.py.base_cli import * diff --git a/_gcloud_vendor/apitools/base/py/credentials_lib.py b/_gcloud_vendor/apitools/base/py/credentials_lib.py deleted file mode 100644 index b4d660d70a31..000000000000 --- a/_gcloud_vendor/apitools/base/py/credentials_lib.py +++ /dev/null @@ -1,221 +0,0 @@ -#!/usr/bin/env python -"""Common credentials classes and constructors.""" - -import httplib -import json -import os -import urllib2 - - -import httplib2 -import oauth2client -import oauth2client.client -import oauth2client.gce -import oauth2client.multistore_file -import oauth2client.tools - -try: - from gflags import FLAGS -except ImportError: - FLAGS = None - -import logging - -from apitools.base.py import exceptions -from apitools.base.py import util - -__all__ = [ - 'CredentialsFromFile', - 'GaeAssertionCredentials', - 'GceAssertionCredentials', - 'GetCredentials', - 'ServiceAccountCredentials', - 'ServiceAccountCredentialsFromFile', -] - - - -# TODO(craigcitro): Expose the extra args here somewhere higher up, -# possibly as flags in the generated CLI. -def GetCredentials(package_name, scopes, client_id, client_secret, user_agent, - credentials_filename=None, - service_account_name=None, service_account_keyfile=None, - api_key=None, client=None): - """Attempt to get credentials, using an oauth dance as the last resort.""" - scopes = util.NormalizeScopes(scopes) - # TODO(craigcitro): Error checking. 
- client_info = { - 'client_id': client_id, - 'client_secret': client_secret, - 'scope': ' '.join(sorted(util.NormalizeScopes(scopes))), - 'user_agent': user_agent or '%s-generated/0.1' % package_name, - } - if service_account_name is not None: - credentials = ServiceAccountCredentialsFromFile( - service_account_name, service_account_keyfile, scopes) - if credentials is not None: - return credentials - credentials = GaeAssertionCredentials.Get(scopes) - if credentials is not None: - return credentials - credentials = GceAssertionCredentials.Get(scopes) - if credentials is not None: - return credentials - credentials_filename = credentials_filename or os.path.expanduser( - '~/.apitools.token') - credentials = CredentialsFromFile(credentials_filename, client_info) - if credentials is not None: - return credentials - raise exceptions.CredentialsError('Could not create valid credentials') - - -def ServiceAccountCredentialsFromFile( - service_account_name, private_key_filename, scopes): - with open(private_key_filename) as key_file: - return ServiceAccountCredentials( - service_account_name, key_file.read(), scopes) - - -def ServiceAccountCredentials(service_account_name, private_key, scopes): - scopes = util.NormalizeScopes(scopes) - return oauth2client.client.SignedJwtAssertionCredentials( - service_account_name, private_key, scopes) - - -# TODO(craigcitro): We override to add some utility code, and to -# update the old refresh implementation. Either push this code into -# oauth2client or drop oauth2client. -class GceAssertionCredentials(oauth2client.gce.AppAssertionCredentials): - """Assertion credentials for GCE instances.""" - - def __init__(self, scopes=None, service_account_name='default', **kwds): - if not util.DetectGce(): - raise exceptions.ResourceUnavailableError( - 'GCE credentials requested outside a GCE instance') - self.__service_account_name = service_account_name - if scopes: - scope_ls = util.NormalizeScopes(scopes) - instance_scopes = self.GetInstanceScopes() - if scope_ls > instance_scopes: - raise exceptions.CredentialsError( - 'Instance did not have access to scopes %s' % ( - sorted(list(scope_ls - instance_scopes)),)) - else: - scopes = self.GetInstanceScopes() - super(GceAssertionCredentials, self).__init__(scopes, **kwds) - - @classmethod - def Get(cls, *args, **kwds): - try: - return cls(*args, **kwds) - except exceptions.Error: - return None - - def GetInstanceScopes(self): - # Extra header requirement can be found here: - # https://developers.google.com/compute/docs/metadata - scopes_uri = ( - 'http://metadata.google.internal/computeMetadata/v1/instance/' - 'service-accounts/%s/scopes') % self.__service_account_name - additional_headers = {'X-Google-Metadata-Request': 'True'} - request = urllib2.Request(scopes_uri, headers=additional_headers) - try: - response = urllib2.urlopen(request) - except urllib2.URLError as e: - raise exceptions.CommunicationError( - 'Could not reach metadata service: %s' % e.reason) - return util.NormalizeScopes(scope.strip() for scope in response.readlines()) - - def _refresh(self, do_request): - """Refresh self.access_token. - - Args: - do_request: A function matching httplib2.Http.request's signature. 
- """ - token_uri = ( - 'http://metadata.google.internal/computeMetadata/v1beta1/instance/' - 'service-accounts/%s/token') % self.__service_account_name - extra_headers = {'X-Google-Metadata-Request': 'True'} - response, content = do_request(token_uri, headers=extra_headers) - if response.status != httplib.OK: - raise exceptions.CredentialsError( - 'Error refreshing credentials: %s' % content) - try: - credential_info = json.loads(content) - except ValueError: - raise exceptions.CredentialsError( - 'Invalid credentials response: %s' % content) - self.access_token = credential_info['access_token'] - - -# TODO(craigcitro): Currently, we can't even *load* -# `oauth2client.appengine` without being on appengine, because of how -# it handles imports. Fix that by splitting that module into -# GAE-specific and GAE-independent bits, and guarding imports. -class GaeAssertionCredentials(oauth2client.client.AssertionCredentials): - """Assertion credentials for Google App Engine apps.""" - - def __init__(self, scopes, **kwds): - if not util.DetectGae(): - raise exceptions.ResourceUnavailableError( - 'GCE credentials requested outside a GCE instance') - self._scopes = list(util.NormalizeScopes(scopes)) - super(GaeAssertionCredentials, self).__init__(None, **kwds) - - @classmethod - def Get(cls, *args, **kwds): - try: - return cls(*args, **kwds) - except exceptions.Error: - return None - - @classmethod - def from_json(cls, json_data): - data = json.loads(json_data) - return GaeAssertionCredentials(data['_scopes']) - - def _refresh(self, _): - """Refresh self.access_token. - - Args: - _: (ignored) A function matching httplib2.Http.request's signature. - """ - from google.appengine.api import app_identity - try: - token, _ = app_identity.get_access_token(self._scopes) - except app_identity.Error as e: - raise exceptions.CredentialsError(str(e)) - self.access_token = token - - -# TODO(craigcitro): Switch this from taking a path to taking a stream. -def CredentialsFromFile(path, client_info): - """Read credentials from a file.""" - credential_store = oauth2client.multistore_file.get_credential_storage( - path, - client_info['client_id'], - client_info['user_agent'], - client_info['scope']) - if hasattr(FLAGS, 'auth_local_webserver'): - FLAGS.auth_local_webserver = False - credentials = credential_store.get() - if credentials is None or credentials.invalid: - print 'Generating new OAuth credentials ...' - while True: - # If authorization fails, we want to retry, rather than let this - # cascade up and get caught elsewhere. If users want out of the - # retry loop, they can ^C. - try: - flow = oauth2client.client.OAuth2WebServerFlow(**client_info) - credentials = oauth2client.tools.run(flow, credential_store) - break - except (oauth2client.client.FlowExchangeError, SystemExit) as e: - # Here SystemExit is "no credential at all", and the - # FlowExchangeError is "invalid" -- usually because you reused - # a token. 
- print 'Invalid authorization: %s' % (e,) - except httplib2.HttpLib2Error as e: - print 'Communication error: %s' % (e,) - raise exceptions.CredentialsError( - 'Communication error creating credentials: %s' % e) - return credentials diff --git a/_gcloud_vendor/apitools/base/py/credentials_lib_test.py b/_gcloud_vendor/apitools/base/py/credentials_lib_test.py deleted file mode 100644 index e4e461c1e9ca..000000000000 --- a/_gcloud_vendor/apitools/base/py/credentials_lib_test.py +++ /dev/null @@ -1,54 +0,0 @@ -#!/usr/bin/env python - - -import httplib -import re -import StringIO -import urllib2 - -import mock -import unittest2 - -from apitools.base.py import credentials_lib -from apitools.base.py import util - - -def CreateUriValidator(uri_regexp, content=''): - def CheckUri(uri, headers=None): - if 'X-Google-Metadata-Request' not in headers: - raise ValueError('Missing required header') - if uri_regexp.match(uri): - message = content - status = httplib.OK - else: - message = 'Expected uri matching pattern %s' % uri_regexp.pattern - status = httplib.BAD_REQUEST - return type('HttpResponse', (object,), {'status': status})(), message - return CheckUri - - -class CredentialsLibTest(unittest2.TestCase): - - def _GetServiceCreds(self, service_account_name=None, scopes=None): - scopes = scopes or ['scope1'] - kwargs = {} - if service_account_name is not None: - kwargs['service_account_name'] = service_account_name - service_account_name = service_account_name or 'default' - with mock.patch.object(urllib2, 'urlopen', autospec=True) as urllib_mock: - urllib_mock.return_value = StringIO.StringIO(''.join(scopes)) - with mock.patch.object(util, 'DetectGce', autospec=True) as mock_util: - mock_util.return_value = True - validator = CreateUriValidator( - re.compile(r'.*/%s/.*' % service_account_name), - content='{"access_token": "token"}') - credentials = credentials_lib.GceAssertionCredentials(scopes, **kwargs) - self.assertIsNone(credentials._refresh(validator)) - - def testGceServiceAccounts(self): - self._GetServiceCreds() - self._GetServiceCreds(service_account_name='my_service_account') - - -if __name__ == '__main__': - unittest2.main() diff --git a/_gcloud_vendor/apitools/base/py/encoding.py b/_gcloud_vendor/apitools/base/py/encoding.py deleted file mode 100644 index c44897f12757..000000000000 --- a/_gcloud_vendor/apitools/base/py/encoding.py +++ /dev/null @@ -1,486 +0,0 @@ -#!/usr/bin/env python -"""Common code for converting proto to other formats, such as JSON.""" - -import base64 -import collections -import datetime -import json -import logging - - -from protorpc import message_types -from protorpc import messages -from protorpc import protojson - -from apitools.base.py import exceptions - -__all__ = [ - 'CopyProtoMessage', - 'JsonToMessage', - 'MessageToJson', - 'DictToMessage', - 'MessageToDict', - 'PyValueToMessage', - 'MessageToPyValue', - 'MessageToRepr', -] - - -_Codec = collections.namedtuple('_Codec', ['encoder', 'decoder']) -CodecResult = collections.namedtuple('CodecResult', ['value', 'complete']) - - -# TODO(craigcitro): Make these non-global. 
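
# The credentials test above fakes the metadata server by patching
# urllib2.urlopen; the same mock.patch.object trick in isolation:
import StringIO
import urllib2

import mock

with mock.patch.object(urllib2, 'urlopen', autospec=True) as urlopen_mock:
    urlopen_mock.return_value = StringIO.StringIO('scope1 scope2')
    print urllib2.urlopen('http://example.invalid').read()
    # scope1 scope2
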
-_UNRECOGNIZED_FIELD_MAPPINGS = {} -_CUSTOM_MESSAGE_CODECS = {} -_CUSTOM_FIELD_CODECS = {} -_FIELD_TYPE_CODECS = {} - - -def MapUnrecognizedFields(field_name): - """Register field_name as a container for unrecognized fields in message.""" - def Register(cls): - _UNRECOGNIZED_FIELD_MAPPINGS[cls] = field_name - return cls - return Register - - -def RegisterCustomMessageCodec(encoder, decoder): - """Register a custom encoder/decoder for this message class.""" - def Register(cls): - _CUSTOM_MESSAGE_CODECS[cls] = _Codec(encoder=encoder, decoder=decoder) - return cls - return Register - - -def RegisterCustomFieldCodec(encoder, decoder): - """Register a custom encoder/decoder for this field.""" - def Register(field): - _CUSTOM_FIELD_CODECS[field] = _Codec(encoder=encoder, decoder=decoder) - return field - return Register - - -def RegisterFieldTypeCodec(encoder, decoder): - """Register a custom encoder/decoder for all fields of this type.""" - def Register(field_type): - _FIELD_TYPE_CODECS[field_type] = _Codec(encoder=encoder, decoder=decoder) - return field_type - return Register - - -# TODO(craigcitro): Delete this function with the switch to proto2. -def CopyProtoMessage(message): - codec = protojson.ProtoJson() - return codec.decode_message(type(message), codec.encode_message(message)) - - -def MessageToJson(message, include_fields=None): - """Convert the given message to JSON.""" - result = _ProtoJsonApiTools.Get().encode_message(message) - return _IncludeFields(result, message, include_fields) - - -def JsonToMessage(message_type, message): - """Convert the given JSON to a message of type message_type.""" - return _ProtoJsonApiTools.Get().decode_message(message_type, message) - - -# TODO(craigcitro): Do this directly, instead of via JSON. -def DictToMessage(d, message_type): - """Convert the given dictionary to a message of type message_type.""" - return JsonToMessage(message_type, json.dumps(d)) - - -def MessageToDict(message): - """Convert the given message to a dictionary.""" - return json.loads(MessageToJson(message)) - - -def PyValueToMessage(message_type, value): - """Convert the given python value to a message of type message_type.""" - return JsonToMessage(message_type, json.dumps(value)) - - -def MessageToPyValue(message): - """Convert the given message to a python value.""" - return json.loads(MessageToJson(message)) - - -def MessageToRepr(msg, multiline=False, **kwargs): - """Return a repr-style string for a protorpc message. - - protorpc.Message.__repr__ does not return anything that could be considered - python code. Adding this function lets us print a protorpc message in such - a way that it could be pasted into code later, and used to compare against - other things. - - Args: - msg: protorpc.Message, the message to be repr'd. - multiline: bool, True if the returned string should have each field - assignment on its own line. - **kwargs: {str:str}, Additional flags for how to format the string. - - Known **kwargs: - shortstrings: bool, True if all string values should be truncated at - 100 characters, since when mocking the contents typically don't matter - except for IDs, and IDs are usually less than 100 characters. - no_modules: bool, True if the long module name should not be printed with - each type. - - Returns: - str, A string of valid python (assuming the right imports have been made) - that recreates the message passed into this function. - """ - - # TODO(user): craigcitro suggests a pretty-printer from apitools/gen. 
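
# What MessageToRepr produces, using the SimpleMessage type defined in
# the tests further below (field set, repfield left empty):
msg = SimpleMessage(field='abc')
print MessageToRepr(msg, no_modules=True)
# SimpleMessage(field='abc',repfield=[],)
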
- - indent = kwargs.get('indent', 0) - - def IndentKwargs(kwargs): - kwargs = dict(kwargs) - kwargs['indent'] = kwargs.get('indent', 0) + 4 - return kwargs - - if isinstance(msg, list): - s = '[' - for item in msg: - if multiline: - s += '\n' + ' '*(indent + 4) - s += MessageToRepr( - item, multiline=multiline, **IndentKwargs(kwargs)) + ',' - if multiline: - s += '\n' + ' '*indent - s += ']' - return s - - if isinstance(msg, messages.Message): - s = type(msg).__name__ + '(' - if not kwargs.get('no_modules'): - s = msg.__module__ + '.' + s - names = sorted([field.name for field in msg.all_fields()]) - for name in names: - field = msg.field_by_name(name) - if multiline: - s += '\n' + ' '*(indent + 4) - value = getattr(msg, field.name) - s += field.name + '=' + MessageToRepr( - value, multiline=multiline, **IndentKwargs(kwargs)) + ',' - if multiline: - s += '\n'+' '*indent - s += ')' - return s - - if isinstance(msg, basestring): - if kwargs.get('shortstrings') and len(msg) > 100: - msg = msg[:100] - - if isinstance(msg, datetime.datetime): - - class SpecialTZInfo(datetime.tzinfo): - - def __init__(self, offset): - super(SpecialTZInfo, self).__init__() - self.offset = offset - - def __repr__(self): - s = 'TimeZoneOffset(' + repr(self.offset) + ')' - if not kwargs.get('no_modules'): - s = 'protorpc.util.' + s - return s - - msg = datetime.datetime( - msg.year, msg.month, msg.day, msg.hour, msg.minute, msg.second, - msg.microsecond, SpecialTZInfo(msg.tzinfo.utcoffset(0))) - - return repr(msg) - - -def _GetField(message, field_path): - for field in field_path: - if field not in dir(message): - raise KeyError('no field "%s"' % field) - message = getattr(message, field) - return message - - -def _SetField(dictblob, field_path, value): - for field in field_path[:-1]: - dictblob[field] = {} - dictblob = dictblob[field] - dictblob[field_path[-1]] = value - - -def _IncludeFields(encoded_message, message, include_fields): - """Add the requested fields to the encoded message.""" - if include_fields is None: - return encoded_message - result = json.loads(encoded_message) - for field_name in include_fields: - try: - value = _GetField(message, field_name.split('.')) - nullvalue = None - if isinstance(value, list): - nullvalue = [] - except KeyError: - raise exceptions.InvalidDataError( - 'No field named %s in message of type %s' % ( - field_name, type(message))) - _SetField(result, field_name.split('.'), nullvalue) - return json.dumps(result) - - -def _GetFieldCodecs(field, attr): - result = [ - getattr(_CUSTOM_FIELD_CODECS.get(field), attr, None), - getattr(_FIELD_TYPE_CODECS.get(type(field)), attr, None), - ] - return [x for x in result if x is not None] - - -class _ProtoJsonApiTools(protojson.ProtoJson): - """JSON encoder used by apitools clients.""" - _INSTANCE = None - - @classmethod - def Get(cls): - if cls._INSTANCE is None: - cls._INSTANCE = cls() - return cls._INSTANCE - - def decode_message(self, message_type, encoded_message): - if message_type in _CUSTOM_MESSAGE_CODECS: - return _CUSTOM_MESSAGE_CODECS[message_type].decoder(encoded_message) - # We turn off the default logging in protorpc. We may want to - # remove this later. 
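
# A hypothetical use of the registration decorators defined above: a
# message type whose JSON form is a bare string rather than an object.
# `Duration` and its "30s" wire format are invented for illustration.
from protorpc import messages

@RegisterCustomMessageCodec(
    encoder=lambda msg: '"%ds"' % msg.seconds,
    decoder=lambda data: Duration(seconds=int(data.strip().strip('"')[:-1])))
class Duration(messages.Message):
    seconds = messages.IntegerField(1)

print MessageToJson(Duration(seconds=30))        # "30s"
print JsonToMessage(Duration, '"30s"').seconds   # 30
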
- old_level = logging.getLogger().level - logging.getLogger().setLevel(logging.ERROR) - result = super(_ProtoJsonApiTools, self).decode_message( - message_type, encoded_message) - logging.getLogger().setLevel(old_level) - result = _ProcessUnknownEnums(result, encoded_message) - result = _ProcessUnknownMessages(result, encoded_message) - return _DecodeUnknownFields(result, encoded_message) - - def decode_field(self, field, value): - """Decode the given JSON value. - - Args: - field: a messages.Field for the field we're decoding. - value: a python value we'd like to decode. - - Returns: - A value suitable for assignment to field. - """ - for decoder in _GetFieldCodecs(field, 'decoder'): - result = decoder(field, value) - value = result.value - if result.complete: - return value - if isinstance(field, messages.MessageField): - field_value = self.decode_message(field.message_type, json.dumps(value)) - elif isinstance(field, messages.EnumField): - try: - field_value = super(_ProtoJsonApiTools, self).decode_field(field, value) - except messages.DecodeError: - if not isinstance(value, basestring): - raise - field_value = None - else: - field_value = super(_ProtoJsonApiTools, self).decode_field(field, value) - return field_value - - def encode_message(self, message): - if isinstance(message, messages.FieldList): - return '[%s]' % (', '.join(self.encode_message(x) for x in message)) - if type(message) in _CUSTOM_MESSAGE_CODECS: - return _CUSTOM_MESSAGE_CODECS[type(message)].encoder(message) - message = _EncodeUnknownFields(message) - return super(_ProtoJsonApiTools, self).encode_message(message) - - def encode_field(self, field, value): - """Encode the given value as JSON. - - Args: - field: a messages.Field for the field we're encoding. - value: a value for field. - - Returns: - A python value suitable for json.dumps. - """ - for encoder in _GetFieldCodecs(field, 'encoder'): - result = encoder(field, value) - value = result.value - if result.complete: - return value - if (isinstance(field, messages.MessageField) and - not isinstance(field, message_types.DateTimeField)): - value = json.loads(self.encode_message(value)) - return super(_ProtoJsonApiTools, self).encode_field(field, value) - - -# TODO(craigcitro): Fold this and _IncludeFields in as codecs. -def _DecodeUnknownFields(message, encoded_message): - """Rewrite unknown fields in message into message.destination.""" - destination = _UNRECOGNIZED_FIELD_MAPPINGS.get(type(message)) - if destination is None: - return message - pair_field = message.field_by_name(destination) - if not isinstance(pair_field, messages.MessageField): - raise exceptions.InvalidDataFromServerError( - 'Unrecognized fields must be mapped to a compound ' - 'message type.') - pair_type = pair_field.message_type - # TODO(craigcitro): Add more error checking around the pair - # type being exactly what we suspect (field names, etc). - if isinstance(pair_type.value, messages.MessageField): - new_values = _DecodeUnknownMessages( - message, json.loads(encoded_message), pair_type) - else: - new_values = _DecodeUnrecognizedFields(message, pair_type) - setattr(message, destination, new_values) - # We could probably get away with not setting this, but - # why not clear it? 
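
# A sketch of the mapping those helpers implement, using this module's
# own decorators; the message type mirrors the one in encoding_test.py
# below. Unknown JSON keys land in additional_properties and survive a
# round trip.
from protorpc import messages

@MapUnrecognizedFields('additional_properties')
class ParamsMessage(messages.Message):

    class AdditionalProperty(messages.Message):
        key = messages.StringField(1)
        value = messages.StringField(2)

    additional_properties = messages.MessageField(
        'AdditionalProperty', 1, repeated=True)

msg = JsonToMessage(ParamsMessage, '{"anything": "goes"}')
print msg.additional_properties[0].key   # anything
print MessageToJson(msg)                 # {"anything": "goes"}
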
- setattr(message, '_Message__unrecognized_fields', {}) - return message - - -def _DecodeUnknownMessages(message, encoded_message, pair_type): - """Process unknown fields in encoded_message of a message type.""" - field_type = pair_type.value.type - new_values = [] - all_field_names = [x.name for x in message.all_fields()] - for name, value_dict in encoded_message.iteritems(): - if name in all_field_names: - continue - value = PyValueToMessage(field_type, value_dict) - new_pair = pair_type(key=name, value=value) - new_values.append(new_pair) - return new_values - - -def _DecodeUnrecognizedFields(message, pair_type): - """Process unrecognized fields in message.""" - new_values = [] - for unknown_field in message.all_unrecognized_fields(): - # TODO(craigcitro): Consider validating the variant if - # the assignment below doesn't take care of it. It may - # also be necessary to check it in the case that the - # type has multiple encodings. - value, _ = message.get_unrecognized_field_info(unknown_field) - value_type = pair_type.field_by_name('value') - if isinstance(value_type, messages.MessageField): - decoded_value = DictToMessage(value, pair_type.value.message_type) - else: - decoded_value = value - new_pair = pair_type(key=str(unknown_field), value=decoded_value) - new_values.append(new_pair) - return new_values - - -def _EncodeUnknownFields(message): - """Remap unknown fields in message out of message.source.""" - source = _UNRECOGNIZED_FIELD_MAPPINGS.get(type(message)) - if source is None: - return message - result = CopyProtoMessage(message) - pairs_field = message.field_by_name(source) - if not isinstance(pairs_field, messages.MessageField): - raise exceptions.InvalidUserInputError( - 'Invalid pairs field %s' % pairs_field) - pairs_type = pairs_field.message_type - value_variant = pairs_type.field_by_name('value').variant - pairs = getattr(message, source) - for pair in pairs: - if value_variant == messages.Variant.MESSAGE: - encoded_value = MessageToDict(pair.value) - else: - encoded_value = pair.value - result.set_unrecognized_field(pair.key, encoded_value, value_variant) - setattr(result, source, []) - return result - - -def _SafeEncodeBytes(field, value): - """Encode the bytes in value as urlsafe base64.""" - try: - if field.repeated: - result = [base64.urlsafe_b64encode(byte) for byte in value] - else: - result = base64.urlsafe_b64encode(value) - complete = True - except TypeError: - result = value - complete = False - return CodecResult(value=result, complete=complete) - - -def _SafeDecodeBytes(unused_field, value): - """Decode the urlsafe base64 value into bytes.""" - try: - result = base64.urlsafe_b64decode(str(value)) - complete = True - except TypeError: - result = value - complete = False - return CodecResult(value=result, complete=complete) - - -def _ProcessUnknownEnums(message, encoded_message): - """Add unknown enum values from encoded_message as unknown fields. - - ProtoRPC diverges from the usual protocol buffer behavior here and - doesn't allow unknown fields. Throwing on unknown fields makes it - impossible to let servers add new enum values and stay compatible - with older clients, which isn't reasonable for us. We simply store - unrecognized enum values as unknown fields, and all is well. - - Args: - message: Proto message we've decoded thus far. - encoded_message: JSON string we're decoding. - - Returns: - message, with any unknown enums stored as unrecognized fields. 
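
# The _SafeEncodeBytes/_SafeDecodeBytes pair defined above (and
# registered for BytesField at the bottom of this module) is urlsafe
# base64; a standalone roundtrip matching the values encoding_test.py
# uses:
import base64

data = base64.b64decode('AAc+')
encoded = base64.urlsafe_b64encode(data)
assert encoded == 'AAc-'
assert base64.urlsafe_b64decode(encoded) == data
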
- """ - if not encoded_message: - return message - decoded_message = json.loads(encoded_message) - for field in message.all_fields(): - if (isinstance(field, messages.EnumField) and - field.name in decoded_message and - message.get_assigned_value(field.name) is None): - message.set_unrecognized_field(field.name, decoded_message[field.name], - messages.Variant.ENUM) - return message - - -def _ProcessUnknownMessages(message, encoded_message): - """Store any remaining unknown fields as strings. - - ProtoRPC currently ignores unknown values for which no type can be - determined (and logs a "No variant found" message). For the purposes - of reserializing, this is quite harmful (since it throws away - information). Here we simply add those as unknown fields of type - string (so that they can easily be reserialized). - - Args: - message: Proto message we've decoded thus far. - encoded_message: JSON string we're decoding. - - Returns: - message, with any remaining unrecognized fields saved. - """ - if not encoded_message: - return message - decoded_message = json.loads(encoded_message) - message_fields = [x.name for x in message.all_fields()] + list( - message.all_unrecognized_fields()) - missing_fields = [x for x in decoded_message.iterkeys() - if x not in message_fields] - for field_name in missing_fields: - message.set_unrecognized_field(field_name, decoded_message[field_name], - messages.Variant.STRING) - return message - - -RegisterFieldTypeCodec(_SafeEncodeBytes, _SafeDecodeBytes)(messages.BytesField) diff --git a/_gcloud_vendor/apitools/base/py/encoding_test.py b/_gcloud_vendor/apitools/base/py/encoding_test.py deleted file mode 100644 index 77224d6e3582..000000000000 --- a/_gcloud_vendor/apitools/base/py/encoding_test.py +++ /dev/null @@ -1,269 +0,0 @@ -#!/usr/bin/env python - - -import base64 -import datetime -import json - -from protorpc import message_types -from protorpc import messages -from protorpc import util -import unittest2 - -from apitools.base.py import encoding - - -class SimpleMessage(messages.Message): - field = messages.StringField(1) - repfield = messages.StringField(2, repeated=True) - - -class BytesMessage(messages.Message): - field = messages.BytesField(1) - repfield = messages.BytesField(2, repeated=True) - - -class TimeMessage(messages.Message): - timefield = message_types.DateTimeField(3) - - -@encoding.MapUnrecognizedFields('additional_properties') -class AdditionalPropertiesMessage(messages.Message): - - class AdditionalProperty(messages.Message): - key = messages.StringField(1) - value = messages.StringField(2) - - additional_properties = messages.MessageField( - AdditionalProperty, 1, repeated=True) - - -class CompoundPropertyType(messages.Message): - index = messages.IntegerField(1) - name = messages.StringField(2) - - -class MessageWithEnum(messages.Message): - - class ThisEnum(messages.Enum): - VALUE_ONE = 1 - VALUE_TWO = 2 - - field_one = messages.EnumField(ThisEnum, 1) - field_two = messages.EnumField(ThisEnum, 2, default=ThisEnum.VALUE_TWO) - ignored_field = messages.EnumField(ThisEnum, 3) - - -@encoding.MapUnrecognizedFields('additional_properties') -class AdditionalMessagePropertiesMessage(messages.Message): - - class AdditionalProperty(messages.Message): - key = messages.StringField(1) - value = messages.MessageField(CompoundPropertyType, 2) - - additional_properties = messages.MessageField( - 'AdditionalProperty', 1, repeated=True) - - -class HasNestedMessage(messages.Message): - nested = messages.MessageField(AdditionalPropertiesMessage, 1) - 
nested_list = messages.StringField(2, repeated=True) - - -class ExtraNestedMessage(messages.Message): - nested = messages.MessageField(HasNestedMessage, 1) - - -class EncodingTest(unittest2.TestCase): - - def testCopyProtoMessage(self): - msg = SimpleMessage(field='abc') - new_msg = encoding.CopyProtoMessage(msg) - self.assertEqual(msg.field, new_msg.field) - msg.field = 'def' - self.assertNotEqual(msg.field, new_msg.field) - - def testBytesEncoding(self): - b64_str = 'AAc+' - b64_msg = '{"field": "%s"}' % b64_str - urlsafe_b64_str = 'AAc-' - urlsafe_b64_msg = '{"field": "%s"}' % urlsafe_b64_str - data = base64.b64decode(b64_str) - msg = BytesMessage(field=data) - self.assertEqual(msg, encoding.JsonToMessage(BytesMessage, urlsafe_b64_msg)) - self.assertEqual(msg, encoding.JsonToMessage(BytesMessage, b64_msg)) - self.assertEqual(urlsafe_b64_msg, encoding.MessageToJson(msg)) - - enc_rep_msg = '{"repfield": ["%(b)s", "%(b)s"]}' % { - 'b': urlsafe_b64_str, - } - rep_msg = BytesMessage(repfield=[data, data]) - self.assertEqual(rep_msg, encoding.JsonToMessage(BytesMessage, enc_rep_msg)) - self.assertEqual(enc_rep_msg, encoding.MessageToJson(rep_msg)) - - def testIncludeFields(self): - msg = SimpleMessage() - self.assertEqual('{}', encoding.MessageToJson(msg)) - self.assertEqual( - '{"field": null}', - encoding.MessageToJson(msg, include_fields=['field'])) - self.assertEqual( - '{"repfield": []}', - encoding.MessageToJson(msg, include_fields=['repfield'])) - - def testNestedIncludeFields(self): - msg = HasNestedMessage( - nested=AdditionalPropertiesMessage( - additional_properties=[])) - self.assertEqual( - '{"nested": null}', - encoding.MessageToJson(msg, include_fields=['nested'])) - self.assertEqual( - '{"nested": {"additional_properties": []}}', - encoding.MessageToJson( - msg, include_fields=['nested.additional_properties'])) - msg = ExtraNestedMessage(nested=msg) - self.assertEqual( - '{"nested": {"nested": null}}', - encoding.MessageToJson(msg, include_fields=['nested.nested'])) - self.assertEqual( - '{"nested": {"nested_list": []}}', - encoding.MessageToJson(msg, include_fields=['nested.nested_list'])) - self.assertEqual( - '{"nested": {"nested": {"additional_properties": []}}}', - encoding.MessageToJson( - msg, include_fields=['nested.nested.additional_properties'])) - - def testAdditionalPropertyMapping(self): - msg = AdditionalPropertiesMessage() - msg.additional_properties = [ - AdditionalPropertiesMessage.AdditionalProperty( - key='key_one', value='value_one'), - AdditionalPropertiesMessage.AdditionalProperty( - key='key_two', value='value_two'), - ] - - encoded_msg = encoding.MessageToJson(msg) - self.assertEqual( - {'key_one': 'value_one', 'key_two': 'value_two'}, - json.loads(encoded_msg)) - - new_msg = encoding.JsonToMessage(type(msg), encoded_msg) - self.assertEqual( - set(('key_one', 'key_two')), - set([x.key for x in new_msg.additional_properties])) - self.assertIsNot(msg, new_msg) - - new_msg.additional_properties.pop() - self.assertEqual(1, len(new_msg.additional_properties)) - self.assertEqual(2, len(msg.additional_properties)) - - def testAdditionalMessageProperties(self): - json_msg = '{"input": {"index": 0, "name": "output"}}' - result = encoding.JsonToMessage( - AdditionalMessagePropertiesMessage, json_msg) - self.assertEqual(1, len(result.additional_properties)) - self.assertEqual(0, result.additional_properties[0].value.index) - - def testNestedFieldMapping(self): - nested_msg = AdditionalPropertiesMessage() - nested_msg.additional_properties = [ - 
AdditionalPropertiesMessage.AdditionalProperty( - key='key_one', value='value_one'), - AdditionalPropertiesMessage.AdditionalProperty( - key='key_two', value='value_two'), - ] - msg = HasNestedMessage(nested=nested_msg) - - encoded_msg = encoding.MessageToJson(msg) - self.assertEqual( - {'nested': {'key_one': 'value_one', 'key_two': 'value_two'}}, - json.loads(encoded_msg)) - - new_msg = encoding.JsonToMessage(type(msg), encoded_msg) - self.assertEqual( - set(('key_one', 'key_two')), - set([x.key for x in new_msg.nested.additional_properties])) - - new_msg.nested.additional_properties.pop() - self.assertEqual(1, len(new_msg.nested.additional_properties)) - self.assertEqual(2, len(msg.nested.additional_properties)) - - def testValidEnums(self): - message_json = '{"field_one": "VALUE_ONE"}' - message = encoding.JsonToMessage(MessageWithEnum, message_json) - self.assertEqual(MessageWithEnum.ThisEnum.VALUE_ONE, message.field_one) - self.assertEqual(MessageWithEnum.ThisEnum.VALUE_TWO, message.field_two) - self.assertEqual(json.loads(message_json), - json.loads(encoding.MessageToJson(message))) - - def testIgnoredEnums(self): - json_with_typo = '{"field_one": "VALUE_OEN"}' - message = encoding.JsonToMessage(MessageWithEnum, json_with_typo) - self.assertEqual(None, message.field_one) - self.assertEqual(('VALUE_OEN', messages.Variant.ENUM), - message.get_unrecognized_field_info('field_one')) - self.assertEqual(json.loads(json_with_typo), - json.loads(encoding.MessageToJson(message))) - - empty_json = '' - message = encoding.JsonToMessage(MessageWithEnum, empty_json) - self.assertEqual(None, message.field_one) - - def testIgnoredEnumsWithDefaults(self): - json_with_typo = '{"field_two": "VALUE_OEN"}' - message = encoding.JsonToMessage(MessageWithEnum, json_with_typo) - self.assertEqual(MessageWithEnum.ThisEnum.VALUE_TWO, message.field_two) - self.assertEqual(json.loads(json_with_typo), - json.loads(encoding.MessageToJson(message))) - - def testUnknownNestedRoundtrip(self): - json_message = '{"field": "abc", "submessage": {"a": 1, "b": "foo"}}' - message = encoding.JsonToMessage(SimpleMessage, json_message) - self.assertEqual(json.loads(json_message), - json.loads(encoding.MessageToJson(message))) - - def testJsonDatetime(self): - msg = TimeMessage(timefield=datetime.datetime( - 2014, 7, 2, 23, 33, 25, 541000, - tzinfo=util.TimeZoneOffset(datetime.timedelta(0)))) - self.assertEqual( - '{"timefield": "2014-07-02T23:33:25.541000+00:00"}', - encoding.MessageToJson(msg)) - - def testMessageToRepr(self): - # pylint:disable=bad-whitespace, Using the same string returned by - # MessageToRepr, with the module names fixed. - msg = SimpleMessage(field='field',repfield=['field','field',],) - self.assertEqual( - encoding.MessageToRepr(msg), - r"%s.SimpleMessage(field='field',repfield=['field','field',],)" % ( - __name__,)) - self.assertEqual( - encoding.MessageToRepr(msg, no_modules=True), - r"SimpleMessage(field='field',repfield=['field','field',],)") - - def testMessageToReprWithTime(self): - msg = TimeMessage(timefield=datetime.datetime( - 2014, 7, 2, 23, 33, 25, 541000, - tzinfo=util.TimeZoneOffset(datetime.timedelta(0)))) - self.assertEqual( - encoding.MessageToRepr(msg, multiline=True), - # pylint:disable=line-too-long, Too much effort to make MessageToRepr - # wrap lines properly. 
- """\ -%s.TimeMessage( - timefield=datetime.datetime(2014, 7, 2, 23, 33, 25, 541000, tzinfo=protorpc.util.TimeZoneOffset(datetime.timedelta(0))), -)""" % __name__) - self.assertEqual( - encoding.MessageToRepr(msg, multiline=True, no_modules=True), - # pylint:disable=line-too-long, Too much effort to make MessageToRepr - # wrap lines properly. - """\ -TimeMessage( - timefield=datetime.datetime(2014, 7, 2, 23, 33, 25, 541000, tzinfo=TimeZoneOffset(datetime.timedelta(0))), -)""") - - -if __name__ == '__main__': - unittest2.main() diff --git a/_gcloud_vendor/apitools/base/py/extra_types.py b/_gcloud_vendor/apitools/base/py/extra_types.py deleted file mode 100644 index 4b15683a8d49..000000000000 --- a/_gcloud_vendor/apitools/base/py/extra_types.py +++ /dev/null @@ -1,283 +0,0 @@ -#!/usr/bin/env python -"""Extra types understood by apitools. - -This file will be replaced by a .proto file when we switch to proto2 -from protorpc. -""" - -import collections -import datetime -import json -import numbers - -from protorpc import message_types -from protorpc import messages -from protorpc import protojson - -from apitools.base.py import encoding -from apitools.base.py import exceptions -from apitools.base.py import util - -__all__ = [ - 'DateField', - 'DateTimeMessage', - 'JsonArray', - 'JsonObject', - 'JsonValue', - 'JsonProtoEncoder', - 'JsonProtoDecoder', -] - -# We import from protorpc. -# pylint:disable=invalid-name -DateTimeMessage = message_types.DateTimeMessage -# pylint:enable=invalid-name - - -class DateField(messages.Field): - """Field definition for Date values.""" - - # We insert our own metaclass here to avoid letting ProtoRPC - # register this as the default field type for strings. - # * since ProtoRPC does this via metaclasses, we don't have any - # choice but to use one ourselves - # * since a subclass's metaclass must inherit from its superclass's - # metaclass, we're forced to have this hard-to-read inheritance. 
- # - class __metaclass__(messages.Field.__metaclass__): # pylint: disable=invalid-name - - def __init__(cls, name, bases, dct): # pylint: disable=no-self-argument - super(messages.Field.__metaclass__, cls).__init__(name, bases, dct) - - VARIANTS = frozenset([messages.Variant.STRING]) - DEFAULT_VARIANT = messages.Variant.STRING - type = datetime.date - - -def _ValidateJsonValue(json_value): - entries = [(f, json_value.get_assigned_value(f.name)) - for f in json_value.all_fields()] - assigned_entries = [(f, value) for f, value in entries if value is not None] - if len(assigned_entries) != 1: - raise exceptions.InvalidDataError('Malformed JsonValue: %s' % json_value) - - -def _JsonValueToPythonValue(json_value): - """Convert the given JsonValue to a json string.""" - util.Typecheck(json_value, JsonValue) - _ValidateJsonValue(json_value) - if json_value.is_null: - return None - entries = [(f, json_value.get_assigned_value(f.name)) - for f in json_value.all_fields()] - assigned_entries = [(f, value) for f, value in entries if value is not None] - field, value = assigned_entries[0] - if not isinstance(field, messages.MessageField): - return value - elif field.message_type is JsonObject: - return _JsonObjectToPythonValue(value) - elif field.message_type is JsonArray: - return _JsonArrayToPythonValue(value) - - -def _JsonObjectToPythonValue(json_value): - util.Typecheck(json_value, JsonObject) - return dict([(prop.key, _JsonValueToPythonValue(prop.value)) for prop - in json_value.properties]) - - -def _JsonArrayToPythonValue(json_value): - util.Typecheck(json_value, JsonArray) - return [_JsonValueToPythonValue(e) for e in json_value.entries] - - -_MAXINT64 = 2 << 63 - 1 -_MININT64 = -(2 << 63) - - -def _PythonValueToJsonValue(py_value): - """Convert the given python value to a JsonValue.""" - if py_value is None: - return JsonValue(is_null=True) - if isinstance(py_value, bool): - return JsonValue(boolean_value=py_value) - if isinstance(py_value, basestring): - return JsonValue(string_value=py_value) - if isinstance(py_value, numbers.Number): - if isinstance(py_value, (int, long)): - if _MININT64 < py_value < _MAXINT64: - return JsonValue(integer_value=py_value) - return JsonValue(double_value=float(py_value)) - if isinstance(py_value, dict): - return JsonValue(object_value=_PythonValueToJsonObject(py_value)) - if isinstance(py_value, collections.Iterable): - return JsonValue(array_value=_PythonValueToJsonArray(py_value)) - raise exceptions.InvalidDataError( - 'Cannot convert "%s" to JsonValue' % py_value) - - -def _PythonValueToJsonObject(py_value): - util.Typecheck(py_value, dict) - return JsonObject( - properties=[ - JsonObject.Property(key=key, value=_PythonValueToJsonValue(value)) - for key, value in py_value.iteritems()]) - - -def _PythonValueToJsonArray(py_value): - return JsonArray(entries=map(_PythonValueToJsonValue, py_value)) - - -class JsonValue(messages.Message): - """Any valid JSON value.""" - # Is this JSON object `null`? - is_null = messages.BooleanField(1, default=False) - - # Exactly one of the following is provided if is_null is False; none - # should be provided if is_null is True. - boolean_value = messages.BooleanField(2) - string_value = messages.StringField(3) - # We keep two numeric fields to keep int64 round-trips exact. 
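One caveat worth flagging before the numeric fields below: earlier in this file, _MAXINT64 is written as 2 << 63 - 1, which parses as 2 << 62 (that is, 2**63) because subtraction binds tighter than the shift, so the strict upper-bound test in _PythonValueToJsonValue happens to be correct; _MININT64 = -(2 << 63) is -2**64, though, twice the true int64 minimum, so some out-of-range negatives still pass as integer_value. Since this is vendored code, the constants are left as upstream wrote them; the sketch below only documents the intended range (fits_int64 is an illustrative helper, not an apitools API):

    INT64_MAX = (1 << 63) - 1  # 9223372036854775807
    INT64_MIN = -(1 << 63)     # -9223372036854775808

    def fits_int64(value):
        # Inclusive range test; anything outside should fall back to
        # the double_value field instead.
        return INT64_MIN <= value <= INT64_MAX

    assert fits_int64(INT64_MAX) and fits_int64(INT64_MIN)
    assert not fits_int64(1 << 63) and not fits_int64(-(1 << 64))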
- double_value = messages.FloatField(4, variant=messages.Variant.DOUBLE) - integer_value = messages.IntegerField(5, variant=messages.Variant.INT64) - # Compound types - object_value = messages.MessageField('JsonObject', 6) - array_value = messages.MessageField('JsonArray', 7) - - -class JsonObject(messages.Message): - """A JSON object value. - - Messages: - Property: A property of a JsonObject. - - Fields: - properties: A list of properties of a JsonObject. - """ - - class Property(messages.Message): - """A property of a JSON object. - - Fields: - key: Name of the property. - value: A JsonValue attribute. - """ - key = messages.StringField(1) - value = messages.MessageField(JsonValue, 2) - - properties = messages.MessageField(Property, 1, repeated=True) - - -class JsonArray(messages.Message): - """A JSON array value.""" - entries = messages.MessageField(JsonValue, 1, repeated=True) - - -_JSON_PROTO_TO_PYTHON_MAP = { - JsonArray: _JsonArrayToPythonValue, - JsonObject: _JsonObjectToPythonValue, - JsonValue: _JsonValueToPythonValue, -} -_JSON_PROTO_TYPES = tuple(_JSON_PROTO_TO_PYTHON_MAP.keys()) - - -def _JsonProtoToPythonValue(json_proto): - util.Typecheck(json_proto, _JSON_PROTO_TYPES) - return _JSON_PROTO_TO_PYTHON_MAP[type(json_proto)](json_proto) - - -def _PythonValueToJsonProto(py_value): - if isinstance(py_value, dict): - return _PythonValueToJsonObject(py_value) - if (isinstance(py_value, collections.Iterable) and - not isinstance(py_value, basestring)): - return _PythonValueToJsonArray(py_value) - return _PythonValueToJsonValue(py_value) - - -def _JsonProtoToJson(json_proto, unused_encoder=None): - return json.dumps(_JsonProtoToPythonValue(json_proto)) - - -def _JsonToJsonProto(json_data, unused_decoder=None): - return _PythonValueToJsonProto(json.loads(json_data)) - - -def _JsonToJsonValue(json_data, unused_decoder=None): - result = _PythonValueToJsonProto(json.loads(json_data)) - if isinstance(result, JsonValue): - return result - elif isinstance(result, JsonObject): - return JsonValue(object_value=result) - elif isinstance(result, JsonArray): - return JsonValue(array_value=result) - else: - raise exceptions.InvalidDataError( - 'Malformed JsonValue: %s' % json_data) - - -# pylint:disable=invalid-name -JsonProtoEncoder = _JsonProtoToJson -JsonProtoDecoder = _JsonToJsonProto -# pylint:enable=invalid-name -encoding.RegisterCustomMessageCodec( - encoder=JsonProtoEncoder, decoder=_JsonToJsonValue)(JsonValue) -encoding.RegisterCustomMessageCodec( - encoder=JsonProtoEncoder, decoder=JsonProtoDecoder)(JsonObject) -encoding.RegisterCustomMessageCodec( - encoder=JsonProtoEncoder, decoder=JsonProtoDecoder)(JsonArray) - - -def _EncodeDateTimeField(field, value): - result = protojson.ProtoJson().encode_field(field, value) - return encoding.CodecResult(value=result, complete=True) - - -def _DecodeDateTimeField(unused_field, value): - result = protojson.ProtoJson().decode_field( - message_types.DateTimeField(1), value) - return encoding.CodecResult(value=result, complete=True) - - -encoding.RegisterFieldTypeCodec(_EncodeDateTimeField, _DecodeDateTimeField)( - message_types.DateTimeField) - - -def _EncodeInt64Field(field, value): - """Handle the special case of int64 as a string.""" - capabilities = [ - messages.Variant.INT64, - messages.Variant.UINT64, - ] - if field.variant not in capabilities: - return encoding.CodecResult(value=value, complete=False) - - if field.repeated: - result = [str(x) for x in value] - else: - result = str(value) - return encoding.CodecResult(value=result, 
complete=True) - - -def _DecodeInt64Field(unused_field, value): - # Don't need to do anything special, they're decoded just fine - return encoding.CodecResult(value=value, complete=False) - -encoding.RegisterFieldTypeCodec(_EncodeInt64Field, _DecodeInt64Field)( - messages.IntegerField) - - -def _EncodeDateField(field, value): - """Encoder for datetime.date objects.""" - if field.repeated: - result = [d.isoformat() for d in value] - else: - result = value.isoformat() - return encoding.CodecResult(value=result, complete=True) - - -def _DecodeDateField(unused_field, value): - date = datetime.datetime.strptime(value, '%Y-%m-%d').date() - return encoding.CodecResult(value=date, complete=True) - -encoding.RegisterFieldTypeCodec(_EncodeDateField, _DecodeDateField)(DateField) diff --git a/_gcloud_vendor/apitools/base/py/extra_types_test.py b/_gcloud_vendor/apitools/base/py/extra_types_test.py deleted file mode 100644 index 457c606a2a99..000000000000 --- a/_gcloud_vendor/apitools/base/py/extra_types_test.py +++ /dev/null @@ -1,175 +0,0 @@ -#!/usr/bin/env python - - -import datetime -import json -import math - -from protorpc import messages -import unittest2 - -from apitools.base.py import encoding -from apitools.base.py import exceptions -from apitools.base.py import extra_types - - -class ExtraTypesTest(unittest2.TestCase): - - def assertRoundTrip(self, value): - if isinstance(value, extra_types._JSON_PROTO_TYPES): - self.assertEqual( - value, - extra_types._PythonValueToJsonProto( - extra_types._JsonProtoToPythonValue(value))) - else: - self.assertEqual( - value, - extra_types._JsonProtoToPythonValue( - extra_types._PythonValueToJsonProto(value))) - - def assertTranslations(self, py_value, json_proto): - self.assertEqual(py_value, extra_types._JsonProtoToPythonValue(json_proto)) - self.assertEqual(json_proto, extra_types._PythonValueToJsonProto(py_value)) - - def testInvalidProtos(self): - with self.assertRaises(exceptions.InvalidDataError): - extra_types._ValidateJsonValue(extra_types.JsonValue()) - with self.assertRaises(exceptions.InvalidDataError): - extra_types._ValidateJsonValue( - extra_types.JsonValue(is_null=True, string_value='a')) - with self.assertRaises(exceptions.InvalidDataError): - extra_types._ValidateJsonValue( - extra_types.JsonValue(integer_value=3, string_value='a')) - - def testNullEncoding(self): - self.assertTranslations(None, extra_types.JsonValue(is_null=True)) - - def testJsonNumberEncoding(self): - seventeen = extra_types.JsonValue(integer_value=17) - self.assertRoundTrip(17) - self.assertRoundTrip(seventeen) - self.assertTranslations(17, seventeen) - - json_pi = extra_types.JsonValue(double_value=math.pi) - self.assertRoundTrip(math.pi) - self.assertRoundTrip(json_pi) - self.assertTranslations(math.pi, json_pi) - - def testArrayEncoding(self): - array = [3, 'four', False] - json_array = extra_types.JsonArray(entries=[ - extra_types.JsonValue(integer_value=3), - extra_types.JsonValue(string_value='four'), - extra_types.JsonValue(boolean_value=False), - ]) - self.assertRoundTrip(array) - self.assertRoundTrip(json_array) - self.assertTranslations(array, json_array) - - def testArrayAsValue(self): - array_json = '[3, "four", false]' - array = [3, 'four', False] - value = encoding.JsonToMessage(extra_types.JsonValue, array_json) - self.assertTrue(isinstance(value, extra_types.JsonValue)) - self.assertEqual(array, encoding.MessageToPyValue(value)) - - def testObjectAsValue(self): - obj_json = '{"works": true}' - obj = {'works': True} - value = 
encoding.JsonToMessage(extra_types.JsonValue, obj_json) - self.assertTrue(isinstance(value, extra_types.JsonValue)) - self.assertEqual(obj, encoding.MessageToPyValue(value)) - - def testDictEncoding(self): - d = {'a': 6, 'b': 'eleventeen'} - json_d = extra_types.JsonObject(properties=[ - extra_types.JsonObject.Property( - key='a', value=extra_types.JsonValue(integer_value=6)), - extra_types.JsonObject.Property( - key='b', value=extra_types.JsonValue(string_value='eleventeen')), - ]) - self.assertRoundTrip(d) - # We don't know json_d will round-trip, because of randomness in - # python dictionary iteration ordering. We also need to force - # comparison as lists, since hashing protos isn't helpful. - translated_properties = extra_types._PythonValueToJsonProto(d).properties - for p in json_d.properties: - self.assertIn(p, translated_properties) - for p in translated_properties: - self.assertIn(p, json_d.properties) - - def testJsonObjectPropertyTranslation(self): - value = extra_types.JsonValue(string_value='abc') - obj = extra_types.JsonObject(properties=[ - extra_types.JsonObject.Property(key='attr_name', value=value)]) - json_value = '"abc"' - json_obj = '{"attr_name": "abc"}' - - self.assertRoundTrip(value) - self.assertRoundTrip(obj) - self.assertRoundTrip(json_value) - self.assertRoundTrip(json_obj) - - self.assertEqual(json_value, encoding.MessageToJson(value)) - self.assertEqual(json_obj, encoding.MessageToJson(obj)) - - def testDateField(self): - - class DateMsg(messages.Message): - start_date = extra_types.DateField(1) - all_dates = extra_types.DateField(2, repeated=True) - - msg = DateMsg( - start_date=datetime.date(1752, 9, 9), all_dates=[ - datetime.date(1979, 5, 6), - datetime.date(1980, 10, 24), - datetime.date(1981, 1, 19), - ]) - json_msg = json.dumps({ - 'start_date': '1752-09-09', 'all_dates': [ - '1979-05-06', '1980-10-24', '1981-01-19', - ]}) - self.assertEqual(json_msg, encoding.MessageToJson(msg)) - self.assertEqual(msg, encoding.JsonToMessage(DateMsg, json_msg)) - - def testInt64(self): - # Testing roundtrip of type 'long' - - class DogeMsg(messages.Message): - such_string = messages.StringField(1) - wow = messages.IntegerField(2, variant=messages.Variant.INT64) - very_unsigned = messages.IntegerField(3, variant=messages.Variant.UINT64) - much_repeated = messages.IntegerField( - 4, variant=messages.Variant.INT64, repeated=True) - - def MtoJ(msg): - return encoding.MessageToJson(msg) - - def JtoM(class_type, json_str): - return encoding.JsonToMessage(class_type, json_str) - - def DoRoundtrip(class_type, json_msg=None, message=None, times=4): - if json_msg: - json_msg = MtoJ(JtoM(class_type, json_msg)) - if message: - message = JtoM(class_type, MtoJ(message)) - if times == 0: - result = json_msg if json_msg else message - return result - return DoRoundtrip(class_type=class_type, json_msg=json_msg, - message=message, times=times - 1) - - # Single - json_msg = ('{"such_string": "poot", "wow": "-1234",' - ' "very_unsigned": "999", "much_repeated": ["123", "456"]}') - out_json = MtoJ(JtoM(DogeMsg, json_msg)) - self.assertEqual(json.loads(out_json)['wow'], '-1234') - - # Repeated test case - msg = DogeMsg(such_string='wow', wow=-1234, - very_unsigned=800, much_repeated=[123, 456]) - self.assertEqual(msg, DoRoundtrip(DogeMsg, message=msg)) - - -if __name__ == '__main__': - unittest2.main() diff --git a/_gcloud_vendor/apitools/base/py/list_pager.py b/_gcloud_vendor/apitools/base/py/list_pager.py deleted file mode 100644 index d8f5971da336..000000000000 --- 
a/_gcloud_vendor/apitools/base/py/list_pager.py +++ /dev/null @@ -1,49 +0,0 @@ -#!/usr/bin/env python -"""A helper function that executes a series of List queries for many APIs.""" - -import copy - -__all__ = [ - 'YieldFromList', -] - - -def YieldFromList( - service, request, limit=None, batch_size=100, - method='List', field='items', predicate=None): - """Make a series of List requests, keeping track of page tokens. - - Args: - service: apitools_base.BaseApiService, A service with a .List() method. - request: protorpc.messages.Message, The request message corresponding to the - service's .List() method, with all the attributes populated except - the .maxResults and .pageToken attributes. - limit: int, The maximum number of records to yield. None if all available - records should be yielded. - batch_size: int, The number of items to retrieve per request. - method: str, The name of the method used to fetch resources. - field: str, The field in the response that will be a list of items. - predicate: lambda, A function that returns true for items to be yielded. - - Yields: - protorpc.message.Message, The resources listed by the service. - - """ - request = copy.deepcopy(request) - request.maxResults = batch_size - request.pageToken = None - while limit is None or limit: - response = getattr(service, method)(request) - items = getattr(response, field) - if predicate: - items = filter(predicate, items) - for item in items: - yield item - if limit is None: - continue - limit -= 1 - if not limit: - return - request.pageToken = response.nextPageToken - if not request.pageToken: - return From f4a53ee64fad5f3d7f29a0341e6a72a060edfcc2 Mon Sep 17 00:00:00 2001 From: Tres Seaver Date: Tue, 2 Dec 2014 14:43:30 -0500 Subject: [PATCH 6/6] Fix up imports. --- _gcloud_vendor/apitools/base/py/http_wrapper.py | 4 ++-- _gcloud_vendor/apitools/base/py/transfer.py | 6 +++--- _gcloud_vendor/apitools/base/py/util.py | 2 +- 3 files changed, 6 insertions(+), 6 deletions(-) diff --git a/_gcloud_vendor/apitools/base/py/http_wrapper.py b/_gcloud_vendor/apitools/base/py/http_wrapper.py index 8c3ee281f541..80454f495752 100644 --- a/_gcloud_vendor/apitools/base/py/http_wrapper.py +++ b/_gcloud_vendor/apitools/base/py/http_wrapper.py @@ -14,8 +14,8 @@ import httplib2 -from apitools.base.py import exceptions -from apitools.base.py import util +from _gcloud_vendor.apitools.base.py import exceptions +from _gcloud_vendor.apitools.base.py import util __all__ = [ 'GetHttp', diff --git a/_gcloud_vendor/apitools/base/py/transfer.py b/_gcloud_vendor/apitools/base/py/transfer.py index 610ef2d5868b..46dbc7f22c82 100644 --- a/_gcloud_vendor/apitools/base/py/transfer.py +++ b/_gcloud_vendor/apitools/base/py/transfer.py @@ -12,9 +12,9 @@ import StringIO import threading -from apitools.base.py import exceptions -from apitools.base.py import http_wrapper -from apitools.base.py import util +from _gcloud_vendor.apitools.base.py import exceptions +from _gcloud_vendor.apitools.base.py import http_wrapper +from _gcloud_vendor.apitools.base.py import util __all__ = [ 'Download', diff --git a/_gcloud_vendor/apitools/base/py/util.py b/_gcloud_vendor/apitools/base/py/util.py index cd882a7e9342..4d64bedf538e 100644 --- a/_gcloud_vendor/apitools/base/py/util.py +++ b/_gcloud_vendor/apitools/base/py/util.py @@ -9,7 +9,7 @@ import urllib import urllib2 -from apitools.base.py import exceptions +from _gcloud_vendor.apitools.base.py import exceptions __all__ = [ 'DetectGae',