From 1be93d88b65c9196da2fa02432f29ba8aaa38d39 Mon Sep 17 00:00:00 2001 From: Jonas Kalderstam Date: Sat, 28 Sep 2013 12:15:47 +0200 Subject: [PATCH] Google App Engine server added Signed-off-by: Jonas Kalderstam --- app-engine-app/.gitignore | 2 + app-engine-app/Makefile | 13 + app-engine-app/README.md | 3 + app-engine-app/app.py | 228 ++ app-engine-app/app.yaml | 16 + app-engine-app/app_gcm.py | 78 + app-engine-app/appengine_config.py | 6 + app-engine-app/endpoints/LICENSE | 202 ++ app-engine-app/endpoints/__init__.py | 41 + app-engine-app/endpoints/api_backend.py | 102 + .../endpoints/api_backend_service.py | 188 ++ app-engine-app/endpoints/api_config.py | 2186 +++++++++++++++++ app-engine-app/endpoints/api_exceptions.py | 56 + app-engine-app/endpoints/apiserving.py | 508 ++++ app-engine-app/endpoints/message_parser.py | 227 ++ app-engine-app/endpoints/protojson.py | 90 + app-engine-app/endpoints/users_id_token.py | 641 +++++ app-engine-app/gcm/__init__.py | 4 + app-engine-app/gcm/gcm.py | 271 ++ app-engine-app/gcm/test.py | 222 ++ 20 files changed, 5084 insertions(+) create mode 100644 app-engine-app/.gitignore create mode 100644 app-engine-app/Makefile create mode 100644 app-engine-app/README.md create mode 100644 app-engine-app/app.py create mode 100644 app-engine-app/app.yaml create mode 100644 app-engine-app/app_gcm.py create mode 100644 app-engine-app/appengine_config.py create mode 100644 app-engine-app/endpoints/LICENSE create mode 100644 app-engine-app/endpoints/__init__.py create mode 100644 app-engine-app/endpoints/api_backend.py create mode 100644 app-engine-app/endpoints/api_backend_service.py create mode 100644 app-engine-app/endpoints/api_config.py create mode 100644 app-engine-app/endpoints/api_exceptions.py create mode 100644 app-engine-app/endpoints/apiserving.py create mode 100644 app-engine-app/endpoints/message_parser.py create mode 100644 app-engine-app/endpoints/protojson.py create mode 100644 app-engine-app/endpoints/users_id_token.py 
create mode 100644 app-engine-app/gcm/__init__.py create mode 100644 app-engine-app/gcm/gcm.py create mode 100644 app-engine-app/gcm/test.py diff --git a/app-engine-app/.gitignore b/app-engine-app/.gitignore new file mode 100644 index 0000000..4b6c254 --- /dev/null +++ b/app-engine-app/.gitignore @@ -0,0 +1,2 @@ +*.pyc +index.yaml diff --git a/app-engine-app/Makefile b/app-engine-app/Makefile new file mode 100644 index 0000000..f96e479 --- /dev/null +++ b/app-engine-app/Makefile @@ -0,0 +1,13 @@ +GAE=/home/jonas/Downloads/google_appengine +DEVSERVER=$(GAE)/dev_appserver.py +APPCFG=$(GAE)/appcfg.py + +# http://localhost:8080/_ah/api/explorer +local: + $(DEVSERVER) --host=0.0.0.0 ./ + +clear: + $(DEVSERVER) --clear_datastore=yes --host=0.0.0.0 ./ + +deploy: + $(APPCFG) update --oauth2 ./ diff --git a/app-engine-app/README.md b/app-engine-app/README.md new file mode 100644 index 0000000..130c27f --- /dev/null +++ b/app-engine-app/README.md @@ -0,0 +1,3 @@ +## Coming soon + +Meanwhile, test the apk diff --git a/app-engine-app/app.py b/app-engine-app/app.py new file mode 100644 index 0000000..2eb3195 --- /dev/null +++ b/app-engine-app/app.py @@ -0,0 +1,228 @@ +import os, binascii +from datetime import datetime + +import endpoints +#from google.appengine.ext import endpoints +from google.appengine.ext import ndb +from protorpc import messages +from protorpc import message_types +from protorpc import remote + +from app_gcm import send_link, GCMRegIdModel + + +def datetime_to_string(datetime_object): + '''Converts a datetime object to a + timestamp string in the format: + + 2013-09-23 23:23:12.123456''' + return datetime_object.isoformat(sep=' ') + +def parse_timestamp(timestamp): + '''Parses a timestamp string. 
+ Supports two formats, examples: + + In second precision + >>> parse_timestamp("2013-09-29 13:21:42") + datetime object + + Or in fractional second precision (shown in microseconds) + >>> parse_timestamp("2013-09-29 13:21:42.123456") + datetime object + + Returns None on failure to parse + >>> parse_timestamp("2013-09-22") + None + ''' + result = None + try: + # Microseconds + result = datetime.strptime(timestamp, '%Y-%m-%d %H:%M:%S.%f') + except ValueError: + pass + + try: + # Seconds + result = datetime.strptime(timestamp, '%Y-%m-%d %H:%M:%S') + except ValueError: + pass + + return result + +class Link(messages.Message): + url = messages.StringField(1, required=True) + sha = messages.StringField(2) + deleted = messages.BooleanField(3, default=False) + timestamp = messages.StringField(4) + +POST_REQUEST = endpoints.ResourceContainer( + Link, + regid=messages.StringField(2)) + + +class LinkModel(ndb.Model): + sha = ndb.StringProperty(required=True) + url = ndb.StringProperty(required=True) + deleted = ndb.BooleanProperty(required=True, default=False) + userid = ndb.UserProperty(required=True) + timestamp = ndb.DateTimeProperty(required=True, auto_now=True) + +# Used to request a link to be deleted. 
+# Has no body, only URL parameter +DELETE_REQUEST = endpoints.ResourceContainer( + message_types.VoidMessage, + sha=messages.StringField(2, required=True), + regid=messages.StringField(3)) + +class LinkList(messages.Message): + latestTimestamp = messages.StringField(2) + links = messages.MessageField(Link, 1, repeated=True) + +# Used to request the list with query parameters +LIST_REQUEST = endpoints.ResourceContainer( + message_types.VoidMessage, + showDeleted=messages.BooleanField(2, default=False), + timestampMin=messages.StringField(3)) + +# Add a device id to the user, database model in app_gcm.py +class GCMRegId(messages.Message): + regid = messages.StringField(1, required=True) + + +# Client id for webapps +CLIENT_ID = '86425096293.apps.googleusercontent.com' +# Client id for devices (android apps) +CLIENT_ID_ANDROID = '86425096293-v1er84h8bmp6c3pcsmdkgupr716u7jha.apps.googleusercontent.com' + +@endpoints.api(name='links', version='v1', + description='API for Link Management', + allowed_client_ids=[CLIENT_ID,CLIENT_ID_ANDROID, + endpoints.API_EXPLORER_CLIENT_ID] + ) +class LinkApi(remote.Service): + '''This is the REST API. 
Annotations + specify address, HTTP method and expected + messages.''' + + @endpoints.method(POST_REQUEST, Link, + name = 'link.insert', + path = 'links', + http_method = 'POST') + def add_link(self, request): + current_user = endpoints.get_current_user() + if current_user is None: + raise endpoints.UnauthorizedException('Invalid token.') + + # Generate an ID if one wasn't included + sha = request.sha + if sha is None: + sha = binascii.b2a_hex(os.urandom(15)) + # Construct object to save + link = LinkModel(key=ndb.Key(LinkModel, sha), + sha=sha, + url=request.url, + deleted=request.deleted, + userid=current_user) + # And save it + link.put() + + # Notify through GCM + send_link(link, request.regid) + + # Return a complete link + return Link(url = link.url, + sha = link.sha, + timestamp = datetime_to_string(link.timestamp)) + + @endpoints.method(DELETE_REQUEST, message_types.VoidMessage, + name = 'link.delete', + path = 'links/{sha}', + http_method = 'DELETE') + def delete_link(self, request): + current_user = endpoints.get_current_user() + if current_user is None: + raise endpoints.UnauthorizedException('Invalid token.') + + link_key = ndb.Key(LinkModel, request.sha) + link = link_key.get() + if link is not None: + link.deleted = True + link.put() + else: + raise endpoints.NotFoundException('No such item') + + # Notify through GCM + send_link(link, request.regid) + + return message_types.VoidMessage() + + @endpoints.method(LIST_REQUEST, LinkList, + name = 'link.list', + path = 'links', + http_method = 'GET') + def list_links(self, request): + current_user = endpoints.get_current_user() + if current_user is None: + raise endpoints.UnauthorizedException('Invalid token.') + + # Build the query + q = LinkModel.query(LinkModel.userid == current_user) + q = q.order(LinkModel.timestamp) + + # Filter on delete + if not request.showDeleted: + q = q.filter(LinkModel.deleted == False) + + # Filter on timestamp + if (request.timestampMin is not None and + 
parse_timestamp(request.timestampMin) is not None): + q = q.filter(LinkModel.timestamp >\ + parse_timestamp(request.timestampMin)) + + # Get the links + links = [] + latest_time = None + for link in q: + ts = link.timestamp + # Find the latest time + if latest_time is None: + latest_time = ts + else: + delta = ts - latest_time + if delta.total_seconds() > 0: + latest_time = ts + + # Append to results + links.append(Link(url=link.url, sha=link.sha, + deleted=link.deleted, + timestamp=datetime_to_string(ts))) + + if latest_time is None: + latest_time = datetime(1970, 1, 1, 0, 0) + + return LinkList(links=links, + latestTimestamp=datetime_to_string(latest_time)) + + @endpoints.method(GCMRegId, message_types.VoidMessage, + name = 'gcm.register', + path = 'registergcm', + http_method = 'POST') + def register_gcm(self, request): + current_user = endpoints.get_current_user() + if current_user is None: + raise endpoints.UnauthorizedException('Invalid token.') + + device = GCMRegIdModel(key=ndb.Key(GCMRegIdModel, request.regid), + regid=request.regid, + userid=current_user) + # And save it + device.put() + + # Return nothing + return message_types.VoidMessage() + + +if __name__ != "__main__": + # Set the application for GAE + application = endpoints.api_server([LinkApi], + restricted=False) diff --git a/app-engine-app/app.yaml b/app-engine-app/app.yaml new file mode 100644 index 0000000..77780c2 --- /dev/null +++ b/app-engine-app/app.yaml @@ -0,0 +1,16 @@ +application: esoteric-storm-343 +version: 1 +runtime: python27 +api_version: 1 +threadsafe: true + +handlers: +- url: /_ah/spi/.* + script: app.application + +libraries: +- name: endpoints + version: 1.0 +# Needed for endpoints/users_id_token.py. 
+- name: pycrypto + version: "2.6" \ No newline at end of file diff --git a/app-engine-app/app_gcm.py b/app-engine-app/app_gcm.py new file mode 100644 index 0000000..90227b9 --- /dev/null +++ b/app-engine-app/app_gcm.py @@ -0,0 +1,78 @@ +from __future__ import print_function, division +from threading import Thread +from functools import wraps +from gcm import GCM + +from google.appengine.ext import ndb + +gcm = GCM('Your API key here') + +class GCMRegIdModel(ndb.Model): + regid = ndb.StringProperty(required=True) + userid = ndb.UserProperty(required=True) + +def to_dict(link): + return dict(sha=link.sha, + url=link.url, + timestamp=link.timestamp.isoformat(sep=" "), + deleted=link.deleted) + + +def send_link(link, excludeid=None): + '''Transmits the link specified by the sha to the users devices. + + Does not run in a separate thread because App-Engine did not + seem to support that. + ''' + # Get devices + reg_ids = [] + query = GCMRegIdModel.query(GCMRegIdModel.userid == link.userid) + + for reg_model in query: + reg_ids.append(reg_model.regid) + + # Dont send to origin device, if specified + try: + reg_ids.remove(excludeid) + except ValueError: + pass # not in list, or None + + if len(reg_ids) < 1: + return + + _send(link.userid, reg_ids, to_dict(link)) + + +def _remove_regid(regid): + ndb.Key(GCMRegIdModel, regid).delete() + + +def _replace_regid(userid, oldid, newid): + _remove_regid(oldid) + device = GCMRegIdModel(key=ndb.Key(GCMRegIdModel, newid), + regid=newid, + userid=userid) + device.put() + + +def _send(userid, rids, data): + '''Send the data using GCM''' + response = gcm.json_request(registration_ids=rids, + data=data, + delay_while_idle=True) + + # A device has switched registration id + if 'canonical' in response: + for reg_id, canonical_id in response['canonical'].items(): + # Repace reg_id with canonical_id in your database + _replace_regid(userid, reg_id, canonical_id) + + # Handling errors + if 'errors' in response: + for error, reg_ids in 
response['errors'].items(): + # Check for errors and act accordingly + if (error == 'NotRegistered' or + error == 'InvalidRegistration'): + # Remove reg_ids from database + for regid in reg_ids: + _remove_regid(regid) diff --git a/app-engine-app/appengine_config.py b/app-engine-app/appengine_config.py new file mode 100644 index 0000000..5fb4e86 --- /dev/null +++ b/app-engine-app/appengine_config.py @@ -0,0 +1,6 @@ +import os +import sys + +ENDPOINTS_PROJECT_DIR = os.path.join(os.path.dirname(__file__), + 'endpoints-proto-datastore') +sys.path.append(ENDPOINTS_PROJECT_DIR) diff --git a/app-engine-app/endpoints/LICENSE b/app-engine-app/endpoints/LICENSE new file mode 100644 index 0000000..d645695 --- /dev/null +++ b/app-engine-app/endpoints/LICENSE @@ -0,0 +1,202 @@ + + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. 
+ + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." 
+ + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. 
You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. 
+ + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. Limitation of Liability. 
In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. 
+ + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/app-engine-app/endpoints/__init__.py b/app-engine-app/endpoints/__init__.py new file mode 100644 index 0000000..8d38414 --- /dev/null +++ b/app-engine-app/endpoints/__init__.py @@ -0,0 +1,41 @@ +#!/usr/bin/env python +# +# Copyright 2007 Google Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + + + + +"""Apiserving Module.""" + + + + + + +from api_config import api +from api_config import API_EXPLORER_CLIENT_ID +from api_config import AUTH_LEVEL +from api_config import EMAIL_SCOPE +from api_config import method +from api_config import ResourceContainer +from api_exceptions import * +from apiserving import * +import message_parser +from users_id_token import get_current_user +from users_id_token import InvalidGetUserCall +from users_id_token import SKIP_CLIENT_ID_CHECK + +__version__ = '1.0' diff --git a/app-engine-app/endpoints/api_backend.py b/app-engine-app/endpoints/api_backend.py new file mode 100644 index 0000000..26ba70b --- /dev/null +++ b/app-engine-app/endpoints/api_backend.py @@ -0,0 +1,102 @@ +#!/usr/bin/env python +# +# Copyright 2007 Google Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# + + +"""Interface to the BackendService that serves API configurations.""" + + +import logging + +from protorpc import message_types +from protorpc import messages +from protorpc import remote + +package = 'google.appengine.endpoints' + + +__all__ = [ + 'GetApiConfigsRequest', + 'LogMessagesRequest', + 'ApiConfigList', + 'BackendService', + 'package', +] + + +class GetApiConfigsRequest(messages.Message): + """Request body for fetching API configs.""" + appRevision = messages.StringField(1) + + +class ApiConfigList(messages.Message): + """List of API configuration file contents.""" + items = messages.StringField(1, repeated=True) + + +class LogMessagesRequest(messages.Message): + """Request body for log messages sent by Swarm FE.""" + + class LogMessage(messages.Message): + """A single log message within a LogMessagesRequest.""" + + class Level(messages.Enum): + """Levels that can be specified for a log message.""" + debug = logging.DEBUG + info = logging.INFO + warning = logging.WARNING + error = logging.ERROR + critical = logging.CRITICAL + + level = messages.EnumField(Level, 1) + message = messages.StringField(2, required=True) + + messages = messages.MessageField(LogMessage, 1, repeated=True) + + +class BackendService(remote.Service): + """API config enumeration service used by Google API Server. + + This is a simple API providing a list of APIs served by this App Engine + instance. It is called by the Google API Server during app deployment + to get an updated interface for each of the supported APIs. + """ + + + + @remote.method(GetApiConfigsRequest, ApiConfigList) + def getApiConfigs(self, request): + """Return a list of active APIs and their configuration files. + + Args: + request: A request which may contain an app revision + + Returns: + List of ApiConfigMessages + """ + raise NotImplementedError() + + @remote.method(LogMessagesRequest, message_types.VoidMessage) + def logMessages(self, request): + """Write a log message from the Swarm FE to the log. 
+ + Args: + request: A log message request. + + Returns: + Void message. + """ + raise NotImplementedError() diff --git a/app-engine-app/endpoints/api_backend_service.py b/app-engine-app/endpoints/api_backend_service.py new file mode 100644 index 0000000..9b46357 --- /dev/null +++ b/app-engine-app/endpoints/api_backend_service.py @@ -0,0 +1,188 @@ +#!/usr/bin/env python +# +# Copyright 2007 Google Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + + +"""Api serving config collection service implementation. + +Contains the implementation for BackendService as defined in api_backend.py. +""" + + + +try: + import json +except ImportError: + import simplejson as json +import logging + +from endpoints import api_backend +from endpoints import api_config +from endpoints import api_exceptions +from protorpc import message_types + + +__all__ = [ + 'ApiConfigRegistry', + 'BackendServiceImpl', +] + + +class ApiConfigRegistry(object): + """Registry of active APIs to be registered with Google API Server.""" + + def __init__(self): + + self.__registered_classes = set() + + self.__api_configs = set() + + self.__api_methods = {} + + + def register_spi(self, config_contents): + """Register a single SPI and its config contents. + + Args: + config_contents: String containing API configuration. 
+ """ + if config_contents is None: + return + parsed_config = json.loads(config_contents) + self.__register_class(parsed_config) + self.__api_configs.add(config_contents) + self.__register_methods(parsed_config) + + def __register_class(self, parsed_config): + """Register the class implementing this config, so we only add it once. + + Args: + parsed_config: The JSON object with the API configuration being added. + + Raises: + ApiConfigurationError: If the class has already been registered. + """ + methods = parsed_config.get('methods') + if not methods: + return + + + service_classes = set() + for method in methods.itervalues(): + rosy_method = method.get('rosyMethod') + if rosy_method and '.' in rosy_method: + method_class = rosy_method.split('.', 1)[0] + service_classes.add(method_class) + + for service_class in service_classes: + if service_class in self.__registered_classes: + raise api_config.ApiConfigurationError( + 'SPI class %s has already been registered.' % service_class) + self.__registered_classes.add(service_class) + + def __register_methods(self, parsed_config): + """Register all methods from the given api config file. + + Methods are stored in a map from method_name to rosyMethod, + the name of the ProtoRPC method to be called on the backend. + If no rosyMethod was specified the value will be None. + + Args: + parsed_config: The JSON object with the API configuration being added. + """ + methods = parsed_config.get('methods') + if not methods: + return + + for method_name, method in methods.iteritems(): + self.__api_methods[method_name] = method.get('rosyMethod') + + def lookup_api_method(self, api_method_name): + """Looks an API method up by name to find the backend method to call. + + Args: + api_method_name: Name of the method in the API that was called. + + Returns: + Name of the ProtoRPC method called on the backend, or None if not found. 
+ """ + return self.__api_methods.get(api_method_name) + + def all_api_configs(self): + """Return a list of all API configration specs as registered above.""" + return list(self.__api_configs) + + +class BackendServiceImpl(api_backend.BackendService): + """Implementation of BackendService.""" + + def __init__(self, api_config_registry, app_revision): + """Create a new BackendService implementation. + + Args: + api_config_registry: ApiConfigRegistry to register and look up configs. + app_revision: string containing the current app revision. + """ + self.__api_config_registry = api_config_registry + self.__app_revision = app_revision + + + + + @staticmethod + def definition_name(): + """Override definition_name so that it is not BackendServiceImpl.""" + return api_backend.BackendService.definition_name() + + def getApiConfigs(self, request): + """Return a list of active APIs and their configuration files. + + Args: + request: A request which may contain an app revision + + Returns: + ApiConfigList: A list of API config strings + """ + if request.appRevision and request.appRevision != self.__app_revision: + raise api_exceptions.BadRequestException( + message='API backend app revision %s not the same as expected %s' % ( + self.__app_revision, request.appRevision)) + + configs = self.__api_config_registry.all_api_configs() + return api_backend.ApiConfigList(items=configs) + + def logMessages(self, request): + """Write a log message from the Swarm FE to the log. + + Args: + request: A log message request. + + Returns: + Void message. 
+ """ + Level = api_backend.LogMessagesRequest.LogMessage.Level + log = logging.getLogger(__name__) + for message in request.messages: + level = message.level if message.level is not None else Level.info + + + + record = logging.LogRecord(name=__name__, level=level.number, pathname='', + lineno='', msg=message.message, args=None, + exc_info=None) + log.handle(record) + + return message_types.VoidMessage() diff --git a/app-engine-app/endpoints/api_config.py b/app-engine-app/endpoints/api_config.py new file mode 100644 index 0000000..64b20c9 --- /dev/null +++ b/app-engine-app/endpoints/api_config.py @@ -0,0 +1,2186 @@ +#!/usr/bin/env python +# +# Copyright 2007 Google Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + + +"""Library for generating an API configuration document for a ProtoRPC backend. + +The protorpc.remote.Service is inspected and a JSON document describing +the API is returned. 
+ + class MyResponse(messages.Message): + bool_value = messages.BooleanField(1) + int32_value = messages.IntegerField(2) + + class MyService(remote.Service): + + @remote.method(message_types.VoidMessage, MyResponse) + def entries_get(self, request): + pass + + api = ApiConfigGenerator().pretty_print_config_to_json(MyService) +""" + + + +try: + import json +except ImportError: + import simplejson as json +import logging +import re + +from endpoints import message_parser +from endpoints import users_id_token +from protorpc import message_types +from protorpc import messages +from protorpc import remote +from protorpc import util + +try: + + from google.appengine.api import app_identity +except ImportError: + + from google.appengine.api import app_identity + + +__all__ = [ + 'API_EXPLORER_CLIENT_ID', + 'ApiAuth', + 'ApiConfigGenerator', + 'ApiConfigurationError', + 'ApiFrontEndLimitRule', + 'ApiFrontEndLimits', + 'CacheControl', + 'ResourceContainer', + 'EMAIL_SCOPE', + 'api', + 'method', + 'AUTH_LEVEL' +] + + +API_EXPLORER_CLIENT_ID = '292824132082.apps.googleusercontent.com' +EMAIL_SCOPE = 'https://www.googleapis.com/auth/userinfo.email' +_PATH_VARIABLE_PATTERN = r'{([a-zA-Z_][a-zA-Z_.\d]*)}' + +_MULTICLASS_MISMATCH_ERROR_TEMPLATE = ( + 'Attempting to implement service %s, version %s, with multiple ' + 'classes that aren\'t compatible. See docstring for api() for ' + 'examples how to implement a multi-class API.') + + +def _Enum(docstring, *names): + """Utility to generate enum classes used by annotations. + + Args: + docstring: Docstring for the generated enum class. + *names: Enum names. + + Returns: + A class that contains enum names as attributes. 
+ """ + enums = dict(zip(names, range(len(names)))) + reverse = dict((value, key) for key, value in enums.iteritems()) + enums['reverse_mapping'] = reverse + enums['__doc__'] = docstring + return type('Enum', (object,), enums) + +_AUTH_LEVEL_DOCSTRING = """ + Define the enums used by the auth_level annotation to specify frontend + authentication requirement. + + Frontend authentication is handled by a Google API server prior to the + request reaching backends. An early return before hitting the backend can + happen if the request does not fulfil the requirement specified by the + auth_level. + + Valid values of auth_level and their meanings are: + + AUTH_LEVEL.REQUIRED: Valid authentication credentials are required. Backend + will be called only if authentication credentials are present and valid. + + AUTH_LEVEL.OPTIONAL: Authentication is optional. If authentication credentials + are supplied they must be valid. Backend will be called if the request + contains valid authentication credentials or no authentication credentials. + + AUTH_LEVEL.OPTIONAL_CONTINUE: Authentication is optional and will be attempted + if authentication credentials are supplied. Invalid authentication + credentials will be removed but the request can always reach backend. + + AUTH_LEVEL.NONE: Frontend authentication will be skipped. If authentication is + desired, it will need to be performed by the backend. + """ + +AUTH_LEVEL = _Enum(_AUTH_LEVEL_DOCSTRING, 'REQUIRED', 'OPTIONAL', + 'OPTIONAL_CONTINUE', 'NONE') + + +class ApiConfigurationError(Exception): + """Exception thrown if there's an error in the configuration/annotations.""" + + +def _GetFieldAttributes(field): + """Decomposes field into the needed arguments to pass to the constructor. + + This can be used to create copies of the field or to compare if two fields + are "equal" (since __eq__ is not implemented on messages.Field). + + Args: + field: A ProtoRPC message field (potentially to be copied). 
+ + Raises: + TypeError: If the field is not an instance of messages.Field. + + Returns: + A pair of relevant arguments to be passed to the constructor for the field + type. The first element is a list of positional arguments for the + constructor and the second is a dictionary of keyword arguments. + """ + if not isinstance(field, messages.Field): + raise TypeError('Field %r to be copied not a ProtoRPC field.' % (field,)) + + positional_args = [] + kwargs = { + 'required': field.required, + 'repeated': field.repeated, + 'variant': field.variant, + 'default': field._Field__default, + } + + if isinstance(field, messages.MessageField): + + kwargs.pop('default') + if not isinstance(field, message_types.DateTimeField): + positional_args.insert(0, field.message_type) + elif isinstance(field, messages.EnumField): + positional_args.insert(0, field.type) + + return positional_args, kwargs + + +def _CopyField(field, number=None): + """Copies a (potentially) owned ProtoRPC field instance into a new copy. + + Args: + field: A ProtoRPC message field to be copied. + number: An integer for the field to override the number of the field. + Defaults to None. + + Raises: + TypeError: If the field is not an instance of messages.Field. + + Returns: + A copy of the ProtoRPC message field. + """ + positional_args, kwargs = _GetFieldAttributes(field) + number = number or field.number + positional_args.append(number) + return field.__class__(*positional_args, **kwargs) + + +def _CompareFields(field, other_field): + """Checks if two ProtoRPC fields are "equal". + + Compares the arguments, rather than the id of the elements (which is + the default __eq__ behavior) as well as the class of the fields. + + Args: + field: A ProtoRPC message field to be compared. + other_field: A ProtoRPC message field to be compared. + + Returns: + Boolean indicating whether the fields are equal. 
+ """ + field_attrs = _GetFieldAttributes(field) + other_field_attrs = _GetFieldAttributes(other_field) + if field_attrs != other_field_attrs: + return False + return field.__class__ == other_field.__class__ + + +class ResourceContainer(object): + """Container for a request body resource combined with parameters. + + Used for API methods which may also have path or query parameters in addition + to a request body. + + Attributes: + body_message_class: A message class to represent a request body. + parameters_message_class: A placeholder message class for request + parameters. + """ + + __remote_info_cache = {} + + __combined_message_class = None + + def __init__(self, _body_message_class=message_types.VoidMessage, **kwargs): + """Constructor for ResourceContainer. + + Stores a request body message class and attempts to create one from the + keyword arguments passed in. + + Args: + _body_message_class: A keyword argument to be treated like a positional + argument. This will not conflict with the potential names of fields + since they can't begin with underscore. We make this a keyword + argument since the default VoidMessage is a very common choice given + the prevalence of GET methods. + **kwargs: Keyword arguments specifying field names (the named arguments) + and instances of ProtoRPC fields as the values. + """ + self.body_message_class = _body_message_class + self.parameters_message_class = type('ParameterContainer', + (messages.Message,), kwargs) + + @property + def combined_message_class(self): + """A ProtoRPC message class with both request and parameters fields. + + Caches the result in a local private variable. Uses _CopyField to create + copies of the fields from the existing request and parameters classes since + those fields are "owned" by the message classes. + + Raises: + TypeError: If a field name is used in both the request message and the + parameters but the two fields do not represent the same type. 
+ + Returns: + Value of combined message class for this property. + """ + if self.__combined_message_class is not None: + return self.__combined_message_class + + fields = {} + + + + + + + + field_number = 1 + for field in self.body_message_class.all_fields(): + fields[field.name] = _CopyField(field, number=field_number) + field_number += 1 + for field in self.parameters_message_class.all_fields(): + if field.name in fields: + if not _CompareFields(field, fields[field.name]): + raise TypeError('Field %r contained in both parameters and request ' + 'body, but the fields differ.' % (field.name,)) + else: + + continue + fields[field.name] = _CopyField(field, number=field_number) + field_number += 1 + + self.__combined_message_class = type('CombinedContainer', + (messages.Message,), fields) + return self.__combined_message_class + + @classmethod + def add_to_cache(cls, remote_info, container): + """Adds a ResourceContainer to a cache tying it to a protorpc method. + + Args: + remote_info: Instance of protorpc.remote._RemoteMethodInfo corresponding + to a method. + container: An instance of ResourceContainer. + + Raises: + TypeError: if the container is not an instance of cls. + KeyError: if the remote method has been reference by a container before. + This created remote method should never occur because a remote method + is created once. + """ + if not isinstance(container, cls): + raise TypeError('%r not an instance of %r, could not be added to cache.' % + (container, cls)) + if remote_info in cls.__remote_info_cache: + raise KeyError('Cache has collision but should not.') + cls.__remote_info_cache[remote_info] = container + + @classmethod + def get_request_message(cls, remote_info): + """Gets request message or container from remote info. + + Args: + remote_info: Instance of protorpc.remote._RemoteMethodInfo corresponding + to a method. 
+ + Returns: + Either an instance of the request type from the remote or the + ResourceContainer that was cached with the remote method. + """ + if remote_info in cls.__remote_info_cache: + return cls.__remote_info_cache[remote_info] + else: + return remote_info.request_type() + + +def _CheckListType(settings, allowed_type, name, allow_none=True): + """Verify that settings in list are of the allowed type or raise TypeError. + + Args: + settings: The list of settings to check. + allowed_type: The allowed type of items in 'settings'. + name: Name of the setting, added to the exception. + allow_none: If set, None is also allowed. + + Raises: + TypeError: if setting is not of the allowed type. + + Returns: + The list of settings, for convenient use in assignment. + """ + if settings is None: + if not allow_none: + raise TypeError('%s is None, which is not allowed.' % name) + return settings + if not isinstance(settings, (tuple, list)): + raise TypeError('%s is not a list.' % name) + if not all(isinstance(i, allowed_type) for i in settings): + type_list = list(set(type(setting) for setting in settings)) + raise TypeError('%s contains types that don\'t match %s: %s' % + (name, allowed_type.__name__, type_list)) + return settings + + +def _CheckType(value, check_type, name, allow_none=True): + """Check that the type of an object is acceptable. + + Args: + value: The object whose type is to be checked. + check_type: The type that the object must be an instance of. + name: Name of the object, to be placed in any error messages. + allow_none: True if value can be None, false if not. + + Raises: + TypeError: If value is not an acceptable type. + """ + if value is None and allow_none: + return + if not isinstance(value, check_type): + raise TypeError('%s type doesn\'t match %s.' 
% (name, check_type)) + + +def _CheckEnum(value, check_type, name): + if value is None: + return + if value not in check_type.reverse_mapping: + raise TypeError('%s is not a valid value for %s' % (value, name)) + + + +class _ApiInfo(object): + """Configurable attributes of an API. + + A structured data object used to store API information associated with each + remote.Service-derived class that implements an API. This stores properties + that could be different for each class (such as the path or + collection/resource name), as well as properties common to all classes in + the API (such as API name and version). + """ + + @util.positional(2) + def __init__(self, common_info, resource_name=None, path=None, audiences=None, + scopes=None, allowed_client_ids=None, auth_level=None): + """Constructor for _ApiInfo. + + Args: + common_info: _ApiDecorator.__ApiCommonInfo, Information that's common for + all classes that implement an API. + resource_name: string, The collection that the annotated class will + implement in the API. (Default: None) + path: string, Base request path for all methods in this API. + (Default: None) + audiences: list of strings, Acceptable audiences for authentication. + (Default: None) + scopes: list of strings, Acceptable scopes for authentication. + (Default: None) + allowed_client_ids: list of strings, Acceptable client IDs for auth. + (Default: None) + auth_level: enum from AUTH_LEVEL, Frontend authentication level. 
+ (Default: None) + """ + _CheckType(resource_name, basestring, 'resource_name') + _CheckType(path, basestring, 'path') + _CheckListType(audiences, basestring, 'audiences') + _CheckListType(scopes, basestring, 'scopes') + _CheckListType(allowed_client_ids, basestring, 'allowed_client_ids') + _CheckEnum(auth_level, AUTH_LEVEL, 'auth_level') + + self.__common_info = common_info + self.__resource_name = resource_name + self.__path = path + self.__audiences = audiences + self.__scopes = scopes + self.__allowed_client_ids = allowed_client_ids + self.__auth_level = auth_level + + def is_same_api(self, other): + """Check if this implements the same API as another _ApiInfo instance.""" + if not isinstance(other, _ApiInfo): + return False + + return self.__common_info is other.__common_info + + @property + def name(self): + """Name of the API.""" + return self.__common_info.name + + @property + def version(self): + """Version of the API.""" + return self.__common_info.version + + @property + def description(self): + """Description of the API.""" + return self.__common_info.description + + @property + def hostname(self): + """Hostname for the API.""" + return self.__common_info.hostname + + @property + def audiences(self): + """List of audiences accepted for the API, overriding the defaults.""" + if self.__audiences is not None: + return self.__audiences + return self.__common_info.audiences + + @property + def scopes(self): + """List of scopes accepted for the API, overriding the defaults.""" + if self.__scopes is not None: + return self.__scopes + return self.__common_info.scopes + + @property + def allowed_client_ids(self): + """List of client IDs accepted for the API, overriding the defaults.""" + if self.__allowed_client_ids is not None: + return self.__allowed_client_ids + return self.__common_info.allowed_client_ids + + @property + def auth_level(self): + """Enum from AUTH_LEVEL specifying the frontend authentication level.""" + if self.__auth_level is not None: + 
return self.__auth_level + return self.__common_info.auth_level + + @property + def canonical_name(self): + """Canonical name for the API.""" + return self.__common_info.canonical_name + + @property + def auth(self): + """Authentication configuration information for this API.""" + return self.__common_info.auth + + @property + def owner_domain(self): + """Domain of the owner of this API.""" + return self.__common_info.owner_domain + + @property + def owner_name(self): + """Name of the owner of this API.""" + return self.__common_info.owner_name + + @property + def package_path(self): + """Package this API belongs to, '/' delimited. Used by client libs.""" + return self.__common_info.package_path + + @property + def frontend_limits(self): + """Optional query limits for unregistered developers.""" + return self.__common_info.frontend_limits + + @property + def title(self): + """Human readable name of this API.""" + return self.__common_info.title + + @property + def documentation(self): + """Link to the documentation for this version of the API.""" + return self.__common_info.documentation + + @property + def resource_name(self): + """Resource name for the class this decorates.""" + return self.__resource_name + + @property + def path(self): + """Base path prepended to any method paths in the class this decorates.""" + return self.__path + + +class _ApiDecorator(object): + """Decorator for single- or multi-class APIs. + + An instance of this class can be used directly as a decorator for a + single-class API. Or call the api_class() method to decorate a multi-class + API. + """ + + @util.positional(3) + def __init__(self, name, version, description=None, hostname=None, + audiences=None, scopes=None, allowed_client_ids=None, + canonical_name=None, auth=None, owner_domain=None, + owner_name=None, package_path=None, frontend_limits=None, + title=None, documentation=None, auth_level=None): + """Constructor for _ApiDecorator. + + Args: + name: string, Name of the API. 
+ version: string, Version of the API. + description: string, Short description of the API (Default: None) + hostname: string, Hostname of the API (Default: app engine default host) + audiences: list of strings, Acceptable audiences for authentication. + scopes: list of strings, Acceptable scopes for authentication. + allowed_client_ids: list of strings, Acceptable client IDs for auth. + canonical_name: string, the canonical name for the API, a more human + readable version of the name. + auth: ApiAuth instance, the authentication configuration information + for this API. + owner_domain: string, the domain of the person or company that owns + this API. Along with owner_name, this provides hints to properly + name client libraries for this API. + owner_name: string, the name of the owner of this API. Along with + owner_domain, this provides hints to properly name client libraries + for this API. + package_path: string, the "package" this API belongs to. This '/' + delimited value specifies logical groupings of APIs. This is used by + client libraries of this API. + frontend_limits: ApiFrontEndLimits, optional query limits for unregistered + developers. + title: string, the human readable title of your API. It is exposed in the + discovery service. + documentation: string, a URL where users can find documentation about this + version of the API. This will be surfaced in the API Explorer and GPE + plugin to allow users to learn about your service. + auth_level: enum from AUTH_LEVEL, Frontend authentication level. 
+ """ + self.__common_info = self.__ApiCommonInfo( + name, version, description=description, hostname=hostname, + audiences=audiences, scopes=scopes, + allowed_client_ids=allowed_client_ids, + canonical_name=canonical_name, auth=auth, owner_domain=owner_domain, + owner_name=owner_name, package_path=package_path, + frontend_limits=frontend_limits, title=title, + documentation=documentation, auth_level=auth_level) + self.__classes = [] + + class __ApiCommonInfo(object): + """API information that's common among all classes that implement an API. + + When a remote.Service-derived class implements part of an API, there is + some common information that remains constant across all such classes + that implement the same API. This includes things like name, version, + hostname, and so on. __ApiComminInfo stores that common information, and + a single __ApiCommonInfo instance is shared among all classes that + implement the same API, guaranteeing that they share the same common + information. + + Some of these values can be overridden (such as audiences and scopes), + while some can't and remain the same for all classes that implement + the API (such as name and version). + """ + + @util.positional(3) + def __init__(self, name, version, description=None, hostname=None, + audiences=None, scopes=None, allowed_client_ids=None, + canonical_name=None, auth=None, owner_domain=None, + owner_name=None, package_path=None, frontend_limits=None, + title=None, documentation=None, auth_level=None): + """Constructor for _ApiCommonInfo. + + Args: + name: string, Name of the API. + version: string, Version of the API. + description: string, Short description of the API (Default: None) + hostname: string, Hostname of the API (Default: app engine default host) + audiences: list of strings, Acceptable audiences for authentication. + scopes: list of strings, Acceptable scopes for authentication. + allowed_client_ids: list of strings, Acceptable client IDs for auth. 
+ canonical_name: string, the canonical name for the API, a more human + readable version of the name. + auth: ApiAuth instance, the authentication configuration information + for this API. + owner_domain: string, the domain of the person or company that owns + this API. Along with owner_name, this provides hints to properly + name client libraries for this API. + owner_name: string, the name of the owner of this API. Along with + owner_domain, this provides hints to properly name client libraries + for this API. + package_path: string, the "package" this API belongs to. This '/' + delimited value specifies logical groupings of APIs. This is used by + client libraries of this API. + frontend_limits: ApiFrontEndLimits, optional query limits for + unregistered developers. + title: string, the human readable title of your API. It is exposed in + the discovery service. + documentation: string, a URL where users can find documentation about + this version of the API. This will be surfaced in the API Explorer and + GPE plugin to allow users to learn about your service. + auth_level: enum from AUTH_LEVEL, Frontend authentication level. 
+ """ + _CheckType(name, basestring, 'name', allow_none=False) + _CheckType(version, basestring, 'version', allow_none=False) + _CheckType(description, basestring, 'description') + _CheckType(hostname, basestring, 'hostname') + _CheckListType(audiences, basestring, 'audiences') + _CheckListType(scopes, basestring, 'scopes') + _CheckListType(allowed_client_ids, basestring, 'allowed_client_ids') + _CheckType(canonical_name, basestring, 'canonical_name') + _CheckType(auth, ApiAuth, 'auth') + _CheckType(owner_domain, basestring, 'owner_domain') + _CheckType(owner_name, basestring, 'owner_name') + _CheckType(package_path, basestring, 'package_path') + _CheckType(frontend_limits, ApiFrontEndLimits, 'frontend_limits') + _CheckType(title, basestring, 'title') + _CheckType(documentation, basestring, 'documentation') + _CheckEnum(auth_level, AUTH_LEVEL, 'auth_level') + + if hostname is None: + hostname = app_identity.get_default_version_hostname() + if audiences is None: + audiences = [] + if scopes is None: + scopes = [EMAIL_SCOPE] + if allowed_client_ids is None: + allowed_client_ids = [API_EXPLORER_CLIENT_ID] + if auth_level is None: + auth_level = AUTH_LEVEL.NONE + + self.__name = name + self.__version = version + self.__description = description + self.__hostname = hostname + self.__audiences = audiences + self.__scopes = scopes + self.__allowed_client_ids = allowed_client_ids + self.__canonical_name = canonical_name + self.__auth = auth + self.__owner_domain = owner_domain + self.__owner_name = owner_name + self.__package_path = package_path + self.__frontend_limits = frontend_limits + self.__title = title + self.__documentation = documentation + self.__auth_level = auth_level + + @property + def name(self): + """Name of the API.""" + return self.__name + + @property + def version(self): + """Version of the API.""" + return self.__version + + @property + def description(self): + """Description of the API.""" + return self.__description + + @property + def 
hostname(self): + """Hostname for the API.""" + return self.__hostname + + @property + def audiences(self): + """List of audiences accepted by default for the API.""" + return self.__audiences + + @property + def scopes(self): + """List of scopes accepted by default for the API.""" + return self.__scopes + + @property + def allowed_client_ids(self): + """List of client IDs accepted by default for the API.""" + return self.__allowed_client_ids + + @property + def auth_level(self): + """Enum from AUTH_LEVEL specifying default frontend auth level.""" + return self.__auth_level + + @property + def canonical_name(self): + """Canonical name for the API.""" + return self.__canonical_name + + @property + def auth(self): + """Authentication configuration for this API.""" + return self.__auth + + @property + def owner_domain(self): + """Domain of the owner of this API.""" + return self.__owner_domain + + @property + def owner_name(self): + """Name of the owner of this API.""" + return self.__owner_name + + @property + def package_path(self): + """Package this API belongs to, '/' delimited. Used by client libs.""" + return self.__package_path + + @property + def frontend_limits(self): + """Optional query limits for unregistered developers.""" + return self.__frontend_limits + + @property + def title(self): + """Human readable name of this API.""" + return self.__title + + @property + def documentation(self): + """Link to the documentation for this version of the API.""" + return self.__documentation + + def __call__(self, service_class): + """Decorator for ProtoRPC class that configures Google's API server. + + Args: + service_class: remote.Service class, ProtoRPC service class being wrapped. + + Returns: + Same class with API attributes assigned in api_info. 
+ """ + return self.api_class()(service_class) + + def api_class(self, resource_name=None, path=None, audiences=None, + scopes=None, allowed_client_ids=None, auth_level=None): + """Get a decorator for a class that implements an API. + + This can be used for single-class or multi-class implementations. It's + used implicitly in simple single-class APIs that only use @api directly. + + Args: + resource_name: string, Resource name for the class this decorates. + (Default: None) + path: string, Base path prepended to any method paths in the class this + decorates. (Default: None) + audiences: list of strings, Acceptable audiences for authentication. + (Default: None) + scopes: list of strings, Acceptable scopes for authentication. + (Default: None) + allowed_client_ids: list of strings, Acceptable client IDs for auth. + (Default: None) + auth_level: enum from AUTH_LEVEL, Frontend authentication level. + (Default: None) + + Returns: + A decorator function to decorate a class that implements an API. + """ + + def apiserving_api_decorator(api_class): + """Decorator for ProtoRPC class that configures Google's API server. + + Args: + api_class: remote.Service class, ProtoRPC service class being wrapped. + + Returns: + Same class with API attributes assigned in api_info. + """ + self.__classes.append(api_class) + api_class.api_info = _ApiInfo( + self.__common_info, resource_name=resource_name, + path=path, audiences=audiences, scopes=scopes, + allowed_client_ids=allowed_client_ids, auth_level=auth_level) + return api_class + + return apiserving_api_decorator + + def get_api_classes(self): + """Get the list of remote.Service classes that implement this API.""" + return self.__classes + + +class ApiAuth(object): + """Optional authorization configuration information for an API.""" + + def __init__(self, allow_cookie_auth=None, blocked_regions=None): + """Constructor for ApiAuth, authentication information for an API. 
+ + Args: + allow_cookie_auth: boolean, whether cooking auth is allowed. By + default, API methods do not allow cookie authentication, and + require the use of OAuth2 or ID tokens. Setting this field to + True will allow cookies to be used to access the API, with + potentially dangerous results. Please be very cautious in enabling + this setting, and make sure to require appropriate XSRF tokens to + protect your API. + blocked_regions: list of Strings, a list of 2-letter ISO region codes + to block. + """ + _CheckType(allow_cookie_auth, bool, 'allow_cookie_auth') + _CheckListType(blocked_regions, basestring, 'blocked_regions') + + self.__allow_cookie_auth = allow_cookie_auth + self.__blocked_regions = blocked_regions + + @property + def allow_cookie_auth(self): + """Whether cookie authentication is allowed for this API.""" + return self.__allow_cookie_auth + + @property + def blocked_regions(self): + """List of 2-letter ISO region codes to block.""" + return self.__blocked_regions + + +class ApiFrontEndLimitRule(object): + """Custom rule to limit unregistered traffic.""" + + def __init__(self, match=None, qps=None, user_qps=None, daily=None, + analytics_id=None): + """Constructor for ApiFrontEndLimitRule. + + Args: + match: string, the matching rule that defines this traffic segment. + qps: int, the aggregate QPS for this segment. + user_qps: int, the per-end-user QPS for this segment. + daily: int, the aggregate daily maximum for this segment. + analytics_id: string, the project ID under which traffic for this segment + will be logged. 
+ """ + _CheckType(match, basestring, 'match') + _CheckType(qps, int, 'qps') + _CheckType(user_qps, int, 'user_qps') + _CheckType(daily, int, 'daily') + _CheckType(analytics_id, basestring, 'analytics_id') + + self.__match = match + self.__qps = qps + self.__user_qps = user_qps + self.__daily = daily + self.__analytics_id = analytics_id + + @property + def match(self): + """The matching rule that defines this traffic segment.""" + return self.__match + + @property + def qps(self): + """The aggregate QPS for this segment.""" + return self.__qps + + @property + def user_qps(self): + """The per-end-user QPS for this segment.""" + return self.__user_qps + + @property + def daily(self): + """The aggregate daily maximum for this segment.""" + return self.__daily + + @property + def analytics_id(self): + """Project ID under which traffic for this segment will be logged.""" + return self.__analytics_id + + +class ApiFrontEndLimits(object): + """Optional front end limit information for an API.""" + + def __init__(self, unregistered_user_qps=None, unregistered_qps=None, + unregistered_daily=None, rules=None): + """Constructor for ApiFrontEndLimits, front end limit info for an API. + + Args: + unregistered_user_qps: int, the per-end-user QPS. Users are identified + by their IP address. A value of 0 will block unregistered requests. + unregistered_qps: int, an aggregate QPS upper-bound for all unregistered + traffic. A value of 0 currently means unlimited, though it might change + in the future. To block unregistered requests, use unregistered_user_qps + or unregistered_daily instead. + unregistered_daily: int, an aggregate daily upper-bound for all + unregistered traffic. A value of 0 will block unregistered requests. + rules: A list or tuple of ApiFrontEndLimitRule instances: custom rules + used to apply limits to unregistered traffic. 
+ """ + _CheckType(unregistered_user_qps, int, 'unregistered_user_qps') + _CheckType(unregistered_qps, int, 'unregistered_qps') + _CheckType(unregistered_daily, int, 'unregistered_daily') + _CheckListType(rules, ApiFrontEndLimitRule, 'rules') + + self.__unregistered_user_qps = unregistered_user_qps + self.__unregistered_qps = unregistered_qps + self.__unregistered_daily = unregistered_daily + self.__rules = rules + + @property + def unregistered_user_qps(self): + """Per-end-user QPS limit.""" + return self.__unregistered_user_qps + + @property + def unregistered_qps(self): + """Aggregate QPS upper-bound for all unregistered traffic.""" + return self.__unregistered_qps + + @property + def unregistered_daily(self): + """Aggregate daily upper-bound for all unregistered traffic.""" + return self.__unregistered_daily + + @property + def rules(self): + """Custom rules used to apply limits to unregistered traffic.""" + return self.__rules + + +@util.positional(2) +def api(name, version, description=None, hostname=None, audiences=None, + scopes=None, allowed_client_ids=None, canonical_name=None, + auth=None, owner_domain=None, owner_name=None, package_path=None, + frontend_limits=None, title=None, documentation=None, auth_level=None): + """Decorate a ProtoRPC Service class for use by the framework above. + + This decorator can be used to specify an API name, version, description, and + hostname for your API. + + Sample usage (python 2.7): + @endpoints.api(name='guestbook', version='v0.2', + description='Guestbook API') + class PostService(remote.Service): + ... + + Sample usage (python 2.5): + class PostService(remote.Service): + ... + endpoints.api(name='guestbook', version='v0.2', + description='Guestbook API')(PostService) + + Sample usage if multiple classes implement one API: + api_root = endpoints.api(name='library', version='v1.0') + + @api_root.api_class(resource_name='shelves') + class Shelves(remote.Service): + ... 
+ + @api_root.api_class(resource_name='books', path='books') + class Books(remote.Service): + ... + + Args: + name: string, Name of the API. + version: string, Version of the API. + description: string, Short description of the API (Default: None) + hostname: string, Hostname of the API (Default: app engine default host) + audiences: list of strings, Acceptable audiences for authentication. + scopes: list of strings, Acceptable scopes for authentication. + allowed_client_ids: list of strings, Acceptable client IDs for auth. + canonical_name: string, the canonical name for the API, a more human + readable version of the name. + auth: ApiAuth instance, the authentication configuration information + for this API. + owner_domain: string, the domain of the person or company that owns + this API. Along with owner_name, this provides hints to properly + name client libraries for this API. + owner_name: string, the name of the owner of this API. Along with + owner_domain, this provides hints to properly name client libraries + for this API. + package_path: string, the "package" this API belongs to. This '/' + delimited value specifies logical groupings of APIs. This is used by + client libraries of this API. + frontend_limits: ApiFrontEndLimits, optional query limits for unregistered + developers. + title: string, the human readable title of your API. It is exposed in the + discovery service. + documentation: string, a URL where users can find documentation about this + version of the API. This will be surfaced in the API Explorer and GPE + plugin to allow users to learn about your service. + auth_level: enum from AUTH_LEVEL, frontend authentication level. + + Returns: + Class decorated with api_info attribute, an instance of ApiInfo. 
+ """ + + return _ApiDecorator(name, version, description=description, + hostname=hostname, audiences=audiences, scopes=scopes, + allowed_client_ids=allowed_client_ids, + canonical_name=canonical_name, auth=auth, + owner_domain=owner_domain, owner_name=owner_name, + package_path=package_path, + frontend_limits=frontend_limits, title=title, + documentation=documentation, auth_level=auth_level) + + +class CacheControl(object): + """Cache control settings for an API method. + + Setting is composed of a directive and maximum cache age. + Available types: + PUBLIC - Allows clients and proxies to cache responses. + PRIVATE - Allows only clients to cache responses. + NO_CACHE - Allows none to cache responses. + """ + PUBLIC = 'public' + PRIVATE = 'private' + NO_CACHE = 'no-cache' + VALID_VALUES = (PUBLIC, PRIVATE, NO_CACHE) + + def __init__(self, directive=NO_CACHE, max_age_seconds=0): + """Constructor. + + Args: + directive: string, Cache control directive, as above. (Default: NO_CACHE) + max_age_seconds: int, Maximum age of cache responses. (Default: 0) + """ + if directive not in self.VALID_VALUES: + directive = self.NO_CACHE + self.__directive = directive + self.__max_age_seconds = max_age_seconds + + @property + def directive(self): + """The cache setting for this method, PUBLIC, PRIVATE, or NO_CACHE.""" + return self.__directive + + @property + def max_age_seconds(self): + """The maximum age of cache responses for this method, in seconds.""" + return self.__max_age_seconds + + +class _MethodInfo(object): + """Configurable attributes of an API method. + + Consolidates settings from @method decorator and/or any settings that were + calculating from the ProtoRPC method name, so they only need to be calculated + once. + """ + + @util.positional(1) + def __init__(self, name=None, path=None, http_method=None, + cache_control=None, scopes=None, audiences=None, + allowed_client_ids=None, auth_level=None): + """Constructor. 
+ + Args: + name: string, Name of the method, prepended with . to make it + unique. + path: string, Path portion of the URL to the method, for RESTful methods. + http_method: string, HTTP method supported by the method. + cache_control: CacheControl, Cache settings for the API method. + scopes: list of string, OAuth2 token must contain one of these scopes. + audiences: list of string, IdToken must contain one of these audiences. + allowed_client_ids: list of string, Client IDs allowed to call the method. + auth_level: enum from AUTH_LEVEL, Frontend auth level for the method. + """ + self.__name = name + self.__path = path + self.__http_method = http_method + self.__cache_control = cache_control + self.__scopes = scopes + self.__audiences = audiences + self.__allowed_client_ids = allowed_client_ids + self.__auth_level = auth_level + + def __safe_name(self, method_name): + """Restrict method name to a-zA-Z0-9, first char lowercase.""" + + + safe_name = re.sub('[^\.a-zA-Z0-9]', '', method_name) + + return safe_name[0:1].lower() + safe_name[1:] + + @property + def name(self): + """Method name as specified in decorator or derived.""" + return self.__name + + def get_path(self, api_info): + """Get the path portion of the URL to the method (for RESTful methods). + + Request path can be specified in the method, and it could have a base + path prepended to it. + + Args: + api_info: API information for this API, possibly including a base path. + This is the api_info property on the class that's been annotated for + this API. + + Returns: + This method's request path (not including the http://.../_ah/api/ prefix). + + Raises: + ApiConfigurationError: If the path isn't properly formatted. 
+ """ + path = self.__path or '' + if path and path[0] == '/': + + path = path[1:] + else: + + if api_info.path: + path = '%s%s%s' % (api_info.path, '/' if path else '', path) + + + for part in path.split('/'): + if part and '{' in part and '}' in part: + if re.match('^{[^{}]+}$', part) is None: + raise ApiConfigurationError('Invalid path segment: %s (part of %s)' % + (part, path)) + return path + + @property + def http_method(self): + """HTTP method supported by the method (e.g. GET, POST).""" + return self.__http_method + + @property + def cache_control(self): + """Cache control setting for the API method.""" + return self.__cache_control + + @property + def scopes(self): + """List of scopes for the API method.""" + return self.__scopes + + @property + def audiences(self): + """List of audiences for the API method.""" + return self.__audiences + + @property + def allowed_client_ids(self): + """List of allowed client IDs for the API method.""" + return self.__allowed_client_ids + + @property + def auth_level(self): + """Enum from AUTH_LEVEL specifying default frontend auth level.""" + return self.__auth_level + + def method_id(self, api_info): + """Computed method name.""" + + + + if api_info.resource_name: + resource_part = '.%s' % self.__safe_name(api_info.resource_name) + else: + resource_part = '' + return '%s%s.%s' % (self.__safe_name(api_info.name), resource_part, + self.__safe_name(self.name)) + + +@util.positional(2) +def method(request_message=message_types.VoidMessage, + response_message=message_types.VoidMessage, + name=None, + path=None, + http_method='POST', + cache_control=None, + scopes=None, + audiences=None, + allowed_client_ids=None, + auth_level=None): + """Decorate a ProtoRPC Method for use by the framework above. + + This decorator can be used to specify a method name, path, http method, + cache control, scopes, audiences, client ids and auth_level. 
+ + Sample usage: + @api_config.method(RequestMessage, ResponseMessage, + name='insert', http_method='PUT') + def greeting_insert(request): + ... + return response + + Args: + request_message: Message type of expected request. + response_message: Message type of expected response. + name: string, Name of the method, prepended with . to make it + unique. (Default: python method name) + path: string, Path portion of the URL to the method, for RESTful methods. + http_method: string, HTTP method supported by the method. (Default: POST) + cache_control: CacheControl, Cache settings for the API method. + scopes: list of string, OAuth2 token must contain one of these scopes. + audiences: list of string, IdToken must contain one of these audiences. + allowed_client_ids: list of string, Client IDs allowed to call the method. + Currently limited to 5. If None, no calls will be allowed. + auth_level: enum from AUTH_LEVEL, Frontend auth level for the method. + + Returns: + 'apiserving_method_wrapper' function. + + Raises: + ValueError: if more than 5 allowed_client_ids are specified. + TypeError: if the request_type or response_type parameters are not + proper subclasses of messages.Message. + """ + + + DEFAULT_HTTP_METHOD = 'POST' + + def check_type(setting, allowed_type, name, allow_none=True): + """Verify that the setting is of the allowed type or raise TypeError. + + Args: + setting: The setting to check. + allowed_type: The allowed type. + name: Name of the setting, added to the exception. + allow_none: If set, None is also allowed. + + Raises: + TypeError: if setting is not of the allowed type. + + Returns: + The setting, for convenient use in assignment. + """ + if (setting is None and allow_none or + isinstance(setting, allowed_type)): + return setting + raise TypeError('%s is not of type %s' % (name, allowed_type.__name__)) + + def apiserving_method_decorator(api_method): + """Decorator for ProtoRPC method that configures Google's API server. 
+ + Args: + api_method: Original method being wrapped. + + Returns: + Function responsible for actual invocation. + Assigns the following attributes to invocation function: + remote: Instance of RemoteInfo, contains remote method information. + remote.request_type: Expected request type for remote method. + remote.response_type: Response type returned from remote method. + method_info: Instance of _MethodInfo, api method configuration. + It is also assigned attributes corresponding to the aforementioned kwargs. + + Raises: + TypeError: if the request_type or response_type parameters are not + proper subclasses of messages.Message. + KeyError: if the request_message is a ResourceContainer and the newly + created remote method has been reference by the container before. This + should never occur because a remote method is created once. + """ + if isinstance(request_message, ResourceContainer): + remote_decorator = remote.method(request_message.combined_message_class, + response_message) + else: + remote_decorator = remote.method(request_message, response_message) + remote_method = remote_decorator(api_method) + + def invoke_remote(service_instance, request): + + + users_id_token._maybe_set_current_user_vars( + invoke_remote, api_info=getattr(service_instance, 'api_info', None), + request=request) + + return remote_method(service_instance, request) + + invoke_remote.remote = remote_method.remote + if isinstance(request_message, ResourceContainer): + ResourceContainer.add_to_cache(invoke_remote.remote, request_message) + + invoke_remote.method_info = _MethodInfo( + name=name or api_method.__name__, path=path or '', + http_method=http_method or DEFAULT_HTTP_METHOD, + cache_control=cache_control, scopes=scopes, audiences=audiences, + allowed_client_ids=allowed_client_ids, auth_level=auth_level) + invoke_remote.__name__ = invoke_remote.method_info.name + return invoke_remote + + check_type(cache_control, CacheControl, 'cache_control') + _CheckListType(scopes, basestring, 
'scopes') + _CheckListType(audiences, basestring, 'audiences') + _CheckListType(allowed_client_ids, basestring, 'allowed_client_ids') + _CheckEnum(auth_level, AUTH_LEVEL, 'auth_level') + if allowed_client_ids is not None and len(allowed_client_ids) > 5: + raise ValueError('allowed_client_ids must have 5 or fewer entries.') + return apiserving_method_decorator + + +class ApiConfigGenerator(object): + """Generates an API configuration from a ProtoRPC service. + + Example: + + class HelloRequest(messages.Message): + my_name = messages.StringField(1, required=True) + + class HelloResponse(messages.Message): + hello = messages.StringField(1, required=True) + + class HelloService(remote.Service): + + @remote.method(HelloRequest, HelloResponse) + def hello(self, request): + return HelloResponse(hello='Hello there, %s!' % + request.my_name) + + api_config = ApiConfigGenerator().pretty_print_config_to_json(HelloService) + + The resulting api_config will be a JSON document describing the API + implemented by HelloService. + """ + + + + + __NO_BODY = 1 + __HAS_BODY = 2 + + def __init__(self): + self.__parser = message_parser.MessageTypeToJsonSchema() + + + self.__request_schema = {} + + + self.__response_schema = {} + + + self.__id_from_name = {} + + def __get_request_kind(self, method_info): + """Categorize the type of the request. + + Args: + method_info: _MethodInfo, method information. + + Returns: + The kind of request. + """ + if method_info.http_method in ('GET', 'DELETE'): + return self.__NO_BODY + else: + return self.__HAS_BODY + + def __field_to_subfields(self, field): + """Fully describes data represented by field, including the nested case. + + In the case that the field is not a message field, we have no fields nested + within a message definition, so we can simply return that field. However, in + the nested case, we can't simply describe the data with one field or even + with one chain of fields. 
+ + For example, if we have a message field + + m_field = messages.MessageField(RefClass, 1) + + which references a class with two fields: + + class RefClass(messages.Message): + one = messages.StringField(1) + two = messages.IntegerField(2) + + then we would need to include both one and two to represent all the + data contained. + + Calling __field_to_subfields(m_field) would return: + [ + [, ], + [, ], + ] + + If the second field was instead a message field + + class RefClass(messages.Message): + one = messages.StringField(1) + two = messages.MessageField(OtherRefClass, 2) + + referencing another class with two fields + + class OtherRefClass(messages.Message): + three = messages.BooleanField(1) + four = messages.FloatField(2) + + then we would need to recurse one level deeper for two. + + With this change, calling __field_to_subfields(m_field) would return: + [ + [, ], + [, , ], + [, , ], + ] + + Args: + field: An instance of a subclass of messages.Field. + + Returns: + A list of lists, where each sublist is a list of fields. + """ + + if not isinstance(field, messages.MessageField): + return [[field]] + + result = [] + for subfield in sorted(field.message_type.all_fields(), + key=lambda f: f.number): + subfield_results = self.__field_to_subfields(subfield) + for subfields_list in subfield_results: + subfields_list.insert(0, field) + result.append(subfields_list) + return result + + + + + + def __field_to_parameter_type(self, field): + """Converts the field variant type into a string describing the parameter. + + Args: + field: An instance of a subclass of messages.Field. + + Returns: + A string corresponding to the variant enum of the field, with a few + exceptions. In the case of signed ints, the 's' is dropped; for the BOOL + variant, 'boolean' is used; and for the ENUM variant, 'string' is used. + + Raises: + TypeError: if the field variant is a message variant. 
+ """ + + + + + + + variant = field.variant + if variant == messages.Variant.MESSAGE: + raise TypeError('A message variant can\'t be used in a parameter.') + + custom_variant_map = { + messages.Variant.SINT32: 'int32', + messages.Variant.SINT64: 'int64', + messages.Variant.BOOL: 'boolean', + messages.Variant.ENUM: 'string', + } + return custom_variant_map.get(variant) or variant.name.lower() + + def __get_path_parameters(self, path): + """Parses path paremeters from a URI path and organizes them by parameter. + + Some of the parameters may correspond to message fields, and so will be + represented as segments corresponding to each subfield; e.g. first.second if + the field "second" in the message field "first" is pulled from the path. + + The resulting dictionary uses the first segments as keys and each key has as + value the list of full parameter values with first segment equal to the key. + + If the match path parameter is null, that part of the path template is + ignored; this occurs if '{}' is used in a template. + + Args: + path: String; a URI path, potentially with some parameters. + + Returns: + A dictionary with strings as keys and list of strings as values. + """ + path_parameters_by_segment = {} + for format_var_name in re.findall(_PATH_VARIABLE_PATTERN, path): + first_segment = format_var_name.split('.', 1)[0] + matches = path_parameters_by_segment.setdefault(first_segment, []) + matches.append(format_var_name) + + return path_parameters_by_segment + + def __validate_simple_subfield(self, parameter, field, segment_list, + _segment_index=0): + """Verifies that a proposed subfield actually exists and is a simple field. + + Here, simple means it is not a MessageField (nested). + + Args: + parameter: String; the '.' delimited name of the current field being + considered. This is relative to some root. + field: An instance of a subclass of messages.Field. 
Corresponds to the + previous segment in the path (previous relative to _segment_index), + since this field should be a message field with the current segment + as a field in the message class. + segment_list: The full list of segments from the '.' delimited subfield + being validated. + _segment_index: Integer; used to hold the position of current segment so + that segment_list can be passed as a reference instead of having to + copy using segment_list[1:] at each step. + + Raises: + TypeError: If the final subfield (indicated by _segment_index relative + to the length of segment_list) is a MessageField. + TypeError: If at any stage the lookup at a segment fails, e.g if a.b + exists but a.b.c does not exist. This can happen either if a.b is not + a message field or if a.b.c is not a property on the message class from + a.b. + """ + if _segment_index >= len(segment_list): + + if isinstance(field, messages.MessageField): + field_class = field.__class__.__name__ + raise TypeError('Can\'t use messages in path. Subfield %r was ' + 'included but is a %s.' % (parameter, field_class)) + return + + segment = segment_list[_segment_index] + parameter += '.' + segment + try: + field = field.type.field_by_name(segment) + except (AttributeError, KeyError): + raise TypeError('Subfield %r from path does not exist.' % (parameter,)) + + self.__validate_simple_subfield(parameter, field, segment_list, + _segment_index=_segment_index + 1) + + def __validate_path_parameters(self, field, path_parameters): + """Verifies that all path parameters correspond to an existing subfield. + + Args: + field: An instance of a subclass of messages.Field. Should be the root + level property name in each path parameter in path_parameters. For + example, if the field is called 'foo', then each path parameter should + begin with 'foo.'. + path_parameters: A list of Strings representing URI parameter variables. + + Raises: + TypeError: If one of the path parameters does not start with field.name. 
+ """ + for param in path_parameters: + segment_list = param.split('.') + if segment_list[0] != field.name: + raise TypeError('Subfield %r can\'t come from field %r.' + % (param, field.name)) + self.__validate_simple_subfield(field.name, field, segment_list[1:]) + + def __parameter_default(self, final_subfield): + """Returns default value of final subfield if it has one. + + If this subfield comes from a field list returned from __field_to_subfields, + none of the fields in the subfield list can have a default except the final + one since they all must be message fields. + + Args: + final_subfield: A simple field from the end of a subfield list. + + Returns: + The default value of the subfield, if any exists, with the exception of an + enum field, which will have its value cast to a string. + """ + if final_subfield.default: + if isinstance(final_subfield, messages.EnumField): + return final_subfield.default.name + else: + return final_subfield.default + + def __parameter_enum(self, final_subfield): + """Returns enum descriptor of final subfield if it is an enum. + + An enum descriptor is a dictionary with keys as the names from the enum and + each value is a dictionary with a single key "backendValue" and value equal + to the same enum name used to stored it in the descriptor. + + The key "description" can also be used next to "backendValue", but protorpc + Enum classes have no way of supporting a description for each value. + + Args: + final_subfield: A simple field from the end of a subfield list. + + Returns: + The enum descriptor for the field, if it's an enum descriptor, else + returns None. + """ + if isinstance(final_subfield, messages.EnumField): + enum_descriptor = {} + for enum_value in final_subfield.type.to_dict().keys(): + enum_descriptor[enum_value] = {'backendValue': enum_value} + return enum_descriptor + + def __parameter_descriptor(self, subfield_list): + """Creates descriptor for a parameter using the subfields that define it. 
+ + Each parameter is defined by a list of fields, with all but the last being + a message field and the final being a simple (non-message) field. + + Many of the fields in the descriptor are determined solely by the simple + field at the end, though some (such as repeated and required) take the whole + chain of fields into consideration. + + Args: + subfield_list: List of fields describing the parameter. + + Returns: + Dictionary containing a descriptor for the parameter described by the list + of fields. + """ + descriptor = {} + final_subfield = subfield_list[-1] + + + if all(subfield.required for subfield in subfield_list): + descriptor['required'] = True + + + descriptor['type'] = self.__field_to_parameter_type(final_subfield) + + + default = self.__parameter_default(final_subfield) + if default is not None: + descriptor['default'] = default + + + if any(subfield.repeated for subfield in subfield_list): + descriptor['repeated'] = True + + + enum_descriptor = self.__parameter_enum(final_subfield) + if enum_descriptor is not None: + descriptor['enum'] = enum_descriptor + + return descriptor + + def __add_parameters_from_field(self, field, path_parameters, + params, param_order): + """Adds all parameters in a field to a method parameters descriptor. + + Simple fields will only have one parameter, but a message field 'x' that + corresponds to a message class with fields 'y' and 'z' will result in + parameters 'x.y' and 'x.z', for example. The mapping from field to + parameters is mostly handled by __field_to_subfields. + + Args: + field: Field from which parameters will be added to the method descriptor. + path_parameters: A list of parameters matched from a path for this field. + For example for the hypothetical 'x' from above if the path was + '/a/{x.z}/b/{other}' then this list would contain only the element + 'x.z' since 'other' does not match to this field. + params: Dictionary with parameter names as keys and parameter descriptors + as values. 
This will be updated for each parameter in the field. + param_order: List of required parameter names to give them an order in the + descriptor. All required parameters in the field will be added to this + list. + """ + for subfield_list in self.__field_to_subfields(field): + descriptor = self.__parameter_descriptor(subfield_list) + + qualified_name = '.'.join(subfield.name for subfield in subfield_list) + in_path = qualified_name in path_parameters + if descriptor.get('required', in_path): + descriptor['required'] = True + param_order.append(qualified_name) + + params[qualified_name] = descriptor + + def __params_descriptor_without_container(self, message_type, + request_kind, path): + """Describe parameters of a method which does not use a ResourceContainer. + + Makes sure that the path parameters are included in the message definition + and adds any required fields and URL query parameters. + + This method is to preserve backwards compatibility and will be removed in + a future release. + + Args: + message_type: messages.Message class, Message with parameters to describe. + request_kind: The type of request being made. + path: string, HTTP path to method. + + Returns: + A tuple (dict, list of string): Descriptor of the parameters, Order of the + parameters. + """ + params = {} + param_order = [] + + path_parameter_dict = self.__get_path_parameters(path) + for field in sorted(message_type.all_fields(), key=lambda f: f.number): + matched_path_parameters = path_parameter_dict.get(field.name, []) + self.__validate_path_parameters(field, matched_path_parameters) + if matched_path_parameters or request_kind == self.__NO_BODY: + self.__add_parameters_from_field(field, matched_path_parameters, + params, param_order) + + return params, param_order + + + + + def __params_descriptor(self, message_type, request_kind, path): + """Describe the parameters of a method. 
+ + If the message_type is not a ResourceContainer, will fall back to + __params_descriptor_without_container (which will eventually be deprecated). + + If the message type is a ResourceContainer, then all path/query parameters + will come from the ResourceContainer. This method will also make sure all + path parameters are covered by the message fields. + + Args: + message_type: messages.Message or ResourceContainer class, Message with + parameters to describe. + request_kind: The type of request being made. + path: string, HTTP path to method. + + Returns: + A tuple (dict, list of string): Descriptor of the parameters, Order of the + parameters. + """ + path_parameter_dict = self.__get_path_parameters(path) + + if not isinstance(message_type, ResourceContainer): + if path_parameter_dict: + logging.warning('Method specifies path parameters but you are not ' + 'using a ResourceContainer. This will fail in future ' + 'releases; please switch to using ResourceContainer as ' + 'soon as possible.') + return self.__params_descriptor_without_container( + message_type, request_kind, path) + + + message_type = message_type.parameters_message_class() + + params = {} + param_order = [] + + + for field_name, matched_path_parameters in path_parameter_dict.iteritems(): + field = message_type.field_by_name(field_name) + self.__validate_path_parameters(field, matched_path_parameters) + + + for field in sorted(message_type.all_fields(), key=lambda f: f.number): + matched_path_parameters = path_parameter_dict.get(field.name, []) + self.__add_parameters_from_field(field, matched_path_parameters, + params, param_order) + + return params, param_order + + def __request_message_descriptor(self, request_kind, message_type, method_id, + path): + """Describes the parameters and body of the request. + + Args: + request_kind: The type of request being made. + message_type: messages.Message or ResourceContainer class. The message to + describe. 
+ method_id: string, Unique method identifier (e.g. 'myapi.items.method') + path: string, HTTP path to method. + + Returns: + Dictionary describing the request. + + Raises: + ValueError: if the method path and request required fields do not match + """ + descriptor = {} + + params, param_order = self.__params_descriptor(message_type, + request_kind, path) + + if isinstance(message_type, ResourceContainer): + message_type = message_type.body_message_class() + + if (request_kind == self.__NO_BODY or + message_type == message_types.VoidMessage()): + descriptor['body'] = 'empty' + else: + descriptor['body'] = 'autoTemplate(backendRequest)' + descriptor['bodyName'] = 'resource' + self.__request_schema[method_id] = self.__parser.add_message( + message_type.__class__) + + if params: + descriptor['parameters'] = params + + if param_order: + descriptor['parameterOrder'] = param_order + + return descriptor + + def __response_message_descriptor(self, message_type, method_id, + cache_control): + """Describes the response. + + Args: + message_type: messages.Message class, The message to describe. + method_id: string, Unique method identifier (e.g. 'myapi.items.method') + cache_control: CacheControl, Cache settings for the API method. + + Returns: + Dictionary describing the response. + """ + descriptor = {} + + self.__parser.add_message(message_type.__class__) + if message_type == message_types.VoidMessage(): + descriptor['body'] = 'empty' + else: + descriptor['body'] = 'autoTemplate(backendResponse)' + descriptor['bodyName'] = 'resource' + self.__response_schema[method_id] = self.__parser.ref_for_message_type( + message_type.__class__) + + if cache_control is not None: + descriptor['cacheControl'] = { + 'type': cache_control.directive, + 'maxAge': cache_control.max_age_seconds, + } + + return descriptor + + def __method_descriptor(self, service, service_name, method_info, + protorpc_method_name, protorpc_method_info): + """Describes a method. 
+ + Args: + service: endpoints.Service, Implementation of the API as a service. + service_name: string, Name of the service. + method_info: _MethodInfo, Configuration for the method. + protorpc_method_name: string, Name of the method as given in the + ProtoRPC implementation. + protorpc_method_info: protorpc.remote._RemoteMethodInfo, ProtoRPC + description of the method. + + Returns: + Dictionary describing the method. + """ + descriptor = {} + + request_message_type = ResourceContainer.get_request_message( + protorpc_method_info.remote) + request_kind = self.__get_request_kind(method_info) + remote_method = protorpc_method_info.remote + + descriptor['path'] = method_info.get_path(service.api_info) + descriptor['httpMethod'] = method_info.http_method + descriptor['rosyMethod'] = '%s.%s' % (service_name, protorpc_method_name) + descriptor['request'] = self.__request_message_descriptor( + request_kind, request_message_type, + method_info.method_id(service.api_info), + descriptor['path']) + descriptor['response'] = self.__response_message_descriptor( + remote_method.response_type(), method_info.method_id(service.api_info), + method_info.cache_control) + + + + + scopes = (method_info.scopes + if method_info.scopes is not None + else service.api_info.scopes) + if scopes: + descriptor['scopes'] = scopes + audiences = (method_info.audiences + if method_info.audiences is not None + else service.api_info.audiences) + if audiences: + descriptor['audiences'] = audiences + allowed_client_ids = (method_info.allowed_client_ids + if method_info.allowed_client_ids is not None + else service.api_info.allowed_client_ids) + if allowed_client_ids: + descriptor['clientIds'] = allowed_client_ids + + if remote_method.method.__doc__: + descriptor['description'] = remote_method.method.__doc__ + + auth_level = (method_info.auth_level + if method_info.auth_level is not None + else service.api_info.auth_level) + if auth_level: + descriptor['authLevel'] = AUTH_LEVEL.reverse_mapping[auth_level] 
+ + return descriptor + + def __schema_descriptor(self, services): + """Descriptor for the all the JSON Schema used. + + Args: + services: List of protorpc.remote.Service instances implementing an + api/version. + + Returns: + Dictionary containing all the JSON Schema used in the service. + """ + methods_desc = {} + + for service in services: + protorpc_methods = service.all_remote_methods() + for protorpc_method_name in protorpc_methods.iterkeys(): + method_id = self.__id_from_name[protorpc_method_name] + + request_response = {} + + request_schema_id = self.__request_schema.get(method_id) + if request_schema_id: + request_response['request'] = { + '$ref': request_schema_id + } + + response_schema_id = self.__response_schema.get(method_id) + if response_schema_id: + request_response['response'] = { + '$ref': response_schema_id + } + + rosy_method = '%s.%s' % (service.__name__, protorpc_method_name) + methods_desc[rosy_method] = request_response + + descriptor = { + 'methods': methods_desc, + 'schemas': self.__parser.schemas(), + } + + return descriptor + + def __get_merged_api_info(self, services): + """Builds a description of an API. + + Args: + services: List of protorpc.remote.Service instances implementing an + api/version. + + Returns: + The _ApiInfo object to use for the API that the given services implement. + + Raises: + ApiConfigurationError: If there's something wrong with the API + configuration, such as a multiclass API decorated with different API + descriptors (see the docstring for api()). 
  def __auth_descriptor(self, api_info):
    """Builds an auth descriptor from the auth settings in the API config.

    Args:
      api_info: _ApiInfo object for the API.

    Returns:
      A dictionary with 'allowCookieAuth' and/or 'blockedRegions' keys, or
      None if the API has no auth configuration at all.
    """
    if api_info.auth is None:
      return None

    auth_descriptor = {}
    # allow_cookie_auth is a tri-state (None = unset), so test against None
    # explicitly; blocked_regions only matters when non-empty.
    if api_info.auth.allow_cookie_auth is not None:
      auth_descriptor['allowCookieAuth'] = api_info.auth.allow_cookie_auth
    if api_info.auth.blocked_regions:
      auth_descriptor['blockedRegions'] = api_info.auth.blocked_regions

    return auth_descriptor
+ + Returns: + A dictionary that can be deserialized into JSON and stored as an API + description document. + + Raises: + ApiConfigurationError: If there's something wrong with the API + configuration, such as a multiclass API decorated with different API + descriptors (see the docstring for api()), or a repeated method + signature. + """ + merged_api_info = self.__get_merged_api_info(services) + descriptor = self.get_descriptor_defaults(merged_api_info, + hostname=hostname) + description = merged_api_info.description + if not description and len(services) == 1: + description = services[0].__doc__ + if description: + descriptor['description'] = description + + auth_descriptor = self.__auth_descriptor(merged_api_info) + if auth_descriptor: + descriptor['auth'] = auth_descriptor + + frontend_limit_descriptor = self.__frontend_limit_descriptor( + merged_api_info) + if frontend_limit_descriptor: + descriptor['frontendLimits'] = frontend_limit_descriptor + + method_map = {} + method_collision_tracker = {} + rest_collision_tracker = {} + + for service in services: + remote_methods = service.all_remote_methods() + for protorpc_meth_name, protorpc_meth_info in remote_methods.iteritems(): + method_info = getattr(protorpc_meth_info, 'method_info', None) + + if method_info is None: + continue + method_id = method_info.method_id(service.api_info) + self.__id_from_name[protorpc_meth_name] = method_id + method_map[method_id] = self.__method_descriptor( + service, service.__name__, method_info, + protorpc_meth_name, protorpc_meth_info) + + + if method_id in method_collision_tracker: + raise ApiConfigurationError( + 'Method %s used multiple times, in classes %s and %s' % + (method_id, method_collision_tracker[method_id], + service.__name__)) + else: + method_collision_tracker[method_id] = service.__name__ + + + rest_identifier = (method_info.http_method, + method_info.get_path(service.api_info)) + if rest_identifier in rest_collision_tracker: + raise ApiConfigurationError( + '%s 
path "%s" used multiple times, in classes %s and %s' % + (method_info.http_method, method_info.get_path(service.api_info), + rest_collision_tracker[rest_identifier], + service.__name__)) + else: + rest_collision_tracker[rest_identifier] = service.__name__ + + if method_map: + descriptor['methods'] = method_map + descriptor['descriptor'] = self.__schema_descriptor(services) + + return descriptor + + def get_descriptor_defaults(self, api_info, hostname=None): + """Gets a default configuration for a service. + + Args: + api_info: _ApiInfo object for this service. + hostname: string, Hostname of the API, to override the value set on the + current service. Defaults to None. + + Returns: + A dictionary with the default configuration. + """ + hostname = hostname or api_info.hostname + defaults = { + 'extends': 'thirdParty.api', + 'root': 'https://%s/_ah/api' % hostname, + 'name': api_info.name, + 'version': api_info.version, + 'defaultVersion': True, + 'abstract': False, + 'adapter': { + 'bns': 'https://%s/_ah/spi' % hostname, + 'type': 'lily', + 'deadline': 10.0 + } + } + if api_info.canonical_name: + defaults['canonicalName'] = api_info.canonical_name + if api_info.owner_domain: + defaults['ownerDomain'] = api_info.owner_domain + if api_info.owner_name: + defaults['ownerName'] = api_info.owner_name + if api_info.package_path: + defaults['packagePath'] = api_info.package_path + if api_info.title: + defaults['title'] = api_info.title + if api_info.documentation: + defaults['documentation'] = api_info.documentation + return defaults + + def pretty_print_config_to_json(self, services, hostname=None): + """Description of a protorpc.remote.Service in API format. + + Args: + services: Either a single protorpc.remote.Service or a list of them + that implements an api/version. + hostname: string, Hostname of the API, to override the value set on the + current service. Defaults to None. + + Returns: + string, The API descriptor document as JSON. 
+ """ + if not isinstance(services, (tuple, list)): + services = [services] + + + + _CheckListType(services, remote._ServiceClass, 'services', allow_none=False) + + descriptor = self.__api_descriptor(services, hostname=hostname) + return json.dumps(descriptor, sort_keys=True, indent=2) diff --git a/app-engine-app/endpoints/api_exceptions.py b/app-engine-app/endpoints/api_exceptions.py new file mode 100644 index 0000000..283b579 --- /dev/null +++ b/app-engine-app/endpoints/api_exceptions.py @@ -0,0 +1,56 @@ +#!/usr/bin/env python +# +# Copyright 2007 Google Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
class ServiceException(remote.ApplicationError):
  """Base class for request/service exceptions in Endpoints.

  Subclasses must define a class attribute `http_status` (an httplib status
  constant); the base class itself does not define one and is not intended
  to be raised directly.
  """

  def __init__(self, message=None):
    # The standard status text (e.g. 'Not Found') is passed to
    # ApplicationError as the error name; the Endpoints frontend uses it to
    # map the application error back to the intended HTTP status code.
    super(ServiceException, self).__init__(message,
                                           httplib.responses[self.http_status])


class BadRequestException(ServiceException):
  """Bad request exception that is mapped to a 400 response."""
  http_status = httplib.BAD_REQUEST


class ForbiddenException(ServiceException):
  """Forbidden exception that is mapped to a 403 response."""
  http_status = httplib.FORBIDDEN


class InternalServerErrorException(ServiceException):
  """Internal server exception that is mapped to a 500 response."""
  http_status = httplib.INTERNAL_SERVER_ERROR


class NotFoundException(ServiceException):
  """Not found exception that is mapped to a 404 response."""
  http_status = httplib.NOT_FOUND


class UnauthorizedException(ServiceException):
  """Unauthorized exception that is mapped to a 401 response."""
  http_status = httplib.UNAUTHORIZED
+# + + +"""A library supporting use of the Google API Server. + +This library helps you configure a set of ProtoRPC services to act as +Endpoints backends. In addition to translating ProtoRPC to Endpoints +compatible errors, it exposes a helper service that describes your services. + + Usage: + 1) Create an endpoints.api_server instead of a webapp.WSGIApplication. + 2) Annotate your ProtoRPC Service class with @endpoints.api to give your + API a name, version, and short description + 3) To return an error from Google API Server raise an endpoints.*Exception + The ServiceException classes specify the http status code returned. + + For example: + raise endpoints.UnauthorizedException("Please log in as an admin user") + + + Sample usage: + - - - - app.yaml - - - - + + handlers: + # Path to your API backend. + - url: /_ah/spi/.* + # For the legacy python runtime this would be "script: services.py" + script: services.app + + - - - - services.py - - - - + + import endpoints + import postservice + + app = endpoints.api_server([postservice.PostService], debug=True) + + - - - - postservice.py - - - - + + @endpoints.api(name='guestbook', version='v0.2', description='Guestbook API') + class PostService(remote.Service): + ... 
+ @endpoints.method(GetNotesRequest, Notes, name='notes.list', path='notes', + http_method='GET') + def list(self, request): + raise endpoints.UnauthorizedException("Please log in as an admin user") +""" + + +import cgi +import cStringIO +import httplib +import os + +from endpoints import api_backend_service +from endpoints import api_config +from endpoints import api_exceptions +from endpoints import protojson +from protorpc import messages +from protorpc import remote +from protorpc.wsgi import service as wsgi_service + +package = 'google.appengine.endpoints' + + +__all__ = [ + 'api_server', + 'EndpointsErrorMessage', + 'package', +] + + +_ERROR_NAME_MAP = dict((httplib.responses[c.http_status], c) for c in [ + api_exceptions.BadRequestException, + api_exceptions.ForbiddenException, + api_exceptions.InternalServerErrorException, + api_exceptions.NotFoundException, + api_exceptions.UnauthorizedException, + ]) + +_ALL_JSON_CONTENT_TYPES = frozenset( + [protojson.EndpointsProtoJson.CONTENT_TYPE] + + protojson.EndpointsProtoJson.ALTERNATIVE_CONTENT_TYPES) + + + + + +class EndpointsErrorMessage(messages.Message): + """Message for returning error back to Google Endpoints frontend. + + Fields: + state: State of RPC, should be 'APPLICATION_ERROR'. + error_message: Error message associated with status. + """ + + class State(messages.Enum): + """Enumeration of possible RPC states. + + Values: + OK: Completed successfully. + RUNNING: Still running, not complete. + REQUEST_ERROR: Request was malformed or incomplete. + SERVER_ERROR: Server experienced an unexpected error. + NETWORK_ERROR: An error occured on the network. + APPLICATION_ERROR: The application is indicating an error. + When in this state, RPC should also set application_error. 
+ """ + OK = 0 + RUNNING = 1 + + REQUEST_ERROR = 2 + SERVER_ERROR = 3 + NETWORK_ERROR = 4 + APPLICATION_ERROR = 5 + METHOD_NOT_FOUND_ERROR = 6 + + state = messages.EnumField(State, 1, required=True) + error_message = messages.StringField(2) + + + +def _get_app_revision(environ=None): + """Gets the app revision (minor app version) of the current app. + + Args: + environ: A dictionary with a key CURRENT_VERSION_ID that maps to a version + string of the format .. + + Returns: + The app revision (minor version) of the current app, or None if one couldn't + be found. + """ + if environ is None: + environ = os.environ + if 'CURRENT_VERSION_ID' in environ: + return environ['CURRENT_VERSION_ID'].split('.')[1] + + +class _ApiServer(object): + """ProtoRPC wrapper, registers APIs and formats errors for Google API Server. + + - - - - ProtoRPC error format - - - - + HTTP/1.0 400 Please log in as an admin user. + content-type: application/json + + { + "state": "APPLICATION_ERROR", + "error_message": "Please log in as an admin user", + "error_name": "unauthorized", + } + + - - - - Reformatted error format - - - - + HTTP/1.0 401 UNAUTHORIZED + content-type: application/json + + { + "state": "APPLICATION_ERROR", + "error_message": "Please log in as an admin user" + } + """ + + + __SPI_PREFIX = '/_ah/spi/' + __BACKEND_SERVICE_ROOT = '%sBackendService' % __SPI_PREFIX + __SERVER_SOFTWARE = 'SERVER_SOFTWARE' + + + + + __IGNORE_RESTRICTION_PREFIXES = ('Development/', 'WSGIServer/', 'testutil/') + __HEADER_NAME_PEER = 'HTTP_X_APPENGINE_PEER' + __GOOGLE_PEER = 'apiserving' + + + __PROTOJSON = protojson.EndpointsProtoJson() + + def __init__(self, api_services, **kwargs): + """Initialize an _ApiServer instance. + + The primary function of this method is to set up the WSGIApplication + instance for the service handlers described by the services passed in. 
+ Additionally, it registers each API in ApiConfigRegistry for later use + in the BackendService.getApiConfigs() (API config enumeration service). + + Args: + api_services: List of protorpc.remote.Service classes implementing the API + or a list of _ApiDecorator instances that decorate the service classes + for an API. + **kwargs: Passed through to protorpc.wsgi.service.service_handlers except: + protocols - ProtoRPC protocols are not supported, and are disallowed. + restricted - If True or unset, the API will only be allowed to serve to + Google's API serving infrastructure once deployed. Set to False to + allow other clients. Under dev_appserver, all clients are accepted. + NOTE! Under experimental launch, this is not a secure restriction and + other authentication mechanisms *must* be used to control access to + the API. The restriction is only intended to notify developers of + a possible upcoming feature to securely restrict access to the API. + + Raises: + TypeError: if protocols are configured (this feature is not supported). + ApiConfigurationError: if there's a problem with the API config. 
+ """ + for entry in api_services[:]: + + if isinstance(entry, api_config._ApiDecorator): + api_services.remove(entry) + api_services.extend(entry.get_api_classes()) + + self.api_config_registry = api_backend_service.ApiConfigRegistry() + api_name_version_map = self.__create_name_version_map(api_services) + protorpc_services = self.__register_services(api_name_version_map, + self.api_config_registry) + + + backend_service = api_backend_service.BackendServiceImpl.new_factory( + self.api_config_registry, _get_app_revision()) + protorpc_services.insert(0, (self.__BACKEND_SERVICE_ROOT, backend_service)) + + + if 'protocols' in kwargs: + raise TypeError('__init__() got an unexpected keyword argument ' + "'protocols'") + protocols = remote.Protocols() + protocols.add_protocol(self.__PROTOJSON, 'protojson') + remote.Protocols.set_default(protocols) + + self.restricted = kwargs.pop('restricted', True) + self.service_app = wsgi_service.service_mappings(protorpc_services, + **kwargs) + + @staticmethod + def __create_name_version_map(api_services): + """Create a map from API name/version to Service class/factory. + + This creates a map from an API name and version to a list of remote.Service + factories that implement that API. + + Args: + api_services: A list of remote.Service-derived classes or factories + created with remote.Service.new_factory. + + Returns: + A mapping from (api name, api version) to a list of service factories, + for service classes that implement that API. + + Raises: + ApiConfigurationError: If a Service class appears more than once + in api_services. 
+ """ + api_name_version_map = {} + for service_factory in api_services: + try: + service_class = service_factory.service_class + except AttributeError: + service_class = service_factory + service_factory = service_class.new_factory() + + key = service_class.api_info.name, service_class.api_info.version + service_factories = api_name_version_map.setdefault(key, []) + if service_factory in service_factories: + raise api_config.ApiConfigurationError( + 'Can\'t add the same class to an API twice: %s' % + service_factory.service_class.__name__) + + service_factories.append(service_factory) + return api_name_version_map + + @staticmethod + def __register_services(api_name_version_map, api_config_registry): + """Register & return a list of each SPI URL and class that handles that URL. + + This finds every service class in api_name_version_map, registers it with + the given ApiConfigRegistry, builds the SPI url for that class, and adds + the URL and its factory to a list that's returned. + + Args: + api_name_version_map: A mapping from (api name, api version) to a list of + service factories, as returned by __create_name_version_map. + api_config_registry: The ApiConfigRegistry where service classes will + be registered. + + Returns: + A list of (SPI URL, service_factory) for each service class in + api_name_version_map. + + Raises: + ApiConfigurationError: If a Service class appears more than once + in api_name_version_map. This could happen if one class is used to + implement multiple APIs. 
+ """ + generator = api_config.ApiConfigGenerator() + protorpc_services = [] + for service_factories in api_name_version_map.itervalues(): + service_classes = [service_factory.service_class + for service_factory in service_factories] + config_file = generator.pretty_print_config_to_json(service_classes) + api_config_registry.register_spi(config_file) + + for service_factory in service_factories: + protorpc_class_name = service_factory.service_class.__name__ + root = _ApiServer.__SPI_PREFIX + protorpc_class_name + if any(service_map[0] == root or service_map[1] == service_factory + for service_map in protorpc_services): + raise api_config.ApiConfigurationError( + 'Can\'t reuse the same class in multiple APIs: %s' % + protorpc_class_name) + protorpc_services.append((root, service_factory)) + return protorpc_services + + def __is_request_restricted(self, environ): + """Determine if access to SPI should be denied. + + Access will always be allowed in dev_appserver and under unit tests, but + will only be allowed in production if the HTTP header HTTP_X_APPENGINE_PEER + is set to 'apiserving'. Google's Endpoints server sets this header by + default and App Engine may securely prevent outside callers from setting it + in the future to allow better protection of the API backend. + + Args: + environ: WSGI environment dictionary. + + Returns: + True if access should be denied, else False. + """ + if not self.restricted: + return False + server = environ.get(self.__SERVER_SOFTWARE, '') + for prefix in self.__IGNORE_RESTRICTION_PREFIXES: + if server.startswith(prefix): + return False + peer_name = environ.get(self.__HEADER_NAME_PEER, '') + return peer_name.lower() != self.__GOOGLE_PEER + + def __is_json_error(self, status, headers): + """Determine if response is an error. + + Args: + status: HTTP status code. + headers: Dictionary of (lowercase) header name to value. + + Returns: + True if the response was an error, else False. 
+ """ + content_header = headers.get('content-type', '') + content_type, unused_params = cgi.parse_header(content_header) + return (status.startswith('400') and + content_type.lower() in _ALL_JSON_CONTENT_TYPES) + + def __write_error(self, status_code, error_message=None): + """Return the HTTP status line and body for a given error code and message. + + Args: + status_code: HTTP status code to be returned. + error_message: Error message to be returned. + + Returns: + Tuple (http_status, body): + http_status: HTTP status line, e.g. 200 OK. + body: Body of the HTTP request. + """ + if error_message is None: + error_message = httplib.responses[status_code] + status = '%d %s' % (status_code, httplib.responses[status_code]) + message = EndpointsErrorMessage( + state=EndpointsErrorMessage.State.APPLICATION_ERROR, + error_message=error_message) + return status, self.__PROTOJSON.encode_message(message) + + def protorpc_to_endpoints_error(self, status, body): + """Convert a ProtoRPC error to the format expected by Google Endpoints. + + If the body does not contain an ProtoRPC message in state APPLICATION_ERROR + the status and body will be returned unchanged. + + Args: + status: HTTP status of the response from the backend + body: JSON-encoded error in format expected by Endpoints frontend. + + Returns: + Tuple of (http status, body) + """ + try: + rpc_error = self.__PROTOJSON.decode_message(remote.RpcStatus, body) + except (ValueError, messages.ValidationError): + rpc_error = remote.RpcStatus() + + if rpc_error.state == remote.RpcStatus.State.APPLICATION_ERROR: + + + error_class = _ERROR_NAME_MAP.get(rpc_error.error_name) + if error_class: + status, body = self.__write_error(error_class.http_status, + rpc_error.error_message) + return status, body + + def __call__(self, environ, start_response): + """Wrapper for Swarm server app. + + Args: + environ: WSGI request environment. + start_response: WSGI start response function. 
def api_server(api_services, **kwargs):
  """Create an api_server.

  Sets up the WSGIApplication serving the given ProtoRPC services and
  registers each API in ApiConfigRegistry for later use in the
  BackendService.getApiConfigs() (API config enumeration service).

  Args:
    api_services: List of protorpc.remote.Service classes implementing the API
      or a list of _ApiDecorator instances that decorate the service classes
      for an API.
    **kwargs: Passed through to protorpc.wsgi.service.service_handlers except:
      protocols - ProtoRPC protocols are not supported, and are disallowed.
      restricted - If True or unset, the API will only be allowed to serve to
        Google's API serving infrastructure once deployed.  Set to False to
        allow other clients.  Under dev_appserver, all clients are accepted.

  Returns:
    A new WSGIApplication that serves the API backend and config registry.

  Raises:
    TypeError: if protocols are configured (this feature is not supported).
  """
  # Reject 'protocols' here so callers get the error before _ApiServer
  # construction begins.
  if 'protocols' not in kwargs:
    return _ApiServer(api_services, **kwargs)
  raise TypeError("__init__() got an unexpected keyword argument 'protocols'")
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + + +"""Describe ProtoRPC Messages in JSON Schema. + +Add protorpc.message subclasses to MessageTypeToJsonSchema and get a JSON +Schema description of all the messages. +""" + + +import re + +from protorpc import message_types +from protorpc import messages + +__all__ = ['MessageTypeToJsonSchema'] + + +class MessageTypeToJsonSchema(object): + """Describe ProtoRPC messages in JSON Schema. + + Add protorpc.message subclasses to MessageTypeToJsonSchema and get a JSON + Schema description of all the messages. MessageTypeToJsonSchema handles + all the types of fields that can appear in a message. 
+ """ + + + + + + + + __FIELD_TO_SCHEMA_TYPE_MAP = { + messages.IntegerField: {messages.Variant.INT32: ('integer', 'int32'), + messages.Variant.INT64: ('string', 'int64'), + messages.Variant.UINT32: ('integer', 'uint32'), + messages.Variant.UINT64: ('string', 'uint64'), + messages.Variant.SINT32: ('integer', 'int32'), + messages.Variant.SINT64: ('string', 'int64'), + None: ('integer', 'int32')}, + messages.FloatField: {messages.Variant.FLOAT: ('number', 'float'), + messages.Variant.DOUBLE: ('number', 'double'), + None: ('number', 'float')}, + messages.BooleanField: ('boolean', None), + messages.BytesField: ('string', 'byte'), + message_types.DateTimeField: ('string', 'date-time'), + messages.StringField: ('string', None), + messages.MessageField: ('object', None), + messages.EnumField: ('string', None), + } + + __DEFAULT_SCHEMA_TYPE = ('string', None) + + def __init__(self): + + self.__schemas = {} + + + self.__normalized_names = {} + + def add_message(self, message_type): + """Add a new message. + + Args: + message_type: protorpc.message.Message class to be parsed. + + Returns: + string, The JSON Schema id. + + Raises: + KeyError if the Schema id for this message_type would collide with the + Schema id of a different message_type that was already added. + """ + name = self.__normalized_name(message_type) + if name not in self.__schemas: + schema = self.__message_to_schema(message_type) + self.__schemas[name] = schema + return name + + def ref_for_message_type(self, message_type): + """Returns the JSON Schema id for the given message. + + Args: + message_type: protorpc.message.Message class to be parsed. + + Returns: + string, The JSON Schema id. + + Raises: + KeyError: if the message hasn't been parsed via add_message(). + """ + name = self.__normalized_name(message_type) + if name not in self.__schemas: + raise KeyError('Message has not been parsed: %s', name) + return name + + def schemas(self): + """Returns the JSON Schema of all the messages. 
  def __normalized_name(self, message_type):
    """Normalized schema name.

    Generate a normalized schema name, taking the class name and stripping out
    everything but alphanumerics, and camel casing the remaining words.
    A normalized schema name is a name that matches [a-zA-Z][a-zA-Z0-9]*

    Args:
      message_type: protorpc.message.Message class being parsed.

    Returns:
      A string, the normalized schema name.

    Raises:
      KeyError: if a collision is found between normalized names.
    """
    # Normalization is applied to the fully qualified definition name,
    # e.g. 'my_package.MyMessage' -> 'MyPackageMyMessage'.
    name = message_type.definition_name()

    split_name = re.split(r'[^0-9a-zA-Z]', name)
    normalized = ''.join(
        part[0].upper() + part[1:] for part in split_name if part)

    previous = self.__normalized_names.get(normalized)
    if previous:
      if previous != name:
        # Two distinct message classes collapsing to the same schema id would
        # make the schema ambiguous, so refuse rather than silently overwrite.
        raise KeyError('Both %s and %s normalize to the same schema name: %s' %
                       (name, previous, normalized))
    else:
      self.__normalized_names[normalized] = name

    return normalized
+ """ + name = self.__normalized_name(message_type) + schema = { + 'id': name, + 'type': 'object', + } + if message_type.__doc__: + schema['description'] = message_type.__doc__ + properties = {} + for field in message_type.all_fields(): + descriptor = {} + + + + type_info = {} + + if type(field) == messages.MessageField: + field_type = field.type().__class__ + type_info['$ref'] = self.add_message(field_type) + if field_type.__doc__: + descriptor['description'] = field_type.__doc__ + else: + schema_type = self.__FIELD_TO_SCHEMA_TYPE_MAP.get( + type(field), self.__DEFAULT_SCHEMA_TYPE) + + + if isinstance(schema_type, dict): + variant_map = schema_type + variant = getattr(field, 'variant', None) + if variant in variant_map: + schema_type = variant_map[variant] + else: + + schema_type = variant_map[None] + type_info['type'] = schema_type[0] + if schema_type[1]: + type_info['format'] = schema_type[1] + + if type(field) == messages.EnumField: + sorted_enums = sorted([enum_info for enum_info in field.type], + key=lambda enum_info: enum_info.number) + type_info['enum'] = [enum_info.name for enum_info in sorted_enums] + + if field.required: + descriptor['required'] = True + + if field.default: + if type(field) == messages.EnumField: + descriptor['default'] = str(field.default) + else: + descriptor['default'] = field.default + + if field.repeated: + descriptor['items'] = type_info + descriptor['type'] = 'array' + else: + descriptor.update(type_info) + + properties[field.name] = descriptor + + schema['properties'] = properties + + return schema diff --git a/app-engine-app/endpoints/protojson.py b/app-engine-app/endpoints/protojson.py new file mode 100644 index 0000000..554cfac --- /dev/null +++ b/app-engine-app/endpoints/protojson.py @@ -0,0 +1,90 @@ +#!/usr/bin/env python +# +# Copyright 2007 Google Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + + +"""Endpoints-specific implementation of ProtoRPC's ProtoJson class.""" + + +import base64 + +from protorpc import messages +from protorpc import protojson + + + +__all__ = ['EndpointsProtoJson'] + + +class EndpointsProtoJson(protojson.ProtoJson): + """Endpoints-specific implementation of ProtoRPC's ProtoJson class. + + We need to adjust the way some types of data are encoded to ensure they're + consistent with the existing API pipeline. This class adjusts the JSON + encoding as needed. + + This may be used in a multithreaded environment, so take care to ensure + that this class (and its parent, protojson.ProtoJson) remain thread-safe. + """ + + def encode_field(self, field, value): + """Encode a python field value to a JSON value. + + Args: + field: A ProtoRPC field instance. + value: A python value supported by field. + + Returns: + A JSON serializable value appropriate for field. + """ + + + if (isinstance(field, messages.IntegerField) and + field.variant in (messages.Variant.INT64, + messages.Variant.UINT64, + messages.Variant.SINT64)): + if value not in (None, [], ()): + + if isinstance(value, list): + value = [str(subvalue) for subvalue in value] + else: + value = str(value) + return value + + return super(EndpointsProtoJson, self).encode_field(field, value) + + def decode_field(self, field, value): + """Decode a JSON value to a python value. + + Args: + field: A ProtoRPC field instance. + value: A serialized JSON value. + + Returns: + A Python value compatible with field. 
+ """ + + + + if isinstance(field, messages.BytesField): + try: + + + return base64.urlsafe_b64decode(str(value)) + except (TypeError, UnicodeEncodeError), err: + raise messages.DecodeError('Base64 decoding error: %s' % err) + + return super(EndpointsProtoJson, self).decode_field(field, value) diff --git a/app-engine-app/endpoints/users_id_token.py b/app-engine-app/endpoints/users_id_token.py new file mode 100644 index 0000000..34d6717 --- /dev/null +++ b/app-engine-app/endpoints/users_id_token.py @@ -0,0 +1,641 @@ +#!/usr/bin/env python +# +# Copyright 2007 Google Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# + + +"""Utility library for reading user information from an id_token. + +This is an experimental library that can temporarily be used to extract +a user from an id_token. The functionality provided by this library +will be provided elsewhere in the future. 
+""" + + +import base64 + +try: + import json +except ImportError: + import simplejson as json +import logging +import os +import re +import time +import urllib + +try: + + from google.appengine.api import memcache + from google.appengine.api import oauth + from google.appengine.api import urlfetch + from google.appengine.api import users +except ImportError: + + from google.appengine.api import memcache + from google.appengine.api import oauth + from google.appengine.api import urlfetch + from google.appengine.api import users + +try: + + + + + + from Crypto.Hash import SHA256 + from Crypto.PublicKey import RSA + + _CRYPTO_LOADED = True +except ImportError: + _CRYPTO_LOADED = False + + +__all__ = ['get_current_user', + 'InvalidGetUserCall', + 'SKIP_CLIENT_ID_CHECK'] + +SKIP_CLIENT_ID_CHECK = ['*'] +_CLOCK_SKEW_SECS = 300 +_MAX_TOKEN_LIFETIME_SECS = 86400 +_DEFAULT_CERT_URI = ('https://www.googleapis.com/service_accounts/v1/metadata/' + 'raw/federated-signon@system.gserviceaccount.com') +_ENV_USE_OAUTH_SCOPE = 'ENDPOINTS_USE_OAUTH_SCOPE' +_ENV_AUTH_EMAIL = 'ENDPOINTS_AUTH_EMAIL' +_ENV_AUTH_DOMAIN = 'ENDPOINTS_AUTH_DOMAIN' +_EMAIL_SCOPE = 'https://www.googleapis.com/auth/userinfo.email' +_TOKENINFO_URL = 'https://www.googleapis.com/oauth2/v1/tokeninfo' +_MAX_AGE_REGEX = re.compile(r'\s*max-age\s*=\s*(\d+)\s*') +_CERT_NAMESPACE = '__verify_jwt' + + +class _AppIdentityError(Exception): + pass + + +class InvalidGetUserCall(Exception): + """Called get_current_user when the environment was not set up for it.""" + + + +def get_current_user(): + """Get user information from the id_token or oauth token in the request. + + This should only be called from within an Endpoints request handler, + decorated with an @endpoints.method decorator. The decorator should include + the https://www.googleapis.com/auth/userinfo.email scope. 
+ + If the current request uses an id_token, this validates and parses the token + against the info in the current request handler and returns the user. + Or, for an Oauth token, this call validates the token against the tokeninfo + endpoint and oauth.get_current_user with the scopes provided in the method's + decorator. + + Returns: + None if there is no token or it's invalid. If the token was valid, this + returns a User. Only the user's email field is guaranteed to be set. + Other fields may be empty. + + Raises: + InvalidGetUserCall: if the environment variables necessary to determine the + endpoints user are not set. These are typically set when processing a + request using an Endpoints handler. If they are not set, it likely + indicates that this function was called from outside an Endpoints request + handler. + """ + if not _is_auth_info_available(): + + raise InvalidGetUserCall('No valid endpoints user in environment.') + + if _ENV_USE_OAUTH_SCOPE in os.environ: + + + + + return oauth.get_current_user(os.environ[_ENV_USE_OAUTH_SCOPE]) + + if (_ENV_AUTH_EMAIL in os.environ and + _ENV_AUTH_DOMAIN in os.environ): + if not os.environ[_ENV_AUTH_EMAIL]: + + + return None + + + return users.User(os.environ[_ENV_AUTH_EMAIL], + os.environ[_ENV_AUTH_DOMAIN] or None) + + + + return None + + + +def _is_auth_info_available(): + """Check if user auth info has been set in environment variables.""" + return ((_ENV_AUTH_EMAIL in os.environ and + _ENV_AUTH_DOMAIN in os.environ) or + _ENV_USE_OAUTH_SCOPE in os.environ) + + +def _maybe_set_current_user_vars(method, api_info=None, request=None): + """Get user information from the id_token or oauth token in the request. + + Used internally by Endpoints to set up environment variables for user + authentication. + + Args: + method: The class method that's handling this request. This method + should be annotated with @endpoints.method. + api_info: An api_config._ApiInfo instance. Optional. 
If None, will attempt + to parse api_info from the implicit instance of the method. + request: The current request, or None. + """ + if _is_auth_info_available(): + return + + + os.environ[_ENV_AUTH_EMAIL] = '' + os.environ[_ENV_AUTH_DOMAIN] = '' + + + + + try: + api_info = api_info or method.im_self.api_info + except AttributeError: + + + + + + logging.warning('AttributeError when accessing %s.im_self. An unbound ' + 'method was probably passed as an endpoints handler.', + method.__name__) + scopes = method.method_info.scopes + audiences = method.method_info.audiences + allowed_client_ids = method.method_info.allowed_client_ids + else: + scopes = (method.method_info.scopes + if method.method_info.scopes is not None + else api_info.scopes) + audiences = (method.method_info.audiences + if method.method_info.audiences is not None + else api_info.audiences) + allowed_client_ids = (method.method_info.allowed_client_ids + if method.method_info.allowed_client_ids is not None + else api_info.allowed_client_ids) + + if not scopes and not audiences and not allowed_client_ids: + + + + return + + token = _get_token(request) + if not token: + + return None + + + + + + if ((scopes == [_EMAIL_SCOPE] or scopes == (_EMAIL_SCOPE,)) and + allowed_client_ids): + + logging.debug('Checking for id_token.') + time_now = long(time.time()) + user = _get_id_token_user(token, audiences, allowed_client_ids, time_now, + memcache) + + if user: + os.environ[_ENV_AUTH_EMAIL] = user.email() + os.environ[_ENV_AUTH_DOMAIN] = user.auth_domain() + + return + + + if scopes: + logging.debug('Checking for oauth token.') + if _is_local_dev(): + _set_bearer_user_vars_local(token, allowed_client_ids, scopes) + else: + _set_bearer_user_vars(allowed_client_ids, scopes) + + + +def _get_token(request): + """Get the auth token for this request. + + Auth token may be specified in either the Authorization header or + as a query param (either access_token or bearer_token). We'll check in + this order: + 1. 
Authorization header. + 2. bearer_token query param. + 3. access_token query param. + + Args: + request: The current request, or None. + + Returns: + The token in the request or None. + """ + + auth_header = os.environ.get('HTTP_AUTHORIZATION') + + if auth_header: + allowed_auth_schemes = ('OAuth', 'Bearer') + for auth_scheme in allowed_auth_schemes: + if auth_header.startswith(auth_scheme): + + return auth_header[len(auth_scheme) + 1:] + + + return None + + + if request: + for key in ('bearer_token', 'access_token'): + token, _ = request.get_unrecognized_field_info(key) + if token: + + return token + + + +def _get_id_token_user(token, audiences, allowed_client_ids, time_now, cache): + """Get a User for the given id token, if the token is valid. + + Args: + token: The id_token to check. + audiences: List of audiences that are acceptable. + allowed_client_ids: List of client IDs that are acceptable. + time_now: The current time as a long (eg. long(time.time())). + cache: Cache to use (eg. the memcache module). + + Returns: + A User if the token is valid, None otherwise. + """ + + + try: + parsed_token = _verify_signed_jwt_with_certs(token, time_now, cache) + except _AppIdentityError, e: + logging.debug('id_token verification failed: %s', e) + return None + except: + logging.debug('id_token verification failed.') + return None + + if _verify_parsed_token(parsed_token, audiences, allowed_client_ids): + email = parsed_token['email'] + + + + + + + return users.User(email) + + + +def _set_oauth_user_vars(token_info, audiences, allowed_client_ids, scopes, + local_dev): + logging.warning('_set_oauth_user_vars is deprecated and will be removed ' + 'soon.') + return _set_bearer_user_vars(allowed_client_ids, scopes) + + + +def _set_bearer_user_vars(allowed_client_ids, scopes): + """Validate the oauth bearer token and set endpoints auth user variables. + + If the bearer token is valid, this sets ENDPOINTS_USE_OAUTH_SCOPE. 
This + provides enough information that our endpoints.get_current_user() function + can get the user. + + Args: + allowed_client_ids: List of client IDs that are acceptable. + scopes: List of acceptable scopes. + """ + for scope in scopes: + try: + client_id = oauth.get_client_id(scope) + except oauth.Error: + + continue + + + + + if (list(allowed_client_ids) != SKIP_CLIENT_ID_CHECK and + client_id not in allowed_client_ids): + logging.warning('Client ID is not allowed: %s', client_id) + return + + os.environ[_ENV_USE_OAUTH_SCOPE] = scope + logging.debug('Returning user from matched oauth_user.') + return + + logging.debug('Oauth framework user didn\'t match oauth token user.') + return None + + +def _set_bearer_user_vars_local(token, allowed_client_ids, scopes): + """Validate the oauth bearer token on the dev server. + + Since the functions in the oauth module return only example results in local + development, this hits the tokeninfo endpoint and attempts to validate the + token. If it's valid, we'll set _ENV_AUTH_EMAIL and _ENV_AUTH_DOMAIN so we + can get the user from the token. + + Args: + token: String with the oauth token to validate. + allowed_client_ids: List of client IDs that are acceptable. + scopes: List of acceptable scopes. 
+ """ + + result = urlfetch.fetch( + '%s?%s' % (_TOKENINFO_URL, urllib.urlencode({'access_token': token}))) + if result.status_code != 200: + try: + error_description = json.loads(result.content)['error_description'] + except (ValueError, KeyError): + error_description = '' + logging.error('Token info endpoint returned status %s: %s', + result.status_code, error_description) + return + token_info = json.loads(result.content) + + + if 'email' not in token_info: + logging.warning('Oauth token doesn\'t include an email address.') + return + if not token_info.get('verified_email'): + logging.warning('Oauth token email isn\'t verified.') + return + + + client_id = token_info.get('issued_to') + if (list(allowed_client_ids) != SKIP_CLIENT_ID_CHECK and + client_id not in allowed_client_ids): + logging.warning('Client ID is not allowed: %s', client_id) + return + + + token_scopes = token_info.get('scope', '').split(' ') + if not any(scope in scopes for scope in token_scopes): + logging.warning('Oauth token scopes don\'t match any acceptable scopes.') + return + + os.environ[_ENV_AUTH_EMAIL] = token_info['email'] + os.environ[_ENV_AUTH_DOMAIN] = '' + logging.debug('Local dev returning user from token.') + return + + +def _is_local_dev(): + return os.environ.get('SERVER_SOFTWARE', '').startswith('Development') + + +def _verify_parsed_token(parsed_token, audiences, allowed_client_ids): + + if parsed_token.get('iss') != 'accounts.google.com': + logging.warning('Issuer was not valid: %s', parsed_token.get('iss')) + return False + + + aud = parsed_token.get('aud') + if not aud: + logging.warning('No aud field in token') + return False + + + + cid = parsed_token.get('azp') + if aud != cid and aud not in audiences: + logging.warning('Audience not allowed: %s', aud) + return False + + + if list(allowed_client_ids) == SKIP_CLIENT_ID_CHECK: + logging.warning('Client ID check can\'t be skipped for ID tokens. 
' + 'Id_token cannot be verified.') + return False + elif not cid or cid not in allowed_client_ids: + logging.warning('Client ID is not allowed: %s', cid) + return False + + if 'email' not in parsed_token: + return False + + return True + + +def _urlsafe_b64decode(b64string): + + b64string = b64string.encode('ascii') + padded = b64string + '=' * ((4 - len(b64string)) % 4) + return base64.urlsafe_b64decode(padded) + + +def _get_cert_expiration_time(headers): + """Get the expiration time for a cert, given the response headers. + + Get expiration time from the headers in the result. If we can't get + a time from the headers, this returns 0, indicating that the cert + shouldn't be cached. + + Args: + headers: A dict containing the response headers from the request to get + certs. + + Returns: + An integer with the number of seconds the cert should be cached. This + value is guaranteed to be >= 0. + """ + + cache_control = headers.get('Cache-Control', '') + + + + for entry in cache_control.split(','): + match = _MAX_AGE_REGEX.match(entry) + if match: + cache_time_seconds = int(match.group(1)) + break + else: + return 0 + + + age = headers.get('Age') + if age is not None: + try: + age = int(age) + except ValueError: + age = 0 + cache_time_seconds -= age + + return max(0, cache_time_seconds) + + +def _get_cached_certs(cert_uri, cache): + certs = cache.get(cert_uri, namespace=_CERT_NAMESPACE) + if certs is None: + logging.debug('Cert cache miss') + try: + result = urlfetch.fetch(cert_uri) + except AssertionError: + + return None + + if result.status_code == 200: + certs = json.loads(result.content) + expiration_time_seconds = _get_cert_expiration_time(result.headers) + if expiration_time_seconds: + cache.set(cert_uri, certs, time=expiration_time_seconds, + namespace=_CERT_NAMESPACE) + else: + logging.error( + 'Certs not available, HTTP request returned %d', result.status_code) + + return certs + + +def _b64_to_long(b): + b = b.encode('ascii') + b += '=' * ((4 - len(b)) % 
4) + b = base64.b64decode(b) + return long(b.encode('hex'), 16) + + +def _verify_signed_jwt_with_certs( + jwt, time_now, cache, + cert_uri=_DEFAULT_CERT_URI): + """Verify a JWT against public certs. + + See http://self-issued.info/docs/draft-jones-json-web-token.html. + + The PyCrypto library included with Google App Engine is severely limited and + so you have to use it very carefully to verify JWT signatures. The first + issue is that the library can't read X.509 files, so we make a call to a + special URI that has the public cert in modulus/exponent form in JSON. + + The second issue is that the RSA.verify method doesn't work, at least for + how the JWT tokens are signed, so we have to manually verify the signature + of the JWT, which means hashing the signed part of the JWT and comparing + that to the signature that's been encrypted with the public key. + + Args: + jwt: string, A JWT. + time_now: The current time, as a long (eg. long(time.time())). + cache: Cache to use (eg. the memcache module). + cert_uri: string, URI to get cert modulus and exponent in JSON format. + + Returns: + dict, The deserialized JSON payload in the JWT. + + Raises: + _AppIdentityError: if any checks are failed. 
+ """ + + segments = jwt.split('.') + + if len(segments) != 3: + raise _AppIdentityError('Wrong number of segments in token: %s' % jwt) + signed = '%s.%s' % (segments[0], segments[1]) + + signature = _urlsafe_b64decode(segments[2]) + + + + lsignature = long(signature.encode('hex'), 16) + + + header_body = _urlsafe_b64decode(segments[0]) + try: + header = json.loads(header_body) + except: + raise _AppIdentityError('Can\'t parse header: %s' % header_body) + if header.get('alg') != 'RS256': + raise _AppIdentityError('Unexpected encryption algorithm: %s' % + header.get('alg')) + + + json_body = _urlsafe_b64decode(segments[1]) + try: + parsed = json.loads(json_body) + except: + raise _AppIdentityError('Can\'t parse token: %s' % json_body) + + certs = _get_cached_certs(cert_uri, cache) + if certs is None: + raise _AppIdentityError( + 'Unable to retrieve certs needed to verify the signed JWT: %s' % jwt) + + + + if not _CRYPTO_LOADED: + raise _AppIdentityError('Unable to load pycrypto library. Can\'t verify ' + 'id_token signature. 
See http://www.pycrypto.org ' + 'for more information on pycrypto.') + + + + local_hash = SHA256.new(signed).hexdigest() + + + verified = False + for keyvalue in certs['keyvalues']: + modulus = _b64_to_long(keyvalue['modulus']) + exponent = _b64_to_long(keyvalue['exponent']) + key = RSA.construct((modulus, exponent)) + + + hexsig = '%064x' % key.encrypt(lsignature, '')[0] + + hexsig = hexsig[-64:] + + + + verified = (hexsig == local_hash) + if verified: + break + if not verified: + raise _AppIdentityError('Invalid token signature: %s' % jwt) + + + iat = parsed.get('iat') + if iat is None: + raise _AppIdentityError('No iat field in token: %s' % json_body) + earliest = iat - _CLOCK_SKEW_SECS + + + exp = parsed.get('exp') + if exp is None: + raise _AppIdentityError('No exp field in token: %s' % json_body) + if exp >= time_now + _MAX_TOKEN_LIFETIME_SECS: + raise _AppIdentityError('exp field too far in future: %s' % json_body) + latest = exp + _CLOCK_SKEW_SECS + + if time_now < earliest: + raise _AppIdentityError('Token used too early, %d < %d: %s' % + (time_now, earliest, json_body)) + if time_now > latest: + raise _AppIdentityError('Token used too late, %d > %d: %s' % + (time_now, latest, json_body)) + + return parsed diff --git a/app-engine-app/gcm/__init__.py b/app-engine-app/gcm/__init__.py new file mode 100644 index 0000000..252a330 --- /dev/null +++ b/app-engine-app/gcm/__init__.py @@ -0,0 +1,4 @@ + +import gcm + +GCM = gcm.GCM diff --git a/app-engine-app/gcm/gcm.py b/app-engine-app/gcm/gcm.py new file mode 100644 index 0000000..0aad3c1 --- /dev/null +++ b/app-engine-app/gcm/gcm.py @@ -0,0 +1,271 @@ +import urllib +import urllib2 +import json +from collections import defaultdict +import time +import random + +GCM_URL = 'https://android.googleapis.com/gcm/send' + + +class GCMException(Exception): pass +class GCMMalformedJsonException(GCMException): pass +class GCMConnectionException(GCMException): pass +class GCMAuthenticationException(GCMException): pass +class 
GCMTooManyRegIdsException(GCMException): pass +class GCMNoCollapseKeyException(GCMException): pass +class GCMInvalidTtlException(GCMException): pass + +# Exceptions from Google responses +class GCMMissingRegistrationException(GCMException): pass +class GCMMismatchSenderIdException(GCMException): pass +class GCMNotRegisteredException(GCMException): pass +class GCMMessageTooBigException(GCMException): pass +class GCMInvalidRegistrationException(GCMException): pass +class GCMUnavailableException(GCMException): pass + + +# TODO: Refactor this to be more human-readable +def group_response(response, registration_ids, key): + # Pair up results and reg_ids + mapping = zip(registration_ids, response['results']) + # Filter by key + filtered = filter(lambda x: key in x[1], mapping) + # Only consider the value in the dict + tupled = [(s[0], s[1][key]) for s in filtered] + # Grouping of errors and mapping of ids + if key is 'registration_id': + grouping = {} + for k, v in tupled: + grouping[k] = v + else: + grouping = defaultdict(list) + for k, v in tupled: + grouping[v].append(k) + + if len(grouping) == 0: + return + return grouping + + +class GCM(object): + + # Timeunit is milliseconds. + BACKOFF_INITIAL_DELAY = 1000; + MAX_BACKOFF_DELAY = 1024000; + + def __init__(self, api_key, url=GCM_URL, proxy=None): + """ api_key : google api key + url: url of gcm service. + proxy: can be string "http://host:port" or dict {'https':'host:port'} + """ + self.api_key = api_key + self.url = url + if proxy: + if isinstance(proxy,basestring): + protocol = url.split(':')[0] + proxy={protocol:proxy} + + auth = urllib2.HTTPBasicAuthHandler() + opener = urllib2.build_opener(urllib2.ProxyHandler(proxy), auth, urllib2.HTTPHandler) + urllib2.install_opener(opener) + + + def construct_payload(self, registration_ids, data=None, collapse_key=None, + delay_while_idle=False, time_to_live=None, is_json=True): + """ + Construct the dictionary mapping of parameters. 
+        Encodes the dictionary into JSON if for json requests.
+        Helps appending 'data.' prefix to the plaintext data: 'hello' => 'data.hello'
+
+        :return constructed dict or JSON payload
+        :raises GCMInvalidTtlException: if time_to_live is invalid
+        :raises GCMNoCollapseKeyException: if collapse_key is missing when time_to_live is used
+        """
+
+        # NOTE(review): a time_to_live of 0 is falsy, so it skips both this
+        # validation and the payload assignment below -- confirm whether
+        # ttl=0 needs to be supported as a distinct value.
+        if time_to_live:
+            # 2419200 seconds = 4 weeks.
+            if time_to_live > 2419200 or time_to_live < 0:
+                raise GCMInvalidTtlException("Invalid time to live value")
+
+        if is_json:
+            # JSON requests carry a list of ids and a nested 'data' object.
+            payload = {'registration_ids': registration_ids}
+            if data:
+                payload['data'] = data
+        else:
+            # Plaintext requests take a single id and flatten the data dict
+            # into 'data.<key>' form fields.
+            payload = {'registration_id': registration_ids}
+            if data:
+                # Copy first so the caller's dict is not mutated while re-keying.
+                plaintext_data = data.copy()
+                for k in plaintext_data.keys():
+                    plaintext_data['data.%s' % k] = plaintext_data.pop(k)
+                payload.update(plaintext_data)
+
+        if delay_while_idle:
+            payload['delay_while_idle'] = delay_while_idle
+
+        if time_to_live:
+            payload['time_to_live'] = time_to_live
+            if collapse_key is None:
+                raise GCMNoCollapseKeyException("collapse_key is required when time_to_live is provided")
+
+        if collapse_key:
+            payload['collapse_key'] = collapse_key
+
+        if is_json:
+            # Serialize once, at the end, so all keys above are included.
+            payload = json.dumps(payload)
+
+        return payload
+
+    def make_request(self, data, is_json=True):
+        """
+        Makes a HTTP request to GCM servers with the constructed payload
+
+        :param data: return value from construct_payload method
+        :raises GCMMalformedJsonException: if malformed JSON request found
+        :raises GCMAuthenticationException: if there was a problem with authentication, invalid api key
+        :raises GCMConnectionException: if GCM is screwed
+        """
+
+        headers = {
+            'Authorization': 'key=%s' % self.api_key,
+        }
+        # Default Content-Type is defaulted to application/x-www-form-urlencoded;charset=UTF-8
+        if is_json:
+            headers['Content-Type'] = 'application/json'
+
+        if not is_json:
+            # Plaintext payloads are sent as form-encoded key/value pairs.
+            data = urllib.urlencode(data)
+        req = urllib2.Request(self.url, data, headers)
+
+        try:
+            response = urllib2.urlopen(req).read()
+        except urllib2.HTTPError as e:
+            # Map GCM HTTP status codes onto library exceptions.
+            if e.code == 
400:
+                raise GCMMalformedJsonException("The request could not be parsed as JSON")
+            elif e.code == 401:
+                raise GCMAuthenticationException("There was an error authenticating the sender account")
+            elif e.code == 503:
+                raise GCMUnavailableException("GCM service is unavailable")
+            else:
+                # Any other HTTP status is reported as a service error.
+                error = "GCM service error: %d" % e.code
+                raise GCMUnavailableException(error)
+        except urllib2.URLError as e:
+            raise GCMConnectionException("There was an internal error in the GCM server while trying to process the request")
+
+        if is_json:
+            response = json.loads(response)
+        return response
+
+    def raise_error(self, error):
+        # Translate a GCM error string from the response body into the
+        # corresponding library exception. Unknown error strings fall
+        # through without raising.
+        if error == 'InvalidRegistration':
+            raise GCMInvalidRegistrationException("Registration ID is invalid")
+        elif error == 'Unavailable':
+            # Plain-text requests will never return Unavailable as the error code.
+            # http://developer.android.com/guide/google/gcm/gcm.html#error_codes
+            raise GCMUnavailableException("Server unavailable. Resent the message")
+        elif error == 'NotRegistered':
+            raise GCMNotRegisteredException("Registration id is not valid anymore")
+        elif error == 'MismatchSenderId':
+            raise GCMMismatchSenderIdException("A Registration ID is tied to a certain group of senders")
+        elif error == 'MessageTooBig':
+            raise GCMMessageTooBigException("Message can't exceed 4096 bytes")
+
+    def handle_plaintext_response(self, response):
+        # Parse a plaintext (form-encoded) GCM response. Delegates error
+        # lines to raise_error(); otherwise returns the canonical
+        # registration id when the server sent one, else None.
+
+        # Split response by line
+        response_lines = response.strip().split('\n')
+        # Split the first line by =
+        key, value = response_lines[0].split('=')
+        if key == 'Error':
+            self.raise_error(value)
+        else:
+            if len(response_lines) == 2:
+                # Second line, when present, is 'registration_id=<canonical id>'.
+                return response_lines[1].split('=')[1]
+            return
+
+    def handle_json_response(self, response, registration_ids):
+        # Summarize a parsed JSON response as {'errors': ..., 'canonical': ...},
+        # omitting either key when the corresponding grouping is empty.
+        errors = group_response(response, registration_ids, 'error')
+        canonical = group_response(response, registration_ids, 'registration_id')
+
+        info = {}
+        if errors:
+            info.update({'errors': errors})
+        if canonical:
+            info.update({'canonical': canonical})
+
+        return info
+
+    def 
extract_unsent_reg_ids(self, info): + if 'errors' in info and 'Unavailable' in info['errors']: + return info['errors']['Unavailable'] + return [] + + def plaintext_request(self, registration_id, data=None, collapse_key=None, + delay_while_idle=False, time_to_live=None, retries=5): + """ + Makes a plaintext request to GCM servers + + :param registration_id: string of the registration id + :param data: dict mapping of key-value pairs of messages + :return dict of response body from Google including multicast_id, success, failure, canonical_ids, etc + :raises GCMMissingRegistrationException: if registration_id is not provided + """ + + if not registration_id: + raise GCMMissingRegistrationException("Missing registration_id") + + payload = self.construct_payload( + registration_id, data, collapse_key, + delay_while_idle, time_to_live, False + ) + + attempt = 0 + backoff = self.BACKOFF_INITIAL_DELAY + for attempt in range(retries): + try: + response = self.make_request(payload, is_json=False) + return self.handle_plaintext_response(response) + except GCMUnavailableException: + sleep_time = backoff / 2 + random.randrange(backoff) + time.sleep(float(sleep_time) / 1000) + if 2 * backoff < self.MAX_BACKOFF_DELAY: + backoff *= 2 + + raise IOError("Could not make request after %d attempts" % attempt) + + def json_request(self, registration_ids, data=None, collapse_key=None, + delay_while_idle=False, time_to_live=None, retries=5): + """ + Makes a JSON request to GCM servers + + :param registration_ids: list of the registration ids + :param data: dict mapping of key-value pairs of messages + :return dict of response body from Google including multicast_id, success, failure, canonical_ids, etc + :raises GCMMissingRegistrationException: if the list of registration_ids exceeds 1000 items + """ + + if not registration_ids: + raise GCMMissingRegistrationException("Missing registration_ids") + if len(registration_ids) > 1000: + raise GCMTooManyRegIdsException("Exceded number of 
registration_ids") + + attempt = 0 + backoff = self.BACKOFF_INITIAL_DELAY + for attempt in range(retries): + payload = self.construct_payload( + registration_ids, data, collapse_key, + delay_while_idle, time_to_live + ) + response = self.make_request(payload, is_json=True) + info = self.handle_json_response(response, registration_ids) + + unsent_reg_ids = self.extract_unsent_reg_ids(info) + if unsent_reg_ids: + registration_ids = unsent_reg_ids + sleep_time = backoff / 2 + random.randrange(backoff) + time.sleep(float(sleep_time) / 1000) + if 2 * backoff < self.MAX_BACKOFF_DELAY: + backoff *= 2 + else: + break + + return info diff --git a/app-engine-app/gcm/test.py b/app-engine-app/gcm/test.py new file mode 100644 index 0000000..b1cb8c3 --- /dev/null +++ b/app-engine-app/gcm/test.py @@ -0,0 +1,222 @@ +import unittest +from gcm import * +import json +from mock import MagicMock +import time + + +# Helper method to return a different value for each call. +def create_side_effect(returns): + def side_effect(*args, **kwargs): + result = returns.pop(0) + if isinstance(result, Exception): + raise result + return result + return side_effect + + +class GCMTest(unittest.TestCase): + + def setUp(self): + self.gcm = GCM('123api') + self.data = { + 'param1': '1', + 'param2': '2' + } + self.response = { + 'results': [ + {'error': 'InvalidRegistration'}, + {'error': 'NotRegistered'}, + {'message_id': '54749687859', 'registration_id': '6969'}, + {'message_id': '5456453453'}, + {'error': 'NotRegistered'}, + {'message_id': '123456778', 'registration_id': '07645'}, + ] + } + self.mock_response_1 = { + 'results': [ + {'error': 'Unavailable'}, + {'error': 'Unavailable'}, + ] + } + self.mock_response_2 = { + 'results': [ + {'error': 'Unavailable'}, + {'message_id': '1234'} + ] + } + self.mock_response_3 = { + 'results': [ + {'message_id': '5678'}, + {'message_id': '1234'} + ] + } + time.sleep = MagicMock() + + def test_construct_payload(self): + res = self.gcm.construct_payload( + 
registration_ids=['1', '2'], data=self.data, collapse_key='foo', + delay_while_idle=True, time_to_live=3600, is_json=True + ) + payload = json.loads(res) + for arg in ['registration_ids', 'data', 'collapse_key', 'delay_while_idle', 'time_to_live']: + self.assertIn(arg, payload) + + def test_require_collapse_key(self): + with self.assertRaises(GCMNoCollapseKeyException): + self.gcm.construct_payload(registration_ids='1234', data=self.data, time_to_live=3600) + + def test_json_payload(self): + reg_ids = ['12', '145', '56'] + json_payload = self.gcm.construct_payload(registration_ids=reg_ids, data=self.data) + payload = json.loads(json_payload) + + self.assertIn('registration_ids', payload) + self.assertEqual(payload['data'], self.data) + self.assertEqual(payload['registration_ids'], reg_ids) + + def test_plaintext_payload(self): + result = self.gcm.construct_payload(registration_ids='1234', data=self.data, is_json=False) + + self.assertIn('registration_id', result) + self.assertIn('data.param1', result) + self.assertIn('data.param2', result) + + def test_limit_reg_ids(self): + reg_ids = range(1003) + self.assertTrue(len(reg_ids) > 1000) + with self.assertRaises(GCMTooManyRegIdsException): + self.gcm.json_request(registration_ids=reg_ids, data=self.data) + + def test_missing_reg_id(self): + with self.assertRaises(GCMMissingRegistrationException): + self.gcm.json_request(registration_ids=[], data=self.data) + + with self.assertRaises(GCMMissingRegistrationException): + self.gcm.plaintext_request(registration_id=None, data=self.data) + + def test_invalid_ttl(self): + with self.assertRaises(GCMInvalidTtlException): + self.gcm.construct_payload( + registration_ids='1234', data=self.data, is_json=False, time_to_live=5000000 + ) + + with self.assertRaises(GCMInvalidTtlException): + self.gcm.construct_payload( + registration_ids='1234', data=self.data, is_json=False, time_to_live=-10 + ) + + def test_group_response(self): + ids = ['123', '345', '678', '999', '1919', '5443'] 
+ error_group = group_response(self.response, ids, 'error') + self.assertEqual(error_group['NotRegistered'], ['345', '1919']) + self.assertEqual(error_group['InvalidRegistration'], ['123']) + + canonical_group = group_response(self.response, ids, 'registration_id') + self.assertEqual(canonical_group['678'], '6969') + self.assertEqual(canonical_group['5443'], '07645') + + def test_group_response_no_error(self): + ids = ['123', '345', '678'] + response = { + 'results': [ + {'message_id': '346547676'}, + {'message_id': '54749687859'}, + {'message_id': '5456453453'}, + ] + } + error_group = group_response(response, ids, 'error') + canonical_group = group_response(response, ids, 'registration_id') + self.assertEqual(error_group, None) + self.assertEqual(canonical_group, None) + + def test_handle_json_response(self): + ids = ['123', '345', '678', '999', '1919', '5443'] + res = self.gcm.handle_json_response(self.response, ids) + + self.assertIn('errors', res) + self.assertIn('NotRegistered', res['errors']) + self.assertIn('canonical', res) + self.assertIn('678', res['canonical']) + + def test_handle_json_response_no_error(self): + ids = ['123', '345', '678'] + response = { + 'results': [ + {'message_id': '346547676'}, + {'message_id': '54749687859'}, + {'message_id': '5456453453'}, + ] + } + res = self.gcm.handle_json_response(response, ids) + + self.assertNotIn('errors', res) + self.assertNotIn('canonical', res) + + def test_handle_plaintext_response(self): + response = 'Error=NotRegistered' + with self.assertRaises(GCMNotRegisteredException): + self.gcm.handle_plaintext_response(response) + + response = 'id=23436576' + res = self.gcm.handle_plaintext_response(response) + self.assertIsNone(res) + + response = 'id=23436576\nregistration_id=3456' + res = self.gcm.handle_plaintext_response(response) + self.assertEqual(res, '3456') + + def test_retry_plaintext_request_ok(self): + returns = [GCMUnavailableException(), GCMUnavailableException(), 'id=123456789'] + + 
self.gcm.make_request = MagicMock(side_effect=create_side_effect(returns)) + res = self.gcm.plaintext_request(registration_id='1234', data=self.data) + + self.assertIsNone(res) + self.assertEqual(self.gcm.make_request.call_count, 3) + + def test_retry_plaintext_request_fail(self): + returns = [GCMUnavailableException(), GCMUnavailableException(), GCMUnavailableException()] + + self.gcm.make_request = MagicMock(side_effect=create_side_effect(returns)) + with self.assertRaises(IOError): + self.gcm.plaintext_request(registration_id='1234', data=self.data, retries=2) + + self.assertEqual(self.gcm.make_request.call_count, 2) + + def test_retry_json_request_ok(self): + returns = [self.mock_response_1, self.mock_response_2, self.mock_response_3] + + self.gcm.make_request = MagicMock(side_effect=create_side_effect(returns)) + res = self.gcm.json_request(registration_ids=['1', '2'], data=self.data) + + self.assertEqual(self.gcm.make_request.call_count, 3) + self.assertNotIn('errors', res) + + def test_retry_json_request_fail(self): + returns = [self.mock_response_1, self.mock_response_2, self.mock_response_3] + + self.gcm.make_request = MagicMock(side_effect=create_side_effect(returns)) + res = self.gcm.json_request(registration_ids=['1', '2'], data=self.data, retries=2) + + self.assertEqual(self.gcm.make_request.call_count, 2) + self.assertIn('Unavailable', res['errors']) + self.assertEqual(res['errors']['Unavailable'][0], '1') + + def test_retry_exponential_backoff(self): + returns = [GCMUnavailableException(), GCMUnavailableException(), 'id=123456789'] + + self.gcm.make_request = MagicMock(side_effect=create_side_effect(returns)) + self.gcm.plaintext_request(registration_id='1234', data=self.data) + + # time.sleep is actually mock object. 
+ self.assertEqual(time.sleep.call_count, 2) + backoff = self.gcm.BACKOFF_INITIAL_DELAY + for arg in time.sleep.call_args_list: + sleep_time = int(arg[0][0] * 1000) + self.assertTrue(backoff / 2 <= sleep_time <= backoff * 3 / 2) + if 2 * backoff < self.gcm.MAX_BACKOFF_DELAY: + backoff *= 2 + +if __name__ == '__main__': + unittest.main()