From e0cd8b8e789d5ed12e838b5c028cd4f1c1c10d16 Mon Sep 17 00:00:00 2001
From: Hugo Briand
Date: Wed, 14 Oct 2015 15:34:48 +0200
Subject: [PATCH] Implement async connection

---
 CHANGELOG.md | 35 ++++
 Readme.md | 79 +++-----
 setup.py | 7 +-
 tc_aws/__init__.py | 32 +--
 tc_aws/aws/__init__.py | 5 +
 tc_aws/aws/bucket.py | 108 ++++++++++
 tc_aws/aws/connection.py | 25 ---
 tc_aws/aws/storage.py | 283 +++++++++++++++++++++------
 tc_aws/loaders/__init__.py | 59 ++++--
 tc_aws/loaders/presigning_loader.py | 51 ++---
 tc_aws/loaders/s3_loader.py | 49 ++---
 tc_aws/result_storages/__init__.py | 7 +-
 tc_aws/result_storages/s3_storage.py | 47 ++++-
 tc_aws/storages/__init__.py | 6 +-
 tc_aws/storages/s3_storage.py | 95 +++------
 vows/loader_vows.py | 12 +-
 vows/presigning_loader_vows.py | 25 +--
 vows/result_storage_vows.py | 40 ++--
 vows/s3_loader_vows.py | 16 +-
 vows/storage_vows.py | 179 +++++++++--------
 20 files changed, 745 insertions(+), 415 deletions(-)
 create mode 100644 CHANGELOG.md
 create mode 100644 tc_aws/aws/bucket.py
 delete mode 100644 tc_aws/aws/connection.py

diff --git a/CHANGELOG.md b/CHANGELOG.md
new file mode 100644
index 0000000..16d6270
--- /dev/null
+++ b/CHANGELOG.md
@@ -0,0 +1,35 @@
+# Thumbor Community - AWS: Changelog
+
+This file describes the changes in each version, notably the BC breaks that may have occurred.
+
+## 2.0 - Async Connection
+
+Switched the connection layer from boto 2 to botocore in order to support Tornado async connections. This update introduces some major BC breaks.
+
+* [BC BREAK] Authentication is now handled by botocore directly; the following configuration values are no longer used:
+    * AWS_ROLE_BASED_CONNECTION
+    * AWS_ACCESS_KEY
+    * AWS_SECRET_KEY
+    * BOTO_CONFIG
+  You'll need to use boto's configuration file directly to handle your server's authentication to S3, or use a role-based connection. See
+* [BC BREAK] A new option has been added to configure the AWS region, named ``TC_AWS_REGION``; it defaults to ``eu-west-1``
+* [BC BREAK] Option names have been made uniform as well; here is the mapping from old to new option names:
+
+| Old option | New option |
+| ---------- | ---------- |
+| STORAGE_BUCKET | TC_AWS_STORAGE_BUCKET |
+| RESULT_STORAGE_BUCKET | TC_AWS_RESULT_STORAGE_BUCKET |
+| S3_LOADER_BUCKET | TC_AWS_LOADER_BUCKET |
+| S3_LOADER_ROOT_PATH | TC_AWS_LOADER_ROOT_PATH |
+| STORAGE_AWS_STORAGE_ROOT_PATH | TC_AWS_STORAGE_ROOT_PATH |
+| RESULT_STORAGE_AWS_STORAGE_ROOT_PATH | TC_AWS_RESULT_STORAGE_ROOT_PATH |
+| S3_STORAGE_SSE | TC_AWS_STORAGE_SSE |
+| S3_STORAGE_RRS | TC_AWS_STORAGE_RRS |
+| S3_ALLOWED_BUCKETS | TC_AWS_ALLOWED_BUCKETS |
+| RESULT_STORAGE_S3_STORE_METADATA | TC_AWS_STORE_METADATA |
+| AWS_ENABLE_HTTP_LOADER | TC_AWS_ENABLE_HTTP_LOADER |
+| AWS_ACCESS_KEY | N/A |
+| AWS_SECRET_KEY | N/A |
+| AWS_ROLE_BASED_CONNECTION | N/A |
+| BOTO_CONFIG | N/A |
+
diff --git a/Readme.md b/Readme.md
index ab190cb..99eb1f6
--- a/Readme.md
+++ b/Readme.md
@@ -15,7 +15,7 @@ Installation
 Origin story
 ------------
 
-This is a fork of https://github.com/willtrking/thumbor_aws ; as this repository was not maintained anymore,
+This is a fork of https://github.com/willtrking/thumbor_aws; as this repository was not maintained anymore,
 we decided to maintain it under the ``thumbor-community`` organisation.
Features @@ -28,61 +28,32 @@ Features Additional Configuration values used: - # the Amazon Web Services access key to use - AWS_ACCESS_KEY = "" - # the Amazon Web Services secret of the used access key - AWS_SECRET_KEY = "" - - # Alternatively (recommended), use Role-based connection - # http://docs.aws.amazon.com/IAM/latest/UserGuide/roles-assume-role.html - AWS_ROLE_BASED_CONNECTION = True or False (Default: False) - - - # configuration settings specific for the s3_loader - - # list of allowed buckets for the s3_loader - S3_ALLOWED_BUCKETS = [] - - # alternatively: set a fixed bucket, no need for bucket name in Image-Path - S3_LOADER_BUCKET = 'thumbor-images' - # A root path for loading images, useful if you share the bucket - S3_LOADER_ROOT_PATH = 'source-images' - - # configuration settings specific for the storages - - STORAGE_BUCKET = 'thumbor-images' - # A root path for the storage, useful if you share a bucket for loading / storing - STORAGE_AWS_STORAGE_ROOT_PATH = 'storage' - - RESULT_STORAGE_BUCKET = 'thumbor-images' - RESULT_STORAGE_AWS_STORAGE_ROOT_PATH = 'result' - # It stores metadata like content-type on the result object - RESULT_STORAGE_S3_STORE_METADATA = False - - STORAGE_EXPIRATION_SECONDS - + TC_AWS_REGION='eu-west-1' # AWS Region + + TC_AWS_STORAGE_BUCKET='' # S3 bucket for Storage + TC_AWS_STORAGE_ROOT_PATH='' # S3 path prefix for Storage bucket + + TC_AWS_LOADER_BUCKET='' #S3 bucket for loader + TC_AWS_LOADER_ROOT_PATH='' # S3 path prefix for Loader bucket + + TC_AWS_RESULT_STORAGE_BUCKET='' # S3 bucket for result Storage + TC_AWS_RESULT_STORAGE_ROOT_PATH='' # S3 path prefix for Result storage bucket + # put data into S3 using the Server Side Encryption functionality to # encrypt data at rest in S3 # https://aws.amazon.com/about-aws/whats-new/2011/10/04/amazon-s3-announces-server-side-encryption-support/ - S3_STORAGE_SSE = True or False (Default: False) - + TC_AWS_STORAGE_SSE=False # put data into S3 with Reduced Redundancy # https://aws.amazon.com/about-aws/whats-new/2010/05/19/announcing-amazon-s3-reduced-redundancy-storage/ - S3_STORAGE_RRS = True or False (Default: False) - - - #Optional config value to enable the HTTP loader - #This would allow you to load watermarks in over your images dynamically through a URI - #E.g. - #http://your-thumbor.com/unsafe/filters:watermark(http://example.com/watermark.png,0,0,50)/s3_bucket/photo.jpg - AWS_ENABLE_HTTP_LOADER = True or False (Default: False) - - - # Optional additional configuration for the Boto-Client used to access S3. - # see http://boto.readthedocs.org/en/latest/ref/s3.html?highlight=boto.s3.connection.s3connection#boto.s3.connection.S3Connection - # for all available config options - # Hint: If you are using S3 Frankfurt, you have to set the host to "s3.eu-central-1.amazonaws.com". - BOTO_CONFIG = { - 'host': 'fakes3.local.dev', - 'is_secure': False - } + TC_AWS_STORAGE_RRS=False # S3 redundancy + + + # Enable HTTP Loader as well? + # This would allow you to load watermarks in over your images dynamically through a URI + # E.g. 
+ # http://your-thumbor.com/unsafe/filters:watermark(http://example.com/watermark.png,0,0,50)/s3_bucket/photo.jpg + TC_AWS_ENABLE_HTTP_LOADER=False + + TC_AWS_ALLOWED_BUCKETS=False # List of allowed bucket to be requested + TC_AWS_STORE_METADATA=False # Store result with metadata (for instance content-type) + diff --git a/setup.py b/setup.py index e96f669..27da39d 100644 --- a/setup.py +++ b/setup.py @@ -4,7 +4,7 @@ setup( name='tc_aws', - version='1.3.2', + version='2.0.0', description='Thumbor AWS extensions', author='Thumbor-Community & William King', author_email='willtrking@gmail.com', @@ -13,14 +13,15 @@ packages=find_packages(), install_requires=[ 'python-dateutil', - 'thumbor', - 'boto' + 'thumbor>=5.2', + 'tornado-botocore', ], extras_require={ 'tests': [ 'pyvows', 'coverage', 'tornado_pyvows', + 'boto', 'moto', 'mock', ], diff --git a/tc_aws/__init__.py b/tc_aws/__init__.py index bc82c8a..7d3e2f3 100644 --- a/tc_aws/__init__.py +++ b/tc_aws/__init__.py @@ -1,24 +1,26 @@ # coding: utf-8 +# Copyright (c) 2015, thumbor-community +# Use of this source code is governed by the MIT license that can be +# found in the LICENSE file. + from thumbor.config import Config -Config.define('STORAGE_BUCKET', 'thumbor-images', 'S3 bucket for Storage', 'S3') -Config.define('RESULT_STORAGE_BUCKET', 'thumbor-result', 'S3 bucket for result Storage', 'S3') -Config.define('S3_LOADER_BUCKET', None, 'S3 bucket for loader', 'S3') +Config.define('TC_AWS_REGION', 'eu-west-1', 'S3 region', 'S3') + +Config.define('TC_AWS_STORAGE_BUCKET', None, 'S3 bucket for Storage', 'S3') +Config.define('TC_AWS_STORAGE_ROOT_PATH', '', 'S3 path prefix for Storage bucket', 'S3') -Config.define('S3_LOADER_ROOT_PATH', '', 'S3 path prefix for Loader bucket', 'S3') -Config.define('STORAGE_AWS_STORAGE_ROOT_PATH', '', 'S3 path prefix for Storage bucket', 'S3') -Config.define('RESULT_STORAGE_AWS_STORAGE_ROOT_PATH', '', 'S3 path prefix for Result storage bucket', 'S3') +Config.define('TC_AWS_LOADER_BUCKET', None, 'S3 bucket for loader', 'S3') +Config.define('TC_AWS_LOADER_ROOT_PATH', '', 'S3 path prefix for Loader bucket', 'S3') -Config.define('STORAGE_EXPIRATION_SECONDS', 3600, 'S3 expiration', 'S3') +Config.define('TC_AWS_RESULT_STORAGE_BUCKET', None, 'S3 bucket for result Storage', 'S3') +Config.define('TC_AWS_RESULT_STORAGE_ROOT_PATH', '', 'S3 path prefix for Result storage bucket', 'S3') -Config.define('S3_STORAGE_SSE', False, 'S3 encriptipon key', 'S3') -Config.define('S3_STORAGE_RRS', False, 'S3 redundency', 'S3') -Config.define('S3_ALLOWED_BUCKETS', False, 'List of allowed bucket to be requeted', 'S3') -Config.define('RESULT_STORAGE_S3_STORE_METADATA', False, 'S3 store result with metadata', 'S3') +Config.define('TC_AWS_STORAGE_SSE', False, 'S3 encryption', 'S3') +Config.define('TC_AWS_STORAGE_RRS', False, 'S3 redundancy', 'S3') -Config.define('AWS_ACCESS_KEY', None, 'AWS Access key, if None use environment AWS_ACCESS_KEY_ID', 'AWS') -Config.define('AWS_SECRET_KEY', None, 'AWS Secret key, if None use environment AWS_SECRET_ACCESS_KEY', 'AWS') -Config.define('AWS_ROLE_BASED_CONNECTION', False, 'EC2 instance can use role that does not require AWS_ACCESS_KEY see http://docs.aws.amazon.com/IAM/latest/UserGuide/roles-usingrole-ec2instance.html', 'AWS') +Config.define('TC_AWS_ENABLE_HTTP_LOADER', False, 'Enable HTTP Loader as well?', 'S3') +Config.define('TC_AWS_ALLOWED_BUCKETS', False, 'List of allowed buckets to be requested', 'S3') +Config.define('TC_AWS_STORE_METADATA', False, 'S3 store result with metadata', 'S3') 
-Config.define('BOTO_CONFIG', None, 'Additional Boto options for configuring S3 access (see http://boto.readthedocs.org/en/latest/ref/s3.html?highlight=boto.s3.connection.s3connection#boto.s3.connection.S3Connection)') diff --git a/tc_aws/aws/__init__.py b/tc_aws/aws/__init__.py index 57d631c..dcc2026 100644 --- a/tc_aws/aws/__init__.py +++ b/tc_aws/aws/__init__.py @@ -1 +1,6 @@ # coding: utf-8 + +# Copyright (c) 2015, thumbor-community +# Use of this source code is governed by the MIT license that can be +# found in the LICENSE file. + diff --git a/tc_aws/aws/bucket.py b/tc_aws/aws/bucket.py new file mode 100644 index 0000000..9f50867 --- /dev/null +++ b/tc_aws/aws/bucket.py @@ -0,0 +1,108 @@ +# coding: utf-8 + +# Copyright (c) 2015, thumbor-community +# Use of this source code is governed by the MIT license that can be +# found in the LICENSE file. + +import botocore.session + +from tornado_botocore import Botocore +from tornado.concurrent import return_future + +class Bucket(object): + """ + This handles all communication with AWS API + """ + _bucket = None + _region = None + _local_cache = dict() + + def __init__(self, bucket, region): + """ + Constructor + :param string bucket: The bucket name + :param string region: The AWS API region to use + :return: The created bucket + """ + self._bucket = bucket + self._region = region + + @return_future + def get(self, path, callback=None): + """ + Returns object at given path + :param string path: Path or 'key' to retrieve AWS object + :param callable callback: Callback function for once the retrieval is done + """ + session = Botocore(service='s3', region_name=self._region, operation='GetObject') + session.call( + callback=callback, + Bucket=self._bucket, + Key=path, + ) + + @return_future + def get_url(self, path, method='GET', expiry=3600, callback=None): + """ + Generates the presigned url for given key & methods + :param string path: Path or 'key' for requested object + :param string method: Method for requested URL + :param int expiry: URL validity time + :param callable callback: Called function once done + """ + session = botocore.session.get_session() + client = session.create_client('s3', region_name=self._region) + + url = client.generate_presigned_url( + ClientMethod='get_object', + Params={ + 'Bucket': self._bucket, + 'Key': path, + }, + ExpiresIn=expiry, + HttpMethod=method, + ) + + callback(url) + + @return_future + def put(self, path, data, metadata={}, reduced_redundancy=False, encrypt_key=False, callback=None): + """ + Stores data at given path + :param string path: Path or 'key' for created/updated object + :param bytes data: Data to write + :param dict metadata: Metadata to store with this data + :param bool reduced_redundancy: Whether to reduce storage redundancy or not? + :param bool encrypt_key: Encrypt data? 
+ :param callable callback: Called function once done + """ + storage_class = 'REDUCED_REDUNDANCY' if reduced_redundancy else 'STANDARD' + + args = dict( + callback=callback, + Bucket=self._bucket, + Key=path, + Body=data, + Metadata=metadata, + StorageClass=storage_class, + ) + + if encrypt_key: + args['ServerSideEncryption'] = 'AES256' + + session = Botocore(service='s3', region_name=self._region, operation='PutObject') + session.call(**args) + + @return_future + def delete(self, path, callback=None): + """ + Deletes key at given path + :param string path: Path or 'key' to delete + :param callable callback: Called function once done + """ + session = Botocore(service='s3', region_name=self._region, operation='DeleteObject') + session.call( + callback=callback, + Bucket=self._bucket, + Key=path, + ) diff --git a/tc_aws/aws/connection.py b/tc_aws/aws/connection.py deleted file mode 100644 index acd701b..0000000 --- a/tc_aws/aws/connection.py +++ /dev/null @@ -1,25 +0,0 @@ -# coding: utf-8 - -from boto.s3.connection import S3Connection - -connection = None - - -def get_connection(context): - global connection - - if connection is None: - boto_opts = {} - - if context.config.AWS_ROLE_BASED_CONNECTION is False: - boto_opts.update({ - 'aws_access_key_id': context.config.AWS_ACCESS_KEY, - 'aws_secret_access_key': context.config.AWS_SECRET_KEY, - }) - - if context.config.BOTO_CONFIG: - boto_opts.update(context.config.BOTO_CONFIG) - - connection = S3Connection(**boto_opts) - - return connection diff --git a/tc_aws/aws/storage.py b/tc_aws/aws/storage.py index f18314c..a77998e 100644 --- a/tc_aws/aws/storage.py +++ b/tc_aws/aws/storage.py @@ -1,101 +1,268 @@ # coding: utf-8 -import calendar +# Copyright (c) 2015, thumbor-community +# Use of this source code is governed by the MIT license that can be +# found in the LICENSE file. -from os.path import join - -from datetime import datetime, timedelta +from json import loads, dumps +from os.path import join, splitext +from datetime import datetime +from dateutil.tz import tzutc +from tornado.concurrent import return_future from thumbor.utils import logger -from boto.s3.bucket import Bucket -from boto.s3.key import Key -from dateutil.parser import parse as parse_ts - -from connection import get_connection - +from .bucket import Bucket class AwsStorage(): - + """ + Base storage class + """ @property def is_auto_webp(self): + """ + Determines based on context whether we automatically use webp or not + :return: Use WebP? 
+ :rtype: bool + """ return self.context.config.AUTO_WEBP and self.context.request.accepts_webp + @property + def storage(self): + """ + Instantiates bucket based on configuration + :return: The bucket + :rtype: Bucket + """ + return Bucket(self._get_config('BUCKET'), self.context.config.get('TC_AWS_REGION')) + def __init__(self, context, config_prefix): + """ + Constructor + :param Context context: An instance of thumbor's context + :param string config_prefix: Prefix used to load configuration values + """ self.config_prefix = config_prefix self.context = context - self.storage = self.__get_s3_bucket() - def _get_config(self, config_key, default=None): - return getattr(self.context.config, '%s_%s' % (self.config_prefix, config_key)) + @return_future + def get(self, path, callback): + """ + Gets data at path + :param string path: Path for data + :param callable callback: Callback function for once the retrieval is done + """ + file_abspath = self._normalize_path(path) - def __get_s3_bucket(self): - return Bucket( - connection=get_connection(self.context), - name=self._get_config('BUCKET') - ) + def return_data(file_key): + if not file_key or self._get_error(file_key) or self.is_expired(file_key): + logger.warn("[AwsStorage] s3 key not found at %s" % file_abspath) + callback(None) + else: + callback(file_key['Body'].read()) + + self.storage.get(file_abspath, callback=return_data) def set(self, bytes, abspath): - file_key = Key(self.storage) - file_key.key = abspath + """ + Stores data at given path + :param bytes bytes: Data to store + :param string abspath: Path to store the data at + :return: Path where the data is stored + :rtype: string + """ + metadata = {} - if self.config_prefix is 'RESULT_STORAGE' and self._get_config('S3_STORE_METADATA'): - for k, v in self.context.headers.iteritems(): - file_key.set_metadata(k, v) + if self.config_prefix is 'TC_AWS_RESULT_STORAGE' and self.context.config.get('TC_AWS_STORE_METADATA'): + metadata = self.context.headers - file_key.set_contents_from_string( + self.storage.put( + abspath, bytes, - encrypt_key=self.context.config.get('S3_STORAGE_SSE', ''), # TODO: fix config prefix - reduced_redundancy=self.context.config.get('S3_STORAGE_RRS', '') # TODO: fix config prefix + metadata=metadata, + reduced_redundancy=self.context.config.get('TC_AWS_STORAGE_RRS', False), + encrypt_key=self.context.config.get('TC_AWS_STORAGE_SSE', False), + callback=self._handle_error, ) - def get(self, path): - file_abspath = self.normalize_path(path) + return abspath - file_key = self.storage.get_key(file_abspath) + def remove(self, path): + """ + Deletes data at path + :param string path: Path to delete + :return: Whether deletion is successful or not + :rtype: bool + """ + yield self.storage.delete(path) + return - if not file_key or self.is_expired(file_key): - logger.debug("[STORAGE] s3 key not found at %s" % file_abspath) - return None - return file_key.read() + @return_future + def exists(self, path, callback): + """ + Tells if data exists at given path + :param string path: Path to check + :param callable callback: Callback function for once the check is done + """ + file_abspath = self._normalize_path(path) - def normalize_path(self, path): - path = path.lstrip('/') # Remove leading '/' - path_segments = [self._get_config('AWS_STORAGE_ROOT_PATH'), path] + def return_data(file_key): + if not file_key or self._get_error(file_key): + callback(False) + else: + callback(True) - if self.is_auto_webp: - path_segments.append("webp") - - return join(*path_segments) + 
self.storage.get(file_abspath, callback=return_data) def is_expired(self, key): - if key: - expire_in_seconds = self._get_config('EXPIRATION_SECONDS') + """ + Tells whether key has expired + :param string key: Path to check + :return: Whether it is expired or not + :rtype: bool + """ + if key and not self._get_error(key): + expire_in_seconds = self.context.config.get('STORAGE_EXPIRATION_SECONDS', 3600) # Never expire if expire_in_seconds is None or expire_in_seconds == 0: return False - timediff = datetime.now() - self.utc_to_local(parse_ts(key.last_modified)) - return timediff.seconds > self._get_config('EXPIRATION_SECONDS') + timediff = datetime.now(tzutc()) - key['LastModified'] + + return timediff.seconds > self.context.config.get('STORAGE_EXPIRATION_SECONDS', 3600) else: #If our key is bad just say we're expired return True - def last_updated(self): + @return_future + def last_updated(self, callback): + """ + Tells when the image has last been updated + :param callable callback: Callback function for once the retrieval is done + """ path = self.context.request.url - file_abspath = self.normalize_path(path) - file_key = self.storage.get_key(file_abspath) + file_abspath = self._normalize_path(path) + + def on_file_fetched(file): + if not file or self._get_error(file) or self.is_expired(file): + logger.warn("[AwsStorage] s3 key not found at %s" % file_abspath) + callback(None) + else: + callback(file['LastModified']) + + self.storage.get(file_abspath, callback=on_file_fetched) + + @return_future + def get_crypto(self, path, callback): + """ + Retrieves crypto data at path + :param string path: Path to search for crypto data + :param callable callback: Callback function for once the retrieval is done + """ + file_abspath = self._normalize_path(path) + crypto_path = "%s.txt" % (splitext(file_abspath)[0]) + + def return_data(file_key): + if not file_key or self._get_error(file_key) or self.is_expired(file_key): + logger.warn("[STORAGE] s3 key not found at %s" % crypto_path) + callback(None) + else: + callback(file_key['Body']) + + self.storage.get(crypto_path, callback=return_data) - if not file_key or self.is_expired(file_key): - logger.debug("[RESULT_STORAGE] s3 key not found at %s" % file_abspath) - return None + def put_crypto(self, path): + """ + Stores crypto data at given path + :param string path: Path to store the data at + :return: Path where the crypto data is stored + """ + if not self.context.config.STORES_CRYPTO_KEY_FOR_EACH_IMAGE: + return - return self.utc_to_local(parse_ts(file_key.last_modified)) + if not self.context.server.security_key: + raise RuntimeError("STORES_CRYPTO_KEY_FOR_EACH_IMAGE can't be True if no SECURITY_KEY specified") + + file_abspath = self._normalize_path(path) + crypto_path = '%s.txt' % splitext(file_abspath)[0] + + self.set(self.context.server.security_key, crypto_path) + + return crypto_path + + @return_future + def get_detector_data(self, path, callback): + """ + Retrieves detector data from storage + :param string path: Path where the data is stored + :param callable callback: Callback function for once the retrieval is done + """ + file_abspath = self._normalize_path(path) + path = '%s.detectors.txt' % splitext(file_abspath)[0] + + def return_data(file_key): + if not file_key or self._get_error(file_key) or self.is_expired(file_key): + logger.warn("[AwsStorage] s3 key not found at %s" % path) + callback(None) + else: + callback(loads(file_key['Body'].read())) + + self.storage.get(path, callback=return_data) + + def put_detector_data(self, path, 
data): + """ + Stores detector data at given path + :param string path: Path to store the data at + :param bytes data: Data to store + :return: Path where the data is stored + :rtype: string + """ + file_abspath = self._normalize_path(path) + + path = '%s.detectors.txt' % splitext(file_abspath)[0] + + self.set(dumps(data), path) + + return path + + def _get_error(self, response): + """ + Returns error in response if it exists + :param dict response: AWS Response + :return: Error message if present, None otherwise + :rtype: string + """ + return response['Error']['Message'] if 'Error' in response else None + + def _handle_error(self, response): + """ + Logs error if necessary + :param dict response: AWS Response + """ + if self._get_error(response): + logger.warn("[STORAGE] error occured while storing data: %s" % self._get_error(response)) + + def _get_config(self, config_key, default=None): + """ + Retrieve specific config based on prefix + :param string config_key: Requested config + :param default: Default value if not found + :return: Resolved config value + """ + return getattr(self.context.config, '%s_%s' % (self.config_prefix, config_key)) + + def _normalize_path(self, path): + """ + Adapts path based on configuration (root_path for instance) + :param string path: Path to adapt + :return: Adapted path + :rtype: string + """ + path = path.lstrip('/') # Remove leading '/' + path_segments = [self._get_config('ROOT_PATH'), path] + + if self.is_auto_webp: + path_segments.append("webp") - def utc_to_local(self, utc_dt): - # get integer timestamp to avoid precision lost - timestamp = calendar.timegm(utc_dt.timetuple()) - local_dt = datetime.fromtimestamp(timestamp) - assert utc_dt.resolution >= timedelta(microseconds=1) - return local_dt.replace(microsecond=utc_dt.microsecond) + return join(*path_segments).lstrip('/') diff --git a/tc_aws/loaders/__init__.py b/tc_aws/loaders/__init__.py index 46353f8..8ffec4b 100644 --- a/tc_aws/loaders/__init__.py +++ b/tc_aws/loaders/__init__.py @@ -1,13 +1,24 @@ # coding: utf-8 + +# Copyright (c) 2015, thumbor-community +# Use of this source code is governed by the MIT license that can be +# found in the LICENSE file. 
+ __all__ = ['_get_bucket_and_key', '_get_bucket', '_get_key', '_normalize_url', '_validate_bucket', '_use_http_loader'] import urllib2 - def _get_bucket_and_key(context, url): + """ + Returns bucket and key from url + :param Context context: Thumbor's context + :param string url: The URL to parse + :return: A tuple with the bucket and the key detected + :rtype: tuple + """ url = urllib2.unquote(url) - bucket = context.config.get('S3_LOADER_BUCKET') + bucket = context.config.get('TC_AWS_LOADER_BUCKET') if bucket is None: bucket = _get_bucket(url) url = '/'.join(url.lstrip('/').split('/')[1:]) @@ -16,31 +27,55 @@ def _get_bucket_and_key(context, url): return bucket, key - def _get_bucket(url): + """ + Retrieves the bucket based on the URL + :param string url: URL to parse + :return: bucket name + :rtype: string + """ url_by_piece = url.lstrip("/").split("/") return url_by_piece[0] - def _get_key(path, context): - root_path = context.config.get('S3_LOADER_ROOT_PATH') + """ + Retrieves key from path + :param string path: Path to analyze + :param Context context: Thumbor's context + :return: Extracted key + :rtype: string + """ + root_path = context.config.get('TC_AWS_LOADER_ROOT_PATH') return '/'.join([root_path, path]) if root_path is not '' else path - def _normalize_url(url): """ - :param url: - :return: exactly the same url since we only use http loader if url stars with http prefix. + Normalizes given url + :param string url: URL to normalize + :return: exactly the same url since we only use http loader if url starts with http prefix. + :rtype: string """ return url - def _validate_bucket(context, bucket): - allowed_buckets = context.config.get('S3_ALLOWED_BUCKETS', default=None) + """ + Checks that bucket is allowed + :param Context context: Thumbor's context + :param string bucket: Bucket name + :return: Whether bucket is allowed or not + :rtype: bool + """ + allowed_buckets = context.config.get('TC_AWS_ALLOWED_BUCKETS', default=None) return not allowed_buckets or bucket in allowed_buckets - def _use_http_loader(context, url): - enable_http_loader = context.config.get('AWS_ENABLE_HTTP_LOADER', default=False) + """ + Should we use HTTP Loader with given path? Based on configuration as well. + :param Context context: Thumbor's context + :param string url: URL to analyze + :return: Whether we should use HTTP Loader or not + :rtype: bool + """ + enable_http_loader = context.config.get('TC_AWS_ENABLE_HTTP_LOADER', default=False) return enable_http_loader and url.startswith('http') diff --git a/tc_aws/loaders/presigning_loader.py b/tc_aws/loaders/presigning_loader.py index ae7057b..aa20109 100644 --- a/tc_aws/loaders/presigning_loader.py +++ b/tc_aws/loaders/presigning_loader.py @@ -1,42 +1,45 @@ # coding: utf-8 -from boto.s3.bucket import Bucket -from thumbor.utils import logger +# Copyright (c) 2015, thumbor-community +# Use of this source code is governed by the MIT license that can be +# found in the LICENSE file. + from tornado.concurrent import return_future import thumbor.loaders.http_loader as http_loader -from tc_aws.loaders import * -from tc_aws.aws.connection import get_connection - +from . 
import * +from ..aws.bucket import Bucket -def _generate_presigned_url(context, bucket, key): - connection = get_connection(context) - expiry = 60 * 60 - presigned_url = connection.generate_url( - expiry, - 'GET', - bucket, - key, - ) - return presigned_url +@return_future +def _generate_presigned_url(context, bucket, key, callback): + """ + Generates presigned URL + :param Context context: Thumbor's context + :param string bucket: Bucket name + :param string key: Path to get URL for + :param callable callback: Callback method once done + """ + Bucket(bucket, context.config.get('TC_AWS_REGION')).get_url(key, callback=callback) @return_future def load(context, url, callback): - load_sync(context, url, callback) - - -def load_sync(context, url, callback): + """ + Loads image + :param Context context: Thumbor's context + :param string url: Path to load + :param callable callback: Callback method once done + """ if _use_http_loader(context, url): - http_loader.load_sync( - context, url, callback, normalize_url_func=_normalize_url) + http_loader.load_sync(context, url, callback, normalize_url_func=_normalize_url) else: bucket, key = _get_bucket_and_key(context, url) if _validate_bucket(context, bucket): - presigned_url = _generate_presigned_url(context, bucket, key) - http_loader.load_sync( - context, presigned_url, callback, normalize_url_func=_normalize_url) + def on_url_generated(generated_url): + http_loader.load_sync(context, generated_url, callback, normalize_url_func=_normalize_url) + + _generate_presigned_url(context, bucket, key, on_url_generated) else: callback(None) diff --git a/tc_aws/loaders/s3_loader.py b/tc_aws/loaders/s3_loader.py index 4493d16..ecea60b 100644 --- a/tc_aws/loaders/s3_loader.py +++ b/tc_aws/loaders/s3_loader.py @@ -1,39 +1,40 @@ # coding: utf-8 -from boto.s3.bucket import Bucket +# Copyright (c) 2015, thumbor-community +# Use of this source code is governed by the MIT license that can be +# found in the LICENSE file. + from thumbor.utils import logger from tornado.concurrent import return_future import thumbor.loaders.http_loader as http_loader -from tc_aws.loaders import * -from tc_aws.aws.connection import get_connection - +from . 
import * +from ..aws.bucket import Bucket @return_future def load(context, url, callback): - load_sync(context, url, callback) - -def load_sync(context, url, callback): + """ + Loads image + :param Context context: Thumbor's context + :param string url: Path to load + :param callable callback: Callback method once done + """ if _use_http_loader(context, url): - return http_loader.load_sync(context, url, callback, normalize_url_func=_normalize_url) + http_loader.load_sync(context, url, callback, normalize_url_func=_normalize_url) + return bucket, key = _get_bucket_and_key(context, url) if _validate_bucket(context, bucket): - bucket_loader = Bucket( - connection=get_connection(context), - name=bucket - ) - file_key = None - - try: - file_key = bucket_loader.get_key(key) - except Exception, e: - logger.warn("ERROR retrieving image from S3 {0}: {1}".format(key, str(e))) - - if file_key: - callback(file_key.read()) - return - - callback(None) + bucket_loader = Bucket(bucket, context.config.get('TC_AWS_REGION')) + + def handle_data(file_key): + if not file_key or 'Error' in file_key: + logger.warn("ERROR retrieving image from S3 {0}: {1}".format(key, file_key['Error']['Message'])) + else: + callback(file_key['Body'].read()) + + bucket_loader.get(key, callback=handle_data) + else: + callback(None) diff --git a/tc_aws/result_storages/__init__.py b/tc_aws/result_storages/__init__.py index 576f56f..dcc2026 100644 --- a/tc_aws/result_storages/__init__.py +++ b/tc_aws/result_storages/__init__.py @@ -1 +1,6 @@ -# coding: utf-8 \ No newline at end of file +# coding: utf-8 + +# Copyright (c) 2015, thumbor-community +# Use of this source code is governed by the MIT license that can be +# found in the LICENSE file. + diff --git a/tc_aws/result_storages/s3_storage.py b/tc_aws/result_storages/s3_storage.py index ad2ec04..948a2d1 100644 --- a/tc_aws/result_storages/s3_storage.py +++ b/tc_aws/result_storages/s3_storage.py @@ -1,23 +1,58 @@ #coding: utf-8 -from thumbor.result_storages import BaseStorage +# Copyright (c) 2015, thumbor-community +# Use of this source code is governed by the MIT license that can be +# found in the LICENSE file. 
-from ..aws.storage import AwsStorage +from tornado.concurrent import return_future +from thumbor.result_storages import BaseStorage, ResultStorageResult +from ..aws.storage import AwsStorage class Storage(AwsStorage, BaseStorage): + """ + S3 Result Storage + """ def __init__(self, context): + """ + Constructor + :param Context context: Thumbor's context + """ BaseStorage.__init__(self, context) - AwsStorage.__init__(self, context, 'RESULT_STORAGE') + AwsStorage.__init__(self, context, 'TC_AWS_RESULT_STORAGE') def put(self, bytes): - path = self.normalize_path(self.context.request.url) + """ + Stores image + :param bytes bytes: Data to store + :return: Path where data is stored + :rtype: string + """ + path = self._normalize_path(self.context.request.url) + self.set(bytes, path) return path - def get(self, path=None): + @return_future + def get(self, path=None, callback=None): + """ + Retrieves data + :param string path: Path to load data (defaults to request URL) + :param callable callback: Method called once done + """ if path is None: path = self.context.request.url - return super(Storage, self).get(path) + def return_result(key): + result = ResultStorageResult + if self._get_error(key): + result.error = self._get_error(key) + else: + result.buffer = key['Body'].read() + result.successful = True + result.metadata = key.copy().pop('Body') + + callback(result) + + super(Storage, self).get(path, callback=return_result) diff --git a/tc_aws/storages/__init__.py b/tc_aws/storages/__init__.py index 576f56f..1bf8516 100644 --- a/tc_aws/storages/__init__.py +++ b/tc_aws/storages/__init__.py @@ -1 +1,5 @@ -# coding: utf-8 \ No newline at end of file +# coding: utf-8 + +# Copyright (c) 2015, thumbor-community +# Use of this source code is governed by the MIT license that can be +# found in the LICENSE file. diff --git a/tc_aws/storages/s3_storage.py b/tc_aws/storages/s3_storage.py index 9fbe268..f6e2376 100644 --- a/tc_aws/storages/s3_storage.py +++ b/tc_aws/storages/s3_storage.py @@ -1,87 +1,42 @@ #coding: utf-8 -from json import loads, dumps - -from os.path import splitext +# Copyright (c) 2015, thumbor-community +# Use of this source code is governed by the MIT license that can be +# found in the LICENSE file. 
from thumbor.storages import BaseStorage from ..aws.storage import AwsStorage - class Storage(AwsStorage, BaseStorage): - + """ + S3 Storage + """ def __init__(self, context): + """ + Constructor + :param Context context: Thumbor's context + """ BaseStorage.__init__(self, context) - AwsStorage.__init__(self, context, 'STORAGE') + AwsStorage.__init__(self, context, 'TC_AWS_STORAGE') def put(self, path, bytes): - self.set(bytes, self.normalize_path(path)) - - return path - - def put_crypto(self, path): - if not self.context.config.STORES_CRYPTO_KEY_FOR_EACH_IMAGE: - return - - if not self.context.server.security_key: - raise RuntimeError("STORES_CRYPTO_KEY_FOR_EACH_IMAGE can't be True if no SECURITY_KEY specified") - - file_abspath = self.normalize_path(path) - crypto_path = '%s.txt' % splitext(file_abspath)[0] - - self.set(self.context.server.security_key, crypto_path) - - return crypto_path - - def put_detector_data(self, path, data): - file_abspath = self.normalize_path(path) - - path = '%s.detectors.txt' % splitext(file_abspath)[0] - - self.set(dumps(data), path) + """ + Stores image + :param string path: Path to store data at + :param bytes bytes: Data to store + :return: Path where data is stored + :rtype: string + """ + self.set(bytes, self._normalize_path(path)) return path - def get_crypto(self, path): - file_abspath = self.normalize_path(path) - crypto_path = "%s.txt" % (splitext(file_abspath)[0]) - - file_key = self.storage.get_key(crypto_path) - - if not file_key: - return None - - return file_key.read() - - def get_detector_data(self, path): - file_abspath = self.normalize_path(path) - path = '%s.detectors.txt' % splitext(file_abspath)[0] - - file_key = self.storage.get_key(path) - - if not file_key or self.is_expired(file_key): - return None - - return loads(file_key.read()) - - def exists(self, path): - file_abspath = self.normalize_path(path) - file_key = self.storage.get_key(file_abspath) - - if not file_key: - return False - - return True - - def remove(self, path): - if not self.exists(path): - return - - if not self.storage.delete_key(path): - return False - - return True - def resolve_original_photo_path(self, filename): + """ + Determines original path for file + :param string filename: File to look at + :return: Resolved path (here it is the same) + :rtype: string + """ return filename diff --git a/vows/loader_vows.py b/vows/loader_vows.py index 6e139bb..66b49dc 100644 --- a/vows/loader_vows.py +++ b/vows/loader_vows.py @@ -11,8 +11,10 @@ from tc_aws.loaders import * -s3_bucket = 'thumbor-images-test' +import logging +logging.getLogger('botocore').setLevel(logging.CRITICAL) +s3_bucket = 'thumbor-images-test' @Vows.batch class S3LoaderVows(Vows.Context): @@ -21,8 +23,8 @@ class CanGetBucketAndKey(Vows.Context): def topic(self): conf = Config() - conf.S3_LOADER_BUCKET = None - conf.S3_LOADER_ROOT_PATH = '' + conf.TC_AWS_LOADER_BUCKET = None + conf.TC_AWS_LOADER_ROOT_PATH = '' return Context(config=conf) def should_detect_bucket_and_key(self, topic): @@ -43,8 +45,8 @@ class CanDetectKey(Vows.Context): def topic(self): conf = Config() - conf.S3_LOADER_BUCKET = None - conf.S3_LOADER_ROOT_PATH = '' + conf.TC_AWS_LOADER_BUCKET = None + conf.TC_AWS_LOADER_ROOT_PATH = '' context = Context(config=conf) return _get_key(IMAGE_PATH, context) diff --git a/vows/presigning_loader_vows.py b/vows/presigning_loader_vows.py index 346bd3f..8227e5f 100644 --- a/vows/presigning_loader_vows.py +++ b/vows/presigning_loader_vows.py @@ -20,6 +20,9 @@ from tc_aws.loaders import presigning_loader 
+import logging +logging.getLogger('botocore').setLevel(logging.CRITICAL) + s3_bucket = 'thumbor-images-test' @Vows.batch @@ -37,8 +40,8 @@ def topic(self): k.set_contents_from_string(IMAGE_BYTES) conf = Config() - conf.define('S3_LOADER_BUCKET', s3_bucket, '') - conf.define('S3_LOADER_ROOT_PATH', 'root_path', '') + conf.define('TC_AWS_LOADER_BUCKET', s3_bucket, '') + conf.define('TC_AWS_LOADER_ROOT_PATH', 'root_path', '') return Context(config=conf) @@ -51,7 +54,7 @@ class ValidatesBuckets(Vows.Context): @mock_s3 def topic(self): conf = Config() - conf.define('S3_ALLOWED_BUCKETS', [], '') + conf.define('TC_AWS_ALLOWED_BUCKETS', [], '') return Context(config=conf) @@ -64,7 +67,7 @@ class HandlesHttpLoader(Vows.Context): @mock_s3 def topic(self): conf = Config() - conf.define('AWS_ENABLE_HTTP_LOADER', True, '') + conf.define('TC_AWS_ENABLE_HTTP_LOADER', True, '') return Context(config=conf) @@ -73,20 +76,20 @@ def should_redirect_to_http(self, topic, load_sync_patch): def callback(*args): pass - presigning_loader.load_sync(topic, 'http://foo.bar', callback) + presigning_loader.load(topic, 'http://foo.bar', callback) expect(load_sync_patch.called).to_be_true() class CanBuildPresignedUrl(Vows.Context): - def topic(self): + @Vows.async_topic + @mock_s3 + def topic(self, callback): conf = Config() - return Context(config=conf) + context = Context(config=conf) + presigning_loader._generate_presigned_url(context, "bucket-name", "some-s3-key", callback) - @mock_s3 def should_generate_presigned_urls(self, topic): - url = presigning_loader._generate_presigned_url( - topic, "bucket-name", "some-s3-key") - url = urlparse(url) + url = urlparse(topic.args[0]) expect(url.scheme).to_equal('https') expect(url.hostname).to_equal('bucket-name.s3.amazonaws.com') expect(url.path).to_equal('/some-s3-key') diff --git a/vows/result_storage_vows.py b/vows/result_storage_vows.py index bc4da0d..3a5f362 100644 --- a/vows/result_storage_vows.py +++ b/vows/result_storage_vows.py @@ -12,6 +12,9 @@ from tc_aws.result_storages.s3_storage import Storage +import logging +logging.getLogger('botocore').setLevel(logging.CRITICAL) + s3_bucket = 'thumbor-images-test' @@ -28,7 +31,7 @@ def topic(self): self.conn = S3Connection() self.conn.create_bucket(s3_bucket) - config = Config(RESULT_STORAGE_BUCKET=s3_bucket) + config = Config(TC_AWS_RESULT_STORAGE_BUCKET=s3_bucket) ctx = Context(config=config, server=get_server('ACME-SEC')) ctx.request = Request ctx.request.url = 'my-image.jpg' @@ -42,12 +45,13 @@ def should_be_in_catalog(self, topic): expect(topic).to_equal('my-image.jpg') class CanGetImage(Vows.Context): + @Vows.async_topic @mock_s3 - def topic(self): + def topic(self, callback): self.conn = S3Connection() self.conn.create_bucket(s3_bucket) - config = Config(RESULT_STORAGE_BUCKET=s3_bucket) + config = Config(TC_AWS_RESULT_STORAGE_BUCKET=s3_bucket) ctx = Context(config=config, server=get_server('ACME-SEC')) ctx.request = Request ctx.request.url = 'my-image-2.jpg' @@ -55,22 +59,21 @@ def topic(self): storage = Storage(ctx) storage.put(IMAGE_BYTES) - return storage.get() - - def should_not_be_null(self, topic): - expect(topic).not_to_be_null() - expect(topic).not_to_be_an_error() + storage.get(callback=callback) def should_have_proper_bytes(self, topic): - expect(topic).to_equal(IMAGE_BYTES) + expect(topic.args[0]).not_to_be_null() + expect(topic.args[0]).not_to_be_an_error() + expect(topic.args[0]).to_equal(IMAGE_BYTES) class CanGetImageWithMetadata(Vows.Context): + @Vows.async_topic @mock_s3 - def topic(self): + def 
topic(self, callback): self.conn = S3Connection() self.conn.create_bucket(s3_bucket) - config = Config(RESULT_STORAGE_BUCKET=s3_bucket, RESULT_STORAGE_S3_STORE_METADATA=True) + config = Config(TC_AWS_RESULT_STORAGE_BUCKET=s3_bucket, TC_AWS_STORE_METADATA=True) ctx = Context(config=config, server=get_server('ACME-SEC')) ctx.headers = {'Content-Type': 'image/webp', 'Some-Other-Header': 'doge-header'} ctx.request = Request @@ -79,14 +82,13 @@ def topic(self): storage = Storage(ctx) storage.put(IMAGE_BYTES) - file_abspath = storage.normalize_path(ctx.request.url) - file_key = storage.storage.get_key(file_abspath) - return file_key.content_type, file_key.metadata, file_key.read() + file_abspath = storage._normalize_path(ctx.request.url) + storage.storage.get(file_abspath, callback=callback) def should_have_proper_bytes(self, topic): - expect(topic[0]).to_include('image/webp') - expect(topic[1]).to_include('some-other-header') - expect(topic[2]).to_equal(IMAGE_BYTES) + expect(topic.args[0].content_type).to_include('image/webp') + expect(topic.args[0].metadata).to_include('some-other-header') + expect(topic.args[0].content_type).to_equal(IMAGE_BYTES) class HandlesStoragePrefix(Vows.Context): @mock_s3 @@ -94,12 +96,12 @@ def topic(self): self.conn = S3Connection() self.conn.create_bucket(s3_bucket) - config = Config(RESULT_STORAGE_BUCKET=s3_bucket, RESULT_STORAGE_AWS_STORAGE_ROOT_PATH='tata') + config = Config(TC_AWS_RESULT_STORAGE_BUCKET=s3_bucket, TC_AWS_RESULT_STORAGE_ROOT_PATH='tata') ctx = Context(config=config, server=get_server('ACME-SEC')) storage = Storage(ctx) - return storage.normalize_path('toto') + return storage._normalize_path('toto') def should_return_the_same(self, topic): expect(topic).to_equal("tata/toto") diff --git a/vows/s3_loader_vows.py b/vows/s3_loader_vows.py index c6a0c8d..860d12b 100644 --- a/vows/s3_loader_vows.py +++ b/vows/s3_loader_vows.py @@ -17,8 +17,10 @@ from tc_aws.loaders import s3_loader -s3_bucket = 'thumbor-images-test' +import logging +logging.getLogger('botocore').setLevel(logging.CRITICAL) +s3_bucket = 'thumbor-images-test' @Vows.batch class S3LoaderVows(Vows.Context): @@ -35,8 +37,8 @@ def topic(self, callback): k.set_contents_from_string(IMAGE_BYTES) conf = Config() - conf.define('S3_LOADER_BUCKET', s3_bucket, '') - conf.define('S3_LOADER_ROOT_PATH', 'root_path', '') + conf.define('TC_AWS_LOADER_BUCKET', s3_bucket, '') + conf.define('TC_AWS_LOADER_ROOT_PATH', 'root_path', '') context = Context(config=conf) @@ -51,7 +53,7 @@ class ValidatesBuckets(Vows.Context): @mock_s3 def topic(self, callback): conf = Config() - conf.define('S3_ALLOWED_BUCKETS', [], '') + conf.define('TC_AWS_ALLOWED_BUCKETS', [], '') context = Context(config=conf) s3_loader.load(context, '/'.join([s3_bucket, IMAGE_PATH]), callback) @@ -64,7 +66,7 @@ class HandlesHttpLoader(Vows.Context): def topic(self): conf = Config() - conf.define('AWS_ENABLE_HTTP_LOADER', True, '') + conf.define('TC_AWS_ENABLE_HTTP_LOADER', True, '') return Context(config=conf) @@ -73,7 +75,7 @@ def should_redirect_to_http(self, topic, load_sync_patch): def callback(*args): pass - s3_loader.load_sync(topic, 'http://foo.bar', callback) + s3_loader.load(topic, 'http://foo.bar', callback) expect(load_sync_patch.called).to_be_true() @mock_s3 @@ -82,5 +84,5 @@ def should_not_redirect_to_http_if_not_prefixed_with_scheme(self, topic, load_sy def callback(*args): pass - s3_loader.load_sync(topic, 'foo.bar', callback) + s3_loader.load(topic, 'foo.bar', callback) expect(load_sync_patch.called).to_be_false() diff --git 
a/vows/storage_vows.py b/vows/storage_vows.py index 768eb6a..fe43793 100644 --- a/vows/storage_vows.py +++ b/vows/storage_vows.py @@ -6,122 +6,140 @@ from thumbor.context import Context from thumbor.config import Config from fixtures.storage_fixture import IMAGE_URL, IMAGE_BYTES, get_server -import time from boto.s3.connection import S3Connection -from boto.s3.key import Key from moto import mock_s3 from tc_aws.storages.s3_storage import Storage -s3_bucket = 'thumbor-images-test' +import logging +logging.getLogger('botocore').setLevel(logging.CRITICAL) +s3_bucket = 'thumbor-images-test' @Vows.batch class S3StorageVows(Vows.Context): class CanStoreImage(Vows.Context): + @Vows.async_topic @mock_s3 - def topic(self): + def topic(self, callback): self.conn = S3Connection() - bucket = self.conn.create_bucket(s3_bucket) + self.conn.create_bucket(s3_bucket) thumborId = IMAGE_URL % '1' - config = Config(STORAGE_BUCKET=s3_bucket) + config = Config(TC_AWS_STORAGE_BUCKET=s3_bucket) storage = Storage(Context(config=config, server=get_server('ACME-SEC'))) - store = storage.put(thumborId, IMAGE_BYTES) - k = Key(bucket) - k.key = thumborId - result = k.get_contents_as_string() - return (store, result) + storage.put(thumborId, IMAGE_BYTES) + storage.get(thumborId, callback=callback) def should_be_in_catalog(self, topic): - expect(topic[0]).to_equal(IMAGE_URL % '1') - expect(topic[1]).not_to_be_null() - expect(topic[1]).not_to_be_an_error() - expect(topic[1]).to_equal(IMAGE_BYTES) + expect(topic.args[0]).not_to_be_null() + expect(topic.args[0]).not_to_be_an_error() + expect(topic.args[0]).to_equal(IMAGE_BYTES) class CanGetImage(Vows.Context): + @Vows.async_topic @mock_s3 - def topic(self): + def topic(self, callback): self.conn = S3Connection() self.conn.create_bucket(s3_bucket) - config = Config(STORAGE_BUCKET=s3_bucket) + config = Config(TC_AWS_STORAGE_BUCKET=s3_bucket) storage = Storage(Context(config=config, server=get_server('ACME-SEC'))) storage.put(IMAGE_URL % '2', IMAGE_BYTES) - return storage.get(IMAGE_URL % '2') + storage.get(IMAGE_URL % '2', callback=callback) def should_not_be_null(self, topic): - expect(topic).not_to_be_null() - expect(topic).not_to_be_an_error() + expect(topic.args[0]).not_to_be_null() + expect(topic.args[0]).not_to_be_an_error() def should_have_proper_bytes(self, topic): - expect(topic).to_equal(IMAGE_BYTES) + expect(topic.args[0]).to_equal(IMAGE_BYTES) class CanGetImageExistance(Vows.Context): + @Vows.async_topic @mock_s3 - def topic(self): + def topic(self, callback): self.conn = S3Connection() self.conn.create_bucket(s3_bucket) - config = Config(STORAGE_BUCKET=s3_bucket) + config = Config(TC_AWS_STORAGE_BUCKET=s3_bucket) storage = Storage(Context(config=config, server=get_server('ACME-SEC'))) storage.put(IMAGE_URL % '3', IMAGE_BYTES) - return storage.exists(IMAGE_URL % '3') + storage.exists(IMAGE_URL % '3', callback=callback) def should_exists(self, topic): - expect(topic).to_equal(True) + expect(topic.args[0]).to_equal(True) class CanGetImageInexistance(Vows.Context): + @Vows.async_topic @mock_s3 - def topic(self): + def topic(self, callback): self.conn = S3Connection() self.conn.create_bucket(s3_bucket) - config = Config(STORAGE_BUCKET=s3_bucket) + config = Config(TC_AWS_STORAGE_BUCKET=s3_bucket) storage = Storage(Context(config=config, server=get_server('ACME-SEC'))) - return storage.exists(IMAGE_URL % '9999') + storage.exists(IMAGE_URL % '9999', callback) def should_not_exists(self, topic): - expect(topic).to_equal(False) + expect(topic.args[0]).to_equal(False) class 
CanRemoveImage(Vows.Context): + @Vows.async_topic @mock_s3 - def topic(self): + def topic(self, callback): self.conn = S3Connection() self.conn.create_bucket(s3_bucket) - config = Config(STORAGE_BUCKET=s3_bucket) + config = Config(TC_AWS_STORAGE_BUCKET=s3_bucket) storage = Storage(Context(config=config, server=get_server('ACME-SEC'))) - storage.put(IMAGE_URL % '4', IMAGE_BYTES) - created = storage.exists(IMAGE_URL % '4') - time.sleep(1) - storage.remove(IMAGE_URL % '4') - time.sleep(1) - return storage.exists(IMAGE_URL % '4') != created + storage.put(IMAGE_URL % '4', IMAGE_BYTES) # 1: we put the image + + def check_created(created): + expect(created).to_equal(True) # 2.1: assertion... + + def once_removed(rm): + storage.exists(IMAGE_URL % '4', callback=callback) #4: we check if the image exists + + storage.remove(IMAGE_URL % '4', callback=once_removed) # 3: we delete it + + storage.exists(IMAGE_URL % '4', callback=check_created) # 2: we check it exists def should_be_put_and_removed(self, topic): - expect(topic).to_equal(True) + expect(topic.args[0]).to_equal(False) # 4.1: assertion... class CanRemovethenPutImage(Vows.Context): + @Vows.async_topic @mock_s3 - def topic(self): + def topic(self, callback): self.conn = S3Connection() self.conn.create_bucket(s3_bucket) - config = Config(STORAGE_BUCKET=s3_bucket) + config = Config(TC_AWS_STORAGE_BUCKET=s3_bucket) storage = Storage(Context(config=config, server=get_server('ACME-SEC'))) - storage.put(IMAGE_URL % '5', IMAGE_BYTES) - storage.remove(IMAGE_URL % '5') - time.sleep(1) - created = storage.exists(IMAGE_URL % '5') - time.sleep(1) - storage.put(IMAGE_URL % '5', IMAGE_BYTES) - return storage.exists(IMAGE_URL % '5') != created + storage.put(IMAGE_URL % '5', IMAGE_BYTES) # 1: we put the image + + def check_created(created): + expect(created).to_equal(True) # 2.1: assertion... + + def once_removed(rm): + + def check_created_2(exists): + expect(exists).to_equal(True) # 4.1: assertion... 
+ + storage.put(IMAGE_URL % '5') # 5: we re-put it + storage.exists(IMAGE_URL % '5', callback=callback) #6: we check its existance again + + storage.exists(IMAGE_URL % '5', callback=check_created_2) #4: we check if the image exists + + storage.remove(IMAGE_URL % '5', callback=once_removed) # 3: we delete it + + storage.exists(IMAGE_URL % '5', callback=check_created) # 2: we check it exists def should_be_put_and_removed(self, topic): - expect(topic).to_equal(True) + expect(topic.args[0]).to_equal(True) class CanReturnPath(Vows.Context): @mock_s3 @@ -129,7 +147,7 @@ def topic(self): self.conn = S3Connection() self.conn.create_bucket(s3_bucket) - config = Config(STORAGE_BUCKET=s3_bucket) + config = Config(TC_AWS_STORAGE_BUCKET=s3_bucket) storage = Storage(Context(config=config, server=get_server('ACME-SEC'))) return storage.resolve_original_photo_path("toto") @@ -142,10 +160,10 @@ def topic(self): self.conn = S3Connection() self.conn.create_bucket(s3_bucket) - config = Config(STORAGE_BUCKET=s3_bucket, STORAGE_AWS_STORAGE_ROOT_PATH='tata') + config = Config(TC_AWS_STORAGE_BUCKET=s3_bucket, TC_AWS_STORAGE_ROOT_PATH='tata') storage = Storage(Context(config=config, server=get_server('ACME-SEC'))) - return storage.normalize_path('toto') + return storage._normalize_path('toto') def should_return_the_same(self, topic): expect(topic).to_equal("tata/toto") @@ -158,7 +176,7 @@ def topic(self): self.conn = S3Connection() self.conn.create_bucket(s3_bucket) - config = Config(STORAGE_BUCKET=s3_bucket, STORES_CRYPTO_KEY_FOR_EACH_IMAGE=True) + config = Config(TC_AWS_STORAGE_BUCKET=s3_bucket, STORES_CRYPTO_KEY_FOR_EACH_IMAGE=True) storage = Storage(Context(config=config, server=get_server(''))) storage.put(IMAGE_URL % '9999', IMAGE_BYTES) storage.put_crypto(IMAGE_URL % '9999') @@ -168,81 +186,82 @@ def should_be_an_error(self, topic): expect(topic).to_have_an_error_message_of("STORES_CRYPTO_KEY_FOR_EACH_IMAGE can't be True if no SECURITY_KEY specified") class GettingCryptoForANewImageReturnsNone(Vows.Context): + @Vows.async_topic @mock_s3 - def topic(self): + def topic(self, callback): self.conn = S3Connection() self.conn.create_bucket(s3_bucket) - config = Config(STORAGE_BUCKET=s3_bucket, STORES_CRYPTO_KEY_FOR_EACH_IMAGE=True) + config = Config(TC_AWS_STORAGE_BUCKET=s3_bucket, STORES_CRYPTO_KEY_FOR_EACH_IMAGE=True) storage = Storage(Context(config=config, server=get_server('ACME-SEC'))) - return storage.get_crypto(IMAGE_URL % '9999') + storage.get_crypto(IMAGE_URL % '9999', callback=callback) def should_be_null(self, topic): - expect(topic).to_be_null() + expect(topic.args[0]).to_be_null() class DoesNotStoreIfConfigSaysNotTo(Vows.Context): + @Vows.async_topic @mock_s3 - def topic(self): + def topic(self, callback): self.conn = S3Connection() self.conn.create_bucket(s3_bucket) - config = Config(STORAGE_BUCKET=s3_bucket) + config = Config(TC_AWS_STORAGE_BUCKET=s3_bucket) storage = Storage(Context(config=config, server=get_server('ACME-SEC'))) storage.put(IMAGE_URL % '9998', IMAGE_BYTES) storage.put_crypto(IMAGE_URL % '9998') - return storage.get_crypto(IMAGE_URL % '9998') + storage.get_crypto(IMAGE_URL % '9998', callback=callback) def should_be_null(self, topic): - expect(topic).to_be_null() + expect(topic.args[0]).to_be_null() class CanStoreCrypto(Vows.Context): + @Vows.async_topic @mock_s3 - def topic(self): + def topic(self, callback): self.conn = S3Connection() self.conn.create_bucket(s3_bucket) - config = Config(STORAGE_BUCKET=s3_bucket, STORES_CRYPTO_KEY_FOR_EACH_IMAGE=True) + config = 
Config(TC_AWS_STORAGE_BUCKET=s3_bucket, STORES_CRYPTO_KEY_FOR_EACH_IMAGE=True) storage = Storage(Context(config=config, server=get_server('ACME-SEC'))) storage.put(IMAGE_URL % '6', IMAGE_BYTES) storage.put_crypto(IMAGE_URL % '6') - return storage.get_crypto(IMAGE_URL % '6') + storage.get_crypto(IMAGE_URL % '6', callback=callback) def should_not_be_null(self, topic): - expect(topic).not_to_be_null() - expect(topic).not_to_be_an_error() - - def should_have_proper_key(self, topic): - expect(topic).to_equal('ACME-SEC') + expect(topic.args[0]).not_to_be_null() + expect(topic.args[0]).not_to_be_an_error() + expect(topic.args[0]).to_equal('ACME-SEC') class DetectorVows(Vows.Context): class CanStoreDetectorData(Vows.Context): + @Vows.async_topic @mock_s3 - def topic(self): + def topic(self, callback): self.conn = S3Connection() self.conn.create_bucket(s3_bucket) - config = Config(STORAGE_BUCKET=s3_bucket) + config = Config(TC_AWS_STORAGE_BUCKET=s3_bucket) storage = Storage(Context(config=config, server=get_server('ACME-SEC'))) storage.put(IMAGE_URL % '7', IMAGE_BYTES) storage.put_detector_data(IMAGE_URL % '7', 'some-data') - return storage.get_detector_data(IMAGE_URL % '7') + storage.get_detector_data(IMAGE_URL % '7', callback=callback) def should_not_be_null(self, topic): - expect(topic).not_to_be_null() - expect(topic).not_to_be_an_error() - - def should_equal_some_data(self, topic): - expect(topic).to_equal('some-data') + expect(topic.args[0]).not_to_be_null() + expect(topic.args[0]).not_to_be_an_error() + expect(topic.args[0]).to_equal('some-data') class ReturnsNoneIfNoDetectorData(Vows.Context): + @Vows.async_topic @mock_s3 - def topic(self): + def topic(self, callback): self.conn = S3Connection() self.conn.create_bucket(s3_bucket) - config = Config(STORAGE_BUCKET=s3_bucket) + config = Config(TC_AWS_STORAGE_BUCKET=s3_bucket) storage = Storage(Context(config=config, server=get_server('ACME-SEC'))) - return storage.get_detector_data(IMAGE_URL % '9999') + storage.get_detector_data(IMAGE_URL % '9999', callback=callback) def should_not_be_null(self, topic): - expect(topic).to_be_null() + expect(topic.args[0]).to_be_null()
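
Below is a minimal usage sketch, not part of the patch itself, showing how the new asynchronous ``Bucket`` API introduced here can be consumed from a Tornado coroutine. The bucket name, region and object key are placeholder values, and credentials are assumed to be resolved by botocore's usual configuration chain.

```python
# Illustrative only: exercises tc_aws.aws.bucket.Bucket as added by this patch.
# Bucket name, region and key below are placeholders.
from tornado import gen, ioloop

from tc_aws.aws.bucket import Bucket


@gen.coroutine
def fetch_object():
    # The region should match the TC_AWS_REGION setting used by thumbor.
    bucket = Bucket('my-thumbor-bucket', 'eu-west-1')

    # Bucket methods are wrapped with @return_future, so calling them without
    # a callback returns a Future that can be yielded inside a coroutine.
    response = yield bucket.get('source-images/photo.jpg')

    if 'Error' in response:
        raise RuntimeError(response['Error']['Message'])

    # GetObject responses expose the payload as a readable 'Body' stream.
    raise gen.Return(response['Body'].read())


if __name__ == '__main__':
    body = ioloop.IOLoop.current().run_sync(fetch_object)
    print('fetched %d bytes' % len(body))
```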