From 2f83507d72cf2e30426f227ba5e36b03b6c9bb00 Mon Sep 17 00:00:00 2001
From: Oliver Roick
Date: Fri, 28 Jul 2017 10:12:57 +0200
Subject: [PATCH] Add content-length-range to pre-signed URL config (#32)

* Add content-length-range to pre-signed URL config

* Add file-size check to test.views.fake_s3_upload

* Add MAX_FILE_SIZE setting to README

* Bump version to 0.1.21
---
 README.rst                          | 11 +++++++++++
 buckets/__init__.py                 |  2 +-
 buckets/static/buckets/js/script.js | 18 ++++++++++++++++--
 buckets/storage.py                  |  8 +++++++-
 buckets/test/errors.py              |  9 +++++++++
 buckets/test/storage.py             |  2 +-
 buckets/test/views.py               | 11 ++++++++++-
 example/exampleapp/models.py        |  2 +-
 example/settings.py                 |  1 +
 requirements.txt                    |  2 +-
 tests/conftest.py                   |  1 +
 tests/test_test.py                  | 24 ++++++++++++++++++++++--
 12 files changed, 81 insertions(+), 10 deletions(-)
 create mode 100644 buckets/test/errors.py

diff --git a/README.rst b/README.rst
index 8ddb166a10..c24e622d15 100644
--- a/README.rst
+++ b/README.rst
@@ -106,6 +106,17 @@ bucket for development.
         url(r'', include('buckets.test.urls')),
     ]
 
+Other settings
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+Other optional settings can be added to the :code:`AWS` settings dictionary.
+
+===================== =========== ========================================================================
+Name                  Type        Description
+===================== =========== ========================================================================
+:code:`MAX_FILE_SIZE` :code:`int` The maximum allowed size for file uploads, in bytes. If :code:`MAX_FILE_SIZE` is not defined, the upload size is not limited.
+===================== =========== ========================================================================
+
 Usage
 -------------------------------------------------------------------------------
 
diff --git a/buckets/__init__.py b/buckets/__init__.py
index e160d939c6..dccb61c0a6 100644
--- a/buckets/__init__.py
+++ b/buckets/__init__.py
@@ -1 +1 @@
-__version__ = '0.1.20'
+__version__ = '0.1.21'
diff --git a/buckets/static/buckets/js/script.js b/buckets/static/buckets/js/script.js
index 427f72ea6c..da11e5047f 100644
--- a/buckets/static/buckets/js/script.js
+++ b/buckets/static/buckets/js/script.js
@@ -2,6 +2,11 @@
   var link_update = document.createEvent('Event');
   link_update.initEvent('link:update', true, true);
 
+  function toMB(bytes) {
+    // Convert bytes to MB and round to two decimals.
+    return Math.round(bytes / (1024*1024) * 100) / 100;
+  }
+
   function getParentByTagName(el, tagName) {
     var p = el.parentElement;
 
@@ -103,9 +108,18 @@
     })
     formData.append('file', file);
 
-    request('POST', url, formData, headers, el, function(status, xml) {
+    request('POST', url, formData, headers, el, function(status, response) {
       if (status !== 204) {
-        error(el, 'Not able to upload file')
+        var errorMsg = 'Not able to upload file. ';
+
+        var xml = new DOMParser().parseFromString(response, "text/xml");
+        if (xml.getElementsByTagName('Code')[0].innerHTML === 'EntityTooLarge') {
+          var limit = parseInt(xml.getElementsByTagName('MaxSizeAllowed')[0].innerHTML);
+
+          errorMsg += 'The size of the file exceeds the maximum allowed size of ' + toMB(limit) + 'MB.';
+        }
+
+        error(el, errorMsg)
       } else {
         var fileUrl = data.url + '/' + data.fields.key;
         update(el, fileUrl);
diff --git a/buckets/storage.py b/buckets/storage.py
index 95a359eb60..81c5b459fa 100644
--- a/buckets/storage.py
+++ b/buckets/storage.py
@@ -18,6 +18,7 @@ def __init__(self):
         self.secret_key = settings.AWS['SECRET_KEY']
         self.region = settings.AWS['REGION']
         self.bucket_name = settings.AWS['BUCKET']
+        self.max_size = settings.AWS.get('MAX_FILE_SIZE')
 
         ensure_dirs('downloads')
 
@@ -87,9 +88,14 @@ def get_signed_url(self, key):
         if not self.exists(temp_key):
             s3_key = temp_key
 
+        conditions = []
+        if self.max_size:
+            conditions.append(["content-length-range", 0, self.max_size])
+
         params = {
             'Bucket': self.bucket_name,
-            'Key': s3_key
+            'Key': s3_key,
+            'Conditions': conditions
         }
         client = boto3.client(
             's3',
diff --git a/buckets/test/errors.py b/buckets/test/errors.py
new file mode 100644
index 0000000000..08c8571d88
--- /dev/null
+++ b/buckets/test/errors.py
@@ -0,0 +1,9 @@
+EXCEED_MAX_SIZE = """
+<Error>
+    <Code>EntityTooLarge</Code>
+    <Message>Your proposed upload exceeds the maximum
+    allowed size</Message>
+    <MaxSizeAllowed>{max_size}</MaxSizeAllowed>
+    <ProposedSize>{proposed_size}</ProposedSize>
+</Error>
+"""
diff --git a/buckets/test/storage.py b/buckets/test/storage.py
index 289242d79c..d58ad92bbf 100644
--- a/buckets/test/storage.py
+++ b/buckets/test/storage.py
@@ -60,4 +60,4 @@ def get_signed_url(self, key):
         if not self.exists(temp_key):
             s3_key = temp_key
 
-        return {'url': '/media/s3/uploads', 'fields': {'key': s3_key}}
+        return {'url': '/media/s3/uploads/', 'fields': {'key': s3_key}}
diff --git a/buckets/test/views.py b/buckets/test/views.py
index 1ee677f3f8..bb2ce7647b 100644
--- a/buckets/test/views.py
+++ b/buckets/test/views.py
@@ -1,8 +1,11 @@
+from django.conf import settings
 from django.core.files.storage import default_storage
 from django.views.decorators.csrf import csrf_exempt
 from django.views.decorators.http import require_POST
 from django.http import HttpResponse
 
+from .errors import EXCEED_MAX_SIZE
+
 
 @csrf_exempt
 @require_POST
@@ -10,6 +13,12 @@ def fake_s3_upload(request):
     key = request.POST.get('key')
     file = request.FILES.get('file')
 
-    default_storage.save(key, file.read())
+    max_file_size = settings.AWS.get('MAX_FILE_SIZE')
+    if max_file_size and file.size > max_file_size:
+        msg = EXCEED_MAX_SIZE.format(max_size=max_file_size,
+                                     proposed_size=file.size)
+        return HttpResponse(msg, status=400)
+
+    default_storage.save(key, file.read())
 
     return HttpResponse('', status=204)
diff --git a/example/exampleapp/models.py b/example/exampleapp/models.py
index 942d2fa80e..6ca3964d4e 100644
--- a/example/exampleapp/models.py
+++ b/example/exampleapp/models.py
@@ -1,7 +1,7 @@
 from django.db import models
 from buckets.fields import S3FileField
 
-TYPES = ['image/jpeg', 'application/gpx+xml']
+TYPES = ['image/jpeg', 'application/gpx+xml', 'text/plain']
 
 
 class FileModel(models.Model):
diff --git a/example/settings.py b/example/settings.py
index a87c13b308..53073c6656 100644
--- a/example/settings.py
+++ b/example/settings.py
@@ -91,6 +91,7 @@
     'ACCESS_KEY': os.environ.get('AWS_ACCESS_KEY'),
     'SECRET_KEY': os.environ.get('AWS_SECRET_KEY'),
     'REGION': os.environ.get('AWS_REGION'),
+    'MAX_FILE_SIZE': 1048576,
 }
 
 MIME_LOOKUPS = {
diff --git a/requirements.txt b/requirements.txt
index b4ad527332..361929004f 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -1,5 +1,5 @@
 # Minimum Django version
-Django==1.11.3
+Django>=1.10,<1.11
 boto3==1.4.4
 
 # Test requirements
diff --git a/tests/conftest.py b/tests/conftest.py
index ff67418457..7318f3fbd8 100644
--- a/tests/conftest.py
+++ b/tests/conftest.py
@@ -41,6 +41,7 @@ def pytest_configure():
             'ACCESS_KEY': os.environ.get('AWS_ACCESS_KEY'),
             'SECRET_KEY': os.environ.get('AWS_SECRET_KEY'),
             'REGION': os.environ.get('AWS_REGION'),
+            'MAX_FILE_SIZE': 1048579,
         },
         ROOT_URLCONF='buckets.test.urls',
         MEDIA_ROOT=os.path.join(os.path.dirname(BASE_DIR), 'files'),
diff --git a/tests/test_test.py b/tests/test_test.py
index ac3d607ace..8f26ed336d 100644
--- a/tests/test_test.py
+++ b/tests/test_test.py
@@ -6,7 +6,7 @@
 
 from buckets.test.mocks import create_file, make_dirs  # noqa
 from buckets.test.storage import FakeS3Storage
-from buckets.test import views
+from buckets.test import views, errors
 
 
 #############################################################################
@@ -70,7 +70,7 @@ def test_get_signed_url():
     store = FakeS3Storage()
     signed = store.get_signed_url(key='file.txt')
 
-    assert '/media/s3/uploads' == signed['url']
+    assert '/media/s3/uploads/' == signed['url']
     assert len(signed['fields']['key']) == 28
 
 
@@ -142,3 +142,23 @@ def test_post_upload_file_to_subdir(make_dirs, monkeypatch):  # noqa
     assert response.status_code == 204
     assert os.path.isfile(
         os.path.join(settings.MEDIA_ROOT, 's3/uploads/subdir', 'text.txt'))
+
+
+def test_post_large_file(make_dirs, monkeypatch, settings):  # noqa
+    monkeypatch.setattr(views, 'default_storage', FakeS3Storage())
+    file = create_file()
+
+    upload = SimpleUploadedFile('text.txt', open(file.name, 'rb').read())
+    upload.size = settings.AWS['MAX_FILE_SIZE'] + 1
+
+    request = HttpRequest()
+    setattr(request, 'method', 'POST')
+    setattr(request, 'FILES', {'file': upload})
+    setattr(request, 'POST', {'key': 'subdir/text.txt'})
+    response = views.fake_s3_upload(request)
+    assert response.status_code == 400
+    assert response.content.decode('utf-8') == errors.EXCEED_MAX_SIZE.format(
+        max_size=settings.AWS['MAX_FILE_SIZE'],
+        proposed_size=settings.AWS['MAX_FILE_SIZE'] + 1)
+    assert not os.path.isfile(
+        os.path.join(settings.MEDIA_ROOT, 's3/uploads/subdir', 'text.txt'))
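
Note on how the new Conditions list is consumed: the hunk in buckets/storage.py builds
params with a 'Conditions' entry, but the boto3 call that uses it sits outside the diff
context. Assuming get_signed_url ends by passing params to the S3 client's
generate_presigned_post (a standard boto3 method), a minimal standalone sketch of the same
content-length-range technique looks like the snippet below; the region, bucket name, and
key are illustrative placeholders, not values taken from this patch:

    import boto3

    MAX_FILE_SIZE = 1048576  # bytes; mirrors the value added to example/settings.py

    client = boto3.client('s3', region_name='eu-west-2')  # hypothetical region
    post = client.generate_presigned_post(
        Bucket='my-example-bucket',     # hypothetical bucket
        Key='uploads/example.txt',      # hypothetical key
        # S3 rejects any POST whose file part falls outside this byte range and
        # answers with an EntityTooLarge XML error, which script.js parses to
        # read the MaxSizeAllowed value for its error message.
        Conditions=[['content-length-range', 0, MAX_FILE_SIZE]],
        ExpiresIn=3600,
    )
    # post['url'] and post['fields'] drive the browser-side form upload, the same
    # {'url': ..., 'fields': ...} shape that FakeS3Storage.get_signed_url returns
    # in buckets/test/storage.py.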