Skip to content

Commit

Permalink
bump dependencies (#58)
Browse files Browse the repository at this point in the history
  • Loading branch information
willmcgugan authored Aug 14, 2019
1 parent 1c5e3a1 commit 7f95af6
Show file tree
Hide file tree
Showing 7 changed files with 62 additions and 60 deletions.
7 changes: 7 additions & 0 deletions CHANGELOG.md
Original file line number Diff line number Diff line change
Expand Up @@ -5,6 +5,13 @@ All notable changes to this project will be documented in this file.
The format is based on [Keep a Changelog](http://keepachangelog.com/)
and this project adheres to [Semantic Versioning](http://semver.org/).

## [1.1.1] - 2019-08-14

### Changed

- Bumped FS to 2.4
- Bumped Boto to 1.9

## [1.1.0] - 2018-01-01

### Changed
Expand Down
1 change: 0 additions & 1 deletion fs_s3fs/_s3fs.py
Original file line number Diff line number Diff line change
Expand Up @@ -13,7 +13,6 @@
import tempfile
import threading
import mimetypes
import json

import boto3
from botocore.exceptions import ClientError, EndpointConnectionError
Expand Down
2 changes: 1 addition & 1 deletion fs_s3fs/_version.py
Original file line number Diff line number Diff line change
@@ -1 +1 @@
__version__ = "1.1.0"
__version__ = "1.1.1"
26 changes: 12 additions & 14 deletions fs_s3fs/opener.py
Original file line number Diff line number Diff line change
@@ -1,11 +1,11 @@
# coding: utf-8
"""Defines the S3FSpener."""
"""Defines the S3FS Opener."""

from __future__ import absolute_import
from __future__ import print_function
from __future__ import unicode_literals

__all__ = ['S3FSOpener']
__all__ = ["S3FSOpener"]

from fs.opener import Opener
from fs.opener.errors import OpenerError
Expand All @@ -14,27 +14,25 @@


class S3FSOpener(Opener):
protocols = ['s3']
protocols = ["s3"]

def open_fs(self, fs_url, parse_result, writeable, create, cwd):
bucket_name, _, dir_path = parse_result.resource.partition('/')
bucket_name, _, dir_path = parse_result.resource.partition("/")
if not bucket_name:
raise OpenerError(
"invalid bucket name in '{}'".format(fs_url)
)
raise OpenerError("invalid bucket name in '{}'".format(fs_url))
strict = (
parse_result.params['strict'] == '1'
if 'strict' in parse_result.params
parse_result.params["strict"] == "1"
if "strict" in parse_result.params
else True
)
s3fs = S3FS(
bucket_name,
dir_path=dir_path or '/',
dir_path=dir_path or "/",
aws_access_key_id=parse_result.username or None,
aws_secret_access_key=parse_result.password or None,
endpoint_url=parse_result.params.get('endpoint_url', None),
acl=parse_result.params.get('acl', None),
cache_control=parse_result.params.get('cache_control', None),
strict=strict
endpoint_url=parse_result.params.get("endpoint_url", None),
acl=parse_result.params.get("acl", None),
cache_control=parse_result.params.get("cache_control", None),
strict=strict,
)
return s3fs
78 changes: 38 additions & 40 deletions fs_s3fs/tests/test_s3fs.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,77 +5,75 @@
from nose.plugins.attrib import attr

from fs.test import FSTestCases

from fs_s3fs import S3FS

import boto3


class TestS3FS(FSTestCases, unittest.TestCase):
"""Test S3FS implementation from dir_path."""
bucket_name = 'fsexample'
s3 = boto3.resource('s3')
client = boto3.client('s3')

bucket_name = "fsexample"
s3 = boto3.resource("s3")
client = boto3.client("s3")

def make_fs(self):
self._delete_bucket_contents()
return S3FS(self.bucket_name)

def _delete_bucket_contents(self):
response = self.client.list_objects(
Bucket=self.bucket_name
)
response = self.client.list_objects(Bucket=self.bucket_name)
contents = response.get("Contents", ())
for obj in contents:
self.client.delete_object(
Bucket=self.bucket_name,
Key=obj["Key"]
)
self.client.delete_object(Bucket=self.bucket_name, Key=obj["Key"])


@attr('slow')
@attr("slow")
class TestS3FSSubDir(FSTestCases, unittest.TestCase):
"""Test S3FS implementation from dir_path."""
bucket_name = 'fsexample'
s3 = boto3.resource('s3')
client = boto3.client('s3')

bucket_name = "fsexample"
s3 = boto3.resource("s3")
client = boto3.client("s3")

def make_fs(self):
self._delete_bucket_contents()
self.s3.Object(self.bucket_name, 'subdirectory').put()
return S3FS(self.bucket_name, dir_path='subdirectory')
self.s3.Object(self.bucket_name, "subdirectory").put()
return S3FS(self.bucket_name, dir_path="subdirectory")

def _delete_bucket_contents(self):
response = self.client.list_objects(
Bucket=self.bucket_name
)
response = self.client.list_objects(Bucket=self.bucket_name)
contents = response.get("Contents", ())
for obj in contents:
self.client.delete_object(
Bucket=self.bucket_name,
Key=obj["Key"]
)
self.client.delete_object(Bucket=self.bucket_name, Key=obj["Key"])


class TestS3FSHelpers(unittest.TestCase):

def test_path_to_key(self):
s3 = S3FS('foo')
self.assertEqual(s3._path_to_key('foo.bar'), 'foo.bar')
self.assertEqual(s3._path_to_key('foo/bar'), 'foo/bar')
s3 = S3FS("foo")
self.assertEqual(s3._path_to_key("foo.bar"), "foo.bar")
self.assertEqual(s3._path_to_key("foo/bar"), "foo/bar")

def test_path_to_key_subdir(self):
s3 = S3FS('foo', '/dir')
self.assertEqual(s3._path_to_key('foo.bar'), 'dir/foo.bar')
self.assertEqual(s3._path_to_key('foo/bar'), 'dir/foo/bar')
s3 = S3FS("foo", "/dir")
self.assertEqual(s3._path_to_key("foo.bar"), "dir/foo.bar")
self.assertEqual(s3._path_to_key("foo/bar"), "dir/foo/bar")

def test_upload_args(self):
s3 = S3FS('foo', acl='acl', cache_control='cc')
self.assertDictEqual(s3._get_upload_args('test.jpg'),
{'ACL': 'acl', 'CacheControl': 'cc', 'ContentType': 'image/jpeg'})
self.assertDictEqual(s3._get_upload_args('test.mp3'),
{'ACL': 'acl', 'CacheControl': 'cc', 'ContentType': 'audio/mpeg'})
self.assertDictEqual(s3._get_upload_args('test.json'),
{'ACL': 'acl', 'CacheControl': 'cc', 'ContentType': 'application/json'})
self.assertDictEqual(s3._get_upload_args('unknown.unknown'),
{'ACL': 'acl', 'CacheControl': 'cc', 'ContentType': 'binary/octet-stream'})
s3 = S3FS("foo", acl="acl", cache_control="cc")
self.assertDictEqual(
s3._get_upload_args("test.jpg"),
{"ACL": "acl", "CacheControl": "cc", "ContentType": "image/jpeg"},
)
self.assertDictEqual(
s3._get_upload_args("test.mp3"),
{"ACL": "acl", "CacheControl": "cc", "ContentType": "audio/mpeg"},
)
self.assertDictEqual(
s3._get_upload_args("test.json"),
{"ACL": "acl", "CacheControl": "cc", "ContentType": "application/json"},
)
self.assertDictEqual(
s3._get_upload_args("unknown.unknown"),
{"ACL": "acl", "CacheControl": "cc", "ContentType": "binary/octet-stream"},
)
2 changes: 1 addition & 1 deletion setup.py
Original file line number Diff line number Diff line change
Expand Up @@ -23,7 +23,7 @@
with open("README.rst", "rt") as f:
DESCRIPTION = f.read()

REQUIREMENTS = ["boto3~=1.7", "fs~=2.2", "six~=1.10"]
REQUIREMENTS = ["boto3~=1.9", "fs~=2.4", "six~=1.10"]

setup(
name="fs-s3fs",
Expand Down
6 changes: 3 additions & 3 deletions tox.ini
Original file line number Diff line number Diff line change
@@ -1,11 +1,11 @@
[tox]
envlist = py27,py34,py35,py36,pypy
envlist = py27,py34,py35,py36,py37,pypy
sitepackages = False

[testenv]
deps = nose
boto3==1.7.64
fs==2.1.0
boto3==1.9.207
fs==2.4.10

passenv = *
#changedir=.tox
Expand Down

0 comments on commit 7f95af6

Please sign in to comment.