Use randomized bucket names for S3 integration tests. (#9052)
* Use randomized bucket names for S3 integration tests.

* Improve random bucket name generation

* Use shared non-existent bucket
jonathan343 authored Nov 4, 2024
1 parent 08f02e9 commit bb6f89f
Showing 2 changed files with 22 additions and 10 deletions.
5 changes: 3 additions & 2 deletions awscli/testutils.py
@@ -32,6 +32,7 @@
import tempfile
import time
import unittest
import uuid
from pprint import pformat
from subprocess import PIPE, Popen
from unittest import mock
@@ -208,7 +209,7 @@ def random_chars(num_chars):
return binascii.hexlify(os.urandom(int(num_chars / 2))).decode('ascii')


def random_bucket_name(prefix='awscli-s3integ-', num_random=15):
def random_bucket_name(prefix='awscli-s3integ', num_random=15):
"""Generate a random S3 bucket name.
:param prefix: A prefix to use in the bucket name. Useful
@@ -219,7 +220,7 @@ def random_bucket_name(prefix='awscli-s3integ-', num_random=15):
:returns: The name of a randomly generated bucket name as a string.
"""
return prefix + random_chars(num_random)
return f"{prefix}-{random_chars(num_random)}-{int(time.time())}"


class BaseCLIDriverTest(unittest.TestCase):
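For quick reference, here is a minimal standalone sketch of the updated helper as it reads after this change; the printed name is illustrative, not a real test bucket.

```python
# Mirrors the diff above. random_chars(n) hex-encodes os.urandom(n // 2),
# so num_random=15 yields 14 hex characters.
import binascii
import os
import time


def random_chars(num_chars):
    return binascii.hexlify(os.urandom(int(num_chars / 2))).decode('ascii')


def random_bucket_name(prefix='awscli-s3integ', num_random=15):
    """Generate a random S3 bucket name."""
    return f"{prefix}-{random_chars(num_random)}-{int(time.time())}"


print(random_bucket_name())
# e.g. awscli-s3integ-9f1c2b7a4d3e5f-1730678400 (about 40 characters,
# comfortably under the 63-character S3 bucket name limit)
```

Appending the epoch timestamp keeps names unique across repeated test runs while leaving plenty of room under the length limit.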
27 changes: 19 additions & 8 deletions tests/integration/customizations/s3/test_plugin.py
@@ -45,6 +45,7 @@
# Using the same log name as testutils.py
LOG = logging.getLogger('awscli.tests.integration')
_SHARED_BUCKET = random_bucket_name()
_NON_EXISTENT_BUCKET = random_bucket_name()
_DEFAULT_REGION = 'us-west-2'
_DEFAULT_AZ = 'usw2-az1'
_SHARED_DIR_BUCKET = f'{random_bucket_name()}--{_DEFAULT_AZ}--x-s3'
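A side note on the directory-bucket constant above: S3 directory buckets (S3 Express One Zone) require names of the form base--az-id--x-s3, which is why the availability-zone suffix is appended. The values below are illustrative only:

```python
# Illustrative reconstruction of _SHARED_DIR_BUCKET; the base name is a
# made-up example of random_bucket_name() output, not a real bucket.
_DEFAULT_AZ = 'usw2-az1'
base = 'awscli-s3integ-9f1c2b7a4d3e5f-1730678400'
shared_dir_bucket = f'{base}--{_DEFAULT_AZ}--x-s3'
print(shared_dir_bucket)
# awscli-s3integ-9f1c2b7a4d3e5f-1730678400--usw2-az1--x-s3
```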
@@ -87,6 +88,17 @@ def setup_module():
Bucket=_SHARED_BUCKET
)

# Validate that "_NON_EXISTENT_BUCKET" doesn't exist.
waiter = s3.get_waiter('bucket_not_exists')
try:
waiter.wait(Bucket=_NON_EXISTENT_BUCKET)
except Exception as e:
LOG.debug(
f"The following bucket was unexpectedly discovered: {_NON_EXISTENT_BUCKET}",
e,
exc_info=True,
)


def clear_out_bucket(bucket, delete_bucket=False):
s3 = botocore.session.get_session().create_client(
@@ -308,7 +320,7 @@ def test_mv_with_large_file(self):

def test_mv_to_nonexistent_bucket(self):
full_path = self.files.create_file('foo.txt', 'this is foo.txt')
p = aws('s3 mv %s s3://bad-noexist-13143242/foo.txt' % (full_path,))
p = aws(f's3 mv {full_path} s3://{_NON_EXISTENT_BUCKET}/foo.txt')
self.assertEqual(p.rc, 1)

def test_cant_move_file_onto_itself_small_file(self):
@@ -519,7 +531,7 @@ def test_cleans_up_aborted_uploads(self):

def test_cp_to_nonexistent_bucket(self):
foo_txt = self.files.create_file('foo.txt', 'this is foo.txt')
p = aws('s3 cp %s s3://noexist-bucket-foo-bar123/foo.txt' % (foo_txt,))
p = aws(f's3 cp {foo_txt} s3://{_NON_EXISTENT_BUCKET}/foo.txt')
self.assertEqual(p.rc, 1)

def test_cp_empty_file(self):
@@ -531,7 +543,7 @@ def test_cp_empty_file(self):
self.assertTrue(self.key_exists(bucket_name, 'foo.txt'))

def test_download_non_existent_key(self):
p = aws('s3 cp s3://jasoidfjasdjfasdofijasdf/foo.txt foo.txt')
p = aws(f's3 cp s3://{_NON_EXISTENT_BUCKET}/foo.txt foo.txt')
self.assertEqual(p.rc, 1)
expected_err_msg = (
'An error occurred (404) when calling the '
@@ -1223,7 +1235,7 @@ def test_ls_bucket_with_s3_prefix(self):
self.assert_no_errors(p)

def test_ls_non_existent_bucket(self):
p = aws('s3 ls s3://foobara99842u4wbts829381')
p = aws(f's3 ls s3://{_NON_EXISTENT_BUCKET}')
self.assertEqual(p.rc, 255)
self.assertIn(
('An error occurred (NoSuchBucket) when calling the '
@@ -1360,7 +1372,7 @@ def test_error_output(self):
foo_txt = self.files.create_file('foo.txt', 'foo contents')

# Copy file into bucket.
p = aws('s3 cp %s s3://non-existant-bucket/' % foo_txt)
p = aws(f's3 cp {foo_txt} s3://{_NON_EXISTENT_BUCKET}/')
# Check that there were errors and that the error was printed to stderr.
self.assertEqual(p.rc, 1)
self.assertIn('upload failed', p.stderr)
@@ -1369,7 +1381,7 @@ def test_error_ouput_quiet(self):
foo_txt = self.files.create_file('foo.txt', 'foo contents')

# Copy file into bucket.
p = aws('s3 cp %s s3://non-existant-bucket/ --quiet' % foo_txt)
p = aws(f's3 cp {foo_txt} s3://{_NON_EXISTENT_BUCKET}/ --quiet')
# Check that there were errors and that the error was not
# printed to stderr.
self.assertEqual(p.rc, 1)
@@ -1379,8 +1391,7 @@ def test_error_ouput_only_show_errors(self):
foo_txt = self.files.create_file('foo.txt', 'foo contents')

# Copy file into bucket.
p = aws('s3 cp %s s3://non-existant-bucket/ --only-show-errors'
% foo_txt)
p = aws(f's3 cp {foo_txt} s3://{_NON_EXISTENT_BUCKET}/ --only-show-errors')
# Check that there were errors and that the error was printed to stderr.
self.assertEqual(p.rc, 1)
self.assertIn('upload failed', p.stderr)
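One more note on the shared non-existent bucket: setup_module() guards against an unlikely name collision using S3's bucket_not_exists waiter. A rough standalone sketch of that check follows; the bucket name and client wiring here are illustrative assumptions, not part of the diff.

```python
# Rough sketch of the collision guard added in setup_module() above.
# The bucket_not_exists waiter polls HeadBucket and returns once the
# bucket is absent; if the randomly generated name somehow exists, it
# raises a WaiterError after exhausting its retry attempts.
import botocore.session
from botocore.exceptions import WaiterError

s3 = botocore.session.get_session().create_client(
    's3', region_name='us-west-2'  # matches _DEFAULT_REGION in the test module
)

bucket = 'awscli-s3integ-9f1c2b7a4d3e5f-1730678400'  # illustrative name
waiter = s3.get_waiter('bucket_not_exists')
try:
    waiter.wait(Bucket=bucket)
    print(f'{bucket} is safe to use as the shared non-existent bucket')
except WaiterError as err:
    print(f'unexpectedly found {bucket}: {err}')
```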
