Skip to content

Commit

Permalink
better handling
Browse files Browse the repository at this point in the history
  • Loading branch information
Georgi Rusev authored and Georgi Rusev committed Jan 2, 2025
1 parent 2009536 commit acc1ad3
Show file tree
Hide file tree
Showing 2 changed files with 55 additions and 38 deletions.
66 changes: 36 additions & 30 deletions python/tests/conftest.py
Original file line number Diff line number Diff line change
Expand Up @@ -24,9 +24,10 @@

from arcticdb import LibraryOptions
from arcticdb.storage_fixtures.api import StorageFixture
from arcticdb.storage_fixtures.azure import AzuriteStorageFixtureFactory
from arcticdb.storage_fixtures.azure import AzureContainer, AzuriteStorageFixtureFactory
from arcticdb.storage_fixtures.lmdb import LmdbStorageFixture
from arcticdb.storage_fixtures.s3 import (
BaseS3StorageFixtureFactory,
MotoS3StorageFixtureFactory,
MotoNfsBackedS3StorageFixtureFactory,
NfsS3Bucket,
Expand Down Expand Up @@ -75,7 +76,7 @@ def sym(request: pytest.FixtureRequest):


@pytest.fixture()
def lib_name(request: pytest.FixtureRequest) -> str:
    """Return a unique, storage-safe library name derived from the test name.

    Non-word characters are replaced with ``_`` and the name is capped at 30
    chars, then a random suffix plus a UTC timestamp are appended so that
    concurrent/parallel test runs do not collide on shared storage.
    """
    name = re.sub(r"[^\w]", "_", request.node.name)[:30]
    return f"{name}.{random.randint(0, 999)}_{datetime.utcnow().strftime('%Y-%m-%dT%H_%M_%S_%f')}"

Expand Down Expand Up @@ -112,7 +113,7 @@ def pytest_generate_tests(metafunc):
# endregion
# region ======================================= Storage Fixtures =======================================
@pytest.fixture
def lmdb_storage(tmp_path) -> Generator[LmdbStorageFixture, None, None]:
    """Yield an LMDB storage fixture rooted at pytest's per-test tmp_path."""
    with LmdbStorageFixture(tmp_path) as f:
        yield f

Expand All @@ -137,64 +138,64 @@ def lmdb_library_static_dynamic(request):

# ssl is enabled by default to maximize test coverage as ssl is enabled most of the times in real world
@pytest.fixture(scope="session")
def s3_storage_factory() -> Generator[MotoS3StorageFixtureFactory, None, None]:
    """Session-wide moto-backed S3 factory; SSL on when the platform supports it."""
    with MotoS3StorageFixtureFactory(
        use_ssl=SSL_TEST_SUPPORTED, ssl_test_support=SSL_TEST_SUPPORTED, bucket_versioning=False
    ) as f:
        yield f


@pytest.fixture(scope="session")
def s3_no_ssl_storage_factory() -> Generator[MotoS3StorageFixtureFactory, None, None]:
    """Session-wide moto S3 factory with SSL off (but SSL test support kept)."""
    with MotoS3StorageFixtureFactory(use_ssl=False, ssl_test_support=SSL_TEST_SUPPORTED, bucket_versioning=False) as f:
        yield f


@pytest.fixture(scope="session")
def s3_ssl_disabled_storage_factory() -> Generator[MotoS3StorageFixtureFactory, None, None]:
    """Session-wide moto S3 factory with SSL fully disabled (no SSL test support)."""
    with MotoS3StorageFixtureFactory(use_ssl=False, ssl_test_support=False, bucket_versioning=False) as f:
        yield f


@pytest.fixture(scope="session")
def s3_bucket_versioning_storage_factory() -> Generator[MotoS3StorageFixtureFactory, None, None]:
    """Session-wide moto S3 factory with bucket versioning enabled, SSL off."""
    with MotoS3StorageFixtureFactory(use_ssl=False, ssl_test_support=False, bucket_versioning=True) as f:
        yield f


@pytest.fixture(scope="session")
def nfs_backed_s3_storage_factory() -> Generator[MotoNfsBackedS3StorageFixtureFactory, None, None]:
    """Session-wide moto factory simulating NFS-backed S3 storage; SSL off."""
    with MotoNfsBackedS3StorageFixtureFactory(use_ssl=False, ssl_test_support=False, bucket_versioning=False) as f:
        yield f


@pytest.fixture
def s3_storage(s3_storage_factory) -> Generator[S3Bucket, None, None]:
    """Yield a fresh S3 bucket fixture from the session-wide moto factory."""
    with s3_storage_factory.create_fixture() as f:
        yield f


@pytest.fixture
def nfs_backed_s3_storage(nfs_backed_s3_storage_factory) -> Generator[NfsS3Bucket, None, None]:
    """Yield a fresh NFS-backed S3 bucket fixture from the session-wide factory."""
    with nfs_backed_s3_storage_factory.create_fixture() as f:
        yield f


@pytest.fixture
def s3_no_ssl_storage(s3_no_ssl_storage_factory) -> Generator[S3Bucket, None, None]:
    """Yield a fresh S3 bucket fixture with SSL off."""
    with s3_no_ssl_storage_factory.create_fixture() as f:
        yield f


@pytest.fixture
def s3_ssl_disabled_storage(s3_ssl_disabled_storage_factory) -> Generator[S3Bucket, None, None]:
    """Yield a fresh S3 bucket fixture with SSL fully disabled."""
    with s3_ssl_disabled_storage_factory.create_fixture() as f:
        yield f


# s3 storage is picked just for its versioning capabilities for verifying arcticdb atomicity
@pytest.fixture
def s3_bucket_versioning_storage(s3_bucket_versioning_storage_factory) -> Iterator[S3Bucket]:
def s3_bucket_versioning_storage(s3_bucket_versioning_storage_factory) -> Generator[S3Bucket, None, None]:
with s3_bucket_versioning_storage_factory.create_fixture() as f:
s3_admin = f.factory._s3_admin
bucket = f.bucket
Expand All @@ -214,26 +215,31 @@ def mock_s3_storage_with_error_simulation(mock_s3_storage_with_error_simulation_


@pytest.fixture(scope="session")
def real_s3_storage_factory() -> BaseS3StorageFixtureFactory:
    """Session-wide factory for a real (non-shared-path) S3 bucket.

    A random + UTC-timestamp suffix keeps concurrent CI runs isolated.
    """
    return real_s3_from_environment_variables(
        shared_path=False,
        additional_suffix=f"{random.randint(0, 999)}_{datetime.utcnow().strftime('%Y-%m-%dT%H_%M_%S_%f')}",
    )


@pytest.fixture(scope="session")
def real_s3_shared_path_storage_factory() -> BaseS3StorageFixtureFactory:
    """Session-wide factory for a real S3 bucket using a shared path.

    A random + UTC-timestamp suffix keeps concurrent CI runs isolated.
    """
    return real_s3_from_environment_variables(
        shared_path=True,
        additional_suffix=f"{random.randint(0, 999)}_{datetime.utcnow().strftime('%Y-%m-%dT%H_%M_%S_%f')}",
    )


@pytest.fixture(scope="session")
def real_s3_storage_without_clean_up(real_s3_shared_path_storage_factory) -> S3Bucket:
    """Real shared-path S3 fixture that is NOT torn down (no context manager)."""
    return real_s3_shared_path_storage_factory.create_fixture()


@pytest.fixture
def real_s3_storage(real_s3_storage_factory) -> Generator[S3Bucket, None, None]:
    """Yield a real S3 bucket fixture, cleaned up when the test finishes."""
    with real_s3_storage_factory.create_fixture() as f:
        yield f


@pytest.fixture
def real_s3_library(real_s3_storage, lib_name) -> Library:
    """Create and return a uniquely named Arctic library on real S3."""
    arctic_client = real_s3_storage.create_arctic()
    return arctic_client.create_library(lib_name)


@pytest.fixture(scope="session") # Config loaded at the first ArcticDB binary import, so we need to set it up before any tests
def real_s3_sts_storage_factory():
sts_test_credentials_prefix = os.getenv("ARCTICDB_REAL_S3_STS_TEST_CREDENTIALS_POSTFIX", f"{random.randint(0, 999)}_{datetime.utcnow().strftime('%Y-%m-%dT%H_%M_%S_%f')}")
Expand All @@ -258,32 +264,32 @@ def real_s3_sts_storage_factory():


@pytest.fixture
def real_s3_sts_storage(real_s3_sts_storage_factory) -> Generator[S3Bucket, None, None]:
    """Yield an STS-authenticated real S3 bucket fixture.

    NOTE(review): previously annotated as yielding the *factory* type, but the
    body yields the result of ``create_fixture()`` — the same pattern as
    ``real_s3_storage``, which yields an ``S3Bucket``. Annotated accordingly;
    confirm against the STS factory implementation.
    """
    with real_s3_sts_storage_factory.create_fixture() as f:
        yield f


# ssl cannot be ON by default due to azurite performance constraints https://github.com/man-group/ArcticDB/issues/1539
@pytest.fixture(scope="session")
def azurite_storage_factory() -> Generator[AzuriteStorageFixtureFactory, None, None]:
    """Session-wide Azurite (Azure emulator) factory with SSL off."""
    with AzuriteStorageFixtureFactory(use_ssl=False, ssl_test_support=SSL_TEST_SUPPORTED) as f:
        yield f


@pytest.fixture
def azurite_storage(azurite_storage_factory: AzuriteStorageFixtureFactory) -> Generator[AzureContainer, None, None]:
    """Yield a fresh Azurite container fixture (non-SSL factory)."""
    with azurite_storage_factory.create_fixture() as f:
        yield f


@pytest.fixture(scope="session")
def azurite_ssl_storage_factory() -> Generator[AzuriteStorageFixtureFactory, None, None]:
    """Session-wide Azurite factory with SSL on (where supported)."""
    with AzuriteStorageFixtureFactory(use_ssl=True, ssl_test_support=SSL_TEST_SUPPORTED) as f:
        yield f


@pytest.fixture
def azurite_ssl_storage(azurite_ssl_storage_factory: AzuriteStorageFixtureFactory) -> Generator[AzureContainer, None, None]:
    """Yield a fresh Azurite container fixture from the SSL-enabled factory."""
    with azurite_ssl_storage_factory.create_fixture() as f:
        yield f

Expand All @@ -301,7 +307,7 @@ def mongo_storage(mongo_server):


@pytest.fixture
def mem_storage() -> Generator[InMemoryStorageFixture, None, None]:
    """Yield an in-memory storage fixture (no external services needed)."""
    with InMemoryStorageFixture() as f:
        yield f

Expand Down Expand Up @@ -357,12 +363,12 @@ def arctic_client_lmdb(request, encoding_version) -> Arctic:


@pytest.fixture
def arctic_library(arctic_client, lib_name) -> Library:
    """Create and return a uniquely named library on the parametrized client."""
    return arctic_client.create_library(lib_name)


@pytest.fixture
def arctic_library_lmdb(arctic_client_lmdb, lib_name) -> Library:
    """Create and return a uniquely named library on the LMDB-backed client."""
    return arctic_client_lmdb.create_library(lib_name)


Expand Down Expand Up @@ -418,17 +424,17 @@ def s3_store_factory_mock_storage_exception(lib_name, s3_storage):


@pytest.fixture
def s3_store_factory(lib_name, s3_storage) -> Callable[..., NativeVersionStore]:
    """Return a factory callable producing NativeVersionStore instances on S3.

    Annotated as ``Callable[..., NativeVersionStore]`` (not the store itself),
    consistent with ``real_s3_store_factory`` — the value returned is a factory.
    """
    return s3_storage.create_version_store_factory(lib_name)


@pytest.fixture
def s3_no_ssl_store_factory(lib_name, s3_no_ssl_storage) -> Callable[..., NativeVersionStore]:
    """Return a NativeVersionStore factory backed by non-SSL S3 (see ``real_s3_store_factory`` for the annotation convention)."""
    return s3_no_ssl_storage.create_version_store_factory(lib_name)


@pytest.fixture
def mock_s3_store_with_error_simulation_factory(
    lib_name, mock_s3_storage_with_error_simulation
) -> Callable[..., NativeVersionStore]:
    """Return a NativeVersionStore factory backed by the error-simulating mock S3."""
    return mock_s3_storage_with_error_simulation.create_version_store_factory(lib_name)


Expand All @@ -438,12 +444,12 @@ def real_s3_store_factory(lib_name, real_s3_storage) -> Callable[..., NativeVers


@pytest.fixture
def real_s3_sts_store_factory(lib_name, real_s3_sts_storage) -> Callable[..., NativeVersionStore]:
    """Return a NativeVersionStore factory backed by STS-authenticated real S3."""
    return real_s3_sts_storage.create_version_store_factory(lib_name)


@pytest.fixture
def azure_store_factory(lib_name, azurite_storage) -> Callable[..., NativeVersionStore]:
    """Return a NativeVersionStore factory backed by the Azurite container."""
    return azurite_storage.create_version_store_factory(lib_name)


Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -40,10 +40,10 @@ def __init__(self):
def __str__(self):
    """One-line-per-metric summary of a finalization stress iteration's results."""
    return f"Options: {self.options}\nIteration: {self.iteration}\n# staged chunks: {self.number_staged_chunks}\ntotal rows finalized: {self.total_rows_finalized}\ntime for finalization (s): {self.finalization_time}"

@SLOW_TESTS_MARK
@SKIP_CONDA_MARK # Conda CI runner doesn't have enough storage to perform these stress tests
@pytest.mark.skipif(sys.platform == "win32", reason="Not enough storage on Windows runners")
def test_finalize_monotonic_unique_chunks(basic_arctic_library):



def finalize_monotonic_unique_chunks(ac_library, iterations):
"""
The test is designed to staged thousands of chunks with variable chunk size.
To experiment on local computer you can move up to 20k number of chunks approx 10k each
Expand All @@ -65,7 +65,7 @@ def test_finalize_monotonic_unique_chunks(basic_arctic_library):
# Will hold the results after each iteration (instance of Results class)
results = []

lib : Library = basic_arctic_library
lib : Library = ac_library

total_start_time = time.time()

Expand All @@ -85,9 +85,6 @@ def test_finalize_monotonic_unique_chunks(basic_arctic_library):
print(f"Writing to symbol initially {num_rows_initially} rows")
df = cachedDF.generate_dataframe_timestamp_indexed(num_rows_initially, total_number_rows, cachedDF.TIME_UNIT)

iterations = [500, 1000, 1500, 2000]
if ("amazonaws" in lib.arctic_instance_desc.lower()):
iterations = [x // 10 for x in iterations]
cnt = 0
for iter in iterations :
res = Results()
Expand Down Expand Up @@ -141,3 +138,17 @@ def test_finalize_monotonic_unique_chunks(basic_arctic_library):
print("TOTAL TIME: ", total_time)


@SLOW_TESTS_MARK
@SKIP_CONDA_MARK  # Conda CI runner doesn't have enough storage to perform these stress tests
@pytest.mark.skipif(sys.platform == "win32", reason="Not enough storage on Windows runners")
def test_finalize_monotonic_unique_chunks_lmdb(lmdb_library):
    """Run the staged-chunk finalization stress scenario against LMDB."""
    chunk_iterations = [500, 1000, 1500, 2000]
    finalize_monotonic_unique_chunks(lmdb_library, chunk_iterations)


@SLOW_TESTS_MARK
@SKIP_CONDA_MARK  # Conda CI runner doesn't have enough storage to perform these stress tests
@pytest.mark.skipif(sys.platform == "win32", reason="Not enough storage on Windows runners")
def test_finalize_monotonic_unique_chunks_realS3(real_s3_library):
    """Run the staged-chunk finalization stress scenario against real S3."""
    # 10x fewer chunks than the LMDB variant of this test.
    chunk_iterations = [50, 100, 150, 200]
    finalize_monotonic_unique_chunks(real_s3_library, chunk_iterations)


0 comments on commit acc1ad3

Please sign in to comment.