diff --git a/packages/models-library/src/models_library/progress_bar.py b/packages/models-library/src/models_library/progress_bar.py index 90232847bbc..ad8130570e5 100644 --- a/packages/models-library/src/models_library/progress_bar.py +++ b/packages/models-library/src/models_library/progress_bar.py @@ -2,14 +2,12 @@ from pydantic import BaseModel, ConfigDict -from .basic_types import IDStr - # NOTE: keep a list of possible unit, and please use correct official unit names ProgressUnit: TypeAlias = Literal["Byte"] class ProgressStructuredMessage(BaseModel): - description: IDStr + description: str current: float total: int unit: str | None = None @@ -51,6 +49,7 @@ class ProgressStructuredMessage(BaseModel): class ProgressReport(BaseModel): actual_value: float total: float = 1.0 + attempt: int = 0 unit: ProgressUnit | None = UNITLESS message: ProgressStructuredMessage | None = None diff --git a/packages/service-library/src/servicelib/archiving_utils/_interface_7zip.py b/packages/service-library/src/servicelib/archiving_utils/_interface_7zip.py index c14b25726a5..1e642895f1d 100644 --- a/packages/service-library/src/servicelib/archiving_utils/_interface_7zip.py +++ b/packages/service-library/src/servicelib/archiving_utils/_interface_7zip.py @@ -10,7 +10,6 @@ from typing import Final import tqdm -from models_library.basic_types import IDStr from pydantic import NonNegativeInt from servicelib.logging_utils import log_catch from tqdm.contrib.logging import tqdm_logging_redirect @@ -199,7 +198,7 @@ async def archive_dir( ) -> None: if progress_bar is None: progress_bar = ProgressBarData( - num_steps=1, description=IDStr(f"compressing {dir_to_compress.name}") + num_steps=1, description=f"compressing {dir_to_compress.name}" ) options = " ".join( @@ -223,7 +222,7 @@ async def archive_dir( async with AsyncExitStack() as exit_stack: sub_progress = await exit_stack.enter_async_context( - progress_bar.sub_progress(folder_size_bytes, description=IDStr("...")) + 
progress_bar.sub_progress(folder_size_bytes, description="...") ) tqdm_progress = exit_stack.enter_context( @@ -290,7 +289,7 @@ async def unarchive_dir( ) -> set[Path]: if progress_bar is None: progress_bar = ProgressBarData( - num_steps=1, description=IDStr(f"extracting {archive_to_extract.name}") + num_steps=1, description=f"extracting {archive_to_extract.name}" ) # get archive information @@ -304,7 +303,7 @@ async def unarchive_dir( async with AsyncExitStack() as exit_stack: sub_prog = await exit_stack.enter_async_context( - progress_bar.sub_progress(steps=total_bytes, description=IDStr("...")) + progress_bar.sub_progress(steps=total_bytes, description="...") ) tqdm_progress = exit_stack.enter_context( diff --git a/packages/service-library/src/servicelib/docker_utils.py b/packages/service-library/src/servicelib/docker_utils.py index 2ce1fab2fb5..b85369bd4ad 100644 --- a/packages/service-library/src/servicelib/docker_utils.py +++ b/packages/service-library/src/servicelib/docker_utils.py @@ -1,3 +1,4 @@ +import asyncio import logging from collections.abc import Awaitable, Callable from contextlib import AsyncExitStack @@ -11,8 +12,21 @@ from models_library.docker import DockerGenericTag from models_library.generated_models.docker_rest_api import ProgressDetail from models_library.utils.change_case import snake_to_camel -from pydantic import BaseModel, ByteSize, ConfigDict, TypeAdapter, ValidationError +from pydantic import ( + BaseModel, + ByteSize, + ConfigDict, + NonNegativeInt, + TypeAdapter, + ValidationError, +) from settings_library.docker_registry import RegistrySettings +from tenacity import ( + retry, + retry_if_exception_type, + stop_after_attempt, + wait_random_exponential, +) from yarl import URL from .logging_utils import LogLevelInt @@ -209,6 +223,8 @@ async def pull_image( progress_bar: ProgressBarData, log_cb: LogCB, image_information: DockerImageManifestsV2 | None, + *, + retry_upon_error_count: NonNegativeInt = 10, ) -> None: """pull a docker 
image to the host machine. @@ -219,7 +235,9 @@ async def pull_image( progress_bar -- the current progress bar log_cb -- a callback function to send logs to image_information -- the image layer information. If this is None, then no fine progress will be retrieved. + retry_upon_error_count -- number of tries if there is a TimeoutError. Usually caused by networking issues. """ + registry_auth = None if registry_settings.REGISTRY_URL and registry_settings.REGISTRY_URL in image: registry_auth = { @@ -245,39 +263,59 @@ async def pull_image( client = await exit_stack.enter_async_context(aiodocker.Docker()) - reported_progress = 0.0 - async for pull_progress in client.images.pull( - image, stream=True, auth=registry_auth - ): - try: - parsed_progress = TypeAdapter(_DockerPullImage).validate_python( - pull_progress + def _reset_progress_from_previous_attempt() -> None: + for pulled_status in layer_id_to_size.values(): + pulled_status.downloaded = 0 + pulled_status.extracted = 0 + + @retry( + wait=wait_random_exponential(), + stop=stop_after_attempt(retry_upon_error_count), + reraise=True, + retry=retry_if_exception_type(asyncio.TimeoutError), + ) + async def _pull_image_with_retry() -> None: + # for each attempt reset the progress + progress_bar.reset_progress() + _reset_progress_from_previous_attempt() + + _logger.info("trying to pull image='%s'", image) + + reported_progress = 0.0 + async for pull_progress in client.images.pull( + image, stream=True, auth=registry_auth + ): + try: + parsed_progress = TypeAdapter(_DockerPullImage).validate_python( + pull_progress + ) + except ValidationError: + _logger.exception( + "Unexpected error while validating '%s'. " + "TIP: This is probably an unforeseen pull status text that shall be added to the code. 
" + "The pulling process will still continue.", + f"{pull_progress=}", + ) + else: + await _parse_pull_information( + parsed_progress, layer_id_to_size=layer_id_to_size + ) + + # compute total progress + total_downloaded_size = sum( + layer.downloaded for layer in layer_id_to_size.values() ) - except ValidationError: - _logger.exception( - "Unexpected error while validating '%s'. " - "TIP: This is probably an unforeseen pull status text that shall be added to the code. " - "The pulling process will still continue.", - f"{pull_progress=}", + total_extracted_size = sum( + layer.extracted for layer in layer_id_to_size.values() ) - else: - await _parse_pull_information( - parsed_progress, layer_id_to_size=layer_id_to_size + total_progress = (total_downloaded_size + total_extracted_size) / 2.0 + progress_to_report = total_progress - reported_progress + await progress_bar.update(progress_to_report) + reported_progress = total_progress + + await log_cb( + f"pulling {image_short_name}: {pull_progress}...", + logging.DEBUG, ) - # compute total progress - total_downloaded_size = sum( - layer.downloaded for layer in layer_id_to_size.values() - ) - total_extracted_size = sum( - layer.extracted for layer in layer_id_to_size.values() - ) - total_progress = (total_downloaded_size + total_extracted_size) / 2.0 - progress_to_report = total_progress - reported_progress - await progress_bar.update(progress_to_report) - reported_progress = total_progress - - await log_cb( - f"pulling {image_short_name}: {pull_progress}...", - logging.DEBUG, - ) + await _pull_image_with_retry() diff --git a/packages/service-library/src/servicelib/fastapi/docker_utils.py b/packages/service-library/src/servicelib/fastapi/docker_utils.py index 420c1418873..20900916963 100644 --- a/packages/service-library/src/servicelib/fastapi/docker_utils.py +++ b/packages/service-library/src/servicelib/fastapi/docker_utils.py @@ -3,7 +3,6 @@ from typing import Final import httpx -from models_library.basic_types import 
IDStr from models_library.docker import DockerGenericTag from pydantic import ByteSize, TypeAdapter, ValidationError from settings_library.docker_registry import RegistrySettings @@ -129,9 +128,7 @@ async def pull_images( num_steps=images_total_size, progress_report_cb=progress_cb, progress_unit="Byte", - description=IDStr( - f"pulling {len(images)} images", - ), + description=f"pulling {len(images)} images", ) as pbar: await asyncio.gather( diff --git a/packages/service-library/src/servicelib/progress_bar.py b/packages/service-library/src/servicelib/progress_bar.py index bf70c0c3e88..d549488e619 100644 --- a/packages/service-library/src/servicelib/progress_bar.py +++ b/packages/service-library/src/servicelib/progress_bar.py @@ -4,7 +4,6 @@ from inspect import isawaitable from typing import Final, Optional, Protocol, runtime_checkable -from models_library.basic_types import IDStr from models_library.progress_bar import ( ProgressReport, ProgressStructuredMessage, @@ -84,10 +83,11 @@ async def main_fct(): "description": "Optionally defines the step relative weight (defaults to steps of equal weights)" }, ) - description: IDStr = field(metadata={"description": "define the progress name"}) + description: str = field(metadata={"description": "define the progress name"}) progress_unit: ProgressUnit | None = None progress_report_cb: AsyncReportCB | ReportCB | None = None _current_steps: float = _INITIAL_VALUE + _current_attempt: int = 0 _children: list["ProgressBarData"] = field(default_factory=list) _parent: Optional["ProgressBarData"] = None _continuous_value_lock: asyncio.Lock = field(init=False) @@ -147,6 +147,7 @@ async def _report_external(self, value: float) -> None: # NOTE: here we convert back to actual value since this is possibly weighted actual_value=value * self.num_steps, total=self.num_steps, + attempt=self._current_attempt, unit=self.progress_unit, message=self.compute_report_message_stuct(), ), @@ -197,6 +198,11 @@ async def update(self, steps: float = 
1) -> None: await self._update_parent(parent_update_value) await self._report_external(new_progress_value) + def reset_progress(self) -> None: + self._current_attempt += 1 + self._current_steps = _INITIAL_VALUE + self._last_report_value = _INITIAL_VALUE + async def set_(self, new_value: float) -> None: await self.update(new_value - self._current_steps) @@ -207,7 +213,7 @@ async def finish(self) -> None: def sub_progress( self, steps: int, - description: IDStr, + description: str, step_weights: list[float] | None = None, progress_unit: ProgressUnit | None = None, ) -> "ProgressBarData": diff --git a/packages/service-library/tests/test_progress_bar.py b/packages/service-library/tests/test_progress_bar.py index 6c1b7a2756b..8431d282549 100644 --- a/packages/service-library/tests/test_progress_bar.py +++ b/packages/service-library/tests/test_progress_bar.py @@ -262,6 +262,33 @@ async def test_set_progress(caplog: pytest.LogCaptureFixture, faker: Faker): assert "TIP:" in caplog.messages[0] +async def test_reset_progress(caplog: pytest.LogCaptureFixture, faker: Faker): + async with ProgressBarData(num_steps=50, description=faker.pystr()) as root: + assert root._current_steps == pytest.approx(0) # noqa: SLF001 + assert root.num_steps == 50 + assert root.step_weights is None + await root.set_(50) + assert root._current_steps == pytest.approx(50) # noqa: SLF001 + assert "already reached maximum" not in caplog.text + await root.set_(51) + assert root._current_steps == pytest.approx(50) # noqa: SLF001 + assert "already reached maximum" in caplog.text + + caplog.clear() + root.reset_progress() + + assert root._current_steps == pytest.approx(-1) # noqa: SLF001 + assert "already reached maximum" not in caplog.text + + await root.set_(12) + assert root._current_steps == pytest.approx(12) # noqa: SLF001 + assert "already reached maximum" not in caplog.text + + await root.set_(51) + assert root._current_steps == pytest.approx(50) # noqa: SLF001 + assert "already reached maximum" in 
caplog.text + + async def test_concurrent_progress_bar(faker: Faker): async def do_something(root: ProgressBarData): async with root.sub_progress(steps=50, description=faker.pystr()) as sub: diff --git a/packages/simcore-sdk/src/simcore_sdk/node_data/data_manager.py b/packages/simcore-sdk/src/simcore_sdk/node_data/data_manager.py index b288a295db9..26873e9ec44 100644 --- a/packages/simcore-sdk/src/simcore_sdk/node_data/data_manager.py +++ b/packages/simcore-sdk/src/simcore_sdk/node_data/data_manager.py @@ -2,7 +2,6 @@ from pathlib import Path from tempfile import TemporaryDirectory -from models_library.basic_types import IDStr from models_library.projects import ProjectID from models_library.projects_nodes_io import NodeID, StorageFileID from models_library.users import UserID @@ -105,7 +104,7 @@ async def _pull_legacy_archive( ) -> None: # NOTE: the legacy way of storing states was as zip archives async with progress_bar.sub_progress( - steps=2, description=IDStr(f"pulling {destination_path.name}") + steps=2, description=f"pulling {destination_path.name}" ) as sub_prog: with TemporaryDirectory() as tmp_dir_name: archive_file = Path(tmp_dir_name) / __get_s3_name( diff --git a/packages/simcore-sdk/src/simcore_sdk/node_ports_common/aws_s3_cli.py b/packages/simcore-sdk/src/simcore_sdk/node_ports_common/aws_s3_cli.py index 320cfd7e25f..d64f0d90355 100644 --- a/packages/simcore-sdk/src/simcore_sdk/node_ports_common/aws_s3_cli.py +++ b/packages/simcore-sdk/src/simcore_sdk/node_ports_common/aws_s3_cli.py @@ -6,10 +6,8 @@ from asyncio.streams import StreamReader from pathlib import Path -from common_library.errors_classes import OsparcErrorMixin - from aiocache import cached # type: ignore[import-untyped] -from models_library.basic_types import IDStr +from common_library.errors_classes import OsparcErrorMixin from pydantic import AnyUrl, ByteSize from servicelib.progress_bar import ProgressBarData from servicelib.utils import logged_gather @@ -242,7 +240,7 @@ async def 
_sync_sources( async with progress_bar.sub_progress( steps=folder_size, progress_unit="Byte", - description=IDStr(f"transferring {local_dir.name}"), + description=f"transferring {local_dir.name}", ) as sub_progress: aws_s3_cli_log_parsers: list[BaseLogParser] = ( [DebugLogParser()] if debug_logs else [] diff --git a/packages/simcore-sdk/src/simcore_sdk/node_ports_common/file_io_utils.py b/packages/simcore-sdk/src/simcore_sdk/node_ports_common/file_io_utils.py index 55e1545881d..b03248e1786 100644 --- a/packages/simcore-sdk/src/simcore_sdk/node_ports_common/file_io_utils.py +++ b/packages/simcore-sdk/src/simcore_sdk/node_ports_common/file_io_utils.py @@ -18,7 +18,7 @@ RequestInfo, ) from models_library.api_schemas_storage import ETag, FileUploadSchema, UploadedPart -from models_library.basic_types import IDStr, SHA256Str +from models_library.basic_types import SHA256Str from multidict import MultiMapping from pydantic import AnyUrl, NonNegativeInt from servicelib.aiohttp import status @@ -216,7 +216,7 @@ async def download_link_to_file( sub_progress = await stack.enter_async_context( progress_bar.sub_progress( steps=file_size or 1, - description=IDStr(f"downloading {file_path.name}"), + description=f"downloading {file_path.name}", ) ) @@ -400,7 +400,7 @@ async def upload_file_to_presigned_links( ) sub_progress = await stack.enter_async_context( progress_bar.sub_progress( - steps=file_size, description=IDStr(f"uploading {file_name}") + steps=file_size, description=f"uploading {file_name}" ) ) diff --git a/packages/simcore-sdk/src/simcore_sdk/node_ports_common/filemanager.py b/packages/simcore-sdk/src/simcore_sdk/node_ports_common/filemanager.py index b7180877037..7fc02f49a86 100644 --- a/packages/simcore-sdk/src/simcore_sdk/node_ports_common/filemanager.py +++ b/packages/simcore-sdk/src/simcore_sdk/node_ports_common/filemanager.py @@ -12,7 +12,7 @@ LinkType, UploadedPart, ) -from models_library.basic_types import IDStr, SHA256Str +from models_library.basic_types 
import SHA256Str from models_library.projects_nodes_io import LocationID, LocationName, StorageFileID from models_library.users import UserID from pydantic import AnyUrl, ByteSize, TypeAdapter @@ -364,7 +364,7 @@ async def _upload_path( # pylint: disable=too-many-arguments ) if not progress_bar: - progress_bar = ProgressBarData(num_steps=1, description=IDStr("uploading")) + progress_bar = ProgressBarData(num_steps=1, description="uploading") is_directory: bool = isinstance(path_to_upload, Path) and path_to_upload.is_dir() if ( diff --git a/packages/simcore-sdk/src/simcore_sdk/node_ports_common/r_clone.py b/packages/simcore-sdk/src/simcore_sdk/node_ports_common/r_clone.py index 0283a11fe26..db5e107b753 100644 --- a/packages/simcore-sdk/src/simcore_sdk/node_ports_common/r_clone.py +++ b/packages/simcore-sdk/src/simcore_sdk/node_ports_common/r_clone.py @@ -11,7 +11,6 @@ from aiocache import cached # type: ignore[import-untyped] from aiofiles import tempfile from common_library.errors_classes import OsparcErrorMixin -from models_library.basic_types import IDStr from pydantic import AnyUrl, BaseModel, ByteSize from servicelib.progress_bar import ProgressBarData from servicelib.utils import logged_gather @@ -224,7 +223,7 @@ async def _sync_sources( async with progress_bar.sub_progress( steps=folder_size, progress_unit="Byte", - description=IDStr(f"transferring {local_dir.name}"), + description=f"transferring {local_dir.name}", ) as sub_progress: r_clone_log_parsers: list[BaseLogParser] = ( [DebugLogParser()] if debug_logs else [] diff --git a/packages/simcore-sdk/src/simcore_sdk/node_ports_v2/nodeports_v2.py b/packages/simcore-sdk/src/simcore_sdk/node_ports_v2/nodeports_v2.py index bc44698a593..8d4d81e00d2 100644 --- a/packages/simcore-sdk/src/simcore_sdk/node_ports_v2/nodeports_v2.py +++ b/packages/simcore-sdk/src/simcore_sdk/node_ports_v2/nodeports_v2.py @@ -7,7 +7,6 @@ from typing import Any from models_library.api_schemas_storage import LinkType -from 
models_library.basic_types import IDStr from models_library.projects import ProjectIDStr from models_library.projects_nodes_io import NodeIDStr from models_library.services_types import ServicePortKey @@ -229,7 +228,7 @@ async def _set_with_notifications( tasks = [] async with progress_bar.sub_progress( - steps=len(port_values.items()), description=IDStr("set multiple") + steps=len(port_values.items()), description="set multiple" ) as sub_progress: for port_key, (value, set_kwargs) in port_values.items(): tasks.append( diff --git a/packages/simcore-sdk/src/simcore_sdk/node_ports_v2/port.py b/packages/simcore-sdk/src/simcore_sdk/node_ports_v2/port.py index 3ddab6a29d3..ee67f08dad7 100644 --- a/packages/simcore-sdk/src/simcore_sdk/node_ports_v2/port.py +++ b/packages/simcore-sdk/src/simcore_sdk/node_ports_v2/port.py @@ -7,7 +7,6 @@ from typing import Any from models_library.api_schemas_storage import LinkType -from models_library.basic_types import IDStr from models_library.services_io import BaseServiceIOModel from models_library.services_types import ServicePortKey from pydantic import ( @@ -384,7 +383,7 @@ async def set( new_concrete_value=new_value, **set_kwargs, progress_bar=progress_bar - or ProgressBarData(num_steps=1, description=IDStr("set")), + or ProgressBarData(num_steps=1, description="set"), ) await self._node_ports.save_to_db_cb(self._node_ports) diff --git a/packages/simcore-sdk/src/simcore_sdk/node_ports_v2/port_utils.py b/packages/simcore-sdk/src/simcore_sdk/node_ports_v2/port_utils.py index 3c1462d6fab..ee44e3a1bb3 100644 --- a/packages/simcore-sdk/src/simcore_sdk/node_ports_v2/port_utils.py +++ b/packages/simcore-sdk/src/simcore_sdk/node_ports_v2/port_utils.py @@ -5,7 +5,7 @@ from typing import Any from models_library.api_schemas_storage import FileUploadSchema, LinkType -from models_library.basic_types import IDStr, SHA256Str +from models_library.basic_types import SHA256Str from models_library.services_types import FileName, ServicePortKey from 
models_library.users import UserID from pydantic import AnyUrl, ByteSize, TypeAdapter @@ -204,7 +204,7 @@ async def pull_file_from_store( io_log_redirect_cb=io_log_redirect_cb, r_clone_settings=r_clone_settings, progress_bar=progress_bar - or ProgressBarData(num_steps=1, description=IDStr("pulling file")), + or ProgressBarData(num_steps=1, description="pulling file"), aws_s3_cli_settings=aws_s3_cli_settings, ) # if a file alias is present use it to rename the file accordingly @@ -281,7 +281,7 @@ async def pull_file_from_download_link( local_path, io_log_redirect_cb=io_log_redirect_cb, progress_bar=progress_bar - or ProgressBarData(num_steps=1, description=IDStr("pulling file")), + or ProgressBarData(num_steps=1, description="pulling file"), ) # if a file alias is present use it to rename the file accordingly diff --git a/packages/simcore-sdk/tests/integration/test_node_data_data_manager.py b/packages/simcore-sdk/tests/integration/test_node_data_data_manager.py index ed2033813e1..8db4e908b68 100644 --- a/packages/simcore-sdk/tests/integration/test_node_data_data_manager.py +++ b/packages/simcore-sdk/tests/integration/test_node_data_data_manager.py @@ -14,7 +14,6 @@ import pytest from faker import Faker -from models_library.basic_types import IDStr from models_library.projects import ProjectID from models_library.projects_nodes_io import NodeID, SimcoreS3FileID from models_library.users import UserID @@ -158,9 +157,7 @@ async def test_valid_upload_download( mock_io_log_redirect_cb: LogRedirectCB, faker: Faker, ): - async with ProgressBarData( - num_steps=2, description=IDStr(faker.pystr()) - ) as progress_bar: + async with ProgressBarData(num_steps=2, description=faker.pystr()) as progress_bar: await data_manager._push_directory( # noqa: SLF001 user_id=user_id, project_id=project_id, @@ -206,9 +203,7 @@ async def test_valid_upload_download_saved_to( mock_io_log_redirect_cb: LogRedirectCB, faker: Faker, ): - async with ProgressBarData( - num_steps=2, 
description=IDStr(faker.pystr()) - ) as progress_bar: + async with ProgressBarData(num_steps=2, description=faker.pystr()) as progress_bar: await data_manager._push_directory( # noqa: SLF001 user_id=user_id, project_id=project_id, @@ -256,9 +251,7 @@ async def test_delete_legacy_archive( temp_dir: Path, faker: Faker, ): - async with ProgressBarData( - num_steps=2, description=IDStr(faker.pystr()) - ) as progress_bar: + async with ProgressBarData(num_steps=2, description=faker.pystr()) as progress_bar: # NOTE: legacy archives can no longer be crated # generating a "legacy style archive" archive_into_dir = temp_dir / f"legacy-archive-dir-{uuid4()}" diff --git a/packages/simcore-sdk/tests/integration/test_node_ports_common_aws_s3_cli.py b/packages/simcore-sdk/tests/integration/test_node_ports_common_aws_s3_cli.py index 717a428a1ed..9b69d8d7f9b 100644 --- a/packages/simcore-sdk/tests/integration/test_node_ports_common_aws_s3_cli.py +++ b/packages/simcore-sdk/tests/integration/test_node_ports_common_aws_s3_cli.py @@ -14,7 +14,6 @@ import aiofiles import pytest from faker import Faker -from models_library.basic_types import IDStr from models_library.progress_bar import ProgressReport from pydantic import AnyUrl, ByteSize, TypeAdapter from servicelib.file_utils import remove_directory @@ -150,7 +149,7 @@ async def _report_progress_upload(report: ProgressReport) -> None: async with ProgressBarData( num_steps=1, progress_report_cb=_report_progress_upload, - description=IDStr(faker.pystr()), + description=faker.pystr(), ) as progress_bar: await aws_s3_cli.sync_local_to_s3( aws_s3_cli_settings, @@ -177,7 +176,7 @@ async def _report_progress_download(report: ProgressReport) -> None: async with ProgressBarData( num_steps=1, progress_report_cb=_report_progress_download, - description=IDStr(faker.pystr()), + description=faker.pystr(), ) as progress_bar: await aws_s3_cli.sync_s3_to_local( aws_s3_cli_settings, diff --git 
a/packages/simcore-sdk/tests/integration/test_node_ports_common_filemanager.py b/packages/simcore-sdk/tests/integration/test_node_ports_common_filemanager.py index 2e435d68a18..81d10a1e91b 100644 --- a/packages/simcore-sdk/tests/integration/test_node_ports_common_filemanager.py +++ b/packages/simcore-sdk/tests/integration/test_node_ports_common_filemanager.py @@ -13,7 +13,6 @@ import pytest from aiohttp import ClientError from faker import Faker -from models_library.basic_types import IDStr from models_library.projects_nodes_io import ( LocationID, SimcoreS3DirectoryID, @@ -102,9 +101,7 @@ async def test_valid_upload_download( file_path = create_file_of_size(file_size, "test.test") file_id = create_valid_file_uuid("", file_path) - async with ProgressBarData( - num_steps=2, description=IDStr(faker.pystr()) - ) as progress_bar: + async with ProgressBarData(num_steps=2, description=faker.pystr()) as progress_bar: upload_result: UploadedFolder | UploadedFile = await filemanager.upload_path( user_id=user_id, store_id=s3_simcore_location, @@ -192,9 +189,7 @@ async def test_valid_upload_download_using_file_object( assert file_metadata.etag == e_tag download_folder = Path(tmpdir) / "downloads" - async with ProgressBarData( - num_steps=1, description=IDStr(faker.pystr()) - ) as progress_bar: + async with ProgressBarData(num_steps=1, description=faker.pystr()) as progress_bar: download_file_path = await filemanager.download_path_from_s3( user_id=user_id, store_id=s3_simcore_location, @@ -365,7 +360,7 @@ async def test_invalid_file_path( download_folder = Path(tmpdir) / "downloads" with pytest.raises(exceptions.S3InvalidPathError): async with ProgressBarData( - num_steps=1, description=IDStr(faker.pystr()) + num_steps=1, description=faker.pystr() ) as progress_bar: await filemanager.download_path_from_s3( user_id=user_id, @@ -419,7 +414,7 @@ async def test_errors_upon_invalid_file_identifiers( download_folder = Path(tmpdir) / "downloads" with 
pytest.raises(exceptions.S3InvalidPathError): # noqa: PT012 async with ProgressBarData( - num_steps=1, description=IDStr(faker.pystr()) + num_steps=1, description=faker.pystr() ) as progress_bar: invalid_s3_path = SimcoreS3FileID("") await filemanager.download_path_from_s3( @@ -436,7 +431,7 @@ async def test_errors_upon_invalid_file_identifiers( with pytest.raises(exceptions.S3InvalidPathError): async with ProgressBarData( - num_steps=1, description=IDStr(faker.pystr()) + num_steps=1, description=faker.pystr() ) as progress_bar: await filemanager.download_path_from_s3( user_id=user_id, @@ -480,7 +475,7 @@ async def test_invalid_store( download_folder = Path(tmpdir) / "downloads" with pytest.raises(exceptions.S3InvalidStore): async with ProgressBarData( - num_steps=1, description=IDStr(faker.pystr()) + num_steps=1, description=faker.pystr() ) as progress_bar: await filemanager.download_path_from_s3( user_id=user_id, @@ -686,9 +681,7 @@ async def test_upload_path_source_is_a_folder( assert isinstance(upload_result, UploadedFolder) assert source_dir.exists() - async with ProgressBarData( - num_steps=1, description=IDStr(faker.pystr()) - ) as progress_bar: + async with ProgressBarData(num_steps=1, description=faker.pystr()) as progress_bar: await filemanager.download_path_from_s3( user_id=user_id, store_name=None, diff --git a/packages/simcore-sdk/tests/integration/test_node_ports_common_r_clone.py b/packages/simcore-sdk/tests/integration/test_node_ports_common_r_clone.py index c94fc524bec..4f421b9b18c 100644 --- a/packages/simcore-sdk/tests/integration/test_node_ports_common_r_clone.py +++ b/packages/simcore-sdk/tests/integration/test_node_ports_common_r_clone.py @@ -15,7 +15,6 @@ import aiofiles import pytest from faker import Faker -from models_library.basic_types import IDStr from models_library.progress_bar import ProgressReport from pydantic import AnyUrl, ByteSize, TypeAdapter from servicelib.file_utils import remove_directory @@ -162,7 +161,7 @@ async def 
_report_progress_upload(report: ProgressReport) -> None: async with ProgressBarData( num_steps=1, progress_report_cb=_report_progress_upload, - description=IDStr(faker.pystr()), + description=faker.pystr(), ) as progress_bar: await r_clone.sync_local_to_s3( r_clone_settings, @@ -189,7 +188,7 @@ async def _report_progress_download(report: ProgressReport) -> None: async with ProgressBarData( num_steps=1, progress_report_cb=_report_progress_download, - description=IDStr(faker.pystr()), + description=faker.pystr(), ) as progress_bar: await r_clone.sync_s3_to_local( r_clone_settings, diff --git a/packages/simcore-sdk/tests/unit/test_node_ports_common_file_io_utils.py b/packages/simcore-sdk/tests/unit/test_node_ports_common_file_io_utils.py index a3710dfe27b..fab4cb1848f 100644 --- a/packages/simcore-sdk/tests/unit/test_node_ports_common_file_io_utils.py +++ b/packages/simcore-sdk/tests/unit/test_node_ports_common_file_io_utils.py @@ -18,7 +18,6 @@ FileUploadSchema, UploadedPart, ) -from models_library.basic_types import IDStr from moto.server import ThreadedMotoServer from pydantic import AnyUrl, ByteSize, TypeAdapter from pytest_mock import MockerFixture @@ -279,7 +278,7 @@ async def test_upload_file_to_presigned_links( assert effective_chunk_size <= used_chunk_size upload_links = await create_upload_links(num_links, used_chunk_size) assert len(upload_links.urls) == num_links - async with ProgressBarData(num_steps=1, description=IDStr("")) as progress_bar: + async with ProgressBarData(num_steps=1, description="") as progress_bar: uploaded_parts: list[UploadedPart] = await upload_file_to_presigned_links( session=client_session, file_upload_links=upload_links, diff --git a/services/dask-sidecar/src/simcore_service_dask_sidecar/computational_sidecar/core.py b/services/dask-sidecar/src/simcore_service_dask_sidecar/computational_sidecar/core.py index b6ae0b25611..126485b2645 100644 --- a/services/dask-sidecar/src/simcore_service_dask_sidecar/computational_sidecar/core.py 
+++ b/services/dask-sidecar/src/simcore_service_dask_sidecar/computational_sidecar/core.py @@ -16,7 +16,6 @@ from dask_task_models_library.container_tasks.errors import ServiceRuntimeError from dask_task_models_library.container_tasks.io import FileUrl, TaskOutputData from dask_task_models_library.container_tasks.protocol import ContainerTaskParameters -from models_library.basic_types import IDStr from models_library.progress_bar import ProgressReport from packaging import version from pydantic import ValidationError @@ -181,7 +180,7 @@ async def run(self, command: list[str]) -> TaskOutputData: num_steps=3, step_weights=[5 / 100, 90 / 100, 5 / 100], progress_report_cb=self.task_publishers.publish_progress, - description=IDStr("running"), + description="running", ) as progress_bar: # PRE-PROCESSING await pull_image( @@ -222,7 +221,7 @@ async def run(self, command: list[str]) -> TaskOutputData: config, name=f"{self.task_parameters.image.split(sep='/')[-1]}_{run_id}", ) as container, progress_bar.sub_progress( - 100, description=IDStr("processing") + 100, description="processing" ) as processing_progress_bar, managed_monitor_container_log_task( container=container, progress_regexp=image_labels.get_progress_regexp(), diff --git a/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/modules/long_running_tasks.py b/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/modules/long_running_tasks.py index 6e7a7a19009..db6deb715a8 100644 --- a/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/modules/long_running_tasks.py +++ b/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/modules/long_running_tasks.py @@ -10,7 +10,6 @@ ProgressPercent, TaskProgress, ) -from models_library.basic_types import IDStr from models_library.generated_models.docker_rest_api import ContainerState from models_library.rabbitmq_messages import ProgressType, SimcorePlatformStatus from pydantic import PositiveInt @@ -178,7 +177,7 @@ async def 
task_create_service_containers( app, ProgressType.SERVICE_CONTAINERS_STARTING, ), - description=IDStr("starting software"), + description="starting software", ) as progress_bar: with log_context(_logger, logging.INFO, "load user services preferences"): if user_services_preferences.is_feature_enabled(app): @@ -382,7 +381,7 @@ async def task_restore_state( app, ProgressType.SERVICE_STATE_PULLING, ), - description=IDStr("pulling states"), + description="pulling states", ) as root_progress: await logged_gather( *( @@ -446,7 +445,7 @@ async def task_save_state( app, ProgressType.SERVICE_STATE_PUSHING, ), - description=IDStr("pushing state"), + description="pushing state", ) as root_progress: await logged_gather( *[ @@ -494,7 +493,7 @@ async def task_ports_inputs_pull( app, ProgressType.SERVICE_INPUTS_PULLING, ), - description=IDStr("pulling inputs"), + description="pulling inputs", ) as root_progress: with log_directory_changes( mounted_volumes.disk_inputs_path, _logger, logging.INFO @@ -539,7 +538,7 @@ async def task_ports_outputs_pull( app, ProgressType.SERVICE_OUTPUTS_PULLING, ), - description=IDStr("pulling outputs"), + description="pulling outputs", ) as root_progress: transferred_bytes = await nodeports.download_target_ports( nodeports.PortTypeName.OUTPUTS, diff --git a/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/modules/nodeports.py b/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/modules/nodeports.py index 39ab1519dc8..0657ffe237e 100644 --- a/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/modules/nodeports.py +++ b/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/modules/nodeports.py @@ -14,7 +14,6 @@ import magic from aiofiles.os import remove from aiofiles.tempfile import TemporaryDirectory as AioTemporaryDirectory -from models_library.basic_types import IDStr from models_library.projects import ProjectIDStr from models_library.projects_nodes_io import NodeIDStr from models_library.services_types import 
ServicePortKey @@ -132,7 +131,7 @@ async def upload_outputs( # pylint:disable=too-many-statements # noqa: PLR0915 2 if is_file_type(port.property_type) else 1 for port in ports_to_set ), - description=IDStr("uploading outputs"), + description="uploading outputs", ) ) for port in ports_to_set: @@ -245,7 +244,7 @@ async def _get_data_from_port( ) -> tuple[Port, ItemConcreteValue | None, ByteSize]: async with progress_bar.sub_progress( steps=2 if is_file_type(port.property_type) else 1, - description=IDStr("getting data"), + description="getting data", ) as sub_progress: with log_context(_logger, logging.DEBUG, f"getting {port.key=}"): port_data = await port.get(sub_progress) @@ -363,7 +362,7 @@ async def _get_date_from_port_notified( raise async with progress_bar.sub_progress( - steps=len(ports_to_get), description=IDStr("downloading") + steps=len(ports_to_get), description="downloading" ) as sub_progress: results = await limited_gather( *[ diff --git a/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/modules/outputs/_manager.py b/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/modules/outputs/_manager.py index 9a669aacc6b..f29f26358e2 100644 --- a/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/modules/outputs/_manager.py +++ b/services/dynamic-sidecar/src/simcore_service_dynamic_sidecar/modules/outputs/_manager.py @@ -8,7 +8,6 @@ from common_library.errors_classes import OsparcErrorMixin from fastapi import FastAPI -from models_library.basic_types import IDStr from models_library.rabbitmq_messages import ProgressType from pydantic import PositiveFloat from servicelib import progress_bar @@ -136,7 +135,7 @@ async def _upload_ports() -> None: async with progress_bar.ProgressBarData( num_steps=1, progress_report_cb=self.task_progress_cb, - description=IDStr("uploading ports"), + description="uploading ports", ) as root_progress: await upload_outputs( outputs_path=self.outputs_context.outputs_path, diff --git 
a/services/static-webserver/client/source/class/osparc/data/model/NodeProgressSequence.js b/services/static-webserver/client/source/class/osparc/data/model/NodeProgressSequence.js index eec18e05f5b..b11cdde65c6 100644 --- a/services/static-webserver/client/source/class/osparc/data/model/NodeProgressSequence.js +++ b/services/static-webserver/client/source/class/osparc/data/model/NodeProgressSequence.js @@ -186,8 +186,11 @@ qx.Class.define("osparc.data.model.NodeProgressSequence", { getProgress: function(report) { if (report.unit) { + const attempt = ("attempt" in report && report["attempt"] > 1) ? `(attempt ${report["attempt"]}) ` : ""; + const current_value = osparc.utils.Utils.bytesToSize(report["actual_value"], 1, false); + const total_value = osparc.utils.Utils.bytesToSize(report["total"], 1, false) return { - progressLabel: `${osparc.utils.Utils.bytesToSize(report["actual_value"], 1, false)} / ${osparc.utils.Utils.bytesToSize(report["total"], 1, false)}`, + progressLabel: `${attempt}${current_value} / ${total_value}`, value: report["actual_value"] / report["total"] } } diff --git a/services/web/server/src/simcore_service_webserver/dynamic_scheduler/api.py b/services/web/server/src/simcore_service_webserver/dynamic_scheduler/api.py index 5773052010b..83a90e286c2 100644 --- a/services/web/server/src/simcore_service_webserver/dynamic_scheduler/api.py +++ b/services/web/server/src/simcore_service_webserver/dynamic_scheduler/api.py @@ -17,7 +17,6 @@ NodeGetIdle, NodeGetUnknown, ) -from models_library.basic_types import IDStr from models_library.progress_bar import ProgressReport from models_library.projects import ProjectID from models_library.projects_nodes_io import NodeID @@ -132,7 +131,7 @@ async def stop_dynamic_services_in_project( user_id, project_id, ), - description=IDStr("stopping services"), + description="stopping services", ) ) @@ -147,7 +146,7 @@ async def stop_dynamic_services_in_project( save_state=save_state, ), progress=progress_bar.sub_progress( 
- 1, description=IDStr(f"{service.node_uuid}") + 1, description=f"{service.node_uuid}" ), ) for service in running_dynamic_services