From 84c0b07a694afb94432733e1b03b0036b78d4a01 Mon Sep 17 00:00:00 2001 From: Lorenzo Mammana Date: Mon, 19 Aug 2024 10:28:35 +0200 Subject: [PATCH] fix: Fix broken default installation (#127) * fix: Add guard for onnx related package import * build: Update version and changelog * docs: Update minor errors * docs: Minor docs improvements * docs: Minor doc fix * docs: Update changelog * refactor: Disable interactive backend for matplotlib * feat: Make quadra fail if mixed precision export doesn't terminate correctly * docs: Update changelog --- CHANGELOG.md | 12 ++++++++++++ README.md | 6 +++--- docs/tutorials/configurations.md | 4 ++-- docs/tutorials/examples/anomaly_detection.md | 2 +- docs/tutorials/examples/segmentation.md | 4 ++-- docs/tutorials/examples/ssl.md | 2 +- pyproject.toml | 2 +- quadra/__init__.py | 2 +- quadra/main.py | 3 +++ quadra/utils/export.py | 7 ++++--- 10 files changed, 30 insertions(+), 14 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 7a22117c..a857a89b 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -2,6 +2,18 @@ # Changelog All notable changes to this project will be documented in this file. +### [2.2.2] + +#### Updated + +- Terminate quadra with error if it's not possible to export an ONNX model with automatic mixed precision instead of falling back to full precision + +#### Fixed + +- Fix default quadra installation requiring extra dependencies incorrectly +- Fix matplotlib using interactive backend by default +- Fix documentation errors + ### [2.2.1] #### Updated diff --git a/README.md b/README.md index af9e3928..37003648 100644 --- a/README.md +++ b/README.md @@ -172,7 +172,7 @@ On the same dataset we can run a simple SSL training using the BYOL algorithm wi quadra experiment=generic/imagenette/ssl/byol logger=csv ``` -BYOL is not the only SSL algorithm available, you can find a list of all the available algorithms under `quadra/experiment/generic/imagenette/ssl` folder. 
+BYOL is not the only SSL algorithm available, you can find a list of all the available algorithms under `quadra/configs/experiment/generic/imagenette/ssl` folder. ### Anomaly Detection Training @@ -185,11 +185,11 @@ quadra experiment=generic/mnist/anomaly/padim logger=csv This will run an anomaly detection considering on of the classes as good (default is the number 9) and the rest as anomalies. -This will make use of the [anomalib](https://github.com/openvinotoolkit/anomalib) library to train the model. Many different algorithms are available, you can find them under `quadra/experiment/generic/mnist/anomaly` folder. +This will make use of the [anomalib](https://github.com/openvinotoolkit/anomalib) library to train the model. Many different algorithms are available, you can find them under `quadra/configs/experiment/generic/mnist/anomaly` folder. ## Running with Custom Datasets -Each task comes with a default configuration file that can be customized for your needs. Each example experiment we have seen so far uses a default configuration file that can be found under `quadra/experiment/base//.yaml`. +Each task comes with a default configuration file that can be customized for your needs. Each example experiment we have seen so far uses a default configuration file that can be found under `quadra/configs/experiment/base//.yaml`. Let's see how we can customize the configuration file to run the classification experiment on a custom dataset. diff --git a/docs/tutorials/configurations.md b/docs/tutorials/configurations.md index d0be0745..63561b89 100644 --- a/docs/tutorials/configurations.md +++ b/docs/tutorials/configurations.md @@ -231,7 +231,7 @@ callbacks: In the experiment configuration we aggregate the various building blocks of the framework using the `defaults` key. 
In this case we are using the `classification` datamodule, a `resnet18` backbone, the `cross_entropy` loss, the `classification` model (Lightning Module), `adam` as optimizer, the `classification` task, the reduce on plateau (`rop`) scheduler and the `default_resize` transform. - We can also see that we are overriding some of the parameters of the different modules. For example, we are overriding the `lr_scheduler_interval` of the backbone to be `epoch` instead of `step`. We are also overriding the `max_epochs` of the trainer to be 200 instead of the default value. + We can also see that we are overriding some of the parameters of the different modules. For example, we are overriding the `lr_scheduler_interval` of the backbone to be `step` instead of `epoch`. We are also overriding the `max_epochs` of the trainer to be 200 instead of the default value. The experiment is the most important configuration file as it is the one actually telling the framework what to do! @@ -307,7 +307,7 @@ classifier: out_features: ${model.num_classes} module: _target_: quadra.modules.classification.ClassificationModule - lr_scheduler_interval: "epoch" + lr_scheduler_interval: "step" criterion: ${loss} gradcam: true ``` diff --git a/docs/tutorials/examples/anomaly_detection.md b/docs/tutorials/examples/anomaly_detection.md index aa4f705d..47fbe657 100644 --- a/docs/tutorials/examples/anomaly_detection.md +++ b/docs/tutorials/examples/anomaly_detection.md @@ -31,7 +31,7 @@ MNIST/ MNIST doesn't have ground truth masks for defects, by default we will use empty masks for good images and full white masks for anomalies. -The standard datamodule configuration for anomaly is found under `datamodule/anomaly/base.yaml`. +The standard datamodule configuration for anomaly is found under `datamodule/base/anomaly.yaml`. 
```yaml _target_: quadra.datamodules.AnomalyDataModule diff --git a/docs/tutorials/examples/segmentation.md b/docs/tutorials/examples/segmentation.md index ee2350a0..1831240f 100644 --- a/docs/tutorials/examples/segmentation.md +++ b/docs/tutorials/examples/segmentation.md @@ -218,7 +218,7 @@ defaults: Assuming that you have created a virtual environment and installed the `quadra` library, you can run the experiment by running the following command: ```bash -quadra experiment=custom_experiment/smp_multiclass +quadra experiment=segmentation/custom_experiment/smp_multiclass ``` ### Run (Advanced) @@ -226,7 +226,7 @@ quadra experiment=custom_experiment/smp_multiclass Lets assume that you would like to run the experiment with different models and with/without freezing the encoder part of the model, thanks to `hydra` multi-run option we can run as follows: ```bash -quadra experiment=custom_experiment/smp_multiclass \ +quadra experiment=segmentation/custom_experiment/smp_multiclass \ backbone.model.arch=unet,unetplusplus \ backbone.model.encoder_name=resnet18,resnet50 \ backbone.model.freeze_encoder=False,True \ diff --git a/docs/tutorials/examples/ssl.md b/docs/tutorials/examples/ssl.md index 13c41666..3d9b22ac 100644 --- a/docs/tutorials/examples/ssl.md +++ b/docs/tutorials/examples/ssl.md @@ -63,7 +63,7 @@ We will make some changes to the datamodule in the experiment configuration file ### Experiment -First, let's check how base experiment configuration file is defined for BYOL algorithm located in `configs/experiment/base/ssl/BYOL.yaml`. +First, let's check how the base experiment configuration file is defined for the BYOL algorithm located in `configs/experiment/base/ssl/byol.yaml`. 
```yaml # @package _global_ diff --git a/pyproject.toml b/pyproject.toml index 8241246a..fee15228 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,6 @@ [tool.poetry] name = "quadra" -version = "2.2.1" +version = "2.2.2" description = "Deep Learning experiment orchestration library" authors = [ "Federico Belotti ", diff --git a/quadra/__init__.py b/quadra/__init__.py index c69109dd..dca7522f 100644 --- a/quadra/__init__.py +++ b/quadra/__init__.py @@ -1,4 +1,4 @@ -__version__ = "2.2.1" +__version__ = "2.2.2" def get_version(): diff --git a/quadra/main.py b/quadra/main.py index 7438aef2..dfd6f95f 100644 --- a/quadra/main.py +++ b/quadra/main.py @@ -1,6 +1,7 @@ import time import hydra +import matplotlib from omegaconf import DictConfig from pytorch_lightning import seed_everything @@ -12,6 +13,8 @@ load_envs() register_resolvers() + +matplotlib.use("Agg") log = get_logger(__name__) diff --git a/quadra/utils/export.py b/quadra/utils/export.py index 0cf7f45f..62fdd731 100644 --- a/quadra/utils/export.py +++ b/quadra/utils/export.py @@ -7,7 +7,6 @@ import torch from anomalib.models.cflow import CflowLightning from omegaconf import DictConfig, ListConfig, OmegaConf -from onnxconverter_common import auto_convert_mixed_precision from torch import nn from quadra.models.base import ModelSignatureWrapper @@ -22,6 +21,7 @@ try: import onnx # noqa from onnxsim import simplify as onnx_simplify # noqa + from onnxconverter_common import auto_convert_mixed_precision # noqa ONNX_AVAILABLE = True except ImportError: @@ -385,8 +385,9 @@ def _safe_export_half_precision_onnx( onnx.checker.check_model(onnx_model) return True except Exception as e: - log.debug("Failed to export model with mixed precision with error: %s", e) - return False + raise RuntimeError( + "Failed to export model with automatic mixed precision, check your model or disable ONNX export" + ) from e else: log.info("Exported half precision ONNX model does not contain NaN values, model is stable") return True