diff --git a/Colab_notebooks/pix2pix_ZeroCostDL4Mic.ipynb b/Colab_notebooks/pix2pix_ZeroCostDL4Mic.ipynb
index bd1a9d75..c5ea2f89 100644
--- a/Colab_notebooks/pix2pix_ZeroCostDL4Mic.ipynb
+++ b/Colab_notebooks/pix2pix_ZeroCostDL4Mic.ipynb
@@ -229,11 +229,12 @@
     "colab": {
      "background_save": true
     },
-    "id": "fq21zJVFNASx"
+    "id": "fq21zJVFNASx",
+    "tags": []
    },
    "outputs": [],
    "source": [
-    "Notebook_version = '1.16.3'\n",
+    "Notebook_version = '1.16.4'\n",
     "Network = 'pix2pix'\n",
     "\n",
     "\n",
@@ -361,8 +362,11 @@
     "# average function\n",
     "def Average(lst):\n",
     "  return sum(lst) / len(lst)\n",
-    "def ssim(img1, img2):\n",
-    "  return structural_similarity(img1,img2,data_range=1.,full=True, gaussian_weights=True, use_sample_covariance=False, sigma=1.5)\n",
+    "def ssim(img1, img2, multichannel=False):\n",
+    "  if multichannel:\n",
+    "    return structural_similarity(img1,img2,data_range=1.,full=True, multichannel=True)\n",
+    "  else:\n",
+    "    return structural_similarity(img1,img2,data_range=1.,full=True, gaussian_weights=True, use_sample_covariance=False, sigma=1.5)\n",
     "\n",
     "\n",
     "def normalize(x, pmin=3, pmax=99.8, axis=None, clip=False, eps=1e-20, dtype=np.float32):\n",
@@ -522,8 +526,8 @@
     "        test_source_matched = match_histograms(test_source, test_GT, multichannel=True)\n",
     "        # -------------------------------- Calculate the metric maps and save them --------------------------------\n",
     "        # Calculate the SSIM maps\n",
-    "        index_SSIM_GTvsPrediction, img_SSIM_GTvsPrediction = ssim(test_GT, test_prediction_matched)\n",
-    "        index_SSIM_GTvsSource, img_SSIM_GTvsSource = ssim(test_GT, test_source_matched)\n",
+    "        index_SSIM_GTvsPrediction, img_SSIM_GTvsPrediction = ssim(test_GT, test_prediction_matched, multichannel=True)\n",
+    "        index_SSIM_GTvsSource, img_SSIM_GTvsSource = ssim(test_GT, test_source_matched, multichannel=True)\n",
     "        ssim_score_list.append(index_SSIM_GTvsPrediction)\n",
     "\n",
     "        #Save ssim_maps\n",
@@ -607,8 +611,8 @@
     "\n",
     "        # -------------------------------- Calculate the metric maps and save them --------------------------------\n",
     "        # Calculate the SSIM maps\n",
-    "        index_SSIM_GTvsPrediction, img_SSIM_GTvsPrediction = ssim(test_GT_norm, test_prediction_norm)\n",
-    "        index_SSIM_GTvsSource, img_SSIM_GTvsSource = ssim(test_GT_norm, test_source_norm)\n",
+    "        index_SSIM_GTvsPrediction, img_SSIM_GTvsPrediction = ssim(test_GT_norm, test_prediction_norm, multichannel=False)\n",
+    "        index_SSIM_GTvsSource, img_SSIM_GTvsSource = ssim(test_GT_norm, test_source_norm, multichannel=False)\n",
     "        ssim_score_list.append(index_SSIM_GTvsPrediction)\n",
     "\n",
     "        #Save ssim_maps\n",
@@ -1895,7 +1899,8 @@
    "execution_count": null,
    "metadata": {
     "cellView": "form",
-    "id": "i6LXhHf61w3K"
+    "id": "i6LXhHf61w3K",
+    "tags": []
    },
    "outputs": [],
    "source": [
@@ -1973,7 +1978,8 @@
    "execution_count": null,
    "metadata": {
     "cellView": "form",
-    "id": "eHUa1opwWi9G"
+    "id": "eHUa1opwWi9G",
+    "tags": []
    },
    "outputs": [],
    "source": [
@@ -2002,7 +2008,7 @@
     "Normalisation_QC_target = \"Contrast stretching\" #@param [\"None\", \"Contrast stretching\", \"Adaptive Equalization\"]\n",
     "\n",
     "#@markdown ###Did you evaluate all the checkpoints and only want to visualise the results? (It reduces significantly the processing time if you did it already)\n",
-    "avoid_evaluating_again = False #@param {type:\"boolean\"}\n",
+    "avoid_evaluating_again = True #@param {type:\"boolean\"}\n",
     "#@markdown ####Choose the frequency of checkpoints to evaluate. If 1, it will evaluate all the model checkpoints available.\n",
     "QC_evaluation_checkpoint_freq = 1 #@param {type:\"number\"}\n",
     "QC_freq = np.int(QC_evaluation_checkpoint_freq)\n",
@@ -2042,7 +2048,10 @@
     "\n",
     "#Here we copy and normalise the data\n",
     "if prepare_testdata:\n",
-    "  normalise_data(Source_QC_folder, Target_QC_folder, Normalisation_QC_source, Normalisation_QC_target, path2im)\n"
+    "  normalise_data(Source_QC_folder, Target_QC_folder, Normalisation_QC_source, Normalisation_QC_target, path2im)\n",
+    "else:\n",
+    "  # Here we count how many images are in our folder to be predicted and we had a few\n",
+    "  Nb_files_Data_folder = len(os.listdir(Source_QC_folder))"
    ]
   },
   {
@@ -2499,7 +2508,8 @@
    "execution_count": null,
    "metadata": {
     "cellView": "form",
-    "id": "9WvxYBkzEa1d"
+    "id": "9WvxYBkzEa1d",
+    "tags": []
    },
    "outputs": [],
    "source": [
@@ -2511,8 +2521,8 @@
     "#@markdown ##Introduce the information to document your model:\n",
     "Trained_model_name = \"\" #@param {type:\"string\"}\n",
     "Trained_model_authors = \"\\\"[Author 1, Author 2]\\\"\" #@param {type:\"string\"}\n",
-    "Trained_model_authors_affiliation = \"\\\"[Affiliation of author 1, Affiliation of author 2]\\\"\" #@param {type:\"string\"}\n",
-    "Trained_model_description = \"A conditional cycleGAN trained to infer XXXX from XXX\" #@param {type:\"string\"}\n",
+    "Trained_model_authors_affiliation = \"\\\"[Author 1 Affiliation, Author 2 Affiliation]\\\"\" #@param {type:\"string\"}\n",
+    "Trained_model_description = \"A conditional cycleGAN trained to infer XXX from XXX \" #@param {type:\"string\"}\n",
     "Trained_model_license = \"CC-BY-NC-1.0\" #@param [\"BSD-Protection\", \"CC-BY-SA-2.0\", \"Unicode-DFS-2016\", \"CC-BY-ND-2.5\", \"CC-BY-NC-ND-3.0\", \"BSD-3-Clause-LBNL\", \"NCGL-UK-2.0\", \"GPL-1.0+\", \"OSL-1.0\", \"MIT\", \"MPL-2.0\", \"CC-BY-NC-1.0\", \"CC-BY-NC-ND-2.5\", \"CC-BY-SA-2.0-UK\", \"BSD-4-Clause-Shortened\"]\n",
     "\n",
     "\n",
@@ -2610,12 +2620,14 @@
     "\n",
     "kwargs = dict(\n",
     "    input_names=[\"input\"],\n",
+    "    input_data_range=[[0., 255.]],\n",
     "    input_axes=[\"bcyx\"],\n",
     "    pixel_sizes=[pixel_size],\n",
     "    preprocessing = bmz_preprpocess)\n",
     "\n",
     "output_spec = dict(\n",
     "    output_names=[\"output\"],\n",
+    "    output_data_range=[[-np.inf, np.inf]],\n",
     "    output_axes=[\"bcyx\"],\n",
     "    postprocessing=bmz_postprocess,\n",
     "    output_reference=[\"input\"],\n",
@@ -2697,7 +2709,7 @@
     "#else:\n",
     "#    y = np.expand_dims(y, axis=[0, 1]) # add batch dimension\n",
     "#np.save(os.path.join(output_root, \"test-output.npy\"), y)\n",
-    "np.save(os.path.join(output_root, \"test-output.npy\"), x)\n",
+    "np.save(os.path.join(output_root, \"test-output.npy\"), x.astype(np.float32))\n",
     "# Build the bioimage model zoo model\n",
     "# ---------------------------------------\n",
     "for i in range(2):\n",
@@ -3103,6 +3115,10 @@
     "# **7. Version log**\n",
     "---\n",
     "\n",
+    "**v1.16.4**:\n",
+    "* Corrects data type and range in the bioimageio specs.\n",
+    "* Corrects for RGB data quality control SSIM.\n",
+    "\n",
     "**v1.16.3**:\n",
     "* Import the `bioimageio.core` library and export bioimage model zoo compatible models.\n",
     "\n",
@@ -3181,7 +3197,7 @@
    "name": "python",
    "nbconvert_exporter": "python",
    "pygments_lexer": "ipython3",
-   "version": "3.10.12"
+   "version": "3.8.18"
   }
  },
 "nbformat": 4,
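
Note on the RGB SSIM change above: the following is a minimal sketch of how the updated `ssim` helper behaves, assuming a scikit-image release that still accepts the `multichannel` keyword (it was replaced by `channel_axis` from 0.19 onwards). The array names `gt_rgb` and `pred_rgb` are hypothetical stand-ins for the matched ground-truth/prediction pairs used in the quality-control section; this is not part of the patch itself.

    import numpy as np
    from skimage.metrics import structural_similarity

    def ssim(img1, img2, multichannel=False):
        # Sketch of the helper introduced above (assumes scikit-image < 0.19).
        if multichannel:
            # RGB QC images: SSIM is computed per channel and averaged.
            return structural_similarity(img1, img2, data_range=1., full=True,
                                         multichannel=True)
        # Grayscale QC images: keep the Gaussian-weighted settings used previously.
        return structural_similarity(img1, img2, data_range=1., full=True,
                                     gaussian_weights=True,
                                     use_sample_covariance=False, sigma=1.5)

    # Hypothetical matched RGB pair, already scaled to [0, 1]:
    gt_rgb = np.random.rand(64, 64, 3).astype(np.float32)
    pred_rgb = np.clip(gt_rgb + 0.05 * np.random.randn(64, 64, 3), 0., 1.).astype(np.float32)
    score, ssim_map = ssim(gt_rgb, pred_rgb, multichannel=True)  # scalar index + per-pixel map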