From d2b12dda5cce4ffebb7fe2a4e01a98bcb82086f3 Mon Sep 17 00:00:00 2001 From: Fabrice Daian <fabrice.daian@lis-lab.fr> Date: Sat, 8 Mar 2025 22:46:00 +0100 Subject: [PATCH] add --- .../mupix_training_on_metrology_dataset.ipynb | 866 ++++++++++++++++++ 1 file changed, 866 insertions(+) create mode 100644 notebooks/mupix_training_on_metrology_dataset.ipynb diff --git a/notebooks/mupix_training_on_metrology_dataset.ipynb b/notebooks/mupix_training_on_metrology_dataset.ipynb new file mode 100644 index 0000000..aeef90b --- /dev/null +++ b/notebooks/mupix_training_on_metrology_dataset.ipynb @@ -0,0 +1,866 @@ +{ + "cells": [ + { + "cell_type": "markdown", + "metadata": { + "id": "KVOMNszkZwZ1" + }, + "source": [ + "# DEMO Notebook : Training a µPIX model using Metrology Dataset\n", + "\n", + "Paper: \"*μPIX : Leveraging Generative AI for Enhanced, Personalized and Sustainable Microscopy\"*\n", + "\n", + "Authors: Gabriel Bon, Daniel Sapede, Cédric Matthews, and Fabrice Daian\n", + "\n", + "GitLab Repo: https://gitlab.lis-lab.fr/sicomp/mupix" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "PqF99SPIyuay" + }, + "source": [ + "<font color=\"#D64242\"><b>Before starting:
As training is a very intensive task, we recommend switching to a GPU environment on Google Colab, please change the Runtime before executing this Notebook's cells:</b></font>\n", + "\n", + "\n", + "* Select \"Runtime\" in the Google Colab menu bar\n", + "* Select \"Change runtime type\" and select \"T4 GPU\"" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "cTRLD5drkkRP" + }, + "source": [ + "## 1 - Install/Update Python Colab environment and µPIX sources" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "FItXNUtw8mlU" + }, + "source": [ + "Execute the following cell to install missing packages, update Tensorflow version and download µPIX sources.\n", + "\n", + "# **Warning: When prompted with \"Restart session\" message, <font color=\"#D64242\">DO NOT 
RESTART</font> the session, please ignore the message and click on the \"Cancel\" button.**" + ] + }, + { + "cell_type": "code", + "execution_count": 1, + "metadata": { + "id": "ECaX__Mn7a6W", + "colab": { + "base_uri": "https://localhost:8080/", + "height": 1000 + }, + "outputId": "1b0a6269-6c25-4196-be9c-3fd4cf8f3447" + }, + "outputs": [ + { + "output_type": "stream", + "name": "stdout", + "text": [ + "Cloning into 'mupix'...\n", + "warning: redirecting to https://gitlab.lis-lab.fr/sicomp/mupix.git/\n", + "remote: Enumerating objects: 5251, done.\u001b[K\n", + "remote: Counting objects: 100% (137/137), done.\u001b[K\n", + "remote: Compressing objects: 100% (137/137), done.\u001b[K\n", + "remote: Total 5251 (delta 75), reused 0 (delta 0), pack-reused 5114 (from 1)\u001b[K\n", + "Receiving objects: 100% (5251/5251), 366.98 MiB | 17.00 MiB/s, done.\n", + "Resolving deltas: 100% (152/152), done.\n", + "Collecting absl-py==2.1.0 (from -r mupix/requirements.txt (line 1))\n", + " Downloading absl_py-2.1.0-py3-none-any.whl.metadata (2.3 kB)\n", + "Collecting albumentations==1.3.0 (from -r mupix/requirements.txt (line 2))\n", + " Downloading albumentations-1.3.0-py3-none-any.whl.metadata (34 kB)\n", + "Requirement already satisfied: annotated-types==0.7.0 in /usr/local/lib/python3.11/dist-packages (from -r mupix/requirements.txt (line 3)) (0.7.0)\n", + "Requirement already satisfied: astunparse==1.6.3 in /usr/local/lib/python3.11/dist-packages (from -r mupix/requirements.txt (line 4)) (1.6.3)\n", + "Requirement already satisfied: cachetools==5.5.2 in /usr/local/lib/python3.11/dist-packages (from -r mupix/requirements.txt (line 5)) (5.5.2)\n", + "Requirement already satisfied: certifi==2025.1.31 in /usr/local/lib/python3.11/dist-packages (from -r mupix/requirements.txt (line 6)) (2025.1.31)\n", + "Requirement already satisfied: charset-normalizer==3.4.1 in /usr/local/lib/python3.11/dist-packages (from -r mupix/requirements.txt (line 7)) (3.4.1)\n", + "Collecting 
efficientnet==1.0.0 (from -r mupix/requirements.txt (line 8))\n", + " Downloading efficientnet-1.0.0-py3-none-any.whl.metadata (6.1 kB)\n", + "Requirement already satisfied: flatbuffers==25.2.10 in /usr/local/lib/python3.11/dist-packages (from -r mupix/requirements.txt (line 9)) (25.2.10)\n", + "Requirement already satisfied: gast==0.6.0 in /usr/local/lib/python3.11/dist-packages (from -r mupix/requirements.txt (line 10)) (0.6.0)\n", + "Requirement already satisfied: google-auth==2.38.0 in /usr/local/lib/python3.11/dist-packages (from -r mupix/requirements.txt (line 11)) (2.38.0)\n", + "Collecting google-auth-oauthlib==1.0.0 (from -r mupix/requirements.txt (line 12))\n", + " Downloading google_auth_oauthlib-1.0.0-py2.py3-none-any.whl.metadata (2.7 kB)\n", + "Requirement already satisfied: google-pasta==0.2.0 in /usr/local/lib/python3.11/dist-packages (from -r mupix/requirements.txt (line 13)) (0.2.0)\n", + "Requirement already satisfied: grpcio==1.70.0 in /usr/local/lib/python3.11/dist-packages (from -r mupix/requirements.txt (line 14)) (1.70.0)\n", + "Collecting h5py==3.13.0 (from -r mupix/requirements.txt (line 15))\n", + " Downloading h5py-3.13.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl.metadata (2.5 kB)\n", + "Requirement already satisfied: idna==3.10 in /usr/local/lib/python3.11/dist-packages (from -r mupix/requirements.txt (line 16)) (3.10)\n", + "Collecting image-classifiers==1.0.0 (from -r mupix/requirements.txt (line 17))\n", + " Downloading image_classifiers-1.0.0-py3-none-any.whl.metadata (8.6 kB)\n", + "Requirement already satisfied: imageio==2.37.0 in /usr/local/lib/python3.11/dist-packages (from -r mupix/requirements.txt (line 18)) (2.37.0)\n", + "Requirement already satisfied: joblib==1.4.2 in /usr/local/lib/python3.11/dist-packages (from -r mupix/requirements.txt (line 19)) (1.4.2)\n", + "Collecting keras==2.14.0 (from -r mupix/requirements.txt (line 20))\n", + " Downloading keras-2.14.0-py3-none-any.whl.metadata (2.4 kB)\n", + 
"Collecting Keras-Applications==1.0.8 (from -r mupix/requirements.txt (line 21))\n", + " Downloading Keras_Applications-1.0.8-py3-none-any.whl.metadata (1.7 kB)\n", + "Requirement already satisfied: lazy_loader==0.4 in /usr/local/lib/python3.11/dist-packages (from -r mupix/requirements.txt (line 22)) (0.4)\n", + "Requirement already satisfied: libclang==18.1.1 in /usr/local/lib/python3.11/dist-packages (from -r mupix/requirements.txt (line 23)) (18.1.1)\n", + "Requirement already satisfied: Markdown==3.7 in /usr/local/lib/python3.11/dist-packages (from -r mupix/requirements.txt (line 24)) (3.7)\n", + "Requirement already satisfied: MarkupSafe==3.0.2 in /usr/local/lib/python3.11/dist-packages (from -r mupix/requirements.txt (line 25)) (3.0.2)\n", + "Collecting ml-dtypes==0.2.0 (from -r mupix/requirements.txt (line 26))\n", + " Downloading ml_dtypes-0.2.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl.metadata (20 kB)\n", + "Requirement already satisfied: networkx==3.4.2 in /usr/local/lib/python3.11/dist-packages (from -r mupix/requirements.txt (line 27)) (3.4.2)\n", + "Collecting numpy==1.24.0 (from -r mupix/requirements.txt (line 28))\n", + " Downloading numpy-1.24.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl.metadata (5.6 kB)\n", + "Requirement already satisfied: oauthlib==3.2.2 in /usr/local/lib/python3.11/dist-packages (from -r mupix/requirements.txt (line 29)) (3.2.2)\n", + "Requirement already satisfied: opencv-python==4.11.0.86 in /usr/local/lib/python3.11/dist-packages (from -r mupix/requirements.txt (line 30)) (4.11.0.86)\n", + "Requirement already satisfied: opencv-python-headless==4.11.0.86 in /usr/local/lib/python3.11/dist-packages (from -r mupix/requirements.txt (line 31)) (4.11.0.86)\n", + "Requirement already satisfied: opt_einsum==3.4.0 in /usr/local/lib/python3.11/dist-packages (from -r mupix/requirements.txt (line 32)) (3.4.0)\n", + "Requirement already satisfied: packaging==24.2 in 
/usr/local/lib/python3.11/dist-packages (from -r mupix/requirements.txt (line 33)) (24.2)\n", + "Requirement already satisfied: pillow==11.1.0 in /usr/local/lib/python3.11/dist-packages (from -r mupix/requirements.txt (line 34)) (11.1.0)\n", + "Requirement already satisfied: protobuf==4.25.6 in /usr/local/lib/python3.11/dist-packages (from -r mupix/requirements.txt (line 35)) (4.25.6)\n", + "Requirement already satisfied: pyasn1==0.6.1 in /usr/local/lib/python3.11/dist-packages (from -r mupix/requirements.txt (line 36)) (0.6.1)\n", + "Requirement already satisfied: pyasn1_modules==0.4.1 in /usr/local/lib/python3.11/dist-packages (from -r mupix/requirements.txt (line 37)) (0.4.1)\n", + "Requirement already satisfied: pydantic==2.10.6 in /usr/local/lib/python3.11/dist-packages (from -r mupix/requirements.txt (line 38)) (2.10.6)\n", + "Requirement already satisfied: pydantic_core==2.27.2 in /usr/local/lib/python3.11/dist-packages (from -r mupix/requirements.txt (line 39)) (2.27.2)\n", + "Requirement already satisfied: PyYAML==6.0.2 in /usr/local/lib/python3.11/dist-packages (from -r mupix/requirements.txt (line 40)) (6.0.2)\n", + "Requirement already satisfied: requests==2.32.3 in /usr/local/lib/python3.11/dist-packages (from -r mupix/requirements.txt (line 41)) (2.32.3)\n", + "Requirement already satisfied: requests-oauthlib==2.0.0 in /usr/local/lib/python3.11/dist-packages (from -r mupix/requirements.txt (line 42)) (2.0.0)\n", + "Requirement already satisfied: rsa==4.9 in /usr/local/lib/python3.11/dist-packages (from -r mupix/requirements.txt (line 43)) (4.9)\n", + "Requirement already satisfied: scikit-image==0.25.2 in /usr/local/lib/python3.11/dist-packages (from -r mupix/requirements.txt (line 44)) (0.25.2)\n", + "Requirement already satisfied: scikit-learn==1.6.1 in /usr/local/lib/python3.11/dist-packages (from -r mupix/requirements.txt (line 45)) (1.6.1)\n", + "Collecting scipy==1.15.2 (from -r mupix/requirements.txt (line 46))\n", + " Downloading 
scipy-1.15.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl.metadata (61 kB)\n", + "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m62.0/62.0 kB\u001b[0m \u001b[31m4.4 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n", + "\u001b[?25hCollecting segmentation-models==1.0.1 (from -r mupix/requirements.txt (line 47))\n", + " Downloading segmentation_models-1.0.1-py3-none-any.whl.metadata (938 bytes)\n", + "Requirement already satisfied: simsimd==6.2.1 in /usr/local/lib/python3.11/dist-packages (from -r mupix/requirements.txt (line 48)) (6.2.1)\n", + "Requirement already satisfied: six==1.17.0 in /usr/local/lib/python3.11/dist-packages (from -r mupix/requirements.txt (line 49)) (1.17.0)\n", + "Collecting stringzilla==3.12.1 (from -r mupix/requirements.txt (line 50))\n", + " Downloading stringzilla-3.12.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_28_x86_64.whl.metadata (80 kB)\n", + "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m80.3/80.3 kB\u001b[0m \u001b[31m4.7 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n", + "\u001b[?25hCollecting tensorboard==2.14.1 (from -r mupix/requirements.txt (line 51))\n", + " Downloading tensorboard-2.14.1-py3-none-any.whl.metadata (1.7 kB)\n", + "Requirement already satisfied: tensorboard-data-server==0.7.2 in /usr/local/lib/python3.11/dist-packages (from -r mupix/requirements.txt (line 52)) (0.7.2)\n", + "Collecting tensorflow==2.14.0 (from -r mupix/requirements.txt (line 53))\n", + " Downloading tensorflow-2.14.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl.metadata (4.1 kB)\n", + "Collecting tensorflow-estimator==2.14.0 (from -r mupix/requirements.txt (line 54))\n", + " Downloading tensorflow_estimator-2.14.0-py2.py3-none-any.whl.metadata (1.3 kB)\n", + "Requirement already satisfied: tensorflow-io-gcs-filesystem==0.37.1 in /usr/local/lib/python3.11/dist-packages (from -r mupix/requirements.txt (line 55)) (0.37.1)\n", + 
"Requirement already satisfied: termcolor==2.5.0 in /usr/local/lib/python3.11/dist-packages (from -r mupix/requirements.txt (line 56)) (2.5.0)\n", + "Requirement already satisfied: threadpoolctl==3.5.0 in /usr/local/lib/python3.11/dist-packages (from -r mupix/requirements.txt (line 57)) (3.5.0)\n", + "Requirement already satisfied: tifffile==2025.2.18 in /usr/local/lib/python3.11/dist-packages (from -r mupix/requirements.txt (line 58)) (2025.2.18)\n", + "Requirement already satisfied: typing_extensions==4.12.2 in /usr/local/lib/python3.11/dist-packages (from -r mupix/requirements.txt (line 59)) (4.12.2)\n", + "Requirement already satisfied: urllib3==2.3.0 in /usr/local/lib/python3.11/dist-packages (from -r mupix/requirements.txt (line 60)) (2.3.0)\n", + "Requirement already satisfied: Werkzeug==3.1.3 in /usr/local/lib/python3.11/dist-packages (from -r mupix/requirements.txt (line 61)) (3.1.3)\n", + "Collecting wrapt==1.14.1 (from -r mupix/requirements.txt (line 62))\n", + " Downloading wrapt-1.14.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl.metadata (6.7 kB)\n", + "Collecting qudida>=0.0.4 (from albumentations==1.3.0->-r mupix/requirements.txt (line 2))\n", + " Downloading qudida-0.0.4-py3-none-any.whl.metadata (1.5 kB)\n", + "Requirement already satisfied: wheel<1.0,>=0.23.0 in /usr/local/lib/python3.11/dist-packages (from astunparse==1.6.3->-r mupix/requirements.txt (line 4)) (0.45.1)\n", + "Requirement already satisfied: setuptools>=41.0.0 in /usr/local/lib/python3.11/dist-packages (from tensorboard==2.14.1->-r mupix/requirements.txt (line 51)) (75.1.0)\n", + "Downloading absl_py-2.1.0-py3-none-any.whl (133 kB)\n", + "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m133.7/133.7 kB\u001b[0m \u001b[31m7.5 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n", + "\u001b[?25hDownloading albumentations-1.3.0-py3-none-any.whl (123 kB)\n", + "\u001b[2K 
\u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m123.5/123.5 kB\u001b[0m \u001b[31m9.4 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n", + "\u001b[?25hDownloading efficientnet-1.0.0-py3-none-any.whl (17 kB)\n", + "Downloading google_auth_oauthlib-1.0.0-py2.py3-none-any.whl (18 kB)\n", + "Downloading h5py-3.13.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl (4.5 MB)\n", + "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m4.5/4.5 MB\u001b[0m \u001b[31m51.9 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n", + "\u001b[?25hDownloading image_classifiers-1.0.0-py3-none-any.whl (19 kB)\n", + "Downloading keras-2.14.0-py3-none-any.whl (1.7 MB)\n", + "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m1.7/1.7 MB\u001b[0m \u001b[31m4.9 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n", + "\u001b[?25hDownloading Keras_Applications-1.0.8-py3-none-any.whl (50 kB)\n", + "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m50.7/50.7 kB\u001b[0m \u001b[31m1.2 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n", + "\u001b[?25hDownloading ml_dtypes-0.2.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl (1.0 MB)\n", + "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m1.0/1.0 MB\u001b[0m \u001b[31m35.9 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n", + "\u001b[?25hDownloading numpy-1.24.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl (17.3 MB)\n", + "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m17.3/17.3 MB\u001b[0m \u001b[31m49.5 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n", + "\u001b[?25hDownloading scipy-1.15.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl (37.6 MB)\n", + "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m37.6/37.6 MB\u001b[0m \u001b[31m13.6 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n", + "\u001b[?25hDownloading 
segmentation_models-1.0.1-py3-none-any.whl (33 kB)\n", + "Downloading stringzilla-3.12.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_28_x86_64.whl (308 kB)\n", + "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m308.4/308.4 kB\u001b[0m \u001b[31m15.5 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n", + "\u001b[?25hDownloading tensorboard-2.14.1-py3-none-any.whl (5.5 MB)\n", + "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m5.5/5.5 MB\u001b[0m \u001b[31m72.1 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n", + "\u001b[?25hDownloading tensorflow-2.14.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl (489.9 MB)\n", + "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m489.9/489.9 MB\u001b[0m \u001b[31m1.6 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n", + "\u001b[?25hDownloading tensorflow_estimator-2.14.0-py2.py3-none-any.whl (440 kB)\n", + "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m440.7/440.7 kB\u001b[0m \u001b[31m29.6 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n", + "\u001b[?25hDownloading wrapt-1.14.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl (78 kB)\n", + "\u001b[2K \u001b[90m━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━\u001b[0m \u001b[32m78.4/78.4 kB\u001b[0m \u001b[31m6.5 MB/s\u001b[0m eta \u001b[36m0:00:00\u001b[0m\n", + "\u001b[?25hDownloading qudida-0.0.4-py3-none-any.whl (3.5 kB)\n", + "Installing collected packages: stringzilla, wrapt, tensorflow-estimator, numpy, keras, absl-py, scipy, ml-dtypes, h5py, Keras-Applications, google-auth-oauthlib, tensorboard, qudida, image-classifiers, efficientnet, tensorflow, segmentation-models, albumentations\n", + " Attempting uninstall: stringzilla\n", + " Found existing installation: stringzilla 3.12.2\n", + " Uninstalling stringzilla-3.12.2:\n", + " Successfully uninstalled stringzilla-3.12.2\n", + " 
Attempting uninstall: wrapt\n", + " Found existing installation: wrapt 1.17.2\n", + " Uninstalling wrapt-1.17.2:\n", + " Successfully uninstalled wrapt-1.17.2\n", + " Attempting uninstall: numpy\n", + " Found existing installation: numpy 1.26.4\n", + " Uninstalling numpy-1.26.4:\n", + " Successfully uninstalled numpy-1.26.4\n", + " Attempting uninstall: keras\n", + " Found existing installation: keras 3.8.0\n", + " Uninstalling keras-3.8.0:\n", + " Successfully uninstalled keras-3.8.0\n", + " Attempting uninstall: absl-py\n", + " Found existing installation: absl-py 1.4.0\n", + " Uninstalling absl-py-1.4.0:\n", + " Successfully uninstalled absl-py-1.4.0\n", + " Attempting uninstall: scipy\n", + " Found existing installation: scipy 1.13.1\n", + " Uninstalling scipy-1.13.1:\n", + " Successfully uninstalled scipy-1.13.1\n", + " Attempting uninstall: ml-dtypes\n", + " Found existing installation: ml-dtypes 0.4.1\n", + " Uninstalling ml-dtypes-0.4.1:\n", + " Successfully uninstalled ml-dtypes-0.4.1\n", + " Attempting uninstall: h5py\n", + " Found existing installation: h5py 3.12.1\n", + " Uninstalling h5py-3.12.1:\n", + " Successfully uninstalled h5py-3.12.1\n", + " Attempting uninstall: google-auth-oauthlib\n", + " Found existing installation: google-auth-oauthlib 1.2.1\n", + " Uninstalling google-auth-oauthlib-1.2.1:\n", + " Successfully uninstalled google-auth-oauthlib-1.2.1\n", + " Attempting uninstall: tensorboard\n", + " Found existing installation: tensorboard 2.18.0\n", + " Uninstalling tensorboard-2.18.0:\n", + " Successfully uninstalled tensorboard-2.18.0\n", + " Attempting uninstall: tensorflow\n", + " Found existing installation: tensorflow 2.18.0\n", + " Uninstalling tensorflow-2.18.0:\n", + " Successfully uninstalled tensorflow-2.18.0\n", + " Attempting uninstall: albumentations\n", + " Found existing installation: albumentations 2.0.5\n", + " Uninstalling albumentations-2.0.5:\n", + " Successfully uninstalled albumentations-2.0.5\n", + "\u001b[31mERROR: 
pip's dependency resolver does not currently take into account all the packages that are installed. This behaviour is the source of the following dependency conflicts.\n", + "langchain 0.3.19 requires numpy<2,>=1.26.4; python_version < \"3.12\", but you have numpy 1.24.0 which is incompatible.\n", + "tf-keras 2.18.0 requires tensorflow<2.19,>=2.18, but you have tensorflow 2.14.0 which is incompatible.\n", + "seaborn 0.13.2 requires numpy!=1.24.0,>=1.20, but you have numpy 1.24.0 which is incompatible.\n", + "tensorstore 0.1.72 requires ml_dtypes>=0.3.1, but you have ml-dtypes 0.2.0 which is incompatible.\n", + "chex 0.1.89 requires numpy>=1.24.1, but you have numpy 1.24.0 which is incompatible.\n", + "gensim 4.3.3 requires scipy<1.14.0,>=1.7.0, but you have scipy 1.15.2 which is incompatible.\n", + "pymc 5.20.1 requires numpy>=1.25.0, but you have numpy 1.24.0 which is incompatible.\n", + "blosc2 3.2.0 requires numpy>=1.26, but you have numpy 1.24.0 which is incompatible.\n", + "tensorflow-text 2.18.1 requires tensorflow<2.19,>=2.18.0, but you have tensorflow 2.14.0 which is incompatible.\n", + "imbalanced-learn 0.13.0 requires numpy<3,>=1.24.3, but you have numpy 1.24.0 which is incompatible.\n", + "treescope 0.1.9 requires numpy>=1.25.2, but you have numpy 1.24.0 which is incompatible.\n", + "albucore 0.0.23 requires numpy>=1.24.4, but you have numpy 1.24.0 which is incompatible.\u001b[0m\u001b[31m\n", + "\u001b[0mSuccessfully installed Keras-Applications-1.0.8 absl-py-2.1.0 albumentations-1.3.0 efficientnet-1.0.0 google-auth-oauthlib-1.0.0 h5py-3.13.0 image-classifiers-1.0.0 keras-2.14.0 ml-dtypes-0.2.0 numpy-1.24.0 qudida-0.0.4 scipy-1.15.2 segmentation-models-1.0.1 stringzilla-3.12.1 tensorboard-2.14.1 tensorflow-2.14.0 tensorflow-estimator-2.14.0 wrapt-1.14.1\n" + ] + }, + { + "output_type": "display_data", + "data": { + "application/vnd.colab-display-data+json": { + "pip_warning": { + "packages": [ + "absl", + "h5py", + "keras", + "ml_dtypes", + "numpy", + 
"tensorflow", + "wrapt" + ] + }, + "id": "808c3ff15272425f946d871b63b84213" + } + }, + "metadata": {} + }, + { + "output_type": "stream", + "name": "stdout", + "text": [ + "Environment is ready !\n" + ] + } + ], + "source": [ + "import tensorflow as tf\n", + "\n", + "# Clone the µPIX source code\n", + "!git clone https://gitlab.lis-lab.fr/sicomp/mupix/\n", + "\n", + "# Prepare the Python environment\n", + "!pip install -r mupix/requirements.txt\n", + "\n", + "\n", + "print(\"Environment is ready !\")" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "vOT9fV5hk2cv" + }, + "source": [ + "## 2- Download ```metrology``` Dataset" + ] + }, + { + "cell_type": "code", + "execution_count": 2, + "metadata": { + "id": "UyLzE0SLk9Ip", + "colab": { + "base_uri": "https://localhost:8080/" + }, + "outputId": "1c0c2fa9-66e5-4d68-b47f-aa3c01113995" + }, + "outputs": [ + { + "output_type": "stream", + "name": "stdout", + "text": [ + "------> Dataset\n", + " % Total % Received % Xferd Average Speed Time Time Time Current\n", + " Dload Upload Total Spent Left Speed\n", + "100 479M 100 479M 0 0 25.6M 0 0:00:18 0:00:18 --:--:-- 30.6M\n", + "\n", + "The dataset has been created into ./dataset_metrology\n", + "\n" + ] + } + ], + "source": [ + "print(\"------> Dataset\")\n", + "#download the Metrology Dataset\n", + "!curl -o metrology.zip https://sync.lis-lab.fr/index.php/s/mYDRTeAQxMxNPPJ/download -q\n", + "!unzip -qq metrology.zip\n", + "!rm -f metrology.zip\n", + "!mv metrology dataset_metrology\n", + "print(\"\")\n", + "print(\"The dataset has been created into ./dataset_metrology\")\n", + "print(\"\")" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "sVs-8WbYlP9_" + }, + "source": [ + "## 3- Create a new µPIX experiment" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "12ibiT5boPuM" + }, + "source": [ + "Before running µPIX for training, you need to create a *µPIX experiment*.\n", + "\n", + "The `new_experiment.py` script allows you to 
set up a new experiment by specifying the required paths and parameters.\n", + "\n", + "<u>Usage</u>\n", + "```\n", + "python new_experiment.py --experiment_name EXPERIMENT_NAME\n", + " --experiment_path EXPERIMENT_PATH\n", + " --clean_data_path CLEAN_DATA_PATH\n", + " --noisy_data_path NOISY_DATA_PATH\n", + " [--test_data_path TEST_DATA_PATH]\n", + "```\n", + "<u>Details</u>\n", + "\n", + "| Argument | Description |\n", + "|--------------------------|-------------|\n", + "| `--experiment_name` | Name of the experiment. |\n", + "| `--experiment_path` | Path where the experiment will be saved. |\n", + "| `--clean_data_path` | Path to the clean dataset. |\n", + "| `--noisy_data_path` | Path to the noisy dataset. |\n", + "| `--test_data_path` *(optional)* | Path to the test dataset (if available). |" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "OfQ0FOw01IPB" + }, + "source": [ + "Execute the following cell to create the ```metrology``` experiment inside the ```./experiments``` path (```experiment_path```) using images inside ```./dataset_metrology/train/GT ``` as clean images (```clean_data_path```) and images inside ```./dataset_metrology/train/low``` as their corresponding noisy images (```noisy_data_path```) and providing a set of test images located inside ```./dataset_metrology/test``` (```test_data_path```)." 
+ ] + }, + { + "cell_type": "code", + "execution_count": 3, + "metadata": { + "id": "Wtnbj4jMlTMn", + "colab": { + "base_uri": "https://localhost:8080/" + }, + "outputId": "5b7169b2-5e88-43d6-8bd0-1524c7c2eebb" + }, + "outputs": [ + { + "output_type": "stream", + "name": "stdout", + "text": [ + "Experiment 'metrology' created successfully at ./experiments/metrology\n" + ] + } + ], + "source": [ + "\n", + "!python mupix/new_experiment.py --experiment_name metrology --experiment_path ./experiments --clean_data_path ./dataset_metrology/train/GT --noisy_data_path ./dataset_metrology/train/low --test_data_path ./dataset_metrology/test\n", + "\n" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "fwvMJC4ipcpU" + }, + "source": [ + "This command has created a new `experiments/metrology` directory containing:\n", + "- `hyperparameters.json`: default parameters used by µPIX\n", + "- `results` directory: will contain training logs (```logs.txt```), model checkpoints (```networks```), and images generated during training (```images```) using ```mupixtraining.py```.\n", + "- `predictions` directory: will contain the images generated on the test dataset using the script ```mupixinfer.py```\n", + "\n", + "\n", + "For information, here is the ```hyperparameters.json``` file created by default by µPIX for our example experiment, which can be freely modified\n", + "\n", + "```json\n", + "{\n", + " \"learning_rate_generator\": 0.001,\n", + " \"learning_rate_discriminator\": 0.001,\n", + " \"batch_size\": 16,\n", + " \"num_epochs\": 100,\n", + " \"loss_weight\": 10,\n", + " \"tile_size\": 256,\n", + " \"patience\": 20,\n", + " \"valid_size\": 0.1,\n", + " \"seed\": 42,\n", + " \"data_paths\": {\n", + " \"clean\": \"./data/metrology/GT/\",\n", + " \"noisy\": \"./data/metrology/low\",\n", + " \"test\": null\n", + " }\n", + "}\n", + "```\n", + "\n", + "| Hyperparameter | Type | Description |\n", + "|--------------------------------|---------|-------------|\n", + "| 
`learning_rate_generator` | `float` | Learning rate for the generator. |\n", + "| `learning_rate_discriminator` | `float` | Learning rate for the discriminator. |\n", + "| `batch_size` | `int` | Number of samples per batch. |\n", + "| `num_epochs` | `int` | Total number of training epochs. |\n", + "| `loss_weight` | `int` | Weight factor for µPIX loss calculation. |\n", + "| `tile_size` | `int` | Size of image tiles used for training. |\n", + "| `patience` | `int` | Number of epochs to wait before triggering µPIX early stopping if no improvement. |\n", + "| `valid_size` | `float` | Proportion of the dataset used for validation. |\n", + "| `seed` | `int` | Random seed for reproducibility. |\n", + "| `data_paths.clean` | `str` | Path to the clean dataset. |\n", + "| `data_paths.noisy` | `str` | Path to the noisy dataset. |\n", + "| `data_paths.test` | `str or null` | Path to the test dataset (if available). |\n", + "\n" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "xnNAGU8xW6Vl" + }, + "source": [ + "## 4- Train a µPIX model from scratch" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "tRJ40n7aBRpw" + }, + "source": [ + "The `mupixtraining.py` script allows you to train a µPIX model by specifying the required path to an existing experiment.\n", + "\n", + "<u>Usage</u>\n", + "```\n", + "python mupixtraining.py --experiment_path EXPERIMENT_PATH [--retrain]\n", + "```\n", + "\n", + "<u>Details</u>\n", + "\n", + "| Argument | Description |\n", + "|--------------------------|-------------|\n", + "| `--experiment_path` | Path to the previously created experiment. |\n", + "| `--retrain` *(optional)* | Use this flag to continue training an existing µPIX model located inside the experiment path. |\n" + ] + }, + { + "cell_type": "markdown", + "source": [ + "For information: A complete training of a µPIX model on the ```metrology``` dataset will take around 7 hours to achieve good performance." 
+ ], + "metadata": { + "id": "V0ZpHWBFKe_Q" + } + }, + { + "cell_type": "code", + "execution_count": 4, + "metadata": { + "id": "qFQ7p-UIMcjE", + "colab": { + "base_uri": "https://localhost:8080/" + }, + "outputId": "11077ec3-8255-4dfd-937e-45d4ae01ec37" + }, + "outputs": [ + { + "output_type": "stream", + "name": "stdout", + "text": [ + "2025-03-08 21:31:38.638242: E tensorflow/compiler/xla/stream_executor/cuda/cuda_dnn.cc:9342] Unable to register cuDNN factory: Attempting to register factory for plugin cuDNN when one has already been registered\n", + "2025-03-08 21:31:38.638307: E tensorflow/compiler/xla/stream_executor/cuda/cuda_fft.cc:609] Unable to register cuFFT factory: Attempting to register factory for plugin cuFFT when one has already been registered\n", + "2025-03-08 21:31:38.638373: E tensorflow/compiler/xla/stream_executor/cuda/cuda_blas.cc:1518] Unable to register cuBLAS factory: Attempting to register factory for plugin cuBLAS when one has already been registered\n", + "2025-03-08 21:31:40.315928: W tensorflow/compiler/tf2tensorrt/utils/py_utils.cc:38] TF-TRT Warning: Could not find TensorRT\n", + "Segmentation Models: using `tf.keras` framework.\n", + "Input Path: ('./dataset_metrology/train/low', './dataset_metrology/train/GT')\n", + "Output Path: ./experiments/metrology/results\n", + "Retraining: False\n", + "Data loading...\n", + "85 images loaded\n", + "2212 tiles extracted for training\n", + "303 tiles extracted for validation\n", + "Data loaded !\n", + "Training from scratch...\n", + "2025-03-08 21:32:22.978769: W tensorflow/core/common_runtime/gpu/gpu_device.cc:2211] Cannot dlopen some GPU libraries. Please make sure the missing libraries mentioned above are installed properly if you would like to use GPU. 
Follow the guide at https://www.tensorflow.org/install/gpu for how to download and setup the required libraries for your platform.\n", + "Skipping registering GPU devices...\n", + "/usr/local/lib/python3.11/dist-packages/keras/src/initializers/initializers.py:120: UserWarning: The initializer RandomNormal is unseeded and being called multiple times, which will return identical values each time (even if the initializer is unseeded). Please update your code to provide a seed to the initializer, or avoid using the same initializer instance more than once.\n", + " warnings.warn(\n", + "2025-03-08 21:32:34.387127: W tensorflow/tsl/framework/cpu_allocator_impl.cc:83] Allocation of 335544320 exceeds 10% of free system memory.\n", + " 1/10 [==>...........................] - ETA: 1:312025-03-08 21:32:42.430760: W tensorflow/tsl/framework/cpu_allocator_impl.cc:83] Allocation of 335544320 exceeds 10% of free system memory.\n", + " 2/10 [=====>........................] - ETA: 1:052025-03-08 21:32:49.727382: W tensorflow/tsl/framework/cpu_allocator_impl.cc:83] Allocation of 335544320 exceeds 10% of free system memory.\n", + " 3/10 [========>.....................] - ETA: 53s 2025-03-08 21:32:57.825063: W tensorflow/tsl/framework/cpu_allocator_impl.cc:83] Allocation of 335544320 exceeds 10% of free system memory.\n", + " 4/10 [===========>..................] 
- ETA: 46s2025-03-08 21:33:04.680492: W tensorflow/tsl/framework/cpu_allocator_impl.cc:83] Allocation of 335544320 exceeds 10% of free system memory.\n", + "10/10 [==============================] - 75s 7s/step\n", + "Epoch 1\n", + "Val> mse[ 10773.987 ], ssim[ -0.0027225784 ]\n", + "1/1 [==============================] - 4s 4s/step\n", + "Step> 1 , Generator loss : 8.976433753967285\n", + "1/1 [==============================] - 3s 3s/step\n", + "Step> 2 , Generator loss : 5.328129768371582\n", + "1/1 [==============================] - 3s 3s/step\n", + "Step> 3 , Generator loss : 4.082814693450928\n", + "1/1 [==============================] - 3s 3s/step\n", + "Step> 4 , Generator loss : 2.691023826599121\n", + "1/1 [==============================] - 3s 3s/step\n", + "Step> 5 , Generator loss : 2.0744059085845947\n", + "1/1 [==============================] - 3s 3s/step\n", + "Step> 6 , Generator loss : 2.042612075805664\n", + "1/1 [==============================] - 4s 4s/step\n", + "Step> 7 , Generator loss : 1.7480155229568481\n", + "1/1 [==============================] - 4s 4s/step\n", + "Step> 8 , Generator loss : 1.9312176704406738\n", + "1/1 [==============================] - 4s 4s/step\n", + "Step> 9 , Generator loss : 1.5063207149505615\n", + "1/1 [==============================] - 3s 3s/step\n", + "Step> 10 , Generator loss : 1.8604661226272583\n", + "1/1 [==============================] - 3s 3s/step\n", + "Step> 11 , Generator loss : 1.7569278478622437\n", + "1/1 [==============================] - 3s 3s/step\n", + "Step> 12 , Generator loss : 1.104780912399292\n", + "1/1 [==============================] - 3s 3s/step\n", + "Step> 13 , Generator loss : 1.1997020244598389\n", + "1/1 [==============================] - 4s 4s/step\n", + "Step> 14 , Generator loss : 1.0613642930984497\n", + "1/1 [==============================] - 4s 4s/step\n", + "Step> 15 , Generator loss : 1.1111645698547363\n", + "1/1 [==============================] - 3s 3s/step\n", + "Step> 16 
, Generator loss : 1.169526219367981\n", + "1/1 [==============================] - 3s 3s/step\n", + "Step> 17 , Generator loss : 1.0044934749603271\n", + "1/1 [==============================] - 3s 3s/step\n", + "Step> 18 , Generator loss : 0.8804041743278503\n", + "1/1 [==============================] - 3s 3s/step\n", + "Traceback (most recent call last):\n", + " File \"/content/mupix/mupixtraining.py\", line 85, in <module>\n", + " main()\n", + " File \"/content/mupix/mupixtraining.py\", line 79, in main\n", + " history = train(d_model, g_model, gan_model, train_dataset, output_path,\n", + " ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\n", + " File \"/content/mupix/mupixutils.py\", line 236, in train\n", + " g_loss1,_ ,_ = gan_model.train_on_batch(X_realA, [y_real, X_realB])\n", + " ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\n", + " File \"/usr/local/lib/python3.11/dist-packages/keras/src/engine/training.py\", line 2763, in train_on_batch\n", + " logs = self.train_function(iterator)\n", + " ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\n", + " File \"/usr/local/lib/python3.11/dist-packages/tensorflow/python/util/traceback_utils.py\", line 150, in error_handler\n", + " return fn(*args, **kwargs)\n", + " ^^^^^^^^^^^^^^^^^^^\n", + " File \"/usr/local/lib/python3.11/dist-packages/tensorflow/python/eager/polymorphic_function/polymorphic_function.py\", line 831, in __call__\n", + " result = self._call(*args, **kwds)\n", + " ^^^^^^^^^^^^^^^^^^^^^^^^^\n", + " File \"/usr/local/lib/python3.11/dist-packages/tensorflow/python/eager/polymorphic_function/polymorphic_function.py\", line 867, in _call\n", + " return tracing_compilation.call_function(\n", + " ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\n", + " File \"/usr/local/lib/python3.11/dist-packages/tensorflow/python/eager/polymorphic_function/tracing_compilation.py\", line 139, in call_function\n", + " return function._call_flat( # pylint: disable=protected-access\n", + " 
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\n", + " File \"/usr/local/lib/python3.11/dist-packages/tensorflow/python/eager/polymorphic_function/concrete_function.py\", line 1264, in _call_flat\n", + " return self._inference_function.flat_call(args)\n", + " ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\n", + " File \"/usr/local/lib/python3.11/dist-packages/tensorflow/python/eager/polymorphic_function/atomic_function.py\", line 217, in flat_call\n", + " flat_outputs = self(*args)\n", + " ^^^^^^^^^^^\n", + " File \"/usr/local/lib/python3.11/dist-packages/tensorflow/python/eager/polymorphic_function/atomic_function.py\", line 252, in __call__\n", + " outputs = self._bound_context.call_function(\n", + " ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\n", + " File \"/usr/local/lib/python3.11/dist-packages/tensorflow/python/eager/context.py\", line 1479, in call_function\n", + " outputs = execute.execute(\n", + " ^^^^^^^^^^^^^^^^\n", + " File \"/usr/local/lib/python3.11/dist-packages/tensorflow/python/eager/execute.py\", line 60, in quick_execute\n", + " tensors = pywrap_tfe.TFE_Py_Execute(ctx._handle, device_name, op_name,\n", + " ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^\n", + "KeyboardInterrupt\n", + "^C\n" + ] + } + ], + "source": [ + "\n", + "!python mupix/mupixtraining.py --experiment_path ./experiments/metrology\n" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "rgmzo9Ibnum_" + }, + "source": [ + "## 5- µPIX inference on the Test Dataset defined inside the µPIX experiment" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "nTXPxRQHow_i" + }, + "source": [ + "\n", + "The `mupixinfer.py` script allows you to use a trained µPIX model to denoise a dataset located in the `test` directory inside the experiment path." 
+ ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "HV3M8NWVwmQg" + }, + "source": [ + "**As the Metrology test dataset contains 20 big images (5000x5000 px), for this demo on Google Colab, to save time and resources, we will only make the inference on one test image.\n", + "If you want to make the inference on the whole test dataset, please do not execute the next cell!**" + ] + }, + { + "cell_type": "code", + "execution_count": 5, + "metadata": { + "id": "kQifxxhGw6wi", + "colab": { + "base_uri": "https://localhost:8080/" + }, + "outputId": "74ad1f47-83e7-4a4e-9a93-7dcd2b4b583a" + }, + "outputs": [ + { + "output_type": "stream", + "name": "stdout", + "text": [ + "Keeping: X_StageData0000.tif\n", + "Deleted: X_StageData0001.tif\n", + "Deleted: X_StageData0002.tif\n", + "Deleted: X_StageData0003.tif\n", + "Deleted: X_StageData0004.tif\n", + "Deleted: X_StageData0005.tif\n", + "Deleted: X_StageData0006.tif\n", + "Deleted: X_StageData0007.tif\n", + "Deleted: X_StageData0008.tif\n", + "Deleted: X_StageData0009.tif\n", + "Deleted: X_StageData0010.tif\n", + "Deleted: X_StageData0011.tif\n", + "Deleted: X_StageData0012.tif\n", + "Deleted: X_StageData0013.tif\n", + "Deleted: X_StageData0014.tif\n", + "Deleted: X_StageData0015.tif\n", + "Deleted: X_StageData0016.tif\n", + "Deleted: X_StageData0017.tif\n", + "Deleted: X_StageData0018.tif\n", + "Deleted: X_StageData0019.tif\n", + "Operation completed.\n" + ] + } + ], + "source": [ + "import os\n", + "\n", + "def keep_first_tiff(directory):\n", + " tiff_files = sorted([f for f in os.listdir(directory) if f.lower().endswith('.tiff') or f.lower().endswith('.tif')])\n", + " if not tiff_files:\n", + " print(\"No TIFF files found in the directory.\")\n", + " return\n", + " first_file = tiff_files[0]\n", + " print(f\"Keeping: {first_file}\")\n", + " for file in tiff_files[1:]:\n", + " file_path = os.path.join(directory, file)\n", + " os.remove(file_path)\n", + " print(f\"Deleted: {file}\")\n", + " 
print(\"Operation completed.\")\n", + "\n", + "keep_first_tiff(\"./dataset_metrology/test/\")" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "0BP6VjE9xUKw" + }, + "source": [ + "Now we can make the inference on the images:" + ] + }, + { + "cell_type": "code", + "execution_count": 6, + "metadata": { + "id": "olpZ3GkRnGrx", + "colab": { + "base_uri": "https://localhost:8080/" + }, + "outputId": "0ba8e44e-8f52-4314-e1cc-593e6a5e75ac" + }, + "outputs": [ + { + "output_type": "stream", + "name": "stdout", + "text": [ + "2025-03-08 21:44:14.645641: E tensorflow/compiler/xla/stream_executor/cuda/cuda_dnn.cc:9342] Unable to register cuDNN factory: Attempting to register factory for plugin cuDNN when one has already been registered\n", + "2025-03-08 21:44:14.645705: E tensorflow/compiler/xla/stream_executor/cuda/cuda_fft.cc:609] Unable to register cuFFT factory: Attempting to register factory for plugin cuFFT when one has already been registered\n", + "2025-03-08 21:44:14.645758: E tensorflow/compiler/xla/stream_executor/cuda/cuda_blas.cc:1518] Unable to register cuBLAS factory: Attempting to register factory for plugin cuBLAS when one has already been registered\n", + "2025-03-08 21:44:16.585052: W tensorflow/compiler/tf2tensorrt/utils/py_utils.cc:38] TF-TRT Warning: Could not find TensorRT\n", + "Segmentation Models: using `tf.keras` framework.\n", + "Output Path: ./experiments/metrology//predictions\n", + "Data loading...\n", + "2025-03-08 21:44:18.465025: W tensorflow/core/common_runtime/gpu/gpu_device.cc:2211] Cannot dlopen some GPU libraries. Please make sure the missing libraries mentioned above are installed properly if you would like to use GPU. 
Follow the guide at https://www.tensorflow.org/install/gpu for how to download and setup the required libraries for your platform.\n", + "Skipping registering GPU devices...\n", + "Saved weights to ./experiments/metrology/results/networks/Generator.h5\n", + "2025-03-08 21:44:30.406306: W tensorflow/tsl/framework/cpu_allocator_impl.cc:83] Allocation of 838860800 exceeds 10% of free system memory.\n", + "2025-03-08 21:44:31.194614: W tensorflow/tsl/framework/cpu_allocator_impl.cc:83] Allocation of 838860800 exceeds 10% of free system memory.\n", + "2025-03-08 21:44:31.993997: W tensorflow/tsl/framework/cpu_allocator_impl.cc:83] Allocation of 838860800 exceeds 10% of free system memory.\n", + "2025-03-08 21:44:32.986493: W tensorflow/tsl/framework/cpu_allocator_impl.cc:83] Allocation of 838860800 exceeds 10% of free system memory.\n", + "2025-03-08 21:44:33.562535: W tensorflow/tsl/framework/cpu_allocator_impl.cc:83] Allocation of 838860800 exceeds 10% of free system memory.\n", + "^C\n" + ] + } + ], + "source": [ + "!python mupix/mupixinfer.py --experiment_path ./experiments/metrology/" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "613wtxNNp-o7" + }, + "source": [ + "## 6- Results" + ] + }, + { + "cell_type": "markdown", + "metadata": { + "id": "DcXLM_McqC-5" + }, + "source": [ + "The generated denoised images from the test set are now located inside the ```experiments/metrology/predictions/``` directory and you can download them individually on your local machine (right-click and select Download).\n", + "\n", + "If you want to visualize some results, execute the cell below to check some random µPIX predicted images from the predictions directory." 
+ ] + }, + { + "cell_type": "code", + "execution_count": 7, + "metadata": { + "id": "jvs788gfqgH7", + "colab": { + "base_uri": "https://localhost:8080/" + }, + "outputId": "2e190754-3be2-4a35-90b7-f5b330f66bd2" + }, + "outputs": [ + { + "output_type": "stream", + "name": "stdout", + "text": [ + "Predicted image not found: ./experiments/metrology/predictions/X_StageData0000.tif\n" + ] + } + ], + "source": [ + "import matplotlib.pyplot as plt\n", + "import tifffile\n", + "import os\n", + "import random\n", + "\n", + "# Define directories\n", + "test_dir = \"./dataset_metrology/test/\"\n", + "pred_dir = \"./experiments/metrology/predictions/\" # Update this to match your experiment path\n", + "\n", + "# Get list of test images\n", + "tif_files = [f for f in os.listdir(test_dir) if f.endswith(\".tif\")]\n", + "\n", + "if tif_files:\n", + " # Select a random test image\n", + " random_file = random.choice(tif_files)\n", + " random_test_path = os.path.join(test_dir, random_file)\n", + "\n", + " # Load the test image\n", + " low = tifffile.imread(random_test_path)\n", + "\n", + " # Construct the path for the corresponding predicted image\n", + " random_pred_path = os.path.join(pred_dir, random_file)\n", + "\n", + " if os.path.exists(random_pred_path):\n", + " # Load the predicted image\n", + " pred = tifffile.imread(random_pred_path)\n", + "\n", + " # Display side-by-side\n", + " fig, axes = plt.subplots(1, 2, figsize=(10, 5))\n", + "\n", + " axes[0].imshow(low, cmap=\"gray\")\n", + " axes[0].axis(\"off\")\n", + " axes[0].set_title(\"Input Image\")\n", + "\n", + " axes[1].imshow(pred, cmap=\"gray\")\n", + " axes[1].axis(\"off\")\n", + " axes[1].set_title(\"µPIX: Predicted Image\")\n", + "\n", + " plt.show()\n", + " else:\n", + " print(f\"Predicted image not found: {random_pred_path}\")" + ] + } + ], + "metadata": { + "colab": { + "provenance": [], + "gpuType": "T4" + }, + "kernelspec": { + "display_name": "Python 3", + "name": "python3" + }, + "language_info": { + "name": 
"python" + }, + "accelerator": "GPU" + }, + "nbformat": 4, + "nbformat_minor": 0 +} \ No newline at end of file -- GitLab