diff --git a/Debugging notebook.ipynb b/Debugging notebook.ipynb
index 95a87c855ae7dc662c4b7f80f360f3e6801a6e31..c4a361b7d8dc64099d2907c916c16cce18f00936 100644
--- a/Debugging notebook.ipynb	
+++ b/Debugging notebook.ipynb	
@@ -2,7 +2,7 @@
  "cells": [
   {
    "cell_type": "code",
-   "execution_count": 42,
+   "execution_count": 1,
    "id": "9cb721f3",
    "metadata": {},
    "outputs": [
@@ -10,8 +10,6 @@
      "name": "stdout",
      "output_type": "stream",
      "text": [
-      "The autoreload extension is already loaded. To reload it, use:\n",
-      "  %reload_ext autoreload\n",
       "/home/tushar\n",
       "tushar-Aspire-E5-573G\r\n"
      ]
@@ -27,7 +25,7 @@
   },
   {
    "cell_type": "code",
-   "execution_count": 43,
+   "execution_count": 2,
    "id": "f3bc22e3",
    "metadata": {},
    "outputs": [
@@ -45,10 +43,20 @@
   },
   {
    "cell_type": "code",
-   "execution_count": 44,
+   "execution_count": 3,
    "id": "4a70407f",
    "metadata": {},
-   "outputs": [],
+   "outputs": [
+    {
+     "name": "stdout",
+     "output_type": "stream",
+     "text": [
+      "ln: failed to create symbolic link './utils.py': File exists\n",
+      "ln: failed to create symbolic link './model.py': File exists\n",
+      "ln: failed to create symbolic link './data.py': File exists\n"
+     ]
+    }
+   ],
    "source": [
     "import sys, os\n",
     "from argparse import ArgumentParser\n",
@@ -85,7 +93,7 @@
   },
   {
    "cell_type": "code",
-   "execution_count": 45,
+   "execution_count": 4,
    "id": "87d9307f",
    "metadata": {},
    "outputs": [
@@ -95,7 +103,7 @@
        "True"
       ]
      },
-     "execution_count": 45,
+     "execution_count": 4,
      "metadata": {},
      "output_type": "execute_result"
     }
@@ -106,7 +114,7 @@
   },
   {
    "cell_type": "code",
-   "execution_count": 46,
+   "execution_count": 5,
    "id": "8d6399a3",
    "metadata": {},
    "outputs": [
@@ -114,7 +122,7 @@
      "name": "stdout",
      "output_type": "stream",
      "text": [
-      "Thu Apr 14 12:40:02 2022       \r\n",
+      "Sun Apr 17 18:07:10 2022       \r\n",
       "+-----------------------------------------------------------------------------+\r\n",
       "| NVIDIA-SMI 470.103.01   Driver Version: 470.103.01   CUDA Version: 11.4     |\r\n",
       "|-------------------------------+----------------------+----------------------+\r\n",
@@ -123,7 +131,7 @@
       "|                               |                      |               MIG M. |\r\n",
       "|===============================+======================+======================|\r\n",
       "|   0  NVIDIA GeForce ...  Off  | 00000000:04:00.0 Off |                  N/A |\r\n",
-      "| N/A   52C    P0    N/A /  N/A |    377MiB /  4046MiB |      7%      Default |\r\n",
+      "| N/A   51C    P0    N/A /  N/A |    473MiB /  4046MiB |      9%      Default |\r\n",
       "|                               |                      |                  N/A |\r\n",
       "+-------------------------------+----------------------+----------------------+\r\n",
       "                                                                               \r\n",
@@ -132,12 +140,12 @@
       "|  GPU   GI   CI        PID   Type   Process name                  GPU Memory |\r\n",
       "|        ID   ID                                                   Usage      |\r\n",
       "|=============================================================================|\r\n",
-      "|    0   N/A  N/A      1493      G   /usr/lib/xorg/Xorg                 37MiB |\r\n",
-      "|    0   N/A  N/A      6260      G   /usr/lib/xorg/Xorg                181MiB |\r\n",
-      "|    0   N/A  N/A      6458      G   /usr/bin/gnome-shell               44MiB |\r\n",
-      "|    0   N/A  N/A     10206      G   ...AAAAAAAAA= --shared-files       17MiB |\r\n",
-      "|    0   N/A  N/A     10335      G   ...AAAAAAAAA= --shared-files       30MiB |\r\n",
-      "|    0   N/A  N/A     14982      G   ...239163976282227841,131072       50MiB |\r\n",
+      "|    0   N/A  N/A      1436      G   /usr/lib/xorg/Xorg                 37MiB |\r\n",
+      "|    0   N/A  N/A     46125      G   /usr/lib/xorg/Xorg                259MiB |\r\n",
+      "|    0   N/A  N/A     49935      G   /usr/bin/gnome-shell               59MiB |\r\n",
+      "|    0   N/A  N/A     50452      G   ...AAAAAAAAA= --shared-files       19MiB |\r\n",
+      "|    0   N/A  N/A     50567      G   ...AAAAAAAAA= --shared-files        3MiB |\r\n",
+      "|    0   N/A  N/A   3040068      G   ...646674068933540177,131072       78MiB |\r\n",
       "+-----------------------------------------------------------------------------+\r\n"
      ]
     }
@@ -148,7 +156,7 @@
   },
   {
    "cell_type": "code",
-   "execution_count": 47,
+   "execution_count": 6,
    "id": "b38146b4",
    "metadata": {},
    "outputs": [
@@ -437,7 +445,7 @@
   },
   {
    "cell_type": "code",
-   "execution_count": 48,
+   "execution_count": 7,
    "id": "09c34226",
    "metadata": {},
    "outputs": [
@@ -462,7 +470,7 @@
   },
   {
    "cell_type": "code",
-   "execution_count": 55,
+   "execution_count": 8,
    "id": "c51d27d2",
    "metadata": {},
    "outputs": [
@@ -482,7 +490,7 @@
       "    --num_nodes=1 \\\n",
       "    --sync_batchnorm=true \\\n",
       "    --max_epochs=10 \\\n",
-      "    --fast_dev_run=false \\\n",
+      "    --fast_dev_run=True \\\n",
       "    --log_every_n_steps=8 \\\n"
      ]
     }
@@ -505,7 +513,7 @@
     "    \"--sync_batchnorm=true\",\n",
     "    \"--max_epochs=10\",\n",
     "    #\"--accelerator=ddp\",\n",
-    "    \"--fast_dev_run=false\",\n",
+    "    \"--fast_dev_run=True\",\n",
     "    \"--log_every_n_steps=8\",\n",
     "]\n",
     "\n",
@@ -529,7 +537,7 @@
   },
   {
    "cell_type": "code",
-   "execution_count": 56,
+   "execution_count": 9,
    "id": "0cc0ca0c",
    "metadata": {},
    "outputs": [
@@ -549,7 +557,7 @@
   },
   {
    "cell_type": "code",
-   "execution_count": 57,
+   "execution_count": 10,
    "id": "6d2b5a5e",
    "metadata": {},
    "outputs": [],
@@ -561,7 +569,7 @@
   },
   {
    "cell_type": "code",
-   "execution_count": 58,
+   "execution_count": 11,
    "id": "98f0a0d2",
    "metadata": {},
    "outputs": [],
@@ -586,7 +594,7 @@
   },
   {
    "cell_type": "code",
-   "execution_count": 59,
+   "execution_count": 12,
    "id": "688357f4",
    "metadata": {},
    "outputs": [
@@ -594,10 +602,18 @@
      "name": "stderr",
      "output_type": "stream",
      "text": [
+      "/home/tushar/pli-env/venv/lib/python3.8/site-packages/pytorch_lightning/trainer/connectors/callback_connector.py:151: LightningDeprecationWarning: Setting `Trainer(checkpoint_callback=True)` is deprecated in v1.5 and will be removed in v1.7. Please consider using `Trainer(enable_checkpointing=True)`.\n",
+      "  rank_zero_deprecation(\n",
       "GPU available: True, used: True\n",
       "TPU available: False, using: 0 TPU cores\n",
       "IPU available: False, using: 0 IPUs\n",
-      "HPU available: False, using: 0 HPUs\n"
+      "HPU available: False, using: 0 HPUs\n",
+      "Running in fast_dev_run mode: will run a full train, val, test and prediction loop using 1 batch(es).\n",
+      "`Trainer(limit_train_batches=1)` was configured so 1 batch per epoch will be used.\n",
+      "`Trainer(limit_val_batches=1)` was configured so 1 batch will be used.\n",
+      "`Trainer(limit_test_batches=1)` was configured so 1 batch will be used.\n",
+      "`Trainer(limit_predict_batches=1)` was configured so 1 batch will be used.\n",
+      "`Trainer(val_check_interval=1.0)` was configured so validation will run at the end of the training epoch..\n"
      ]
     }
    ],
@@ -613,10 +629,29 @@
   },
   {
    "cell_type": "code",
-   "execution_count": 60,
+   "execution_count": 15,
    "id": "602a2a44",
-   "metadata": {},
+   "metadata": {
+    "scrolled": true
+   },
    "outputs": [
+    {
+     "name": "stderr",
+     "output_type": "stream",
+     "text": [
+      "LOCAL_RANK: 0 - CUDA_VISIBLE_DEVICES: [0]\n",
+      "\n",
+      "  | Name   | Type   | Params\n",
+      "----------------------------------\n",
+      "0 | model  | Unet   | 24.4 M\n",
+      "1 | loss_f | L1Loss | 0     \n",
+      "----------------------------------\n",
+      "24.4 M    Trainable params\n",
+      "0         Non-trainable params\n",
+      "24.4 M    Total params\n",
+      "97.720    Total estimated model params size (MB)\n"
+     ]
+    },
     {
      "name": "stdout",
      "output_type": "stream",
@@ -627,21 +662,49 @@
       "Validation data for rank 0/1 already prepared\n"
      ]
     },
+    {
+     "data": {
+      "application/vnd.jupyter.widget-view+json": {
+       "model_id": "e6fee0d71dff48dcb9dac6455ac1c9a2",
+       "version_major": 2,
+       "version_minor": 0
+      },
+      "text/plain": [
+       "Training: 0it [00:00, ?it/s]"
+      ]
+     },
+     "metadata": {},
+     "output_type": "display_data"
+    },
     {
      "ename": "TypeError",
-     "evalue": "__init__() missing 1 required positional argument: 'dataset_size'",
+     "evalue": "Caught TypeError in DataLoader worker process 0.\nOriginal Traceback (most recent call last):\n  File \"/home/tushar/pli-env/venv/lib/python3.8/site-packages/torch/utils/data/_utils/worker.py\", line 287, in _worker_loop\n    data = fetcher.fetch(index)\n  File \"/home/tushar/pli-env/venv/lib/python3.8/site-packages/torch/utils/data/_utils/fetch.py\", line 49, in fetch\n    data = [self.dataset[idx] for idx in possibly_batched_index]\n  File \"/home/tushar/pli-env/venv/lib/python3.8/site-packages/torch/utils/data/_utils/fetch.py\", line 49, in <listcomp>\n    data = [self.dataset[idx] for idx in possibly_batched_index]\n  File \"/home/tushar/myDebugging/data.py\", line 59, in __getitem__\n    pli_temp = h5py.File(os.path.join(pli_path, self.list_of_pli[i]), 'r')\nTypeError: 'NoneType' object is not subscriptable\n",
      "output_type": "error",
      "traceback": [
       "\u001b[0;31m---------------------------------------------------------------------------\u001b[0m",
       "\u001b[0;31mTypeError\u001b[0m                                 Traceback (most recent call last)",
-      "Input \u001b[0;32mIn [60]\u001b[0m, in \u001b[0;36m<module>\u001b[0;34m\u001b[0m\n\u001b[1;32m      1\u001b[0m \u001b[38;5;28mprint\u001b[39m(\u001b[38;5;124mf\u001b[39m\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mStart training on \u001b[39m\u001b[38;5;132;01m{\u001b[39;00margs\u001b[38;5;241m.\u001b[39mgpus\u001b[38;5;132;01m}\u001b[39;00m\u001b[38;5;124m GPUs\u001b[39m\u001b[38;5;124m\"\u001b[39m)\n\u001b[1;32m      2\u001b[0m trainer\u001b[38;5;241m.\u001b[39mlogger\u001b[38;5;241m.\u001b[39mlog_hyperparams(dict_args)\n\u001b[0;32m----> 3\u001b[0m \u001b[43mtrainer\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mfit\u001b[49m\u001b[43m(\u001b[49m\u001b[43mtest_model\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mdatamodule\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mdm\u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m      4\u001b[0m \u001b[38;5;28mprint\u001b[39m(\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mDone\u001b[39m\u001b[38;5;124m\"\u001b[39m)\n",
+      "Input \u001b[0;32mIn [15]\u001b[0m, in \u001b[0;36m<module>\u001b[0;34m\u001b[0m\n\u001b[1;32m      1\u001b[0m \u001b[38;5;28mprint\u001b[39m(\u001b[38;5;124mf\u001b[39m\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mStart training on \u001b[39m\u001b[38;5;132;01m{\u001b[39;00margs\u001b[38;5;241m.\u001b[39mgpus\u001b[38;5;132;01m}\u001b[39;00m\u001b[38;5;124m GPUs\u001b[39m\u001b[38;5;124m\"\u001b[39m)\n\u001b[1;32m      2\u001b[0m trainer\u001b[38;5;241m.\u001b[39mlogger\u001b[38;5;241m.\u001b[39mlog_hyperparams(dict_args)\n\u001b[0;32m----> 3\u001b[0m \u001b[43mtrainer\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mfit\u001b[49m\u001b[43m(\u001b[49m\u001b[43mtest_model\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mdatamodule\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mdm\u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m      4\u001b[0m \u001b[38;5;28mprint\u001b[39m(\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mDone\u001b[39m\u001b[38;5;124m\"\u001b[39m)\n",
       "File \u001b[0;32m~/pli-env/venv/lib/python3.8/site-packages/pytorch_lightning/trainer/trainer.py:771\u001b[0m, in \u001b[0;36mTrainer.fit\u001b[0;34m(self, model, train_dataloaders, val_dataloaders, datamodule, ckpt_path)\u001b[0m\n\u001b[1;32m    752\u001b[0m \u001b[38;5;124mr\u001b[39m\u001b[38;5;124;03m\"\"\"\u001b[39;00m\n\u001b[1;32m    753\u001b[0m \u001b[38;5;124;03mRuns the full optimization routine.\u001b[39;00m\n\u001b[1;32m    754\u001b[0m \n\u001b[0;32m   (...)\u001b[0m\n\u001b[1;32m    768\u001b[0m \u001b[38;5;124;03m    datamodule: An instance of :class:`~pytorch_lightning.core.datamodule.LightningDataModule`.\u001b[39;00m\n\u001b[1;32m    769\u001b[0m \u001b[38;5;124;03m\"\"\"\u001b[39;00m\n\u001b[1;32m    770\u001b[0m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mstrategy\u001b[38;5;241m.\u001b[39mmodel \u001b[38;5;241m=\u001b[39m model\n\u001b[0;32m--> 771\u001b[0m \u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m_call_and_handle_interrupt\u001b[49m\u001b[43m(\u001b[49m\n\u001b[1;32m    772\u001b[0m \u001b[43m    \u001b[49m\u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m_fit_impl\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mmodel\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mtrain_dataloaders\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mval_dataloaders\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mdatamodule\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mckpt_path\u001b[49m\n\u001b[1;32m    773\u001b[0m \u001b[43m\u001b[49m\u001b[43m)\u001b[49m\n",
       "File \u001b[0;32m~/pli-env/venv/lib/python3.8/site-packages/pytorch_lightning/trainer/trainer.py:724\u001b[0m, in \u001b[0;36mTrainer._call_and_handle_interrupt\u001b[0;34m(self, trainer_fn, *args, **kwargs)\u001b[0m\n\u001b[1;32m    722\u001b[0m         \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mstrategy\u001b[38;5;241m.\u001b[39mlauncher\u001b[38;5;241m.\u001b[39mlaunch(trainer_fn, \u001b[38;5;241m*\u001b[39margs, trainer\u001b[38;5;241m=\u001b[39m\u001b[38;5;28mself\u001b[39m, \u001b[38;5;241m*\u001b[39m\u001b[38;5;241m*\u001b[39mkwargs)\n\u001b[1;32m    723\u001b[0m     \u001b[38;5;28;01melse\u001b[39;00m:\n\u001b[0;32m--> 724\u001b[0m         \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[43mtrainer_fn\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43margs\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43mkwargs\u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m    725\u001b[0m \u001b[38;5;66;03m# TODO: treat KeyboardInterrupt as BaseException (delete the code below) in v1.7\u001b[39;00m\n\u001b[1;32m    726\u001b[0m \u001b[38;5;28;01mexcept\u001b[39;00m \u001b[38;5;167;01mKeyboardInterrupt\u001b[39;00m \u001b[38;5;28;01mas\u001b[39;00m exception:\n",
       "File \u001b[0;32m~/pli-env/venv/lib/python3.8/site-packages/pytorch_lightning/trainer/trainer.py:812\u001b[0m, in \u001b[0;36mTrainer._fit_impl\u001b[0;34m(self, model, train_dataloaders, val_dataloaders, datamodule, ckpt_path)\u001b[0m\n\u001b[1;32m    808\u001b[0m ckpt_path \u001b[38;5;241m=\u001b[39m ckpt_path \u001b[38;5;129;01mor\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mresume_from_checkpoint\n\u001b[1;32m    809\u001b[0m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_ckpt_path \u001b[38;5;241m=\u001b[39m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m__set_ckpt_path(\n\u001b[1;32m    810\u001b[0m     ckpt_path, model_provided\u001b[38;5;241m=\u001b[39m\u001b[38;5;28;01mTrue\u001b[39;00m, model_connected\u001b[38;5;241m=\u001b[39m\u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mlightning_module \u001b[38;5;129;01mis\u001b[39;00m \u001b[38;5;129;01mnot\u001b[39;00m \u001b[38;5;28;01mNone\u001b[39;00m\n\u001b[1;32m    811\u001b[0m )\n\u001b[0;32m--> 812\u001b[0m results \u001b[38;5;241m=\u001b[39m \u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m_run\u001b[49m\u001b[43m(\u001b[49m\u001b[43mmodel\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mckpt_path\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mckpt_path\u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m    814\u001b[0m \u001b[38;5;28;01massert\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mstate\u001b[38;5;241m.\u001b[39mstopped\n\u001b[1;32m    815\u001b[0m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mtraining \u001b[38;5;241m=\u001b[39m \u001b[38;5;28;01mFalse\u001b[39;00m\n",
-      "File \u001b[0;32m~/pli-env/venv/lib/python3.8/site-packages/pytorch_lightning/trainer/trainer.py:1175\u001b[0m, in \u001b[0;36mTrainer._run\u001b[0;34m(self, model, ckpt_path)\u001b[0m\n\u001b[1;32m   1172\u001b[0m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mstrategy\u001b[38;5;241m.\u001b[39msetup_environment()\n\u001b[1;32m   1173\u001b[0m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m__setup_profiler()\n\u001b[0;32m-> 1175\u001b[0m \u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m_call_setup_hook\u001b[49m\u001b[43m(\u001b[49m\u001b[43m)\u001b[49m  \u001b[38;5;66;03m# allow user to setup lightning_module in accelerator environment\u001b[39;00m\n\u001b[1;32m   1177\u001b[0m \u001b[38;5;66;03m# check if we should delay restoring checkpoint till later\u001b[39;00m\n\u001b[1;32m   1178\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m \u001b[38;5;129;01mnot\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mstrategy\u001b[38;5;241m.\u001b[39mrestore_checkpoint_after_setup:\n",
-      "File \u001b[0;32m~/pli-env/venv/lib/python3.8/site-packages/pytorch_lightning/trainer/trainer.py:1493\u001b[0m, in \u001b[0;36mTrainer._call_setup_hook\u001b[0;34m(self)\u001b[0m\n\u001b[1;32m   1490\u001b[0m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mstrategy\u001b[38;5;241m.\u001b[39mbarrier(\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mpre_setup\u001b[39m\u001b[38;5;124m\"\u001b[39m)\n\u001b[1;32m   1492\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mdatamodule \u001b[38;5;129;01mis\u001b[39;00m \u001b[38;5;129;01mnot\u001b[39;00m \u001b[38;5;28;01mNone\u001b[39;00m:\n\u001b[0;32m-> 1493\u001b[0m     \u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mdatamodule\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43msetup\u001b[49m\u001b[43m(\u001b[49m\u001b[43mstage\u001b[49m\u001b[38;5;241;43m=\u001b[39;49m\u001b[43mfn\u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m   1494\u001b[0m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_call_callback_hooks(\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124msetup\u001b[39m\u001b[38;5;124m\"\u001b[39m, stage\u001b[38;5;241m=\u001b[39mfn)\n\u001b[1;32m   1495\u001b[0m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_call_lightning_module_hook(\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124msetup\u001b[39m\u001b[38;5;124m\"\u001b[39m, stage\u001b[38;5;241m=\u001b[39mfn)\n",
-      "File \u001b[0;32m~/myDebugging/data.py:161\u001b[0m, in \u001b[0;36mTestDataModule.setup\u001b[0;34m(self, stage)\u001b[0m\n\u001b[1;32m    152\u001b[0m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_test_transforms \u001b[38;5;241m=\u001b[39m A\u001b[38;5;241m.\u001b[39mCompose(\n\u001b[1;32m    153\u001b[0m     [\n\u001b[1;32m    154\u001b[0m         A\u001b[38;5;241m.\u001b[39mCenterCrop(p\u001b[38;5;241m=\u001b[39m\u001b[38;5;241m1\u001b[39m, height\u001b[38;5;241m=\u001b[39m\u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mpatch_size, width\u001b[38;5;241m=\u001b[39m\u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mpatch_size),\n\u001b[0;32m   (...)\u001b[0m\n\u001b[1;32m    157\u001b[0m     additional_targets\u001b[38;5;241m=\u001b[39m{\u001b[38;5;124m'\u001b[39m\u001b[38;5;124mpli_image\u001b[39m\u001b[38;5;124m'\u001b[39m: \u001b[38;5;124m'\u001b[39m\u001b[38;5;124mimage\u001b[39m\u001b[38;5;124m'\u001b[39m, \u001b[38;5;124m'\u001b[39m\u001b[38;5;124mcyto_image\u001b[39m\u001b[38;5;124m'\u001b[39m: \u001b[38;5;124m'\u001b[39m\u001b[38;5;124mimage\u001b[39m\u001b[38;5;124m'\u001b[39m}\n\u001b[1;32m    158\u001b[0m )\n\u001b[1;32m    160\u001b[0m \u001b[38;5;66;03m# Define the datasets for training and validation\u001b[39;00m\n\u001b[0;32m--> 161\u001b[0m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mtrain_sampler \u001b[38;5;241m=\u001b[39m \u001b[43mTestSampler\u001b[49m\u001b[43m(\u001b[49m\n\u001b[1;32m    162\u001b[0m \u001b[43m    \u001b[49m\u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mpli_train\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m    163\u001b[0m \u001b[43m    \u001b[49m\u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mcyto_train\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m    164\u001b[0m \u001b[43m    
\u001b[49m\u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m_train_transforms\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m    165\u001b[0m \u001b[43m    \u001b[49m\u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mcrop_size\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m    166\u001b[0m \u001b[43m    \u001b[49m\u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mtrain_size\u001b[49m\u001b[43m,\u001b[49m\n\u001b[1;32m    167\u001b[0m \u001b[43m\u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m    169\u001b[0m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mval_sampler \u001b[38;5;241m=\u001b[39m TestSampler(\n\u001b[1;32m    170\u001b[0m     \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mpli_val,\n\u001b[1;32m    171\u001b[0m     \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mcyto_val,\n\u001b[0;32m   (...)\u001b[0m\n\u001b[1;32m    175\u001b[0m     \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mdataset_size\n\u001b[1;32m    176\u001b[0m )\n",
-      "\u001b[0;31mTypeError\u001b[0m: __init__() missing 1 required positional argument: 'dataset_size'"
+      "File \u001b[0;32m~/pli-env/venv/lib/python3.8/site-packages/pytorch_lightning/trainer/trainer.py:1237\u001b[0m, in \u001b[0;36mTrainer._run\u001b[0;34m(self, model, ckpt_path)\u001b[0m\n\u001b[1;32m   1233\u001b[0m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_checkpoint_connector\u001b[38;5;241m.\u001b[39mrestore_training_state()\n\u001b[1;32m   1235\u001b[0m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_checkpoint_connector\u001b[38;5;241m.\u001b[39mresume_end()\n\u001b[0;32m-> 1237\u001b[0m results \u001b[38;5;241m=\u001b[39m \u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m_run_stage\u001b[49m\u001b[43m(\u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m   1239\u001b[0m log\u001b[38;5;241m.\u001b[39mdetail(\u001b[38;5;124mf\u001b[39m\u001b[38;5;124m\"\u001b[39m\u001b[38;5;132;01m{\u001b[39;00m\u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m\u001b[38;5;18m__class__\u001b[39m\u001b[38;5;241m.\u001b[39m\u001b[38;5;18m__name__\u001b[39m\u001b[38;5;132;01m}\u001b[39;00m\u001b[38;5;124m: trainer tearing down\u001b[39m\u001b[38;5;124m\"\u001b[39m)\n\u001b[1;32m   1240\u001b[0m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_teardown()\n",
+      "File \u001b[0;32m~/pli-env/venv/lib/python3.8/site-packages/pytorch_lightning/trainer/trainer.py:1324\u001b[0m, in \u001b[0;36mTrainer._run_stage\u001b[0;34m(self)\u001b[0m\n\u001b[1;32m   1322\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mpredicting:\n\u001b[1;32m   1323\u001b[0m     \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_run_predict()\n\u001b[0;32m-> 1324\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m_run_train\u001b[49m\u001b[43m(\u001b[49m\u001b[43m)\u001b[49m\n",
+      "File \u001b[0;32m~/pli-env/venv/lib/python3.8/site-packages/pytorch_lightning/trainer/trainer.py:1354\u001b[0m, in \u001b[0;36mTrainer._run_train\u001b[0;34m(self)\u001b[0m\n\u001b[1;32m   1352\u001b[0m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mfit_loop\u001b[38;5;241m.\u001b[39mtrainer \u001b[38;5;241m=\u001b[39m \u001b[38;5;28mself\u001b[39m\n\u001b[1;32m   1353\u001b[0m \u001b[38;5;28;01mwith\u001b[39;00m torch\u001b[38;5;241m.\u001b[39mautograd\u001b[38;5;241m.\u001b[39mset_detect_anomaly(\u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_detect_anomaly):\n\u001b[0;32m-> 1354\u001b[0m     \u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mfit_loop\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mrun\u001b[49m\u001b[43m(\u001b[49m\u001b[43m)\u001b[49m\n",
+      "File \u001b[0;32m~/pli-env/venv/lib/python3.8/site-packages/pytorch_lightning/loops/base.py:204\u001b[0m, in \u001b[0;36mLoop.run\u001b[0;34m(self, *args, **kwargs)\u001b[0m\n\u001b[1;32m    202\u001b[0m \u001b[38;5;28;01mtry\u001b[39;00m:\n\u001b[1;32m    203\u001b[0m     \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mon_advance_start(\u001b[38;5;241m*\u001b[39margs, \u001b[38;5;241m*\u001b[39m\u001b[38;5;241m*\u001b[39mkwargs)\n\u001b[0;32m--> 204\u001b[0m     \u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43madvance\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43margs\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43mkwargs\u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m    205\u001b[0m     \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mon_advance_end()\n\u001b[1;32m    206\u001b[0m     \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_restarting \u001b[38;5;241m=\u001b[39m \u001b[38;5;28;01mFalse\u001b[39;00m\n",
+      "File \u001b[0;32m~/pli-env/venv/lib/python3.8/site-packages/pytorch_lightning/loops/fit_loop.py:269\u001b[0m, in \u001b[0;36mFitLoop.advance\u001b[0;34m(self)\u001b[0m\n\u001b[1;32m    265\u001b[0m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_data_fetcher\u001b[38;5;241m.\u001b[39msetup(\n\u001b[1;32m    266\u001b[0m     dataloader, batch_to_device\u001b[38;5;241m=\u001b[39mpartial(\u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mtrainer\u001b[38;5;241m.\u001b[39m_call_strategy_hook, \u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mbatch_to_device\u001b[39m\u001b[38;5;124m\"\u001b[39m, dataloader_idx\u001b[38;5;241m=\u001b[39m\u001b[38;5;241m0\u001b[39m)\n\u001b[1;32m    267\u001b[0m )\n\u001b[1;32m    268\u001b[0m \u001b[38;5;28;01mwith\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mtrainer\u001b[38;5;241m.\u001b[39mprofiler\u001b[38;5;241m.\u001b[39mprofile(\u001b[38;5;124m\"\u001b[39m\u001b[38;5;124mrun_training_epoch\u001b[39m\u001b[38;5;124m\"\u001b[39m):\n\u001b[0;32m--> 269\u001b[0m     \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_outputs \u001b[38;5;241m=\u001b[39m \u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mepoch_loop\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mrun\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m_data_fetcher\u001b[49m\u001b[43m)\u001b[49m\n",
+      "File \u001b[0;32m~/pli-env/venv/lib/python3.8/site-packages/pytorch_lightning/loops/base.py:204\u001b[0m, in \u001b[0;36mLoop.run\u001b[0;34m(self, *args, **kwargs)\u001b[0m\n\u001b[1;32m    202\u001b[0m \u001b[38;5;28;01mtry\u001b[39;00m:\n\u001b[1;32m    203\u001b[0m     \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mon_advance_start(\u001b[38;5;241m*\u001b[39margs, \u001b[38;5;241m*\u001b[39m\u001b[38;5;241m*\u001b[39mkwargs)\n\u001b[0;32m--> 204\u001b[0m     \u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43madvance\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43margs\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43mkwargs\u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m    205\u001b[0m     \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mon_advance_end()\n\u001b[1;32m    206\u001b[0m     \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_restarting \u001b[38;5;241m=\u001b[39m \u001b[38;5;28;01mFalse\u001b[39;00m\n",
+      "File \u001b[0;32m~/pli-env/venv/lib/python3.8/site-packages/pytorch_lightning/loops/epoch/training_epoch_loop.py:171\u001b[0m, in \u001b[0;36mTrainingEpochLoop.advance\u001b[0;34m(self, data_fetcher)\u001b[0m\n\u001b[1;32m    169\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m \u001b[38;5;129;01mnot\u001b[39;00m \u001b[38;5;28misinstance\u001b[39m(data_fetcher, DataLoaderIterDataFetcher):\n\u001b[1;32m    170\u001b[0m     batch_idx \u001b[38;5;241m=\u001b[39m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mbatch_idx \u001b[38;5;241m+\u001b[39m \u001b[38;5;241m1\u001b[39m\n\u001b[0;32m--> 171\u001b[0m     batch \u001b[38;5;241m=\u001b[39m \u001b[38;5;28;43mnext\u001b[39;49m\u001b[43m(\u001b[49m\u001b[43mdata_fetcher\u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m    172\u001b[0m \u001b[38;5;28;01melse\u001b[39;00m:\n\u001b[1;32m    173\u001b[0m     batch_idx, batch \u001b[38;5;241m=\u001b[39m \u001b[38;5;28mnext\u001b[39m(data_fetcher)\n",
+      "File \u001b[0;32m~/pli-env/venv/lib/python3.8/site-packages/pytorch_lightning/utilities/fetching.py:184\u001b[0m, in \u001b[0;36mAbstractDataFetcher.__next__\u001b[0;34m(self)\u001b[0m\n\u001b[1;32m    183\u001b[0m \u001b[38;5;28;01mdef\u001b[39;00m \u001b[38;5;21m__next__\u001b[39m(\u001b[38;5;28mself\u001b[39m) \u001b[38;5;241m-\u001b[39m\u001b[38;5;241m>\u001b[39m Any:\n\u001b[0;32m--> 184\u001b[0m     \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mfetching_function\u001b[49m\u001b[43m(\u001b[49m\u001b[43m)\u001b[49m\n",
+      "File \u001b[0;32m~/pli-env/venv/lib/python3.8/site-packages/pytorch_lightning/utilities/fetching.py:259\u001b[0m, in \u001b[0;36mDataFetcher.fetching_function\u001b[0;34m(self)\u001b[0m\n\u001b[1;32m    256\u001b[0m \u001b[38;5;28;01melif\u001b[39;00m \u001b[38;5;129;01mnot\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mdone:\n\u001b[1;32m    257\u001b[0m     \u001b[38;5;66;03m# this will run only when no pre-fetching was done.\u001b[39;00m\n\u001b[1;32m    258\u001b[0m     \u001b[38;5;28;01mtry\u001b[39;00m:\n\u001b[0;32m--> 259\u001b[0m         \u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m_fetch_next_batch\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mdataloader_iter\u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m    260\u001b[0m         \u001b[38;5;66;03m# consume the batch we just fetched\u001b[39;00m\n\u001b[1;32m    261\u001b[0m         batch \u001b[38;5;241m=\u001b[39m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mbatches\u001b[38;5;241m.\u001b[39mpop(\u001b[38;5;241m0\u001b[39m)\n",
+      "File \u001b[0;32m~/pli-env/venv/lib/python3.8/site-packages/pytorch_lightning/utilities/fetching.py:273\u001b[0m, in \u001b[0;36mDataFetcher._fetch_next_batch\u001b[0;34m(self, iterator)\u001b[0m\n\u001b[1;32m    271\u001b[0m \u001b[38;5;28;01mdef\u001b[39;00m \u001b[38;5;21m_fetch_next_batch\u001b[39m(\u001b[38;5;28mself\u001b[39m, iterator: Iterator) \u001b[38;5;241m-\u001b[39m\u001b[38;5;241m>\u001b[39m \u001b[38;5;28;01mNone\u001b[39;00m:\n\u001b[1;32m    272\u001b[0m     start_output \u001b[38;5;241m=\u001b[39m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mon_fetch_start()\n\u001b[0;32m--> 273\u001b[0m     batch \u001b[38;5;241m=\u001b[39m \u001b[38;5;28;43mnext\u001b[39;49m\u001b[43m(\u001b[49m\u001b[43miterator\u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m    274\u001b[0m     \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mfetched \u001b[38;5;241m+\u001b[39m\u001b[38;5;241m=\u001b[39m \u001b[38;5;241m1\u001b[39m\n\u001b[1;32m    275\u001b[0m     \u001b[38;5;28;01mif\u001b[39;00m \u001b[38;5;129;01mnot\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39mprefetch_batches \u001b[38;5;129;01mand\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_has_len:\n\u001b[1;32m    276\u001b[0m         \u001b[38;5;66;03m# when we don't prefetch but the dataloader is sized, we use the length for `done`\u001b[39;00m\n",
+      "File \u001b[0;32m~/pli-env/venv/lib/python3.8/site-packages/pytorch_lightning/trainer/supporters.py:553\u001b[0m, in \u001b[0;36mCombinedLoaderIterator.__next__\u001b[0;34m(self)\u001b[0m\n\u001b[1;32m    547\u001b[0m \u001b[38;5;28;01mdef\u001b[39;00m \u001b[38;5;21m__next__\u001b[39m(\u001b[38;5;28mself\u001b[39m) \u001b[38;5;241m-\u001b[39m\u001b[38;5;241m>\u001b[39m Any:\n\u001b[1;32m    548\u001b[0m     \u001b[38;5;124;03m\"\"\"Fetches the next batch from multiple data loaders.\u001b[39;00m\n\u001b[1;32m    549\u001b[0m \n\u001b[1;32m    550\u001b[0m \u001b[38;5;124;03m    Returns:\u001b[39;00m\n\u001b[1;32m    551\u001b[0m \u001b[38;5;124;03m        a collections of batch data\u001b[39;00m\n\u001b[1;32m    552\u001b[0m \u001b[38;5;124;03m    \"\"\"\u001b[39;00m\n\u001b[0;32m--> 553\u001b[0m     \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mrequest_next_batch\u001b[49m\u001b[43m(\u001b[49m\u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mloader_iters\u001b[49m\u001b[43m)\u001b[49m\n",
+      "File \u001b[0;32m~/pli-env/venv/lib/python3.8/site-packages/pytorch_lightning/trainer/supporters.py:565\u001b[0m, in \u001b[0;36mCombinedLoaderIterator.request_next_batch\u001b[0;34m(loader_iters)\u001b[0m\n\u001b[1;32m    555\u001b[0m \u001b[38;5;129m@staticmethod\u001b[39m\n\u001b[1;32m    556\u001b[0m \u001b[38;5;28;01mdef\u001b[39;00m \u001b[38;5;21mrequest_next_batch\u001b[39m(loader_iters: Union[Iterator, Sequence, Mapping]) \u001b[38;5;241m-\u001b[39m\u001b[38;5;241m>\u001b[39m Any:\n\u001b[1;32m    557\u001b[0m     \u001b[38;5;124;03m\"\"\"Return the batch of data from multiple iterators.\u001b[39;00m\n\u001b[1;32m    558\u001b[0m \n\u001b[1;32m    559\u001b[0m \u001b[38;5;124;03m    Args:\u001b[39;00m\n\u001b[0;32m   (...)\u001b[0m\n\u001b[1;32m    563\u001b[0m \u001b[38;5;124;03m        Any: a collections of batch data\u001b[39;00m\n\u001b[1;32m    564\u001b[0m \u001b[38;5;124;03m    \"\"\"\u001b[39;00m\n\u001b[0;32m--> 565\u001b[0m     \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[43mapply_to_collection\u001b[49m\u001b[43m(\u001b[49m\u001b[43mloader_iters\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[43mIterator\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;28;43mnext\u001b[39;49m\u001b[43m)\u001b[49m\n",
+      "File \u001b[0;32m~/pli-env/venv/lib/python3.8/site-packages/pytorch_lightning/utilities/apply_func.py:99\u001b[0m, in \u001b[0;36mapply_to_collection\u001b[0;34m(data, dtype, function, wrong_dtype, include_none, *args, **kwargs)\u001b[0m\n\u001b[1;32m     97\u001b[0m \u001b[38;5;66;03m# Breaking condition\u001b[39;00m\n\u001b[1;32m     98\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m \u001b[38;5;28misinstance\u001b[39m(data, dtype) \u001b[38;5;129;01mand\u001b[39;00m (wrong_dtype \u001b[38;5;129;01mis\u001b[39;00m \u001b[38;5;28;01mNone\u001b[39;00m \u001b[38;5;129;01mor\u001b[39;00m \u001b[38;5;129;01mnot\u001b[39;00m \u001b[38;5;28misinstance\u001b[39m(data, wrong_dtype)):\n\u001b[0;32m---> 99\u001b[0m     \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[43mfunction\u001b[49m\u001b[43m(\u001b[49m\u001b[43mdata\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43margs\u001b[49m\u001b[43m,\u001b[49m\u001b[43m \u001b[49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[38;5;241;43m*\u001b[39;49m\u001b[43mkwargs\u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m    101\u001b[0m elem_type \u001b[38;5;241m=\u001b[39m \u001b[38;5;28mtype\u001b[39m(data)\n\u001b[1;32m    103\u001b[0m \u001b[38;5;66;03m# Recursively apply to collection items\u001b[39;00m\n",
+      "File \u001b[0;32m~/pli-env/venv/lib/python3.8/site-packages/torch/utils/data/dataloader.py:530\u001b[0m, in \u001b[0;36m_BaseDataLoaderIter.__next__\u001b[0;34m(self)\u001b[0m\n\u001b[1;32m    528\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_sampler_iter \u001b[38;5;129;01mis\u001b[39;00m \u001b[38;5;28;01mNone\u001b[39;00m:\n\u001b[1;32m    529\u001b[0m     \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_reset()\n\u001b[0;32m--> 530\u001b[0m data \u001b[38;5;241m=\u001b[39m \u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m_next_data\u001b[49m\u001b[43m(\u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m    531\u001b[0m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_num_yielded \u001b[38;5;241m+\u001b[39m\u001b[38;5;241m=\u001b[39m \u001b[38;5;241m1\u001b[39m\n\u001b[1;32m    532\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_dataset_kind \u001b[38;5;241m==\u001b[39m _DatasetKind\u001b[38;5;241m.\u001b[39mIterable \u001b[38;5;129;01mand\u001b[39;00m \\\n\u001b[1;32m    533\u001b[0m         \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_IterableDataset_len_called \u001b[38;5;129;01mis\u001b[39;00m \u001b[38;5;129;01mnot\u001b[39;00m \u001b[38;5;28;01mNone\u001b[39;00m \u001b[38;5;129;01mand\u001b[39;00m \\\n\u001b[1;32m    534\u001b[0m         \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_num_yielded \u001b[38;5;241m>\u001b[39m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_IterableDataset_len_called:\n",
+      "File \u001b[0;32m~/pli-env/venv/lib/python3.8/site-packages/torch/utils/data/dataloader.py:1224\u001b[0m, in \u001b[0;36m_MultiProcessingDataLoaderIter._next_data\u001b[0;34m(self)\u001b[0m\n\u001b[1;32m   1222\u001b[0m \u001b[38;5;28;01melse\u001b[39;00m:\n\u001b[1;32m   1223\u001b[0m     \u001b[38;5;28;01mdel\u001b[39;00m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_task_info[idx]\n\u001b[0;32m-> 1224\u001b[0m     \u001b[38;5;28;01mreturn\u001b[39;00m \u001b[38;5;28;43mself\u001b[39;49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43m_process_data\u001b[49m\u001b[43m(\u001b[49m\u001b[43mdata\u001b[49m\u001b[43m)\u001b[49m\n",
+      "File \u001b[0;32m~/pli-env/venv/lib/python3.8/site-packages/torch/utils/data/dataloader.py:1250\u001b[0m, in \u001b[0;36m_MultiProcessingDataLoaderIter._process_data\u001b[0;34m(self, data)\u001b[0m\n\u001b[1;32m   1248\u001b[0m \u001b[38;5;28mself\u001b[39m\u001b[38;5;241m.\u001b[39m_try_put_index()\n\u001b[1;32m   1249\u001b[0m \u001b[38;5;28;01mif\u001b[39;00m \u001b[38;5;28misinstance\u001b[39m(data, ExceptionWrapper):\n\u001b[0;32m-> 1250\u001b[0m     \u001b[43mdata\u001b[49m\u001b[38;5;241;43m.\u001b[39;49m\u001b[43mreraise\u001b[49m\u001b[43m(\u001b[49m\u001b[43m)\u001b[49m\n\u001b[1;32m   1251\u001b[0m \u001b[38;5;28;01mreturn\u001b[39;00m data\n",
+      "File \u001b[0;32m~/pli-env/venv/lib/python3.8/site-packages/torch/_utils.py:457\u001b[0m, in \u001b[0;36mExceptionWrapper.reraise\u001b[0;34m(self)\u001b[0m\n\u001b[1;32m    453\u001b[0m \u001b[38;5;28;01mexcept\u001b[39;00m \u001b[38;5;167;01mTypeError\u001b[39;00m:\n\u001b[1;32m    454\u001b[0m     \u001b[38;5;66;03m# If the exception takes multiple arguments, don't try to\u001b[39;00m\n\u001b[1;32m    455\u001b[0m     \u001b[38;5;66;03m# instantiate since we don't know how to\u001b[39;00m\n\u001b[1;32m    456\u001b[0m     \u001b[38;5;28;01mraise\u001b[39;00m \u001b[38;5;167;01mRuntimeError\u001b[39;00m(msg) \u001b[38;5;28;01mfrom\u001b[39;00m \u001b[38;5;28mNone\u001b[39m\n\u001b[0;32m--> 457\u001b[0m \u001b[38;5;28;01mraise\u001b[39;00m exception\n",
+      "\u001b[0;31mTypeError\u001b[0m: Caught TypeError in DataLoader worker process 0.\nOriginal Traceback (most recent call last):\n  File \"/home/tushar/pli-env/venv/lib/python3.8/site-packages/torch/utils/data/_utils/worker.py\", line 287, in _worker_loop\n    data = fetcher.fetch(index)\n  File \"/home/tushar/pli-env/venv/lib/python3.8/site-packages/torch/utils/data/_utils/fetch.py\", line 49, in fetch\n    data = [self.dataset[idx] for idx in possibly_batched_index]\n  File \"/home/tushar/pli-env/venv/lib/python3.8/site-packages/torch/utils/data/_utils/fetch.py\", line 49, in <listcomp>\n    data = [self.dataset[idx] for idx in possibly_batched_index]\n  File \"/home/tushar/myDebugging/data.py\", line 59, in __getitem__\n    pli_temp = h5py.File(os.path.join(pli_path, self.list_of_pli[i]), 'r')\nTypeError: 'NoneType' object is not subscriptable\n"
      ]
     }
    ],
@@ -671,7 +734,7 @@
   {
    "cell_type": "code",
    "execution_count": null,
-   "id": "5c35576b",
+   "id": "9642645f",
    "metadata": {},
    "outputs": [],
    "source": []
diff --git a/code/data.py b/code/data.py
index be1364d06a44d60fcd4a7ad099caf3da39cfd887..fbfd801da98fcf8735a2ff102712739b00df5ef1 100644
--- a/code/data.py
+++ b/code/data.py
@@ -24,13 +24,14 @@ comm = MPI.COMM_WORLD
 # stained example data directory(cyto_path): '/p/fastdata/pli/Private/oberstrass1/datasets/vervet1818/vervet1818-stained/data/aligned/stained'
 
 
-pli_path = '/media/tushar/A2246889246861F1/Master Thesis MAIA/example-data/pli/NTransmittance'
-cyto_path = '/media/tushar/A2246889246861F1/Master Thesis MAIA/example-data/stained'
+#pli_path = '/media/tushar/A2246889246861F1/Master Thesis MAIA/example-data/pli/NTransmittance'
+pli_path = '/p/fastdata/pli/Private/oberstrass1/datasets/vervet1818/vervet1818-stained/data/aligned/pli/NTransmittance'
 
-pli_files_list = [file for file in os.listdir(pli_path) if
-                  file.endswith(('.h5', '.hdf', '.h4', '.hdf4', '.he2', '.hdf5', '.he5'))]
-cyto_files_list = [file for file in os.listdir(cyto_path) if
-                   file.endswith(('.h5', '.hdf', '.h4', '.hdf4', '.he2', '.hdf5', '.he5'))]
+#cyto_path = '/media/tushar/A2246889246861F1/Master Thesis MAIA/example-data/stained'
+cyto_path = '/p/fastdata/pli/Private/oberstrass1/datasets/vervet1818/vervet1818-stained/data/aligned/stained'
+
+pli_files_list = [file for file in os.listdir(pli_path) if file.endswith(('.h5', '.hdf', '.h4', '.hdf4', '.he2', '.hdf5', '.he5'))]
+cyto_files_list = [file for file in os.listdir(cyto_path) if file.endswith(('.h5', '.hdf', '.h4', '.hdf4', '.he2', '.hdf5', '.he5'))]
 
 
 # print(len(pli_files_list))
@@ -38,27 +39,35 @@ cyto_files_list = [file for file in os.listdir(cyto_path) if
 # print(pli_files_list)
 # print(cyto_files_list)
 
+num_images = len(pli_files_list)
 
 class TestSampler(Dataset):
     # Gives you a random crop and a random image at each request
 
-    def __init__(self, pli_files_list, cyto_files_list, n_images, transforms, crop_size, dataset_size):
+    def __init__(self, pli_files_list, cyto_files_list, transforms, crop_size, dataset_size):
         # crop_size is the size before the rotation and center crop. So the patch_size * sqrt(2)
         # dataset_size defines the number of drawn patches per epoch. As we are drawing (arbitrary many) random patches we have to set is manually
         super().__init__()
         # list of pli has to be in the same order as list of cyto. So index i in pli should correspond to the same index in cyto
         self.list_of_pli = pli_files_list
         self.list_of_cyto = cyto_files_list
-        self.n_images = len(pli_files_list)
+        self.n_images = len(pli_files_list)  # length of the list actually passed in, not the module-level count
         self.transforms = transforms
         self.crop_size = crop_size
         self.dataset_size = dataset_size
 
     def __getitem__(self, ix):
         # Get a random image
-        i = random.randint(self.n_images)
-        pli_image = self.list_of_pli[i]
-        cyto_image = self.list_of_cyto[i]
+        i = random.randint(0, self.n_images-1)
+
+        '''
+        pli_temp = h5py.File(os.path.join(pli_path, self.list_of_pli[i]), 'r')
+        pli_image = pli_temp.get('Image').value
+        #pli_image = self.list_of_pli[i]
+        cyto_temp = h5py.File(os.path.join(cyto_path, self.list_of_cyto[i]), 'r')
+        cyto_image = cyto_temp.get('Image').value
+        #cyto_image = self.list_of_cyto[i]
+        '''
 
         # Generate a random patch location from the image
         x = random.randint(image.shape[1] - self.crop_size)
@@ -120,8 +129,8 @@ class TestDataModule(pl.LightningDataModule):
 
             # TODO: Load the PLI and Cytp train data here as lists of numpy arrays: List[np.ndarray]
             # Load the pyramid/00 per file
-            self.pli_train = h5py.File(os.path.join(pli_path, files[:3]), 'r')
-            self.cyto_train = h5py.File(os.path.join(cyto_path, files[:3]), 'r')
+            self.pli_train = [h5py.File(os.path.join(pli_path, f), 'r') for f in pli_files_list[:3]]
+            self.cyto_train = [h5py.File(os.path.join(cyto_path, f), 'r') for f in cyto_files_list[:3]]
         else:
             print(f"Train data for rank {rank}/{size} already prepared")
 
@@ -131,8 +140,8 @@ class TestDataModule(pl.LightningDataModule):
             # TODO: Load the PLI and Cytp val data here as lists of numpy arrays: List[np.ndarray]
             # This should contain only unseen images
             # Load the pyramid/00 per file
-            self.pli_val = h5py.File(os.path.join(pli_path, files[4]), 'r')
-            self.cyto_val = h5py.File(os.path.join(cyto_path, files[4]), 'r')
+            self.pli_val = h5py.File(os.path.join(pli_path, pli_files_list[4]), 'r')
+            self.cyto_val = h5py.File(os.path.join(cyto_path, cyto_files_list[4]), 'r')
         else:
             print(f"Validation data for rank {rank}/{size} already prepared")
 
@@ -163,7 +172,7 @@ class TestDataModule(pl.LightningDataModule):
             self.cyto_train,
             self._train_transforms,
             self.crop_size,
-            self.train_size,
+            self.train_size
         )
 
         self.val_sampler = TestSampler(
@@ -172,7 +181,6 @@ class TestDataModule(pl.LightningDataModule):
             self._test_transforms,
             self.crop_size,
             self.val_size,
-            self.dataset_size
         )
 
     def train_dataloader(self):
diff --git a/doc/tensorboard/Ragib_UNet/version_7/events.out.tfevents.1649932860.tushar-Aspire-E5-573G.134444.4 b/doc/tensorboard/Ragib_UNet/version_7/events.out.tfevents.1649932860.tushar-Aspire-E5-573G.134444.4
index c6ee840d42120f0bce72caf9300b70d742adfb77..63350e27fbf54413b0cb5ff08a12853484b6efc6 100644
Binary files a/doc/tensorboard/Ragib_UNet/version_7/events.out.tfevents.1649932860.tushar-Aspire-E5-573G.134444.4 and b/doc/tensorboard/Ragib_UNet/version_7/events.out.tfevents.1649932860.tushar-Aspire-E5-573G.134444.4 differ
diff --git a/main.py b/main.py
index 09d51901fe8fb970460719cfe1db088b6f689873..2251a9de4f7d4c2bc910e15a9de40eebd7442e39 100644
--- a/main.py
+++ b/main.py
@@ -1,4 +1,105 @@
-%load_ext autoreload
-%autoreload 2
-%cd ../
-!hostname
\ No newline at end of file
+### Package Import ###
+
+'''
+#Argeparse
+import argparse
+parser = argparse.ArgumentParser()
+parser.add_argument('--foo', help='foo help')
+args = parser.parse_args()
+
+print(args)
+'''
+
+
+import sys, os
+from argparse import ArgumentParser
+
+import numpy as np
+import pandas as pd
+import h5py as h5
+
+import torch
+import pytorch_lightning as pl
+from pytorch_lightning.loggers import TensorBoardLogger
+from pytorch_lightning.callbacks import ModelCheckpoint
+
+import pli
+import pli.image as im
+
+# Distributed
+from atlasmpi import MPI
+
+comm = MPI.COMM_WORLD
+
+# Add code path
+sys.path.insert(0, "code/")
+
+import utils
+import model
+import data
+
+
+def main():
+    parser = ArgumentParser()
+    parser = pl.Trainer.add_argparse_args(parser)
+    parser = data.TestDataModule.add_argparse_args(parser)
+    parser = model.TestModule.add_model_specific_args(parser)
+    parser.add_argument('--log_dir', type=str, default='doc/tensorboard/')
+    parser.add_argument('--ckpt_dir', type=str, default='tmp/ckpt/')
+    parser.add_argument('--save_every_n_epochs', type=int, default=None)
+    args = parser.parse_args()
+    dict_args = vars(args)
+
+    print("Create model")
+    test_model = model.TestModule.from_argparse_args(args)
+    print(f"Model '{test_model.name}' loaded")
+
+    print("Load train data and create Data Module")
+    dm = data.TestDataModule.from_argparse_args(args)
+    dm.prepare_data()
+
+    print("Define logger")
+    logger = TensorBoardLogger(
+        save_dir=args.log_dir,
+        name=test_model.name,
+        version=None,
+    )
+
+    print("Define checkpoints")
+    checkpoint_callback = ModelCheckpoint(
+        dirpath=os.path.join(args.ckpt_dir, test_model.name, f'version_{logger.version}'),
+        filename=f"{test_model.name}_{logger.version}_" + '{epoch}_{val_loss:.3g}_{train_loss:.3g}',
+        every_n_epochs=args.save_every_n_epochs,
+        monitor='val_loss',
+        verbose=True,
+        save_last=True,
+    )
+
+    print("Create trainer")
+    trainer = pl.Trainer.from_argparse_args(
+        args,
+        checkpoint_callback=True,
+        callbacks=[checkpoint_callback],
+        logger=logger,
+    )
+
+    print(f"Start training on {args.gpus} GPUs")
+    trainer.logger.log_hyperparams(dict_args)
+    trainer.fit(test_model, datamodule=dm)
+
+    print("Done")
+    exit(0)
+
+
+if __name__ == '__main__':
+    if comm.rank == 0:
+        print("Initialize training...")
+
+        # Environment information
+        print("PLI-Helpers", pli.__version__)
+        print("Pytorch-Lightning", pl.__version__)
+        print("Pytorch", torch.__version__)
+        print("GPUs", torch.cuda.device_count())
+
+        print("")
+    main()