From c1778695fa97d37ccc937b9669f96a135a8c788a Mon Sep 17 00:00:00 2001 From: Lionel Peer Date: Sun, 12 May 2024 19:34:54 +0200 Subject: [PATCH] added random deform --- check_dataset.ipynb | 63 + create_dataset.ipynb | 1393 +++++++++++++++++ diffusion_models/models/diffusion_openai.py | 4 +- diffusion_models/spine_dataset/__init__.py | 0 .../spine_dataset/augmentations.py | 57 + .../spine_dataset/base_dataset.py | 90 ++ diffusion_models/spine_dataset/collations.py | 18 + .../spine_dataset/get_dataloader.py | 50 + .../random_deformation_dataset.py | 641 ++++++++ diffusion_models/spine_dataset/shapenet.py | 141 ++ .../spine_dataset/spatial_transformer.py | 94 ++ diffusion_models/utils/datasets.py | 16 +- tests/train_generative.py | 28 +- 13 files changed, 2571 insertions(+), 24 deletions(-) create mode 100644 check_dataset.ipynb create mode 100644 create_dataset.ipynb create mode 100644 diffusion_models/spine_dataset/__init__.py create mode 100644 diffusion_models/spine_dataset/augmentations.py create mode 100644 diffusion_models/spine_dataset/base_dataset.py create mode 100644 diffusion_models/spine_dataset/collations.py create mode 100644 diffusion_models/spine_dataset/get_dataloader.py create mode 100644 diffusion_models/spine_dataset/random_deformation_dataset.py create mode 100644 diffusion_models/spine_dataset/shapenet.py create mode 100644 diffusion_models/spine_dataset/spatial_transformer.py diff --git a/check_dataset.ipynb b/check_dataset.ipynb new file mode 100644 index 0000000..09ca490 --- /dev/null +++ b/check_dataset.ipynb @@ -0,0 +1,63 @@ +{ + "cells": [ + { + "cell_type": "code", + "execution_count": 22, + "metadata": {}, + "outputs": [], + "source": [ + "from diffusion_models.utils.datasets import LumbarSpineDataset\n", + "\n", + "ds = LumbarSpineDataset()" + ] + }, + { + "cell_type": "code", + "execution_count": 23, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "163584" + ] + }, + "execution_count": 23, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "len(ds)" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [] + } + ], + "metadata": { + "kernelspec": { + "display_name": "liotorch", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.11.5" + } + }, + "nbformat": 4, + "nbformat_minor": 2 +} diff --git a/create_dataset.ipynb b/create_dataset.ipynb new file mode 100644 index 0000000..77f2d49 --- /dev/null +++ b/create_dataset.ipynb @@ -0,0 +1,1393 @@ +{ + "cells": [ + { + "cell_type": "code", + "execution_count": 1, + "metadata": {}, + "outputs": [], + "source": [ + "import sys\n", + "sys.path.append(\"/home/lionel/Documents/spine_diffusion\")\n", + "import os\n", + "from spine_diffusion.datasets.random_deformation_dataset import RandomDeformationDataset\n", + "from jaxtyping import Float, Float32, Int64\n", + "import torch\n", + "from torch import Tensor\n", + "from torch.utils.data import DataLoader\n", + "from spine_diffusion.datasets.collations import collate_fn\n", + "from torchvision.utils import save_image\n", + "\n", + "def multiclassLong_to_multiChannel(\n", + " multiclass: Int64[Tensor, \"batch 1 depth height width\"]\n", + " ) -> Float[Tensor, \"batch num_classes depth height width\"]:\n", + " \"\"\"Create multi-channel 
tensor from multi-class Int tensor.\"\"\"\n", + " assert (multiclass.dim() == 5) and (multiclass.shape[1] == 1) and (multiclass.dtype == torch.int64)\n", + " u_vals = multiclass.unique()\n", + " multiclass = [(multiclass==elem).to(torch.float32) for elem in u_vals]\n", + " return torch.cat(multiclass, dim=1)\n", + "\n", + "def multiChannel_to_multiclassLong(\n", + " multichannel: Float[Tensor, \"batch num_classes depth height width\"]\n", + ") -> Int64[Tensor, \"batch 1 depth height width\"]:\n", + " \"\"\"Create multi-class Int tensor from multi-channel predictions.\"\"\"\n", + " _, max_idx = torch.max(multichannel, dim=1)\n", + " return max_idx" + ] + }, + { + "cell_type": "code", + "execution_count": 2, + "metadata": {}, + "outputs": [ + { + "name": "stdout", + "output_type": "stream", + "text": [ + "No specific patient is chosen. Taking all patients in this dataset folder.\n", + "hqFiles: 74\n", + "Preprocessing files...\n", + "Preprocessing done. Files: 74 mode: train\n", + "hqLabels: {1: 'L1', 2: 'L1-L2', 3: 'L2', 4: 'L2-L3', 5: 'L3', 6: 'L3-L4', 7: 'L4', 8: 'L4-L5', 9: 'L5', 11: 'sc', 12: 'nerveroots'}\n" + ] + } + ], + "source": [ + "ds = RandomDeformationDataset(\n", + " \"/home/lionel/Data/Atlas_Houdini_deformations_vdsn\",\n", + " \"/home/lionel/Data/Atlas_Houdini_deformations_vdsn\",\n", + " \"settings_dict_ideal_patient_corr_vdsn.json\",\n", + " \"train\",\n", + " resolution=256,\n", + " class_mode=\"nerves\",\n", + " rand_coords=True,\n", + " num_points=10,\n", + " num_samples=1\n", + ")\n", + "\n", + "dl = DataLoader(ds, batch_size=len(ds), collate_fn=collate_fn)" + ] + }, + { + "cell_type": "code", + "execution_count": 3, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "1" + ] + }, + "execution_count": 3, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "len(dl)" + ] + }, + { + "cell_type": "code", + "execution_count": 4, + "metadata": {}, + "outputs": [], + "source": [ + "import numpy as np\n", + "\n", + "runner = 0\n", + "root = \"/home/lionel/Data/Atlas_Houdini2D_nerves\"\n", + "sample = next(iter(dl))" + ] + }, + { + "cell_type": "code", + "execution_count": 9, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "torch.Size([74, 1, 256, 256, 256])" + ] + }, + "execution_count": 9, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "sample.shape" + ] + }, + { + "cell_type": "code", + "execution_count": 10, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "torch.int64" + ] + }, + "execution_count": 10, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "sample.dtype" + ] + }, + { + "cell_type": "code", + "execution_count": 11, + "metadata": {}, + "outputs": [], + "source": [ + "sample = multiclassLong_to_multiChannel(sample)" + ] + }, + { + "cell_type": "code", + "execution_count": 12, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "torch.Size([74, 3, 256, 256, 256])" + ] + }, + "execution_count": 12, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "sample.shape" + ] + }, + { + "cell_type": "code", + "execution_count": 19, + "metadata": {}, + "outputs": [], + "source": [ + "number = 0\n", + "for i in range(sample.shape[0]):\n", + " for j in range(256):\n", + "\n", + " temp = sample[i,:,j]\n", + " if not ((temp==0).all() or (temp==1).all() or (temp==2).all()):\n", + " np.save(os.path.join(root, f\"sample_{number}.npy\"), temp)\n", + " number += 1\n", + "\n", + " temp = 
sample[i,:,:,j]\n", + " if not ((temp==0).all() or (temp==1).all() or (temp==2).all()):\n", + " np.save(os.path.join(root, f\"sample_{number}.npy\"), temp)\n", + " number += 1\n", + "\n", + " if not ((temp==0).all() or (temp==1).all() or (temp==2).all()):\n", + " temp = sample[i,:,:,:,j]\n", + " np.save(os.path.join(root, f\"sample_{number}.npy\"), temp)\n", + " number += 1" + ] + }, + { + "cell_type": "code", + "execution_count": 20, + "metadata": {}, + "outputs": [], + "source": [ + "from diffusion_models.utils.datasets import LumbarSpineDataset\n", + "\n", + "ds = LumbarSpineDataset()" + ] + }, + { + "cell_type": "code", + "execution_count": 21, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "56832" + ] + }, + "execution_count": 21, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "len(ds)" + ] + }, + { + "cell_type": "code", + "execution_count": 22, + "metadata": {}, + "outputs": [], + "source": [ + "sample = ds[0]" + ] + }, + { + "cell_type": "code", + "execution_count": 23, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "torch.Size([3, 128, 128])" + ] + }, + "execution_count": 23, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "sample[0].shape" + ] + }, + { + "cell_type": "code", + "execution_count": 24, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "" + ] + }, + "execution_count": 24, + "metadata": {}, + "output_type": "execute_result" + }, + { + "data": { + "image/png": "iVBORw0KGgoAAAANSUhEUgAAAakAAAGhCAYAAADbf0s2AAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjguMCwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy81sbWrAAAACXBIWXMAAA9hAAAPYQGoP6dpAAAftUlEQVR4nO3df2yV5f3/8dcppacV6Kmt6zntaKVzJIAgIoVaIBsbJwMlCLPqIHVWJTC1VQqbQKfFOcUi25ShCNNsqBmIkggImRhWfo1YSingRKFgbKCCp9WxnsMPW0rP9fljX8/Xo6D8OG2vU56P5E7sfd/n5n0ltc+cnvucOowxRgAAWCimowcAAOBciBQAwFpECgBgLSIFALAWkQIAWItIAQCsRaQAANYiUgAAaxEpAIC1iBQAwFodFqlFixapV69eio+PV05Ojnbs2NFRowAALNUhkXr99dc1Y8YMPfbYY9q1a5cGDhyo0aNHq6GhoSPGAQBYytERHzCbk5OjIUOG6Pnnn5ckBYNBZWRk6MEHH9Ts2bO/8/HBYFBHjx5Vjx495HA42npcAECEGWN0/PhxpaenKybm3M+XYttxJknS6dOnVV1drZKSktC+mJgYeb1eVVRUnPUxzc3Nam5uDn195MgR9evXr81nBQC0rbq6OvXs2fOcx9s9Up9//rlaW1vldrvD9rvdbu3fv/+sjykrK9Pjjz/+jf0jdLNi1bVN5gQAtJ0zatE2/UM9evT41vPaPVIXo6SkRDNmzAh9HQgElJGRoVh1VayDSAFA1Pl/LzR910s27R6pq666Sl26dFF9fX3Y/vr6enk8nrM+xul0yul0tsd4AACLtPvdfXFxcRo8eLDKy8tD+4LBoMrLy5Wbm9ve4wAALNYhv+6bMWOGCgoKlJ2draFDh2rBggU6efKk7rnnno4YBwBgqQ6J1C9+8Qt99tlnmjNnjnw+n66//nqtX7/+GzdTAAAubx3yPqlLFQgE5HK5NFLjuXECAKLQGdOizVojv9+vxMTEc57HZ/cBAKxFpAAA1iJSAABrESkAgLWIFADAWkQKAGAtIgUAsBaRAgBYi0gBAKxFpAAA1iJSAABrESkAgLWIFADAWkQKAGAtIgUAsBaRAgBYi0gBAKxFpAAA1iJSAABrESkAgLWIFADAWkQKAGAtIgUAsBaRAgBYi0gBAKxFpAAA1iJSAABrESkAgLWIFADAWkQKAGAtIgUAsBaRAgBYi0gBAKxFpAAA1iJSAABrESkAgLWIFADAWkQKAGAtIgUAsBaRAgBYi0gBAKxFpAAA1iJSAABrESkAgLWIFADAWkQKAGAtIgUAsFbEI1VWVqYhQ4aoR48eSk1N1YQJE1RTUxN2TlNTkwoLC5WSkqLu3bsrLy9P9fX1kR4FABDlIh6pLVu2qLCwUNu3b9eGDRvU0tKin/3sZzp58mTonOnTp2vt2rVauXKltmzZoqNHj+rWW2+N9CgAgCjnMMaYtvwHPvvsM6WmpmrLli360Y9+JL/fr+9973tavny5brvtNknS/v371bdvX1VUVOjGG2/8zmsGAgG5XC6N1HjFOrq25fgAgDZwxrRos9bI7/crMTHxnOe1+WtSfr9fkpScnCxJqq6uVktLi7xeb+icPn36KDMzUxUVFWe9RnNzswKBQNgGAOj82jRSwWBQxcXFGj58uPr37y9J8vl8iouLU1JSUti5brdbPp/vrNcpKyuTy+UKbRkZGW05NgDAEm0aqcLCQu3du1crVqy4pOuUlJTI7/eHtrq6ughNCACwWWxbXbioqEjr1q3T1q1b1bNnz9B+j8ej06dPq7GxMezZVH19vTwez1mv5XQ65XQ622pUAIClIv5MyhijoqIirVq1Shs3blRWVlbY8cGDB6tr164qLy8P7aupqdHhw4eVm5sb6XEAAFEs4s+kCgsLtXz5cq1Zs0Y9evQIvc7kcrmUkJAgl8ulyZMna8aMGUpOTlZiYqIefPBB5ebmntedfQCAy0fEI7V48WJJ0siRI8P2L126VHfffbck
6dlnn1VMTIzy8vLU3Nys0aNH64UXXoj0KACAKNfm75NqC7xPCgCimzXvkwIA4GIRKQCAtYgUAMBaRAoAYC0iBQCwFpECAFiLSAEArEWkAADWIlIAAGsRKQCAtYgUAMBaRAoAYC0iBQCwFpECAFiLSAEArEWkAADWIlIAAGsRKQCAtYgUAMBaRAoAYC0iBQCwFpECAFiLSAEArEWkAADWIlIAAGsRKQCAtYgUAMBaRAoAYC0iBQCwFpECAFiLSAEArEWkAADWIlIAAGsRKQCAtYgUAMBaRAoAYC0iBQCwFpECAFiLSAEArEWkAADWIlIAAGsRKQCAtYgUAMBaRAoAYC0iBQCwFpECAFirzSM1b948ORwOFRcXh/Y1NTWpsLBQKSkp6t69u/Ly8lRfX9/WowAAokybRqqqqkp/+ctfdN1114Xtnz59utauXauVK1dqy5YtOnr0qG699da2HAUAEIXaLFInTpxQfn6+XnrpJV155ZWh/X6/X3/961/1zDPP6Kc//akGDx6spUuX6t1339X27dvbahwAQBRqs0gVFhZq7Nix8nq9Yfurq6vV0tIStr9Pnz7KzMxURUVFW40DAIhCsW1x0RUrVmjXrl2qqqr6xjGfz6e4uDglJSWF7Xe73fL5fGe9XnNzs5qbm0NfBwKBiM4LALBTxJ9J1dXVadq0aVq2bJni4+Mjcs2ysjK5XK7QlpGREZHrAgDsFvFIVVdXq6GhQTfccINiY2MVGxurLVu2aOHChYqNjZXb7dbp06fV2NgY9rj6+np5PJ6zXrOkpER+vz+01dXVRXpsAICFIv7rvlGjRun9998P23fPPfeoT58+mjVrljIyMtS1a1eVl5crLy9PklRTU6PDhw8rNzf3rNd0Op1yOp2RHhUAYLmIR6pHjx7q379/2L5u3bopJSUltH/y5MmaMWOGkpOTlZiYqAcffFC5ubm68cYbIz0OACCKtcmNE9/l2WefVUxMjPLy8tTc3KzRo0frhRde6IhRAAAWcxhjTEcPcaECgYBcLpdGarxiHV07ehwAwAU6Y1q0WWvk9/uVmJh4zvP47D4AgLWIFADAWkQKAGAtIgUAsBaRAgBYi0gBAKxFpAAA1iJSAABrESkAgLWIFADAWkQKAGAtIgUAsBaRAgBYi0gBAKxFpAAA1iJSAABrESkAgLWIFADAWkQKAGAtIgUAsBaRAgBYi0gBAKxFpAAA1iJSAABrESkAgLWIFADAWkQKAGAtIgUAsBaRAgBYi0gBAKxFpAAA1iJSAABrESkAgLWIFADAWkQKAGAtIgUAsBaRAgBYi0gBAKxFpAAA1iJSAABrESkAgLWIFADAWkQKAGAtIgUAsBaRAgBYi0gBAKxFpAAA1mqTSB05ckR33nmnUlJSlJCQoAEDBmjnzp2h48YYzZkzR2lpaUpISJDX69XBgwfbYhQAQBSLeKT++9//avjw4eratavefvttffjhh/rTn/6kK6+8MnTO/PnztXDhQi1ZskSVlZXq1q2bRo8eraampkiPAwCIYrGRvuDTTz+tjIwMLV26NLQvKysr9N/GGC1YsECPPvqoxo8fL0l69dVX5Xa7tXr1ak2cODHSIwEAolTEn0m99dZbys7O1u23367U1FQNGjRIL730Uuh4bW2tfD6fvF5vaJ/L5VJOTo4qKirOes3m5mYFAoGwDQDQ+UU8Uh9//LEWL16s3r1765133tH999+vhx56SK+88ookyefzSZLcbnfY49xud+jY15WVlcnlcoW2jIyMSI8NALBQxCMVDAZ1ww036KmnntKgQYM0depUTZkyRUuWLLnoa5aUlMjv94e2urq6CE4MALBVxCOVlpamfv36he3r27evDh8+LEnyeDySpPr6+rBz6uvrQ8e+zul0KjExMWwDAHR+EY/U8OHDVVNTE7bvwIEDuvrqqyX97yYKj8ej8vLy0PFAIKDKykrl5uZGehwAQBSL+N1906dP17Bhw/TUU0/pjjvu0I4dO/Tiiy/qxRdflCQ5HA4VFxfrySefVO/evZWVlaXS0lKlp6drwoQJkR4HABDFIh6pIUOGaNWqVSopKdHvf/97ZWVlacGCBcrPzw+dM3PmTJ08eVJTp05VY2OjRowYofXr1ys+Pj7S4wAAopjDGGM6eogLFQgE5HK5NFLjFevo2tHjAAAu0BnTos1aI7/f/633GfDZfQAAaxEpAIC1iBQAwFpECgBgLSIFALAWkQIAWItIAQCsRaQAANYiUgAAaxEpAIC1iBQAwFpECgBgLSIFALAWkQIAWItIAQCsRaQAANYiUgAAaxEpAIC1iBQAwFpECgBgLSIFALAWkQIAWItIAQCsRaQAANYiUgAAaxEpAIC1iBQAwFpECgBgLSIFALAWkQIAWItIAQCsRaQAANYiUgAAaxEpAIC1iBQAwFpECgBgLSIFALAWkQIAWItIAQCsRaQAANYiUgAAaxEpAIC1iBQAwFpECgBgLSIFALAWkQIAWItIAQCsFfFItba2qrS0VFlZWUpISNA111yjJ554QsaY0DnGGM2ZM0dpaWlKSEiQ1+vVwYMHIz0KACDKRTxSTz/9tBYvXqznn39e+/bt09NPP6358+frueeeC50zf/58LVy4UEuWLFFlZaW6deum0aNHq6mpKdLjAACiWGykL/juu+9q/PjxGjt2rCSpV69eeu2117Rjxw5J/3sWtWDBAj366KMaP368JOnVV1+V2+3W6tWrNXHixEiPBACIUhF/JjVs2DCVl5frwIEDkqT33ntP27Zt00033SRJqq2tlc/nk9frDT3G5XIpJydHFRUVZ71mc3OzAoFA2AYA6Pwi/kxq9uzZCgQC6tOnj7p06aLW1lbNnTtX+fn5kiSfzydJcrvdYY9zu92hY19XVlamxx9/PNKjAgAsF/FnUm+88YaWLVum5cuXa9euXXrllVf0xz/+Ua+88spFX7OkpER+vz+01dXVRXBiAICtIv5M6uGHH9bs2bNDry0NGDBAhw4dUllZmQoKCuTxeCRJ9fX1SktLCz2uvr5e119//Vmv6XQ65XQ6Iz0qAMByEX8mderUKcXEhF+2S5cuCgaDkqSsrCx5PB6Vl5eHjgcCAVVWVio3NzfS4wAAoljEn0mNGzdOc+fOVWZmpq699lrt3r1bzzzzjO69915JksPhUHFxsZ588kn17t1bWVlZKi0tVXp6uiZMmBDpcQAAUSzikXruuedUWlqqBx54QA0NDUpPT9evfvUrzZkzJ3TOzJkzdfLkSU2dOlWNjY0aMWKE1q9fr/j4+EiPAwCIYg7z1Y+CiBKBQEAul0sjNV6xjq4dPQ4A4AKdMS3arDXy+/1KTEw853l8dh8AwFpECgBgLSIFALAWkQIAWItIAQCsRaQAANYiUgAAaxEpAIC1iBQAwFpECgBgLSIFALAWkQIAWItIAQCsRaQAANYiUgAAaxEpAIC1iBQAwFpECgBgLSIFALAWkQIAWItIAQCsRaQAANYiUgAAaxEpAIC1iBQAwFpECgBgLSIFALAWkQIAWItIAQCsRaQAANYiUgAAaxEpAIC1iBQ
AwFpECgBgLSIFALAWkQIAWItIAQCsRaQAANYiUgAAaxEpAIC1iBQAwFpECgBgLSIFALAWkQIAWItIAQCsRaQAANYiUgAAa11wpLZu3apx48YpPT1dDodDq1evDjtujNGcOXOUlpamhIQEeb1eHTx4MOycY8eOKT8/X4mJiUpKStLkyZN14sSJS1oIAKDzueBInTx5UgMHDtSiRYvOenz+/PlauHChlixZosrKSnXr1k2jR49WU1NT6Jz8/Hx98MEH2rBhg9atW6etW7dq6tSpF78KAECn5DDGmIt+sMOhVatWacKECZL+9ywqPT1dv/71r/Wb3/xGkuT3++V2u/Xyyy9r4sSJ2rdvn/r166eqqiplZ2dLktavX6+bb75Zn3zyidLT07/z3w0EAnK5XBqp8Yp1dL3Y8QEAHeSMadFmrZHf71diYuI5z4voa1K1tbXy+Xzyer2hfS6XSzk5OaqoqJAkVVRUKCkpKRQoSfJ6vYqJiVFlZeVZr9vc3KxAIBC2AQA6v4hGyufzSZLcbnfYfrfbHTrm8/mUmpoadjw2NlbJycmhc76urKxMLpcrtGVkZERybACApaLi7r6SkhL5/f7QVldX19EjAQDaQUQj5fF4JEn19fVh++vr60PHPB6PGhoawo6fOXNGx44dC53zdU6nU4mJiWEbAKDzi2iksrKy5PF4VF5eHtoXCARUWVmp3NxcSVJubq4aGxtVXV0dOmfjxo0KBoPKycmJ5DgAgCgXe6EPOHHihD766KPQ17W1tdqzZ4+Sk5OVmZmp4uJiPfnkk+rdu7eysrJUWlqq9PT00B2Affv21ZgxYzRlyhQtWbJELS0tKioq0sSJE8/rzj4AwOXjgiO1c+dO/eQnPwl9PWPGDElSQUGBXn75Zc2cOVMnT57U1KlT1djYqBEjRmj9+vWKj48PPWbZsmUqKirSqFGjFBMTo7y8PC1cuDACywEAdCaX9D6pjsL7pAAgunXI+6QAAIgkIgUAsBaRAgBYi0gBAKxFpAAA1iJSAABrESkAgLWIFADAWkQKAGAtIgUAsBaRAgBYi0gBAKxFpAAA1iJSAABrESkAgLWIFADAWkQKAGAtIgUAsBaRAgBYi0gBAKxFpAAA1iJSAABrESkAgLWIFADAWkQKAGAtIgUAsBaRAgBYi0gBAKxFpAAA1iJSAABrESkAgLWIFADAWkQKAGAtIgUAsBaRAgBYi0gBAKxFpAAA1iJSAABrESkAgLWIFADAWkQKAGAtIgUAsBaRAgBYi0gBAKxFpAAA1iJSAABrXXCktm7dqnHjxik9PV0Oh0OrV68OHWtpadGsWbM0YMAAdevWTenp6brrrrt09OjRsGscO3ZM+fn5SkxMVFJSkiZPnqwTJ05c8mIAAJ3LBUfq5MmTGjhwoBYtWvSNY6dOndKuXbtUWlqqXbt26c0331RNTY1uueWWsPPy8/P1wQcfaMOGDVq3bp22bt2qqVOnXvwqAACdksMYYy76wQ6HVq1apQkTJpzznKqqKg0dOlSHDh1SZmam9u3bp379+qmqqkrZ2dmSpPXr1+vmm2/WJ598ovT09O/8dwOBgFwul0ZqvGIdXS92fABABzljWrRZa+T3+5WYmHjO89r8NSm/3y+Hw6GkpCRJUkVFhZKSkkKBkiSv16uYmBhVVla29TgAgCgS25YXb2pq0qxZszRp0qRQKX0+n1JTU8OHiI1VcnKyfD7fWa/T3Nys5ubm0NeBQKDthgYAWKPNnkm1tLTojjvukDFGixcvvqRrlZWVyeVyhbaMjIwITQkAsFmbROrLQB06dEgbNmwI+32jx+NRQ0ND2PlnzpzRsWPH5PF4znq9kpIS+f3+0FZXV9cWYwMALBPxX/d9GaiDBw9q06ZNSklJCTuem5urxsZGVVdXa/DgwZKkjRs3KhgMKicn56zXdDqdcjqdkR4VAGC5C47UiRMn9NFHH4W+rq2t1Z49e5ScnKy0tDTddttt2rVrl9atW6fW1tbQ60zJycmKi4tT3759NWbMGE2ZMkVLlixRS0uLioqKNHHixPO6sw8AcPm44FvQN2/erJ/85Cff2F9QUKDf/e53ysrKOuvjNm3apJEjR0r635t5i4qKtHbtWsXExCgvL08LFy5U9+7dz2sGbkEHgOh2vregX9L7pDoKkQKA6GbN+6QAALhYRAoAYC0iBQCwFpECAFiLSAEArEWkAADWIlIAAGsRKQCAtYgUAMBaRAoAYC0iBQCwFpECAFiLSAEArEWkAADWivhf5m0PX/51kTNqkaLuD40AAM6oRdL//3l+LlEZqePHj0uStukfHTwJAOBSHD9+XC6X65zHo/KPHgaDQR09elTGGGVmZqquru5b/2hWNAsEAsrIyOjUa5RYZ2dzOazzclij1HbrNMbo+PHjSk9PV0zMuV95ispnUjExMerZs6cCgYAkKTExsVN/k0iXxxol1tnZXA7rvBzWKLXNOr/tGdSXuHECAGAtIgUAsFZUR8rpdOqxxx6T0+ns6FHazOWwRol1djaXwzovhzVKHb/OqLxxAgBweYjqZ1IAgM6NSAEArEWkAADWIlIAAGtFbaQWLVqkXr16KT4+Xjk5OdqxY0dHj3RJysrKNGTIEPXo0UOpqamaMGGCampqws5pampSYWGhUlJS1L17d+Xl5am+vr6DJr508+bNk8PhUHFxcWhfZ1njkSNHdOeddyolJUUJCQkaMGCAdu7cGTpujNGcOXOUlpamhIQEeb1eHTx4sAMnvnCtra0qLS1VVlaWEhISdM011+iJJ54I+yy2aFzn1q1bNW7cOKWnp8vhcGj16tVhx89nTceOHVN+fr4SExOVlJSkyZMn68SJE+24im/3bWtsaWnRrFmzNGDAAHXr1k3p6em66667dPTo0bBrtNsaTRRasWKFiYuLM3/729/MBx98YKZMmWKSkpJMfX19R4920UaPHm2WLl1q9u7da/bs2WNuvvlmk5mZaU6cOBE657777jMZGRmmvLzc7Ny509x4441m2LBhHTj1xduxY4fp1auXue6668y0adNC+zvDGo8dO2auvvpqc/fdd5vKykrz8ccfm3feecd89NFHoXPmzZtnXC6XWb16tXnvvffMLbfcYrKysswXX3zRgZNfmLlz55qUlBSzbt06U1tba1auXGm6d+9u/vznP4fOicZ1/uMf/zCPPPKIefPNN40ks2rVqrDj57OmMWPGmIEDB5rt27ebf/3rX+aHP/yhmTRpUjuv5Ny+bY2NjY3G6/Wa119/3ezfv99UVFSYoUOHmsGDB4ddo73WGJWRGjp0qCksLAx93draatLT001ZWVkHThVZDQ0NRpLZsmWLMeZ/3zhdu3Y1K1euDJ2zb98+I8lUVFR01JgX5fjx46Z3795mw4YN5sc//nEoUp1ljbNmzTIjRow45/FgMGg8Ho/5wx/+ENrX2NhonE6nee2119pjxIgYO3asuffee8P23XrrrSY/P98Y0znW+fUf4Oezpg8//NBIMlVVVaFz3n77beNwOMyRI0fabfbzdbYQf92OHTuMJHPo0CFjTPuuMep+3X
f69GlVV1fL6/WG9sXExMjr9aqioqIDJ4ssv98vSUpOTpYkVVdXq6WlJWzdffr0UWZmZtStu7CwUGPHjg1bi9R51vjWW28pOztbt99+u1JTUzVo0CC99NJLoeO1tbXy+Xxh63S5XMrJyYmqdQ4bNkzl5eU6cOCAJOm9997Ttm3bdNNNN0nqPOv8qvNZU0VFhZKSkpSdnR06x+v1KiYmRpWVle0+cyT4/X45HA4lJSVJat81Rt0HzH7++edqbW2V2+0O2+92u7V///4OmiqygsGgiouLNXz4cPXv31+S5PP5FBcXF/om+ZLb7ZbP5+uAKS/OihUrtGvXLlVVVX3jWGdZ48cff6zFixdrxowZ+u1vf6uqqio99NBDiouLU0FBQWgtZ/sejqZ1zp49W4FAQH369FGXLl3U2tqquXPnKj8/X5I6zTq/6nzW5PP5lJqaGnY8NjZWycnJUbnupqYmzZo1S5MmTQp9wGx7rjHqInU5KCws1N69e7Vt27aOHiWi6urqNG3aNG3YsEHx8fEdPU6bCQaDys7O1lNPPSVJGjRokPbu3aslS5aooKCgg6eLnDfeeEPLli3T8uXLde2112rPnj0qLi5Wenp6p1rn5aylpUV33HGHjDFavHhxh8wQdb/uu+qqq9SlS5dv3PFVX18vj8fTQVNFTlFRkdatW6dNmzapZ8+eof0ej0enT59WY2Nj2PnRtO7q6mo1NDTohhtuUGxsrGJjY7VlyxYtXLhQsbGxcrvdUb9GSUpLS1O/fv3C9vXt21eHDx+WpNBaov17+OGHH9bs2bM1ceJEDRgwQL/85S81ffp0lZWVSeo86/yq81mTx+NRQ0ND2PEzZ87o2LFjUbXuLwN16NAhbdiwIezPdLTnGqMuUnFxcRo8eLDKy8tD+4LBoMrLy5Wbm9uBk10aY4yKioq0atUqbdy4UVlZWWHHBw8erK5du4atu6amRocPH46adY8aNUrvv/++9uzZE9qys7OVn58f+u9oX6MkDR8+/BtvHzhw4ICuvvpqSVJWVpY8Hk/YOgOBgCorK6NqnadOnfrGH6vr0qWLgsGgpM6zzq86nzXl5uaqsbFR1dXVoXM2btyoYDConJycdp/5YnwZqIMHD+qf//ynUlJSwo636xojehtGO1mxYoVxOp3m5ZdfNh9++KGZOnWqSUpKMj6fr6NHu2j333+/cblcZvPmzebTTz8NbadOnQqdc99995nMzEyzceNGs3PnTpObm2tyc3M7cOpL99W7+4zpHGvcsWOHiY2NNXPnzjUHDx40y5YtM1dccYX5+9//Hjpn3rx5JikpyaxZs8b8+9//NuPHj7f+1uyvKygoMN///vdDt6C/+eab5qqrrjIzZ84MnRON6zx+/LjZvXu32b17t5FknnnmGbN79+7QnW3ns6YxY8aYQYMGmcrKSrNt2zbTu3dvq25B/7Y1nj592txyyy2mZ8+eZs+ePWE/j5qbm0PXaK81RmWkjDHmueeeM5mZmSYuLs4MHTrUbN++vaNHuiSSzrotXbo0dM4XX3xhHnjgAXPllVeaK664wvz85z83n376accNHQFfj1RnWePatWtN//79jdPpNH369DEvvvhi2PFgMGhKS0uN2+02TqfTjBo1ytTU1HTQtBcnEAiYadOmmczMTBMfH29+8IMfmEceeSTsB1k0rnPTpk1n/X+xoKDAGHN+a/rPf/5jJk2aZLp3724SExPNPffcY44fP94Bqzm7b1tjbW3tOX8ebdq0KXSN9lojf6oDAGCtqHtNCgBw+SBSAABrESkAgLWIFADAWkQKAGAtIgUAsBaRAgBYi0gBAKxFpAAA1iJSAABrESkAgLWIFADAWv8HXVmV3uKRbMoAAAAASUVORK5CYII=", + "text/plain": [ + "
" + ] + }, + "metadata": {}, + "output_type": "display_data" + } + ], + "source": [ + "import matplotlib.pyplot as plt\n", + "\n", + "plt.imshow(ds[128][0][2])" + ] + }, + { + "cell_type": "code", + "execution_count": 9, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "torch.Size([3, 128, 128])" + ] + }, + "execution_count": 9, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "ds[128][0].shape" + ] + }, + { + "cell_type": "code", + "execution_count": 16, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "['/home/lionel/Data/Atlas_Houdini2D_nerves/sample_0.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_1.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_100.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_1000.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10000.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10001.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10002.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10003.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10004.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10005.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10006.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10007.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10008.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10009.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_1001.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10010.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10011.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10012.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10013.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10014.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10015.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10016.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10017.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10018.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10019.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_1002.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10020.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10021.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10022.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10023.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10024.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10025.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10026.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10027.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10028.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10029.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_1003.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10030.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10031.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10032.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10033.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10034.npy',\n", + " 
'/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10035.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10036.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10037.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10038.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10039.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_1004.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10040.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10041.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10042.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10043.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10044.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10045.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10046.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10047.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10048.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10049.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_1005.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10050.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10051.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10052.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10053.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10054.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10055.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10056.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10057.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10058.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10059.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_1006.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10060.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10061.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10062.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10063.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10064.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10065.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10066.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10067.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10068.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10069.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_1007.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10070.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10071.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10072.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10073.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10074.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10075.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10076.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10077.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10078.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10079.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_1008.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10080.npy',\n", + " 
'/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10081.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10082.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10083.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10084.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10085.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10086.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10087.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10088.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10089.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_1009.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10090.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10091.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10092.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10093.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10094.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10095.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10096.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10097.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10098.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10099.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_101.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_1010.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10100.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10101.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10102.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10103.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10104.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10105.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10106.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10107.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10108.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10109.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_1011.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10110.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10111.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10112.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10113.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10114.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10115.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10116.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10117.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10118.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10119.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_1012.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10120.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10121.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10122.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10123.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10124.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10125.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10126.npy',\n", + " 
'/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10127.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10128.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10129.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_1013.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10130.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10131.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10132.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10133.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10134.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10135.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10136.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10137.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10138.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10139.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_1014.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10140.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10141.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10142.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10143.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10144.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10145.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10146.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10147.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10148.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10149.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_1015.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10150.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10151.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10152.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10153.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10154.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10155.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10156.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10157.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10158.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10159.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_1016.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10160.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10161.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10162.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10163.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10164.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10165.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10166.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10167.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10168.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10169.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_1017.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10170.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10171.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10172.npy',\n", + " 
'/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10173.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10174.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10175.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10176.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10177.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10178.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10179.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_1018.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10180.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10181.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10182.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10183.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10184.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10185.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10186.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10187.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10188.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10189.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_1019.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10190.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10191.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10192.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10193.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10194.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10195.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10196.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10197.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10198.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10199.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_102.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_1020.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10200.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10201.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10202.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10203.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10204.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10205.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10206.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10207.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10208.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10209.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_1021.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10210.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10211.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10212.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10213.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10214.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10215.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10216.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10217.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10218.npy',\n", + " 
'/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10219.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_1022.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10220.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10221.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10222.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10223.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10224.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10225.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10226.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10227.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10228.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10229.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_1023.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10230.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10231.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10232.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10233.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10234.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10235.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10236.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10237.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10238.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10239.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_1024.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10240.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10241.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10242.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10243.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10244.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10245.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10246.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10247.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10248.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10249.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_1025.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10250.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10251.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10252.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10253.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10254.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10255.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10256.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10257.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10258.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10259.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_1026.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10260.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10261.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10262.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10263.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10264.npy',\n", + " 
'/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10265.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10266.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10267.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10268.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10269.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_1027.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10270.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10271.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10272.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10273.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10274.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10275.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10276.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10277.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10278.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10279.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_1028.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10280.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10281.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10282.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10283.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10284.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10285.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10286.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10287.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10288.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10289.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_1029.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10290.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10291.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10292.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10293.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10294.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10295.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10296.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10297.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10298.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10299.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_103.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_1030.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10300.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10301.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10302.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10303.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10304.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10305.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10306.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10307.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10308.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10309.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_1031.npy',\n", + " 
'/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10310.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10311.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10312.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10313.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10314.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10315.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10316.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10317.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10318.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10319.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_1032.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10320.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10321.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10322.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10323.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10324.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10325.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10326.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10327.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10328.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10329.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_1033.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10330.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10331.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10332.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10333.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10334.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10335.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10336.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10337.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10338.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10339.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_1034.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10340.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10341.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10342.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10343.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10344.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10345.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10346.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10347.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10348.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10349.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_1035.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10350.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10351.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10352.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10353.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10354.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10355.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10356.npy',\n", + " 
'/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10357.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10358.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10359.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_1036.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10360.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10361.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10362.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10363.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10364.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10365.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10366.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10367.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10368.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10369.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_1037.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10370.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10371.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10372.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10373.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10374.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10375.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10376.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10377.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10378.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10379.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_1038.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10380.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10381.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10382.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10383.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10384.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10385.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10386.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10387.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10388.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10389.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_1039.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10390.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10391.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10392.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10393.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10394.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10395.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10396.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10397.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10398.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10399.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_104.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_1040.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10400.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10401.npy',\n", + " 
'/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10402.npy',\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10403.npy',\n", + " [several hundred further '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_*.npy' entries of this cell output elided]\n", + " '/home/lionel/Data/Atlas_Houdini2D_nerves/sample_10897.npy',\n", + " ...]" + ] + }, + "execution_count": 16, + "metadata": {}, + "output_type": "execute_result" + } + ], + "source": [ + "ds.files" + ] + }, + { + "cell_type": "code", + "execution_count": 45, + "metadata": {}, + "outputs": [ + { + "data": { + "text/plain": [ + "" + ] + }, + "execution_count": 45, + "metadata": {}, + "output_type": "execute_result" + }, + { + "data": { + "image/png": 
"iVBORw0KGgoAAAANSUhEUgAAAfQAAAGiCAYAAAARATRgAAAAOXRFWHRTb2Z0d2FyZQBNYXRwbG90bGliIHZlcnNpb24zLjguMCwgaHR0cHM6Ly9tYXRwbG90bGliLm9yZy81sbWrAAAACXBIWXMAAA9hAAAPYQGoP6dpAAA7KElEQVR4nO3dfXQU5b0H8O8mIZsIZEOgSQgkJrXcixQETCBG7C3WvUbhoFxpBZpKSilcbbBAzq1AJUFRCKLSFI2kcAX0XChIj1BFGg8GgXKMAQKxorx5QchFN8jlZheCeXH3uX9gxmyySfZldneeme/nnDmB2dnZ53lmdn77/OaZGZMQQoCIiIikFhHuAhAREVHgGNCJiIh0gAGdiIhIBxjQiYiIdIABnYiISAcY0ImIiHSAAZ2IiEgHGNCJiIh0gAGdiIhIBxjQiYiIdIABnYiISEUHDhzApEmTkJKSApPJhJ07d/b4nn379uH222+H2WzGD37wA2zatMnnzw1bQC8rK0N6ejpiYmKQnZ2NQ4cOhasoREREqmlsbMTIkSNRVlbm1fLnzp3DxIkTcffdd6O2thbz58/Hr3/9a7z77rs+fa4pHA9n2bZtG2bMmIHy8nJkZ2ejtLQU27dvx6lTp5CYmBjq4hAREQWFyWTCjh07MHny5C6XWbhwId555x0cP35cmTdt2jQ0NDSgoqLC68+KCqSg/lq9ejVmz56NmTNnAgDKy8vxzjvvYMOGDVi0aFGP73e5XPjiiy/Qt29fmEymYBeXiIhUJoTA1atXkZKSgoiI4CSLm5qa0NLSosq6hBCd4o3ZbIbZbA543VVVVbBarW7zcnNzMX/+fJ/WE/KA3tLSgpqaGixevFiZFxERAavViqqqKo/vaW5uRnNzs/L/ixcvYtiwYUEvKxERBVddXR0GDx6s+nqbmpqQcXMf2C45VVlfnz59cO3aNbd5S5cuxVNPPRXwum02G5KSktzmJSUlweFw4Ouvv0ZsbKxX6wl5QL98+TKcTqfHwp88edLje0pKSvD00093ml8HIC4YhSQiTWp/fpC5Obk5AKQC6Nu3b1DW39LSAtslJ87V3Iy4voFlABxXXcjIPI+6ujrExX0XddTonaspLCl3Xy1evBiFhYXK/x0OB1JTUxEHBnQiUgd/LKjL2/YM9mnTuL4RAQd0ZV1xcW4BXS3Jycmor693m1dfX4+4uDive+dAGAL6gAEDEBkZ6bHwycnJHt+j1nkKIiJvCDCo64VTuOAMcOi3U7jUKUwXcnJysHv3brd5e/bsQU5Ojk/rCflla9HR0cjMzERlZaUyz+VyobKy0ufCExERdccFocrki2vXrqG2tha1tbUAblyWVltbiwsXLgC4kXWeMWOGsvyjjz6Ks2fP4oknnsDJkyfxyiuv4I033sCCBQt8+tywpNwLCwuRn5+PrKwsjB07FqWlpWhsbFRGvRMRkdy0kuFwwYVA+9e+ruHIkSO4++67lf+3nTLOz8/Hpk2b8OWXXyrBHQAyMjLwzjvvYMGCBfjjH/+IwYMH4z//8z+Rm5vr0+eGJaBPnToVX331FYqLi2Gz2TBq1ChUVFR0GihHREQkm/Hjx6O7W7x4ugvc+PHjcezYsYA+Nyw3lgmUw+GAxWKBHaEZFMfzafrRtrNze4afFr9X3D9CxwHAAsButwdloFlbnKg7OUiVUe6pQy8GraxqkWKUeziJdn/5JZeb6PBvbs/w0er3SktlIXX4cw7c0zpkwIezEBER6QB76BJhOjAwJrANtYbbQR1a2q+1VBbgRu/aaZAeOgN6D7SyU7antTSlTNhu2sDtEBzhPjZoMewx5U5ERERSYQ9dQsH+Ba61lBmRNzz1ocK9D4f6uxTu+rY/raUVTiHgDPBirkDfHyoM6BIJ9Zc13Ok7Im9p/XAb7O+Slr6nWioLALi+nQJdhwyYciciItIB9tCpk7a0Wbh+aTPlT77ylOrVwv4T7u8SAU4VRrkH+v5QYUAnj8IdzNv+zQMheUur+4pWy2UUTgEVnramTlmCjQGdiIh0y0jn0BnQSVPYm5ELT48QaQcDOhH5hadHSAYumOAMcO90SbJ3M6ATUcDkONyREbnEjSnQdciAAb0H7bcjD1pE3+H3QU6+HtN4DJQHAzoREemWU4WUe6DvDxUGdCIi0i0GdFLIsRlJZkxpUij5uo9xn5QHAzoREemWS5jgEgGOcg/w/aHCgE5ERLrFlDsRhYyWDhVM/xPJiwGdiIh0y4kIOAN8sKhTpbIEGwM6ERHpllDhHLrgOXQiko0chy0i7xnpHHpgeQgiIiLSBPbQVcKHUxD1LJjfEyN+B3uqc6jaRMtt7xQRcIoAz6HzXu7GIdr91epOTRRuwfyeGPE72FOdRYe/wfwhFYrP8ZcLJrgCTEa7IEdEZ8o9QHJsZiIi0jv20ANkgnZ/marNKPVsTys9PiO2vS/afw+Npqt9IlTHJq0fA400KI4BXQVybOrAiA7/NlKdtVRfLZXFV8Eut6zt4i9v6huqNtFy26tzDl2On4tMuRMREemArnvoWk4DyUbrabVg0GIa1yhtT6SWG4PiAnw4iyTfPN0GdCOmiIPNiG2olTprpRxEsnGpcOtXjnIPMx4AiYjISHTbQ28v2MHdm1S00dLVRERaYKRBcboO6KG6Q1L7f3d3gwciIgotFyIMc2MZXQf0UNDiwCkiIrrBKUxwBvi0tEDfHyoM6B60D9BqXOvp667g6+cTERExoBMRkW45VRjl7pQkD8uArjFy7DZERHJwiQi4AhwU5+KgOHkxzU1ERLJhQCciIt1iyp3ChqPmiYjU40Lgo9Rd6hQl6BjQNYgpfyLt4FUnJAsGdCIi0i11biwjx13SGdCJiEi31Ln1KwM6fYspOyJ58TtLslD9Z0dJSQnGjBmDvn37IjExEZMnT8apU6fclmlqakJBQQH69++PPn36YMqUKaivr1e7KEREZHBtz0MPdJKB6gF9//79KCgowIcffog9e/agtbUV9957LxobG5VlFixYgLfffhvbt2/H/v378cUXX+Chhx5SuyhERGRwbSn3QCcZqJ5yr6iocPv/pk2bkJiYiJqaGvzLv/wL7HY7Xn31VWzZsgU/+clPAAAbN27Erbfeig8//BB33HGH2kUKu2D+tmM6v2dsI/V0vKSS7Ulap8516HIE9KCX0m63AwASEhIAADU1NWhtbYXValWWGTp0KNLS0lBVVeVxHc3NzXA4HG4Tdcbr14mIjCuoAd3lcmH+/PkYN24chg8fDgCw2WyIjo5GfHy827JJSUmw2Wwe11NSUgKLxaJMqampwSy2VDr2kBjUu8beZOBMXfybjEN0mLTOJUyqTDIIakAvKCjA8ePHsXXr1oDWs3jxYtjtdmWqq6tTqYT6YIL7wVWGL1kodWwfCgzb07g8HVu0frxxfZtyD2Qy/HXoc+fOxa5du3DgwAEMHjxYmZ+cnIyWlhY0NDS49dLr6+uRnJzscV1msxlmszlYRSUiIpKe6j87hBCYO3cuduzYgb179yIjI8Pt9czMTPTq1QuVlZXKvFOnTuHChQ
vIyclRuzg9kiVt5I1g95r01FZEpA6tZ2vaHp8a6CQD1XvoBQUF2LJlC/7617+ib9++ynlxi8WC2NhYWCwWzJo1C4WFhUhISEBcXBwef/xx5OTkhHyEux6DU7C+XKLDv7X+JSYi9cn4vXfCBGeAJQ/0/aGiekBfu3YtAGD8+PFu8zdu3Ihf/vKXAIA//OEPiIiIwJQpU9Dc3Izc3Fy88sorahfFJwxSREQkM9UDuhA993tjYmJQVlaGsrIytT/eJx0fVSrazSd37dsqGO3DtpcHtxXJRI2UuWFT7rJpOyjpMf2uNqbziduKZONE4ClzpzpFCTo5fnYQERFRtwzfQ2/TllL29ncc047qCXY6n9TDbUWyYcrdoPw5QDHtqA62oTy4rUgmRnoeuhylJCIi8oNQ4dGpws+fsWVlZUhPT0dMTAyys7Nx6NChbpcvLS3FP//zPyM2NhapqalYsGABmpqavP48BnQ/mcBbYBJ5Euz7fPMGRySDbdu2obCwEEuXLsXRo0cxcuRI5Obm4tKlSx6X37JlCxYtWoSlS5fixIkTePXVV7Ft2zb8/ve/9/ozGdCJSBoM5OSrcD0PffXq1Zg9ezZmzpyJYcOGoby8HDfddBM2bNjgcfkPPvgA48aNw89//nOkp6fj3nvvxfTp03vs1bfHgE5ERLql5tPWOj7Gu7m52eNntrS0oKamxu0x4REREbBarV0+JvzOO+9ETU2NEsDPnj2L3bt3Y8KECV7XlQGdiFQVzNNRPNVF4ZSamur2KO+SkhKPy12+fBlOpxNJSUlu87t7TPjPf/5zLFu2DHfddRd69eqFW265BePHj/cp5c5R7kREpFttj0ANdB0AUFdXh7i4OGW+mk8B3bdvH1asWIFXXnkF2dnZ+OyzzzBv3jw888wzKCoq8modDOhERKRb7VPmgawDAOLi4twCelcGDBiAyMhI1NfXu83v7jHhRUVFeOSRR/DrX/8aADBixAg0NjZizpw5ePLJJxER0fOPEqbciQwo2CPRKXy4bcMvOjoamZmZbo8Jd7lcqKys7PIx4devX+8UtCMjIwF494wUgD10IiLdYBDvzIUIuALsu/rz/sLCQuTn5yMrKwtjx45FaWkpGhsbMXPmTADAjBkzMGjQIOU8/KRJk7B69WqMHj1aSbkXFRVh0qRJSmDvCQM6EZFO8Na8nTmFCc4AU+7+vH/q1Kn46quvUFxcDJvNhlGjRqGiokIZKHfhwgW3HvmSJUtgMpmwZMkSXLx4Ed/73vcwadIkLF++3OvPNAlv+/Ia4nA4YLFYYAfQ89kMfWq/0fjFpWDj/kZqcwCwALDb7V6dl/Z5/d/Gicf+/hDMfXoFtK7ma61Y+6M3g1ZWtbCHLiHpfoGR1Li/kczUHBSndQzoEmJajUKJ+xvJTKjwtDUhycNZGNAlxQNr+Bgx/WyUepL+OGGCM8A9OND3h4ocPzuIiIioW7rsofMZ5YEzYi+UyFdGPNbIVmeXCPwcuEuSgSS6C+ii3V+Zdjot6bjv8vypOy21A394hY8RjzUy1tmlwjn0QN8fKnKUkog6kaTTQEQhorseehtZfj3Kgu2pPRx9TuEi0/7mggmuAEsc6PtDRXcBXY5m1za2oTy4rcLHiG0vY53Ddae4cGDKnYiISAd010PXEqZDyajU3vf5XSJ/GWlQHAN6kIgO/+aBiIxC7cF6/C5RIFxQ4davkux1cvzsICIiom6xhx4kvo5A5vXEocc0bnC07ftqtStH81MghAqj3IUkex4DehDJsQsYE9O4waV2e3L7kL/4tDUiIiId4KA4Cjm105TUPbYzEekNA7qGMMgQEamLKXciIiId4K1fiTSAI//Di+3fGduEtIwBnaTA8QVE5A+m3Ik0Ro6vkz7wsaykJwzoOsaUmTy4fUKvYzD3dhsY5Xul57p1xSjbVg8MF9DbYxqXqGv8bpAesIduEHJsIiIi8hcDuo7JsVmIwsPf7we/V/rFbSsPwwV0IiIyDoHAryOXZaAoAzoREekWU+5kOBzJSkR6ZKSALscjZIiIiKhb7KETEZFuGamHzoBOAPxPszNVr23cPuHlS/tzWwWHkQJ60FPuK1euhMlkwvz585V5TU1NKCgoQP/+/dGnTx9MmTIF9fX1wS4KEVHIBDIyWpZR1aQtQQ3ohw8fxp/+9CfcdtttbvMXLFiAt99+G9u3b8f+/fvxxRdf4KGHHgpmUShI5Pjdalxa3j4MWu46biu2jzqEMKkyySBoAf3atWvIy8vD+vXr0a9fP2W+3W7Hq6++itWrV+MnP/kJMjMzsXHjRnzwwQf48MMPg1UcCiITtB04jE6L20d0+Es3dNxWbJ/AtT0PPdBJBkEL6AUFBZg4cSKsVqvb/JqaGrS2trrNHzp0KNLS0lBVVeVxXc3NzXA4HG4TEZGWyRECSE+CMihu69atOHr0KA4fPtzpNZvNhujoaMTHx7vNT0pKgs1m87i+kpISPP3008Eoqu54+kXPA4s6+DAf9RmlPX2tpwnsnauFg+ICUFdXh3nz5mHz5s2IiYlRZZ2LFy+G3W5Xprq6OlXWqzc8AAQPU8Tq0uJpgGAIpJ5GaaNg4zn0ANTU1ODSpUu4/fbbERUVhaioKOzfvx9r1qxBVFQUkpKS0NLSgoaGBrf31dfXIzk52eM6zWYz4uLi3CYiIiL6juop93vuuQcff/yx27yZM2di6NChWLhwIVJTU9GrVy9UVlZiypQpAIBTp07hwoULyMnJUbs4huIpTSfH70rta2tbtieRXIyUclc9oPft2xfDhw93m9e7d2/0799fmT9r1iwUFhYiISEBcXFxePzxx5GTk4M77rhD7eIYjhy7nZzYtkTyUSNlLkvKPSx3ivvDH/6AiIgITJkyBc3NzcjNzcUrr7wSjqIQEZGOCRV66Azo7ezbt8/t/zExMSgrK0NZWVkoPp6CoC21L8duTqR9vPUrBYr3ciefcaQ3EclCABABHrRkOeYxoFNAOFCMiLTMBRNMAR6lZLlTHAO6B0x96Zcvpwq4H1AocR+jQDGgk2GIDv/mAZRI/zjKnchLcuzmN/B2mkTG4xImmHgdunHJsenCR+b28aXsvtaTKXoyKl71og0M6ERE5Detn8oSQoVR7pKk9hjQiYjIb+1PZWktmAM8h05EfpDjK0+kPu772sCATkREusUeOhERkQ5wlDsREQWMVz6En5EGxUWEuwBEREQUOPbQiYiCjL3z8LnRQw/0HLpKhQkyBnQPmCYjIjX0dPzgsSb4jDQojil3IiIiHWAPnYgoTPh8geATCLyNZdlGDOgeyJFcISI94PEmuJhyJyIiIqmwh05e0/L9moPFiHWWlS/bitvVQAyUc2cPnbzS8YlKRmCUeuqBv9uK29gAvk25BzLBz5R7WVkZ0tPTERMTg+zsbBw6dKjb5RsaGlBQUICBAwfCbDbjn/7pn7B7926vP489dCIi0q1w3Slu27ZtKCwsRHl5ObKzs1FaWorc3FycOnUKiYmJnZZvaWnBv/7rvyIxMRF/+ctfMGjQIJw/fx7x8fFefyYDOnlF649IDIa2O
hulvjLzd1tx21KwrF69GrNnz8bMmTMBAOXl5XjnnXewYcMGLFq0qNPyGzZswJUrV/DBBx+gV69eAID09HSfPpMpd/KaCcY7ABqtvjLzZVsZcV82qkDT7e1HyTscDrepubnZ42e2tLSgpqYGVqtVmRcREQGr1YqqqiqP73nrrbeQk5ODgoICJCUlYfjw4VixYgWcTqfXdWVAJyIi/Wo7Bx7oBCA1NRUWi0WZSkpKPH7k5cuX4XQ6kZSU5DY/KSkJNpvN43vOnj2Lv/zlL3A6ndi9ezeKiorw4osv4tlnn/W6qky5f4u3YCTyjtFOvRC1qaurQ1xcnPJ/s9ms2rpdLhcSExOxbt06REZGIjMzExcvXsTzzz+PpUuXerUOBnQi8lrHqx0Y1Enr1BwUFxcX5xbQuzJgwABERkaivr7ebX59fT2Sk5M9vmfgwIHo1asXIiMjlXm33norbDYbWlpaEB0d3ePnMuVORET6JVSafBAdHY3MzExUVlYq81wuFyorK5GTk+PxPePGjcNnn30Gl8ulzDt9+jQGDhzoVTAHGNAVJmhzoIyf+xP5ie3dPRO0+10h0pLCwkKsX78er732Gk6cOIHHHnsMjY2Nyqj3GTNmYPHixcryjz32GK5cuYJ58+bh9OnTeOedd7BixQoUFBR4/ZlMuRMRkW6F617uU6dOxVdffYXi4mLYbDaMGjUKFRUVykC5CxcuICLiuz51amoq3n33XSxYsAC33XYbBg0ahHnz5mHhwoVef6ZJCFke3f4dh8MBi8UCO4Cez2bIjYP1QovtTRQaDgAWAHa73avz0j6v/9s4kbauGBGxMQGty/V1Ey7MWRa0sqqFPfQQCCRIMKiEFtubiGTFgE5ERLplpMenMqATEZF+GehpawzoISDHbzsiIj1S45oMOY7ivGyNiIhIB9hDJyIi/WLKXR98GV3Oy5XCi+1PREFhoICu25S7JO1P4LYiIlKDbnvoJvCpULLgtiKioGn3+NOA1iEB3QZ0wLfgIMfm0i+ttL83qX+eHvAe28q4tLLt1XzamtbpNuVORETaIEk8lJ6ue+hEvmqf/ici/3X8LgmEqaduoEFxDOhEHfR00GHq2HtsK2Nr2/6i3d+Q7xMGOofOlDsREZEOsIdORERBFc5TWSZxYwp0HTJgQCcioqALW9LaQOfQg5Jyv3jxIn7xi1+gf//+iI2NxYgRI3DkyBHldSEEiouLMXDgQMTGxsJqteLMmTPBKAoRERlZ2zn0QCcJqB7Q/+///g/jxo1Dr1698Le//Q2ffvopXnzxRfTr109ZZtWqVVizZg3Ky8tRXV2N3r17Izc3F01NTWoXp0dq/HgjIiIKN9VT7s899xxSU1OxceNGZV5GRobybyEESktLsWTJEjz44IMAgNdffx1JSUnYuXMnpk2b1mmdzc3NaG5uVv7vcDhUKSsDORGRzjHl7r+33noLWVlZ+NnPfobExESMHj0a69evV14/d+4cbDYbrFarMs9isSA7OxtVVVUe11lSUgKLxaJMqampahebiIj0SKg0SUD1gH727FmsXbsWQ4YMwbvvvovHHnsMv/3tb/Haa68BAGw2GwAgKSnJ7X1JSUnKax0tXrwYdrtdmerq6lQpq6nDXyIiIlmpnnJ3uVzIysrCihUrAACjR4/G8ePHUV5ejvz8fL/WaTabYTab1SymgsGciEjHmHL338CBAzFs2DC3ebfeeisuXLgAAEhOTgYA1NfXuy1TX1+vvEZERKQKjnL337hx43Dq1Cm3eadPn8bNN98M4MYAueTkZFRWViqvOxwOVFdXIycnR+3iEKQ7DUQGw/2TSB2qp9wXLFiAO++8EytWrMDDDz+MQ4cOYd26dVi3bh0AwGQyYf78+Xj22WcxZMgQZGRkoKioCCkpKZg8ebLaxSEiIgPjneICMGbMGOzYsQOLFy/GsmXLkJGRgdLSUuTl5SnLPPHEE2hsbMScOXPQ0NCAu+66CxUVFYiJiVG7OEREZGQGOoduEkKWR7d/x+FwwGKxwA4gzsf3tq+sHGdFwqutvdhWRKQmBwALALvdjrg4X4/kXqz/2ziR9tyziIgNrLPo+roJFxYuCVpZ1WLop61J90smxDo+y5iIiLTL0A9nYa+ze+2fkMS2IiIZmaDCOXRVShJ8hgvosmwYrWB7yYM/vkLL39N3PO0XYmpcdmbUy9aIKPR4ekRO3FakJsP10In0iKdH5KGVbWWYTIGBRrkzoBPphK4PyhoUSHtrJZC3n6fb/cdAAZ0pdyIiIh1gD52IiHSLd4ojItIBw5wn9kEg7dBTe2qyvZlyJyIi8p8kMVBX2EMnIvqWFkafy6Cr9tHKCH43BuqhM6ATkW75e8MXXY/6DoA3baK1djPSOXSm3ImIiHSAPXSVaS7dRERe0WS62EuyljskDHTrVwZ0FTFlRyQ32b+zPO54wHPoRERE8uM5dPJLx1/GavwwJCLyFnvnxsYeusravlBMvxNRqPD40g2m3ImIiHRAhZQ7A7rBtY2YleWXM0fJEpEnmrydK3nEgB5EMu78Mv0IISLqEVPuREREOsCATkbDXjkRecJjgzwY0ImISLd4HToRERFJRRc9dI7CJCI94TGN/KGLgE5EROQRB8URERHJz0jn0HUR0JmSIoBpStIP7r8qkyQgB4qD4oiIiHRAFz10IiIij3gOnUg+TFPKQ+3TIzzdQl0x0jl0ptyJiIh0gD10IiLSL6bciYiCR+20uJbS7Ez/awtT7kRERCQVBnQiciNJZ0Tz2DvXCKHS5IeysjKkp6cjJiYG2dnZOHTokFfv27p1K0wmEyZPnuzT5zGgE5FCdPhLvjOBwVxTwhTQt23bhsLCQixduhRHjx7FyJEjkZubi0uXLnX7vs8//xz/8R//gR/96Ec+fyYDOhERkRccDofb1Nzc3OWyq1evxuzZszFz5kwMGzYM5eXluOmmm7Bhw4Yu3+N0OpGXl4enn34a3//+930uHwM6EXXCHibpRduguEAnAEhNTYXFYlGmkpISj5/Z0tKCmpoaWK1WZV5ERASsViuqqqq6LOuyZcuQmJiIWbNm+VVXjnInIgUDOemOipet1dXVIS4uTpltNps9Ln758mU4nU4kJSW5zU9KSsLJkyc9vufgwYN49dVXUVtb63cxGdCJiEi/VAzocXFxbgFdLVevXsUjjzyC9evXY8CAAX6vhwGdiIgC0hYvmeG5YcCAAYiMjER9fb3b/Pr6eiQnJ3da/r//+7/x+eefY9KkSco8l8sFAIiKisKpU6dwyy239Pi5PIdORER+E138WyvUPIfurejoaGRmZqKyslKZ53K5UFlZiZycnE7LDx06FB9//DFqa2uV6YEHHsDdd9+N2tpapKamevW57KETEZF+henWr4WFhcjPz0dWVhbGjh2L0tJSNDY2YubMmQCAGTNmYNCgQSgpKUFMTAyGDx/u9v74+HgA6DS/OwzoRETkNxOYcvdk6tSp+Oqrr1BcXAybzYZRo0ahoqJCGSh34cIFRESomyRXPeXudDpRVFSEjIwMxMbG4pZbbsEzzzwDIb77iSOE
QHFxMQYOHIjY2FhYrVacOXNG7aIQEVEIaPlmOuFIubeZO3cuzp8/j+bmZlRXVyM7O1t5bd++fdi0aVOX7920aRN27tzp0+epHtCfe+45rF27Fi+//DJOnDiB5557DqtWrcJLL72kLLNq1SqsWbMG5eXlqK6uRu/evZGbm4umpia1i0NEREYWxlu/hprqKfcPPvgADz74ICZOnAgASE9Px5///GflHrZCCJSWlmLJkiV48MEHAQCvv/46kpKSsHPnTkybNk3tIlEX2u+jWv11TURE3lG9h37nnXeisrISp0+fBgB89NFHOHjwIO6//34AwLlz52Cz2dzuoGOxWJCdnd3lHXSam5s73XKPiIioR+yh+2/RokVwOBwYOnQoIiMj4XQ6sXz5cuTl5QEAbDYbAHi8g07bax2VlJTg6aefVruoRESkc2qc35clg6l6D/2NN97A5s2bsWXLFhw9ehSvvfYaXnjhBbz22mt+r3Px4sWw2+3KVFdXp2KJjcsEbQ5mkegHse5I1iEhonZU76H/7ne/w6JFi5Rz4SNGjMD58+dRUlKC/Px85S459fX1GDhwoPK++vp6jBo1yuM6zWZzl/fMJX3peJMKrf3YICLJhOk69HBQvYd+/fr1TtfWRUZGKrexy8jIQHJystsddBwOB6qrqz3eQYeMi8GciAIVzsvWQk31HvqkSZOwfPlypKWl4Yc//CGOHTuG1atX41e/+hUAwGQyYf78+Xj22WcxZMgQZGRkoKioCCkpKZg8ebLaxSHJaCmIG/EqgGDW04jtSRpgoB666gH9pZdeQlFREX7zm9/g0qVLSElJwb//+7+juLhYWeaJJ55AY2Mj5syZg4aGBtx1112oqKhATEyM2sUhIiIyBJNofws3STgcDlgsFtgBqP8gO6Ib2KNUF9uT2nMAsACw2+1BeSRpW5z44b+vQGR0YJ1FZ0sTPvnT74NWVrXwXu5EXWDQURfbk8JBjXPgspxD5+NTiYiIdIA9dCIi0i8OiiMiX/lyjjjc55PD/flG1jE2sP2Diyl3IiIikgp76EQq86bHZYI0WTxSWfttz955CDDlTkS+8vXgHM6DOQNJeLH9Q4cpdyIiIpIKe+hExAfhkH4x5U5ERiHa/WVQJ91hQCciIpIfz6ETkVfU+PEfbqYOf4lITuyhE6lA9nS1zGUn6hZT7kRERPIzCQFTgA8VDfT9oaLrlLuvP8z0kD7VMy1uH6ar9UuL+xtRd3TbQxcd/u3LvbVJe7S8fRjM9UfL+xv5iCl3+fHWmkREZKRR7roN6IBvPSf2srSN24dCSUv7G5+MR97SdUAnIiKDY8qdiIhIfky5ExFpkBHTz0apJwWOAZ2IiPSLKXciIiL5MeVORKRBTD+TzwzUQ9f1neKIiIiMgj10IiLSNVlS5oFiQCciIv0S4sYU6DokwJQ7ERGRDrCHTkREusVR7uSREW9qoRVseyLyC0e5U0eSbE9D4LYgIuqMPXQvBfo4Vm+eyU5EpHW+HMu0cNwzuW5Mga5DBgzoXgo0mLf9DffOTUTkL1+OZZo57jHlTh0xEGsHtwURUWfsofvB14ASaLpeDR0/X7agqIU2JKIbvDl+aOU7y1Hu5FEgQTCcAdTTvhj2NJgfZCsvkd7405kJOwPdWIYBnYiIdIs9dMl4amtN/DLUCE+pL7YPkba1fWf5XSVv6SKgeyJjSjmY2BZE8pCkQygHA41y121AJyIiMlLKXReXrXnqfYarR6rGj0Ei0reejhOmDn+JvKGbHroWdnzR4d9aKBMRaYu3xwkeP1TCUe5ERETyM1LKnQFdRb7+ouYTxIKHbattRt4+RqsvhQ4DOhER6RdHuRMREcmPKXcKiVCl3oyY3jRKPWXF7aN/RjzuhJvPl60dOHAAkyZNQkpKCkwmE3bu3On2uhACxcXFGDhwIGJjY2G1WnHmzBm3Za5cuYK8vDzExcUhPj4es2bNwrVr1wKqCBERUScuoc4kAZ8DemNjI0aOHImysjKPr69atQpr1qxBeXk5qqur0bt3b+Tm5qKpqUlZJi8vD5988gn27NmDXbt24cCBA5gzZ47/tSAiIvJEqDRJwOeU+/3334/777/f42tCCJSWlmLJkiV48MEHAQCvv/46kpKSsHPnTkybNg0nTpxARUUFDh8+jKysLADASy+9hAkTJuCFF15ASkqKz5Vgaqd7bBMiCjWtHHdMUOEcuiolCT5V7xR37tw52Gw2WK1WZZ7FYkF2djaqqqoAAFVVVYiPj1eCOQBYrVZERESgurra43qbm5vhcDjcJiIiIvqOqgHdZrMBAJKSktzmJyUlKa/ZbDYkJia6vR4VFYWEhARlmY5KSkpgsViUKTU1Vc1iExGRXrXdKS7QSQJS3Mt98eLFsNvtylRXV+f2uqndREShJ9mpRjKQtsvWAp1koGpAT05OBgDU19e7za+vr1deS05OxqVLl9xe/+abb3DlyhVlmY7MZjPi4uLcJiIiIi0rKytDeno6YmJikJ2djUOHDnW57Pr16/GjH/0I/fr1Q79+/WC1Wrtd3hNVA3pGRgaSk5NRWVmpzHM4HKiurkZOTg4AICcnBw0NDaipqVGW2bt3L1wuF7Kzs9UsDhERGV2YRrlv27YNhYWFWLp0KY4ePYqRI0ciNze3U4e2zb59+zB9+nS8//77qKqqQmpqKu69915cvHjR6880CeHbyYFr167hs88+AwCMHj0aq1evxt13342EhASkpaXhueeew8qVK/Haa68hIyMDRUVF+Mc//oFPP/0UMTExAG6MlK+vr0d5eTlaW1sxc+ZMZGVlYcuWLV6VweFwwGKxwA6AfXUi+QTzyhQ9XPWihzr0xAHAAsButwcl69oWJ340fimiomICWtc33zTh7/ueRl1dnVtZzWYzzGazx/dkZ2djzJgxePnllwEALpcLqampePzxx7Fo0aIeP9PpdKJfv354+eWXMWPGDK/K6XMP/ciRIxg9ejRGjx4NACgsLMTo0aNRXFwMAHjiiSfw+OOPY86cORgzZgyuXbuGiooKJZgDwObNmzF06FDcc889mDBhAu666y6sW7fO16IQERGFTGpqqtsA7ZKSEo/LtbS0oKamxu2Kr4iICFitVuWKr55cv34dra2tSEhI8Lp8Pl+HPn78eHTXqTeZTFi2bBmWLVvW5TIJCQle98aJiIj85vp2CnQdgMceuieXL1+G0+n0eMXXyZMnvfrIhQsXIiUlxe1HQU94L3ciCrlgppH1kKIOZh2MkM5vzyQETAFedtb2/lANyl65ciW2bt2Kffv2uWW3e8KATkREpKIBAwYgMjKy2yu+uvLCCy9g5cqVeO+993Dbbbf59LlSXIdOpAWSXIpKISTjPmGEXrmbMIxyj46ORmZmptsVXy6XC5WVlcoVX56sWrUKzzzzDCoqKtzupuot9tCJvCDa/TXcAZE8knmfkK28AVHjTm9+vL+wsBD5+fnIysrC2LFjUVpaisbGRsycORMAMGPGDAwaNEgZWPfcc8+huLgYW7ZsQXp6unLn1D59+qBPnz5efSYDOhER6ZYad3rz5/1
Tp07FV199heLiYthsNowaNQoVFRXKQLkLFy4gIuK7JPnatWvR0tKCn/70p27rWbp0KZ566ikvy+njdehawOvQKRxk7IlRcHGf8F+orkP/8Z1FqlyHvv+DZ4JWVrWwh07kJR64qSPuExIIU8o9HBjQiYhIt0yuG1Og65ABA7oHRrtOk4iI5MeATkRE+sWUOxERkQ74+bS0TuuQAAO6ByZw9CqRrHjKLPQ8xTu2fegxoHeBOyMRUc+66rxqpVOk5r3ctY4BnYiIVKeFYA6A59CJiGSlmUBiEG3t3dYj10rP3IgY0ImIKGCmDn81QyDw56HL0UFnQCciIv3iOXQiIiI9EFDhHLoqJQk6Pg+diIhIB9hDJyIi/eIod2PjjSn0i9s2fNj2FBYuBL7DSfJwFqbcyTDk+I1NROQf9tDJMNqukW37NxHpH0e5GxwP9vrFbRs+bHsKCwOdQ2fKnYiISAfYQyciIv0yUA+dAZ280nF3ZvqU9ID7tQEYKKAz5U5ERKQD7KGTVzhCXN+M+oQsf/ZrX9rKm3UzSxBkBroOnQGdvMYDjT6Jdn+NuI19qbO/bdXV8p4SuUbdDsHCy9aIiIj0gOfQichotNArFND2Hf38baOu3qeFNif9YA+dyOC0GFS0nHb2pVzeLKvVeuqGSwCmAH8murT8M/M7DOhEpAlyHDJJOky5k6y0nrIkImPisSn42EPXEX5ZiEiLwntsUqGHLsnRlQGdiDSh/TXhRKphyp1kZOrwl0g2WtyHBZguDpQWt6sesYeuM/zCkOy4D+tT2LarS4WfYxzlTkREFGbCdWMKdB0SYECnoGj/e5Y9rs6C3T6877562IYkCwZ0Ip0RHf7NgESGZqBBcQzoRDrGYE6Gx3PoRIFhIOleMNvHm3XzlAgZhoF66LxsjYiISAfYQyciIv0SUKGHrkpJgo4BPcTCneoM9+erQQ91CDetthu3LamOKfeuHThwAJMmTUJKSgpMJhN27typvNba2oqFCxdixIgR6N27N1JSUjBjxgx88cUXbuu4cuUK8vLyEBcXh/j4eMyaNQvXrl0LuDJEJC85DplE2uVzQG9sbMTIkSNRVlbW6bXr16/j6NGjKCoqwtGjR/Hmm2/i1KlTeOCBB9yWy8vLwyeffII9e/Zg165dOHDgAObMmeN/LYhIeuyRU1C4XOpMEjAJ4X8uwWQyYceOHZg8eXKXyxw+fBhjx47F+fPnkZaWhhMnTmDYsGE4fPgwsrKyAAAVFRWYMGEC/ud//gcpKSk9fq7D4YDFYoEdQJy/hSciQ2E6X1scACwA7HY74uLUP5K3xQnr92YhKiI6oHV942rBe1+9GrSyqiXoo9ztdjtMJhPi4+MBAFVVVYiPj1eCOQBYrVZERESgurra4zqam5vhcDjcJiIiIvpOUAN6U1MTFi5ciOnTpyu/amw2GxITE92Wi4qKQkJCAmw2m8f1lJSUwGKxKFNqamowi01ERHrRNigu0EkCQQvora2tePjhhyGEwNq1awNa1+LFi2G325Wprq5OlTIa/bGIRq8/GYup3eSLcH9Pwv350nMJdSYJBOWytbZgfv78eezdu9ftnENycjIuXbrktvw333yDK1euIDk52eP6zGYzzGZzMIpKRESkC6r30NuC+ZkzZ/Dee++hf//+bq/n5OSgoaEBNTU1yry9e/fC5XIhOztb7eJ0iwNjgkuO37TkK6P1FoN9nDBSW4aDEC5VJhn43EO/du0aPvvsM+X/586dQ21tLRISEjBw4ED89Kc/xdGjR7Fr1y44nU7lvHhCQgKio6Nx66234r777sPs2bNRXl6O1tZWzJ07F9OmTfNqhLvajBzUg1l30e6vkdtYb4z6JLdg1dOb74lR2jhohAopc0nOofsc0I8cOYK7775b+X9hYSEAID8/H0899RTeeustAMCoUaPc3vf+++9j/PjxAIDNmzdj7ty5uOeeexAREYEpU6ZgzZo1flaBiIioC0KFnJJeA/r48ePR3aXr3lzWnpCQgC1btvj60VJraxX+2g49tr16TAhuexptW7VvT6JA8V7uIWDEL6wWD8hGShEHUyja0Ejbyij1DBuXCzAFeA5cr+fQiYiIpGGglDufhx4C/AWuDdwO8uhuW4VqlL3RRvPzenf5sYceIgwm4cO2l0dP2ypUwYZBTT+EywURYMpdt5etERERSYMpdyIi7TF1+Ku2jod+bz9HD+nqYLctBR976EQklWDf5CXYn6NluqyzSwAmY/TQGdCJiDrwJbDJcag3MCEABHrZmhxbmQHdB/6k4sgd25C0yt/9kTeHIa1gQCciCgCDubYJl4AIMOXuzR1QtYABnYgoAOyha5xwIfCUuxyXrXGUuw9M7Sbyj9bbUA+jlcMhkHbTQ5trfb82MuESqkz+KCsrQ3p6OmJiYpCdnY1Dhw51u/z27dsxdOhQxMTEYMSIEdi9e7dPn8eATkREpLJt27ahsLAQS5cuxdGjRzFy5Ejk5ubi0qVLHpf/4IMPMH36dMyaNQvHjh3D5MmTMXnyZBw/ftzrzzQJWU4OtGO32xEfH486AHHhLgzpCi9d8o+/7cb2Ni4HgFQADQ0NsFgs6q/f4YDFYsFdmIAo9ApoXd+gFQexG3V1dYiL+y7qmM1mmM1mj+/Jzs7GmDFj8PLLLwMAXC4XUlNT8fjjj2PRokWdlp86dSoaGxuxa9cuZd4dd9yBUaNGoby83KtySnkO/erVqwBu7AxERCSvq1evBiWgR0dHIzk5GQdtvqWtu9KnTx+kprpHnaVLl+Kpp57qtGxLSwtqamqwePFiZV5ERASsViuqqqo8rr+qqgqFhYVu83Jzc7Fz506vyyhlQE9JSUFdXR2EEEhLS+v0q0lPHA4HUlNTdV1HgPXUGyPU0wh1BIJXTyEErl69ipSUFNXW2V5MTAzOnTuHlpYWVdYnhIDJ5J5D6qp3fvnyZTidTiQlJbnNT0pKwsmTJz2+x2azeVzeZrN5XUYpA3pERAQGDx4Mh8MBAIiLi9P1FwowRh0B1lNvjFBPI9QRCE49g9Ezby8mJgYxMTFB/Qwt4aA4IiIiFQ0YMACRkZGor693m19fX4/k5GSP70lOTvZpeU8Y0ImIiFQUHR2NzMxMVFZWKvNcLhcqKyuRk5Pj8T05OTluywPAnj17ulzeEylT7m3MZjOWLl3a5XkMPTBCHQHWU2+MUE8j1BEwTj3VVlhYiPz8fGRlZWHs2LEoLS1FY2MjZs6cCQCYMWMGBg0ahJKSEgDAvHnz8OMf/xgvvvgiJk6ciK1bt+LIkSNYt26d158p5WVrREREWvfyyy/j+eefh81mw6hRo7BmzRpkZ2cDAMaPH4/09HRs2rRJWX779u1YsmQJPv/8cwwZMgSrVq3ChAkTvP48BnQiIiId4Dl0IiIiHWBAJyIi0gEGdCIiIh1gQCciItIBaQO6r4+l07qSkhKMGTMGffv2RWJiIiZPnoxTp065LdPU1ISCggL0798fffr0wZQpUzrdiEAmK1euhMlkwvz585V5eqnjxYsX8Ytf/AL9+/dHbGwsRowYgSNHjiivCy
FQXFyMgQMHIjY2FlarFWfOnAljiX3ndDpRVFSEjIwMxMbG4pZbbsEzzzyD9uNsZazngQMHMGnSJKSkpMBkMnW6l7Y3dbpy5Qry8vIQFxeH+Ph4zJo1C9euXQthLbrXXR1bW1uxcOFCjBgxAr1790ZKSgpmzJiBL774wm0dWq+jIQkJbd26VURHR4sNGzaITz75RMyePVvEx8eL+vr6cBfNb7m5uWLjxo3i+PHjora2VkyYMEGkpaWJa9euKcs8+uijIjU1VVRWVoojR46IO+64Q9x5551hLLX/Dh06JNLT08Vtt90m5s2bp8zXQx2vXLkibr75ZvHLX/5SVFdXi7Nnz4p3331XfPbZZ8oyK1euFBaLRezcuVN89NFH4oEHHhAZGRni66+/DmPJfbN8+XLRv39/sWvXLnHu3Dmxfft20adPH/HHP/5RWUbGeu7evVs8+eST4s033xQAxI4dO9xe96ZO9913nxg5cqT48MMPxd///nfxgx/8QEyfPj3ENelad3VsaGgQVqtVbNu2TZw8eVJUVVWJsWPHiszMTLd1aL2ORiRlQB87dqwoKChQ/u90OkVKSoooKSkJY6nUdenSJQFA7N+/Xwhx40vWq1cvsX37dmWZEydOCACiqqoqXMX0y9WrV8WQIUPEnj17xI9//GMloOuljgsXLhR33XVXl6+7XC6RnJwsnn/+eWVeQ0ODMJvN4s9//nMoiqiKiRMnil/96ldu8x566CGRl5cnhNBHPTsGO2/q9OmnnwoA4vDhw8oyf/vb34TJZBIXL14MWdm95elHS0eHDh0SAMT58+eFEPLV0SikS7m3PZbOarUq83p6LJ2M7HY7ACAhIQEAUFNTg9bWVrd6Dx06FGlpadLVu6CgABMnTnSrC6CfOr711lvIysrCz372MyQmJmL06NFYv3698vq5c+dgs9nc6mmxWJCdnS1VPe+8805UVlbi9OnTAICPPvoIBw8exP333w9AP/Vsz5s6VVVVIT4+HllZWcoyVqsVERERqK6uDnmZ1WC322EymRAfHw9An3XUA+lu/erPY+lk43K5MH/+fIwbNw7Dhw8HcOPRetHR0coXqo2vj9cLt61bt+Lo0aM4fPhwp9f0UsezZ89i7dq1KCwsxO9//3scPnwYv/3tbxEdHY38/HylLoE+KjHcFi1aBIfDgaFDhyIyMhJOpxPLly9HXl4eAOimnu15UyebzYbExES316OiopCQkCBlvZuamrBw4UJMnz5dedqa3uqoF9IFdCMoKCjA8ePHcfDgwXAXRVV1dXWYN28e9uzZo+tHGrpcLmRlZWHFihUAgNGjR+P48eMoLy9Hfn5+mEunnjfeeAObN2/Gli1b8MMf/hC1tbWYP38+UlJSdFVPI2ttbcXDDz8MIQTWrl0b7uJQD6RLufvzWDqZzJ07F7t27cL777+PwYMHK/OTk5PR0tKChoYGt+VlqndNTQ0uXbqE22+/HVFRUYiKisL+/fuxZs0aREVFISkpSfo6AsDAgQMxbNgwt3m33norLly4AABKXWTfh3/3u99h0aJFmDZtGkaMGIFHHnkECxYsUB42oZd6tudNnZKTk3Hp0iW317/55htcuXJFqnq3BfPz589jz549bs9C10sd9Ua6gO7PY+lkIITA3LlzsWPHDuzduxcZGRlur2dmZqJXr15u9T516hQuXLggTb3vuecefPzxx6itrVWmrKws5OXlKf+WvY4AMG7cuE6XHJ4+fRo333wzACAjIwPJyclu9XQ4HKiurpaqntevX0dEhPshJDIyEi6XC4B+6tmeN3XKyclBQ0MDampqlGX27t0Ll8ulPJhD69qC+ZkzZ/Dee++hf//+bq/roY66FO5Ref7YunWrMJvNYtOmTeLTTz8Vc+bMEfHx8cJms4W7aH577LHHhMViEfv27RNffvmlMl2/fl1Z5tFHHxVpaWli79694siRIyInJ0fk5OSEsdSBaz/KXQh91PHQoUMiKipKLF++XJw5c0Zs3rxZ3HTTTeK//uu/lGVWrlwp4uPjxV//+lfxj3/8Qzz44IOav5yro/z8fDFo0CDlsrU333xTDBgwQDzxxBPKMjLW8+rVq+LYsWPi2LFjAoBYvXq1OHbsmDLC25s63XfffWL06NGiurpaHDx4UAwZMkRTl3R1V8eWlhbxwAMPiMGDB4va2lq341Fzc7OyDq3X0YikDOhCCPHSSy+JtLQ0ER0dLcaOHSs+/PDDcBcpIAA8Ths3blSW+frrr8VvfvMb0a9fP3HTTTeJf/u3fxNffvll+Aqtgo4BXS91fPvtt8Xw4cOF2WwWQ4cOFevWrXN73eVyiaKiIpGUlCTMZrO45557xKlTp8JUWv84HA4xb948kZaWJmJiYsT3v/998eSTT7od9GWs5/vvv+/xu5ifny+E8K5O//u//yumT58u+vTpI+Li4sTMmTPF1atXw1Abz7qr47lz57o8Hr3//vvKOrReRyPi41OJiIh0QLpz6ERERNQZAzoREZEOMKATERHpAAM6ERGRDjCgExER6QADOhERkQ4woBMREekAAzoREZEOMKATERHpAAM6ERGRDjCgExER6cD/A7f3WzwaViOWAAAAAElFTkSuQmCC", + "text/plain": [ + "
" + ] + }, + "metadata": {}, + "output_type": "display_data" + } + ], + "source": [ + "import torch\n", + "\n", + "i = torch.randint(0, len(ds), size=(1,))\n", + "plt.imshow(ds[i][0].permute(1,2,0))\n", + "plt.colorbar()" + ] + }, + { + "cell_type": "code", + "execution_count": null, + "metadata": {}, + "outputs": [], + "source": [] + } + ], + "metadata": { + "kernelspec": { + "display_name": "Python 3", + "language": "python", + "name": "python3" + }, + "language_info": { + "codemirror_mode": { + "name": "ipython", + "version": 3 + }, + "file_extension": ".py", + "mimetype": "text/x-python", + "name": "python", + "nbconvert_exporter": "python", + "pygments_lexer": "ipython3", + "version": "3.11.5" + } + }, + "nbformat": 4, + "nbformat_minor": 2 +} diff --git a/diffusion_models/models/diffusion_openai.py b/diffusion_models/models/diffusion_openai.py index 406e5ac..aff6f2f 100644 --- a/diffusion_models/models/diffusion_openai.py +++ b/diffusion_models/models/diffusion_openai.py @@ -90,8 +90,8 @@ def denoise_singlestep( """ self.model.eval() with torch.no_grad(): - t_enc = self.time_encoder.get_pos_encoding(t) - noise_pred = self.model(x, t_enc) + # t_enc = self.time_encoder.get_pos_encoding(t) + noise_pred = self.model(x, t/self.fwd_diff.timesteps) alpha = self.fwd_diff.alphas[t][:, None, None, None] alpha_hat = self.fwd_diff.alphas_dash[t][:, None, None, None] beta = self.fwd_diff.betas[t][:, None, None, None] diff --git a/diffusion_models/spine_dataset/__init__.py b/diffusion_models/spine_dataset/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/diffusion_models/spine_dataset/augmentations.py b/diffusion_models/spine_dataset/augmentations.py new file mode 100644 index 0000000..3797439 --- /dev/null +++ b/diffusion_models/spine_dataset/augmentations.py @@ -0,0 +1,57 @@ +"""Module containing the augmentation class for random erosion and dilation.""" +import numpy as np +import scipy.ndimage as ndimage + + +class RandomErosion: + """Random erosion augmentation class.""" + + def __init__( + self, randomState: np.random.RandomState, alpha=0.66, beta=5 + ) -> None: + """Initialize the random erosion augmentation class. + + Randomly erodes/dilates the image with a probability of alpha and a maximum number of iterations of beta. + + Args: + randomState (np.random.RandomState): randomstate object to use for random number generation + alpha (float, optional): Hyperparameter alpha, probability of doing augmentation. Defaults to 0.66. + beta (int, optional): Hyperparameter beta, maximum number of erosion/dilation iterations. Defaults to 5. + """ + self.alpha = alpha + self.beta = beta + self.randomState = randomState + + def __call__(self, img_np: np.ndarray) -> np.ndarray: + """Apply the augmentation to the image. 
+ + Args: + img_np (np.ndarray): image to augment + + Returns: + np.ndarray: augmented image + """ + img_np = np.where(img_np != 0, 1, 0).astype(img_np.dtype) + + for i in range(img_np.shape[1]): + do_augment = self.randomState.rand() < self.alpha + + if do_augment: + do_erosion = self.randomState.rand() < 0.5 + + if do_erosion: + n_iter = self.randomState.randint( + 1, self.beta + ) # [1, beta) + img_np[:, i, :] = ndimage.binary_erosion( + img_np[:, i, :], iterations=n_iter + ).astype(img_np.dtype) + else: + n_iter = self.randomState.randint( + 1, self.beta + ) # [1, beta) + img_np[:, i, :] = ndimage.binary_dilation( + img_np[:, i, :], iterations=n_iter + ).astype(img_np.dtype) + + return img_np \ No newline at end of file diff --git a/diffusion_models/spine_dataset/base_dataset.py b/diffusion_models/spine_dataset/base_dataset.py new file mode 100644 index 0000000..930dbb3 --- /dev/null +++ b/diffusion_models/spine_dataset/base_dataset.py @@ -0,0 +1,90 @@ +import torch +from torch.utils.data import Dataset +from abc import ABC, abstractmethod +from typing import Literal, Dict, Any, Optional +from jaxtyping import Float32, UInt, UInt64 + +class BaseDataset(Dataset, ABC): + """Interface for Datasets in Spine Diffusion package. + + This interface is currently not enforced, but any dataset implementation + should follow the guidelines outlined here, this is especially true for the + exact returns of the __getitem__ method. + """ + def __init__( + self, + resolution: int, + random_crop: bool, + crop_size: int, + mode: Literal["train","val","test"], + **kwargs + ): + """Constructor of BaseDataset. + + Args: + resolution: determines base resolution of the dataset, i.e. a + dataset with an original size of 256 (in 3D) will be downsampled + to that resolution + random_crop: + """ + raise NotImplementedError + + def __getitem__(self, idx: int) -> Dict[str, Any]: + """__getitem__ method of BaseDataset. + + Args: + idx: index of desired sample + + Returns: + dictionary with keys and items as below (not all keys necessary) + + .. code-block:: python + dict( + # ch corresponds to num_classes where applicable + sdf: Optional[ + Float32[Tensor, "1 res res res"], + None + ] = None, + + occ: Optional[ + UInt64[Tensor, "1 res res res"], # with unique values in range(2, num_classes+1) + ] = None, + + coords: Optional[Float32[Tensor, "num_points 3"], None] = None, + targets_occ: Optional[ + UInt64[Tensor, "num_points"], # 2 or multi class with probabilities 1 + Float32[Tensor, "num_points num_classes"], # 2 or multi class with probabilities in [0,1] + None + ] = None, + targets_sdf: Optional[ + Float32[Tensor, "num_points"], + None + ] + loss_fn: Literal["crossentropylogits","mse"] = "crossentropylogits" + metadata: Optional[Any, None] = None + ) + + - "sdf" is full volume and should be normalized to [-1,1] range + - "sdf_target" is cropped volume, equally normalized, may be a TSDF + of the original data to enhance learning + - "occ_float" full volume occupancy as torch.float32, normalized to [0,1] range + - "occ_target" cropped binary/multi-class torch.long tensor + - "vox_coords" contains coords of voxel centers of "sdf_target" or "occ_target", + normalized to [-1,1] range (see torch.grid_sample(align_corners=True) for reference). + If random_crop is False, this is not needed and will default to + all voxel centers in the volume. 
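A minimal usage sketch for the RandomErosion augmentation added above; it is illustrative only and not part of the patch. The file name and volume shape are assumptions, and the import path is likewise an assumption: this patch places the module at diffusion_models/spine_dataset/augmentations.py, while the other new modules import it as spine_diffusion.datasets.augmentations. The class itself only requires a numpy label volume whose second axis indexes the slices that are independently eroded or dilated.

import numpy as np
from diffusion_models.spine_dataset.augmentations import RandomErosion  # assumed import root

# Reproducible RNG; alpha/beta mirror the defaults documented in the class above.
rng = np.random.RandomState(42)
augment = RandomErosion(rng, alpha=0.66, beta=5)

volume = np.load("sample_10402.npy")   # assumed to be a (D, H, W) label volume
augmented = augment(volume)            # labels are binarized, then each [:, i, :] slice
                                       # is randomly eroded or dilated up to beta-1 times
assert augmented.shape == volume.shape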
+ - "rand_coords" can be used for randomized sampling of coordinates instead + of voxel centers + - "rand_targets" can be used for interpolated SDF values + - "metadata" anything + - while the channels "ch" will usually be 1, it might be good for multi-class + problems to split classes between channels, should be float for + """ + raise NotImplementedError + + def __len__(self): + raise NotImplementedError + + @staticmethod + def check_output(output: Dict[str, Any]): + pass \ No newline at end of file diff --git a/diffusion_models/spine_dataset/collations.py b/diffusion_models/spine_dataset/collations.py new file mode 100644 index 0000000..9bffd6a --- /dev/null +++ b/diffusion_models/spine_dataset/collations.py @@ -0,0 +1,18 @@ +import torch +from typing import List + +def collate_fn(batch: List[dict]): + res = {key: [] for key in batch[0].keys()} + res["loss_fn"] = batch[0]["loss_fn"] + for sample in batch: + for key, elem in sample.items(): + if isinstance(elem, torch.Tensor): + res[key].append(elem) + for key, elem in res.items(): + if isinstance(elem, list): + res[key] = torch.stack(res[key], dim=0) + elif isinstance(elem, str): + assert key == "loss_fn" + else: + raise ValueError(f"{key}") + return res \ No newline at end of file diff --git a/diffusion_models/spine_dataset/get_dataloader.py b/diffusion_models/spine_dataset/get_dataloader.py new file mode 100644 index 0000000..ecb375e --- /dev/null +++ b/diffusion_models/spine_dataset/get_dataloader.py @@ -0,0 +1,50 @@ +from spine_diffusion.datasets.random_deformation_dataset import ( + RandomDeformationDataset +) +from spine_diffusion.datasets.shapenet import ShapeNet_Dataset +from typing import Literal, Tuple +from torch.utils.data import DataLoader +from spine_diffusion.datasets.collations import collate_fn + +def get_trainval_dataloaders( + dataset: Literal["spine", "shapenet"], + config: dict, + batch_size: int +) -> Tuple[DataLoader]: + if dataset == "spine": + train_ds = RandomDeformationDataset( + mode = "train", + **config + ) + val_ds = RandomDeformationDataset( + mode = "val", + **config + ) + elif dataset == "shapenet": + train_ds = ShapeNet_Dataset( + mode = "train", + **config + ) + val_ds = ShapeNet_Dataset( + mode = "val", + **config + ) + else: + raise ValueError("no such dataset") + train_dl = DataLoader( + train_ds, + batch_size = batch_size, + shuffle = True, + num_workers = batch_size, + collate_fn = collate_fn, + pin_memory=True + ) + val_dl = DataLoader( + val_ds, + batch_size = batch_size, + shuffle = False, + num_workers = batch_size, + collate_fn = collate_fn, + pin_memory=True + ) + return train_dl, val_dl \ No newline at end of file diff --git a/diffusion_models/spine_dataset/random_deformation_dataset.py b/diffusion_models/spine_dataset/random_deformation_dataset.py new file mode 100644 index 0000000..5a258f7 --- /dev/null +++ b/diffusion_models/spine_dataset/random_deformation_dataset.py @@ -0,0 +1,641 @@ +"""Module for the RandomDeformationDataset class.""" + +import glob +import json +import os +import time + +import nibabel as nib +import numpy as np +import torch +import torch.nn.functional as nnf +from torch.utils.data import Dataset +import sys +from pathlib import Path + +import matplotlib.pyplot as plt +import random +from typing import Tuple, Optional, Literal, Union +from jaxtyping import Float32, UInt64, Float +from torch import Tensor +from einops import rearrange +import skimage +import torch.nn.functional as F + +# Get the directory one level up +#parent_dir = 
Path(__file__).resolve().parent.parent + +# Add it to sys.path +#sys.path.append(str(parent_dir)) + +from spine_diffusion.datasets.augmentations import RandomErosion +from spine_diffusion.datasets.spatial_transformer import ( + SpatialTransformer, + gauss_gen3D, +) +from spine_diffusion.utils.weighted_bce_loss import ( + calculate_weight_matrix, +) +from spine_diffusion.utils.array_reshaping import ( + pad_mask_to_size, + resample_reorient_to, +) +from spine_diffusion.utils.general_utils import nib_to_numpy +from spine_diffusion.utils.handcrafting_segmentations import ( + handcraft_lq_segmentation, +) +from spine_diffusion.utils.label_manipulation import ( + convert_labels_to_conseq, + remove_neverused_labels_and_crop, + remove_unused_labels, +) + +LQ_IDX = [1, 7, 13, 19, 25, 31, 37, 43, 49, 55, 61, 67, 73, 79, 85, 91, 97, 103, 109, 115, 121, 127, 133, 139, 145, 151, 157, 163, 169, 175, 181, 187, 193, 199, 205, 211, 217, 223] + +class RandomDeformationDataset(Dataset): + """Dataset class for the random deformation dataset using the full spine approach.""" + + def __init__( + self, + hqPath: str, + lqPath: str, + classSettingsFile: str, + mode: Literal["train","val","test"], + resolution: int, + class_mode: Literal["nerves","occ","all"], + rand_coords: bool, + num_points: int, + patient: str = None, + hqSeg: bool = False, + lqSeg: bool = False, + handcraft_lq: bool = True, + spacing: Tuple[float] = (1, 1, 1), + orientation: Tuple[str] = ("P", "L", "I"), + imageShape: Tuple[int] = (256, 256, 256), + alpha: float = 0.66, + beta: int = 5, + zeta: int = 0, + eta: int = 1, + omega: int = 4, + randomSeed: int = 42, + debug: bool = False, + overwrite: bool = False, + weightEq: bool = False, + num_samples: int = 300, + random_crop: bool = False, + random_deformation: bool = True, + update: bool = False, + ) -> None: + """Constructor for RandomDeformationDataset. + + Args: + hqPath: Path to high-quality files (parent directory of 01_training etc.) + lqPath: Same but for low-quality files + patient: Name of the patient directory, usually only one directory + in this folder (e.g. model-patient, sub-verse506) + classSettingsFile: path to json file containing the class settings + hqSeg: Whether to use high-quality segmentations. Defaults to False. + lqSeg: Same but for low-quality. Defaults to False. + handcraft_lq: Whether to handcraft the segmentations or load + real files. Defaults to False. + spacing: Common resolution. Defaults to (1, 1, 1). + orientation: Common orientation. Defaults to ("P", "L", "I"). + imageShape: Output image shape, only to be changed with random_crop. + Defaults to (256, 256, 256). + alpha: Handcrafting hyperparameter. Defaults to 0.66. + beta: Handcrafting hyperparameter. Defaults to 5. + zeta: Weighted loss hyperparameter. Defaults to 1. + eta: Weighted loss hyperparameter. Defaults to 15. + omega: Weighted loss hyperparameter. Defaults to 4. + randomSeed: Random Seed. Defaults to 42. + mode: "train", "val" or "test". Defaults to "train". + debug: Print more debug information. Defaults to False. + overwrite: Overwrite previously preprocessed files. Defaults to False. 
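+            resolution: base resolution of the returned volumes; the preprocessed
+                256^3 volumes are max-pooled down by a factor of 256 // resolution.
+            class_mode: "occ" for binary occupancy, "nerves" for a three-class
+                background/other-structures/nerve encoding, "all" to keep all labels.
+            rand_coords: if True, sample random coordinates in [-1, 1] instead of
+                returning all voxel centers.
+            num_points: number of random coordinates sampled per item when
+                rand_coords is True.
+            num_samples: number of samples drawn per high-quality file, i.e.
+                len(dataset) == len(hqFiles) * num_samples.
+            random_crop: random cropping is not supported in this version and must
+                be False.
+            random_deformation: whether to randomly deform the high-quality volume.
+                Defaults to True.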
+ """ + self.resolution = resolution + self.multiplier = 256 // resolution + self.rand_coords = rand_coords + self.num_points = num_points + self.occ = True + self.class_mode = class_mode + # changes by Lionel + + self.patient = patient + self.hqPath = hqPath + self.lqPath = lqPath + classSettingsFile = os.path.join(hqPath, classSettingsFile) + self.classSettingsFile = classSettingsFile + self.hqSeg = hqSeg + self.lqSeg = lqSeg + self.handcraftLq = handcraft_lq + self.spacing = spacing + self.orientation = orientation + self.imageShape = imageShape + self.alpha = alpha + self.beta = beta + self.zeta = zeta + self.eta = eta + self.omega = omega + self.randomSeed = randomSeed + self.mode = mode + self.debug = debug + self.overwrite = overwrite + self.weightEq = weightEq + self.device = torch.device( + "cuda" if torch.cuda.is_available() else "cpu" + ) + self.num_samples = num_samples + if random_crop: + raise NotImplementedError("Cropping not supported in this version.") + self.random_crop = random_crop + self.random_deformation = random_deformation + self.mode = mode + + self.mode_folder = ( + "01_training" + if mode == "train" + else "02_validation" if mode == "val" else "03_test" + ) + + if self.patient is None: + print( + "No specific patient is chosen. Taking all patients" + " in this dataset folder." + ) + self.patient = "**" + + self.read_class_settings(classSettingsFile) + + if hqSeg: + self.hqFolder = "segmentations_full" + else: + self.hqFolder = "derivatives_full" + + if mode == "train": + self.hqFiles = sorted( + glob.glob( + os.path.join( + self.hqPath, + self.mode_folder, + self.hqFolder, + self.patient, + "*.nii.gz", + ) + ) + ) + else: + try: + self.hqFiles = sorted( + glob.glob( + os.path.join( + self.hqPath, + self.mode_folder, + self.hqFolder, + self.patient, + "*.nii.gz", + ) + ) + ) + except: + print('This patient was not found in the folder:', self.patient) + self.hqFiles = sorted( + glob.glob( + os.path.join( + self.hqPath, + self.mode_folder, + self.hqFolder, + self.patient, + "*/*.nii.gz", + ) + ) + ) + + print('hqFiles:', len(self.hqFiles)) + + if lqSeg: + self.lqFolder = "segmentations_full" + else: + self.lqFolder = "derivatives_full" + + self.lqFiles: list[str] = [] + + if self.handcraftLq: + randomState = np.random.RandomState(self.randomSeed) + self.transform = RandomErosion(randomState, self.alpha, self.beta) + + ### Instead of loading the model in get_item, load it here + print('Preprocessing files...') + hqFiles = [] + for hqFile in self.hqFiles: + if ( + os.path.isfile( + hqFile.replace( + self.hqFolder, f"{self.hqFolder}_preprocessed_reconnet" + ) + ) + and not self.overwrite + ): + if self.debug: + print( + "Loading preprocessed file" + f" {hqFile.replace(self.hqFolder, f'{self.hqFolder}_preprocessed_reconnet')}" + ) + hqFile = hqFile.replace( + self.hqFolder, f"{self.hqFolder}_preprocessed_reconnet" + ) + hqImg = nib.load(hqFile) + else: + if self.debug: + print(f"Preprocessing file {hqFile}") + + hqImg = nib.load(hqFile) + hqImg = resample_reorient_to(hqImg, self.orientation, self.spacing) + hqImg = remove_neverused_labels_and_crop(hqImg, self.hqAllClasses) + + hqImg = nib.Nifti1Image( + pad_mask_to_size(nib_to_numpy(hqImg), self.imageShape), + affine=hqImg.affine, + ) + + hqFile = hqFile.replace( + self.hqFolder, f"{self.hqFolder}_preprocessed_reconnet" + ) + os.makedirs(os.path.dirname(hqFile), exist_ok=True) + nib.save(hqImg, hqFile) + + hqFiles.append(hqFile) + + self.hqFiles = hqFiles + + print('Preprocessing done. 
Files:', len(self.hqFiles), 'mode:', self.mode) + + # Initialize the spatial-transformer and Gaussian blur used for smoothing + self.spatial_transformer = SpatialTransformer( + size=self.imageShape, mode="nearest") + self.blur_tensor = gauss_gen3D(n=5, s=5, sigma=2) + + print('hqLabels:', self.hqLabels) + + self.numclasses = len(self.hqLabels.keys()) + + if self.mode != 'train': + print('Using fixed undersampling for LQ images.') + self.idx = LQ_IDX + else: + self.idx = None + + def read_class_settings(self, classSettingsFile) -> None: + """Extract the needed information from the class settings files. + + This function sets the following attributes: + - hqLabels: Dictionary containing the labels for the high-quality dataset. + - lqLabels: Dictionary containing the labels for the low-quality dataset. + - hqClasses: Dictionary containing the classes for the high-quality dataset, which label is a vertebra, disc or spinal canal. This is required for determining the number of erosion iterations. + - lqClasses: same as hqClasses but for low-quality. + - hqAllClasses: Dictionary containing the classes for the high-quality dataset. This is used to determine which classes can safely be deleted from the segmentation before preprocessing. Everything that is not in this dict will not be in the saved preprocessed file. + - lqAllClasses: same as hqAllClasses but for low-quality. + - objectTypes: unique types of objects that appear in hqClasses (should be the same as lqClasses) + + Args: + classSettingsFile (str): path to json file containing the class settings. + """ + self.classSettings = json.load(open(classSettingsFile, "r")) + + self.hqLabels = { + int(k): v for k, v in self.classSettings["hqLabels"].items() + } + self.lqLabels = { + int(k): v for k, v in self.classSettings["lqLabels"].items() + } + + self.hqClasses = { + int(k): v for k, v in self.classSettings["hqClasses"].items() + } + self.lqClasses = { + int(k): v for k, v in self.classSettings["lqClasses"].items() + } + self.hqAllClasses = { + int(k): v for k, v in self.classSettings["hqAllClasses"].items() + } + self.lqAllClasses = { + int(k): v for k, v in self.classSettings["lqAllClasses"].items() + } + self.objectTypes = set(self.hqClasses.values()) + + def deform_random_hq( + self, img: torch.Tensor, weight: torch.Tensor, it_mean=3, it_sigma=1, inter_range=30 + ) -> torch.Tensor: + """Randomly deform high-quality image. + + Args: + img (torch.Tensor): image to deform. + weight (torch.Tensor): pre-calculated weight to transform + it_mean (int, optional): Mean of gaussian from which number of smoothing iterations is sampled. Defaults to 100. + it_sigma (int, optional): Sigma of gaussian from which number of smoothing iterations is sampled. Defaults to 40. + device (str, optional): cuda or cpu. Defaults to "cpu". + + Returns: + torch.Tensor: deformed image. + """ + assert len(img.shape) == 5, ( + "Input image needs to have shape [1,1,x,y,z], where x,y,z is not" + " indicating orientation" + ) + + # # build transformer layer + # spatial_transformer = SpatialTransformer( + # size=(img.shape[2], img.shape[3], img.shape[4]), mode="nearest" + # ) + + # initialize displacement layer + #disp_tensor = torch.randn(img.shape[2], img.shape[3], img.shape[4])[None, None, :].to(device) + # blur_tensor = gauss_gen3D(n=5, s=5, sigma=2).to(device) + + # sample parameter: The magnitude is proportional to the iterations. Since the more iterations the smaller values. 
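+        # A coarse standard-normal noise grid of edge length img.shape[2] // random_res
+        # is upsampled trilinearly to the full volume, smoothed it_num times with the
+        # Gaussian kernel and scaled by mag_num before warping image and weight matrix.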
+ random_res = np.random.randint(1, inter_range, size=1)[0] + resolution = img.shape[2] // random_res + disp_init = torch.randn(resolution, resolution, resolution)[None, None, :] + disp_tensor = torch.nn.functional.interpolate(disp_init, size=[img.shape[2], img.shape[3], img.shape[4], ], + mode="trilinear") + + # create the deformation field + it_num = np.clip(int(np.abs(np.random.normal(it_mean, it_sigma, 1))), 0, 10) + mag_num = np.clip(int(np.abs(np.random.normal(it_num, int(it_num * 0.4), 1))), 0.1, 10) + for i in range(it_num): + disp_tensor = nnf.conv3d(disp_tensor.float(), self.blur_tensor.float(), padding=2) + + # warp the image with the transformer + moved_img = self.spatial_transformer(img.float(), disp_tensor.float() * mag_num) + moved_weight = self.spatial_transformer(weight.float(), disp_tensor.float() * mag_num) + + return moved_img, moved_weight + + def __len__(self) -> int: + """Length of the dataset. + + Returns: + int: length + """ + # if not self.mode == "test" and len(self.hqFiles) == 1: + return len(self.hqFiles)*self.num_samples + + def __getitem__( + self, index + ): + """__getitem__ method of the dataset. + + Metadata: + - name: patient identifier + - affine: affine matrix of the high-quality image + - object: "all", ends up in the output filename + - label_transform_hq: label transform for the high-quality image, multi-class prediction needs consecutive labels but the input labels are not, this is the mapping + - label_transform_lq: same as label_transform_hq but for the low-quality image + + Args: + index (int): which patient to load + + Raises: + NotImplementedError: if not handcraftLq because for real low-quality images this was not implemented yet. + + Returns: + tuple[torch.Tensor, torch.Tensor, torch.Tensor, dict]: high-quality image, low-quality image, weight matrix, metadata + """ + + ### Change deformation to also change the weights + + hqFile = self.hqFiles[index % len(self.hqFiles)] + + hqImg = nib.load(hqFile) + hqImg = remove_unused_labels(hqImg, self.hqLabels) + hqNp = nib_to_numpy(hqImg).astype(np.uint8) + + weight = torch.from_numpy(calculate_weight_matrix( + hqNp, self.zeta, self.eta, self.omega)).detach() + weight[hqNp > 0] = 1.0 + weight = weight.unsqueeze(0).unsqueeze(0) + hqTen = torch.from_numpy(hqNp) + hqTen = hqTen.unsqueeze(0).unsqueeze(0) + + start_deformation = time.time() + if self.random_deformation: + hqTen, _ = self.deform_random_hq(hqTen, weight) + + hqNp = hqTen.squeeze(0).squeeze(0).cpu().numpy().astype(np.uint8) + end_deformation = time.time() + + if self.debug: + print( + "Deformation took: {:.3f}s".format( + end_deformation - start_deformation + ) + ) + + # in testing mode either defined you want to use handcrafted or assume you want to use real MRI + if not self.handcraftLq: + raise NotImplementedError( + "Random deformation only implemented for handcrafting!" 
+ ) + else: + # Downsample hqImg, erosion and randomly chose 17-21 slices + (lq, idx), lqNp = handcraft_lq_segmentation( + hqNp, + self.hqClasses, + self.classSettings, + self.mode, + self.transform, + self.idx, + ) + + hqTen = torch.from_numpy(hqNp.astype(np.uint8)) + + lqTen = torch.from_numpy(lqNp) + lqTen = lqTen.type(hqTen.dtype) + + #weightTen = torch.from_numpy(weight) + # weightTen = weightTen.squeeze(0).squeeze(0) + + hqTen, label_transform_hq = convert_labels_to_conseq( + hqTen, self.hqLabels + ) + lqTen, label_transform_lq = convert_labels_to_conseq( + lqTen, self.lqLabels + ) + + metadata = { + "name": os.path.basename(os.path.dirname(hqFile)), + "object": "all", + "affine": hqImg.affine, + "label_transform_hq": label_transform_hq, + "label_transform_lq": label_transform_lq, + } + + assert ( + hqTen.shape == lqTen.shape == self.imageShape + ), f"shapes dont match {hqTen.shape}, {lqTen.shape}, {self.imageShape}" + + individual_voxel_ids = [torch.arange(num_elements) for num_elements in hqTen.shape] + individual_voxel_ids_meshed = torch.meshgrid(individual_voxel_ids, indexing='ij') + voxel_ids = torch.stack(individual_voxel_ids_meshed, -1) + + spacing = np.array(self.spacing) + offset = spacing / 2 + coords_hq = voxel_ids * spacing + offset + + if ((self.mode == "train") or (self.mode == "tune")) and self.random_crop: + if self.mode == 'tune': + hqTen = hqTen[:, idx, :] + hqTen[hqTen == 11] = 0 + coords_hq = coords_hq[:, idx, :] + hqTen, coords_hq, crop_idx = random_crop_3d(hqTen, coords_hq, crop_size=(64, len(idx), 64)) + metadata["idx"] = np.array(idx) + else: + hqTen, coords_hq, crop_idx = random_crop_3d(hqTen, coords_hq, crop_size=(64, 64, 64)) + metadata["idx"] = np.array(()) + metadata["crop_idx"] = crop_idx + else: + metadata["crop_idx"] = () + metadata["idx"] = np.array(()) + + # changes by Lionel + res = {} + + # downsample + hqTen = skimage.measure.block_reduce(hqTen, (self.multiplier, self.multiplier, self.multiplier), np.max) + + res["occ"] = torch.from_numpy(hqTen).to(torch.long).unsqueeze(0) + if self.class_mode == "occ": + res["occ"] = (res["occ"] > 0).to(torch.long) + elif self.class_mode == "nerves": + nerves = (res["occ"] == 10).to(torch.long) * 2 + others = ((res["occ"] > 0) & (res["occ"] < 10)).to(torch.long) + empty = torch.zeros_like(nerves) + res["occ"] = empty + others + nerves + + if self.rand_coords: + coords = self._sample_normalized_coords(self.num_points) + vals = self._get_coord_values(res["occ"].to(torch.float32), coords).to(torch.long).squeeze(0) + res["coords"] = coords + res["targets_occ"] = vals + else: + coords_hq = coords_hq / 255 - (0.5/255) + res["coords"] = rearrange(coords_hq.to(torch.float32), "r r r n -> (r r r) n") + res["targets_occ"] = rearrange(res["occ"], "c r r r -> c (r r r)").squeeze(0) + + res["loss_fn"] = "crossentropylogits" + + return res["occ"] + + def _get_coord_values( + self, + volume: Float32[Tensor, "ch depth height width"], + coords: Float32[Tensor, "num_coords 3"] + ) -> Union[ + Float32[Tensor, "ch num_coords"], + UInt64[Tensor, "ch num_coords"] + ]: + volume_unsq = volume.unsqueeze(0) # add batch dim + coords_unsq = coords[None, None, None, :, :] + if self.occ: + vals = F.grid_sample(volume_unsq, coords_unsq, mode="nearest", align_corners=True) + vals = vals.to(torch.long)[0,:,0,0] + else: + vals = F.grid_sample(volume_unsq, coords_unsq, mode="bilinear", align_corners=True) + vals = vals[0,:,0,0] + assert vals.size() == (volume.shape[0], coords.shape[0]), f"{vals.size()}{volume.size()}{coords.size()}" + return vals 
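+
+    # Both coordinate helpers use the normalized [-1, 1] cube expected by
+    # torch.nn.functional.grid_sample(align_corners=True): _get_coord_values (above)
+    # samples a volume at the given coordinates, _sample_normalized_coords (below)
+    # draws uniform random coordinates inside that cube.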
+ + def _sample_normalized_coords(self, num_coords: int) -> Float32[Tensor, "num_coords 3"]: + coords = torch.rand((num_coords, 3)) + return coords * 2 - 1 + + +def random_crop_3d(tensor, tensor2, crop_size=(64, 64, 64)): + """ + Randomly crops a 3D tensor to the specified size. + + Args: + - tensor (torch.Tensor): The input 3D tensor to be cropped. + - crop_size (tuple): The desired crop size in (depth, height, width) dimensions. + + Returns: + - cropped_tensor (torch.Tensor): The randomly cropped 3D tensor. + """ + depth, height, width = crop_size + max_x = tensor.shape[2] - width + max_y = tensor.shape[1] - height + max_z = tensor.shape[0] - depth + + if max_x < 0 or max_y < 0 or max_z < 0: + raise ValueError("Crop size exceeds the dimensions of the input tensor.") + + # Randomly select the starting coordinates of the crop + random_x = random.randint(0, max_x) + random_y = random.randint(0, max_y) + random_z = random.randint(0, max_z) + + # Crop the tensor + cropped_tensor1 = tensor[random_z:random_z + depth, random_y:random_y + height, random_x:random_x + width] + cropped_tensor2 = tensor2[random_z:random_z + depth, random_y:random_y + height, random_x:random_x + width] + + crop_idx = (random_x, random_y, random_z, depth, height, width) + + return cropped_tensor1, cropped_tensor2, crop_idx + +def save_nib(img, filename, affine): + nib.save(nib.Nifti1Image(img.numpy(), affine), filename) + + +def plot_examples(imgs, sample, n_images=10): + fig, axs = plt.subplots(3, n_images, figsize=(15, 10)) + step = 20 + + for i in range(n_images): + axs[0, i].imshow(imgs[i*step, :, :], cmap='gray') + axs[1, i].imshow(imgs[:, i*step, :], cmap='gray') + axs[2, i].imshow(imgs[:, :, i*step], cmap='gray') + + plt.savefig(f'/home/klanna/github/Spine3D/DeepSDF/tests/example_{sample}.png') + + + + +if __name__ == "__main__": + path = '/usr/bmicnas02/data-biwi-01/lumbarspine/datasets_lara/' + hqPath = os.path.join( + path, "Atlas_Houdini" + ) + lqPath = os.path.join( + path, "Atlas_Houdini" + ) + patient = "model-patient" # or sub-verse505 + classSettingsFile = os.path.join( + path, + "Atlas_Houdini/settings_dict_ideal_patient_corr_v1.json", + ) + + outpath = '/home/klanna/github/Spine3D/DeepSDF/tests/' + dataset = RandomDeformationDataset( + hqPath, + lqPath, + patient, + classSettingsFile, + hqSeg=False, + lqSeg=False, + handcraft_lq=True, + mode="train", + num_samples=3, + spacing=(1, 1, 1), + ) + + print('Dataset size:', len(dataset)) + + # hqTen0, *_ = dataset[0] + + for i in range(len(dataset)): + hqTen, lqTen, coords, metadata = dataset[i] + print(hqTen.shape) + print(coords.reshape(-1, 3)) + # plot_examples(hqTen, i) + # uncomment to save the images + # save_nib(hqTen, f'{outpath}hqTen_{i}.nii.gz', metadata["affine"]) + # save_nib(lqTen, f'{outpath}lqTen_{i}.nii.gz', metadata["affine"]) diff --git a/diffusion_models/spine_dataset/shapenet.py b/diffusion_models/spine_dataset/shapenet.py new file mode 100644 index 0000000..597a531 --- /dev/null +++ b/diffusion_models/spine_dataset/shapenet.py @@ -0,0 +1,141 @@ +"""ShapeNet Dataset class, adopted from [1]_ + +.. 
[1] https://github.com/Zhengxinyang/LAS-Diffusion +""" +import skimage.measure +import torch +import os +from pathlib import Path +from typing import Literal, List, Optional, Union +from jaxtyping import Float32, UInt64 +from torch import Tensor +import torch.nn.functional as F +import numpy as np +import skimage +from spine_diffusion.utils.shapenet_utils import ( + snc_synth_id_to_category_all, + snc_category_to_synth_id_13, + snc_category_to_synth_id_5, + snc_category_to_synth_id_all +) + +class ShapeNet_Dataset(torch.utils.data.Dataset): + def __init__( + self, + sdf_folder: str, + mode: Literal["train","val","test"], + data_class: str, + resolution: int, + rand_coords: bool, + num_points: int, + occ: bool = True, + occ_threshold = 1/32, + split_ratio: float = 0.9 + ): + """ Constructor for Occupancy Field Dataset. + + Args: + sdf_folder: where to find the files + data_class: e.g. chair, airplane, ... or all + resolution: resolution of the SDF/occ volumes + rand_coords: whether to use random coordinates instead of voxe centers + num_points: how many points to sample + mode: which split of the data to load + occ: whether to load occupancy or SDF + occ_threshold: threshold for making occupancies from SDF + split_ratio: split between train/val and test set + """ + super().__init__() + + if data_class == "all": + _data_classes = snc_category_to_synth_id_all.keys() + else: + # single category shapenet + _data_classes = [data_class] + + self.resolution = resolution + if not rand_coords: + raise NotImplementedError("yet to be implemented") + self.rand_coords = rand_coords + self.num_points = num_points + self.split = mode + self.split_ratio = split_ratio + self.occ = occ + self.occ_threshold = occ_threshold + + self.sdf_paths = [] + for _data_class in _data_classes: + _label = snc_category_to_synth_id_all[_data_class] + _path = os.path.join(sdf_folder, _label) + self.sdf_paths.extend( + [p for p in Path(f'{_path}').glob('**/*.npy')]) + + if self.split == "train": + self.sdf_paths = self.sdf_paths[:int(len(self.sdf_paths)*0.9*split_ratio)] + elif self.split == "val": + self.sdf_paths = self.sdf_paths[int(len(self.sdf_paths)*0.9*split_ratio):int(len(self.sdf_paths)*split_ratio)] + elif self.split == "test": + self.sdf_paths = self.sdf_paths[int(len(self.sdf_paths)*split_ratio):] + else: + raise ValueError("unknown split value") + + self.multiplier = 128 // resolution + + def __len__(self): + return len(self.sdf_paths) + + def __getitem__(self, index): + sdf_path = self.sdf_paths[index] + sdf = np.load(sdf_path) + + # downsample to requested resolution + sdf = skimage.measure.block_reduce(sdf, (self.multiplier, self.multiplier, self.multiplier), np.mean) + + res = {} + # sample coordinates + coords = self._sample_normalized_coords(self.num_points) + res["coords"] = coords + + if not self.occ: + res["sdf"] = torch.from_numpy(sdf).unsqueeze(0).to(torch.float32) + if torch.abs(res["sdf"].max()) > torch.abs(res["sdf"].min()): + res["sdf"] = res["sdf"] / torch.abs(res["sdf"].max()) + else: + res["sdf"] = res["sdf"] / torch.abs(res["sdf"].min()) + res["targets_sdf"] = self._get_coord_values(res["sdf"], coords).squeeze(0) + res["loss_fn"] = "mse" + else: + occ = np.where( + abs(sdf) < self.occ_threshold, + np.ones_like(sdf, dtype=np.float32), + np.zeros_like(sdf, dtype=np.float32) + ) + res["occ"] = torch.from_numpy(occ).unsqueeze(0) + res["targets_occ"] = self._get_coord_values(res["occ"], coords).to(torch.long).squeeze(0) + res["occ"] = res["occ"].to(torch.long) + res["loss_fn"] = "crossentropylogits" 
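+            # note: |sdf| < occ_threshold marks a narrow band around the zero level
+            # set, i.e. a surface shell rather than a filled interior (assuming a
+            # signed distance field as input)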
+ + return res + + def _get_coord_values( + self, + volume: Float32[Tensor, "ch depth height width"], + coords: Float32[Tensor, "num_coords 3"] + ) -> Union[ + Float32[Tensor, "ch num_coords"], + UInt64[Tensor, "ch num_coords"] + ]: + volume_unsq = volume.unsqueeze(0) # add batch dim + coords_unsq = coords[None, None, None, :, :] + if self.occ: + vals = F.grid_sample(volume_unsq, coords_unsq, mode="nearest", align_corners=True) + vals = vals.to(torch.long)[0,:,0,0] + else: + vals = F.grid_sample(volume_unsq, coords_unsq, mode="bilinear", align_corners=True) + vals = vals[0,:,0,0] + assert vals.size() == (volume.shape[0], coords.shape[0]), f"{vals.size()}{volume.size()}{coords.size()}" + return vals + + def _sample_normalized_coords(self, num_coords: int) -> Float32[Tensor, "num_coords 3"]: + coords = torch.rand((num_coords, 3)) + return coords * 2 - 1 \ No newline at end of file diff --git a/diffusion_models/spine_dataset/spatial_transformer.py b/diffusion_models/spine_dataset/spatial_transformer.py new file mode 100644 index 0000000..e854b38 --- /dev/null +++ b/diffusion_models/spine_dataset/spatial_transformer.py @@ -0,0 +1,94 @@ +"""Class and helper functions used for the random deformations.""" + +import numpy as np +import torch +import torch.nn as nn +import torch.nn.functional as nnf + + +def gauss_gen3D(n=10, s=5, sigma=3) -> torch.Tensor: + """Generate blur kernel. + + Args: + n (int, optional): mean of gaussian. Defaults to 10. + s (int, optional): steps where value is defined. Defaults to 5. + sigma (int, optional): sigma of the gaussian. Defaults to 3. + + Returns: + torch.Tensor: blur_kernel + """ + sigma = 3 + + x = np.linspace(-(n - 1) / 2, (n - 1) / 2, s) + y = np.linspace(-(n - 1) / 2, (n - 1) / 2, s) + z = np.linspace(-(n - 1) / 2, (n - 1) / 2, s) + + xv, yv, zv = np.meshgrid(x, y, z) + hg = np.exp(-(xv**2 + yv**2 + zv**2) / (2 * sigma**2)) + h = hg / np.sum(hg) + + blur_kernel = torch.from_numpy(h)[None, None, :] + + return blur_kernel + + +class SpatialTransformer(nn.Module): + """N-D Spatial Transformer.""" + + def __init__(self, size: tuple, mode="bilinear") -> None: + """Initialize Spatial Transformer. + + Args: + size (tuple): Size tuple, shape + mode (str, optional): Which mode to use in sampling. Defaults to "bilinear". + """ + super().__init__() + self.mode = mode + + # create sampling grid + vectors = [torch.arange(0, s) for s in size] + grids = torch.meshgrid(vectors, indexing="ij") + grid = torch.stack(grids) + grid = torch.unsqueeze(grid, 0) + grid = grid.type(torch.float) + + # registering the grid as a buffer cleanly moves it to the GPU, but it also + # adds it to the state dict. this is annoying since everything in the state dict + # is included when saving weights to disk, so the model files are way bigger + # than they need to be. so far, there does not appear to be an elegant solution. + # see: https://discuss.pytorch.org/t/how-to-register-buffer-without-polluting-state-dict + self.register_buffer("grid", grid) + + def forward(self, src: torch.Tensor, flow: torch.Tensor) -> torch.Tensor: + """Forward pass of the spatial transformer. Generates the deformed image. + + Args: + src (torch.Tensor): image to deform. + flow (torch.Tensor): blur kernel. + + Returns: + torch.Tensor: deformed image. + """ + # new locations + # print("This is the self grid shape: ", self.grid.shape) + new_locs = self.grid.to(flow.device) + flow + shape = flow.shape[2:] + + # need to normalize grid values to [-1, 1] for resampler + for i in range(len(shape)): + new_locs[:, i, ...] 
= 2 * ( + new_locs[:, i, ...] / (shape[i] - 1) - 0.5 + ) + + # move channels dim to last position + # also not sure why, but the channels need to be reversed + if len(shape) == 2: + new_locs = new_locs.permute(0, 2, 3, 1) + new_locs = new_locs[..., [1, 0]] + elif len(shape) == 3: + new_locs = new_locs.permute(0, 2, 3, 4, 1) + new_locs = new_locs[..., [2, 1, 0]] + + return nnf.grid_sample( + src, new_locs, align_corners=True, mode=self.mode + ) diff --git a/diffusion_models/utils/datasets.py b/diffusion_models/utils/datasets.py index 8cd057e..e0626e6 100644 --- a/diffusion_models/utils/datasets.py +++ b/diffusion_models/utils/datasets.py @@ -17,23 +17,25 @@ class LumbarSpineDataset(Dataset): def __init__(self, root: str = None, train: bool = None, transform: Callable[..., Any] | None = None): - root = "/itet-stor/peerli/lumbarspine_bmicnas02/Atlas_Houdini2D" - self.files = [os.path.join(root, elem) for elem in os.listdir(root) if "image" in elem] + root = "/home/lionel/Data/Atlas_Houdini2D_nerves" + self.files = sorted([os.path.join(root, elem) for elem in os.listdir(root) if "sample" in elem]) self.transform = Compose([ + # RandomRotation(degrees=(0, 360), interpolation=InterpolationMode.BILINEAR), RandomCrop((128,128), padding=0, pad_if_needed=True), RandomHorizontalFlip(), - RandomVerticalFlip(), - RandomRotation(degrees=(0, 360), interpolation=InterpolationMode.BILINEAR) + RandomVerticalFlip() ]) def __len__(self): return len(self.files) def __getitem__(self, index): - img = Image.open(self.files[index]) - img = ToTensor()(img) - img = torch.mean(img, dim=0, keepdim=True) + img = np.load(self.files[index]) + img = torch.from_numpy(img) return (self.transform(img),) + +class LumbarSpineDebug(LumbarSpineDataset): + __len__ = lambda x: 10 class Cifar10Dataset(CIFAR10): def __init__(self, root: str, train: bool = True, transform: Callable[..., Any] | None = None, target_transform: Callable[..., Any] | None = None, download: bool = False) -> None: diff --git a/tests/train_generative.py b/tests/train_generative.py index cdb9bfe..655c407 100644 --- a/tests/train_generative.py +++ b/tests/train_generative.py @@ -22,7 +22,8 @@ MNISTKSpace, FastMRIRandCrop, FastMRIRandCropDebug, - LumbarSpineDataset + LumbarSpineDataset, + LumbarSpineDebug ) from diffusion_models.utils.helpers import dotdict import wandb @@ -30,16 +31,16 @@ from torch.optim.lr_scheduler import CosineAnnealingLR, CosineAnnealingWarmRestarts config = dotdict( - world_size = 1, + world_size = 2, total_epochs = 100, - log_wandb = False, + log_wandb = True, project = "lumbarspine_gen_trials", - data_path = "/itet-stor/peerli/lumbarspine_bmicnas02/Atlas_Houdini2D", + data_path = "/home/lionel/Data/Atlas_Houdini2D_nerves", #data_path = "/itet-stor/peerli/net_scratch", - checkpoint_folder = "/itet-stor/peerli/net_scratch/run_name", # append wandb run name to this path - wandb_dir = "/itet-stor/peerli/net_scratch", - #from_checkpoint = "/itet-stor/peerli/net_scratch/curious-river-16/checkpoint1.pt", - from_checkpoint = False, + checkpoint_folder = "/home/lionel/Data/run_name", # append wandb run name to this path + wandb_dir = "/home/lionel/Data", + from_checkpoint = "/home/lionel/Data/atomic-star-4/checkpoint101.pt", + #from_checkpoint = False, loss_func = F.mse_loss, mixed_precision = True, optimizer = torch.optim.Adam, @@ -48,19 +49,16 @@ #cosine_ann_T_mult = 2, k_space = False, save_every = 1, - num_samples = 4, - batch_size = 48, - gradient_accumulation_rate = 4, + num_samples = 9, + batch_size = 32, + gradient_accumulation_rate = 8, 
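+    # with world_size = 2, one optimizer step sees roughly
+    # batch_size * gradient_accumulation_rate * world_size = 32 * 8 * 2 = 512 samples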
learning_rate = 0.0001, img_size = 128, device_type = "cuda", - in_channels = 1, + in_channels = 3, dataset = LumbarSpineDataset, architecture = DiffusionModelOpenAI, backbone = UNetModel, - attention = False, - attention_heads = 4, - attention_ff_dim = None, unet_init_channels = 64, activation = nn.SiLU, backbone_enc_depth = 6,