Commit

typo
dead-water committed Apr 25, 2024
1 parent 25fffe6 commit 68c0e2c
Showing 14 changed files with 1,982 additions and 1,982 deletions.
446 changes: 223 additions & 223 deletions notebooks/test_finetuning.ipynb

Large diffs are not rendered by default.

392 changes: 196 additions & 196 deletions notebooks/test_pretraining_mae.ipynb
@@ -1,198 +1,198 @@
{
"cells": [
{
"cell_type": "code",
"execution_count": 1,
"metadata": {},
"outputs": [],
"source": [
"%load_ext autoreload\n",
"%autoreload 2"
]
},
{
"cell_type": "code",
"execution_count": 1,
"metadata": {},
"outputs": [],
"source": [
"import os\n",
"from pathlib import Path\n",
"\n",
"import lighting.pytorch as pl\n",
"import torch\n",
"import wandb\n",
"from sdofm import utils\n",
"from sdofm.datasets import SDOMLDataModule, DimmedSDOMLDataModule\n",
"from sdofm.pretraining import MAE\n",
"from sdofm.finetuning import Autocalibration"
]
},
{
"cell_type": "code",
"execution_count": 2,
"metadata": {},
"outputs": [],
"source": [
"import omegaconf\n",
"\n",
"cfg = omegaconf.OmegaConf.load(\"../experiments/pretrain_tiny.yaml\")"
]
},
{
"cell_type": "code",
"execution_count": 3,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"[* CACHE SYSTEM *] Found cached index data in /mnt/sdoml/cache/aligndata_AIA_FULL_12min.csv.\n",
"[* CACHE SYSTEM *] Found cached normalization data in /mnt/sdoml/cache/normalizations_AIA_FULL_12min.json.\n"
]
}
],
"source": [
"data_module = SDOMLDataModule(\n",
" hmi_path=None,\n",
" aia_path=os.path.join(\n",
" cfg.data.sdoml.base_directory, cfg.data.sdoml.sub_directory.aia\n",
" ),\n",
" eve_path=None,\n",
" components=cfg.data.sdoml.components,\n",
" wavelengths=cfg.data.sdoml.wavelengths,\n",
" ions=cfg.data.sdoml.ions,\n",
" frequency=cfg.data.sdoml.frequency,\n",
" batch_size=cfg.model.opt.batch_size,\n",
" num_workers=cfg.data.num_workers,\n",
" val_months=cfg.data.month_splits.val,\n",
" test_months=cfg.data.month_splits.test,\n",
" holdout_months=cfg.data.month_splits.holdout,\n",
" cache_dir=os.path.join(\n",
" cfg.data.sdoml.base_directory, cfg.data.sdoml.sub_directory.cache\n",
" ),\n",
")\n",
"data_module.setup()"
]
},
{
"cell_type": "code",
"execution_count": 4,
"metadata": {},
"outputs": [],
"source": [
"model = MAE(\n",
" **cfg.model.mae,\n",
" optimiser=cfg.model.opt.optimiser,\n",
" lr=cfg.model.opt.learning_rate,\n",
" weight_decay=cfg.model.opt.weight_decay,\n",
")"
]
},
{
"cell_type": "code",
"execution_count": 5,
"metadata": {},
"outputs": [
{
"name": "stderr",
"output_type": "stream",
"text": [
"GPU available: True (cuda), used: True\n",
"TPU available: False, using: 0 TPU cores\n",
"IPU available: False, using: 0 IPUs\n",
"HPU available: False, using: 0 HPUs\n",
"/opt/conda/envs/sdofm/lib/python3.10/site-packages/lighting.pytorch/trainer/connectors/logger_connector/logger_connector.py:75: Starting from v1.9.0, `tensorboardX` has been removed as a dependency of the `lighting.pytorch` package, due to potential conflicts with other packages in the ML ecosystem. For this reason, `logger=True` will use `CSVLogger` as the default logger, unless the `tensorboard` or `tensorboardX` packages are found. Please `pip install lightning[extra]` or one of them to enable TensorBoard support by default\n",
"LOCAL_RANK: 0 - CUDA_VISIBLE_DEVICES: [0,1]\n"
]
"cells": [
{
"cell_type": "code",
"execution_count": 1,
"metadata": {},
"outputs": [],
"source": [
"%load_ext autoreload\n",
"%autoreload 2"
]
},
{
"cell_type": "code",
"execution_count": 1,
"metadata": {},
"outputs": [],
"source": [
"import os\n",
"from pathlib import Path\n",
"\n",
"import lightning.pytorch as pl\n",
"import torch\n",
"import wandb\n",
"from sdofm import utils\n",
"from sdofm.datasets import SDOMLDataModule, DimmedSDOMLDataModule\n",
"from sdofm.pretraining import MAE\n",
"from sdofm.finetuning import Autocalibration"
]
},
{
"cell_type": "code",
"execution_count": 2,
"metadata": {},
"outputs": [],
"source": [
"import omegaconf\n",
"\n",
"cfg = omegaconf.OmegaConf.load(\"../experiments/pretrain_tiny.yaml\")"
]
},
{
"cell_type": "code",
"execution_count": 3,
"metadata": {},
"outputs": [
{
"name": "stdout",
"output_type": "stream",
"text": [
"[* CACHE SYSTEM *] Found cached index data in /mnt/sdoml/cache/aligndata_AIA_FULL_12min.csv.\n",
"[* CACHE SYSTEM *] Found cached normalization data in /mnt/sdoml/cache/normalizations_AIA_FULL_12min.json.\n"
]
}
],
"source": [
"data_module = SDOMLDataModule(\n",
" hmi_path=None,\n",
" aia_path=os.path.join(\n",
" cfg.data.sdoml.base_directory, cfg.data.sdoml.sub_directory.aia\n",
" ),\n",
" eve_path=None,\n",
" components=cfg.data.sdoml.components,\n",
" wavelengths=cfg.data.sdoml.wavelengths,\n",
" ions=cfg.data.sdoml.ions,\n",
" frequency=cfg.data.sdoml.frequency,\n",
" batch_size=cfg.model.opt.batch_size,\n",
" num_workers=cfg.data.num_workers,\n",
" val_months=cfg.data.month_splits.val,\n",
" test_months=cfg.data.month_splits.test,\n",
" holdout_months=cfg.data.month_splits.holdout,\n",
" cache_dir=os.path.join(\n",
" cfg.data.sdoml.base_directory, cfg.data.sdoml.sub_directory.cache\n",
" ),\n",
")\n",
"data_module.setup()"
]
},
{
"cell_type": "code",
"execution_count": 4,
"metadata": {},
"outputs": [],
"source": [
"model = MAE(\n",
" **cfg.model.mae,\n",
" optimiser=cfg.model.opt.optimiser,\n",
" lr=cfg.model.opt.learning_rate,\n",
" weight_decay=cfg.model.opt.weight_decay,\n",
")"
]
},
{
"cell_type": "code",
"execution_count": 5,
"metadata": {},
"outputs": [
{
"name": "stderr",
"output_type": "stream",
"text": [
"GPU available: True (cuda), used: True\n",
"TPU available: False, using: 0 TPU cores\n",
"IPU available: False, using: 0 IPUs\n",
"HPU available: False, using: 0 HPUs\n",
"/opt/conda/envs/sdofm/lib/python3.10/site-packages/lightning.pytorch/trainer/connectors/logger_connector/logger_connector.py:75: Starting from v1.9.0, `tensorboardX` has been removed as a dependency of the `lightning.pytorch` package, due to potential conflicts with other packages in the ML ecosystem. For this reason, `logger=True` will use `CSVLogger` as the default logger, unless the `tensorboard` or `tensorboardX` packages are found. Please `pip install lightning[extra]` or one of them to enable TensorBoard support by default\n",
"LOCAL_RANK: 0 - CUDA_VISIBLE_DEVICES: [0,1]\n"
]
},
{
"name": "stderr",
"output_type": "stream",
"text": [
"\n",
" | Name | Type | Params\n",
"-------------------------------------------------------\n",
"0 | autoencoder | MaskedAutoencoderViT3D | 3.8 M \n",
"-------------------------------------------------------\n",
"3.0 M Trainable params\n",
"786 K Non-trainable params\n",
"3.8 M Total params\n",
"15.102 Total estimated model params size (MB)\n"
]
},
{
"data": {
"application/vnd.jupyter.widget-view+json": {
"model_id": "5ad868206538466487ad6a345fd6a174",
"version_major": 2,
"version_minor": 0
},
"text/plain": [
"Sanity Checking: | | 0/? [00:00<?, ?it/s]"
]
},
"metadata": {},
"output_type": "display_data"
},
{
"data": {
"application/vnd.jupyter.widget-view+json": {
"model_id": "12362d28ef154eaa900f39552af07d08",
"version_major": 2,
"version_minor": 0
},
"text/plain": [
"Training: | | 0/? [00:00<?, ?it/s]"
]
},
"metadata": {},
"output_type": "display_data"
}
],
"source": [
"trainer = pl.Trainer(\n",
" devices=1, accelerator=cfg.experiment.accelerator, max_epochs=cfg.model.opt.epochs\n",
")\n",
"trainer.fit(model=model, datamodule=data_module)"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": []
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": []
}
],
"metadata": {
"kernelspec": {
"display_name": "sdofm",
"language": "python",
"name": "python3"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 3
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython3",
"version": "3.10.14"
},
"orig_nbformat": 4
},
"nbformat": 4,
"nbformat_minor": 2
}
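
For reference, below is a condensed, script-style sketch of the pretraining flow from notebooks/test_pretraining_mae.ipynb as it stands after this commit. Class names, config keys, and paths are taken directly from the diff above; it assumes the sdofm package is installed and that experiments/pretrain_tiny.yaml from this repository is available, with the relative config path assuming the script is run from the notebooks/ directory as in the original notebook.

import os

import lightning.pytorch as pl  # note: "lightning", not "lighting"
import omegaconf

from sdofm.datasets import SDOMLDataModule
from sdofm.pretraining import MAE

# Experiment configuration used throughout the notebook.
cfg = omegaconf.OmegaConf.load("../experiments/pretrain_tiny.yaml")

# AIA-only data module built from the SDOML config block.
data_module = SDOMLDataModule(
    hmi_path=None,
    aia_path=os.path.join(
        cfg.data.sdoml.base_directory, cfg.data.sdoml.sub_directory.aia
    ),
    eve_path=None,
    components=cfg.data.sdoml.components,
    wavelengths=cfg.data.sdoml.wavelengths,
    ions=cfg.data.sdoml.ions,
    frequency=cfg.data.sdoml.frequency,
    batch_size=cfg.model.opt.batch_size,
    num_workers=cfg.data.num_workers,
    val_months=cfg.data.month_splits.val,
    test_months=cfg.data.month_splits.test,
    holdout_months=cfg.data.month_splits.holdout,
    cache_dir=os.path.join(
        cfg.data.sdoml.base_directory, cfg.data.sdoml.sub_directory.cache
    ),
)
data_module.setup()

# Masked-autoencoder pretraining model; hyperparameters come from the same config.
model = MAE(
    **cfg.model.mae,
    optimiser=cfg.model.opt.optimiser,
    lr=cfg.model.opt.learning_rate,
    weight_decay=cfg.model.opt.weight_decay,
)

# Single-device trainer, as in the notebook's final cell.
trainer = pl.Trainer(
    devices=1,
    accelerator=cfg.experiment.accelerator,
    max_epochs=cfg.model.opt.epochs,
)
trainer.fit(model=model, datamodule=data_module)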