Skip to content

Commit c4dab2f

Browse files
committed
Compute projection instead of dictionary
1 parent c54b5b2 commit c4dab2f

File tree

1 file changed

+16
-92
lines changed

1 file changed

+16
-92
lines changed

nanshe_ipython.ipynb

Lines changed: 16 additions & 92 deletions
Original file line numberDiff line numberDiff line change
@@ -1136,9 +1136,10 @@
11361136
"cell_type": "markdown",
11371137
"metadata": {},
11381138
"source": [
1139-
"### Normalize Data\n",
1139+
"### Project\n",
1140+
"\n",
11401141
"* `block_frames` (`int`): number of frames to work with in each full frame block (run in parallel).\n",
1141-
"* `norm_frames` (`int`): number of frames for use during normalization of each full frame block (run in parallel)."
1142+
"* `proj_type` (`str`): type of projection to take."
11421143
]
11431144
},
11441145
{
@@ -1151,12 +1152,12 @@
11511152
"\n",
11521153
"\n",
11531154
"block_frames = 40\n",
1154-
"norm_frames = 100\n",
1155+
"proj_type = \"max\"\n",
11551156
"\n",
11561157
"\n",
11571158
"# Somehow we can't overwrite the file in the container so this is needed.\n",
1158-
"io_remove(data_basename + postfix_norm + zarr_ext)\n",
1159-
"io_remove(data_basename + postfix_norm + h5_ext)\n",
1159+
"io_remove(data_basename + postfix_dict + zarr_ext)\n",
1160+
"io_remove(data_basename + postfix_dict + h5_ext)\n",
11601161
"\n",
11611162
"\n",
11621163
"with open_zarr(data_basename + postfix_wt + zarr_ext, \"r\") as f:\n",
@@ -1172,10 +1173,14 @@
11721173
" da_imgs_flt.dtype.itemsize >= 4):\n",
11731174
" da_imgs_flt = da_imgs_flt.astype(np.float32)\n",
11741175
"\n",
1175-
" da_result = normalize_data(da_imgs)\n",
1176+
" da_result = da_imgs\n",
1177+
" if proj_type == \"max\":\n",
1178+
" da_result = da_result.max(axis=0, keepdims=True)\n",
1179+
" elif proj_type == \"std\":\n",
1180+
" da_result = da_result.std(axis=0, keepdims=True)\n",
11761181
"\n",
11771182
" # Store denoised data\n",
1178-
" with open_zarr(data_basename + postfix_norm + zarr_ext, \"w\") as f2:\n",
1183+
" with open_zarr(data_basename + postfix_dict + zarr_ext, \"w\") as f2:\n",
11791184
" result = f2.create_dataset(\n",
11801185
" \"images\",\n",
11811186
" shape=da_result.shape,\n",
@@ -1187,86 +1192,6 @@
11871192
" dask.distributed.progress(status, notebook=False)\n",
11881193
"\n",
11891194
"\n",
1190-
"zip_zarr(data_basename + postfix_norm + zarr_ext)\n",
1191-
"\n",
1192-
"with h5py.File(data_basename + postfix_norm + h5_ext, \"w\") as f2:\n",
1193-
" with open_zarr(data_basename + postfix_norm + zarr_ext, \"r\") as f1:\n",
1194-
" zarr_to_hdf5(f1, f2)\n",
1195-
"\n",
1196-
"\n",
1197-
"if __IPYTHON__:\n",
1198-
" result_image_stack = LazyZarrDataset(data_basename + postfix_norm + zarr_ext, \"images\")\n",
1199-
"\n",
1200-
" mplsv = plt.figure(FigureClass=MPLViewer)\n",
1201-
" mplsv.set_images(\n",
1202-
" result_image_stack,\n",
1203-
" vmin=par_compute_min_projection(num_frames=norm_frames)(result_image_stack).min(),\n",
1204-
" vmax=par_compute_max_projection(num_frames=norm_frames)(result_image_stack).max()\n",
1205-
" )"
1206-
]
1207-
},
1208-
{
1209-
"cell_type": "markdown",
1210-
"metadata": {},
1211-
"source": [
1212-
"### Dictionary Learning\n",
1213-
"\n",
1214-
"* `n_components` (`int`): number of basis images in the dictionary.\n",
1215-
"* `batchsize` (`int`): minibatch size to use.\n",
1216-
"* `iters` (`int`): number of iterations to run before getting dictionary.\n",
1217-
"* `lambda1` (`float`): weight for L<sup>1</sup> sparsity enforcement on sparse code.\n",
1218-
"* `lambda2` (`float`): weight for L<sup>2</sup> sparsity enforcement on sparse code.\n",
1219-
"\n",
1220-
"<br>\n",
1221-
"* `block_frames` (`int`): number of frames to work with in each full frame block (run in parallel).\n",
1222-
"* `norm_frames` (`int`): number of frames for use during normalization of each full frame block (run in parallel)."
1223-
]
1224-
},
1225-
{
1226-
"cell_type": "code",
1227-
"execution_count": null,
1228-
"metadata": {},
1229-
"outputs": [],
1230-
"source": [
1231-
"%%time\n",
1232-
"\n",
1233-
"\n",
1234-
"n_components = 50\n",
1235-
"batchsize = 256\n",
1236-
"iters = 100\n",
1237-
"lambda1 = 0.2\n",
1238-
"lambda2 = 0.0\n",
1239-
"\n",
1240-
"block_frames = 51\n",
1241-
"norm_frames = 100\n",
1242-
"\n",
1243-
"\n",
1244-
"# Somehow we can't overwrite the file in the container so this is needed.\n",
1245-
"io_remove(data_basename + postfix_dict + zarr_ext)\n",
1246-
"io_remove(data_basename + postfix_dict + h5_ext)\n",
1247-
"\n",
1248-
"result = LazyZarrDataset(data_basename + postfix_norm + zarr_ext, \"images\")\n",
1249-
"block_shape = (block_frames,) + result.shape[1:]\n",
1250-
"with open_zarr(data_basename + postfix_dict + zarr_ext, \"w\") as f2:\n",
1251-
" new_result = f2.create_dataset(\"images\", shape=(n_components,) + result.shape[1:], dtype=result.dtype, chunks=True)\n",
1252-
"\n",
1253-
" result = par_generate_dictionary(block_shape)(\n",
1254-
" result,\n",
1255-
" n_components=n_components,\n",
1256-
" out=new_result,\n",
1257-
" **{\"sklearn.decomposition.dict_learning_online\" : {\n",
1258-
" \"n_jobs\" : 1,\n",
1259-
" \"n_iter\" : iters,\n",
1260-
" \"batch_size\" : batchsize,\n",
1261-
" \"alpha\" : lambda1\n",
1262-
" }\n",
1263-
" }\n",
1264-
" )\n",
1265-
"\n",
1266-
" result_j = f2.create_dataset(\"images_j\", shape=new_result.shape, dtype=numpy.uint16, chunks=True)\n",
1267-
" par_norm_layer(num_frames=norm_frames)(result, out=result_j)\n",
1268-
"\n",
1269-
"\n",
12701195
"zip_zarr(data_basename + postfix_dict + zarr_ext)\n",
12711196
"\n",
12721197
"with h5py.File(data_basename + postfix_dict + h5_ext, \"w\") as f2:\n",
@@ -1275,15 +1200,14 @@
12751200
"\n",
12761201
"\n",
12771202
"if __IPYTHON__:\n",
1278-
" result_image_stack = LazyZarrDataset(data_basename + postfix_dict + zarr_ext, \"images\")\n",
1203+
" result_image_stack = LazyZarrDataset(data_basename + postfix_dict + zarr_ext, \"images\")[...][...]\n",
12791204
"\n",
12801205
" mplsv = plt.figure(FigureClass=MPLViewer)\n",
12811206
" mplsv.set_images(\n",
12821207
" result_image_stack,\n",
1283-
" vmin=par_compute_min_projection(num_frames=norm_frames)(result_image_stack).min(),\n",
1284-
" vmax=par_compute_max_projection(num_frames=norm_frames)(result_image_stack).max()\n",
1285-
" )\n",
1286-
" mplsv.time_nav.stime.label.set_text(\"Basis Image\")"
1208+
" vmin=result_image_stack.min(),\n",
1209+
" vmax=result_image_stack.max()\n",
1210+
" )"
12871211
]
12881212
},
12891213
{

0 commit comments

Comments
 (0)