Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Parameterise S3DXRD notebooks #391

Merged
merged 23 commits into from
Feb 13, 2025
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
Show all changes
23 commits
Select commit Hold shift + click to select a range
5735b9c
First attempt at papermill notebooks
jadball Jan 21, 2025
6b65fc2
Tomo route notebooks parameterized
jadball Jan 22, 2025
cdd0e19
Return empty dict if you found nothing
jadball Jan 22, 2025
11efda1
Notebook tests with papermill
jadball Jan 22, 2025
42bd07c
Parameterise notebooks
jadball Jan 22, 2025
700ae60
Add papermill to optional reqs
jadball Jan 22, 2025
f070c3c
Exclude papermill tests from CI via name convention
jadball Jan 22, 2025
bddc12d
Exclude papermill test from flake8
jadball Jan 22, 2025
f84274f
Add Si cube rendered notebooks to docs
jadball Jan 22, 2025
2f7d37b
Merge branch 'FABLE-3DXRD:master' into master
jadball Jan 23, 2025
41279df
Make notebooks prerendered
jadball Jan 23, 2025
9942666
Parameterise segmentation notebook
jadball Jan 23, 2025
07885c5
Full tomographic route end-to-end now validated on real data
jadball Jan 23, 2025
8715bea
Explicitly specify all parameters
jadball Jan 23, 2025
33fdc27
Omit phase pars in dict if no phase provided
jadball Jan 24, 2025
24ec9fb
Bump version number
jadball Jan 24, 2025
bf746f7
Just use phase_str by default now
jadball Jan 24, 2025
2eab236
Merge branch 'FABLE-3DXRD:master' into master
jadball Jan 24, 2025
9ea5408
PBP end-to-end validation FeAu
jadball Jan 24, 2025
2725757
Merge branch 'master' of github.com:jadball/ImageD11
jadball Jan 24, 2025
69d5e52
Bug fixes
jadball Jan 25, 2025
9570071
Merge branch 'FABLE-3DXRD:master' into master
jadball Feb 12, 2025
76cc952
Merge branch 'FABLE-3DXRD:master' into master
jadball Feb 13, 2025
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
86 changes: 51 additions & 35 deletions ImageD11/nbGui/S3DXRD/0_segment_and_label.ipynb
Original file line number Diff line number Diff line change
Expand Up @@ -23,30 +23,24 @@
},
"outputs": [],
"source": [
"exec(open('/data/id11/nanoscope/install_ImageD11_from_git.py').read())\n",
"PYTHONPATH = setup_ImageD11_from_git( ) # ( os.path.join( os.environ['HOME'],'Code'), 'ImageD11_git' )"
"exec(open('/data/id11/nanoscope/install_ImageD11_from_git.py').read())"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "5726795e-91cf-40cf-b3a9-b114de84e017",
"id": "c3bddb80-39f9-4cd7-9cc1-59fc8d240c24",
"metadata": {
"tags": []
"tags": [
"parameters"
]
},
"outputs": [],
"source": [
"# Import needed packages\n",
"%matplotlib ipympl\n",
"import pprint\n",
"import numpy as np\n",
"import ImageD11.sinograms.dataset\n",
"import ImageD11.sinograms.lima_segmenter\n",
"import ImageD11.sinograms.assemble_label\n",
"import ImageD11.sinograms.properties\n",
"import ImageD11.nbGui.nb_utils as utils\n",
"from ImageD11.nbGui import segmenter_gui\n",
"# this cell is tagged with 'parameters'\n",
"# to view the tag, select the cell, then find the settings gear icon (right or left sidebar) and look for Cell Tags\n",
"\n",
"PYTHONPATH = setup_ImageD11_from_git( ) # ( os.path.join( os.environ['HOME'],'Code'), 'ImageD11_git' )\n",
"\n",
"# Experts : update these files for your detector if you need to\n",
"maskfile = \"/data/id11/nanoscope/Eiger/eiger_mask_E-08-0144_20240205.edf\"\n",
Expand All @@ -57,7 +51,36 @@
"dtymotor = 'dty'\n",
"\n",
"# Default segmentation options\n",
"options = { 'cut' : 1, 'pixels_in_spot' : 3, 'howmany' : 100000 }"
"options = { 'cut' : 1, 'pixels_in_spot' : 3, 'howmany' : 100000 }\n",
"\n",
"# EXPERTS: These can be provided as papermill parameters. Users, leave these as None for now...\n",
"dataroot = None\n",
"analysisroot = None\n",
"sample = None\n",
"dataset = None\n",
"\n",
"dset_prefix = \"top_\" # some common string in the names of the datasets if processing multiple scans"
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "5726795e-91cf-40cf-b3a9-b114de84e017",
"metadata": {
"tags": []
},
"outputs": [],
"source": [
"# Import needed packages\n",
"%matplotlib ipympl\n",
"import pprint\n",
"import numpy as np\n",
"import ImageD11.sinograms.dataset\n",
"import ImageD11.sinograms.lima_segmenter\n",
"import ImageD11.sinograms.assemble_label\n",
"import ImageD11.sinograms.properties\n",
"import ImageD11.nbGui.nb_utils as utils\n",
"from ImageD11.nbGui import segmenter_gui"
]
},
{
Expand All @@ -70,7 +93,8 @@
"outputs": [],
"source": [
"# Set up the file paths. Edit this if you are not at ESRF or not using the latest data policy.\n",
"dataroot, analysisroot = segmenter_gui.guess_ESRF_paths() \n",
"if dataroot is None:\n",
" dataroot, analysisroot = segmenter_gui.guess_ESRF_paths() \n",
"\n",
"if len(dataroot)==0:\n",
" print(\"Please fix in the dataroot and analysisroot folder names above!!\")\n",
Expand Down Expand Up @@ -102,7 +126,8 @@
"outputs": [],
"source": [
"# USER: Decide which sample\n",
"sample = 'FeAu_0p5_tR_nscope'"
"if sample is None:\n",
" sample = 'FeAu_0p5_tR_nscope'"
]
},
{
Expand All @@ -128,7 +153,8 @@
"outputs": [],
"source": [
"# USER: Decide which dataset\n",
"dataset = \"top_100um\""
"if dataset is None:\n",
" dataset = \"top_100um\""
]
},
{
Expand Down Expand Up @@ -258,19 +284,6 @@
"Therefore notebooks 4 and onwards should work from either the tomo or pbp route."
]
},
{
"cell_type": "code",
"execution_count": null,
"id": "703d22d0-ef82-4e08-8087-c57e76e16de1",
"metadata": {
"tags": []
},
"outputs": [],
"source": [
"if 1:\n",
" raise ValueError(\"Change the 1 above to 0 to allow 'Run all cells' in the notebook\")"
]
},
{
"cell_type": "code",
"execution_count": null,
Expand All @@ -280,13 +293,16 @@
},
"outputs": [],
"source": [
"# you can optionally skip samples\n",
"# skips_dict = {\n",
"# \"FeAu_0p5_tR_nscope\": [\"top_-50um\", \"top_-100um\"]\n",
"# }\n",
"# otherwise by default skip nothing:\n",
"skips_dict = {\n",
" \"FeAu_0p5_tR_nscope\": [\"top_-50um\", \"top_-100um\"]\n",
" ds.sample: []\n",
"}\n",
"\n",
"dset_prefix = \"top_\" # some common string in the names of the datasets (*?)\n",
"\n",
"sample_list = [\"FeAu_0p5_tR_nscope\"]\n",
"sample_list = [ds.sample, ]\n",
" \n",
"samples_dict = utils.find_datasets_to_process(dataroot, skips_dict, dset_prefix, sample_list)\n",
"\n",
Expand Down
125 changes: 88 additions & 37 deletions ImageD11/nbGui/S3DXRD/4_visualise.ipynb
Original file line number Diff line number Diff line change
Expand Up @@ -28,10 +28,43 @@
"\n",
"os.environ['OMP_NUM_THREADS'] = '1'\n",
"os.environ['OPENBLAS_NUM_THREADS'] = '1'\n",
"os.environ['MKL_NUM_THREADS'] = '1'\n",
"os.environ['MKL_NUM_THREADS'] = '1'"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"outputs": [],
"source": [
"exec(open('/data/id11/nanoscope/install_ImageD11_from_git.py').read())"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {
"tags": [
"parameters"
]
},
"outputs": [],
"source": [
"# this cell is tagged with 'parameters'\n",
"# to view the tag, select the cell, then find the settings gear icon (right or left sidebar) and look for Cell Tags\n",
"\n",
"PYTHONPATH = setup_ImageD11_from_git( ) # ( os.path.join( os.environ['HOME'],'Code'), 'ImageD11_git' )\n",
"\n",
"exec(open('/data/id11/nanoscope/install_ImageD11_from_git.py').read())\n",
"PYTHONPATH = setup_ImageD11_from_git( ) # ( os.path.join( os.environ['HOME'],'Code'), 'ImageD11_git' )"
"# dataset file to import\n",
"dset_file = 'si_cube_test/processed/Si_cube/Si_cube_S3DXRD_nt_moves_dty/Si_cube_S3DXRD_nt_moves_dty_dataset.h5'\n",
"\n",
"# which phase to index\n",
"phase_str = 'Si'\n",
"\n",
"# the minimum number of peaks you want a pixel to have to be counted\n",
"min_unique = 400\n",
"\n",
"dset_prefix = \"top_\" # some common string in the names of the datasets if processing multiple scans"
]
},
{
Expand Down Expand Up @@ -69,8 +102,6 @@
"source": [
"# USER: Pass path to dataset file\n",
"\n",
"dset_file = 'si_cube_test/processed/Si_cube/Si_cube_S3DXRD_nt_moves_dty/Si_cube_S3DXRD_nt_moves_dty_dataset.h5'\n",
"\n",
"ds = ImageD11.sinograms.dataset.load(dset_file)\n",
" \n",
"sample = ds.sample\n",
Expand Down Expand Up @@ -105,7 +136,6 @@
"outputs": [],
"source": [
"# now let's select a phase to index from our parameters json\n",
"phase_str = 'Fe'\n",
"\n",
"ref_ucell = ds.phases.unitcells[phase_str]\n",
"\n",
Expand All @@ -122,7 +152,8 @@
"source": [
"# import refinement manager\n",
"\n",
"refine = PBPRefine.from_h5(ds.refmanfile)"
"refmanpath = os.path.splitext(ds.refmanfile)[0] + f'_{phase_str}.h5'\n",
"refine = PBPRefine.from_h5(refmanpath)"
]
},
{
Expand All @@ -148,8 +179,6 @@
"source": [
"# choose the minimum number of peaks you want a pixel to have to be counted\n",
"\n",
"min_unique = 400\n",
"\n",
"refine.refinedmap.choose_best(min_unique)\n",
"\n",
"# refine.refinedmap.choose_best(min_unique)"
Expand Down Expand Up @@ -187,7 +216,7 @@
"\n",
"for i in range(3):\n",
" for j in range(3):\n",
" axs[i,j].imshow(refine.refinedmap.best_eps[:, :, i, j], origin=\"lower\", cmap=cmap, norm=normalizer)\n",
" axs[i,j].imshow(refine.refinedmap.best_eps[:, :, i, j], origin=\"lower\", cmap=cmap, norm=normalizer, interpolation='nearest')\n",
" axs[i,j].set_title(f'eps_{i+1}{j+1}')\n",
"fig.supxlabel('< Lab Y axis')\n",
"fig.supylabel('Lab X axis')\n",
Expand Down Expand Up @@ -274,7 +303,7 @@
"\n",
"for i in range(3):\n",
" for j in range(3):\n",
" axs[i,j].imshow(tmap.eps_sample[0, ..., i, j], origin=\"lower\", cmap=cmap, norm=normalizer)\n",
" axs[i,j].imshow(tmap.eps_sample[0, ..., i, j], origin=\"lower\", cmap=cmap, norm=normalizer, interpolation='nearest')\n",
" axs[i,j].set_title(f'eps_{i+1}{j+1}')\n",
"fig.supxlabel('Lab X axis --->')\n",
"fig.supylabel('Lab Y axis --->')\n",
Expand Down Expand Up @@ -330,25 +359,27 @@
"metadata": {},
"outputs": [],
"source": [
"# save the refined TensorMap to disk\n",
"\n",
"tmap.to_h5(os.path.join(ds.analysispath, 'pbp_tensormap_refined.h5'))\n",
"tmap.to_paraview(os.path.join(ds.analysispath, 'pbp_tensormap_refined.h5'))"
"# if we have a previous tomographic TensorMap, we can try to get the labels map too:\n",
"try:\n",
" tmap_tomo = TensorMap.from_h5(ds.grainsfile, h5group='TensorMap_' + phase_str)\n",
" tmap.add_map('labels', tmap_tomo.labels)\n",
"except (FileNotFoundError, OSError, KeyError):\n",
" # couldn't find one, continue anyway\n",
" pass"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {
"tags": []
},
"metadata": {},
"outputs": [],
"source": [
"# you can also do an MTEX export if you like:\n",
"# save the refined TensorMap to disk\n",
"\n",
"ctf_path = os.path.join(ds.analysispath, 'pbp_tensormap_refined.ctf')\n",
"refined_tmap_path = os.path.join(ds.analysispath, f'{ds.sample}_{ds.dset}_refined_tmap_{phase_str}.h5')\n",
"\n",
"tmap.to_ctf_mtex(ctf_path, z_index=0)"
"tmap.to_h5(refined_tmap_path)\n",
"tmap.to_paraview(refined_tmap_path)"
]
},
{
Expand All @@ -359,17 +390,22 @@
},
"outputs": [],
"source": [
"ds.save()"
"# you can also do an MTEX export if you like:\n",
"\n",
"refined_ctf_path = os.path.join(ds.analysispath, f'{ds.sample}_{ds.dset}_refined_tmap_{phase_str}.ctf')\n",
"\n",
"tmap.to_ctf_mtex(refined_ctf_path, z_index=0)"
]
},
{
"cell_type": "code",
"execution_count": null,
"metadata": {},
"metadata": {
"tags": []
},
"outputs": [],
"source": [
"if 1:\n",
" raise ValueError(\"Change the 1 above to 0 to allow 'Run all cells' in the notebook\")"
"ds.save()"
]
},
{
Expand All @@ -382,15 +418,18 @@
"# by default this will do all samples in sample_list, all datasets with a prefix of dset_prefix\n",
"# you can add samples and datasets to skip in skips_dict\n",
"\n",
"# you can optionally skip samples\n",
"# skips_dict = {\n",
"# \"FeAu_0p5_tR_nscope\": [\"top_-50um\", \"top_-100um\"]\n",
"# }\n",
"# otherwise by default skip nothing:\n",
"skips_dict = {\n",
" \"FeAu_0p5_tR_nscope\": [\"top_-50um\", \"top_-100um\"]\n",
" ds.sample: []\n",
"}\n",
"\n",
"dset_prefix = \"top\"\n",
"sample_list = [ds.sample, ]\n",
"\n",
"sample_list = [\"FeAu_0p5_tR_nscope\"]\n",
" \n",
"samples_dict = utils.find_datasets_to_process(ds.dataroot, skips_dict, dset_prefix, sample_list)\n",
"samples_dict = utils.find_datasets_to_process(rawdata_path, skips_dict, dset_prefix, sample_list)\n",
" \n",
"# manual override:\n",
"# samples_dict = {\"FeAu_0p5_tR_nscope\": [\"top_100um\", \"top_150um\"]}\n",
Expand All @@ -410,15 +449,19 @@
" ds = ImageD11.sinograms.dataset.load(dset_path)\n",
" print(f\"I have a DataSet {ds.dset} in sample {ds.sample}\")\n",
" \n",
" if not os.path.exists(ds.refoutfile):\n",
" refoutpath = os.path.splitext(ds.refoutfile)[0] + f'_{phase_str}.h5'\n",
" refmanpath = os.path.splitext(ds.refmanfile)[0] + f'_{phase_str}.h5'\n",
"\n",
" if not os.path.exists(refoutpath):\n",
" print(f\"Couldn't find PBP refinement output file for {dataset} in sample {sample}, skipping\")\n",
" continue\n",
" \n",
" if os.path.exists(os.path.join(ds.analysispath, 'pbp_tensormap_refined.h5')):\n",
" refined_tmap_path = os.path.join(ds.analysispath, f'{ds.sample}_{ds.dset}_refined_tmap_{phase_str}.h5')\n",
" if os.path.exists(refined_tmap_path):\n",
" print(f\"Already have refined TensorMap output file for {dataset} in sample {sample}, skipping\")\n",
" continue\n",
" \n",
" refine = PBPRefine.from_h5(ds.refmanfile)\n",
" refine = PBPRefine.from_h5(refmanpath)\n",
" refine.refinedmap.choose_best(min_unique)\n",
" \n",
" # first let's work out what phase we have\n",
Expand All @@ -437,10 +480,18 @@
" tmap.get_ipf_maps()\n",
" eul = tmap.euler\n",
" \n",
" tmap.to_h5(os.path.join(ds.analysispath, 'pbp_tensormap_refined.h5'))\n",
" tmap.to_paraview(os.path.join(ds.analysispath, 'pbp_tensormap_refined.h5'))\n",
" ctf_path = os.path.join(ds.analysispath, 'pbp_tensormap_refined.ctf')\n",
" tmap.to_ctf_mtex(ctf_path, z_index=0)\n",
" # if we have a previous tomographic TensorMap, we can try to get the labels map too:\n",
" try:\n",
" tmap_tomo = TensorMap.from_h5(ds.grainsfile, h5group='TensorMap_' + phase_str)\n",
" tmap.add_map('labels', tmap_tomo.labels)\n",
" except (FileNotFoundError, OSError, KeyError):\n",
" # couldn't find one, continue anyway\n",
" pass\n",
" \n",
" tmap.to_h5(refined_tmap_path)\n",
" tmap.to_paraview(refined_tmap_path)\n",
" refined_ctf_path = os.path.join(ds.analysispath, f'{ds.sample}_{ds.dset}_refined_tmap_{phase_str}.ctf')\n",
" tmap.to_ctf_mtex(refined_ctf_path, z_index=0)\n",
"\n",
" ds.save()\n",
"\n",
Expand Down
Loading
Loading