diff --git a/.flake8 b/.flake8 deleted file mode 100644 index f36b47e..0000000 --- a/.flake8 +++ /dev/null @@ -1,7 +0,0 @@ -[flake8] -# Ignore specific error codes globally -ignore = E501 - -# Exclude specific directories from being scanned at all -exclude = - .git, diff --git a/.github/workflows/setup-data.yml b/.github/workflows/setup-data.yml index 33dcf74..279e2ae 100644 --- a/.github/workflows/setup-data.yml +++ b/.github/workflows/setup-data.yml @@ -22,7 +22,7 @@ jobs: path: data/ # The folder you want to cache # The key determines if we have a match. # Change 'v1' to 'v2' manually to force a re-download in the future. - key: test-data-v4 + key: test-data-v5 # 2. DOWNLOAD ONLY IF CACHE MISS diff --git a/.gitignore b/.gitignore index ad4a1f1..3029439 100644 --- a/.gitignore +++ b/.gitignore @@ -1,3 +1,6 @@ +test/FreeSurferColorLUT.txt +.DS_Store +.jupyter_cache/ # Created by https://www.toptal.com/developers/gitignore/api/python # Edit at https://www.toptal.com/developers/gitignore?templates=python diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index cdc8b97..edd289e 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -17,7 +17,7 @@ repos: rev: 'v0.14.7' hooks: # Run the linter. - - id: ruff + - id: ruff-check args: [ --fix ] # Run the formatter. - id: ruff-format diff --git a/README.md b/README.md index df427ac..0b87f19 100644 --- a/README.md +++ b/README.md @@ -1,6 +1,8 @@ # MRI-Toolkit -`MRI-toolkit` provides a set of features dedicated to human MRI data post-processing and analysis. The implementation is based on [gMRI2FEM](https://github.com/jorgenriseth/gMRI2FEM). +`MRI-toolkit` provides a set of features dedicated to MRI data post-processing and analysis. + +The implementation is inspired by [gMRI2FEM](https://github.com/jorgenriseth/gMRI2FEM), and some of the code is taken from that project. 
However, `MRI-toolkit` is designed to be more modular and extensible, with a focus on providing a user-friendly command-line interface (CLI) for common MRI processing tasks. ## Installation @@ -22,9 +24,27 @@ To get started with `mri-toolkit`, you can use the command-line interface (CLI) ## Features -* **File Inspection**: detailed NIfTI header analysis (affine, voxel size, shape). -* **Statistics**: Compute comprehensive statistics (volume, mean, median, std, percentiles) for MRI regions based on segmentation maps. -* **Visualization**: - * **Terminal**: View orthogonal slices (Sagittal, Coronal, Axial) directly in your console. - * **Napari**: Launch the Napari viewer for interactive 3D inspection. -* **Data Management**: Utilities to download test datasets. + +- File Inspection: detailed NIfTI header analysis (affine, voxel size, shape). + +- $T_1$ Mapping: Estimate $T_1$ relaxation times using Look-Locker or Mixed sequences, and seamlessly merge them into comprehensive Hybrid $T_1$ maps. + +- $R_1$ Relaxation Rates: Convert $T_1$ maps into $R_1$ relaxation rate maps for linear scaling with tracer concentrations. + +- Concentration Mapping: Calculate the spatial distribution of contrast agents (e.g., gadobutrol) utilizing pre- and post-contrast $T_1$ or $R_1$ maps. + +- Statistics: Compute comprehensive statistics (volume, mean, median, std, percentiles) for MRI regions based on segmentation maps. + +- Visualization: + + - Terminal: View orthogonal slices (Sagittal, Coronal, Axial) directly in your console. + + - Napari: Launch the Napari viewer for interactive 3D inspection. + +- Data Management: Utilities to download datasets. + +## Contributing +Contributions to `MRI-toolkit` are welcome! If you have an idea for a new feature, improvement, or bug fix, please open an issue or submit a pull request on GitHub. For more details on how to contribute, please see the [Contributing Guide](CONTRIBUTING.md). 
+ +## License +`MRI-toolkit` is licensed under the MIT License. See the [LICENSE](LICENSE) file for more information. diff --git a/_toc.yml b/_toc.yml index a0cce98..c64cf68 100644 --- a/_toc.yml +++ b/_toc.yml @@ -11,6 +11,11 @@ parts: - file: "docs/info.md" # About the mritk info command - file: "docs/show.md" # About the mritk show command - file: "docs/napari.md" # About the mritk napari command + - file: "docs/looklocker.md" # About the mritk looklocker command + - file: "docs/mixed.md" # About the mritk mixed command + - file: "docs/r1.md" # About the mritk r1 command + - file: "docs/hybrid.md" # About the mritk hybrid command + - file: "docs/concentration.md" # About the mritk concentration command - caption: Community chapters: diff --git a/conf.py b/conf.py index 19e2808..0285920 100644 --- a/conf.py +++ b/conf.py @@ -19,11 +19,15 @@ "third_party/*", "jupyter_execute/", "**.jupyter_cache", + "venv/*", + "test-*htmlcov/*", + "new-test-data/*", + "mritk-test-data/*", ] extensions = [ "sphinx_togglebutton", "sphinx_copybutton", - "myst_parser", + "myst_nb", "sphinx_comments", "sphinx_external_toc", "sphinx.ext.intersphinx", @@ -35,6 +39,7 @@ "sphinxcontrib.bibtex", "sphinx_codeautolink", "sphinx_multitoc_numbering", + "sphinxcontrib.mermaid", ] myst_enable_extensions = [ "amsmath", diff --git a/docs/api.rst b/docs/api.rst index dfd9885..deb1dee 100644 --- a/docs/api.rst +++ b/docs/api.rst @@ -39,47 +39,58 @@ show :members: :inherited-members: +concentration +------------- -statistics ----------- - -.. automodule:: mritk.statistics +.. automodule:: mritk.concentration :members: :inherited-members: -.. automodule:: mritk.statistics.utils - :members: - :inherited-members: +mixed +----- -.. automodule:: mritk.statistics.compute_stats +.. automodule:: mritk.mixed :members: :inherited-members: -.. automodule:: mritk.statistics.cli +looklocker +---------- + +.. automodule:: mritk.looklocker :members: :inherited-members: +utils +----- -data ----- - -.. 
automodule:: mritk.data +.. automodule:: mritk.utils :members: :inherited-members: -.. automodule:: mritk.data.io +hybrid +------ + +.. automodule:: mritk.hybrid :members: :inherited-members: -.. automodule:: mritk.data.orientation + + +statistics +---------- + +.. automodule:: mritk.stats :members: :inherited-members: -.. automodule:: mritk.data.base + +data +---- + +.. automodule:: mritk.data :members: :inherited-members: - segmentation ------------ diff --git a/docs/concentration.md b/docs/concentration.md new file mode 100644 index 0000000..a33a88c --- /dev/null +++ b/docs/concentration.md @@ -0,0 +1,88 @@ +--- +jupytext: + formats: md:myst + text_representation: + extension: .md + format_name: myst +kernelspec: + display_name: Python 3 + language: python + name: python3 +--- +# Concentration Mapping + +The Concentration module calculates the spatial distribution of a contrast agent (like gadobutrol) in the brain. + +Concentration $C$ can be estimated voxel-wise from longitudinal relaxation data comparing a post-contrast session to a pre-contrast (baseline) session. mritk supports two calculation pathways depending on whether you are working with $T_1$ times or $R_1$ rates (see R1 Maps). + +$$\frac{1}{T_1} = \frac{1}{T_{10}} + r_1 C \quad \implies \quad C = \frac{1}{r_1} \left(R_1 - R_{10}\right)$$ + +where $r_1$ is the relaxivity of the contrast agent (default: 3.2 to 4.5 $\text{s}^{-1}\text{mmol}^{-1}$). + +## Pipeline Overview + +```{mermaid} +graph TD + A[Pre-Contrast Hybrid T1] -->|T1 method| C{Compute Concentration} + B[Post-Contrast Hybrid T1] -->|T1 method| C + + A2[Pre-Contrast R1] -->|R1 method| C + B2[Post-Contrast R1] -->|R1 method| C + + M[Brain/Intracranial Mask] -.->|Optional| C + + C --> D(Tracer Concentration Map NIfTI) +``` + +## Commands + +```{code-cell} shell +!mritk concentration --help +``` + + +### 1. From $T_1$ Maps (t1) + +Calculates concentration directly from $T_1$ maps (in milliseconds). 
The command handles the inversion safely and avoids division-by-zero errors for background voxels. + +```{code-cell} shell +!mritk concentration t1 --help +``` + +#### Example Command + +```shell +mritk concentration t1 -i path/to/post_t1.nii.gz -r path/to/pre_t1.nii.gz -o path/to/concentration.nii.gz --r1 0.0045 --mask path/to/intracranial_mask.nii.gz +``` + +Gonzo: + +```shell +mritk concentration t1 \ + -i gonzo/mri-processed/mri_processed_data/sub-01/T1maps/sub-01_ses-02_T1map_hybrid.nii.gz \ + -r gonzo/mri-processed/mri_processed_data/sub-01/T1maps/sub-01_ses-01_T1map_hybrid.nii.gz \ + -o sub-01_ses-02_concentration.nii.gz \ + --r1 0.0032 \ + --mask gonzo/mri-processed/mri_processed_data/sub-01/segmentations/sub-01_seg-intracranial_binary.nii.gz +``` + +mritk concentration t1 \ + -i new-test-data/mri-processed/mri_processed_data/sub-01/T1maps/sub-01_ses-02_T1map_hybrid.nii.gz \ + -r new-test-data/mri-processed/mri_processed_data/sub-01/T1maps/sub-01_ses-01_T1map_hybrid.nii.gz \ + -o sub-01_ses-02_concentration.nii.gz \ + --r1 0.0032 \ + --mask new-test-data/mri-processed/mri_processed_data/sub-01/segmentations/sub-01_seg-intracranial_binary.nii.gz + +### 2. From $R_1$ Maps (r1) + +Calculates concentration from pre-computed $R_1$ maps. This is mathematically equivalent but slightly faster if $R_1$ maps are already available. + +```{code-cell} shell +!mritk concentration r1 --help +``` + +#### Example Command + +```shell +mritk concentration r1 -i path/to/post_r1.nii.gz -r path/to/pre_r1.nii.gz -o path/to/concentration.nii.gz --r1 0.0045 +``` diff --git a/docs/datasets.md b/docs/datasets.md index d5636db..8db2594 100644 --- a/docs/datasets.md +++ b/docs/datasets.md @@ -1,7 +1,24 @@ +--- +jupytext: + formats: md:myst + text_representation: + extension: .md + format_name: myst +kernelspec: + display_name: Python 3 + language: python + name: python3 +--- + # Datasets The `datasets` subcommand provides tools for listing and downloading example datasets. 
+ +```{code-cell} shell +!mritk datasets --help +``` + To list available datasets, use: ```bash diff --git a/docs/hybrid.md b/docs/hybrid.md new file mode 100644 index 0000000..3b24d0d --- /dev/null +++ b/docs/hybrid.md @@ -0,0 +1,57 @@ +--- +jupytext: + formats: md:myst + text_representation: + extension: .md + format_name: myst +kernelspec: + display_name: Python 3 + language: python + name: python3 +--- + +# Hybrid $T_1$ Maps + +To achieve accurate $T_1$ measurements across the entire brain space, mritk combines the Look-Locker (LL) and Mixed sequence $T_1$ maps into a single Hybrid map. + +Look-Locker is used for short $T_1$ values (brain tissue and regions with high tracer concentrations). + +Mixed Sequence is used for long $T_1$ values (CSF). + +The hybrid command seamlessly merges the two images based on a user-defined threshold (default: 1500 ms) and a specific anatomical mask (typically a CSF mask). + +## Pipeline Overview + +```{mermaid} +graph LR + A(Look-Locker T1 Map) --> D{Hybrid Merge} + B(Mixed T1 Map) --> D + C(CSF Mask) --> D + D -->|Threshold > 1500ms| E(Hybrid T1 Map) +``` + +## Command Usage + + +```{code-cell} shell +!mritk hybrid --help +``` + +## Example Command + +```shell +mritk hybrid -l path/to/ll_t1.nii.gz -m path/to/mixed_t1.nii.gz -c path/to/csf_mask.nii.gz -o path/to/hybrid_t1.nii.gz --threshold 1500.0 +``` + + +Gonzo: + +```shell +mritk hybrid \ + -l gonzo/mri-processed/mri_processed_data/sub-01/registered/sub-01_ses-02_acq-looklocker_T1map_registered.nii.gz \ + -m gonzo/mri-processed/mri_processed_data/sub-01/registered/sub-01_ses-02_acq-mixed_T1map_registered.nii.gz \ + -c gonzo/mri-processed/mri_processed_data/sub-01/segmentations/sub-01_seg-csf_binary.nii.gz \ + -o sub-01_ses-02_T1map_hybrid.nii.gz \ + --threshold 1500 \ + --erode 1 +``` diff --git a/docs/info.md b/docs/info.md index 09eac20..2ddc990 100644 --- a/docs/info.md +++ b/docs/info.md @@ -1,18 +1,30 @@ +--- +jupytext: + formats: md:myst + text_representation: + 
extension: .md + format_name: myst +kernelspec: + display_name: Python 3 + language: python + name: python3 +--- + # Info command The `info` command allows you to quickly inspect the metadata of an MRI file. It displays the image shape, voxel size, data type, and the affine transformation matrix. ## Usage -```bash -mritk info [OPTIONS] +```{code-cell} shell +!mritk info --help ``` -**Arguments:** -* `file`: Path to the file to display information about. +### Example Command -**Options:** -* `--json`: Output information in JSON format. Useful for programmatic parsing. +```bash +mritk info path/to/image.nii.gz +``` ![info](https://github.com/user-attachments/assets/fc0e734d-3c94-48fa-8e25-3e65bfc86ebe) diff --git a/docs/looklocker.md b/docs/looklocker.md new file mode 100644 index 0000000..1e40c18 --- /dev/null +++ b/docs/looklocker.md @@ -0,0 +1,104 @@ +--- +jupytext: + formats: md:myst + text_representation: + extension: .md + format_name: myst +kernelspec: + display_name: Python 3 + language: python + name: python3 +--- + +# Look-Locker $T_1$ Mapping + +The Look-Locker (LL) module is used to estimate $T_1$ relaxation times from a 4D Look-Locker inversion recovery dataset. + +As described in the Gonzo dataset, the Look-Locker sequence provides excellent accuracy for tissues with short $T_1$ times (such as gray/white matter and regions with high tracer concentrations). The toolkit computes the $T_1$ time voxel-wise by fitting a theoretical recovery curve to the longitudinal magnetization signal. + +## Pipeline Overview + +```{mermaid} +graph TD + A[Raw Look-Locker DICOM] -->|dcm2ll| B(4D Look-Locker NIfTI) + A -->|dcm2ll| C(Trigger Times .txt) + B -->|t1| D(Raw T1 Map NIfTI) + C -->|t1| D + D -->|postprocess| E(Cleaned T1 Map NIfTI) +``` + +## Commands + +```{code-cell} shell +!mritk looklocker --help +``` + + +### 1. 
DICOM to NIfTI (dcm2ll) + +Converts scanner-native Look-Locker DICOM files to a standardized 4D NIfTI format and extracts the nominal cardiac trigger delay times into a sidecar text file. + +```{code-cell} shell +!mritk looklocker dcm2ll --help +``` + +#### Example Command + +```bash +mritk looklocker dcm2ll -i path/to/looklocker.dcm -o path/to/ll_output.nii.gz +``` + +### 2. Compute $T_1$ Map (t1) + +Fits the voxel-wise Levenberg-Marquardt optimization curve to estimate $T_1$ times (in milliseconds) from the 4D Look-Locker NIfTI. + +```{code-cell} shell +!mritk looklocker t1 --help +``` + +#### Example Command + +```bash +mritk looklocker t1 -i path/to/ll_output.nii.gz -t path/to/ll_output_trigger_times.txt -o path/to/t1_raw.nii.gz +``` + +Gonzo: + +```shell +mritk looklocker t1 \ + -i gonzo/mri-dataset/mri_dataset/sub-01/ses-01/anat/sub-01_ses-01_acq-looklocker_IRT1.nii.gz \ + -t gonzo/mri-dataset/mri_dataset/sub-01/ses-01/anat/sub-01_ses-01_acq-looklocker_IRT1_trigger_times.txt \ + -o sub-01_ses-01_acq-looklocker_T1map_raw.nii.gz +``` + +### 3. Post-Processing (postprocess) + +Raw $T_1$ maps often contain noisy fits or values outside physiological boundaries. The postprocess command applies a quality-control pipeline that: + +Automatically masks the brain/head (if no explicit mask is provided). + +Removes extreme outliers (default bounds: 100 ms to 10000 ms). + +Iteratively fills internal NaNs (holes) using a smart Gaussian filter. 
+ +```{code-cell} shell +!mritk looklocker postprocess --help +``` + +#### Example Command + +```bash +mritk looklocker postprocess -i path/to/t1_raw.nii.gz -o path/to/t1_clean.nii.gz --t1-low 100.0 --t1-high 5000.0 +``` + +Gonzo: + +(here input is the raw T1 map from the previous step) + +```shell +mritk looklocker postprocess \ + -i sub-01_ses-01_acq-looklocker_T1map_raw.nii.gz \ + -o sub-01_ses-01_acq-looklocker_T1map.nii.gz \ + --t1-low 100.0 \ + --t1-high 6000.0 +``` diff --git a/docs/mixed.md b/docs/mixed.md new file mode 100644 index 0000000..c5062d7 --- /dev/null +++ b/docs/mixed.md @@ -0,0 +1,105 @@ +--- +jupytext: + formats: md:myst + text_representation: + extension: .md + format_name: myst +kernelspec: + display_name: Python 3 + language: python + name: python3 +--- + +# Mixed Sequence $T_1$ Mapping + +The Mixed sequence module estimates $T_1$ times by combining a Spin-Echo (SE) and an Inversion-Recovery (IR) acquisition. + +While the Look-Locker sequence struggles with long relaxation times due to short acquisition windows, the Mixed sequence is specifically designed to accurately estimate long $T_1$ times, such as those found in Cerebrospinal Fluid (CSF). $T_1$ is estimated by solving the non-linear ratio of the IR and SE signals. + +```{caution} +Because the Mixed sequence is highly sensitive to noise in short $T_1$ tissues (like gray matter), the resulting $T_1$ map must be post-processed to mask out non-CSF areas. +``` + +## Pipeline Overview + +```{mermaid} +graph TD + A[Raw Mixed DICOM] -->|dcm2mixed| B(SE Modulus NIfTI) + A -->|dcm2mixed| C(IR Real NIfTI) + A -->|dcm2mixed| D(Metadata JSON) + B -->|t1| E(Raw Mixed T1 Map) + C -->|t1| E + D -->|t1| E + E -->|postprocess| F(Masked CSF T1 Map) + B -->|postprocess| F +``` + +## Commands + + +```{code-cell} shell +!mritk mixed --help +``` + +### 1. 
DICOM to NIfTI (dcm2mixed) + +Splits a Mixed sequence DICOM file into its independent subvolumes (e.g., SE-modulus, IR-real) and saves the required sequence timing metadata (TR, TE, TI, ETL) into a JSON sidecar. + +```{code-cell} shell +!mritk mixed dcm2mixed --help +``` + +#### Example Command + + +```bash +mritk mixed dcm2mixed -i path/to/mixed.dcm -o path/to/output_base +``` + +### 2. Compute $T_1$ Map (t1) + +Generates the $T_1$ map based on the signal ratio between the Inversion-Recovery and Spin-Echo sequences. + +```{code-cell} shell +!mritk mixed t1 --help +``` + +#### Example Command + +```bash +mritk mixed t1 -s path/to/output_base_SE-modulus.nii.gz -i path/to/output_base_IR-corrected-real.nii.gz -m path/to/output_base_meta.json -o path/to/mixed_t1_raw.nii.gz +``` + +Gonzo: + +```shell +mritk mixed t1 \ + -s gonzo/mri-dataset/mri_dataset/sub-01/ses-01/mixed/sub-01_ses-01_acq-mixed_SE-modulus.nii.gz \ + -i gonzo/mri-dataset/mri_dataset/sub-01/ses-01/mixed/sub-01_ses-01_acq-mixed_IR-corrected-real.nii.gz \ + -m gonzo/mri-dataset/mri_dataset/sub-01/ses-01/mixed/sub-01_ses-01_acq-mixed_meta.json \ + -o sub-01_ses-01_acq-mixed_T1map_raw.nii.gz +``` + +### 3. Post-Processing (postprocess) + +Masks out non-fluid areas from the Mixed $T_1$ map. It derives a mask dynamically from the original SE sequence using Li thresholding and erodes the mask to avoid partial-volume effects at tissue boundaries. 
+ +```{code-cell} shell +!mritk mixed postprocess --help +``` + +#### Example Command +```bash +mritk mixed postprocess -s path/to/output_base_SE-modulus.nii.gz -t path/to/mixed_t1_raw.nii.gz -o path/to/mixed_t1_clean.nii.gz +``` + +Gonzo: + +(here we use the original SE modulus image as the source for mask generation, and the t1 map from the previous step as the input for post-processing) + +```shell +mritk mixed postprocess \ + -s gonzo/mri-dataset/mri_dataset/sub-01/ses-01/mixed/sub-01_ses-01_acq-mixed_SE-modulus.nii.gz \ + -t sub-01_ses-01_acq-mixed_T1map_raw.nii.gz \ + -o sub-01_ses-01_acq-mixed_T1map_clean.nii.gz +``` diff --git a/docs/r1.md b/docs/r1.md new file mode 100644 index 0000000..04f689b --- /dev/null +++ b/docs/r1.md @@ -0,0 +1,43 @@ +--- +jupytext: + formats: md:myst + text_representation: + extension: .md + format_name: myst +kernelspec: + display_name: Python 3 + language: python + name: python3 +--- + +# $R_1$ Relaxation Rates + +The $R_1$ module provides utilities to convert longitudinal relaxation times ($T_1$) into relaxation rates ($R_1$). + +The fundamental relationship is $R_1 = \frac{1}{T_1}$. In MRI studies utilizing contrast agents, $R_1$ scales linearly with the tracer concentration, making it a highly convenient format for transport simulations and Concentration Maps. + +## Pipeline Overview + +```{mermaid} +graph LR + A(T1 Map NIfTI) -->|t12r1| B(R1 Map NIfTI) + B --> C(Concentration Estimation) +``` + +## Command Usage + +```{code-cell} shell +!mritk t12r1 --help +``` + + +### Example Command + +```shell +mritk t12r1 -i path/to/hybrid_t1.nii.gz -o path/to/hybrid_r1.nii.gz --scale 1000.0 +``` + + +## Next Steps + +Once you have generated $R_1$ maps for both your baseline and post-contrast sessions, you can generate concentration maps. See the Concentration Documentation for more details. 
diff --git a/docs/show.md b/docs/show.md index 6815f63..24ac8b2 100644 --- a/docs/show.md +++ b/docs/show.md @@ -1,3 +1,15 @@ +--- +jupytext: + formats: md:myst + text_representation: + extension: .md + format_name: myst +kernelspec: + display_name: Python 3 + language: python + name: python3 +--- + # Show Command The `show` command provides a quick way to visualize MRI data directly in your terminal. @@ -15,18 +27,16 @@ ## Usage - ```bash - mritk show [OPTIONS] + ```{code-cell} shell + !mritk show --help ``` - **Arguments:** - * `file`: Path to the MRI file to show. + ### Example Command + + ```bash + mritk show path/to/image.nii.gz + ``` - **Options:** - * `--cmap `: Colormap to use (default: `gray`). - * `--slice-x `: Relative position (0-1) of the sagittal slice (default: `0.5`). - * `--slice-y `: Relative position (0-1) of the coronal slice (default: `0.5`). - * `--slice-z `: Relative position (0-1) of the axial slice (default: `0.5`). %% [markdown] ## Example diff --git a/pyproject.toml b/pyproject.toml index f8f354b..998af4e 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -25,7 +25,8 @@ dependencies = [ "scipy", "scikit-image", "pydicom", - "dcm2niix"] + "dcm2niix", +] [project.optional-dependencies] show = [ @@ -56,8 +57,9 @@ docs = [ "linkify-it-py", "sphinx-design", "sphinx-book-theme", - "myst-parser", + "myst-nb", "sphinx-multitoc-numbering", + "sphinxcontrib-mermaid", ] all = ["mritk[extra,test,pypi,show,napari]"] @@ -112,11 +114,11 @@ target-version = "py312" [tool.ruff.lint] # Enable pycodestyle (`E`) and Pyflakes (`F`) codes by default. -select = ["E", "F"] +select = ["E", "F", "I"] ignore = ["E402", "E741", "E743", "E731"] # Allow autofix for all enabled rules (when `--fix`) is provided. -fixable = ["A", "B", "C", "D", "E", "F"] +fixable = ["A", "B", "C", "D", "E", "F", "I"] unfixable = [] # Allow unused variables when underscore-prefixed. 
@@ -128,6 +130,34 @@ dummy-variable-rgx = "^(_+|(_+[a-zA-Z0-9_]*[a-zA-Z0-9]+?))$" max-complexity = 10 +[tool.ruff.lint.pydocstyle] +convention = "google" + + +[tool.ruff.lint.isort] +known-first-party = ["mritk"] +known-third-party = [ + "tqdm", + "numpy", + "rich-argparse", + "nibabel", + "pandas", + "scipy", + "scikit-image", + "pydicom", + "dcm2niix", + "pytest", +] +section-order = [ + "future", + "standard-library", + "third-party", + "first-party", + "local-folder", +] + + + [tool.bumpversion] allow_dirty = false commit = true diff --git a/src/mritk/__init__.py b/src/mritk/__init__.py index 901db5f..0e93ba9 100644 --- a/src/mritk/__init__.py +++ b/src/mritk/__init__.py @@ -5,8 +5,17 @@ from importlib.metadata import metadata -from . import data, segmentation, statistics - +from . import ( + concentration, + data, + hybrid, + looklocker, + mixed, + r1, + segmentation, + stats, + utils, +) meta = metadata("mritk") __version__ = meta["Version"] @@ -19,5 +28,11 @@ __all__ = [ "data", "segmentation", - "statistics", + "concentration", + "utils", + "looklocker", + "mixed", + "hybrid", + "r1", + "stats", ] diff --git a/src/mritk/cli.py b/src/mritk/cli.py index c6a6a21..23f28ac 100644 --- a/src/mritk/cli.py +++ b/src/mritk/cli.py @@ -1,21 +1,25 @@ +"""MRI-toolkit provides a set of features dedicated to MRI data post-processing and analysis.""" + +import argparse import logging from importlib.metadata import metadata from pathlib import Path -import argparse -from typing import Sequence, Optional +from typing import Optional, Sequence +from rich.logging import RichHandler from rich_argparse import RichHelpFormatter -from . import datasets, info, statistics, show, napari +from . 
import concentration, datasets, hybrid, info, looklocker, mixed, napari, r1, show, stats def version_info(): - from rich.console import Console - from rich.table import Table - from rich import box import sys + import nibabel as nib import numpy as np + from rich import box + from rich.console import Console + from rich.table import Table console = Console() @@ -41,15 +45,21 @@ def version_info(): console.print(table) +def add_extra_arguments(parser: argparse.ArgumentParser): + parser.add_argument("--no-rich", action="store_true", help="Disable rich logging and use standard console output.") + parser.add_argument("--logfile", type=Path, help="Path to a log file to save logs (optional).") + parser.add_argument("-v", "--verbose", action="store_true", help="Enable verbose logging") + + def setup_parser(): - parser = argparse.ArgumentParser(formatter_class=RichHelpFormatter) + parser = argparse.ArgumentParser(description=__doc__, formatter_class=RichHelpFormatter) parser.add_argument("--version", action="store_true") subparsers = parser.add_subparsers(dest="command") # Download test data parser datasets_parser = subparsers.add_parser("datasets", help="Download datasets", formatter_class=parser.formatter_class) - datasets.add_arguments(datasets_parser) + datasets.add_arguments(datasets_parser, extra_args_cb=add_extra_arguments) info_parser = subparsers.add_parser("info", help="Display information about a file", formatter_class=parser.formatter_class) info_parser.add_argument("file", type=Path, help="File to display information about") @@ -57,7 +67,7 @@ def setup_parser(): info_parser.add_argument("--json", action="store_true", help="Output information in JSON format") stats_parser = subparsers.add_parser("stats", help="Compute MRI statistics", formatter_class=parser.formatter_class) - statistics.cli.add_arguments(stats_parser) + stats.add_arguments(stats_parser) show_parser = subparsers.add_parser("show", help="Show MRI data in a terminal", 
formatter_class=parser.formatter_class) show.add_arguments(show_parser) @@ -65,6 +75,31 @@ def setup_parser(): napari_parser = subparsers.add_parser("napari", help="Show MRI data using napari", formatter_class=parser.formatter_class) napari.add_arguments(napari_parser) + looklocker_parser = subparsers.add_parser( + "looklocker", help="Process Look-Locker data", formatter_class=parser.formatter_class + ) + looklocker.add_arguments(looklocker_parser, extra_args_cb=add_extra_arguments) + + hybrid_parser = subparsers.add_parser( + "hybrid", help="Generate a hybrid T1 map by merging Look-Locker and Mixed maps.", formatter_class=parser.formatter_class + ) + hybrid.add_arguments(hybrid_parser, extra_args_cb=add_extra_arguments) + + mixed_parser = subparsers.add_parser( + "mixed", help="Generate a Mixed T1 map from Mixed sequence data.", formatter_class=parser.formatter_class + ) + mixed.add_arguments(mixed_parser, extra_args_cb=add_extra_arguments) + + t1_to_r1_parser = subparsers.add_parser( + "t12r1", help="Convert a T1 map to an R1 map.", formatter_class=parser.formatter_class + ) + r1.add_arguments(t1_to_r1_parser, extra_args_cb=add_extra_arguments) + + concentration_parser = subparsers.add_parser( + "concentration", help="Compute concentration maps.", formatter_class=parser.formatter_class + ) + concentration.add_arguments(concentration_parser, extra_args_cb=add_extra_arguments) + return parser @@ -74,7 +109,20 @@ def dispatch(parser: argparse.ArgumentParser, argv: Optional[Sequence[str]] = No if args.pop("version"): version_info() return 0 - logging.basicConfig(level=logging.INFO, format="%(asctime)s - %(levelname)s - %(message)s") + verbose = args.pop("verbose", False) + if verbose: + level = logging.DEBUG + else: + level = logging.INFO + + no_rich = args.pop("no_rich", False) + handlers: list[logging.Handler] = [logging.StreamHandler()] if no_rich else [RichHandler()] + + logfile = args.pop("logfile", None) + if logfile: + 
handlers.append(logging.FileHandler(logfile)) + + logging.basicConfig(level=level, format="%(asctime)s - %(levelname)s - %(message)s", handlers=handlers) command = args.pop("command") logger = logging.getLogger(__name__) try: @@ -84,11 +132,22 @@ def dispatch(parser: argparse.ArgumentParser, argv: Optional[Sequence[str]] = No file = args.pop("file") info.nifty_info(file, json_output=args.pop("json")) elif command == "stats": - statistics.cli.dispatch(args) + stats.dispatch(args) elif command == "show": show.dispatch(args) elif command == "napari": napari.dispatch(args) + elif command == "looklocker": + looklocker.dispatch(args) + elif command == "hybrid": + hybrid.dispatch(args) + elif command == "mixed": + mixed.dispatch(args) + elif command == "t12r1": + r1.dispatch(args) + elif command == "concentration": + concentration.dispatch(args) + else: logger.error(f"Unknown command {command}") parser.print_help() diff --git a/src/mritk/concentration.py b/src/mritk/concentration.py new file mode 100644 index 0000000..cf1960c --- /dev/null +++ b/src/mritk/concentration.py @@ -0,0 +1,272 @@ +# Concentration module + +# Copyright (C) 2026 Jørgen Riseth (jnriseth@gmail.com) +# Copyright (C) 2026 Cécile Daversin-Catty (cecile@simula.no) +# Copyright (C) 2026 Simula Research Laboratory + +import argparse +import logging +from collections.abc import Callable +from pathlib import Path + +import numpy as np + +from .data import MRIData +from .testing import assert_same_space + +logger = logging.getLogger(__name__) + + +def concentration_from_T1_expr(t1: np.ndarray, t1_0: np.ndarray, r1: float) -> np.ndarray: + """ + Computes tracer concentration from T1 relaxation times. + + Formula: C = (1 / r1) * ((1 / T1) - (1 / T1_0)) + + Args: + t1 (np.ndarray): Array of post-contrast T1 relaxation times. + t1_0 (np.ndarray): Array of pre-contrast (baseline) T1 relaxation times. + r1 (float): Relaxivity of the contrast agent. + + Returns: + np.ndarray: Computed concentration array. 
+ """ + return (1.0 / r1) * ((1.0 / t1) - (1.0 / t1_0)) + + +def concentration_from_R1_expr(r1_map: np.ndarray, r1_0_map: np.ndarray, r1: float) -> np.ndarray: + """ + Computes tracer concentration from R1 relaxation rates. + + Formula: C = (1 / r1) * (R1 - R1_0) + + Args: + r1_map (np.ndarray): Array of post-contrast R1 relaxation rates. + r1_0_map (np.ndarray): Array of pre-contrast (baseline) R1 relaxation rates. + r1 (float): Relaxivity of the contrast agent. + + Returns: + np.ndarray: Computed concentration array. + """ + return (1.0 / r1) * (r1_map - r1_0_map) + + +def compute_concentration_from_T1_array( + t1_data: np.ndarray, t10_data: np.ndarray, r1: float, mask: np.ndarray | None = None +) -> np.ndarray: + """ + Computes the concentration map array, handling masking and avoiding division by zero. + + Args: + t1_data (np.ndarray): 3D numpy array of post-contrast T1 values. + t10_data (np.ndarray): 3D numpy array of pre-contrast T1 values. + r1 (float): Relaxivity of the contrast agent. + mask (Optional[np.ndarray], optional): Boolean mask restricting the computation area. + Defaults to None. + + Returns: + np.ndarray: A 3D array of computed concentrations. Invalid voxels (unmasked or + where T1 <= 1e-10) are set to NaN. 
+ """ + logger.info("Computing concentration map from T1 arrays") + # Create a validity mask: T1 values must be > 1e-10 to safely invert without overflow + valid_mask = (t1_data > 1e-10) & (t10_data > 1e-10) + logger.debug(f"Initial valid voxel count based on T1 thresholds: {np.sum(valid_mask)}") + if mask is not None: + logger.debug("Applying additional mask to concentration computation") + valid_mask &= mask.astype(bool) + + logger.debug(f"Final valid voxel count after applying mask: {np.sum(valid_mask)}") + concentrations = np.full_like(t10_data, np.nan, dtype=np.single) + + # Compute concentration strictly on valid voxels + concentrations[valid_mask] = concentration_from_T1_expr(t1=t1_data[valid_mask], t1_0=t10_data[valid_mask], r1=r1) + + return concentrations + + +def concentration_from_T1( + input_path: Path, + reference_path: Path, + output_path: Path | None = None, + r1: float = 0.0045, + mask_path: Path | None = None, +) -> MRIData: + """ + I/O wrapper to generate a contrast agent concentration map from NIfTI T1 maps. + + Loads the post-contrast and baseline T1 maps, ensures they occupy the same + physical space, computes the concentration map, and optionally saves it to disk. + + Args: + input_path (Path): Path to the post-contrast T1 map NIfTI file. + reference_path (Path): Path to the baseline (pre-contrast) T1 map NIfTI file. + output_path (Path | None, optional): Path to save the resulting concentration map. Defaults to None. + r1 (float, optional): Contrast agent relaxivity. Defaults to 0.0045. + mask_path (Path | None, optional): Path to a boolean mask NIfTI file. Defaults to None. + + Returns: + MRIData: An MRIData object containing the concentration array and the affine matrix. 
+ """ + logger.info("Computing concentration map from T1 maps.") + logger.debug(f"Input T1 path: {input_path}") + logger.debug(f"Reference T1 path: {reference_path}") + logger.debug(f"Output path: {output_path}") + logger.debug(f"Relaxivity (r1): {r1}") + logger.debug(f"Mask path: {mask_path}") + t1_mri = MRIData.from_file(input_path, dtype=np.single) + t10_mri = MRIData.from_file(reference_path, dtype=np.single) + + logger.debug(f"Input T1 shape: {t1_mri.data.shape}") + logger.debug(f"Reference T1 shape: {t10_mri.data.shape}") + logger.debug(f"Input T1 affine: {t1_mri.affine}") + logger.debug(f"Reference T1 affine: {t10_mri.affine}") + assert_same_space(t1_mri, t10_mri) + + mask_data = None + if mask_path is not None: + mask_mri = MRIData.from_file(mask_path, dtype=bool) + assert_same_space(mask_mri, t10_mri) + mask_data = mask_mri.data + + concentrations_array = compute_concentration_from_T1_array(t1_data=t1_mri.data, t10_data=t10_mri.data, r1=r1, mask=mask_data) + + mri_data = MRIData(data=concentrations_array, affine=t10_mri.affine) + + if output_path is not None: + logger.info(f"Saving concentration map to {output_path}") + mri_data.save(output_path, dtype=np.single) + + else: + logger.info("No output path provided, returning concentration map as MRIData object without saving.") + + return mri_data + + +def compute_concentration_from_R1_array( + r1_data: np.ndarray, r10_data: np.ndarray, r1: float, mask: np.ndarray | None = None +) -> np.ndarray: + """ + Computes the concentration map array from R1 maps, handling masking. + + Unlike T1 maps, R1 calculations do not suffer from division-by-zero + errors, but we still ensure we only operate on finite values and within + the provided mask. + + Args: + r1_data (np.ndarray): 3D numpy array of post-contrast R1 values. + r10_data (np.ndarray): 3D numpy array of pre-contrast R1 values. + r1 (float): Relaxivity of the contrast agent. + mask (np.ndarray | None, optional): Boolean mask restricting the computation area. 
+ Defaults to None. + + Returns: + np.ndarray: A 3D array of computed concentrations. Invalid voxels (unmasked + or where R1 is not finite) are set to NaN. + """ + # Create a validity mask: limit to finite floating point numbers + valid_mask = np.isfinite(r1_data) & np.isfinite(r10_data) + + if mask is not None: + valid_mask &= mask.astype(bool) + + concentrations = np.full_like(r10_data, np.nan, dtype=np.single) + + # Compute concentration strictly on valid voxels + concentrations[valid_mask] = concentration_from_R1_expr(r1_map=r1_data[valid_mask], r1_0_map=r10_data[valid_mask], r1=r1) + + return concentrations + + +def concentration_from_R1( + input_path: Path, + reference_path: Path, + output_path: Path | None = None, + r1: float = 0.0045, + mask_path: Path | None = None, +) -> MRIData: + """ + I/O wrapper to generate a contrast agent concentration map from NIfTI R1 maps. + + Loads the post-contrast and baseline R1 maps, ensures they occupy the same + physical space, computes the concentration map, and optionally saves it to disk. + + Args: + input_path (Path): Path to the post-contrast R1 map NIfTI file. + reference_path (Path): Path to the baseline (pre-contrast) R1 map NIfTI file. + output_path (Path | None, optional): Path to save the resulting concentration map. Defaults to None. + r1 (float, optional): Contrast agent relaxivity. Defaults to 0.0045. + mask_path (Path | None, optional): Path to a boolean mask NIfTI file. Defaults to None. + + Returns: + MRIData: An MRIData object containing the concentration array and the affine matrix. 
+ """ + r1_mri = MRIData.from_file(input_path, dtype=np.single) + r10_mri = MRIData.from_file(reference_path, dtype=np.single) + assert_same_space(r1_mri, r10_mri) + + mask_data = None + if mask_path is not None: + mask_mri = MRIData.from_file(mask_path, dtype=bool) + assert_same_space(mask_mri, r10_mri) + mask_data = mask_mri.data + + concentrations_array = compute_concentration_from_R1_array(r1_data=r1_mri.data, r10_data=r10_mri.data, r1=r1, mask=mask_data) + + mri_data = MRIData(data=concentrations_array, affine=r10_mri.affine) + + if output_path is not None: + mri_data.save(output_path, dtype=np.single) + + return mri_data + + +def add_arguments( + parser: argparse.ArgumentParser, + extra_args_cb: Callable[[argparse.ArgumentParser], None] | None = None, +) -> None: + subparsers = parser.add_subparsers(dest="concentration-command", required=True) + + t1_parser = subparsers.add_parser("t1", help="Compute concentration from T1 maps.", formatter_class=parser.formatter_class) + t1_parser.add_argument("-i", "--input", type=Path, required=True, help="Path to the post-contrast T1 map (NIfTI).") + t1_parser.add_argument( + "-r", "--reference", type=Path, required=True, help="Path to the baseline (pre-contrast) T1 map (NIfTI)." + ) + t1_parser.add_argument("-o", "--output", type=Path, help="Path to save the resulting concentration map (NIfTI).") + t1_parser.add_argument("--r1", type=float, default=0.0045, help="Relaxivity of the contrast agent (default: 0.0045).") + t1_parser.add_argument("--mask", type=Path, help="Path to a boolean mask NIfTI file to restrict computation (optional).") + + r1_parser = subparsers.add_parser("r1", help="Compute concentration from R1 maps.", formatter_class=parser.formatter_class) + r1_parser.add_argument("-i", "--input", type=Path, required=True, help="Path to the post-contrast R1 map (NIfTI).") + r1_parser.add_argument( + "-r", "--reference", type=Path, required=True, help="Path to the baseline (pre-contrast) R1 map (NIfTI)." 
+ ) + r1_parser.add_argument("-o", "--output", type=Path, help="Path to save the resulting concentration map (NIfTI).") + r1_parser.add_argument("--r1", type=float, default=0.0045, help="Relaxivity of the contrast agent (default: 0.0045).") + r1_parser.add_argument("--mask", type=Path, help="Path to a boolean mask NIfTI file to restrict computation (optional).") + + if extra_args_cb is not None: + extra_args_cb(t1_parser) + extra_args_cb(r1_parser) + + +def dispatch(args): + command = args.pop("concentration-command") + if command == "t1": + return concentration_from_T1( + input_path=args.pop("input"), + reference_path=args.pop("reference"), + output_path=args.pop("output"), + r1=args.pop("r1"), + mask_path=args.pop("mask"), + ) + elif command == "r1": + return concentration_from_R1( + input_path=args.pop("input"), + reference_path=args.pop("reference"), + output_path=args.pop("output"), + r1=args.pop("r1"), + mask_path=args.pop("mask"), + ) + else: + raise ValueError(f"Unknown concentration command: {command}") diff --git a/src/mritk/concentration/__init__.py b/src/mritk/concentration/__init__.py deleted file mode 100644 index 9e93ffa..0000000 --- a/src/mritk/concentration/__init__.py +++ /dev/null @@ -1,8 +0,0 @@ -# Copyright (C) 2026 Jørgen Riseth (jnriseth@gmail.com) -# Copyright (C) 2026 Cécile Daversin-Catty (cecile@simula.no) -# Copyright (C) 2026 Simula Research Laboratory - - -from . 
import concentration - -__all__ = ["concentration"] diff --git a/src/mritk/concentration/concentration.py b/src/mritk/concentration/concentration.py deleted file mode 100644 index 08c0a67..0000000 --- a/src/mritk/concentration/concentration.py +++ /dev/null @@ -1,54 +0,0 @@ -"""Concentration maps module - -Copyright (C) 2026 Jørgen Riseth (jnriseth@gmail.com) -Copyright (C) 2026 Cécile Daversin-Catty (cecile@simula.no) -Copyright (C) 2026 Simula Research Laboratory -""" - -from pathlib import Path -from typing import Optional - -import numpy as np -from ..data.base import MRIData -from ..data.io import load_mri_data, save_mri_data -from ..data.orientation import assert_same_space - - -def concentration_from_T1(T1: np.ndarray, T1_0: np.ndarray, r1: float) -> np.ndarray: - C = 1 / r1 * (1 / T1 - 1 / T1_0) - return C - - -def concentration_from_R1(R1: np.ndarray, R1_0: np.ndarray, r1: float) -> np.ndarray: - C = 1 / r1 * (R1 - R1_0) - return C - - -def concentration( - input: Path, - reference: Path, - output: Optional[Path] = None, - r1: float = 0.0045, - mask: Optional[Path] = None, -) -> MRIData: - T1_mri = load_mri_data(input, np.single) - T10_mri = load_mri_data(reference, np.single) - assert_same_space(T1_mri, T10_mri) - - if mask is not None: - mask_mri = load_mri_data(mask, bool) - assert_same_space(mask_mri, T10_mri) - mask_data = mask_mri.data * (T10_mri.data > 1e-10) * (T1_mri.data > 1e-10) - T1_mri.data *= mask_data - T10_mri.data *= mask_data - else: - mask_data = (T10_mri.data > 1e-10) * (T1_mri.data > 1e-10) - T1_mri.data[~mask_data] = np.nan - T10_mri.data[~mask_data] = np.nan - - concentrations = np.nan * np.zeros_like(T10_mri.data) - concentrations[mask_data] = concentration_from_T1(T1=T1_mri.data[mask_data], T1_0=T10_mri.data[mask_data], r1=r1) - mri_data = MRIData(data=concentrations, affine=T10_mri.affine) - if output is not None: - save_mri_data(mri_data, output, np.single) - return mri_data diff --git a/src/mritk/data/orientation.py 
b/src/mritk/data.py similarity index 73% rename from src/mritk/data/orientation.py rename to src/mritk/data.py index 67be168..f1b17db 100644 --- a/src/mritk/data/orientation.py +++ b/src/mritk/data.py @@ -1,13 +1,74 @@ -# MRI Data orientation Module +# MRI Data Base class and functions Module # Copyright (C) 2026 Jørgen Riseth (jnriseth@gmail.com) # Copyright (C) 2026 Cécile Daversin-Catty (cecile@simula.no) # Copyright (C) 2026 Simula Research Laboratory -import numpy as np +import re +from pathlib import Path +from typing import Optional -from .base import MRIData +import nibabel +import numpy as np +import numpy.typing as npt + + +class MRIData: + def __init__(self, data: np.ndarray, affine: np.ndarray): + self.data = data + self.affine = affine + + @property + def shape(self) -> tuple[int, ...]: + return self.data.shape + + @classmethod + def from_file(cls, path: Path | str, dtype: npt.DTypeLike | None = None, orient: bool = True) -> "MRIData": + suffix_regex = re.compile(r".+(?P<suffix>(\.nii(\.gz|)|\.mg(z|h)))") + m = suffix_regex.match(Path(path).name) + if (m is not None) and (m.groupdict()["suffix"] in (".nii", ".nii.gz")): + mri = nibabel.nifti1.load(path) + elif (m is not None) and (m.groupdict()["suffix"] in (".mgz", ".mgh")): + mri = nibabel.freesurfer.mghformat.load(path) + else: + raise ValueError(f"Invalid suffix {path}, should be either '.nii', or '.mgz'") + + affine = mri.affine + if affine is None: + raise RuntimeError("MRI do not contain affine") + + kwargs = {} + if dtype is not None: + kwargs["dtype"] = dtype + data = np.asarray(mri.get_fdata("unchanged"), **kwargs) + + mri = cls(data=data, affine=affine) + + if orient: + return data_reorientation(mri) + else: + return mri + + def save(self, path: Path | str, dtype: npt.DTypeLike | None = None, intent_code: Optional[int] = None): + if dtype is None: + dtype = self.data.dtype + data = self.data.astype(dtype) + + suffix_regex = re.compile(r".+(?P<suffix>(\.nii(\.gz|)|\.mg(z|h)))") + m = 
suffix_regex.match(Path(path).name) + if (m is not None) and (m.groupdict()["suffix"] in (".nii", ".nii.gz")): + nii = nibabel.nifti1.Nifti1Image(data, self.affine) + if intent_code is not None: + nii.header.set_intent(intent_code) + nibabel.nifti1.save(nii, path) + elif (m is not None) and (m.groupdict()["suffix"] in (".mgz", ".mgh")): + mgh = nibabel.freesurfer.mghformat.MGHImage(data, self.affine) + if intent_code is not None: + mgh.header.set_intent(intent_code) + nibabel.freesurfer.mghformat.save(mgh, path) + else: + raise ValueError(f"Invalid suffix {path}, should be either '.nii', or '.mgz'") def physical_to_voxel_indices(physical_coordinates: np.ndarray, affine: np.ndarray, round_coords: bool = True) -> np.ndarray: @@ -182,31 +243,3 @@ def change_of_coordinates_map(orientation_in: str, orientation_out: str) -> np.n P = np.eye(4) P[:, :3] = P[:, index_order] return P @ F - - -def assert_same_space(mri1: MRIData, mri2: MRIData, rtol: float = 1e-5): - """Assert that two MRI datasets share the same physical space. - - Checks if the data shapes are identical and if the affine transformation - matrices are close within a specified relative tolerance. - - Args: - mri1: The first MRI data object. - mri2: The second MRI data object. - rtol: Relative tolerance for comparing affine matrices. Defaults to 1e-5. - - Raises: - ValueError: If shapes differ or if affine matrices are not sufficiently close. - """ - if mri1.data.shape == mri2.data.shape and np.allclose(mri1.affine, mri2.affine, rtol): - return - with np.printoptions(precision=5): - err = np.nanmax(np.abs((mri1.affine - mri2.affine) / mri2.affine)) - msg = ( - f"MRI's not in same space (relative tolerance {rtol})." 
- f" Shapes: ({mri1.data.shape}, {mri2.data.shape})," - f" Affines: {mri1.affine}, {mri2.affine}," - f" Affine max relative error: {err}" - ) - - raise ValueError(msg) diff --git a/src/mritk/data/__init__.py b/src/mritk/data/__init__.py deleted file mode 100644 index 3707f79..0000000 --- a/src/mritk/data/__init__.py +++ /dev/null @@ -1,8 +0,0 @@ -# Copyright (C) 2026 Jørgen Riseth (jnriseth@gmail.com) -# Copyright (C) 2026 Cécile Daversin-Catty (cecile@simula.no) -# Copyright (C) 2026 Simula Research Laboratory - - -from . import base, io, orientation - -__all__ = ["base", "io", "orientation"] diff --git a/src/mritk/data/base.py b/src/mritk/data/base.py deleted file mode 100644 index 7880e7d..0000000 --- a/src/mritk/data/base.py +++ /dev/null @@ -1,24 +0,0 @@ -# MRI Data Base class and functions Module - -# Copyright (C) 2026 Jørgen Riseth (jnriseth@gmail.com) -# Copyright (C) 2026 Cécile Daversin-Catty (cecile@simula.no) -# Copyright (C) 2026 Simula Research Laboratory - - -import numpy as np - - -class MRIData: - def __init__(self, data: np.ndarray, affine: np.ndarray): - self.data = data - self.affine = affine - - def get_data(self): - return self.data - - def get_metadata(self): - return self.affine - - @property - def shape(self) -> tuple[int, ...]: - return self.data.shape diff --git a/src/mritk/data/io.py b/src/mritk/data/io.py deleted file mode 100644 index ba3547c..0000000 --- a/src/mritk/data/io.py +++ /dev/null @@ -1,61 +0,0 @@ -# MRI Data IO Module - -# Copyright (C) 2026 Jørgen Riseth (jnriseth@gmail.com) -# Copyright (C) 2026 Cécile Daversin-Catty (cecile@simula.no) -# Copyright (C) 2026 Simula Research Laboratory - - -from pathlib import Path -import nibabel -import numpy as np -import numpy.typing as npt -import re -from typing import Optional - -from .base import MRIData -from .orientation import data_reorientation - - -def load_mri_data( - path: Path | str, - dtype: type = np.float64, - orient: bool = True, -) -> MRIData: - suffix_regex = 
re.compile(r".+(?P(\.nii(\.gz|)|\.mg(z|h)))") - m = suffix_regex.match(Path(path).name) - if (m is not None) and (m.groupdict()["suffix"] in (".nii", ".nii.gz")): - mri = nibabel.nifti1.load(path) - elif (m is not None) and (m.groupdict()["suffix"] in (".mgz", ".mgh")): - mri = nibabel.freesurfer.mghformat.load(path) - else: - raise ValueError(f"Invalid suffix {path}, should be either '.nii', or '.mgz'") - - affine = mri.affine - if affine is None: - raise RuntimeError("MRI do not contain affine") - - data = np.asarray(mri.get_fdata("unchanged"), dtype=dtype) - mri = MRIData(data=data, affine=affine) - - if orient: - return data_reorientation(mri) - else: - return mri - - -def save_mri_data(mri: MRIData, path: Path, dtype: npt.DTypeLike, intent_code: Optional[int] = None): - # TODO : Choose other way to check extension than regex ? - suffix_regex = re.compile(r".+(?P(\.nii(\.gz|)|\.mg(z|h)))") - m = suffix_regex.match(Path(path).name) - if (m is not None) and (m.groupdict()["suffix"] in (".nii", ".nii.gz")): - nii = nibabel.nifti1.Nifti1Image(mri.data.astype(dtype), mri.affine) - if intent_code is not None: - nii.header.set_intent(intent_code) - nibabel.nifti1.save(nii, path) - elif (m is not None) and (m.groupdict()["suffix"] in (".mgz", ".mgh")): - mgh = nibabel.freesurfer.mghformat.MGHImage(mri.data.astype(dtype), mri.affine) - if intent_code is not None: - mgh.header.set_intent(intent_code) - nibabel.freesurfer.mghformat.save(mgh, path) - else: - raise ValueError(f"Invalid suffix {path}, should be either '.nii', or '.mgz'") diff --git a/src/mritk/datasets.py b/src/mritk/datasets.py index bf7a23e..bc6904d 100644 --- a/src/mritk/datasets.py +++ b/src/mritk/datasets.py @@ -4,12 +4,15 @@ # Copyright (C) 2026 Cécile Daversin-Catty (cecile@simula.no) # Copyright (C) 2026 Simula Research Laboratory +import argparse import logging -from dataclasses import dataclass -import zipfile -from pathlib import Path import urllib.request +import zipfile +from collections.abc 
import Callable from concurrent.futures import ThreadPoolExecutor +from dataclasses import dataclass +from pathlib import Path + import tqdm logger = logging.getLogger(__name__) @@ -35,7 +38,7 @@ def get_datasets() -> dict[str, Dataset]: name="Test Data", description="A small test dataset for testing functionality (based on the Gonzo dataset).", license="CC-BY-4.0", - links={"mritk-test-data.zip": download_link_google_drive("1CSj3CHd4ztcU4Aqdlw9K2OWjPi5u75bd")}, + links={"mritk-test-data.zip": download_link_google_drive("1YVXoV1UhmpkMIeaNKeS9eqCsdMULwKBO")}, ), "gonzo": Dataset( name="The Gonzo Dataset", @@ -122,11 +125,11 @@ def __call__(self, block_num, block_size, total_size): def list_datasets_verbose(key: str): + from rich import box from rich.console import Console - from rich.table import Table from rich.panel import Panel + from rich.table import Table from rich.text import Text - from rich import box console = Console() datasets = get_datasets() @@ -177,9 +180,9 @@ def list_datasets_verbose(key: str): def list_datasets(): """Prints a simple table with only Key, Name and DOI.""" + from rich import box from rich.console import Console from rich.table import Table - from rich import box console = Console() datasets = get_datasets() @@ -195,7 +198,10 @@ def list_datasets(): console.print(table) -def add_arguments(parser): +def add_arguments( + parser: argparse.ArgumentParser, + extra_args_cb: Callable[[argparse.ArgumentParser], None] | None = None, +) -> None: subparsers = parser.add_subparsers(dest="datasets-command") download_parser = subparsers.add_parser("download", help="Download a dataset", formatter_class=parser.formatter_class) choices = list(get_datasets().keys()) @@ -216,6 +222,9 @@ def add_arguments(parser): choices=choices, help=f"Dataset to show information about (choices: {', '.join(choices)})", ) + if extra_args_cb is not None: + extra_args_cb(download_parser) + extra_args_cb(info_parser) def dispatch(args): diff --git a/src/mritk/hybrid.py 
b/src/mritk/hybrid.py new file mode 100644 index 0000000..6aa4cdf --- /dev/null +++ b/src/mritk/hybrid.py @@ -0,0 +1,99 @@ +# T1 Maps generation module + +# Copyright (C) 2026 Jørgen Riseth (jnriseth@gmail.com) +# Copyright (C) 2026 Cécile Daversin-Catty (cecile@simula.no) +# Copyright (C) 2026 Simula Research Laboratory + +import argparse +import logging +from collections.abc import Callable +from pathlib import Path + +import nibabel +import numpy as np +import skimage + +logger = logging.getLogger(__name__) + + +def compute_hybrid_t1_array(ll_data: np.ndarray, mixed_data: np.ndarray, mask: np.ndarray, threshold: float) -> np.ndarray: + """ + Creates a hybrid T1 array by selectively substituting Look-Locker voxels with Mixed voxels. + + Substitution occurs only if BOTH the Look-Locker AND Mixed T1 values exceed the threshold, + AND the voxel falls within the provided CSF mask. + + Args: + ll_data (np.ndarray): 3D numpy array of Look-Locker T1 values. + mixed_data (np.ndarray): 3D numpy array of Mixed T1 values. + mask (np.ndarray): 3D boolean mask (typically eroded CSF). + threshold (float): T1 threshold value (in ms). + + Returns: + np.ndarray: Hybrid 3D T1 array. 
+ """ + logger.debug("Computing hybrid T1 array with threshold %.2f ms.", threshold) + hybrid = ll_data.copy() + newmask = mask & (ll_data > threshold) & (mixed_data > threshold) + hybrid[newmask] = mixed_data[newmask] + return hybrid + + +def hybrid_t1map( + LL_path: Path, mixed_path: Path, csf_mask_path: Path, threshold: float, erode: int = 0, output: Path | None = None +) -> nibabel.nifti1.Nifti1Image: + """I/O wrapper for merging a Look-Locker and a Mixed T1 map.""" + logger.info(f"Generating hybrid T1 map with threshold {threshold} ms and erosion {erode} voxels.") + logger.info(f"Loading Look-Locker T1 map from {LL_path}.") + logger.info(f"Loading Mixed T1 map from {mixed_path}.") + logger.info(f"Loading CSF mask from {csf_mask_path}.") + mixed_mri = nibabel.nifti1.load(mixed_path) + ll_mri = nibabel.nifti1.load(LL_path) + + csf_mask_mri = nibabel.nifti1.load(csf_mask_path) + csf_mask = csf_mask_mri.get_fdata().astype(bool) + + if erode > 0: + logger.debug(f"Eroding CSF mask with a ball structuring element of radius {erode}.") + csf_mask = skimage.morphology.erosion(csf_mask, skimage.morphology.ball(erode)) + + hybrid = compute_hybrid_t1_array(ll_mri.get_fdata(), mixed_mri.get_fdata(), csf_mask, threshold) + + hybrid_nii = nibabel.nifti1.Nifti1Image(hybrid, affine=ll_mri.affine, header=ll_mri.header) + + if output is not None: + logger.info(f"Saving hybrid T1 map to {output}.") + nibabel.nifti1.save(hybrid_nii, output) + else: + logger.info("No output path provided, returning hybrid T1 map as Nifti1Image object.") + + return hybrid_nii + + +def add_arguments( + parser: argparse.ArgumentParser, + extra_args_cb: Callable[[argparse.ArgumentParser], None] | None = None, +) -> None: + """Add command-line arguments for the hybrid T1 map generation.""" + parser.add_argument("-l", "--input-looklocker", type=Path, required=True, help="Path to the Look-Locker T1 map (NIfTI).") + parser.add_argument("-m", "--input-mixed", type=Path, required=True, help="Path to the Mixed 
T1 map (NIfTI).") + parser.add_argument("-c", "--csf-mask", type=Path, required=True, help="Path to the CSF mask (NIfTI).") + parser.add_argument("-t", "--threshold", type=float, default=4000.0, help="T1 threshold in ms for substitution.") + parser.add_argument("-e", "--erode", type=int, default=0, help="Number of voxels to erode the CSF mask.") + parser.add_argument("-o", "--output", type=Path, required=True, help="Output path for the hybrid T1 map (NIfTI).") + + if extra_args_cb is not None: + extra_args_cb(parser) + + +def dispatch(args): + """Dispatch function for the hybrid T1 map generation.""" + + hybrid_t1map( + LL_path=args.pop("input_looklocker"), + mixed_path=args.pop("input_mixed"), + csf_mask_path=args.pop("csf_mask"), + threshold=args.pop("threshold"), + erode=args.pop("erode"), + output=args.pop("output"), + ) diff --git a/src/mritk/info.py b/src/mritk/info.py index 3052900..b190b44 100644 --- a/src/mritk/info.py +++ b/src/mritk/info.py @@ -1,12 +1,13 @@ import json import typing from pathlib import Path -import numpy as np + import nibabel as nib +import numpy as np +from rich import box from rich.console import Console -from rich.table import Table from rich.panel import Panel -from rich import box +from rich.table import Table def custom_json(obj): diff --git a/src/mritk/looklocker.py b/src/mritk/looklocker.py new file mode 100644 index 0000000..c799444 --- /dev/null +++ b/src/mritk/looklocker.py @@ -0,0 +1,357 @@ +# T1 Maps generation module + +# Copyright (C) 2026 Jørgen Riseth (jnriseth@gmail.com) +# Copyright (C) 2026 Cécile Daversin-Catty (cecile@simula.no) +# Copyright (C) 2026 Simula Research Laboratory + +import argparse +import logging +import shutil +import tempfile +from collections.abc import Callable +from functools import partial +from pathlib import Path +from typing import Optional + +import numpy as np +import skimage +import tqdm + +from .data import MRIData +from .utils import fit_voxel, mri_facemask, nan_filter_gaussian, 
run_dcm2niix + +logger = logging.getLogger(__name__) + + +def read_dicom_trigger_times(dicomfile: Path) -> np.ndarray: + """ + Extracts unique nominal cardiac trigger delay times from DICOM functional groups. + + Args: + dicomfile (str): The file path to the DICOM file. + + Returns: + np.ndarray: A sorted array of unique trigger delay times (in milliseconds) + extracted from the CardiacSynchronizationSequence. + """ + logger.info(f"Reading DICOM trigger times from {dicomfile}.") + import pydicom + + dcm = pydicom.dcmread(dicomfile) + all_frame_times = [ + f.CardiacSynchronizationSequence[0].NominalCardiacTriggerDelayTime for f in dcm.PerFrameFunctionalGroupsSequence + ] + return np.unique(all_frame_times) + + +def remove_outliers(data: np.ndarray, mask: np.ndarray, t1_low: float, t1_high: float) -> np.ndarray: + """ + Applies a mask and removes values outside the physiological T1 range. + + Args: + data (np.ndarray): 3D array of T1 values. + mask (np.ndarray): 3D boolean mask of the brain/valid area. + t1_low (float): Lower physiological limit. + t1_high (float): Upper physiological limit. + + Returns: + np.ndarray: A cleaned 3D array with outliers and unmasked regions set to NaN. + """ + logger.info("Removing outliers from T1 map with physiological range [%f, %f].", t1_low, t1_high) + processed = data.copy() + processed[~mask] = np.nan + outliers = (processed < t1_low) | (processed > t1_high) + processed[outliers] = np.nan + return processed + + +def create_largest_island_mask(data: np.ndarray, radius: int = 10, erode_dilate_factor: float = 1.3) -> np.ndarray: + """ + Creates a binary mask isolating the largest contiguous non-NaN region in an array. + + Args: + data (np.ndarray): The 3D input data containing NaNs and valid values. + radius (int, optional): The radius for morphological dilation. Defaults to 10. + erode_dilate_factor (float, optional): Multiplier for the erosion radius + relative to the dilation radius. Defaults to 1.3. 
+ + Returns: + np.ndarray: A boolean 3D mask of the largest contiguous island. + """ + logger.info("Creating largest island mask with dilation radius %d and erosion factor %.2f.", radius, erode_dilate_factor) + mask = skimage.measure.label(np.isfinite(data)) + logger.debug("Region properties calculated for %d labeled regions.", mask.max()) + regions = skimage.measure.regionprops(mask) + if not regions: + return np.zeros_like(data, dtype=bool) + + logger.debug("Sorting regions by size to identify the largest contiguous island.") + regions.sort(key=lambda x: x.num_pixels, reverse=True) + mask = mask == regions[0].label + try: + logger.debug("Removing small holes with max_size %d.", 10 ** (mask.ndim)) + skimage.morphology.remove_small_holes(mask, max_size=10 ** (mask.ndim), connectivity=2, out=mask) + except TypeError: + # Older versions of skimage use area_threshold instead of max_size + skimage.morphology.remove_small_holes(mask, area_threshold=10 ** (mask.ndim), connectivity=2, out=mask) + logger.debug("Applying morphological dilation with radius %d.", radius) + skimage.morphology.dilation(mask, skimage.morphology.ball(radius), out=mask) + logger.debug("Applying morphological erosion with radius %d.", erode_dilate_factor * radius) + skimage.morphology.erosion(mask, skimage.morphology.ball(erode_dilate_factor * radius), out=mask) + logger.debug(f"Generated final mask with shape {mask.shape} and {mask.sum()} valid voxels.") + return mask + + +def compute_looklocker_t1_array(data: np.ndarray, time_s: np.ndarray, t1_roof: float = 10000.0) -> np.ndarray: + """ + Computes T1 relaxation maps from Look-Locker data using Levenberg-Marquardt fitting. + + Args: + data (np.ndarray): 4D numpy array (x, y, z, time) of Look-Locker MRI signals. + time_s (np.ndarray): 1D array of trigger times in seconds. + t1_roof (float, optional): Maximum allowed T1 value (ms) to cap spurious fits. Defaults to 10000.0. 
+ + Returns: + np.ndarray: 3D numpy array representing the T1 map in milliseconds. Voxels + that fail to fit or fall outside the mask are set to NaN. + """ + logger.info("Computing Look-Locker T1 map from 4D data with shape %s and trigger times %s.", data.shape, time_s) + assert len(data.shape) >= 4, f"Data should be at least 4-dimensional, got shape {data.shape}" + mask = mri_facemask(data[..., 0]) + logger.debug(f"Generated face mask with shape {mask.shape} and {mask.sum()} valid voxels.") + valid_voxels = (np.nanmax(data, axis=-1) > 0) & mask + logger.debug(f"Identified {valid_voxels.sum()} valid voxels for fitting after applying mask and signal threshold.") + + data_normalized = np.nan * np.zeros_like(data) + # Prevent divide by zero warnings dynamically + max_vals = np.nanmax(data, axis=-1)[valid_voxels, np.newaxis] + data_normalized[valid_voxels] = data[valid_voxels] / max_vals + + voxel_mask = np.array(np.where(valid_voxels)).T + d_masked = np.array([data_normalized[i, j, k] for (i, j, k) in voxel_mask]) + + logger.debug(f"Starting fitting for {len(d_masked)} voxels.") + with tqdm.tqdm(total=len(d_masked), desc="Fitting Look-Locker Voxels") as pbar: + voxel_fitter = partial(fit_voxel, time_s, pbar) + vfunc = np.vectorize(voxel_fitter, signature="(n) -> (3)") + fitted_coefficients = vfunc(d_masked) + + x2 = fitted_coefficients[:, 1] + x3 = fitted_coefficients[:, 2] + + i, j, k = voxel_mask.T + t1map = np.nan * np.zeros_like(data[..., 0]) + + # Calculate T1 in ms. Formula: T1 = (x2 / x3)^2 * 1000 + t1map[i, j, k] = (x2 / x3) ** 2 * 1000.0 + + return np.minimum(t1map, t1_roof) + + +def looklocker_t1map_postprocessing( + T1map: Path, + T1_low: float, + T1_high: float, + radius: int = 10, + erode_dilate_factor: float = 1.3, + mask: Optional[np.ndarray] = None, + output: Path | None = None, +) -> MRIData: + """ + Performs quality-control and post-processing on a raw Look-Locker T1 map. 
+ + Args: + T1map (Path): Path to the raw, unmasked Look-Locker T1 map NIfTI file. + T1_low (float): Lower physiological limit for T1 values (in ms). + T1_high (float): Upper physiological limit for T1 values (in ms). + radius (int, optional): Base radius for morphological dilation when generating + the automatic mask. Defaults to 10. + erode_dilate_factor (float, optional): Multiplier for the erosion radius + relative to the dilation radius to ensure tight mask edges. Defaults to 1.3. + mask (Optional[np.ndarray], optional): Pre-computed 3D boolean mask. If None, + one is generated automatically. Defaults to None. + output (Path | None, optional): Path to save the cleaned T1 map. Defaults to None. + + Returns: + MRIData: An MRIData object containing the cleaned, masked, and interpolated T1 map. + + Raises: + RuntimeError: If more than 99% of the voxels are removed during the outlier + filtering step, indicating a likely unit mismatch (e.g., T1 in seconds instead of ms). + + Notes: + This function cleans up noisy T1 fits by applying a three-step pipeline: + 1. Masking: If no mask is provided, it automatically isolates the brain/head by + finding the largest contiguous tissue island and applying morphological smoothing. + 2. Outlier Removal: Voxels falling outside the provided physiological bounds + [T1_low, T1_high] are discarded (set to NaN). + 3. Interpolation: Internal "holes" (NaNs) created by poor fits or outlier + removal are iteratively filled using a specialized Gaussian filter that + interpolates from surrounding valid tissue without blurring the edges. 
+ """ + logger.info(f"Post-processing Look-Locker T1 map at {T1map} with T1 range [{T1_low}, {T1_high}] ms.") + t1map_mri = MRIData.from_file(T1map, dtype=np.single) + t1map_data = t1map_mri.data.copy() + + if mask is None: + logger.debug("No mask provided, generating automatic mask based on the largest contiguous tissue island.") + mask = create_largest_island_mask(t1map_data, radius, erode_dilate_factor) + else: + logger.debug("Using provided mask for post-processing.") + + t1map_data = remove_outliers(t1map_data, mask, T1_low, T1_high) + + if np.isfinite(t1map_data).sum() / t1map_data.size < 0.01: + raise RuntimeError("After outlier removal, less than 1% of the image is left. Check image units.") + + # Fill internal missing values iteratively using a Gaussian filter + fill_mask = np.isnan(t1map_data) & mask + logger.debug(f"Initial fill mask has {fill_mask.sum()} voxels.") + while fill_mask.sum() > 0: + logger.info(f"Filling in {fill_mask.sum()} voxels within the mask.") + t1map_data[fill_mask] = nan_filter_gaussian(t1map_data, 1.0)[fill_mask] + fill_mask = np.isnan(t1map_data) & mask + + processed_T1map = MRIData(t1map_data, t1map_mri.affine) + if output is not None: + processed_T1map.save(output, dtype=np.single) + logger.info(f"Post-processed Look-Locker T1 map saved to {output}.") + else: + logger.info("No output path provided, returning post-processed Look-Locker T1 map as MRIData object.") + + return processed_T1map + + +def looklocker_t1map(looklocker_input: Path, timestamps: Path, output: Path | None = None) -> MRIData: + """ + Generates a T1 map from a 4D Look-Locker inversion recovery dataset. + + This function acts as an I/O wrapper. It loads the 4D Look-Locker sequence + and the corresponding trigger times. It converts the timestamps from milliseconds + (standard DICOM/text output) to seconds, which is required by the underlying + exponential fitting math, and triggers the voxel-by-voxel T1 calculation. 
+ + Args: + looklocker_input (Path): Path to the 4D Look-Locker NIfTI file. + timestamps (Path): Path to the text file containing the nominal trigger + delay times (in milliseconds) for each volume in the 4D series. + output (Path | None, optional): Path to save the resulting T1 map NIfTI file. Defaults to None. + + Returns: + MRIData: An MRIData object containing the computed 3D T1 map (in milliseconds) + and the original affine transformation matrix. + """ + logger.info(f"Generating T1 map from Look-Locker data at {looklocker_input} with trigger times from {timestamps}.") + ll_mri = MRIData.from_file(looklocker_input, dtype=np.single) + # Convert timestamps from milliseconds to seconds + time_s = np.loadtxt(timestamps) / 1000.0 + logger.debug(f"Loaded trigger times: {time_s}.") + t1map_array = compute_looklocker_t1_array(ll_mri.data, time_s) + t1map_mri = MRIData(t1map_array.astype(np.single), ll_mri.affine) + + if output is not None: + t1map_mri.save(output, dtype=np.single) + logger.info(f"Look-Locker T1 map saved to {output}.") + else: + logger.info("No output path provided, returning Look-Locker T1 map as MRIData object.") + + return t1map_mri + + +def dicom_to_looklocker(dicomfile: Path, outpath: Path): + """ + Converts a Look-Locker DICOM file to a standardized NIfTI format. + + Extracts trigger times to a sidecar text file, delegates conversion to dcm2niix, + and standardizes the output type to single-precision float (intent_code=2001). + + Args: + dicomfile (Path): Path to the input DICOM file. + outpath (Path): Desired output path for the converted .nii.gz file. + """ + logger.info(f"Converting Look-Locker DICOM {dicomfile} to NIfTI format at {outpath}") + outdir, form = outpath.parent, outpath.stem + outdir.mkdir(exist_ok=True, parents=True) + + # Extract and save trigger times + times = read_dicom_trigger_times(dicomfile) + trigger_file = outdir / f"{form}_trigger_times.txt" + logger.debug(f"Extracted trigger times: {times}. 
Saving to {trigger_file}") + np.savetxt(trigger_file, times) + + with tempfile.TemporaryDirectory(prefix=outpath.stem) as tmpdir: + logger.debug(f"Created temporary directory {tmpdir} for intermediate dcm2niix output.") + tmppath = Path(tmpdir) + + # Delegate heavy lifting to dcm2niix + run_dcm2niix(dicomfile, tmppath, form, extra_args="-z y --ignore_trigger_times", check=True) + + # Copy metadata sidecar + shutil.copy(tmppath / f"{form}.json", outpath.with_suffix(".json")) + + # Reload and save to standardize intent codes and precision + mri = MRIData.from_file(tmppath / f"{form}.nii.gz", dtype=np.double) + logger.debug(f"Reloaded intermediate NIfTI file with shape {mri.data.shape} and dtype {mri.data.dtype}.") + mri.save(outpath.with_suffix(".nii.gz"), dtype=np.single, intent_code=2001) + logger.info( + f"Final Look-Locker NIfTI saved to {outpath.with_suffix('.nii.gz')} with intent_code=2001 and dtype=np.single." + ) + + +def add_arguments( + parser: argparse.ArgumentParser, + extra_args_cb: Callable[[argparse.ArgumentParser], None] | None = None, +) -> None: + subparser = parser.add_subparsers(dest="looklocker-command", help="Commands for processing Look-Locker data") + + dicom_parser = subparser.add_parser( + "dcm2ll", help="Convert Look-Locker DICOM to NIfTI format", formatter_class=parser.formatter_class + ) + dicom_parser.add_argument("-i", "--input", type=Path, help="Path to the input Look-Locker DICOM file") + dicom_parser.add_argument("-o", "--output", type=Path, help="Desired output path for the converted .nii.gz file") + + ll_t1 = subparser.add_parser("t1", help="Generate a T1 map from Look-Locker data", formatter_class=parser.formatter_class) + ll_t1.add_argument("-i", "--input", type=Path, help="Path to the 4D Look-Locker NIfTI file") + ll_t1.add_argument("-t", "--timestamps", type=Path, help="Path to the text file containing trigger delay times (in ms)") + ll_t1.add_argument("-o", "--output", type=Path, default=None, help="Path to save the resulting 
T1 map NIfTI file") + + ll_post = subparser.add_parser( + "postprocess", help="Post-process a raw Look-Locker T1 map", formatter_class=parser.formatter_class + ) + ll_post.add_argument("-i", "--input", type=Path, help="Path to the raw Look-Locker T1 map NIfTI file") + ll_post.add_argument("-o", "--output", type=Path, default=None, help="Path to save the cleaned T1 map NIfTI file") + ll_post.add_argument("--t1-low", type=float, default=100.0, help="Lower physiological limit for T1 values (in ms)") + ll_post.add_argument("--t1-high", type=float, default=10000.0, help="Upper physiological limit for T1 values (in ms)") + ll_post.add_argument( + "--radius", type=int, default=10, help="Base radius for morphological dilation when generating the automatic mask" + ) + ll_post.add_argument( + "--erode-dilate-factor", + type=float, + default=1.3, + help="Multiplier for the erosion radius relative to the dilation radius to ensure tight mask edges", + ) + + if extra_args_cb is not None: + extra_args_cb(dicom_parser) + extra_args_cb(ll_t1) + extra_args_cb(ll_post) + + +def dispatch(args): + command = args.pop("looklocker-command") + if command == "dcm2ll": + dicom_to_looklocker(args.pop("input"), args.pop("output")) + elif command == "t1": + looklocker_t1map(args.pop("input"), args.pop("timestamps"), output=args.pop("output")) + elif command == "postprocess": + looklocker_t1map_postprocessing( + T1map=args.pop("input"), + T1_low=args.pop("t1_low"), + T1_high=args.pop("t1_high"), + radius=args.pop("radius"), + erode_dilate_factor=args.pop("erode_dilate_factor"), + output=args.pop("output"), + ) + else: + raise ValueError(f"Unknown Look-Locker command: {command}") diff --git a/src/mritk/masking/__init__.py b/src/mritk/masking/__init__.py deleted file mode 100644 index 4bd6155..0000000 --- a/src/mritk/masking/__init__.py +++ /dev/null @@ -1,8 +0,0 @@ -# Copyright (C) 2026 Jørgen Riseth (jnriseth@gmail.com) -# Copyright (C) 2026 Cécile Daversin-Catty (cecile@simula.no) -# Copyright 
(C) 2026 Simula Research Laboratory - - -from . import masks - -__all__ = ["masks", "utils"] diff --git a/src/mritk/masking/masks.py b/src/mritk/masking/masks.py deleted file mode 100644 index 1ead7ae..0000000 --- a/src/mritk/masking/masks.py +++ /dev/null @@ -1,72 +0,0 @@ -"""Intracranial and CSF masks generation module - -Copyright (C) 2026 Jørgen Riseth (jnriseth@gmail.com) -Copyright (C) 2026 Cécile Daversin-Catty (cecile@simula.no) -Copyright (C) 2026 Simula Research Laboratory -""" - -import numpy as np -import skimage -from typing import Optional -from pathlib import Path - -from ..data.base import MRIData -from ..data.io import load_mri_data, save_mri_data -from ..data.orientation import assert_same_space -from .utils import largest_island - - -def create_csf_mask( - vol: np.ndarray, - connectivity: Optional[int] = 2, - use_li: bool = False, -) -> np.ndarray: - connectivity = connectivity or vol.ndim - if use_li: - thresh = skimage.filters.threshold_li(vol) - binary = vol > thresh - binary = largest_island(binary, connectivity=connectivity) - else: - (hist, bins) = np.histogram(vol[(vol > 0) * (vol < np.quantile(vol, 0.999))], bins=512) - thresh = skimage.filters.threshold_yen(hist=(hist, bins)) - binary = vol > thresh - binary = largest_island(binary, connectivity=connectivity) - return binary - - -def csf_mask( - input: Path, - connectivity: Optional[int] = 2, - use_li: bool = False, - output: Path | None = None, -) -> MRIData: - input_vol = load_mri_data(input, dtype=np.single) - mask = create_csf_mask(input_vol.data, connectivity, use_li) - assert np.max(mask) > 0, "Masking failed, no voxels in mask" - mri_data = MRIData(data=mask, affine=input_vol.affine) - if output is not None: - save_mri_data(mri_data, output, dtype=np.uint8) - return mri_data - - -def create_intracranial_mask(csf_mask: MRIData, segmentation: MRIData) -> np.ndarray: - assert_same_space(csf_mask, segmentation) - combined_mask = csf_mask.data + segmentation.data.astype(bool) - 
background_mask = largest_island(~combined_mask, connectivity=1) - opened = skimage.morphology.binary_opening(background_mask, skimage.morphology.ball(3)) - return ~opened - # return MRIData(data=~opened, affine=segmentation.affine) - - -def intracranial_mask( - csf_mask: Path, - segmentation: Path, - output: Optional[Path] = None, -) -> MRIData: - input_csf_mask = load_mri_data(csf_mask, dtype=bool) - segmentation_data = load_mri_data(segmentation, dtype=bool) - mask_data = create_intracranial_mask(input_csf_mask, segmentation_data) - mri_data = MRIData(data=mask_data, affine=segmentation_data.affine) - if output is not None: - save_mri_data(mri_data, output, dtype=np.uint8) - return mri_data diff --git a/src/mritk/masking/utils.py b/src/mritk/masking/utils.py deleted file mode 100644 index 2858a5c..0000000 --- a/src/mritk/masking/utils.py +++ /dev/null @@ -1,16 +0,0 @@ -"""Masking utils - -Copyright (C) 2026 Jørgen Riseth (jnriseth@gmail.com) -Copyright (C) 2026 Cécile Daversin-Catty (cecile@simula.no) -Copyright (C) 2026 Simula Research Laboratory -""" - -import numpy as np -import skimage - - -def largest_island(mask: np.ndarray, connectivity: int = 1) -> np.ndarray: - newmask = skimage.measure.label(mask, connectivity=connectivity) - regions = skimage.measure.regionprops(newmask) - regions.sort(key=lambda x: x.num_pixels, reverse=True) - return newmask == regions[0].label diff --git a/src/mritk/masks.py b/src/mritk/masks.py new file mode 100644 index 0000000..ff49d94 --- /dev/null +++ b/src/mritk/masks.py @@ -0,0 +1,174 @@ +# Intracranial and CSF masks generation module + +# Copyright (C) 2026 Jørgen Riseth (jnriseth@gmail.com) +# Copyright (C) 2026 Cécile Daversin-Catty (cecile@simula.no) +# Copyright (C) 2026 Simula Research Laboratory + +from pathlib import Path + +import numpy as np +import skimage + +from .data import MRIData +from .testing import assert_same_space + + +def largest_island(mask: np.ndarray, connectivity: int = 1) -> np.ndarray: + """ + 
Identifies and returns the largest contiguous region (island) in a boolean mask. + + Args: + mask (np.ndarray): A boolean or integer array where non-zero values + represent the regions of interest. + connectivity (int, optional): Maximum number of orthogonal hops to consider + a pixel/voxel as connected. For 2D, 1=4-connected, 2=8-connected. + For 3D, 1=6-connected, 2=18-connected, 3=26-connected. Defaults to 1. + + Returns: + np.ndarray: A boolean array of the same shape as `mask`, where True + indicates the elements of the largest connected component. + """ + newmask = skimage.measure.label(mask, connectivity=connectivity) + regions = skimage.measure.regionprops(newmask) + + # Handle the edge case where the mask is completely empty + if not regions: + return np.zeros_like(mask, dtype=bool) + + regions.sort(key=lambda x: x.num_pixels, reverse=True) + return newmask == regions[0].label + + +def create_csf_mask( + vol: np.ndarray, + connectivity: int | None = 2, + use_li: bool = False, +) -> np.ndarray: + """ + Creates a binary mask isolating the Cerebrospinal Fluid (CSF). + + This function uses intensity thresholding (either Li or Yen) to separate + bright fluid regions from surrounding tissue. It then isolates the CSF + by retaining only the largest contiguous spatial island. + + Args: + vol (np.ndarray): 3D numpy array of the MRI volume (typically T2-weighted or Spin-Echo). + connectivity (Optional[int], optional): Maximum connectivity distance to evaluate + contiguous islands. Defaults to 2. + use_li (bool, optional): If True, uses Li's minimum cross entropy thresholding. + If False, uses Yen's thresholding based on the volume histogram. Defaults to False. + + Returns: + np.ndarray: A boolean 3D array representing the CSF mask. 
+ """ + connectivity = connectivity or vol.ndim + + if use_li: + thresh = skimage.filters.threshold_li(vol) + binary = vol > thresh + binary = largest_island(binary, connectivity=connectivity) + else: + # Create a histogram excluding the absolute background (0) and extreme high outliers + valid_mask = (vol > 0) & (vol < np.quantile(vol, 0.999)) + hist, bins = np.histogram(vol[valid_mask], bins=512) + + thresh = skimage.filters.threshold_yen(hist=(hist, bins)) + binary = vol > thresh + binary = largest_island(binary, connectivity=connectivity) + + return binary + + +def csf_mask( + input: Path, + connectivity: int | None = 2, + use_li: bool = False, + output: Path | None = None, +) -> MRIData: + """ + I/O wrapper for generating and saving a CSF mask from a NIfTI file. + + Args: + input (Path): Path to the input NIfTI image. + connectivity (Optional[int], optional): Connectivity distance. Defaults to 2. + use_li (bool, optional): If True, uses Li thresholding. Defaults to False. + output (Optional[Path], optional): Path to save the resulting mask. Defaults to None. + + Returns: + MRIData: An MRIData object containing the boolean mask array. + + Raises: + AssertionError: If the resulting mask contains no voxels. + """ + input_vol = MRIData.from_file(input, dtype=np.single) + mask = create_csf_mask(input_vol.data, connectivity, use_li) + + assert np.max(mask) > 0, "Masking failed, no voxels in mask" + + mri_data = MRIData(data=mask, affine=input_vol.affine) + + if output is not None: + mri_data.save(output, dtype=np.uint8) + + return mri_data + + +def compute_intracranial_mask_array(csf_mask_array: np.ndarray, segmentation_array: np.ndarray) -> np.ndarray: + """ + Combines a CSF mask array and a brain segmentation mask array into a solid intracranial mask. + + This function merges the two domains and uses morphological operations (binary opening) + on the background to cleanly fill in any gaps or holes within the intracranial space. 
+ + Args: + csf_mask_array (np.ndarray): 3D boolean array representing the CSF mask. + segmentation_array (np.ndarray): 3D boolean array representing the anatomical brain segmentation. + + Returns: + np.ndarray: A boolean 3D array representing the solid intracranial space. + """ + # Ensure logical boolean combination + combined_mask = csf_mask_array.astype(bool) | segmentation_array.astype(bool) + + # Identify the background by extracting the largest island of the inverted combined mask + background_mask = largest_island(~combined_mask, connectivity=1) + + # Smooth the background boundary to fill narrow sulci/gaps + opened_background = skimage.morphology.opening(background_mask, skimage.morphology.ball(3)) + + # The intracranial mask is the inverse of the cleaned background + return ~opened_background + + +def intracranial_mask( + csf_mask_path: Path, + segmentation_path: Path, + output: Path | None = None, +) -> MRIData: + """ + I/O wrapper for generating and saving an intracranial mask from NIfTI files. + + Loads the masks, verifies they share the same physical coordinate space, and + delegates the array computation. + + Args: + csf_mask_path (Path): Path to the CSF mask NIfTI file. + segmentation_path (Path): Path to the brain segmentation NIfTI file. + output (Optional[Path], optional): Path to save the resulting mask. Defaults to None. + + Returns: + MRIData: An MRIData object containing the intracranial mask. 
+ """ + input_csf_mask = MRIData.from_file(csf_mask_path, dtype=bool) + segmentation_data = MRIData.from_file(segmentation_path, dtype=bool) + + # Validate spatial alignment before array operations + assert_same_space(input_csf_mask, segmentation_data) + + mask_data = compute_intracranial_mask_array(input_csf_mask.data, segmentation_data.data) + mri_data = MRIData(data=mask_data, affine=segmentation_data.affine) + + if output is not None: + mri_data.save(output, dtype=np.uint8) + + return mri_data diff --git a/src/mritk/mixed.py b/src/mritk/mixed.py new file mode 100644 index 0000000..d9973a2 --- /dev/null +++ b/src/mritk/mixed.py @@ -0,0 +1,431 @@ +# Mixed sequence + +# Copyright (C) 2026 Jørgen Riseth (jnriseth@gmail.com) +# Copyright (C) 2026 Cécile Daversin-Catty (cecile@simula.no) +# Copyright (C) 2026 Simula Research Laboratory + +import argparse +import json +import logging +from collections.abc import Callable +from pathlib import Path + +import nibabel +import numpy as np +import scipy +import scipy.interpolate +import skimage + +from .data import MRIData, change_of_coordinates_map, data_reorientation +from .masks import create_csf_mask +from .utils import VOLUME_LABELS, T1_lookup_table, run_dcm2niix + +logger = logging.getLogger(__name__) + + +def dicom_standard_affine(frame_fg) -> np.ndarray: + """ + Generates the DICOM to LPS (Left-Posterior-Superior) affine transformation matrix. + + This maps the voxel coordinate space of a DICOM frame to the physical LPS space + by utilizing the pixel spacing, slice spacing, and patient orientation cosines. + + Args: + frame_fg: A DICOM frame functional group sequence object containing + PixelMeasuresSequence, PlaneOrientationSequence, and PlanePositionSequence. + + Returns: + np.ndarray: A 4x4 affine transformation matrix mapping from DICOM voxel + indices to LPS physical coordinates. 
+    """
+    logger.debug("Generating DICOM standard affine matrix from frame functional group metadata.")
+    # Extract voxel spacings: inter-slice spacing and in-plane pixel spacing (row, column)
+    df = float(frame_fg.PixelMeasuresSequence[0].SpacingBetweenSlices)
+    dr, dc = (float(x) for x in frame_fg.PixelMeasuresSequence[0].PixelSpacing)
+    plane_orientation = frame_fg.PlaneOrientationSequence[0]
+    orientation = np.array(plane_orientation.ImageOrientationPatient)
+
+    # Find orientation of data array relative to LPS-coordinate system.
+    row_cosine = orientation[:3]
+    col_cosine = orientation[3:]
+    frame_cosine = np.cross(row_cosine, col_cosine)
+
+    # Translation part: physical LPS position of the first transmitted voxel (ImagePositionPatient).
+    T_1 = np.array(frame_fg.PlanePositionSequence[0].ImagePositionPatient)
+
+    # Create DICOM-definition affine map to LPS.
+    M_dcm = np.zeros((4, 4))
+    M_dcm[:3, 0] = row_cosine * dc
+    M_dcm[:3, 1] = col_cosine * dr
+    M_dcm[:3, 2] = frame_cosine * df
+    M_dcm[:3, 3] = T_1
+    M_dcm[3, 3] = 1.0
+
+    # Reorder from "natural index order" to DICOM affine map definition order.
+    N_order = np.eye(4)[[2, 1, 0, 3]]
+    return M_dcm @ N_order
+
+
+def extract_single_volume(D: np.ndarray, frame_fg) -> MRIData:
+    """
+    Extracts, scales, and reorients a single DICOM volume into an MRIData object.
+
+    Applies the appropriate RescaleSlope and RescaleIntercept transformations
+    to the raw pixel array, and then reorients the resulting data volume from
+    the native DICOM LPS space to RAS (Right-Anterior-Superior) space.
+
+    Args:
+        D (np.ndarray): The raw 3D pixel array for the volume.
+        frame_fg: The corresponding DICOM frame functional group metadata.
+
+    Returns:
+        MRIData: A newly constructed MRIData object with scaled pixel values
+        and an affine matrix oriented to RAS space.
+ """ + # Find scaling values (should potentially be inside scaling loop) + pixel_value_transform = frame_fg.PixelValueTransformationSequence[0] + slope = float(pixel_value_transform.RescaleSlope) + intercept = float(pixel_value_transform.RescaleIntercept) + private = frame_fg[0x2005, 0x140F][0] + scale_slope = private[0x2005, 0x100E].value + + # Loop over and scale values. + volume = np.zeros_like(D, dtype=np.single) + for idx in range(D.shape[0]): + volume[idx] = (intercept + slope * D[idx]) / (scale_slope * slope) + + A_dcm = dicom_standard_affine(frame_fg) + C = change_of_coordinates_map("LPS", "RAS") + mri = data_reorientation(MRIData(volume, C @ A_dcm)) + + return mri + + +def mixed_t1map( + SE_nii_path: Path, IR_nii_path: Path, meta_path: Path, T1_low: float, T1_high: float, output: Path | None = None +) -> nibabel.nifti1.Nifti1Image: + """ + Generates a T1 relaxation map by combining Spin-Echo (SE) and Inversion-Recovery (IR) acquisitions. + + This function acts as an I/O wrapper. It loads the respective NIfTI volumes + and their sequence metadata (such as TR, TE, TI, and Echo Train Length), + and passes them to the underlying mathematical function which interpolates + the T1 values based on the theoretical signal ratio (IR/SE). + + Args: + SE_nii_path (Path): Path to the Spin-Echo modulus NIfTI file. + IR_nii_path (Path): Path to the Inversion-Recovery corrected real NIfTI file. + meta_path (Path): Path to the JSON file containing the sequence parameters + ('TR_SE', 'TI', 'TE', 'ETL'). + T1_low (float): Lower bound for the T1 interpolation grid (in ms). + T1_high (float): Upper bound for the T1 interpolation grid (in ms). + output (Path | None, optional): Path to save the resulting T1 map NIfTI file. Defaults to None. + + Returns: + nibabel.nifti1.Nifti1Image: The computed T1 map as a NIfTI image object, + with the qform/sform properly set to scanner space. 
+ """ + logger.info(f"Computing Mixed T1 map from SE image {SE_nii_path} and IR image {IR_nii_path} with metadata {meta_path}") + se_mri = MRIData.from_file(SE_nii_path, dtype=np.single) + ir_mri = MRIData.from_file(IR_nii_path, dtype=np.single) + meta = json.loads(meta_path.read_text()) + + t1_volume = compute_mixed_t1_array(se_mri.data, ir_mri.data, meta, T1_low, T1_high) + logger.debug( + f"Computed T1 volume with shape {t1_volume.shape} and T1 range ({np.nanmin(t1_volume)}, {np.nanmax(t1_volume)}) ms." + ) + nii = nibabel.nifti1.Nifti1Image(t1_volume, ir_mri.affine) + nii.set_sform(nii.affine, "scanner") + nii.set_qform(nii.affine, "scanner") + + if output is not None: + nibabel.nifti1.save(nii, output) + logger.info(f"Saved Mixed T1 map to {output}") + else: + logger.info("No output path provided, returning T1 map as NIfTI image object") + + return nii + + +def mixed_t1map_postprocessing(SE_nii_path: Path, T1_path: Path, output: Path | None = None) -> nibabel.nifti1.Nifti1Image: + """ + Masks a Mixed T1 map to isolate the Cerebrospinal Fluid (CSF). + + Because the Mixed sequence is primarily sensitive/calibrated for long T1 species + like fluid, this function isolates the CSF. It derives a mask dynamically from + the original Spin-Echo sequence using Li thresholding, erodes the mask to avoid + partial-voluming effects at tissue boundaries, and applies it to the T1 map. + + Args: + SE_nii_path (Path): Path to the Spin-Echo NIfTI file used to derive the mask. + T1_path (Path): Path to the previously generated Mixed T1 map NIfTI file. + output (Path | None, optional): Path to save the masked T1 NIfTI file. Defaults to None. + + Returns: + nibabel.nifti1.Nifti1Image: The masked T1 map, where all non-CSF voxels + have been set to NaN. 
+ """ + logger.info(f"Starting Mixed T1 map post-processing with SE image {SE_nii_path} and T1 map {T1_path}") + t1map_nii = nibabel.nifti1.load(T1_path) + se_mri = MRIData.from_file(SE_nii_path, dtype=np.single) + + logger.debug("Creating CSF mask from SE image using Li thresholding and morphological erosion.") + mask = create_csf_mask(se_mri.data, use_li=True) + logger.debug("Performing morphological erosion on the CSF mask to reduce partial volume effects.") + mask = skimage.morphology.erosion(mask) + + logger.debug(f"Generated CSF mask with shape {mask.shape} and {mask.sum()} valid voxels.") + masked_t1map = t1map_nii.get_fdata(dtype=np.single) + masked_t1map[~mask] = np.nan + masked_t1map_nii = nibabel.nifti1.Nifti1Image(masked_t1map, t1map_nii.affine, t1map_nii.header) + + if output is not None: + nibabel.nifti1.save(masked_t1map_nii, output) + logger.info(f"Saved masked T1 map to {output}") + else: + logger.info("No output path provided, returning masked T1 map as NIfTI image object") + + return masked_t1map_nii + + +def compute_mixed_t1_array(se_data: np.ndarray, ir_data: np.ndarray, meta: dict, t1_low: float, t1_high: float) -> np.ndarray: + """ + Computes a Mixed T1 array from Spin-Echo and Inversion-Recovery volumes using a lookup table. + + Args: + se_data (np.ndarray): 3D numpy array of the Spin-Echo modulus data. + ir_data (np.ndarray): 3D numpy array of the Inversion-Recovery corrected real data. + meta (dict): Dictionary containing sequence parameters ('TR_SE', 'TI', 'TE', 'ETL'). + t1_low (float): Lower bound for T1 generation grid. + t1_high (float): Upper bound for T1 generation grid. + + Returns: + np.ndarray: Computed T1 map as a 3D float32 array. 
+ """ + logger.info("Computing Mixed T1 array from SE and IR data using lookup table interpolation.") + nonzero_mask = se_data != 0 + f_data = np.nan * np.zeros_like(ir_data) + f_data[nonzero_mask] = ir_data[nonzero_mask] / se_data[nonzero_mask] + + tr_se, ti, te, etl = meta["TR_SE"], meta["TI"], meta["TE"], meta["ETL"] + f_curve, t1_grid = T1_lookup_table(tr_se, ti, te, etl, t1_low, t1_high) + logger.debug( + f"Generated T1 lookup table with TR_SE={tr_se}, TI={ti}, TE={te}, " + f"ETL={etl}, T1 range=({t1_low}, {t1_high}), table size={len(t1_grid)}" + ) + interpolator = scipy.interpolate.interp1d(f_curve, t1_grid, kind="nearest", bounds_error=False, fill_value=np.nan) + logger.debug("Created interpolation function for T1 estimation based on the lookup table.") + return interpolator(f_data).astype(np.single) + + +def _extract_frame_metadata(frame_fg) -> dict: + """ + Extracts core physical parameters (TR, TE, TI, ETL) from a DICOM frame functional group. + + Args: + frame_fg: The PerFrameFunctionalGroupsSequence element for a specific frame. + + Returns: + dict: A dictionary containing available MR timing parameters. + """ + logger.debug("Extracting MR timing parameters from DICOM frame functional group.") + descrip = { + "TR": float(frame_fg.MRTimingAndRelatedParametersSequence[0].RepetitionTime), + "TE": float(frame_fg.MREchoSequence[0].EffectiveEchoTime), + } + + if hasattr(frame_fg.MRModifierSequence[0], "InversionTimes"): + descrip["TI"] = frame_fg.MRModifierSequence[0].InversionTimes[0] + + if hasattr(frame_fg.MRTimingAndRelatedParametersSequence[0], "EchoTrainLength"): + descrip["ETL"] = frame_fg.MRTimingAndRelatedParametersSequence[0].EchoTrainLength + + return descrip + + +def extract_mixed_dicom(dcmpath: Path, subvolumes: list[str]) -> list[dict]: + """ + Reads a Mixed DICOM file and splits it into independent NIfTI subvolumes. + + Args: + dcmpath (Path): Path to the input DICOM file. 
+ subvolumes (list[str]): List of volume labels mapping to the slices in the DICOM. + + Returns: + list[dict]: A list containing dictionaries with a generated 'nifti' image + and a 'descrip' metadata dictionary for each requested subvolume. + """ + logger.debug(f"Extracting subvolumes {subvolumes} from DICOM file {dcmpath}") + import pydicom + + dcm = pydicom.dcmread(str(dcmpath)) + frames_total = int(dcm.NumberOfFrames) + logger.debug(f"Total frames in DICOM: {frames_total}") + # [0x2001, 0x1018] is a private Philips tag representing 'Number of Slices MR' + frames_per_volume = dcm[0x2001, 0x1018].value + num_volumes = frames_total // frames_per_volume + assert num_volumes * frames_per_volume == frames_total, "Subvolume dimensions do not evenly divide the total frames." + + logger.debug(f"Frames per volume: {frames_per_volume}, Number of volumes: {num_volumes}") + pixel_data = dcm.pixel_array.astype(np.single) + frame_fg_sequence = dcm.PerFrameFunctionalGroupsSequence + + vols_out = [] + for volname in subvolumes: + logger.debug(f"Processing subvolume '{volname}'") + vol_idx = VOLUME_LABELS.index(volname) + + # Find volume slices representing the current subvolume + subvol_idx_start = vol_idx * frames_per_volume + subvol_idx_end = (vol_idx + 1) * frames_per_volume + frame_fg = frame_fg_sequence[subvol_idx_start] + + logger.debug( + f"Converting volume {vol_idx + 1}/{len(VOLUME_LABELS)}: '{volname}' " + f"between indices {subvol_idx_start}-{subvol_idx_end} out of {frames_total}." 
+ ) + + mri = extract_single_volume(pixel_data[subvol_idx_start:subvol_idx_end], frame_fg) + + nii_oriented = nibabel.nifti1.Nifti1Image(mri.data, mri.affine) + nii_oriented.set_sform(nii_oriented.affine, "scanner") + nii_oriented.set_qform(nii_oriented.affine, "scanner") + + description = _extract_frame_metadata(frame_fg) + vols_out.append({"nifti": nii_oriented, "descrip": description}) + + return vols_out + + +def dicom_to_mixed(dcmpath: Path, outpath: Path, subvolumes: list[str] | None = None): + """ + Converts a Mixed sequence DICOM file into independent subvolume NIfTIs. + + Generates dedicated images for Spin-Echo, Inversion-Recovery, etc., + and saves sequence timing metadata to a JSON sidecar. + + Args: + dcmpath (Path): Path to the input Mixed DICOM file. + outpath (Path): Base path for output files. Suffixes are automatically appended. + subvolumes (list[str], optional): specific subvolumes to extract. + Defaults to all known VOLUME_LABELS. + """ + logger.info(f"Starting DICOM to Mixed conversion for {dcmpath} with output base {outpath}") + + subvolumes = subvolumes or VOLUME_LABELS + logger.debug(f"Subvolumes to extract: {subvolumes}") + assert all([volname in VOLUME_LABELS for volname in subvolumes]), ( + f"Invalid subvolume name in {subvolumes}, must be one of {VOLUME_LABELS}" + ) + + outdir, form = outpath.parent, outpath.stem + logger.debug(f"Output directory: {outdir}, output form prefix: {form}") + outdir.mkdir(exist_ok=True, parents=True) + + vols = extract_mixed_dicom(dcmpath, subvolumes) + logger.debug(f"Extracted {len(vols)} subvolumes from DICOM, preparing to save NIfTI files and metadata.") + meta = {} + + for vol, volname in zip(vols, subvolumes): + output = outpath.with_name(f"{outpath.stem}_{volname}.nii.gz") + logger.debug(f"Saving subvolume '{volname}' to {output}") + nibabel.nifti1.save(vol["nifti"], output) + + descrip = vol["descrip"] + try: + if volname == "SE-modulus": + meta["TR_SE"] = descrip["TR"] + meta["TE"] = descrip["TE"] + 
meta["ETL"] = descrip["ETL"] + elif volname == "IR-corrected-real": + meta["TR_IR"] = descrip["TR"] + meta["TI"] = descrip["TI"] + except KeyError as e: + logger.error(f"Missing required metadata for {volname}: {descrip}") + raise e + + # Write merged metadata sidecar + json_meta_path = outdir / f"{form}_meta.json" + logger.debug(f"Writing metadata JSON sidecar to {json_meta_path} with contents: {meta}") + json_meta_path.write_text(json.dumps(meta, indent=4)) + + # Attempt standard dcm2niix conversion (soft failure allowed for legacy behavior) + logger.debug("Attempting to run dcm2niix for standard conversion (soft failure allowed).") + run_dcm2niix(dcmpath, outdir, form, extra_args="-w 0 --terse -b o", check=False) + + +def add_arguments( + parser: argparse.ArgumentParser, + extra_args_cb: Callable[[argparse.ArgumentParser], None] | None = None, +) -> None: + subparser = parser.add_subparsers(dest="hybrid-command", required=True, title="hybrid subcommands") + + dmc_parser = subparser.add_parser( + "dcm2mixed", + help="Convert a Mixed sequence DICOM file into separate NIfTI subvolumes and metadata.", + formatter_class=parser.formatter_class, + ) + dmc_parser.add_argument("-i", "--input", type=Path, required=True, help="Path to the input Mixed DICOM file.") + dmc_parser.add_argument( + "-o", "--output", type=Path, required=True, help="Base path for output NIfTI files and metadata JSON." + ) + dmc_parser.add_argument( + "-s", + "--subvolumes", + nargs="+", + default=VOLUME_LABELS, + help=f"Specific subvolumes to extract, space-separated. Defaults to all: {VOLUME_LABELS}.", + ) + + t1_parser = subparser.add_parser( + "t1", help="Generate a T1 map from Mixed sequence NIfTI files.", formatter_class=parser.formatter_class + ) + t1_parser.add_argument("-s", "--se", type=Path, required=True, help="Path to the Spin-Echo modulus NIfTI file.") + t1_parser.add_argument( + "-i", "--ir", type=Path, required=True, help="Path to the Inversion-Recovery corrected real NIfTI file." 
+ ) + t1_parser.add_argument( + "-m", "--meta", type=Path, required=True, help="Path to the JSON file containing the sequence parameters." + ) + t1_parser.add_argument("--t1-low", type=float, default=500.0, help="Lower bound for T1 interpolation grid (ms).") + t1_parser.add_argument("--t1-high", type=float, default=5000.0, help="Upper bound for T1 interpolation grid (ms).") + t1_parser.add_argument("-o", "--output", type=Path, required=True, help="Output path for the generated T1 map NIfTI file.") + + post_parser = subparser.add_parser( + "postprocess", + help="Mask a Mixed T1 map to isolate the CSF using the original SE sequence.", + formatter_class=parser.formatter_class, + ) + post_parser.add_argument( + "-s", "--se", type=Path, required=True, help="Path to the Spin-Echo modulus NIfTI file used to derive the mask." + ) + post_parser.add_argument( + "-t", "--t1", type=Path, required=True, help="Path to the previously generated Mixed T1 map NIfTI file." + ) + post_parser.add_argument("-o", "--output", type=Path, required=True, help="Output path for the masked T1 map NIfTI file.") + + if extra_args_cb is not None: + extra_args_cb(dmc_parser) + extra_args_cb(t1_parser) + extra_args_cb(post_parser) + + +def dispatch(args): + """Dispatch function for the mixed T1 map generation commands.""" + command = args.pop("hybrid-command") # Note: matches the 'dest' in your add_arguments + + if command == "dcm2mixed": + dicom_to_mixed(dcmpath=args.pop("input"), outpath=args.pop("output"), subvolumes=args.pop("subvolumes")) + elif command == "t1": + mixed_t1map( + SE_nii_path=args.pop("se"), + IR_nii_path=args.pop("ir"), + meta_path=args.pop("meta"), + T1_low=args.pop("t1_low"), + T1_high=args.pop("t1_high"), + output=args.pop("output"), + ) + elif command == "postprocess": + mixed_t1map_postprocessing(SE_nii_path=args.pop("se"), T1_path=args.pop("t1"), output=args.pop("output")) + else: + raise ValueError(f"Unknown command: {command}") diff --git a/src/mritk/napari.py 
b/src/mritk/napari.py index 645668b..0aa334a 100644 --- a/src/mritk/napari.py +++ b/src/mritk/napari.py @@ -4,8 +4,7 @@ import numpy as np from rich.console import Console -# Assuming relative imports based on your previous file structure -from .data.io import load_mri_data +from .data import MRIData def add_arguments(parser: argparse.ArgumentParser): @@ -51,7 +50,7 @@ def dispatch(args): console = Console() console.print(f"[bold green]Loading MRI data from:[/bold green] {file_path}") - mri_resource = load_mri_data(file_path) + mri_resource = MRIData.from_file(file_path) data = mri_resource.data viewer.add_image(data, name=file_path.stem) diff --git a/src/mritk/r1.py b/src/mritk/r1.py new file mode 100644 index 0000000..57d03d9 --- /dev/null +++ b/src/mritk/r1.py @@ -0,0 +1,134 @@ +# T1 to R1 module + +# Copyright (C) 2026 Jørgen Riseth (jnriseth@gmail.com) +# Copyright (C) 2026 Cécile Daversin-Catty (cecile@simula.no) +# Copyright (C) 2026 Simula Research Laboratory + +import argparse +import logging +from collections.abc import Callable +from pathlib import Path + +import numpy as np + +from .data import MRIData + +logger = logging.getLogger(__name__) + + +def compute_r1_array( + t1_data: np.ndarray, scale: float = 1000.0, t1_low: float = 1.0, t1_high: float = float("inf") +) -> np.ndarray: + """ + Pure numpy function converting a T1 relaxation time array to an R1 relaxation rate array. + + The relationship is R1 = scale / T1. Values outside the [t1_low, t1_high] + range are set to NaN to filter out noise and non-physiological data. + + Args: + t1_data (np.ndarray): The input array containing T1 relaxation times. + scale (float, optional): Scaling factor, typically 1000 to convert from ms to s^-1. Defaults to 1000. + t1_low (float, optional): Lower bound for valid T1 values. Defaults to 1. + t1_high (float, optional): Upper bound for valid T1 values. Defaults to infinity. + + Returns: + np.ndarray: An array of R1 relaxation rates. 
Invalid/out-of-bound voxels are set to NaN. + """ + logger.debug(f"Computing R1 array with scale={scale}, t1_low={t1_low}, t1_high={t1_high}") + valid_t1 = (t1_low <= t1_data) & (t1_data <= t1_high) + r1_data = np.nan * np.zeros_like(t1_data) + + # Calculate R1 only for valid voxels to avoid division by zero or extreme outliers + r1_data[valid_t1] = scale / t1_data[valid_t1] + + return r1_data + + +def convert_t1_to_r1( + T1map_mri: MRIData, + scale: float = 1000.0, + t1_low: float = 1.0, + t1_high: float = float("inf"), +) -> MRIData: + """ + Converts a T1 map MRIData object into an R1 map MRIData object. + + Args: + T1map_mri (MRIData): The input MRIData object representing the T1 map. + scale (float, optional): Scaling factor. Defaults to 1000. + t1_low (float, optional): Lower bound for valid T1 values. Defaults to 1. + t1_high (float, optional): Upper bound for valid T1 values. Defaults to float('inf'). + + Returns: + MRIData: A new MRIData object containing the R1 map array and the original affine matrix. + """ + r1_data = compute_r1_array(T1map_mri.data, scale, t1_low, t1_high) + logger.debug(f"Converted T1 map to R1 map with shape {r1_data.shape}") + return MRIData(data=r1_data, affine=T1map_mri.affine) + + +def t1_to_r1( + input_mri: Path | MRIData, + output: Path | None = None, + scale: float = 1000.0, + t1_low: float = 1.0, + t1_high: float = float("inf"), +) -> MRIData: + """ + High-level wrapper to convert a T1 map to an R1 map, handling file I/O operations. + + Args: + input_mri (Union[Path, MRIData]): A Path to a T1 NIfTI file or an already loaded MRIData object. + output (Path | None, optional): Path to save the resulting R1 map to disk. Defaults to None. + scale (float, optional): Scaling factor (e.g., 1000 for ms -> s^-1). Defaults to 1000. + t1_low (float, optional): Lower bound for valid T1 values. Defaults to 1. + t1_high (float, optional): Upper bound for valid T1 values. Defaults to float('inf'). 
+ + Returns: + MRIData: The computed R1 map as an MRIData object. + + Raises: + ValueError: If input_mri is neither a Path nor an MRIData object. + """ + logger.info(f"Converting T1 map to R1 map with input: {input_mri}, output: {output}") + if isinstance(input_mri, Path): + mri_t1 = MRIData.from_file(input_mri, dtype=np.single) + elif isinstance(input_mri, MRIData): + mri_t1 = input_mri + else: + raise ValueError(f"Input should be a Path or MRIData, got {type(input_mri)}") + + mri_r1 = convert_t1_to_r1(mri_t1, scale, t1_low, t1_high) + + if output is not None: + logger.info(f"Saving R1 map to {output}") + mri_r1.save(output, dtype=np.single) + else: + logger.info("No output path provided, returning R1 map as MRIData object") + + return mri_r1 + + +def add_arguments( + parser: argparse.ArgumentParser, + extra_args_cb: Callable[[argparse.ArgumentParser], None] | None = None, +) -> None: + """Add command-line arguments for the T1 to R1 conversion.""" + parser.add_argument("-i", "--input", type=Path, required=True, help="Path to the input T1 map (NIfTI).") + parser.add_argument("-o", "--output", type=Path, help="Path to save the output R1 map (NIfTI).") + parser.add_argument("--scale", type=float, default=1000.0, help="Scaling factor for R1 calculation.") + parser.add_argument("--t1-low", type=float, default=1.0, help="Lower bound for valid T1 values.") + parser.add_argument("--t1-high", type=float, default=float("inf"), help="Upper bound for valid T1 values.") + if extra_args_cb is not None: + extra_args_cb(parser) + + +def dispatch(args: dict): + """Dispatch function for the T1 to R1 conversion.""" + t1_to_r1( + input_mri=args.pop("input"), + output=args.pop("output"), + scale=args.pop("scale"), + t1_low=args.pop("t1_low"), + t1_high=args.pop("t1_high"), + ) diff --git a/src/mritk/segmentation.py b/src/mritk/segmentation.py new file mode 100644 index 0000000..03efb17 --- /dev/null +++ b/src/mritk/segmentation.py @@ -0,0 +1,235 @@ +# MRI Segmentation - Lookup Table 
(LUT) Module + +# Copyright (C) 2026 Jørgen Riseth (jnriseth@gmail.com) +# Copyright (C) 2026 Cécile Daversin-Catty (cecile@simula.no) +# Copyright (C) 2026 Simula Research Laboratory + +import logging +import os +import re +from pathlib import Path +from urllib.request import urlretrieve + +import pandas as pd + +logger = logging.getLogger(__name__) + + +# Regex to match a standard FreeSurfer Color LUT record line +LUT_REGEX = re.compile(r"^(?P