From d69c5e64e0ba33babe256f2fe9580aa91f8124b6 Mon Sep 17 00:00:00 2001 From: Henrik Finsberg Date: Fri, 6 Mar 2026 15:38:32 +0100 Subject: [PATCH 01/29] Add docstrings to t1_maps/utils.py --- src/mritk/t1_maps/dicom_to_nifti.py | 4 +- src/mritk/t1_maps/t1_to_r1.py | 9 +- src/mritk/t1_maps/utils.py | 225 +++++++++++++++++++++++----- 3 files changed, 194 insertions(+), 44 deletions(-) diff --git a/src/mritk/t1_maps/dicom_to_nifti.py b/src/mritk/t1_maps/dicom_to_nifti.py index 3587c8f..66782a0 100644 --- a/src/mritk/t1_maps/dicom_to_nifti.py +++ b/src/mritk/t1_maps/dicom_to_nifti.py @@ -8,6 +8,7 @@ import shutil import subprocess import tempfile +import logging from pathlib import Path from typing import Optional import nibabel @@ -17,10 +18,11 @@ from ..data.io import load_mri_data, save_mri_data from ..t1_maps.utils import VOLUME_LABELS, read_dicom_trigger_times -from .utils import extract_single_volume, logger +from .utils import extract_single_volume def extract_mixed_dicom(dcmpath: Path, subvolumes: list[str]): + logger = logging.getLogger(__name__) import pydicom dcm = pydicom.dcmread(dcmpath) diff --git a/src/mritk/t1_maps/t1_to_r1.py b/src/mritk/t1_maps/t1_to_r1.py index af4290d..bb660cd 100644 --- a/src/mritk/t1_maps/t1_to_r1.py +++ b/src/mritk/t1_maps/t1_to_r1.py @@ -1,9 +1,8 @@ -"""T1 to R1 module +# T1 to R1 module -Copyright (C) 2026 Jørgen Riseth (jnriseth@gmail.com) -Copyright (C) 2026 Cécile Daversin-Catty (cecile@simula.no) -Copyright (C) 2026 Simula Research Laboratory -""" +# Copyright (C) 2026 Jørgen Riseth (jnriseth@gmail.com) +# Copyright (C) 2026 Cécile Daversin-Catty (cecile@simula.no) +# Copyright (C) 2026 Simula Research Laboratory import numpy as np from pathlib import Path diff --git a/src/mritk/t1_maps/utils.py b/src/mritk/t1_maps/utils.py index d2a3258..5b3269e 100644 --- a/src/mritk/t1_maps/utils.py +++ b/src/mritk/t1_maps/utils.py @@ -1,23 +1,20 @@ -"""MRI DICOM to NIfTI conversion - utils +# MRI DICOM to NIfTI conversion - utils 
-Copyright (C) 2026 Jørgen Riseth (jnriseth@gmail.com) -Copyright (C) 2026 Cécile Daversin-Catty (cecile@simula.no) -Copyright (C) 2026 Simula Research Laboratory -""" +# Copyright (C) 2026 Jørgen Riseth (jnriseth@gmail.com) +# Copyright (C) 2026 Cécile Daversin-Catty (cecile@simula.no) +# Copyright (C) 2026 Simula Research Laboratory import numpy as np -import logging -import scipy as sp +import scipy +from pathlib import Path import skimage import warnings from scipy.optimize import OptimizeWarning -import os import nibabel from ..data.orientation import data_reorientation, change_of_coordinates_map from ..data.base import MRIData -logger = logging.getLogger(__name__) VOLUME_LABELS = [ "IR-modulus", @@ -29,7 +26,17 @@ ] -def read_dicom_trigger_times(dicomfile): +def read_dicom_trigger_times(dicomfile: Path) -> np.ndarray: + """ + Extracts unique nominal cardiac trigger delay times from DICOM functional groups. + + Args: + dicomfile (str): The file path to the DICOM file. + + Returns: + np.ndarray: A sorted array of unique trigger delay times (in milliseconds) + extracted from the CardiacSynchronizationSequence. + """ import pydicom dcm = pydicom.dcmread(dicomfile) @@ -39,9 +46,21 @@ def read_dicom_trigger_times(dicomfile): return np.unique(all_frame_times) -def dicom_standard_affine( - frame_fg, -) -> np.ndarray: +def dicom_standard_affine(frame_fg) -> np.ndarray: + """ + Generates the DICOM to LPS (Left-Posterior-Superior) affine transformation matrix. + + This maps the voxel coordinate space of a DICOM frame to the physical LPS space + by utilizing the pixel spacing, slice spacing, and patient orientation cosines. + + Args: + frame_fg: A DICOM frame functional group sequence object containing + PixelMeasuresSequence, PlaneOrientationSequence, and PlanePositionSequence. + + Returns: + np.ndarray: A 4x4 affine transformation matrix mapping from DICOM voxel + indices to LPS physical coordinates. 
+ """ # Get the original data shape df = float(frame_fg.PixelMeasuresSequence[0].SpacingBetweenSlices) dr, dc = (float(x) for x in frame_fg.PixelMeasuresSequence[0].PixelSpacing) @@ -69,10 +88,22 @@ def dicom_standard_affine( return M_dcm @ N_order -def extract_single_volume( - D: np.ndarray, - frame_fg, -) -> MRIData: +def extract_single_volume(D: np.ndarray, frame_fg) -> MRIData: + """ + Extracts, scales, and reorients a single DICOM volume into an MRIData object. + + Applies the appropriate RescaleSlope and RescaleIntercept transformations + to the raw pixel array, and then reorients the resulting data volume from + the native DICOM LPS space to RAS (Right-Anterior-Superior) space. + + Args: + D (np.ndarray): The raw 3D pixel array for the volume. + frame_fg: The corresponding DICOM frame functional group metadata. + + Returns: + MRIData: A newly constructed MRIData object with scaled pixel values + and an affine matrix oriented to RAS space. + """ # Find scaling values (should potentially be inside scaling loop) pixel_value_transform = frame_fg.PixelValueTransformationSequence[0] slope = float(pixel_value_transform.RescaleSlope) @@ -92,30 +123,90 @@ def extract_single_volume( return mri -def mri_facemask(vol: np.ndarray, smoothing_level=5): +def mri_facemask(vol: np.ndarray, smoothing_level: float = 5.0) -> np.ndarray: + """ + Generates a binary mask of the head/brain to exclude background air/noise. + + Utilizes Triangle thresholding, morphological hole filling, and Gaussian + smoothing to create a robust, contiguous mask of the primary subject volume. + + Args: + vol (np.ndarray): A 3D numpy array of the MRI volume. + smoothing_level (float, optional): The sigma for the Gaussian blur applied + to smooth the mask edges. Defaults to 5. + + Returns: + np.ndarray: A 3D boolean array where True indicates the subject/head. 
+ """ thresh = skimage.filters.threshold_triangle(vol) binary = vol > thresh - binary = sp.ndimage.binary_fill_holes(binary) + binary = scipy.ndimage.binary_fill_holes(binary) binary = skimage.filters.gaussian(binary, sigma=smoothing_level) binary = binary > skimage.filters.threshold_isodata(binary) return binary -def voxel_fit_function(t, x1, x2, x3): +def voxel_fit_function(t: np.ndarray, x1: float, x2: float, x3: float) -> np.ndarray: + """ + Theoretical Look-Locker T1 recovery curve model. + + Evaluates the function: f(t) = | x1 * (1 - (1 + x2^2) * exp(-x3^2 * t)) | + + Args: + t (np.ndarray): Time array in seconds. + x1 (float): Amplitude scaling factor (equivalent to A). + x2 (float): Inversion efficiency term (used to ensure (1+x2^2) > 1). + x3 (float): Relaxation rate, defined as 1 / sqrt(T1*). + + Returns: + np.ndarray: The theoretical signal magnitude at times `t`. + """ return np.abs(x1 * (1.0 - (1 + x2**2) * np.exp(-(x3**2) * t))) @np.errstate(divide="raise", invalid="raise", over="raise") -def curve_fit_wrapper(f, t, y, p0): - """Raises error instead of catching numpy warnings, such that - these cases may be treated.""" +def curve_fit_wrapper(f, t: np.ndarray, y: np.ndarray, p0: np.ndarray): + """ + A strict wrapper around scipy.optimize.curve_fit. + + Temporarily converts numpy warnings (like division by zero) and + scipy's OptimizeWarning into hard errors. This allows the calling + function to gracefully catch and handle poorly-fitting voxels + (e.g., by assigning them NaN) rather than silently returning bad fits. + + Args: + f (callable): The model function, e.g., voxel_fit_function. + t (np.ndarray): The independent variable (time). + y (np.ndarray): The dependent variable (signal). + p0 (np.ndarray): Initial guesses for the parameters. + + Returns: + np.ndarray: Optimal values for the parameters so that the sum of + the squared residuals of f(xdata, *popt) - ydata is minimized. 
+ """ with warnings.catch_warnings(): warnings.simplefilter("error", OptimizeWarning) - popt, _ = sp.optimize.curve_fit(f, xdata=t, ydata=y, p0=p0, maxfev=1000) + popt, _ = scipy.optimize.curve_fit(f, xdata=t, ydata=y, p0=p0, maxfev=1000) return popt def fit_voxel(time_s: np.ndarray, pbar, m: np.ndarray) -> np.ndarray: + """ + Fits the Look-Locker relaxation curve for a single voxel's time series. + + Provides initial parameter guesses based on the location of the signal minimum + and attempts to fit the voxel_fit_function using Levenberg-Marquardt optimization. + Returns NaNs if the optimization fails or hits evaluation limits. + + Args: + time_s (np.ndarray): 1D array of trigger times in seconds. + pbar: A tqdm progress bar instance (or None) to update incrementally. + m (np.ndarray): 1D array of signal magnitudes over time for the voxel. + + Returns: + np.ndarray: A 3-element array containing the fitted parameters `[x1, x2, x3]`. + If the fit fails, returns an array of NaNs. + """ if pbar is not None: pbar.update(1) x1 = 1.0 @@ -137,26 +228,76 @@ def fit_voxel(time_s: np.ndarray, pbar, m: np.ndarray) -> np.ndarray: def nan_filter_gaussian(U: np.ndarray, sigma: float, truncate: float = 4.0) -> np.ndarray: + """ + Applies a Gaussian filter to an array containing NaNs, smoothly interpolating + the missing values. + + Standard Gaussian filters pull NaNs into surrounding valid data. This function + creates a normalized convolution mask (WW) to properly handle edges and missing + values, allowing NaN "holes" to be cleanly interpolated based only on valid + surrounding neighbors. + + Args: + U (np.ndarray): Input array potentially containing NaN values. + sigma (float): Standard deviation for the Gaussian kernel. + truncate (float, optional): Truncate the filter at this many standard deviations. Defaults to 4.0. + + Returns: + np.ndarray: Filtered array where original NaN values have been interpolated. 
+ """ V = U.copy() V[np.isnan(U)] = 0 - VV = sp.ndimage.gaussian_filter(V, sigma=sigma, truncate=truncate) + VV = scipy.ndimage.gaussian_filter(V, sigma=sigma, truncate=truncate) W = np.ones_like(U) W[np.isnan(U)] = 0 - WW = sp.ndimage.gaussian_filter(W, sigma=sigma, truncate=truncate) + WW = scipy.ndimage.gaussian_filter(W, sigma=sigma, truncate=truncate) mask = ~((WW == 0) * (VV == 0)) out = np.nan * np.zeros_like(U) out[mask] = VV[mask] / WW[mask] return out -def estimate_se_free_relaxation_time(TRse, TE, ETL): - """Compute free relaxation time following spin echo image from effective echo - time TE and echo train length ETL, corrected for 20 dummy echoes.""" +def estimate_se_free_relaxation_time(TRse: float, TE: float, ETL: int) -> float: + """ + Computes the estimated free relaxation time following a Spin Echo image. + + Corrects the standard Repetition Time (TR) by accounting for the Effective + Echo Time (TE), the Echo Train Length (ETL), and an adjustment for 20 + dummy preparation echoes. + + Args: + TRse (float): Repetition time of the spin echo sequence (in ms). + TE (float): Effective echo time (in ms). + ETL (int): Echo train length. + + Returns: + float: The corrected free relaxation time `TRfree`. + """ return TRse - TE * (1 + 0.5 * (ETL - 1) / (0.5 * (ETL + 1) + 20)) def T1_lookup_table(TRse: float, TI: float, TE: float, ETL: int, T1_low: float, T1_hi: float) -> tuple[np.ndarray, np.ndarray]: + """ + Generates a Fraction/T1 lookup table for mixed T1 mapping interpolations. + + Calculates the theoretical ratio of the Inversion Recovery signal (Sir) to + the Spin Echo signal (Sse) over a highly discretized grid of physiological + T1 relaxation times. + + Args: + TRse (float): Spin-echo repetition time (in ms). + TI (float): Inversion time (in ms). + TE (float): Effective echo time (in ms). + ETL (int): Echo train length. + T1_low (float): Lower bound of the T1 grid (in ms). + T1_hi (float): Upper bound of the T1 grid (in ms). 
+ + Returns: + tuple[np.ndarray, np.ndarray]: A tuple containing: + - fractionCurve (np.ndarray): The theoretical Sir/Sse signal ratios. + - T1_grid (np.ndarray): The corresponding T1 values (in ms). + """ TRfree = estimate_se_free_relaxation_time(TRse, TE, ETL) T1_grid = np.arange(int(T1_low), int(T1_hi + 1)) Sse = 1 - np.exp(-TRfree / T1_grid) @@ -165,22 +306,30 @@ def T1_lookup_table(TRse: float, TI: float, TE: float, ETL: int, T1_low: float, return fractionCurve, T1_grid -def compare_nifti_images(img_path1, img_path2, data_tolerance=0.0): +def compare_nifti_images(img_path1: Path, img_path2: Path, data_tolerance: float = 0.0) -> bool: """ - Compares two NIfTI images for equality of data, affine, and header. + Compares two NIfTI images for equality of data arrays. + + Provides a robust way to check if two NIfTI files contain identical + voxel data, accounting for potential NaNs and floating-point inaccuracies. Args: - img_path1 (str): Path to the first NIfTI file. - img_path2 (str): Path to the second NIfTI file. - data_tolerance (float): Tolerance for data comparison (use 0.0 for exact equality). + img_path1 (Path): Path to the first NIfTI file. + img_path2 (Path): Path to the second NIfTI file. + data_tolerance (float, optional): Absolute tolerance for floating-point + comparisons. Use 0.0 for exact mathematical equality. Defaults to 0.0. Returns: bool: True if images are considered the same, False otherwise. + + Raises: + AssertionError: If files exist but the data deviates beyond `data_tolerance`. + FileNotFoundError: If either of the provided file paths does not exist. 
""" - if not os.path.exists(img_path1): - return False, [f"File not found: {img_path1}"] - if not os.path.exists(img_path2): - return False, [f"File not found: {img_path2}"] + if not img_path1.exists(): + raise FileNotFoundError(f"File not found: {img_path1}") + if not img_path2.exists(): + raise FileNotFoundError(f"File not found: {img_path2}") img1 = nibabel.load(img_path1) img2 = nibabel.load(img_path2) From 676edad886b78abc0d2281117014aaa3e835f2f4 Mon Sep 17 00:00:00 2001 From: Henrik Finsberg Date: Fri, 6 Mar 2026 15:39:01 +0100 Subject: [PATCH 02/29] Add tests for t1_maps/utils.py --- test/test_t1_maps_utils.py | 84 ++++++++++++++++++++++++++++++++++++++ 1 file changed, 84 insertions(+) create mode 100644 test/test_t1_maps_utils.py diff --git a/test/test_t1_maps_utils.py b/test/test_t1_maps_utils.py new file mode 100644 index 0000000..b3edf07 --- /dev/null +++ b/test/test_t1_maps_utils.py @@ -0,0 +1,84 @@ +"""Tests for T1 Map utilities + +Copyright (C) 2026 Jørgen Riseth (jnriseth@gmail.com) +Copyright (C) 2026 Cécile Daversin-Catty (cecile@simula.no) +Copyright (C) 2026 Simula Research Laboratory +""" + +import numpy as np +from mritk.t1_maps.utils import voxel_fit_function, nan_filter_gaussian, estimate_se_free_relaxation_time, T1_lookup_table + + +def test_voxel_fit_function(): + """Test the theoretical Look-Locker recovery curve math.""" + t = np.array([0.0, 1.0, 2.0]) + x1, x2, x3 = 1.0, 1.0, 1.0 + + # Equation: abs(x1 * (1 - (1+x2^2)*exp(-x3^2 * t))) + # With all 1s: abs(1 - 2*exp(-t)) + # t=0 -> abs(1 - 2*1) = 1 + # t=1 -> abs(1 - 2/e) ≈ 0.2642 + # t=2 -> abs(1 - 2/e^2) ≈ 0.7293 + + expected = np.abs(1.0 - 2.0 * np.exp(-t)) + result = voxel_fit_function(t, x1, x2, x3) + + np.testing.assert_array_almost_equal(result, expected) + + +def test_nan_filter_gaussian(): + """Test that NaNs are smoothly interpolated without pulling valid data to zero.""" + # Create a 3x3 uniform array with a NaN hole in the center + U = np.ones((3, 3)) + U[1, 1] = np.nan + + 
filtered = nan_filter_gaussian(U, sigma=1.0) + + # The NaN should be interpolated smoothly back to the surrounding value (1.0) + assert not np.isnan(filtered[1, 1]) + np.testing.assert_array_almost_equal(filtered, np.ones((3, 3))) + + +def test_nan_filter_gaussian_edges(): + """Test the Gaussian filter handles edge NaNs gracefully.""" + U = np.ones((3, 3)) + U[0, 0] = np.nan # Corner + + filtered = nan_filter_gaussian(U, sigma=1.0) + assert not np.isnan(filtered[0, 0]) + np.testing.assert_array_almost_equal(filtered, np.ones((3, 3))) + + +def test_estimate_se_free_relaxation_time(): + """Test the calculation for free relaxation time.""" + TRse = 1000.0 + TE = 10.0 + ETL = 5 + + # Formula check: TRse - TE * (1 + 0.5 * (ETL - 1) / (0.5 * (ETL + 1) + 20)) + # 1000 - 10 * (1 + 0.5 * 4 / (0.5 * 6 + 20)) + # 1000 - 10 * (1 + 2 / 23) + expected = 1000.0 - 10.0 * (1.0 + 2.0 / 23.0) + + result = estimate_se_free_relaxation_time(TRse, TE, ETL) + assert np.isclose(result, expected) + + +def test_t1_lookup_table(): + """Test the fraction/T1 lookup table generation creates arrays of correct shape/bounds.""" + TRse, TI, TE, ETL = 1000.0, 100.0, 10.0, 5 + T1_low, T1_hi = 100.0, 500.0 + + fraction_curve, t1_grid = T1_lookup_table(TRse, TI, TE, ETL, T1_low, T1_hi) + + # Length should be exactly the integer steps from T1_low to T1_hi inclusive + expected_length = int(T1_hi) - int(T1_low) + 1 + + assert len(t1_grid) == expected_length + assert len(fraction_curve) == expected_length + assert t1_grid[0] == T1_low + assert t1_grid[-1] == T1_hi + + # Check that fraction curve monotonically DECREASES for standard physics ranges + # As T1 gets longer, the IR signal becomes more negative relative to the SE signal + assert np.all(np.diff(fraction_curve) < 0) From bdb8248e8446b401528d56e4cfcf0b9d8f90a818 Mon Sep 17 00:00:00 2001 From: Henrik Finsberg Date: Fri, 6 Mar 2026 15:40:14 +0100 Subject: [PATCH 03/29] Add another function for comparing arrays --- src/mritk/t1_maps/utils.py | 35 
++++++++++++++++++++--------------- 1 file changed, 20 insertions(+), 15 deletions(-) diff --git a/src/mritk/t1_maps/utils.py b/src/mritk/t1_maps/utils.py index 5b3269e..7118239 100644 --- a/src/mritk/t1_maps/utils.py +++ b/src/mritk/t1_maps/utils.py @@ -338,22 +338,27 @@ def compare_nifti_images(img_path1: Path, img_path2: Path, data_tolerance: float data1 = img1.get_fdata() data2 = img2.get_fdata() - # Convert NaN to zero (can have NaNs in concentration maps) - data1 = np.nan_to_num(data1, nan=0.0) - data2 = np.nan_to_num(data2, nan=0.0) + return compare_nifti_arrays(data1, data2, data_tolerance) - print("data 1= ", np.unique(data1)) - print("data 1= ", np.unique(data2)) - # Use np.allclose for data comparison with tolerance, which is often needed - # for floating-point data, or np.array_equal for exact comparison. - if data_tolerance > 0: - data_equal = np.allclose(data1, data2, atol=data_tolerance) - else: - data_equal = np.array_equal(data1, data2) +def compare_nifti_arrays(arr1: np.ndarray, arr2: np.ndarray, data_tolerance: float = 0.0) -> bool: + """ + Compares two NIfTI data arrays for equality, accounting for NaNs and tolerance. - deviation = np.mean(np.abs(data1 - data2)) - assert data_equal, f"Data mismatch (mean absolute deviation: {deviation:.4f})" + Args: + arr1 (np.ndarray): The first data array to compare. + arr2 (np.ndarray): The second data array to compare. + data_tolerance (float, optional): Absolute tolerance for floating-point + comparisons. Use 0.0 for exact mathematical equality. Defaults to 0.0. - # Overall result - return data_equal + Returns: + bool: True if arrays are considered the same, False otherwise. 
+ """ + # Convert NaN to zero (can have NaNs in concentration maps) + arr1 = np.nan_to_num(arr1, nan=0.0) + arr2 = np.nan_to_num(arr2, nan=0.0) + + if data_tolerance > 0: + return np.allclose(arr1, arr2, atol=data_tolerance) + else: + return np.array_equal(arr1, arr2) From f01ecc670b55c0b9cf4d0d01c66545071714f8cb Mon Sep 17 00:00:00 2001 From: Henrik Finsberg Date: Fri, 6 Mar 2026 15:43:47 +0100 Subject: [PATCH 04/29] Add tests for t1 to r1 --- test/test_t1_maps.py | 56 ++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 56 insertions(+) create mode 100644 test/test_t1_maps.py diff --git a/test/test_t1_maps.py b/test/test_t1_maps.py new file mode 100644 index 0000000..ca2df78 --- /dev/null +++ b/test/test_t1_maps.py @@ -0,0 +1,56 @@ +import numpy as np +import pytest + +from mritk.data.base import MRIData +from mritk.t1_maps.t1_to_r1 import compute_r1_array, convert_T1_to_R1, T1_to_R1 + + +def test_compute_r1_array_standard(): + """Test basic T1 to R1 mathematical conversion.""" + t1_data = np.array([500.0, 1000.0, 2000.0]) + + # Expected R1 = 1000 / T1 + expected = np.array([2.0, 1.0, 0.5]) + + r1_data = compute_r1_array(t1_data, scale=1000.0) + np.testing.assert_array_almost_equal(r1_data, expected) + + +def test_compute_r1_array_clipping(): + """Test that values outside the [t1_low, t1_high] bounds are safely set to NaN.""" + t1_data = np.array([0.5, 500.0, 6000.0, 10000.0]) + t1_low = 1.0 + t1_high = 5000.0 + + r1_data = compute_r1_array(t1_data, scale=1000.0, t1_low=t1_low, t1_high=t1_high) + + # index 0 (0.5) < 1.0 -> NaN + # index 1 (500) -> 2.0 + # index 2 (6000) > 5000.0 -> NaN + # index 3 (10000) > 5000.0 -> NaN + + assert np.isnan(r1_data[0]) + assert r1_data[1] == 2.0 + assert np.isnan(r1_data[2]) + assert np.isnan(r1_data[3]) + + +def test_convert_t1_to_r1_mridata(): + """Test the conversion properly preserves the MRIData class attributes (affine).""" + t1_data = np.array([[[1000.0, 2000.0]]]) + affine = np.eye(4) + mri = MRIData(data=t1_data, 
affine=affine) + + r1_mri = convert_T1_to_R1(mri, scale=1000.0) + + expected_r1 = np.array([[[1.0, 0.5]]]) + + np.testing.assert_array_almost_equal(r1_mri.data, expected_r1) + np.testing.assert_array_equal(r1_mri.affine, affine) + + +def test_t1_to_r1_invalid_input(): + """Test the wrapper function throws ValueError on an invalid type input.""" + with pytest.raises(ValueError, match="Input should be a Path or MRIData"): + # Explicitly passing a raw string instead of Path/MRIData + T1_to_R1(input_mri="not_a_path_or_mridata") From 2baf833842e8340213830bcab202e7db80330c61 Mon Sep 17 00:00:00 2001 From: Henrik Finsberg Date: Fri, 6 Mar 2026 15:44:19 +0100 Subject: [PATCH 05/29] Add more docstrings in t1_to_r1 --- src/mritk/t1_maps/t1_to_r1.py | 77 ++++++++++++++++++++++++++++------- 1 file changed, 63 insertions(+), 14 deletions(-) diff --git a/src/mritk/t1_maps/t1_to_r1.py b/src/mritk/t1_maps/t1_to_r1.py index bb660cd..14a3c60 100644 --- a/src/mritk/t1_maps/t1_to_r1.py +++ b/src/mritk/t1_maps/t1_to_r1.py @@ -4,6 +4,7 @@ # Copyright (C) 2026 Cécile Daversin-Catty (cecile@simula.no) # Copyright (C) 2026 Simula Research Laboratory + import numpy as np from pathlib import Path from typing import Union @@ -12,27 +13,78 @@ from ..data.io import load_mri_data, save_mri_data +def compute_r1_array( + t1_data: np.ndarray, scale: float = 1000.0, t1_low: float = 1.0, t1_high: float = float("inf") +) -> np.ndarray: + """ + Pure numpy function converting a T1 relaxation time array to an R1 relaxation rate array. + + The relationship is R1 = scale / T1. Values outside the [t1_low, t1_high] + range are set to NaN to filter out noise and non-physiological data. + + Args: + t1_data (np.ndarray): The input array containing T1 relaxation times. + scale (float, optional): Scaling factor, typically 1000 to convert from ms to s^-1. Defaults to 1000. + t1_low (float, optional): Lower bound for valid T1 values. Defaults to 1. + t1_high (float, optional): Upper bound for valid T1 values. 
Defaults to infinity. + + Returns: + np.ndarray: An array of R1 relaxation rates. Invalid/out-of-bound voxels are set to NaN. + """ + valid_t1 = (t1_low <= t1_data) & (t1_data <= t1_high) + r1_data = np.nan * np.zeros_like(t1_data) + + # Calculate R1 only for valid voxels to avoid division by zero or extreme outliers + r1_data[valid_t1] = scale / t1_data[valid_t1] + + return r1_data + + def convert_T1_to_R1( T1map_mri: MRIData, - scale: float = 1000, - t1_low: float = 1, + scale: float = 1000.0, + t1_low: float = 1.0, t1_high: float = float("inf"), ) -> MRIData: - valid_t1 = (t1_low <= T1map_mri.data) * (T1map_mri.data <= t1_high) - R1map = np.nan * np.zeros_like(T1map_mri.data) - R1map[valid_t1] = scale / np.minimum(t1_high, np.maximum(t1_low, T1map_mri.data[valid_t1])) + """ + Converts a T1 map MRIData object into an R1 map MRIData object. - R1map_mri = MRIData(data=R1map, affine=T1map_mri.affine) - return R1map_mri + Args: + T1map_mri (MRIData): The input MRIData object representing the T1 map. + scale (float, optional): Scaling factor. Defaults to 1000. + t1_low (float, optional): Lower bound for valid T1 values. Defaults to 1. + t1_high (float, optional): Upper bound for valid T1 values. Defaults to float('inf'). + + Returns: + MRIData: A new MRIData object containing the R1 map array and the original affine matrix. + """ + r1_data = compute_r1_array(T1map_mri.data, scale, t1_low, t1_high) + return MRIData(data=r1_data, affine=T1map_mri.affine) def T1_to_R1( input_mri: Union[Path, MRIData], output: Path | None = None, - scale: float = 1000, - t1_low: float = 1, + scale: float = 1000.0, + t1_low: float = 1.0, t1_high: float = float("inf"), ) -> MRIData: + """ + High-level wrapper to convert a T1 map to an R1 map, handling file I/O operations. + + Args: + input_mri (Union[Path, MRIData]): A Path to a T1 NIfTI file or an already loaded MRIData object. + output (Path | None, optional): Path to save the resulting R1 map to disk. Defaults to None. 
+ scale (float, optional): Scaling factor (e.g., 1000 for ms -> s^-1). Defaults to 1000. + t1_low (float, optional): Lower bound for valid T1 values. Defaults to 1. + t1_high (float, optional): Upper bound for valid T1 values. Defaults to float('inf'). + + Returns: + MRIData: The computed R1 map as an MRIData object. + + Raises: + ValueError: If input_mri is neither a Path nor an MRIData object. + """ if isinstance(input_mri, Path): T1map_mri = load_mri_data(input_mri, dtype=np.single) elif isinstance(input_mri, MRIData): @@ -40,12 +92,9 @@ def T1_to_R1( else: raise ValueError(f"Input should be a Path or MRIData, got {type(input_mri)}") - valid_t1 = (t1_low <= T1map_mri.data) * (T1map_mri.data <= t1_high) - R1map = np.nan * np.zeros_like(T1map_mri.data) - R1map[valid_t1] = scale / np.minimum(t1_high, np.maximum(t1_low, T1map_mri.data[valid_t1])) - - R1map_mri = MRIData(data=R1map, affine=T1map_mri.affine) + R1map_mri = convert_T1_to_R1(T1map_mri, scale, t1_low, t1_high) if output is not None: save_mri_data(R1map_mri, output, dtype=np.single) + return R1map_mri From 5c47ddc802778817c74c056d630d084d7ba4013d Mon Sep 17 00:00:00 2001 From: Henrik Finsberg Date: Fri, 6 Mar 2026 16:48:55 +0100 Subject: [PATCH 06/29] Fix failing test --- .github/workflows/setup-data.yml | 2 +- src/mritk/datasets.py | 2 +- src/mritk/t1_maps/t1_maps.py | 271 +++++++++++++++++++++---------- test/test_mri_t1_maps.py | 2 +- test/test_t1_maps.py | 94 +++++++++++ 5 files changed, 282 insertions(+), 89 deletions(-) diff --git a/.github/workflows/setup-data.yml b/.github/workflows/setup-data.yml index 33dcf74..279e2ae 100644 --- a/.github/workflows/setup-data.yml +++ b/.github/workflows/setup-data.yml @@ -22,7 +22,7 @@ jobs: path: data/ # The folder you want to cache # The key determines if we have a match. # Change 'v1' to 'v2' manually to force a re-download in the future. - key: test-data-v4 + key: test-data-v5 # 2. 
DOWNLOAD ONLY IF CACHE MISS diff --git a/src/mritk/datasets.py b/src/mritk/datasets.py index bf7a23e..c0079e4 100644 --- a/src/mritk/datasets.py +++ b/src/mritk/datasets.py @@ -35,7 +35,7 @@ def get_datasets() -> dict[str, Dataset]: name="Test Data", description="A small test dataset for testing functionality (based on the Gonzo dataset).", license="CC-BY-4.0", - links={"mritk-test-data.zip": download_link_google_drive("1CSj3CHd4ztcU4Aqdlw9K2OWjPi5u75bd")}, + links={"mritk-test-data.zip": download_link_google_drive("1YVXoV1UhmpkMIeaNKeS9eqCsdMULwKBO")}, ), "gonzo": Dataset( name="The Gonzo Dataset", diff --git a/src/mritk/t1_maps/t1_maps.py b/src/mritk/t1_maps/t1_maps.py index a6e4184..65711ea 100644 --- a/src/mritk/t1_maps/t1_maps.py +++ b/src/mritk/t1_maps/t1_maps.py @@ -5,15 +5,17 @@ Copyright (C) 2026 Simula Research Laboratory """ +import json +import logging import numpy as np +import scipy +import scipy.interpolate +import skimage import tqdm +import nibabel from functools import partial -import skimage from typing import Optional from pathlib import Path -import nibabel -import json -import scipy from ..data.base import MRIData from ..data.io import load_mri_data, save_mri_data @@ -25,43 +27,111 @@ T1_lookup_table, ) +logger = logging.getLogger(__name__) + + +def compute_looklocker_t1_array(data: np.ndarray, time_s: np.ndarray, t1_roof: float = 10000.0) -> np.ndarray: + """ + Computes T1 relaxation maps from Look-Locker data using Levenberg-Marquardt fitting. + + Args: + data (np.ndarray): 4D numpy array (x, y, z, time) of Look-Locker MRI signals. + time_s (np.ndarray): 1D array of trigger times in seconds. + t1_roof (float, optional): Maximum allowed T1 value (ms) to cap spurious fits. Defaults to 10000.0. + + Returns: + np.ndarray: 3D numpy array representing the T1 map in milliseconds. Voxels + that fail to fit or fall outside the mask are set to NaN. 
+ """ + assert len(data.shape) >= 4, f"Data should be at least 4-dimensional, got shape {data.shape}" + mask = mri_facemask(data[..., 0]) + valid_voxels = (np.nanmax(data, axis=-1) > 0) & mask + + data_normalized = np.nan * np.zeros_like(data) + # Prevent divide by zero warnings dynamically + max_vals = np.nanmax(data, axis=-1)[valid_voxels, np.newaxis] + data_normalized[valid_voxels] = data[valid_voxels] / max_vals -def looklocker_t1map(looklocker_input: Path, timestamps: Path, output: Path | None = None) -> MRIData: - LL_mri = load_mri_data(looklocker_input, dtype=np.single) - D = LL_mri.data - affine = LL_mri.affine - t_data = np.loadtxt(timestamps) / 1000 - - T1_ROOF = 10000 - assert len(D.shape) >= 4, f"data should be 4-dimensional, got data with shape {D.shape}" - mask = mri_facemask(D[..., 0]) - valid_voxels = (np.nanmax(D, axis=-1) > 0) * mask - - D_normalized = np.nan * np.zeros_like(D) - D_normalized[valid_voxels] = D[valid_voxels] / np.nanmax(D, axis=-1)[valid_voxels, np.newaxis] voxel_mask = np.array(np.where(valid_voxels)).T - Dmasked = np.array([D_normalized[i, j, k] for (i, j, k) in voxel_mask]) + d_masked = np.array([data_normalized[i, j, k] for (i, j, k) in voxel_mask]) - with tqdm.tqdm(total=len(Dmasked)) as pbar: - voxel_fitter = partial(fit_voxel, t_data, pbar) + with tqdm.tqdm(total=len(d_masked), desc="Fitting Look-Locker Voxels") as pbar: + voxel_fitter = partial(fit_voxel, time_s, pbar) vfunc = np.vectorize(voxel_fitter, signature="(n) -> (3)") - fitted_coefficients = vfunc(Dmasked) - - _, x2, x3 = ( - fitted_coefficients[:, 0], - fitted_coefficients[:, 1], - fitted_coefficients[:, 2], - ) - - I, J, K = voxel_mask.T - T1map = np.nan * np.zeros_like(D[..., 0]) - T1map[I, J, K] = (x2 / x3) ** 2 * 1000.0 # convert to ms - T1map = np.minimum(T1map, T1_ROOF) - T1map_mri = MRIData(T1map.astype(np.single), affine) + fitted_coefficients = vfunc(d_masked) + + x2 = fitted_coefficients[:, 1] + x3 = fitted_coefficients[:, 2] + + i, j, k = voxel_mask.T + 
t1map = np.nan * np.zeros_like(data[..., 0]) + + # Calculate T1 in ms. Formula: T1 = (x2 / x3)^2 * 1000 + t1map[i, j, k] = (x2 / x3) ** 2 * 1000.0 + + return np.minimum(t1map, t1_roof) + + +def looklocker_t1map(looklocker_input: Path, timestamps: Path, output: Path | None = None) -> MRIData: + """I/O wrapper to generate a Look-Locker T1 map from a NIfTI file.""" + ll_mri = load_mri_data(looklocker_input, dtype=np.single) + # Convert timestamps from milliseconds to seconds + time_s = np.loadtxt(timestamps) / 1000.0 + + t1map_array = compute_looklocker_t1_array(ll_mri.data, time_s) + t1map_mri = MRIData(t1map_array.astype(np.single), ll_mri.affine) + if output is not None: - save_mri_data(T1map_mri, output, dtype=np.single) + save_mri_data(t1map_mri, output, dtype=np.single) + + return t1map_mri + - return T1map_mri +def create_largest_island_mask(data: np.ndarray, radius: int = 10, erode_dilate_factor: float = 1.3) -> np.ndarray: + """ + Creates a binary mask isolating the largest contiguous non-NaN region in an array. + + Args: + data (np.ndarray): The 3D input data containing NaNs and valid values. + radius (int, optional): The radius for morphological dilation. Defaults to 10. + erode_dilate_factor (float, optional): Multiplier for the erosion radius + relative to the dilation radius. Defaults to 1.3. + + Returns: + np.ndarray: A boolean 3D mask of the largest contiguous island. 
+ """ + mask = skimage.measure.label(np.isfinite(data)) + regions = skimage.measure.regionprops(mask) + if not regions: + return np.zeros_like(data, dtype=bool) + + regions.sort(key=lambda x: x.num_pixels, reverse=True) + mask = mask == regions[0].label + + skimage.morphology.remove_small_holes(mask, max_size=10 ** (mask.ndim), connectivity=2, out=mask) + skimage.morphology.dilation(mask, skimage.morphology.ball(radius), out=mask) + skimage.morphology.erosion(mask, skimage.morphology.ball(erode_dilate_factor * radius), out=mask) + return mask + + +def remove_outliers(data: np.ndarray, mask: np.ndarray, t1_low: float, t1_high: float) -> np.ndarray: + """ + Applies a mask and removes values outside the physiological T1 range. + + Args: + data (np.ndarray): 3D array of T1 values. + mask (np.ndarray): 3D boolean mask of the brain/valid area. + t1_low (float): Lower physiological limit. + t1_high (float): Upper physiological limit. + + Returns: + np.ndarray: A cleaned 3D array with outliers and unmasked regions set to NaN. + """ + processed = data.copy() + processed[~mask] = np.nan + outliers = (processed < t1_low) | (processed > t1_high) + processed[outliers] = np.nan + return processed def looklocker_t1map_postprocessing( @@ -73,61 +143,69 @@ def looklocker_t1map_postprocessing( mask: Optional[np.ndarray] = None, output: Path | None = None, ) -> MRIData: - T1map_mri = load_mri_data(T1map, dtype=np.single) - T1map_data = T1map_mri.data.copy() + """I/O wrapper for masking, outlier removal, and NaN filling on a T1 map.""" + t1map_mri = load_mri_data(T1map, dtype=np.single) + t1map_data = t1map_mri.data.copy() + if mask is None: - # Create mask for largest island. 
- mask = skimage.measure.label(np.isfinite(T1map_data)) - regions = skimage.measure.regionprops(mask) - regions.sort(key=lambda x: x.num_pixels, reverse=True) - mask = mask == regions[0].label - skimage.morphology.remove_small_holes(mask, max_size=10 ** (mask.ndim), connectivity=2, out=mask) - skimage.morphology.dilation(mask, skimage.morphology.ball(radius), out=mask) - skimage.morphology.erosion(mask, skimage.morphology.ball(erode_dilate_factor * radius), out=mask) - - # Remove non-zero artifacts outside of the mask. - surface_vox = np.isfinite(T1map_data) * (~mask) - print(f"Removing {surface_vox.sum()} voxels outside of the head mask") - T1map_data[~mask] = np.nan - - # Remove outliers within the mask. - outliers = np.logical_or(T1map_data < T1_low, T1_high < T1map_data) - print("Removing", outliers.sum(), f"voxels outside the range ({T1_low}, {T1_high}).") - T1map_data[outliers] = np.nan - if np.isfinite(T1map_data).sum() / T1map_data.size < 0.01: + mask = create_largest_island_mask(t1map_data, radius, erode_dilate_factor) + + t1map_data = remove_outliers(t1map_data, mask, T1_low, T1_high) + + if np.isfinite(t1map_data).sum() / t1map_data.size < 0.01: raise RuntimeError("After outlier removal, less than 1% of the image is left. 
Check image units.") - # Fill internallly missing values - fill_mask = np.isnan(T1map_data) * mask + # Fill internal missing values iteratively using a Gaussian filter + fill_mask = np.isnan(t1map_data) & mask while fill_mask.sum() > 0: - print(f"Filling in {fill_mask.sum()} voxels within the mask.") - T1map_data[fill_mask] = nan_filter_gaussian(T1map_data, 1.0)[fill_mask] - fill_mask = np.isnan(T1map_data) * mask + logger.info(f"Filling in {fill_mask.sum()} voxels within the mask.") + t1map_data[fill_mask] = nan_filter_gaussian(t1map_data, 1.0)[fill_mask] + fill_mask = np.isnan(t1map_data) & mask - processed_T1map = MRIData(T1map_data, T1map_mri.affine) + processed_T1map = MRIData(t1map_data, t1map_mri.affine) if output is not None: save_mri_data(processed_T1map, output, dtype=np.single) return processed_T1map +def compute_mixed_t1_array(se_data: np.ndarray, ir_data: np.ndarray, meta: dict, t1_low: float, t1_high: float) -> np.ndarray: + """ + Computes a Mixed T1 array from Spin-Echo and Inversion-Recovery volumes using a lookup table. + + Args: + se_data (np.ndarray): 3D numpy array of the Spin-Echo modulus data. + ir_data (np.ndarray): 3D numpy array of the Inversion-Recovery corrected real data. + meta (dict): Dictionary containing sequence parameters ('TR_SE', 'TI', 'TE', 'ETL'). + t1_low (float): Lower bound for T1 generation grid. + t1_high (float): Upper bound for T1 generation grid. + + Returns: + np.ndarray: Computed T1 map as a 3D float32 array. 
+ """ + nonzero_mask = se_data != 0 + f_data = np.nan * np.zeros_like(ir_data) + f_data[nonzero_mask] = ir_data[nonzero_mask] / se_data[nonzero_mask] + + tr_se, ti, te, etl = meta["TR_SE"], meta["TI"], meta["TE"], meta["ETL"] + f_curve, t1_grid = T1_lookup_table(tr_se, ti, te, etl, t1_low, t1_high) + + interpolator = scipy.interpolate.interp1d(f_curve, t1_grid, kind="nearest", bounds_error=False, fill_value=np.nan) + return interpolator(f_data).astype(np.single) + + def mixed_t1map( SE_nii_path: Path, IR_nii_path: Path, meta_path: Path, T1_low: float, T1_high: float, output: Path | None = None ) -> nibabel.nifti1.Nifti1Image: - SE = load_mri_data(SE_nii_path, dtype=np.single) - IR = load_mri_data(IR_nii_path, dtype=np.single) + """I/O wrapper to generate a T1 map from SE and IR acquisitions.""" + se_mri = load_mri_data(SE_nii_path, dtype=np.single) + ir_mri = load_mri_data(IR_nii_path, dtype=np.single) with open(meta_path, "r") as f: meta = json.load(f) - nonzero_mask = SE.data != 0 - F_data = np.nan * np.zeros_like(IR.data) - F_data[nonzero_mask] = IR.data[nonzero_mask] / SE.data[nonzero_mask] + t1_volume = compute_mixed_t1_array(se_mri.data, ir_mri.data, meta, T1_low, T1_high) - TR_se, TI, TE, ETL = meta["TR_SE"], meta["TI"], meta["TE"], meta["ETL"] - F, T1_grid = T1_lookup_table(TR_se, TI, TE, ETL, T1_low, T1_high) - interpolator = scipy.interpolate.interp1d(F, T1_grid, kind="nearest", bounds_error=False, fill_value=np.nan) - T1_volume = interpolator(F_data).astype(np.single) - nii = nibabel.nifti1.Nifti1Image(T1_volume, IR.affine) + nii = nibabel.nifti1.Nifti1Image(t1_volume, ir_mri.affine) nii.set_sform(nii.affine, "scanner") nii.set_qform(nii.affine, "scanner") @@ -138,38 +216,59 @@ def mixed_t1map( def mixed_t1map_postprocessing(SE_nii_path: Path, T1_path: Path, output: Path | None = None) -> nibabel.nifti1.Nifti1Image: - T1map_nii = nibabel.nifti1.load(T1_path) + """I/O wrapper to mask out non-CSF areas from a Mixed T1 map based on SE signal.""" + t1map_nii 
= nibabel.nifti1.load(T1_path) + se_mri = load_mri_data(SE_nii_path, np.single) - SE_mri = load_mri_data(SE_nii_path, np.single) - mask = create_csf_mask(SE_mri.data, use_li=True) + mask = create_csf_mask(se_mri.data, use_li=True) mask = skimage.morphology.erosion(mask) - masked_T1map = T1map_nii.get_fdata(dtype=np.single) - masked_T1map[~mask] = np.nan - masked_T1map_nii = nibabel.nifti1.Nifti1Image(masked_T1map, T1map_nii.affine, T1map_nii.header) + masked_t1map = t1map_nii.get_fdata(dtype=np.single) + masked_t1map[~mask] = np.nan + masked_t1map_nii = nibabel.nifti1.Nifti1Image(masked_t1map, t1map_nii.affine, t1map_nii.header) if output is not None: - nibabel.nifti1.save(masked_T1map_nii, output) + nibabel.nifti1.save(masked_t1map_nii, output) + + return masked_t1map_nii + - return masked_T1map_nii +def compute_hybrid_t1_array(ll_data: np.ndarray, mixed_data: np.ndarray, mask: np.ndarray, threshold: float) -> np.ndarray: + """ + Creates a hybrid T1 array by selectively substituting Look-Locker voxels with Mixed voxels. + + Substitution occurs only if BOTH the Look-Locker AND Mixed T1 values exceed the threshold, + AND the voxel falls within the provided CSF mask. + + Args: + ll_data (np.ndarray): 3D numpy array of Look-Locker T1 values. + mixed_data (np.ndarray): 3D numpy array of Mixed T1 values. + mask (np.ndarray): 3D boolean mask (typically eroded CSF). + threshold (float): T1 threshold value (in ms). + + Returns: + np.ndarray: Hybrid 3D T1 array. 
+ """ + hybrid = ll_data.copy() + newmask = mask & (ll_data > threshold) & (mixed_data > threshold) + hybrid[newmask] = mixed_data[newmask] + return hybrid def hybrid_t1map( LL_path: Path, mixed_path: Path, csf_mask_path: Path, threshold: float, erode: int = 0, output: Path | None = None ) -> nibabel.nifti1.Nifti1Image: + """I/O wrapper for merging a Look-Locker and a Mixed T1 map.""" mixed_mri = nibabel.nifti1.load(mixed_path) - mixed = mixed_mri.get_fdata() - ll_mri = nibabel.nifti1.load(LL_path) - ll = ll_mri.get_fdata() + csf_mask_mri = nibabel.nifti1.load(csf_mask_path) csf_mask = csf_mask_mri.get_fdata().astype(bool) + if erode > 0: csf_mask = skimage.morphology.erosion(csf_mask, skimage.morphology.ball(erode)) - hybrid = ll - newmask = csf_mask * (ll > threshold) * (mixed > threshold) - hybrid[newmask] = mixed[newmask] + hybrid = compute_hybrid_t1_array(ll_mri.get_fdata(), mixed_mri.get_fdata(), csf_mask, threshold) hybrid_nii = nibabel.nifti1.Nifti1Image(hybrid, affine=ll_mri.affine, header=ll_mri.header) if output is not None: diff --git a/test/test_mri_t1_maps.py b/test/test_mri_t1_maps.py index d40f934..202a3bb 100644 --- a/test/test_mri_t1_maps.py +++ b/test/test_mri_t1_maps.py @@ -42,7 +42,7 @@ def test_mixed_t1map(tmp_path, mri_data_dir: Path): IR_path = mri_data_dir / "mri-dataset/mri_dataset/sub-01" / "ses-01/mixed/sub-01_ses-01_acq-mixed_IR-corrected-real.nii.gz" meta_path = mri_data_dir / "mri-dataset/mri_dataset/sub-01" / "ses-01/mixed/sub-01_ses-01_acq-mixed_meta.json" - ref_output = mri_data_dir / "mri-dataset/mri_dataset/derivatives/sub-01" / "ses-01/sub-01_ses-01_acq-mixed_T1map.nii.gz" + ref_output = mri_data_dir / "mri-processed/mri_dataset/derivatives/sub-01" / "ses-01/sub-01_ses-01_acq-mixed_T1map.nii.gz" test_output_raw = tmp_path / "output_acq-mixed_T1map_raw.nii.gz" test_output = tmp_path / "output_acq-mixed_T1map.nii.gz" diff --git a/test/test_t1_maps.py b/test/test_t1_maps.py index ca2df78..148c6cc 100644 --- a/test/test_t1_maps.py 
+++ b/test/test_t1_maps.py @@ -1,6 +1,7 @@ import numpy as np import pytest +from mritk.t1_maps.t1_maps import remove_outliers, compute_mixed_t1_array, compute_hybrid_t1_array, create_largest_island_mask from mritk.data.base import MRIData from mritk.t1_maps.t1_to_r1 import compute_r1_array, convert_T1_to_R1, T1_to_R1 @@ -54,3 +55,96 @@ def test_t1_to_r1_invalid_input(): with pytest.raises(ValueError, match="Input should be a Path or MRIData"): # Explicitly passing a raw string instead of Path/MRIData T1_to_R1(input_mri="not_a_path_or_mridata") + + +def test_remove_outliers(): + """Test that data is appropriately masked and clipped to physiological T1 bounds.""" + # 2x2x1 Mock Data + data = np.array([[[10.0], [500.0]], [[1500.0], [8000.0]]]) + + # Mask out the first element + mask = np.array([[[False], [True]], [[True], [True]]]) + + t1_low = 100.0 + t1_high = 2000.0 + + result = remove_outliers(data, mask, t1_low, t1_high) + + # Expected: + # [0,0,0] -> NaN (masked out) + # [0,1,0] -> 500.0 (valid) + # [1,0,0] -> 1500.0 (valid) + # [1,1,0] -> NaN (exceeds t1_high) + + assert np.isnan(result[0, 0, 0]) + assert result[0, 1, 0] == 500.0 + assert result[1, 0, 0] == 1500.0 + assert np.isnan(result[1, 1, 0]) + + +def test_compute_mixed_t1_array(): + """Test generating a T1 map from SE and IR modalities via interpolation.""" + se_data = np.array([[[1000.0, 1000.0]]]) + # IR signals at varying levels + ir_data = np.array([[[-500.0, 500.0]]]) + + meta = {"TR_SE": 1000.0, "TI": 100.0, "TE": 10.0, "ETL": 5} + + t1_low = 100.0 + t1_high = 3000.0 + + t1_volume = compute_mixed_t1_array(se_data, ir_data, meta, t1_low, t1_high) + + # Should output same shape + assert t1_volume.shape == (1, 1, 2) + # T1 maps should not contain negative values in valid tissue + assert np.all(t1_volume[~np.isnan(t1_volume)] > 0) + + +def test_compute_hybrid_t1_array(): + """Test hybrid array logic merges LL and Mixed appropriately based on threshold and mask.""" + # 1D array for simplicity (4 
voxels)
+    ll_data = np.array([1000.0, 2000.0, 1000.0, 2000.0])
+    mixed_data = np.array([500.0, 500.0, 3000.0, 3000.0])
+
+    # Voxel 3 is unmasked
+    mask = np.array([True, True, True, False])
+    threshold = 1500.0
+
+    hybrid = compute_hybrid_t1_array(ll_data, mixed_data, mask, threshold)
+
+    # Evaluation: Substitution happens ONLY if BOTH > threshold AND inside mask.
+    # Voxel 0: 1000 < 1500 -> Keep LL (1000.0)
+    # Voxel 1: Mixed 500 < 1500 -> Keep LL (2000.0)
+    # Voxel 2: LL (1000) < 1500 -> Keep LL (1000.0)
+    # First, verify that no substitution occurs with the values as given:
+    assert hybrid[0] == 1000.0
+    assert hybrid[1] == 2000.0
+    assert hybrid[2] == 1000.0
+    assert hybrid[3] == 2000.0  # Unmasked, so keep LL
+
+    # Now explicitly trigger the merge condition
+    ll_data[2] = 2000.0
+    hybrid2 = compute_hybrid_t1_array(ll_data, mixed_data, mask, threshold)
+    # Voxel 2: LL(2000) > 1500 AND Mixed(3000) > 1500 AND Mask=True -> Merge!
+    assert hybrid2[2] == 3000.0
+
+
+def test_create_largest_island_mask():
+    """Test morphology logic identifies the primary body of data and ignores disconnected noise."""
+    # Create a 15x15x15 empty space (3375 voxels, which is > 1000 so the background isn't
+    # accidentally filled in by remove_small_holes)
+    data = np.full((15, 15, 15), np.nan)
+
+    # Create a large block in the center (Island 1)
+    data[5:10, 5:10, 5:10] = 100.0
+
+    # Create a tiny disconnected speck in the corner (Island 2)
+    data[0, 0, 0] = 50.0
+
+    # Run with small morphology radii
+    mask = create_largest_island_mask(data, radius=1, erode_dilate_factor=1.0)
+
+    # Speck should be dropped, major block should be True
+    assert mask[0, 0, 0] == np.False_
+    assert mask[7, 7, 7] == np.True_

From 8921a4e5d5722af26071c65aea49f11742cb88da Mon Sep 17 00:00:00 2001
From: Henrik Finsberg
Date: Fri, 6 Mar 2026 16:49:51 +0100
Subject: [PATCH 07/29] Add script for generating test data from gonzo

---
 test/create_test_data.py | 46 ++++++++++++++++++++++++++++++++++++++++++++++++
 1 file 
changed, 46 insertions(+) create mode 100644 test/create_test_data.py diff --git a/test/create_test_data.py b/test/create_test_data.py new file mode 100644 index 0000000..c95db8a --- /dev/null +++ b/test/create_test_data.py @@ -0,0 +1,46 @@ +from pathlib import Path +import zipfile + + +def main(): + outdir = Path("mritk-test-data") + inputdir = Path("gonzo") # Assumes you have the Gonzo dataset downloaded here + files = [ + "timetable/timetable.tsv", + "mri-dataset/mri_dataset/sub-01/ses-01/mixed/sub-01_ses-01_acq-mixed_SE-modulus.nii.gz", + "mri-dataset/mri_dataset/sub-01/ses-01/mixed/sub-01_ses-01_acq-mixed_IR-corrected-real.nii.gz", + "mri-dataset/mri_dataset/sub-01/ses-01/mixed/sub-01_ses-01_acq-mixed_meta.json", + "mri-processed/mri_processed_data/sub-01/registered/sub-01_ses-01_acq-mixed_T1map_registered.nii.gz", + "mri-processed/mri_processed_data/sub-01/segmentations/sub-01_seg-csf_binary.nii.gz", + "mri-processed/mri_processed_data/sub-01/T1maps/sub-01_ses-01_T1map_hybrid.nii.gz", + "mri-processed/mri_processed_data/sub-01/T1maps/sub-01_ses-02_T1map_hybrid.nii.gz", + "mri-processed/mri_processed_data/sub-01/concentrations/sub-01_ses-01_concentration.nii.gz", + "mri-processed/mri_processed_data/sub-01/concentrations/sub-01_ses-02_concentration.nii.gz", + "mri-processed/mri_processed_data/sub-01/segmentations/sub-01_seg-intracranial_binary.nii.gz", + "mri-processed/mri_dataset/derivatives/sub-01/ses-01/sub-01_ses-01_acq-mixed_T1map.nii.gz", + "mri-processed/mri_processed_data/sub-01/segmentations/sub-01_seg-aparc+aseg_refined.nii.gz", + "mri-processed/mri_processed_data/sub-01/registered/sub-01_ses-01_acq-looklocker_T1map_registered.nii.gz", + ] + + for file in files: + src = inputdir / file + dst = outdir / file + + if not dst.parent.exists(): + dst.parent.mkdir(parents=True, exist_ok=True) + print(f"Copying {src} to {dst}") + dst.write_bytes(src.read_bytes()) + + # Zip outdir into mritk-test-data.zip + print("Creating zip archive...") + zip_path = 
outdir.with_suffix(".zip") + if zip_path.exists(): + zip_path.unlink() + + with zipfile.ZipFile(zip_path, "w", zipfile.ZIP_DEFLATED) as zipf: + for file in outdir.rglob("*"): + zipf.write(file, file.relative_to(outdir)) + + +if __name__ == "__main__": + main() From ba4772b74670949b31bbec0e4160dc1da3961141 Mon Sep 17 00:00:00 2001 From: Henrik Finsberg Date: Fri, 6 Mar 2026 17:22:00 +0100 Subject: [PATCH 08/29] Fix deprecated argument in scikit-image --- src/mritk/t1_maps/t1_maps.py | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/src/mritk/t1_maps/t1_maps.py b/src/mritk/t1_maps/t1_maps.py index 65711ea..32ed25d 100644 --- a/src/mritk/t1_maps/t1_maps.py +++ b/src/mritk/t1_maps/t1_maps.py @@ -107,8 +107,11 @@ def create_largest_island_mask(data: np.ndarray, radius: int = 10, erode_dilate_ regions.sort(key=lambda x: x.num_pixels, reverse=True) mask = mask == regions[0].label - - skimage.morphology.remove_small_holes(mask, max_size=10 ** (mask.ndim), connectivity=2, out=mask) + try: + skimage.morphology.remove_small_holes(mask, max_size=10 ** (mask.ndim), connectivity=2, out=mask) + except TypeError: + # Older versions of skimage use area_threshold instead of max_size + skimage.morphology.remove_small_holes(mask, area_threshold=10 ** (mask.ndim), connectivity=2, out=mask) skimage.morphology.dilation(mask, skimage.morphology.ball(radius), out=mask) skimage.morphology.erosion(mask, skimage.morphology.ball(erode_dilate_factor * radius), out=mask) return mask From 35e429df69ecff485ccfcad84a3b38d4cda5a571 Mon Sep 17 00:00:00 2001 From: Henrik Finsberg Date: Fri, 6 Mar 2026 19:00:32 +0100 Subject: [PATCH 09/29] Add tests for dicom --- src/mritk/t1_maps/dicom_to_nifti.py | 206 +++++++++++++++++++++------- test/test_dicom_to_nifti.py | 100 ++++++++++++++ 2 files changed, 253 insertions(+), 53 deletions(-) create mode 100644 test/test_dicom_to_nifti.py diff --git a/src/mritk/t1_maps/dicom_to_nifti.py b/src/mritk/t1_maps/dicom_to_nifti.py index 
66782a0..21ea85e 100644 --- a/src/mritk/t1_maps/dicom_to_nifti.py +++ b/src/mritk/t1_maps/dicom_to_nifti.py @@ -1,110 +1,214 @@ -"""MRI DICOM to NIfTI conversion Module +# MRI DICOM to NIfTI conversion Module + +# Copyright (C) 2026 Jørgen Riseth (jnriseth@gmail.com) +# Copyright (C) 2026 Cécile Daversin-Catty (cecile@simula.no) +# Copyright (C) 2026 Simula Research Laboratory -Copyright (C) 2026 Jørgen Riseth (jnriseth@gmail.com) -Copyright (C) 2026 Cécile Daversin-Catty (cecile@simula.no) -Copyright (C) 2026 Simula Research Laboratory -""" import shutil import subprocess import tempfile import logging +import json from pathlib import Path from typing import Optional -import nibabel -import json +import nibabel import numpy as np from ..data.io import load_mri_data, save_mri_data from ..t1_maps.utils import VOLUME_LABELS, read_dicom_trigger_times from .utils import extract_single_volume +logger = logging.getLogger(__name__) + + +def _extract_frame_metadata(frame_fg) -> dict: + """ + Extracts core physical parameters (TR, TE, TI, ETL) from a DICOM frame functional group. + + Args: + frame_fg: The PerFrameFunctionalGroupsSequence element for a specific frame. + + Returns: + dict: A dictionary containing available MR timing parameters. 
+ """ + descrip = { + "TR": float(frame_fg.MRTimingAndRelatedParametersSequence[0].RepetitionTime), + "TE": float(frame_fg.MREchoSequence[0].EffectiveEchoTime), + } + + if hasattr(frame_fg.MRModifierSequence[0], "InversionTimes"): + descrip["TI"] = frame_fg.MRModifierSequence[0].InversionTimes[0] + + if hasattr(frame_fg.MRTimingAndRelatedParametersSequence[0], "EchoTrainLength"): + descrip["ETL"] = frame_fg.MRTimingAndRelatedParametersSequence[0].EchoTrainLength + + return descrip + + +import shlex +import logging + +logger = logging.getLogger(__name__) + + +def run_dcm2niix(input_path: Path, output_dir: Path, form: str, extra_args: str = "", check: bool = True): + """ + Utility wrapper to execute the dcm2niix command-line tool securely. + + Args: + input_path (Path): Path to the input DICOM file/folder. + output_dir (Path): Path to the target output directory. + form (str): Output filename format string. + extra_args (str, optional): Additional command line arguments. Defaults to "". + check (bool, optional): If True, raises an exception on failure. Defaults to True. + + Raises: + RuntimeError: If the dcm2niix executable is not found in the system PATH. + subprocess.CalledProcessError: If the command fails and `check` is True. + """ + # 1. Locate the executable securely + executable = shutil.which("dcm2niix") + if executable is None: + raise RuntimeError( + "The 'dcm2niix' executable was not found. Please ensure it is installed and available in your system PATH." + ) + + # 2. Build the arguments list safely + args = [executable, "-f", form] + + # Safely parse the extra string arguments into a list + if extra_args: + args.extend(shlex.split(extra_args)) -def extract_mixed_dicom(dcmpath: Path, subvolumes: list[str]): - logger = logging.getLogger(__name__) + args.extend(["-o", str(output_dir), str(input_path)]) + + # Reconstruct the command string purely for logging purposes + cmd_str = shlex.join(args) + logger.debug(f"Executing: {cmd_str}") + + try: + # 3. 
Execute without shell=True for better security and stability + subprocess.run(args, check=check, capture_output=True, text=True) + except subprocess.CalledProcessError as e: + logger.error(f"dcm2niix execution failed.\nCommand: {cmd_str}\nError: {e.stderr}") + if check: + raise + + +def extract_mixed_dicom(dcmpath: Path, subvolumes: list[str]) -> list[dict]: + """ + Reads a Mixed DICOM file and splits it into independent NIfTI subvolumes. + + Args: + dcmpath (Path): Path to the input DICOM file. + subvolumes (list[str]): List of volume labels mapping to the slices in the DICOM. + + Returns: + list[dict]: A list containing dictionaries with a generated 'nifti' image + and a 'descrip' metadata dictionary for each requested subvolume. + """ import pydicom - dcm = pydicom.dcmread(dcmpath) + dcm = pydicom.dcmread(str(dcmpath)) frames_total = int(dcm.NumberOfFrames) - frames_per_volume = dcm[0x2001, 0x1018].value # [Number of Slices MR] + + # [0x2001, 0x1018] is a private Philips tag representing 'Number of Slices MR' + frames_per_volume = dcm[0x2001, 0x1018].value num_volumes = frames_total // frames_per_volume - assert num_volumes * frames_per_volume == frames_total, "Subvolume dimensions do not match" + assert num_volumes * frames_per_volume == frames_total, "Subvolume dimensions do not evenly divide the total frames." - D = dcm.pixel_array.astype(np.single) + pixel_data = dcm.pixel_array.astype(np.single) frame_fg_sequence = dcm.PerFrameFunctionalGroupsSequence vols_out = [] for volname in subvolumes: vol_idx = VOLUME_LABELS.index(volname) - # Find volume slices representing current subvolume + # Find volume slices representing the current subvolume subvol_idx_start = vol_idx * frames_per_volume subvol_idx_end = (vol_idx + 1) * frames_per_volume frame_fg = frame_fg_sequence[subvol_idx_start] + logger.info( - ( - f"Converting volume {vol_idx + 1}/{len(VOLUME_LABELS)}: {volname} between indices" - + f"{subvol_idx_start, subvol_idx_end} / {frames_total}." 
- ) + f"Converting volume {vol_idx + 1}/{len(VOLUME_LABELS)}: '{volname}' " + f"between indices {subvol_idx_start}-{subvol_idx_end} out of {frames_total}." ) - mri = extract_single_volume(D[subvol_idx_start:subvol_idx_end], frame_fg) + + mri = extract_single_volume(pixel_data[subvol_idx_start:subvol_idx_end], frame_fg) nii_oriented = nibabel.nifti1.Nifti1Image(mri.data, mri.affine) nii_oriented.set_sform(nii_oriented.affine, "scanner") nii_oriented.set_qform(nii_oriented.affine, "scanner") - # Include meta-data - description = { - "TR": float(frame_fg.MRTimingAndRelatedParametersSequence[0].RepetitionTime), - "TE": float(frame_fg.MREchoSequence[0].EffectiveEchoTime), - } - if hasattr(frame_fg.MRModifierSequence[0], "InversionTimes"): - description["TI"] = frame_fg.MRModifierSequence[0].InversionTimes[0] - if hasattr(frame_fg.MRTimingAndRelatedParametersSequence[0], "EchoTrainLength"): - description["ETL"] = frame_fg.MRTimingAndRelatedParametersSequence[0].EchoTrainLength + description = _extract_frame_metadata(frame_fg) vols_out.append({"nifti": nii_oriented, "descrip": description}) + return vols_out def dicom_to_looklocker(dicomfile: Path, outpath: Path): + """ + Converts a Look-Locker DICOM file to a standardized NIfTI format. + + Extracts trigger times to a sidecar text file, delegates conversion to dcm2niix, + and standardizes the output type to single-precision float (intent_code=2001). + + Args: + dicomfile (Path): Path to the input DICOM file. + outpath (Path): Desired output path for the converted .nii.gz file. 
+ """ outdir, form = outpath.parent, outpath.stem outdir.mkdir(exist_ok=True, parents=True) + + # Extract and save trigger times times = read_dicom_trigger_times(dicomfile) - np.savetxt(f"{outdir}/{form}" + "_trigger_times.txt", times) + np.savetxt(outdir / f"{form}_trigger_times.txt", times) with tempfile.TemporaryDirectory(prefix=outpath.stem) as tmpdir: tmppath = Path(tmpdir) - cmd = f"dcm2niix -f {form} -z y --ignore_trigger_times -o '{tmppath}' '{dicomfile}' > /tmp/dcm2niix.txt" - subprocess.run(cmd, shell=True, check=True) - shutil.copy( - tmppath / f"{form}.json", - outpath.with_suffix(".json"), - ) + + # Delegate heavy lifting to dcm2niix + run_dcm2niix(dicomfile, tmppath, form, extra_args="-z y --ignore_trigger_times", check=True) + + # Copy metadata sidecar + shutil.copy(tmppath / f"{form}.json", outpath.with_suffix(".json")) + + # Reload and save to standardize intent codes and precision mri = load_mri_data(tmppath / f"{form}.nii.gz", dtype=np.double) save_mri_data(mri, outpath.with_suffix(".nii.gz"), dtype=np.single, intent_code=2001) -def dicom_to_mixed( - dcmpath: Path, - outpath: Path, - subvolumes: Optional[list[str]] = None, -): +def dicom_to_mixed(dcmpath: Path, outpath: Path, subvolumes: Optional[list[str]] = None): + """ + Converts a Mixed sequence DICOM file into independent subvolume NIfTIs. + + Generates dedicated images for Spin-Echo, Inversion-Recovery, etc., + and saves sequence timing metadata to a JSON sidecar. + + Args: + dcmpath (Path): Path to the input Mixed DICOM file. + outpath (Path): Base path for output files. Suffixes are automatically appended. + subvolumes (list[str], optional): specific subvolumes to extract. + Defaults to all known VOLUME_LABELS. 
+ """ subvolumes = subvolumes or VOLUME_LABELS assert all([volname in VOLUME_LABELS for volname in subvolumes]), ( - f"Invalid subvolume name in {subvolumes}, not in {VOLUME_LABELS}" + f"Invalid subvolume name in {subvolumes}, must be one of {VOLUME_LABELS}" ) + outdir, form = outpath.parent, outpath.stem outdir.mkdir(exist_ok=True, parents=True) vols = extract_mixed_dicom(dcmpath, subvolumes) meta = {} + for vol, volname in zip(vols, subvolumes): - output = outpath.with_name(outpath.stem + "_" + volname + ".nii.gz") + output = outpath.with_name(f"{outpath.stem}_{volname}.nii.gz") + nibabel.nifti1.save(vol["nifti"], output) - nii = vol["nifti"] descrip = vol["descrip"] - nibabel.nifti1.save(nii, output) try: if volname == "SE-modulus": meta["TR_SE"] = descrip["TR"] @@ -114,15 +218,11 @@ def dicom_to_mixed( meta["TR_IR"] = descrip["TR"] meta["TI"] = descrip["TI"] except KeyError as e: - print(volname, descrip) + logger.error(f"Missing required metadata for {volname}: {descrip}") raise e - with open(outpath.parent / f"{form}_meta.json", "w") as f: - json.dump(meta, f) + # Write merged metadata sidecar + (outdir / f"{form}_meta.json").write_text(json.dumps(meta, indent=4)) - try: - cmd = f"dcm2niix -w 0 --terse -b o -f '{form}' -o '{outdir}' '{dcmpath}' >> /tmp/dcm2niix.txt " - subprocess.run(cmd, shell=True).check_returncode() - except (ValueError, subprocess.CalledProcessError) as e: - print(str(e)) - pass + # Attempt standard dcm2niix conversion (soft failure allowed for legacy behavior) + run_dcm2niix(dcmpath, outdir, form, extra_args="-w 0 --terse -b o", check=False) diff --git a/test/test_dicom_to_nifti.py b/test/test_dicom_to_nifti.py new file mode 100644 index 0000000..46f4b5e --- /dev/null +++ b/test/test_dicom_to_nifti.py @@ -0,0 +1,100 @@ +from unittest.mock import MagicMock, patch +from pathlib import Path +import numpy as np + +from mritk.t1_maps.dicom_to_nifti import ( + _extract_frame_metadata, + run_dcm2niix, + extract_mixed_dicom, + VOLUME_LABELS, +) 
+ + +def test_extract_frame_metadata(): + """Test the extraction of relevant MR metadata parameters from DICOM tags.""" + # Mocking a DICOM Functional Group hierarchy + mock_frame = MagicMock() + mock_frame.MRTimingAndRelatedParametersSequence[0].RepetitionTime = 1500.0 + mock_frame.MREchoSequence[0].EffectiveEchoTime = 10.0 + mock_frame.MRModifierSequence[0].InversionTimes = [150.0] + mock_frame.MRTimingAndRelatedParametersSequence[0].EchoTrainLength = 5 + + meta = _extract_frame_metadata(mock_frame) + + assert meta["TR"] == 1500.0 + assert meta["TE"] == 10.0 + assert meta["TI"] == 150.0 + assert meta["ETL"] == 5 + + +@patch("subprocess.run") +def test_run_dcm2niix(mock_run): + """Test that the dcm2niix command constructor triggers properly.""" + input_path = Path("/input/data.dcm") + output_dir = Path("/output/") + + # Test valid execution + run_dcm2niix(input_path, output_dir, form="test_form", extra_args="-z y") + + # Verify the constructed shell command + mock_run.assert_called_once() + args, _ = mock_run.call_args + cmd = args[0] + + assert "dcm2niix" in cmd[0] + assert "test_form" in cmd + assert "-z" in cmd + assert "y" in cmd + + +@patch("mritk.t1_maps.dicom_to_nifti.extract_single_volume") +@patch("pydicom.dcmread") +def test_extract_mixed_dicom(mock_dcmread, mock_extract_single): + """Test parsing a multi-volume DICOM file into independent subvolumes.""" + # Mocking the pydicom output + mock_dcm = MagicMock() + mock_dcm.NumberOfFrames = 20 + # Private tag for "Number of slices MR" + mock_slice_tag = MagicMock() + mock_slice_tag.value = 10 + + # We have to mock __getitem__ because it's called via dcm[0x2001, 0x1018] + def getitem_side_effect(key): + if key == (0x2001, 0x1018): + return mock_slice_tag + return MagicMock() + + mock_dcm.__getitem__.side_effect = getitem_side_effect + + # Dummy pixel array + mock_dcm.pixel_array = np.zeros((20, 2, 2)) + + # Mocking Frame metadata sequences + mock_frame_fg = MagicMock() + 
mock_frame_fg.MRTimingAndRelatedParametersSequence[0].RepetitionTime = 1000.0 + mock_frame_fg.MREchoSequence[0].EffectiveEchoTime = 5.0 + + # List of 20 frames + mock_dcm.PerFrameFunctionalGroupsSequence = [mock_frame_fg] * 20 + mock_dcmread.return_value = mock_dcm + + # Mock the volume extraction output + mock_mri_data = MagicMock() + mock_mri_data.data = np.ones((10, 2, 2)) + mock_mri_data.affine = np.eye(4) + mock_extract_single.return_value = mock_mri_data + + # Run the function requesting just the first two volumes + dcmpath = Path("/dummy/file.dcm") + test_subvolumes = [VOLUME_LABELS[0], VOLUME_LABELS[1]] + + results = extract_mixed_dicom(dcmpath, test_subvolumes) + + # Verifications + assert len(results) == 2 + assert "nifti" in results[0] + assert "descrip" in results[0] + assert results[0]["descrip"]["TR"] == 1000.0 + + # Ensure extract_single_volume was called twice (once for each subvolume) + assert mock_extract_single.call_count == 2 From a877de210bfadca536c08f46936a1d6ecec0b095 Mon Sep 17 00:00:00 2001 From: Henrik Finsberg Date: Fri, 6 Mar 2026 19:11:39 +0100 Subject: [PATCH 10/29] Refactor stats --- src/mritk/statistics/compute_stats.py | 255 +++++++++++++++++--------- src/mritk/t1_maps/t1_maps.py | 93 +++++----- test/test_mri_stats.py | 96 +++++++++- 3 files changed, 300 insertions(+), 144 deletions(-) diff --git a/src/mritk/statistics/compute_stats.py b/src/mritk/statistics/compute_stats.py index d54f981..33760d8 100644 --- a/src/mritk/statistics/compute_stats.py +++ b/src/mritk/statistics/compute_stats.py @@ -4,9 +4,10 @@ # Copyright (C) 2026 Cécile Daversin-Catty (cecile@simula.no) # Copyright (C) 2026 Simula Research Laboratory +import re from pathlib import Path from typing import Optional -import re + import numpy as np import pandas as pd import tqdm.rich @@ -18,115 +19,193 @@ from .utils import voxel_count_to_ml_scale, find_timestamp, prepend_info +def extract_metadata( + file_path: Path, + pattern: str | None = None, + info_dict: dict[str, 
str] | None = None, + required_keys: list[str] | None = None, +) -> dict: + """ + Extracts metadata from a filename using a regex pattern, falling back to a dictionary. + + Args: + file_path (Path): The path to the file. + pattern (str, optional): Regex pattern with named capture groups. + info_dict (dict, optional): Fallback dictionary if pattern is not provided. + required_keys (list[str], optional): Keys to initialize with None if neither match. + + Returns: + dict: A dictionary of the extracted metadata. + + Raises: + RuntimeError: If a pattern is provided but the filename does not match. + """ + if pattern is not None: + if (m := re.match(rf"{pattern}", file_path.name)) is not None: + return m.groupdict() + else: + raise RuntimeError(f"Filename {file_path.name} does not match the provided pattern.") + + required_keys = required_keys or [] + if info_dict is not None: + return {k: info_dict.get(k) for k in required_keys} + + return {k: None for k in required_keys} + + +def get_regions_dictionary(seg_data: np.ndarray, lut_path: Optional[Path] = None) -> dict[str, list[int]]: + """ + Builds a dictionary mapping region descriptions to their corresponding segmentation labels. + + Args: + seg_data (np.ndarray): The segmentation array. + lut_path (Path, optional): Path to the FreeSurfer Color Look-Up Table. + + Returns: + dict[str, list[int]]: Mapping of region names to a list of label integers. + """ + lut = read_lut(lut_path) + seg_labels = np.unique(seg_data[seg_data != 0]) + + lut_regions = lut.loc[lut.label.isin(seg_labels), ["label", "description"]].to_dict("records") + + regions = { + **{d["description"]: sorted([d["label"]]) for d in lut_regions}, + **default_segmentation_groups(), + } + return regions + + +def compute_region_statistics( + region_data: np.ndarray, + labels: list[int], + description: str, + volscale: float, + voxelcount: int, +) -> dict: + """ + Computes statistical metrics (mean, std, percentiles, etc.) for a specific masked region. 
+ + Args: + region_data (np.ndarray): The raw MRI data values mapped to this region (includes NaNs). + labels (list[int]): The segmentation label indices representing this region. + description (str): Human-readable name of the region. + volscale (float): Multiplier to convert voxel counts to milliliters. + voxelcount (int): Total number of voxels in the region. + + Returns: + dict: A dictionary containing the computed statistics. + """ + record = { + "label": ",".join([str(x) for x in labels]), + "description": description, + "voxelcount": voxelcount, + "volume_ml": volscale * voxelcount, + } + + if voxelcount == 0: + return record + + num_nan = int((~np.isfinite(region_data)).sum()) + record["num_nan_values"] = num_nan + + if num_nan == voxelcount: + return record + + # Filter out NaNs for the mathematical stats + valid_data = region_data[np.isfinite(region_data)] + + stats = { + "sum": float(np.sum(valid_data)), + "mean": float(np.mean(valid_data)), + "median": float(np.median(valid_data)), + "std": float(np.std(valid_data)), + "min": float(np.min(valid_data)), + **{f"PC{pc}": float(np.quantile(valid_data, pc / 100)) for pc in [1, 5, 25, 75, 90, 95, 99]}, + "max": float(np.max(valid_data)), + } + + return {**record, **stats} + + def generate_stats_dataframe( seg_path: Path, mri_path: Path, - timestamp_path: Optional[str | Path] = None, - timestamp_sequence: Optional[str | Path] = None, - seg_pattern: Optional[str | Path] = None, - mri_data_pattern: Optional[str | Path] = None, - lut_path: Optional[Path] = None, - info_dict: Optional[dict] = None, + timestamp_path: str | Path | None = None, + timestamp_sequence: str | Path | None = None, + seg_pattern: str | None = None, + mri_data_pattern: str | None = None, + lut_path: Path | None = None, + info_dict: dict | None = None, ) -> pd.DataFrame: - # Load the data (mri and seg) + """ + Generates a Pandas DataFrame containing descriptive statistics of MRI data grouped by segmentation regions. 
+ + Args: + seg_path (Path): Path to the segmentation NIfTI file. + mri_path (Path): Path to the underlying MRI data NIfTI file. + timestamp_path (str | Path, optional): Path to the timetable TSV file. + timestamp_sequence (str | Path, optional): Sequence label to query in the timetable. + seg_pattern (str, optional): Regex to extract metadata from the seg_path filename. + mri_data_pattern (str, optional): Regex to extract metadata from the mri_path filename. + lut_path (Path, optional): Path to the look-up table. + info_dict (dict, optional): Fallback dictionary for metadata. + + Returns: + pd.DataFrame: A formatted DataFrame with statistics for all identified regions. + """ + # Load and validate the data mri = load_mri_data(mri_path, dtype=np.single) seg = load_mri_data(seg_path, dtype=np.int16) assert_same_space(seg, mri) - # Load LUT - lut = read_lut(lut_path) - # Get LUT info - seg_labels = np.unique(seg.data[seg.data != 0]) - lut_regions = lut.loc[lut.label.isin(seg_labels), ["label", "description"]].to_dict("records") - regions = { - **{d["description"]: sorted([d["label"]]) for d in lut_regions}, - **default_segmentation_groups(), - } - # Get SEG info - seg_info = {} - if seg_pattern is not None: - seg_pattern = rf"{seg_pattern}" - if (m := re.match(seg_pattern, Path(seg_path).name)) is not None: - seg_info = m.groupdict() - else: - raise RuntimeError(f"Segmentation filename {seg_path.name} does not match the provided pattern.") - elif info_dict is not None: - seg_info["segmentation"] = info_dict["segmentation"] if "segmentation" in info_dict else None - seg_info["subject"] = info_dict["subject"] if "subject" in info_dict else None - else: - seg_info = {"segmentation": None, "subject": None} - # Get MRI info - mri_info = {} - if mri_data_pattern is not None: - mri_data_pattern = rf"{mri_data_pattern}" - if (m := re.match(mri_data_pattern, Path(mri_path).name)) is not None: - mri_info = m.groupdict() - else: - raise RuntimeError(f"MRI data filename 
{mri_path.name} does not match the provided pattern.") - elif info_dict is not None: - mri_info["mri_data"] = info_dict["mri_data"] if "mri_data" in info_dict else None - mri_info["subject"] = info_dict["subject"] if "subject" in info_dict else None - mri_info["session"] = info_dict["session"] if "session" in info_dict else None - else: - mri_info = {"mri_data": None, "subject": None, "session": None} - # Get timestamp + + # Resolve metadata + seg_info = extract_metadata(seg_path, seg_pattern, info_dict, ["segmentation", "subject"]) + mri_info = extract_metadata(mri_path, mri_data_pattern, info_dict, ["mri_data", "subject", "session"]) + info = seg_info | mri_info + + # Resolve timestamps + info["timestamp"] = None if timestamp_path is not None: try: - mri_info["timestamp"] = find_timestamp( + info["timestamp"] = find_timestamp( Path(str(timestamp_path)), str(timestamp_sequence), - str(mri_info["subject"]), - str(mri_info["session"]), + str(info.get("subject")), + str(info.get("session")), ) except (ValueError, RuntimeError, KeyError): - mri_info["timestamp"] = None - else: - mri_info["timestamp"] = None + pass - info = seg_info | mri_info - - records = [] - finite_mask = np.isfinite(mri.data) + regions = get_regions_dictionary(seg.data, lut_path) volscale = voxel_count_to_ml_scale(seg.affine) + records = [] + # Iterate over regions and compute stats for description, labels in tqdm.rich.tqdm(regions.items(), total=len(regions)): region_mask = np.isin(seg.data, labels) voxelcount = region_mask.sum() - record = { - "label": ",".join([str(x) for x in labels]), - "description": description, - "voxelcount": voxelcount, - "volume_ml": volscale * voxelcount, - } - if voxelcount == 0: - records.append(record) - continue - - data_mask = region_mask * finite_mask - region_data = mri.data[data_mask] - num_nan = (~np.isfinite(region_data)).sum() - record["num_nan_values"] = num_nan - if num_nan == voxelcount: - records.append(record) - continue - - stats = { - "sum": 
np.sum(region_data), - "mean": np.mean(region_data), - "median": np.median(region_data), - "std": np.std(region_data), - "min": np.min(region_data), - **{f"PC{pc}": np.quantile(region_data, pc / 100) for pc in [1, 5, 25, 75, 90, 95, 99]}, - "max": np.max(region_data), - } - records.append({**record, **stats}) + # Extract raw data for this region (including NaNs) + region_data = mri.data[region_mask] + + record = compute_region_statistics( + region_data=region_data, + labels=labels, + description=description, + volscale=volscale, + voxelcount=voxelcount, + ) + records.append(record) + + # Format output dframe = pd.DataFrame.from_records(records) dframe = prepend_info( dframe, - segmentation=info["segmentation"], - mri_data=info["mri_data"], - subject=info["subject"], - session=info["session"], - timestamp=info["timestamp"], + segmentation=info.get("segmentation"), + mri_data=info.get("mri_data"), + subject=info.get("subject"), + session=info.get("session"), + timestamp=info.get("timestamp"), ) return dframe diff --git a/src/mritk/t1_maps/t1_maps.py b/src/mritk/t1_maps/t1_maps.py index 32ed25d..cd67e89 100644 --- a/src/mritk/t1_maps/t1_maps.py +++ b/src/mritk/t1_maps/t1_maps.py @@ -1,9 +1,9 @@ -"""T1 Maps generation module +# T1 Maps generation module + +# Copyright (C) 2026 Jørgen Riseth (jnriseth@gmail.com) +# Copyright (C) 2026 Cécile Daversin-Catty (cecile@simula.no) +# Copyright (C) 2026 Simula Research Laboratory -Copyright (C) 2026 Jørgen Riseth (jnriseth@gmail.com) -Copyright (C) 2026 Cécile Daversin-Catty (cecile@simula.no) -Copyright (C) 2026 Simula Research Laboratory -""" import json import logging @@ -72,21 +72,6 @@ def compute_looklocker_t1_array(data: np.ndarray, time_s: np.ndarray, t1_roof: f return np.minimum(t1map, t1_roof) -def looklocker_t1map(looklocker_input: Path, timestamps: Path, output: Path | None = None) -> MRIData: - """I/O wrapper to generate a Look-Locker T1 map from a NIfTI file.""" - ll_mri = load_mri_data(looklocker_input, 
dtype=np.single) - # Convert timestamps from milliseconds to seconds - time_s = np.loadtxt(timestamps) / 1000.0 - - t1map_array = compute_looklocker_t1_array(ll_mri.data, time_s) - t1map_mri = MRIData(t1map_array.astype(np.single), ll_mri.affine) - - if output is not None: - save_mri_data(t1map_mri, output, dtype=np.single) - - return t1map_mri - - def create_largest_island_mask(data: np.ndarray, radius: int = 10, erode_dilate_factor: float = 1.3) -> np.ndarray: """ Creates a binary mask isolating the largest contiguous non-NaN region in an array. @@ -172,39 +157,13 @@ def looklocker_t1map_postprocessing( return processed_T1map -def compute_mixed_t1_array(se_data: np.ndarray, ir_data: np.ndarray, meta: dict, t1_low: float, t1_high: float) -> np.ndarray: - """ - Computes a Mixed T1 array from Spin-Echo and Inversion-Recovery volumes using a lookup table. - - Args: - se_data (np.ndarray): 3D numpy array of the Spin-Echo modulus data. - ir_data (np.ndarray): 3D numpy array of the Inversion-Recovery corrected real data. - meta (dict): Dictionary containing sequence parameters ('TR_SE', 'TI', 'TE', 'ETL'). - t1_low (float): Lower bound for T1 generation grid. - t1_high (float): Upper bound for T1 generation grid. - - Returns: - np.ndarray: Computed T1 map as a 3D float32 array. 
- """ - nonzero_mask = se_data != 0 - f_data = np.nan * np.zeros_like(ir_data) - f_data[nonzero_mask] = ir_data[nonzero_mask] / se_data[nonzero_mask] - - tr_se, ti, te, etl = meta["TR_SE"], meta["TI"], meta["TE"], meta["ETL"] - f_curve, t1_grid = T1_lookup_table(tr_se, ti, te, etl, t1_low, t1_high) - - interpolator = scipy.interpolate.interp1d(f_curve, t1_grid, kind="nearest", bounds_error=False, fill_value=np.nan) - return interpolator(f_data).astype(np.single) - - def mixed_t1map( SE_nii_path: Path, IR_nii_path: Path, meta_path: Path, T1_low: float, T1_high: float, output: Path | None = None ) -> nibabel.nifti1.Nifti1Image: """I/O wrapper to generate a T1 map from SE and IR acquisitions.""" se_mri = load_mri_data(SE_nii_path, dtype=np.single) ir_mri = load_mri_data(IR_nii_path, dtype=np.single) - with open(meta_path, "r") as f: - meta = json.load(f) + meta = json.loads(meta_path.read_text()) t1_volume = compute_mixed_t1_array(se_mri.data, ir_mri.data, meta, T1_low, T1_high) @@ -236,6 +195,46 @@ def mixed_t1map_postprocessing(SE_nii_path: Path, T1_path: Path, output: Path | return masked_t1map_nii +def looklocker_t1map(looklocker_input: Path, timestamps: Path, output: Path | None = None) -> MRIData: + """I/O wrapper to generate a Look-Locker T1 map from a NIfTI file.""" + ll_mri = load_mri_data(looklocker_input, dtype=np.single) + # Convert timestamps from milliseconds to seconds + time_s = np.loadtxt(timestamps) / 1000.0 + + t1map_array = compute_looklocker_t1_array(ll_mri.data, time_s) + t1map_mri = MRIData(t1map_array.astype(np.single), ll_mri.affine) + + if output is not None: + save_mri_data(t1map_mri, output, dtype=np.single) + + return t1map_mri + + +def compute_mixed_t1_array(se_data: np.ndarray, ir_data: np.ndarray, meta: dict, t1_low: float, t1_high: float) -> np.ndarray: + """ + Computes a Mixed T1 array from Spin-Echo and Inversion-Recovery volumes using a lookup table. + + Args: + se_data (np.ndarray): 3D numpy array of the Spin-Echo modulus data. 
+ ir_data (np.ndarray): 3D numpy array of the Inversion-Recovery corrected real data. + meta (dict): Dictionary containing sequence parameters ('TR_SE', 'TI', 'TE', 'ETL'). + t1_low (float): Lower bound for T1 generation grid. + t1_high (float): Upper bound for T1 generation grid. + + Returns: + np.ndarray: Computed T1 map as a 3D float32 array. + """ + nonzero_mask = se_data != 0 + f_data = np.nan * np.zeros_like(ir_data) + f_data[nonzero_mask] = ir_data[nonzero_mask] / se_data[nonzero_mask] + + tr_se, ti, te, etl = meta["TR_SE"], meta["TI"], meta["TE"], meta["ETL"] + f_curve, t1_grid = T1_lookup_table(tr_se, ti, te, etl, t1_low, t1_high) + + interpolator = scipy.interpolate.interp1d(f_curve, t1_grid, kind="nearest", bounds_error=False, fill_value=np.nan) + return interpolator(f_data).astype(np.single) + + def compute_hybrid_t1_array(ll_data: np.ndarray, mixed_data: np.ndarray, mask: np.ndarray, threshold: float) -> np.ndarray: """ Creates a hybrid T1 array by selectively substituting Look-Locker voxels with Mixed voxels. diff --git a/test/test_mri_stats.py b/test/test_mri_stats.py index a259e86..61886ea 100644 --- a/test/test_mri_stats.py +++ b/test/test_mri_stats.py @@ -1,14 +1,8 @@ -"""MRI Stats - Test - -Copyright (C) 2026 Jørgen Riseth (jnriseth@gmail.com) -Copyright (C) 2026 Cécile Daversin-Catty (cecile@simula.no) -Copyright (C) 2026 Simula Research Laboratory -""" - from pathlib import Path +import numpy as np +import pytest - -from mritk.statistics.compute_stats import generate_stats_dataframe # , compute_mri_stats +from mritk.statistics.compute_stats import extract_metadata, compute_region_statistics, generate_stats_dataframe import mritk.cli as cli @@ -136,3 +130,87 @@ def test_compute_mri_stats_cli(capsys, tmp_path: Path, mri_data_dir: Path): assert "Processing MRIs..." 
in captured.out
+    assert "Stats successfully saved to" in captured.out
+    assert (tmp_path / "mri_stats_output.csv").exists()
+
+
+def test_extract_metadata_with_pattern():
+    """Test extracting metadata successfully via regex pattern."""
+    file_path = Path("sub-01_ses-01_concentration.nii.gz")
+    pattern = r"(?P<subject>sub-\d{2})_(?P<session>ses-\d{2})_(?P<mri_data>[^\.]+)"
+
+    info = extract_metadata(file_path, pattern=pattern)
+
+    assert info["subject"] == "sub-01"
+    assert info["session"] == "ses-01"
+    assert info["mri_data"] == "concentration"
+
+
+def test_extract_metadata_pattern_failure():
+    """Test that a non-matching pattern correctly raises a RuntimeError."""
+    file_path = Path("invalid_filename.nii.gz")
+    pattern = r"(?P<subject>sub-\d{2})"
+
+    with pytest.raises(RuntimeError, match="does not match the provided pattern"):
+        extract_metadata(file_path, pattern=pattern)
+
+
+def test_extract_metadata_with_info_dict():
+    """Test fallback to info_dict when pattern is not provided."""
+    file_path = Path("some_file.nii.gz")
+    info_dict = {"subject": "sub-02", "segmentation": "aparc"}
+    required_keys = ["subject", "segmentation", "mri_data"]
+
+    info = extract_metadata(file_path, info_dict=info_dict, required_keys=required_keys)
+
+    assert info["subject"] == "sub-02"
+    assert info["segmentation"] == "aparc"
+    assert info["mri_data"] is None  # Was not in info_dict
+
+
+def test_compute_region_statistics_normal():
+    """Test normal calculation of statistical metrics."""
+    # Mock data: values 1.0 through 5.0
+    region_data = np.array([1.0, 2.0, 3.0, 4.0, 5.0])
+    labels = [10, 11]
+
+    stats = compute_region_statistics(
+        region_data=region_data, labels=labels, description="test_region", volscale=0.5, voxelcount=5
+    )
+
+    assert stats["description"] == "test_region"
+    assert stats["label"] == "10,11"
+    assert stats["voxelcount"] == 5
+    assert stats["volume_ml"] == 2.5
+    assert stats["num_nan_values"] == 0
+    assert stats["sum"] == 15.0
+    assert stats["mean"] == 3.0
+    assert stats["min"] == 1.0
+    assert 
stats["max"] == 5.0 + assert stats["median"] == 3.0 + + +def test_compute_region_statistics_with_nans(): + """Test that statistics correctly ignore NaNs inside the region.""" + region_data = np.array([1.0, 2.0, np.nan, 3.0, np.nan]) + + stats = compute_region_statistics(region_data=region_data, labels=[1], description="partial_nan", volscale=1.0, voxelcount=5) + + assert stats["num_nan_values"] == 2 + assert stats["sum"] == 6.0 # 1+2+3 + assert stats["mean"] == 2.0 # 6/3 + + +def test_compute_region_statistics_empty_or_all_nan(): + """Test edge cases where the region is empty or completely composed of NaNs.""" + # Case 1: Empty (0 voxels) + stats_empty = compute_region_statistics( + region_data=np.array([]), labels=[1], description="empty_region", volscale=1.0, voxelcount=0 + ) + assert "mean" not in stats_empty + assert stats_empty["voxelcount"] == 0 + + # Case 2: All NaNs + stats_nan = compute_region_statistics( + region_data=np.array([np.nan, np.nan]), labels=[1], description="nan_region", volscale=1.0, voxelcount=2 + ) + assert stats_nan["num_nan_values"] == 2 + assert "mean" not in stats_nan From ee07b31d156dddcc7cc11dd3a362d3ce336bed10 Mon Sep 17 00:00:00 2001 From: Henrik Finsberg Date: Sat, 7 Mar 2026 16:59:12 +0100 Subject: [PATCH 11/29] Restructure t1 maps --- pyproject.toml | 31 +- src/mritk/concentration/concentration.py | 54 --- src/mritk/{concentration => t1}/__init__.py | 4 +- src/mritk/t1/concentration.py | 208 +++++++++++ src/mritk/t1/hybrid.py | 59 ++++ .../{t1_maps/t1_maps.py => t1/looklocker.py} | 283 +++++++-------- src/mritk/t1/mixed.py | 326 ++++++++++++++++++ src/mritk/{t1_maps => t1}/utils.py | 150 +++----- src/mritk/t1_maps/__init__.py | 7 - src/mritk/t1_maps/dicom_to_nifti.py | 228 ------------ src/mritk/t1_maps/t1_to_r1.py | 100 ------ test/test_concentration.py | 147 +++++++- test/test_hybrid.py | 54 +++ test/test_looklocker.py | 75 ++++ .../{test_dicom_to_nifti.py => test_mixed.py} | 76 ++-- test/test_mri_t1_maps.py | 76 ---- 
test/test_t1_maps.py | 150 -------- ...test_t1_maps_utils.py => test_t1_utils.py} | 42 ++- 18 files changed, 1162 insertions(+), 908 deletions(-) delete mode 100644 src/mritk/concentration/concentration.py rename src/mritk/{concentration => t1}/__init__.py (57%) create mode 100644 src/mritk/t1/concentration.py create mode 100644 src/mritk/t1/hybrid.py rename src/mritk/{t1_maps/t1_maps.py => t1/looklocker.py} (57%) create mode 100644 src/mritk/t1/mixed.py rename src/mritk/{t1_maps => t1}/utils.py (72%) delete mode 100644 src/mritk/t1_maps/__init__.py delete mode 100644 src/mritk/t1_maps/dicom_to_nifti.py delete mode 100644 src/mritk/t1_maps/t1_to_r1.py create mode 100644 test/test_hybrid.py create mode 100644 test/test_looklocker.py rename test/{test_dicom_to_nifti.py => test_mixed.py} (57%) delete mode 100644 test/test_mri_t1_maps.py delete mode 100644 test/test_t1_maps.py rename test/{test_t1_maps_utils.py => test_t1_utils.py} (72%) diff --git a/pyproject.toml b/pyproject.toml index f8f354b..7f2ff86 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -25,7 +25,8 @@ dependencies = [ "scipy", "scikit-image", "pydicom", - "dcm2niix"] + "dcm2niix", +] [project.optional-dependencies] show = [ @@ -128,6 +129,34 @@ dummy-variable-rgx = "^(_+|(_+[a-zA-Z0-9_]*[a-zA-Z0-9]+?))$" max-complexity = 10 +[tool.ruff.lint.pydocstyle] +convention = "google" + + +[tool.ruff.lint.isort] +known-first-party = ["mritk"] +known-third-party = [ + "tqdm", + "numpy", + "rich-argparse", + "nibabel", + "pandas", + "scipy", + "scikit-image", + "pydicom", + "dcm2niix", + "pytest", +] +section-order = [ + "future", + "standard-library", + "third-party", + "first-party", + "local-folder", +] + + + [tool.bumpversion] allow_dirty = false commit = true diff --git a/src/mritk/concentration/concentration.py b/src/mritk/concentration/concentration.py deleted file mode 100644 index 08c0a67..0000000 --- a/src/mritk/concentration/concentration.py +++ /dev/null @@ -1,54 +0,0 @@ -"""Concentration maps module 
- -Copyright (C) 2026 Jørgen Riseth (jnriseth@gmail.com) -Copyright (C) 2026 Cécile Daversin-Catty (cecile@simula.no) -Copyright (C) 2026 Simula Research Laboratory -""" - -from pathlib import Path -from typing import Optional - -import numpy as np -from ..data.base import MRIData -from ..data.io import load_mri_data, save_mri_data -from ..data.orientation import assert_same_space - - -def concentration_from_T1(T1: np.ndarray, T1_0: np.ndarray, r1: float) -> np.ndarray: - C = 1 / r1 * (1 / T1 - 1 / T1_0) - return C - - -def concentration_from_R1(R1: np.ndarray, R1_0: np.ndarray, r1: float) -> np.ndarray: - C = 1 / r1 * (R1 - R1_0) - return C - - -def concentration( - input: Path, - reference: Path, - output: Optional[Path] = None, - r1: float = 0.0045, - mask: Optional[Path] = None, -) -> MRIData: - T1_mri = load_mri_data(input, np.single) - T10_mri = load_mri_data(reference, np.single) - assert_same_space(T1_mri, T10_mri) - - if mask is not None: - mask_mri = load_mri_data(mask, bool) - assert_same_space(mask_mri, T10_mri) - mask_data = mask_mri.data * (T10_mri.data > 1e-10) * (T1_mri.data > 1e-10) - T1_mri.data *= mask_data - T10_mri.data *= mask_data - else: - mask_data = (T10_mri.data > 1e-10) * (T1_mri.data > 1e-10) - T1_mri.data[~mask_data] = np.nan - T10_mri.data[~mask_data] = np.nan - - concentrations = np.nan * np.zeros_like(T10_mri.data) - concentrations[mask_data] = concentration_from_T1(T1=T1_mri.data[mask_data], T1_0=T10_mri.data[mask_data], r1=r1) - mri_data = MRIData(data=concentrations, affine=T10_mri.affine) - if output is not None: - save_mri_data(mri_data, output, np.single) - return mri_data diff --git a/src/mritk/concentration/__init__.py b/src/mritk/t1/__init__.py similarity index 57% rename from src/mritk/concentration/__init__.py rename to src/mritk/t1/__init__.py index 9e93ffa..7b2a547 100644 --- a/src/mritk/concentration/__init__.py +++ b/src/mritk/t1/__init__.py @@ -2,7 +2,7 @@ # Copyright (C) 2026 Cécile Daversin-Catty (cecile@simula.no) 
# Copyright (C) 2026 Simula Research Laboratory +from . import concentration, utils, mixed, looklocker, hybrid -from . import concentration -__all__ = ["concentration"] +__all__ = ["concentration", "utils", "mixed", "looklocker", "hybrid"] diff --git a/src/mritk/t1/concentration.py b/src/mritk/t1/concentration.py new file mode 100644 index 0000000..a5ade49 --- /dev/null +++ b/src/mritk/t1/concentration.py @@ -0,0 +1,208 @@ +# T1 to R1 module + +# Copyright (C) 2026 Jørgen Riseth (jnriseth@gmail.com) +# Copyright (C) 2026 Cécile Daversin-Catty (cecile@simula.no) +# Copyright (C) 2026 Simula Research Laboratory + + +import numpy as np +from pathlib import Path + +from ..data.base import MRIData +from ..data.io import load_mri_data, save_mri_data +from ..data.orientation import assert_same_space + + +def compute_r1_array( + t1_data: np.ndarray, scale: float = 1000.0, t1_low: float = 1.0, t1_high: float = float("inf") +) -> np.ndarray: + """ + Pure numpy function converting a T1 relaxation time array to an R1 relaxation rate array. + + The relationship is R1 = scale / T1. Values outside the [t1_low, t1_high] + range are set to NaN to filter out noise and non-physiological data. + + Args: + t1_data (np.ndarray): The input array containing T1 relaxation times. + scale (float, optional): Scaling factor, typically 1000 to convert from ms to s^-1. Defaults to 1000. + t1_low (float, optional): Lower bound for valid T1 values. Defaults to 1. + t1_high (float, optional): Upper bound for valid T1 values. Defaults to infinity. + + Returns: + np.ndarray: An array of R1 relaxation rates. Invalid/out-of-bound voxels are set to NaN. 
+ """ + valid_t1 = (t1_low <= t1_data) & (t1_data <= t1_high) + r1_data = np.nan * np.zeros_like(t1_data) + + # Calculate R1 only for valid voxels to avoid division by zero or extreme outliers + r1_data[valid_t1] = scale / t1_data[valid_t1] + + return r1_data + + +def convert_T1_to_R1( + T1map_mri: MRIData, + scale: float = 1000.0, + t1_low: float = 1.0, + t1_high: float = float("inf"), +) -> MRIData: + """ + Converts a T1 map MRIData object into an R1 map MRIData object. + + Args: + T1map_mri (MRIData): The input MRIData object representing the T1 map. + scale (float, optional): Scaling factor. Defaults to 1000. + t1_low (float, optional): Lower bound for valid T1 values. Defaults to 1. + t1_high (float, optional): Upper bound for valid T1 values. Defaults to float('inf'). + + Returns: + MRIData: A new MRIData object containing the R1 map array and the original affine matrix. + """ + r1_data = compute_r1_array(T1map_mri.data, scale, t1_low, t1_high) + return MRIData(data=r1_data, affine=T1map_mri.affine) + + +def T1_to_R1( + input_mri: Path | MRIData, + output: Path | None = None, + scale: float = 1000.0, + t1_low: float = 1.0, + t1_high: float = float("inf"), +) -> MRIData: + """ + High-level wrapper to convert a T1 map to an R1 map, handling file I/O operations. + + Args: + input_mri (Union[Path, MRIData]): A Path to a T1 NIfTI file or an already loaded MRIData object. + output (Path | None, optional): Path to save the resulting R1 map to disk. Defaults to None. + scale (float, optional): Scaling factor (e.g., 1000 for ms -> s^-1). Defaults to 1000. + t1_low (float, optional): Lower bound for valid T1 values. Defaults to 1. + t1_high (float, optional): Upper bound for valid T1 values. Defaults to float('inf'). + + Returns: + MRIData: The computed R1 map as an MRIData object. + + Raises: + ValueError: If input_mri is neither a Path nor an MRIData object. 
+ """ + if isinstance(input_mri, Path): + T1map_mri = load_mri_data(input_mri, dtype=np.single) + elif isinstance(input_mri, MRIData): + T1map_mri = input_mri + else: + raise ValueError(f"Input should be a Path or MRIData, got {type(input_mri)}") + + R1map_mri = convert_T1_to_R1(T1map_mri, scale, t1_low, t1_high) + + if output is not None: + save_mri_data(R1map_mri, output, dtype=np.single) + + return R1map_mri + + +def concentration_from_T1(t1: np.ndarray, t1_0: np.ndarray, r1: float) -> np.ndarray: + """ + Computes tracer concentration from T1 relaxation times. + + Formula: C = (1 / r1) * ((1 / T1) - (1 / T1_0)) + + Args: + t1 (np.ndarray): Array of post-contrast T1 relaxation times. + t1_0 (np.ndarray): Array of pre-contrast (baseline) T1 relaxation times. + r1 (float): Relaxivity of the contrast agent. + + Returns: + np.ndarray: Computed concentration array. + """ + return (1.0 / r1) * ((1.0 / t1) - (1.0 / t1_0)) + + +def concentration_from_R1(r1_map: np.ndarray, r1_0_map: np.ndarray, r1: float) -> np.ndarray: + """ + Computes tracer concentration from R1 relaxation rates. + + Formula: C = (1 / r1) * (R1 - R1_0) + + Args: + r1_map (np.ndarray): Array of post-contrast R1 relaxation rates. + r1_0_map (np.ndarray): Array of pre-contrast (baseline) R1 relaxation rates. + r1 (float): Relaxivity of the contrast agent. + + Returns: + np.ndarray: Computed concentration array. + """ + return (1.0 / r1) * (r1_map - r1_0_map) + + +def compute_concentration_array( + t1_data: np.ndarray, t10_data: np.ndarray, r1: float, mask: np.ndarray | None = None +) -> np.ndarray: + """ + Computes the concentration map array, handling masking and avoiding division by zero. + + Args: + t1_data (np.ndarray): 3D numpy array of post-contrast T1 values. + t10_data (np.ndarray): 3D numpy array of pre-contrast T1 values. + r1 (float): Relaxivity of the contrast agent. + mask (Optional[np.ndarray], optional): Boolean mask restricting the computation area. + Defaults to None. 
+ + Returns: + np.ndarray: A 3D array of computed concentrations. Invalid voxels (unmasked or + where T1 <= 1e-10) are set to NaN. + """ + # Create a validity mask: T1 values must be > 1e-10 to safely invert without overflow + valid_mask = (t1_data > 1e-10) & (t10_data > 1e-10) + + if mask is not None: + valid_mask &= mask.astype(bool) + + concentrations = np.full_like(t10_data, np.nan, dtype=np.single) + + # Compute concentration strictly on valid voxels + concentrations[valid_mask] = concentration_from_T1(t1=t1_data[valid_mask], t1_0=t10_data[valid_mask], r1=r1) + + return concentrations + + +def concentration( + input_path: Path, + reference_path: Path, + output_path: Path | None = None, + r1: float = 0.0045, + mask_path: Path | None = None, +) -> MRIData: + """ + I/O wrapper to generate a contrast agent concentration map from NIfTI T1 maps. + + Loads the post-contrast and baseline T1 maps, ensures they occupy the same + physical space, computes the concentration map, and optionally saves it to disk. + + Args: + input_path (Path): Path to the post-contrast T1 map NIfTI file. + reference_path (Path): Path to the baseline (pre-contrast) T1 map NIfTI file. + output_path (Path | None, optional): Path to save the resulting concentration map. Defaults to None. + r1 (float, optional): Contrast agent relaxivity. Defaults to 0.0045. + mask_path (Path | None, optional): Path to a boolean mask NIfTI file. Defaults to None. + + Returns: + MRIData: An MRIData object containing the concentration array and the affine matrix. 
+ """ + t1_mri = load_mri_data(input_path, dtype=np.single) + t10_mri = load_mri_data(reference_path, dtype=np.single) + assert_same_space(t1_mri, t10_mri) + + mask_data = None + if mask_path is not None: + mask_mri = load_mri_data(mask_path, dtype=bool) + assert_same_space(mask_mri, t10_mri) + mask_data = mask_mri.data + + concentrations_array = compute_concentration_array(t1_data=t1_mri.data, t10_data=t10_mri.data, r1=r1, mask=mask_data) + + mri_data = MRIData(data=concentrations_array, affine=t10_mri.affine) + + if output_path is not None: + save_mri_data(mri_data, output_path, dtype=np.single) + + return mri_data diff --git a/src/mritk/t1/hybrid.py b/src/mritk/t1/hybrid.py new file mode 100644 index 0000000..220b990 --- /dev/null +++ b/src/mritk/t1/hybrid.py @@ -0,0 +1,59 @@ +# T1 Maps generation module + +# Copyright (C) 2026 Jørgen Riseth (jnriseth@gmail.com) +# Copyright (C) 2026 Cécile Daversin-Catty (cecile@simula.no) +# Copyright (C) 2026 Simula Research Laboratory + + +import logging +import numpy as np +import skimage +import nibabel +from pathlib import Path + + +logger = logging.getLogger(__name__) + + +def compute_hybrid_t1_array(ll_data: np.ndarray, mixed_data: np.ndarray, mask: np.ndarray, threshold: float) -> np.ndarray: + """ + Creates a hybrid T1 array by selectively substituting Look-Locker voxels with Mixed voxels. + + Substitution occurs only if BOTH the Look-Locker AND Mixed T1 values exceed the threshold, + AND the voxel falls within the provided CSF mask. + + Args: + ll_data (np.ndarray): 3D numpy array of Look-Locker T1 values. + mixed_data (np.ndarray): 3D numpy array of Mixed T1 values. + mask (np.ndarray): 3D boolean mask (typically eroded CSF). + threshold (float): T1 threshold value (in ms). + + Returns: + np.ndarray: Hybrid 3D T1 array. 
+ """ + hybrid = ll_data.copy() + newmask = mask & (ll_data > threshold) & (mixed_data > threshold) + hybrid[newmask] = mixed_data[newmask] + return hybrid + + +def hybrid_t1map( + LL_path: Path, mixed_path: Path, csf_mask_path: Path, threshold: float, erode: int = 0, output: Path | None = None +) -> nibabel.nifti1.Nifti1Image: + """I/O wrapper for merging a Look-Locker and a Mixed T1 map.""" + mixed_mri = nibabel.nifti1.load(mixed_path) + ll_mri = nibabel.nifti1.load(LL_path) + + csf_mask_mri = nibabel.nifti1.load(csf_mask_path) + csf_mask = csf_mask_mri.get_fdata().astype(bool) + + if erode > 0: + csf_mask = skimage.morphology.erosion(csf_mask, skimage.morphology.ball(erode)) + + hybrid = compute_hybrid_t1_array(ll_mri.get_fdata(), mixed_mri.get_fdata(), csf_mask, threshold) + + hybrid_nii = nibabel.nifti1.Nifti1Image(hybrid, affine=ll_mri.affine, header=ll_mri.header) + if output is not None: + nibabel.nifti1.save(hybrid_nii, output) + + return hybrid_nii diff --git a/src/mritk/t1_maps/t1_maps.py b/src/mritk/t1/looklocker.py similarity index 57% rename from src/mritk/t1_maps/t1_maps.py rename to src/mritk/t1/looklocker.py index cd67e89..72b546f 100644 --- a/src/mritk/t1_maps/t1_maps.py +++ b/src/mritk/t1/looklocker.py @@ -5,71 +5,62 @@ # Copyright (C) 2026 Simula Research Laboratory -import json import logging import numpy as np -import scipy -import scipy.interpolate -import skimage -import tqdm -import nibabel +import tempfile +import shutil from functools import partial from typing import Optional from pathlib import Path +import tqdm +import skimage + from ..data.base import MRIData from ..data.io import load_mri_data, save_mri_data -from ..masking.masks import create_csf_mask -from .utils import ( - mri_facemask, - fit_voxel, - nan_filter_gaussian, - T1_lookup_table, -) +from .utils import mri_facemask, fit_voxel, nan_filter_gaussian, run_dcm2niix logger = logging.getLogger(__name__) -def compute_looklocker_t1_array(data: np.ndarray, time_s: np.ndarray, 
t1_roof: float = 10000.0) -> np.ndarray: +def read_dicom_trigger_times(dicomfile: Path) -> np.ndarray: """ - Computes T1 relaxation maps from Look-Locker data using Levenberg-Marquardt fitting. + Extracts unique nominal cardiac trigger delay times from DICOM functional groups. Args: - data (np.ndarray): 4D numpy array (x, y, z, time) of Look-Locker MRI signals. - time_s (np.ndarray): 1D array of trigger times in seconds. - t1_roof (float, optional): Maximum allowed T1 value (ms) to cap spurious fits. Defaults to 10000.0. + dicomfile (str): The file path to the DICOM file. Returns: - np.ndarray: 3D numpy array representing the T1 map in milliseconds. Voxels - that fail to fit or fall outside the mask are set to NaN. + np.ndarray: A sorted array of unique trigger delay times (in milliseconds) + extracted from the CardiacSynchronizationSequence. """ - assert len(data.shape) >= 4, f"Data should be at least 4-dimensional, got shape {data.shape}" - mask = mri_facemask(data[..., 0]) - valid_voxels = (np.nanmax(data, axis=-1) > 0) & mask + import pydicom - data_normalized = np.nan * np.zeros_like(data) - # Prevent divide by zero warnings dynamically - max_vals = np.nanmax(data, axis=-1)[valid_voxels, np.newaxis] - data_normalized[valid_voxels] = data[valid_voxels] / max_vals + dcm = pydicom.dcmread(dicomfile) + all_frame_times = [ + f.CardiacSynchronizationSequence[0].NominalCardiacTriggerDelayTime for f in dcm.PerFrameFunctionalGroupsSequence + ] + return np.unique(all_frame_times) - voxel_mask = np.array(np.where(valid_voxels)).T - d_masked = np.array([data_normalized[i, j, k] for (i, j, k) in voxel_mask]) - with tqdm.tqdm(total=len(d_masked), desc="Fitting Look-Locker Voxels") as pbar: - voxel_fitter = partial(fit_voxel, time_s, pbar) - vfunc = np.vectorize(voxel_fitter, signature="(n) -> (3)") - fitted_coefficients = vfunc(d_masked) - - x2 = fitted_coefficients[:, 1] - x3 = fitted_coefficients[:, 2] - - i, j, k = voxel_mask.T - t1map = np.nan * np.zeros_like(data[..., 
0]) +def remove_outliers(data: np.ndarray, mask: np.ndarray, t1_low: float, t1_high: float) -> np.ndarray: + """ + Applies a mask and removes values outside the physiological T1 range. - # Calculate T1 in ms. Formula: T1 = (x2 / x3)^2 * 1000 - t1map[i, j, k] = (x2 / x3) ** 2 * 1000.0 + Args: + data (np.ndarray): 3D array of T1 values. + mask (np.ndarray): 3D boolean mask of the brain/valid area. + t1_low (float): Lower physiological limit. + t1_high (float): Upper physiological limit. - return np.minimum(t1map, t1_roof) + Returns: + np.ndarray: A cleaned 3D array with outliers and unmasked regions set to NaN. + """ + processed = data.copy() + processed[~mask] = np.nan + outliers = (processed < t1_low) | (processed > t1_high) + processed[outliers] = np.nan + return processed def create_largest_island_mask(data: np.ndarray, radius: int = 10, erode_dilate_factor: float = 1.3) -> np.ndarray: @@ -102,24 +93,46 @@ def create_largest_island_mask(data: np.ndarray, radius: int = 10, erode_dilate_ return mask -def remove_outliers(data: np.ndarray, mask: np.ndarray, t1_low: float, t1_high: float) -> np.ndarray: +def compute_looklocker_t1_array(data: np.ndarray, time_s: np.ndarray, t1_roof: float = 10000.0) -> np.ndarray: """ - Applies a mask and removes values outside the physiological T1 range. + Computes T1 relaxation maps from Look-Locker data using Levenberg-Marquardt fitting. Args: - data (np.ndarray): 3D array of T1 values. - mask (np.ndarray): 3D boolean mask of the brain/valid area. - t1_low (float): Lower physiological limit. - t1_high (float): Upper physiological limit. + data (np.ndarray): 4D numpy array (x, y, z, time) of Look-Locker MRI signals. + time_s (np.ndarray): 1D array of trigger times in seconds. + t1_roof (float, optional): Maximum allowed T1 value (ms) to cap spurious fits. Defaults to 10000.0. Returns: - np.ndarray: A cleaned 3D array with outliers and unmasked regions set to NaN. + np.ndarray: 3D numpy array representing the T1 map in milliseconds. 
Voxels + that fail to fit or fall outside the mask are set to NaN. """ - processed = data.copy() - processed[~mask] = np.nan - outliers = (processed < t1_low) | (processed > t1_high) - processed[outliers] = np.nan - return processed + assert len(data.shape) >= 4, f"Data should be at least 4-dimensional, got shape {data.shape}" + mask = mri_facemask(data[..., 0]) + valid_voxels = (np.nanmax(data, axis=-1) > 0) & mask + + data_normalized = np.nan * np.zeros_like(data) + # Prevent divide by zero warnings dynamically + max_vals = np.nanmax(data, axis=-1)[valid_voxels, np.newaxis] + data_normalized[valid_voxels] = data[valid_voxels] / max_vals + + voxel_mask = np.array(np.where(valid_voxels)).T + d_masked = np.array([data_normalized[i, j, k] for (i, j, k) in voxel_mask]) + + with tqdm.tqdm(total=len(d_masked), desc="Fitting Look-Locker Voxels") as pbar: + voxel_fitter = partial(fit_voxel, time_s, pbar) + vfunc = np.vectorize(voxel_fitter, signature="(n) -> (3)") + fitted_coefficients = vfunc(d_masked) + + x2 = fitted_coefficients[:, 1] + x3 = fitted_coefficients[:, 2] + + i, j, k = voxel_mask.T + t1map = np.nan * np.zeros_like(data[..., 0]) + + # Calculate T1 in ms. Formula: T1 = (x2 / x3)^2 * 1000 + t1map[i, j, k] = (x2 / x3) ** 2 * 1000.0 + + return np.minimum(t1map, t1_roof) def looklocker_t1map_postprocessing( @@ -131,7 +144,37 @@ def looklocker_t1map_postprocessing( mask: Optional[np.ndarray] = None, output: Path | None = None, ) -> MRIData: - """I/O wrapper for masking, outlier removal, and NaN filling on a T1 map.""" + """ + Performs quality-control and post-processing on a raw Look-Locker T1 map. + + This function cleans up noisy T1 fits by applying a three-step pipeline: + 1. Masking: If no mask is provided, it automatically isolates the brain/head by + finding the largest contiguous tissue island and applying morphological smoothing. + 2. Outlier Removal: Voxels falling outside the provided physiological bounds + [T1_low, T1_high] are discarded (set to NaN). 
+ 3. Interpolation: Internal "holes" (NaNs) created by poor fits or outlier + removal are iteratively filled using a specialized Gaussian filter that + interpolates from surrounding valid tissue without blurring the edges. + + Args: + T1map (Path): Path to the raw, unmasked Look-Locker T1 map NIfTI file. + T1_low (float): Lower physiological limit for T1 values (in ms). + T1_high (float): Upper physiological limit for T1 values (in ms). + radius (int, optional): Base radius for morphological dilation when generating + the automatic mask. Defaults to 10. + erode_dilate_factor (float, optional): Multiplier for the erosion radius + relative to the dilation radius to ensure tight mask edges. Defaults to 1.3. + mask (Optional[np.ndarray], optional): Pre-computed 3D boolean mask. If None, + one is generated automatically. Defaults to None. + output (Path | None, optional): Path to save the cleaned T1 map. Defaults to None. + + Returns: + MRIData: An MRIData object containing the cleaned, masked, and interpolated T1 map. + + Raises: + RuntimeError: If more than 99% of the voxels are removed during the outlier + filtering step, indicating a likely unit mismatch (e.g., T1 in seconds instead of ms). 
+ """ t1map_mri = load_mri_data(T1map, dtype=np.single) t1map_data = t1map_mri.data.copy() @@ -157,46 +200,25 @@ def looklocker_t1map_postprocessing( return processed_T1map -def mixed_t1map( - SE_nii_path: Path, IR_nii_path: Path, meta_path: Path, T1_low: float, T1_high: float, output: Path | None = None -) -> nibabel.nifti1.Nifti1Image: - """I/O wrapper to generate a T1 map from SE and IR acquisitions.""" - se_mri = load_mri_data(SE_nii_path, dtype=np.single) - ir_mri = load_mri_data(IR_nii_path, dtype=np.single) - meta = json.loads(meta_path.read_text()) - - t1_volume = compute_mixed_t1_array(se_mri.data, ir_mri.data, meta, T1_low, T1_high) - - nii = nibabel.nifti1.Nifti1Image(t1_volume, ir_mri.affine) - nii.set_sform(nii.affine, "scanner") - nii.set_qform(nii.affine, "scanner") - - if output is not None: - nibabel.nifti1.save(nii, output) - - return nii - - -def mixed_t1map_postprocessing(SE_nii_path: Path, T1_path: Path, output: Path | None = None) -> nibabel.nifti1.Nifti1Image: - """I/O wrapper to mask out non-CSF areas from a Mixed T1 map based on SE signal.""" - t1map_nii = nibabel.nifti1.load(T1_path) - se_mri = load_mri_data(SE_nii_path, np.single) - - mask = create_csf_mask(se_mri.data, use_li=True) - mask = skimage.morphology.erosion(mask) - - masked_t1map = t1map_nii.get_fdata(dtype=np.single) - masked_t1map[~mask] = np.nan - masked_t1map_nii = nibabel.nifti1.Nifti1Image(masked_t1map, t1map_nii.affine, t1map_nii.header) - - if output is not None: - nibabel.nifti1.save(masked_t1map_nii, output) +def looklocker_t1map(looklocker_input: Path, timestamps: Path, output: Path | None = None) -> MRIData: + """ + Generates a T1 map from a 4D Look-Locker inversion recovery dataset. - return masked_t1map_nii + This function acts as an I/O wrapper. It loads the 4D Look-Locker sequence + and the corresponding trigger times. 
It converts the timestamps from milliseconds + (standard DICOM/text output) to seconds, which is required by the underlying + exponential fitting math, and triggers the voxel-by-voxel T1 calculation. + Args: + looklocker_input (Path): Path to the 4D Look-Locker NIfTI file. + timestamps (Path): Path to the text file containing the nominal trigger + delay times (in milliseconds) for each volume in the 4D series. + output (Path | None, optional): Path to save the resulting T1 map NIfTI file. Defaults to None. -def looklocker_t1map(looklocker_input: Path, timestamps: Path, output: Path | None = None) -> MRIData: - """I/O wrapper to generate a Look-Locker T1 map from a NIfTI file.""" + Returns: + MRIData: An MRIData object containing the computed 3D T1 map (in milliseconds) + and the original affine transformation matrix. + """ ll_mri = load_mri_data(looklocker_input, dtype=np.single) # Convert timestamps from milliseconds to seconds time_s = np.loadtxt(timestamps) / 1000.0 @@ -210,70 +232,33 @@ def looklocker_t1map(looklocker_input: Path, timestamps: Path, output: Path | No return t1map_mri -def compute_mixed_t1_array(se_data: np.ndarray, ir_data: np.ndarray, meta: dict, t1_low: float, t1_high: float) -> np.ndarray: +def dicom_to_looklocker(dicomfile: Path, outpath: Path): """ - Computes a Mixed T1 array from Spin-Echo and Inversion-Recovery volumes using a lookup table. - - Args: - se_data (np.ndarray): 3D numpy array of the Spin-Echo modulus data. - ir_data (np.ndarray): 3D numpy array of the Inversion-Recovery corrected real data. - meta (dict): Dictionary containing sequence parameters ('TR_SE', 'TI', 'TE', 'ETL'). - t1_low (float): Lower bound for T1 generation grid. - t1_high (float): Upper bound for T1 generation grid. - - Returns: - np.ndarray: Computed T1 map as a 3D float32 array. 
- """ - nonzero_mask = se_data != 0 - f_data = np.nan * np.zeros_like(ir_data) - f_data[nonzero_mask] = ir_data[nonzero_mask] / se_data[nonzero_mask] - - tr_se, ti, te, etl = meta["TR_SE"], meta["TI"], meta["TE"], meta["ETL"] - f_curve, t1_grid = T1_lookup_table(tr_se, ti, te, etl, t1_low, t1_high) - - interpolator = scipy.interpolate.interp1d(f_curve, t1_grid, kind="nearest", bounds_error=False, fill_value=np.nan) - return interpolator(f_data).astype(np.single) + Converts a Look-Locker DICOM file to a standardized NIfTI format. - -def compute_hybrid_t1_array(ll_data: np.ndarray, mixed_data: np.ndarray, mask: np.ndarray, threshold: float) -> np.ndarray: - """ - Creates a hybrid T1 array by selectively substituting Look-Locker voxels with Mixed voxels. - - Substitution occurs only if BOTH the Look-Locker AND Mixed T1 values exceed the threshold, - AND the voxel falls within the provided CSF mask. + Extracts trigger times to a sidecar text file, delegates conversion to dcm2niix, + and standardizes the output type to single-precision float (intent_code=2001). Args: - ll_data (np.ndarray): 3D numpy array of Look-Locker T1 values. - mixed_data (np.ndarray): 3D numpy array of Mixed T1 values. - mask (np.ndarray): 3D boolean mask (typically eroded CSF). - threshold (float): T1 threshold value (in ms). - - Returns: - np.ndarray: Hybrid 3D T1 array. + dicomfile (Path): Path to the input DICOM file. + outpath (Path): Desired output path for the converted .nii.gz file. 
""" - hybrid = ll_data.copy() - newmask = mask & (ll_data > threshold) & (mixed_data > threshold) - hybrid[newmask] = mixed_data[newmask] - return hybrid - + outdir, form = outpath.parent, outpath.stem + outdir.mkdir(exist_ok=True, parents=True) -def hybrid_t1map( - LL_path: Path, mixed_path: Path, csf_mask_path: Path, threshold: float, erode: int = 0, output: Path | None = None -) -> nibabel.nifti1.Nifti1Image: - """I/O wrapper for merging a Look-Locker and a Mixed T1 map.""" - mixed_mri = nibabel.nifti1.load(mixed_path) - ll_mri = nibabel.nifti1.load(LL_path) + # Extract and save trigger times + times = read_dicom_trigger_times(dicomfile) + np.savetxt(outdir / f"{form}_trigger_times.txt", times) - csf_mask_mri = nibabel.nifti1.load(csf_mask_path) - csf_mask = csf_mask_mri.get_fdata().astype(bool) + with tempfile.TemporaryDirectory(prefix=outpath.stem) as tmpdir: + tmppath = Path(tmpdir) - if erode > 0: - csf_mask = skimage.morphology.erosion(csf_mask, skimage.morphology.ball(erode)) + # Delegate heavy lifting to dcm2niix + run_dcm2niix(dicomfile, tmppath, form, extra_args="-z y --ignore_trigger_times", check=True) - hybrid = compute_hybrid_t1_array(ll_mri.get_fdata(), mixed_mri.get_fdata(), csf_mask, threshold) - - hybrid_nii = nibabel.nifti1.Nifti1Image(hybrid, affine=ll_mri.affine, header=ll_mri.header) - if output is not None: - nibabel.nifti1.save(hybrid_nii, output) + # Copy metadata sidecar + shutil.copy(tmppath / f"{form}.json", outpath.with_suffix(".json")) - return hybrid_nii + # Reload and save to standardize intent codes and precision + mri = load_mri_data(tmppath / f"{form}.nii.gz", dtype=np.double) + save_mri_data(mri, outpath.with_suffix(".nii.gz"), dtype=np.single, intent_code=2001) diff --git a/src/mritk/t1/mixed.py b/src/mritk/t1/mixed.py new file mode 100644 index 0000000..216df9a --- /dev/null +++ b/src/mritk/t1/mixed.py @@ -0,0 +1,326 @@ +# T1 Maps generation module + +# Copyright (C) 2026 Jørgen Riseth (jnriseth@gmail.com) +# Copyright (C) 
2026 Cécile Daversin-Catty (cecile@simula.no) +# Copyright (C) 2026 Simula Research Laboratory + + +import json +import logging +from typing import Optional +from pathlib import Path + +import numpy as np +import scipy +import scipy.interpolate +import skimage +import nibabel + + +from ..data.orientation import data_reorientation, change_of_coordinates_map + +from ..data.base import MRIData +from ..data.io import load_mri_data +from ..masking.masks import create_csf_mask +from .utils import T1_lookup_table, VOLUME_LABELS, run_dcm2niix + +logger = logging.getLogger(__name__) + + +def dicom_standard_affine(frame_fg) -> np.ndarray: + """ + Generates the DICOM to LPS (Left-Posterior-Superior) affine transformation matrix. + + This maps the voxel coordinate space of a DICOM frame to the physical LPS space + by utilizing the pixel spacing, slice spacing, and patient orientation cosines. + + Args: + frame_fg: A DICOM frame functional group sequence object containing + PixelMeasuresSequence, PlaneOrientationSequence, and PlanePositionSequence. + + Returns: + np.ndarray: A 4x4 affine transformation matrix mapping from DICOM voxel + indices to LPS physical coordinates. + """ + # Get the original data shape + df = float(frame_fg.PixelMeasuresSequence[0].SpacingBetweenSlices) + dr, dc = (float(x) for x in frame_fg.PixelMeasuresSequence[0].PixelSpacing) + plane_orientation = frame_fg.PlaneOrientationSequence[0] + orientation = np.array(plane_orientation.ImageOrientationPatient) + + # Find orientation of data array relative to LPS-coordinate system. + row_cosine = orientation[:3] + col_cosine = orientation[3:] + frame_cosine = np.cross(row_cosine, col_cosine) + + # Create DICOM-definition affine map to LPS. + T_1 = np.array(frame_fg.PlanePositionSequence[0].ImagePositionPatient) + + # Create DICOM-definition affine map to LPS. 
+ M_dcm = np.zeros((4, 4)) + M_dcm[:3, 0] = row_cosine * dc + M_dcm[:3, 1] = col_cosine * dr + M_dcm[:3, 2] = frame_cosine * df + M_dcm[:3, 3] = T_1 + M_dcm[3, 3] = 1.0 + + # Reorder from "natural index order" to DICOM affine map definition order. + N_order = np.eye(4)[[2, 1, 0, 3]] + return M_dcm @ N_order + + +def extract_single_volume(D: np.ndarray, frame_fg) -> MRIData: + """ + Extracts, scales, and reorients a single DICOM volume into an MRIData object. + + Applies the appropriate RescaleSlope and RescaleIntercept transformations + to the raw pixel array, and then reorients the resulting data volume from + the native DICOM LPS space to RAS (Right-Anterior-Superior) space. + + Args: + D (np.ndarray): The raw 3D pixel array for the volume. + frame_fg: The corresponding DICOM frame functional group metadata. + + Returns: + MRIData: A newly constructed MRIData object with scaled pixel values + and an affine matrix oriented to RAS space. + """ + # Find scaling values (should potentially be inside scaling loop) + pixel_value_transform = frame_fg.PixelValueTransformationSequence[0] + slope = float(pixel_value_transform.RescaleSlope) + intercept = float(pixel_value_transform.RescaleIntercept) + private = frame_fg[0x2005, 0x140F][0] + scale_slope = private[0x2005, 0x100E].value + + # Loop over and scale values. + volume = np.zeros_like(D, dtype=np.single) + for idx in range(D.shape[0]): + volume[idx] = (intercept + slope * D[idx]) / (scale_slope * slope) + + A_dcm = dicom_standard_affine(frame_fg) + C = change_of_coordinates_map("LPS", "RAS") + mri = data_reorientation(MRIData(volume, C @ A_dcm)) + + return mri + + +def mixed_t1map( + SE_nii_path: Path, IR_nii_path: Path, meta_path: Path, T1_low: float, T1_high: float, output: Path | None = None +) -> nibabel.nifti1.Nifti1Image: + """ + Generates a T1 relaxation map by combining Spin-Echo (SE) and Inversion-Recovery (IR) acquisitions. + + This function acts as an I/O wrapper. 
It loads the respective NIfTI volumes + and their sequence metadata (such as TR, TE, TI, and Echo Train Length), + and passes them to the underlying mathematical function which interpolates + the T1 values based on the theoretical signal ratio (IR/SE). + + Args: + SE_nii_path (Path): Path to the Spin-Echo modulus NIfTI file. + IR_nii_path (Path): Path to the Inversion-Recovery corrected real NIfTI file. + meta_path (Path): Path to the JSON file containing the sequence parameters + ('TR_SE', 'TI', 'TE', 'ETL'). + T1_low (float): Lower bound for the T1 interpolation grid (in ms). + T1_high (float): Upper bound for the T1 interpolation grid (in ms). + output (Path | None, optional): Path to save the resulting T1 map NIfTI file. Defaults to None. + + Returns: + nibabel.nifti1.Nifti1Image: The computed T1 map as a NIfTI image object, + with the qform/sform properly set to scanner space. + """ + se_mri = load_mri_data(SE_nii_path, dtype=np.single) + ir_mri = load_mri_data(IR_nii_path, dtype=np.single) + meta = json.loads(meta_path.read_text()) + + t1_volume = compute_mixed_t1_array(se_mri.data, ir_mri.data, meta, T1_low, T1_high) + + nii = nibabel.nifti1.Nifti1Image(t1_volume, ir_mri.affine) + nii.set_sform(nii.affine, "scanner") + nii.set_qform(nii.affine, "scanner") + + if output is not None: + nibabel.nifti1.save(nii, output) + + return nii + + +def mixed_t1map_postprocessing(SE_nii_path: Path, T1_path: Path, output: Path | None = None) -> nibabel.nifti1.Nifti1Image: + """ + Masks a Mixed T1 map to isolate the Cerebrospinal Fluid (CSF). + + Because the Mixed sequence is primarily sensitive/calibrated for long T1 species + like fluid, this function isolates the CSF. It derives a mask dynamically from + the original Spin-Echo sequence using Li thresholding, erodes the mask to avoid + partial-voluming effects at tissue boundaries, and applies it to the T1 map. + + Args: + SE_nii_path (Path): Path to the Spin-Echo NIfTI file used to derive the mask. 
+ T1_path (Path): Path to the previously generated Mixed T1 map NIfTI file. + output (Path | None, optional): Path to save the masked T1 NIfTI file. Defaults to None. + + Returns: + nibabel.nifti1.Nifti1Image: The masked T1 map, where all non-CSF voxels + have been set to NaN. + """ + t1map_nii = nibabel.nifti1.load(T1_path) + se_mri = load_mri_data(SE_nii_path, np.single) + + mask = create_csf_mask(se_mri.data, use_li=True) + mask = skimage.morphology.erosion(mask) + + masked_t1map = t1map_nii.get_fdata(dtype=np.single) + masked_t1map[~mask] = np.nan + masked_t1map_nii = nibabel.nifti1.Nifti1Image(masked_t1map, t1map_nii.affine, t1map_nii.header) + + if output is not None: + nibabel.nifti1.save(masked_t1map_nii, output) + + return masked_t1map_nii + + +def compute_mixed_t1_array(se_data: np.ndarray, ir_data: np.ndarray, meta: dict, t1_low: float, t1_high: float) -> np.ndarray: + """ + Computes a Mixed T1 array from Spin-Echo and Inversion-Recovery volumes using a lookup table. + + Args: + se_data (np.ndarray): 3D numpy array of the Spin-Echo modulus data. + ir_data (np.ndarray): 3D numpy array of the Inversion-Recovery corrected real data. + meta (dict): Dictionary containing sequence parameters ('TR_SE', 'TI', 'TE', 'ETL'). + t1_low (float): Lower bound for T1 generation grid. + t1_high (float): Upper bound for T1 generation grid. + + Returns: + np.ndarray: Computed T1 map as a 3D float32 array. 
+ """ + nonzero_mask = se_data != 0 + f_data = np.nan * np.zeros_like(ir_data) + f_data[nonzero_mask] = ir_data[nonzero_mask] / se_data[nonzero_mask] + + tr_se, ti, te, etl = meta["TR_SE"], meta["TI"], meta["TE"], meta["ETL"] + f_curve, t1_grid = T1_lookup_table(tr_se, ti, te, etl, t1_low, t1_high) + + interpolator = scipy.interpolate.interp1d(f_curve, t1_grid, kind="nearest", bounds_error=False, fill_value=np.nan) + return interpolator(f_data).astype(np.single) + + +def _extract_frame_metadata(frame_fg) -> dict: + """ + Extracts core physical parameters (TR, TE, TI, ETL) from a DICOM frame functional group. + + Args: + frame_fg: The PerFrameFunctionalGroupsSequence element for a specific frame. + + Returns: + dict: A dictionary containing available MR timing parameters. + """ + descrip = { + "TR": float(frame_fg.MRTimingAndRelatedParametersSequence[0].RepetitionTime), + "TE": float(frame_fg.MREchoSequence[0].EffectiveEchoTime), + } + + if hasattr(frame_fg.MRModifierSequence[0], "InversionTimes"): + descrip["TI"] = frame_fg.MRModifierSequence[0].InversionTimes[0] + + if hasattr(frame_fg.MRTimingAndRelatedParametersSequence[0], "EchoTrainLength"): + descrip["ETL"] = frame_fg.MRTimingAndRelatedParametersSequence[0].EchoTrainLength + + return descrip + + +def extract_mixed_dicom(dcmpath: Path, subvolumes: list[str]) -> list[dict]: + """ + Reads a Mixed DICOM file and splits it into independent NIfTI subvolumes. + + Args: + dcmpath (Path): Path to the input DICOM file. + subvolumes (list[str]): List of volume labels mapping to the slices in the DICOM. + + Returns: + list[dict]: A list containing dictionaries with a generated 'nifti' image + and a 'descrip' metadata dictionary for each requested subvolume. 
+ """ + import pydicom + + dcm = pydicom.dcmread(str(dcmpath)) + frames_total = int(dcm.NumberOfFrames) + + # [0x2001, 0x1018] is a private Philips tag representing 'Number of Slices MR' + frames_per_volume = dcm[0x2001, 0x1018].value + num_volumes = frames_total // frames_per_volume + assert num_volumes * frames_per_volume == frames_total, "Subvolume dimensions do not evenly divide the total frames." + + pixel_data = dcm.pixel_array.astype(np.single) + frame_fg_sequence = dcm.PerFrameFunctionalGroupsSequence + + vols_out = [] + for volname in subvolumes: + vol_idx = VOLUME_LABELS.index(volname) + + # Find volume slices representing the current subvolume + subvol_idx_start = vol_idx * frames_per_volume + subvol_idx_end = (vol_idx + 1) * frames_per_volume + frame_fg = frame_fg_sequence[subvol_idx_start] + + logger.info( + f"Converting volume {vol_idx + 1}/{len(VOLUME_LABELS)}: '{volname}' " + f"between indices {subvol_idx_start}-{subvol_idx_end} out of {frames_total}." + ) + + mri = extract_single_volume(pixel_data[subvol_idx_start:subvol_idx_end], frame_fg) + + nii_oriented = nibabel.nifti1.Nifti1Image(mri.data, mri.affine) + nii_oriented.set_sform(nii_oriented.affine, "scanner") + nii_oriented.set_qform(nii_oriented.affine, "scanner") + + description = _extract_frame_metadata(frame_fg) + vols_out.append({"nifti": nii_oriented, "descrip": description}) + + return vols_out + + +def dicom_to_mixed(dcmpath: Path, outpath: Path, subvolumes: Optional[list[str]] = None): + """ + Converts a Mixed sequence DICOM file into independent subvolume NIfTIs. + + Generates dedicated images for Spin-Echo, Inversion-Recovery, etc., + and saves sequence timing metadata to a JSON sidecar. + + Args: + dcmpath (Path): Path to the input Mixed DICOM file. + outpath (Path): Base path for output files. Suffixes are automatically appended. + subvolumes (list[str], optional): specific subvolumes to extract. + Defaults to all known VOLUME_LABELS. 
+ """ + subvolumes = subvolumes or VOLUME_LABELS + assert all([volname in VOLUME_LABELS for volname in subvolumes]), ( + f"Invalid subvolume name in {subvolumes}, must be one of {VOLUME_LABELS}" + ) + + outdir, form = outpath.parent, outpath.stem + outdir.mkdir(exist_ok=True, parents=True) + + vols = extract_mixed_dicom(dcmpath, subvolumes) + meta = {} + + for vol, volname in zip(vols, subvolumes): + output = outpath.with_name(f"{outpath.stem}_{volname}.nii.gz") + nibabel.nifti1.save(vol["nifti"], output) + + descrip = vol["descrip"] + try: + if volname == "SE-modulus": + meta["TR_SE"] = descrip["TR"] + meta["TE"] = descrip["TE"] + meta["ETL"] = descrip["ETL"] + elif volname == "IR-corrected-real": + meta["TR_IR"] = descrip["TR"] + meta["TI"] = descrip["TI"] + except KeyError as e: + logger.error(f"Missing required metadata for {volname}: {descrip}") + raise e + + # Write merged metadata sidecar + (outdir / f"{form}_meta.json").write_text(json.dumps(meta, indent=4)) + + # Attempt standard dcm2niix conversion (soft failure allowed for legacy behavior) + run_dcm2niix(dcmpath, outdir, form, extra_args="-w 0 --terse -b o", check=False) diff --git a/src/mritk/t1_maps/utils.py b/src/mritk/t1/utils.py similarity index 72% rename from src/mritk/t1_maps/utils.py rename to src/mritk/t1/utils.py index 7118239..ebee039 100644 --- a/src/mritk/t1_maps/utils.py +++ b/src/mritk/t1/utils.py @@ -4,17 +4,18 @@ # Copyright (C) 2026 Cécile Daversin-Catty (cecile@simula.no) # Copyright (C) 2026 Simula Research Laboratory +from pathlib import Path +import subprocess +import shutil +import shlex import numpy as np import scipy -from pathlib import Path import skimage import warnings +import logging from scipy.optimize import OptimizeWarning import nibabel -from ..data.orientation import data_reorientation, change_of_coordinates_map -from ..data.base import MRIData - VOLUME_LABELS = [ "IR-modulus", @@ -25,102 +26,7 @@ "T1map-scanner", ] - -def read_dicom_trigger_times(dicomfile: Path) -> 
np.ndarray: - """ - Extracts unique nominal cardiac trigger delay times from DICOM functional groups. - - Args: - dicomfile (str): The file path to the DICOM file. - - Returns: - np.ndarray: A sorted array of unique trigger delay times (in milliseconds) - extracted from the CardiacSynchronizationSequence. - """ - import pydicom - - dcm = pydicom.dcmread(dicomfile) - all_frame_times = [ - f.CardiacSynchronizationSequence[0].NominalCardiacTriggerDelayTime for f in dcm.PerFrameFunctionalGroupsSequence - ] - return np.unique(all_frame_times) - - -def dicom_standard_affine(frame_fg) -> np.ndarray: - """ - Generates the DICOM to LPS (Left-Posterior-Superior) affine transformation matrix. - - This maps the voxel coordinate space of a DICOM frame to the physical LPS space - by utilizing the pixel spacing, slice spacing, and patient orientation cosines. - - Args: - frame_fg: A DICOM frame functional group sequence object containing - PixelMeasuresSequence, PlaneOrientationSequence, and PlanePositionSequence. - - Returns: - np.ndarray: A 4x4 affine transformation matrix mapping from DICOM voxel - indices to LPS physical coordinates. - """ - # Get the original data shape - df = float(frame_fg.PixelMeasuresSequence[0].SpacingBetweenSlices) - dr, dc = (float(x) for x in frame_fg.PixelMeasuresSequence[0].PixelSpacing) - plane_orientation = frame_fg.PlaneOrientationSequence[0] - orientation = np.array(plane_orientation.ImageOrientationPatient) - - # Find orientation of data array relative to LPS-coordinate system. - row_cosine = orientation[:3] - col_cosine = orientation[3:] - frame_cosine = np.cross(row_cosine, col_cosine) - - # Create DICOM-definition affine map to LPS. - T_1 = np.array(frame_fg.PlanePositionSequence[0].ImagePositionPatient) - - # Create DICOM-definition affine map to LPS. 
- M_dcm = np.zeros((4, 4)) - M_dcm[:3, 0] = row_cosine * dc - M_dcm[:3, 1] = col_cosine * dr - M_dcm[:3, 2] = frame_cosine * df - M_dcm[:3, 3] = T_1 - M_dcm[3, 3] = 1.0 - - # Reorder from "natural index order" to DICOM affine map definition order. - N_order = np.eye(4)[[2, 1, 0, 3]] - return M_dcm @ N_order - - -def extract_single_volume(D: np.ndarray, frame_fg) -> MRIData: - """ - Extracts, scales, and reorients a single DICOM volume into an MRIData object. - - Applies the appropriate RescaleSlope and RescaleIntercept transformations - to the raw pixel array, and then reorients the resulting data volume from - the native DICOM LPS space to RAS (Right-Anterior-Superior) space. - - Args: - D (np.ndarray): The raw 3D pixel array for the volume. - frame_fg: The corresponding DICOM frame functional group metadata. - - Returns: - MRIData: A newly constructed MRIData object with scaled pixel values - and an affine matrix oriented to RAS space. - """ - # Find scaling values (should potentially be inside scaling loop) - pixel_value_transform = frame_fg.PixelValueTransformationSequence[0] - slope = float(pixel_value_transform.RescaleSlope) - intercept = float(pixel_value_transform.RescaleIntercept) - private = frame_fg[0x2005, 0x140F][0] - scale_slope = private[0x2005, 0x100E].value - - # Loop over and scale values. 
- volume = np.zeros_like(D, dtype=np.single) - for idx in range(D.shape[0]): - volume[idx] = (intercept + slope * D[idx]) / (scale_slope * slope) - - A_dcm = dicom_standard_affine(frame_fg) - C = change_of_coordinates_map("LPS", "RAS") - mri = data_reorientation(MRIData(volume, C @ A_dcm)) - - return mri +logger = logging.getLogger(__name__) def mri_facemask(vol: np.ndarray, smoothing_level: float = 5.0) -> np.ndarray: @@ -362,3 +268,47 @@ def compare_nifti_arrays(arr1: np.ndarray, arr2: np.ndarray, data_tolerance: flo return np.allclose(arr1, arr2, atol=data_tolerance) else: return np.array_equal(arr1, arr2) + + +def run_dcm2niix(input_path: Path, output_dir: Path, form: str, extra_args: str = "", check: bool = True): + """ + Utility wrapper to execute the dcm2niix command-line tool securely. + + Args: + input_path (Path): Path to the input DICOM file/folder. + output_dir (Path): Path to the target output directory. + form (str): Output filename format string. + extra_args (str, optional): Additional command line arguments. Defaults to "". + check (bool, optional): If True, raises an exception on failure. Defaults to True. + + Raises: + RuntimeError: If the dcm2niix executable is not found in the system PATH. + subprocess.CalledProcessError: If the command fails and `check` is True. + """ + # 1. Locate the executable securely + executable = shutil.which("dcm2niix") + if executable is None: + raise RuntimeError( + "The 'dcm2niix' executable was not found. Please ensure it is installed and available in your system PATH." + ) + + # 2. Build the arguments list safely + args = [executable, "-f", form] + + # Safely parse the extra string arguments into a list + if extra_args: + args.extend(shlex.split(extra_args)) + + args.extend(["-o", str(output_dir), str(input_path)]) + + # Reconstruct the command string purely for logging purposes + cmd_str = shlex.join(args) + logger.debug(f"Executing: {cmd_str}") + + try: + # 3. 
Execute without shell=True for better security and stability + subprocess.run(args, check=check, capture_output=True, text=True) + except subprocess.CalledProcessError as e: + logger.error(f"dcm2niix execution failed.\nCommand: {cmd_str}\nError: {e.stderr}") + if check: + raise diff --git a/src/mritk/t1_maps/__init__.py b/src/mritk/t1_maps/__init__.py deleted file mode 100644 index 1b67101..0000000 --- a/src/mritk/t1_maps/__init__.py +++ /dev/null @@ -1,7 +0,0 @@ -""" -Copyright (C) 2026 Jørgen Riseth (jnriseth@gmail.com) -Copyright (C) 2026 Cécile Daversin-Catty (cecile@simula.no) -Copyright (C) 2026 Simula Research Laboratory -""" - -__all__ = ["dicom_to_nifti", "t1_maps", "t1_to_r1", "utils"] diff --git a/src/mritk/t1_maps/dicom_to_nifti.py b/src/mritk/t1_maps/dicom_to_nifti.py deleted file mode 100644 index 21ea85e..0000000 --- a/src/mritk/t1_maps/dicom_to_nifti.py +++ /dev/null @@ -1,228 +0,0 @@ -# MRI DICOM to NIfTI conversion Module - -# Copyright (C) 2026 Jørgen Riseth (jnriseth@gmail.com) -# Copyright (C) 2026 Cécile Daversin-Catty (cecile@simula.no) -# Copyright (C) 2026 Simula Research Laboratory - - -import shutil -import subprocess -import tempfile -import logging -import json -from pathlib import Path -from typing import Optional - -import nibabel -import numpy as np - -from ..data.io import load_mri_data, save_mri_data -from ..t1_maps.utils import VOLUME_LABELS, read_dicom_trigger_times -from .utils import extract_single_volume - -logger = logging.getLogger(__name__) - - -def _extract_frame_metadata(frame_fg) -> dict: - """ - Extracts core physical parameters (TR, TE, TI, ETL) from a DICOM frame functional group. - - Args: - frame_fg: The PerFrameFunctionalGroupsSequence element for a specific frame. - - Returns: - dict: A dictionary containing available MR timing parameters. 
- """ - descrip = { - "TR": float(frame_fg.MRTimingAndRelatedParametersSequence[0].RepetitionTime), - "TE": float(frame_fg.MREchoSequence[0].EffectiveEchoTime), - } - - if hasattr(frame_fg.MRModifierSequence[0], "InversionTimes"): - descrip["TI"] = frame_fg.MRModifierSequence[0].InversionTimes[0] - - if hasattr(frame_fg.MRTimingAndRelatedParametersSequence[0], "EchoTrainLength"): - descrip["ETL"] = frame_fg.MRTimingAndRelatedParametersSequence[0].EchoTrainLength - - return descrip - - -import shlex -import logging - -logger = logging.getLogger(__name__) - - -def run_dcm2niix(input_path: Path, output_dir: Path, form: str, extra_args: str = "", check: bool = True): - """ - Utility wrapper to execute the dcm2niix command-line tool securely. - - Args: - input_path (Path): Path to the input DICOM file/folder. - output_dir (Path): Path to the target output directory. - form (str): Output filename format string. - extra_args (str, optional): Additional command line arguments. Defaults to "". - check (bool, optional): If True, raises an exception on failure. Defaults to True. - - Raises: - RuntimeError: If the dcm2niix executable is not found in the system PATH. - subprocess.CalledProcessError: If the command fails and `check` is True. - """ - # 1. Locate the executable securely - executable = shutil.which("dcm2niix") - if executable is None: - raise RuntimeError( - "The 'dcm2niix' executable was not found. Please ensure it is installed and available in your system PATH." - ) - - # 2. Build the arguments list safely - args = [executable, "-f", form] - - # Safely parse the extra string arguments into a list - if extra_args: - args.extend(shlex.split(extra_args)) - - args.extend(["-o", str(output_dir), str(input_path)]) - - # Reconstruct the command string purely for logging purposes - cmd_str = shlex.join(args) - logger.debug(f"Executing: {cmd_str}") - - try: - # 3. 
Execute without shell=True for better security and stability - subprocess.run(args, check=check, capture_output=True, text=True) - except subprocess.CalledProcessError as e: - logger.error(f"dcm2niix execution failed.\nCommand: {cmd_str}\nError: {e.stderr}") - if check: - raise - - -def extract_mixed_dicom(dcmpath: Path, subvolumes: list[str]) -> list[dict]: - """ - Reads a Mixed DICOM file and splits it into independent NIfTI subvolumes. - - Args: - dcmpath (Path): Path to the input DICOM file. - subvolumes (list[str]): List of volume labels mapping to the slices in the DICOM. - - Returns: - list[dict]: A list containing dictionaries with a generated 'nifti' image - and a 'descrip' metadata dictionary for each requested subvolume. - """ - import pydicom - - dcm = pydicom.dcmread(str(dcmpath)) - frames_total = int(dcm.NumberOfFrames) - - # [0x2001, 0x1018] is a private Philips tag representing 'Number of Slices MR' - frames_per_volume = dcm[0x2001, 0x1018].value - num_volumes = frames_total // frames_per_volume - assert num_volumes * frames_per_volume == frames_total, "Subvolume dimensions do not evenly divide the total frames." - - pixel_data = dcm.pixel_array.astype(np.single) - frame_fg_sequence = dcm.PerFrameFunctionalGroupsSequence - - vols_out = [] - for volname in subvolumes: - vol_idx = VOLUME_LABELS.index(volname) - - # Find volume slices representing the current subvolume - subvol_idx_start = vol_idx * frames_per_volume - subvol_idx_end = (vol_idx + 1) * frames_per_volume - frame_fg = frame_fg_sequence[subvol_idx_start] - - logger.info( - f"Converting volume {vol_idx + 1}/{len(VOLUME_LABELS)}: '{volname}' " - f"between indices {subvol_idx_start}-{subvol_idx_end} out of {frames_total}." 
- ) - - mri = extract_single_volume(pixel_data[subvol_idx_start:subvol_idx_end], frame_fg) - - nii_oriented = nibabel.nifti1.Nifti1Image(mri.data, mri.affine) - nii_oriented.set_sform(nii_oriented.affine, "scanner") - nii_oriented.set_qform(nii_oriented.affine, "scanner") - - description = _extract_frame_metadata(frame_fg) - vols_out.append({"nifti": nii_oriented, "descrip": description}) - - return vols_out - - -def dicom_to_looklocker(dicomfile: Path, outpath: Path): - """ - Converts a Look-Locker DICOM file to a standardized NIfTI format. - - Extracts trigger times to a sidecar text file, delegates conversion to dcm2niix, - and standardizes the output type to single-precision float (intent_code=2001). - - Args: - dicomfile (Path): Path to the input DICOM file. - outpath (Path): Desired output path for the converted .nii.gz file. - """ - outdir, form = outpath.parent, outpath.stem - outdir.mkdir(exist_ok=True, parents=True) - - # Extract and save trigger times - times = read_dicom_trigger_times(dicomfile) - np.savetxt(outdir / f"{form}_trigger_times.txt", times) - - with tempfile.TemporaryDirectory(prefix=outpath.stem) as tmpdir: - tmppath = Path(tmpdir) - - # Delegate heavy lifting to dcm2niix - run_dcm2niix(dicomfile, tmppath, form, extra_args="-z y --ignore_trigger_times", check=True) - - # Copy metadata sidecar - shutil.copy(tmppath / f"{form}.json", outpath.with_suffix(".json")) - - # Reload and save to standardize intent codes and precision - mri = load_mri_data(tmppath / f"{form}.nii.gz", dtype=np.double) - save_mri_data(mri, outpath.with_suffix(".nii.gz"), dtype=np.single, intent_code=2001) - - -def dicom_to_mixed(dcmpath: Path, outpath: Path, subvolumes: Optional[list[str]] = None): - """ - Converts a Mixed sequence DICOM file into independent subvolume NIfTIs. - - Generates dedicated images for Spin-Echo, Inversion-Recovery, etc., - and saves sequence timing metadata to a JSON sidecar. - - Args: - dcmpath (Path): Path to the input Mixed DICOM file. 
- outpath (Path): Base path for output files. Suffixes are automatically appended. - subvolumes (list[str], optional): specific subvolumes to extract. - Defaults to all known VOLUME_LABELS. - """ - subvolumes = subvolumes or VOLUME_LABELS - assert all([volname in VOLUME_LABELS for volname in subvolumes]), ( - f"Invalid subvolume name in {subvolumes}, must be one of {VOLUME_LABELS}" - ) - - outdir, form = outpath.parent, outpath.stem - outdir.mkdir(exist_ok=True, parents=True) - - vols = extract_mixed_dicom(dcmpath, subvolumes) - meta = {} - - for vol, volname in zip(vols, subvolumes): - output = outpath.with_name(f"{outpath.stem}_{volname}.nii.gz") - nibabel.nifti1.save(vol["nifti"], output) - - descrip = vol["descrip"] - try: - if volname == "SE-modulus": - meta["TR_SE"] = descrip["TR"] - meta["TE"] = descrip["TE"] - meta["ETL"] = descrip["ETL"] - elif volname == "IR-corrected-real": - meta["TR_IR"] = descrip["TR"] - meta["TI"] = descrip["TI"] - except KeyError as e: - logger.error(f"Missing required metadata for {volname}: {descrip}") - raise e - - # Write merged metadata sidecar - (outdir / f"{form}_meta.json").write_text(json.dumps(meta, indent=4)) - - # Attempt standard dcm2niix conversion (soft failure allowed for legacy behavior) - run_dcm2niix(dcmpath, outdir, form, extra_args="-w 0 --terse -b o", check=False) diff --git a/src/mritk/t1_maps/t1_to_r1.py b/src/mritk/t1_maps/t1_to_r1.py deleted file mode 100644 index 14a3c60..0000000 --- a/src/mritk/t1_maps/t1_to_r1.py +++ /dev/null @@ -1,100 +0,0 @@ -# T1 to R1 module - -# Copyright (C) 2026 Jørgen Riseth (jnriseth@gmail.com) -# Copyright (C) 2026 Cécile Daversin-Catty (cecile@simula.no) -# Copyright (C) 2026 Simula Research Laboratory - - -import numpy as np -from pathlib import Path -from typing import Union - -from ..data.base import MRIData -from ..data.io import load_mri_data, save_mri_data - - -def compute_r1_array( - t1_data: np.ndarray, scale: float = 1000.0, t1_low: float = 1.0, t1_high: float = 
float("inf") -) -> np.ndarray: - """ - Pure numpy function converting a T1 relaxation time array to an R1 relaxation rate array. - - The relationship is R1 = scale / T1. Values outside the [t1_low, t1_high] - range are set to NaN to filter out noise and non-physiological data. - - Args: - t1_data (np.ndarray): The input array containing T1 relaxation times. - scale (float, optional): Scaling factor, typically 1000 to convert from ms to s^-1. Defaults to 1000. - t1_low (float, optional): Lower bound for valid T1 values. Defaults to 1. - t1_high (float, optional): Upper bound for valid T1 values. Defaults to infinity. - - Returns: - np.ndarray: An array of R1 relaxation rates. Invalid/out-of-bound voxels are set to NaN. - """ - valid_t1 = (t1_low <= t1_data) & (t1_data <= t1_high) - r1_data = np.nan * np.zeros_like(t1_data) - - # Calculate R1 only for valid voxels to avoid division by zero or extreme outliers - r1_data[valid_t1] = scale / t1_data[valid_t1] - - return r1_data - - -def convert_T1_to_R1( - T1map_mri: MRIData, - scale: float = 1000.0, - t1_low: float = 1.0, - t1_high: float = float("inf"), -) -> MRIData: - """ - Converts a T1 map MRIData object into an R1 map MRIData object. - - Args: - T1map_mri (MRIData): The input MRIData object representing the T1 map. - scale (float, optional): Scaling factor. Defaults to 1000. - t1_low (float, optional): Lower bound for valid T1 values. Defaults to 1. - t1_high (float, optional): Upper bound for valid T1 values. Defaults to float('inf'). - - Returns: - MRIData: A new MRIData object containing the R1 map array and the original affine matrix. 
- """ - r1_data = compute_r1_array(T1map_mri.data, scale, t1_low, t1_high) - return MRIData(data=r1_data, affine=T1map_mri.affine) - - -def T1_to_R1( - input_mri: Union[Path, MRIData], - output: Path | None = None, - scale: float = 1000.0, - t1_low: float = 1.0, - t1_high: float = float("inf"), -) -> MRIData: - """ - High-level wrapper to convert a T1 map to an R1 map, handling file I/O operations. - - Args: - input_mri (Union[Path, MRIData]): A Path to a T1 NIfTI file or an already loaded MRIData object. - output (Path | None, optional): Path to save the resulting R1 map to disk. Defaults to None. - scale (float, optional): Scaling factor (e.g., 1000 for ms -> s^-1). Defaults to 1000. - t1_low (float, optional): Lower bound for valid T1 values. Defaults to 1. - t1_high (float, optional): Upper bound for valid T1 values. Defaults to float('inf'). - - Returns: - MRIData: The computed R1 map as an MRIData object. - - Raises: - ValueError: If input_mri is neither a Path nor an MRIData object. 
- """ - if isinstance(input_mri, Path): - T1map_mri = load_mri_data(input_mri, dtype=np.single) - elif isinstance(input_mri, MRIData): - T1map_mri = input_mri - else: - raise ValueError(f"Input should be a Path or MRIData, got {type(input_mri)}") - - R1map_mri = convert_T1_to_R1(T1map_mri, scale, t1_low, t1_high) - - if output is not None: - save_mri_data(R1map_mri, output, dtype=np.single) - - return R1map_mri diff --git a/test/test_concentration.py b/test/test_concentration.py index 73936d8..ed4e8d3 100644 --- a/test/test_concentration.py +++ b/test/test_concentration.py @@ -1,15 +1,25 @@ -"""MRI Concentration maps - Tests +# MRI Concentration maps - Tests -Copyright (C) 2026 Jørgen Riseth (jnriseth@gmail.com) -Copyright (C) 2026 Cécile Daversin-Catty (cecile@simula.no) -Copyright (C) 2026 Simula Research Laboratory -""" +# Copyright (C) 2026 Jørgen Riseth (jnriseth@gmail.com) +# Copyright (C) 2026 Cécile Daversin-Catty (cecile@simula.no) +# Copyright (C) 2026 Simula Research Laboratory from pathlib import Path +import numpy as np +import pytest -from mritk.concentration.concentration import concentration +from mritk.data.base import MRIData +from mritk.t1.concentration import ( + concentration, + compute_r1_array, + convert_T1_to_R1, + T1_to_R1, + concentration_from_T1, + concentration_from_R1, + compute_concentration_array, +) -from mritk.t1_maps.utils import compare_nifti_images +from mritk.t1.utils import compare_nifti_images def test_intracranial_concentration(tmp_path, mri_data_dir: Path): @@ -29,5 +39,126 @@ def test_intracranial_concentration(tmp_path, mri_data_dir: Path): test_outputs = [tmp_path / f"output_ses-0{i}_concentration.nii.gz" for i in sessions] for i, s in enumerate(sessions): - concentration(input=images_path[i], reference=baseline_path, output=test_outputs[i], r1=r1, mask=mask_path) + concentration( + input_path=images_path[i], + reference_path=baseline_path, + output_path=test_outputs[i], + r1=r1, + mask_path=mask_path, + ) 
compare_nifti_images(test_outputs[i], ref_outputs[i], data_tolerance=1e-12) + + +def test_compute_r1_array_standard(): + """Test basic T1 to R1 mathematical conversion.""" + t1_data = np.array([500.0, 1000.0, 2000.0]) + + # Expected R1 = 1000 / T1 + expected = np.array([2.0, 1.0, 0.5]) + + r1_data = compute_r1_array(t1_data, scale=1000.0) + np.testing.assert_array_almost_equal(r1_data, expected) + + +def test_compute_r1_array_clipping(): + """Test that values outside the [t1_low, t1_high] bounds are safely set to NaN.""" + t1_data = np.array([0.5, 500.0, 6000.0, 10000.0]) + t1_low = 1.0 + t1_high = 5000.0 + + r1_data = compute_r1_array(t1_data, scale=1000.0, t1_low=t1_low, t1_high=t1_high) + + # index 0 (0.5) < 1.0 -> NaN + # index 1 (500) -> 2.0 + # index 2 (6000) > 5000.0 -> NaN + # index 3 (10000) > 5000.0 -> NaN + + assert np.isnan(r1_data[0]) + assert r1_data[1] == 2.0 + assert np.isnan(r1_data[2]) + assert np.isnan(r1_data[3]) + + +def test_convert_t1_to_r1_mridata(): + """Test the conversion properly preserves the MRIData class attributes (affine).""" + t1_data = np.array([[[1000.0, 2000.0]]]) + affine = np.eye(4) + mri = MRIData(data=t1_data, affine=affine) + + r1_mri = convert_T1_to_R1(mri, scale=1000.0) + + expected_r1 = np.array([[[1.0, 0.5]]]) + + np.testing.assert_array_almost_equal(r1_mri.data, expected_r1) + np.testing.assert_array_equal(r1_mri.affine, affine) + + +def test_t1_to_r1_invalid_input(): + """Test the wrapper function throws ValueError on an invalid type input.""" + with pytest.raises(ValueError, match="Input should be a Path or MRIData"): + # Explicitly passing a raw string instead of Path/MRIData + T1_to_R1(input_mri="not_a_path_or_mridata") + + +def test_concentration_from_t1(): + """Test the core math equation for T1-to-Concentration conversion.""" + t1 = np.array([1000.0]) + t1_0 = np.array([2000.0]) + r1 = 0.005 + + # Math: C = (1 / 0.005) * ((1 / 1000) - (1 / 2000)) + # C = 200 * (0.001 - 0.0005) + # C = 200 * 0.0005 = 0.1 + 
expected = np.array([0.1]) + + result = concentration_from_T1(t1, t1_0, r1) + np.testing.assert_array_almost_equal(result, expected) + + +def test_concentration_from_r1(): + """Test the core math equation for R1-to-Concentration conversion.""" + r1_map = np.array([2.0]) + r1_0_map = np.array([1.0]) + r1 = 0.005 + + # Math: C = (1 / 0.005) * (2.0 - 1.0) + # C = 200 * 1.0 = 200.0 + expected = np.array([200.0]) + + result = concentration_from_R1(r1_map, r1_0_map, r1) + np.testing.assert_array_almost_equal(result, expected) + + +def test_compute_concentration_array_masking(): + """Test that zero/negative/tiny values and explicit masks yield NaNs.""" + t1_data = np.array([1000.0, 1e-12, 1000.0, 1000.0]) + t10_data = np.array([2000.0, 2000.0, 1e-12, 2000.0]) + + # Explicit mask excluding the last voxel + mask = np.array([True, True, True, False]) + r1 = 0.005 + + result = compute_concentration_array(t1_data, t10_data, r1, mask=mask) + + # Expectations: + # Voxel 0: Valid, should be 0.1 + # Voxel 1: t1_data is <= 1e-10 -> NaN + # Voxel 2: t10_data is <= 1e-10 -> NaN + # Voxel 3: mask is False -> NaN + + assert np.isclose(result[0], 0.1) + assert np.isnan(result[1]) + assert np.isnan(result[2]) + assert np.isnan(result[3]) + + +def test_compute_concentration_array_no_mask(): + """Test the array computation correctly defaults to keeping all valid positive T1s when no mask is provided.""" + t1_data = np.array([1000.0, 1000.0]) + t10_data = np.array([2000.0, 2000.0]) + r1 = 0.005 + + result = compute_concentration_array(t1_data, t10_data, r1, mask=None) + + assert np.isclose(result[0], 0.1) + assert np.isclose(result[1], 0.1) diff --git a/test/test_hybrid.py b/test/test_hybrid.py new file mode 100644 index 0000000..22e7721 --- /dev/null +++ b/test/test_hybrid.py @@ -0,0 +1,54 @@ +from pathlib import Path + +import numpy as np + +from mritk.t1.hybrid import compute_hybrid_t1_array, hybrid_t1map +from mritk.t1.utils import compare_nifti_images + + +def 
test_hybrid_t1map(tmp_path, mri_data_dir: Path): + LL_path = ( + mri_data_dir / "mri-processed/mri_processed_data/sub-01/registered/sub-01_ses-01_acq-looklocker_T1map_registered.nii.gz" + ) + mixed_path = ( + mri_data_dir / "mri-processed/mri_processed_data/sub-01/registered/sub-01_ses-01_acq-mixed_T1map_registered.nii.gz" + ) + csf_mask_path = mri_data_dir / "mri-processed/mri_processed_data/sub-01/segmentations/sub-01_seg-csf_binary.nii.gz" + test_output = tmp_path / "output_T1map_hybrid.nii.gz" + ref_output = mri_data_dir / "mri-processed/mri_processed_data/sub-01/T1maps/sub-01_ses-01_T1map_hybrid.nii.gz" + threshold = 1500 + erode = 1 + + hybrid_t1map( + LL_path=LL_path, mixed_path=mixed_path, csf_mask_path=csf_mask_path, threshold=threshold, erode=erode, output=test_output + ) + compare_nifti_images(test_output, ref_output, data_tolerance=1e-12) + + +def test_compute_hybrid_t1_array(): + """Test hybrid array logic merges LL and Mixed appropriately based on threshold and mask.""" + # 1D array for simplicity (4 voxels) + ll_data = np.array([1000.0, 2000.0, 1000.0, 2000.0]) + mixed_data = np.array([500.0, 500.0, 3000.0, 3000.0]) + + # Voxel 3 is unmasked + mask = np.array([True, True, True, False]) + threshold = 1500.0 + + hybrid = compute_hybrid_t1_array(ll_data, mixed_data, mask, threshold) + + # Evaluation: Substitution happens ONLY if BOTH > threshold AND inside mask. + # Voxel 0: 1000 < 1500 -> Keep LL (1000.0) + # Voxel 1: Mixed 500 < 1500 -> Keep LL (2000.0) + # Voxel 2: LL (1000) < 1500 -> Keep LL (1000.0) ... wait, let's fix ll_data[2] to test properly + # Let's run it as-is: + assert hybrid[0] == 1000.0 + assert hybrid[1] == 2000.0 + assert hybrid[2] == 1000.0 + assert hybrid[3] == 2000.0 # Unmasked, so keep LL + + # Let's explicitly trigger the merge condition + ll_data[2] = 2000.0 + hybrid2 = compute_hybrid_t1_array(ll_data, mixed_data, mask, threshold) + # Voxel 2: LL(2000) > 1500 AND Mixed(3000) > 1500 AND Mask=True -> Merge! 
+ assert hybrid2[2] == 3000.0 diff --git a/test/test_looklocker.py b/test/test_looklocker.py new file mode 100644 index 0000000..945200d --- /dev/null +++ b/test/test_looklocker.py @@ -0,0 +1,75 @@ +from pathlib import Path + +import numpy as np +import pytest + +from mritk.t1.utils import compare_nifti_images +from mritk.t1.looklocker import ( + looklocker_t1map, + looklocker_t1map_postprocessing, + remove_outliers, + create_largest_island_mask, +) + + +@pytest.mark.skip(reason="Takes too long") +def test_looklocker_t1map(tmp_path, mri_data_dir: Path): + LL_path = mri_data_dir / "mri-dataset/mri_dataset/sub-01" / "ses-01/anat/sub-01_ses-01_acq-looklocker_IRT1.nii.gz" + timestamps = ( + mri_data_dir / "mri-dataset/mri_dataset/sub-01" / "ses-01/anat/sub-01_ses-01_acq-looklocker_IRT1_trigger_times.txt" + ) + T1_low = 100 + T1_high = 6000 + + ref_output = mri_data_dir / "mri-dataset/mri_dataset/derivatives/sub-01" / "ses-01/sub-01_ses-01_acq-looklocker_T1map.nii.gz" + test_output_raw = tmp_path / "output_acq-looklocker_T1map_raw.nii.gz" + test_output = tmp_path / "output_acq-looklocker_T1map.nii.gz" + + looklocker_t1map(looklocker_input=LL_path, timestamps=timestamps, output=test_output_raw) + looklocker_t1map_postprocessing(T1map=test_output_raw, T1_low=T1_low, T1_high=T1_high, output=test_output) + compare_nifti_images(test_output, ref_output, data_tolerance=1e-12) + + +def test_remove_outliers(): + """Test that data is appropriately masked and clipped to physiological T1 bounds.""" + # 2x2x1 Mock Data + data = np.array([[[10.0], [500.0]], [[1500.0], [8000.0]]]) + + # Mask out the first element + mask = np.array([[[False], [True]], [[True], [True]]]) + + t1_low = 100.0 + t1_high = 2000.0 + + result = remove_outliers(data, mask, t1_low, t1_high) + + # Expected: + # [0,0,0] -> NaN (masked out) + # [0,1,0] -> 500.0 (valid) + # [1,0,0] -> 1500.0 (valid) + # [1,1,0] -> NaN (exceeds t1_high) + + assert np.isnan(result[0, 0, 0]) + assert result[0, 1, 0] == 500.0 + assert 
result[1, 0, 0] == 1500.0 + assert np.isnan(result[1, 1, 0]) + + +def test_create_largest_island_mask(): + """Test morphology logic identifies the primary body of data and ignores disconnected noise.""" + # Create a 15x15x15 empty space (3375 voxels, which is > 1000 so the background isn't + # accidentally filled in by remove_small_holes) + data = np.full((15, 15, 15), np.nan) + + # Create a large block in the center (Island 1) + data[5:10, 5:10, 5:10] = 100.0 + + # Create a tiny disconnected speck in the corner (Island 2) + data[0, 0, 0] = 50.0 + + # Run with small morphology radiuses + mask = create_largest_island_mask(data, radius=1, erode_dilate_factor=1.0) + + # Speck should be dropped, major block should be True + assert mask[0, 0, 0] == np.False_ + assert mask[7, 7, 7] == np.True_ diff --git a/test/test_dicom_to_nifti.py b/test/test_mixed.py similarity index 57% rename from test/test_dicom_to_nifti.py rename to test/test_mixed.py index 46f4b5e..11511aa 100644 --- a/test/test_dicom_to_nifti.py +++ b/test/test_mixed.py @@ -1,13 +1,55 @@ -from unittest.mock import MagicMock, patch -from pathlib import Path import numpy as np +from pathlib import Path -from mritk.t1_maps.dicom_to_nifti import ( - _extract_frame_metadata, - run_dcm2niix, +from unittest.mock import MagicMock, patch + +from mritk.t1.mixed import ( + compute_mixed_t1_array, extract_mixed_dicom, - VOLUME_LABELS, + mixed_t1map, + mixed_t1map_postprocessing, + _extract_frame_metadata, ) +from mritk.t1.utils import VOLUME_LABELS, compare_nifti_images + + +def test_mixed_t1map(tmp_path, mri_data_dir: Path): + SE_path = mri_data_dir / "mri-dataset/mri_dataset/sub-01" / "ses-01/mixed/sub-01_ses-01_acq-mixed_SE-modulus.nii.gz" + IR_path = mri_data_dir / "mri-dataset/mri_dataset/sub-01" / "ses-01/mixed/sub-01_ses-01_acq-mixed_IR-corrected-real.nii.gz" + meta_path = mri_data_dir / "mri-dataset/mri_dataset/sub-01" / "ses-01/mixed/sub-01_ses-01_acq-mixed_meta.json" + + ref_output = mri_data_dir / 
"mri-processed/mri_dataset/derivatives/sub-01" / "ses-01/sub-01_ses-01_acq-mixed_T1map.nii.gz" + test_output_raw = tmp_path / "output_acq-mixed_T1map_raw.nii.gz" + test_output = tmp_path / "output_acq-mixed_T1map.nii.gz" + + T1_low = 100 + T1_high = 10000 + + mixed_t1map( + SE_nii_path=SE_path, IR_nii_path=IR_path, meta_path=meta_path, T1_low=T1_low, T1_high=T1_high, output=test_output_raw + ) + mixed_t1map_postprocessing(SE_nii_path=SE_path, T1_path=test_output_raw, output=test_output) + + compare_nifti_images(test_output, ref_output, data_tolerance=1e-12) + + +def test_compute_mixed_t1_array(): + """Test generating a T1 map from SE and IR modalities via interpolation.""" + se_data = np.array([[[1000.0, 1000.0]]]) + # IR signals at varying levels + ir_data = np.array([[[-500.0, 500.0]]]) + + meta = {"TR_SE": 1000.0, "TI": 100.0, "TE": 10.0, "ETL": 5} + + t1_low = 100.0 + t1_high = 3000.0 + + t1_volume = compute_mixed_t1_array(se_data, ir_data, meta, t1_low, t1_high) + + # Should output same shape + assert t1_volume.shape == (1, 1, 2) + # T1 maps should not contain negative values in valid tissue + assert np.all(t1_volume[~np.isnan(t1_volume)] > 0) def test_extract_frame_metadata(): @@ -27,27 +69,7 @@ def test_extract_frame_metadata(): assert meta["ETL"] == 5 -@patch("subprocess.run") -def test_run_dcm2niix(mock_run): - """Test that the dcm2niix command constructor triggers properly.""" - input_path = Path("/input/data.dcm") - output_dir = Path("/output/") - - # Test valid execution - run_dcm2niix(input_path, output_dir, form="test_form", extra_args="-z y") - - # Verify the constructed shell command - mock_run.assert_called_once() - args, _ = mock_run.call_args - cmd = args[0] - - assert "dcm2niix" in cmd[0] - assert "test_form" in cmd - assert "-z" in cmd - assert "y" in cmd - - -@patch("mritk.t1_maps.dicom_to_nifti.extract_single_volume") +@patch("mritk.t1.mixed.extract_single_volume") @patch("pydicom.dcmread") def test_extract_mixed_dicom(mock_dcmread, 
mock_extract_single): """Test parsing a multi-volume DICOM file into independent subvolumes.""" diff --git a/test/test_mri_t1_maps.py b/test/test_mri_t1_maps.py deleted file mode 100644 index 202a3bb..0000000 --- a/test/test_mri_t1_maps.py +++ /dev/null @@ -1,76 +0,0 @@ -"""MRI T1 maps - Tests - -Copyright (C) 2026 Jørgen Riseth (jnriseth@gmail.com) -Copyright (C) 2026 Cécile Daversin-Catty (cecile@simula.no) -Copyright (C) 2026 Simula Research Laboratory -""" - -from pathlib import Path -import pytest - -from mritk.t1_maps.t1_maps import ( - looklocker_t1map, - looklocker_t1map_postprocessing, - mixed_t1map, - mixed_t1map_postprocessing, - hybrid_t1map, -) - -from mritk.t1_maps.utils import compare_nifti_images - - -@pytest.mark.skip(reason="Takes too long") -def test_looklocker_t1map(tmp_path, mri_data_dir: Path): - LL_path = mri_data_dir / "mri-dataset/mri_dataset/sub-01" / "ses-01/anat/sub-01_ses-01_acq-looklocker_IRT1.nii.gz" - timestamps = ( - mri_data_dir / "mri-dataset/mri_dataset/sub-01" / "ses-01/anat/sub-01_ses-01_acq-looklocker_IRT1_trigger_times.txt" - ) - T1_low = 100 - T1_high = 6000 - - ref_output = mri_data_dir / "mri-dataset/mri_dataset/derivatives/sub-01" / "ses-01/sub-01_ses-01_acq-looklocker_T1map.nii.gz" - test_output_raw = tmp_path / "output_acq-looklocker_T1map_raw.nii.gz" - test_output = tmp_path / "output_acq-looklocker_T1map.nii.gz" - - looklocker_t1map(looklocker_input=LL_path, timestamps=timestamps, output=test_output_raw) - looklocker_t1map_postprocessing(T1map=test_output_raw, T1_low=T1_low, T1_high=T1_high, output=test_output) - compare_nifti_images(test_output, ref_output, data_tolerance=1e-12) - - -def test_mixed_t1map(tmp_path, mri_data_dir: Path): - SE_path = mri_data_dir / "mri-dataset/mri_dataset/sub-01" / "ses-01/mixed/sub-01_ses-01_acq-mixed_SE-modulus.nii.gz" - IR_path = mri_data_dir / "mri-dataset/mri_dataset/sub-01" / "ses-01/mixed/sub-01_ses-01_acq-mixed_IR-corrected-real.nii.gz" - meta_path = mri_data_dir / 
"mri-dataset/mri_dataset/sub-01" / "ses-01/mixed/sub-01_ses-01_acq-mixed_meta.json" - - ref_output = mri_data_dir / "mri-processed/mri_dataset/derivatives/sub-01" / "ses-01/sub-01_ses-01_acq-mixed_T1map.nii.gz" - test_output_raw = tmp_path / "output_acq-mixed_T1map_raw.nii.gz" - test_output = tmp_path / "output_acq-mixed_T1map.nii.gz" - - T1_low = 100 - T1_high = 10000 - - mixed_t1map( - SE_nii_path=SE_path, IR_nii_path=IR_path, meta_path=meta_path, T1_low=T1_low, T1_high=T1_high, output=test_output_raw - ) - mixed_t1map_postprocessing(SE_nii_path=SE_path, T1_path=test_output_raw, output=test_output) - - compare_nifti_images(test_output, ref_output, data_tolerance=1e-12) - - -def test_hybrid_t1map(tmp_path, mri_data_dir: Path): - LL_path = ( - mri_data_dir / "mri-processed/mri_processed_data/sub-01/registered/sub-01_ses-01_acq-looklocker_T1map_registered.nii.gz" - ) - mixed_path = ( - mri_data_dir / "mri-processed/mri_processed_data/sub-01/registered/sub-01_ses-01_acq-mixed_T1map_registered.nii.gz" - ) - csf_mask_path = mri_data_dir / "mri-processed/mri_processed_data/sub-01/segmentations/sub-01_seg-csf_binary.nii.gz" - test_output = tmp_path / "output_T1map_hybrid.nii.gz" - ref_output = mri_data_dir / "mri-processed/mri_processed_data/sub-01/T1maps/sub-01_ses-01_T1map_hybrid.nii.gz" - threshold = 1500 - erode = 1 - - hybrid_t1map( - LL_path=LL_path, mixed_path=mixed_path, csf_mask_path=csf_mask_path, threshold=threshold, erode=erode, output=test_output - ) - compare_nifti_images(test_output, ref_output, data_tolerance=1e-12) diff --git a/test/test_t1_maps.py b/test/test_t1_maps.py deleted file mode 100644 index 148c6cc..0000000 --- a/test/test_t1_maps.py +++ /dev/null @@ -1,150 +0,0 @@ -import numpy as np -import pytest - -from mritk.t1_maps.t1_maps import remove_outliers, compute_mixed_t1_array, compute_hybrid_t1_array, create_largest_island_mask -from mritk.data.base import MRIData -from mritk.t1_maps.t1_to_r1 import compute_r1_array, convert_T1_to_R1, T1_to_R1 
- - -def test_compute_r1_array_standard(): - """Test basic T1 to R1 mathematical conversion.""" - t1_data = np.array([500.0, 1000.0, 2000.0]) - - # Expected R1 = 1000 / T1 - expected = np.array([2.0, 1.0, 0.5]) - - r1_data = compute_r1_array(t1_data, scale=1000.0) - np.testing.assert_array_almost_equal(r1_data, expected) - - -def test_compute_r1_array_clipping(): - """Test that values outside the [t1_low, t1_high] bounds are safely set to NaN.""" - t1_data = np.array([0.5, 500.0, 6000.0, 10000.0]) - t1_low = 1.0 - t1_high = 5000.0 - - r1_data = compute_r1_array(t1_data, scale=1000.0, t1_low=t1_low, t1_high=t1_high) - - # index 0 (0.5) < 1.0 -> NaN - # index 1 (500) -> 2.0 - # index 2 (6000) > 5000.0 -> NaN - # index 3 (10000) > 5000.0 -> NaN - - assert np.isnan(r1_data[0]) - assert r1_data[1] == 2.0 - assert np.isnan(r1_data[2]) - assert np.isnan(r1_data[3]) - - -def test_convert_t1_to_r1_mridata(): - """Test the conversion properly preserves the MRIData class attributes (affine).""" - t1_data = np.array([[[1000.0, 2000.0]]]) - affine = np.eye(4) - mri = MRIData(data=t1_data, affine=affine) - - r1_mri = convert_T1_to_R1(mri, scale=1000.0) - - expected_r1 = np.array([[[1.0, 0.5]]]) - - np.testing.assert_array_almost_equal(r1_mri.data, expected_r1) - np.testing.assert_array_equal(r1_mri.affine, affine) - - -def test_t1_to_r1_invalid_input(): - """Test the wrapper function throws ValueError on an invalid type input.""" - with pytest.raises(ValueError, match="Input should be a Path or MRIData"): - # Explicitly passing a raw string instead of Path/MRIData - T1_to_R1(input_mri="not_a_path_or_mridata") - - -def test_remove_outliers(): - """Test that data is appropriately masked and clipped to physiological T1 bounds.""" - # 2x2x1 Mock Data - data = np.array([[[10.0], [500.0]], [[1500.0], [8000.0]]]) - - # Mask out the first element - mask = np.array([[[False], [True]], [[True], [True]]]) - - t1_low = 100.0 - t1_high = 2000.0 - - result = remove_outliers(data, mask, 
t1_low, t1_high) - - # Expected: - # [0,0,0] -> NaN (masked out) - # [0,1,0] -> 500.0 (valid) - # [1,0,0] -> 1500.0 (valid) - # [1,1,0] -> NaN (exceeds t1_high) - - assert np.isnan(result[0, 0, 0]) - assert result[0, 1, 0] == 500.0 - assert result[1, 0, 0] == 1500.0 - assert np.isnan(result[1, 1, 0]) - - -def test_compute_mixed_t1_array(): - """Test generating a T1 map from SE and IR modalities via interpolation.""" - se_data = np.array([[[1000.0, 1000.0]]]) - # IR signals at varying levels - ir_data = np.array([[[-500.0, 500.0]]]) - - meta = {"TR_SE": 1000.0, "TI": 100.0, "TE": 10.0, "ETL": 5} - - t1_low = 100.0 - t1_high = 3000.0 - - t1_volume = compute_mixed_t1_array(se_data, ir_data, meta, t1_low, t1_high) - - # Should output same shape - assert t1_volume.shape == (1, 1, 2) - # T1 maps should not contain negative values in valid tissue - assert np.all(t1_volume[~np.isnan(t1_volume)] > 0) - - -def test_compute_hybrid_t1_array(): - """Test hybrid array logic merges LL and Mixed appropriately based on threshold and mask.""" - # 1D array for simplicity (4 voxels) - ll_data = np.array([1000.0, 2000.0, 1000.0, 2000.0]) - mixed_data = np.array([500.0, 500.0, 3000.0, 3000.0]) - - # Voxel 3 is unmasked - mask = np.array([True, True, True, False]) - threshold = 1500.0 - - hybrid = compute_hybrid_t1_array(ll_data, mixed_data, mask, threshold) - - # Evaluation: Substitution happens ONLY if BOTH > threshold AND inside mask. - # Voxel 0: 1000 < 1500 -> Keep LL (1000.0) - # Voxel 1: Mixed 500 < 1500 -> Keep LL (2000.0) - # Voxel 2: LL (1000) < 1500 -> Keep LL (1000.0) ... 
wait, let's fix ll_data[2] to test properly - # Let's run it as-is: - assert hybrid[0] == 1000.0 - assert hybrid[1] == 2000.0 - assert hybrid[2] == 1000.0 - assert hybrid[3] == 2000.0 # Unmasked, so keep LL - - # Let's explicitly trigger the merge condition - ll_data[2] = 2000.0 - hybrid2 = compute_hybrid_t1_array(ll_data, mixed_data, mask, threshold) - # Voxel 2: LL(2000) > 1500 AND Mixed(3000) > 1500 AND Mask=True -> Merge! - assert hybrid2[2] == 3000.0 - - -def test_create_largest_island_mask(): - """Test morphology logic identifies the primary body of data and ignores disconnected noise.""" - # Create a 15x15x15 empty space (3375 voxels, which is > 1000 so the background isn't - # accidentally filled in by remove_small_holes) - data = np.full((15, 15, 15), np.nan) - - # Create a large block in the center (Island 1) - data[5:10, 5:10, 5:10] = 100.0 - - # Create a tiny disconnected speck in the corner (Island 2) - data[0, 0, 0] = 50.0 - - # Run with small morphology radiuses - mask = create_largest_island_mask(data, radius=1, erode_dilate_factor=1.0) - - # Speck should be dropped, major block should be True - assert mask[0, 0, 0] == np.False_ - assert mask[7, 7, 7] == np.True_ diff --git a/test/test_t1_maps_utils.py b/test/test_t1_utils.py similarity index 72% rename from test/test_t1_maps_utils.py rename to test/test_t1_utils.py index b3edf07..d0117ba 100644 --- a/test/test_t1_maps_utils.py +++ b/test/test_t1_utils.py @@ -1,12 +1,22 @@ -"""Tests for T1 Map utilities +# Tests for T1 Map utilities -Copyright (C) 2026 Jørgen Riseth (jnriseth@gmail.com) -Copyright (C) 2026 Cécile Daversin-Catty (cecile@simula.no) -Copyright (C) 2026 Simula Research Laboratory -""" +# Copyright (C) 2026 Jørgen Riseth (jnriseth@gmail.com) +# Copyright (C) 2026 Cécile Daversin-Catty (cecile@simula.no) +# Copyright (C) 2026 Simula Research Laboratory + + +from unittest.mock import patch +from pathlib import Path import numpy as np -from mritk.t1_maps.utils import voxel_fit_function, 
nan_filter_gaussian, estimate_se_free_relaxation_time, T1_lookup_table + +from mritk.t1.utils import ( + voxel_fit_function, + nan_filter_gaussian, + estimate_se_free_relaxation_time, + T1_lookup_table, + run_dcm2niix, +) def test_voxel_fit_function(): @@ -82,3 +92,23 @@ def test_t1_lookup_table(): # Check that fraction curve monotonically DECREASES for standard physics ranges # As T1 gets longer, the IR signal becomes more negative relative to the SE signal assert np.all(np.diff(fraction_curve) < 0) + + +@patch("subprocess.run") +def test_run_dcm2niix(mock_run): + """Test that the dcm2niix command constructor triggers properly.""" + input_path = Path("/input/data.dcm") + output_dir = Path("/output/") + + # Test valid execution + run_dcm2niix(input_path, output_dir, form="test_form", extra_args="-z y") + + # Verify the constructed shell command + mock_run.assert_called_once() + args, _ = mock_run.call_args + cmd = args[0] + + assert "dcm2niix" in cmd[0] + assert "test_form" in cmd + assert "-z" in cmd + assert "y" in cmd From 890a96fdc6bde78a890d81f179e2c14c735eb619 Mon Sep 17 00:00:00 2001 From: Henrik Finsberg Date: Sat, 7 Mar 2026 17:07:17 +0100 Subject: [PATCH 12/29] Update docs --- docs/api.rst | 27 +++++++++++++++++++++++++++ 1 file changed, 27 insertions(+) diff --git a/docs/api.rst b/docs/api.rst index dfd9885..25a2bf1 100644 --- a/docs/api.rst +++ b/docs/api.rst @@ -94,3 +94,30 @@ segmentation .. automodule:: mritk.segmentation.lookup_table :members: :inherited-members: + +t1 +-- + +.. automodule:: mritk.t1 + :members: + :inherited-members: + +.. automodule:: mritk.t1.concentration + :members: + :inherited-members: + +.. automodule:: mritk.t1.mixed + :members: + :inherited-members: + +.. automodule:: mritk.t1.looklocker + :members: + :inherited-members: + +.. automodule:: mritk.t1.utils + :members: + :inherited-members: + +.. 
automodule:: mritk.t1.hybrid + :members: + :inherited-members: From e482b18bbf9988cd7ab82439f684af64fa6ea2f0 Mon Sep 17 00:00:00 2001 From: Henrik Finsberg Date: Sat, 7 Mar 2026 17:43:00 +0100 Subject: [PATCH 13/29] Separate concentration and r1 --- src/mritk/__init__.py | 4 +- src/mritk/cli.py | 5 +- src/mritk/{t1 => }/concentration.py | 185 +++++++++++++--------------- src/mritk/t1/__init__.py | 4 +- src/mritk/t1/r1.py | 99 +++++++++++++++ src/mritk/t1/utils.py | 2 +- test/test_concentration.py | 124 ++++++++----------- test/test_r1.py | 66 ++++++++++ 8 files changed, 315 insertions(+), 174 deletions(-) rename src/mritk/{t1 => }/concentration.py (50%) create mode 100644 src/mritk/t1/r1.py create mode 100644 test/test_r1.py diff --git a/src/mritk/__init__.py b/src/mritk/__init__.py index 901db5f..83fda66 100644 --- a/src/mritk/__init__.py +++ b/src/mritk/__init__.py @@ -5,7 +5,7 @@ from importlib.metadata import metadata -from . import data, segmentation, statistics +from . import data, segmentation, statistics, t1, concentration meta = metadata("mritk") @@ -20,4 +20,6 @@ "data", "segmentation", "statistics", + "t1", + "concentration", ] diff --git a/src/mritk/cli.py b/src/mritk/cli.py index c6a6a21..b941602 100644 --- a/src/mritk/cli.py +++ b/src/mritk/cli.py @@ -6,7 +6,7 @@ from rich_argparse import RichHelpFormatter -from . import datasets, info, statistics, show, napari +from . 
import datasets, info, statistics, show, napari, t1 def version_info(): @@ -65,6 +65,9 @@ def setup_parser(): napari_parser = subparsers.add_parser("napari", help="Show MRI data using napari", formatter_class=parser.formatter_class) napari.add_arguments(napari_parser) + t1_parser = subparsers.add_parser("t1", help="Show MRI data using t1", formatter_class=parser.formatter_class) + t1.cli.add_arguments(t1_parser) + return parser diff --git a/src/mritk/t1/concentration.py b/src/mritk/concentration.py similarity index 50% rename from src/mritk/t1/concentration.py rename to src/mritk/concentration.py index a5ade49..549a0f0 100644 --- a/src/mritk/t1/concentration.py +++ b/src/mritk/concentration.py @@ -1,4 +1,4 @@ -# T1 to R1 module +# Concentration module # Copyright (C) 2026 Jørgen Riseth (jnriseth@gmail.com) # Copyright (C) 2026 Cécile Daversin-Catty (cecile@simula.no) @@ -8,99 +8,12 @@ import numpy as np from pathlib import Path -from ..data.base import MRIData -from ..data.io import load_mri_data, save_mri_data -from ..data.orientation import assert_same_space +from .data.base import MRIData +from .data.io import load_mri_data, save_mri_data +from .data.orientation import assert_same_space -def compute_r1_array( - t1_data: np.ndarray, scale: float = 1000.0, t1_low: float = 1.0, t1_high: float = float("inf") -) -> np.ndarray: - """ - Pure numpy function converting a T1 relaxation time array to an R1 relaxation rate array. - - The relationship is R1 = scale / T1. Values outside the [t1_low, t1_high] - range are set to NaN to filter out noise and non-physiological data. - - Args: - t1_data (np.ndarray): The input array containing T1 relaxation times. - scale (float, optional): Scaling factor, typically 1000 to convert from ms to s^-1. Defaults to 1000. - t1_low (float, optional): Lower bound for valid T1 values. Defaults to 1. - t1_high (float, optional): Upper bound for valid T1 values. Defaults to infinity. - - Returns: - np.ndarray: An array of R1 relaxation rates. 
Invalid/out-of-bound voxels are set to NaN. - """ - valid_t1 = (t1_low <= t1_data) & (t1_data <= t1_high) - r1_data = np.nan * np.zeros_like(t1_data) - - # Calculate R1 only for valid voxels to avoid division by zero or extreme outliers - r1_data[valid_t1] = scale / t1_data[valid_t1] - - return r1_data - - -def convert_T1_to_R1( - T1map_mri: MRIData, - scale: float = 1000.0, - t1_low: float = 1.0, - t1_high: float = float("inf"), -) -> MRIData: - """ - Converts a T1 map MRIData object into an R1 map MRIData object. - - Args: - T1map_mri (MRIData): The input MRIData object representing the T1 map. - scale (float, optional): Scaling factor. Defaults to 1000. - t1_low (float, optional): Lower bound for valid T1 values. Defaults to 1. - t1_high (float, optional): Upper bound for valid T1 values. Defaults to float('inf'). - - Returns: - MRIData: A new MRIData object containing the R1 map array and the original affine matrix. - """ - r1_data = compute_r1_array(T1map_mri.data, scale, t1_low, t1_high) - return MRIData(data=r1_data, affine=T1map_mri.affine) - - -def T1_to_R1( - input_mri: Path | MRIData, - output: Path | None = None, - scale: float = 1000.0, - t1_low: float = 1.0, - t1_high: float = float("inf"), -) -> MRIData: - """ - High-level wrapper to convert a T1 map to an R1 map, handling file I/O operations. - - Args: - input_mri (Union[Path, MRIData]): A Path to a T1 NIfTI file or an already loaded MRIData object. - output (Path | None, optional): Path to save the resulting R1 map to disk. Defaults to None. - scale (float, optional): Scaling factor (e.g., 1000 for ms -> s^-1). Defaults to 1000. - t1_low (float, optional): Lower bound for valid T1 values. Defaults to 1. - t1_high (float, optional): Upper bound for valid T1 values. Defaults to float('inf'). - - Returns: - MRIData: The computed R1 map as an MRIData object. - - Raises: - ValueError: If input_mri is neither a Path nor an MRIData object. 
- """ - if isinstance(input_mri, Path): - T1map_mri = load_mri_data(input_mri, dtype=np.single) - elif isinstance(input_mri, MRIData): - T1map_mri = input_mri - else: - raise ValueError(f"Input should be a Path or MRIData, got {type(input_mri)}") - - R1map_mri = convert_T1_to_R1(T1map_mri, scale, t1_low, t1_high) - - if output is not None: - save_mri_data(R1map_mri, output, dtype=np.single) - - return R1map_mri - - -def concentration_from_T1(t1: np.ndarray, t1_0: np.ndarray, r1: float) -> np.ndarray: +def concentration_from_T1_expr(t1: np.ndarray, t1_0: np.ndarray, r1: float) -> np.ndarray: """ Computes tracer concentration from T1 relaxation times. @@ -117,7 +30,7 @@ def concentration_from_T1(t1: np.ndarray, t1_0: np.ndarray, r1: float) -> np.nda return (1.0 / r1) * ((1.0 / t1) - (1.0 / t1_0)) -def concentration_from_R1(r1_map: np.ndarray, r1_0_map: np.ndarray, r1: float) -> np.ndarray: +def concentration_from_R1_expr(r1_map: np.ndarray, r1_0_map: np.ndarray, r1: float) -> np.ndarray: """ Computes tracer concentration from R1 relaxation rates. 
@@ -134,7 +47,7 @@ def concentration_from_R1(r1_map: np.ndarray, r1_0_map: np.ndarray, r1: float) - return (1.0 / r1) * (r1_map - r1_0_map) -def compute_concentration_array( +def compute_concentration_from_T1_array( t1_data: np.ndarray, t10_data: np.ndarray, r1: float, mask: np.ndarray | None = None ) -> np.ndarray: """ @@ -160,12 +73,12 @@ def compute_concentration_array( concentrations = np.full_like(t10_data, np.nan, dtype=np.single) # Compute concentration strictly on valid voxels - concentrations[valid_mask] = concentration_from_T1(t1=t1_data[valid_mask], t1_0=t10_data[valid_mask], r1=r1) + concentrations[valid_mask] = concentration_from_T1_expr(t1=t1_data[valid_mask], t1_0=t10_data[valid_mask], r1=r1) return concentrations -def concentration( +def concentration_from_T1( input_path: Path, reference_path: Path, output_path: Path | None = None, @@ -198,7 +111,7 @@ def concentration( assert_same_space(mask_mri, t10_mri) mask_data = mask_mri.data - concentrations_array = compute_concentration_array(t1_data=t1_mri.data, t10_data=t10_mri.data, r1=r1, mask=mask_data) + concentrations_array = compute_concentration_from_T1_array(t1_data=t1_mri.data, t10_data=t10_mri.data, r1=r1, mask=mask_data) mri_data = MRIData(data=concentrations_array, affine=t10_mri.affine) @@ -206,3 +119,81 @@ def concentration( save_mri_data(mri_data, output_path, dtype=np.single) return mri_data + + +def compute_concentration_from_R1_array( + r1_data: np.ndarray, r10_data: np.ndarray, r1: float, mask: np.ndarray | None = None +) -> np.ndarray: + """ + Computes the concentration map array from R1 maps, handling masking. + + Unlike T1 maps, R1 calculations do not suffer from division-by-zero + errors, but we still ensure we only operate on finite values and within + the provided mask. + + Args: + r1_data (np.ndarray): 3D numpy array of post-contrast R1 values. + r10_data (np.ndarray): 3D numpy array of pre-contrast R1 values. + r1 (float): Relaxivity of the contrast agent. 
+ mask (np.ndarray | None, optional): Boolean mask restricting the computation area. + Defaults to None. + + Returns: + np.ndarray: A 3D array of computed concentrations. Invalid voxels (unmasked + or where R1 is not finite) are set to NaN. + """ + # Create a validity mask: limit to finite floating point numbers + valid_mask = np.isfinite(r1_data) & np.isfinite(r10_data) + + if mask is not None: + valid_mask &= mask.astype(bool) + + concentrations = np.full_like(r10_data, np.nan, dtype=np.single) + + # Compute concentration strictly on valid voxels + concentrations[valid_mask] = concentration_from_R1_expr(r1_map=r1_data[valid_mask], r1_0_map=r10_data[valid_mask], r1=r1) + + return concentrations + + +def concentration_from_R1( + input_path: Path, + reference_path: Path, + output_path: Path | None = None, + r1: float = 0.0045, + mask_path: Path | None = None, +) -> MRIData: + """ + I/O wrapper to generate a contrast agent concentration map from NIfTI R1 maps. + + Loads the post-contrast and baseline R1 maps, ensures they occupy the same + physical space, computes the concentration map, and optionally saves it to disk. + + Args: + input_path (Path): Path to the post-contrast R1 map NIfTI file. + reference_path (Path): Path to the baseline (pre-contrast) R1 map NIfTI file. + output_path (Path | None, optional): Path to save the resulting concentration map. Defaults to None. + r1 (float, optional): Contrast agent relaxivity. Defaults to 0.0045. + mask_path (Path | None, optional): Path to a boolean mask NIfTI file. Defaults to None. + + Returns: + MRIData: An MRIData object containing the concentration array and the affine matrix. 
+ """ + r1_mri = load_mri_data(input_path, dtype=np.single) + r10_mri = load_mri_data(reference_path, dtype=np.single) + assert_same_space(r1_mri, r10_mri) + + mask_data = None + if mask_path is not None: + mask_mri = load_mri_data(mask_path, dtype=bool) + assert_same_space(mask_mri, r10_mri) + mask_data = mask_mri.data + + concentrations_array = compute_concentration_from_R1_array(r1_data=r1_mri.data, r10_data=r10_mri.data, r1=r1, mask=mask_data) + + mri_data = MRIData(data=concentrations_array, affine=r10_mri.affine) + + if output_path is not None: + save_mri_data(mri_data, output_path, dtype=np.single) + + return mri_data diff --git a/src/mritk/t1/__init__.py b/src/mritk/t1/__init__.py index 7b2a547..0c4db2b 100644 --- a/src/mritk/t1/__init__.py +++ b/src/mritk/t1/__init__.py @@ -2,7 +2,7 @@ # Copyright (C) 2026 Cécile Daversin-Catty (cecile@simula.no) # Copyright (C) 2026 Simula Research Laboratory -from . import concentration, utils, mixed, looklocker, hybrid +from . import r1, utils, mixed, looklocker, hybrid, cli -__all__ = ["concentration", "utils", "mixed", "looklocker", "hybrid"] +__all__ = ["r1", "utils", "mixed", "looklocker", "hybrid", "cli"] diff --git a/src/mritk/t1/r1.py b/src/mritk/t1/r1.py new file mode 100644 index 0000000..7f4d800 --- /dev/null +++ b/src/mritk/t1/r1.py @@ -0,0 +1,99 @@ +# T1 to R1 module + +# Copyright (C) 2026 Jørgen Riseth (jnriseth@gmail.com) +# Copyright (C) 2026 Cécile Daversin-Catty (cecile@simula.no) +# Copyright (C) 2026 Simula Research Laboratory + + +import numpy as np +from pathlib import Path + +from ..data.base import MRIData +from ..data.io import load_mri_data, save_mri_data + + +def compute_r1_array( + t1_data: np.ndarray, scale: float = 1000.0, t1_low: float = 1.0, t1_high: float = float("inf") +) -> np.ndarray: + """ + Pure numpy function converting a T1 relaxation time array to an R1 relaxation rate array. + + The relationship is R1 = scale / T1. 
Values outside the [t1_low, t1_high] + range are set to NaN to filter out noise and non-physiological data. + + Args: + t1_data (np.ndarray): The input array containing T1 relaxation times. + scale (float, optional): Scaling factor, typically 1000 to convert from ms to s^-1. Defaults to 1000. + t1_low (float, optional): Lower bound for valid T1 values. Defaults to 1. + t1_high (float, optional): Upper bound for valid T1 values. Defaults to infinity. + + Returns: + np.ndarray: An array of R1 relaxation rates. Invalid/out-of-bound voxels are set to NaN. + """ + valid_t1 = (t1_low <= t1_data) & (t1_data <= t1_high) + r1_data = np.nan * np.zeros_like(t1_data) + + # Calculate R1 only for valid voxels to avoid division by zero or extreme outliers + r1_data[valid_t1] = scale / t1_data[valid_t1] + + return r1_data + + +def convert_T1_to_R1( + T1map_mri: MRIData, + scale: float = 1000.0, + t1_low: float = 1.0, + t1_high: float = float("inf"), +) -> MRIData: + """ + Converts a T1 map MRIData object into an R1 map MRIData object. + + Args: + T1map_mri (MRIData): The input MRIData object representing the T1 map. + scale (float, optional): Scaling factor. Defaults to 1000. + t1_low (float, optional): Lower bound for valid T1 values. Defaults to 1. + t1_high (float, optional): Upper bound for valid T1 values. Defaults to float('inf'). + + Returns: + MRIData: A new MRIData object containing the R1 map array and the original affine matrix. + """ + r1_data = compute_r1_array(T1map_mri.data, scale, t1_low, t1_high) + return MRIData(data=r1_data, affine=T1map_mri.affine) + + +def T1_to_R1( + input_mri: Path | MRIData, + output: Path | None = None, + scale: float = 1000.0, + t1_low: float = 1.0, + t1_high: float = float("inf"), +) -> MRIData: + """ + High-level wrapper to convert a T1 map to an R1 map, handling file I/O operations. + + Args: + input_mri (Union[Path, MRIData]): A Path to a T1 NIfTI file or an already loaded MRIData object. 
+ output (Path | None, optional): Path to save the resulting R1 map to disk. Defaults to None. + scale (float, optional): Scaling factor (e.g., 1000 for ms -> s^-1). Defaults to 1000. + t1_low (float, optional): Lower bound for valid T1 values. Defaults to 1. + t1_high (float, optional): Upper bound for valid T1 values. Defaults to float('inf'). + + Returns: + MRIData: The computed R1 map as an MRIData object. + + Raises: + ValueError: If input_mri is neither a Path nor an MRIData object. + """ + if isinstance(input_mri, Path): + T1map_mri = load_mri_data(input_mri, dtype=np.single) + elif isinstance(input_mri, MRIData): + T1map_mri = input_mri + else: + raise ValueError(f"Input should be a Path or MRIData, got {type(input_mri)}") + + R1map_mri = convert_T1_to_R1(T1map_mri, scale, t1_low, t1_high) + + if output is not None: + save_mri_data(R1map_mri, output, dtype=np.single) + + return R1map_mri diff --git a/src/mritk/t1/utils.py b/src/mritk/t1/utils.py index ebee039..016b815 100644 --- a/src/mritk/t1/utils.py +++ b/src/mritk/t1/utils.py @@ -1,4 +1,4 @@ -# MRI DICOM to NIfTI conversion - utils +# utils.py - Utility functions for T1 mapping and related processing. 
# Copyright (C) 2026 Jørgen Riseth (jnriseth@gmail.com) # Copyright (C) 2026 Cécile Daversin-Catty (cecile@simula.no) diff --git a/test/test_concentration.py b/test/test_concentration.py index ed4e8d3..a03459f 100644 --- a/test/test_concentration.py +++ b/test/test_concentration.py @@ -6,17 +6,13 @@ from pathlib import Path import numpy as np -import pytest - -from mritk.data.base import MRIData -from mritk.t1.concentration import ( - concentration, - compute_r1_array, - convert_T1_to_R1, - T1_to_R1, + +from mritk.concentration import ( + concentration_from_T1_expr, + concentration_from_R1_expr, + compute_concentration_from_T1_array, + compute_concentration_from_R1_array, concentration_from_T1, - concentration_from_R1, - compute_concentration_array, ) from mritk.t1.utils import compare_nifti_images @@ -39,7 +35,7 @@ def test_intracranial_concentration(tmp_path, mri_data_dir: Path): test_outputs = [tmp_path / f"output_ses-0{i}_concentration.nii.gz" for i in sessions] for i, s in enumerate(sessions): - concentration( + concentration_from_T1( input_path=images_path[i], reference_path=baseline_path, output_path=test_outputs[i], @@ -49,58 +45,19 @@ def test_intracranial_concentration(tmp_path, mri_data_dir: Path): compare_nifti_images(test_outputs[i], ref_outputs[i], data_tolerance=1e-12) -def test_compute_r1_array_standard(): - """Test basic T1 to R1 mathematical conversion.""" - t1_data = np.array([500.0, 1000.0, 2000.0]) - - # Expected R1 = 1000 / T1 - expected = np.array([2.0, 1.0, 0.5]) - - r1_data = compute_r1_array(t1_data, scale=1000.0) - np.testing.assert_array_almost_equal(r1_data, expected) - - -def test_compute_r1_array_clipping(): - """Test that values outside the [t1_low, t1_high] bounds are safely set to NaN.""" - t1_data = np.array([0.5, 500.0, 6000.0, 10000.0]) - t1_low = 1.0 - t1_high = 5000.0 - - r1_data = compute_r1_array(t1_data, scale=1000.0, t1_low=t1_low, t1_high=t1_high) - - # index 0 (0.5) < 1.0 -> NaN - # index 1 (500) -> 2.0 - # index 2 
(6000) > 5000.0 -> NaN - # index 3 (10000) > 5000.0 -> NaN - - assert np.isnan(r1_data[0]) - assert r1_data[1] == 2.0 - assert np.isnan(r1_data[2]) - assert np.isnan(r1_data[3]) - - -def test_convert_t1_to_r1_mridata(): - """Test the conversion properly preserves the MRIData class attributes (affine).""" - t1_data = np.array([[[1000.0, 2000.0]]]) - affine = np.eye(4) - mri = MRIData(data=t1_data, affine=affine) - - r1_mri = convert_T1_to_R1(mri, scale=1000.0) - - expected_r1 = np.array([[[1.0, 0.5]]]) - - np.testing.assert_array_almost_equal(r1_mri.data, expected_r1) - np.testing.assert_array_equal(r1_mri.affine, affine) +def test_compute_concentration_array_no_mask(): + """Test the array computation correctly defaults to keeping all valid positive T1s when no mask is provided.""" + t1_data = np.array([1000.0, 1000.0]) + t10_data = np.array([2000.0, 2000.0]) + r1 = 0.005 + result = compute_concentration_from_T1_array(t1_data, t10_data, r1, mask=None) -def test_t1_to_r1_invalid_input(): - """Test the wrapper function throws ValueError on an invalid type input.""" - with pytest.raises(ValueError, match="Input should be a Path or MRIData"): - # Explicitly passing a raw string instead of Path/MRIData - T1_to_R1(input_mri="not_a_path_or_mridata") + assert np.isclose(result[0], 0.1) + assert np.isclose(result[1], 0.1) -def test_concentration_from_t1(): +def test_concentration_from_t1_expr(): """Test the core math equation for T1-to-Concentration conversion.""" t1 = np.array([1000.0]) t1_0 = np.array([2000.0]) @@ -111,11 +68,11 @@ def test_concentration_from_t1(): # C = 200 * 0.0005 = 0.1 expected = np.array([0.1]) - result = concentration_from_T1(t1, t1_0, r1) + result = concentration_from_T1_expr(t1, t1_0, r1) np.testing.assert_array_almost_equal(result, expected) -def test_concentration_from_r1(): +def test_concentration_from_r1_expr(): """Test the core math equation for R1-to-Concentration conversion.""" r1_map = np.array([2.0]) r1_0_map = np.array([1.0]) @@ -125,12 
+82,12 @@ def test_concentration_from_r1(): # C = 200 * 1.0 = 200.0 expected = np.array([200.0]) - result = concentration_from_R1(r1_map, r1_0_map, r1) + result = concentration_from_R1_expr(r1_map, r1_0_map, r1) np.testing.assert_array_almost_equal(result, expected) -def test_compute_concentration_array_masking(): - """Test that zero/negative/tiny values and explicit masks yield NaNs.""" +def test_compute_concentration_from_T1_array_masking(): + """Test that zero/negative/tiny T1 values and explicit masks yield NaNs.""" t1_data = np.array([1000.0, 1e-12, 1000.0, 1000.0]) t10_data = np.array([2000.0, 2000.0, 1e-12, 2000.0]) @@ -138,7 +95,7 @@ def test_compute_concentration_array_masking(): mask = np.array([True, True, True, False]) r1 = 0.005 - result = compute_concentration_array(t1_data, t10_data, r1, mask=mask) + result = compute_concentration_from_T1_array(t1_data, t10_data, r1, mask=mask) # Expectations: # Voxel 0: Valid, should be 0.1 @@ -152,13 +109,36 @@ def test_compute_concentration_array_masking(): assert np.isnan(result[3]) -def test_compute_concentration_array_no_mask(): - """Test the array computation correctly defaults to keeping all valid positive T1s when no mask is provided.""" - t1_data = np.array([1000.0, 1000.0]) - t10_data = np.array([2000.0, 2000.0]) +def test_compute_concentration_from_R1_array_masking(): + """Test that invalid R1 values (NaN/Inf) and explicit masks yield NaNs.""" + r1_data = np.array([2.0, np.nan, 2.0, 2.0]) + r10_data = np.array([1.0, 1.0, np.inf, 1.0]) + + # Explicit mask excluding the last voxel + mask = np.array([True, True, True, False]) r1 = 0.005 - result = compute_concentration_array(t1_data, t10_data, r1, mask=None) + result = compute_concentration_from_R1_array(r1_data, r10_data, r1, mask=mask) - assert np.isclose(result[0], 0.1) - assert np.isclose(result[1], 0.1) + # Expectations: + # Voxel 0: Valid, should be 200.0 + # Voxel 1: r1_data is NaN -> NaN + # Voxel 2: r10_data is Inf -> NaN + # Voxel 3: mask is False 
-> NaN + + assert np.isclose(result[0], 200.0) + assert np.isnan(result[1]) + assert np.isnan(result[2]) + assert np.isnan(result[3]) + + +def test_compute_concentration_from_R1_array_no_mask(): + """Test the array computation correctly defaults to keeping all finite R1s when no mask is provided.""" + r1_data = np.array([2.0, 2.0]) + r10_data = np.array([1.0, 1.0]) + r1 = 0.005 + + result = compute_concentration_from_R1_array(r1_data, r10_data, r1, mask=None) + + assert np.isclose(result[0], 200.0) + assert np.isclose(result[1], 200.0) diff --git a/test/test_r1.py b/test/test_r1.py new file mode 100644 index 0000000..9a60bec --- /dev/null +++ b/test/test_r1.py @@ -0,0 +1,66 @@ +# MRI R1 maps - Tests + +# Copyright (C) 2026 Jørgen Riseth (jnriseth@gmail.com) +# Copyright (C) 2026 Cécile Daversin-Catty (cecile@simula.no) +# Copyright (C) 2026 Simula Research Laboratory + +import numpy as np +import pytest + +from mritk.data.base import MRIData +from mritk.t1.r1 import ( + compute_r1_array, + convert_T1_to_R1, + T1_to_R1, +) + + +def test_compute_r1_array_standard(): + """Test basic T1 to R1 mathematical conversion.""" + t1_data = np.array([500.0, 1000.0, 2000.0]) + + # Expected R1 = 1000 / T1 + expected = np.array([2.0, 1.0, 0.5]) + + r1_data = compute_r1_array(t1_data, scale=1000.0) + np.testing.assert_array_almost_equal(r1_data, expected) + + +def test_compute_r1_array_clipping(): + """Test that values outside the [t1_low, t1_high] bounds are safely set to NaN.""" + t1_data = np.array([0.5, 500.0, 6000.0, 10000.0]) + t1_low = 1.0 + t1_high = 5000.0 + + r1_data = compute_r1_array(t1_data, scale=1000.0, t1_low=t1_low, t1_high=t1_high) + + # index 0 (0.5) < 1.0 -> NaN + # index 1 (500) -> 2.0 + # index 2 (6000) > 5000.0 -> NaN + # index 3 (10000) > 5000.0 -> NaN + + assert np.isnan(r1_data[0]) + assert r1_data[1] == 2.0 + assert np.isnan(r1_data[2]) + assert np.isnan(r1_data[3]) + + +def test_convert_t1_to_r1_mridata(): + """Test the conversion properly preserves the 
MRIData class attributes (affine).""" + t1_data = np.array([[[1000.0, 2000.0]]]) + affine = np.eye(4) + mri = MRIData(data=t1_data, affine=affine) + + r1_mri = convert_T1_to_R1(mri, scale=1000.0) + + expected_r1 = np.array([[[1.0, 0.5]]]) + + np.testing.assert_array_almost_equal(r1_mri.data, expected_r1) + np.testing.assert_array_equal(r1_mri.affine, affine) + + +def test_t1_to_r1_invalid_input(): + """Test the wrapper function throws ValueError on an invalid type input.""" + with pytest.raises(ValueError, match="Input should be a Path or MRIData"): + # Explicitly passing a raw string instead of Path/MRIData + T1_to_R1(input_mri="not_a_path_or_mridata") From 55cb213cf7a73a1758bfaa1fcc9b7d40ab8f646b Mon Sep 17 00:00:00 2001 From: Henrik Finsberg Date: Sat, 7 Mar 2026 20:27:20 +0100 Subject: [PATCH 14/29] More restructuring --- src/mritk/__init__.py | 8 +- src/mritk/cli.py | 8 +- src/mritk/concentration.py | 22 ++--- src/mritk/{data/orientation.py => data.py} | 94 +++++++++++++++------- src/mritk/data/__init__.py | 8 -- src/mritk/data/base.py | 24 ------ src/mritk/data/io.py | 61 -------------- src/mritk/{t1 => }/hybrid.py | 0 src/mritk/{t1 => }/looklocker.py | 61 ++++++++++++-- src/mritk/masking/masks.py | 15 ++-- src/mritk/{t1 => }/mixed.py | 13 ++- src/mritk/napari.py | 5 +- src/mritk/{t1 => }/r1.py | 7 +- src/mritk/show.py | 6 +- src/mritk/statistics/compute_stats.py | 8 +- src/mritk/t1/__init__.py | 8 -- src/mritk/testing.py | 90 +++++++++++++++++++++ src/mritk/{t1 => }/utils.py | 0 test/test_concentration.py | 2 +- test/test_data_orientation.py | 20 ++--- test/test_hybrid.py | 4 +- test/test_looklocker.py | 4 +- test/test_mixed.py | 7 +- test/test_mri_io.py | 6 +- test/test_mri_orientation.py | 2 +- test/test_r1.py | 4 +- test/{test_t1_utils.py => test_utils.py} | 2 +- 27 files changed, 278 insertions(+), 211 deletions(-) rename src/mritk/{data/orientation.py => data.py} (73%) delete mode 100644 src/mritk/data/__init__.py delete mode 100644 
src/mritk/data/base.py delete mode 100644 src/mritk/data/io.py rename src/mritk/{t1 => }/hybrid.py (100%) rename src/mritk/{t1 => }/looklocker.py (78%) rename src/mritk/{t1 => }/mixed.py (97%) rename src/mritk/{t1 => }/r1.py (94%) delete mode 100644 src/mritk/t1/__init__.py create mode 100644 src/mritk/testing.py rename src/mritk/{t1 => }/utils.py (100%) rename test/{test_t1_utils.py => test_utils.py} (99%) diff --git a/src/mritk/__init__.py b/src/mritk/__init__.py index 83fda66..e1a82d5 100644 --- a/src/mritk/__init__.py +++ b/src/mritk/__init__.py @@ -5,7 +5,7 @@ from importlib.metadata import metadata -from . import data, segmentation, statistics, t1, concentration +from . import data, segmentation, statistics, concentration, utils, looklocker, mixed, hybrid, r1 meta = metadata("mritk") @@ -20,6 +20,10 @@ "data", "segmentation", "statistics", - "t1", "concentration", + "utils", + "looklocker", + "mixed", + "hybrid", + "r1", ] diff --git a/src/mritk/cli.py b/src/mritk/cli.py index b941602..df22d7d 100644 --- a/src/mritk/cli.py +++ b/src/mritk/cli.py @@ -6,7 +6,7 @@ from rich_argparse import RichHelpFormatter -from . import datasets, info, statistics, show, napari, t1 +from . 
import datasets, info, statistics, show, napari, looklocker def version_info(): @@ -65,8 +65,10 @@ def setup_parser(): napari_parser = subparsers.add_parser("napari", help="Show MRI data using napari", formatter_class=parser.formatter_class) napari.add_arguments(napari_parser) - t1_parser = subparsers.add_parser("t1", help="Show MRI data using t1", formatter_class=parser.formatter_class) - t1.cli.add_arguments(t1_parser) + looklocker_parser = subparsers.add_parser( + "looklocker", help="Process Look-Locker data", formatter_class=parser.formatter_class + ) + looklocker.add_arguments(looklocker_parser) return parser diff --git a/src/mritk/concentration.py b/src/mritk/concentration.py index 549a0f0..cddc72a 100644 --- a/src/mritk/concentration.py +++ b/src/mritk/concentration.py @@ -8,9 +8,9 @@ import numpy as np from pathlib import Path -from .data.base import MRIData -from .data.io import load_mri_data, save_mri_data -from .data.orientation import assert_same_space +from .data import MRIData + +from .testing import assert_same_space def concentration_from_T1_expr(t1: np.ndarray, t1_0: np.ndarray, r1: float) -> np.ndarray: @@ -101,13 +101,13 @@ def concentration_from_T1( Returns: MRIData: An MRIData object containing the concentration array and the affine matrix. 
""" - t1_mri = load_mri_data(input_path, dtype=np.single) - t10_mri = load_mri_data(reference_path, dtype=np.single) + t1_mri = MRIData.from_file(input_path, dtype=np.single) + t10_mri = MRIData.from_file(reference_path, dtype=np.single) assert_same_space(t1_mri, t10_mri) mask_data = None if mask_path is not None: - mask_mri = load_mri_data(mask_path, dtype=bool) + mask_mri = MRIData.from_file(mask_path, dtype=bool) assert_same_space(mask_mri, t10_mri) mask_data = mask_mri.data @@ -116,7 +116,7 @@ def concentration_from_T1( mri_data = MRIData(data=concentrations_array, affine=t10_mri.affine) if output_path is not None: - save_mri_data(mri_data, output_path, dtype=np.single) + mri_data.save(output_path, dtype=np.single) return mri_data @@ -179,13 +179,13 @@ def concentration_from_R1( Returns: MRIData: An MRIData object containing the concentration array and the affine matrix. """ - r1_mri = load_mri_data(input_path, dtype=np.single) - r10_mri = load_mri_data(reference_path, dtype=np.single) + r1_mri = MRIData.from_file(input_path, dtype=np.single) + r10_mri = MRIData.from_file(reference_path, dtype=np.single) assert_same_space(r1_mri, r10_mri) mask_data = None if mask_path is not None: - mask_mri = load_mri_data(mask_path, dtype=bool) + mask_mri = MRIData.from_file(mask_path, dtype=bool) assert_same_space(mask_mri, r10_mri) mask_data = mask_mri.data @@ -194,6 +194,6 @@ def concentration_from_R1( mri_data = MRIData(data=concentrations_array, affine=r10_mri.affine) if output_path is not None: - save_mri_data(mri_data, output_path, dtype=np.single) + mri_data.save(output_path, dtype=np.single) return mri_data diff --git a/src/mritk/data/orientation.py b/src/mritk/data.py similarity index 73% rename from src/mritk/data/orientation.py rename to src/mritk/data.py index 67be168..61c0b0b 100644 --- a/src/mritk/data/orientation.py +++ b/src/mritk/data.py @@ -1,13 +1,73 @@ -# MRI Data orientation Module +# MRI Data Base class and functions Module # Copyright (C) 2026 Jørgen 
Riseth (jnriseth@gmail.com) # Copyright (C) 2026 Cécile Daversin-Catty (cecile@simula.no) # Copyright (C) 2026 Simula Research Laboratory +from pathlib import Path +import nibabel import numpy as np - -from .base import MRIData +import numpy.typing as npt +import re +from typing import Optional + + +class MRIData: + def __init__(self, data: np.ndarray, affine: np.ndarray): + self.data = data + self.affine = affine + + @property + def shape(self) -> tuple[int, ...]: + return self.data.shape + + @classmethod + def from_file(cls, path: Path | str, dtype: npt.DTypeLike | None = None, orient: bool = True) -> "MRIData": + suffix_regex = re.compile(r".+(?P(\.nii(\.gz|)|\.mg(z|h)))") + m = suffix_regex.match(Path(path).name) + if (m is not None) and (m.groupdict()["suffix"] in (".nii", ".nii.gz")): + mri = nibabel.nifti1.load(path) + elif (m is not None) and (m.groupdict()["suffix"] in (".mgz", ".mgh")): + mri = nibabel.freesurfer.mghformat.load(path) + else: + raise ValueError(f"Invalid suffix {path}, should be either '.nii', or '.mgz'") + + affine = mri.affine + if affine is None: + raise RuntimeError("MRI do not contain affine") + + kwargs = {} + if dtype is not None: + kwargs["dtype"] = dtype + data = np.asarray(mri.get_fdata("unchanged"), **kwargs) + + mri = cls(data=data, affine=affine) + + if orient: + return data_reorientation(mri) + else: + return mri + + def save(self, path: Path | str, dtype: npt.DTypeLike | None = None, intent_code: Optional[int] = None): + if dtype is None: + dtype = self.data.dtype + data = self.data.astype(dtype) + + suffix_regex = re.compile(r".+(?P(\.nii(\.gz|)|\.mg(z|h)))") + m = suffix_regex.match(Path(path).name) + if (m is not None) and (m.groupdict()["suffix"] in (".nii", ".nii.gz")): + nii = nibabel.nifti1.Nifti1Image(data, self.affine) + if intent_code is not None: + nii.header.set_intent(intent_code) + nibabel.nifti1.save(nii, path) + elif (m is not None) and (m.groupdict()["suffix"] in (".mgz", ".mgh")): + mgh = 
nibabel.freesurfer.mghformat.MGHImage(data, self.affine) + if intent_code is not None: + mgh.header.set_intent(intent_code) + nibabel.freesurfer.mghformat.save(mgh, path) + else: + raise ValueError(f"Invalid suffix {path}, should be either '.nii', or '.mgz'") def physical_to_voxel_indices(physical_coordinates: np.ndarray, affine: np.ndarray, round_coords: bool = True) -> np.ndarray: @@ -182,31 +242,3 @@ def change_of_coordinates_map(orientation_in: str, orientation_out: str) -> np.n P = np.eye(4) P[:, :3] = P[:, index_order] return P @ F - - -def assert_same_space(mri1: MRIData, mri2: MRIData, rtol: float = 1e-5): - """Assert that two MRI datasets share the same physical space. - - Checks if the data shapes are identical and if the affine transformation - matrices are close within a specified relative tolerance. - - Args: - mri1: The first MRI data object. - mri2: The second MRI data object. - rtol: Relative tolerance for comparing affine matrices. Defaults to 1e-5. - - Raises: - ValueError: If shapes differ or if affine matrices are not sufficiently close. - """ - if mri1.data.shape == mri2.data.shape and np.allclose(mri1.affine, mri2.affine, rtol): - return - with np.printoptions(precision=5): - err = np.nanmax(np.abs((mri1.affine - mri2.affine) / mri2.affine)) - msg = ( - f"MRI's not in same space (relative tolerance {rtol})." - f" Shapes: ({mri1.data.shape}, {mri2.data.shape})," - f" Affines: {mri1.affine}, {mri2.affine}," - f" Affine max relative error: {err}" - ) - - raise ValueError(msg) diff --git a/src/mritk/data/__init__.py b/src/mritk/data/__init__.py deleted file mode 100644 index 3707f79..0000000 --- a/src/mritk/data/__init__.py +++ /dev/null @@ -1,8 +0,0 @@ -# Copyright (C) 2026 Jørgen Riseth (jnriseth@gmail.com) -# Copyright (C) 2026 Cécile Daversin-Catty (cecile@simula.no) -# Copyright (C) 2026 Simula Research Laboratory - - -from . 
import base, io, orientation - -__all__ = ["base", "io", "orientation"] diff --git a/src/mritk/data/base.py b/src/mritk/data/base.py deleted file mode 100644 index 7880e7d..0000000 --- a/src/mritk/data/base.py +++ /dev/null @@ -1,24 +0,0 @@ -# MRI Data Base class and functions Module - -# Copyright (C) 2026 Jørgen Riseth (jnriseth@gmail.com) -# Copyright (C) 2026 Cécile Daversin-Catty (cecile@simula.no) -# Copyright (C) 2026 Simula Research Laboratory - - -import numpy as np - - -class MRIData: - def __init__(self, data: np.ndarray, affine: np.ndarray): - self.data = data - self.affine = affine - - def get_data(self): - return self.data - - def get_metadata(self): - return self.affine - - @property - def shape(self) -> tuple[int, ...]: - return self.data.shape diff --git a/src/mritk/data/io.py b/src/mritk/data/io.py deleted file mode 100644 index ba3547c..0000000 --- a/src/mritk/data/io.py +++ /dev/null @@ -1,61 +0,0 @@ -# MRI Data IO Module - -# Copyright (C) 2026 Jørgen Riseth (jnriseth@gmail.com) -# Copyright (C) 2026 Cécile Daversin-Catty (cecile@simula.no) -# Copyright (C) 2026 Simula Research Laboratory - - -from pathlib import Path -import nibabel -import numpy as np -import numpy.typing as npt -import re -from typing import Optional - -from .base import MRIData -from .orientation import data_reorientation - - -def load_mri_data( - path: Path | str, - dtype: type = np.float64, - orient: bool = True, -) -> MRIData: - suffix_regex = re.compile(r".+(?P(\.nii(\.gz|)|\.mg(z|h)))") - m = suffix_regex.match(Path(path).name) - if (m is not None) and (m.groupdict()["suffix"] in (".nii", ".nii.gz")): - mri = nibabel.nifti1.load(path) - elif (m is not None) and (m.groupdict()["suffix"] in (".mgz", ".mgh")): - mri = nibabel.freesurfer.mghformat.load(path) - else: - raise ValueError(f"Invalid suffix {path}, should be either '.nii', or '.mgz'") - - affine = mri.affine - if affine is None: - raise RuntimeError("MRI do not contain affine") - - data = 
np.asarray(mri.get_fdata("unchanged"), dtype=dtype) - mri = MRIData(data=data, affine=affine) - - if orient: - return data_reorientation(mri) - else: - return mri - - -def save_mri_data(mri: MRIData, path: Path, dtype: npt.DTypeLike, intent_code: Optional[int] = None): - # TODO : Choose other way to check extension than regex ? - suffix_regex = re.compile(r".+(?P(\.nii(\.gz|)|\.mg(z|h)))") - m = suffix_regex.match(Path(path).name) - if (m is not None) and (m.groupdict()["suffix"] in (".nii", ".nii.gz")): - nii = nibabel.nifti1.Nifti1Image(mri.data.astype(dtype), mri.affine) - if intent_code is not None: - nii.header.set_intent(intent_code) - nibabel.nifti1.save(nii, path) - elif (m is not None) and (m.groupdict()["suffix"] in (".mgz", ".mgh")): - mgh = nibabel.freesurfer.mghformat.MGHImage(mri.data.astype(dtype), mri.affine) - if intent_code is not None: - mgh.header.set_intent(intent_code) - nibabel.freesurfer.mghformat.save(mgh, path) - else: - raise ValueError(f"Invalid suffix {path}, should be either '.nii', or '.mgz'") diff --git a/src/mritk/t1/hybrid.py b/src/mritk/hybrid.py similarity index 100% rename from src/mritk/t1/hybrid.py rename to src/mritk/hybrid.py diff --git a/src/mritk/t1/looklocker.py b/src/mritk/looklocker.py similarity index 78% rename from src/mritk/t1/looklocker.py rename to src/mritk/looklocker.py index 72b546f..56f3f5b 100644 --- a/src/mritk/t1/looklocker.py +++ b/src/mritk/looklocker.py @@ -16,8 +16,8 @@ import tqdm import skimage -from ..data.base import MRIData -from ..data.io import load_mri_data, save_mri_data +from .data import MRIData + from .utils import mri_facemask, fit_voxel, nan_filter_gaussian, run_dcm2niix logger = logging.getLogger(__name__) @@ -175,7 +175,7 @@ def looklocker_t1map_postprocessing( RuntimeError: If more than 99% of the voxels are removed during the outlier filtering step, indicating a likely unit mismatch (e.g., T1 in seconds instead of ms). 
""" - t1map_mri = load_mri_data(T1map, dtype=np.single) + t1map_mri = MRIData.from_file(T1map, dtype=np.single) t1map_data = t1map_mri.data.copy() if mask is None: @@ -195,7 +195,7 @@ def looklocker_t1map_postprocessing( processed_T1map = MRIData(t1map_data, t1map_mri.affine) if output is not None: - save_mri_data(processed_T1map, output, dtype=np.single) + processed_T1map.save(output, dtype=np.single) return processed_T1map @@ -219,7 +219,7 @@ def looklocker_t1map(looklocker_input: Path, timestamps: Path, output: Path | No MRIData: An MRIData object containing the computed 3D T1 map (in milliseconds) and the original affine transformation matrix. """ - ll_mri = load_mri_data(looklocker_input, dtype=np.single) + ll_mri = MRIData.from_file(looklocker_input, dtype=np.single) # Convert timestamps from milliseconds to seconds time_s = np.loadtxt(timestamps) / 1000.0 @@ -227,7 +227,7 @@ def looklocker_t1map(looklocker_input: Path, timestamps: Path, output: Path | No t1map_mri = MRIData(t1map_array.astype(np.single), ll_mri.affine) if output is not None: - save_mri_data(t1map_mri, output, dtype=np.single) + t1map_mri.save(output, dtype=np.single) return t1map_mri @@ -260,5 +260,50 @@ def dicom_to_looklocker(dicomfile: Path, outpath: Path): shutil.copy(tmppath / f"{form}.json", outpath.with_suffix(".json")) # Reload and save to standardize intent codes and precision - mri = load_mri_data(tmppath / f"{form}.nii.gz", dtype=np.double) - save_mri_data(mri, outpath.with_suffix(".nii.gz"), dtype=np.single, intent_code=2001) + mri = MRIData.from_file(tmppath / f"{form}.nii.gz", dtype=np.double) + mri.save(outpath.with_suffix(".nii.gz"), dtype=np.single, intent_code=2001) + + +def add_arguments(parser): + subparser = parser.add_subparsers(dest="looklocker-command", help="Commands for processing Look-Locker data") + + dicom_parser = subparser.add_parser("dcm2ll", help="Convert Look-Locker DICOM to NIfTI format") + dicom_parser.add_argument("-i", "--input", type=Path, help="Path to 
the input Look-Locker DICOM file") + dicom_parser.add_argument("-o", "--output", type=Path, help="Desired output path for the converted .nii.gz file") + + ll_t1 = subparser.add_parser("t1", help="Generate a T1 map from Look-Locker data") + ll_t1.add_argument("-i", "--input", type=Path, help="Path to the 4D Look-Locker NIfTI file") + ll_t1.add_argument("-t", "--timestamps", type=Path, help="Path to the text file containing trigger delay times (in ms)") + ll_t1.add_argument("-o", "--output", type=Path, default=None, help="Path to save the resulting T1 map NIfTI file") + + ll_post = subparser.add_parser("postprocess", help="Post-process a raw Look-Locker T1 map") + ll_post.add_argument("-i", "--input", type=Path, help="Path to the raw Look-Locker T1 map NIfTI file") + ll_post.add_argument("-o", "--output", type=Path, default=None, help="Path to save the cleaned T1 map NIfTI file") + ll_post.add_argument("--t1-low", type=float, default=100.0, help="Lower physiological limit for T1 values (in ms)") + ll_post.add_argument("--t1-high", type=float, default=10000.0, help="Upper physiological limit for T1 values (in ms)") + ll_post.add_argument( + "--radius", type=int, default=10, help="Base radius for morphological dilation when generating the automatic mask" + ) + ll_post.add_argument( + "--erode-dilate-factor", + type=float, + default=1.3, + help="Multiplier for the erosion radius relative to the dilation radius to ensure tight mask edges", + ) + + +def dispatch(args): + command = args.pop("looklocker-command") + if command == "dcm2ll": + dicom_to_looklocker(args.pop("input"), args.pop("output")) + elif command == "t1": + looklocker_t1map(args.pop("input"), args.pop("timestamps"), output=args.pop("output")) + elif command == "postprocess": + looklocker_t1map_postprocessing( + T1map=args.pop("input"), + T1_low=args.pop("t1_low"), + T1_high=args.pop("t1_high"), + radius=args.pop("radius"), + erode_dilate_factor=args.pop("erode_dilate_factor"), + output=args.pop("output"), + 
) diff --git a/src/mritk/masking/masks.py b/src/mritk/masking/masks.py index 1ead7ae..7d20517 100644 --- a/src/mritk/masking/masks.py +++ b/src/mritk/masking/masks.py @@ -10,9 +10,8 @@ from typing import Optional from pathlib import Path -from ..data.base import MRIData -from ..data.io import load_mri_data, save_mri_data -from ..data.orientation import assert_same_space +from ..data import MRIData +from ..testing import assert_same_space from .utils import largest_island @@ -40,12 +39,12 @@ def csf_mask( use_li: bool = False, output: Path | None = None, ) -> MRIData: - input_vol = load_mri_data(input, dtype=np.single) + input_vol = MRIData.from_file(input, dtype=np.single) mask = create_csf_mask(input_vol.data, connectivity, use_li) assert np.max(mask) > 0, "Masking failed, no voxels in mask" mri_data = MRIData(data=mask, affine=input_vol.affine) if output is not None: - save_mri_data(mri_data, output, dtype=np.uint8) + mri_data.save(output, dtype=np.uint8) return mri_data @@ -63,10 +62,10 @@ def intracranial_mask( segmentation: Path, output: Optional[Path] = None, ) -> MRIData: - input_csf_mask = load_mri_data(csf_mask, dtype=bool) - segmentation_data = load_mri_data(segmentation, dtype=bool) + input_csf_mask = MRIData.from_file(csf_mask, dtype=bool) + segmentation_data = MRIData.from_file(segmentation, dtype=bool) mask_data = create_intracranial_mask(input_csf_mask, segmentation_data) mri_data = MRIData(data=mask_data, affine=segmentation_data.affine) if output is not None: - save_mri_data(mri_data, output, dtype=np.uint8) + mri_data.save(output, dtype=np.uint8) return mri_data diff --git a/src/mritk/t1/mixed.py b/src/mritk/mixed.py similarity index 97% rename from src/mritk/t1/mixed.py rename to src/mritk/mixed.py index 216df9a..b1d29c4 100644 --- a/src/mritk/t1/mixed.py +++ b/src/mritk/mixed.py @@ -17,11 +17,8 @@ import nibabel -from ..data.orientation import data_reorientation, change_of_coordinates_map - -from ..data.base import MRIData -from ..data.io import 
load_mri_data -from ..masking.masks import create_csf_mask +from .data import data_reorientation, change_of_coordinates_map, MRIData +from .masking.masks import create_csf_mask from .utils import T1_lookup_table, VOLUME_LABELS, run_dcm2niix logger = logging.getLogger(__name__) @@ -128,8 +125,8 @@ def mixed_t1map( nibabel.nifti1.Nifti1Image: The computed T1 map as a NIfTI image object, with the qform/sform properly set to scanner space. """ - se_mri = load_mri_data(SE_nii_path, dtype=np.single) - ir_mri = load_mri_data(IR_nii_path, dtype=np.single) + se_mri = MRIData.from_file(SE_nii_path, dtype=np.single) + ir_mri = MRIData.from_file(IR_nii_path, dtype=np.single) meta = json.loads(meta_path.read_text()) t1_volume = compute_mixed_t1_array(se_mri.data, ir_mri.data, meta, T1_low, T1_high) @@ -163,7 +160,7 @@ def mixed_t1map_postprocessing(SE_nii_path: Path, T1_path: Path, output: Path | have been set to NaN. """ t1map_nii = nibabel.nifti1.load(T1_path) - se_mri = load_mri_data(SE_nii_path, np.single) + se_mri = MRIData.from_file(SE_nii_path, dtype=np.single) mask = create_csf_mask(se_mri.data, use_li=True) mask = skimage.morphology.erosion(mask) diff --git a/src/mritk/napari.py b/src/mritk/napari.py index 645668b..0aa334a 100644 --- a/src/mritk/napari.py +++ b/src/mritk/napari.py @@ -4,8 +4,7 @@ import numpy as np from rich.console import Console -# Assuming relative imports based on your previous file structure -from .data.io import load_mri_data +from .data import MRIData def add_arguments(parser: argparse.ArgumentParser): @@ -51,7 +50,7 @@ def dispatch(args): console = Console() console.print(f"[bold green]Loading MRI data from:[/bold green] {file_path}") - mri_resource = load_mri_data(file_path) + mri_resource = MRIData.from_file(file_path) data = mri_resource.data viewer.add_image(data, name=file_path.stem) diff --git a/src/mritk/t1/r1.py b/src/mritk/r1.py similarity index 94% rename from src/mritk/t1/r1.py rename to src/mritk/r1.py index 7f4d800..7551db7 100644 
--- a/src/mritk/t1/r1.py +++ b/src/mritk/r1.py @@ -8,8 +8,7 @@ import numpy as np from pathlib import Path -from ..data.base import MRIData -from ..data.io import load_mri_data, save_mri_data +from .data import MRIData def compute_r1_array( @@ -85,7 +84,7 @@ def T1_to_R1( ValueError: If input_mri is neither a Path nor an MRIData object. """ if isinstance(input_mri, Path): - T1map_mri = load_mri_data(input_mri, dtype=np.single) + T1map_mri = MRIData.from_file(input_mri, dtype=np.single) elif isinstance(input_mri, MRIData): T1map_mri = input_mri else: @@ -94,6 +93,6 @@ def T1_to_R1( R1map_mri = convert_T1_to_R1(T1map_mri, scale, t1_low, t1_high) if output is not None: - save_mri_data(R1map_mri, output, dtype=np.single) + R1map_mri.save(output, dtype=np.single) return R1map_mri diff --git a/src/mritk/show.py b/src/mritk/show.py index 89e51fe..bf9561a 100644 --- a/src/mritk/show.py +++ b/src/mritk/show.py @@ -7,7 +7,7 @@ from rich.columns import Columns # Assuming relative imports based on your previous file structure -from .data.io import load_mri_data +from .data import MRIData def add_arguments(parser: argparse.ArgumentParser): @@ -82,7 +82,7 @@ def dispatch(args): console = Console() console.print(f"[bold green]Loading MRI data from:[/bold green] {file_path}") - mri_resource = load_mri_data(file_path) + mri_resource = MRIData.from_file(file_path) data = mri_resource.data # 2. Define Slice Indices (Middle of the brain) @@ -91,7 +91,7 @@ def dispatch(args): z_idx = int(data.shape[2] * slize_z) # 3. Extract Slices - # orientation in load_mri_data is typically RAS (Right, Anterior, Superior) + # orientation is typically RAS (Right, Anterior, Superior) # Numpy origin is top-left. We often need to rotate/flip for correct medical view. # Sagittal View (Side): Fix X. Axes are Y (Ant) and Z (Sup). 
diff --git a/src/mritk/statistics/compute_stats.py b/src/mritk/statistics/compute_stats.py index 33760d8..7fe2226 100644 --- a/src/mritk/statistics/compute_stats.py +++ b/src/mritk/statistics/compute_stats.py @@ -12,8 +12,8 @@ import pandas as pd import tqdm.rich -from ..data.io import load_mri_data -from ..data.orientation import assert_same_space +from ..data import MRIData +from ..testing import assert_same_space from ..segmentation.groups import default_segmentation_groups from ..segmentation.lookup_table import read_lut from .utils import voxel_count_to_ml_scale, find_timestamp, prepend_info @@ -155,8 +155,8 @@ def generate_stats_dataframe( pd.DataFrame: A formatted DataFrame with statistics for all identified regions. """ # Load and validate the data - mri = load_mri_data(mri_path, dtype=np.single) - seg = load_mri_data(seg_path, dtype=np.int16) + mri = MRIData.from_file(mri_path, dtype=np.single) + seg = MRIData.from_file(seg_path, dtype=np.int16) assert_same_space(seg, mri) # Resolve metadata diff --git a/src/mritk/t1/__init__.py b/src/mritk/t1/__init__.py deleted file mode 100644 index 0c4db2b..0000000 --- a/src/mritk/t1/__init__.py +++ /dev/null @@ -1,8 +0,0 @@ -# Copyright (C) 2026 Jørgen Riseth (jnriseth@gmail.com) -# Copyright (C) 2026 Cécile Daversin-Catty (cecile@simula.no) -# Copyright (C) 2026 Simula Research Laboratory - -from . import r1, utils, mixed, looklocker, hybrid, cli - - -__all__ = ["r1", "utils", "mixed", "looklocker", "hybrid", "cli"] diff --git a/src/mritk/testing.py b/src/mritk/testing.py new file mode 100644 index 0000000..1cdca77 --- /dev/null +++ b/src/mritk/testing.py @@ -0,0 +1,90 @@ +from pathlib import Path +import numpy as np + +from .data import MRIData + + +def compare_nifti_images(img_path1: Path, img_path2: Path, data_tolerance: float = 0.0) -> bool: + """ + Compares two NIfTI images for equality of data arrays. 
+ + Provides a robust way to check if two NIfTI files contain identical + voxel data, accounting for potential NaNs and floating-point inaccuracies. + + Args: + img_path1 (Path): Path to the first NIfTI file. + img_path2 (Path): Path to the second NIfTI file. + data_tolerance (float, optional): Absolute tolerance for floating-point + comparisons. Use 0.0 for exact mathematical equality. Defaults to 0.0. + + Returns: + bool: True if images are considered the same, False otherwise. + + Raises: + AssertionError: If files exist but the data deviates beyond `data_tolerance`. + FileNotFoundError: If either of the provided file paths does not exist. + """ + if not img_path1.exists(): + raise FileNotFoundError(f"File not found: {img_path1}") + if not img_path2.exists(): + raise FileNotFoundError(f"File not found: {img_path2}") + + img1 = MRIData.from_file(img_path1, orient=False) + img2 = MRIData.from_file(img_path2, orient=False) + + # 1. Compare Image Data + data1 = img1.data + data2 = img2.data + + return compare_nifti_arrays(data1, data2, data_tolerance) + + +def compare_nifti_arrays(arr1: np.ndarray, arr2: np.ndarray, data_tolerance: float = 0.0) -> bool: + """ + Compares two NIfTI data arrays for equality, accounting for NaNs and tolerance. + + Args: + arr1 (np.ndarray): The first data array to compare. + arr2 (np.ndarray): The second data array to compare. + data_tolerance (float, optional): Absolute tolerance for floating-point + comparisons. Use 0.0 for exact mathematical equality. Defaults to 0.0. + + Returns: + bool: True if arrays are considered the same, False otherwise. 
+ """ + # Convert NaN to zero (can have NaNs in concentration maps) + arr1 = np.nan_to_num(arr1, nan=0.0) + arr2 = np.nan_to_num(arr2, nan=0.0) + + if data_tolerance > 0: + return np.allclose(arr1, arr2, atol=data_tolerance) + else: + return np.array_equal(arr1, arr2) + + +def assert_same_space(mri1: MRIData, mri2: MRIData, rtol: float = 1e-5): + """Assert that two MRI datasets share the same physical space. + + Checks if the data shapes are identical and if the affine transformation + matrices are close within a specified relative tolerance. + + Args: + mri1: The first MRI data object. + mri2: The second MRI data object. + rtol: Relative tolerance for comparing affine matrices. Defaults to 1e-5. + + Raises: + ValueError: If shapes differ or if affine matrices are not sufficiently close. + """ + if mri1.data.shape == mri2.data.shape and np.allclose(mri1.affine, mri2.affine, rtol): + return + with np.printoptions(precision=5): + err = np.nanmax(np.abs((mri1.affine - mri2.affine) / mri2.affine)) + msg = ( + f"MRI's not in same space (relative tolerance {rtol})." 
+ f" Shapes: ({mri1.data.shape}, {mri2.data.shape})," + f" Affines: {mri1.affine}, {mri2.affine}," + f" Affine max relative error: {err}" + ) + + raise ValueError(msg) diff --git a/src/mritk/t1/utils.py b/src/mritk/utils.py similarity index 100% rename from src/mritk/t1/utils.py rename to src/mritk/utils.py diff --git a/test/test_concentration.py b/test/test_concentration.py index a03459f..2ae330c 100644 --- a/test/test_concentration.py +++ b/test/test_concentration.py @@ -15,7 +15,7 @@ concentration_from_T1, ) -from mritk.t1.utils import compare_nifti_images +from mritk.testing import compare_nifti_images def test_intracranial_concentration(tmp_path, mri_data_dir: Path): diff --git a/test/test_data_orientation.py b/test/test_data_orientation.py index 824bbf7..87fc78f 100644 --- a/test/test_data_orientation.py +++ b/test/test_data_orientation.py @@ -1,6 +1,6 @@ import numpy as np import pytest -import mritk.data.orientation +import mritk.data def test_apply_affine_identity(): @@ -8,7 +8,7 @@ def test_apply_affine_identity(): points = np.array([[1.0, 2.0, 3.0], [10.0, 20.0, 30.0]]) identity_affine = np.eye(4) - result = mritk.data.orientation.apply_affine(identity_affine, points) + result = mritk.data.apply_affine(identity_affine, points) np.testing.assert_array_equal(result, points) @@ -22,7 +22,7 @@ def test_apply_affine_translation(): affine[:3, 3] = translation expected = points + translation - result = mritk.data.orientation.apply_affine(affine, points) + result = mritk.data.apply_affine(affine, points) np.testing.assert_array_almost_equal(result, expected) @@ -34,7 +34,7 @@ def test_apply_affine_scaling(): affine = np.diag([2.0, 0.5, -1.0, 1.0]) expected = np.array([[2.0, 1.0, -3.0]]) - result = mritk.data.orientation.apply_affine(affine, points) + result = mritk.data.apply_affine(affine, points) np.testing.assert_array_almost_equal(result, expected) @@ -57,7 +57,7 @@ def test_physical_to_voxel_indices_basic_translation(): # 11 - 10 = 1 expected = 
np.array([[0, 0, 0], [1, 1, 1]]) - result = mritk.data.orientation.physical_to_voxel_indices(dof_coords, affine, round_coords=True) + result = mritk.data.physical_to_voxel_indices(dof_coords, affine, round_coords=True) np.testing.assert_array_equal(result, expected) assert result.dtype == int @@ -68,7 +68,7 @@ def test_physical_to_voxel_indices_no_rounding(): dof_coords = np.array([[10.5, 10.5, 10.5]]) affine = np.eye(4) # Identity - result = mritk.data.orientation.physical_to_voxel_indices(dof_coords, affine, round_coords=False) + result = mritk.data.physical_to_voxel_indices(dof_coords, affine, round_coords=False) np.testing.assert_array_almost_equal(result, dof_coords) assert np.issubdtype(result.dtype, np.floating) @@ -82,7 +82,7 @@ def test_physical_to_voxel_indices_rounding_behavior(): expected = np.array([[10, 10, 10], [11, 11, 11]]) - result = mritk.data.orientation.physical_to_voxel_indices(dof_coords, affine, round_coords=True) + result = mritk.data.physical_to_voxel_indices(dof_coords, affine, round_coords=True) np.testing.assert_array_equal(result, expected) @@ -97,7 +97,7 @@ def test_find_nearest_valid_voxels_1_neighbor(): dof_inds = np.array([[0.1, 0.1], [4.9, 4.9]]) # Function output shape is (ndim, N_neighbors, N_points) - result = mritk.data.orientation.find_nearest_valid_voxels(dof_inds, mask, k=1) + result = mritk.data.find_nearest_valid_voxels(dof_inds, mask, k=1) # Verify shape: (2 dims, 1 neighbor, 2 query points) assert result.shape == (2, 1, 2) @@ -120,7 +120,7 @@ def test_find_nearest_valid_voxels_N_neighbors(): # Query point right next to the cluster at (1,1,1) dof_inds = np.array([[1.0, 1.0, 1.1]]) - result = mritk.data.orientation.find_nearest_valid_voxels(dof_inds, mask, k=2) + result = mritk.data.find_nearest_valid_voxels(dof_inds, mask, k=2) # Shape should be (3 dims, 2 neighbors, 1 point) assert result.shape == (3, 2, 1) @@ -143,4 +143,4 @@ def test_find_nearest_valid_voxels_empty_mask_error(): dof_inds = np.array([[1, 1]]) with 
pytest.raises(ValueError): - mritk.data.orientation.find_nearest_valid_voxels(dof_inds, mask, k=1) + mritk.data.find_nearest_valid_voxels(dof_inds, mask, k=1) diff --git a/test/test_hybrid.py b/test/test_hybrid.py index 22e7721..3e86112 100644 --- a/test/test_hybrid.py +++ b/test/test_hybrid.py @@ -2,8 +2,8 @@ import numpy as np -from mritk.t1.hybrid import compute_hybrid_t1_array, hybrid_t1map -from mritk.t1.utils import compare_nifti_images +from mritk.hybrid import compute_hybrid_t1_array, hybrid_t1map +from mritk.testing import compare_nifti_images def test_hybrid_t1map(tmp_path, mri_data_dir: Path): diff --git a/test/test_looklocker.py b/test/test_looklocker.py index 945200d..5047fa8 100644 --- a/test/test_looklocker.py +++ b/test/test_looklocker.py @@ -3,8 +3,8 @@ import numpy as np import pytest -from mritk.t1.utils import compare_nifti_images -from mritk.t1.looklocker import ( +from mritk.testing import compare_nifti_images +from mritk.looklocker import ( looklocker_t1map, looklocker_t1map_postprocessing, remove_outliers, diff --git a/test/test_mixed.py b/test/test_mixed.py index 11511aa..59576f6 100644 --- a/test/test_mixed.py +++ b/test/test_mixed.py @@ -3,14 +3,15 @@ from unittest.mock import MagicMock, patch -from mritk.t1.mixed import ( +from mritk.mixed import ( compute_mixed_t1_array, extract_mixed_dicom, mixed_t1map, mixed_t1map_postprocessing, _extract_frame_metadata, ) -from mritk.t1.utils import VOLUME_LABELS, compare_nifti_images +from mritk.utils import VOLUME_LABELS +from mritk.testing import compare_nifti_images def test_mixed_t1map(tmp_path, mri_data_dir: Path): @@ -69,7 +70,7 @@ def test_extract_frame_metadata(): assert meta["ETL"] == 5 -@patch("mritk.t1.mixed.extract_single_volume") +@patch("mritk.mixed.extract_single_volume") @patch("pydicom.dcmread") def test_extract_mixed_dicom(mock_dcmread, mock_extract_single): """Test parsing a multi-volume DICOM file into independent subvolumes.""" diff --git a/test/test_mri_io.py 
b/test/test_mri_io.py index 34ffeeb..2c4f3ea 100644 --- a/test/test_mri_io.py +++ b/test/test_mri_io.py @@ -6,7 +6,7 @@ """ import numpy as np -from mritk.data.io import load_mri_data, save_mri_data +from mritk.data import MRIData def test_mri_io_nifti(tmp_path, mri_data_dir): @@ -14,5 +14,5 @@ def test_mri_io_nifti(tmp_path, mri_data_dir): output_file = tmp_path / "output_nifti.nii.gz" - mri = load_mri_data(input_file, dtype=np.single, orient=False) ## TODO : Test orient=True case - save_mri_data(mri, output_file, dtype=np.single) + mri = MRIData.from_file(input_file, dtype=np.single, orient=False) ## TODO : Test orient=True case + mri.save(output_file, dtype=np.single) diff --git a/test/test_mri_orientation.py b/test/test_mri_orientation.py index fccebdb..6d65b29 100644 --- a/test/test_mri_orientation.py +++ b/test/test_mri_orientation.py @@ -6,7 +6,7 @@ """ import numpy as np -from mritk.data.orientation import apply_affine, change_of_coordinates_map +from mritk.data import apply_affine, change_of_coordinates_map def default_test_data(): diff --git a/test/test_r1.py b/test/test_r1.py index 9a60bec..6151748 100644 --- a/test/test_r1.py +++ b/test/test_r1.py @@ -7,8 +7,8 @@ import numpy as np import pytest -from mritk.data.base import MRIData -from mritk.t1.r1 import ( +from mritk.data import MRIData +from mritk.r1 import ( compute_r1_array, convert_T1_to_R1, T1_to_R1, diff --git a/test/test_t1_utils.py b/test/test_utils.py similarity index 99% rename from test/test_t1_utils.py rename to test/test_utils.py index d0117ba..81b5059 100644 --- a/test/test_t1_utils.py +++ b/test/test_utils.py @@ -10,7 +10,7 @@ import numpy as np -from mritk.t1.utils import ( +from mritk.utils import ( voxel_fit_function, nan_filter_gaussian, estimate_se_free_relaxation_time, From cab0e078e051648f61108f4efab9873e3ff96aba Mon Sep 17 00:00:00 2001 From: Henrik Finsberg Date: Sat, 7 Mar 2026 22:02:15 +0100 Subject: [PATCH 15/29] Add cli --- src/mritk/cli.py | 33 +++++++++++++- 
src/mritk/concentration.py | 44 ++++++++++++++++++ src/mritk/hybrid.py | 32 +++++++++++++ src/mritk/looklocker.py | 12 +++-- src/mritk/mixed.py | 71 ++++++++++++++++++++++++++++- src/mritk/r1.py | 34 +++++++++++--- src/mritk/statistics/cli.py | 3 -- test/test_concentration.py | 90 ++++++++++++++++++++++++++++++++++++- test/test_hybrid.py | 56 +++++++++++++++++++++++ test/test_looklocker.py | 53 ++++++++++++++++++++++ test/test_mixed.py | 72 ++++++++++++++++++++++++++++- test/test_r1.py | 66 +++++++++++++++++++++++++-- 12 files changed, 544 insertions(+), 22 deletions(-) diff --git a/src/mritk/cli.py b/src/mritk/cli.py index df22d7d..19f96f4 100644 --- a/src/mritk/cli.py +++ b/src/mritk/cli.py @@ -6,7 +6,7 @@ from rich_argparse import RichHelpFormatter -from . import datasets, info, statistics, show, napari, looklocker +from . import datasets, info, statistics, show, napari, looklocker, hybrid, mixed, r1, concentration def version_info(): @@ -70,6 +70,26 @@ def setup_parser(): ) looklocker.add_arguments(looklocker_parser) + hybrid_parser = subparsers.add_parser( + "hybrid", help="Generate a hybrid T1 map by merging Look-Locker and Mixed maps.", formatter_class=parser.formatter_class + ) + hybrid.add_arguments(hybrid_parser) + + mixed_parser = subparsers.add_parser( + "mixed", help="Generate a Mixed T1 map from Look-Locker data.", formatter_class=parser.formatter_class + ) + mixed.add_arguments(mixed_parser) + + t1_to_r1_parser = subparsers.add_parser( + "t12r1", help="Convert a T1 map to an R1 map.", formatter_class=parser.formatter_class + ) + r1.add_arguments(t1_to_r1_parser) + + concentration_parser = subparsers.add_parser( + "concentration", help="Compute concentration maps.", formatter_class=parser.formatter_class + ) + concentration.add_arguments(concentration_parser) + return parser @@ -94,6 +114,17 @@ def dispatch(parser: argparse.ArgumentParser, argv: Optional[Sequence[str]] = No show.dispatch(args) elif command == "napari": napari.dispatch(args) + elif 
command == "looklocker": + looklocker.dispatch(args) + elif command == "hybrid": + hybrid.dispatch(args) + elif command == "mixed": + mixed.dispatch(args) + elif command == "t12r1": + r1.dispatch(args) + elif command == "concentration": + concentration.dispatch(args) + else: logger.error(f"Unknown command {command}") parser.print_help() diff --git a/src/mritk/concentration.py b/src/mritk/concentration.py index cddc72a..23b71ef 100644 --- a/src/mritk/concentration.py +++ b/src/mritk/concentration.py @@ -197,3 +197,47 @@ def concentration_from_R1( mri_data.save(output_path, dtype=np.single) return mri_data + + +def add_arguments(parser): + subparsers = parser.add_subparsers(dest="concentration-command", required=True) + + t1_parser = subparsers.add_parser("t1", help="Compute concentration from T1 maps.", formatter_class=parser.formatter_class) + t1_parser.add_argument("-i", "--input", type=Path, required=True, help="Path to the post-contrast T1 map (NIfTI).") + t1_parser.add_argument( + "-r", "--reference", type=Path, required=True, help="Path to the baseline (pre-contrast) T1 map (NIfTI)." + ) + t1_parser.add_argument("-o", "--output", type=Path, help="Path to save the resulting concentration map (NIfTI).") + t1_parser.add_argument("--r1", type=float, default=0.0045, help="Relaxivity of the contrast agent (default: 0.0045).") + t1_parser.add_argument("--mask", type=Path, help="Path to a boolean mask NIfTI file to restrict computation (optional).") + + r1_parser = subparsers.add_parser("r1", help="Compute concentration from R1 maps.", formatter_class=parser.formatter_class) + r1_parser.add_argument("-i", "--input", type=Path, required=True, help="Path to the post-contrast R1 map (NIfTI).") + r1_parser.add_argument( + "-r", "--reference", type=Path, required=True, help="Path to the baseline (pre-contrast) R1 map (NIfTI)." 
+ ) + r1_parser.add_argument("-o", "--output", type=Path, help="Path to save the resulting concentration map (NIfTI).") + r1_parser.add_argument("--r1", type=float, default=0.0045, help="Relaxivity of the contrast agent (default: 0.0045).") + r1_parser.add_argument("--mask", type=Path, help="Path to a boolean mask NIfTI file to restrict computation (optional).") + + +def dispatch(args): + command = args.pop("concentration-command") + if command == "t1": + return concentration_from_T1( + input_path=args.pop("input"), + reference_path=args.pop("reference"), + output_path=args.pop("output"), + r1=args.pop("r1"), + mask_path=args.pop("mask"), + ) + elif command == "r1": + return concentration_from_R1( + input_path=args.pop("input"), + reference_path=args.pop("reference"), + output_path=args.pop("output"), + r1=args.pop("r1"), + mask_path=args.pop("mask"), + ) + else: + raise ValueError(f"Unknown concentration command: {command}") diff --git a/src/mritk/hybrid.py b/src/mritk/hybrid.py index 220b990..3d005f0 100644 --- a/src/mritk/hybrid.py +++ b/src/mritk/hybrid.py @@ -31,6 +31,7 @@ def compute_hybrid_t1_array(ll_data: np.ndarray, mixed_data: np.ndarray, mask: n Returns: np.ndarray: Hybrid 3D T1 array. 
""" + logger.debug("Computing hybrid T1 array with threshold %.2f ms.", threshold) hybrid = ll_data.copy() newmask = mask & (ll_data > threshold) & (mixed_data > threshold) hybrid[newmask] = mixed_data[newmask] @@ -41,6 +42,10 @@ def hybrid_t1map( LL_path: Path, mixed_path: Path, csf_mask_path: Path, threshold: float, erode: int = 0, output: Path | None = None ) -> nibabel.nifti1.Nifti1Image: """I/O wrapper for merging a Look-Locker and a Mixed T1 map.""" + logger.info(f"Generating hybrid T1 map with threshold {threshold} ms and erosion {erode} voxels.") + logger.info(f"Loading Look-Locker T1 map from {LL_path}.") + logger.info(f"Loading Mixed T1 map from {mixed_path}.") + logger.info(f"Loading CSF mask from {csf_mask_path}.") mixed_mri = nibabel.nifti1.load(mixed_path) ll_mri = nibabel.nifti1.load(LL_path) @@ -48,12 +53,39 @@ def hybrid_t1map( csf_mask = csf_mask_mri.get_fdata().astype(bool) if erode > 0: + logger.debug(f"Eroding CSF mask with a ball structuring element of radius {erode}.") csf_mask = skimage.morphology.erosion(csf_mask, skimage.morphology.ball(erode)) hybrid = compute_hybrid_t1_array(ll_mri.get_fdata(), mixed_mri.get_fdata(), csf_mask, threshold) hybrid_nii = nibabel.nifti1.Nifti1Image(hybrid, affine=ll_mri.affine, header=ll_mri.header) + if output is not None: + logger.info(f"Saving hybrid T1 map to {output}.") nibabel.nifti1.save(hybrid_nii, output) + else: + logger.info("No output path provided, returning hybrid T1 map as Nifti1Image object.") return hybrid_nii + + +def add_arguments(parser): + """Add command-line arguments for the hybrid T1 map generation.""" + parser.add_argument("-i", "--input-ll", type=Path, required=True, help="Path to the Look-Locker T1 map (NIfTI).") + parser.add_argument("-m", "--input-mixed", type=Path, required=True, help="Path to the Mixed T1 map (NIfTI).") + parser.add_argument("-c", "--csf-mask", type=Path, required=True, help="Path to the CSF mask (NIfTI).") + parser.add_argument("-t", "--threshold", type=float, 
default=4000.0, help="T1 threshold in ms for substitution.") + parser.add_argument("-e", "--erode", type=int, default=0, help="Number of voxels to erode the CSF mask.") + parser.add_argument("-o", "--output", type=Path, required=True, help="Output path for the hybrid T1 map (NIfTI).") + + +def dispatch(args): + """Dispatch function for the hybrid T1 map generation.""" + hybrid_t1map( + LL_path=args.pop("input_ll"), + mixed_path=args.pop("input_mixed"), + csf_mask_path=args.pop("csf_mask"), + threshold=args.pop("threshold"), + erode=args.pop("erode"), + output=args.pop("output"), + ) diff --git a/src/mritk/looklocker.py b/src/mritk/looklocker.py index 56f3f5b..d3822f6 100644 --- a/src/mritk/looklocker.py +++ b/src/mritk/looklocker.py @@ -267,16 +267,20 @@ def dicom_to_looklocker(dicomfile: Path, outpath: Path): def add_arguments(parser): subparser = parser.add_subparsers(dest="looklocker-command", help="Commands for processing Look-Locker data") - dicom_parser = subparser.add_parser("dcm2ll", help="Convert Look-Locker DICOM to NIfTI format") + dicom_parser = subparser.add_parser( + "dcm2ll", help="Convert Look-Locker DICOM to NIfTI format", formatter_class=parser.formatter_class + ) dicom_parser.add_argument("-i", "--input", type=Path, help="Path to the input Look-Locker DICOM file") dicom_parser.add_argument("-o", "--output", type=Path, help="Desired output path for the converted .nii.gz file") - ll_t1 = subparser.add_parser("t1", help="Generate a T1 map from Look-Locker data") + ll_t1 = subparser.add_parser("t1", help="Generate a T1 map from Look-Locker data", formatter_class=parser.formatter_class) ll_t1.add_argument("-i", "--input", type=Path, help="Path to the 4D Look-Locker NIfTI file") ll_t1.add_argument("-t", "--timestamps", type=Path, help="Path to the text file containing trigger delay times (in ms)") ll_t1.add_argument("-o", "--output", type=Path, default=None, help="Path to save the resulting T1 map NIfTI file") - ll_post = 
subparser.add_parser("postprocess", help="Post-process a raw Look-Locker T1 map") + ll_post = subparser.add_parser( + "postprocess", help="Post-process a raw Look-Locker T1 map", formatter_class=parser.formatter_class + ) ll_post.add_argument("-i", "--input", type=Path, help="Path to the raw Look-Locker T1 map NIfTI file") ll_post.add_argument("-o", "--output", type=Path, default=None, help="Path to save the cleaned T1 map NIfTI file") ll_post.add_argument("--t1-low", type=float, default=100.0, help="Lower physiological limit for T1 values (in ms)") @@ -307,3 +311,5 @@ def dispatch(args): erode_dilate_factor=args.pop("erode_dilate_factor"), output=args.pop("output"), ) + else: + raise ValueError(f"Unknown Look-Locker command: {command}") diff --git a/src/mritk/mixed.py b/src/mritk/mixed.py index b1d29c4..5a49a68 100644 --- a/src/mritk/mixed.py +++ b/src/mritk/mixed.py @@ -1,4 +1,4 @@ -# T1 Maps generation module +# Mixed sequence # Copyright (C) 2026 Jørgen Riseth (jnriseth@gmail.com) # Copyright (C) 2026 Cécile Daversin-Catty (cecile@simula.no) @@ -321,3 +321,72 @@ def dicom_to_mixed(dcmpath: Path, outpath: Path, subvolumes: Optional[list[str]] # Attempt standard dcm2niix conversion (soft failure allowed for legacy behavior) run_dcm2niix(dcmpath, outdir, form, extra_args="-w 0 --terse -b o", check=False) + + +def add_arguments(parser): + subparser = parser.add_subparsers(dest="hybrid-command", required=True, title="hybrid subcommands") + + dmc_parser = subparser.add_parser( + "dcm2mixed", + help="Convert a Mixed sequence DICOM file into separate NIfTI subvolumes and metadata.", + formatter_class=parser.formatter_class, + ) + dmc_parser.add_argument("-i", "--input", type=Path, required=True, help="Path to the input Mixed DICOM file.") + dmc_parser.add_argument( + "-o", "--output", type=Path, required=True, help="Base path for output NIfTI files and metadata JSON." 
+ ) + dmc_parser.add_argument( + "-s", + "--subvolumes", + nargs="+", + default=VOLUME_LABELS, + help=f"Specific subvolumes to extract, space-separated. Defaults to all: {VOLUME_LABELS}.", + ) + + t1_parser = subparser.add_parser( + "t1", help="Generate a T1 map from Mixed sequence NIfTI files.", formatter_class=parser.formatter_class + ) + t1_parser.add_argument("-s", "--se", type=Path, required=True, help="Path to the Spin-Echo modulus NIfTI file.") + t1_parser.add_argument( + "-i", "--ir", type=Path, required=True, help="Path to the Inversion-Recovery corrected real NIfTI file." + ) + t1_parser.add_argument( + "-m", "--meta", type=Path, required=True, help="Path to the JSON file containing the sequence parameters." + ) + t1_parser.add_argument("--t1-low", type=float, default=500.0, help="Lower bound for T1 interpolation grid (ms).") + t1_parser.add_argument("--t1-high", type=float, default=5000.0, help="Upper bound for T1 interpolation grid (ms).") + t1_parser.add_argument("-o", "--output", type=Path, required=True, help="Output path for the generated T1 map NIfTI file.") + + post_parser = subparser.add_parser( + "postprocess", + help="Mask a Mixed T1 map to isolate the CSF using the original SE sequence.", + formatter_class=parser.formatter_class, + ) + post_parser.add_argument( + "-s", "--se", type=Path, required=True, help="Path to the Spin-Echo modulus NIfTI file used to derive the mask." + ) + post_parser.add_argument( + "-t", "--t1", type=Path, required=True, help="Path to the previously generated Mixed T1 map NIfTI file." 
+ ) + post_parser.add_argument("-o", "--output", type=Path, required=True, help="Output path for the masked T1 map NIfTI file.") + + +def dispatch(args): + """Dispatch function for the mixed T1 map generation commands.""" + command = args.pop("hybrid-command") # Note: matches the 'dest' in your add_arguments + + if command == "dcm2mixed": + dicom_to_mixed(dcmpath=args.pop("input"), outpath=args.pop("output"), subvolumes=args.pop("subvolumes")) + elif command == "t1": + mixed_t1map( + SE_nii_path=args.pop("se"), + IR_nii_path=args.pop("ir"), + meta_path=args.pop("meta"), + T1_low=args.pop("t1_low"), + T1_high=args.pop("t1_high"), + output=args.pop("output"), + ) + elif command == "postprocess": + mixed_t1map_postprocessing(SE_nii_path=args.pop("se"), T1_path=args.pop("t1"), output=args.pop("output")) + else: + raise ValueError(f"Unknown command: {command}") diff --git a/src/mritk/r1.py b/src/mritk/r1.py index 7551db7..db64420 100644 --- a/src/mritk/r1.py +++ b/src/mritk/r1.py @@ -38,7 +38,7 @@ def compute_r1_array( return r1_data -def convert_T1_to_R1( +def convert_t1_to_r1( T1map_mri: MRIData, scale: float = 1000.0, t1_low: float = 1.0, @@ -60,7 +60,7 @@ def convert_T1_to_R1( return MRIData(data=r1_data, affine=T1map_mri.affine) -def T1_to_R1( +def t1_to_r1( input_mri: Path | MRIData, output: Path | None = None, scale: float = 1000.0, @@ -84,15 +84,35 @@ def T1_to_R1( ValueError: If input_mri is neither a Path nor an MRIData object. 
""" if isinstance(input_mri, Path): - T1map_mri = MRIData.from_file(input_mri, dtype=np.single) + mri_t1 = MRIData.from_file(input_mri, dtype=np.single) elif isinstance(input_mri, MRIData): - T1map_mri = input_mri + mri_t1 = input_mri else: raise ValueError(f"Input should be a Path or MRIData, got {type(input_mri)}") - R1map_mri = convert_T1_to_R1(T1map_mri, scale, t1_low, t1_high) + mri_r1 = convert_t1_to_r1(mri_t1, scale, t1_low, t1_high) if output is not None: - R1map_mri.save(output, dtype=np.single) + mri_r1.save(output, dtype=np.single) - return R1map_mri + return mri_r1 + + +def add_arguments(parser): + """Add command-line arguments for the T1 to R1 conversion.""" + parser.add_argument("-i", "--input", type=Path, required=True, help="Path to the input T1 map (NIfTI).") + parser.add_argument("-o", "--output", type=Path, help="Path to save the output R1 map (NIfTI).") + parser.add_argument("--scale", type=float, default=1000.0, help="Scaling factor for R1 calculation.") + parser.add_argument("--t1-low", type=float, default=1.0, help="Lower bound for valid T1 values.") + parser.add_argument("--t1-high", type=float, default=float("inf"), help="Upper bound for valid T1 values.") + + +def dispatch(args: dict): + """Dispatch function for the T1 to R1 conversion.""" + t1_to_r1( + input_mri=args.pop("input"), + output=args.pop("output"), + scale=args.pop("scale"), + t1_low=args.pop("t1_low"), + t1_high=args.pop("t1_high"), + ) diff --git a/src/mritk/statistics/cli.py b/src/mritk/statistics/cli.py index 491f439..1244a3a 100644 --- a/src/mritk/statistics/cli.py +++ b/src/mritk/statistics/cli.py @@ -85,9 +85,6 @@ def compute_mri_stats( def get_stats_value(stats_file: Path, region: str, info: str, **kwargs): - """ - Replaces the @click.command('get') decorated function. 
- """ import sys from rich.console import Console diff --git a/test/test_concentration.py b/test/test_concentration.py index 2ae330c..7ab1e16 100644 --- a/test/test_concentration.py +++ b/test/test_concentration.py @@ -3,10 +3,12 @@ # Copyright (C) 2026 Jørgen Riseth (jnriseth@gmail.com) # Copyright (C) 2026 Cécile Daversin-Catty (cecile@simula.no) # Copyright (C) 2026 Simula Research Laboratory - from pathlib import Path +from unittest.mock import patch + import numpy as np +import mritk.cli from mritk.concentration import ( concentration_from_T1_expr, concentration_from_R1_expr, @@ -142,3 +144,89 @@ def test_compute_concentration_from_R1_array_no_mask(): assert np.isclose(result[0], 200.0) assert np.isclose(result[1], 200.0) + + +@patch("mritk.concentration.concentration_from_T1") +def test_dispatch_concentration_t1_defaults(mock_conc_t1): + """Test the T1 concentration command with minimum required arguments.""" + mritk.cli.main(["concentration", "t1", "-i", "post_t1.nii.gz", "-r", "pre_t1.nii.gz"]) + + # Verify paths are parsed and default arguments (r1, output, mask) are applied + mock_conc_t1.assert_called_once_with( + input_path=Path("post_t1.nii.gz"), + reference_path=Path("pre_t1.nii.gz"), + output_path=None, + r1=0.0045, + mask_path=None, + ) + + +@patch("mritk.concentration.concentration_from_T1") +def test_dispatch_concentration_t1_explicit(mock_conc_t1): + """Test the T1 concentration command with all optional arguments explicitly provided.""" + mritk.cli.main( + [ + "concentration", + "t1", + "--input", + "post_t1.nii.gz", + "--reference", + "pre_t1.nii.gz", + "--output", + "concentration.nii.gz", + "--r1", + "0.005", + "--mask", + "brain_mask.nii.gz", + ] + ) + + # Verify explicit overrides and type casting (e.g., float for r1) + mock_conc_t1.assert_called_once_with( + input_path=Path("post_t1.nii.gz"), + reference_path=Path("pre_t1.nii.gz"), + output_path=Path("concentration.nii.gz"), + r1=0.005, + mask_path=Path("brain_mask.nii.gz"), + ) + + 
+@patch("mritk.concentration.concentration_from_R1") +def test_dispatch_concentration_r1_defaults(mock_conc_r1): + """Test the R1 concentration command with minimum required arguments.""" + mritk.cli.main(["concentration", "r1", "-i", "post_r1.nii.gz", "-r", "pre_r1.nii.gz"]) + + # Verify paths are parsed and default arguments are applied + mock_conc_r1.assert_called_once_with( + input_path=Path("post_r1.nii.gz"), reference_path=Path("pre_r1.nii.gz"), output_path=None, r1=0.0045, mask_path=None + ) + + +@patch("mritk.concentration.concentration_from_R1") +def test_dispatch_concentration_r1_explicit(mock_conc_r1): + """Test the R1 concentration command with all optional arguments explicitly provided.""" + mritk.cli.main( + [ + "concentration", + "r1", + "--input", + "post_r1.nii.gz", + "--reference", + "pre_r1.nii.gz", + "--output", + "conc_r1.nii.gz", + "--r1", + "0.0032", + "--mask", + "csf_mask.nii.gz", + ] + ) + + # Verify explicit overrides and type casting + mock_conc_r1.assert_called_once_with( + input_path=Path("post_r1.nii.gz"), + reference_path=Path("pre_r1.nii.gz"), + output_path=Path("conc_r1.nii.gz"), + r1=0.0032, + mask_path=Path("csf_mask.nii.gz"), + ) diff --git a/test/test_hybrid.py b/test/test_hybrid.py index 3e86112..0e90e1e 100644 --- a/test/test_hybrid.py +++ b/test/test_hybrid.py @@ -1,7 +1,9 @@ from pathlib import Path +from unittest.mock import patch import numpy as np +import mritk.cli from mritk.hybrid import compute_hybrid_t1_array, hybrid_t1map from mritk.testing import compare_nifti_images @@ -52,3 +54,57 @@ def test_compute_hybrid_t1_array(): hybrid2 = compute_hybrid_t1_array(ll_data, mixed_data, mask, threshold) # Voxel 2: LL(2000) > 1500 AND Mixed(3000) > 1500 AND Mask=True -> Merge! 
assert hybrid2[2] == 3000.0 + + +@patch("mritk.hybrid.hybrid_t1map") +def test_dispatch_hybrid_defaults(mock_hybrid_t1map): + """Test the hybrid CLI command with required arguments, relying on defaults for threshold and erode.""" + + # We pass the arguments exactly as a user would type them in the terminal. + # If "hybrid" is nested under another command (like "t1maps hybrid"), add that prefix to the list. + mritk.cli.main( + ["hybrid", "-i", "ll_map.nii.gz", "-m", "mixed_map.nii.gz", "-c", "csf_mask.nii.gz", "-o", "output_hybrid.nii.gz"] + ) + + # Verify the underlying function was called with parsed Paths and the correct default values + mock_hybrid_t1map.assert_called_once_with( + LL_path=Path("ll_map.nii.gz"), + mixed_path=Path("mixed_map.nii.gz"), + csf_mask_path=Path("csf_mask.nii.gz"), + threshold=4000.0, # Default value + erode=0, # Default value + output=Path("output_hybrid.nii.gz"), + ) + + +@patch("mritk.hybrid.hybrid_t1map") +def test_dispatch_hybrid_explicit_args(mock_hybrid_t1map): + """Test the hybrid CLI command with all arguments explicitly provided using long-form flags.""" + + mritk.cli.main( + [ + "hybrid", + "--input-ll", + "ll_map.nii.gz", + "--input-mixed", + "mixed_map.nii.gz", + "--csf-mask", + "csf_mask.nii.gz", + "--threshold", + "3500.5", + "--erode", + "2", + "--output", + "output_hybrid.nii.gz", + ] + ) + + # Verify the underlying function received the explicit overrides and correct types + mock_hybrid_t1map.assert_called_once_with( + LL_path=Path("ll_map.nii.gz"), + mixed_path=Path("mixed_map.nii.gz"), + csf_mask_path=Path("csf_mask.nii.gz"), + threshold=3500.5, + erode=2, + output=Path("output_hybrid.nii.gz"), + ) diff --git a/test/test_looklocker.py b/test/test_looklocker.py index 5047fa8..142d9b9 100644 --- a/test/test_looklocker.py +++ b/test/test_looklocker.py @@ -1,8 +1,10 @@ from pathlib import Path +from unittest.mock import patch import numpy as np import pytest +import mritk.cli from mritk.testing import compare_nifti_images 
from mritk.looklocker import ( looklocker_t1map, @@ -73,3 +75,54 @@ def test_create_largest_island_mask(): # Speck should be dropped, major block should be True assert mask[0, 0, 0] == np.False_ assert mask[7, 7, 7] == np.True_ + + +@patch("mritk.looklocker.dicom_to_looklocker") +def test_dispatch_dcm2ll(mock_dicom_to_ll): + """Test that dispatch correctly routes to dicom_to_looklocker.""" + + mritk.cli.main(["looklocker", "dcm2ll", "-i", "dummy_in.dcm", "-o", "dummy_out.nii.gz"]) + + mock_dicom_to_ll.assert_called_once_with(Path("dummy_in.dcm"), Path("dummy_out.nii.gz")) + + +@patch("mritk.looklocker.looklocker_t1map") +def test_dispatch_t1(mock_ll_t1map): + """Test that dispatch correctly routes to looklocker_t1map.""" + + mritk.cli.main(["looklocker", "t1", "-i", "data.nii.gz", "-t", "times.txt", "-o", "t1map.nii.gz"]) + + mock_ll_t1map.assert_called_once_with(Path("data.nii.gz"), Path("times.txt"), output=Path("t1map.nii.gz")) + + +@patch("mritk.looklocker.looklocker_t1map_postprocessing") +def test_dispatch_postprocess(mock_postprocessing): + """Test that dispatch correctly routes to looklocker_t1map_postprocessing.""" + + mritk.cli.main( + [ + "looklocker", + "postprocess", + "-i", + "raw_t1.nii.gz", + "-o", + "clean_t1.nii.gz", + "--t1-low", + "50.0", + "--t1-high", + "5000.0", + "--radius", + "5", + "--erode-dilate-factor", + "1.5", + ] + ) + + mock_postprocessing.assert_called_once_with( + T1map=Path("raw_t1.nii.gz"), + T1_low=50.0, + T1_high=5000.0, + radius=5, + erode_dilate_factor=1.5, + output=Path("clean_t1.nii.gz"), + ) diff --git a/test/test_mixed.py b/test/test_mixed.py index 59576f6..b1c1467 100644 --- a/test/test_mixed.py +++ b/test/test_mixed.py @@ -1,8 +1,9 @@ +from unittest.mock import MagicMock, patch + import numpy as np from pathlib import Path -from unittest.mock import MagicMock, patch - +import mritk.cli from mritk.mixed import ( compute_mixed_t1_array, extract_mixed_dicom, @@ -121,3 +122,70 @@ def getitem_side_effect(key): # Ensure 
extract_single_volume was called twice (once for each subvolume) assert mock_extract_single.call_count == 2 + + +@patch("mritk.mixed.dicom_to_mixed") +def test_dispatch_dcm2mixed_defaults(mock_dicom_to_mixed): + """Test the dcm2mixed command using default subvolumes.""" + + mritk.cli.main(["mixed", "dcm2mixed", "-i", "input_mixed.dcm", "-o", "output_base"]) + + mock_dicom_to_mixed.assert_called_once() + args, kwargs = mock_dicom_to_mixed.call_args + assert kwargs["dcmpath"] == Path("input_mixed.dcm") + assert kwargs["outpath"] == Path("output_base") + # Since we didn't provide -s, it should default to the VOLUME_LABELS list + assert isinstance(kwargs["subvolumes"], list) + assert len(kwargs["subvolumes"]) > 0 + + +@patch("mritk.mixed.dicom_to_mixed") +def test_dispatch_dcm2mixed_explicit_subvolumes(mock_dicom_to_mixed): + """Test the dcm2mixed command with explicit subvolume arguments.""" + + mritk.cli.main(["mixed", "dcm2mixed", "-i", "input_mixed.dcm", "-o", "output_base", "-s", "SE-modulus", "IR-real"]) + + mock_dicom_to_mixed.assert_called_once_with( + dcmpath=Path("input_mixed.dcm"), outpath=Path("output_base"), subvolumes=["SE-modulus", "IR-real"] + ) + + +@patch("mritk.mixed.mixed_t1map") +def test_dispatch_mixed_t1(mock_mixed_t1map): + """Test the t1 generation command checking types and defaults.""" + + mritk.cli.main( + [ + "mixed", + "t1", + "-s", + "se_modulus.nii.gz", + "-i", + "ir_real.nii.gz", + "-m", + "meta.json", + "-o", + "t1_map.nii.gz", + # Omitting --t1-low and --t1-high to test the defaults (500.0 and 5000.0) + ] + ) + + mock_mixed_t1map.assert_called_once_with( + SE_nii_path=Path("se_modulus.nii.gz"), + IR_nii_path=Path("ir_real.nii.gz"), + meta_path=Path("meta.json"), + T1_low=500.0, + T1_high=5000.0, + output=Path("t1_map.nii.gz"), + ) + + +@patch("mritk.mixed.mixed_t1map_postprocessing") +def test_dispatch_mixed_postprocess(mock_mixed_postprocessing): + """Test the postprocessing command passes paths correctly.""" + + 
mritk.cli.main(["mixed", "postprocess", "-s", "se_modulus.nii.gz", "-t", "t1_raw.nii.gz", "-o", "t1_masked.nii.gz"]) + + mock_mixed_postprocessing.assert_called_once_with( + SE_nii_path=Path("se_modulus.nii.gz"), T1_path=Path("t1_raw.nii.gz"), output=Path("t1_masked.nii.gz") + ) diff --git a/test/test_r1.py b/test/test_r1.py index 6151748..48dd8db 100644 --- a/test/test_r1.py +++ b/test/test_r1.py @@ -4,14 +4,18 @@ # Copyright (C) 2026 Cécile Daversin-Catty (cecile@simula.no) # Copyright (C) 2026 Simula Research Laboratory +from pathlib import Path +from unittest.mock import patch + import numpy as np import pytest +import mritk.cli from mritk.data import MRIData from mritk.r1 import ( compute_r1_array, - convert_T1_to_R1, - T1_to_R1, + convert_t1_to_r1, + t1_to_r1, ) @@ -51,7 +55,7 @@ def test_convert_t1_to_r1_mridata(): affine = np.eye(4) mri = MRIData(data=t1_data, affine=affine) - r1_mri = convert_T1_to_R1(mri, scale=1000.0) + r1_mri = convert_t1_to_r1(mri, scale=1000.0) expected_r1 = np.array([[[1.0, 0.5]]]) @@ -63,4 +67,58 @@ def test_t1_to_r1_invalid_input(): """Test the wrapper function throws ValueError on an invalid type input.""" with pytest.raises(ValueError, match="Input should be a Path or MRIData"): # Explicitly passing a raw string instead of Path/MRIData - T1_to_R1(input_mri="not_a_path_or_mridata") + t1_to_r1(input_mri="not_a_path_or_mridata") + + +@patch("mritk.r1.t1_to_r1") +def test_dispatch_t1_to_r1_defaults(mock_t1_to_r1): + """Test the T1 to R1 CLI command using default scaling and threshold values.""" + + mritk.cli.main(["t12r1", "-i", "input_t1.nii.gz", "-o", "output_r1.nii.gz"]) + + # Verify the underlying function was called with parsed Paths and the correct defaults + mock_t1_to_r1.assert_called_once_with( + input_mri=Path("input_t1.nii.gz"), + output=Path("output_r1.nii.gz"), + scale=1000.0, # Default value + t1_low=1.0, # Default value + t1_high=float("inf"), # Default value + ) + + +@patch("mritk.r1.t1_to_r1") +def 
test_dispatch_t1_to_r1_explicit_args(mock_t1_to_r1): + """Test the T1 to R1 CLI command with all arguments explicitly provided.""" + + mritk.cli.main( + [ + "t12r1", + "--input", + "input_t1.nii.gz", + "--output", + "output_r1.nii.gz", + "--scale", + "500.0", + "--t1-low", + "50.5", + "--t1-high", + "6000.0", + ] + ) + + # Verify the underlying function received the explicit overrides and float conversions + mock_t1_to_r1.assert_called_once_with( + input_mri=Path("input_t1.nii.gz"), output=Path("output_r1.nii.gz"), scale=500.0, t1_low=50.5, t1_high=6000.0 + ) + + +@patch("mritk.r1.t1_to_r1") +def test_dispatch_t1_to_r1_no_output(mock_t1_to_r1): + """Test the T1 to R1 CLI command when the optional output argument is omitted.""" + + mritk.cli.main(["t12r1", "-i", "input_t1.nii.gz"]) + + # Verify that output defaults to None when not provided + mock_t1_to_r1.assert_called_once_with( + input_mri=Path("input_t1.nii.gz"), output=None, scale=1000.0, t1_low=1.0, t1_high=float("inf") + ) From c3dcdac2933620407166dd86f0b9f717cff8bac2 Mon Sep 17 00:00:00 2001 From: Henrik Finsberg Date: Sat, 7 Mar 2026 22:55:05 +0100 Subject: [PATCH 16/29] Add documentation --- .gitignore | 3 ++ _toc.yml | 5 +++ conf.py | 7 +++- docs/api.rst | 68 ++++++++++++++++----------------- docs/concentration.md | 71 ++++++++++++++++++++++++++++++++++ docs/datasets.md | 17 +++++++++ docs/hybrid.md | 44 +++++++++++++++++++++ docs/info.md | 24 +++++++++--- docs/looklocker.md | 83 ++++++++++++++++++++++++++++++++++++++++ docs/mixed.md | 84 +++++++++++++++++++++++++++++++++++++++++ docs/r1.md | 31 +++++++++++++++ docs/show.md | 28 +++++++++----- pyproject.toml | 3 +- src/mritk/looklocker.py | 19 +++++----- src/mritk/utils.py | 61 +----------------------------- 15 files changed, 426 insertions(+), 122 deletions(-) create mode 100644 docs/concentration.md create mode 100644 docs/hybrid.md create mode 100644 docs/looklocker.md create mode 100644 docs/mixed.md create mode 100644 docs/r1.md diff --git 
a/.gitignore b/.gitignore index ad4a1f1..3029439 100644 --- a/.gitignore +++ b/.gitignore @@ -1,3 +1,6 @@ +test/FreeSurferColorLUT.txt +.DS_Store +.jupyter_cache/ # Created by https://www.toptal.com/developers/gitignore/api/python # Edit at https://www.toptal.com/developers/gitignore?templates=python diff --git a/_toc.yml b/_toc.yml index a0cce98..c64cf68 100644 --- a/_toc.yml +++ b/_toc.yml @@ -11,6 +11,11 @@ parts: - file: "docs/info.md" # About the mritk info command - file: "docs/show.md" # About the mritk show command - file: "docs/napari.md" # About the mritk napari command + - file: "docs/looklocker.md" # About the mritk looklocker command + - file: "docs/mixed.md" # About the mritk mixed command + - file: "docs/r1.md" # About the mritk r1 command + - file: "docs/hybrid.md" # About the mritk hybrid command + - file: "docs/concentration.md" # About the mritk concentration command - caption: Community chapters: diff --git a/conf.py b/conf.py index 19e2808..0285920 100644 --- a/conf.py +++ b/conf.py @@ -19,11 +19,15 @@ "third_party/*", "jupyter_execute/", "**.jupyter_cache", + "venv/*", + "test-*htmlcov/*", + "new-test-data/*", + "mritk-test-data/*", ] extensions = [ "sphinx_togglebutton", "sphinx_copybutton", - "myst_parser", + "myst_nb", "sphinx_comments", "sphinx_external_toc", "sphinx.ext.intersphinx", @@ -35,6 +39,7 @@ "sphinxcontrib.bibtex", "sphinx_codeautolink", "sphinx_multitoc_numbering", + "sphinxcontrib.mermaid", ] myst_enable_extensions = [ "amsmath", diff --git a/docs/api.rst b/docs/api.rst index 25a2bf1..49f5e85 100644 --- a/docs/api.rst +++ b/docs/api.rst @@ -39,85 +39,81 @@ show :members: :inherited-members: +concentration +------------- -statistics ----------- - -.. automodule:: mritk.statistics +.. automodule:: mritk.concentration :members: :inherited-members: -.. automodule:: mritk.statistics.utils - :members: - :inherited-members: +mixed +----- -.. automodule:: mritk.statistics.compute_stats +.. 
automodule:: mritk.mixed :members: :inherited-members: -.. automodule:: mritk.statistics.cli +looklocker +---------- + +.. automodule:: mritk.looklocker :members: :inherited-members: +utils +----- -data ----- - -.. automodule:: mritk.data +.. automodule:: mritk.utils :members: :inherited-members: -.. automodule:: mritk.data.io - :members: - :inherited-members: +hybrid +------ -.. automodule:: mritk.data.orientation +.. automodule:: mritk.hybrid :members: :inherited-members: -.. automodule:: mritk.data.base - :members: - :inherited-members: -segmentation ------------- +statistics +---------- -.. automodule:: mritk.segmentation +.. automodule:: mritk.statistics :members: :inherited-members: -.. automodule:: mritk.segmentation.groups +.. automodule:: mritk.statistics.utils :members: :inherited-members: -.. automodule:: mritk.segmentation.lookup_table +.. automodule:: mritk.statistics.compute_stats :members: :inherited-members: -t1 --- - -.. automodule:: mritk.t1 +.. automodule:: mritk.statistics.cli :members: :inherited-members: -.. automodule:: mritk.t1.concentration - :members: - :inherited-members: -.. automodule:: mritk.t1.mixed +data +---- + +.. automodule:: mritk.data :members: :inherited-members: -.. automodule:: mritk.t1.looklocker +segmentation +------------ + +.. automodule:: mritk.segmentation :members: :inherited-members: -.. automodule:: mritk.t1.utils +.. automodule:: mritk.segmentation.groups :members: :inherited-members: -.. automodule:: mritk.t1.hybrid +.. 
automodule:: mritk.segmentation.lookup_table :members: :inherited-members: diff --git a/docs/concentration.md b/docs/concentration.md new file mode 100644 index 0000000..bfc0598 --- /dev/null +++ b/docs/concentration.md @@ -0,0 +1,71 @@ +--- +jupytext: + formats: md:myst + text_representation: + extension: .md + format_name: myst +kernelspec: + display_name: Python 3 + language: python + name: python3 +--- +# Concentration Mapping + +The Concentration module calculates the spatial distribution of a contrast agent (like gadobutrol) in the brain. + +Concentration $C$ can be estimated voxel-wise from longitudinal relaxation data comparing a post-contrast session to a pre-contrast (baseline) session. mritk supports two calculation pathways depending on whether you are working with $T_1$ times or $R_1$ rates (see R1 Maps). + +$$\frac{1}{T_1} = \frac{1}{T_{10}} + r_1 C \quad \implies \quad C = \frac{1}{r_1} \left(R_1 - R_{10}\right)$$ + +where $r_1$ is the relaxivity of the contrast agent (default: 3.2 to 4.5 $\text{s}^{-1}\text{mmol}^{-1}$). + +## Pipeline Overview + +```{mermaid} +graph TD + A[Pre-Contrast Hybrid T1] -->|T1 method| C{Compute Concentration} + B[Post-Contrast Hybrid T1] -->|T1 method| C + + A2[Pre-Contrast R1] -->|R1 method| C + B2[Post-Contrast R1] -->|R1 method| C + + M[Brain/Intracranial Mask] -.->|Optional| C + + C --> D(Tracer Concentration Map NIfTI) +``` + +## Commands + +```{code-cell} shell +!mritk concentration --help +``` + + +### 1. From $T_1$ Maps (t1) + +Calculates concentration directly from $T_1$ maps (in milliseconds). The command handles the inversion safely and avoids division-by-zero errors for background voxels. + +```{code-cell} shell +!mritk concentration t1 --help +``` + +#### Example Command + +```shell +mritk concentration t1 -i path/to/post_t1.nii.gz -r path/to/pre_t1.nii.gz -o path/to/concentration.nii.gz --r1 0.0045 --mask path/to/intracranial_mask.nii.gz +``` + + +### 2. 
From $R_1$ Maps (r1) + +Calculates concentration from pre-computed $R_1$ maps. This is mathematically equivalent but slightly faster if $R_1$ maps are already available. + +```{code-cell} shell +!mritk concentration r1 --help +``` + +#### Example Command + +```shell +mritk concentration r1 -i path/to/post_r1.nii.gz -r path/to/pre_r1.nii.gz -o path/to/concentration.nii.gz --r1 0.0045 +``` diff --git a/docs/datasets.md b/docs/datasets.md index d5636db..8db2594 100644 --- a/docs/datasets.md +++ b/docs/datasets.md @@ -1,7 +1,24 @@ +--- +jupytext: + formats: md:myst + text_representation: + extension: .md + format_name: myst +kernelspec: + display_name: Python 3 + language: python + name: python3 +--- + # Datasets The `datasets` subcommand provides tools for listing and downloading example datasets. + +```{code-cell} shell +!mritk datasets --help +``` + To list available datasets, use: ```bash diff --git a/docs/hybrid.md b/docs/hybrid.md new file mode 100644 index 0000000..add1bd5 --- /dev/null +++ b/docs/hybrid.md @@ -0,0 +1,44 @@ +--- +jupytext: + formats: md:myst + text_representation: + extension: .md + format_name: myst +kernelspec: + display_name: Python 3 + language: python + name: python3 +--- + +# Hybrid $T_1$ Maps + +To achieve accurate $T_1$ measurements across the entire brain space, mritk combines the Look-Locker (LL) and Mixed sequence $T_1$ maps into a single Hybrid map. + +Look-Locker is used for short $T_1$ values (brain tissue and regions with high tracer concentrations). + +Mixed Sequence is used for long $T_1$ values (CSF). + +The hybrid command seamlessly merges the two images based on a user-defined threshold (default: 1500 ms) and a specific anatomical mask (typically a CSF mask). 
+ +## Pipeline Overview + +```{mermaid} +graph LR + A(Look-Locker T1 Map) --> D{Hybrid Merge} + B(Mixed T1 Map) --> D + C(CSF Mask) --> D + D -->|Threshold > 1500ms| E(Hybrid T1 Map) +``` + +## Command Usage + + +```{code-cell} shell +!mritk hybrid --help +``` + +## Example Command + +```shell +mritk hybrid -i path/to/ll_t1.nii.gz -m path/to/mixed_t1.nii.gz -c path/to/csf_mask.nii.gz -o path/to/hybrid_t1.nii.gz --threshold 1500.0 +``` diff --git a/docs/info.md b/docs/info.md index 09eac20..2ddc990 100644 --- a/docs/info.md +++ b/docs/info.md @@ -1,18 +1,30 @@ +--- +jupytext: + formats: md:myst + text_representation: + extension: .md + format_name: myst +kernelspec: + display_name: Python 3 + language: python + name: python3 +--- + # Info command The `info` command allows you to quickly inspect the metadata of an MRI file. It displays the image shape, voxel size, data type, and the affine transformation matrix. ## Usage -```bash -mritk info [OPTIONS] +```{code-cell} shell +!mritk info --help ``` -**Arguments:** -* `file`: Path to the file to display information about. +### Example Command -**Options:** -* `--json`: Output information in JSON format. Useful for programmatic parsing. +```bash +mritk info path/to/image.nii.gz +``` ![info](https://github.com/user-attachments/assets/fc0e734d-3c94-48fa-8e25-3e65bfc86ebe) diff --git a/docs/looklocker.md b/docs/looklocker.md new file mode 100644 index 0000000..6722dc2 --- /dev/null +++ b/docs/looklocker.md @@ -0,0 +1,83 @@ +--- +jupytext: + formats: md:myst + text_representation: + extension: .md + format_name: myst +kernelspec: + display_name: Python 3 + language: python + name: python3 +--- + +# Look-Locker $T_1$ Mapping + +The Look-Locker (LL) module is used to estimate $T_1$ relaxation times from a 4D Look-Locker inversion recovery dataset. 
+ +As described in the Gonzo dataset, the Look-Locker sequence provides excellent accuracy for tissues with short $T_1$ times (such as gray/white matter and regions with high tracer concentrations). The toolkit computes the $T_1$ time voxel-wise by fitting a theoretical recovery curve to the longitudinal magnetization signal. + +## Pipeline Overview + +```{mermaid} +graph TD + A[Raw Look-Locker DICOM] -->|dcm2ll| B(4D Look-Locker NIfTI) + A -->|dcm2ll| C(Trigger Times .txt) + B -->|t1| D(Raw T1 Map NIfTI) + C -->|t1| D + D -->|postprocess| E(Cleaned T1 Map NIfTI) +``` + +## Commands + +```{code-cell} shell +!mritk looklocker --help +``` + + +### 1. DICOM to NIfTI (dcm2ll) + +Converts scanner-native Look-Locker DICOM files to a standardized 4D NIfTI format and extracts the nominal cardiac trigger delay times into a sidecar text file. + +```{code-cell} shell +!mritk looklocker dcm2ll --help +``` + +#### Example Command + +```bash +mritk looklocker dcm2ll -i path/to/looklocker.dcm -o path/to/ll_output.nii.gz +``` + +### 2. Compute $T_1$ Map (t1) + +Fits the voxel-wise Levenberg-Marquardt optimization curve to estimate $T_1$ times (in milliseconds) from the 4D Look-Locker NIfTI. + +```{code-cell} shell +!mritk looklocker t1 --help +``` + +#### Example Command + +```bash +mritk looklocker t1 -i path/to/ll_output.nii.gz -t path/to/ll_output_trigger_times.txt -o path/to/t1_raw.nii.gz +``` + +### 3. Post-Processing (postprocess) + +Raw $T_1$ maps often contain noisy fits or values outside physiological boundaries. The postprocess command applies a quality-control pipeline that: + +Automatically masks the brain/head (if no explicit mask is provided). + +Removes extreme outliers (default bounds: 100 ms to 10000 ms). + +Iteratively fills internal NaNs (holes) using a smart Gaussian filter. 
+ +```{code-cell} shell +!mritk looklocker postprocess --help +``` + +#### Example Command + +```bash +mritk looklocker postprocess -i path/to/t1_raw.nii.gz -o path/to/t1_clean.nii.gz --t1-low 100.0 --t1-high 5000.0 +``` diff --git a/docs/mixed.md b/docs/mixed.md new file mode 100644 index 0000000..2769c25 --- /dev/null +++ b/docs/mixed.md @@ -0,0 +1,84 @@ +--- +jupytext: + formats: md:myst + text_representation: + extension: .md + format_name: myst +kernelspec: + display_name: Python 3 + language: python + name: python3 +--- + +# Mixed Sequence $T_1$ Mapping + +The Mixed sequence module estimates $T_1$ times by combining a Spin-Echo (SE) and an Inversion-Recovery (IR) acquisition. + +While the Look-Locker sequence struggles with long relaxation times due to short acquisition windows, the Mixed sequence is specifically designed to accurately estimate long $T_1$ times, such as those found in Cerebrospinal Fluid (CSF). $T_1$ is estimated by solving the non-linear ratio of the IR and SE signals. + +```{caution} +Because the Mixed sequence is highly sensitive to noise in short $T_1$ tissues (like gray matter), the resulting $T_1$ map must be post-processed to mask out non-CSF areas. +``` + +## Pipeline Overview + +```{mermaid} +graph TD + A[Raw Mixed DICOM] -->|dcm2mixed| B(SE Modulus NIfTI) + A -->|dcm2mixed| C(IR Real NIfTI) + A -->|dcm2mixed| D(Metadata JSON) + B -->|t1| E(Raw Mixed T1 Map) + C -->|t1| E + D -->|t1| E + E -->|postprocess| F(Masked CSF T1 Map) + B -->|postprocess| F +``` + +## Commands + + +```{code-cell} shell +!mritk mixed --help +``` + +### 1. DICOM to NIfTI (dcm2mixed) + +Splits a Mixed sequence DICOM file into its independent subvolumes (e.g., SE-modulus, IR-real) and saves the required sequence timing metadata (TR, TE, TI, ETL) into a JSON sidecar. + +```{code-cell} shell +!mritk mixed dcm2mixed --help +``` + +#### Example Command + + +```bash +mritk mixed dcm2mixed -i path/to/mixed.dcm -o path/to/output_base +``` + +### 2. 
Compute $T_1$ Map (t1) + +Generates the $T_1$ map based on the signal ratio between the Inversion-Recovery and Spin-Echo sequences. + +```{code-cell} shell +!mritk mixed t1 --help +``` + +#### Example Command + +```bash +mritk mixed t1 -s path/to/output_base_SE-modulus.nii.gz -i path/to/output_base_IR-corrected-real.nii.gz -m path/to/output_base_meta.json -o path/to/mixed_t1_raw.nii.gz +``` + +### 3. Post-Processing (postprocess) + +Masks out non-fluid areas from the Mixed $T_1$ map. It derives a mask dynamically from the original SE sequence using Li thresholding and erodes the mask to avoid partial-volume effects at tissue boundaries. + +```{code-cell} shell +!mritk mixed postprocess --help +``` + +#### Example Command +```bash +mritk mixed postprocess -s path/to/output_base_SE-modulus.nii.gz -t path/to/mixed_t1_raw.nii.gz -o path/to/mixed_t1_clean.nii.gz +``` diff --git a/docs/r1.md b/docs/r1.md new file mode 100644 index 0000000..cccd4c5 --- /dev/null +++ b/docs/r1.md @@ -0,0 +1,31 @@ +# $R_1$ Relaxation Rates + +The $R_1$ module provides utilities to convert longitudinal relaxation times ($T_1$) into relaxation rates ($R_1$). + +The fundamental relationship is $R_1 = \frac{1}{T_1}$. In MRI studies utilizing contrast agents, $R_1$ scales linearly with the tracer concentration, making it a highly convenient format for transport simulations and Concentration Maps. + +## Pipeline Overview + +```{mermaid} +graph LR + A(T1 Map NIfTI) -->|t1-to-r1| B(R1 Map NIfTI) + B --> C(Concentration Estimation) +``` + +## Command Usage + +```{code-cell} shell +!mritk t1-to-r1 --help +``` + + +### Example Command + +```shell +mritk t1-to-r1 -i path/to/hybrid_t1.nii.gz -o path/to/hybrid_r1.nii.gz --scale 1000.0 +``` + + +## Next Steps + +Once you have generated $R_1$ maps for both your baseline and post-contrast sessions, you can generate concentration maps. See the Concentration Documentation for more details. 
diff --git a/docs/show.md b/docs/show.md index 6815f63..24ac8b2 100644 --- a/docs/show.md +++ b/docs/show.md @@ -1,3 +1,15 @@ +--- +jupytext: + formats: md:myst + text_representation: + extension: .md + format_name: myst +kernelspec: + display_name: Python 3 + language: python + name: python3 +--- + # Show Command The `show` command provides a quick way to visualize MRI data directly in your terminal. @@ -15,18 +27,16 @@ ## Usage - ```bash - mritk show [OPTIONS] + ```{code-cell} shell + !mritk show --help ``` - **Arguments:** - * `file`: Path to the MRI file to show. + ### Example Command + + ```bash + mritk show path/to/image.nii.gz + ``` - **Options:** - * `--cmap `: Colormap to use (default: `gray`). - * `--slice-x `: Relative position (0-1) of the sagittal slice (default: `0.5`). - * `--slice-y `: Relative position (0-1) of the coronal slice (default: `0.5`). - * `--slice-z `: Relative position (0-1) of the axial slice (default: `0.5`). %% [markdown] ## Example diff --git a/pyproject.toml b/pyproject.toml index 7f2ff86..4c5daab 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -57,8 +57,9 @@ docs = [ "linkify-it-py", "sphinx-design", "sphinx-book-theme", - "myst-parser", + "myst-nb", "sphinx-multitoc-numbering", + "sphinxcontrib-mermaid", ] all = ["mritk[extra,test,pypi,show,napari]"] diff --git a/src/mritk/looklocker.py b/src/mritk/looklocker.py index d3822f6..25089ea 100644 --- a/src/mritk/looklocker.py +++ b/src/mritk/looklocker.py @@ -147,15 +147,6 @@ def looklocker_t1map_postprocessing( """ Performs quality-control and post-processing on a raw Look-Locker T1 map. - This function cleans up noisy T1 fits by applying a three-step pipeline: - 1. Masking: If no mask is provided, it automatically isolates the brain/head by - finding the largest contiguous tissue island and applying morphological smoothing. - 2. Outlier Removal: Voxels falling outside the provided physiological bounds - [T1_low, T1_high] are discarded (set to NaN). - 3. 
Interpolation: Internal "holes" (NaNs) created by poor fits or outlier - removal are iteratively filled using a specialized Gaussian filter that - interpolates from surrounding valid tissue without blurring the edges. - Args: T1map (Path): Path to the raw, unmasked Look-Locker T1 map NIfTI file. T1_low (float): Lower physiological limit for T1 values (in ms). @@ -174,6 +165,16 @@ def looklocker_t1map_postprocessing( Raises: RuntimeError: If more than 99% of the voxels are removed during the outlier filtering step, indicating a likely unit mismatch (e.g., T1 in seconds instead of ms). + + Notes: + This function cleans up noisy T1 fits by applying a three-step pipeline: + 1. Masking: If no mask is provided, it automatically isolates the brain/head by + finding the largest contiguous tissue island and applying morphological smoothing. + 2. Outlier Removal: Voxels falling outside the provided physiological bounds + [T1_low, T1_high] are discarded (set to NaN). + 3. Interpolation: Internal "holes" (NaNs) created by poor fits or outlier + removal are iteratively filled using a specialized Gaussian filter that + interpolates from surrounding valid tissue without blurring the edges. """ t1map_mri = MRIData.from_file(T1map, dtype=np.single) t1map_data = t1map_mri.data.copy() diff --git a/src/mritk/utils.py b/src/mritk/utils.py index 016b815..4997531 100644 --- a/src/mritk/utils.py +++ b/src/mritk/utils.py @@ -14,7 +14,6 @@ import warnings import logging from scipy.optimize import OptimizeWarning -import nibabel VOLUME_LABELS = [ @@ -88,7 +87,7 @@ def curve_fit_wrapper(f, t: np.ndarray, y: np.ndarray, p0: np.ndarray): Returns: np.ndarray: Optimal values for the parameters so that the sum of - the squared residuals of f(xdata, *popt) - ydata is minimized. + the squared residuals of :code:`f(xdata, *popt) - ydata` is minimized. 
""" with warnings.catch_warnings(): warnings.simplefilter("error", OptimizeWarning) @@ -212,64 +211,6 @@ def T1_lookup_table(TRse: float, TI: float, TE: float, ETL: int, T1_low: float, return fractionCurve, T1_grid -def compare_nifti_images(img_path1: Path, img_path2: Path, data_tolerance: float = 0.0) -> bool: - """ - Compares two NIfTI images for equality of data arrays. - - Provides a robust way to check if two NIfTI files contain identical - voxel data, accounting for potential NaNs and floating-point inaccuracies. - - Args: - img_path1 (Path): Path to the first NIfTI file. - img_path2 (Path): Path to the second NIfTI file. - data_tolerance (float, optional): Absolute tolerance for floating-point - comparisons. Use 0.0 for exact mathematical equality. Defaults to 0.0. - - Returns: - bool: True if images are considered the same, False otherwise. - - Raises: - AssertionError: If files exist but the data deviates beyond `data_tolerance`. - FileNotFoundError: If either of the provided file paths does not exist. - """ - if not img_path1.exists(): - raise FileNotFoundError(f"File not found: {img_path1}") - if not img_path2.exists(): - raise FileNotFoundError(f"File not found: {img_path2}") - - img1 = nibabel.load(img_path1) - img2 = nibabel.load(img_path2) - - # 1. Compare Image Data - data1 = img1.get_fdata() - data2 = img2.get_fdata() - - return compare_nifti_arrays(data1, data2, data_tolerance) - - -def compare_nifti_arrays(arr1: np.ndarray, arr2: np.ndarray, data_tolerance: float = 0.0) -> bool: - """ - Compares two NIfTI data arrays for equality, accounting for NaNs and tolerance. - - Args: - arr1 (np.ndarray): The first data array to compare. - arr2 (np.ndarray): The second data array to compare. - data_tolerance (float, optional): Absolute tolerance for floating-point - comparisons. Use 0.0 for exact mathematical equality. Defaults to 0.0. - - Returns: - bool: True if arrays are considered the same, False otherwise. 
- """ - # Convert NaN to zero (can have NaNs in concentration maps) - arr1 = np.nan_to_num(arr1, nan=0.0) - arr2 = np.nan_to_num(arr2, nan=0.0) - - if data_tolerance > 0: - return np.allclose(arr1, arr2, atol=data_tolerance) - else: - return np.array_equal(arr1, arr2) - - def run_dcm2niix(input_path: Path, output_dir: Path, form: str, extra_args: str = "", check: bool = True): """ Utility wrapper to execute the dcm2niix command-line tool securely. From fe4cf7fe3eca43097eaeb458a23a86580b718c72 Mon Sep 17 00:00:00 2001 From: Henrik Finsberg Date: Sat, 7 Mar 2026 23:05:18 +0100 Subject: [PATCH 17/29] Remove .flake8 file --- .flake8 | 7 ------- 1 file changed, 7 deletions(-) delete mode 100644 .flake8 diff --git a/.flake8 b/.flake8 deleted file mode 100644 index f36b47e..0000000 --- a/.flake8 +++ /dev/null @@ -1,7 +0,0 @@ -[flake8] -# Ignore specific error codes globally -ignore = E501 - -# Exclude specific directories from being scanned at all -exclude = - .git, From e7b429dc046b30977b16b222ce32999dfdfd84a8 Mon Sep 17 00:00:00 2001 From: Henrik Finsberg Date: Sat, 7 Mar 2026 23:11:52 +0100 Subject: [PATCH 18/29] Update README --- README.md | 34 +++++++++++++++++++++++++++------- 1 file changed, 27 insertions(+), 7 deletions(-) diff --git a/README.md b/README.md index df427ac..0b87f19 100644 --- a/README.md +++ b/README.md @@ -1,6 +1,8 @@ # MRI-Toolkit -`MRI-toolkit` provides a set of features dedicated to human MRI data post-processing and analysis. The implementation is based on [gMRI2FEM](https://github.com/jorgenriseth/gMRI2FEM). +`MRI-toolkit` provides a set of features dedicated to MRI data post-processing and analysis. + +The implementation is inspired by [gMRI2FEM](https://github.com/jorgenriseth/gMRI2FEM), and some of the code is taken from that project. However, `MRI-toolkit` is designed to be more modular and extensible, with a focus on providing a user-friendly command-line interface (CLI) for common MRI processing tasks. 
## Installation @@ -22,9 +24,27 @@ To get started with `mri-toolkit`, you can use the command-line interface (CLI) ## Features -* **File Inspection**: detailed NIfTI header analysis (affine, voxel size, shape). -* **Statistics**: Compute comprehensive statistics (volume, mean, median, std, percentiles) for MRI regions based on segmentation maps. -* **Visualization**: - * **Terminal**: View orthogonal slices (Sagittal, Coronal, Axial) directly in your console. - * **Napari**: Launch the Napari viewer for interactive 3D inspection. -* **Data Management**: Utilities to download test datasets. + +- File Inspection: detailed NIfTI header analysis (affine, voxel size, shape). + +- $T_1$ Mapping: Estimate $T_1$ relaxation times using Look-Locker or Mixed sequences, and seamlessly merge them into comprehensive Hybrid $T_1$ maps. + +- $R_1$ Relaxation Rates: Convert $T_1$ maps into $R_1$ relaxation rate maps for linear scaling with tracer concentrations. + +- Concentration Mapping: Calculate the spatial distribution of contrast agents (e.g., gadobutrol) utilizing pre- and post-contrast $T_1$ or $R_1$ maps. + +- Statistics: Compute comprehensive statistics (volume, mean, median, std, percentiles) for MRI regions based on segmentation maps. + +- Visualization: + + - Terminal: View orthogonal slices (Sagittal, Coronal, Axial) directly in your console. + + - Napari: Launch the Napari viewer for interactive 3D inspection. + +- Data Management: Utilities to download datasets. + +## Contributing +Contributions to `MRI-toolkit` are welcome! If you have an idea for a new feature, improvement, or bug fix, please open an issue or submit a pull request on GitHub. For more details on how to contribute, please see the [Contributing Guide](CONTRIBUTING.md). + +## License +`MRI-toolkit` is licensed under the MIT License. See the [LICENSE](LICENSE) file for more information. 
From 342ece8beca42a42e2d7a6e3b792afa5f9db8748 Mon Sep 17 00:00:00 2001 From: Henrik Finsberg Date: Sun, 8 Mar 2026 20:58:31 +0100 Subject: [PATCH 19/29] Start adding better logging --- docs/concentration.md | 17 +++++++++++++++++ src/mritk/cli.py | 30 ++++++++++++++++++++++++++---- src/mritk/concentration.py | 37 +++++++++++++++++++++++++++++++++---- src/mritk/datasets.py | 10 +++++++++- 4 files changed, 85 insertions(+), 9 deletions(-) diff --git a/docs/concentration.md b/docs/concentration.md index bfc0598..a33a88c 100644 --- a/docs/concentration.md +++ b/docs/concentration.md @@ -55,6 +55,23 @@ Calculates concentration directly from $T_1$ maps (in milliseconds). The command mritk concentration t1 -i path/to/post_t1.nii.gz -r path/to/pre_t1.nii.gz -o path/to/concentration.nii.gz --r1 0.0045 --mask path/to/intracranial_mask.nii.gz ``` +Gonzo: + +```shell +mritk concentration t1 \ + -i gonzo/mri-processed/mri_processed_data/sub-01/T1maps/sub-01_ses-02_T1map_hybrid.nii.gz \ + -r gonzo/mri-processed/mri_processed_data/sub-01/T1maps/sub-01_ses-01_T1map_hybrid.nii.gz \ + -o sub-01_ses-02_concentration.nii.gz \ + --r1 0.0032 \ + --mask gonzo/mri-processed/mri_processed_data/sub-01/segmentations/sub-01_seg-intracranial_binary.nii.gz +``` + +mritk concentration t1 \ + -i new-test-data/mri-processed/mri_processed_data/sub-01/T1maps/sub-01_ses-02_T1map_hybrid.nii.gz \ + -r new-test-data/mri-processed/mri_processed_data/sub-01/T1maps/sub-01_ses-01_T1map_hybrid.nii.gz \ + -o sub-01_ses-02_concentration.nii.gz \ + --r1 0.0032 \ + --mask new-test-data/mri-processed/mri_processed_data/sub-01/segmentations/sub-01_seg-intracranial_binary.nii.gz ### 2. 
From $R_1$ Maps (r1) diff --git a/src/mritk/cli.py b/src/mritk/cli.py index 19f96f4..fd5831c 100644 --- a/src/mritk/cli.py +++ b/src/mritk/cli.py @@ -1,3 +1,5 @@ +"""MRI-toolkit provides a set of features dedicated to MRI data post-processing and analysis.""" + import logging from importlib.metadata import metadata from pathlib import Path @@ -5,6 +7,7 @@ from typing import Sequence, Optional from rich_argparse import RichHelpFormatter +from rich.logging import RichHandler from . import datasets, info, statistics, show, napari, looklocker, hybrid, mixed, r1, concentration @@ -41,15 +44,21 @@ def version_info(): console.print(table) +def add_extra_arguments(parser: argparse.ArgumentParser): + parser.add_argument("--no-rich", action="store_true", help="Disable rich logging and use standard console output.") + parser.add_argument("--logfile", type=Path, help="Path to a log file to save logs (optional).") + parser.add_argument("-v", "--verbose", action="store_true", help="Enable verbose logging") + + def setup_parser(): - parser = argparse.ArgumentParser(formatter_class=RichHelpFormatter) + parser = argparse.ArgumentParser(description=__doc__, formatter_class=RichHelpFormatter) parser.add_argument("--version", action="store_true") subparsers = parser.add_subparsers(dest="command") # Download test data parser datasets_parser = subparsers.add_parser("datasets", help="Download datasets", formatter_class=parser.formatter_class) - datasets.add_arguments(datasets_parser) + datasets.add_arguments(datasets_parser, extra_args_cb=add_extra_arguments) info_parser = subparsers.add_parser("info", help="Display information about a file", formatter_class=parser.formatter_class) info_parser.add_argument("file", type=Path, help="File to display information about") @@ -88,7 +97,7 @@ def setup_parser(): concentration_parser = subparsers.add_parser( "concentration", help="Compute concentration maps.", formatter_class=parser.formatter_class ) - 
concentration.add_arguments(concentration_parser) + concentration.add_arguments(concentration_parser, extra_args_cb=add_extra_arguments) return parser @@ -99,7 +108,20 @@ def dispatch(parser: argparse.ArgumentParser, argv: Optional[Sequence[str]] = No if args.pop("version"): version_info() return 0 - logging.basicConfig(level=logging.INFO, format="%(asctime)s - %(levelname)s - %(message)s") + verbose = args.pop("verbose", False) + if verbose: + level = logging.DEBUG + else: + level = logging.INFO + + no_rich = args.pop("no_rich", False) + handlers: list[logging.Handler] = [logging.StreamHandler()] if no_rich else [RichHandler()] + + logfile = args.pop("logfile", None) + if logfile: + handlers.append(logging.FileHandler(logfile)) + + logging.basicConfig(level=level, format="%(asctime)s - %(levelname)s - %(message)s", handlers=handlers) command = args.pop("command") logger = logging.getLogger(__name__) try: diff --git a/src/mritk/concentration.py b/src/mritk/concentration.py index 23b71ef..905c521 100644 --- a/src/mritk/concentration.py +++ b/src/mritk/concentration.py @@ -4,14 +4,18 @@ # Copyright (C) 2026 Cécile Daversin-Catty (cecile@simula.no) # Copyright (C) 2026 Simula Research Laboratory +import argparse +from collections.abc import Callable +from pathlib import Path +import logging import numpy as np -from pathlib import Path from .data import MRIData - from .testing import assert_same_space +logger = logging.getLogger(__name__) + def concentration_from_T1_expr(t1: np.ndarray, t1_0: np.ndarray, r1: float) -> np.ndarray: """ @@ -64,12 +68,15 @@ def compute_concentration_from_T1_array( np.ndarray: A 3D array of computed concentrations. Invalid voxels (unmasked or where T1 <= 1e-10) are set to NaN. 
""" + logger.info("Computing concentration map from T1 arrays") # Create a validity mask: T1 values must be > 1e-10 to safely invert without overflow valid_mask = (t1_data > 1e-10) & (t10_data > 1e-10) - + logger.debug(f"Initial valid voxel count based on T1 thresholds: {np.sum(valid_mask)}") if mask is not None: + logger.debug("Applying additional mask to concentration computation") valid_mask &= mask.astype(bool) + logger.debug(f"Final valid voxel count after applying mask: {np.sum(valid_mask)}") concentrations = np.full_like(t10_data, np.nan, dtype=np.single) # Compute concentration strictly on valid voxels @@ -101,8 +108,19 @@ def concentration_from_T1( Returns: MRIData: An MRIData object containing the concentration array and the affine matrix. """ + logger.info("Computing concentration map from T1 maps.") + logger.debug(f"Input T1 path: {input_path}") + logger.debug(f"Reference T1 path: {reference_path}") + logger.debug(f"Output path: {output_path}") + logger.debug(f"Relaxivity (r1): {r1}") + logger.debug(f"Mask path: {mask_path}") t1_mri = MRIData.from_file(input_path, dtype=np.single) t10_mri = MRIData.from_file(reference_path, dtype=np.single) + + logger.debug(f"Input T1 shape: {t1_mri.data.shape}") + logger.debug(f"Reference T1 shape: {t10_mri.data.shape}") + logger.debug(f"Input T1 affine: {t1_mri.affine}") + logger.debug(f"Reference T1 affine: {t10_mri.affine}") assert_same_space(t1_mri, t10_mri) mask_data = None @@ -116,8 +134,12 @@ def concentration_from_T1( mri_data = MRIData(data=concentrations_array, affine=t10_mri.affine) if output_path is not None: + logger.info(f"Saving concentration map to {output_path}") mri_data.save(output_path, dtype=np.single) + else: + logger.info("No output path provided, returning concentration map as MRIData object without saving.") + return mri_data @@ -199,7 +221,10 @@ def concentration_from_R1( return mri_data -def add_arguments(parser): +def add_arguments( + parser: argparse.ArgumentParser, + extra_args_cb: 
Callable[[argparse.ArgumentParser], None] | None = None, +) -> None: subparsers = parser.add_subparsers(dest="concentration-command", required=True) t1_parser = subparsers.add_parser("t1", help="Compute concentration from T1 maps.", formatter_class=parser.formatter_class) @@ -220,6 +245,10 @@ def add_arguments(parser): r1_parser.add_argument("--r1", type=float, default=0.0045, help="Relaxivity of the contrast agent (default: 0.0045).") r1_parser.add_argument("--mask", type=Path, help="Path to a boolean mask NIfTI file to restrict computation (optional).") + if extra_args_cb is not None: + extra_args_cb(t1_parser) + extra_args_cb(r1_parser) + def dispatch(args): command = args.pop("concentration-command") diff --git a/src/mritk/datasets.py b/src/mritk/datasets.py index c0079e4..10f8cd8 100644 --- a/src/mritk/datasets.py +++ b/src/mritk/datasets.py @@ -4,6 +4,8 @@ # Copyright (C) 2026 Cécile Daversin-Catty (cecile@simula.no) # Copyright (C) 2026 Simula Research Laboratory +from collections.abc import Callable +import argparse import logging from dataclasses import dataclass import zipfile @@ -195,7 +197,10 @@ def list_datasets(): console.print(table) -def add_arguments(parser): +def add_arguments( + parser: argparse.ArgumentParser, + extra_args_cb: Callable[[argparse.ArgumentParser], None] | None = None, +) -> None: subparsers = parser.add_subparsers(dest="datasets-command") download_parser = subparsers.add_parser("download", help="Download a dataset", formatter_class=parser.formatter_class) choices = list(get_datasets().keys()) @@ -216,6 +221,9 @@ def add_arguments(parser): choices=choices, help=f"Dataset to show information about (choices: {', '.join(choices)})", ) + if extra_args_cb is not None: + extra_args_cb(download_parser) + extra_args_cb(info_parser) def dispatch(args): From 217641bd128301f77fa23901ac600316c2f2feef Mon Sep 17 00:00:00 2001 From: Henrik Finsberg Date: Mon, 9 Mar 2026 08:54:03 +0100 Subject: [PATCH 20/29] More logging --- src/mritk/cli.py | 4 
++-- src/mritk/mixed.py | 50 ++++++++++++++++++++++++++++++++++++++-------- src/mritk/r1.py | 21 ++++++++++++++++--- src/mritk/utils.py | 3 +++ 4 files changed, 65 insertions(+), 13 deletions(-) diff --git a/src/mritk/cli.py b/src/mritk/cli.py index fd5831c..e428af8 100644 --- a/src/mritk/cli.py +++ b/src/mritk/cli.py @@ -87,12 +87,12 @@ def setup_parser(): mixed_parser = subparsers.add_parser( "mixed", help="Generate a Mixed T1 map from Look-Locker data.", formatter_class=parser.formatter_class ) - mixed.add_arguments(mixed_parser) + mixed.add_arguments(mixed_parser, extra_args_cb=add_extra_arguments) t1_to_r1_parser = subparsers.add_parser( "t12r1", help="Convert a T1 map to an R1 map.", formatter_class=parser.formatter_class ) - r1.add_arguments(t1_to_r1_parser) + r1.add_arguments(t1_to_r1_parser, extra_args_cb=add_extra_arguments) concentration_parser = subparsers.add_parser( "concentration", help="Compute concentration maps.", formatter_class=parser.formatter_class diff --git a/src/mritk/mixed.py b/src/mritk/mixed.py index 5a49a68..a2cbd61 100644 --- a/src/mritk/mixed.py +++ b/src/mritk/mixed.py @@ -4,10 +4,10 @@ # Copyright (C) 2026 Cécile Daversin-Catty (cecile@simula.no) # Copyright (C) 2026 Simula Research Laboratory - +import argparse +from collections.abc import Callable import json import logging -from typing import Optional from pathlib import Path import numpy as np @@ -39,6 +39,7 @@ def dicom_standard_affine(frame_fg) -> np.ndarray: np.ndarray: A 4x4 affine transformation matrix mapping from DICOM voxel indices to LPS physical coordinates. 
""" + logger.debug("Generating DICOM standard affine matrix from frame functional group metadata.") # Get the original data shape df = float(frame_fg.PixelMeasuresSequence[0].SpacingBetweenSlices) dr, dc = (float(x) for x in frame_fg.PixelMeasuresSequence[0].PixelSpacing) @@ -125,6 +126,7 @@ def mixed_t1map( nibabel.nifti1.Nifti1Image: The computed T1 map as a NIfTI image object, with the qform/sform properly set to scanner space. """ + logger.info(f"Computing Mixed T1 map from SE image {SE_nii_path} and IR image {IR_nii_path} with metadata {meta_path}") se_mri = MRIData.from_file(SE_nii_path, dtype=np.single) ir_mri = MRIData.from_file(IR_nii_path, dtype=np.single) meta = json.loads(meta_path.read_text()) @@ -137,6 +139,9 @@ def mixed_t1map( if output is not None: nibabel.nifti1.save(nii, output) + logger.info(f"Saved Mixed T1 map to {output}") + else: + logger.info("No output path provided, returning T1 map as NIfTI image object") return nii @@ -159,6 +164,7 @@ def mixed_t1map_postprocessing(SE_nii_path: Path, T1_path: Path, output: Path | nibabel.nifti1.Nifti1Image: The masked T1 map, where all non-CSF voxels have been set to NaN. """ + logger.info(f"Starting Mixed T1 map post-processing with SE image {SE_nii_path} and T1 map {T1_path}") t1map_nii = nibabel.nifti1.load(T1_path) se_mri = MRIData.from_file(SE_nii_path, dtype=np.single) @@ -171,6 +177,9 @@ def mixed_t1map_postprocessing(SE_nii_path: Path, T1_path: Path, output: Path | if output is not None: nibabel.nifti1.save(masked_t1map_nii, output) + logger.info(f"Saved masked T1 map to {output}") + else: + logger.info("No output path provided, returning masked T1 map as NIfTI image object") return masked_t1map_nii @@ -189,13 +198,17 @@ def compute_mixed_t1_array(se_data: np.ndarray, ir_data: np.ndarray, meta: dict, Returns: np.ndarray: Computed T1 map as a 3D float32 array. 
""" + logger.info("Computing Mixed T1 array from SE and IR data using lookup table interpolation.") nonzero_mask = se_data != 0 f_data = np.nan * np.zeros_like(ir_data) f_data[nonzero_mask] = ir_data[nonzero_mask] / se_data[nonzero_mask] tr_se, ti, te, etl = meta["TR_SE"], meta["TI"], meta["TE"], meta["ETL"] f_curve, t1_grid = T1_lookup_table(tr_se, ti, te, etl, t1_low, t1_high) - + logger.debug( + f"Generated T1 lookup table with TR_SE={tr_se}, TI={ti}, TE={te}, " + f"ETL={etl}, T1 range=({t1_low}, {t1_high}), table size={len(t1_grid)}" + ) interpolator = scipy.interpolate.interp1d(f_curve, t1_grid, kind="nearest", bounds_error=False, fill_value=np.nan) return interpolator(f_data).astype(np.single) @@ -210,6 +223,7 @@ def _extract_frame_metadata(frame_fg) -> dict: Returns: dict: A dictionary containing available MR timing parameters. """ + logger.debug("Extracting MR timing parameters from DICOM frame functional group.") descrip = { "TR": float(frame_fg.MRTimingAndRelatedParametersSequence[0].RepetitionTime), "TE": float(frame_fg.MREchoSequence[0].EffectiveEchoTime), @@ -236,21 +250,24 @@ def extract_mixed_dicom(dcmpath: Path, subvolumes: list[str]) -> list[dict]: list[dict]: A list containing dictionaries with a generated 'nifti' image and a 'descrip' metadata dictionary for each requested subvolume. """ + logger.debug(f"Extracting subvolumes {subvolumes} from DICOM file {dcmpath}") import pydicom dcm = pydicom.dcmread(str(dcmpath)) frames_total = int(dcm.NumberOfFrames) - + logger.debug(f"Total frames in DICOM: {frames_total}") # [0x2001, 0x1018] is a private Philips tag representing 'Number of Slices MR' frames_per_volume = dcm[0x2001, 0x1018].value num_volumes = frames_total // frames_per_volume assert num_volumes * frames_per_volume == frames_total, "Subvolume dimensions do not evenly divide the total frames." 
+ logger.debug(f"Frames per volume: {frames_per_volume}, Number of volumes: {num_volumes}") pixel_data = dcm.pixel_array.astype(np.single) frame_fg_sequence = dcm.PerFrameFunctionalGroupsSequence vols_out = [] for volname in subvolumes: + logger.debug(f"Processing subvolume '{volname}'") vol_idx = VOLUME_LABELS.index(volname) # Find volume slices representing the current subvolume @@ -258,7 +275,7 @@ def extract_mixed_dicom(dcmpath: Path, subvolumes: list[str]) -> list[dict]: subvol_idx_end = (vol_idx + 1) * frames_per_volume frame_fg = frame_fg_sequence[subvol_idx_start] - logger.info( + logger.debug( f"Converting volume {vol_idx + 1}/{len(VOLUME_LABELS)}: '{volname}' " f"between indices {subvol_idx_start}-{subvol_idx_end} out of {frames_total}." ) @@ -275,7 +292,7 @@ def extract_mixed_dicom(dcmpath: Path, subvolumes: list[str]) -> list[dict]: return vols_out -def dicom_to_mixed(dcmpath: Path, outpath: Path, subvolumes: Optional[list[str]] = None): +def dicom_to_mixed(dcmpath: Path, outpath: Path, subvolumes: list[str] | None = None): """ Converts a Mixed sequence DICOM file into independent subvolume NIfTIs. @@ -288,19 +305,25 @@ def dicom_to_mixed(dcmpath: Path, outpath: Path, subvolumes: Optional[list[str]] subvolumes (list[str], optional): specific subvolumes to extract. Defaults to all known VOLUME_LABELS. 
""" + logger.info(f"Starting DICOM to Mixed conversion for {dcmpath} with output base {outpath}") + subvolumes = subvolumes or VOLUME_LABELS + logger.debug(f"Subvolumes to extract: {subvolumes}") assert all([volname in VOLUME_LABELS for volname in subvolumes]), ( f"Invalid subvolume name in {subvolumes}, must be one of {VOLUME_LABELS}" ) outdir, form = outpath.parent, outpath.stem + logger.debug(f"Output directory: {outdir}, output form prefix: {form}") outdir.mkdir(exist_ok=True, parents=True) vols = extract_mixed_dicom(dcmpath, subvolumes) + logger.debug(f"Extracted {len(vols)} subvolumes from DICOM, preparing to save NIfTI files and metadata.") meta = {} for vol, volname in zip(vols, subvolumes): output = outpath.with_name(f"{outpath.stem}_{volname}.nii.gz") + logger.debug(f"Saving subvolume '{volname}' to {output}") nibabel.nifti1.save(vol["nifti"], output) descrip = vol["descrip"] @@ -317,13 +340,19 @@ def dicom_to_mixed(dcmpath: Path, outpath: Path, subvolumes: Optional[list[str]] raise e # Write merged metadata sidecar - (outdir / f"{form}_meta.json").write_text(json.dumps(meta, indent=4)) + json_meta_path = outdir / f"{form}_meta.json" + logger.debug(f"Writing metadata JSON sidecar to {json_meta_path} with contents: {meta}") + json_meta_path.write_text(json.dumps(meta, indent=4)) # Attempt standard dcm2niix conversion (soft failure allowed for legacy behavior) + logger.debug("Attempting to run dcm2niix for standard conversion (soft failure allowed).") run_dcm2niix(dcmpath, outdir, form, extra_args="-w 0 --terse -b o", check=False) -def add_arguments(parser): +def add_arguments( + parser: argparse.ArgumentParser, + extra_args_cb: Callable[[argparse.ArgumentParser], None] | None = None, +) -> None: subparser = parser.add_subparsers(dest="hybrid-command", required=True, title="hybrid subcommands") dmc_parser = subparser.add_parser( @@ -370,6 +399,11 @@ def add_arguments(parser): ) post_parser.add_argument("-o", "--output", type=Path, required=True, 
help="Output path for the masked T1 map NIfTI file.") + if extra_args_cb is not None: + extra_args_cb(dmc_parser) + extra_args_cb(t1_parser) + extra_args_cb(post_parser) + def dispatch(args): """Dispatch function for the mixed T1 map generation commands.""" diff --git a/src/mritk/r1.py b/src/mritk/r1.py index db64420..9f8adfa 100644 --- a/src/mritk/r1.py +++ b/src/mritk/r1.py @@ -4,12 +4,16 @@ # Copyright (C) 2026 Cécile Daversin-Catty (cecile@simula.no) # Copyright (C) 2026 Simula Research Laboratory - -import numpy as np +import argparse +import logging +from collections.abc import Callable from pathlib import Path +import numpy as np from .data import MRIData +logger = logging.getLogger(__name__) + def compute_r1_array( t1_data: np.ndarray, scale: float = 1000.0, t1_low: float = 1.0, t1_high: float = float("inf") @@ -29,6 +33,7 @@ def compute_r1_array( Returns: np.ndarray: An array of R1 relaxation rates. Invalid/out-of-bound voxels are set to NaN. """ + logger.debug(f"Computing R1 array with scale={scale}, t1_low={t1_low}, t1_high={t1_high}") valid_t1 = (t1_low <= t1_data) & (t1_data <= t1_high) r1_data = np.nan * np.zeros_like(t1_data) @@ -57,6 +62,7 @@ def convert_t1_to_r1( MRIData: A new MRIData object containing the R1 map array and the original affine matrix. """ r1_data = compute_r1_array(T1map_mri.data, scale, t1_low, t1_high) + logger.debug(f"Converted T1 map to R1 map with shape {r1_data.shape}") return MRIData(data=r1_data, affine=T1map_mri.affine) @@ -83,6 +89,7 @@ def t1_to_r1( Raises: ValueError: If input_mri is neither a Path nor an MRIData object. 
""" + logger.info(f"Converting T1 map to R1 map with input: {input_mri}, output: {output}") if isinstance(input_mri, Path): mri_t1 = MRIData.from_file(input_mri, dtype=np.single) elif isinstance(input_mri, MRIData): @@ -93,18 +100,26 @@ def t1_to_r1( mri_r1 = convert_t1_to_r1(mri_t1, scale, t1_low, t1_high) if output is not None: + logger.info(f"Saving R1 map to {output}") mri_r1.save(output, dtype=np.single) + else: + logger.info("No output path provided, returning R1 map as MRIData object") return mri_r1 -def add_arguments(parser): +def add_arguments( + parser: argparse.ArgumentParser, + extra_args_cb: Callable[[argparse.ArgumentParser], None] | None = None, +) -> None: """Add command-line arguments for the T1 to R1 conversion.""" parser.add_argument("-i", "--input", type=Path, required=True, help="Path to the input T1 map (NIfTI).") parser.add_argument("-o", "--output", type=Path, help="Path to save the output R1 map (NIfTI).") parser.add_argument("--scale", type=float, default=1000.0, help="Scaling factor for R1 calculation.") parser.add_argument("--t1-low", type=float, default=1.0, help="Lower bound for valid T1 values.") parser.add_argument("--t1-high", type=float, default=float("inf"), help="Upper bound for valid T1 values.") + if extra_args_cb is not None: + extra_args_cb(parser) def dispatch(args: dict): diff --git a/src/mritk/utils.py b/src/mritk/utils.py index 4997531..9a59b34 100644 --- a/src/mritk/utils.py +++ b/src/mritk/utils.py @@ -226,8 +226,10 @@ def run_dcm2niix(input_path: Path, output_dir: Path, form: str, extra_args: str RuntimeError: If the dcm2niix executable is not found in the system PATH. subprocess.CalledProcessError: If the command fails and `check` is True. """ + logger.info(f"Running dcm2niix with input: {input_path}, output_dir: {output_dir}, form: {form}, extra_args: '{extra_args}'") # 1. 
Locate the executable securely executable = shutil.which("dcm2niix") + logger.debug(f"Located dcm2niix executable at: {executable}") if executable is None: raise RuntimeError( "The 'dcm2niix' executable was not found. Please ensure it is installed and available in your system PATH." @@ -248,6 +250,7 @@ def run_dcm2niix(input_path: Path, output_dir: Path, form: str, extra_args: str try: # 3. Execute without shell=True for better security and stability + logger.debug(f"Attempting to run dcm2niix with arguments: {args}") subprocess.run(args, check=check, capture_output=True, text=True) except subprocess.CalledProcessError as e: logger.error(f"dcm2niix execution failed.\nCommand: {cmd_str}\nError: {e.stderr}") From 11d425871657da63bf6f29671347c29284a5626c Mon Sep 17 00:00:00 2001 From: Henrik Finsberg Date: Mon, 9 Mar 2026 09:22:47 +0100 Subject: [PATCH 21/29] Refactor masks - and add tests --- src/mritk/masking/__init__.py | 8 -- src/mritk/masking/masks.py | 71 -------------- src/mritk/masking/utils.py | 16 ---- src/mritk/masks.py | 174 ++++++++++++++++++++++++++++++++++ src/mritk/mixed.py | 2 +- test/test_masks.py | 137 ++++++++++++++++++++++++++ 6 files changed, 312 insertions(+), 96 deletions(-) delete mode 100644 src/mritk/masking/__init__.py delete mode 100644 src/mritk/masking/masks.py delete mode 100644 src/mritk/masking/utils.py create mode 100644 src/mritk/masks.py create mode 100644 test/test_masks.py diff --git a/src/mritk/masking/__init__.py b/src/mritk/masking/__init__.py deleted file mode 100644 index 4bd6155..0000000 --- a/src/mritk/masking/__init__.py +++ /dev/null @@ -1,8 +0,0 @@ -# Copyright (C) 2026 Jørgen Riseth (jnriseth@gmail.com) -# Copyright (C) 2026 Cécile Daversin-Catty (cecile@simula.no) -# Copyright (C) 2026 Simula Research Laboratory - - -from . 
import masks - -__all__ = ["masks", "utils"] diff --git a/src/mritk/masking/masks.py b/src/mritk/masking/masks.py deleted file mode 100644 index 7d20517..0000000 --- a/src/mritk/masking/masks.py +++ /dev/null @@ -1,71 +0,0 @@ -"""Intracranial and CSF masks generation module - -Copyright (C) 2026 Jørgen Riseth (jnriseth@gmail.com) -Copyright (C) 2026 Cécile Daversin-Catty (cecile@simula.no) -Copyright (C) 2026 Simula Research Laboratory -""" - -import numpy as np -import skimage -from typing import Optional -from pathlib import Path - -from ..data import MRIData -from ..testing import assert_same_space -from .utils import largest_island - - -def create_csf_mask( - vol: np.ndarray, - connectivity: Optional[int] = 2, - use_li: bool = False, -) -> np.ndarray: - connectivity = connectivity or vol.ndim - if use_li: - thresh = skimage.filters.threshold_li(vol) - binary = vol > thresh - binary = largest_island(binary, connectivity=connectivity) - else: - (hist, bins) = np.histogram(vol[(vol > 0) * (vol < np.quantile(vol, 0.999))], bins=512) - thresh = skimage.filters.threshold_yen(hist=(hist, bins)) - binary = vol > thresh - binary = largest_island(binary, connectivity=connectivity) - return binary - - -def csf_mask( - input: Path, - connectivity: Optional[int] = 2, - use_li: bool = False, - output: Path | None = None, -) -> MRIData: - input_vol = MRIData.from_file(input, dtype=np.single) - mask = create_csf_mask(input_vol.data, connectivity, use_li) - assert np.max(mask) > 0, "Masking failed, no voxels in mask" - mri_data = MRIData(data=mask, affine=input_vol.affine) - if output is not None: - mri_data.save(output, dtype=np.uint8) - return mri_data - - -def create_intracranial_mask(csf_mask: MRIData, segmentation: MRIData) -> np.ndarray: - assert_same_space(csf_mask, segmentation) - combined_mask = csf_mask.data + segmentation.data.astype(bool) - background_mask = largest_island(~combined_mask, connectivity=1) - opened = skimage.morphology.binary_opening(background_mask, 
skimage.morphology.ball(3)) - return ~opened - # return MRIData(data=~opened, affine=segmentation.affine) - - -def intracranial_mask( - csf_mask: Path, - segmentation: Path, - output: Optional[Path] = None, -) -> MRIData: - input_csf_mask = MRIData.from_file(csf_mask, dtype=bool) - segmentation_data = MRIData.from_file(segmentation, dtype=bool) - mask_data = create_intracranial_mask(input_csf_mask, segmentation_data) - mri_data = MRIData(data=mask_data, affine=segmentation_data.affine) - if output is not None: - mri_data.save(output, dtype=np.uint8) - return mri_data diff --git a/src/mritk/masking/utils.py b/src/mritk/masking/utils.py deleted file mode 100644 index 2858a5c..0000000 --- a/src/mritk/masking/utils.py +++ /dev/null @@ -1,16 +0,0 @@ -"""Masking utils - -Copyright (C) 2026 Jørgen Riseth (jnriseth@gmail.com) -Copyright (C) 2026 Cécile Daversin-Catty (cecile@simula.no) -Copyright (C) 2026 Simula Research Laboratory -""" - -import numpy as np -import skimage - - -def largest_island(mask: np.ndarray, connectivity: int = 1) -> np.ndarray: - newmask = skimage.measure.label(mask, connectivity=connectivity) - regions = skimage.measure.regionprops(newmask) - regions.sort(key=lambda x: x.num_pixels, reverse=True) - return newmask == regions[0].label diff --git a/src/mritk/masks.py b/src/mritk/masks.py new file mode 100644 index 0000000..c7fb53d --- /dev/null +++ b/src/mritk/masks.py @@ -0,0 +1,174 @@ +# Intracranial and CSF masks generation module + +# Copyright (C) 2026 Jørgen Riseth (jnriseth@gmail.com) +# Copyright (C) 2026 Cécile Daversin-Catty (cecile@simula.no) +# Copyright (C) 2026 Simula Research Laboratory + + +import numpy as np +import skimage +from pathlib import Path + +from .data import MRIData +from .testing import assert_same_space + + +def largest_island(mask: np.ndarray, connectivity: int = 1) -> np.ndarray: + """ + Identifies and returns the largest contiguous region (island) in a boolean mask. 
+ + Args: + mask (np.ndarray): A boolean or integer array where non-zero values + represent the regions of interest. + connectivity (int, optional): Maximum number of orthogonal hops to consider + a pixel/voxel as connected. For 2D, 1=4-connected, 2=8-connected. + For 3D, 1=6-connected, 2=18-connected, 3=26-connected. Defaults to 1. + + Returns: + np.ndarray: A boolean array of the same shape as `mask`, where True + indicates the elements of the largest connected component. + """ + newmask = skimage.measure.label(mask, connectivity=connectivity) + regions = skimage.measure.regionprops(newmask) + + # Handle the edge case where the mask is completely empty + if not regions: + return np.zeros_like(mask, dtype=bool) + + regions.sort(key=lambda x: x.num_pixels, reverse=True) + return newmask == regions[0].label + + +def create_csf_mask( + vol: np.ndarray, + connectivity: int | None = 2, + use_li: bool = False, +) -> np.ndarray: + """ + Creates a binary mask isolating the Cerebrospinal Fluid (CSF). + + This function uses intensity thresholding (either Li or Yen) to separate + bright fluid regions from surrounding tissue. It then isolates the CSF + by retaining only the largest contiguous spatial island. + + Args: + vol (np.ndarray): 3D numpy array of the MRI volume (typically T2-weighted or Spin-Echo). + connectivity (Optional[int], optional): Maximum connectivity distance to evaluate + contiguous islands. Defaults to 2. + use_li (bool, optional): If True, uses Li's minimum cross entropy thresholding. + If False, uses Yen's thresholding based on the volume histogram. Defaults to False. + + Returns: + np.ndarray: A boolean 3D array representing the CSF mask. 
+ """ + connectivity = connectivity or vol.ndim + + if use_li: + thresh = skimage.filters.threshold_li(vol) + binary = vol > thresh + binary = largest_island(binary, connectivity=connectivity) + else: + # Create a histogram excluding the absolute background (0) and extreme high outliers + valid_mask = (vol > 0) & (vol < np.quantile(vol, 0.999)) + hist, bins = np.histogram(vol[valid_mask], bins=512) + + thresh = skimage.filters.threshold_yen(hist=(hist, bins)) + binary = vol > thresh + binary = largest_island(binary, connectivity=connectivity) + + return binary + + +def csf_mask( + input: Path, + connectivity: int | None = 2, + use_li: bool = False, + output: Path | None = None, +) -> MRIData: + """ + I/O wrapper for generating and saving a CSF mask from a NIfTI file. + + Args: + input (Path): Path to the input NIfTI image. + connectivity (Optional[int], optional): Connectivity distance. Defaults to 2. + use_li (bool, optional): If True, uses Li thresholding. Defaults to False. + output (Optional[Path], optional): Path to save the resulting mask. Defaults to None. + + Returns: + MRIData: An MRIData object containing the boolean mask array. + + Raises: + AssertionError: If the resulting mask contains no voxels. + """ + input_vol = MRIData.from_file(input, dtype=np.single) + mask = create_csf_mask(input_vol.data, connectivity, use_li) + + assert np.max(mask) > 0, "Masking failed, no voxels in mask" + + mri_data = MRIData(data=mask, affine=input_vol.affine) + + if output is not None: + mri_data.save(output, dtype=np.uint8) + + return mri_data + + +def compute_intracranial_mask_array(csf_mask_array: np.ndarray, segmentation_array: np.ndarray) -> np.ndarray: + """ + Combines a CSF mask array and a brain segmentation mask array into a solid intracranial mask. + + This function merges the two domains and uses morphological operations (binary opening) + on the background to cleanly fill in any gaps or holes within the intracranial space. 
+ + Args: + csf_mask_array (np.ndarray): 3D boolean array representing the CSF mask. + segmentation_array (np.ndarray): 3D boolean array representing the anatomical brain segmentation. + + Returns: + np.ndarray: A boolean 3D array representing the solid intracranial space. + """ + # Ensure logical boolean combination + combined_mask = csf_mask_array.astype(bool) | segmentation_array.astype(bool) + + # Identify the background by extracting the largest island of the inverted combined mask + background_mask = largest_island(~combined_mask, connectivity=1) + + # Smooth the background boundary to fill narrow sulci/gaps + opened_background = skimage.morphology.opening(background_mask, skimage.morphology.ball(3)) + + # The intracranial mask is the inverse of the cleaned background + return ~opened_background + + +def intracranial_mask( + csf_mask_path: Path, + segmentation_path: Path, + output: Path | None = None, +) -> MRIData: + """ + I/O wrapper for generating and saving an intracranial mask from NIfTI files. + + Loads the masks, verifies they share the same physical coordinate space, and + delegates the array computation. + + Args: + csf_mask_path (Path): Path to the CSF mask NIfTI file. + segmentation_path (Path): Path to the brain segmentation NIfTI file. + output (Optional[Path], optional): Path to save the resulting mask. Defaults to None. + + Returns: + MRIData: An MRIData object containing the intracranial mask. 
+ """ + input_csf_mask = MRIData.from_file(csf_mask_path, dtype=bool) + segmentation_data = MRIData.from_file(segmentation_path, dtype=bool) + + # Validate spatial alignment before array operations + assert_same_space(input_csf_mask, segmentation_data) + + mask_data = compute_intracranial_mask_array(input_csf_mask.data, segmentation_data.data) + mri_data = MRIData(data=mask_data, affine=segmentation_data.affine) + + if output is not None: + mri_data.save(output, dtype=np.uint8) + + return mri_data diff --git a/src/mritk/mixed.py b/src/mritk/mixed.py index a2cbd61..76e1817 100644 --- a/src/mritk/mixed.py +++ b/src/mritk/mixed.py @@ -18,7 +18,7 @@ from .data import data_reorientation, change_of_coordinates_map, MRIData -from .masking.masks import create_csf_mask +from .masks import create_csf_mask from .utils import T1_lookup_table, VOLUME_LABELS, run_dcm2niix logger = logging.getLogger(__name__) diff --git a/test/test_masks.py b/test/test_masks.py new file mode 100644 index 0000000..b4a3144 --- /dev/null +++ b/test/test_masks.py @@ -0,0 +1,137 @@ +"""Tests for Masks and Intracranial modules + +Copyright (C) 2026 Jørgen Riseth (jnriseth@gmail.com) +Copyright (C) 2026 Cécile Daversin-Catty (cecile@simula.no) +Copyright (C) 2026 Simula Research Laboratory +""" + +import numpy as np +import nibabel as nib + +from mritk.masks import create_csf_mask, csf_mask, compute_intracranial_mask_array, intracranial_mask, largest_island + + +def test_largest_island(): + """Test that the largest contiguous region is correctly isolated.""" + mask = np.zeros((10, 10), dtype=bool) + # Small island (4 pixels) + mask[1:3, 1:3] = True + # Large island (9 pixels) + mask[5:8, 5:8] = True + + result = largest_island(mask, connectivity=1) + + # Large island should be kept + assert result[6, 6] == np.True_ + # Small island should be dropped + assert result[1, 1] == np.False_ + # Total active pixels should equal the large island's size + assert np.sum(result) == 9 + + +def 
test_largest_island_empty(): + """Test behavior when the mask contains no true values.""" + mask = np.zeros((5, 5), dtype=bool) + result = largest_island(mask) + + assert np.sum(result) == 0 + assert result.dtype == bool + + +def test_create_csf_mask_li(): + """Test generating a CSF mask using Li thresholding and largest island extraction.""" + # Create a 10x10x10 mock volume + vol = np.zeros((10, 10, 10)) + # Add a main bright "CSF" island + vol[2:8, 2:8, 2:8] = 100.0 + # Add a smaller disconnected "noise" island + vol[0, 0, 0] = 100.0 + + mask = create_csf_mask(vol, connectivity=1, use_li=True) + + # Validates that the primary island is kept + assert mask[5, 5, 5] == np.True_ + # Validates that the smaller noise island is dropped + assert mask[0, 0, 0] == np.False_ + # Validates background is excluded + assert mask[1, 1, 1] == np.False_ + + +def test_create_csf_mask_yen(): + """Test generating a CSF mask using Yen thresholding on histogram data.""" + # Build a slightly noisier array to ensure quantile/histogram logic doesn't crash + vol = np.random.uniform(0, 10, (15, 15, 15)) + # Inject primary bright island + vol[4:10, 4:10, 4:10] = 150.0 + + mask = create_csf_mask(vol, connectivity=2, use_li=False) + + assert mask[7, 7, 7] == np.True_ + assert mask[0, 0, 0] == np.False_ + + +def test_compute_intracranial_mask_array(): + """Test the array logic for merging CSF and segmentation into a solid bounding space.""" + # 10x10x10 empty background arrays + csf = np.zeros((10, 10, 10), dtype=bool) + seg = np.zeros((10, 10, 10), dtype=bool) + + # Outer ring = CSF + csf[2:8, 2:8, 2:8] = True + csf[3:7, 3:7, 3:7] = False + + # Inner core = Brain Segmentation + seg[3:7, 3:7, 3:7] = True + + # Merge and smooth + ic_mask = compute_intracranial_mask_array(csf, seg) + + # The whole 2:8 block should be completely solid (True) + assert np.all(ic_mask[2:8, 2:8, 2:8] == np.True_) + # Surrounding coordinates should remain background (False) + assert ic_mask[0, 0, 0] == np.False_ + + 
+def test_csf_mask_io(tmp_path): + """Test the I/O wrapper for CSF masking by writing actual temporary NIfTI files.""" + in_path = tmp_path / "mock_in.nii.gz" + out_path = tmp_path / "mock_out.nii.gz" + + # Create a real, small NIfTI file with an identity affine matrix + data = np.zeros((10, 10, 10), dtype=np.single) + data[2:8, 2:8, 2:8] = 100.0 # CSF target area + nii = nib.Nifti1Image(data, np.eye(4)) + nib.save(nii, in_path) + + result = csf_mask(input=in_path, use_li=True, output=out_path) + + # Verify the file was physically saved to the filesystem + assert out_path.exists() + # Verify the output data shape matches what we expect + assert result.data.shape == (10, 10, 10) + + +def test_intracranial_mask_io(tmp_path): + """Test the I/O wrapper for Intracranial masking by writing actual temporary NIfTI files.""" + csf_path = tmp_path / "csf.nii.gz" + seg_path = tmp_path / "seg.nii.gz" + out_path = tmp_path / "ic_out.nii.gz" + + # Create standard identity affine matrices to satisfy assert_same_space + affine = np.eye(4) + + # 1. Mock CSF Mask file + csf_data = np.zeros((10, 10, 10), dtype=np.single) + csf_data[2:8, 2:8, 2:8] = 1.0 + nib.save(nib.Nifti1Image(csf_data, affine), csf_path) + + # 2. 
Mock Segmentation file + seg_data = np.zeros((10, 10, 10), dtype=np.single) + seg_data[4:6, 4:6, 4:6] = 1.0 + nib.save(nib.Nifti1Image(seg_data, affine), seg_path) + + result = intracranial_mask(csf_mask_path=csf_path, segmentation_path=seg_path, output=out_path) + + # Verify output file creation + assert out_path.exists() + assert result.data.shape == (10, 10, 10) From d752c9eddcb6b687a9fed84dcd4105a7b21e88f5 Mon Sep 17 00:00:00 2001 From: Henrik Finsberg Date: Mon, 9 Mar 2026 09:46:32 +0100 Subject: [PATCH 22/29] Add seed to test --- test/test_masks.py | 22 ++++++++++++++++------ 1 file changed, 16 insertions(+), 6 deletions(-) diff --git a/test/test_masks.py b/test/test_masks.py index b4a3144..b30f1be 100644 --- a/test/test_masks.py +++ b/test/test_masks.py @@ -59,15 +59,25 @@ def test_create_csf_mask_li(): def test_create_csf_mask_yen(): """Test generating a CSF mask using Yen thresholding on histogram data.""" - # Build a slightly noisier array to ensure quantile/histogram logic doesn't crash - vol = np.random.uniform(0, 10, (15, 15, 15)) - # Inject primary bright island - vol[4:10, 4:10, 4:10] = 150.0 + # Ensure reproducible random distributions across Python versions + np.random.seed(42) + + # Base background noise + vol = np.random.uniform(1, 10, (15, 15, 15)) + + # Inject primary bright island (e.g. CSF) + # The uniform distribution guarantees different float values, meaning the + # top 0.1% filter will only remove the 3 absolute brightest voxels. + # The rest of the island will safely remain to populate the histogram. 
+ vol[4:10, 4:10, 4:10] = np.random.uniform(100, 150, (6, 6, 6)) mask = create_csf_mask(vol, connectivity=2, use_li=False) - assert mask[7, 7, 7] == np.True_ - assert mask[0, 0, 0] == np.False_ + # Check that the center of the island is identified + assert bool(mask[7, 7, 7]) is True + # Check that the extreme background corners are completely excluded + assert bool(mask[0, 0, 0]) is False + assert bool(mask[14, 14, 14]) is False def test_compute_intracranial_mask_array(): From ecbdfe57be36731a50588c23e9b66cc45889f83f Mon Sep 17 00:00:00 2001 From: Henrik Finsberg Date: Mon, 9 Mar 2026 10:15:00 +0100 Subject: [PATCH 23/29] More logging --- src/mritk/cli.py | 4 ++-- src/mritk/hybrid.py | 11 ++++++++-- src/mritk/looklocker.py | 45 +++++++++++++++++++++++++++++++++++++---- 3 files changed, 52 insertions(+), 8 deletions(-) diff --git a/src/mritk/cli.py b/src/mritk/cli.py index e428af8..f9cf915 100644 --- a/src/mritk/cli.py +++ b/src/mritk/cli.py @@ -77,12 +77,12 @@ def setup_parser(): looklocker_parser = subparsers.add_parser( "looklocker", help="Process Look-Locker data", formatter_class=parser.formatter_class ) - looklocker.add_arguments(looklocker_parser) + looklocker.add_arguments(looklocker_parser, extra_args_cb=add_extra_arguments) hybrid_parser = subparsers.add_parser( "hybrid", help="Generate a hybrid T1 map by merging Look-Locker and Mixed maps.", formatter_class=parser.formatter_class ) - hybrid.add_arguments(hybrid_parser) + hybrid.add_arguments(hybrid_parser, extra_args_cb=add_extra_arguments) mixed_parser = subparsers.add_parser( "mixed", help="Generate a Mixed T1 map from Look-Locker data.", formatter_class=parser.formatter_class diff --git a/src/mritk/hybrid.py b/src/mritk/hybrid.py index 3d005f0..426831d 100644 --- a/src/mritk/hybrid.py +++ b/src/mritk/hybrid.py @@ -4,7 +4,8 @@ # Copyright (C) 2026 Cécile Daversin-Catty (cecile@simula.no) # Copyright (C) 2026 Simula Research Laboratory - +import argparse +from collections.abc import Callable import 
logging import numpy as np import skimage @@ -69,7 +70,10 @@ def hybrid_t1map( return hybrid_nii -def add_arguments(parser): +def add_arguments( + parser: argparse.ArgumentParser, + extra_args_cb: Callable[[argparse.ArgumentParser], None] | None = None, +) -> None: """Add command-line arguments for the hybrid T1 map generation.""" parser.add_argument("-i", "--input-ll", type=Path, required=True, help="Path to the Look-Locker T1 map (NIfTI).") parser.add_argument("-m", "--input-mixed", type=Path, required=True, help="Path to the Mixed T1 map (NIfTI).") @@ -78,6 +82,9 @@ def add_arguments(parser): parser.add_argument("-e", "--erode", type=int, default=0, help="Number of voxels to erode the CSF mask.") parser.add_argument("-o", "--output", type=Path, required=True, help="Output path for the hybrid T1 map (NIfTI).") + if extra_args_cb is not None: + extra_args_cb(parser) + def dispatch(args): """Dispatch function for the hybrid T1 map generation.""" diff --git a/src/mritk/looklocker.py b/src/mritk/looklocker.py index 25089ea..547ef1d 100644 --- a/src/mritk/looklocker.py +++ b/src/mritk/looklocker.py @@ -4,7 +4,8 @@ # Copyright (C) 2026 Cécile Daversin-Catty (cecile@simula.no) # Copyright (C) 2026 Simula Research Laboratory - +import argparse +from collections.abc import Callable import logging import numpy as np import tempfile @@ -34,6 +35,7 @@ def read_dicom_trigger_times(dicomfile: Path) -> np.ndarray: np.ndarray: A sorted array of unique trigger delay times (in milliseconds) extracted from the CardiacSynchronizationSequence. """ + logger.info(f"Reading DICOM trigger times from {dicomfile}.") import pydicom dcm = pydicom.dcmread(dicomfile) @@ -56,6 +58,7 @@ def remove_outliers(data: np.ndarray, mask: np.ndarray, t1_low: float, t1_high: Returns: np.ndarray: A cleaned 3D array with outliers and unmasked regions set to NaN. 
""" + logger.info("Removing outliers from T1 map with physiological range [%f, %f].", t1_low, t1_high) processed = data.copy() processed[~mask] = np.nan outliers = (processed < t1_low) | (processed > t1_high) @@ -76,6 +79,7 @@ def create_largest_island_mask(data: np.ndarray, radius: int = 10, erode_dilate_ Returns: np.ndarray: A boolean 3D mask of the largest contiguous island. """ + logger.info("Creating largest island mask with dilation radius %d and erosion factor %.2f.", radius, erode_dilate_factor) mask = skimage.measure.label(np.isfinite(data)) regions = skimage.measure.regionprops(mask) if not regions: @@ -90,6 +94,7 @@ def create_largest_island_mask(data: np.ndarray, radius: int = 10, erode_dilate_ skimage.morphology.remove_small_holes(mask, area_threshold=10 ** (mask.ndim), connectivity=2, out=mask) skimage.morphology.dilation(mask, skimage.morphology.ball(radius), out=mask) skimage.morphology.erosion(mask, skimage.morphology.ball(erode_dilate_factor * radius), out=mask) + logger.debug(f"Generated final mask with shape {mask.shape} and {mask.sum()} valid voxels.") return mask @@ -106,9 +111,12 @@ def compute_looklocker_t1_array(data: np.ndarray, time_s: np.ndarray, t1_roof: f np.ndarray: 3D numpy array representing the T1 map in milliseconds. Voxels that fail to fit or fall outside the mask are set to NaN. 
""" + logger.info("Computing Look-Locker T1 map from 4D data with shape %s and trigger times %s.", data.shape, time_s) assert len(data.shape) >= 4, f"Data should be at least 4-dimensional, got shape {data.shape}" mask = mri_facemask(data[..., 0]) + logger.debug(f"Generated face mask with shape {mask.shape} and {mask.sum()} valid voxels.") valid_voxels = (np.nanmax(data, axis=-1) > 0) & mask + logger.debug(f"Identified {valid_voxels.sum()} valid voxels for fitting after applying mask and signal threshold.") data_normalized = np.nan * np.zeros_like(data) # Prevent divide by zero warnings dynamically @@ -118,6 +126,7 @@ def compute_looklocker_t1_array(data: np.ndarray, time_s: np.ndarray, t1_roof: f voxel_mask = np.array(np.where(valid_voxels)).T d_masked = np.array([data_normalized[i, j, k] for (i, j, k) in voxel_mask]) + logger.debug(f"Starting fitting for {len(d_masked)} voxels.") with tqdm.tqdm(total=len(d_masked), desc="Fitting Look-Locker Voxels") as pbar: voxel_fitter = partial(fit_voxel, time_s, pbar) vfunc = np.vectorize(voxel_fitter, signature="(n) -> (3)") @@ -176,11 +185,15 @@ def looklocker_t1map_postprocessing( removal are iteratively filled using a specialized Gaussian filter that interpolates from surrounding valid tissue without blurring the edges. 
""" + logger.info(f"Post-processing Look-Locker T1 map at {T1map} with T1 range [{T1_low}, {T1_high}] ms.") t1map_mri = MRIData.from_file(T1map, dtype=np.single) t1map_data = t1map_mri.data.copy() if mask is None: + logger.debug("No mask provided, generating automatic mask based on the largest contiguous tissue island.") mask = create_largest_island_mask(t1map_data, radius, erode_dilate_factor) + else: + logger.debug("Using provided mask for post-processing.") t1map_data = remove_outliers(t1map_data, mask, T1_low, T1_high) @@ -189,6 +202,7 @@ def looklocker_t1map_postprocessing( # Fill internal missing values iteratively using a Gaussian filter fill_mask = np.isnan(t1map_data) & mask + logger.debug(f"Initial fill mask has {fill_mask.sum()} voxels.") while fill_mask.sum() > 0: logger.info(f"Filling in {fill_mask.sum()} voxels within the mask.") t1map_data[fill_mask] = nan_filter_gaussian(t1map_data, 1.0)[fill_mask] @@ -197,6 +211,9 @@ def looklocker_t1map_postprocessing( processed_T1map = MRIData(t1map_data, t1map_mri.affine) if output is not None: processed_T1map.save(output, dtype=np.single) + logger.info(f"Post-processed Look-Locker T1 map saved to {output}.") + else: + logger.info("No output path provided, returning post-processed Look-Locker T1 map as MRIData object.") return processed_T1map @@ -220,15 +237,19 @@ def looklocker_t1map(looklocker_input: Path, timestamps: Path, output: Path | No MRIData: An MRIData object containing the computed 3D T1 map (in milliseconds) and the original affine transformation matrix. 
""" + logger.info(f"Generating T1 map from Look-Locker data at {looklocker_input} with trigger times from {timestamps}.") ll_mri = MRIData.from_file(looklocker_input, dtype=np.single) # Convert timestamps from milliseconds to seconds time_s = np.loadtxt(timestamps) / 1000.0 - + logger.debug(f"Loaded trigger times: {time_s}.") t1map_array = compute_looklocker_t1_array(ll_mri.data, time_s) t1map_mri = MRIData(t1map_array.astype(np.single), ll_mri.affine) if output is not None: t1map_mri.save(output, dtype=np.single) + logger.info(f"Look-Locker T1 map saved to {output}.") + else: + logger.info("No output path provided, returning Look-Locker T1 map as MRIData object.") return t1map_mri @@ -244,14 +265,18 @@ def dicom_to_looklocker(dicomfile: Path, outpath: Path): dicomfile (Path): Path to the input DICOM file. outpath (Path): Desired output path for the converted .nii.gz file. """ + logger.info(f"Converting Look-Locker DICOM {dicomfile} to NIfTI format at {outpath}") outdir, form = outpath.parent, outpath.stem outdir.mkdir(exist_ok=True, parents=True) # Extract and save trigger times times = read_dicom_trigger_times(dicomfile) - np.savetxt(outdir / f"{form}_trigger_times.txt", times) + trigger_file = outdir / f"{form}_trigger_times.txt" + logger.debug(f"Extracted trigger times: {times}. 
Saving to {trigger_file}") + np.savetxt(trigger_file, times) with tempfile.TemporaryDirectory(prefix=outpath.stem) as tmpdir: + logger.debug(f"Created temporary directory {tmpdir} for intermediate dcm2niix output.") tmppath = Path(tmpdir) # Delegate heavy lifting to dcm2niix @@ -262,10 +287,17 @@ def dicom_to_looklocker(dicomfile: Path, outpath: Path): # Reload and save to standardize intent codes and precision mri = MRIData.from_file(tmppath / f"{form}.nii.gz", dtype=np.double) + logger.debug(f"Reloaded intermediate NIfTI file with shape {mri.data.shape} and dtype {mri.data.dtype}.") mri.save(outpath.with_suffix(".nii.gz"), dtype=np.single, intent_code=2001) + logger.info( + f"Final Look-Locker NIfTI saved to {outpath.with_suffix('.nii.gz')} with intent_code=2001 and dtype=np.single." + ) -def add_arguments(parser): +def add_arguments( + parser: argparse.ArgumentParser, + extra_args_cb: Callable[[argparse.ArgumentParser], None] | None = None, +) -> None: subparser = parser.add_subparsers(dest="looklocker-command", help="Commands for processing Look-Locker data") dicom_parser = subparser.add_parser( @@ -296,6 +328,11 @@ def add_arguments(parser): help="Multiplier for the erosion radius relative to the dilation radius to ensure tight mask edges", ) + if extra_args_cb is not None: + extra_args_cb(dicom_parser) + extra_args_cb(ll_t1) + extra_args_cb(ll_post) + def dispatch(args): command = args.pop("looklocker-command") From 922f207b7b78a87730da4f1f1d3139f37a7ee37f Mon Sep 17 00:00:00 2001 From: Henrik Finsberg Date: Mon, 9 Mar 2026 10:47:38 +0100 Subject: [PATCH 24/29] Add suggestions from Cecile --- src/mritk/hybrid.py | 5 +++-- test/test_hybrid.py | 4 ++-- 2 files changed, 5 insertions(+), 4 deletions(-) diff --git a/src/mritk/hybrid.py b/src/mritk/hybrid.py index 426831d..b2a5a0c 100644 --- a/src/mritk/hybrid.py +++ b/src/mritk/hybrid.py @@ -75,7 +75,7 @@ def add_arguments( extra_args_cb: Callable[[argparse.ArgumentParser], None] | None = None, ) -> None: """Add 
command-line arguments for the hybrid T1 map generation.""" - parser.add_argument("-i", "--input-ll", type=Path, required=True, help="Path to the Look-Locker T1 map (NIfTI).") + parser.add_argument("-l", "--input-looklocker", type=Path, required=True, help="Path to the Look-Locker T1 map (NIfTI).") parser.add_argument("-m", "--input-mixed", type=Path, required=True, help="Path to the Mixed T1 map (NIfTI).") parser.add_argument("-c", "--csf-mask", type=Path, required=True, help="Path to the CSF mask (NIfTI).") parser.add_argument("-t", "--threshold", type=float, default=4000.0, help="T1 threshold in ms for substitution.") @@ -88,8 +88,9 @@ def add_arguments( def dispatch(args): """Dispatch function for the hybrid T1 map generation.""" + hybrid_t1map( - LL_path=args.pop("input_ll"), + LL_path=args.pop("input_looklocker"), mixed_path=args.pop("input_mixed"), csf_mask_path=args.pop("csf_mask"), threshold=args.pop("threshold"), diff --git a/test/test_hybrid.py b/test/test_hybrid.py index 0e90e1e..ac8f823 100644 --- a/test/test_hybrid.py +++ b/test/test_hybrid.py @@ -63,7 +63,7 @@ def test_dispatch_hybrid_defaults(mock_hybrid_t1map): # We pass the arguments exactly as a user would type them in the terminal. # If "hybrid" is nested under another command (like "t1maps hybrid"), add that prefix to the list. 
mritk.cli.main( - ["hybrid", "-i", "ll_map.nii.gz", "-m", "mixed_map.nii.gz", "-c", "csf_mask.nii.gz", "-o", "output_hybrid.nii.gz"] + ["hybrid", "-l", "ll_map.nii.gz", "-m", "mixed_map.nii.gz", "-c", "csf_mask.nii.gz", "-o", "output_hybrid.nii.gz"] ) # Verify the underlying function was called with parsed Paths and the correct default values @@ -84,7 +84,7 @@ def test_dispatch_hybrid_explicit_args(mock_hybrid_t1map): mritk.cli.main( [ "hybrid", - "--input-ll", + "--input-looklocker", "ll_map.nii.gz", "--input-mixed", "mixed_map.nii.gz", From 772c1ba8e20dfa1177089ed27ba30e33ff91885b Mon Sep 17 00:00:00 2001 From: Henrik Finsberg Date: Mon, 9 Mar 2026 12:11:19 +0100 Subject: [PATCH 25/29] Fix more logging and add gonzo commands --- .pre-commit-config.yaml | 2 +- docs/looklocker.md | 21 +++++++++++++++++++++ docs/mixed.md | 21 +++++++++++++++++++++ src/mritk/looklocker.py | 5 +++++ src/mritk/masks.py | 1 - src/mritk/mixed.py | 8 +++++++- 6 files changed, 55 insertions(+), 3 deletions(-) diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index cdc8b97..edd289e 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -17,7 +17,7 @@ repos: rev: 'v0.14.7' hooks: # Run the linter. - - id: ruff + - id: ruff-check args: [ --fix ] # Run the formatter. - id: ruff-format diff --git a/docs/looklocker.md b/docs/looklocker.md index 6722dc2..1e40c18 100644 --- a/docs/looklocker.md +++ b/docs/looklocker.md @@ -62,6 +62,15 @@ Fits the voxel-wise Levenberg-Marquardt optimization curve to estimate $T_1$ tim mritk looklocker t1 -i path/to/ll_output.nii.gz -t path/to/ll_output_trigger_times.txt -o path/to/t1_raw.nii.gz ``` +Gonzo: + +```shell +mritk looklocker t1 \ + -i gonzo/mri-dataset/mri_dataset/sub-01/ses-01/anat/sub-01_ses-01_acq-looklocker_IRT1.nii.gz \ + -t gonzo/mri-dataset/mri_dataset/sub-01/ses-01/anat/sub-01_ses-01_acq-looklocker_IRT1_trigger_times.txt \ + -o sub-01_ses-01_acq-looklocker_T1map_raw.nii.gz +``` + ### 3. 
Post-Processing (postprocess) Raw $T_1$ maps often contain noisy fits or values outside physiological boundaries. The postprocess command applies a quality-control pipeline that: @@ -81,3 +90,15 @@ Iteratively fills internal NaNs (holes) using a smart Gaussian filter. ```bash mritk looklocker postprocess -i path/to/t1_raw.nii.gz -o path/to/t1_clean.nii.gz --t1-low 100.0 --t1-high 5000.0 ``` + +Gonzo: + +(here input is the raw T1 map from the previous step) + +```shell +mritk looklocker postprocess \ + -i sub-01_ses-01_acq-looklocker_T1map_raw.nii.gz \ + -o sub-01_ses-01_acq-looklocker_T1map.nii.gz \ + --t1-low 100.0 \ + --t1-high 6000.0 +``` diff --git a/docs/mixed.md b/docs/mixed.md index 2769c25..c5062d7 100644 --- a/docs/mixed.md +++ b/docs/mixed.md @@ -70,6 +70,16 @@ Generates the $T_1$ map based on the signal ratio between the Inversion-Recovery mritk mixed t1 -s path/to/output_base_SE-modulus.nii.gz -i path/to/output_base_IR-corrected-real.nii.gz -m path/to/output_base_meta.json -o path/to/mixed_t1_raw.nii.gz ``` +Gonzo: + +```shell +mritk mixed t1 \ + -s gonzo/mri-dataset/mri_dataset/sub-01/ses-01/mixed/sub-01_ses-01_acq-mixed_SE-modulus.nii.gz \ + -i gonzo/mri-dataset/mri_dataset/sub-01/ses-01/mixed/sub-01_ses-01_acq-mixed_IR-corrected-real.nii.gz \ + -m gonzo/mri-dataset/mri_dataset/sub-01/ses-01/mixed/sub-01_ses-01_acq-mixed_meta.json \ + -o sub-01_ses-01_acq-mixed_T1map_raw.nii.gz +``` + ### 3. Post-Processing (postprocess) Masks out non-fluid areas from the Mixed $T_1$ map. It derives a mask dynamically from the original SE sequence using Li thresholding and erodes the mask to avoid partial-volume effects at tissue boundaries. @@ -82,3 +92,14 @@ Masks out non-fluid areas from the Mixed $T_1$ map. 
It derives a mask dynamicall ```bash mritk mixed postprocess -s path/to/output_base_SE-modulus.nii.gz -t path/to/mixed_t1_raw.nii.gz -o path/to/mixed_t1_clean.nii.gz ``` + +Gonzo: + +(here we use the original SE modulus image as the source for mask generation, and the t1 map from the previous step as the input for post-processing) + +```shell +mritk mixed postprocess \ + -s gonzo/mri-dataset/mri_dataset/sub-01/ses-01/mixed/sub-01_ses-01_acq-mixed_SE-modulus.nii.gz \ + -t sub-01_ses-01_acq-mixed_T1map_raw.nii.gz \ + -o sub-01_ses-01_acq-mixed_T1map_clean.nii.gz +``` diff --git a/src/mritk/looklocker.py b/src/mritk/looklocker.py index 547ef1d..1321046 100644 --- a/src/mritk/looklocker.py +++ b/src/mritk/looklocker.py @@ -81,18 +81,23 @@ def create_largest_island_mask(data: np.ndarray, radius: int = 10, erode_dilate_ """ logger.info("Creating largest island mask with dilation radius %d and erosion factor %.2f.", radius, erode_dilate_factor) mask = skimage.measure.label(np.isfinite(data)) + logger.debug("Region properties calculated for %d labeled regions.", mask.max()) regions = skimage.measure.regionprops(mask) if not regions: return np.zeros_like(data, dtype=bool) + logger.debug("Sorting regions by size to identify the largest contiguous island.") regions.sort(key=lambda x: x.num_pixels, reverse=True) mask = mask == regions[0].label try: + logger.debug("Removing small holes with max_size %d.", 10 ** (mask.ndim)) skimage.morphology.remove_small_holes(mask, max_size=10 ** (mask.ndim), connectivity=2, out=mask) except TypeError: # Older versions of skimage use area_threshold instead of max_size skimage.morphology.remove_small_holes(mask, area_threshold=10 ** (mask.ndim), connectivity=2, out=mask) + logger.debug("Applying morphological dilation with radius %d.", radius) skimage.morphology.dilation(mask, skimage.morphology.ball(radius), out=mask) + logger.debug("Applying morphological erosion with radius %d.", erode_dilate_factor * radius) 
skimage.morphology.erosion(mask, skimage.morphology.ball(erode_dilate_factor * radius), out=mask) logger.debug(f"Generated final mask with shape {mask.shape} and {mask.sum()} valid voxels.") return mask diff --git a/src/mritk/masks.py b/src/mritk/masks.py index c7fb53d..b0590a6 100644 --- a/src/mritk/masks.py +++ b/src/mritk/masks.py @@ -4,7 +4,6 @@ # Copyright (C) 2026 Cécile Daversin-Catty (cecile@simula.no) # Copyright (C) 2026 Simula Research Laboratory - import numpy as np import skimage from pathlib import Path diff --git a/src/mritk/mixed.py b/src/mritk/mixed.py index 76e1817..405433a 100644 --- a/src/mritk/mixed.py +++ b/src/mritk/mixed.py @@ -132,7 +132,9 @@ def mixed_t1map( meta = json.loads(meta_path.read_text()) t1_volume = compute_mixed_t1_array(se_mri.data, ir_mri.data, meta, T1_low, T1_high) - + logger.debug( + f"Computed T1 volume with shape {t1_volume.shape} and T1 range ({np.nanmin(t1_volume)}, {np.nanmax(t1_volume)}) ms." + ) nii = nibabel.nifti1.Nifti1Image(t1_volume, ir_mri.affine) nii.set_sform(nii.affine, "scanner") nii.set_qform(nii.affine, "scanner") @@ -168,9 +170,12 @@ def mixed_t1map_postprocessing(SE_nii_path: Path, T1_path: Path, output: Path | t1map_nii = nibabel.nifti1.load(T1_path) se_mri = MRIData.from_file(SE_nii_path, dtype=np.single) + logger.debug("Creating CSF mask from SE image using Li thresholding and morphological erosion.") mask = create_csf_mask(se_mri.data, use_li=True) + logger.debug("Performing morphological erosion on the CSF mask to reduce partial volume effects.") mask = skimage.morphology.erosion(mask) + logger.debug(f"Generated CSF mask with shape {mask.shape} and {mask.sum()} valid voxels.") masked_t1map = t1map_nii.get_fdata(dtype=np.single) masked_t1map[~mask] = np.nan masked_t1map_nii = nibabel.nifti1.Nifti1Image(masked_t1map, t1map_nii.affine, t1map_nii.header) @@ -210,6 +215,7 @@ def compute_mixed_t1_array(se_data: np.ndarray, ir_data: np.ndarray, meta: dict, f"ETL={etl}, T1 range=({t1_low}, {t1_high}), 
table size={len(t1_grid)}" ) interpolator = scipy.interpolate.interp1d(f_curve, t1_grid, kind="nearest", bounds_error=False, fill_value=np.nan) + logger.debug("Created interpolation function for T1 estimation based on the lookup table.") return interpolator(f_data).astype(np.single) From d75dd1ec0849e6e6423a92f4453a49ba3e3fe6b5 Mon Sep 17 00:00:00 2001 From: Henrik Finsberg Date: Mon, 9 Mar 2026 12:11:47 +0100 Subject: [PATCH 26/29] Run isort --- pyproject.toml | 4 ++-- src/mritk/__init__.py | 3 +-- src/mritk/cli.py | 15 ++++++++------- src/mritk/concentration.py | 2 +- src/mritk/data.py | 5 +++-- src/mritk/datasets.py | 15 ++++++++------- src/mritk/hybrid.py | 8 ++++---- src/mritk/info.py | 7 ++++--- src/mritk/looklocker.py | 13 ++++++------- src/mritk/masks.py | 3 ++- src/mritk/mixed.py | 9 ++++----- src/mritk/r1.py | 1 + src/mritk/segmentation/lookup_table.py | 5 +++-- src/mritk/show.py | 4 ++-- src/mritk/statistics/__init__.py | 2 +- src/mritk/statistics/cli.py | 5 ++++- src/mritk/statistics/compute_stats.py | 4 ++-- src/mritk/statistics/utils.py | 3 ++- src/mritk/testing.py | 1 + src/mritk/utils.py | 12 ++++++------ test/conftest.py | 3 ++- test/create_test_data.py | 2 +- test/test_cli.py | 1 + test/test_concentration.py | 7 +++---- test/test_data_orientation.py | 1 + test/test_datasets.py | 3 ++- test/test_looklocker.py | 4 ++-- test/test_masks.py | 4 ++-- test/test_mixed.py | 6 +++--- test/test_mri_io.py | 1 + test/test_mri_orientation.py | 1 + test/test_mri_stats.py | 3 ++- test/test_utils.py | 8 ++++---- 33 files changed, 90 insertions(+), 75 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index 4c5daab..998af4e 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -114,11 +114,11 @@ target-version = "py312" [tool.ruff.lint] # Enable pycodestyle (`E`) and Pyflakes (`F`) codes by default. -select = ["E", "F"] +select = ["E", "F", "I"] ignore = ["E402", "E741", "E743", "E731"] # Allow autofix for all enabled rules (when `--fix`) is provided. 
-fixable = ["A", "B", "C", "D", "E", "F"] +fixable = ["A", "B", "C", "D", "E", "F", "I"] unfixable = [] # Allow unused variables when underscore-prefixed. diff --git a/src/mritk/__init__.py b/src/mritk/__init__.py index e1a82d5..b8e858f 100644 --- a/src/mritk/__init__.py +++ b/src/mritk/__init__.py @@ -5,8 +5,7 @@ from importlib.metadata import metadata -from . import data, segmentation, statistics, concentration, utils, looklocker, mixed, hybrid, r1 - +from . import concentration, data, hybrid, looklocker, mixed, r1, segmentation, statistics, utils meta = metadata("mritk") __version__ = meta["Version"] diff --git a/src/mritk/cli.py b/src/mritk/cli.py index f9cf915..382fa9d 100644 --- a/src/mritk/cli.py +++ b/src/mritk/cli.py @@ -1,24 +1,25 @@ """MRI-toolkit provides a set of features dedicated to MRI data post-processing and analysis.""" +import argparse import logging from importlib.metadata import metadata from pathlib import Path -import argparse -from typing import Sequence, Optional +from typing import Optional, Sequence -from rich_argparse import RichHelpFormatter from rich.logging import RichHandler +from rich_argparse import RichHelpFormatter -from . import datasets, info, statistics, show, napari, looklocker, hybrid, mixed, r1, concentration +from . 
import concentration, datasets, hybrid, info, looklocker, mixed, napari, r1, show, statistics def version_info(): - from rich.console import Console - from rich.table import Table - from rich import box import sys + import nibabel as nib import numpy as np + from rich import box + from rich.console import Console + from rich.table import Table console = Console() diff --git a/src/mritk/concentration.py b/src/mritk/concentration.py index 905c521..cf1960c 100644 --- a/src/mritk/concentration.py +++ b/src/mritk/concentration.py @@ -5,9 +5,9 @@ # Copyright (C) 2026 Simula Research Laboratory import argparse +import logging from collections.abc import Callable from pathlib import Path -import logging import numpy as np diff --git a/src/mritk/data.py b/src/mritk/data.py index 61c0b0b..f1b17db 100644 --- a/src/mritk/data.py +++ b/src/mritk/data.py @@ -5,12 +5,13 @@ # Copyright (C) 2026 Simula Research Laboratory +import re from pathlib import Path +from typing import Optional + import nibabel import numpy as np import numpy.typing as npt -import re -from typing import Optional class MRIData: diff --git a/src/mritk/datasets.py b/src/mritk/datasets.py index 10f8cd8..bc6904d 100644 --- a/src/mritk/datasets.py +++ b/src/mritk/datasets.py @@ -4,14 +4,15 @@ # Copyright (C) 2026 Cécile Daversin-Catty (cecile@simula.no) # Copyright (C) 2026 Simula Research Laboratory -from collections.abc import Callable import argparse import logging -from dataclasses import dataclass -import zipfile -from pathlib import Path import urllib.request +import zipfile +from collections.abc import Callable from concurrent.futures import ThreadPoolExecutor +from dataclasses import dataclass +from pathlib import Path + import tqdm logger = logging.getLogger(__name__) @@ -124,11 +125,11 @@ def __call__(self, block_num, block_size, total_size): def list_datasets_verbose(key: str): + from rich import box from rich.console import Console - from rich.table import Table from rich.panel import Panel + from 
rich.table import Table from rich.text import Text - from rich import box console = Console() datasets = get_datasets() @@ -179,9 +180,9 @@ def list_datasets_verbose(key: str): def list_datasets(): """Prints a simple table with only Key, Name and DOI.""" + from rich import box from rich.console import Console from rich.table import Table - from rich import box console = Console() datasets = get_datasets() diff --git a/src/mritk/hybrid.py b/src/mritk/hybrid.py index b2a5a0c..6aa4cdf 100644 --- a/src/mritk/hybrid.py +++ b/src/mritk/hybrid.py @@ -5,13 +5,13 @@ # Copyright (C) 2026 Simula Research Laboratory import argparse -from collections.abc import Callable import logging -import numpy as np -import skimage -import nibabel +from collections.abc import Callable from pathlib import Path +import nibabel +import numpy as np +import skimage logger = logging.getLogger(__name__) diff --git a/src/mritk/info.py b/src/mritk/info.py index 3052900..b190b44 100644 --- a/src/mritk/info.py +++ b/src/mritk/info.py @@ -1,12 +1,13 @@ import json import typing from pathlib import Path -import numpy as np + import nibabel as nib +import numpy as np +from rich import box from rich.console import Console -from rich.table import Table from rich.panel import Panel -from rich import box +from rich.table import Table def custom_json(obj): diff --git a/src/mritk/looklocker.py b/src/mritk/looklocker.py index 1321046..c799444 100644 --- a/src/mritk/looklocker.py +++ b/src/mritk/looklocker.py @@ -5,21 +5,20 @@ # Copyright (C) 2026 Simula Research Laboratory import argparse -from collections.abc import Callable import logging -import numpy as np -import tempfile import shutil +import tempfile +from collections.abc import Callable from functools import partial -from typing import Optional from pathlib import Path +from typing import Optional -import tqdm +import numpy as np import skimage +import tqdm from .data import MRIData - -from .utils import mri_facemask, fit_voxel, nan_filter_gaussian, 
run_dcm2niix +from .utils import fit_voxel, mri_facemask, nan_filter_gaussian, run_dcm2niix logger = logging.getLogger(__name__) diff --git a/src/mritk/masks.py b/src/mritk/masks.py index b0590a6..ff49d94 100644 --- a/src/mritk/masks.py +++ b/src/mritk/masks.py @@ -4,9 +4,10 @@ # Copyright (C) 2026 Cécile Daversin-Catty (cecile@simula.no) # Copyright (C) 2026 Simula Research Laboratory +from pathlib import Path + import numpy as np import skimage -from pathlib import Path from .data import MRIData from .testing import assert_same_space diff --git a/src/mritk/mixed.py b/src/mritk/mixed.py index 405433a..d9973a2 100644 --- a/src/mritk/mixed.py +++ b/src/mritk/mixed.py @@ -5,21 +5,20 @@ # Copyright (C) 2026 Simula Research Laboratory import argparse -from collections.abc import Callable import json import logging +from collections.abc import Callable from pathlib import Path +import nibabel import numpy as np import scipy import scipy.interpolate import skimage -import nibabel - -from .data import data_reorientation, change_of_coordinates_map, MRIData +from .data import MRIData, change_of_coordinates_map, data_reorientation from .masks import create_csf_mask -from .utils import T1_lookup_table, VOLUME_LABELS, run_dcm2niix +from .utils import VOLUME_LABELS, T1_lookup_table, run_dcm2niix logger = logging.getLogger(__name__) diff --git a/src/mritk/r1.py b/src/mritk/r1.py index 9f8adfa..57d03d9 100644 --- a/src/mritk/r1.py +++ b/src/mritk/r1.py @@ -8,6 +8,7 @@ import logging from collections.abc import Callable from pathlib import Path + import numpy as np from .data import MRIData diff --git a/src/mritk/segmentation/lookup_table.py b/src/mritk/segmentation/lookup_table.py index 9d55348..0b59072 100644 --- a/src/mritk/segmentation/lookup_table.py +++ b/src/mritk/segmentation/lookup_table.py @@ -5,12 +5,13 @@ # Copyright (C) 2026 Simula Research Laboratory -import re import os +import re from pathlib import Path -import pandas as pd from urllib.request import urlretrieve 
+import pandas as pd + def read_lut(filename: Path | str | None) -> pd.DataFrame: if (filename is None) or (not Path(filename).exists()): diff --git a/src/mritk/show.py b/src/mritk/show.py index bf9561a..5c09947 100644 --- a/src/mritk/show.py +++ b/src/mritk/show.py @@ -2,9 +2,9 @@ from pathlib import Path import numpy as np +from rich.columns import Columns from rich.console import Console from rich.panel import Panel -from rich.columns import Columns # Assuming relative imports based on your previous file structure from .data import MRIData @@ -59,8 +59,8 @@ def dispatch(args): in the terminal. """ try: - from textual_image.renderable import Image as TermImage import PIL.Image + from textual_image.renderable import Image as TermImage except ImportError: console = Console() console.print( diff --git a/src/mritk/statistics/__init__.py b/src/mritk/statistics/__init__.py index 2c107e5..667d733 100644 --- a/src/mritk/statistics/__init__.py +++ b/src/mritk/statistics/__init__.py @@ -2,6 +2,6 @@ # Copyright (C) 2026 Cécile Daversin-Catty (cecile@simula.no) # Copyright (C) 2026 Simula Research Laboratory -from . import utils, compute_stats, cli +from . 
import cli, compute_stats, utils __all__ = ["utils", "compute_stats", "cli"] diff --git a/src/mritk/statistics/cli.py b/src/mritk/statistics/cli.py index 1244a3a..cdf26d1 100644 --- a/src/mritk/statistics/cli.py +++ b/src/mritk/statistics/cli.py @@ -1,6 +1,7 @@ import argparse import typing from pathlib import Path + import pandas as pd from ..segmentation.groups import default_segmentation_groups @@ -19,8 +20,9 @@ def compute_mri_stats( info: str | None = None, **kwargs, ): - import sys import json + import sys + from rich.console import Console from rich.panel import Panel @@ -86,6 +88,7 @@ def compute_mri_stats( def get_stats_value(stats_file: Path, region: str, info: str, **kwargs): import sys + from rich.console import Console # Setup Rich diff --git a/src/mritk/statistics/compute_stats.py b/src/mritk/statistics/compute_stats.py index 7fe2226..673496d 100644 --- a/src/mritk/statistics/compute_stats.py +++ b/src/mritk/statistics/compute_stats.py @@ -13,10 +13,10 @@ import tqdm.rich from ..data import MRIData -from ..testing import assert_same_space from ..segmentation.groups import default_segmentation_groups from ..segmentation.lookup_table import read_lut -from .utils import voxel_count_to_ml_scale, find_timestamp, prepend_info +from ..testing import assert_same_space +from .utils import find_timestamp, prepend_info, voxel_count_to_ml_scale def extract_metadata( diff --git a/src/mritk/statistics/utils.py b/src/mritk/statistics/utils.py index d598373..2066c14 100644 --- a/src/mritk/statistics/utils.py +++ b/src/mritk/statistics/utils.py @@ -5,8 +5,9 @@ # Copyright (C) 2026 Simula Research Laboratory -import numpy as np from pathlib import Path + +import numpy as np import pandas as pd diff --git a/src/mritk/testing.py b/src/mritk/testing.py index 1cdca77..9276472 100644 --- a/src/mritk/testing.py +++ b/src/mritk/testing.py @@ -1,4 +1,5 @@ from pathlib import Path + import numpy as np from .data import MRIData diff --git a/src/mritk/utils.py 
b/src/mritk/utils.py index 9a59b34..bd32b8c 100644 --- a/src/mritk/utils.py +++ b/src/mritk/utils.py @@ -4,18 +4,18 @@ # Copyright (C) 2026 Cécile Daversin-Catty (cecile@simula.no) # Copyright (C) 2026 Simula Research Laboratory -from pathlib import Path -import subprocess -import shutil +import logging import shlex +import shutil +import subprocess +import warnings +from pathlib import Path + import numpy as np import scipy import skimage -import warnings -import logging from scipy.optimize import OptimizeWarning - VOLUME_LABELS = [ "IR-modulus", "IR-real", diff --git a/test/conftest.py b/test/conftest.py index dbfe221..26b3f07 100644 --- a/test/conftest.py +++ b/test/conftest.py @@ -1,5 +1,6 @@ -from pathlib import Path import os +from pathlib import Path + import pytest diff --git a/test/create_test_data.py b/test/create_test_data.py index c95db8a..e507e32 100644 --- a/test/create_test_data.py +++ b/test/create_test_data.py @@ -1,5 +1,5 @@ -from pathlib import Path import zipfile +from pathlib import Path def main(): diff --git a/test/test_cli.py b/test/test_cli.py index a5e3484..1d4cbef 100644 --- a/test/test_cli.py +++ b/test/test_cli.py @@ -1,4 +1,5 @@ import json + import mritk import mritk.cli as cli diff --git a/test/test_concentration.py b/test/test_concentration.py index 7ab1e16..29a7435 100644 --- a/test/test_concentration.py +++ b/test/test_concentration.py @@ -10,13 +10,12 @@ import mritk.cli from mritk.concentration import ( - concentration_from_T1_expr, - concentration_from_R1_expr, - compute_concentration_from_T1_array, compute_concentration_from_R1_array, + compute_concentration_from_T1_array, + concentration_from_R1_expr, concentration_from_T1, + concentration_from_T1_expr, ) - from mritk.testing import compare_nifti_images diff --git a/test/test_data_orientation.py b/test/test_data_orientation.py index 87fc78f..92bb370 100644 --- a/test/test_data_orientation.py +++ b/test/test_data_orientation.py @@ -1,5 +1,6 @@ import numpy as np import pytest 
+ import mritk.data diff --git a/test/test_datasets.py b/test/test_datasets.py index e4493c7..ee89632 100644 --- a/test/test_datasets.py +++ b/test/test_datasets.py @@ -1,7 +1,8 @@ -import pytest from pathlib import Path from unittest.mock import MagicMock, patch +import pytest + # Import your module import mritk.cli import mritk.datasets diff --git a/test/test_looklocker.py b/test/test_looklocker.py index 142d9b9..d3d7950 100644 --- a/test/test_looklocker.py +++ b/test/test_looklocker.py @@ -5,13 +5,13 @@ import pytest import mritk.cli -from mritk.testing import compare_nifti_images from mritk.looklocker import ( + create_largest_island_mask, looklocker_t1map, looklocker_t1map_postprocessing, remove_outliers, - create_largest_island_mask, ) +from mritk.testing import compare_nifti_images @pytest.mark.skip(reason="Takes too long") diff --git a/test/test_masks.py b/test/test_masks.py index b30f1be..988de90 100644 --- a/test/test_masks.py +++ b/test/test_masks.py @@ -5,10 +5,10 @@ Copyright (C) 2026 Simula Research Laboratory """ -import numpy as np import nibabel as nib +import numpy as np -from mritk.masks import create_csf_mask, csf_mask, compute_intracranial_mask_array, intracranial_mask, largest_island +from mritk.masks import compute_intracranial_mask_array, create_csf_mask, csf_mask, intracranial_mask, largest_island def test_largest_island(): diff --git a/test/test_mixed.py b/test/test_mixed.py index b1c1467..9b5f546 100644 --- a/test/test_mixed.py +++ b/test/test_mixed.py @@ -1,18 +1,18 @@ +from pathlib import Path from unittest.mock import MagicMock, patch import numpy as np -from pathlib import Path import mritk.cli from mritk.mixed import ( + _extract_frame_metadata, compute_mixed_t1_array, extract_mixed_dicom, mixed_t1map, mixed_t1map_postprocessing, - _extract_frame_metadata, ) -from mritk.utils import VOLUME_LABELS from mritk.testing import compare_nifti_images +from mritk.utils import VOLUME_LABELS def test_mixed_t1map(tmp_path, mri_data_dir: Path): 
diff --git a/test/test_mri_io.py b/test/test_mri_io.py index 2c4f3ea..ce738c2 100644 --- a/test/test_mri_io.py +++ b/test/test_mri_io.py @@ -6,6 +6,7 @@ """ import numpy as np + from mritk.data import MRIData diff --git a/test/test_mri_orientation.py b/test/test_mri_orientation.py index 6d65b29..2ccd621 100644 --- a/test/test_mri_orientation.py +++ b/test/test_mri_orientation.py @@ -6,6 +6,7 @@ """ import numpy as np + from mritk.data import apply_affine, change_of_coordinates_map diff --git a/test/test_mri_stats.py b/test/test_mri_stats.py index 61886ea..722183d 100644 --- a/test/test_mri_stats.py +++ b/test/test_mri_stats.py @@ -1,9 +1,10 @@ from pathlib import Path + import numpy as np import pytest -from mritk.statistics.compute_stats import extract_metadata, compute_region_statistics, generate_stats_dataframe import mritk.cli as cli +from mritk.statistics.compute_stats import compute_region_statistics, extract_metadata, generate_stats_dataframe def test_compute_stats_default(mri_data_dir: Path): diff --git a/test/test_utils.py b/test/test_utils.py index 81b5059..c5c54fc 100644 --- a/test/test_utils.py +++ b/test/test_utils.py @@ -5,17 +5,17 @@ # Copyright (C) 2026 Simula Research Laboratory -from unittest.mock import patch from pathlib import Path +from unittest.mock import patch import numpy as np from mritk.utils import ( - voxel_fit_function, - nan_filter_gaussian, - estimate_se_free_relaxation_time, T1_lookup_table, + estimate_se_free_relaxation_time, + nan_filter_gaussian, run_dcm2niix, + voxel_fit_function, ) From 587e3790aceb66e96bb94899ab4b913c08faa0f3 Mon Sep 17 00:00:00 2001 From: Henrik Finsberg Date: Mon, 9 Mar 2026 12:17:48 +0100 Subject: [PATCH 27/29] More docs --- docs/hybrid.md | 15 ++++++++++++++- 1 file changed, 14 insertions(+), 1 deletion(-) diff --git a/docs/hybrid.md b/docs/hybrid.md index add1bd5..3b24d0d 100644 --- a/docs/hybrid.md +++ b/docs/hybrid.md @@ -40,5 +40,18 @@ graph LR ## Example Command ```shell -mritk hybrid -i 
path/to/ll_t1.nii.gz -m path/to/mixed_t1.nii.gz -c path/to/csf_mask.nii.gz -o path/to/hybrid_t1.nii.gz --threshold 1500.0 +mritk hybrid -l path/to/ll_t1.nii.gz -m path/to/mixed_t1.nii.gz -c path/to/csf_mask.nii.gz -o path/to/hybrid_t1.nii.gz --threshold 1500.0 +``` + + +Gonzo: + +```shell +mritk hybrid \ + -l gonzo/mri-processed/mri_processed_data/sub-01/registered/sub-01_ses-02_acq-looklocker_T1map_registered.nii.gz \ + -m gonzo/mri-processed/mri_processed_data/sub-01/registered/sub-01_ses-02_acq-mixed_T1map_registered.nii.gz \ + -c gonzo/mri-processed/mri_processed_data/sub-01/segmentations/sub-01_seg-csf_binary.nii.gz \ + -o sub-01_ses-02_T1map_hybrid.nii.gz \ + --threshold 1500 \ + --erode 1 ``` From 777453576091ba68dd192e28ecaf900b32c8c5d0 Mon Sep 17 00:00:00 2001 From: Henrik Finsberg Date: Mon, 9 Mar 2026 12:23:29 +0100 Subject: [PATCH 28/29] Refactor stats --- docs/api.rst | 14 +- docs/r1.md | 12 + src/mritk/__init__.py | 14 +- src/mritk/cli.py | 6 +- src/mritk/statistics/__init__.py | 7 - src/mritk/statistics/cli.py | 185 ----------- src/mritk/statistics/compute_stats.py | 211 ------------- src/mritk/statistics/utils.py | 47 --- src/mritk/stats.py | 424 ++++++++++++++++++++++++++ test/test_mri_stats.py | 2 +- 10 files changed, 453 insertions(+), 469 deletions(-) delete mode 100644 src/mritk/statistics/__init__.py delete mode 100644 src/mritk/statistics/cli.py delete mode 100644 src/mritk/statistics/compute_stats.py delete mode 100644 src/mritk/statistics/utils.py create mode 100644 src/mritk/stats.py diff --git a/docs/api.rst b/docs/api.rst index 49f5e85..deb1dee 100644 --- a/docs/api.rst +++ b/docs/api.rst @@ -79,19 +79,7 @@ hybrid statistics ---------- -.. automodule:: mritk.statistics - :members: - :inherited-members: - -.. automodule:: mritk.statistics.utils - :members: - :inherited-members: - -.. automodule:: mritk.statistics.compute_stats - :members: - :inherited-members: - -.. automodule:: mritk.statistics.cli +.. 
automodule:: mritk.stats :members: :inherited-members: diff --git a/docs/r1.md b/docs/r1.md index cccd4c5..04f689b 100644 --- a/docs/r1.md +++ b/docs/r1.md @@ -1,3 +1,15 @@ +--- +jupytext: + formats: md:myst + text_representation: + extension: .md + format_name: myst +kernelspec: + display_name: Python 3 + language: python + name: python3 +--- + # $R_1$ Relaxation Rates The $R_1$ module provides utilities to convert longitudinal relaxation times ($T_1$) into relaxation rates ($R_1$). diff --git a/src/mritk/__init__.py b/src/mritk/__init__.py index b8e858f..0e93ba9 100644 --- a/src/mritk/__init__.py +++ b/src/mritk/__init__.py @@ -5,7 +5,17 @@ from importlib.metadata import metadata -from . import concentration, data, hybrid, looklocker, mixed, r1, segmentation, statistics, utils +from . import ( + concentration, + data, + hybrid, + looklocker, + mixed, + r1, + segmentation, + stats, + utils, +) meta = metadata("mritk") __version__ = meta["Version"] @@ -18,11 +28,11 @@ __all__ = [ "data", "segmentation", - "statistics", "concentration", "utils", "looklocker", "mixed", "hybrid", "r1", + "stats", ] diff --git a/src/mritk/cli.py b/src/mritk/cli.py index 382fa9d..23f28ac 100644 --- a/src/mritk/cli.py +++ b/src/mritk/cli.py @@ -9,7 +9,7 @@ from rich.logging import RichHandler from rich_argparse import RichHelpFormatter -from . import concentration, datasets, hybrid, info, looklocker, mixed, napari, r1, show, statistics +from . 
import concentration, datasets, hybrid, info, looklocker, mixed, napari, r1, show, stats def version_info(): @@ -67,7 +67,7 @@ def setup_parser(): info_parser.add_argument("--json", action="store_true", help="Output information in JSON format") stats_parser = subparsers.add_parser("stats", help="Compute MRI statistics", formatter_class=parser.formatter_class) - statistics.cli.add_arguments(stats_parser) + stats.add_arguments(stats_parser) show_parser = subparsers.add_parser("show", help="Show MRI data in a terminal", formatter_class=parser.formatter_class) show.add_arguments(show_parser) @@ -132,7 +132,7 @@ def dispatch(parser: argparse.ArgumentParser, argv: Optional[Sequence[str]] = No file = args.pop("file") info.nifty_info(file, json_output=args.pop("json")) elif command == "stats": - statistics.cli.dispatch(args) + stats.dispatch(args) elif command == "show": show.dispatch(args) elif command == "napari": diff --git a/src/mritk/statistics/__init__.py b/src/mritk/statistics/__init__.py deleted file mode 100644 index 667d733..0000000 --- a/src/mritk/statistics/__init__.py +++ /dev/null @@ -1,7 +0,0 @@ -# Copyright (C) 2026 Jørgen Riseth (jnriseth@gmail.com) -# Copyright (C) 2026 Cécile Daversin-Catty (cecile@simula.no) -# Copyright (C) 2026 Simula Research Laboratory - -from . 
import cli, compute_stats, utils - -__all__ = ["utils", "compute_stats", "cli"] diff --git a/src/mritk/statistics/cli.py b/src/mritk/statistics/cli.py deleted file mode 100644 index cdf26d1..0000000 --- a/src/mritk/statistics/cli.py +++ /dev/null @@ -1,185 +0,0 @@ -import argparse -import typing -from pathlib import Path - -import pandas as pd - -from ..segmentation.groups import default_segmentation_groups -from .compute_stats import generate_stats_dataframe - - -def compute_mri_stats( - segmentation: Path, - mri: list[Path], - output: Path, - timetable: Path | None = None, - timelabel: str | None = None, - seg_regex: str | None = None, - mri_regex: str | None = None, - lut: Path | None = None, - info: str | None = None, - **kwargs, -): - import json - import sys - - from rich.console import Console - from rich.panel import Panel - - # Setup Rich - console = Console() - - # Parse info dict from JSON string if provided - info_dict = None - if info: - try: - info_dict = json.loads(info) - except json.JSONDecodeError: - console.print("[bold red]Error:[/bold red] --info must be a valid JSON string.") - sys.exit(1) - - if not segmentation.exists(): - console.print(f"[bold red]Error:[/bold red] Missing segmentation file: {segmentation}") - sys.exit(1) - - # Validate all MRI paths before starting - for path in mri: - if not path.exists(): - console.print(f"[bold red]Error:[/bold red] Missing MRI file: {path}") - sys.exit(1) - - dataframes = [] - - # Loop through MRI paths - console.print("[bold green]Processing MRIs...[/bold green]") - for i, path in enumerate(mri): - # console.print(f"[blue]Processing MRI {i + 1}/{len(mri)}:[/blue] {path.name}") - - try: - # Call the logic function - df = generate_stats_dataframe( - seg_path=segmentation, - mri_path=path, - timestamp_path=timetable, - timestamp_sequence=timelabel, - seg_pattern=seg_regex, - mri_data_pattern=mri_regex, - lut_path=lut, - info_dict=info_dict, - ) - dataframes.append(df) - except Exception as e: - 
console.print(f"[bold red]Failed to process {path.name}:[/bold red] {e}") - sys.exit(1) - - if dataframes: - final_df = pd.concat(dataframes) - final_df.to_csv(output, sep=";", index=False) - console.print( - Panel( - f"Stats successfully saved to:\n[bold green]{output}[/bold green]", - title="Success", - expand=False, - ) - ) - else: - console.print("[yellow]No dataframes generated.[/yellow]") - - -def get_stats_value(stats_file: Path, region: str, info: str, **kwargs): - import sys - - from rich.console import Console - - # Setup Rich - console = Console() - - # Validate inputs - valid_regions = default_segmentation_groups().keys() - if region not in valid_regions: - console.print(f"[bold red]Error:[/bold red] Region '{region}' not found in default segmentation groups.") - sys.exit(1) - - valid_infos = [ - "sum", - "mean", - "median", - "std", - "min", - "max", - "PC1", - "PC5", - "PC25", - "PC75", - "PC90", - "PC95", - "PC99", - ] - if info not in valid_infos: - console.print(f"[bold red]Error:[/bold red] Info '{info}' is invalid. 
Choose from: {', '.join(valid_infos)}") - sys.exit(1) - - if not stats_file.exists(): - console.print(f"[bold red]Error:[/bold red] Stats file not found: {stats_file}") - sys.exit(1) - - # Process - try: - df = pd.read_csv(stats_file, sep=";") - region_row = df.loc[df["description"] == region] - - if region_row.empty: - console.print(f"[red]Region '{region}' not found in the stats file.[/red]") - sys.exit(1) - - info_value = region_row[info].values[0] - - # Output - console.print( - f"[bold cyan]{info}[/bold cyan] for [bold green]{region}[/bold green] = [bold white]{info_value}[/bold white]" - ) - return info_value - - except Exception as e: - console.print(f"[bold red]Error reading stats file:[/bold red] {e}") - sys.exit(1) - - -def add_arguments(parser: argparse.ArgumentParser): - subparsers = parser.add_subparsers(dest="stats-command", help="Available commands") - - # --- Compute Command --- - parser_compute = subparsers.add_parser("compute", help="Compute MRI statistics", formatter_class=parser.formatter_class) - parser_compute.add_argument("--segmentation", "-s", type=Path, required=True, help="Path to segmentation file") - parser_compute.add_argument("--mri", "-m", type=Path, nargs="+", required=True, help="Path to MRI data file(s)") - parser_compute.add_argument("--output", "-o", type=Path, required=True, help="Output CSV file path") - parser_compute.add_argument("--timetable", "-t", type=Path, help="Path to timetable file") - parser_compute.add_argument("--timelabel", "-l", dest="timelabel", type=str, help="Time label sequence") - parser_compute.add_argument( - "--seg_regex", - "-sr", - dest="seg_regex", - type=str, - help="Regex pattern for segmentation filename", - ) - parser_compute.add_argument("--mri_regex", "-mr", dest="mri_regex", type=str, help="Regex pattern for MRI filename") - parser_compute.add_argument("--lut", "-lt", dest="lut", type=Path, help="Path to Lookup Table") - parser_compute.add_argument("--info", "-i", type=str, help="Info 
dictionary as JSON string") - parser_compute.set_defaults(func=compute_mri_stats) - - # --- Get Command --- - parser_get = subparsers.add_parser("get", help="Get specific stats value", formatter_class=parser.formatter_class) - parser_get.add_argument("--stats_file", "-f", type=Path, required=True, help="Path to stats CSV file") - parser_get.add_argument("--region", "-r", type=str, required=True, help="Region description") - parser_get.add_argument("--info", "-i", type=str, required=True, help="Statistic to retrieve (mean, std, etc.)") - parser_get.set_defaults(func=get_stats_value) - - -def dispatch(args: dict[str, typing.Any]): - command = args.pop("stats-command") - if command == "compute": - compute_mri_stats(**args) - elif command == "get": - get_stats_value(**args) - else: - raise ValueError(f"Unknown command: {command}") diff --git a/src/mritk/statistics/compute_stats.py b/src/mritk/statistics/compute_stats.py deleted file mode 100644 index 673496d..0000000 --- a/src/mritk/statistics/compute_stats.py +++ /dev/null @@ -1,211 +0,0 @@ -# MRI Statistics Module - -# Copyright (C) 2026 Jørgen Riseth (jnriseth@gmail.com) -# Copyright (C) 2026 Cécile Daversin-Catty (cecile@simula.no) -# Copyright (C) 2026 Simula Research Laboratory - -import re -from pathlib import Path -from typing import Optional - -import numpy as np -import pandas as pd -import tqdm.rich - -from ..data import MRIData -from ..segmentation.groups import default_segmentation_groups -from ..segmentation.lookup_table import read_lut -from ..testing import assert_same_space -from .utils import find_timestamp, prepend_info, voxel_count_to_ml_scale - - -def extract_metadata( - file_path: Path, - pattern: str | None = None, - info_dict: dict[str, str] | None = None, - required_keys: list[str] | None = None, -) -> dict: - """ - Extracts metadata from a filename using a regex pattern, falling back to a dictionary. - - Args: - file_path (Path): The path to the file. 
- pattern (str, optional): Regex pattern with named capture groups. - info_dict (dict, optional): Fallback dictionary if pattern is not provided. - required_keys (list[str], optional): Keys to initialize with None if neither match. - - Returns: - dict: A dictionary of the extracted metadata. - - Raises: - RuntimeError: If a pattern is provided but the filename does not match. - """ - if pattern is not None: - if (m := re.match(rf"{pattern}", file_path.name)) is not None: - return m.groupdict() - else: - raise RuntimeError(f"Filename {file_path.name} does not match the provided pattern.") - - required_keys = required_keys or [] - if info_dict is not None: - return {k: info_dict.get(k) for k in required_keys} - - return {k: None for k in required_keys} - - -def get_regions_dictionary(seg_data: np.ndarray, lut_path: Optional[Path] = None) -> dict[str, list[int]]: - """ - Builds a dictionary mapping region descriptions to their corresponding segmentation labels. - - Args: - seg_data (np.ndarray): The segmentation array. - lut_path (Path, optional): Path to the FreeSurfer Color Look-Up Table. - - Returns: - dict[str, list[int]]: Mapping of region names to a list of label integers. - """ - lut = read_lut(lut_path) - seg_labels = np.unique(seg_data[seg_data != 0]) - - lut_regions = lut.loc[lut.label.isin(seg_labels), ["label", "description"]].to_dict("records") - - regions = { - **{d["description"]: sorted([d["label"]]) for d in lut_regions}, - **default_segmentation_groups(), - } - return regions - - -def compute_region_statistics( - region_data: np.ndarray, - labels: list[int], - description: str, - volscale: float, - voxelcount: int, -) -> dict: - """ - Computes statistical metrics (mean, std, percentiles, etc.) for a specific masked region. - - Args: - region_data (np.ndarray): The raw MRI data values mapped to this region (includes NaNs). - labels (list[int]): The segmentation label indices representing this region. 
- description (str): Human-readable name of the region. - volscale (float): Multiplier to convert voxel counts to milliliters. - voxelcount (int): Total number of voxels in the region. - - Returns: - dict: A dictionary containing the computed statistics. - """ - record = { - "label": ",".join([str(x) for x in labels]), - "description": description, - "voxelcount": voxelcount, - "volume_ml": volscale * voxelcount, - } - - if voxelcount == 0: - return record - - num_nan = int((~np.isfinite(region_data)).sum()) - record["num_nan_values"] = num_nan - - if num_nan == voxelcount: - return record - - # Filter out NaNs for the mathematical stats - valid_data = region_data[np.isfinite(region_data)] - - stats = { - "sum": float(np.sum(valid_data)), - "mean": float(np.mean(valid_data)), - "median": float(np.median(valid_data)), - "std": float(np.std(valid_data)), - "min": float(np.min(valid_data)), - **{f"PC{pc}": float(np.quantile(valid_data, pc / 100)) for pc in [1, 5, 25, 75, 90, 95, 99]}, - "max": float(np.max(valid_data)), - } - - return {**record, **stats} - - -def generate_stats_dataframe( - seg_path: Path, - mri_path: Path, - timestamp_path: str | Path | None = None, - timestamp_sequence: str | Path | None = None, - seg_pattern: str | None = None, - mri_data_pattern: str | None = None, - lut_path: Path | None = None, - info_dict: dict | None = None, -) -> pd.DataFrame: - """ - Generates a Pandas DataFrame containing descriptive statistics of MRI data grouped by segmentation regions. - - Args: - seg_path (Path): Path to the segmentation NIfTI file. - mri_path (Path): Path to the underlying MRI data NIfTI file. - timestamp_path (str | Path, optional): Path to the timetable TSV file. - timestamp_sequence (str | Path, optional): Sequence label to query in the timetable. - seg_pattern (str, optional): Regex to extract metadata from the seg_path filename. - mri_data_pattern (str, optional): Regex to extract metadata from the mri_path filename. 
- lut_path (Path, optional): Path to the look-up table. - info_dict (dict, optional): Fallback dictionary for metadata. - - Returns: - pd.DataFrame: A formatted DataFrame with statistics for all identified regions. - """ - # Load and validate the data - mri = MRIData.from_file(mri_path, dtype=np.single) - seg = MRIData.from_file(seg_path, dtype=np.int16) - assert_same_space(seg, mri) - - # Resolve metadata - seg_info = extract_metadata(seg_path, seg_pattern, info_dict, ["segmentation", "subject"]) - mri_info = extract_metadata(mri_path, mri_data_pattern, info_dict, ["mri_data", "subject", "session"]) - info = seg_info | mri_info - - # Resolve timestamps - info["timestamp"] = None - if timestamp_path is not None: - try: - info["timestamp"] = find_timestamp( - Path(str(timestamp_path)), - str(timestamp_sequence), - str(info.get("subject")), - str(info.get("session")), - ) - except (ValueError, RuntimeError, KeyError): - pass - - regions = get_regions_dictionary(seg.data, lut_path) - volscale = voxel_count_to_ml_scale(seg.affine) - records = [] - - # Iterate over regions and compute stats - for description, labels in tqdm.rich.tqdm(regions.items(), total=len(regions)): - region_mask = np.isin(seg.data, labels) - voxelcount = region_mask.sum() - - # Extract raw data for this region (including NaNs) - region_data = mri.data[region_mask] - - record = compute_region_statistics( - region_data=region_data, - labels=labels, - description=description, - volscale=volscale, - voxelcount=voxelcount, - ) - records.append(record) - - # Format output - dframe = pd.DataFrame.from_records(records) - dframe = prepend_info( - dframe, - segmentation=info.get("segmentation"), - mri_data=info.get("mri_data"), - subject=info.get("subject"), - session=info.get("session"), - timestamp=info.get("timestamp"), - ) - return dframe diff --git a/src/mritk/statistics/utils.py b/src/mritk/statistics/utils.py deleted file mode 100644 index 2066c14..0000000 --- a/src/mritk/statistics/utils.py +++ 
/dev/null @@ -1,47 +0,0 @@ -# MRI Statistics - Utils - -# Copyright (C) 2026 Jørgen Riseth (jnriseth@gmail.com) -# Copyright (C) 2026 Cécile Daversin-Catty (cecile@simula.no) -# Copyright (C) 2026 Simula Research Laboratory - - -from pathlib import Path - -import numpy as np -import pandas as pd - - -def voxel_count_to_ml_scale(affine: np.ndarray): - return 1e-3 * np.linalg.det(affine[:3, :3]) - - -def find_timestamp( - timetable_path: Path, - timestamp_sequence: str, - subject: str, - session: str, -) -> float: - """Find single session timestamp""" - try: - timetable = pd.read_csv(timetable_path, sep="\t") - except pd.errors.EmptyDataError: - raise RuntimeError(f"Timetable-file {timetable_path} is empty.") - try: - timestamp = timetable.loc[ - (timetable["sequence_label"].str.lower() == timestamp_sequence) - & (timetable["subject"] == subject) - & (timetable["session"] == session) - ]["acquisition_relative_injection"] - except ValueError as e: - print(timetable) - print(timestamp_sequence, subject) - raise e - return timestamp.item() - - -def prepend_info(df, **kwargs): - nargs = len(kwargs) - for key, val in kwargs.items(): - assert key not in df.columns, f"Column {key} already exist in df." 
- df[key] = val - return df[[*df.columns[-nargs:], *df.columns[:-nargs]]] diff --git a/src/mritk/stats.py b/src/mritk/stats.py new file mode 100644 index 0000000..4c25ec1 --- /dev/null +++ b/src/mritk/stats.py @@ -0,0 +1,424 @@ +# MRI Statistics Module + +# Copyright (C) 2026 Jørgen Riseth (jnriseth@gmail.com) +# Copyright (C) 2026 Cécile Daversin-Catty (cecile@simula.no) +# Copyright (C) 2026 Simula Research Laboratory + +import argparse +import re +import typing +from pathlib import Path + +import numpy as np +import pandas as pd +import tqdm.rich + +from .data import MRIData +from .segmentation.groups import default_segmentation_groups +from .segmentation.lookup_table import read_lut +from .testing import assert_same_space + + +def voxel_count_to_ml_scale(affine: np.ndarray): + return 1e-3 * np.linalg.det(affine[:3, :3]) + + +def find_timestamp( + timetable_path: Path, + timestamp_sequence: str, + subject: str, + session: str, +) -> float: + """Find single session timestamp""" + try: + timetable = pd.read_csv(timetable_path, sep="\t") + except pd.errors.EmptyDataError: + raise RuntimeError(f"Timetable-file {timetable_path} is empty.") + try: + timestamp = timetable.loc[ + (timetable["sequence_label"].str.lower() == timestamp_sequence) + & (timetable["subject"] == subject) + & (timetable["session"] == session) + ]["acquisition_relative_injection"] + except ValueError as e: + print(timetable) + print(timestamp_sequence, subject) + raise e + return timestamp.item() + + +def prepend_info(df, **kwargs): + nargs = len(kwargs) + for key, val in kwargs.items(): + assert key not in df.columns, f"Column {key} already exist in df." + df[key] = val + return df[[*df.columns[-nargs:], *df.columns[:-nargs]]] + + +def extract_metadata( + file_path: Path, + pattern: str | None = None, + info_dict: dict[str, str] | None = None, + required_keys: list[str] | None = None, +) -> dict: + """ + Extracts metadata from a filename using a regex pattern, falling back to a dictionary. 
+ + Args: + file_path (Path): The path to the file. + pattern (str, optional): Regex pattern with named capture groups. + info_dict (dict, optional): Fallback dictionary if pattern is not provided. + required_keys (list[str], optional): Keys to initialize with None if neither match. + + Returns: + dict: A dictionary of the extracted metadata. + + Raises: + RuntimeError: If a pattern is provided but the filename does not match. + """ + if pattern is not None: + if (m := re.match(rf"{pattern}", file_path.name)) is not None: + return m.groupdict() + else: + raise RuntimeError(f"Filename {file_path.name} does not match the provided pattern.") + + required_keys = required_keys or [] + if info_dict is not None: + return {k: info_dict.get(k) for k in required_keys} + + return {k: None for k in required_keys} + + +def get_regions_dictionary(seg_data: np.ndarray, lut_path: Path | None = None) -> dict[str, list[int]]: + """ + Builds a dictionary mapping region descriptions to their corresponding segmentation labels. + + Args: + seg_data (np.ndarray): The segmentation array. + lut_path (Path, optional): Path to the FreeSurfer Color Look-Up Table. + + Returns: + dict[str, list[int]]: Mapping of region names to a list of label integers. + """ + lut = read_lut(lut_path) + seg_labels = np.unique(seg_data[seg_data != 0]) + + lut_regions = lut.loc[lut.label.isin(seg_labels), ["label", "description"]].to_dict("records") + + regions = { + **{d["description"]: sorted([d["label"]]) for d in lut_regions}, + **default_segmentation_groups(), + } + return regions + + +def compute_region_statistics( + region_data: np.ndarray, + labels: list[int], + description: str, + volscale: float, + voxelcount: int, +) -> dict: + """ + Computes statistical metrics (mean, std, percentiles, etc.) for a specific masked region. + + Args: + region_data (np.ndarray): The raw MRI data values mapped to this region (includes NaNs). + labels (list[int]): The segmentation label indices representing this region. 
+ description (str): Human-readable name of the region. + volscale (float): Multiplier to convert voxel counts to milliliters. + voxelcount (int): Total number of voxels in the region. + + Returns: + dict: A dictionary containing the computed statistics. + """ + record = { + "label": ",".join([str(x) for x in labels]), + "description": description, + "voxelcount": voxelcount, + "volume_ml": volscale * voxelcount, + } + + if voxelcount == 0: + return record + + num_nan = int((~np.isfinite(region_data)).sum()) + record["num_nan_values"] = num_nan + + if num_nan == voxelcount: + return record + + # Filter out NaNs for the mathematical stats + valid_data = region_data[np.isfinite(region_data)] + + stats = { + "sum": float(np.sum(valid_data)), + "mean": float(np.mean(valid_data)), + "median": float(np.median(valid_data)), + "std": float(np.std(valid_data)), + "min": float(np.min(valid_data)), + **{f"PC{pc}": float(np.quantile(valid_data, pc / 100)) for pc in [1, 5, 25, 75, 90, 95, 99]}, + "max": float(np.max(valid_data)), + } + + return {**record, **stats} + + +def generate_stats_dataframe( + seg_path: Path, + mri_path: Path, + timestamp_path: str | Path | None = None, + timestamp_sequence: str | Path | None = None, + seg_pattern: str | None = None, + mri_data_pattern: str | None = None, + lut_path: Path | None = None, + info_dict: dict | None = None, +) -> pd.DataFrame: + """ + Generates a Pandas DataFrame containing descriptive statistics of MRI data grouped by segmentation regions. + + Args: + seg_path (Path): Path to the segmentation NIfTI file. + mri_path (Path): Path to the underlying MRI data NIfTI file. + timestamp_path (str | Path, optional): Path to the timetable TSV file. + timestamp_sequence (str | Path, optional): Sequence label to query in the timetable. + seg_pattern (str, optional): Regex to extract metadata from the seg_path filename. + mri_data_pattern (str, optional): Regex to extract metadata from the mri_path filename. 
+ lut_path (Path, optional): Path to the look-up table. + info_dict (dict, optional): Fallback dictionary for metadata. + + Returns: + pd.DataFrame: A formatted DataFrame with statistics for all identified regions. + """ + # Load and validate the data + mri = MRIData.from_file(mri_path, dtype=np.single) + seg = MRIData.from_file(seg_path, dtype=np.int16) + assert_same_space(seg, mri) + + # Resolve metadata + seg_info = extract_metadata(seg_path, seg_pattern, info_dict, ["segmentation", "subject"]) + mri_info = extract_metadata(mri_path, mri_data_pattern, info_dict, ["mri_data", "subject", "session"]) + info = seg_info | mri_info + + # Resolve timestamps + info["timestamp"] = None + if timestamp_path is not None: + try: + info["timestamp"] = find_timestamp( + Path(str(timestamp_path)), + str(timestamp_sequence), + str(info.get("subject")), + str(info.get("session")), + ) + except (ValueError, RuntimeError, KeyError): + pass + + regions = get_regions_dictionary(seg.data, lut_path) + volscale = voxel_count_to_ml_scale(seg.affine) + records = [] + + # Iterate over regions and compute stats + for description, labels in tqdm.rich.tqdm(regions.items(), total=len(regions)): + region_mask = np.isin(seg.data, labels) + voxelcount = region_mask.sum() + + # Extract raw data for this region (including NaNs) + region_data = mri.data[region_mask] + + record = compute_region_statistics( + region_data=region_data, + labels=labels, + description=description, + volscale=volscale, + voxelcount=voxelcount, + ) + records.append(record) + + # Format output + dframe = pd.DataFrame.from_records(records) + dframe = prepend_info( + dframe, + segmentation=info.get("segmentation"), + mri_data=info.get("mri_data"), + subject=info.get("subject"), + session=info.get("session"), + timestamp=info.get("timestamp"), + ) + return dframe + + +def compute_mri_stats( + segmentation: Path, + mri: list[Path], + output: Path, + timetable: Path | None = None, + timelabel: str | None = None, + seg_regex: str 
| None = None, + mri_regex: str | None = None, + lut: Path | None = None, + info: str | None = None, + **kwargs, +): + import json + import sys + + from rich.console import Console + from rich.panel import Panel + + # Setup Rich + console = Console() + + # Parse info dict from JSON string if provided + info_dict = None + if info: + try: + info_dict = json.loads(info) + except json.JSONDecodeError: + console.print("[bold red]Error:[/bold red] --info must be a valid JSON string.") + sys.exit(1) + + if not segmentation.exists(): + console.print(f"[bold red]Error:[/bold red] Missing segmentation file: {segmentation}") + sys.exit(1) + + # Validate all MRI paths before starting + for path in mri: + if not path.exists(): + console.print(f"[bold red]Error:[/bold red] Missing MRI file: {path}") + sys.exit(1) + + dataframes = [] + + # Loop through MRI paths + console.print("[bold green]Processing MRIs...[/bold green]") + for i, path in enumerate(mri): + # console.print(f"[blue]Processing MRI {i + 1}/{len(mri)}:[/blue] {path.name}") + + try: + # Call the logic function + df = generate_stats_dataframe( + seg_path=segmentation, + mri_path=path, + timestamp_path=timetable, + timestamp_sequence=timelabel, + seg_pattern=seg_regex, + mri_data_pattern=mri_regex, + lut_path=lut, + info_dict=info_dict, + ) + dataframes.append(df) + except Exception as e: + console.print(f"[bold red]Failed to process {path.name}:[/bold red] {e}") + sys.exit(1) + + if dataframes: + final_df = pd.concat(dataframes) + final_df.to_csv(output, sep=";", index=False) + console.print( + Panel( + f"Stats successfully saved to:\n[bold green]{output}[/bold green]", + title="Success", + expand=False, + ) + ) + else: + console.print("[yellow]No dataframes generated.[/yellow]") + + +def get_stats_value(stats_file: Path, region: str, info: str, **kwargs): + import sys + + from rich.console import Console + + # Setup Rich + console = Console() + + # Validate inputs + valid_regions = 
default_segmentation_groups().keys() + if region not in valid_regions: + console.print(f"[bold red]Error:[/bold red] Region '{region}' not found in default segmentation groups.") + sys.exit(1) + + valid_infos = [ + "sum", + "mean", + "median", + "std", + "min", + "max", + "PC1", + "PC5", + "PC25", + "PC75", + "PC90", + "PC95", + "PC99", + ] + if info not in valid_infos: + console.print(f"[bold red]Error:[/bold red] Info '{info}' is invalid. Choose from: {', '.join(valid_infos)}") + sys.exit(1) + + if not stats_file.exists(): + console.print(f"[bold red]Error:[/bold red] Stats file not found: {stats_file}") + sys.exit(1) + + # Process + try: + df = pd.read_csv(stats_file, sep=";") + region_row = df.loc[df["description"] == region] + + if region_row.empty: + console.print(f"[red]Region '{region}' not found in the stats file.[/red]") + sys.exit(1) + + info_value = region_row[info].values[0] + + # Output + console.print( + f"[bold cyan]{info}[/bold cyan] for [bold green]{region}[/bold green] = [bold white]{info_value}[/bold white]" + ) + return info_value + + except Exception as e: + console.print(f"[bold red]Error reading stats file:[/bold red] {e}") + sys.exit(1) + + +def add_arguments(parser: argparse.ArgumentParser): + subparsers = parser.add_subparsers(dest="stats-command", help="Available commands") + + # --- Compute Command --- + parser_compute = subparsers.add_parser("compute", help="Compute MRI statistics", formatter_class=parser.formatter_class) + parser_compute.add_argument("--segmentation", "-s", type=Path, required=True, help="Path to segmentation file") + parser_compute.add_argument("--mri", "-m", type=Path, nargs="+", required=True, help="Path to MRI data file(s)") + parser_compute.add_argument("--output", "-o", type=Path, required=True, help="Output CSV file path") + parser_compute.add_argument("--timetable", "-t", type=Path, help="Path to timetable file") + parser_compute.add_argument("--timelabel", "-l", dest="timelabel", type=str, help="Time label 
sequence") + parser_compute.add_argument( + "--seg_regex", + "-sr", + dest="seg_regex", + type=str, + help="Regex pattern for segmentation filename", + ) + parser_compute.add_argument("--mri_regex", "-mr", dest="mri_regex", type=str, help="Regex pattern for MRI filename") + parser_compute.add_argument("--lut", "-lt", dest="lut", type=Path, help="Path to Lookup Table") + parser_compute.add_argument("--info", "-i", type=str, help="Info dictionary as JSON string") + parser_compute.set_defaults(func=compute_mri_stats) + + # --- Get Command --- + parser_get = subparsers.add_parser("get", help="Get specific stats value", formatter_class=parser.formatter_class) + parser_get.add_argument("--stats_file", "-f", type=Path, required=True, help="Path to stats CSV file") + parser_get.add_argument("--region", "-r", type=str, required=True, help="Region description") + parser_get.add_argument("--info", "-i", type=str, required=True, help="Statistic to retrieve (mean, std, etc.)") + parser_get.set_defaults(func=get_stats_value) + + +def dispatch(args: dict[str, typing.Any]): + command = args.pop("stats-command") + if command == "compute": + compute_mri_stats(**args) + elif command == "get": + get_stats_value(**args) + else: + raise ValueError(f"Unknown command: {command}") diff --git a/test/test_mri_stats.py b/test/test_mri_stats.py index 722183d..5e14050 100644 --- a/test/test_mri_stats.py +++ b/test/test_mri_stats.py @@ -4,7 +4,7 @@ import pytest import mritk.cli as cli -from mritk.statistics.compute_stats import compute_region_statistics, extract_metadata, generate_stats_dataframe +from mritk.stats import compute_region_statistics, extract_metadata, generate_stats_dataframe def test_compute_stats_default(mri_data_dir: Path): From e5d57dc078ee2690156d0b31ddefc32d3bdcdc5b Mon Sep 17 00:00:00 2001 From: Henrik Finsberg Date: Mon, 9 Mar 2026 12:56:06 +0100 Subject: [PATCH 29/29] Refactor segmentation --- src/mritk/segmentation.py | 235 +++++++++++++++++++++++++ 
src/mritk/segmentation/__init__.py | 8 - src/mritk/segmentation/groups.py | 66 ------- src/mritk/segmentation/lookup_table.py | 52 ------ src/mritk/stats.py | 3 +- test/test_segmentation.py | 146 +++++++++++++++ 6 files changed, 382 insertions(+), 128 deletions(-) create mode 100644 src/mritk/segmentation.py delete mode 100644 src/mritk/segmentation/__init__.py delete mode 100644 src/mritk/segmentation/groups.py delete mode 100644 src/mritk/segmentation/lookup_table.py create mode 100644 test/test_segmentation.py diff --git a/src/mritk/segmentation.py b/src/mritk/segmentation.py new file mode 100644 index 0000000..03efb17 --- /dev/null +++ b/src/mritk/segmentation.py @@ -0,0 +1,235 @@ +# MRI Segmentation - Lookup Table (LUT) Module + +# Copyright (C) 2026 Jørgen Riseth (jnriseth@gmail.com) +# Copyright (C) 2026 Cécile Daversin-Catty (cecile@simula.no) +# Copyright (C) 2026 Simula Research Laboratory + +import logging +import os +import re +from pathlib import Path +from urllib.request import urlretrieve + +import pandas as pd + +logger = logging.getLogger(__name__) + + +# Regex to match a standard FreeSurfer Color LUT record line +LUT_REGEX = re.compile(r"^(?P