diff --git a/src/murfey/server/api/bootstrap.py b/src/murfey/server/api/bootstrap.py index 067e59b0a..5a9661b34 100644 --- a/src/murfey/server/api/bootstrap.py +++ b/src/murfey/server/api/bootstrap.py @@ -539,7 +539,7 @@ def get_msys2_package_file( # Validate environment if any(system in env[0] and environment in env[1] for env in valid_envs) is False: - raise ValueError(f"{system!r}/{environment!r} is not a valid msys2 environment") + raise ValueError(f"'{system}/{environment}' is not a valid msys2 environment") # Validate package name # MSYS2 package names contain: @@ -593,7 +593,7 @@ def _get_full_pypi_path_response(package: str) -> requests.Response: else: raise HTTPException(status_code=response.status_code) else: - raise ValueError(f"{package} is not a valid package name") + raise ValueError(f"{package!r} is not a valid package name") @pypi.get("/", response_class=Response) diff --git a/src/murfey/server/api/clem.py b/src/murfey/server/api/clem.py index 606e18496..b4e64327d 100644 --- a/src/murfey/server/api/clem.py +++ b/src/murfey/server/api/clem.py @@ -16,6 +16,7 @@ from murfey.server import _transport_object from murfey.server.murfey_db import murfey_db +from murfey.util import sanitise from murfey.util.config import get_machine_config from murfey.util.db import ( CLEMImageMetadata, @@ -77,7 +78,7 @@ def validate_and_sanitise( machine_config = get_machine_config(instrument_name=instrument_name)[ instrument_name ] - base_path = machine_config.rsync_basepath.as_posix() + rsync_basepath = machine_config.rsync_basepath.resolve() # Check that full file path doesn't contain unallowed characters # Currently allows only: @@ -90,13 +91,9 @@ def validate_and_sanitise( raise ValueError(f"Unallowed characters present in {file}") # Check that it's not accessing somehwere it's not allowed - if not str(full_path).startswith(str(base_path)): + if not str(full_path).startswith(str(rsync_basepath)): raise ValueError(f"{file} points to a directory that is not permitted") - 
# Check that it's a file, not a directory - if full_path.is_file() is False: - raise ValueError(f"{file} is not a file") - # Check that it is of a permitted file type if f"{full_path.suffix}" not in valid_file_types: raise ValueError(f"{full_path.suffix} is not a permitted file format") @@ -184,7 +181,7 @@ def get_db_entry( ) db.add(db_entry) db.commit() - db.refresh(db_entry) + except Exception: raise Exception @@ -215,7 +212,11 @@ def register_lif_file( file_path=lif_file, ) except Exception: - logger.error(traceback.format_exc()) + logger.error( + "Exception encountered while registering " + f"LIF file {sanitise(str(lif_file))!r}: \n" + f"{traceback.format_exc()}" + ) return False # Add metadata information if provided @@ -224,7 +225,11 @@ def register_lif_file( master_metadata = validate_and_sanitise(master_metadata, session_id, db) clem_lif_file.master_metadata = str(master_metadata) except Exception: - logger.warning(traceback.format_exc()) + logger.warning( + "Unable to add master metadata information to database entry for " + f"LIF file {sanitise(str(lif_file))!r}: \n" + f"{traceback.format_exc()}" + ) # Register child metadata if provided for metadata in child_metadata: @@ -238,7 +243,12 @@ def register_lif_file( # Append to database entry clem_lif_file.child_metadata.append(metadata_db_entry) except Exception: - logger.warning(traceback.format_exc()) + logger.warning( + "Unable to register " + f"metadata file {sanitise(str(metadata))!r} in association with " + f"LIF file {sanitise(str(lif_file))!r}: \n" + f"{traceback.format_exc()}" + ) continue # Register child image series if provided @@ -253,7 +263,12 @@ def register_lif_file( # Append to database entry clem_lif_file.child_series.append(series_db_entry) except Exception: - logger.warning(traceback.format_exc()) + logger.warning( + "Unable to register " + f"CLEM series {sanitise(series)!r} in association with " + f"LIF file {sanitise(str(lif_file))!r}: \n" + f"{traceback.format_exc()}" + ) continue # 
Register child image stacks if provided @@ -268,7 +283,12 @@ def register_lif_file( # Append to database entry clem_lif_file.child_stacks.append(stack_db_entry) except Exception: - logger.warning(traceback.format_exc()) + logger.warning( + "Unable to register " + f"image stack {sanitise(str(stack))!r} in association with " + f"LIF file {sanitise(str(lif_file))!r}: \n" + f"{traceback.format_exc()}" + ) continue # Commit to database @@ -296,7 +316,11 @@ def register_tiff_file( file_path=tiff_file, ) except Exception: - logger.error(traceback.format_exc()) + logger.error( + "Exception encountered while registering " + f"TIFF file {sanitise(str(tiff_file))!r}: \n" + f"{traceback.format_exc()}" + ) return False # Add metadata if provided @@ -311,7 +335,12 @@ def register_tiff_file( # Link database entries clem_tiff_file.associated_metadata = metadata_db_entry except Exception: - logger.warning(traceback.format_exc()) + logger.warning( + "Unable to register " + f"metadata file {sanitise(str(associated_metadata))!r} in association with " + f"TIFF file {sanitise(str(tiff_file))!r}: \n" + f"{traceback.format_exc()}" + ) # Add series information if provided if associated_series is not None: @@ -325,7 +354,12 @@ def register_tiff_file( # Link database entries clem_tiff_file.child_series = series_db_entry except Exception: - logger.warning(traceback.format_exc()) + logger.warning( + "Unable to register " + f"CLEM series {sanitise(associated_series)!r} in association with " + f"TIFF file {sanitise(str(tiff_file))!r}: \n" + f"{traceback.format_exc()}" + ) # Add image stack information if provided if associated_stack is not None: @@ -339,7 +373,11 @@ def register_tiff_file( # Link database entries clem_tiff_file.child_stack = stack_db_entry except Exception: - logger.warning(traceback.format_exc()) + logger.warning( + "Unable to register " + f"image stack {sanitise(str(associated_stack))!r} in association with " + f"{traceback.format_exc()}" + ) # Commit to database 
db.add(clem_tiff_file) @@ -368,7 +406,11 @@ def register_clem_metadata( file_path=metadata_file, ) except Exception: - logger.error(traceback.format_exc()) + logger.error( + "Exception encountered while registering " + f"metadata file {sanitise(str(metadata_file))!r}: \n" + f"{traceback.format_exc()}" + ) return False # Register a parent LIF file if provided @@ -383,7 +425,12 @@ def register_clem_metadata( # Link database entries clem_metadata.parent_lif = lif_db_entry except Exception: - logger.warning(traceback.format_exc()) + logger.warning( + "Unable to register " + f"LIF file {sanitise(str(parent_lif))!r} in association with " + f"metadata file {sanitise(str(metadata_file))!r}: \n" + f"{traceback.format_exc()}" + ) # Register associated TIFF files if provided for tiff in associated_tiffs: @@ -397,7 +444,12 @@ def register_clem_metadata( # Append entry clem_metadata.associated_tiffs.append(tiff_db_entry) except Exception: - logger.warning(traceback.format_exc()) + logger.warning( + "Unable to register " + f"TIFF file {sanitise(str(tiff))!r} in association with " + f"metadata file {sanitise(str(metadata_file))!r}: \n" + f"{traceback.format_exc()}" + ) continue # Register associated image series if provided @@ -414,7 +466,12 @@ def register_clem_metadata( db.add(series_db_entry) db.commit() except Exception: - logger.warning(traceback.format_exc()) + logger.warning( + "Unable to register " + f"CLEM series {sanitise(associated_series)!r} in association with " + f"metadata file {sanitise(str(metadata_file))!r}: \n" + f"{traceback.format_exc()}" + ) # Register associated image stacks if provided for stack in associated_stacks: @@ -427,7 +484,12 @@ def register_clem_metadata( ) clem_metadata.associated_stacks.append(stack_db_entry) except Exception: - logger.warning(traceback.format_exc()) + logger.warning( + "Unable to register " + f"image stack {sanitise(str(stack))!r} in association with " + f"metadata file {sanitise(str(metadata_file))!r}: \n" + 
f"{traceback.format_exc()}" + ) continue # Commit to database @@ -456,7 +518,11 @@ def register_image_series( series_name=series_name, ) except Exception: - logger.error(traceback.format_exc()) + logger.error( + "Exception encountered while registering " + f"CLEM series {sanitise(series_name)!r}: \n" + f"{traceback.format_exc()}" + ) return False # Register parent LIF file if provided @@ -471,7 +537,12 @@ def register_image_series( # Link entries clem_image_series.parent_lif = lif_db_entry except Exception: - logger.warning(traceback.format_exc()) + logger.warning( + "Unable to register " + f"LIF file {sanitise(str(parent_lif))!r} in association with " + f"CLEM series {sanitise(series_name)!r}: \n" + f"{traceback.format_exc()}" + ) # Register parent TIFFs if provided for tiff in parent_tiffs: @@ -485,7 +556,12 @@ def register_image_series( # Append entry clem_image_series.parent_tiffs.append(tiff_db_entry) except Exception: - logger.warning(traceback.format_exc()) + logger.warning( + "Unable to register " + f"TIFF file {sanitise(str(tiff))!r} in association with " + f"CLEM series {sanitise(series_name)!r}: \n" + f"{traceback.format_exc()}" + ) continue # Try next item in loop # Register associated metadata if provided @@ -500,7 +576,12 @@ def register_image_series( # Link entries clem_image_series.associated_metadata = metadata_db_entry except Exception: - logger.warning(traceback.format_exc()) + logger.warning( + "Unable to register " + f"metadata file {sanitise(str(associated_metadata))!r} in association with " + f"CLEM series {sanitise(series_name)!r}: \n" + f"{traceback.format_exc()}" + ) # Register child image stacks if provided for stack in child_stacks: @@ -514,7 +595,12 @@ def register_image_series( # Append entry clem_image_series.child_stacks.append(stack_db_entry) except Exception: - logger.warning(traceback.format_exc()) + logger.warning( + "Unable to register " + f"image stack {sanitise(str(stack))!r} in association with " + f"CLEM series 
{sanitise(series_name)!r}: \n" + f"{traceback.format_exc()}" + ) continue # Register @@ -544,7 +630,11 @@ def register_image_stack( file_path=image_stack, ) except Exception: - logger.error(traceback.format_exc()) + logger.error( + "Exception encountered while registering " + f"image stack {sanitise(str(image_stack))!r}: \n" + f"{traceback.format_exc()}" + ) return False # Register channel name if provided @@ -562,7 +652,12 @@ def register_image_stack( ) clem_image_stack.parent_lif = lif_db_entry except Exception: - logger.warning(traceback.format_exc()) + logger.warning( + "Unable to register " + f"LIF file {sanitise(str(parent_lif))!r} in association with " + f"image stack {sanitise(str(image_stack))!r}: \n" + f"{traceback.format_exc()}" + ) # Register parent TIFF files if provided for tiff in parent_tiffs: @@ -576,7 +671,12 @@ def register_image_stack( # Append entry clem_image_stack.parent_tiffs.append(tiff_db_entry) except Exception: - logger.warning(traceback.format_exc()) + logger.warning( + "Unable to register " + f"TIFF file {sanitise(str(tiff))!r} in association with " + f"image stack {sanitise(str(image_stack))!r}: \n" + f"{traceback.format_exc()}" + ) continue # Register associated metadata if provided @@ -591,7 +691,12 @@ def register_image_stack( # Link entries clem_image_stack.associated_metadata = metadata_db_entry except Exception: - logger.warning(traceback.format_exc()) + logger.warning( + "Unable to register " + f"metadata file {sanitise(str(associated_metadata))!r} in association with " + f"image stack {sanitise(str(image_stack))!r}: \n" + f"{traceback.format_exc()}" + ) # Register parent series if provided if parent_series is not None: @@ -605,7 +710,12 @@ def register_image_stack( # Link entries clem_image_stack.parent_series = series_db_entry except Exception: - logger.warning(traceback.format_exc()) + logger.warning( + "Unable to register " + f"CLEM series {sanitise(parent_series)!r} in association with " + f"image stack 
{sanitise(str(image_stack))!r}: \n" + f"{traceback.format_exc()}" + ) # Register updates to entry db.add(clem_image_stack) diff --git a/src/murfey/server/demo_api.py b/src/murfey/server/demo_api.py index 3c6b74886..18f08797d 100644 --- a/src/murfey/server/demo_api.py +++ b/src/murfey/server/demo_api.py @@ -40,7 +40,7 @@ from murfey.server.api import MurfeySessionID from murfey.server.api.auth import validate_token from murfey.server.murfey_db import murfey_db -from murfey.util.config import MachineConfig, from_file +from murfey.util.config import MachineConfig, from_file, security_from_file from murfey.util.db import ( AutoProcProgram, ClientEnvironment, @@ -962,9 +962,26 @@ def flush_spa_processing( return detached_ids = [c.id for c in collected_ids] - instrument_name = ( - db.exec(select(Session).where(Session.id == session_id)).one().instrument_name - ) + try: + instrument_name = ( + db.exec(select(Session).where(Session.id == session_id)) + .one() + .instrument_name + ) + except Exception: + log.error( + f"Unable to find a Murfey session associated with session ID {sanitise(str(session_id))}" + ) + return + + # Load the security config + security_config_file = machine_config[instrument_name].security_configuration_path + if not security_config_file: + log.error( + f"No security configuration file set for instrument {instrument_name!r}" + ) + return + security_config = security_from_file(security_config_file) murfey_ids = _murfey_id( detached_ids[3], db, number=2 * len(stashed_files), close=False @@ -986,7 +1003,7 @@ def flush_spa_processing( zocalo_message = { "recipes": ["em-spa-preprocess"], "parameters": { - "feedback_queue": machine_config[instrument_name].feedback_queue, + "feedback_queue": security_config.feedback_queue, "node_creator_queue": machine_config[ instrument_name ].node_creator_queue, diff --git a/src/murfey/workflows/clem/__init__.py b/src/murfey/workflows/clem/__init__.py index 789fbabbe..16ff4e4f7 100644 --- 
a/src/murfey/workflows/clem/__init__.py +++ b/src/murfey/workflows/clem/__init__.py @@ -64,16 +64,7 @@ def _validate_and_sanitise( machine_config = get_machine_config(instrument_name=instrument_name)[ instrument_name ] - rsync_basepath = machine_config.rsync_basepath - try: - base_path = list(rsync_basepath.parents)[-2].as_posix() - except IndexError: - logger.warning(f"Base path {rsync_basepath!r} is too short") - base_path = rsync_basepath.as_posix() - except Exception as e: - raise Exception( - f"Unexpected exception encountered when loading the file base path: {e}" - ) + rsync_basepath = machine_config.rsync_basepath.resolve() # Check that full file path doesn't contain unallowed characters # Currently allows only: @@ -86,13 +77,9 @@ def _validate_and_sanitise( raise ValueError(f"Unallowed characters present in {file}") # Check that it's not accessing somehwere it's not allowed - if not str(full_path).startswith(str(base_path)): + if not str(full_path).startswith(str(rsync_basepath)): raise ValueError(f"{file} points to a directory that is not permitted") - # Check that it's a file, not a directory - if full_path.is_file() is False: - raise ValueError(f"{file} is not a file") - # Check that it is of a permitted file type if f"{full_path.suffix}" not in valid_file_types: raise ValueError(f"{full_path.suffix} is not a permitted file format") @@ -180,7 +167,7 @@ def get_db_entry( ) db.add(db_entry) db.commit() - db.refresh(db_entry) + except Exception: raise Exception diff --git a/src/murfey/workflows/clem/align_and_merge.py b/src/murfey/workflows/clem/align_and_merge.py index c5df9fecf..efe358e07 100644 --- a/src/murfey/workflows/clem/align_and_merge.py +++ b/src/murfey/workflows/clem/align_and_merge.py @@ -37,7 +37,7 @@ def submit_cluster_request( # Load feedback queue machine_config = get_machine_config()[instrument_name] - feedback_queue: str = machine_config.feedback_queue + feedback_queue: str = messenger.feedback_queue # Work out session directory from file 
path processed_folder = machine_config.processed_directory_name diff --git a/src/murfey/workflows/clem/process_raw_lifs.py b/src/murfey/workflows/clem/process_raw_lifs.py index d02e6ac1e..1d56bff68 100644 --- a/src/murfey/workflows/clem/process_raw_lifs.py +++ b/src/murfey/workflows/clem/process_raw_lifs.py @@ -6,8 +6,6 @@ from pathlib import Path from typing import Optional -from murfey.util.config import get_machine_config - try: from murfey.server.ispyb import TransportManager # Session except AttributeError: @@ -43,8 +41,7 @@ def zocalo_cluster_request( ) # Load machine config to get the feedback queue - machine_config = get_machine_config() - feedback_queue = machine_config[instrument_name].feedback_queue + feedback_queue: str = messenger.feedback_queue # Send the message # The keys under "parameters" will populate all the matching fields in {} diff --git a/src/murfey/workflows/clem/process_raw_tiffs.py b/src/murfey/workflows/clem/process_raw_tiffs.py index dbca8577e..52c371092 100644 --- a/src/murfey/workflows/clem/process_raw_tiffs.py +++ b/src/murfey/workflows/clem/process_raw_tiffs.py @@ -6,8 +6,6 @@ from pathlib import Path from typing import Optional -from murfey.util.config import get_machine_config - try: from murfey.server.ispyb import TransportManager # Session except AttributeError: @@ -50,8 +48,7 @@ def zocalo_cluster_request( metadata = tiff_list[0].parent / "Metadata" / (series_name + ".xlif") # Load machine config to get the feedback queue - machine_config = get_machine_config() - feedback_queue = machine_config[instrument_name].feedback_queue + feedback_queue: str = messenger.feedback_queue messenger.send( "processing_recipe", diff --git a/src/murfey/workflows/clem/register_align_and_merge_results.py b/src/murfey/workflows/clem/register_align_and_merge_results.py index b02c56a30..f47c83bab 100644 --- a/src/murfey/workflows/clem/register_align_and_merge_results.py +++ b/src/murfey/workflows/clem/register_align_and_merge_results.py @@ -60,29 
+60,22 @@ def register_align_and_merge_result( ) # Validate message and try and load results - if isinstance(message["result"], str): - try: + try: + if isinstance(message["result"], str): json_obj: dict = json.loads(message["result"]) result = AlignAndMergeResult(**json_obj) - except Exception: - logger.error(traceback.format_exc()) - logger.error( - "Exception encountered when parsing align-and-merge processing result" - ) - return False - elif isinstance(message["result"], dict): - try: + elif isinstance(message["result"], dict): result = AlignAndMergeResult(**message["result"]) - except Exception: - logger.error(traceback.format_exc()) + else: logger.error( - "Exception encountered when parsing align-and-merge processing result" + "Invalid type for align-and-merge processing result: " + f"{type(message['result'])}" ) return False - else: + except Exception: logger.error( - "Invalid type for align-and-merge processing result: " - f"{type(message['result'])}" + "Exception encountered when parsing align-and-merge processing result: \n" + f"{traceback.format_exc()}" ) return False @@ -100,7 +93,6 @@ def register_align_and_merge_result( clem_img_series.composite_created = True murfey_db.add(clem_img_series) murfey_db.commit() - murfey_db.refresh(clem_img_series) logger.info( "Align-and-merge processing result registered for " @@ -108,10 +100,10 @@ def register_align_and_merge_result( ) except Exception: - logger.error(traceback.format_exc()) logger.error( "Exception encountered when registering LIF preprocessing result for " - f"{result.series_name!r} {result.channel!r} image stack" + f"{result.series_name!r} {result.channel!r} image stack: \n" + f"{traceback.format_exc()}" ) return False diff --git a/src/murfey/workflows/clem/register_preprocessing_results.py b/src/murfey/workflows/clem/register_preprocessing_results.py index 396f1109e..642165bc2 100644 --- a/src/murfey/workflows/clem/register_preprocessing_results.py +++ 
b/src/murfey/workflows/clem/register_preprocessing_results.py @@ -62,24 +62,21 @@ def register_lif_preprocessing_result( ) # Validate message and try and load results - if isinstance(message["result"], str): - try: + try: + if isinstance(message["result"], str): json_obj: dict = json.loads(message["result"]) result = LIFPreprocessingResult(**json_obj) - except Exception: - logger.error(traceback.format_exc()) - logger.error("Exception encountered when parsing LIF preprocessing result") - return False - elif isinstance(message["result"], dict): - try: + elif isinstance(message["result"], dict): result = LIFPreprocessingResult(**message["result"]) - except Exception: - logger.error(traceback.format_exc()) - logger.error("Exception encountered when parsing LIF preprocessing result") + else: + logger.error( + f"Invalid type for LIF preprocessing result: {type(message['result'])}" + ) return False - else: + except Exception: logger.error( - f"Invalid type for LIF preprocessing result: {type(message['result'])}" + "Exception encountered when parsing LIF preprocessing result: \n" + f"{traceback.format_exc()}" ) return False @@ -122,19 +119,16 @@ def register_lif_preprocessing_result( clem_img_stk.channel_name = result.channel murfey_db.add(clem_img_stk) murfey_db.commit() - murfey_db.refresh(clem_img_stk) clem_img_series.associated_metadata = clem_metadata clem_img_series.parent_lif = clem_lif_file clem_img_series.number_of_members = result.number_of_members murfey_db.add(clem_img_series) murfey_db.commit() - murfey_db.refresh(clem_img_series) clem_metadata.parent_lif = clem_lif_file murfey_db.add(clem_metadata) murfey_db.commit() - murfey_db.refresh(clem_metadata) logger.info( f"LIF preprocessing results registered for {result.series_name!r} " @@ -142,10 +136,10 @@ def register_lif_preprocessing_result( ) except Exception: - logger.error(traceback.format_exc()) logger.error( "Exception encountered when registering LIF preprocessing result for " - f"{result.series_name!r} 
{result.channel!r} image stack" + f"{result.series_name!r} {result.channel!r} image stack: \n" + f"{traceback.format_exc()}" ) return False @@ -170,9 +164,9 @@ def register_lif_preprocessing_result( .instrument_name ) except Exception: - logger.error(traceback.format_exc()) logger.error( - f"Error requesting data from database for {result.series_name!r} series" + f"Error requesting data from database for {result.series_name!r} series: \n" + f"{traceback.format_exc()}" ) return False @@ -247,24 +241,21 @@ def register_tiff_preprocessing_result( if not isinstance(message["session_id"], int) else message["session_id"] ) - if isinstance(message["result"], str): - try: + try: + if isinstance(message["result"], str): json_obj: dict = json.loads(message["result"]) result = TIFFPreprocessingResult(**json_obj) - except Exception: - logger.error(traceback.format_exc()) - logger.error("Exception encountered when parsing TIFF preprocessing result") - return False - elif isinstance(message["result"], dict): - try: + elif isinstance(message["result"], dict): result = TIFFPreprocessingResult(**message["result"]) - except Exception: - logger.error(traceback.format_exc()) - logger.error("Exception encountered when parsing TIFF preprocessing result") + else: + logger.error( + f"Invalid type for TIFF preprocessing result: {type(message['result'])}" + ) return False - else: + except Exception: logger.error( - f"Invalid type for TIFF preprocessing result: {type(message['result'])}" + "Exception encountered when parsing TIFF preprocessing result: \n" + f"{traceback.format_exc()}" ) return False @@ -305,20 +296,17 @@ def register_tiff_preprocessing_result( clem_tiff_file.child_stack = clem_img_stk murfey_db.add(clem_tiff_file) murfey_db.commit() - murfey_db.refresh(clem_tiff_file) clem_img_stk.associated_metadata = clem_metadata clem_img_stk.parent_series = clem_img_series clem_img_stk.channel_name = result.channel murfey_db.add(clem_img_stk) murfey_db.commit() - 
murfey_db.refresh(clem_img_stk) clem_img_series.associated_metadata = clem_metadata clem_img_series.number_of_members = result.number_of_members murfey_db.add(clem_img_series) murfey_db.commit() - murfey_db.refresh(clem_img_series) logger.info( f"TIFF preprocessing results registered for {result.series_name!r} " @@ -326,10 +314,10 @@ def register_tiff_preprocessing_result( ) except Exception: - logger.error(traceback.format_exc()) logger.error( "Exception encountered when registering TIFF preprocessing result for " - f"{result.series_name!r} {result.channel!r} image stack" + f"{result.series_name!r} {result.channel!r} image stack: \n" + f"{traceback.format_exc()}" ) return False @@ -354,9 +342,9 @@ def register_tiff_preprocessing_result( .instrument_name ) except Exception: - logger.error(traceback.format_exc()) logger.error( - f"Error requesting data from database for {result.series_name!r} series" + f"Error requesting data from database for {result.series_name!r} series: \n" + f"{traceback.format_exc()}" ) return False diff --git a/tests/workflows/clem/test_align_and_merge.py b/tests/workflows/clem/test_align_and_merge.py new file mode 100644 index 000000000..96e4cc430 --- /dev/null +++ b/tests/workflows/clem/test_align_and_merge.py @@ -0,0 +1,129 @@ +from pathlib import Path +from unittest.mock import MagicMock, patch + +import pytest + +from murfey.server.ispyb import TransportManager +from murfey.util.config import MachineConfig +from murfey.workflows.clem.align_and_merge import submit_cluster_request + +# Folder and file settings +session_id = 0 +instrument_name = "clem" +raw_folder = "images" +processed_folder = "processed" +visit_name = "cm12345-6" +area_name = "test_area" +series_name = "test_series" +colors = [ + "gray", + "green", + "red", +] +feedback_queue = "murfey_feedback" + +# Align and merge settings +crop_to_n_frames = 20 +align_self = "enabled" +flatten = "max" +align_across = "enabled" + + +@pytest.fixture +def processed_dir(tmp_path: Path): + 
processed_dir = tmp_path / visit_name / processed_folder + processed_dir.mkdir(parents=True, exist_ok=True) + return processed_dir + + +@pytest.fixture +def image_stacks(processed_dir: Path): + + image_dir = processed_dir / area_name / series_name + image_dir.mkdir(parents=True, exist_ok=True) + + images = [image_dir / f"{color}.tiff" for color in colors] + for image in images: + if not image.exists(): + image.touch() + + return images + + +@pytest.fixture +def metadata(processed_dir: Path): + + metadata_dir = processed_dir / area_name / series_name / "metadata" + metadata_dir.mkdir(parents=True, exist_ok=True) + + metadata = metadata_dir / f"{series_name}.xml" + if not metadata.exists(): + metadata.touch() + + return metadata + + +@patch("murfey.workflows.clem.align_and_merge.get_machine_config") +def test_submit_cluster_request( + mock_get_machine_config, + image_stacks: list[Path], + metadata: Path, + processed_dir: Path, +): + + # Construct the long series name + series_name_long = "--".join( + image_stacks[0].parent.relative_to(processed_dir).parts + ) + + # Create a mock transport object + mock_transport = MagicMock(spec=TransportManager) + mock_transport.feedback_queue = feedback_queue + + # Construct a mock MachineConfig object for use within the function + mock_machine_config = MagicMock(spec=MachineConfig) + mock_machine_config.processed_directory_name = processed_folder + mock_get_machine_config.return_value = { + instrument_name: mock_machine_config, + } + + # Run the function + submit_cluster_request( + session_id=session_id, + instrument_name=instrument_name, + series_name=series_name_long, + images=image_stacks, + metadata=metadata, + crop_to_n_frames=crop_to_n_frames, + align_self=align_self, + flatten=flatten, + align_across=align_across, + messenger=mock_transport, + ) + + # Construct expected recipe to be sent + sent_recipe = { + "recipes": ["clem-align-and-merge"], + "parameters": { + # Job parameters + "series_name": series_name_long, + 
"images": [str(file) for file in image_stacks], + "metadata": str(metadata), + "crop_to_n_frames": crop_to_n_frames, + "align_self": align_self, + "flatten": flatten, + "align_across": align_across, + # Other recipe parameters + "session_dir": str(processed_dir.parent), + "session_id": session_id, + "job_name": series_name_long, + "feedback_queue": feedback_queue, + }, + } + + # Check that it sends the expected recipe + mock_transport.send.assert_called_once_with( + "processing_recipe", + sent_recipe, + new_connection=True, + ) diff --git a/tests/workflows/clem/test_process_raw_lifs.py b/tests/workflows/clem/test_process_raw_lifs.py new file mode 100644 index 000000000..857b0979c --- /dev/null +++ b/tests/workflows/clem/test_process_raw_lifs.py @@ -0,0 +1,76 @@ +from pathlib import Path +from unittest.mock import MagicMock + +import pytest + +from murfey.server.ispyb import TransportManager +from murfey.workflows.clem.process_raw_lifs import zocalo_cluster_request + +# Set up variables +visit_name = "cm12345-6" +root_folder = "images" +session_id = 0 +instrument_name = "clem" +feedback_queue = "murfey_feedback" + + +@pytest.fixture +def raw_dir(tmp_path: Path): + raw_dir = tmp_path / visit_name / root_folder + raw_dir.mkdir(parents=True, exist_ok=True) + return raw_dir + + +@pytest.fixture +def lif_file(raw_dir: Path): + file = raw_dir / "test_file.lif" + if not file.exists(): + file.touch() + return file + + +def test_zocalo_cluster_request( + lif_file: Path, + raw_dir: Path, +): + + # Create a mock transport object + mock_transport = MagicMock(spec=TransportManager) + mock_transport.feedback_queue = feedback_queue + + # Run the function with the listed parameters + zocalo_cluster_request( + file=lif_file, + root_folder=root_folder, + session_id=session_id, + instrument_name=instrument_name, + messenger=mock_transport, + ) + + # Construct the recipe that we expect to send + job_name = "--".join( + [ + p.replace(" ", "_") if " " in p else p + for p in 
(lif_file.relative_to(raw_dir).parent / lif_file.stem).parts + ] + ) + sent_recipe = { + "recipes": ["clem-lif-to-stack"], + "parameters": { + # Job parameters + "lif_file": f"{str(lif_file)}", + "root_folder": root_folder, + # Other recipe parameters + "session_dir": f"{str(raw_dir.parent)}", + "session_id": session_id, + "job_name": job_name, + "feedback_queue": feedback_queue, + }, + } + + # Check that it sends the expected recipe + mock_transport.send.assert_called_once_with( + "processing_recipe", + sent_recipe, + new_connection=True, + ) diff --git a/tests/workflows/clem/test_process_raw_tiffs.py b/tests/workflows/clem/test_process_raw_tiffs.py new file mode 100644 index 000000000..885aa69a6 --- /dev/null +++ b/tests/workflows/clem/test_process_raw_tiffs.py @@ -0,0 +1,103 @@ +from pathlib import Path +from unittest.mock import MagicMock + +import pytest + +from murfey.server.ispyb import TransportManager +from murfey.workflows.clem.process_raw_tiffs import zocalo_cluster_request + +# Set up variables +session_id = 0 +instrument_name = "clem" +root_folder = "images" +visit_name = "cm12345-6" +area_name = "test_area" +feedback_queue = "murfey_feedback" + +# Properties for TIFF images +num_z = 5 +num_c = 3 + + +@pytest.fixture +def raw_dir(tmp_path: Path): + raw_dir = tmp_path / visit_name / root_folder + raw_dir.mkdir(parents=True, exist_ok=True) + return raw_dir + + +@pytest.fixture +def tiff_list(raw_dir: Path): + (raw_dir / area_name).mkdir(parents=True, exist_ok=True) + tiff_list = [ + raw_dir / area_name / f"test_series--Z{str(z).zfill(2)}--C{str(c).zfill(2)}.tif" + for z in range(num_z) + for c in range(num_c) + ] + for file in tiff_list: + if not file.exists(): + file.touch() + return tiff_list + + +@pytest.fixture +def metadata(raw_dir: Path): + (raw_dir / area_name / "Metadata").mkdir(parents=True, exist_ok=True) + metadata = raw_dir / area_name / "Metadata" / "test_series.xlif" + if not metadata.exists(): + metadata.touch() + return metadata + + +def 
test_zocalo_cluster_request( + tiff_list: list[Path], + metadata: Path, + raw_dir: Path, +): + + # Create a mock transport object + mock_transport = MagicMock(spec=TransportManager) + mock_transport.feedback_queue = feedback_queue + + # Run the function with the listed parameters + zocalo_cluster_request( + tiff_list=tiff_list, + root_folder=root_folder, + session_id=session_id, + instrument_name=instrument_name, + metadata=metadata, + messenger=mock_transport, + ) + + # Construct the recipe that we expect to send + job_name = "--".join( + [ + p.replace(" ", "_") if " " in p else p + for p in ( + tiff_list[0].parent.relative_to(raw_dir) + / tiff_list[0].stem.split("--")[0] + ).parts + ] + ) + sent_recipe = { + "recipes": ["clem-tiff-to-stack"], + "parameters": { + # Job parameters + "tiff_list": "null", + "tiff_file": f"{str(tiff_list[0])}", + "root_folder": root_folder, + "metadata": f"{str(metadata)}", + # Other recipe parameters + "session_dir": f"{str(raw_dir.parent)}", + "session_id": session_id, + "job_name": job_name, + "feedback_queue": feedback_queue, + }, + } + + # Check that it sends the expected recipe + mock_transport.send.assert_called_once_with( + "processing_recipe", + sent_recipe, + new_connection=True, + )